mirror of
https://codeberg.org/ziglang/zig.git
synced 2026-04-27 19:09:47 +03:00
Merge remote-tracking branch 'origin/master' into llvm15
This commit is contained in:
+76
-66
@@ -12,7 +12,7 @@ if(NOT CMAKE_BUILD_TYPE)
|
||||
endif()
|
||||
|
||||
if(NOT CMAKE_INSTALL_PREFIX)
|
||||
set(CMAKE_INSTALL_PREFIX "${CMAKE_BINARY_DIR}/stage1" CACHE STRING
|
||||
set(CMAKE_INSTALL_PREFIX "${CMAKE_BINARY_DIR}/stage3" CACHE STRING
|
||||
"Directory to install zig to" FORCE)
|
||||
endif()
|
||||
|
||||
@@ -65,6 +65,9 @@ if("${ZIG_VERSION}" STREQUAL "")
|
||||
endif()
|
||||
message(STATUS "Configuring zig version ${ZIG_VERSION}")
|
||||
|
||||
set(ZIG_SKIP_INSTALL_LIB_FILES off CACHE BOOL
|
||||
"Disable copying lib/ files to install prefix during the build phase")
|
||||
|
||||
set(ZIG_STATIC off CACHE BOOL "Attempt to build a static zig executable (not compatible with glibc)")
|
||||
set(ZIG_SHARED_LLVM off CACHE BOOL "Prefer linking against shared LLVM libraries")
|
||||
set(ZIG_STATIC_LLVM off CACHE BOOL "Prefer linking against static LLVM libraries")
|
||||
@@ -333,7 +336,7 @@ set(ZIG_CONFIG_H_OUT "${CMAKE_BINARY_DIR}/config.h")
|
||||
set(ZIG_CONFIG_ZIG_OUT "${CMAKE_BINARY_DIR}/config.zig")
|
||||
|
||||
# This is our shim which will be replaced by stage1.zig.
|
||||
set(ZIG0_SOURCES
|
||||
set(ZIG1_SOURCES
|
||||
"${CMAKE_SOURCE_DIR}/src/stage1/zig0.cpp"
|
||||
)
|
||||
|
||||
@@ -373,9 +376,9 @@ set(ZIG_CPP_SOURCES
|
||||
# https://github.com/ziglang/zig/issues/6363
|
||||
"${CMAKE_SOURCE_DIR}/src/windows_sdk.cpp"
|
||||
)
|
||||
# Needed because we use cmake, not the zig build system, to build zig1.o.
|
||||
# Needed because we use cmake, not the zig build system, to build zig2.o.
|
||||
# This list is generated by building zig and then clearing the zig-cache directory,
|
||||
# then manually running the build-obj command (see BUILD_ZIG1_ARGS), and then looking
|
||||
# then manually running the build-obj command (see BUILD_ZIG2_ARGS), and then looking
|
||||
# in the zig-cache directory for the compiler-generated list of zig file dependencies.
|
||||
set(ZIG_STAGE2_SOURCES
|
||||
"${ZIG_CONFIG_ZIG_OUT}"
|
||||
@@ -942,40 +945,51 @@ if(MSVC OR MINGW)
|
||||
endif()
|
||||
|
||||
if("${ZIG_EXECUTABLE}" STREQUAL "")
|
||||
add_executable(zig0 ${ZIG0_SOURCES})
|
||||
set_target_properties(zig0 PROPERTIES
|
||||
add_executable(zig1 ${ZIG1_SOURCES})
|
||||
set_target_properties(zig1 PROPERTIES
|
||||
COMPILE_FLAGS ${EXE_CFLAGS}
|
||||
LINK_FLAGS ${EXE_LDFLAGS}
|
||||
)
|
||||
target_link_libraries(zig0 zigstage1)
|
||||
target_link_libraries(zig1 zigstage1)
|
||||
endif()
|
||||
|
||||
if(MSVC)
|
||||
set(ZIG1_OBJECT "${CMAKE_BINARY_DIR}/zig1.obj")
|
||||
set(ZIG2_OBJECT "${CMAKE_BINARY_DIR}/zig2.obj")
|
||||
else()
|
||||
set(ZIG1_OBJECT "${CMAKE_BINARY_DIR}/zig1.o")
|
||||
set(ZIG2_OBJECT "${CMAKE_BINARY_DIR}/zig2.o")
|
||||
endif()
|
||||
if("${CMAKE_BUILD_TYPE}" STREQUAL "Debug")
|
||||
set(ZIG1_RELEASE_ARG "")
|
||||
set(ZIG_RELEASE_ARG "")
|
||||
elseif("${CMAKE_BUILD_TYPE}" STREQUAL "RelWithDebInfo")
|
||||
set(ZIG_RELEASE_ARG -Drelease)
|
||||
else()
|
||||
set(ZIG1_RELEASE_ARG -OReleaseFast --strip)
|
||||
set(ZIG_RELEASE_ARG -Drelease -Dstrip)
|
||||
endif()
|
||||
if(ZIG_SKIP_INSTALL_LIB_FILES)
|
||||
set(ZIG_SKIP_INSTALL_LIB_FILES_ARG "-Dskip-install-lib-files")
|
||||
else()
|
||||
set(ZIG_SKIP_INSTALL_LIB_FILES_ARG "-Dskip-install-lib-files=false")
|
||||
endif()
|
||||
if(ZIG_SINGLE_THREADED)
|
||||
set(ZIG1_SINGLE_THREADED_ARG "-fsingle-threaded")
|
||||
set(ZIG_SINGLE_THREADED_ARG "-fsingle-threaded")
|
||||
else()
|
||||
set(ZIG1_SINGLE_THREADED_ARG "")
|
||||
set(ZIG_SINGLE_THREADED_ARG "")
|
||||
endif()
|
||||
if(ZIG_STATIC)
|
||||
set(ZIG_STATIC_ARG "-Duse-zig-libcxx")
|
||||
else()
|
||||
set(ZIG_STATIC_ARG "")
|
||||
endif()
|
||||
|
||||
set(BUILD_ZIG1_ARGS
|
||||
set(BUILD_ZIG2_ARGS
|
||||
"src/stage1.zig"
|
||||
-target "${ZIG_TARGET_TRIPLE}"
|
||||
"-mcpu=${ZIG_TARGET_MCPU}"
|
||||
--name zig1
|
||||
--name zig2
|
||||
--zig-lib-dir "${CMAKE_SOURCE_DIR}/lib"
|
||||
"-femit-bin=${ZIG1_OBJECT}"
|
||||
"-femit-bin=${ZIG2_OBJECT}"
|
||||
-fcompiler-rt
|
||||
"${ZIG1_RELEASE_ARG}"
|
||||
"${ZIG1_SINGLE_THREADED_ARG}"
|
||||
${ZIG_SINGLE_THREADED_ARG}
|
||||
-target "${ZIG_TARGET_TRIPLE}"
|
||||
-mcpu "${ZIG_TARGET_MCPU}"
|
||||
-lc
|
||||
--pkg-begin build_options "${ZIG_CONFIG_ZIG_OUT}"
|
||||
--pkg-end
|
||||
@@ -985,68 +999,64 @@ set(BUILD_ZIG1_ARGS
|
||||
|
||||
if("${ZIG_EXECUTABLE}" STREQUAL "")
|
||||
add_custom_command(
|
||||
OUTPUT "${ZIG1_OBJECT}"
|
||||
COMMAND zig0 ${BUILD_ZIG1_ARGS}
|
||||
DEPENDS zig0 "${ZIG_STAGE2_SOURCES}"
|
||||
COMMENT STATUS "Building self-hosted component ${ZIG1_OBJECT}"
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
OUTPUT "${ZIG2_OBJECT}"
|
||||
COMMAND zig1 ${BUILD_ZIG2_ARGS}
|
||||
DEPENDS zig1 "${ZIG_STAGE2_SOURCES}"
|
||||
COMMENT STATUS "Building stage2 object ${ZIG2_OBJECT}"
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
)
|
||||
set(ZIG_EXECUTABLE "${zig_BINARY_DIR}/zig")
|
||||
if (WIN32)
|
||||
set(ZIG_EXECUTABLE "${ZIG_EXECUTABLE}.exe")
|
||||
set(ZIG_EXECUTABLE "${zig2_BINARY_DIR}/zig2.exe")
|
||||
else()
|
||||
set(ZIG_EXECUTABLE "${zig2_BINARY_DIR}/zig2")
|
||||
endif()
|
||||
else()
|
||||
add_custom_command(
|
||||
OUTPUT "${ZIG1_OBJECT}"
|
||||
COMMAND "${ZIG_EXECUTABLE}" "build-obj" ${BUILD_ZIG1_ARGS}
|
||||
DEPENDS ${ZIG_STAGE2_SOURCES}
|
||||
COMMENT STATUS "Building self-hosted component ${ZIG1_OBJECT}"
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
OUTPUT "${ZIG2_OBJECT}"
|
||||
COMMAND "${ZIG_EXECUTABLE}" "build-obj" ${BUILD_ZIG2_ARGS}
|
||||
DEPENDS ${ZIG_STAGE2_SOURCES}
|
||||
COMMENT STATUS "Building stage2 component ${ZIG2_OBJECT}"
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
)
|
||||
endif()
|
||||
|
||||
# cmake won't let us configure an executable without C sources.
|
||||
add_executable(zig "${CMAKE_SOURCE_DIR}/src/stage1/empty.cpp" "${ZIG1_OBJECT}")
|
||||
add_executable(zig2 "${CMAKE_SOURCE_DIR}/src/stage1/empty.cpp" "${ZIG2_OBJECT}")
|
||||
|
||||
set_target_properties(zig PROPERTIES
|
||||
set_target_properties(zig2 PROPERTIES
|
||||
COMPILE_FLAGS ${EXE_CFLAGS}
|
||||
LINK_FLAGS ${EXE_LDFLAGS}
|
||||
)
|
||||
target_link_libraries(zig zigstage1)
|
||||
target_link_libraries(zig2 zigstage1)
|
||||
if(MSVC)
|
||||
target_link_libraries(zig ntdll.lib)
|
||||
target_link_libraries(zig2 ntdll.lib)
|
||||
elseif(MINGW)
|
||||
target_link_libraries(zig ntdll)
|
||||
target_link_libraries(zig2 ntdll)
|
||||
endif()
|
||||
|
||||
install(TARGETS zig DESTINATION bin)
|
||||
|
||||
set(ZIG_SKIP_INSTALL_LIB_FILES off CACHE BOOL
|
||||
"Disable copying lib/ files to install prefix during the build phase")
|
||||
|
||||
# Dummy install command so that the "install" target is not missing.
|
||||
# This is redundant from the "stage3" custom target below.
|
||||
if(NOT ZIG_SKIP_INSTALL_LIB_FILES)
|
||||
set(ZIG_INSTALL_ARGS "build"
|
||||
--zig-lib-dir "${CMAKE_SOURCE_DIR}/lib"
|
||||
"-Dlib-files-only"
|
||||
--prefix "${CMAKE_INSTALL_PREFIX}"
|
||||
"-Dconfig_h=${ZIG_CONFIG_H_OUT}"
|
||||
install
|
||||
)
|
||||
|
||||
# CODE has no effect with Visual Studio build system generator, therefore
|
||||
# when using Visual Studio build system generator we resort to running
|
||||
# `zig build install` during the build phase.
|
||||
if(MSVC)
|
||||
add_custom_target(zig_install_lib_files ALL
|
||||
COMMAND zig ${ZIG_INSTALL_ARGS}
|
||||
DEPENDS zig
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
)
|
||||
else()
|
||||
get_target_property(zig_BINARY_DIR zig BINARY_DIR)
|
||||
install(CODE "set(zig_EXE \"${ZIG_EXECUTABLE}\")")
|
||||
install(CODE "set(ZIG_INSTALL_ARGS \"${ZIG_INSTALL_ARGS}\")")
|
||||
install(CODE "set(CMAKE_SOURCE_DIR \"${CMAKE_SOURCE_DIR}\")")
|
||||
install(SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/cmake/install.cmake)
|
||||
endif()
|
||||
install(FILES "lib/compiler_rt.zig" DESTINATION "lib/zig")
|
||||
endif()
|
||||
|
||||
set(ZIG_INSTALL_ARGS "build"
|
||||
--zig-lib-dir "${CMAKE_SOURCE_DIR}/lib"
|
||||
--prefix "${CMAKE_INSTALL_PREFIX}"
|
||||
"-Dconfig_h=${ZIG_CONFIG_H_OUT}"
|
||||
"-Denable-llvm"
|
||||
"-Denable-stage1"
|
||||
${ZIG_RELEASE_ARG}
|
||||
${ZIG_STATIC_ARG}
|
||||
${ZIG_SKIP_INSTALL_LIB_FILES_ARG}
|
||||
${ZIG_SINGLE_THREADED_ARG}
|
||||
"-Dtarget=${ZIG_TARGET_TRIPLE}"
|
||||
"-Dcpu=${ZIG_TARGET_MCPU}"
|
||||
)
|
||||
|
||||
add_custom_target(stage3 ALL
|
||||
COMMAND zig2 ${ZIG_INSTALL_ARGS}
|
||||
DEPENDS zig2
|
||||
COMMENT STATUS "Building stage3"
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
)
|
||||
|
||||
@@ -15,6 +15,7 @@ const stack_size = 32 * 1024 * 1024;
|
||||
|
||||
pub fn build(b: *Builder) !void {
|
||||
b.setPreferredReleaseMode(.ReleaseFast);
|
||||
const test_step = b.step("test", "Run all the tests");
|
||||
const mode = b.standardReleaseOptions();
|
||||
const target = b.standardTargetOptions(.{});
|
||||
const single_threaded = b.option(bool, "single-threaded", "Build artifacts that run in single threaded mode");
|
||||
@@ -39,8 +40,6 @@ pub fn build(b: *Builder) !void {
|
||||
const docs_step = b.step("docs", "Build documentation");
|
||||
docs_step.dependOn(&docgen_cmd.step);
|
||||
|
||||
const toolchain_step = b.step("test-toolchain", "Run the tests for the toolchain");
|
||||
|
||||
var test_cases = b.addTest("src/test.zig");
|
||||
test_cases.stack_size = stack_size;
|
||||
test_cases.setBuildMode(mode);
|
||||
@@ -64,10 +63,9 @@ pub fn build(b: *Builder) !void {
|
||||
|
||||
const only_install_lib_files = b.option(bool, "lib-files-only", "Only install library files") orelse false;
|
||||
|
||||
const is_stage1 = b.option(bool, "stage1", "Build the stage1 compiler, put stage2 behind a feature flag") orelse false;
|
||||
const omit_stage2 = b.option(bool, "omit-stage2", "Do not include stage2 behind a feature flag inside stage1") orelse false;
|
||||
const have_stage1 = b.option(bool, "enable-stage1", "Include the stage1 compiler behind a feature flag") orelse false;
|
||||
const static_llvm = b.option(bool, "static-llvm", "Disable integration with system-installed LLVM, Clang, LLD, and libc++") orelse false;
|
||||
const enable_llvm = b.option(bool, "enable-llvm", "Build self-hosted compiler with LLVM backend enabled") orelse (is_stage1 or static_llvm);
|
||||
const enable_llvm = b.option(bool, "enable-llvm", "Build self-hosted compiler with LLVM backend enabled") orelse (have_stage1 or static_llvm);
|
||||
const llvm_has_m68k = b.option(
|
||||
bool,
|
||||
"llvm-has-m68k",
|
||||
@@ -137,7 +135,7 @@ pub fn build(b: *Builder) !void {
|
||||
};
|
||||
|
||||
const main_file: ?[]const u8 = mf: {
|
||||
if (!is_stage1) break :mf "src/main.zig";
|
||||
if (!have_stage1) break :mf "src/main.zig";
|
||||
if (use_zig0) break :mf null;
|
||||
break :mf "src/stage1.zig";
|
||||
};
|
||||
@@ -150,7 +148,7 @@ pub fn build(b: *Builder) !void {
|
||||
exe.setBuildMode(mode);
|
||||
exe.setTarget(target);
|
||||
if (!skip_stage2_tests) {
|
||||
toolchain_step.dependOn(&exe.step);
|
||||
test_step.dependOn(&exe.step);
|
||||
}
|
||||
|
||||
b.default_step.dependOn(&exe.step);
|
||||
@@ -248,7 +246,7 @@ pub fn build(b: *Builder) !void {
|
||||
}
|
||||
};
|
||||
|
||||
if (is_stage1) {
|
||||
if (have_stage1) {
|
||||
const softfloat = b.addStaticLibrary("softfloat", null);
|
||||
softfloat.setBuildMode(.ReleaseFast);
|
||||
softfloat.setTarget(target);
|
||||
@@ -360,8 +358,7 @@ pub fn build(b: *Builder) !void {
|
||||
exe_options.addOption(bool, "enable_tracy_callstack", tracy_callstack);
|
||||
exe_options.addOption(bool, "enable_tracy_allocation", tracy_allocation);
|
||||
exe_options.addOption(bool, "value_tracing", value_tracing);
|
||||
exe_options.addOption(bool, "is_stage1", is_stage1);
|
||||
exe_options.addOption(bool, "omit_stage2", omit_stage2);
|
||||
exe_options.addOption(bool, "have_stage1", have_stage1);
|
||||
if (tracy) |tracy_path| {
|
||||
const client_cpp = fs.path.join(
|
||||
b.allocator,
|
||||
@@ -396,8 +393,7 @@ pub fn build(b: *Builder) !void {
|
||||
test_cases_options.addOption(bool, "enable_link_snapshots", enable_link_snapshots);
|
||||
test_cases_options.addOption(bool, "skip_non_native", skip_non_native);
|
||||
test_cases_options.addOption(bool, "skip_stage1", skip_stage1);
|
||||
test_cases_options.addOption(bool, "is_stage1", is_stage1);
|
||||
test_cases_options.addOption(bool, "omit_stage2", omit_stage2);
|
||||
test_cases_options.addOption(bool, "have_stage1", have_stage1);
|
||||
test_cases_options.addOption(bool, "have_llvm", enable_llvm);
|
||||
test_cases_options.addOption(bool, "llvm_has_m68k", llvm_has_m68k);
|
||||
test_cases_options.addOption(bool, "llvm_has_csky", llvm_has_csky);
|
||||
@@ -418,7 +414,7 @@ pub fn build(b: *Builder) !void {
|
||||
const test_cases_step = b.step("test-cases", "Run the main compiler test cases");
|
||||
test_cases_step.dependOn(&test_cases.step);
|
||||
if (!skip_stage2_tests) {
|
||||
toolchain_step.dependOn(test_cases_step);
|
||||
test_step.dependOn(test_cases_step);
|
||||
}
|
||||
|
||||
var chosen_modes: [4]builtin.Mode = undefined;
|
||||
@@ -442,11 +438,11 @@ pub fn build(b: *Builder) !void {
|
||||
const modes = chosen_modes[0..chosen_mode_index];
|
||||
|
||||
// run stage1 `zig fmt` on this build.zig file just to make sure it works
|
||||
toolchain_step.dependOn(&fmt_build_zig.step);
|
||||
test_step.dependOn(&fmt_build_zig.step);
|
||||
const fmt_step = b.step("test-fmt", "Run zig fmt against build.zig to make sure it works");
|
||||
fmt_step.dependOn(&fmt_build_zig.step);
|
||||
|
||||
toolchain_step.dependOn(tests.addPkgTests(
|
||||
test_step.dependOn(tests.addPkgTests(
|
||||
b,
|
||||
test_filter,
|
||||
"test/behavior.zig",
|
||||
@@ -457,11 +453,10 @@ pub fn build(b: *Builder) !void {
|
||||
skip_non_native,
|
||||
skip_libc,
|
||||
skip_stage1,
|
||||
omit_stage2,
|
||||
is_stage1,
|
||||
skip_stage2_tests,
|
||||
));
|
||||
|
||||
toolchain_step.dependOn(tests.addPkgTests(
|
||||
test_step.dependOn(tests.addPkgTests(
|
||||
b,
|
||||
test_filter,
|
||||
"lib/compiler_rt.zig",
|
||||
@@ -472,11 +467,10 @@ pub fn build(b: *Builder) !void {
|
||||
skip_non_native,
|
||||
true, // skip_libc
|
||||
skip_stage1,
|
||||
omit_stage2 or true, // TODO get these all passing
|
||||
is_stage1,
|
||||
skip_stage2_tests or true, // TODO get these all passing
|
||||
));
|
||||
|
||||
toolchain_step.dependOn(tests.addPkgTests(
|
||||
test_step.dependOn(tests.addPkgTests(
|
||||
b,
|
||||
test_filter,
|
||||
"lib/c.zig",
|
||||
@@ -487,37 +481,36 @@ pub fn build(b: *Builder) !void {
|
||||
skip_non_native,
|
||||
true, // skip_libc
|
||||
skip_stage1,
|
||||
omit_stage2 or true, // TODO get these all passing
|
||||
is_stage1,
|
||||
skip_stage2_tests or true, // TODO get these all passing
|
||||
));
|
||||
|
||||
toolchain_step.dependOn(tests.addCompareOutputTests(b, test_filter, modes));
|
||||
toolchain_step.dependOn(tests.addStandaloneTests(
|
||||
test_step.dependOn(tests.addCompareOutputTests(b, test_filter, modes));
|
||||
test_step.dependOn(tests.addStandaloneTests(
|
||||
b,
|
||||
test_filter,
|
||||
modes,
|
||||
skip_non_native,
|
||||
enable_macos_sdk,
|
||||
target,
|
||||
omit_stage2,
|
||||
skip_stage2_tests,
|
||||
b.enable_darling,
|
||||
b.enable_qemu,
|
||||
b.enable_rosetta,
|
||||
b.enable_wasmtime,
|
||||
b.enable_wine,
|
||||
));
|
||||
toolchain_step.dependOn(tests.addLinkTests(b, test_filter, modes, enable_macos_sdk, omit_stage2));
|
||||
toolchain_step.dependOn(tests.addStackTraceTests(b, test_filter, modes));
|
||||
toolchain_step.dependOn(tests.addCliTests(b, test_filter, modes));
|
||||
toolchain_step.dependOn(tests.addAssembleAndLinkTests(b, test_filter, modes));
|
||||
toolchain_step.dependOn(tests.addTranslateCTests(b, test_filter));
|
||||
test_step.dependOn(tests.addLinkTests(b, test_filter, modes, enable_macos_sdk, skip_stage2_tests));
|
||||
test_step.dependOn(tests.addStackTraceTests(b, test_filter, modes));
|
||||
test_step.dependOn(tests.addCliTests(b, test_filter, modes));
|
||||
test_step.dependOn(tests.addAssembleAndLinkTests(b, test_filter, modes));
|
||||
test_step.dependOn(tests.addTranslateCTests(b, test_filter));
|
||||
if (!skip_run_translated_c) {
|
||||
toolchain_step.dependOn(tests.addRunTranslatedCTests(b, test_filter, target));
|
||||
test_step.dependOn(tests.addRunTranslatedCTests(b, test_filter, target));
|
||||
}
|
||||
// tests for this feature are disabled until we have the self-hosted compiler available
|
||||
// toolchain_step.dependOn(tests.addGenHTests(b, test_filter));
|
||||
// test_step.dependOn(tests.addGenHTests(b, test_filter));
|
||||
|
||||
const std_step = tests.addPkgTests(
|
||||
test_step.dependOn(tests.addPkgTests(
|
||||
b,
|
||||
test_filter,
|
||||
"lib/std/std.zig",
|
||||
@@ -528,14 +521,8 @@ pub fn build(b: *Builder) !void {
|
||||
skip_non_native,
|
||||
skip_libc,
|
||||
skip_stage1,
|
||||
omit_stage2 or true, // TODO get these all passing
|
||||
is_stage1,
|
||||
);
|
||||
|
||||
const test_step = b.step("test", "Run all the tests");
|
||||
test_step.dependOn(toolchain_step);
|
||||
test_step.dependOn(std_step);
|
||||
test_step.dependOn(docs_step);
|
||||
true, // TODO get these all passing
|
||||
));
|
||||
}
|
||||
|
||||
const exe_cflags = [_][]const u8{
|
||||
|
||||
@@ -1,976 +0,0 @@
|
||||
const std = @import("std");
|
||||
const builtin = std.builtin;
|
||||
const Builder = std.build.Builder;
|
||||
const BufMap = std.BufMap;
|
||||
const mem = std.mem;
|
||||
const ArrayList = std.ArrayList;
|
||||
const io = std.io;
|
||||
const fs = std.fs;
|
||||
const InstallDirectoryOptions = std.build.InstallDirectoryOptions;
|
||||
const assert = std.debug.assert;
|
||||
|
||||
const zig_version = std.builtin.Version{ .major = 0, .minor = 10, .patch = 0 };
|
||||
|
||||
pub fn build(b: *Builder) !void {
|
||||
b.setPreferredReleaseMode(.ReleaseFast);
|
||||
const mode = b.standardReleaseOptions();
|
||||
const target = b.standardTargetOptions(.{});
|
||||
const single_threaded = b.option(bool, "single-threaded", "Build artifacts that run in single threaded mode");
|
||||
const use_zig_libcxx = b.option(bool, "use-zig-libcxx", "If libc++ is needed, use zig's bundled version, don't try to integrate with the system") orelse false;
|
||||
|
||||
const docgen_exe = b.addExecutable("docgen", "doc/docgen.zig");
|
||||
docgen_exe.single_threaded = single_threaded;
|
||||
|
||||
const rel_zig_exe = try fs.path.relative(b.allocator, b.build_root, b.zig_exe);
|
||||
const langref_out_path = fs.path.join(
|
||||
b.allocator,
|
||||
&[_][]const u8{ b.cache_root, "langref.html" },
|
||||
) catch unreachable;
|
||||
const docgen_cmd = docgen_exe.run();
|
||||
docgen_cmd.addArgs(&[_][]const u8{
|
||||
rel_zig_exe,
|
||||
"doc" ++ fs.path.sep_str ++ "langref.html.in",
|
||||
langref_out_path,
|
||||
});
|
||||
docgen_cmd.step.dependOn(&docgen_exe.step);
|
||||
|
||||
const docs_step = b.step("docs", "Build documentation");
|
||||
docs_step.dependOn(&docgen_cmd.step);
|
||||
|
||||
const is_stage1 = b.option(bool, "stage1", "Build the stage1 compiler, put stage2 behind a feature flag") orelse false;
|
||||
const omit_stage2 = b.option(bool, "omit-stage2", "Do not include stage2 behind a feature flag inside stage1") orelse false;
|
||||
const static_llvm = b.option(bool, "static-llvm", "Disable integration with system-installed LLVM, Clang, LLD, and libc++") orelse false;
|
||||
const enable_llvm = b.option(bool, "enable-llvm", "Build self-hosted compiler with LLVM backend enabled") orelse (is_stage1 or static_llvm);
|
||||
const llvm_has_m68k = b.option(
|
||||
bool,
|
||||
"llvm-has-m68k",
|
||||
"Whether LLVM has the experimental target m68k enabled",
|
||||
) orelse false;
|
||||
const llvm_has_csky = b.option(
|
||||
bool,
|
||||
"llvm-has-csky",
|
||||
"Whether LLVM has the experimental target csky enabled",
|
||||
) orelse false;
|
||||
const llvm_has_arc = b.option(
|
||||
bool,
|
||||
"llvm-has-arc",
|
||||
"Whether LLVM has the experimental target arc enabled",
|
||||
) orelse false;
|
||||
const config_h_path_option = b.option([]const u8, "config_h", "Path to the generated config.h");
|
||||
|
||||
b.installDirectory(InstallDirectoryOptions{
|
||||
.source_dir = "lib",
|
||||
.install_dir = .lib,
|
||||
.install_subdir = "zig",
|
||||
.exclude_extensions = &[_][]const u8{
|
||||
// exclude files from lib/std/compress/
|
||||
".gz",
|
||||
".z.0",
|
||||
".z.9",
|
||||
"rfc1951.txt",
|
||||
"rfc1952.txt",
|
||||
// exclude files from lib/std/compress/deflate/testdata
|
||||
".expect",
|
||||
".expect-noinput",
|
||||
".golden",
|
||||
".input",
|
||||
"compress-e.txt",
|
||||
"compress-gettysburg.txt",
|
||||
"compress-pi.txt",
|
||||
"rfc1951.txt",
|
||||
// exclude files from lib/std/tz/
|
||||
".tzif",
|
||||
// others
|
||||
"README.md",
|
||||
},
|
||||
.blank_extensions = &[_][]const u8{
|
||||
"test.zig",
|
||||
},
|
||||
});
|
||||
|
||||
const tracy = b.option([]const u8, "tracy", "Enable Tracy integration. Supply path to Tracy source");
|
||||
const tracy_callstack = b.option(bool, "tracy-callstack", "Include callstack information with Tracy data. Does nothing if -Dtracy is not provided") orelse false;
|
||||
const tracy_allocation = b.option(bool, "tracy-allocation", "Include allocation information with Tracy data. Does nothing if -Dtracy is not provided") orelse false;
|
||||
const force_gpa = b.option(bool, "force-gpa", "Force the compiler to use GeneralPurposeAllocator") orelse false;
|
||||
const link_libc = b.option(bool, "force-link-libc", "Force self-hosted compiler to link libc") orelse enable_llvm;
|
||||
const strip = b.option(bool, "strip", "Omit debug information") orelse false;
|
||||
const value_tracing = b.option(bool, "value-tracing", "Enable extra state tracking to help troubleshoot bugs in the compiler (using the std.debug.Trace API)") orelse false;
|
||||
|
||||
const mem_leak_frames: u32 = b.option(u32, "mem-leak-frames", "How many stack frames to print when a memory leak occurs. Tests get 2x this amount.") orelse blk: {
|
||||
if (strip) break :blk @as(u32, 0);
|
||||
if (mode != .Debug) break :blk 0;
|
||||
break :blk 4;
|
||||
};
|
||||
|
||||
const main_file: ?[]const u8 = if (is_stage1) null else "src/main.zig";
|
||||
|
||||
const exe = b.addExecutable("zig", main_file);
|
||||
exe.strip = strip;
|
||||
exe.install();
|
||||
exe.setBuildMode(mode);
|
||||
exe.setTarget(target);
|
||||
|
||||
b.default_step.dependOn(&exe.step);
|
||||
exe.single_threaded = single_threaded;
|
||||
|
||||
if (target.isWindows() and target.getAbi() == .gnu) {
|
||||
// LTO is currently broken on mingw, this can be removed when it's fixed.
|
||||
exe.want_lto = false;
|
||||
}
|
||||
|
||||
const exe_options = b.addOptions();
|
||||
exe.addOptions("build_options", exe_options);
|
||||
|
||||
exe_options.addOption(u32, "mem_leak_frames", mem_leak_frames);
|
||||
exe_options.addOption(bool, "skip_non_native", false);
|
||||
exe_options.addOption(bool, "have_llvm", enable_llvm);
|
||||
exe_options.addOption(bool, "llvm_has_m68k", llvm_has_m68k);
|
||||
exe_options.addOption(bool, "llvm_has_csky", llvm_has_csky);
|
||||
exe_options.addOption(bool, "llvm_has_arc", llvm_has_arc);
|
||||
exe_options.addOption(bool, "force_gpa", force_gpa);
|
||||
|
||||
if (link_libc) {
|
||||
exe.linkLibC();
|
||||
}
|
||||
|
||||
const is_debug = mode == .Debug;
|
||||
const enable_logging = b.option(bool, "log", "Enable debug logging with --debug-log") orelse is_debug;
|
||||
const enable_link_snapshots = b.option(bool, "link-snapshot", "Whether to enable linker state snapshots") orelse false;
|
||||
|
||||
const opt_version_string = b.option([]const u8, "version-string", "Override Zig version string. Default is to find out with git.");
|
||||
const version = if (opt_version_string) |version| version else v: {
|
||||
const version_string = b.fmt("{d}.{d}.{d}", .{ zig_version.major, zig_version.minor, zig_version.patch });
|
||||
|
||||
var code: u8 = undefined;
|
||||
const git_describe_untrimmed = b.execAllowFail(&[_][]const u8{
|
||||
"git", "-C", b.build_root, "describe", "--match", "*.*.*", "--tags",
|
||||
}, &code, .Ignore) catch {
|
||||
break :v version_string;
|
||||
};
|
||||
const git_describe = mem.trim(u8, git_describe_untrimmed, " \n\r");
|
||||
|
||||
switch (mem.count(u8, git_describe, "-")) {
|
||||
0 => {
|
||||
// Tagged release version (e.g. 0.9.0).
|
||||
if (!mem.eql(u8, git_describe, version_string)) {
|
||||
std.debug.print("Zig version '{s}' does not match Git tag '{s}'\n", .{ version_string, git_describe });
|
||||
std.process.exit(1);
|
||||
}
|
||||
break :v version_string;
|
||||
},
|
||||
2 => {
|
||||
// Untagged development build (e.g. 0.9.0-dev.2025+ecf0050a9).
|
||||
var it = mem.split(u8, git_describe, "-");
|
||||
const tagged_ancestor = it.next() orelse unreachable;
|
||||
const commit_height = it.next() orelse unreachable;
|
||||
const commit_id = it.next() orelse unreachable;
|
||||
|
||||
const ancestor_ver = try std.builtin.Version.parse(tagged_ancestor);
|
||||
if (zig_version.order(ancestor_ver) != .gt) {
|
||||
std.debug.print("Zig version '{}' must be greater than tagged ancestor '{}'\n", .{ zig_version, ancestor_ver });
|
||||
std.process.exit(1);
|
||||
}
|
||||
|
||||
// Check that the commit hash is prefixed with a 'g' (a Git convention).
|
||||
if (commit_id.len < 1 or commit_id[0] != 'g') {
|
||||
std.debug.print("Unexpected `git describe` output: {s}\n", .{git_describe});
|
||||
break :v version_string;
|
||||
}
|
||||
|
||||
// The version is reformatted in accordance with the https://semver.org specification.
|
||||
break :v b.fmt("{s}-dev.{s}+{s}", .{ version_string, commit_height, commit_id[1..] });
|
||||
},
|
||||
else => {
|
||||
std.debug.print("Unexpected `git describe` output: {s}\n", .{git_describe});
|
||||
break :v version_string;
|
||||
},
|
||||
}
|
||||
};
|
||||
exe_options.addOption([:0]const u8, "version", try b.allocator.dupeZ(u8, version));
|
||||
|
||||
if (enable_llvm) {
|
||||
const cmake_cfg = if (static_llvm) null else findAndParseConfigH(b, config_h_path_option);
|
||||
|
||||
if (is_stage1) {
|
||||
const softfloat = b.addStaticLibrary("softfloat", null);
|
||||
softfloat.setBuildMode(.ReleaseFast);
|
||||
softfloat.setTarget(target);
|
||||
softfloat.addIncludeDir("deps/SoftFloat-3e-prebuilt");
|
||||
softfloat.addIncludeDir("deps/SoftFloat-3e/source/8086");
|
||||
softfloat.addIncludeDir("deps/SoftFloat-3e/source/include");
|
||||
softfloat.addCSourceFiles(&softfloat_sources, &[_][]const u8{ "-std=c99", "-O3" });
|
||||
softfloat.single_threaded = single_threaded;
|
||||
|
||||
const zig0 = b.addExecutable("zig0", null);
|
||||
zig0.addCSourceFiles(&.{"src/stage1/zig0.cpp"}, &exe_cflags);
|
||||
zig0.addIncludeDir("zig-cache/tmp"); // for config.h
|
||||
zig0.defineCMacro("ZIG_VERSION_MAJOR", b.fmt("{d}", .{zig_version.major}));
|
||||
zig0.defineCMacro("ZIG_VERSION_MINOR", b.fmt("{d}", .{zig_version.minor}));
|
||||
zig0.defineCMacro("ZIG_VERSION_PATCH", b.fmt("{d}", .{zig_version.patch}));
|
||||
zig0.defineCMacro("ZIG_VERSION_STRING", b.fmt("\"{s}\"", .{version}));
|
||||
|
||||
for ([_]*std.build.LibExeObjStep{ zig0, exe }) |artifact| {
|
||||
artifact.addIncludeDir("src");
|
||||
artifact.addIncludeDir("deps/SoftFloat-3e/source/include");
|
||||
artifact.addIncludeDir("deps/SoftFloat-3e-prebuilt");
|
||||
|
||||
artifact.defineCMacro("ZIG_LINK_MODE", "Static");
|
||||
|
||||
artifact.addCSourceFiles(&stage1_sources, &exe_cflags);
|
||||
artifact.addCSourceFiles(&optimized_c_sources, &[_][]const u8{ "-std=c99", "-O3" });
|
||||
|
||||
artifact.linkLibrary(softfloat);
|
||||
artifact.linkLibCpp();
|
||||
}
|
||||
|
||||
try addStaticLlvmOptionsToExe(zig0);
|
||||
|
||||
const zig1_obj_ext = target.getObjectFormat().fileExt(target.getCpuArch());
|
||||
const zig1_obj_path = b.pathJoin(&.{ "zig-cache", "tmp", b.fmt("zig1{s}", .{zig1_obj_ext}) });
|
||||
const zig1_compiler_rt_path = b.pathJoin(&.{ b.pathFromRoot("lib"), "std", "special", "compiler_rt.zig" });
|
||||
|
||||
const zig1_obj = zig0.run();
|
||||
zig1_obj.addArgs(&.{
|
||||
"src/stage1.zig",
|
||||
"-target",
|
||||
try target.zigTriple(b.allocator),
|
||||
"-mcpu=baseline",
|
||||
"--name",
|
||||
"zig1",
|
||||
"--zig-lib-dir",
|
||||
b.pathFromRoot("lib"),
|
||||
b.fmt("-femit-bin={s}", .{b.pathFromRoot(zig1_obj_path)}),
|
||||
"-fcompiler-rt",
|
||||
"-lc",
|
||||
});
|
||||
{
|
||||
zig1_obj.addArgs(&.{ "--pkg-begin", "build_options" });
|
||||
zig1_obj.addFileSourceArg(exe_options.getSource());
|
||||
zig1_obj.addArgs(&.{ "--pkg-end", "--pkg-begin", "compiler_rt", zig1_compiler_rt_path, "--pkg-end" });
|
||||
}
|
||||
switch (mode) {
|
||||
.Debug => {},
|
||||
.ReleaseFast => {
|
||||
zig1_obj.addArg("-OReleaseFast");
|
||||
zig1_obj.addArg("--strip");
|
||||
},
|
||||
.ReleaseSafe => {
|
||||
zig1_obj.addArg("-OReleaseSafe");
|
||||
zig1_obj.addArg("--strip");
|
||||
},
|
||||
.ReleaseSmall => {
|
||||
zig1_obj.addArg("-OReleaseSmall");
|
||||
zig1_obj.addArg("--strip");
|
||||
},
|
||||
}
|
||||
if (single_threaded orelse false) {
|
||||
zig1_obj.addArg("-fsingle-threaded");
|
||||
}
|
||||
|
||||
exe.step.dependOn(&zig1_obj.step);
|
||||
exe.addObjectFile(zig1_obj_path);
|
||||
|
||||
// This is intentionally a dummy path. stage1.zig tries to @import("compiler_rt") in case
|
||||
// of being built by cmake. But when built by zig it's gonna get a compiler_rt so that
|
||||
// is pointless.
|
||||
exe.addPackagePath("compiler_rt", "src/empty.zig");
|
||||
}
|
||||
if (cmake_cfg) |cfg| {
|
||||
// Inside this code path, we have to coordinate with system packaged LLVM, Clang, and LLD.
|
||||
// That means we also have to rely on stage1 compiled c++ files. We parse config.h to find
|
||||
// the information passed on to us from cmake.
|
||||
if (cfg.cmake_prefix_path.len > 0) {
|
||||
b.addSearchPrefix(cfg.cmake_prefix_path);
|
||||
}
|
||||
|
||||
try addCmakeCfgOptionsToExe(b, cfg, exe, use_zig_libcxx);
|
||||
} else {
|
||||
// Here we are -Denable-llvm but no cmake integration.
|
||||
try addStaticLlvmOptionsToExe(exe);
|
||||
}
|
||||
}
|
||||
|
||||
const semver = try std.SemanticVersion.parse(version);
|
||||
exe_options.addOption(std.SemanticVersion, "semver", semver);
|
||||
|
||||
exe_options.addOption(bool, "enable_logging", enable_logging);
|
||||
exe_options.addOption(bool, "enable_link_snapshots", enable_link_snapshots);
|
||||
exe_options.addOption(bool, "enable_tracy", tracy != null);
|
||||
exe_options.addOption(bool, "enable_tracy_callstack", tracy_callstack);
|
||||
exe_options.addOption(bool, "enable_tracy_allocation", tracy_allocation);
|
||||
exe_options.addOption(bool, "value_tracing", value_tracing);
|
||||
exe_options.addOption(bool, "is_stage1", is_stage1);
|
||||
exe_options.addOption(bool, "omit_stage2", omit_stage2);
|
||||
if (tracy) |tracy_path| {
|
||||
const client_cpp = fs.path.join(
|
||||
b.allocator,
|
||||
&[_][]const u8{ tracy_path, "TracyClient.cpp" },
|
||||
) catch unreachable;
|
||||
|
||||
// On mingw, we need to opt into windows 7+ to get some features required by tracy.
|
||||
const tracy_c_flags: []const []const u8 = if (target.isWindows() and target.getAbi() == .gnu)
|
||||
&[_][]const u8{ "-DTRACY_ENABLE=1", "-fno-sanitize=undefined", "-D_WIN32_WINNT=0x601" }
|
||||
else
|
||||
&[_][]const u8{ "-DTRACY_ENABLE=1", "-fno-sanitize=undefined" };
|
||||
|
||||
exe.addIncludeDir(tracy_path);
|
||||
exe.addCSourceFile(client_cpp, tracy_c_flags);
|
||||
if (!enable_llvm) {
|
||||
exe.linkSystemLibraryName("c++");
|
||||
}
|
||||
exe.linkLibC();
|
||||
|
||||
if (target.isWindows()) {
|
||||
exe.linkSystemLibrary("dbghelp");
|
||||
exe.linkSystemLibrary("ws2_32");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const exe_cflags = [_][]const u8{
|
||||
"-std=c++14",
|
||||
"-D__STDC_CONSTANT_MACROS",
|
||||
"-D__STDC_FORMAT_MACROS",
|
||||
"-D__STDC_LIMIT_MACROS",
|
||||
"-D_GNU_SOURCE",
|
||||
"-fvisibility-inlines-hidden",
|
||||
"-fno-exceptions",
|
||||
"-fno-rtti",
|
||||
"-Werror=type-limits",
|
||||
"-Wno-missing-braces",
|
||||
"-Wno-comment",
|
||||
};
|
||||
|
||||
fn addCmakeCfgOptionsToExe(
|
||||
b: *Builder,
|
||||
cfg: CMakeConfig,
|
||||
exe: *std.build.LibExeObjStep,
|
||||
use_zig_libcxx: bool,
|
||||
) !void {
|
||||
exe.addObjectFile(fs.path.join(b.allocator, &[_][]const u8{
|
||||
cfg.cmake_binary_dir,
|
||||
"zigcpp",
|
||||
b.fmt("{s}{s}{s}", .{ exe.target.libPrefix(), "zigcpp", exe.target.staticLibSuffix() }),
|
||||
}) catch unreachable);
|
||||
assert(cfg.lld_include_dir.len != 0);
|
||||
exe.addIncludeDir(cfg.lld_include_dir);
|
||||
addCMakeLibraryList(exe, cfg.clang_libraries);
|
||||
addCMakeLibraryList(exe, cfg.lld_libraries);
|
||||
addCMakeLibraryList(exe, cfg.llvm_libraries);
|
||||
|
||||
if (use_zig_libcxx) {
|
||||
exe.linkLibCpp();
|
||||
} else {
|
||||
const need_cpp_includes = true;
|
||||
|
||||
// System -lc++ must be used because in this code path we are attempting to link
|
||||
// against system-provided LLVM, Clang, LLD.
|
||||
if (exe.target.getOsTag() == .linux) {
|
||||
// First we try to static link against gcc libstdc++. If that doesn't work,
|
||||
// we fall back to -lc++ and cross our fingers.
|
||||
addCxxKnownPath(b, cfg, exe, "libstdc++.a", "", need_cpp_includes) catch |err| switch (err) {
|
||||
error.RequiredLibraryNotFound => {
|
||||
exe.linkSystemLibrary("c++");
|
||||
},
|
||||
else => |e| return e,
|
||||
};
|
||||
exe.linkSystemLibrary("unwind");
|
||||
} else if (exe.target.isFreeBSD()) {
|
||||
try addCxxKnownPath(b, cfg, exe, "libc++.a", null, need_cpp_includes);
|
||||
exe.linkSystemLibrary("pthread");
|
||||
} else if (exe.target.getOsTag() == .openbsd) {
|
||||
try addCxxKnownPath(b, cfg, exe, "libc++.a", null, need_cpp_includes);
|
||||
try addCxxKnownPath(b, cfg, exe, "libc++abi.a", null, need_cpp_includes);
|
||||
} else if (exe.target.isDarwin()) {
|
||||
exe.linkSystemLibrary("c++");
|
||||
}
|
||||
}
|
||||
|
||||
if (cfg.dia_guids_lib.len != 0) {
|
||||
exe.addObjectFile(cfg.dia_guids_lib);
|
||||
}
|
||||
}
|
||||
|
||||
fn addStaticLlvmOptionsToExe(
|
||||
exe: *std.build.LibExeObjStep,
|
||||
) !void {
|
||||
// Adds the Zig C++ sources which both stage1 and stage2 need.
|
||||
//
|
||||
// We need this because otherwise zig_clang_cc1_main.cpp ends up pulling
|
||||
// in a dependency on llvm::cfg::Update<llvm::BasicBlock*>::dump() which is
|
||||
// unavailable when LLVM is compiled in Release mode.
|
||||
const zig_cpp_cflags = exe_cflags ++ [_][]const u8{"-DNDEBUG=1"};
|
||||
exe.addCSourceFiles(&zig_cpp_sources, &zig_cpp_cflags);
|
||||
|
||||
for (clang_libs) |lib_name| {
|
||||
exe.linkSystemLibrary(lib_name);
|
||||
}
|
||||
|
||||
for (lld_libs) |lib_name| {
|
||||
exe.linkSystemLibrary(lib_name);
|
||||
}
|
||||
|
||||
for (llvm_libs) |lib_name| {
|
||||
exe.linkSystemLibrary(lib_name);
|
||||
}
|
||||
|
||||
exe.linkSystemLibrary("z");
|
||||
|
||||
// This means we rely on clang-or-zig-built LLVM, Clang, LLD libraries.
|
||||
exe.linkSystemLibrary("c++");
|
||||
|
||||
if (exe.target.getOs().tag == .windows) {
|
||||
exe.linkSystemLibrary("version");
|
||||
exe.linkSystemLibrary("uuid");
|
||||
exe.linkSystemLibrary("ole32");
|
||||
}
|
||||
}
|
||||
|
||||
fn addCxxKnownPath(
|
||||
b: *Builder,
|
||||
ctx: CMakeConfig,
|
||||
exe: *std.build.LibExeObjStep,
|
||||
objname: []const u8,
|
||||
errtxt: ?[]const u8,
|
||||
need_cpp_includes: bool,
|
||||
) !void {
|
||||
const path_padded = try b.exec(&[_][]const u8{
|
||||
ctx.cxx_compiler,
|
||||
b.fmt("-print-file-name={s}", .{objname}),
|
||||
});
|
||||
const path_unpadded = mem.tokenize(u8, path_padded, "\r\n").next().?;
|
||||
if (mem.eql(u8, path_unpadded, objname)) {
|
||||
if (errtxt) |msg| {
|
||||
std.debug.print("{s}", .{msg});
|
||||
} else {
|
||||
std.debug.print("Unable to determine path to {s}\n", .{objname});
|
||||
}
|
||||
return error.RequiredLibraryNotFound;
|
||||
}
|
||||
exe.addObjectFile(path_unpadded);
|
||||
|
||||
// TODO a way to integrate with system c++ include files here
|
||||
// cc -E -Wp,-v -xc++ /dev/null
|
||||
if (need_cpp_includes) {
|
||||
// I used these temporarily for testing something but we obviously need a
|
||||
// more general purpose solution here.
|
||||
//exe.addIncludeDir("/nix/store/fvf3qjqa5qpcjjkq37pb6ypnk1mzhf5h-gcc-9.3.0/lib/gcc/x86_64-unknown-linux-gnu/9.3.0/../../../../include/c++/9.3.0");
|
||||
//exe.addIncludeDir("/nix/store/fvf3qjqa5qpcjjkq37pb6ypnk1mzhf5h-gcc-9.3.0/lib/gcc/x86_64-unknown-linux-gnu/9.3.0/../../../../include/c++/9.3.0/x86_64-unknown-linux-gnu");
|
||||
//exe.addIncludeDir("/nix/store/fvf3qjqa5qpcjjkq37pb6ypnk1mzhf5h-gcc-9.3.0/lib/gcc/x86_64-unknown-linux-gnu/9.3.0/../../../../include/c++/9.3.0/backward");
|
||||
}
|
||||
}
|
||||
|
||||
fn addCMakeLibraryList(exe: *std.build.LibExeObjStep, list: []const u8) void {
|
||||
var it = mem.tokenize(u8, list, ";");
|
||||
while (it.next()) |lib| {
|
||||
if (mem.startsWith(u8, lib, "-l")) {
|
||||
exe.linkSystemLibrary(lib["-l".len..]);
|
||||
} else {
|
||||
exe.addObjectFile(lib);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const CMakeConfig = struct {
|
||||
cmake_binary_dir: []const u8,
|
||||
cmake_prefix_path: []const u8,
|
||||
cxx_compiler: []const u8,
|
||||
lld_include_dir: []const u8,
|
||||
lld_libraries: []const u8,
|
||||
clang_libraries: []const u8,
|
||||
llvm_libraries: []const u8,
|
||||
dia_guids_lib: []const u8,
|
||||
};
|
||||
|
||||
const max_config_h_bytes = 1 * 1024 * 1024;
|
||||
|
||||
fn findAndParseConfigH(b: *Builder, config_h_path_option: ?[]const u8) ?CMakeConfig {
|
||||
const config_h_text: []const u8 = if (config_h_path_option) |config_h_path| blk: {
|
||||
break :blk fs.cwd().readFileAlloc(b.allocator, config_h_path, max_config_h_bytes) catch unreachable;
|
||||
} else blk: {
|
||||
// TODO this should stop looking for config.h once it detects we hit the
|
||||
// zig source root directory.
|
||||
var check_dir = fs.path.dirname(b.zig_exe).?;
|
||||
while (true) {
|
||||
var dir = fs.cwd().openDir(check_dir, .{}) catch unreachable;
|
||||
defer dir.close();
|
||||
|
||||
break :blk dir.readFileAlloc(b.allocator, "config.h", max_config_h_bytes) catch |err| switch (err) {
|
||||
error.FileNotFound => {
|
||||
const new_check_dir = fs.path.dirname(check_dir);
|
||||
if (new_check_dir == null or mem.eql(u8, new_check_dir.?, check_dir)) {
|
||||
return null;
|
||||
}
|
||||
check_dir = new_check_dir.?;
|
||||
continue;
|
||||
},
|
||||
else => unreachable,
|
||||
};
|
||||
} else unreachable; // TODO should not need `else unreachable`.
|
||||
};
|
||||
|
||||
var ctx: CMakeConfig = .{
|
||||
.cmake_binary_dir = undefined,
|
||||
.cmake_prefix_path = undefined,
|
||||
.cxx_compiler = undefined,
|
||||
.lld_include_dir = undefined,
|
||||
.lld_libraries = undefined,
|
||||
.clang_libraries = undefined,
|
||||
.llvm_libraries = undefined,
|
||||
.dia_guids_lib = undefined,
|
||||
};
|
||||
|
||||
const mappings = [_]struct { prefix: []const u8, field: []const u8 }{
|
||||
.{
|
||||
.prefix = "#define ZIG_CMAKE_BINARY_DIR ",
|
||||
.field = "cmake_binary_dir",
|
||||
},
|
||||
.{
|
||||
.prefix = "#define ZIG_CMAKE_PREFIX_PATH ",
|
||||
.field = "cmake_prefix_path",
|
||||
},
|
||||
.{
|
||||
.prefix = "#define ZIG_CXX_COMPILER ",
|
||||
.field = "cxx_compiler",
|
||||
},
|
||||
.{
|
||||
.prefix = "#define ZIG_LLD_INCLUDE_PATH ",
|
||||
.field = "lld_include_dir",
|
||||
},
|
||||
.{
|
||||
.prefix = "#define ZIG_LLD_LIBRARIES ",
|
||||
.field = "lld_libraries",
|
||||
},
|
||||
.{
|
||||
.prefix = "#define ZIG_CLANG_LIBRARIES ",
|
||||
.field = "clang_libraries",
|
||||
},
|
||||
.{
|
||||
.prefix = "#define ZIG_LLVM_LIBRARIES ",
|
||||
.field = "llvm_libraries",
|
||||
},
|
||||
.{
|
||||
.prefix = "#define ZIG_DIA_GUIDS_LIB ",
|
||||
.field = "dia_guids_lib",
|
||||
},
|
||||
};
|
||||
|
||||
var lines_it = mem.tokenize(u8, config_h_text, "\r\n");
|
||||
while (lines_it.next()) |line| {
|
||||
inline for (mappings) |mapping| {
|
||||
if (mem.startsWith(u8, line, mapping.prefix)) {
|
||||
var it = mem.split(u8, line, "\"");
|
||||
_ = it.next().?; // skip the stuff before the quote
|
||||
const quoted = it.next().?; // the stuff inside the quote
|
||||
@field(ctx, mapping.field) = toNativePathSep(b, quoted);
|
||||
}
|
||||
}
|
||||
}
|
||||
return ctx;
|
||||
}
|
||||
|
||||
fn toNativePathSep(b: *Builder, s: []const u8) []u8 {
|
||||
const duplicated = b.allocator.dupe(u8, s) catch unreachable;
|
||||
for (duplicated) |*byte| switch (byte.*) {
|
||||
'/' => byte.* = fs.path.sep,
|
||||
else => {},
|
||||
};
|
||||
return duplicated;
|
||||
}
|
||||
|
||||
const softfloat_sources = [_][]const u8{
|
||||
"deps/SoftFloat-3e/source/8086/f128M_isSignalingNaN.c",
|
||||
"deps/SoftFloat-3e/source/8086/extF80M_isSignalingNaN.c",
|
||||
"deps/SoftFloat-3e/source/8086/s_commonNaNToF128M.c",
|
||||
"deps/SoftFloat-3e/source/8086/s_commonNaNToExtF80M.c",
|
||||
"deps/SoftFloat-3e/source/8086/s_commonNaNToF16UI.c",
|
||||
"deps/SoftFloat-3e/source/8086/s_commonNaNToF32UI.c",
|
||||
"deps/SoftFloat-3e/source/8086/s_commonNaNToF64UI.c",
|
||||
"deps/SoftFloat-3e/source/8086/s_f128MToCommonNaN.c",
|
||||
"deps/SoftFloat-3e/source/8086/s_extF80MToCommonNaN.c",
|
||||
"deps/SoftFloat-3e/source/8086/s_f16UIToCommonNaN.c",
|
||||
"deps/SoftFloat-3e/source/8086/s_f32UIToCommonNaN.c",
|
||||
"deps/SoftFloat-3e/source/8086/s_f64UIToCommonNaN.c",
|
||||
"deps/SoftFloat-3e/source/8086/s_propagateNaNF128M.c",
|
||||
"deps/SoftFloat-3e/source/8086/s_propagateNaNExtF80M.c",
|
||||
"deps/SoftFloat-3e/source/8086/s_propagateNaNF16UI.c",
|
||||
"deps/SoftFloat-3e/source/8086/softfloat_raiseFlags.c",
|
||||
"deps/SoftFloat-3e/source/f128M_add.c",
|
||||
"deps/SoftFloat-3e/source/f128M_div.c",
|
||||
"deps/SoftFloat-3e/source/f128M_eq.c",
|
||||
"deps/SoftFloat-3e/source/f128M_eq_signaling.c",
|
||||
"deps/SoftFloat-3e/source/f128M_le.c",
|
||||
"deps/SoftFloat-3e/source/f128M_le_quiet.c",
|
||||
"deps/SoftFloat-3e/source/f128M_lt.c",
|
||||
"deps/SoftFloat-3e/source/f128M_lt_quiet.c",
|
||||
"deps/SoftFloat-3e/source/f128M_mul.c",
|
||||
"deps/SoftFloat-3e/source/f128M_mulAdd.c",
|
||||
"deps/SoftFloat-3e/source/f128M_rem.c",
|
||||
"deps/SoftFloat-3e/source/f128M_roundToInt.c",
|
||||
"deps/SoftFloat-3e/source/f128M_sqrt.c",
|
||||
"deps/SoftFloat-3e/source/f128M_sub.c",
|
||||
"deps/SoftFloat-3e/source/f128M_to_f16.c",
|
||||
"deps/SoftFloat-3e/source/f128M_to_f32.c",
|
||||
"deps/SoftFloat-3e/source/f128M_to_f64.c",
|
||||
"deps/SoftFloat-3e/source/f128M_to_extF80M.c",
|
||||
"deps/SoftFloat-3e/source/f128M_to_i32.c",
|
||||
"deps/SoftFloat-3e/source/f128M_to_i32_r_minMag.c",
|
||||
"deps/SoftFloat-3e/source/f128M_to_i64.c",
|
||||
"deps/SoftFloat-3e/source/f128M_to_i64_r_minMag.c",
|
||||
"deps/SoftFloat-3e/source/f128M_to_ui32.c",
|
||||
"deps/SoftFloat-3e/source/f128M_to_ui32_r_minMag.c",
|
||||
"deps/SoftFloat-3e/source/f128M_to_ui64.c",
|
||||
"deps/SoftFloat-3e/source/f128M_to_ui64_r_minMag.c",
|
||||
"deps/SoftFloat-3e/source/extF80M_add.c",
|
||||
"deps/SoftFloat-3e/source/extF80M_div.c",
|
||||
"deps/SoftFloat-3e/source/extF80M_eq.c",
|
||||
"deps/SoftFloat-3e/source/extF80M_le.c",
|
||||
"deps/SoftFloat-3e/source/extF80M_lt.c",
|
||||
"deps/SoftFloat-3e/source/extF80M_mul.c",
|
||||
"deps/SoftFloat-3e/source/extF80M_rem.c",
|
||||
"deps/SoftFloat-3e/source/extF80M_roundToInt.c",
|
||||
"deps/SoftFloat-3e/source/extF80M_sqrt.c",
|
||||
"deps/SoftFloat-3e/source/extF80M_sub.c",
|
||||
"deps/SoftFloat-3e/source/extF80M_to_f16.c",
|
||||
"deps/SoftFloat-3e/source/extF80M_to_f32.c",
|
||||
"deps/SoftFloat-3e/source/extF80M_to_f64.c",
|
||||
"deps/SoftFloat-3e/source/extF80M_to_f128M.c",
|
||||
"deps/SoftFloat-3e/source/f16_add.c",
|
||||
"deps/SoftFloat-3e/source/f16_div.c",
|
||||
"deps/SoftFloat-3e/source/f16_eq.c",
|
||||
"deps/SoftFloat-3e/source/f16_isSignalingNaN.c",
|
||||
"deps/SoftFloat-3e/source/f16_lt.c",
|
||||
"deps/SoftFloat-3e/source/f16_mul.c",
|
||||
"deps/SoftFloat-3e/source/f16_mulAdd.c",
|
||||
"deps/SoftFloat-3e/source/f16_rem.c",
|
||||
"deps/SoftFloat-3e/source/f16_roundToInt.c",
|
||||
"deps/SoftFloat-3e/source/f16_sqrt.c",
|
||||
"deps/SoftFloat-3e/source/f16_sub.c",
|
||||
"deps/SoftFloat-3e/source/f16_to_extF80M.c",
|
||||
"deps/SoftFloat-3e/source/f16_to_f128M.c",
|
||||
"deps/SoftFloat-3e/source/f16_to_f64.c",
|
||||
"deps/SoftFloat-3e/source/f32_to_extF80M.c",
|
||||
"deps/SoftFloat-3e/source/f32_to_f128M.c",
|
||||
"deps/SoftFloat-3e/source/f64_to_extF80M.c",
|
||||
"deps/SoftFloat-3e/source/f64_to_f128M.c",
|
||||
"deps/SoftFloat-3e/source/f64_to_f16.c",
|
||||
"deps/SoftFloat-3e/source/i32_to_f128M.c",
|
||||
"deps/SoftFloat-3e/source/s_add256M.c",
|
||||
"deps/SoftFloat-3e/source/s_addCarryM.c",
|
||||
"deps/SoftFloat-3e/source/s_addComplCarryM.c",
|
||||
"deps/SoftFloat-3e/source/s_addF128M.c",
|
||||
"deps/SoftFloat-3e/source/s_addExtF80M.c",
|
||||
"deps/SoftFloat-3e/source/s_addM.c",
|
||||
"deps/SoftFloat-3e/source/s_addMagsF16.c",
|
||||
"deps/SoftFloat-3e/source/s_addMagsF32.c",
|
||||
"deps/SoftFloat-3e/source/s_addMagsF64.c",
|
||||
"deps/SoftFloat-3e/source/s_approxRecip32_1.c",
|
||||
"deps/SoftFloat-3e/source/s_approxRecipSqrt32_1.c",
|
||||
"deps/SoftFloat-3e/source/s_approxRecipSqrt_1Ks.c",
|
||||
"deps/SoftFloat-3e/source/s_approxRecip_1Ks.c",
|
||||
"deps/SoftFloat-3e/source/s_compare128M.c",
|
||||
"deps/SoftFloat-3e/source/s_compare96M.c",
|
||||
"deps/SoftFloat-3e/source/s_compareNonnormExtF80M.c",
|
||||
"deps/SoftFloat-3e/source/s_countLeadingZeros16.c",
|
||||
"deps/SoftFloat-3e/source/s_countLeadingZeros32.c",
|
||||
"deps/SoftFloat-3e/source/s_countLeadingZeros64.c",
|
||||
"deps/SoftFloat-3e/source/s_countLeadingZeros8.c",
|
||||
"deps/SoftFloat-3e/source/s_eq128.c",
|
||||
"deps/SoftFloat-3e/source/s_invalidF128M.c",
|
||||
"deps/SoftFloat-3e/source/s_invalidExtF80M.c",
|
||||
"deps/SoftFloat-3e/source/s_isNaNF128M.c",
|
||||
"deps/SoftFloat-3e/source/s_le128.c",
|
||||
"deps/SoftFloat-3e/source/s_lt128.c",
|
||||
"deps/SoftFloat-3e/source/s_mul128MTo256M.c",
|
||||
"deps/SoftFloat-3e/source/s_mul64To128M.c",
|
||||
"deps/SoftFloat-3e/source/s_mulAddF128M.c",
|
||||
"deps/SoftFloat-3e/source/s_mulAddF16.c",
|
||||
"deps/SoftFloat-3e/source/s_mulAddF32.c",
|
||||
"deps/SoftFloat-3e/source/s_mulAddF64.c",
|
||||
"deps/SoftFloat-3e/source/s_negXM.c",
|
||||
"deps/SoftFloat-3e/source/s_normExtF80SigM.c",
|
||||
"deps/SoftFloat-3e/source/s_normRoundPackMToF128M.c",
|
||||
"deps/SoftFloat-3e/source/s_normRoundPackMToExtF80M.c",
|
||||
"deps/SoftFloat-3e/source/s_normRoundPackToF16.c",
|
||||
"deps/SoftFloat-3e/source/s_normRoundPackToF32.c",
|
||||
"deps/SoftFloat-3e/source/s_normRoundPackToF64.c",
|
||||
"deps/SoftFloat-3e/source/s_normSubnormalF128SigM.c",
|
||||
"deps/SoftFloat-3e/source/s_normSubnormalF16Sig.c",
|
||||
"deps/SoftFloat-3e/source/s_normSubnormalF32Sig.c",
|
||||
"deps/SoftFloat-3e/source/s_normSubnormalF64Sig.c",
|
||||
"deps/SoftFloat-3e/source/s_remStepMBy32.c",
|
||||
"deps/SoftFloat-3e/source/s_roundMToI64.c",
|
||||
"deps/SoftFloat-3e/source/s_roundMToUI64.c",
|
||||
"deps/SoftFloat-3e/source/s_roundPackMToExtF80M.c",
|
||||
"deps/SoftFloat-3e/source/s_roundPackMToF128M.c",
|
||||
"deps/SoftFloat-3e/source/s_roundPackToF16.c",
|
||||
"deps/SoftFloat-3e/source/s_roundPackToF32.c",
|
||||
"deps/SoftFloat-3e/source/s_roundPackToF64.c",
|
||||
"deps/SoftFloat-3e/source/s_roundToI32.c",
|
||||
"deps/SoftFloat-3e/source/s_roundToI64.c",
|
||||
"deps/SoftFloat-3e/source/s_roundToUI32.c",
|
||||
"deps/SoftFloat-3e/source/s_roundToUI64.c",
|
||||
"deps/SoftFloat-3e/source/s_shiftLeftM.c",
|
||||
"deps/SoftFloat-3e/source/s_shiftNormSigF128M.c",
|
||||
"deps/SoftFloat-3e/source/s_shiftRightJam256M.c",
|
||||
"deps/SoftFloat-3e/source/s_shiftRightJam32.c",
|
||||
"deps/SoftFloat-3e/source/s_shiftRightJam64.c",
|
||||
"deps/SoftFloat-3e/source/s_shiftRightJamM.c",
|
||||
"deps/SoftFloat-3e/source/s_shiftRightM.c",
|
||||
"deps/SoftFloat-3e/source/s_shortShiftLeft64To96M.c",
|
||||
"deps/SoftFloat-3e/source/s_shortShiftLeftM.c",
|
||||
"deps/SoftFloat-3e/source/s_shortShiftRightExtendM.c",
|
||||
"deps/SoftFloat-3e/source/s_shortShiftRightJam64.c",
|
||||
"deps/SoftFloat-3e/source/s_shortShiftRightJamM.c",
|
||||
"deps/SoftFloat-3e/source/s_shortShiftRightM.c",
|
||||
"deps/SoftFloat-3e/source/s_sub1XM.c",
|
||||
"deps/SoftFloat-3e/source/s_sub256M.c",
|
||||
"deps/SoftFloat-3e/source/s_subM.c",
|
||||
"deps/SoftFloat-3e/source/s_subMagsF16.c",
|
||||
"deps/SoftFloat-3e/source/s_subMagsF32.c",
|
||||
"deps/SoftFloat-3e/source/s_subMagsF64.c",
|
||||
"deps/SoftFloat-3e/source/s_tryPropagateNaNF128M.c",
|
||||
"deps/SoftFloat-3e/source/s_tryPropagateNaNExtF80M.c",
|
||||
"deps/SoftFloat-3e/source/softfloat_state.c",
|
||||
"deps/SoftFloat-3e/source/ui32_to_f128M.c",
|
||||
"deps/SoftFloat-3e/source/ui64_to_f128M.c",
|
||||
"deps/SoftFloat-3e/source/ui32_to_extF80M.c",
|
||||
"deps/SoftFloat-3e/source/ui64_to_extF80M.c",
|
||||
};
|
||||
|
||||
const stage1_sources = [_][]const u8{
|
||||
"src/stage1/analyze.cpp",
|
||||
"src/stage1/astgen.cpp",
|
||||
"src/stage1/bigfloat.cpp",
|
||||
"src/stage1/bigint.cpp",
|
||||
"src/stage1/buffer.cpp",
|
||||
"src/stage1/codegen.cpp",
|
||||
"src/stage1/errmsg.cpp",
|
||||
"src/stage1/error.cpp",
|
||||
"src/stage1/heap.cpp",
|
||||
"src/stage1/ir.cpp",
|
||||
"src/stage1/ir_print.cpp",
|
||||
"src/stage1/mem.cpp",
|
||||
"src/stage1/os.cpp",
|
||||
"src/stage1/parser.cpp",
|
||||
"src/stage1/range_set.cpp",
|
||||
"src/stage1/stage1.cpp",
|
||||
"src/stage1/target.cpp",
|
||||
"src/stage1/tokenizer.cpp",
|
||||
"src/stage1/util.cpp",
|
||||
"src/stage1/softfloat_ext.cpp",
|
||||
};
|
||||
const optimized_c_sources = [_][]const u8{
|
||||
"src/stage1/parse_f128.c",
|
||||
};
|
||||
const zig_cpp_sources = [_][]const u8{
|
||||
// These are planned to stay even when we are self-hosted.
|
||||
"src/zig_llvm.cpp",
|
||||
"src/zig_clang.cpp",
|
||||
"src/zig_llvm-ar.cpp",
|
||||
"src/zig_clang_driver.cpp",
|
||||
"src/zig_clang_cc1_main.cpp",
|
||||
"src/zig_clang_cc1as_main.cpp",
|
||||
// https://github.com/ziglang/zig/issues/6363
|
||||
"src/windows_sdk.cpp",
|
||||
};
|
||||
|
||||
const clang_libs = [_][]const u8{
|
||||
"clangFrontendTool",
|
||||
"clangCodeGen",
|
||||
"clangFrontend",
|
||||
"clangDriver",
|
||||
"clangSerialization",
|
||||
"clangSema",
|
||||
"clangStaticAnalyzerFrontend",
|
||||
"clangStaticAnalyzerCheckers",
|
||||
"clangStaticAnalyzerCore",
|
||||
"clangAnalysis",
|
||||
"clangASTMatchers",
|
||||
"clangAST",
|
||||
"clangParse",
|
||||
"clangSema",
|
||||
"clangBasic",
|
||||
"clangEdit",
|
||||
"clangLex",
|
||||
"clangARCMigrate",
|
||||
"clangRewriteFrontend",
|
||||
"clangRewrite",
|
||||
"clangCrossTU",
|
||||
"clangIndex",
|
||||
"clangToolingCore",
|
||||
};
|
||||
const lld_libs = [_][]const u8{
|
||||
"lldMinGW",
|
||||
"lldELF",
|
||||
"lldCOFF",
|
||||
"lldWasm",
|
||||
"lldMachO",
|
||||
"lldCommon",
|
||||
};
|
||||
// This list can be re-generated with `llvm-config --libfiles` and then
|
||||
// reformatting using your favorite text editor. Note we do not execute
|
||||
// `llvm-config` here because we are cross compiling. Also omit LLVMTableGen
|
||||
// from these libs.
|
||||
const llvm_libs = [_][]const u8{
|
||||
"LLVMWindowsManifest",
|
||||
"LLVMXRay",
|
||||
"LLVMLibDriver",
|
||||
"LLVMDlltoolDriver",
|
||||
"LLVMCoverage",
|
||||
"LLVMLineEditor",
|
||||
"LLVMXCoreDisassembler",
|
||||
"LLVMXCoreCodeGen",
|
||||
"LLVMXCoreDesc",
|
||||
"LLVMXCoreInfo",
|
||||
"LLVMX86TargetMCA",
|
||||
"LLVMX86Disassembler",
|
||||
"LLVMX86AsmParser",
|
||||
"LLVMX86CodeGen",
|
||||
"LLVMX86Desc",
|
||||
"LLVMX86Info",
|
||||
"LLVMWebAssemblyDisassembler",
|
||||
"LLVMWebAssemblyAsmParser",
|
||||
"LLVMWebAssemblyCodeGen",
|
||||
"LLVMWebAssemblyDesc",
|
||||
"LLVMWebAssemblyUtils",
|
||||
"LLVMWebAssemblyInfo",
|
||||
"LLVMVEDisassembler",
|
||||
"LLVMVEAsmParser",
|
||||
"LLVMVECodeGen",
|
||||
"LLVMVEDesc",
|
||||
"LLVMVEInfo",
|
||||
"LLVMSystemZDisassembler",
|
||||
"LLVMSystemZAsmParser",
|
||||
"LLVMSystemZCodeGen",
|
||||
"LLVMSystemZDesc",
|
||||
"LLVMSystemZInfo",
|
||||
"LLVMSparcDisassembler",
|
||||
"LLVMSparcAsmParser",
|
||||
"LLVMSparcCodeGen",
|
||||
"LLVMSparcDesc",
|
||||
"LLVMSparcInfo",
|
||||
"LLVMRISCVDisassembler",
|
||||
"LLVMRISCVAsmParser",
|
||||
"LLVMRISCVCodeGen",
|
||||
"LLVMRISCVDesc",
|
||||
"LLVMRISCVInfo",
|
||||
"LLVMPowerPCDisassembler",
|
||||
"LLVMPowerPCAsmParser",
|
||||
"LLVMPowerPCCodeGen",
|
||||
"LLVMPowerPCDesc",
|
||||
"LLVMPowerPCInfo",
|
||||
"LLVMNVPTXCodeGen",
|
||||
"LLVMNVPTXDesc",
|
||||
"LLVMNVPTXInfo",
|
||||
"LLVMMSP430Disassembler",
|
||||
"LLVMMSP430AsmParser",
|
||||
"LLVMMSP430CodeGen",
|
||||
"LLVMMSP430Desc",
|
||||
"LLVMMSP430Info",
|
||||
"LLVMMipsDisassembler",
|
||||
"LLVMMipsAsmParser",
|
||||
"LLVMMipsCodeGen",
|
||||
"LLVMMipsDesc",
|
||||
"LLVMMipsInfo",
|
||||
"LLVMLanaiDisassembler",
|
||||
"LLVMLanaiCodeGen",
|
||||
"LLVMLanaiAsmParser",
|
||||
"LLVMLanaiDesc",
|
||||
"LLVMLanaiInfo",
|
||||
"LLVMHexagonDisassembler",
|
||||
"LLVMHexagonCodeGen",
|
||||
"LLVMHexagonAsmParser",
|
||||
"LLVMHexagonDesc",
|
||||
"LLVMHexagonInfo",
|
||||
"LLVMBPFDisassembler",
|
||||
"LLVMBPFAsmParser",
|
||||
"LLVMBPFCodeGen",
|
||||
"LLVMBPFDesc",
|
||||
"LLVMBPFInfo",
|
||||
"LLVMAVRDisassembler",
|
||||
"LLVMAVRAsmParser",
|
||||
"LLVMAVRCodeGen",
|
||||
"LLVMAVRDesc",
|
||||
"LLVMAVRInfo",
|
||||
"LLVMARMDisassembler",
|
||||
"LLVMARMAsmParser",
|
||||
"LLVMARMCodeGen",
|
||||
"LLVMARMDesc",
|
||||
"LLVMARMUtils",
|
||||
"LLVMARMInfo",
|
||||
"LLVMAMDGPUTargetMCA",
|
||||
"LLVMAMDGPUDisassembler",
|
||||
"LLVMAMDGPUAsmParser",
|
||||
"LLVMAMDGPUCodeGen",
|
||||
"LLVMAMDGPUDesc",
|
||||
"LLVMAMDGPUUtils",
|
||||
"LLVMAMDGPUInfo",
|
||||
"LLVMAArch64Disassembler",
|
||||
"LLVMAArch64AsmParser",
|
||||
"LLVMAArch64CodeGen",
|
||||
"LLVMAArch64Desc",
|
||||
"LLVMAArch64Utils",
|
||||
"LLVMAArch64Info",
|
||||
"LLVMOrcJIT",
|
||||
"LLVMMCJIT",
|
||||
"LLVMJITLink",
|
||||
"LLVMInterpreter",
|
||||
"LLVMExecutionEngine",
|
||||
"LLVMRuntimeDyld",
|
||||
"LLVMOrcTargetProcess",
|
||||
"LLVMOrcShared",
|
||||
"LLVMDWP",
|
||||
"LLVMSymbolize",
|
||||
"LLVMDebugInfoPDB",
|
||||
"LLVMDebugInfoGSYM",
|
||||
"LLVMOption",
|
||||
"LLVMObjectYAML",
|
||||
"LLVMMCA",
|
||||
"LLVMMCDisassembler",
|
||||
"LLVMLTO",
|
||||
"LLVMPasses",
|
||||
"LLVMCFGuard",
|
||||
"LLVMCoroutines",
|
||||
"LLVMObjCARCOpts",
|
||||
"LLVMipo",
|
||||
"LLVMVectorize",
|
||||
"LLVMLinker",
|
||||
"LLVMInstrumentation",
|
||||
"LLVMFrontendOpenMP",
|
||||
"LLVMFrontendOpenACC",
|
||||
"LLVMExtensions",
|
||||
"LLVMDWARFLinker",
|
||||
"LLVMGlobalISel",
|
||||
"LLVMMIRParser",
|
||||
"LLVMAsmPrinter",
|
||||
"LLVMDebugInfoMSF",
|
||||
"LLVMSelectionDAG",
|
||||
"LLVMCodeGen",
|
||||
"LLVMIRReader",
|
||||
"LLVMAsmParser",
|
||||
"LLVMInterfaceStub",
|
||||
"LLVMFileCheck",
|
||||
"LLVMFuzzMutate",
|
||||
"LLVMTarget",
|
||||
"LLVMScalarOpts",
|
||||
"LLVMInstCombine",
|
||||
"LLVMAggressiveInstCombine",
|
||||
"LLVMTransformUtils",
|
||||
"LLVMBitWriter",
|
||||
"LLVMAnalysis",
|
||||
"LLVMProfileData",
|
||||
"LLVMDebugInfoDWARF",
|
||||
"LLVMObject",
|
||||
"LLVMTextAPI",
|
||||
"LLVMMCParser",
|
||||
"LLVMMC",
|
||||
"LLVMDebugInfoCodeView",
|
||||
"LLVMBitReader",
|
||||
"LLVMCore",
|
||||
"LLVMRemarks",
|
||||
"LLVMBitstreamReader",
|
||||
"LLVMBinaryFormat",
|
||||
"LLVMSupport",
|
||||
"LLVMDemangle",
|
||||
};
|
||||
@@ -1,132 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
brew update && brew install ncurses s3cmd
|
||||
|
||||
ZIGDIR="$(pwd)"
|
||||
|
||||
HOST_ARCH="x86_64"
|
||||
HOST_TARGET="$HOST_ARCH-macos-none"
|
||||
HOST_MCPU="baseline"
|
||||
HOST_CACHE_BASENAME="zig+llvm+lld+clang-$HOST_TARGET-0.10.0-dev.2931+bdf3fa12f"
|
||||
HOST_PREFIX="$HOME/$HOST_CACHE_BASENAME"
|
||||
|
||||
ARCH="aarch64"
|
||||
TARGET="$ARCH-macos-none"
|
||||
MCPU="apple_a14"
|
||||
CACHE_BASENAME="zig+llvm+lld+clang-$TARGET-0.10.0-dev.2931+bdf3fa12f"
|
||||
PREFIX="$HOME/$CACHE_BASENAME"
|
||||
|
||||
JOBS="-j2"
|
||||
|
||||
rm -rf $HOST_PREFIX $PREFIX
|
||||
cd $HOME
|
||||
|
||||
wget -nv "https://ziglang.org/deps/$HOST_CACHE_BASENAME.tar.xz"
|
||||
wget -nv "https://ziglang.org/deps/$CACHE_BASENAME.tar.xz"
|
||||
tar xf "$HOST_CACHE_BASENAME.tar.xz"
|
||||
tar xf "$CACHE_BASENAME.tar.xz"
|
||||
|
||||
cd $ZIGDIR
|
||||
|
||||
# Make the `zig version` number consistent.
|
||||
# This will affect the cmake command below.
|
||||
git config core.abbrev 9
|
||||
git fetch --unshallow || true
|
||||
git fetch --tags
|
||||
|
||||
# Build host zig compiler in debug so that we can get the
|
||||
# current version when packaging
|
||||
|
||||
ZIG="$HOST_PREFIX/bin/zig"
|
||||
|
||||
export CC="$ZIG cc -target $HOST_TARGET -mcpu=$HOST_MCPU"
|
||||
export CXX="$ZIG c++ -target $HOST_TARGET -mcpu=$HOST_MCPU"
|
||||
|
||||
mkdir build.host
|
||||
cd build.host
|
||||
cmake .. \
|
||||
-DCMAKE_INSTALL_PREFIX="$(pwd)/release" \
|
||||
-DCMAKE_PREFIX_PATH="$HOST_PREFIX" \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DZIG_TARGET_TRIPLE="$HOST_TARGET" \
|
||||
-DZIG_TARGET_MCPU="$HOST_MCPU" \
|
||||
-DZIG_STATIC=ON \
|
||||
-DZIG_OMIT_STAGE2=ON
|
||||
|
||||
unset CC
|
||||
unset CXX
|
||||
|
||||
make $JOBS install
|
||||
|
||||
# Build zig compiler cross-compiled for arm64
|
||||
cd $ZIGDIR
|
||||
|
||||
ZIG="$ZIGDIR/build.host/release/bin/zig"
|
||||
|
||||
export CC="$ZIG cc -target $TARGET -mcpu=$MCPU"
|
||||
export CXX="$ZIG c++ -target $TARGET -mcpu=$MCPU"
|
||||
|
||||
mkdir build
|
||||
cd build
|
||||
cmake .. \
|
||||
-DCMAKE_INSTALL_PREFIX="$(pwd)/release" \
|
||||
-DCMAKE_PREFIX_PATH="$PREFIX" \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DZIG_TARGET_TRIPLE="$TARGET" \
|
||||
-DZIG_TARGET_MCPU="$MCPU" \
|
||||
-DZIG_EXECUTABLE="$ZIG" \
|
||||
-DZIG_STATIC=ON
|
||||
|
||||
unset CC
|
||||
unset CXX
|
||||
|
||||
make $JOBS install
|
||||
|
||||
if [ "${BUILD_REASON}" != "PullRequest" ]; then
|
||||
mv ../LICENSE release/
|
||||
|
||||
# We do not run test suite but still need langref.
|
||||
mkdir -p release/docs
|
||||
$ZIG run ../doc/docgen.zig -- $ZIG ../doc/langref.html.in release/docs/langref.html
|
||||
|
||||
# Produce the experimental std lib documentation.
|
||||
mkdir -p release/docs/std
|
||||
$ZIG test ../lib/std/std.zig \
|
||||
--zig-lib-dir ../lib \
|
||||
-femit-docs=release/docs/std \
|
||||
-fno-emit-bin
|
||||
|
||||
mv release/bin/zig release/
|
||||
rmdir release/bin
|
||||
|
||||
VERSION=$(../build.host/release/bin/zig version)
|
||||
DIRNAME="zig-macos-$ARCH-$VERSION"
|
||||
TARBALL="$DIRNAME.tar.xz"
|
||||
mv release "$DIRNAME"
|
||||
tar cfJ "$TARBALL" "$DIRNAME"
|
||||
|
||||
mv "$DOWNLOADSECUREFILE_SECUREFILEPATH" "$HOME/.s3cfg"
|
||||
s3cmd put -P --add-header="cache-control: public, max-age=31536000, immutable" "$TARBALL" s3://ziglang.org/builds/
|
||||
|
||||
SHASUM=$(shasum -a 256 $TARBALL | cut '-d ' -f1)
|
||||
BYTESIZE=$(wc -c < $TARBALL)
|
||||
|
||||
JSONFILE="macos-$GITBRANCH.json"
|
||||
touch $JSONFILE
|
||||
echo "{\"tarball\": \"$TARBALL\"," >>$JSONFILE
|
||||
echo "\"shasum\": \"$SHASUM\"," >>$JSONFILE
|
||||
echo "\"size\": \"$BYTESIZE\"}" >>$JSONFILE
|
||||
|
||||
s3cmd put -P --add-header="Cache-Control: max-age=0, must-revalidate" "$JSONFILE" "s3://ziglang.org/builds/$JSONFILE"
|
||||
s3cmd put -P "$JSONFILE" "s3://ziglang.org/builds/$ARCH-macos-$VERSION.json"
|
||||
|
||||
# `set -x` causes these variables to be mangled.
|
||||
# See https://developercommunity.visualstudio.com/content/problem/375679/pipeline-variable-incorrectly-inserts-single-quote.html
|
||||
set +x
|
||||
echo "##vso[task.setvariable variable=tarball;isOutput=true]$TARBALL"
|
||||
echo "##vso[task.setvariable variable=shasum;isOutput=true]$SHASUM"
|
||||
echo "##vso[task.setvariable variable=bytesize;isOutput=true]$BYTESIZE"
|
||||
fi
|
||||
+12
-37
@@ -34,13 +34,12 @@ git fetch --tags
|
||||
mkdir build
|
||||
cd build
|
||||
cmake .. \
|
||||
-DCMAKE_INSTALL_PREFIX="$(pwd)/release" \
|
||||
-DCMAKE_INSTALL_PREFIX="stage3-release" \
|
||||
-DCMAKE_PREFIX_PATH="$PREFIX" \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DZIG_TARGET_TRIPLE="$TARGET" \
|
||||
-DZIG_TARGET_MCPU="$MCPU" \
|
||||
-DZIG_STATIC=ON \
|
||||
-DZIG_OMIT_STAGE2=ON
|
||||
-DZIG_STATIC=ON
|
||||
|
||||
# Now cmake will use zig as the C/C++ compiler. We reset the environment variables
|
||||
# so that installation and testing do not get affected by them.
|
||||
@@ -49,45 +48,21 @@ unset CXX
|
||||
|
||||
make $JOBS install
|
||||
|
||||
# Here we rebuild zig but this time using the Zig binary we just now produced to
|
||||
# build zig1.o rather than relying on the one built with stage0. See
|
||||
# https://github.com/ziglang/zig/issues/6830 for more details.
|
||||
cmake .. -DZIG_EXECUTABLE="$(pwd)/release/bin/zig"
|
||||
make $JOBS install
|
||||
|
||||
# Build stage2 standalone so that we can test stage2 against stage2 compiler-rt.
|
||||
release/bin/zig build -p stage2 -Denable-llvm
|
||||
|
||||
stage2/bin/zig build test-behavior
|
||||
|
||||
# TODO: upgrade these to test stage2 instead of stage1
|
||||
# TODO: upgrade these to test stage3 instead of stage2
|
||||
release/bin/zig build test-behavior -Denable-macos-sdk -Domit-stage2
|
||||
release/bin/zig build test-compiler-rt -Denable-macos-sdk
|
||||
release/bin/zig build test-std -Denable-macos-sdk
|
||||
release/bin/zig build test-universal-libc -Denable-macos-sdk
|
||||
release/bin/zig build test-compare-output -Denable-macos-sdk
|
||||
release/bin/zig build test-standalone -Denable-macos-sdk
|
||||
release/bin/zig build test-stack-traces -Denable-macos-sdk
|
||||
release/bin/zig build test-cli -Denable-macos-sdk
|
||||
release/bin/zig build test-asm-link -Denable-macos-sdk
|
||||
release/bin/zig build test-translate-c -Denable-macos-sdk
|
||||
release/bin/zig build test-run-translated-c -Denable-macos-sdk
|
||||
release/bin/zig build docs -Denable-macos-sdk
|
||||
release/bin/zig build test-fmt -Denable-macos-sdk
|
||||
release/bin/zig build test-cases -Denable-macos-sdk -Dsingle-threaded
|
||||
release/bin/zig build test-link -Denable-macos-sdk -Domit-stage2
|
||||
stage3-release/bin/zig build test docs \
|
||||
-Denable-macos-sdk \
|
||||
-Dstatic-llvm \
|
||||
--search-prefix "$PREFIX"
|
||||
|
||||
if [ "${BUILD_REASON}" != "PullRequest" ]; then
|
||||
mv ../LICENSE release/
|
||||
mv ../zig-cache/langref.html release/
|
||||
mv release/bin/zig release/
|
||||
rmdir release/bin
|
||||
mv ../LICENSE stage3-release/
|
||||
mv ../zig-cache/langref.html stage3-release/
|
||||
mv stage3-release/bin/zig stage3-release/
|
||||
rmdir stage3-release/bin
|
||||
|
||||
VERSION=$(release/zig version)
|
||||
VERSION=$(stage3-release/zig version)
|
||||
DIRNAME="zig-macos-$ARCH-$VERSION"
|
||||
TARBALL="$DIRNAME.tar.xz"
|
||||
mv release "$DIRNAME"
|
||||
mv stage3-release "$DIRNAME"
|
||||
tar cfJ "$TARBALL" "$DIRNAME"
|
||||
|
||||
mv "$DOWNLOADSECUREFILE_SECUREFILEPATH" "$HOME/.s3cfg"
|
||||
|
||||
+36
-61
@@ -10,24 +10,13 @@ jobs:
|
||||
- script: ci/azure/macos_script
|
||||
name: main
|
||||
displayName: 'Build and test'
|
||||
- job: BuildMacOS_arm64
|
||||
pool:
|
||||
vmImage: 'macOS-11'
|
||||
timeoutInMinutes: 180
|
||||
steps:
|
||||
- task: DownloadSecureFile@1
|
||||
inputs:
|
||||
secureFile: s3cfg
|
||||
- script: ci/azure/macos_arm64_script
|
||||
name: main
|
||||
displayName: 'Build'
|
||||
- job: BuildWindows
|
||||
timeoutInMinutes: 360
|
||||
pool:
|
||||
vmImage: 'windows-2019'
|
||||
variables:
|
||||
TARGET: 'x86_64-windows-gnu'
|
||||
ZIG_LLVM_CLANG_LLD_NAME: 'zig+llvm+lld+clang-${{ variables.TARGET }}-0.10.0-dev.2931+bdf3fa12f'
|
||||
ZIG_LLVM_CLANG_LLD_NAME: 'zig+llvm+lld+clang-${{ variables.TARGET }}-0.10.0-dev.3733+a9af47272'
|
||||
ZIG_LLVM_CLANG_LLD_URL: 'https://ziglang.org/deps/${{ variables.ZIG_LLVM_CLANG_LLD_NAME }}.zip'
|
||||
steps:
|
||||
- pwsh: |
|
||||
@@ -37,10 +26,17 @@ jobs:
|
||||
displayName: 'Install ZIG/LLVM/CLANG/LLD'
|
||||
|
||||
- pwsh: |
|
||||
Set-Variable -Name ZIGBUILDDIR -Value "$(Get-Location)\build"
|
||||
Set-Variable -Name ZIGINSTALLDIR -Value "${ZIGBUILDDIR}\dist"
|
||||
Set-Variable -Name ZIGLIBDIR -Value "$(Get-Location)\lib"
|
||||
Set-Variable -Name ZIGINSTALLDIR -Value "$(Get-Location)\stage3-release"
|
||||
Set-Variable -Name ZIGPREFIXPATH -Value "$(Get-Location)\$(ZIG_LLVM_CLANG_LLD_NAME)"
|
||||
|
||||
function CheckLastExitCode {
|
||||
if (!$?) {
|
||||
exit 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
# Make the `zig version` number consistent.
|
||||
# This will affect the `zig build` command below which uses `git describe`.
|
||||
git config core.abbrev 9
|
||||
@@ -49,64 +45,45 @@ jobs:
|
||||
git fetch --unshallow # `git describe` won't work on a shallow repo
|
||||
}
|
||||
|
||||
# The dev kit zip file that we have here is old, and may be incompatible with
|
||||
# the build.zig script of master branch. So we keep an old version of build.zig
|
||||
# here in the CI directory.
|
||||
mv build.zig build.zig.master
|
||||
mv ci/azure/build.zig build.zig
|
||||
|
||||
mkdir $ZIGBUILDDIR
|
||||
cd $ZIGBUILDDIR
|
||||
|
||||
& "${ZIGPREFIXPATH}/bin/zig.exe" build `
|
||||
& "$ZIGPREFIXPATH\bin\zig.exe" build `
|
||||
--prefix "$ZIGINSTALLDIR" `
|
||||
--search-prefix "$ZIGPREFIXPATH" `
|
||||
-Dstage1 `
|
||||
<# stage2 is omitted until we resolve https://github.com/ziglang/zig/issues/6485 #> `
|
||||
-Domit-stage2 `
|
||||
--zig-lib-dir "$ZIGLIBDIR" `
|
||||
-Denable-stage1 `
|
||||
-Dstatic-llvm `
|
||||
-Drelease `
|
||||
-Dstrip `
|
||||
-Duse-zig-libcxx `
|
||||
-Dtarget=$(TARGET)
|
||||
|
||||
cd -
|
||||
|
||||
# Now that we have built an up-to-date zig.exe, we restore the original
|
||||
# build script from master branch.
|
||||
rm build.zig
|
||||
mv build.zig.master build.zig
|
||||
|
||||
CheckLastExitCode
|
||||
name: build
|
||||
displayName: 'Build'
|
||||
|
||||
- pwsh: |
|
||||
Set-Variable -Name ZIGINSTALLDIR -Value "$(Get-Location)\build\dist"
|
||||
Set-Variable -Name ZIGINSTALLDIR -Value "$(Get-Location)\stage3-release"
|
||||
|
||||
# Sadly, stage2 is omitted from this build to save memory on the CI server. Once self-hosted is
|
||||
# built with itself and does not gobble as much memory, we can enable these tests.
|
||||
#& "$ZIGINSTALLDIR\bin\zig.exe" test "..\test\behavior.zig" -fno-stage1 -fLLVM -I "..\test" 2>&1
|
||||
function CheckLastExitCode {
|
||||
if (!$?) {
|
||||
exit 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
& "$ZIGINSTALLDIR\bin\zig.exe" build test-toolchain -Dskip-non-native -Dskip-stage2-tests 2>&1
|
||||
& "$ZIGINSTALLDIR\bin\zig.exe" build test-std -Dskip-non-native 2>&1
|
||||
& "$ZIGINSTALLDIR\bin\zig.exe" build test docs `
|
||||
--search-prefix "$ZIGPREFIXPATH" `
|
||||
-Dstatic-llvm `
|
||||
-Dskip-non-native `
|
||||
-Dskip-stage2-tests
|
||||
CheckLastExitCode
|
||||
name: test
|
||||
displayName: 'Test'
|
||||
|
||||
- pwsh: |
|
||||
Set-Variable -Name ZIGINSTALLDIR -Value "$(Get-Location)\build\dist"
|
||||
|
||||
& "$ZIGINSTALLDIR\bin\zig.exe" build docs
|
||||
timeoutInMinutes: 60
|
||||
name: doc
|
||||
displayName: 'Documentation'
|
||||
|
||||
- task: DownloadSecureFile@1
|
||||
inputs:
|
||||
name: aws_credentials
|
||||
secureFile: aws_credentials
|
||||
|
||||
- pwsh: |
|
||||
Set-Variable -Name ZIGBUILDDIR -Value "$(Get-Location)\build"
|
||||
$Env:AWS_SHARED_CREDENTIALS_FILE = "$Env:DOWNLOADSECUREFILE_SECUREFILEPATH"
|
||||
|
||||
# Workaround Azure networking issue
|
||||
@@ -114,21 +91,20 @@ jobs:
|
||||
$Env:AWS_EC2_METADATA_DISABLED = "true"
|
||||
$Env:AWS_REGION = "us-west-2"
|
||||
|
||||
cd "$ZIGBUILDDIR"
|
||||
mv ../LICENSE dist/
|
||||
mv ../zig-cache/langref.html dist/
|
||||
mv dist/bin/zig.exe dist/
|
||||
rmdir dist/bin
|
||||
mv LICENSE stage3-release/
|
||||
mv zig-cache/langref.html stage3-release/
|
||||
mv stage3-release/bin/zig.exe stage3-release/
|
||||
rmdir stage3-release/bin
|
||||
|
||||
# Remove the unnecessary zig dir in $prefix/lib/zig/std/std.zig
|
||||
mv dist/lib/zig dist/lib2
|
||||
rmdir dist/lib
|
||||
mv dist/lib2 dist/lib
|
||||
mv stage3-release/lib/zig stage3-release/lib2
|
||||
rmdir stage3-release/lib
|
||||
mv stage3-release/lib2 stage3-release/lib
|
||||
|
||||
Set-Variable -Name VERSION -Value $(./dist/zig.exe version)
|
||||
Set-Variable -Name VERSION -Value $(./stage3-release/zig.exe version)
|
||||
Set-Variable -Name DIRNAME -Value "zig-windows-x86_64-$VERSION"
|
||||
Set-Variable -Name TARBALL -Value "$DIRNAME.zip"
|
||||
mv dist "$DIRNAME"
|
||||
mv stage3-release "$DIRNAME"
|
||||
7z a "$TARBALL" "$DIRNAME"
|
||||
|
||||
aws s3 cp `
|
||||
@@ -168,7 +144,6 @@ jobs:
|
||||
- job: OnMasterSuccess
|
||||
dependsOn:
|
||||
- BuildMacOS
|
||||
- BuildMacOS_arm64
|
||||
- BuildWindows
|
||||
condition: and(succeeded(), eq(variables['Build.SourceBranch'], 'refs/heads/master'))
|
||||
strategy:
|
||||
|
||||
+21
-21
@@ -13,65 +13,65 @@ steps:
|
||||
commands:
|
||||
- ./ci/drone/linux_script_build
|
||||
|
||||
- name: test-1
|
||||
- name: behavior
|
||||
depends_on:
|
||||
- build
|
||||
image: ziglang/static-base:llvm14-aarch64-3
|
||||
commands:
|
||||
- ./ci/drone/linux_script_test 1
|
||||
- ./ci/drone/test_linux_behavior
|
||||
|
||||
- name: test-2
|
||||
- name: std_Debug
|
||||
depends_on:
|
||||
- build
|
||||
image: ziglang/static-base:llvm14-aarch64-3
|
||||
commands:
|
||||
- ./ci/drone/linux_script_test 2
|
||||
- ./ci/drone/test_linux_std_Debug
|
||||
|
||||
- name: test-3
|
||||
- name: std_ReleaseSafe
|
||||
depends_on:
|
||||
- build
|
||||
image: ziglang/static-base:llvm14-aarch64-3
|
||||
commands:
|
||||
- ./ci/drone/linux_script_test 3
|
||||
- ./ci/drone/test_linux_std_ReleaseSafe
|
||||
|
||||
- name: test-4
|
||||
- name: std_ReleaseFast
|
||||
depends_on:
|
||||
- build
|
||||
image: ziglang/static-base:llvm14-aarch64-3
|
||||
commands:
|
||||
- ./ci/drone/linux_script_test 4
|
||||
- ./ci/drone/test_linux_std_ReleaseFast
|
||||
|
||||
- name: test-5
|
||||
- name: std_ReleaseSmall
|
||||
depends_on:
|
||||
- build
|
||||
image: ziglang/static-base:llvm14-aarch64-3
|
||||
commands:
|
||||
- ./ci/drone/linux_script_test 5
|
||||
- ./ci/drone/test_linux_std_ReleaseSmall
|
||||
|
||||
- name: test-6
|
||||
- name: misc
|
||||
depends_on:
|
||||
- build
|
||||
image: ziglang/static-base:llvm14-aarch64-3
|
||||
commands:
|
||||
- ./ci/drone/linux_script_test 6
|
||||
- ./ci/drone/test_linux_misc
|
||||
|
||||
- name: test-7
|
||||
- name: cases
|
||||
depends_on:
|
||||
- build
|
||||
image: ziglang/static-base:llvm14-aarch64-3
|
||||
commands:
|
||||
- ./ci/drone/linux_script_test 7
|
||||
- ./ci/drone/test_linux_cases
|
||||
|
||||
- name: finalize
|
||||
depends_on:
|
||||
- build
|
||||
- test-1
|
||||
- test-2
|
||||
- test-3
|
||||
- test-4
|
||||
- test-5
|
||||
- test-6
|
||||
- test-7
|
||||
- behavior
|
||||
- std_Debug
|
||||
- std_ReleaseSafe
|
||||
- std_ReleaseFast
|
||||
- std_ReleaseSmall
|
||||
- misc
|
||||
- cases
|
||||
image: ziglang/static-base:llvm14-aarch64-3
|
||||
environment:
|
||||
SRHT_OAUTH_TOKEN:
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# https://docs.drone.io/pipeline/docker/syntax/workspace/
|
||||
#
|
||||
# Drone automatically creates a temporary volume, known as your workspace,
|
||||
# where it clones your repository. The workspace is the current working
|
||||
# directory for each step in your pipeline.
|
||||
#
|
||||
# Because the workspace is a volume, filesystem changes are persisted between
|
||||
# pipeline steps. In other words, individual steps can communicate and share
|
||||
# state using the filesystem.
|
||||
#
|
||||
# Workspace volumes are ephemeral. They are created when the pipeline starts
|
||||
# and destroyed after the pipeline completes.
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
TRIPLEARCH="$(uname -m)"
|
||||
DISTDIR="$DRONE_WORKSPACE/dist"
|
||||
|
||||
export ZIG_GLOBAL_CACHE_DIR="$DRONE_WORKSPACE/zig-cache"
|
||||
+10
-17
@@ -1,17 +1,16 @@
|
||||
#!/bin/sh
|
||||
|
||||
. ./ci/drone/linux_script_base
|
||||
set -x
|
||||
set -e
|
||||
|
||||
# Probe CPU/brand details.
|
||||
# TODO: `lscpu` is changing package names in EDGE to `util-linux-misc`
|
||||
apk update
|
||||
apk add util-linux
|
||||
echo "lscpu:"
|
||||
lscpu | sed 's,^, : ,'
|
||||
ARCH="$(uname -m)"
|
||||
INSTALL_PREFIX="$DRONE_WORKSPACE/stage3-release"
|
||||
|
||||
export ZIG_GLOBAL_CACHE_DIR="$DRONE_WORKSPACE/zig-cache"
|
||||
|
||||
PREFIX="/deps/local"
|
||||
ZIG="$PREFIX/bin/zig"
|
||||
TARGET="$TRIPLEARCH-linux-musl"
|
||||
TARGET="$ARCH-linux-musl"
|
||||
MCPU="baseline"
|
||||
|
||||
export CC="$ZIG cc -target $TARGET -mcpu=$MCPU"
|
||||
@@ -30,8 +29,8 @@ cat <<'ENDFILE' >$PREFIX/bin/ranlib
|
||||
/deps/local/bin/zig ranlib $@
|
||||
ENDFILE
|
||||
|
||||
chmod +x $PREFIX/bin/ar
|
||||
chmod +x $PREFIX/bin/ranlib
|
||||
chmod +x "$PREFIX/bin/ar"
|
||||
chmod +x "$PREFIX/bin/ranlib"
|
||||
|
||||
# Make the `zig version` number consistent.
|
||||
# This will affect the cmake command below.
|
||||
@@ -42,8 +41,8 @@ git fetch --tags
|
||||
mkdir build
|
||||
cd build
|
||||
cmake .. \
|
||||
-DCMAKE_INSTALL_PREFIX="$DISTDIR" \
|
||||
-DCMAKE_PREFIX_PATH="$PREFIX" \
|
||||
-DCMAKE_INSTALL_PREFIX="$INSTALL_PREFIX" \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DCMAKE_AR="$PREFIX/bin/ar" \
|
||||
-DCMAKE_RANLIB="$PREFIX/bin/ranlib" \
|
||||
@@ -57,9 +56,3 @@ cmake .. \
|
||||
unset CC
|
||||
unset CXX
|
||||
samu install
|
||||
|
||||
# Here we rebuild Zig but this time using the Zig binary we just now produced to
|
||||
# build zig1.o rather than relying on the one built with stage0. See
|
||||
# https://github.com/ziglang/zig/issues/6830 for more details.
|
||||
cmake .. -DZIG_EXECUTABLE="$DISTDIR/bin/zig"
|
||||
samu install
|
||||
|
||||
@@ -1,6 +1,12 @@
|
||||
#!/bin/sh
|
||||
|
||||
. ./ci/drone/linux_script_base
|
||||
set -x
|
||||
set -e
|
||||
|
||||
ARCH="$(uname -m)"
|
||||
INSTALL_PREFIX="$DRONE_WORKSPACE/stage3-release"
|
||||
|
||||
export ZIG_GLOBAL_CACHE_DIR="$DRONE_WORKSPACE/zig-cache"
|
||||
|
||||
if [ -n "$DRONE_PULL_REQUEST" ]; then
|
||||
exit 0
|
||||
@@ -12,16 +18,16 @@ pip3 install s3cmd
|
||||
|
||||
cd build
|
||||
|
||||
mv ../LICENSE "$DISTDIR/"
|
||||
mv ../zig-cache/langref.html "$DISTDIR/"
|
||||
mv "$DISTDIR/bin/zig" "$DISTDIR/"
|
||||
rmdir "$DISTDIR/bin"
|
||||
mv ../LICENSE "$INSTALL_PREFIX/"
|
||||
mv ../zig-cache/langref.html "$INSTALL_PREFIX/"
|
||||
mv "$INSTALL_PREFIX/bin/zig" "$INSTALL_PREFIX/"
|
||||
rmdir "$INSTALL_PREFIX/bin"
|
||||
|
||||
GITBRANCH="$DRONE_BRANCH"
|
||||
VERSION="$("$DISTDIR/zig" version)"
|
||||
DIRNAME="zig-linux-$TRIPLEARCH-$VERSION"
|
||||
VERSION="$("$INSTALL_PREFIX/zig" version)"
|
||||
DIRNAME="zig-linux-$ARCH-$VERSION"
|
||||
TARBALL="$DIRNAME.tar.xz"
|
||||
mv "$DISTDIR" "$DIRNAME"
|
||||
mv "$INSTALL_PREFIX" "$DIRNAME"
|
||||
tar cfJ "$TARBALL" "$DIRNAME"
|
||||
|
||||
s3cmd put -P --add-header="cache-control: public, max-age=31536000, immutable" "$TARBALL" s3://ziglang.org/builds/
|
||||
@@ -35,7 +41,7 @@ echo "{\"tarball\": \"$TARBALL\"," >>$JSONFILE
|
||||
echo "\"shasum\": \"$SHASUM\"," >>$JSONFILE
|
||||
echo "\"size\": \"$BYTESIZE\"}" >>$JSONFILE
|
||||
|
||||
s3cmd put -P "$JSONFILE" "s3://ziglang.org/builds/$TRIPLEARCH-linux-$VERSION.json"
|
||||
s3cmd put -P "$JSONFILE" "s3://ziglang.org/builds/$ARCH-linux-$VERSION.json"
|
||||
if [ "$GITBRANCH" = "master" ]; then
|
||||
# avoid leaking oauth token
|
||||
set +x
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
. ./ci/drone/linux_script_base
|
||||
|
||||
BUILD_FLAGS="-Dskip-non-native"
|
||||
|
||||
case "$1" in
|
||||
1)
|
||||
./build/zig build $BUILD_FLAGS test-behavior
|
||||
./build/zig build $BUILD_FLAGS test-compiler-rt
|
||||
./build/zig build $BUILD_FLAGS test-fmt
|
||||
./build/zig build $BUILD_FLAGS docs
|
||||
;;
|
||||
2)
|
||||
# Debug
|
||||
./build/zig build $BUILD_FLAGS test-std -Dskip-release-safe -Dskip-release-fast -Dskip-release-small
|
||||
;;
|
||||
3)
|
||||
# ReleaseSafe
|
||||
./build/zig build $BUILD_FLAGS test-std -Dskip-debug -Dskip-release-fast -Dskip-release-small -Dskip-non-native -Dskip-single-threaded
|
||||
;;
|
||||
4)
|
||||
# ReleaseFast
|
||||
./build/zig build $BUILD_FLAGS test-std -Dskip-debug -Dskip-release-safe -Dskip-release-small -Dskip-non-native -Dskip-single-threaded
|
||||
;;
|
||||
5)
|
||||
# ReleaseSmall
|
||||
./build/zig build $BUILD_FLAGS test-std -Dskip-debug -Dskip-release-safe -Dskip-release-fast
|
||||
;;
|
||||
6)
|
||||
./build/zig build $BUILD_FLAGS test-universal-libc
|
||||
./build/zig build $BUILD_FLAGS test-compare-output
|
||||
./build/zig build $BUILD_FLAGS test-standalone -Dskip-release-safe
|
||||
./build/zig build $BUILD_FLAGS test-stack-traces
|
||||
./build/zig build $BUILD_FLAGS test-cli
|
||||
./build/zig build $BUILD_FLAGS test-asm-link
|
||||
./build/zig build $BUILD_FLAGS test-translate-c
|
||||
;;
|
||||
7)
|
||||
./build/zig build $BUILD_FLAGS # test building self-hosted without LLVM
|
||||
./build/zig build $BUILD_FLAGS test-cases
|
||||
;;
|
||||
'')
|
||||
echo "error: expecting test group argument"
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
echo "error: unknown test group: $1"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
Executable
+13
@@ -0,0 +1,13 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
INSTALL_PREFIX="$DRONE_WORKSPACE/stage3-release"
|
||||
ZIG="$INSTALL_PREFIX/bin/zig"
|
||||
export ZIG_GLOBAL_CACHE_DIR="$DRONE_WORKSPACE/zig-cache"
|
||||
|
||||
$ZIG build test-behavior -Dskip-non-native
|
||||
$ZIG build test-compiler-rt -Dskip-non-native
|
||||
$ZIG build test-fmt
|
||||
$ZIG build docs
|
||||
Executable
+11
@@ -0,0 +1,11 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
INSTALL_PREFIX="$DRONE_WORKSPACE/stage3-release"
|
||||
ZIG="$INSTALL_PREFIX/bin/zig"
|
||||
export ZIG_GLOBAL_CACHE_DIR="$DRONE_WORKSPACE/zig-cache"
|
||||
|
||||
$ZIG build -Dskip-non-native # test building self-hosted without LLVM
|
||||
$ZIG build -Dskip-non-native test-cases
|
||||
Executable
+16
@@ -0,0 +1,16 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
INSTALL_PREFIX="$DRONE_WORKSPACE/stage3-release"
|
||||
ZIG="$INSTALL_PREFIX/bin/zig"
|
||||
export ZIG_GLOBAL_CACHE_DIR="$DRONE_WORKSPACE/zig-cache"
|
||||
|
||||
$ZIG build test-universal-libc -Dskip-non-native
|
||||
$ZIG build test-compare-output -Dskip-non-native
|
||||
$ZIG build test-standalone -Dskip-non-native -Dskip-release-safe
|
||||
$ZIG build test-stack-traces -Dskip-non-native
|
||||
$ZIG build test-cli -Dskip-non-native
|
||||
$ZIG build test-asm-link -Dskip-non-native
|
||||
$ZIG build test-translate-c -Dskip-non-native
|
||||
Executable
+10
@@ -0,0 +1,10 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
INSTALL_PREFIX="$DRONE_WORKSPACE/stage3-release"
|
||||
ZIG="$INSTALL_PREFIX/bin/zig"
|
||||
export ZIG_GLOBAL_CACHE_DIR="$DRONE_WORKSPACE/zig-cache"
|
||||
|
||||
$ZIG build test-std -Dskip-release-safe -Dskip-release-fast -Dskip-release-small -Dskip-non-native
|
||||
Executable
+10
@@ -0,0 +1,10 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
INSTALL_PREFIX="$DRONE_WORKSPACE/stage3-release"
|
||||
ZIG="$INSTALL_PREFIX/bin/zig"
|
||||
export ZIG_GLOBAL_CACHE_DIR="$DRONE_WORKSPACE/zig-cache"
|
||||
|
||||
$ZIG build test-std -Dskip-debug -Dskip-release-safe -Dskip-release-small -Dskip-non-native -Dskip-single-threaded
|
||||
Executable
+10
@@ -0,0 +1,10 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
INSTALL_PREFIX="$DRONE_WORKSPACE/stage3-release"
|
||||
ZIG="$INSTALL_PREFIX/bin/zig"
|
||||
export ZIG_GLOBAL_CACHE_DIR="$DRONE_WORKSPACE/zig-cache"
|
||||
|
||||
$ZIG build test-std -Dskip-debug -Dskip-release-fast -Dskip-release-small -Dskip-non-native -Dskip-single-threaded
|
||||
Executable
+16
@@ -0,0 +1,16 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
INSTALL_PREFIX="$DRONE_WORKSPACE/stage3-release"
|
||||
ZIG="$INSTALL_PREFIX/bin/zig"
|
||||
export ZIG_GLOBAL_CACHE_DIR="$DRONE_WORKSPACE/zig-cache"
|
||||
|
||||
# Empirically, this takes about 55 minutes on the CI, and is the bottleneck
|
||||
# causing timeouts. So this is disabled in favor of running a smaller set
|
||||
# of ReleaseSmall std lib tests.
|
||||
# $ZIG build test-std -Dskip-debug -Dskip-release-safe -Dskip-release-fast -Dskip-non-native
|
||||
|
||||
$ZIG test lib/std/std.zig -OReleaseSmall
|
||||
$ZIG test lib/std/std.zig -OReleaseSmall -lc
|
||||
+35
-20
@@ -7,7 +7,9 @@ sudo pkg update -fq
|
||||
sudo pkg install -y cmake py39-s3cmd wget curl jq samurai
|
||||
|
||||
ZIGDIR="$(pwd)"
|
||||
CACHE_BASENAME="zig+llvm+lld+clang-x86_64-freebsd-gnu-0.10.0-dev.2931+bdf3fa12f"
|
||||
TARGET="x86_64-freebsd-gnu"
|
||||
MCPU="baseline"
|
||||
CACHE_BASENAME="zig+llvm+lld+clang-$TARGET-0.10.0-dev.3524+74673b7f6"
|
||||
PREFIX="$HOME/$CACHE_BASENAME"
|
||||
|
||||
cd $HOME
|
||||
@@ -29,34 +31,47 @@ export TERM=dumb
|
||||
|
||||
mkdir build
|
||||
cd build
|
||||
cmake .. \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DCMAKE_PREFIX_PATH=$PREFIX \
|
||||
"-DCMAKE_INSTALL_PREFIX=$(pwd)/release" \
|
||||
-DZIG_STATIC=ON \
|
||||
-DZIG_TARGET_TRIPLE=x86_64-freebsd-gnu \
|
||||
-GNinja
|
||||
samu install
|
||||
|
||||
# TODO ld.lld: error: undefined symbol: main
|
||||
# >>> referenced by crt1_c.c:75 (/usr/src/lib/csu/amd64/crt1_c.c:75)
|
||||
# >>> /usr/lib/crt1.o:(_start)
|
||||
#release/bin/zig test ../test/behavior.zig -fno-stage1 -fLLVM -I ../test
|
||||
|
||||
cmake .. \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DCMAKE_PREFIX_PATH=$PREFIX \
|
||||
-DZIG_TARGET_TRIPLE="$TARGET" \
|
||||
-DZIG_TARGET_MCPU="$MCPU" \
|
||||
-DZIG_STATIC=ON \
|
||||
-GNinja
|
||||
|
||||
# TODO: eliminate this workaround. Without this, zig does not end up passing
|
||||
# -isystem /usr/include when building libc++, resulting in #include <sys/endian.h>
|
||||
# "file not found" errors.
|
||||
echo "include_dir=/usr/include" >>libc.txt
|
||||
echo "sys_include_dir=/usr/include" >>libc.txt
|
||||
echo "crt_dir=/usr/lib" >>libc.txt
|
||||
echo "msvc_lib_dir=" >>libc.txt
|
||||
echo "kernel32_lib_dir=" >>libc.txt
|
||||
echo "gcc_dir=" >>libc.txt
|
||||
ZIG_LIBC_TXT="$(pwd)/libc.txt"
|
||||
|
||||
ZIG_LIBC="$ZIG_LIBC_TXT" samu install
|
||||
|
||||
# Here we skip some tests to save time.
|
||||
release/bin/zig build test -Dskip-stage1 -Dskip-non-native
|
||||
stage3/bin/zig build test docs \
|
||||
-Dstatic-llvm \
|
||||
--search-prefix "$PREFIX" \
|
||||
-Dskip-stage1 \
|
||||
-Dskip-non-native
|
||||
|
||||
if [ -f ~/.s3cfg ]; then
|
||||
mv ../LICENSE release/
|
||||
mv ../zig-cache/langref.html release/
|
||||
mv release/bin/zig release/
|
||||
rmdir release/bin
|
||||
mv ../LICENSE stage3/
|
||||
mv ../zig-cache/langref.html stage3/
|
||||
mv stage3/bin/zig stage3/
|
||||
rmdir stage3/bin
|
||||
|
||||
GITBRANCH=$(basename $GITHUB_REF)
|
||||
VERSION=$(release/zig version)
|
||||
VERSION=$(stage3/zig version)
|
||||
DIRNAME="zig-freebsd-x86_64-$VERSION"
|
||||
TARBALL="$DIRNAME.tar.xz"
|
||||
mv release "$DIRNAME"
|
||||
mv stage3 "$DIRNAME"
|
||||
tar cfJ "$TARBALL" "$DIRNAME"
|
||||
|
||||
s3cmd put -P --add-header="cache-control: public, max-age=31536000, immutable" "$TARBALL" s3://ziglang.org/builds/
|
||||
|
||||
@@ -100,6 +100,27 @@ cd "$SRCTARBALLDIR/ci/srht"
|
||||
CIDIR="$(pwd)"
|
||||
|
||||
cd "$HOME"
|
||||
|
||||
# Upload new stdlib autodocs
|
||||
mkdir -p docs_to_upload/documentation/master/std/
|
||||
gzip -c -9 "$ZIGDIR/docs/std/index.html" > docs_to_upload/documentation/master/std/index.html
|
||||
gzip -c -9 "$ZIGDIR/docs/std/data.js" > docs_to_upload/documentation/master/std/data.js
|
||||
gzip -c -9 "$ZIGDIR/docs/std/main.js" > docs_to_upload/documentation/master/std/main.js
|
||||
gzip -c -9 "$LANGREF" > docs_to_upload/documentation/master/index.html
|
||||
$S3CMD put -P --no-mime-magic --recursive --add-header="Content-Encoding:gzip" --add-header="Cache-Control: max-age=0, must-revalidate" "docs_to_upload/" s3://ziglang.org/
|
||||
|
||||
mkdir -p docs_src_to_upload/documentation/master/std/
|
||||
cp -r "$ZIGDIR/docs/std/src" docs_src_to_upload/documentation/master/std/
|
||||
$S3CMD put -P --no-mime-magic --recursive --add-header:"Content-Type:text/html" --add-header="Cache-Control: max-age=0, must-revalidate" "docs_src_to_upload/" s3://ziglang.org/
|
||||
|
||||
## Copy without compression:
|
||||
# mkdir -p docs_to_upload/documentation/master/std/
|
||||
# cp "$ZIGDIR/docs/std/index.html" docs_to_upload/documentation/master/std/index.html
|
||||
# cp "$ZIGDIR/docs/std/data.js" docs_to_upload/documentation/master/std/data.js
|
||||
# cp "$ZIGDIR/docs/std/main.js" docs_to_upload/documentation/master/std/main.js
|
||||
# cp "$LANGREF" docs_to_upload/documentation/master/index.html
|
||||
# $S3CMD put -P --no-mime-magic --recursive --add-header="Cache-Control: max-age=0, must-revalidate" "docs_to_upload/" s3://ziglang.org/
|
||||
|
||||
git clone --depth 1 git@github.com:ziglang/www.ziglang.org.git
|
||||
cd www.ziglang.org
|
||||
WWWDIR="$(pwd)"
|
||||
@@ -108,12 +129,6 @@ $S3CMD put -P --no-mime-magic --add-header="cache-control: public, max-age=31536
|
||||
|
||||
cd "$WWWDIR"
|
||||
cp "$CIDIR/out/index.json" data/releases.json
|
||||
mkdir -p content/documentation/master/std
|
||||
cp "$LANGREF" content/documentation/master/index.html
|
||||
cp "$ZIGDIR/docs/std/index.html" content/documentation/master/std/index.html
|
||||
cp "$ZIGDIR/docs/std/data.js" content/documentation/master/std/data.js
|
||||
cp "$ZIGDIR/docs/std/main.js" content/documentation/master/std/main.js
|
||||
git add data/releases.json
|
||||
git add content/
|
||||
git commit -m "CI: update releases and docs"
|
||||
git commit -m "CI: update releases"
|
||||
git push origin master
|
||||
|
||||
Executable
+20
@@ -0,0 +1,20 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
RELEASE_STAGING="$DRONE_WORKSPACE/_release/staging"
|
||||
TARGET="aarch64-macos-none"
|
||||
MCPU="apple_a14"
|
||||
INSTALL_PREFIX="$DRONE_WORKSPACE/$TARGET"
|
||||
SEARCH_PREFIX="/deps/$TARGET"
|
||||
|
||||
"$RELEASE_STAGING/bin/zig" build \
|
||||
--prefix "$INSTALL_PREFIX" \
|
||||
--search-prefix "$SEARCH_PREFIX" \
|
||||
-Dstatic-llvm \
|
||||
-Drelease \
|
||||
-Dstrip \
|
||||
-Dtarget="$TARGET" \
|
||||
-Dmcpu="$MCPU" \
|
||||
-Denable-stage1
|
||||
Executable
+10
@@ -0,0 +1,10 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
# Make the `zig version` number consistent.
|
||||
# This will affect the cmake commands that follow.
|
||||
# This is in its own script because git does not support this command
|
||||
# being run concurrently with itself.
|
||||
git config core.abbrev 9
|
||||
+56
-7
@@ -9,26 +9,75 @@ workspace:
|
||||
path: /workspace
|
||||
|
||||
steps:
|
||||
- name: test
|
||||
image: ci/debian-amd64:11.1-6
|
||||
- name: configure_git
|
||||
image: ci/debian-amd64:11.1-9
|
||||
commands:
|
||||
- ./ci/zinc/linux_test.sh
|
||||
- ./ci/zinc/configure_git
|
||||
|
||||
- name: package
|
||||
- name: test_stage3_debug
|
||||
depends_on:
|
||||
- test
|
||||
- configure_git
|
||||
image: ci/debian-amd64:11.1-9
|
||||
commands:
|
||||
- ./ci/zinc/linux_test_stage3_debug
|
||||
|
||||
- name: test_stage3_release
|
||||
depends_on:
|
||||
- configure_git
|
||||
image: ci/debian-amd64:11.1-9
|
||||
commands:
|
||||
- ./ci/zinc/linux_test_stage3_release
|
||||
|
||||
- name: build_aarch64_macos
|
||||
depends_on:
|
||||
- test_stage3_release
|
||||
image: ci/debian-amd64:11.1-9
|
||||
commands:
|
||||
- ./ci/zinc/build_aarch64_macos
|
||||
|
||||
- name: linux_package
|
||||
depends_on:
|
||||
- test_stage3_debug
|
||||
- test_stage3_release
|
||||
when:
|
||||
branch:
|
||||
- master
|
||||
event:
|
||||
- push
|
||||
image: ci/debian-amd64:11.1-6
|
||||
image: ci/debian-amd64:11.1-9
|
||||
environment:
|
||||
AWS_ACCESS_KEY_ID:
|
||||
from_secret: AWS_ACCESS_KEY_ID
|
||||
AWS_SECRET_ACCESS_KEY:
|
||||
from_secret: AWS_SECRET_ACCESS_KEY
|
||||
commands:
|
||||
- ./ci/zinc/linux_package
|
||||
|
||||
- name: macos_package
|
||||
depends_on:
|
||||
- test_stage3_debug
|
||||
- build_aarch64_macos
|
||||
when:
|
||||
branch:
|
||||
- master
|
||||
event:
|
||||
- push
|
||||
image: ci/debian-amd64:11.1-9
|
||||
environment:
|
||||
AWS_ACCESS_KEY_ID:
|
||||
from_secret: AWS_ACCESS_KEY_ID
|
||||
AWS_SECRET_ACCESS_KEY:
|
||||
from_secret: AWS_SECRET_ACCESS_KEY
|
||||
commands:
|
||||
- ./ci/zinc/macos_package
|
||||
|
||||
- name: notify_lavahut
|
||||
depends_on:
|
||||
- macos_package
|
||||
- linux_package
|
||||
image: ci/debian-amd64:11.1-9
|
||||
environment:
|
||||
SRHT_OAUTH_TOKEN:
|
||||
from_secret: SRHT_OAUTH_TOKEN
|
||||
commands:
|
||||
- ./ci/zinc/linux_package.sh
|
||||
- ./ci/zinc/notify_lavahut
|
||||
|
||||
@@ -1,27 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# https://docs.drone.io/pipeline/docker/syntax/workspace/
|
||||
#
|
||||
# Drone automatically creates a temporary volume, known as your workspace,
|
||||
# where it clones your repository. The workspace is the current working
|
||||
# directory for each step in your pipeline.
|
||||
#
|
||||
# Because the workspace is a volume, filesystem changes are persisted between
|
||||
# pipeline steps. In other words, individual steps can communicate and share
|
||||
# state using the filesystem.
|
||||
#
|
||||
# Workspace volumes are ephemeral. They are created when the pipeline starts
|
||||
# and destroyed after the pipeline completes.
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
ARCH="$(uname -m)"
|
||||
|
||||
DEPS_LOCAL="/deps/local"
|
||||
WORKSPACE="$DRONE_WORKSPACE"
|
||||
|
||||
DEBUG_STAGING="$WORKSPACE/_debug/staging"
|
||||
RELEASE_STAGING="$WORKSPACE/_release/staging"
|
||||
|
||||
export PATH=$DEPS_LOCAL/bin:$PATH
|
||||
@@ -1,25 +1,30 @@
|
||||
#!/bin/sh
|
||||
|
||||
. ./ci/zinc/linux_base.sh
|
||||
set -x
|
||||
set -e
|
||||
|
||||
cp LICENSE $RELEASE_STAGING/
|
||||
cp zig-cache/langref.html $RELEASE_STAGING/docs/
|
||||
ARCH="$(uname -m)"
|
||||
OS="linux"
|
||||
RELEASE_STAGING="$DRONE_WORKSPACE/_release/staging"
|
||||
VERSION=$($RELEASE_STAGING/bin/zig version)
|
||||
BASENAME="zig-$OS-$ARCH-$VERSION"
|
||||
TARBALL="$BASENAME.tar.xz"
|
||||
|
||||
# This runs concurrently with the macos_package script, so it should not make
|
||||
# any changes to the filesystem that will cause problems for the other script.
|
||||
|
||||
cp -r "$RELEASE_STAGING" "$BASENAME"
|
||||
|
||||
# Remove the unnecessary bin dir in $prefix/bin/zig
|
||||
mv $RELEASE_STAGING/bin/zig $RELEASE_STAGING/
|
||||
rmdir $RELEASE_STAGING/bin
|
||||
mv $BASENAME/bin/zig $BASENAME/
|
||||
rmdir $BASENAME/bin
|
||||
|
||||
# Remove the unnecessary zig dir in $prefix/lib/zig/std/std.zig
|
||||
mv $RELEASE_STAGING/lib/zig $RELEASE_STAGING/lib2
|
||||
rmdir $RELEASE_STAGING/lib
|
||||
mv $RELEASE_STAGING/lib2 $RELEASE_STAGING/lib
|
||||
mv $BASENAME/lib/zig $BASENAME/lib2
|
||||
rmdir $BASENAME/lib
|
||||
mv $BASENAME/lib2 $BASENAME/lib
|
||||
|
||||
VERSION=$($RELEASE_STAGING/zig version)
|
||||
BASENAME="zig-linux-$ARCH-$VERSION"
|
||||
TARBALL="$BASENAME.tar.xz"
|
||||
mv "$RELEASE_STAGING" "$BASENAME"
|
||||
tar cfJ "$TARBALL" "$BASENAME"
|
||||
ls -l "$TARBALL"
|
||||
|
||||
SHASUM=$(sha256sum $TARBALL | cut '-d ' -f1)
|
||||
BYTESIZE=$(wc -c < $TARBALL)
|
||||
@@ -34,15 +39,7 @@ echo "\"size\": \"$BYTESIZE\"}" >>$MANIFEST
|
||||
s3cmd put -P --add-header="cache-control: public, max-age=31536000, immutable" "$TARBALL" s3://ziglang.org/builds/
|
||||
|
||||
# Publish manifest.
|
||||
s3cmd put -P --add-header="cache-control: max-age=0, must-revalidate" "$MANIFEST" "s3://ziglang.org/builds/$ARCH-linux-$VERSION.json"
|
||||
|
||||
# Avoid leaking oauth token.
|
||||
set +x
|
||||
|
||||
cd $WORKSPACE
|
||||
./ci/srht/on_master_success "$VERSION" "$SRHT_OAUTH_TOKEN"
|
||||
|
||||
set -x
|
||||
s3cmd put -P --add-header="cache-control: max-age=0, must-revalidate" "$MANIFEST" "s3://ziglang.org/builds/$ARCH-$OS-$VERSION.json"
|
||||
|
||||
# Explicit exit helps show last command duration.
|
||||
exit
|
||||
@@ -1,93 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
. ./ci/zinc/linux_base.sh
|
||||
|
||||
OLD_ZIG="$DEPS_LOCAL/bin/zig"
|
||||
TARGET="${ARCH}-linux-musl"
|
||||
MCPU="baseline"
|
||||
|
||||
# Make the `zig version` number consistent.
|
||||
# This will affect the cmake command below.
|
||||
git config core.abbrev 9
|
||||
|
||||
echo "building debug zig with zig version $($OLD_ZIG version)"
|
||||
|
||||
export CC="$OLD_ZIG cc -target $TARGET -mcpu=$MCPU"
|
||||
export CXX="$OLD_ZIG c++ -target $TARGET -mcpu=$MCPU"
|
||||
|
||||
mkdir _debug
|
||||
cd _debug
|
||||
cmake .. \
|
||||
-DCMAKE_INSTALL_PREFIX="$DEBUG_STAGING" \
|
||||
-DCMAKE_PREFIX_PATH="$DEPS_LOCAL" \
|
||||
-DCMAKE_BUILD_TYPE=Debug \
|
||||
-DZIG_TARGET_TRIPLE="$TARGET" \
|
||||
-DZIG_TARGET_MCPU="$MCPU" \
|
||||
-DZIG_STATIC=ON \
|
||||
-GNinja
|
||||
|
||||
# Now cmake will use zig as the C/C++ compiler. We reset the environment variables
|
||||
# so that installation and testing do not get affected by them.
|
||||
unset CC
|
||||
unset CXX
|
||||
|
||||
ninja install
|
||||
|
||||
STAGE1_ZIG="$DEBUG_STAGING/bin/zig"
|
||||
|
||||
# Here we rebuild zig but this time using the Zig binary we just now produced to
|
||||
# build zig1.o rather than relying on the one built with stage0. See
|
||||
# https://github.com/ziglang/zig/issues/6830 for more details.
|
||||
cmake .. -DZIG_EXECUTABLE="$STAGE1_ZIG"
|
||||
ninja install
|
||||
|
||||
cd $WORKSPACE
|
||||
|
||||
echo "Looking for non-conforming code formatting..."
|
||||
echo "Formatting errors can be fixed by running 'zig fmt' on the files printed here."
|
||||
$STAGE1_ZIG fmt --check . --exclude test/cases/
|
||||
|
||||
$STAGE1_ZIG build -p stage2 -Dstatic-llvm -Dtarget=native-native-musl --search-prefix "$DEPS_LOCAL"
|
||||
stage2/bin/zig build -p stage3 -Dstatic-llvm -Dtarget=native-native-musl --search-prefix "$DEPS_LOCAL"
|
||||
stage3/bin/zig build # test building self-hosted without LLVM
|
||||
stage3/bin/zig build -Dtarget=arm-linux-musleabihf # test building self-hosted for 32-bit arm
|
||||
|
||||
stage3/bin/zig build test-compiler-rt -fqemu -fwasmtime -Denable-llvm
|
||||
stage3/bin/zig build test-behavior -fqemu -fwasmtime -Denable-llvm
|
||||
stage3/bin/zig build test-std -fqemu -fwasmtime -Denable-llvm
|
||||
stage3/bin/zig build test-universal-libc -fqemu -fwasmtime -Denable-llvm
|
||||
stage3/bin/zig build test-compare-output -fqemu -fwasmtime -Denable-llvm
|
||||
stage3/bin/zig build test-asm-link -fqemu -fwasmtime -Denable-llvm
|
||||
stage3/bin/zig build test-fmt -fqemu -fwasmtime -Denable-llvm
|
||||
stage3/bin/zig build test-translate-c -fqemu -fwasmtime -Denable-llvm
|
||||
stage3/bin/zig build test-run-translated-c -fqemu -fwasmtime -Denable-llvm
|
||||
stage3/bin/zig build test-standalone -fqemu -fwasmtime -Denable-llvm
|
||||
stage3/bin/zig build test-cli -fqemu -fwasmtime -Denable-llvm
|
||||
stage3/bin/zig build test-cases -fqemu -fwasmtime -Dstatic-llvm -Dtarget=native-native-musl --search-prefix "$DEPS_LOCAL"
|
||||
stage3/bin/zig build test-link -fqemu -fwasmtime -Denable-llvm
|
||||
|
||||
$STAGE1_ZIG build test-stack-traces -fqemu -fwasmtime
|
||||
$STAGE1_ZIG build docs -fqemu -fwasmtime
|
||||
|
||||
# Produce the experimental std lib documentation.
|
||||
mkdir -p "$RELEASE_STAGING/docs/std"
|
||||
stage3/bin/zig test lib/std/std.zig \
|
||||
--zig-lib-dir lib \
|
||||
-femit-docs=$RELEASE_STAGING/docs/std \
|
||||
-fno-emit-bin
|
||||
|
||||
# Look for HTML errors.
|
||||
tidy --drop-empty-elements no -qe zig-cache/langref.html
|
||||
|
||||
# Build release zig.
|
||||
stage3/bin/zig build \
|
||||
--prefix "$RELEASE_STAGING" \
|
||||
--search-prefix "$DEPS_LOCAL" \
|
||||
-Dstatic-llvm \
|
||||
-Drelease \
|
||||
-Dstrip \
|
||||
-Dtarget="$TARGET" \
|
||||
-Dstage1
|
||||
|
||||
# Explicit exit helps show last command duration.
|
||||
exit
|
||||
Executable
+61
@@ -0,0 +1,61 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
ARCH="$(uname -m)"
|
||||
DEPS_LOCAL="/deps/local"
|
||||
OLD_ZIG="$DEPS_LOCAL/bin/zig"
|
||||
TARGET="${ARCH}-linux-musl"
|
||||
MCPU="baseline"
|
||||
|
||||
export PATH=$DEPS_LOCAL/bin:$PATH
|
||||
|
||||
echo "building stage3-debug with zig version $($OLD_ZIG version)"
|
||||
|
||||
# Override the cache directories so that we don't clobber with the release
|
||||
# testing script which is running concurrently and in the same directory.
|
||||
# Normally we want processes to cooperate, but in this case we want them isolated.
|
||||
export ZIG_LOCAL_CACHE_DIR="$(pwd)/zig-cache-local-debug"
|
||||
export ZIG_GLOBAL_CACHE_DIR="$(pwd)/zig-cache-global-debug"
|
||||
|
||||
export CC="$OLD_ZIG cc -target $TARGET -mcpu=$MCPU"
|
||||
export CXX="$OLD_ZIG c++ -target $TARGET -mcpu=$MCPU"
|
||||
|
||||
mkdir build-debug
|
||||
cd build-debug
|
||||
cmake .. \
|
||||
-DCMAKE_INSTALL_PREFIX="$(pwd)/stage3" \
|
||||
-DCMAKE_PREFIX_PATH="$DEPS_LOCAL" \
|
||||
-DCMAKE_BUILD_TYPE=Debug \
|
||||
-DZIG_STATIC=ON \
|
||||
-DZIG_USE_LLVM_CONFIG=OFF \
|
||||
-GNinja
|
||||
|
||||
# Now cmake will use zig as the C/C++ compiler. We reset the environment variables
|
||||
# so that installation and testing do not get affected by them.
|
||||
unset CC
|
||||
unset CXX
|
||||
|
||||
ninja install
|
||||
|
||||
echo "Looking for non-conforming code formatting..."
|
||||
stage3/bin/zig fmt --check .. \
|
||||
--exclude ../test/cases/ \
|
||||
--exclude ../build-debug \
|
||||
--exclude ../build-release \
|
||||
--exclude "$ZIG_LOCAL_CACHE_DIR" \
|
||||
--exclude "$ZIG_GLOBAL_CACHE_DIR"
|
||||
|
||||
# simultaneously test building self-hosted without LLVM and with 32-bit arm
|
||||
stage3/bin/zig build -Dtarget=arm-linux-musleabihf
|
||||
|
||||
stage3/bin/zig build test \
|
||||
-fqemu \
|
||||
-fwasmtime \
|
||||
-Dstatic-llvm \
|
||||
-Dtarget=native-native-musl \
|
||||
--search-prefix "$DEPS_LOCAL"
|
||||
|
||||
# Explicit exit helps show last command duration.
|
||||
exit
|
||||
Executable
+58
@@ -0,0 +1,58 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
ARCH="$(uname -m)"
|
||||
DEPS_LOCAL="/deps/local"
|
||||
RELEASE_STAGING="$DRONE_WORKSPACE/_release/staging"
|
||||
OLD_ZIG="$DEPS_LOCAL/bin/zig"
|
||||
TARGET="${ARCH}-linux-musl"
|
||||
MCPU="baseline"
|
||||
|
||||
export PATH=$DEPS_LOCAL/bin:$PATH
|
||||
|
||||
echo "building stage3-release with zig version $($OLD_ZIG version)"
|
||||
|
||||
export CC="$OLD_ZIG cc -target $TARGET -mcpu=$MCPU"
|
||||
export CXX="$OLD_ZIG c++ -target $TARGET -mcpu=$MCPU"
|
||||
|
||||
mkdir build-release
|
||||
cd build-release
|
||||
cmake .. \
|
||||
-DCMAKE_INSTALL_PREFIX="$RELEASE_STAGING" \
|
||||
-DCMAKE_PREFIX_PATH="$DEPS_LOCAL" \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DZIG_TARGET_TRIPLE="$TARGET" \
|
||||
-DZIG_TARGET_MCPU="$MCPU" \
|
||||
-DZIG_STATIC=ON \
|
||||
-GNinja
|
||||
|
||||
# Now cmake will use zig as the C/C++ compiler. We reset the environment variables
|
||||
# so that installation and testing do not get affected by them.
|
||||
unset CC
|
||||
unset CXX
|
||||
|
||||
ninja install
|
||||
|
||||
"$RELEASE_STAGING/bin/zig" build test docs \
|
||||
-fqemu \
|
||||
-fwasmtime \
|
||||
-Dstatic-llvm \
|
||||
-Dtarget=native-native-musl \
|
||||
--search-prefix "$DEPS_LOCAL"
|
||||
|
||||
# Produce the experimental std lib documentation.
|
||||
mkdir -p "$RELEASE_STAGING/docs/std"
|
||||
"$RELEASE_STAGING/bin/zig" test ../lib/std/std.zig \
|
||||
-femit-docs=$RELEASE_STAGING/docs/std \
|
||||
-fno-emit-bin
|
||||
|
||||
cp ../LICENSE $RELEASE_STAGING/
|
||||
cp ../zig-cache/langref.html $RELEASE_STAGING/docs/
|
||||
|
||||
# Look for HTML errors.
|
||||
tidy --drop-empty-elements no -qe $RELEASE_STAGING/docs/langref.html
|
||||
|
||||
# Explicit exit helps show last command duration.
|
||||
exit
|
||||
Executable
+49
@@ -0,0 +1,49 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
ARCH="aarch64"
|
||||
OS=macos
|
||||
ZIG_PREFIX="$DRONE_WORKSPACE/_release/staging"
|
||||
VERSION=$($ZIG_PREFIX/bin/zig version)
|
||||
TARGET="$ARCH-$OS-none"
|
||||
INSTALL_PREFIX="$DRONE_WORKSPACE/$TARGET"
|
||||
BASENAME="zig-$OS-$ARCH-$VERSION"
|
||||
TARBALL="$BASENAME.tar.xz"
|
||||
|
||||
# This runs concurrently with the linux_package script, so it should not make
|
||||
# any changes to the filesystem that will cause problems for the other script.
|
||||
|
||||
# Remove the unnecessary bin dir in $prefix/bin/zig
|
||||
mv $INSTALL_PREFIX/bin/zig $INSTALL_PREFIX/
|
||||
rmdir $INSTALL_PREFIX/bin
|
||||
|
||||
# Remove the unnecessary zig dir in $prefix/lib/zig/std/std.zig
|
||||
mv $INSTALL_PREFIX/lib/zig $INSTALL_PREFIX/lib2
|
||||
rmdir $INSTALL_PREFIX/lib
|
||||
mv $INSTALL_PREFIX/lib2 $INSTALL_PREFIX/lib
|
||||
|
||||
cp -r "$ZIG_PREFIX/docs" "$INSTALL_PREFIX/"
|
||||
cp "$ZIG_PREFIX/LICENSE" "$INSTALL_PREFIX/"
|
||||
|
||||
mv "$INSTALL_PREFIX" "$BASENAME"
|
||||
tar cfJ "$TARBALL" "$BASENAME"
|
||||
|
||||
SHASUM=$(sha256sum $TARBALL | cut '-d ' -f1)
|
||||
BYTESIZE=$(wc -c < $TARBALL)
|
||||
|
||||
MANIFEST="manifest.json"
|
||||
touch $MANIFEST
|
||||
echo "{\"tarball\": \"$TARBALL\"," >>$MANIFEST
|
||||
echo "\"shasum\": \"$SHASUM\"," >>$MANIFEST
|
||||
echo "\"size\": \"$BYTESIZE\"}" >>$MANIFEST
|
||||
|
||||
# Publish artifact.
|
||||
s3cmd put -P --add-header="cache-control: public, max-age=31536000, immutable" "$TARBALL" s3://ziglang.org/builds/
|
||||
|
||||
# Publish manifest.
|
||||
s3cmd put -P --add-header="cache-control: max-age=0, must-revalidate" "$MANIFEST" "s3://ziglang.org/builds/$ARCH-$OS-$VERSION.json"
|
||||
|
||||
# Explicit exit helps show last command duration.
|
||||
exit
|
||||
Executable
+9
@@ -0,0 +1,9 @@
|
||||
#!/bin/sh
|
||||
|
||||
set +x # Avoid leaking oauth token.
|
||||
set -e
|
||||
|
||||
ZIG_PREFIX="$DRONE_WORKSPACE/_release/staging"
|
||||
VERSION=$($ZIG_PREFIX/bin/zig version)
|
||||
cd $DRONE_WORKSPACE
|
||||
./ci/srht/on_master_success "$VERSION" "$SRHT_OAUTH_TOKEN"
|
||||
@@ -1,37 +0,0 @@
|
||||
message("-- Installing: ${CMAKE_INSTALL_PREFIX}/lib")
|
||||
|
||||
if(NOT EXISTS ${zig_EXE})
|
||||
message("::")
|
||||
message(":: ERROR: Executable not found")
|
||||
message(":: (execute_process)")
|
||||
message("::")
|
||||
message(":: executable: ${zig_EXE}")
|
||||
message("::")
|
||||
message(FATAL_ERROR)
|
||||
endif()
|
||||
|
||||
execute_process(COMMAND ${zig_EXE} ${ZIG_INSTALL_ARGS}
|
||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
|
||||
RESULT_VARIABLE _result
|
||||
)
|
||||
if(_result)
|
||||
message("::")
|
||||
message(":: ERROR: ${_result}")
|
||||
message(":: (execute_process)")
|
||||
|
||||
string(REPLACE ";" " " s_INSTALL_LIBSTAGE2_ARGS "${ZIG_INSTALL_ARGS}")
|
||||
message("::")
|
||||
message(":: argv: ${zig_EXE} ${s_INSTALL_LIBSTAGE2_ARGS}")
|
||||
|
||||
set(_args ${zig_EXE} ${ZIG_INSTALL_ARGS})
|
||||
list(LENGTH _args _len)
|
||||
math(EXPR _len "${_len} - 1")
|
||||
message("::")
|
||||
foreach(_i RANGE 0 ${_len})
|
||||
list(GET _args ${_i} _arg)
|
||||
message(":: argv[${_i}]: ${_arg}")
|
||||
endforeach()
|
||||
|
||||
message("::")
|
||||
message(FATAL_ERROR)
|
||||
endif()
|
||||
@@ -285,6 +285,7 @@ const Code = struct {
|
||||
link_objects: []const []const u8,
|
||||
target_str: ?[]const u8,
|
||||
link_libc: bool,
|
||||
backend_stage1: bool,
|
||||
link_mode: ?std.builtin.LinkMode,
|
||||
disable_cache: bool,
|
||||
verbose_cimport: bool,
|
||||
@@ -554,6 +555,7 @@ fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc {
|
||||
var link_mode: ?std.builtin.LinkMode = null;
|
||||
var disable_cache = false;
|
||||
var verbose_cimport = false;
|
||||
var backend_stage1 = false;
|
||||
|
||||
const source_token = while (true) {
|
||||
const content_tok = try eatToken(tokenizer, Token.Id.Content);
|
||||
@@ -586,6 +588,8 @@ fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc {
|
||||
link_libc = true;
|
||||
} else if (mem.eql(u8, end_tag_name, "link_mode_dynamic")) {
|
||||
link_mode = .Dynamic;
|
||||
} else if (mem.eql(u8, end_tag_name, "backend_stage1")) {
|
||||
backend_stage1 = true;
|
||||
} else if (mem.eql(u8, end_tag_name, "code_end")) {
|
||||
_ = try eatToken(tokenizer, Token.Id.BracketClose);
|
||||
break content_tok;
|
||||
@@ -609,6 +613,7 @@ fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc {
|
||||
.link_objects = link_objects.toOwnedSlice(),
|
||||
.target_str = target_str,
|
||||
.link_libc = link_libc,
|
||||
.backend_stage1 = backend_stage1,
|
||||
.link_mode = link_mode,
|
||||
.disable_cache = disable_cache,
|
||||
.verbose_cimport = verbose_cimport,
|
||||
@@ -1187,6 +1192,9 @@ fn printShell(out: anytype, shell_content: []const u8) !void {
|
||||
try out.writeAll("</samp></pre></figure>");
|
||||
}
|
||||
|
||||
// Override this to skip to later tests
|
||||
const debug_start_line = 0;
|
||||
|
||||
fn genHtml(
|
||||
allocator: Allocator,
|
||||
tokenizer: *Tokenizer,
|
||||
@@ -1266,6 +1274,13 @@ fn genHtml(
|
||||
continue;
|
||||
}
|
||||
|
||||
if (debug_start_line > 0) {
|
||||
const loc = tokenizer.getTokenLocation(code.source_token);
|
||||
if (debug_start_line > loc.line) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
const raw_source = tokenizer.buffer[code.source_token.start..code.source_token.end];
|
||||
const trimmed_raw_source = mem.trim(u8, raw_source, " \n");
|
||||
const tmp_source_file_name = try fs.path.join(
|
||||
@@ -1311,6 +1326,10 @@ fn genHtml(
|
||||
try build_args.append("-lc");
|
||||
try shell_out.print("-lc ", .{});
|
||||
}
|
||||
if (code.backend_stage1) {
|
||||
try build_args.append("-fstage1");
|
||||
try shell_out.print("-fstage1", .{});
|
||||
}
|
||||
const target = try std.zig.CrossTarget.parse(.{
|
||||
.arch_os_abi = code.target_str orelse "native",
|
||||
});
|
||||
@@ -1443,6 +1462,10 @@ fn genHtml(
|
||||
try test_args.append("-lc");
|
||||
try shell_out.print("-lc ", .{});
|
||||
}
|
||||
if (code.backend_stage1) {
|
||||
try test_args.append("-fstage1");
|
||||
try shell_out.print("-fstage1", .{});
|
||||
}
|
||||
if (code.target_str) |triple| {
|
||||
try test_args.appendSlice(&[_][]const u8{ "-target", triple });
|
||||
try shell_out.print("-target {s} ", .{triple});
|
||||
@@ -1490,6 +1513,14 @@ fn genHtml(
|
||||
try shell_out.print("-O {s} ", .{@tagName(code.mode)});
|
||||
},
|
||||
}
|
||||
if (code.link_libc) {
|
||||
try test_args.append("-lc");
|
||||
try shell_out.print("-lc ", .{});
|
||||
}
|
||||
if (code.backend_stage1) {
|
||||
try test_args.append("-fstage1");
|
||||
try shell_out.print("-fstage1", .{});
|
||||
}
|
||||
const result = try ChildProcess.exec(.{
|
||||
.allocator = allocator,
|
||||
.argv = test_args.items,
|
||||
|
||||
+87
-126
@@ -535,8 +535,8 @@ const Timestamp = struct {
|
||||
{#header_close#}
|
||||
{#header_open|Top-Level Doc Comments#}
|
||||
<p>User documentation that doesn't belong to whatever
|
||||
immediately follows it, like container level documentation, goes
|
||||
in top level doc comments. A top level doc comment is one that
|
||||
immediately follows it, like container-level documentation, goes
|
||||
in top-level doc comments. A top-level doc comment is one that
|
||||
begins with two slashes and an exclamation point:
|
||||
{#syntax#}//!{#endsyntax#}.</p>
|
||||
{#code_begin|syntax|tldoc_comments#}
|
||||
@@ -1188,6 +1188,7 @@ test "this will be skipped" {
|
||||
(The evented IO mode is enabled using the <kbd>--test-evented-io</kbd> command line parameter.)
|
||||
</p>
|
||||
{#code_begin|test|async_skip#}
|
||||
{#backend_stage1#}
|
||||
const std = @import("std");
|
||||
|
||||
test "async skip test" {
|
||||
@@ -1520,7 +1521,8 @@ fn divide(a: i32, b: i32) i32 {
|
||||
Zig supports arbitrary bit-width integers, referenced by using
|
||||
an identifier of <code>i</code> or <code>u</code> followed by digits. For example, the identifier
|
||||
{#syntax#}i7{#endsyntax#} refers to a signed 7-bit integer. The maximum allowed bit-width of an
|
||||
integer type is {#syntax#}65535{#endsyntax#}.
|
||||
integer type is {#syntax#}65535{#endsyntax#}. For signed integer types, Zig uses a
|
||||
<a href="https://en.wikipedia.org/wiki/Two's_complement">two's complement</a> representation.
|
||||
</p>
|
||||
{#see_also|Wrapping Operations#}
|
||||
{#header_close#}
|
||||
@@ -2768,7 +2770,7 @@ test "comptime @intToPtr" {
|
||||
}
|
||||
}
|
||||
{#code_end#}
|
||||
{#see_also|Optional Pointers|@intToPtr|@ptrToInt|C Pointers|Pointers to Zero Bit Types#}
|
||||
{#see_also|Optional Pointers|@intToPtr|@ptrToInt|C Pointers#}
|
||||
{#header_open|volatile#}
|
||||
<p>Loads and stores are assumed to not have side effects. If a given load or store
|
||||
should have side effects, such as Memory Mapped Input/Output (MMIO), use {#syntax#}volatile{#endsyntax#}.
|
||||
@@ -2862,19 +2864,22 @@ var foo: u8 align(4) = 100;
|
||||
test "global variable alignment" {
|
||||
try expect(@typeInfo(@TypeOf(&foo)).Pointer.alignment == 4);
|
||||
try expect(@TypeOf(&foo) == *align(4) u8);
|
||||
const as_pointer_to_array: *[1]u8 = &foo;
|
||||
const as_slice: []u8 = as_pointer_to_array;
|
||||
try expect(@TypeOf(as_slice) == []align(4) u8);
|
||||
const as_pointer_to_array: *align(4) [1]u8 = &foo;
|
||||
const as_slice: []align(4) u8 = as_pointer_to_array;
|
||||
const as_unaligned_slice: []u8 = as_slice;
|
||||
try expect(as_unaligned_slice[0] == 100);
|
||||
}
|
||||
|
||||
fn derp() align(@sizeOf(usize) * 2) i32 { return 1234; }
|
||||
fn derp() align(@sizeOf(usize) * 2) i32 {
|
||||
return 1234;
|
||||
}
|
||||
fn noop1() align(1) void {}
|
||||
fn noop4() align(4) void {}
|
||||
|
||||
test "function alignment" {
|
||||
try expect(derp() == 1234);
|
||||
try expect(@TypeOf(noop1) == fn() align(1) void);
|
||||
try expect(@TypeOf(noop4) == fn() align(4) void);
|
||||
try expect(@TypeOf(noop1) == fn () align(1) void);
|
||||
try expect(@TypeOf(noop4) == fn () align(4) void);
|
||||
noop1();
|
||||
noop4();
|
||||
}
|
||||
@@ -3336,6 +3341,7 @@ fn doTheTest() !void {
|
||||
Zig allows the address to be taken of a non-byte-aligned field:
|
||||
</p>
|
||||
{#code_begin|test|pointer_to_non-byte_aligned_field#}
|
||||
{#backend_stage1#}
|
||||
const std = @import("std");
|
||||
const expect = std.testing.expect;
|
||||
|
||||
@@ -3391,7 +3397,8 @@ fn bar(x: *const u3) u3 {
|
||||
<p>
|
||||
Pointers to non-ABI-aligned fields share the same address as the other fields within their host integer:
|
||||
</p>
|
||||
{#code_begin|test|pointer_to_non-bit_aligned_field#}
|
||||
{#code_begin|test|packed_struct_field_addrs#}
|
||||
{#backend_stage1#}
|
||||
const std = @import("std");
|
||||
const expect = std.testing.expect;
|
||||
|
||||
@@ -3407,7 +3414,7 @@ var bit_field = BitField{
|
||||
.c = 3,
|
||||
};
|
||||
|
||||
test "pointer to non-bit-aligned field" {
|
||||
test "pointers of sub-byte-aligned fields share addresses" {
|
||||
try expect(@ptrToInt(&bit_field.a) == @ptrToInt(&bit_field.b));
|
||||
try expect(@ptrToInt(&bit_field.a) == @ptrToInt(&bit_field.c));
|
||||
}
|
||||
@@ -3438,20 +3445,22 @@ test "pointer to non-bit-aligned field" {
|
||||
}
|
||||
{#code_end#}
|
||||
<p>
|
||||
Packed structs have 1-byte alignment. However if you have an overaligned pointer to a packed struct,
|
||||
Zig should correctly understand the alignment of fields. However there is
|
||||
<a href="https://github.com/ziglang/zig/issues/1994">a bug</a>:
|
||||
Packed structs have the same alignment as their backing integer, however, overaligned
|
||||
pointers to packed structs can override this:
|
||||
</p>
|
||||
{#code_begin|test_err|expected type '*u32', found '*align(1) u32'#}
|
||||
{#code_begin|test|overaligned_packed_struct#}
|
||||
const std = @import("std");
|
||||
const expect = std.testing.expect;
|
||||
|
||||
const S = packed struct {
|
||||
a: u32,
|
||||
b: u32,
|
||||
};
|
||||
test "overaligned pointer to packed struct" {
|
||||
var foo: S align(4) = undefined;
|
||||
var foo: S align(4) = .{ .a = 1, .b = 2 };
|
||||
const ptr: *align(4) S = &foo;
|
||||
const ptr_to_b: *u32 = &ptr.b;
|
||||
_ = ptr_to_b;
|
||||
try expect(ptr_to_b.* == 2);
|
||||
}
|
||||
{#code_end#}
|
||||
<p>When this bug is fixed, the above test in the documentation will unexpectedly pass, which will
|
||||
@@ -3698,7 +3707,7 @@ test "@tagName" {
|
||||
<p>
|
||||
By default, enums are not guaranteed to be compatible with the C ABI:
|
||||
</p>
|
||||
{#code_begin|obj_err|parameter of type 'Foo' not allowed in function with calling convention 'C'#}
|
||||
{#code_begin|obj_err|parameter of type 'test.Foo' not allowed in function with calling convention 'C'#}
|
||||
const Foo = enum { a, b, c };
|
||||
export fn entry(foo: Foo) void { _ = foo; }
|
||||
{#code_end#}
|
||||
@@ -4004,7 +4013,7 @@ fn makeNumber() Number {
|
||||
This is typically used for type safety when interacting with C code that does not expose struct details.
|
||||
Example:
|
||||
</p>
|
||||
{#code_begin|test_err|expected type '*Derp', found '*Wat'#}
|
||||
{#code_begin|test_err|expected type '*test.Derp', found '*test.Wat'#}
|
||||
const Derp = opaque {};
|
||||
const Wat = opaque {};
|
||||
|
||||
@@ -4203,7 +4212,7 @@ test "switch on tagged union" {
|
||||
When a {#syntax#}switch{#endsyntax#} expression does not have an {#syntax#}else{#endsyntax#} clause,
|
||||
it must exhaustively list all the possible values. Failure to do so is a compile error:
|
||||
</p>
|
||||
{#code_begin|test_err|not handled in switch#}
|
||||
{#code_begin|test_err|unhandled enumeration value#}
|
||||
const Color = enum {
|
||||
auto,
|
||||
off,
|
||||
@@ -5015,8 +5024,8 @@ fn shiftLeftOne(a: u32) callconv(.Inline) u32 {
|
||||
// Another file can use @import and call sub2
|
||||
pub fn sub2(a: i8, b: i8) i8 { return a - b; }
|
||||
|
||||
// Functions can be used as values and are equivalent to pointers.
|
||||
const call2_op = fn (a: i8, b: i8) i8;
|
||||
// Function pointers are prefixed with `*const `.
|
||||
const call2_op = *const fn (a: i8, b: i8) i8;
|
||||
fn do_op(fn_call: call2_op, op1: i8, op2: i8) i8 {
|
||||
return fn_call(op1, op2);
|
||||
}
|
||||
@@ -5026,17 +5035,9 @@ test "function" {
|
||||
try expect(do_op(sub2, 5, 6) == -1);
|
||||
}
|
||||
{#code_end#}
|
||||
<p>Function values are like pointers:</p>
|
||||
{#code_begin|obj#}
|
||||
const assert = @import("std").debug.assert;
|
||||
|
||||
comptime {
|
||||
assert(@TypeOf(foo) == fn()void);
|
||||
assert(@sizeOf(fn()void) == @sizeOf(?fn()void));
|
||||
}
|
||||
|
||||
fn foo() void { }
|
||||
{#code_end#}
|
||||
<p>There is a difference between a function <em>body</em> and a function <em>pointer</em>.
|
||||
Function bodies are {#link|comptime#}-only types while function {#link|Pointers#} may be
|
||||
runtime-known.</p>
|
||||
{#header_open|Pass-by-value Parameters#}
|
||||
<p>
|
||||
Primitive types such as {#link|Integers#} and {#link|Floats#} passed as parameters
|
||||
@@ -6123,10 +6124,11 @@ test "float widening" {
|
||||
two choices about the coercion.
|
||||
</p>
|
||||
<ul>
|
||||
<li> Cast {#syntax#}54.0{#endsyntax#} to {#syntax#}comptime_int{#endsyntax#} resulting in {#syntax#}@as(comptime_int, 10){#endsyntax#}, which is casted to {#syntax#}@as(f32, 10){#endsyntax#}</li>
|
||||
<li> Cast {#syntax#}5{#endsyntax#} to {#syntax#}comptime_float{#endsyntax#} resulting in {#syntax#}@as(comptime_float, 10.8){#endsyntax#}, which is casted to {#syntax#}@as(f32, 10.8){#endsyntax#}</li>
|
||||
<li>Cast {#syntax#}54.0{#endsyntax#} to {#syntax#}comptime_int{#endsyntax#} resulting in {#syntax#}@as(comptime_int, 10){#endsyntax#}, which is casted to {#syntax#}@as(f32, 10){#endsyntax#}</li>
|
||||
<li>Cast {#syntax#}5{#endsyntax#} to {#syntax#}comptime_float{#endsyntax#} resulting in {#syntax#}@as(comptime_float, 10.8){#endsyntax#}, which is casted to {#syntax#}@as(f32, 10.8){#endsyntax#}</li>
|
||||
</ul>
|
||||
{#code_begin|test_err#}
|
||||
{#backend_stage1#}
|
||||
// Compile time coercion of float to int
|
||||
test "implicit cast to comptime_int" {
|
||||
var f: f32 = 54.0 / 5;
|
||||
@@ -6302,19 +6304,6 @@ test "coercion between unions and enums" {
|
||||
{#code_end#}
|
||||
{#see_also|union|enum#}
|
||||
{#header_close#}
|
||||
{#header_open|Type Coercion: Zero Bit Types#}
|
||||
<p>{#link|Zero Bit Types#} may be coerced to single-item {#link|Pointers#},
|
||||
regardless of const.</p>
|
||||
<p>TODO document the reasoning for this</p>
|
||||
<p>TODO document whether vice versa should work and why</p>
|
||||
{#code_begin|test|coerce_zero_bit_types#}
|
||||
test "coercion of zero bit types" {
|
||||
var x: void = {};
|
||||
var y: *void = x;
|
||||
_ = y;
|
||||
}
|
||||
{#code_end#}
|
||||
{#header_close#}
|
||||
{#header_open|Type Coercion: undefined#}
|
||||
<p>{#link|undefined#} can be cast to any type.</p>
|
||||
{#header_close#}
|
||||
@@ -6467,7 +6456,6 @@ test "peer type resolution: *const T and ?*T" {
|
||||
<li>An {#link|enum#} with only 1 tag.</li>
|
||||
<li>A {#link|struct#} with all fields being zero bit types.</li>
|
||||
<li>A {#link|union#} with only 1 field which is a zero bit type.</li>
|
||||
<li>{#link|Pointers to Zero Bit Types#} are themselves zero bit types.</li>
|
||||
</ul>
|
||||
<p>
|
||||
These types can only ever have one possible value, and thus
|
||||
@@ -6527,7 +6515,7 @@ test "turn HashMap into a set with void" {
|
||||
<p>
|
||||
Expressions of type {#syntax#}void{#endsyntax#} are the only ones whose value can be ignored. For example:
|
||||
</p>
|
||||
{#code_begin|test_err|expression value is ignored#}
|
||||
{#code_begin|test_err|ignored#}
|
||||
test "ignoring expression value" {
|
||||
foo();
|
||||
}
|
||||
@@ -6553,37 +6541,6 @@ fn foo() i32 {
|
||||
}
|
||||
{#code_end#}
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|Pointers to Zero Bit Types#}
|
||||
<p>Pointers to zero bit types also have zero bits. They always compare equal to each other:</p>
|
||||
{#code_begin|test|pointers_to_zero_bits#}
|
||||
const std = @import("std");
|
||||
const expect = std.testing.expect;
|
||||
|
||||
test "pointer to empty struct" {
|
||||
const Empty = struct {};
|
||||
var a = Empty{};
|
||||
var b = Empty{};
|
||||
var ptr_a = &a;
|
||||
var ptr_b = &b;
|
||||
comptime try expect(ptr_a == ptr_b);
|
||||
}
|
||||
{#code_end#}
|
||||
<p>The type being pointed to can only ever be one value; therefore loads and stores are
|
||||
never generated. {#link|ptrToInt#} and {#link|intToPtr#} are not allowed:</p>
|
||||
{#code_begin|test_err#}
|
||||
const Empty = struct {};
|
||||
|
||||
test "@ptrToInt for pointer to zero bit type" {
|
||||
var a = Empty{};
|
||||
_ = @ptrToInt(&a);
|
||||
}
|
||||
|
||||
test "@intToPtr for pointer to zero bit type" {
|
||||
_ = @intToPtr(*Empty, 0x1);
|
||||
}
|
||||
{#code_end#}
|
||||
{#header_close#}
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|Result Location Semantics#}
|
||||
@@ -6666,7 +6623,7 @@ fn gimmeTheBiggerInteger(a: u64, b: u64) u64 {
|
||||
<p>
|
||||
For example, if we were to introduce another function to the above snippet:
|
||||
</p>
|
||||
{#code_begin|test_err|values of type 'type' must be comptime known#}
|
||||
{#code_begin|test_err|unable to resolve comptime value#}
|
||||
fn max(comptime T: type, a: T, b: T) T {
|
||||
return if (a > b) a else b;
|
||||
}
|
||||
@@ -6692,7 +6649,7 @@ fn foo(condition: bool) void {
|
||||
<p>
|
||||
For example:
|
||||
</p>
|
||||
{#code_begin|test_err|operator not allowed for type 'bool'#}
|
||||
{#code_begin|test_err|operator > not allowed for type 'bool'#}
|
||||
fn max(comptime T: type, a: T, b: T) T {
|
||||
return if (a > b) a else b;
|
||||
}
|
||||
@@ -6837,7 +6794,7 @@ fn performFn(start_value: i32) i32 {
|
||||
use a {#syntax#}comptime{#endsyntax#} expression to guarantee that the expression will be evaluated at compile-time.
|
||||
If this cannot be accomplished, the compiler will emit an error. For example:
|
||||
</p>
|
||||
{#code_begin|test_err|unable to evaluate constant expression#}
|
||||
{#code_begin|test_err|comptime call of extern function#}
|
||||
extern fn exit() noreturn;
|
||||
|
||||
test "foo" {
|
||||
@@ -6889,7 +6846,7 @@ test "fibonacci" {
|
||||
<p>
|
||||
Imagine if we had forgotten the base case of the recursive function and tried to run the tests:
|
||||
</p>
|
||||
{#code_begin|test_err|operation caused overflow#}
|
||||
{#code_begin|test_err|overflow of integer type#}
|
||||
const expect = @import("std").testing.expect;
|
||||
|
||||
fn fibonacci(index: u32) u32 {
|
||||
@@ -6913,7 +6870,8 @@ test "fibonacci" {
|
||||
But what would have happened if we used a signed integer?
|
||||
</p>
|
||||
{#code_begin|test_err|evaluation exceeded 1000 backwards branches#}
|
||||
const expect = @import("std").testing.expect;
|
||||
{#backend_stage1#}
|
||||
const assert = @import("std").debug.assert;
|
||||
|
||||
fn fibonacci(index: i32) i32 {
|
||||
//if (index < 2) return index;
|
||||
@@ -6922,7 +6880,7 @@ fn fibonacci(index: i32) i32 {
|
||||
|
||||
test "fibonacci" {
|
||||
comptime {
|
||||
try expect(fibonacci(7) == 13);
|
||||
try assert(fibonacci(7) == 13);
|
||||
}
|
||||
}
|
||||
{#code_end#}
|
||||
@@ -6935,8 +6893,8 @@ test "fibonacci" {
|
||||
<p>
|
||||
What if we fix the base case, but put the wrong value in the {#syntax#}expect{#endsyntax#} line?
|
||||
</p>
|
||||
{#code_begin|test_err|test "fibonacci"... FAIL (TestUnexpectedResult)#}
|
||||
const expect = @import("std").testing.expect;
|
||||
{#code_begin|test_err|reached unreachable#}
|
||||
const assert = @import("std").debug.assert;
|
||||
|
||||
fn fibonacci(index: i32) i32 {
|
||||
if (index < 2) return index;
|
||||
@@ -6945,16 +6903,10 @@ fn fibonacci(index: i32) i32 {
|
||||
|
||||
test "fibonacci" {
|
||||
comptime {
|
||||
try expect(fibonacci(7) == 99999);
|
||||
try assert(fibonacci(7) == 99999);
|
||||
}
|
||||
}
|
||||
{#code_end#}
|
||||
<p>
|
||||
What happened is Zig started interpreting the {#syntax#}expect{#endsyntax#} function with the
|
||||
parameter {#syntax#}ok{#endsyntax#} set to {#syntax#}false{#endsyntax#}. When the interpreter hit
|
||||
{#syntax#}@panic{#endsyntax#} it emitted a compile error because a panic during compile
|
||||
causes a compile error if it is detected at compile-time.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
At container level (outside of any function), all expressions are implicitly
|
||||
@@ -7280,6 +7232,7 @@ pub fn main() void {
|
||||
</p>
|
||||
{#code_begin|exe#}
|
||||
{#target_linux_x86_64#}
|
||||
{#backend_stage1#}
|
||||
pub fn main() noreturn {
|
||||
const msg = "hello world\n";
|
||||
_ = syscall3(SYS_write, STDOUT_FILENO, @ptrToInt(msg), msg.len);
|
||||
@@ -7497,6 +7450,7 @@ test "global assembly" {
|
||||
or resumer (in the case of subsequent suspensions).
|
||||
</p>
|
||||
{#code_begin|test|suspend_no_resume#}
|
||||
{#backend_stage1#}
|
||||
const std = @import("std");
|
||||
const expect = std.testing.expect;
|
||||
|
||||
@@ -7524,6 +7478,7 @@ fn func() void {
|
||||
{#link|@frame#} provides access to the async function frame pointer.
|
||||
</p>
|
||||
{#code_begin|test|async_suspend_block#}
|
||||
{#backend_stage1#}
|
||||
const std = @import("std");
|
||||
const expect = std.testing.expect;
|
||||
|
||||
@@ -7562,6 +7517,7 @@ fn testSuspendBlock() void {
|
||||
never returns to its resumer and continues executing.
|
||||
</p>
|
||||
{#code_begin|test|resume_from_suspend#}
|
||||
{#backend_stage1#}
|
||||
const std = @import("std");
|
||||
const expect = std.testing.expect;
|
||||
|
||||
@@ -7598,6 +7554,7 @@ fn testResumeFromSuspend(my_result: *i32) void {
|
||||
and the return value of the async function would be lost.
|
||||
</p>
|
||||
{#code_begin|test|async_await#}
|
||||
{#backend_stage1#}
|
||||
const std = @import("std");
|
||||
const expect = std.testing.expect;
|
||||
|
||||
@@ -7642,6 +7599,7 @@ fn func() void {
|
||||
return value directly from the target function's frame.
|
||||
</p>
|
||||
{#code_begin|test|async_await_sequence#}
|
||||
{#backend_stage1#}
|
||||
const std = @import("std");
|
||||
const expect = std.testing.expect;
|
||||
|
||||
@@ -7695,6 +7653,7 @@ fn seq(c: u8) void {
|
||||
{#syntax#}async{#endsyntax#}/{#syntax#}await{#endsyntax#} usage:
|
||||
</p>
|
||||
{#code_begin|exe|async#}
|
||||
{#backend_stage1#}
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
@@ -7773,6 +7732,7 @@ fn readFile(allocator: Allocator, filename: []const u8) ![]u8 {
|
||||
observe the same behavior, with one tiny difference:
|
||||
</p>
|
||||
{#code_begin|exe|blocking#}
|
||||
{#backend_stage1#}
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
@@ -7910,6 +7870,7 @@ comptime {
|
||||
{#syntax#}await{#endsyntax#} will copy the result from {#syntax#}result_ptr{#endsyntax#}.
|
||||
</p>
|
||||
{#code_begin|test|async_struct_field_fn_pointer#}
|
||||
{#backend_stage1#}
|
||||
const std = @import("std");
|
||||
const expect = std.testing.expect;
|
||||
|
||||
@@ -8071,8 +8032,8 @@ fn func(y: *i32) void {
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|@byteSwap#}
|
||||
<pre>{#syntax#}@byteSwap(comptime T: type, operand: T) T{#endsyntax#}</pre>
|
||||
<p>{#syntax#}T{#endsyntax#} must be an integer type with bit count evenly divisible by 8.</p>
|
||||
<pre>{#syntax#}@byteSwap(operand: anytype) T{#endsyntax#}</pre>
|
||||
<p>{#syntax#}@TypeOf(operand){#endsyntax#} must be an integer type or an integer vector type with bit count evenly divisible by 8.</p>
|
||||
<p>{#syntax#}operand{#endsyntax#} may be an {#link|integer|Integers#} or {#link|vector|Vectors#}.</p>
|
||||
<p>
|
||||
Swaps the byte order of the integer. This converts a big endian integer to a little endian integer,
|
||||
@@ -8089,8 +8050,8 @@ fn func(y: *i32) void {
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|@bitReverse#}
|
||||
<pre>{#syntax#}@bitReverse(comptime T: type, integer: T) T{#endsyntax#}</pre>
|
||||
<p>{#syntax#}T{#endsyntax#} accepts any integer type.</p>
|
||||
<pre>{#syntax#}@bitReverse(integer: anytype) T{#endsyntax#}</pre>
|
||||
<p>{#syntax#}@TypeOf(anytype){#endsyntax#} accepts any integer type or integer vector type.</p>
|
||||
<p>
|
||||
Reverses the bitpattern of an integer value, including the sign bit if applicable.
|
||||
</p>
|
||||
@@ -8229,8 +8190,8 @@ pub const CallOptions = struct {
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|@clz#}
|
||||
<pre>{#syntax#}@clz(comptime T: type, operand: T){#endsyntax#}</pre>
|
||||
<p>{#syntax#}T{#endsyntax#} must be an integer type.</p>
|
||||
<pre>{#syntax#}@clz(operand: anytype){#endsyntax#}</pre>
|
||||
<p>{#syntax#}@TypeOf(operand){#endsyntax#} must be an integer type or an integer vector type.</p>
|
||||
<p>{#syntax#}operand{#endsyntax#} may be an {#link|integer|Integers#} or {#link|vector|Vectors#}.</p>
|
||||
<p>
|
||||
This function counts the number of most-significant (leading in a big-Endian sense) zeroes in an integer.
|
||||
@@ -8375,8 +8336,8 @@ test "main" {
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|@ctz#}
|
||||
<pre>{#syntax#}@ctz(comptime T: type, operand: T){#endsyntax#}</pre>
|
||||
<p>{#syntax#}T{#endsyntax#} must be an integer type.</p>
|
||||
<pre>{#syntax#}@ctz(operand: anytype){#endsyntax#}</pre>
|
||||
<p>{#syntax#}@TypeOf(operand){#endsyntax#} must be an integer type or an integer vector type.</p>
|
||||
<p>{#syntax#}operand{#endsyntax#} may be an {#link|integer|Integers#} or {#link|vector|Vectors#}.</p>
|
||||
<p>
|
||||
This function counts the number of least-significant (trailing in a big-Endian sense) zeroes in an integer.
|
||||
@@ -8677,6 +8638,7 @@ test "decl access by string" {
|
||||
allows one to, for example, heap-allocate an async function frame:
|
||||
</p>
|
||||
{#code_begin|test|heap_allocated_frame#}
|
||||
{#backend_stage1#}
|
||||
const std = @import("std");
|
||||
|
||||
test "heap allocated frame" {
|
||||
@@ -9011,8 +8973,8 @@ test "@wasmMemoryGrow" {
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|@popCount#}
|
||||
<pre>{#syntax#}@popCount(comptime T: type, operand: T){#endsyntax#}</pre>
|
||||
<p>{#syntax#}T{#endsyntax#} must be an integer type.</p>
|
||||
<pre>{#syntax#}@popCount(operand: anytype){#endsyntax#}</pre>
|
||||
<p>{#syntax#}@TypeOf(operand){#endsyntax#} must be an integer type.</p>
|
||||
<p>{#syntax#}operand{#endsyntax#} may be an {#link|integer|Integers#} or {#link|vector|Vectors#}.</p>
|
||||
<p>Counts the number of bits set in an integer.</p>
|
||||
<p>
|
||||
@@ -9423,12 +9385,6 @@ const std = @import("std");
|
||||
const expect = std.testing.expect;
|
||||
|
||||
test "vector @reduce" {
|
||||
// This test regressed with LLVM 14:
|
||||
// https://github.com/llvm/llvm-project/issues/55522
|
||||
// We'll skip this test unless the self-hosted compiler is being used.
|
||||
// After LLVM 15 is released we can delete this line.
|
||||
if (@import("builtin").zig_backend == .stage1) return;
|
||||
|
||||
const value = @Vector(4, i32){ 1, -1, 1, -1 };
|
||||
const result = value > @splat(4, @as(i32, 0));
|
||||
// result is { true, false, true, false };
|
||||
@@ -9938,7 +9894,7 @@ pub fn main() void {
|
||||
{#header_close#}
|
||||
{#header_open|Index out of Bounds#}
|
||||
<p>At compile-time:</p>
|
||||
{#code_begin|test_err|index 5 outside array of size 5#}
|
||||
{#code_begin|test_err|index 5 outside array of length 5#}
|
||||
comptime {
|
||||
const array: [5]u8 = "hello".*;
|
||||
const garbage = array[5];
|
||||
@@ -9959,9 +9915,9 @@ fn foo(x: []const u8) u8 {
|
||||
{#header_close#}
|
||||
{#header_open|Cast Negative Number to Unsigned Integer#}
|
||||
<p>At compile-time:</p>
|
||||
{#code_begin|test_err|attempt to cast negative value to unsigned integer#}
|
||||
{#code_begin|test_err|type 'u32' cannot represent integer value '-1'#}
|
||||
comptime {
|
||||
const value: i32 = -1;
|
||||
var value: i32 = -1;
|
||||
const unsigned = @intCast(u32, value);
|
||||
_ = unsigned;
|
||||
}
|
||||
@@ -9982,7 +9938,7 @@ pub fn main() void {
|
||||
{#header_close#}
|
||||
{#header_open|Cast Truncates Data#}
|
||||
<p>At compile-time:</p>
|
||||
{#code_begin|test_err|cast from 'u16' to 'u8' truncates bits#}
|
||||
{#code_begin|test_err|type 'u8' cannot represent integer value '300'#}
|
||||
comptime {
|
||||
const spartan_count: u16 = 300;
|
||||
const byte = @intCast(u8, spartan_count);
|
||||
@@ -10017,7 +9973,7 @@ pub fn main() void {
|
||||
<li>{#link|@divExact#} (division)</li>
|
||||
</ul>
|
||||
<p>Example with addition at compile-time:</p>
|
||||
{#code_begin|test_err|operation caused overflow#}
|
||||
{#code_begin|test_err|overflow of integer type 'u8' with value '256'#}
|
||||
comptime {
|
||||
var byte: u8 = 255;
|
||||
byte += 1;
|
||||
@@ -10118,6 +10074,7 @@ test "wraparound addition and subtraction" {
|
||||
{#header_open|Exact Left Shift Overflow#}
|
||||
<p>At compile-time:</p>
|
||||
{#code_begin|test_err|operation caused overflow#}
|
||||
{#backend_stage1#}
|
||||
comptime {
|
||||
const x = @shlExact(@as(u8, 0b01010101), 2);
|
||||
_ = x;
|
||||
@@ -10137,6 +10094,7 @@ pub fn main() void {
|
||||
{#header_open|Exact Right Shift Overflow#}
|
||||
<p>At compile-time:</p>
|
||||
{#code_begin|test_err|exact shift shifted out 1 bits#}
|
||||
{#backend_stage1#}
|
||||
comptime {
|
||||
const x = @shrExact(@as(u8, 0b10101010), 2);
|
||||
_ = x;
|
||||
@@ -10200,6 +10158,7 @@ pub fn main() void {
|
||||
{#header_open|Exact Division Remainder#}
|
||||
<p>At compile-time:</p>
|
||||
{#code_begin|test_err|exact division had a remainder#}
|
||||
{#backend_stage1#}
|
||||
comptime {
|
||||
const a: u32 = 10;
|
||||
const b: u32 = 3;
|
||||
@@ -10302,7 +10261,7 @@ fn getNumberOrFail() !i32 {
|
||||
{#header_close#}
|
||||
{#header_open|Invalid Error Code#}
|
||||
<p>At compile-time:</p>
|
||||
{#code_begin|test_err|integer value 11 represents no error#}
|
||||
{#code_begin|test_err|integer value '11' represents no error#}
|
||||
comptime {
|
||||
const err = error.AnError;
|
||||
const number = @errorToInt(err) + 10;
|
||||
@@ -10324,7 +10283,7 @@ pub fn main() void {
|
||||
{#header_close#}
|
||||
{#header_open|Invalid Enum Cast#}
|
||||
<p>At compile-time:</p>
|
||||
{#code_begin|test_err|has no tag matching integer value 3#}
|
||||
{#code_begin|test_err|enum 'test.Foo' has no tag with value '3'#}
|
||||
const Foo = enum {
|
||||
a,
|
||||
b,
|
||||
@@ -10356,7 +10315,7 @@ pub fn main() void {
|
||||
|
||||
{#header_open|Invalid Error Set Cast#}
|
||||
<p>At compile-time:</p>
|
||||
{#code_begin|test_err|error.B not a member of error set 'Set2'#}
|
||||
{#code_begin|test_err|'error.B' not a member of error set 'error{A,C}'#}
|
||||
const Set1 = error{
|
||||
A,
|
||||
B,
|
||||
@@ -10417,7 +10376,7 @@ fn foo(bytes: []u8) u32 {
|
||||
{#header_close#}
|
||||
{#header_open|Wrong Union Field Access#}
|
||||
<p>At compile-time:</p>
|
||||
{#code_begin|test_err|accessing union field 'float' while field 'int' is set#}
|
||||
{#code_begin|test_err|access of union field 'float' while field 'int' is active#}
|
||||
comptime {
|
||||
var f = Foo{ .int = 42 };
|
||||
f.float = 12.34;
|
||||
@@ -10509,6 +10468,7 @@ fn bar(f: *Foo) void {
|
||||
</p>
|
||||
<p>At compile-time:</p>
|
||||
{#code_begin|test_err|null pointer casted to type#}
|
||||
{#backend_stage1#}
|
||||
comptime {
|
||||
const opt_ptr: ?*i32 = null;
|
||||
const ptr = @ptrCast(*i32, opt_ptr);
|
||||
@@ -10551,7 +10511,8 @@ const expect = std.testing.expect;
|
||||
|
||||
test "using an allocator" {
|
||||
var buffer: [100]u8 = undefined;
|
||||
const allocator = std.heap.FixedBufferAllocator.init(&buffer).allocator();
|
||||
var fba = std.heap.FixedBufferAllocator.init(&buffer);
|
||||
const allocator = fba.allocator();
|
||||
const result = try concat(allocator, "foo", "bar");
|
||||
try expect(std.mem.eql(u8, "foobar", result));
|
||||
}
|
||||
@@ -10647,7 +10608,7 @@ pub fn main() !void {
|
||||
<p>String literals such as {#syntax#}"foo"{#endsyntax#} are in the global constant data section.
|
||||
This is why it is an error to pass a string literal to a mutable slice, like this:
|
||||
</p>
|
||||
{#code_begin|test_err|cannot cast pointer to array literal to slice type '[]u8'#}
|
||||
{#code_begin|test_err|expected type '[]u8', found '*const [5:0]u8'#}
|
||||
fn foo(s: []u8) void {
|
||||
_ = s;
|
||||
}
|
||||
@@ -11832,8 +11793,8 @@ fn readU32Be() u32 {}
|
||||
<pre>{#syntax#}anytype{#endsyntax#}</pre>
|
||||
</th>
|
||||
<td>
|
||||
Function parameters and struct fields can be declared with {#syntax#}anytype{#endsyntax#} in place of the type.
|
||||
The type will be inferred where the function is called or the struct is instantiated.
|
||||
Function parameters can be declared with {#syntax#}anytype{#endsyntax#} in place of the type.
|
||||
The type will be inferred where the function is called.
|
||||
<ul>
|
||||
<li>See also {#link|Function Parameter Type Inference#}</li>
|
||||
</ul>
|
||||
|
||||
@@ -9,7 +9,7 @@ const normalize = common.normalize;
|
||||
pub inline fn addf3(comptime T: type, a: T, b: T) T {
|
||||
const bits = @typeInfo(T).Float.bits;
|
||||
const Z = std.meta.Int(.unsigned, bits);
|
||||
const S = std.meta.Int(.unsigned, bits - @clz(Z, @as(Z, bits) - 1));
|
||||
const S = std.meta.Int(.unsigned, bits - @clz(@as(Z, bits) - 1));
|
||||
|
||||
const typeWidth = bits;
|
||||
const significandBits = math.floatMantissaBits(T);
|
||||
@@ -118,7 +118,7 @@ pub inline fn addf3(comptime T: type, a: T, b: T) T {
|
||||
// If partial cancellation occured, we need to left-shift the result
|
||||
// and adjust the exponent:
|
||||
if (aSignificand < integerBit << 3) {
|
||||
const shift = @intCast(i32, @clz(Z, aSignificand)) - @intCast(i32, @clz(std.meta.Int(.unsigned, bits), integerBit << 3));
|
||||
const shift = @intCast(i32, @clz(aSignificand)) - @intCast(i32, @clz(integerBit << 3));
|
||||
aSignificand <<= @intCast(S, shift);
|
||||
aExponent -= shift;
|
||||
}
|
||||
|
||||
@@ -199,7 +199,7 @@ pub fn normalize(comptime T: type, significand: *std.meta.Int(.unsigned, @typeIn
|
||||
const Z = std.meta.Int(.unsigned, @typeInfo(T).Float.bits);
|
||||
const integerBit = @as(Z, 1) << std.math.floatFractionalBits(T);
|
||||
|
||||
const shift = @clz(Z, significand.*) - @clz(Z, integerBit);
|
||||
const shift = @clz(significand.*) - @clz(integerBit);
|
||||
significand.* <<= @intCast(std.math.Log2Int(Z), shift);
|
||||
return @as(i32, 1) - shift;
|
||||
}
|
||||
|
||||
@@ -206,5 +206,7 @@ pub fn __divxf3(a: f80, b: f80) callconv(.C) f80 {
|
||||
}
|
||||
|
||||
test {
|
||||
if (builtin.zig_backend == .stage2_llvm and builtin.os.tag == .windows) return error.SkipZigTest; // https://github.com/ziglang/zig/issues/12603
|
||||
|
||||
_ = @import("divxf3_test.zig");
|
||||
}
|
||||
|
||||
@@ -56,8 +56,8 @@ pub inline fn extendf(
|
||||
// a is denormal.
|
||||
// renormalize the significand and clear the leading bit, then insert
|
||||
// the correct adjusted exponent in the destination type.
|
||||
const scale: u32 = @clz(src_rep_t, aAbs) -
|
||||
@clz(src_rep_t, @as(src_rep_t, srcMinNormal));
|
||||
const scale: u32 = @clz(aAbs) -
|
||||
@clz(@as(src_rep_t, srcMinNormal));
|
||||
absResult = @as(dst_rep_t, aAbs) << @intCast(DstShift, dstSigBits - srcSigBits + scale);
|
||||
absResult ^= dstMinNormal;
|
||||
const resultExponent: u32 = dstExpBias - srcExpBias - scale + 1;
|
||||
@@ -119,8 +119,8 @@ pub inline fn extend_f80(comptime src_t: type, a: std.meta.Int(.unsigned, @typeI
|
||||
// a is denormal.
|
||||
// renormalize the significand and clear the leading bit, then insert
|
||||
// the correct adjusted exponent in the destination type.
|
||||
const scale: u16 = @clz(src_rep_t, a_abs) -
|
||||
@clz(src_rep_t, @as(src_rep_t, src_min_normal));
|
||||
const scale: u16 = @clz(a_abs) -
|
||||
@clz(@as(src_rep_t, src_min_normal));
|
||||
|
||||
dst.fraction = @as(u64, a_abs) << @intCast(u6, dst_sig_bits - src_sig_bits + scale);
|
||||
dst.fraction |= dst_int_bit; // bit 64 is always set for normal numbers
|
||||
|
||||
@@ -38,7 +38,7 @@ fn __extendxftf2(a: f80) callconv(.C) f128 {
|
||||
// a is denormal
|
||||
// renormalize the significand and clear the leading bit and integer part,
|
||||
// then insert the correct adjusted exponent in the destination type.
|
||||
const scale: u32 = @clz(u64, a_rep.fraction);
|
||||
const scale: u32 = @clz(a_rep.fraction);
|
||||
abs_result = @as(u128, a_rep.fraction) << @intCast(u7, dst_sig_bits - src_sig_bits + scale + 1);
|
||||
abs_result ^= dst_min_normal;
|
||||
abs_result |= @as(u128, scale + 1) << dst_sig_bits;
|
||||
|
||||
@@ -243,7 +243,7 @@ inline fn div_u32(n: u32, d: u32) u32 {
|
||||
// special cases
|
||||
if (d == 0) return 0; // ?!
|
||||
if (n == 0) return 0;
|
||||
var sr = @bitCast(c_uint, @as(c_int, @clz(u32, d)) - @as(c_int, @clz(u32, n)));
|
||||
var sr = @bitCast(c_uint, @as(c_int, @clz(d)) - @as(c_int, @clz(n)));
|
||||
// 0 <= sr <= n_uword_bits - 1 or sr large
|
||||
if (sr > n_uword_bits - 1) {
|
||||
// d > r
|
||||
|
||||
@@ -23,7 +23,7 @@ pub fn intToFloat(comptime T: type, x: anytype) T {
|
||||
var result: uT = sign_bit;
|
||||
|
||||
// Compute significand
|
||||
var exp = int_bits - @clz(Z, abs_val) - 1;
|
||||
var exp = int_bits - @clz(abs_val) - 1;
|
||||
if (int_bits <= fractional_bits or exp <= fractional_bits) {
|
||||
const shift_amt = fractional_bits - @intCast(math.Log2Int(uT), exp);
|
||||
|
||||
@@ -32,7 +32,7 @@ pub fn intToFloat(comptime T: type, x: anytype) T {
|
||||
result ^= implicit_bit; // Remove implicit integer bit
|
||||
} else {
|
||||
var shift_amt = @intCast(math.Log2Int(Z), exp - fractional_bits);
|
||||
const exact_tie: bool = @ctz(Z, abs_val) == shift_amt - 1;
|
||||
const exact_tie: bool = @ctz(abs_val) == shift_amt - 1;
|
||||
|
||||
// Shift down result and remove implicit integer bit
|
||||
result = @intCast(uT, (abs_val >> (shift_amt - 1))) ^ (implicit_bit << 1);
|
||||
|
||||
@@ -186,7 +186,7 @@ fn normalize(comptime T: type, significand: *PowerOfTwoSignificandZ(T)) i32 {
|
||||
const Z = PowerOfTwoSignificandZ(T);
|
||||
const integerBit = @as(Z, 1) << math.floatFractionalBits(T);
|
||||
|
||||
const shift = @clz(Z, significand.*) - @clz(Z, integerBit);
|
||||
const shift = @clz(significand.*) - @clz(integerBit);
|
||||
significand.* <<= @intCast(math.Log2Int(Z), shift);
|
||||
return @as(i32, 1) - shift;
|
||||
}
|
||||
|
||||
@@ -75,12 +75,12 @@ pub fn udivmod(comptime DoubleInt: type, a: DoubleInt, b: DoubleInt, maybe_rem:
|
||||
r[high] = n[high] & (d[high] - 1);
|
||||
rem.* = @ptrCast(*align(@alignOf(SingleInt)) DoubleInt, &r[0]).*; // TODO issue #421
|
||||
}
|
||||
return n[high] >> @intCast(Log2SingleInt, @ctz(SingleInt, d[high]));
|
||||
return n[high] >> @intCast(Log2SingleInt, @ctz(d[high]));
|
||||
}
|
||||
// K K
|
||||
// ---
|
||||
// K 0
|
||||
sr = @bitCast(c_uint, @as(c_int, @clz(SingleInt, d[high])) - @as(c_int, @clz(SingleInt, n[high])));
|
||||
sr = @bitCast(c_uint, @as(c_int, @clz(d[high])) - @as(c_int, @clz(n[high])));
|
||||
// 0 <= sr <= single_int_bits - 2 or sr large
|
||||
if (sr > single_int_bits - 2) {
|
||||
if (maybe_rem) |rem| {
|
||||
@@ -110,7 +110,7 @@ pub fn udivmod(comptime DoubleInt: type, a: DoubleInt, b: DoubleInt, maybe_rem:
|
||||
if (d[low] == 1) {
|
||||
return a;
|
||||
}
|
||||
sr = @ctz(SingleInt, d[low]);
|
||||
sr = @ctz(d[low]);
|
||||
q[high] = n[high] >> @intCast(Log2SingleInt, sr);
|
||||
q[low] = (n[high] << @intCast(Log2SingleInt, single_int_bits - sr)) | (n[low] >> @intCast(Log2SingleInt, sr));
|
||||
return @ptrCast(*align(@alignOf(SingleInt)) DoubleInt, &q[0]).*; // TODO issue #421
|
||||
@@ -118,7 +118,7 @@ pub fn udivmod(comptime DoubleInt: type, a: DoubleInt, b: DoubleInt, maybe_rem:
|
||||
// K X
|
||||
// ---
|
||||
// 0 K
|
||||
sr = 1 + single_int_bits + @as(c_uint, @clz(SingleInt, d[low])) - @as(c_uint, @clz(SingleInt, n[high]));
|
||||
sr = 1 + single_int_bits + @as(c_uint, @clz(d[low])) - @as(c_uint, @clz(n[high]));
|
||||
// 2 <= sr <= double_int_bits - 1
|
||||
// q.all = a << (double_int_bits - sr);
|
||||
// r.all = a >> sr;
|
||||
@@ -144,7 +144,7 @@ pub fn udivmod(comptime DoubleInt: type, a: DoubleInt, b: DoubleInt, maybe_rem:
|
||||
// K X
|
||||
// ---
|
||||
// K K
|
||||
sr = @bitCast(c_uint, @as(c_int, @clz(SingleInt, d[high])) - @as(c_int, @clz(SingleInt, n[high])));
|
||||
sr = @bitCast(c_uint, @as(c_int, @clz(d[high])) - @as(c_int, @clz(n[high])));
|
||||
// 0 <= sr <= single_int_bits - 1 or sr large
|
||||
if (sr > single_int_bits - 1) {
|
||||
if (maybe_rem) |rem| {
|
||||
|
||||
+91
-72
@@ -25,9 +25,10 @@
|
||||
--search-bg-color-focus: #ffffff;
|
||||
--search-sh-color: rgba(0, 0, 0, 0.18);
|
||||
--help-sh-color: rgba(0, 0, 0, 0.75);
|
||||
--help-bg-color: #aaa;
|
||||
}
|
||||
|
||||
html, body { margin: 0; padding:0; height: 100%; }
|
||||
html, body { margin: 0; padding: 0; height: 100%; }
|
||||
|
||||
a {
|
||||
text-decoration: none;
|
||||
@@ -168,8 +169,8 @@
|
||||
width: 100%;
|
||||
margin-bottom: 0.8rem;
|
||||
padding: 0.5rem;
|
||||
font-size: 1rem;
|
||||
font-family: var(--ui);
|
||||
font-size: 1rem;
|
||||
color: var(--tx-color);
|
||||
background-color: var(--search-bg-color);
|
||||
border-top: 0;
|
||||
@@ -190,11 +191,11 @@
|
||||
box-shadow: 0 0.3em 1em 0.125em var(--search-sh-color);
|
||||
}
|
||||
|
||||
.docs .search::placeholder {
|
||||
font-size: 1rem;
|
||||
font-family: var(--ui);
|
||||
color: var(--tx-color);
|
||||
opacity: 0.5;
|
||||
#searchPlaceholder {
|
||||
position: absolute;
|
||||
pointer-events: none;
|
||||
top: 5px;
|
||||
left: 5px;
|
||||
}
|
||||
|
||||
.docs a {
|
||||
@@ -207,9 +208,9 @@
|
||||
|
||||
.docs pre {
|
||||
font-family: var(--mono);
|
||||
font-size:1em;
|
||||
background-color:#F5F5F5;
|
||||
padding:1em;
|
||||
font-size: 1em;
|
||||
background-color: #F5F5F5;
|
||||
padding: 1em;
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
@@ -225,7 +226,7 @@
|
||||
border-bottom: 0.0625rem dashed;
|
||||
}
|
||||
|
||||
.docs h2 {
|
||||
.docs h2 {
|
||||
font-size: 1.3em;
|
||||
margin: 0.5em 0;
|
||||
padding: 0;
|
||||
@@ -289,12 +290,12 @@
|
||||
}
|
||||
|
||||
.fieldDocs {
|
||||
border: 1px solid #2A2A2A;
|
||||
border: 1px solid #F5F5F5;
|
||||
border-top: 0px;
|
||||
padding: 1px 1em;
|
||||
}
|
||||
|
||||
/* help dialog */
|
||||
/* help modal */
|
||||
.help-modal {
|
||||
display: flex;
|
||||
width: 100%;
|
||||
@@ -308,13 +309,13 @@
|
||||
backdrop-filter: blur(0.3em);
|
||||
}
|
||||
|
||||
.help-modal > .dialog {
|
||||
.help-modal > .modal {
|
||||
max-width: 97vw;
|
||||
max-height: 97vh;
|
||||
overflow: auto;
|
||||
font-size: 1rem;
|
||||
color: #fff;
|
||||
background-color: #333;
|
||||
background-color: var(--help-bg-color);
|
||||
border: 0.125rem solid #000;
|
||||
box-shadow: 0 0.5rem 2.5rem 0.3rem var(--help-sh-color);
|
||||
}
|
||||
@@ -335,11 +336,11 @@
|
||||
margin-right: 0.5em;
|
||||
}
|
||||
|
||||
.help-modal kbd {
|
||||
kbd {
|
||||
display: inline-block;
|
||||
padding: 0.3em 0.2em;
|
||||
font-size: 1.2em;
|
||||
font-size: var(--mono);
|
||||
font-family: var(--mono);
|
||||
font-size: 1em;
|
||||
line-height: 0.8em;
|
||||
vertical-align: middle;
|
||||
color: #000;
|
||||
@@ -348,16 +349,20 @@
|
||||
border-bottom-color: #c6cbd1;
|
||||
border: solid 0.0625em;
|
||||
border-radius: 0.1875em;
|
||||
box-shadow: inset 0 -0.0625em 0 #c6cbd1;
|
||||
box-shadow: inset 0 -0.2em 0 #c6cbd1;
|
||||
cursor: default;
|
||||
}
|
||||
|
||||
#listFns > div {
|
||||
padding-bottom: 10px;
|
||||
}
|
||||
|
||||
#listFns dt {
|
||||
font-family: var(--mono);
|
||||
}
|
||||
.argBreaker {
|
||||
display: none;
|
||||
}
|
||||
#listFns dt {
|
||||
font-family: var(--mono);
|
||||
}
|
||||
.argBreaker {
|
||||
display: none;
|
||||
}
|
||||
|
||||
/* tokens */
|
||||
.tok-kw {
|
||||
@@ -391,7 +396,6 @@
|
||||
|
||||
/* dark mode */
|
||||
@media (prefers-color-scheme: dark) {
|
||||
|
||||
:root {
|
||||
--tx-color: #bbb;
|
||||
--bg-color: #111;
|
||||
@@ -408,11 +412,15 @@
|
||||
--search-bg-color-focus: #000;
|
||||
--search-sh-color: rgba(255, 255, 255, 0.28);
|
||||
--help-sh-color: rgba(142, 142, 142, 0.5);
|
||||
--help-bg-color: #333;
|
||||
}
|
||||
|
||||
.docs pre {
|
||||
background-color:#2A2A2A;
|
||||
}
|
||||
.fieldDocs {
|
||||
border-color:#2A2A2A;
|
||||
}
|
||||
#listNav {
|
||||
background-color: #333;
|
||||
}
|
||||
@@ -457,7 +465,6 @@
|
||||
.tok-type {
|
||||
color: #68f;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@media only screen and (max-width: 750px) {
|
||||
@@ -544,7 +551,7 @@
|
||||
<body class="canvas">
|
||||
<div class="banner">
|
||||
This is a beta autodoc build; expect bugs and missing information.
|
||||
<a href="https://github.com/ziglang/zig/wiki/How-to-contribute-to-Autodoc">Report an Issue</a>,
|
||||
<a href="https://github.com/ziglang/zig/wiki/How-to-contribute-to-Autodoc">Report an Issue</a>,
|
||||
<a href="https://github.com/ziglang/zig/wiki/How-to-contribute-to-Autodoc">Contribute</a>,
|
||||
<a href="https://github.com/ziglang/zig/wiki/How-to-read-the-standard-library-source-code">Learn more about stdlib source code</a>.
|
||||
</div>
|
||||
@@ -555,43 +562,43 @@
|
||||
<div class="logo">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 400 140">
|
||||
<g fill="#F7A41D">
|
||||
<g>
|
||||
<polygon points="46,22 28,44 19,30"/>
|
||||
<polygon points="46,22 33,33 28,44 22,44 22,95 31,95 20,100 12,117 0,117 0,22" shape-rendering="crispEdges"/>
|
||||
<polygon points="31,95 12,117 4,106"/>
|
||||
</g>
|
||||
<g>
|
||||
<polygon points="56,22 62,36 37,44"/>
|
||||
<polygon points="56,22 111,22 111,44 37,44 56,32" shape-rendering="crispEdges"/>
|
||||
<polygon points="116,95 97,117 90,104"/>
|
||||
<polygon points="116,95 100,104 97,117 42,117 42,95" shape-rendering="crispEdges"/>
|
||||
<polygon points="150,0 52,117 3,140 101,22"/>
|
||||
</g>
|
||||
<g>
|
||||
<polygon points="141,22 140,40 122,45"/>
|
||||
<polygon points="153,22 153,117 106,117 120,105 125,95 131,95 131,45 122,45 132,36 141,22" shape-rendering="crispEdges"/>
|
||||
<polygon points="125,95 130,110 106,117"/>
|
||||
</g>
|
||||
<g>
|
||||
<polygon points="46,22 28,44 19,30"/>
|
||||
<polygon points="46,22 33,33 28,44 22,44 22,95 31,95 20,100 12,117 0,117 0,22" shape-rendering="crispEdges"/>
|
||||
<polygon points="31,95 12,117 4,106"/>
|
||||
</g>
|
||||
<g>
|
||||
<polygon points="56,22 62,36 37,44"/>
|
||||
<polygon points="56,22 111,22 111,44 37,44 56,32" shape-rendering="crispEdges"/>
|
||||
<polygon points="116,95 97,117 90,104"/>
|
||||
<polygon points="116,95 100,104 97,117 42,117 42,95" shape-rendering="crispEdges"/>
|
||||
<polygon points="150,0 52,117 3,140 101,22"/>
|
||||
</g>
|
||||
<g>
|
||||
<polygon points="141,22 140,40 122,45"/>
|
||||
<polygon points="153,22 153,117 106,117 120,105 125,95 131,95 131,45 122,45 132,36 141,22" shape-rendering="crispEdges"/>
|
||||
<polygon points="125,95 130,110 106,117"/>
|
||||
</g>
|
||||
</g>
|
||||
<style>
|
||||
#text { fill: #121212 }
|
||||
@media (prefers-color-scheme: dark) { #text { fill: #f2f2f2 } }
|
||||
</style>
|
||||
<g id="text">
|
||||
<g>
|
||||
<polygon points="260,22 260,37 229,40 177,40 177,22" shape-rendering="crispEdges"/>
|
||||
<polygon points="260,37 207,99 207,103 176,103 229,40 229,37"/>
|
||||
<polygon points="261,99 261,117 176,117 176,103 206,99" shape-rendering="crispEdges"/>
|
||||
</g>
|
||||
<rect x="272" y="22" shape-rendering="crispEdges" width="22" height="95"/>
|
||||
<g>
|
||||
<polygon points="394,67 394,106 376,106 376,81 360,70 346,67" shape-rendering="crispEdges"/>
|
||||
<polygon points="360,68 376,81 346,67"/>
|
||||
<path d="M394,106c-10.2,7.3-24,12-37.7,12c-29,0-51.1-20.8-51.1-48.3c0-27.3,22.5-48.1,52-48.1 c14.3,0,29.2,5.5,38.9,14l-13,15c-7.1-6.3-16.8-10-25.9-10c-17,0-30.2,12.9-30.2,29.5c0,16.8,13.3,29.6,30.3,29.6 c5.7,0,12.8-2.3,19-5.5L394,106z"/>
|
||||
</g>
|
||||
<g>
|
||||
<polygon points="260,22 260,37 229,40 177,40 177,22" shape-rendering="crispEdges"/>
|
||||
<polygon points="260,37 207,99 207,103 176,103 229,40 229,37"/>
|
||||
<polygon points="261,99 261,117 176,117 176,103 206,99" shape-rendering="crispEdges"/>
|
||||
</g>
|
||||
<rect x="272" y="22" shape-rendering="crispEdges" width="22" height="95"/>
|
||||
<g>
|
||||
<polygon points="394,67 394,106 376,106 376,81 360,70 346,67" shape-rendering="crispEdges"/>
|
||||
<polygon points="360,68 376,81 346,67"/>
|
||||
<path d="M394,106c-10.2,7.3-24,12-37.7,12c-29,0-51.1-20.8-51.1-48.3c0-27.3,22.5-48.1,52-48.1 c14.3,0,29.2,5.5,38.9,14l-13,15c-7.1-6.3-16.8-10-25.9-10c-17,0-30.2,12.9-30.2,29.5c0,16.8,13.3,29.6,30.3,29.6 c5.7,0,12.8-2.3,19-5.5L394,106z"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
<div id="sectMainPkg" class="hidden">
|
||||
<h2><span>Main Package</span></h2>
|
||||
<ul class="packages">
|
||||
@@ -606,16 +613,19 @@
|
||||
<h2><span>Zig Version</span></h2>
|
||||
<p class="str" id="tdZigVer"></p>
|
||||
</div>
|
||||
<div>
|
||||
<input id="privDeclsBox" type="checkbox"/>
|
||||
<label for="privDeclsBox">Internal Doc Mode</label>
|
||||
</div>
|
||||
<div>
|
||||
<input id="privDeclsBox" type="checkbox"/>
|
||||
<label for="privDeclsBox">Internal Doc Mode</label>
|
||||
</div>
|
||||
</nav>
|
||||
</div>
|
||||
<div class="flex-right">
|
||||
<div id="docs" class="flex-right">
|
||||
<div class="wrap">
|
||||
<section class="docs">
|
||||
<input type="search" class="search" id="search" autocomplete="off" spellcheck="false" placeholder="`s` to search, `?` to see more options">
|
||||
<div style="position: relative">
|
||||
<span id="searchPlaceholder"><kbd>S</kbd> to search, <kbd>?</kbd> for more options</span>
|
||||
<input type="search" class="search" id="search" autocomplete="off" spellcheck="false" disabled>
|
||||
</div>
|
||||
<p id="status">Loading...</p>
|
||||
<div id="sectNav" class="hidden"><ul id="listNav"></ul></div>
|
||||
<div id="fnProto" class="hidden">
|
||||
@@ -647,10 +657,17 @@
|
||||
<div id="sectSearchResults" class="hidden">
|
||||
<h2>Search Results</h2>
|
||||
<ul id="listSearchResults"></ul>
|
||||
<p id="sectSearchAllResultsLink" class="hidden"><a href="">show all results</a></p>
|
||||
</div>
|
||||
<div id="sectSearchNoResults" class="hidden">
|
||||
<h2>No Results Found</h2>
|
||||
<p>Press escape to exit search and then '?' to see more options.</p>
|
||||
<p>Here are some things you can try:</p>
|
||||
<ul>
|
||||
<li>Check out the <a id="langRefLink">Language Reference</a> for the language itself.</li>
|
||||
<li>Check out the <a href="https://ziglang.org/learn/">Learn page</a> for other helpful resources for learning Zig.</li>
|
||||
<li>Use your search engine.</li>
|
||||
</ul>
|
||||
<p>Press <kbd>?</kbd> to see keyboard shortcuts and <kbd>Esc</kbd> to return.</p>
|
||||
</div>
|
||||
<div id="sectFields" class="hidden">
|
||||
<h2>Fields</h2>
|
||||
@@ -702,21 +719,23 @@
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</section>
|
||||
</div>
|
||||
<div class="flex-filler"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="helpDialog" class="hidden">
|
||||
<div id="helpModal" class="hidden">
|
||||
<div class="help-modal">
|
||||
<div class="dialog">
|
||||
<div class="modal">
|
||||
<h1>Keyboard Shortcuts</h1>
|
||||
<dl><dt><kbd>?</kbd></dt><dd>Show this help dialog</dd></dl>
|
||||
<dl><dt><kbd>Esc</kbd></dt><dd>Clear focus; close this dialog</dd></dl>
|
||||
<dl><dt><kbd>?</kbd></dt><dd>Show this help modal</dd></dl>
|
||||
<dl><dt><kbd>s</kbd></dt><dd>Focus the search field</dd></dl>
|
||||
<dl><dt><kbd>↑</kbd></dt><dd>Move up in search results</dd></dl>
|
||||
<dl><dt><kbd>↓</kbd></dt><dd>Move down in search results</dd></dl>
|
||||
<dl><dt><kbd>⏎</kbd></dt><dd>Go to active search result</dd></dl>
|
||||
<div style="margin-left: 1em">
|
||||
<dl><dt><kbd>↑</kbd></dt><dd>Move up in search results</dd></dl>
|
||||
<dl><dt><kbd>↓</kbd></dt><dd>Move down in search results</dd></dl>
|
||||
<dl><dt><kbd>⏎</kbd></dt><dd>Go to active search result</dd></dl>
|
||||
</div>
|
||||
<dl><dt><kbd>Esc</kbd></dt><dd>Clear focus; close this modal</dd></dl>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
+3328
-3116
@@ -1,3259 +1,3471 @@
|
||||
'use strict';
|
||||
"use strict";
|
||||
|
||||
var zigAnalysis;
|
||||
|
||||
(function() {
|
||||
let domStatus = (document.getElementById("status"));
|
||||
let domSectNav = (document.getElementById("sectNav"));
|
||||
let domListNav = (document.getElementById("listNav"));
|
||||
let domSectMainPkg = (document.getElementById("sectMainPkg"));
|
||||
let domSectPkgs = (document.getElementById("sectPkgs"));
|
||||
let domListPkgs = (document.getElementById("listPkgs"));
|
||||
let domSectTypes = (document.getElementById("sectTypes"));
|
||||
let domListTypes = (document.getElementById("listTypes"));
|
||||
let domSectTests = (document.getElementById("sectTests"));
|
||||
let domListTests = (document.getElementById("listTests"));
|
||||
let domSectNamespaces = (document.getElementById("sectNamespaces"));
|
||||
let domListNamespaces = (document.getElementById("listNamespaces"));
|
||||
let domSectErrSets = (document.getElementById("sectErrSets"));
|
||||
let domListErrSets = (document.getElementById("listErrSets"));
|
||||
let domSectFns = (document.getElementById("sectFns"));
|
||||
let domListFns = (document.getElementById("listFns"));
|
||||
let domSectFields = (document.getElementById("sectFields"));
|
||||
let domListFields = (document.getElementById("listFields"));
|
||||
let domSectGlobalVars = (document.getElementById("sectGlobalVars"));
|
||||
let domListGlobalVars = (document.getElementById("listGlobalVars"));
|
||||
let domSectValues = (document.getElementById("sectValues"));
|
||||
let domListValues = (document.getElementById("listValues"));
|
||||
let domFnProto = (document.getElementById("fnProto"));
|
||||
let domFnProtoCode = (document.getElementById("fnProtoCode"));
|
||||
let domSectParams = (document.getElementById("sectParams"));
|
||||
let domListParams = (document.getElementById("listParams"));
|
||||
let domTldDocs = (document.getElementById("tldDocs"));
|
||||
let domSectFnErrors = (document.getElementById("sectFnErrors"));
|
||||
let domListFnErrors = (document.getElementById("listFnErrors"));
|
||||
let domTableFnErrors =(document.getElementById("tableFnErrors"));
|
||||
let domFnErrorsAnyError = (document.getElementById("fnErrorsAnyError"));
|
||||
let domFnExamples = (document.getElementById("fnExamples"));
|
||||
// let domListFnExamples = (document.getElementById("listFnExamples"));
|
||||
let domFnNoExamples = (document.getElementById("fnNoExamples"));
|
||||
let domDeclNoRef = (document.getElementById("declNoRef"));
|
||||
let domSearch = (document.getElementById("search"));
|
||||
let domSectSearchResults = (document.getElementById("sectSearchResults"));
|
||||
(function () {
|
||||
const domStatus = document.getElementById("status");
|
||||
const domSectNav = document.getElementById("sectNav");
|
||||
const domListNav = document.getElementById("listNav");
|
||||
const domSectMainPkg = document.getElementById("sectMainPkg");
|
||||
const domSectPkgs = document.getElementById("sectPkgs");
|
||||
const domListPkgs = document.getElementById("listPkgs");
|
||||
const domSectTypes = document.getElementById("sectTypes");
|
||||
const domListTypes = document.getElementById("listTypes");
|
||||
const domSectTests = document.getElementById("sectTests");
|
||||
const domListTests = document.getElementById("listTests");
|
||||
const domSectNamespaces = document.getElementById("sectNamespaces");
|
||||
const domListNamespaces = document.getElementById("listNamespaces");
|
||||
const domSectErrSets = document.getElementById("sectErrSets");
|
||||
const domListErrSets = document.getElementById("listErrSets");
|
||||
const domSectFns = document.getElementById("sectFns");
|
||||
const domListFns = document.getElementById("listFns");
|
||||
const domSectFields = document.getElementById("sectFields");
|
||||
const domListFields = document.getElementById("listFields");
|
||||
const domSectGlobalVars = document.getElementById("sectGlobalVars");
|
||||
const domListGlobalVars = document.getElementById("listGlobalVars");
|
||||
const domSectValues = document.getElementById("sectValues");
|
||||
const domListValues = document.getElementById("listValues");
|
||||
const domFnProto = document.getElementById("fnProto");
|
||||
const domFnProtoCode = document.getElementById("fnProtoCode");
|
||||
const domSectParams = document.getElementById("sectParams");
|
||||
const domListParams = document.getElementById("listParams");
|
||||
const domTldDocs = document.getElementById("tldDocs");
|
||||
const domSectFnErrors = document.getElementById("sectFnErrors");
|
||||
const domListFnErrors = document.getElementById("listFnErrors");
|
||||
const domTableFnErrors = document.getElementById("tableFnErrors");
|
||||
const domFnErrorsAnyError = document.getElementById("fnErrorsAnyError");
|
||||
const domFnExamples = document.getElementById("fnExamples");
|
||||
// const domListFnExamples = (document.getElementById("listFnExamples"));
|
||||
const domFnNoExamples = document.getElementById("fnNoExamples");
|
||||
const domDeclNoRef = document.getElementById("declNoRef");
|
||||
const domSearch = document.getElementById("search");
|
||||
const domSectSearchResults = document.getElementById("sectSearchResults");
|
||||
const domSectSearchAllResultsLink = document.getElementById("sectSearchAllResultsLink");
|
||||
const domDocs = document.getElementById("docs");
|
||||
const domListSearchResults = document.getElementById("listSearchResults");
|
||||
const domSectSearchNoResults = document.getElementById("sectSearchNoResults");
|
||||
const domSectInfo = document.getElementById("sectInfo");
|
||||
// const domTdTarget = (document.getElementById("tdTarget"));
|
||||
const domPrivDeclsBox = document.getElementById("privDeclsBox");
|
||||
const domTdZigVer = document.getElementById("tdZigVer");
|
||||
const domHdrName = document.getElementById("hdrName");
|
||||
const domHelpModal = document.getElementById("helpModal");
|
||||
const domSearchPlaceholder = document.getElementById("searchPlaceholder");
|
||||
const sourceFileUrlTemplate = "src/{{file}}#L{{line}}"
|
||||
const domLangRefLink = document.getElementById("langRefLink");
|
||||
|
||||
let domListSearchResults = (document.getElementById("listSearchResults"));
|
||||
let domSectSearchNoResults = (document.getElementById("sectSearchNoResults"));
|
||||
let domSectInfo = (document.getElementById("sectInfo"));
|
||||
// let domTdTarget = (document.getElementById("tdTarget"));
|
||||
let domPrivDeclsBox = (document.getElementById("privDeclsBox"));
|
||||
let domTdZigVer = (document.getElementById("tdZigVer"));
|
||||
let domHdrName = (document.getElementById("hdrName"));
|
||||
let domHelpModal = (document.getElementById("helpDialog"));
|
||||
let searchTimer = null;
|
||||
let searchTrimResults = true;
|
||||
|
||||
|
||||
let searchTimer = null;
|
||||
let escapeHtmlReplacements = {
|
||||
"&": "&",
|
||||
'"': """,
|
||||
"<": "<",
|
||||
">": ">",
|
||||
};
|
||||
|
||||
|
||||
let escapeHtmlReplacements = { "&": "&", '"': """, "<": "<", ">": ">" };
|
||||
let typeKinds = indexTypeKinds();
|
||||
let typeTypeId = findTypeTypeId();
|
||||
let pointerSizeEnum = { One: 0, Many: 1, Slice: 2, C: 3 };
|
||||
|
||||
let typeKinds = (indexTypeKinds());
|
||||
let typeTypeId = (findTypeTypeId());
|
||||
let pointerSizeEnum = { One: 0, Many: 1, Slice: 2, C: 3 };
|
||||
// for each package, is an array with packages to get to this one
|
||||
let canonPkgPaths = computeCanonicalPackagePaths();
|
||||
|
||||
// for each package, is an array with packages to get to this one
|
||||
let canonPkgPaths = computeCanonicalPackagePaths();
|
||||
// for each decl, is an array with {declNames, pkgNames} to get to this one
|
||||
|
||||
|
||||
let canonDeclPaths = null; // lazy; use getCanonDeclPath
|
||||
|
||||
// for each decl, is an array with {declNames, pkgNames} to get to this one
|
||||
|
||||
let canonDeclPaths = null; // lazy; use getCanonDeclPath
|
||||
// for each type, is an array with {declNames, pkgNames} to get to this one
|
||||
|
||||
// for each type, is an array with {declNames, pkgNames} to get to this one
|
||||
|
||||
let canonTypeDecls = null; // lazy; use getCanonTypeDecl
|
||||
let canonTypeDecls = null; // lazy; use getCanonTypeDecl
|
||||
|
||||
|
||||
let curNav = {
|
||||
showPrivDecls: false,
|
||||
// each element is a package name, e.g. @import("a") then within there @import("b")
|
||||
// starting implicitly from root package
|
||||
pkgNames: [],
|
||||
// same as above except actual packages, not names
|
||||
pkgObjs: [],
|
||||
// Each element is a decl name, `a.b.c`, a is 0, b is 1, c is 2, etc.
|
||||
// empty array means refers to the package itself
|
||||
declNames: [],
|
||||
// these will be all types, except the last one may be a type or a decl
|
||||
declObjs: [],
|
||||
|
||||
|
||||
let curNav = {
|
||||
showPrivDecls: false,
|
||||
// each element is a package name, e.g. @import("a") then within there @import("b")
|
||||
// starting implicitly from root package
|
||||
pkgNames: [],
|
||||
// same as above except actual packages, not names
|
||||
pkgObjs: [],
|
||||
// Each element is a decl name, `a.b.c`, a is 0, b is 1, c is 2, etc.
|
||||
// empty array means refers to the package itself
|
||||
declNames: [],
|
||||
// these will be all types, except the last one may be a type or a decl
|
||||
declObjs: [],
|
||||
// (a, b, c, d) comptime call; result is the value the docs refer to
|
||||
callName: null,
|
||||
};
|
||||
|
||||
// (a, b, c, d) comptime call; result is the value the docs refer to
|
||||
callName: null,
|
||||
};
|
||||
let curNavSearch = "";
|
||||
let curSearchIndex = -1;
|
||||
let imFeelingLucky = false;
|
||||
|
||||
let curNavSearch = "";
|
||||
let curSearchIndex = -1;
|
||||
let imFeelingLucky = false;
|
||||
let rootIsStd = detectRootIsStd();
|
||||
|
||||
let rootIsStd = detectRootIsStd();
|
||||
// map of decl index to list of non-generic fn indexes
|
||||
// let nodesToFnsMap = indexNodesToFns();
|
||||
// map of decl index to list of comptime fn calls
|
||||
// let nodesToCallsMap = indexNodesToCalls();
|
||||
|
||||
// map of decl index to list of non-generic fn indexes
|
||||
// let nodesToFnsMap = indexNodesToFns();
|
||||
// map of decl index to list of comptime fn calls
|
||||
// let nodesToCallsMap = indexNodesToCalls();
|
||||
|
||||
domSearch.addEventListener('keydown', onSearchKeyDown, false);
|
||||
domPrivDeclsBox.addEventListener('change', function() {
|
||||
if (this.checked != curNav.showPrivDecls) {
|
||||
if (this.checked && location.hash.length > 1 && location.hash[1] != '*'){
|
||||
location.hash = "#*" + location.hash.substring(1);
|
||||
return;
|
||||
}
|
||||
if (!this.checked && location.hash.length > 1 && location.hash[1] == '*') {
|
||||
location.hash = "#" + location.hash.substring(2);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}, false);
|
||||
|
||||
if (location.hash == "") {
|
||||
location.hash = "#root";
|
||||
}
|
||||
|
||||
window.addEventListener('hashchange', onHashChange, false);
|
||||
window.addEventListener('keydown', onWindowKeyDown, false);
|
||||
domSearch.disabled = false;
|
||||
domSearch.addEventListener("keydown", onSearchKeyDown, false);
|
||||
domSearch.addEventListener("focus", ev => {
|
||||
domSearchPlaceholder.classList.add("hidden");
|
||||
});
|
||||
domSearch.addEventListener("blur", ev => {
|
||||
if (domSearch.value.length == 0)
|
||||
domSearchPlaceholder.classList.remove("hidden");
|
||||
});
|
||||
domSectSearchAllResultsLink.addEventListener('click', onClickSearchShowAllResults, false);
|
||||
function onClickSearchShowAllResults(ev) {
|
||||
ev.preventDefault();
|
||||
ev.stopPropagation();
|
||||
searchTrimResults = false;
|
||||
onHashChange();
|
||||
}
|
||||
|
||||
function renderTitle() {
|
||||
let list = curNav.pkgNames.concat(curNav.declNames);
|
||||
let suffix = " - Zig";
|
||||
if (list.length === 0) {
|
||||
if (rootIsStd) {
|
||||
document.title = "std" + suffix;
|
||||
} else {
|
||||
document.title = zigAnalysis.params.rootName + suffix;
|
||||
}
|
||||
} else {
|
||||
document.title = list.join('.') + suffix;
|
||||
domPrivDeclsBox.addEventListener(
|
||||
"change",
|
||||
function () {
|
||||
if (this.checked != curNav.showPrivDecls) {
|
||||
if (
|
||||
this.checked &&
|
||||
location.hash.length > 1 &&
|
||||
location.hash[1] != "*"
|
||||
) {
|
||||
location.hash = "#*" + location.hash.substring(1);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function isDecl(x) {
|
||||
return "value" in x;
|
||||
}
|
||||
|
||||
|
||||
function isType(x) {
|
||||
return "kind" in x && !("value" in x);
|
||||
}
|
||||
|
||||
|
||||
function isContainerType(x) {
|
||||
return isType(x) && typeKindIsContainer((x).kind) ;
|
||||
}
|
||||
|
||||
|
||||
function typeShorthandName(expr) {
|
||||
let resolvedExpr = resolveValue({expr: expr});
|
||||
if (!("type" in resolvedExpr)) {
|
||||
return null;
|
||||
if (
|
||||
!this.checked &&
|
||||
location.hash.length > 1 &&
|
||||
location.hash[1] == "*"
|
||||
) {
|
||||
location.hash = "#" + location.hash.substring(2);
|
||||
return;
|
||||
}
|
||||
let type = (zigAnalysis.types[resolvedExpr.type]);
|
||||
|
||||
outer: for (let i = 0; i < 10000; i += 1) {
|
||||
switch (type.kind) {
|
||||
case typeKinds.Optional:
|
||||
case typeKinds.Pointer:
|
||||
let child = (type).child;
|
||||
let resolvedChild = resolveValue(child);
|
||||
if ("type" in resolvedChild) {
|
||||
type = zigAnalysis.types[resolvedChild.type];
|
||||
continue;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
default:
|
||||
break outer;
|
||||
}
|
||||
|
||||
if (i == 9999) throw "Exhausted typeShorthandName quota";
|
||||
}
|
||||
|
||||
|
||||
|
||||
let name = undefined;
|
||||
if (type.kind === typeKinds.Struct) {
|
||||
name = "struct";
|
||||
} else if (type.kind === typeKinds.Enum) {
|
||||
name = "enum";
|
||||
} else if (type.kind === typeKinds.Union) {
|
||||
name = "union";
|
||||
} else {
|
||||
console.log("TODO: unhalndled case in typeShortName");
|
||||
return null;
|
||||
}
|
||||
|
||||
return escapeHtml(name);
|
||||
}
|
||||
|
||||
|
||||
function typeKindIsContainer(typeKind) {
|
||||
return typeKind === typeKinds.Struct ||
|
||||
typeKind === typeKinds.Union ||
|
||||
typeKind === typeKinds.Enum;
|
||||
}
|
||||
|
||||
|
||||
function declCanRepresentTypeKind(typeKind) {
|
||||
return typeKind === typeKinds.ErrorSet || typeKindIsContainer(typeKind);
|
||||
}
|
||||
|
||||
//
|
||||
// function findCteInRefPath(path) {
|
||||
// for (let i = path.length - 1; i >= 0; i -= 1) {
|
||||
// const ref = path[i];
|
||||
// if ("string" in ref) continue;
|
||||
// if ("comptimeExpr" in ref) return ref;
|
||||
// if ("refPath" in ref) return findCteInRefPath(ref.refPath);
|
||||
// return null;
|
||||
// }
|
||||
|
||||
// return null;
|
||||
// }
|
||||
|
||||
|
||||
function resolveValue(value) {
|
||||
let i = 0;
|
||||
while(i < 1000) {
|
||||
i += 1;
|
||||
|
||||
if ("refPath" in value.expr) {
|
||||
value = {expr: value.expr.refPath[value.expr.refPath.length -1]};
|
||||
continue;
|
||||
}
|
||||
|
||||
if ("declRef" in value.expr) {
|
||||
value = zigAnalysis.decls[value.expr.declRef].value;
|
||||
continue;
|
||||
}
|
||||
|
||||
if ("as" in value.expr) {
|
||||
value = {
|
||||
typeRef: zigAnalysis.exprs[value.expr.as.typeRefArg],
|
||||
expr: zigAnalysis.exprs[value.expr.as.exprArg],
|
||||
};
|
||||
continue;
|
||||
}
|
||||
|
||||
return value;
|
||||
|
||||
}
|
||||
console.assert(false);
|
||||
return ({});
|
||||
}
|
||||
|
||||
|
||||
// function typeOfDecl(decl){
|
||||
// return decl.value.typeRef;
|
||||
//
|
||||
// let i = 0;
|
||||
// while(i < 1000) {
|
||||
// i += 1;
|
||||
// console.assert(isDecl(decl));
|
||||
// if ("type" in decl.value) {
|
||||
// return ({ type: typeTypeId });
|
||||
// }
|
||||
//
|
||||
//// if ("string" in decl.value) {
|
||||
//// return ({ type: {
|
||||
//// kind: typeKinds.Pointer,
|
||||
//// size: pointerSizeEnum.One,
|
||||
//// child: });
|
||||
//// }
|
||||
//
|
||||
// if ("refPath" in decl.value) {
|
||||
// decl = ({
|
||||
// value: decl.value.refPath[decl.value.refPath.length -1]
|
||||
// });
|
||||
// continue;
|
||||
// }
|
||||
//
|
||||
// if ("declRef" in decl.value) {
|
||||
// decl = zigAnalysis.decls[decl.value.declRef];
|
||||
// continue;
|
||||
// }
|
||||
//
|
||||
// if ("int" in decl.value) {
|
||||
// return decl.value.int.typeRef;
|
||||
// }
|
||||
//
|
||||
// if ("float" in decl.value) {
|
||||
// return decl.value.float.typeRef;
|
||||
// }
|
||||
//
|
||||
// if ("array" in decl.value) {
|
||||
// return decl.value.array.typeRef;
|
||||
// }
|
||||
//
|
||||
// if ("struct" in decl.value) {
|
||||
// return decl.value.struct.typeRef;
|
||||
// }
|
||||
//
|
||||
// if ("comptimeExpr" in decl.value) {
|
||||
// const cte = zigAnalysis.comptimeExprs[decl.value.comptimeExpr];
|
||||
// return cte.typeRef;
|
||||
// }
|
||||
//
|
||||
// if ("call" in decl.value) {
|
||||
// const fn_call = zigAnalysis.calls[decl.value.call];
|
||||
// let fn_decl = undefined;
|
||||
// if ("declRef" in fn_call.func) {
|
||||
// fn_decl = zigAnalysis.decls[fn_call.func.declRef];
|
||||
// } else if ("refPath" in fn_call.func) {
|
||||
// console.assert("declRef" in fn_call.func.refPath[fn_call.func.refPath.length -1]);
|
||||
// fn_decl = zigAnalysis.decls[fn_call.func.refPath[fn_call.func.refPath.length -1].declRef];
|
||||
// } else throw {};
|
||||
//
|
||||
// const fn_decl_value = resolveValue(fn_decl.value);
|
||||
// console.assert("type" in fn_decl_value); //TODO handle comptimeExpr
|
||||
// const fn_type = (zigAnalysis.types[fn_decl_value.type]);
|
||||
// console.assert(fn_type.kind === typeKinds.Fn);
|
||||
// return fn_type.ret;
|
||||
// }
|
||||
//
|
||||
// if ("void" in decl.value) {
|
||||
// return ({ type: typeTypeId });
|
||||
// }
|
||||
//
|
||||
// if ("bool" in decl.value) {
|
||||
// return ({ type: typeKinds.Bool });
|
||||
// }
|
||||
//
|
||||
// console.log("TODO: handle in `typeOfDecl` more cases: ", decl);
|
||||
// console.assert(false);
|
||||
// throw {};
|
||||
// }
|
||||
// console.assert(false);
|
||||
// return ({});
|
||||
// }
|
||||
|
||||
function render() {
|
||||
domStatus.classList.add("hidden");
|
||||
domFnProto.classList.add("hidden");
|
||||
domSectParams.classList.add("hidden");
|
||||
domTldDocs.classList.add("hidden");
|
||||
domSectMainPkg.classList.add("hidden");
|
||||
domSectPkgs.classList.add("hidden");
|
||||
domSectTypes.classList.add("hidden");
|
||||
domSectTests.classList.add("hidden");
|
||||
domSectNamespaces.classList.add("hidden");
|
||||
domSectErrSets.classList.add("hidden");
|
||||
domSectFns.classList.add("hidden");
|
||||
domSectFields.classList.add("hidden");
|
||||
domSectSearchResults.classList.add("hidden");
|
||||
domSectSearchNoResults.classList.add("hidden");
|
||||
domSectInfo.classList.add("hidden");
|
||||
domHdrName.classList.add("hidden");
|
||||
domSectNav.classList.add("hidden");
|
||||
domSectFnErrors.classList.add("hidden");
|
||||
domFnExamples.classList.add("hidden");
|
||||
domFnNoExamples.classList.add("hidden");
|
||||
domDeclNoRef.classList.add("hidden");
|
||||
domFnErrorsAnyError.classList.add("hidden");
|
||||
domTableFnErrors.classList.add("hidden");
|
||||
domSectGlobalVars.classList.add("hidden");
|
||||
domSectValues.classList.add("hidden");
|
||||
|
||||
renderTitle();
|
||||
renderInfo();
|
||||
renderPkgList();
|
||||
|
||||
domPrivDeclsBox.checked = curNav.showPrivDecls;
|
||||
|
||||
if (curNavSearch !== "") {
|
||||
return renderSearch();
|
||||
}
|
||||
|
||||
let rootPkg = zigAnalysis.packages[zigAnalysis.rootPkg];
|
||||
let pkg = rootPkg;
|
||||
curNav.pkgObjs = [pkg];
|
||||
for (let i = 0; i < curNav.pkgNames.length; i += 1) {
|
||||
let childPkg = zigAnalysis.packages[pkg.table[curNav.pkgNames[i]]];
|
||||
if (childPkg == null) {
|
||||
return render404();
|
||||
}
|
||||
pkg = childPkg;
|
||||
curNav.pkgObjs.push(pkg);
|
||||
}
|
||||
|
||||
|
||||
let currentType = zigAnalysis.types[pkg.main];
|
||||
curNav.declObjs = [currentType];
|
||||
for (let i = 0; i < curNav.declNames.length; i += 1) {
|
||||
|
||||
|
||||
let childDecl = findSubDecl((currentType), curNav.declNames[i]);
|
||||
if (childDecl == null) {
|
||||
return render404();
|
||||
}
|
||||
|
||||
let childDeclValue = resolveValue((childDecl).value).expr;
|
||||
if ("type" in childDeclValue) {
|
||||
|
||||
const t = zigAnalysis.types[childDeclValue.type];
|
||||
if (t.kind != typeKinds.Fn) {
|
||||
childDecl = t;
|
||||
}
|
||||
}
|
||||
|
||||
currentType = (childDecl);
|
||||
curNav.declObjs.push(currentType);
|
||||
}
|
||||
|
||||
renderNav();
|
||||
|
||||
let last = curNav.declObjs[curNav.declObjs.length - 1];
|
||||
let lastIsDecl = isDecl(last);
|
||||
let lastIsType = isType(last);
|
||||
let lastIsContainerType = isContainerType(last);
|
||||
|
||||
if (lastIsContainerType) {
|
||||
return renderContainer((last));
|
||||
}
|
||||
|
||||
if (!lastIsDecl && !lastIsType) {
|
||||
return renderUnknownDecl((last));
|
||||
}
|
||||
|
||||
if (lastIsType) {
|
||||
return renderType((last));
|
||||
}
|
||||
|
||||
if (lastIsDecl && last.kind === 'var') {
|
||||
return renderVar((last));
|
||||
}
|
||||
|
||||
if (lastIsDecl && last.kind === 'const') {
|
||||
let typeObj = zigAnalysis.types[resolveValue((last).value).expr.type];
|
||||
if (typeObj && typeObj.kind === typeKinds.Fn) {
|
||||
return renderFn((last));
|
||||
}
|
||||
|
||||
return renderValue((last));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function renderUnknownDecl(decl) {
|
||||
domDeclNoRef.classList.remove("hidden");
|
||||
|
||||
let docs = zigAnalysis.astNodes[decl.src].docs;
|
||||
if (docs != null) {
|
||||
domTldDocs.innerHTML = markdown(docs);
|
||||
} else {
|
||||
domTldDocs.innerHTML = '<p>There are no doc comments for this declaration.</p>';
|
||||
}
|
||||
domTldDocs.classList.remove("hidden");
|
||||
}
|
||||
|
||||
|
||||
function typeIsErrSet(typeIndex) {
|
||||
let typeObj = zigAnalysis.types[typeIndex];
|
||||
return typeObj.kind === typeKinds.ErrorSet;
|
||||
}
|
||||
|
||||
|
||||
function typeIsStructWithNoFields(typeIndex) {
|
||||
let typeObj = zigAnalysis.types[typeIndex];
|
||||
if (typeObj.kind !== typeKinds.Struct)
|
||||
return false;
|
||||
return (typeObj).fields.length == 0;
|
||||
}
|
||||
|
||||
|
||||
function typeIsGenericFn(typeIndex) {
|
||||
let typeObj = zigAnalysis.types[typeIndex];
|
||||
if (typeObj.kind !== typeKinds.Fn) {
|
||||
return false;
|
||||
}
|
||||
return (typeObj).generic_ret != null;
|
||||
}
|
||||
|
||||
|
||||
function renderFn(fnDecl) {
|
||||
if ("refPath" in fnDecl.value.expr) {
|
||||
let last = fnDecl.value.expr.refPath.length - 1;
|
||||
let lastExpr = fnDecl.value.expr.refPath[last];
|
||||
console.assert("declRef" in lastExpr);
|
||||
fnDecl = zigAnalysis.decls[lastExpr.declRef];
|
||||
}
|
||||
|
||||
let value = resolveValue(fnDecl.value);
|
||||
console.assert("type" in value.expr);
|
||||
let typeObj = (zigAnalysis.types[value.expr.type]);
|
||||
|
||||
domFnProtoCode.innerHTML = exprName(value.expr, {
|
||||
wantHtml: true,
|
||||
wantLink: true,
|
||||
fnDecl,
|
||||
});
|
||||
|
||||
let docsSource = null;
|
||||
let srcNode = zigAnalysis.astNodes[fnDecl.src];
|
||||
if (srcNode.docs != null) {
|
||||
docsSource = srcNode.docs;
|
||||
}
|
||||
|
||||
renderFnParamDocs(fnDecl, typeObj);
|
||||
|
||||
let retExpr = resolveValue({expr:typeObj.ret}).expr;
|
||||
if ("type" in retExpr) {
|
||||
let retIndex = retExpr.type;
|
||||
let errSetTypeIndex = (null);
|
||||
let retType = zigAnalysis.types[retIndex];
|
||||
if (retType.kind === typeKinds.ErrorSet) {
|
||||
errSetTypeIndex = retIndex;
|
||||
} else if (retType.kind === typeKinds.ErrorUnion) {
|
||||
errSetTypeIndex = (retType).err.type;
|
||||
}
|
||||
if (errSetTypeIndex != null) {
|
||||
let errSetType = (zigAnalysis.types[errSetTypeIndex]);
|
||||
renderErrorSet(errSetType);
|
||||
}
|
||||
}
|
||||
|
||||
let protoSrcIndex = fnDecl.src;
|
||||
if (typeIsGenericFn(value.expr.type)) {
|
||||
// does the generic_ret contain a container?
|
||||
var resolvedGenericRet = resolveValue({expr: typeObj.generic_ret});
|
||||
|
||||
if ("call" in resolvedGenericRet.expr){
|
||||
let call = zigAnalysis.calls[resolvedGenericRet.expr.call];
|
||||
let resolvedFunc = resolveValue({expr: call.func});
|
||||
if (!("type" in resolvedFunc.expr)) return;
|
||||
let callee = zigAnalysis.types[resolvedFunc.expr.type];
|
||||
if (!callee.generic_ret) return;
|
||||
resolvedGenericRet = resolveValue({expr: callee.generic_ret});
|
||||
}
|
||||
|
||||
// TODO: see if unwrapping the `as` here is a good idea or not.
|
||||
if ("as" in resolvedGenericRet.expr) {
|
||||
resolvedGenericRet = {
|
||||
expr: zigAnalysis.exprs[resolvedGenericRet.expr.as.exprArg]
|
||||
};
|
||||
}
|
||||
|
||||
if (!("type" in resolvedGenericRet.expr)) return;
|
||||
const genericType = zigAnalysis.types[resolvedGenericRet.expr.type];
|
||||
if (isContainerType(genericType)) {
|
||||
renderContainer(genericType)
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
// old code
|
||||
// let instantiations = nodesToFnsMap[protoSrcIndex];
|
||||
// let calls = nodesToCallsMap[protoSrcIndex];
|
||||
// if (instantiations == null && calls == null) {
|
||||
// domFnNoExamples.classList.remove("hidden");
|
||||
// } else if (calls != null) {
|
||||
// // if (fnObj.combined === undefined) fnObj.combined = allCompTimeFnCallsResult(calls);
|
||||
// if (fnObj.combined != null) renderContainer(fnObj.combined);
|
||||
|
||||
// resizeDomList(domListFnExamples, calls.length, '<li></li>');
|
||||
|
||||
// for (let callI = 0; callI < calls.length; callI += 1) {
|
||||
// let liDom = domListFnExamples.children[callI];
|
||||
// liDom.innerHTML = getCallHtml(fnDecl, calls[callI]);
|
||||
// }
|
||||
|
||||
// domFnExamples.classList.remove("hidden");
|
||||
// } else if (instantiations != null) {
|
||||
// // TODO
|
||||
// }
|
||||
} else {
|
||||
|
||||
domFnExamples.classList.add("hidden");
|
||||
domFnNoExamples.classList.add("hidden");
|
||||
}
|
||||
|
||||
let protoSrcNode = zigAnalysis.astNodes[protoSrcIndex];
|
||||
if (docsSource == null && protoSrcNode != null && protoSrcNode.docs != null) {
|
||||
docsSource = protoSrcNode.docs;
|
||||
}
|
||||
if (docsSource != null) {
|
||||
domTldDocs.innerHTML = markdown(docsSource);
|
||||
domTldDocs.classList.remove("hidden");
|
||||
}
|
||||
domFnProto.classList.remove("hidden");
|
||||
}
|
||||
|
||||
|
||||
function renderFnParamDocs(fnDecl, typeObj) {
|
||||
let docCount = 0;
|
||||
|
||||
let fnNode = zigAnalysis.astNodes[fnDecl.src];
|
||||
let fields = (fnNode.fields);
|
||||
let isVarArgs = fnNode.varArgs;
|
||||
|
||||
for (let i = 0; i < fields.length; i += 1) {
|
||||
let field = fields[i];
|
||||
let fieldNode = zigAnalysis.astNodes[field];
|
||||
if (fieldNode.docs != null) {
|
||||
docCount += 1;
|
||||
}
|
||||
}
|
||||
if (docCount == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
resizeDomList(domListParams, docCount, '<div></div>');
|
||||
let domIndex = 0;
|
||||
|
||||
for (let i = 0; i < fields.length; i += 1) {
|
||||
let field = fields[i];
|
||||
let fieldNode = zigAnalysis.astNodes[field];
|
||||
let docs = fieldNode.docs;
|
||||
if (fieldNode.docs == null) {
|
||||
continue;
|
||||
}
|
||||
let docsNonEmpty = docs !== "";
|
||||
let divDom = domListParams.children[domIndex];
|
||||
domIndex += 1;
|
||||
|
||||
|
||||
let value = typeObj.params[i];
|
||||
let preClass = docsNonEmpty ? ' class="fieldHasDocs"' : "";
|
||||
let html = '<pre' + preClass + '>' + escapeHtml((fieldNode.name)) + ": ";
|
||||
if (isVarArgs && i === typeObj.params.length - 1) {
|
||||
html += '...';
|
||||
} else {
|
||||
let name = exprName(value, {wantHtml: false, wantLink: false});
|
||||
html += '<span class="tok-kw">' + name + '</span>';
|
||||
}
|
||||
|
||||
html += ',</pre>';
|
||||
|
||||
if (docsNonEmpty) {
|
||||
html += '<div class="fieldDocs">' + markdown(docs) + '</div>';
|
||||
}
|
||||
divDom.innerHTML = html;
|
||||
}
|
||||
domSectParams.classList.remove("hidden");
|
||||
}
|
||||
|
||||
function renderNav() {
|
||||
let len = curNav.pkgNames.length + curNav.declNames.length;
|
||||
resizeDomList(domListNav, len, '<li><a href="#"></a></li>');
|
||||
let list = [];
|
||||
let hrefPkgNames = [];
|
||||
let hrefDeclNames = ([]);
|
||||
for (let i = 0; i < curNav.pkgNames.length; i += 1) {
|
||||
hrefPkgNames.push(curNav.pkgNames[i]);
|
||||
let name = curNav.pkgNames[i];
|
||||
if (name == "root") name = zigAnalysis.rootPkgName;
|
||||
list.push({
|
||||
name: name,
|
||||
link: navLink(hrefPkgNames, hrefDeclNames),
|
||||
});
|
||||
}
|
||||
for (let i = 0; i < curNav.declNames.length; i += 1) {
|
||||
hrefDeclNames.push(curNav.declNames[i]);
|
||||
list.push({
|
||||
name: curNav.declNames[i],
|
||||
link: navLink(hrefPkgNames, hrefDeclNames),
|
||||
});
|
||||
}
|
||||
|
||||
for (let i = 0; i < list.length; i += 1) {
|
||||
let liDom = domListNav.children[i];
|
||||
let aDom = liDom.children[0];
|
||||
aDom.textContent = list[i].name;
|
||||
aDom.setAttribute('href', list[i].link);
|
||||
if (i + 1 == list.length) {
|
||||
aDom.classList.add("active");
|
||||
} else {
|
||||
aDom.classList.remove("active");
|
||||
}
|
||||
}
|
||||
|
||||
domSectNav.classList.remove("hidden");
|
||||
}
|
||||
|
||||
function renderInfo() {
|
||||
domTdZigVer.textContent = zigAnalysis.params.zigVersion;
|
||||
//domTdTarget.textContent = zigAnalysis.params.builds[0].target;
|
||||
|
||||
domSectInfo.classList.remove("hidden");
|
||||
}
|
||||
|
||||
function render404() {
|
||||
domStatus.textContent = "404 Not Found";
|
||||
domStatus.classList.remove("hidden");
|
||||
}
|
||||
|
||||
function renderPkgList() {
|
||||
let rootPkg = zigAnalysis.packages[zigAnalysis.rootPkg];
|
||||
let list = [];
|
||||
for (let key in rootPkg.table) {
|
||||
let pkgIndex = rootPkg.table[key];
|
||||
if (zigAnalysis.packages[pkgIndex] == null) continue;
|
||||
if (key == zigAnalysis.params.rootName) continue;
|
||||
list.push({
|
||||
name: key,
|
||||
pkg: pkgIndex,
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
let aDom = domSectMainPkg.children[1].children[0].children[0];
|
||||
aDom.textContent = zigAnalysis.rootPkgName;
|
||||
aDom.setAttribute('href', navLinkPkg(zigAnalysis.rootPkg));
|
||||
if (zigAnalysis.params.rootName === curNav.pkgNames[0]) {
|
||||
aDom.classList.add("active");
|
||||
} else {
|
||||
aDom.classList.remove("active");
|
||||
}
|
||||
domSectMainPkg.classList.remove("hidden");
|
||||
}
|
||||
|
||||
list.sort(function(a, b) {
|
||||
return operatorCompare(a.name.toLowerCase(), b.name.toLowerCase());
|
||||
});
|
||||
|
||||
if (list.length !== 0) {
|
||||
resizeDomList(domListPkgs, list.length, '<li><a href="#"></a></li>');
|
||||
for (let i = 0; i < list.length; i += 1) {
|
||||
let liDom = domListPkgs.children[i];
|
||||
let aDom = liDom.children[0];
|
||||
aDom.textContent = list[i].name;
|
||||
aDom.setAttribute('href', navLinkPkg(list[i].pkg));
|
||||
if (list[i].name === curNav.pkgNames[0]) {
|
||||
aDom.classList.add("active");
|
||||
} else {
|
||||
aDom.classList.remove("active");
|
||||
}
|
||||
}
|
||||
|
||||
domSectPkgs.classList.remove("hidden");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
function navLink(pkgNames, declNames, callName) {
|
||||
let base = '#';
|
||||
if (curNav.showPrivDecls) {
|
||||
base += "*";
|
||||
}
|
||||
|
||||
if (pkgNames.length === 0 && declNames.length === 0) {
|
||||
return base;
|
||||
} else if (declNames.length === 0 && callName == null) {
|
||||
return base + pkgNames.join('.');
|
||||
} else if (callName == null) {
|
||||
return base + pkgNames.join('.') + ';' + declNames.join('.');
|
||||
} else {
|
||||
return base + pkgNames.join('.') + ';' + declNames.join('.') + ';' + callName;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function navLinkPkg(pkgIndex) {
|
||||
return navLink(canonPkgPaths[pkgIndex], []);
|
||||
}
|
||||
|
||||
|
||||
function navLinkDecl(childName) {
|
||||
return navLink(curNav.pkgNames, curNav.declNames.concat([childName]));
|
||||
}
|
||||
|
||||
//
|
||||
// function navLinkCall(callObj) {
|
||||
// let declNamesCopy = curNav.declNames.concat([]);
|
||||
// let callName = (declNamesCopy.pop());
|
||||
|
||||
// callName += '(';
|
||||
// for (let arg_i = 0; arg_i < callObj.args.length; arg_i += 1) {
|
||||
// if (arg_i !== 0) callName += ',';
|
||||
// let argObj = callObj.args[arg_i];
|
||||
// callName += getValueText(argObj, argObj, false, false);
|
||||
// }
|
||||
// callName += ')';
|
||||
|
||||
// declNamesCopy.push(callName);
|
||||
// return navLink(curNav.pkgNames, declNamesCopy);
|
||||
// }
|
||||
|
||||
|
||||
function resizeDomListDl(dlDom, desiredLen) {
|
||||
// add the missing dom entries
|
||||
for (let i = dlDom.childElementCount / 2; i < desiredLen; i += 1) {
|
||||
dlDom.insertAdjacentHTML('beforeend', '<dt></dt><dd></dd>');
|
||||
}
|
||||
// remove extra dom entries
|
||||
while (desiredLen < dlDom.childElementCount / 2) {
|
||||
dlDom.removeChild(dlDom.lastChild);
|
||||
dlDom.removeChild(dlDom.lastChild);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function resizeDomList(listDom, desiredLen, templateHtml) {
|
||||
// add the missing dom entries
|
||||
for (let i = listDom.childElementCount; i < desiredLen; i += 1) {
|
||||
listDom.insertAdjacentHTML('beforeend', templateHtml);
|
||||
}
|
||||
// remove extra dom entries
|
||||
while (desiredLen < listDom.childElementCount) {
|
||||
listDom.removeChild(listDom.lastChild);
|
||||
}
|
||||
}
|
||||
|
||||
function walkResultTypeRef(wr) {
|
||||
if (wr.typeRef) return wr.typeRef;
|
||||
let resolved = resolveValue(wr);
|
||||
if (wr === resolved) {
|
||||
return {type: 0};
|
||||
}
|
||||
return walkResultTypeRef(resolved);
|
||||
},
|
||||
false
|
||||
);
|
||||
|
||||
if (location.hash == "") {
|
||||
location.hash = "#root";
|
||||
}
|
||||
|
||||
// make the modal disappear if you click outside it
|
||||
domHelpModal.addEventListener("click", ev => {
|
||||
if (ev.target.className == "help-modal")
|
||||
domHelpModal.classList.add("hidden");
|
||||
});
|
||||
|
||||
window.addEventListener("hashchange", onHashChange, false);
|
||||
window.addEventListener("keydown", onWindowKeyDown, false);
|
||||
onHashChange();
|
||||
|
||||
let langRefVersion = zigAnalysis.params.zigVersion;
|
||||
if (!/^\d+\.\d+\.\d+$/.test(langRefVersion)) {
|
||||
// the version is probably not released yet
|
||||
langRefVersion = "master";
|
||||
}
|
||||
domLangRefLink.href = `https://ziglang.org/documentation/${langRefVersion}/`;
|
||||
|
||||
function renderTitle() {
|
||||
let list = curNav.pkgNames.concat(curNav.declNames);
|
||||
let suffix = " - Zig";
|
||||
if (list.length === 0) {
|
||||
if (rootIsStd) {
|
||||
document.title = "std" + suffix;
|
||||
} else {
|
||||
document.title = zigAnalysis.params.rootName + suffix;
|
||||
}
|
||||
} else {
|
||||
document.title = list.join(".") + suffix;
|
||||
}
|
||||
|
||||
function exprName(expr, opts) {
|
||||
switch (Object.keys(expr)[0]) {
|
||||
default: throw "this expression is not implemented yet";
|
||||
case "bool": {
|
||||
if (expr.bool) {
|
||||
return "true";
|
||||
}
|
||||
return "false";
|
||||
}
|
||||
case "&": {
|
||||
return "&" + exprName(zigAnalysis.exprs[expr["&"]]);
|
||||
}
|
||||
case "compileError": {
|
||||
let compileError = expr.compileError;
|
||||
return compileError;
|
||||
}
|
||||
case "enumLiteral": {
|
||||
let literal = expr.enumLiteral;
|
||||
return "." + literal;
|
||||
}
|
||||
case "void": {
|
||||
return "void";
|
||||
}
|
||||
case "slice":{
|
||||
let payloadHtml = "";
|
||||
const lhsExpr = zigAnalysis.exprs[expr.slice.lhs];
|
||||
const startExpr = zigAnalysis.exprs[expr.slice.start];
|
||||
let decl = exprName(lhsExpr);
|
||||
let start = exprName(startExpr);
|
||||
let end = "";
|
||||
let sentinel = "";
|
||||
if (expr.slice['end']) {
|
||||
const endExpr = zigAnalysis.exprs[expr.slice.end];
|
||||
let end_ = exprName(endExpr);
|
||||
end += end_;
|
||||
}
|
||||
if (expr.slice['sentinel']) {
|
||||
const sentinelExpr = zigAnalysis.exprs[expr.slice.sentinel];
|
||||
let sentinel_ = exprName(sentinelExpr);
|
||||
sentinel += " :" + sentinel_;
|
||||
}
|
||||
payloadHtml += decl + "["+ start + ".." + end + sentinel + "]";
|
||||
return payloadHtml;
|
||||
}
|
||||
case "sliceIndex": {
|
||||
const sliceIndex = zigAnalysis.exprs[expr.sliceIndex];
|
||||
return exprName(sliceIndex, opts);
|
||||
}
|
||||
case "cmpxchg":{
|
||||
const typeIndex = zigAnalysis.exprs[expr.cmpxchg.type];
|
||||
const ptrIndex = zigAnalysis.exprs[expr.cmpxchg.ptr];
|
||||
const expectedValueIndex = zigAnalysis.exprs[expr.cmpxchg.expected_value];
|
||||
const newValueIndex = zigAnalysis.exprs[expr.cmpxchg.new_value];
|
||||
const successOrderIndex = zigAnalysis.exprs[expr.cmpxchg.success_order];
|
||||
const failureOrderIndex = zigAnalysis.exprs[expr.cmpxchg.failure_order];
|
||||
}
|
||||
|
||||
const type = exprName(typeIndex, opts);
|
||||
const ptr = exprName(ptrIndex, opts);
|
||||
const expectedValue = exprName(expectedValueIndex, opts);
|
||||
const newValue = exprName(newValueIndex, opts);
|
||||
const successOrder = exprName(successOrderIndex, opts);
|
||||
const failureOrder = exprName(failureOrderIndex, opts);
|
||||
function isDecl(x) {
|
||||
return "value" in x;
|
||||
}
|
||||
|
||||
let fnName = "@";
|
||||
function isType(x) {
|
||||
return "kind" in x && !("value" in x);
|
||||
}
|
||||
|
||||
switch (expr.cmpxchg.name) {
|
||||
case "cmpxchg_strong": {
|
||||
fnName += "cmpxchgStrong"
|
||||
break;
|
||||
}
|
||||
case "cmpxchg_weak": {
|
||||
fnName += "cmpxchgWeak"
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
console.log("There's only cmpxchg_strong and cmpxchg_weak");
|
||||
}
|
||||
}
|
||||
|
||||
return fnName + "(" + type + ", " + ptr + ", " + expectedValue + ", "+ newValue + ", "+"." +successOrder + ", "+ "." +failureOrder + ")";
|
||||
}
|
||||
case "cmpxchgIndex": {
|
||||
const cmpxchgIndex = zigAnalysis.exprs[expr.cmpxchgIndex];
|
||||
return exprName(cmpxchgIndex, opts);
|
||||
}
|
||||
case "switchOp":{
|
||||
let condExpr = zigAnalysis.exprs[expr.switchOp.cond_index];
|
||||
let ast = zigAnalysis.astNodes[expr.switchOp.ast];
|
||||
let file_name = expr.switchOp.file_name;
|
||||
let outer_decl_index = expr.switchOp.outer_decl;
|
||||
let outer_decl = zigAnalysis.types[outer_decl_index];
|
||||
let line = 0;
|
||||
// console.log(expr.switchOp)
|
||||
// console.log(outer_decl)
|
||||
while (outer_decl_index !== 0 && outer_decl.line_number > 0) {
|
||||
line += outer_decl.line_number;
|
||||
outer_decl_index = outer_decl.outer_decl;
|
||||
outer_decl = zigAnalysis.types[outer_decl_index];
|
||||
// console.log(outer_decl)
|
||||
}
|
||||
line += ast.line + 1;
|
||||
let payloadHtml = "";
|
||||
let cond = exprName(condExpr, opts);
|
||||
function isContainerType(x) {
|
||||
return isType(x) && typeKindIsContainer(x.kind);
|
||||
}
|
||||
|
||||
payloadHtml += "</br>" + "node_name: " + ast.name + "</br>" + "file: " + file_name + "</br>" + "line: " + line + "</br>";
|
||||
payloadHtml += "switch(" + cond + ") {" + "<a href=\"https://github.com/ziglang/zig/tree/master/lib/std/" + file_name + "#L" + line + "\">" +"..." + "</a>}";
|
||||
return payloadHtml;
|
||||
// Produce a short container keyword ("struct" / "enum" / "union") for the
// type a value resolves to, unwrapping Optional/Pointer wrappers first.
// Returns null when the value is not a type or not a container.
// NOTE(review): the original contained a merge-spliced `case "switchIndex"`
// (an exprName case) inside this switch over typeKinds, which also left the
// braces unbalanced — removed here.
function typeShorthandName(expr) {
  let resolvedExpr = resolveValue({ expr: expr });
  if (!("type" in resolvedExpr)) {
    return null;
  }
  let type = zigAnalysis.types[resolvedExpr.type];

  // Unwrap Optional/Pointer chains; hard cap guards against cycles.
  outer: for (let i = 0; i < 10000; i += 1) {
    switch (type.kind) {
      case typeKinds.Optional:
      case typeKinds.Pointer:
        let child = type.child;
        let resolvedChild = resolveValue(child);
        if ("type" in resolvedChild) {
          type = zigAnalysis.types[resolvedChild.type];
          continue;
        } else {
          return null;
        }
      default:
        break outer;
    }

    if (i == 9999) throw "Exhausted typeShorthandName quota";
  }

  let name = undefined;
  if (type.kind === typeKinds.Struct) {
    name = "struct";
  } else if (type.kind === typeKinds.Enum) {
    name = "enum";
  } else if (type.kind === typeKinds.Union) {
    name = "union";
  } else {
    console.log("TODO: unhalndled case in typeShortName");
    return null;
  }

  return escapeHtml(name);
}
|
||||
|
||||
// True when the type kind is a container (struct, union, or enum).
function typeKindIsContainer(typeKind) {
  return (
    typeKind === typeKinds.Struct ||
    typeKind === typeKinds.Union ||
    typeKind === typeKinds.Enum
  );
}
|
||||
|
||||
// A declaration can stand in for error sets and container types.
function declCanRepresentTypeKind(typeKind) {
  return typeKind === typeKinds.ErrorSet || typeKindIsContainer(typeKind);
}
|
||||
|
||||
//
|
||||
// function findCteInRefPath(path) {
|
||||
// for (let i = path.length - 1; i >= 0; i -= 1) {
|
||||
// const ref = path[i];
|
||||
// if ("string" in ref) continue;
|
||||
// if ("comptimeExpr" in ref) return ref;
|
||||
// if ("refPath" in ref) return findCteInRefPath(ref.refPath);
|
||||
// return null;
|
||||
// }
|
||||
|
||||
// return null;
|
||||
// }
|
||||
|
||||
// Follow refPath / declRef / `as` indirections until a concrete value is
// reached. The 1000-step cap guards against reference cycles; hitting it
// asserts and returns an empty object.
function resolveValue(value) {
  let i = 0;
  while (i < 1000) {
    i += 1;

    // A refPath resolves to its last component.
    if ("refPath" in value.expr) {
      value = { expr: value.expr.refPath[value.expr.refPath.length - 1] };
      continue;
    }

    // A declRef resolves to the referenced declaration's value.
    if ("declRef" in value.expr) {
      value = zigAnalysis.decls[value.expr.declRef].value;
      continue;
    }

    // An `as` node carries both the coerced type and the inner expression.
    if ("as" in value.expr) {
      value = {
        typeRef: zigAnalysis.exprs[value.expr.as.typeRefArg],
        expr: zigAnalysis.exprs[value.expr.as.exprArg],
      };
      continue;
    }

    return value;
  }
  console.assert(false);
  return {};
}
|
||||
|
||||
// function typeOfDecl(decl){
|
||||
// return decl.value.typeRef;
|
||||
//
|
||||
// let i = 0;
|
||||
// while(i < 1000) {
|
||||
// i += 1;
|
||||
// console.assert(isDecl(decl));
|
||||
// if ("type" in decl.value) {
|
||||
// return ({ type: typeTypeId });
|
||||
// }
|
||||
//
|
||||
//// if ("string" in decl.value) {
|
||||
//// return ({ type: {
|
||||
//// kind: typeKinds.Pointer,
|
||||
//// size: pointerSizeEnum.One,
|
||||
//// child: });
|
||||
//// }
|
||||
//
|
||||
// if ("refPath" in decl.value) {
|
||||
// decl = ({
|
||||
// value: decl.value.refPath[decl.value.refPath.length -1]
|
||||
// });
|
||||
// continue;
|
||||
// }
|
||||
//
|
||||
// if ("declRef" in decl.value) {
|
||||
// decl = zigAnalysis.decls[decl.value.declRef];
|
||||
// continue;
|
||||
// }
|
||||
//
|
||||
// if ("int" in decl.value) {
|
||||
// return decl.value.int.typeRef;
|
||||
// }
|
||||
//
|
||||
// if ("float" in decl.value) {
|
||||
// return decl.value.float.typeRef;
|
||||
// }
|
||||
//
|
||||
// if ("array" in decl.value) {
|
||||
// return decl.value.array.typeRef;
|
||||
// }
|
||||
//
|
||||
// if ("struct" in decl.value) {
|
||||
// return decl.value.struct.typeRef;
|
||||
// }
|
||||
//
|
||||
// if ("comptimeExpr" in decl.value) {
|
||||
// const cte = zigAnalysis.comptimeExprs[decl.value.comptimeExpr];
|
||||
// return cte.typeRef;
|
||||
// }
|
||||
//
|
||||
// if ("call" in decl.value) {
|
||||
// const fn_call = zigAnalysis.calls[decl.value.call];
|
||||
// let fn_decl = undefined;
|
||||
// if ("declRef" in fn_call.func) {
|
||||
// fn_decl = zigAnalysis.decls[fn_call.func.declRef];
|
||||
// } else if ("refPath" in fn_call.func) {
|
||||
// console.assert("declRef" in fn_call.func.refPath[fn_call.func.refPath.length -1]);
|
||||
// fn_decl = zigAnalysis.decls[fn_call.func.refPath[fn_call.func.refPath.length -1].declRef];
|
||||
// } else throw {};
|
||||
//
|
||||
// const fn_decl_value = resolveValue(fn_decl.value);
|
||||
// console.assert("type" in fn_decl_value); //TODO handle comptimeExpr
|
||||
// const fn_type = (zigAnalysis.types[fn_decl_value.type]);
|
||||
// console.assert(fn_type.kind === typeKinds.Fn);
|
||||
// return fn_type.ret;
|
||||
// }
|
||||
//
|
||||
// if ("void" in decl.value) {
|
||||
// return ({ type: typeTypeId });
|
||||
// }
|
||||
//
|
||||
// if ("bool" in decl.value) {
|
||||
// return ({ type: typeKinds.Bool });
|
||||
// }
|
||||
//
|
||||
// console.log("TODO: handle in `typeOfDecl` more cases: ", decl);
|
||||
// console.assert(false);
|
||||
// throw {};
|
||||
// }
|
||||
// console.assert(false);
|
||||
// return ({});
|
||||
// }
|
||||
|
||||
// Top-level page renderer: hides every section, then re-shows whichever
// view matches the current navigation state (search, package, container,
// type, function, variable, or constant).
function render() {
  // Hide everything first; the branches below re-show what is relevant.
  const allSections = [
    domStatus, domFnProto, domSectParams, domTldDocs, domSectMainPkg,
    domSectPkgs, domSectTypes, domSectTests, domSectNamespaces,
    domSectErrSets, domSectFns, domSectFields, domSectSearchResults,
    domSectSearchAllResultsLink, domSectSearchNoResults, domSectInfo,
    domHdrName, domSectNav, domSectFnErrors, domFnExamples,
    domFnNoExamples, domDeclNoRef, domFnErrorsAnyError, domTableFnErrors,
    domSectGlobalVars, domSectValues,
  ];
  for (const dom of allSections) {
    dom.classList.add("hidden");
  }

  renderTitle();
  renderInfo();
  renderPkgList();

  domPrivDeclsBox.checked = curNav.showPrivDecls;

  if (curNavSearch !== "") {
    return renderSearch();
  }

  // Walk the package path from the root; 404 on a missing segment.
  let rootPkg = zigAnalysis.packages[zigAnalysis.rootPkg];
  let pkg = rootPkg;
  curNav.pkgObjs = [pkg];
  for (let i = 0; i < curNav.pkgNames.length; i += 1) {
    let childPkg = zigAnalysis.packages[pkg.table[curNav.pkgNames[i]]];
    if (childPkg == null) {
      return render404();
    }
    pkg = childPkg;
    curNav.pkgObjs.push(pkg);
  }

  // Walk the declaration path inside the package's main type.
  let currentType = zigAnalysis.types[pkg.main];
  curNav.declObjs = [currentType];
  for (let i = 0; i < curNav.declNames.length; i += 1) {
    let childDecl = findSubDecl(currentType, curNav.declNames[i]);
    if (childDecl == null) {
      return render404();
    }

    // Non-function type decls are replaced by the type they name, so the
    // nav shows the type itself rather than the declaration.
    let childDeclValue = resolveValue(childDecl.value).expr;
    if ("type" in childDeclValue) {
      const t = zigAnalysis.types[childDeclValue.type];
      if (t.kind != typeKinds.Fn) {
        childDecl = t;
      }
    }

    currentType = childDecl;
    curNav.declObjs.push(currentType);
  }

  renderNav();

  // Dispatch on what the last navigated object is.
  let last = curNav.declObjs[curNav.declObjs.length - 1];
  let lastIsDecl = isDecl(last);
  let lastIsType = isType(last);
  let lastIsContainerType = isContainerType(last);

  if (lastIsContainerType) {
    return renderContainer(last);
  }

  if (!lastIsDecl && !lastIsType) {
    return renderUnknownDecl(last);
  }

  if (lastIsType) {
    return renderType(last);
  }

  if (lastIsDecl && last.kind === "var") {
    return renderVar(last);
  }

  if (lastIsDecl && last.kind === "const") {
    // Function-typed constants render as functions; everything else as a value.
    let typeObj = zigAnalysis.types[resolveValue(last.value).expr.type];
    if (typeObj && typeObj.kind === typeKinds.Fn) {
      return renderFn(last);
    }
    return renderValue(last);
  }
}
|
||||
|
||||
// Fallback view for a declaration we cannot classify: show its doc
// comments if any, otherwise a placeholder message.
function renderUnknownDecl(decl) {
  domDeclNoRef.classList.remove("hidden");

  let docs = zigAnalysis.astNodes[decl.src].docs;
  if (docs != null) {
    domTldDocs.innerHTML = markdown(docs);
  } else {
    domTldDocs.innerHTML =
      "<p>There are no doc comments for this declaration.</p>";
  }
  domTldDocs.classList.remove("hidden");
}
|
||||
|
||||
// True when the indexed type is an error set.
function typeIsErrSet(typeIndex) {
  let typeObj = zigAnalysis.types[typeIndex];
  return typeObj.kind === typeKinds.ErrorSet;
}
|
||||
|
||||
// True when the indexed type is a struct with an empty field list.
function typeIsStructWithNoFields(typeIndex) {
  let typeObj = zigAnalysis.types[typeIndex];
  if (typeObj.kind !== typeKinds.Struct) return false;
  return typeObj.fields.length == 0;
}
|
||||
|
||||
// True when the indexed type is a function whose return type is generic
// (i.e. its `generic_ret` is populated).
function typeIsGenericFn(typeIndex) {
  let typeObj = zigAnalysis.types[typeIndex];
  if (typeObj.kind !== typeKinds.Fn) {
    return false;
  }
  return typeObj.generic_ret != null;
}
|
||||
|
||||
// Render the detail view for a function declaration: prototype, parameter
// docs, error set, and — for generic functions returning a container —
// the returned container's members.
function renderFn(fnDecl) {
  // A refPath decl points at the real function declaration; follow it.
  if ("refPath" in fnDecl.value.expr) {
    let last = fnDecl.value.expr.refPath.length - 1;
    let lastExpr = fnDecl.value.expr.refPath[last];
    console.assert("declRef" in lastExpr);
    fnDecl = zigAnalysis.decls[lastExpr.declRef];
  }

  let value = resolveValue(fnDecl.value);
  console.assert("type" in value.expr);
  let typeObj = zigAnalysis.types[value.expr.type];

  domFnProtoCode.innerHTML = exprName(value.expr, {
    wantHtml: true,
    wantLink: true,
    fnDecl,
  });

  let docsSource = null;
  let srcNode = zigAnalysis.astNodes[fnDecl.src];
  if (srcNode.docs != null) {
    docsSource = srcNode.docs;
  }

  renderFnParamDocs(fnDecl, typeObj);

  // Show the error set when the return type is (or contains) one.
  let retExpr = resolveValue({ expr: typeObj.ret }).expr;
  if ("type" in retExpr) {
    let retIndex = retExpr.type;
    let errSetTypeIndex = null;
    let retType = zigAnalysis.types[retIndex];
    if (retType.kind === typeKinds.ErrorSet) {
      errSetTypeIndex = retIndex;
    } else if (retType.kind === typeKinds.ErrorUnion) {
      errSetTypeIndex = retType.err.type;
    }
    if (errSetTypeIndex != null) {
      let errSetType = zigAnalysis.types[errSetTypeIndex];
      renderErrorSet(errSetType);
    }
  }

  let protoSrcIndex = fnDecl.src;
  if (typeIsGenericFn(value.expr.type)) {
    // Does the generic_ret contain a container?
    var resolvedGenericRet = resolveValue({ expr: typeObj.generic_ret });

    if ("call" in resolvedGenericRet.expr) {
      let call = zigAnalysis.calls[resolvedGenericRet.expr.call];
      let resolvedFunc = resolveValue({ expr: call.func });
      if (!("type" in resolvedFunc.expr)) return;
      let callee = zigAnalysis.types[resolvedFunc.expr.type];
      if (!callee.generic_ret) return;
      resolvedGenericRet = resolveValue({ expr: callee.generic_ret });
    }

    // TODO: see if unwrapping the `as` here is a good idea or not.
    if ("as" in resolvedGenericRet.expr) {
      resolvedGenericRet = {
        expr: zigAnalysis.exprs[resolvedGenericRet.expr.as.exprArg],
      };
    }

    if (!("type" in resolvedGenericRet.expr)) return;
    const genericType = zigAnalysis.types[resolvedGenericRet.expr.type];
    if (isContainerType(genericType)) {
      renderContainer(genericType);
    }
  } else {
    domFnExamples.classList.add("hidden");
    domFnNoExamples.classList.add("hidden");
  }

  // Fall back to the prototype node's docs when the decl itself has none.
  let protoSrcNode = zigAnalysis.astNodes[protoSrcIndex];
  if (
    docsSource == null &&
    protoSrcNode != null &&
    protoSrcNode.docs != null
  ) {
    docsSource = protoSrcNode.docs;
  }
  if (docsSource != null) {
    domTldDocs.innerHTML = markdown(docsSource);
    domTldDocs.classList.remove("hidden");
  }
  domFnProto.classList.remove("hidden");
}
|
||||
|
||||
// Render the per-parameter documentation list for a function. Only
// parameters that carry doc comments get an entry; the section stays
// hidden when none do.
function renderFnParamDocs(fnDecl, typeObj) {
  let docCount = 0;

  let fnNode = zigAnalysis.astNodes[fnDecl.src];
  let fields = fnNode.fields;
  let isVarArgs = fnNode.varArgs;

  // First pass: count documented parameters to size the DOM list.
  for (let i = 0; i < fields.length; i += 1) {
    let fieldNode = zigAnalysis.astNodes[fields[i]];
    if (fieldNode.docs != null) {
      docCount += 1;
    }
  }
  if (docCount == 0) {
    return;
  }

  resizeDomList(domListParams, docCount, "<div></div>");
  let domIndex = 0;

  // Second pass: fill in one entry per documented parameter.
  for (let i = 0; i < fields.length; i += 1) {
    let fieldNode = zigAnalysis.astNodes[fields[i]];
    let docs = fieldNode.docs;
    if (fieldNode.docs == null) {
      continue;
    }
    let docsNonEmpty = docs !== "";
    let divDom = domListParams.children[domIndex];
    domIndex += 1;

    let value = typeObj.params[i];
    let preClass = docsNonEmpty ? ' class="fieldHasDocs"' : "";
    let html = "<pre" + preClass + ">" + escapeHtml(fieldNode.name) + ": ";
    // The trailing varargs parameter renders as `...` instead of a type.
    if (isVarArgs && i === typeObj.params.length - 1) {
      html += "...";
    } else {
      let name = exprName(value, { wantHtml: false, wantLink: false });
      html += '<span class="tok-kw">' + name + "</span>";
    }

    html += ",</pre>";

    if (docsNonEmpty) {
      html += '<div class="fieldDocs">' + markdown(docs) + "</div>";
    }
    divDom.innerHTML = html;
  }
  domSectParams.classList.remove("hidden");
}
|
||||
|
||||
// Render the breadcrumb navigation bar: one link per package path segment
// followed by one per declaration path segment; the last entry is active.
function renderNav() {
  let len = curNav.pkgNames.length + curNav.declNames.length;
  resizeDomList(domListNav, len, '<li><a href="#"></a></li>');
  let list = [];
  let hrefPkgNames = [];
  let hrefDeclNames = [];
  for (let i = 0; i < curNav.pkgNames.length; i += 1) {
    hrefPkgNames.push(curNav.pkgNames[i]);
    let name = curNav.pkgNames[i];
    // "root" is an alias for the root package's real name.
    if (name == "root") name = zigAnalysis.rootPkgName;
    list.push({
      name: name,
      link: navLink(hrefPkgNames, hrefDeclNames),
    });
  }
  for (let i = 0; i < curNav.declNames.length; i += 1) {
    hrefDeclNames.push(curNav.declNames[i]);
    list.push({
      name: curNav.declNames[i],
      link: navLink(hrefPkgNames, hrefDeclNames),
    });
  }

  for (let i = 0; i < list.length; i += 1) {
    let liDom = domListNav.children[i];
    let aDom = liDom.children[0];
    aDom.textContent = list[i].name;
    aDom.setAttribute("href", list[i].link);
    if (i + 1 == list.length) {
      aDom.classList.add("active");
    } else {
      aDom.classList.remove("active");
    }
  }

  domSectNav.classList.remove("hidden");
}
|
||||
|
||||
// Render the build-info section (currently just the Zig version).
function renderInfo() {
  domTdZigVer.textContent = zigAnalysis.params.zigVersion;
  //domTdTarget.textContent = zigAnalysis.params.builds[0].target;

  domSectInfo.classList.remove("hidden");
}
|
||||
|
||||
// Show the 404 status message for unresolvable navigation paths.
function render404() {
  domStatus.textContent = "404 Not Found";
  domStatus.classList.remove("hidden");
}
|
||||
|
||||
// Render the sidebar package list: the root package entry plus every
// other package in the root's table, sorted case-insensitively.
function renderPkgList() {
  let rootPkg = zigAnalysis.packages[zigAnalysis.rootPkg];
  let list = [];
  for (let key in rootPkg.table) {
    let pkgIndex = rootPkg.table[key];
    // Skip unknown packages and the root itself (rendered separately).
    if (zigAnalysis.packages[pkgIndex] == null) continue;
    if (key == zigAnalysis.params.rootName) continue;
    list.push({
      name: key,
      pkg: pkgIndex,
    });
  }

  {
    // The main package link lives in a fixed spot in the section markup.
    let aDom = domSectMainPkg.children[1].children[0].children[0];
    aDom.textContent = zigAnalysis.rootPkgName;
    aDom.setAttribute("href", navLinkPkg(zigAnalysis.rootPkg));
    if (zigAnalysis.params.rootName === curNav.pkgNames[0]) {
      aDom.classList.add("active");
    } else {
      aDom.classList.remove("active");
    }
    domSectMainPkg.classList.remove("hidden");
  }

  list.sort(function (a, b) {
    return operatorCompare(a.name.toLowerCase(), b.name.toLowerCase());
  });

  if (list.length !== 0) {
    resizeDomList(domListPkgs, list.length, '<li><a href="#"></a></li>');
    for (let i = 0; i < list.length; i += 1) {
      let liDom = domListPkgs.children[i];
      let aDom = liDom.children[0];
      aDom.textContent = list[i].name;
      aDom.setAttribute("href", navLinkPkg(list[i].pkg));
      if (list[i].name === curNav.pkgNames[0]) {
        aDom.classList.add("active");
      } else {
        aDom.classList.remove("active");
      }
    }

    domSectPkgs.classList.remove("hidden");
  }
}
|
||||
|
||||
// Build a fragment URL from package path, declaration path, and optional
// call name: "#pkg.path;decl.path;callName". A leading "*" marks the
// show-private-decls mode.
function navLink(pkgNames, declNames, callName) {
  let base = "#";
  if (curNav.showPrivDecls) {
    base += "*";
  }

  if (pkgNames.length === 0 && declNames.length === 0) {
    return base;
  } else if (declNames.length === 0 && callName == null) {
    return base + pkgNames.join(".");
  } else if (callName == null) {
    return base + pkgNames.join(".") + ";" + declNames.join(".");
  } else {
    return (
      base + pkgNames.join(".") + ";" + declNames.join(".") + ";" + callName
    );
  }
}
|
||||
|
||||
// Fragment URL for a package by index, using its canonical path.
function navLinkPkg(pkgIndex) {
  return navLink(canonPkgPaths[pkgIndex], []);
}
|
||||
|
||||
// Fragment URL for a child declaration of the current navigation target.
function navLinkDecl(childName) {
  return navLink(curNav.pkgNames, curNav.declNames.concat([childName]));
}
|
||||
|
||||
//
|
||||
// function navLinkCall(callObj) {
|
||||
// let declNamesCopy = curNav.declNames.concat([]);
|
||||
// let callName = (declNamesCopy.pop());
|
||||
|
||||
// callName += '(';
|
||||
// for (let arg_i = 0; arg_i < callObj.args.length; arg_i += 1) {
|
||||
// if (arg_i !== 0) callName += ',';
|
||||
// let argObj = callObj.args[arg_i];
|
||||
// callName += getValueText(argObj, argObj, false, false);
|
||||
// }
|
||||
// callName += ')';
|
||||
|
||||
// declNamesCopy.push(callName);
|
||||
// return navLink(curNav.pkgNames, declNamesCopy);
|
||||
// }
|
||||
|
||||
// Grow or shrink a <dl> so it holds exactly `desiredLen` <dt>/<dd> pairs.
function resizeDomListDl(dlDom, desiredLen) {
  // add the missing dom entries
  for (let i = dlDom.childElementCount / 2; i < desiredLen; i += 1) {
    dlDom.insertAdjacentHTML("beforeend", "<dt></dt><dd></dd>");
  }
  // remove extra dom entries
  while (desiredLen < dlDom.childElementCount / 2) {
    dlDom.removeChild(dlDom.lastChild);
    dlDom.removeChild(dlDom.lastChild);
  }
}
|
||||
|
||||
// Grow or shrink a list element so it holds exactly `desiredLen` children,
// appending copies of `templateHtml` as needed.
function resizeDomList(listDom, desiredLen, templateHtml) {
  // add the missing dom entries
  for (let i = listDom.childElementCount; i < desiredLen; i += 1) {
    listDom.insertAdjacentHTML("beforeend", templateHtml);
  }
  // remove extra dom entries
  while (desiredLen < listDom.childElementCount) {
    listDom.removeChild(listDom.lastChild);
  }
}
|
||||
|
||||
// Find the typeRef of a walk result, resolving indirections; falls back
// to type index 0 when resolution makes no further progress.
function walkResultTypeRef(wr) {
  if (wr.typeRef) return wr.typeRef;
  let resolved = resolveValue(wr);
  if (wr === resolved) {
    return { type: 0 };
  }
  return walkResultTypeRef(resolved);
}
|
||||
|
||||
function exprName(expr, opts) {
|
||||
switch (Object.keys(expr)[0]) {
|
||||
default:
|
||||
throw "this expression is not implemented yet";
|
||||
case "bool": {
|
||||
if (expr.bool) {
|
||||
return "true";
|
||||
}
|
||||
return "false";
|
||||
}
|
||||
case "&": {
|
||||
return "&" + exprName(zigAnalysis.exprs[expr["&"]]);
|
||||
}
|
||||
case "compileError": {
|
||||
let compileError = expr.compileError;
|
||||
return "@compileError(" + exprName(zigAnalysis.exprs[compileError], opts) + ")";
|
||||
}
|
||||
case "enumLiteral": {
|
||||
let literal = expr.enumLiteral;
|
||||
return "." + literal;
|
||||
}
|
||||
case "void": {
|
||||
return "void";
|
||||
}
|
||||
case "slice": {
|
||||
let payloadHtml = "";
|
||||
const lhsExpr = zigAnalysis.exprs[expr.slice.lhs];
|
||||
const startExpr = zigAnalysis.exprs[expr.slice.start];
|
||||
let decl = exprName(lhsExpr);
|
||||
let start = exprName(startExpr);
|
||||
let end = "";
|
||||
let sentinel = "";
|
||||
if (expr.slice["end"]) {
|
||||
const endExpr = zigAnalysis.exprs[expr.slice.end];
|
||||
let end_ = exprName(endExpr);
|
||||
end += end_;
|
||||
}
|
||||
if (expr.slice["sentinel"]) {
|
||||
const sentinelExpr = zigAnalysis.exprs[expr.slice.sentinel];
|
||||
let sentinel_ = exprName(sentinelExpr);
|
||||
sentinel += " :" + sentinel_;
|
||||
}
|
||||
payloadHtml += decl + "[" + start + ".." + end + sentinel + "]";
|
||||
return payloadHtml;
|
||||
}
|
||||
case "sliceIndex": {
|
||||
const sliceIndex = zigAnalysis.exprs[expr.sliceIndex];
|
||||
return exprName(sliceIndex, opts);
|
||||
}
|
||||
case "cmpxchg": {
|
||||
const typeIndex = zigAnalysis.exprs[expr.cmpxchg.type];
|
||||
const ptrIndex = zigAnalysis.exprs[expr.cmpxchg.ptr];
|
||||
const expectedValueIndex =
|
||||
zigAnalysis.exprs[expr.cmpxchg.expected_value];
|
||||
const newValueIndex = zigAnalysis.exprs[expr.cmpxchg.new_value];
|
||||
const successOrderIndex = zigAnalysis.exprs[expr.cmpxchg.success_order];
|
||||
const failureOrderIndex = zigAnalysis.exprs[expr.cmpxchg.failure_order];
|
||||
|
||||
const type = exprName(typeIndex, opts);
|
||||
const ptr = exprName(ptrIndex, opts);
|
||||
const expectedValue = exprName(expectedValueIndex, opts);
|
||||
const newValue = exprName(newValueIndex, opts);
|
||||
const successOrder = exprName(successOrderIndex, opts);
|
||||
const failureOrder = exprName(failureOrderIndex, opts);
|
||||
|
||||
let fnName = "@";
|
||||
|
||||
switch (expr.cmpxchg.name) {
|
||||
case "cmpxchg_strong": {
|
||||
fnName += "cmpxchgStrong";
|
||||
break;
|
||||
}
|
||||
case "refPath" : {
|
||||
let name = exprName(expr.refPath[0]);
|
||||
for (let i = 1; i < expr.refPath.length; i++) {
|
||||
let component = undefined;
|
||||
if ("string" in expr.refPath[i]) {
|
||||
component = expr.refPath[i].string;
|
||||
} else {
|
||||
component = exprName(expr.refPath[i]);
|
||||
}
|
||||
name += "." + component;
|
||||
case "cmpxchg_weak": {
|
||||
fnName += "cmpxchgWeak";
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
console.log("There's only cmpxchg_strong and cmpxchg_weak");
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
fnName +
|
||||
"(" +
|
||||
type +
|
||||
", " +
|
||||
ptr +
|
||||
", " +
|
||||
expectedValue +
|
||||
", " +
|
||||
newValue +
|
||||
", " +
|
||||
"." +
|
||||
successOrder +
|
||||
", " +
|
||||
"." +
|
||||
failureOrder +
|
||||
")"
|
||||
);
|
||||
}
|
||||
case "cmpxchgIndex": {
|
||||
const cmpxchgIndex = zigAnalysis.exprs[expr.cmpxchgIndex];
|
||||
return exprName(cmpxchgIndex, opts);
|
||||
}
|
||||
case "switchOp": {
|
||||
let condExpr = zigAnalysis.exprs[expr.switchOp.cond_index];
|
||||
let ast = zigAnalysis.astNodes[expr.switchOp.ast];
|
||||
let file_name = expr.switchOp.file_name;
|
||||
let outer_decl_index = expr.switchOp.outer_decl;
|
||||
let outer_decl = zigAnalysis.types[outer_decl_index];
|
||||
let line = 0;
|
||||
// console.log(expr.switchOp)
|
||||
// console.log(outer_decl)
|
||||
while (outer_decl_index !== 0 && outer_decl.line_number > 0) {
|
||||
line += outer_decl.line_number;
|
||||
outer_decl_index = outer_decl.outer_decl;
|
||||
outer_decl = zigAnalysis.types[outer_decl_index];
|
||||
// console.log(outer_decl)
|
||||
}
|
||||
line += ast.line + 1;
|
||||
let payloadHtml = "";
|
||||
let cond = exprName(condExpr, opts);
|
||||
|
||||
payloadHtml +=
|
||||
"</br>" +
|
||||
"node_name: " +
|
||||
ast.name +
|
||||
"</br>" +
|
||||
"file: " +
|
||||
file_name +
|
||||
"</br>" +
|
||||
"line: " +
|
||||
line +
|
||||
"</br>";
|
||||
payloadHtml +=
|
||||
"switch(" +
|
||||
cond +
|
||||
") {" +
|
||||
'<a href="/src/' +
|
||||
file_name +
|
||||
"#L" +
|
||||
line +
|
||||
'">' +
|
||||
"..." +
|
||||
"</a>}";
|
||||
return payloadHtml;
|
||||
}
|
||||
case "switchIndex": {
|
||||
const switchIndex = zigAnalysis.exprs[expr.switchIndex];
|
||||
return exprName(switchIndex, opts);
|
||||
}
|
||||
case "refPath": {
|
||||
let name = exprName(expr.refPath[0]);
|
||||
for (let i = 1; i < expr.refPath.length; i++) {
|
||||
let component = undefined;
|
||||
if ("string" in expr.refPath[i]) {
|
||||
component = expr.refPath[i].string;
|
||||
} else {
|
||||
component = exprName(expr.refPath[i]);
|
||||
}
|
||||
name += "." + component;
|
||||
}
|
||||
return name;
|
||||
}
|
||||
case "fieldRef": {
|
||||
const enumObj = exprName({ type: expr.fieldRef.type }, opts);
|
||||
const field =
|
||||
zigAnalysis.astNodes[enumObj.ast].fields[expr.fieldRef.index];
|
||||
const name = zigAnalysis.astNodes[field].name;
|
||||
return name;
|
||||
}
|
||||
case "enumToInt": {
|
||||
const enumToInt = zigAnalysis.exprs[expr.enumToInt];
|
||||
return "@enumToInt(" + exprName(enumToInt, opts) + ")";
|
||||
}
|
||||
case "bitSizeOf": {
|
||||
const bitSizeOf = zigAnalysis.exprs[expr.bitSizeOf];
|
||||
return "@bitSizeOf(" + exprName(bitSizeOf, opts) + ")";
|
||||
}
|
||||
case "sizeOf": {
|
||||
const sizeOf = zigAnalysis.exprs[expr.sizeOf];
|
||||
return "@sizeOf(" + exprName(sizeOf, opts) + ")";
|
||||
}
|
||||
case "builtinIndex": {
|
||||
const builtinIndex = zigAnalysis.exprs[expr.builtinIndex];
|
||||
return exprName(builtinIndex, opts);
|
||||
}
|
||||
case "builtin": {
|
||||
const param_expr = zigAnalysis.exprs[expr.builtin.param];
|
||||
let param = exprName(param_expr, opts);
|
||||
|
||||
let payloadHtml = "@";
|
||||
switch (expr.builtin.name) {
|
||||
case "align_of": {
|
||||
payloadHtml += "alignOf";
|
||||
break;
|
||||
}
|
||||
case "bool_to_int": {
|
||||
payloadHtml += "boolToInt";
|
||||
break;
|
||||
}
|
||||
case "embed_file": {
|
||||
payloadHtml += "embedFile";
|
||||
break;
|
||||
}
|
||||
case "error_name": {
|
||||
payloadHtml += "errorName";
|
||||
break;
|
||||
}
|
||||
case "panic": {
|
||||
payloadHtml += "panic";
|
||||
break;
|
||||
}
|
||||
case "set_cold": {
|
||||
payloadHtml += "setCold";
|
||||
break;
|
||||
}
|
||||
case "set_runtime_safety": {
|
||||
payloadHtml += "setRuntimeSafety";
|
||||
break;
|
||||
}
|
||||
case "sqrt": {
|
||||
payloadHtml += "sqrt";
|
||||
break;
|
||||
}
|
||||
case "sin": {
|
||||
payloadHtml += "sin";
|
||||
break;
|
||||
}
|
||||
case "cos": {
|
||||
payloadHtml += "cos";
|
||||
break;
|
||||
}
|
||||
case "tan": {
|
||||
payloadHtml += "tan";
|
||||
break;
|
||||
}
|
||||
case "exp": {
|
||||
payloadHtml += "exp";
|
||||
break;
|
||||
}
|
||||
case "exp2": {
|
||||
payloadHtml += "exp2";
|
||||
break;
|
||||
}
|
||||
case "log": {
|
||||
payloadHtml += "log";
|
||||
break;
|
||||
}
|
||||
case "log2": {
|
||||
payloadHtml += "log2";
|
||||
break;
|
||||
}
|
||||
case "log10": {
|
||||
payloadHtml += "log10";
|
||||
break;
|
||||
}
|
||||
case "fabs": {
|
||||
payloadHtml += "fabs";
|
||||
break;
|
||||
}
|
||||
case "floor": {
|
||||
payloadHtml += "floor";
|
||||
break;
|
||||
}
|
||||
case "ceil": {
|
||||
payloadHtml += "ceil";
|
||||
break;
|
||||
}
|
||||
case "trunc": {
|
||||
payloadHtml += "trunc";
|
||||
break;
|
||||
}
|
||||
case "round": {
|
||||
payloadHtml += "round";
|
||||
break;
|
||||
}
|
||||
case "tag_name": {
|
||||
payloadHtml += "tagName";
|
||||
break;
|
||||
}
|
||||
case "reify": {
|
||||
payloadHtml += "Type";
|
||||
break;
|
||||
}
|
||||
case "type_name": {
|
||||
payloadHtml += "typeName";
|
||||
break;
|
||||
}
|
||||
case "frame_type": {
|
||||
payloadHtml += "Frame";
|
||||
break;
|
||||
}
|
||||
case "frame_size": {
|
||||
payloadHtml += "frameSize";
|
||||
break;
|
||||
}
|
||||
case "ptr_to_int": {
|
||||
payloadHtml += "ptrToInt";
|
||||
break;
|
||||
}
|
||||
case "error_to_int": {
|
||||
payloadHtml += "errorToInt";
|
||||
break;
|
||||
}
|
||||
case "int_to_error": {
|
||||
payloadHtml += "intToError";
|
||||
break;
|
||||
}
|
||||
case "maximum": {
|
||||
payloadHtml += "maximum";
|
||||
break;
|
||||
}
|
||||
case "minimum": {
|
||||
payloadHtml += "minimum";
|
||||
break;
|
||||
}
|
||||
case "bit_not": {
|
||||
return "~" + param;
|
||||
}
|
||||
case "clz": {
|
||||
return "@clz(T" + ", " + param + ")";
|
||||
}
|
||||
case "ctz": {
|
||||
return "@ctz(T" + ", " + param + ")";
|
||||
}
|
||||
case "pop_count": {
|
||||
return "@popCount(T" + ", " + param + ")";
|
||||
}
|
||||
case "byte_swap": {
|
||||
return "@byteSwap(T" + ", " + param + ")";
|
||||
}
|
||||
case "bit_reverse": {
|
||||
return "@bitReverse(T" + ", " + param + ")";
|
||||
}
|
||||
default:
|
||||
console.log("builtin function not handled yet or doesn't exist!");
|
||||
}
|
||||
return payloadHtml + "(" + param + ")";
|
||||
}
|
||||
case "builtinBinIndex": {
|
||||
const builtinBinIndex = zigAnalysis.exprs[expr.builtinBinIndex];
|
||||
return exprName(builtinBinIndex, opts);
|
||||
}
|
||||
case "builtinBin": {
|
||||
const lhsOp = zigAnalysis.exprs[expr.builtinBin.lhs];
|
||||
const rhsOp = zigAnalysis.exprs[expr.builtinBin.rhs];
|
||||
let lhs = exprName(lhsOp, opts);
|
||||
let rhs = exprName(rhsOp, opts);
|
||||
|
||||
let payloadHtml = "@";
|
||||
switch (expr.builtinBin.name) {
|
||||
case "float_to_int": {
|
||||
payloadHtml += "floatToInt";
|
||||
break;
|
||||
}
|
||||
case "int_to_float": {
|
||||
payloadHtml += "intToFloat";
|
||||
break;
|
||||
}
|
||||
case "int_to_ptr": {
|
||||
payloadHtml += "intToPtr";
|
||||
break;
|
||||
}
|
||||
case "int_to_enum": {
|
||||
payloadHtml += "intToEnum";
|
||||
break;
|
||||
}
|
||||
case "float_cast": {
|
||||
payloadHtml += "floatCast";
|
||||
break;
|
||||
}
|
||||
case "int_cast": {
|
||||
payloadHtml += "intCast";
|
||||
break;
|
||||
}
|
||||
case "ptr_cast": {
|
||||
payloadHtml += "ptrCast";
|
||||
break;
|
||||
}
|
||||
case "truncate": {
|
||||
payloadHtml += "truncate";
|
||||
break;
|
||||
}
|
||||
case "align_cast": {
|
||||
payloadHtml += "alignCast";
|
||||
break;
|
||||
}
|
||||
case "has_decl": {
|
||||
payloadHtml += "hasDecl";
|
||||
break;
|
||||
}
|
||||
case "has_field": {
|
||||
payloadHtml += "hasField";
|
||||
break;
|
||||
}
|
||||
case "bit_reverse": {
|
||||
payloadHtml += "bitReverse";
|
||||
break;
|
||||
}
|
||||
case "div_exact": {
|
||||
payloadHtml += "divExact";
|
||||
break;
|
||||
}
|
||||
case "div_floor": {
|
||||
payloadHtml += "divFloor";
|
||||
break;
|
||||
}
|
||||
case "div_trunc": {
|
||||
payloadHtml += "divTrunc";
|
||||
break;
|
||||
}
|
||||
case "mod": {
|
||||
payloadHtml += "mod";
|
||||
break;
|
||||
}
|
||||
case "rem": {
|
||||
payloadHtml += "rem";
|
||||
break;
|
||||
}
|
||||
case "mod_rem": {
|
||||
payloadHtml += "rem";
|
||||
break;
|
||||
}
|
||||
case "shl_exact": {
|
||||
payloadHtml += "shlExact";
|
||||
break;
|
||||
}
|
||||
case "shr_exact": {
|
||||
payloadHtml += "shrExact";
|
||||
break;
|
||||
}
|
||||
case "bitcast": {
|
||||
payloadHtml += "bitCast";
|
||||
break;
|
||||
}
|
||||
case "align_cast": {
|
||||
payloadHtml += "alignCast";
|
||||
break;
|
||||
}
|
||||
case "vector_type": {
|
||||
payloadHtml += "Vector";
|
||||
break;
|
||||
}
|
||||
case "reduce": {
|
||||
payloadHtml += "reduce";
|
||||
break;
|
||||
}
|
||||
case "splat": {
|
||||
payloadHtml += "splat";
|
||||
break;
|
||||
}
|
||||
case "offset_of": {
|
||||
payloadHtml += "offsetOf";
|
||||
break;
|
||||
}
|
||||
case "bit_offset_of": {
|
||||
payloadHtml += "bitOffsetOf";
|
||||
break;
|
||||
}
|
||||
default:
|
||||
console.log("builtin function not handled yet or doesn't exist!");
|
||||
}
|
||||
return payloadHtml + "(" + lhs + ", " + rhs + ")";
|
||||
}
|
||||
case "binOpIndex": {
|
||||
const binOpIndex = zigAnalysis.exprs[expr.binOpIndex];
|
||||
return exprName(binOpIndex, opts);
|
||||
}
|
||||
case "binOp": {
|
||||
const lhsOp = zigAnalysis.exprs[expr.binOp.lhs];
|
||||
const rhsOp = zigAnalysis.exprs[expr.binOp.rhs];
|
||||
let lhs = exprName(lhsOp, opts);
|
||||
let rhs = exprName(rhsOp, opts);
|
||||
|
||||
let print_lhs = "";
|
||||
let print_rhs = "";
|
||||
|
||||
if (lhsOp["binOpIndex"]) {
|
||||
print_lhs = "(" + lhs + ")";
|
||||
} else {
|
||||
print_lhs = lhs;
|
||||
}
|
||||
if (rhsOp["binOpIndex"]) {
|
||||
print_rhs = "(" + rhs + ")";
|
||||
} else {
|
||||
print_rhs = rhs;
|
||||
}
|
||||
|
||||
let operator = "";
|
||||
|
||||
switch (expr.binOp.name) {
|
||||
case "add": {
|
||||
operator += "+";
|
||||
break;
|
||||
}
|
||||
case "addwrap": {
|
||||
operator += "+%";
|
||||
break;
|
||||
}
|
||||
case "add_sat": {
|
||||
operator += "+|";
|
||||
break;
|
||||
}
|
||||
case "sub": {
|
||||
operator += "-";
|
||||
break;
|
||||
}
|
||||
case "subwrap": {
|
||||
operator += "-%";
|
||||
break;
|
||||
}
|
||||
case "sub_sat": {
|
||||
operator += "-|";
|
||||
break;
|
||||
}
|
||||
case "mul": {
|
||||
operator += "*";
|
||||
break;
|
||||
}
|
||||
case "mulwrap": {
|
||||
operator += "*%";
|
||||
break;
|
||||
}
|
||||
case "mul_sat": {
|
||||
operator += "*|";
|
||||
break;
|
||||
}
|
||||
case "div": {
|
||||
operator += "/";
|
||||
break;
|
||||
}
|
||||
case "shl": {
|
||||
operator += "<<";
|
||||
break;
|
||||
}
|
||||
case "shl_sat": {
|
||||
operator += "<<|";
|
||||
break;
|
||||
}
|
||||
case "shr": {
|
||||
operator += ">>";
|
||||
break;
|
||||
}
|
||||
case "bit_or": {
|
||||
operator += "|";
|
||||
break;
|
||||
}
|
||||
case "bit_and": {
|
||||
operator += "&";
|
||||
break;
|
||||
}
|
||||
case "array_cat": {
|
||||
operator += "++";
|
||||
break;
|
||||
}
|
||||
case "array_mul": {
|
||||
operator += "**";
|
||||
break;
|
||||
}
|
||||
default:
|
||||
console.log("operator not handled yet or doesn't exist!");
|
||||
}
|
||||
|
||||
return print_lhs + " " + operator + " " + print_rhs;
|
||||
}
|
||||
case "errorSets": {
|
||||
const errUnionObj = zigAnalysis.types[expr.errorSets];
|
||||
let lhs = exprName(errUnionObj.lhs, opts);
|
||||
let rhs = exprName(errUnionObj.rhs, opts);
|
||||
return lhs + " || " + rhs;
|
||||
}
|
||||
case "errorUnion": {
|
||||
const errUnionObj = zigAnalysis.types[expr.errorUnion];
|
||||
let lhs = exprName(errUnionObj.lhs, opts);
|
||||
let rhs = exprName(errUnionObj.rhs, opts);
|
||||
return lhs + "!" + rhs;
|
||||
}
|
||||
case "struct": {
|
||||
// const struct_name =
|
||||
// zigAnalysis.decls[expr.struct[0].val.typeRef.refPath[0].declRef].name;
|
||||
const struct_name = ".";
|
||||
let struct_body = "";
|
||||
struct_body += struct_name + "{ ";
|
||||
for (let i = 0; i < expr.struct.length; i++) {
|
||||
const fv = expr.struct[i];
|
||||
const field_name = fv.name;
|
||||
const field_value = exprName(fv.val.expr, opts);
|
||||
// TODO: commented out because it seems not needed. if it deals
|
||||
// with a corner case, please add a comment when re-enabling it.
|
||||
// let field_value = exprArg[Object.keys(exprArg)[0]];
|
||||
// if (field_value instanceof Object) {
|
||||
// value_field = exprName(value_field)
|
||||
// zigAnalysis.decls[value_field[0].val.typeRef.refPath[0].declRef]
|
||||
// .name;
|
||||
// }
|
||||
struct_body += "." + field_name + " = " + field_value;
|
||||
if (i !== expr.struct.length - 1) {
|
||||
struct_body += ", ";
|
||||
} else {
|
||||
struct_body += " ";
|
||||
}
|
||||
}
|
||||
struct_body += "}";
|
||||
return struct_body;
|
||||
}
|
||||
case "typeOf_peer": {
|
||||
let payloadHtml = "@TypeOf(";
|
||||
for (let i = 0; i < expr.typeOf_peer.length; i++) {
|
||||
let elem = zigAnalysis.exprs[expr.typeOf_peer[i]];
|
||||
payloadHtml += exprName(elem, { wantHtml: true, wantLink: true });
|
||||
if (i !== expr.typeOf_peer.length - 1) {
|
||||
payloadHtml += ", ";
|
||||
}
|
||||
}
|
||||
payloadHtml += ")";
|
||||
return payloadHtml;
|
||||
}
|
||||
case "alignOf": {
|
||||
const alignRefArg = zigAnalysis.exprs[expr.alignOf];
|
||||
let payloadHtml =
|
||||
"@alignOf(" +
|
||||
exprName(alignRefArg, { wantHtml: true, wantLink: true }) +
|
||||
")";
|
||||
return payloadHtml;
|
||||
}
|
||||
case "typeOf": {
|
||||
const typeRefArg = zigAnalysis.exprs[expr.typeOf];
|
||||
let payloadHtml =
|
||||
"@TypeOf(" +
|
||||
exprName(typeRefArg, { wantHtml: true, wantLink: true }) +
|
||||
")";
|
||||
return payloadHtml;
|
||||
}
|
||||
case "typeInfo": {
|
||||
const typeRefArg = zigAnalysis.exprs[expr.typeInfo];
|
||||
let payloadHtml =
|
||||
"@typeInfo(" +
|
||||
exprName(typeRefArg, { wantHtml: true, wantLink: true }) +
|
||||
")";
|
||||
return payloadHtml;
|
||||
}
|
||||
case "null": {
|
||||
return "null";
|
||||
}
|
||||
case "array": {
|
||||
let payloadHtml = ".{";
|
||||
for (let i = 0; i < expr.array.length; i++) {
|
||||
if (i != 0) payloadHtml += ", ";
|
||||
let elem = zigAnalysis.exprs[expr.array[i]];
|
||||
payloadHtml += exprName(elem, opts);
|
||||
}
|
||||
return payloadHtml + "}";
|
||||
}
|
||||
case "comptimeExpr": {
|
||||
return zigAnalysis.comptimeExprs[expr.comptimeExpr].code;
|
||||
}
|
||||
case "call": {
|
||||
let call = zigAnalysis.calls[expr.call];
|
||||
let payloadHtml = "";
|
||||
|
||||
switch (Object.keys(call.func)[0]) {
|
||||
default:
|
||||
throw "TODO";
|
||||
case "declRef":
|
||||
case "refPath": {
|
||||
payloadHtml += exprName(call.func, opts);
|
||||
break;
|
||||
}
|
||||
}
|
||||
payloadHtml += "(";
|
||||
|
||||
for (let i = 0; i < call.args.length; i++) {
|
||||
if (i != 0) payloadHtml += ", ";
|
||||
payloadHtml += exprName(call.args[i], opts);
|
||||
}
|
||||
|
||||
payloadHtml += ")";
|
||||
return payloadHtml;
|
||||
}
|
||||
case "as": {
|
||||
// @Check : this should be done in backend because there are legit @as() calls
|
||||
// const typeRefArg = zigAnalysis.exprs[expr.as.typeRefArg];
|
||||
const exprArg = zigAnalysis.exprs[expr.as.exprArg];
|
||||
// return "@as(" + exprName(typeRefArg, opts) +
|
||||
// ", " + exprName(exprArg, opts) + ")";
|
||||
return exprName(exprArg, opts);
|
||||
}
|
||||
case "declRef": {
|
||||
return zigAnalysis.decls[expr.declRef].name;
|
||||
}
|
||||
case "refPath": {
|
||||
return expr.refPath.map((x) => exprName(x, opts)).join(".");
|
||||
}
|
||||
case "int": {
|
||||
return "" + expr.int;
|
||||
}
|
||||
case "float": {
|
||||
return "" + expr.float.toFixed(2);
|
||||
}
|
||||
case "float128": {
|
||||
return "" + expr.float128.toFixed(2);
|
||||
}
|
||||
case "undefined": {
|
||||
return "undefined";
|
||||
}
|
||||
case "string": {
|
||||
return '"' + escapeHtml(expr.string) + '"';
|
||||
}
|
||||
|
||||
case "anytype": {
|
||||
return "anytype";
|
||||
}
|
||||
|
||||
case "this": {
|
||||
return "@This()";
|
||||
}
|
||||
|
||||
case "type": {
|
||||
let name = "";
|
||||
|
||||
let typeObj = expr.type;
|
||||
if (typeof typeObj === "number") typeObj = zigAnalysis.types[typeObj];
|
||||
switch (typeObj.kind) {
|
||||
default:
|
||||
throw "TODO";
|
||||
case typeKinds.Struct: {
|
||||
let structObj = typeObj;
|
||||
return structObj;
|
||||
}
|
||||
case typeKinds.Enum: {
|
||||
let enumObj = typeObj;
|
||||
return enumObj;
|
||||
}
|
||||
case typeKinds.Opaque: {
|
||||
let opaqueObj = typeObj;
|
||||
|
||||
return opaqueObj.name;
|
||||
}
|
||||
case typeKinds.ComptimeExpr: {
|
||||
return "anyopaque";
|
||||
}
|
||||
case typeKinds.Array: {
|
||||
let arrayObj = typeObj;
|
||||
let name = "[";
|
||||
let lenName = exprName(arrayObj.len, opts);
|
||||
let sentinel = arrayObj.sentinel
|
||||
? ":" + exprName(arrayObj.sentinel, opts)
|
||||
: "";
|
||||
// let is_mutable = arrayObj.is_multable ? "const " : "";
|
||||
|
||||
if (opts.wantHtml) {
|
||||
name +=
|
||||
'<span class="tok-number">' + lenName + sentinel + "</span>";
|
||||
} else {
|
||||
name += lenName + sentinel;
|
||||
}
|
||||
name += "]";
|
||||
// name += is_mutable;
|
||||
name += exprName(arrayObj.child, opts);
|
||||
return name;
|
||||
}
|
||||
case "fieldRef" : {
|
||||
const enumObj = exprName({"type":expr.fieldRef.type} ,opts);
|
||||
const field = zigAnalysis.astNodes[enumObj.ast].fields[expr.fieldRef.index];
|
||||
const name = zigAnalysis.astNodes[field].name;
|
||||
return name
|
||||
}
|
||||
case "enumToInt" : {
|
||||
const enumToInt = zigAnalysis.exprs[expr.enumToInt];
|
||||
return "@enumToInt(" + exprName(enumToInt, opts) + ")";
|
||||
}
|
||||
case "bitSizeOf" : {
|
||||
const bitSizeOf = zigAnalysis.exprs[expr.bitSizeOf];
|
||||
return "@bitSizeOf(" + exprName(bitSizeOf, opts) + ")";
|
||||
}
|
||||
case "sizeOf" : {
|
||||
const sizeOf = zigAnalysis.exprs[expr.sizeOf];
|
||||
return "@sizeOf(" + exprName(sizeOf, opts) + ")";
|
||||
}
|
||||
case "builtinIndex" : {
|
||||
const builtinIndex = zigAnalysis.exprs[expr.builtinIndex];
|
||||
return exprName(builtinIndex, opts);
|
||||
}
|
||||
case "builtin": {
|
||||
const param_expr = zigAnalysis.exprs[expr.builtin.param];
|
||||
let param = exprName(param_expr, opts);
|
||||
|
||||
|
||||
let payloadHtml = "@";
|
||||
switch (expr.builtin.name) {
|
||||
case "align_of": {
|
||||
payloadHtml += "alignOf";
|
||||
case typeKinds.Optional:
|
||||
return "?" + exprName(typeObj.child, opts);
|
||||
case typeKinds.Pointer: {
|
||||
let ptrObj = typeObj;
|
||||
let sentinel = ptrObj.sentinel
|
||||
? ":" + exprName(ptrObj.sentinel, opts)
|
||||
: "";
|
||||
let is_mutable = !ptrObj.is_mutable ? "const " : "";
|
||||
let name = "";
|
||||
switch (ptrObj.size) {
|
||||
default:
|
||||
console.log("TODO: implement unhandled pointer size case");
|
||||
case pointerSizeEnum.One:
|
||||
name += "*";
|
||||
name += is_mutable;
|
||||
break;
|
||||
}
|
||||
case "bool_to_int": {
|
||||
payloadHtml += "boolToInt";
|
||||
case pointerSizeEnum.Many:
|
||||
name += "[*";
|
||||
name += sentinel;
|
||||
name += "]";
|
||||
name += is_mutable;
|
||||
break;
|
||||
}
|
||||
case "embed_file": {
|
||||
payloadHtml += "embedFile";
|
||||
case pointerSizeEnum.Slice:
|
||||
if (ptrObj.is_ref) {
|
||||
name += "*";
|
||||
}
|
||||
name += "[";
|
||||
name += sentinel;
|
||||
name += "]";
|
||||
name += is_mutable;
|
||||
break;
|
||||
}
|
||||
case "error_name": {
|
||||
payloadHtml += "errorName";
|
||||
case pointerSizeEnum.C:
|
||||
name += "[*c";
|
||||
name += sentinel;
|
||||
name += "]";
|
||||
name += is_mutable;
|
||||
break;
|
||||
}
|
||||
case "panic": {
|
||||
payloadHtml += "panic";
|
||||
break;
|
||||
}
|
||||
case "set_cold": {
|
||||
payloadHtml += "setCold";
|
||||
break;
|
||||
}
|
||||
case "set_runtime_safety": {
|
||||
payloadHtml += "setRuntimeSafety";
|
||||
break;
|
||||
}
|
||||
case "sqrt": {
|
||||
payloadHtml += "sqrt";
|
||||
break;
|
||||
}
|
||||
case "sin": {
|
||||
payloadHtml += "sin";
|
||||
break;
|
||||
}
|
||||
case "cos": {
|
||||
payloadHtml += "cos";
|
||||
break;
|
||||
}
|
||||
case "tan": {
|
||||
payloadHtml += "tan";
|
||||
break;
|
||||
}
|
||||
case "exp": {
|
||||
payloadHtml += "exp";
|
||||
break;
|
||||
}
|
||||
case "exp2": {
|
||||
payloadHtml += "exp2";
|
||||
break;
|
||||
}
|
||||
case "log": {
|
||||
payloadHtml += "log";
|
||||
break;
|
||||
}
|
||||
case "log2": {
|
||||
payloadHtml += "log2";
|
||||
break;
|
||||
}
|
||||
case "log10": {
|
||||
payloadHtml += "log10";
|
||||
break;
|
||||
}
|
||||
case "fabs": {
|
||||
payloadHtml += "fabs";
|
||||
break;
|
||||
}
|
||||
case "floor": {
|
||||
payloadHtml += "floor";
|
||||
break;
|
||||
}
|
||||
case "ceil": {
|
||||
payloadHtml += "ceil";
|
||||
break;
|
||||
}
|
||||
case "trunc": {
|
||||
payloadHtml += "trunc";
|
||||
break;
|
||||
}
|
||||
case "round": {
|
||||
payloadHtml += "round";
|
||||
break;
|
||||
}
|
||||
case "tag_name": {
|
||||
payloadHtml += "tagName";
|
||||
break;
|
||||
}
|
||||
case "reify": {
|
||||
payloadHtml += "Type";
|
||||
break;
|
||||
}
|
||||
case "type_name": {
|
||||
payloadHtml += "typeName";
|
||||
break;
|
||||
}
|
||||
case "frame_type": {
|
||||
payloadHtml += "Frame";
|
||||
break;
|
||||
}
|
||||
case "frame_size": {
|
||||
payloadHtml += "frameSize";
|
||||
break;
|
||||
}
|
||||
case "ptr_to_int": {
|
||||
payloadHtml += "ptrToInt";
|
||||
break;
|
||||
}
|
||||
case "error_to_int": {
|
||||
payloadHtml += "errorToInt";
|
||||
break;
|
||||
}
|
||||
case "int_to_error": {
|
||||
payloadHtml += "intToError";
|
||||
break;
|
||||
}
|
||||
case "maximum": {
|
||||
payloadHtml += "maximum";
|
||||
break;
|
||||
}
|
||||
case "minimum": {
|
||||
payloadHtml += "minimum";
|
||||
break;
|
||||
}
|
||||
case "bit_not": {
|
||||
return "~" + param;
|
||||
}
|
||||
case "clz": {
|
||||
return "@clz(T" + ", " + param + ")";
|
||||
}
|
||||
case "ctz": {
|
||||
return "@ctz(T" + ", " + param + ")";
|
||||
}
|
||||
case "pop_count": {
|
||||
return "@popCount(T" + ", " + param + ")";
|
||||
}
|
||||
case "byte_swap": {
|
||||
return "@byteSwap(T" + ", " + param + ")";
|
||||
}
|
||||
case "bit_reverse": {
|
||||
return "@bitReverse(T" + ", " + param + ")";
|
||||
}
|
||||
default: console.log("builtin function not handled yet or doesn't exist!");
|
||||
};
|
||||
return payloadHtml + "(" + param + ")";
|
||||
|
||||
}
|
||||
case "builtinBinIndex" : {
|
||||
const builtinBinIndex = zigAnalysis.exprs[expr.builtinBinIndex];
|
||||
return exprName(builtinBinIndex, opts);
|
||||
}
|
||||
case "builtinBin": {
|
||||
const lhsOp = zigAnalysis.exprs[expr.builtinBin.lhs];
|
||||
const rhsOp = zigAnalysis.exprs[expr.builtinBin.rhs];
|
||||
let lhs = exprName(lhsOp, opts);
|
||||
let rhs = exprName(rhsOp, opts);
|
||||
|
||||
let payloadHtml = "@";
|
||||
switch (expr.builtinBin.name) {
|
||||
case "float_to_int": {
|
||||
payloadHtml += "floatToInt";
|
||||
break;
|
||||
}
|
||||
case "int_to_float": {
|
||||
payloadHtml += "intToFloat";
|
||||
break;
|
||||
}
|
||||
case "int_to_ptr": {
|
||||
payloadHtml += "intToPtr";
|
||||
break;
|
||||
}
|
||||
case "int_to_enum": {
|
||||
payloadHtml += "intToEnum";
|
||||
break;
|
||||
}
|
||||
case "float_cast": {
|
||||
payloadHtml += "floatCast";
|
||||
break;
|
||||
}
|
||||
case "int_cast": {
|
||||
payloadHtml += "intCast";
|
||||
break;
|
||||
}
|
||||
case "ptr_cast": {
|
||||
payloadHtml += "ptrCast";
|
||||
break;
|
||||
}
|
||||
case "truncate": {
|
||||
payloadHtml += "truncate";
|
||||
break;
|
||||
}
|
||||
case "align_cast": {
|
||||
payloadHtml += "alignCast";
|
||||
break;
|
||||
}
|
||||
case "has_decl": {
|
||||
payloadHtml += "hasDecl";
|
||||
break;
|
||||
}
|
||||
case "has_field": {
|
||||
payloadHtml += "hasField";
|
||||
break;
|
||||
}
|
||||
case "bit_reverse": {
|
||||
payloadHtml += "bitReverse";
|
||||
break;
|
||||
}
|
||||
case "div_exact": {
|
||||
payloadHtml += "divExact";
|
||||
break;
|
||||
}
|
||||
case "div_floor": {
|
||||
payloadHtml += "divFloor";
|
||||
break;
|
||||
}
|
||||
case "div_trunc": {
|
||||
payloadHtml += "divTrunc";
|
||||
break;
|
||||
}
|
||||
case "mod": {
|
||||
payloadHtml += "mod";
|
||||
break;
|
||||
}
|
||||
case "rem": {
|
||||
payloadHtml += "rem";
|
||||
break;
|
||||
}
|
||||
case "mod_rem": {
|
||||
payloadHtml += "rem";
|
||||
break;
|
||||
}
|
||||
case "shl_exact": {
|
||||
payloadHtml += "shlExact";
|
||||
break;
|
||||
}
|
||||
case "shr_exact": {
|
||||
payloadHtml += "shrExact";
|
||||
break;
|
||||
}
|
||||
case "bitcast" : {
|
||||
payloadHtml += "bitCast";
|
||||
break;
|
||||
}
|
||||
case "align_cast" : {
|
||||
payloadHtml += "alignCast";
|
||||
break;
|
||||
}
|
||||
case "vector_type" : {
|
||||
payloadHtml += "Vector";
|
||||
break;
|
||||
}
|
||||
case "reduce": {
|
||||
payloadHtml += "reduce";
|
||||
break;
|
||||
}
|
||||
case "splat": {
|
||||
payloadHtml += "splat";
|
||||
break;
|
||||
}
|
||||
case "offset_of": {
|
||||
payloadHtml += "offsetOf";
|
||||
break;
|
||||
}
|
||||
case "bit_offset_of": {
|
||||
payloadHtml += "bitOffsetOf";
|
||||
break;
|
||||
}
|
||||
default: console.log("builtin function not handled yet or doesn't exist!");
|
||||
};
|
||||
return payloadHtml + "(" + lhs + ", " + rhs + ")";
|
||||
|
||||
}
|
||||
case "binOpIndex" : {
|
||||
const binOpIndex = zigAnalysis.exprs[expr.binOpIndex];
|
||||
return exprName(binOpIndex, opts);
|
||||
}
|
||||
case "binOp": {
|
||||
const lhsOp = zigAnalysis.exprs[expr.binOp.lhs];
|
||||
const rhsOp = zigAnalysis.exprs[expr.binOp.rhs];
|
||||
let lhs = exprName(lhsOp, opts);
|
||||
let rhs = exprName(rhsOp, opts);
|
||||
|
||||
let print_lhs = "";
|
||||
let print_rhs = "";
|
||||
|
||||
if (lhsOp['binOpIndex']) {
|
||||
print_lhs = "(" + lhs + ")";
|
||||
} else {
|
||||
print_lhs = lhs;
|
||||
}
|
||||
if (rhsOp['binOpIndex']) {
|
||||
print_rhs = "(" + rhs + ")";
|
||||
} else {
|
||||
print_rhs = rhs;
|
||||
// @check: after the major changes in arrays the consts are came from switch above
|
||||
// if (!ptrObj.is_mutable) {
|
||||
// if (opts.wantHtml) {
|
||||
// name += '<span class="tok-kw">const</span> ';
|
||||
// } else {
|
||||
// name += "const ";
|
||||
// }
|
||||
// }
|
||||
if (ptrObj.is_allowzero) {
|
||||
name += "allowzero ";
|
||||
}
|
||||
|
||||
let operator = "";
|
||||
|
||||
switch (expr.binOp.name) {
|
||||
case "add": {
|
||||
operator += "+";
|
||||
break;
|
||||
if (ptrObj.is_volatile) {
|
||||
name += "volatile ";
|
||||
}
|
||||
if (ptrObj.has_addrspace) {
|
||||
name += "addrspace(";
|
||||
name += "." + "";
|
||||
name += ") ";
|
||||
}
|
||||
if (ptrObj.has_align) {
|
||||
let align = exprName(ptrObj.align, opts);
|
||||
if (opts.wantHtml) {
|
||||
name += '<span class="tok-kw">align</span>(';
|
||||
} else {
|
||||
name += "align(";
|
||||
}
|
||||
case "addwrap": {
|
||||
operator += "+%";
|
||||
break;
|
||||
if (opts.wantHtml) {
|
||||
name += '<span class="tok-number">' + align + "</span>";
|
||||
} else {
|
||||
name += align;
|
||||
}
|
||||
case "add_sat": {
|
||||
operator += "+|";
|
||||
break;
|
||||
if (ptrObj.hostIntBytes != null) {
|
||||
name += ":";
|
||||
if (opts.wantHtml) {
|
||||
name +=
|
||||
'<span class="tok-number">' +
|
||||
ptrObj.bitOffsetInHost +
|
||||
"</span>";
|
||||
} else {
|
||||
name += ptrObj.bitOffsetInHost;
|
||||
}
|
||||
name += ":";
|
||||
if (opts.wantHtml) {
|
||||
name +=
|
||||
'<span class="tok-number">' +
|
||||
ptrObj.hostIntBytes +
|
||||
"</span>";
|
||||
} else {
|
||||
name += ptrObj.hostIntBytes;
|
||||
}
|
||||
}
|
||||
case "sub": {
|
||||
operator += "-";
|
||||
break;
|
||||
}
|
||||
case "subwrap": {
|
||||
operator += "-%";
|
||||
break;
|
||||
}
|
||||
case "sub_sat": {
|
||||
operator += "-|";
|
||||
break;
|
||||
}
|
||||
case "mul": {
|
||||
operator += "*";
|
||||
break;
|
||||
}
|
||||
case "mulwrap": {
|
||||
operator += "*%";
|
||||
break;
|
||||
}
|
||||
case "mul_sat": {
|
||||
operator += "*|";
|
||||
break;
|
||||
}
|
||||
case "div": {
|
||||
operator += "/";
|
||||
break;
|
||||
}
|
||||
case "shl": {
|
||||
operator += "<<";
|
||||
break;
|
||||
}
|
||||
case "shl_sat": {
|
||||
operator += "<<|";
|
||||
break;
|
||||
}
|
||||
case "shr": {
|
||||
operator += ">>";
|
||||
break;
|
||||
}
|
||||
case "bit_or" : {
|
||||
operator += "|";
|
||||
break;
|
||||
}
|
||||
case "bit_and" : {
|
||||
operator += "&";
|
||||
break;
|
||||
}
|
||||
case "array_cat" : {
|
||||
operator += "++";
|
||||
break;
|
||||
}
|
||||
case "array_mul" : {
|
||||
operator += "**";
|
||||
break;
|
||||
}
|
||||
default: console.log("operator not handled yet or doesn't exist!");
|
||||
};
|
||||
|
||||
return print_lhs + " " + operator + " " + print_rhs;
|
||||
|
||||
name += ") ";
|
||||
}
|
||||
//name += typeValueName(ptrObj.child, wantHtml, wantSubLink, null);
|
||||
name += exprName(ptrObj.child, opts);
|
||||
return name;
|
||||
}
|
||||
case "errorSets": {
|
||||
const errUnionObj = zigAnalysis.types[expr.errorSets];
|
||||
let lhs = exprName(errUnionObj.lhs, opts);
|
||||
let rhs = exprName(errUnionObj.rhs, opts);
|
||||
return lhs + " || " + rhs;
|
||||
case typeKinds.Float: {
|
||||
let floatObj = typeObj;
|
||||
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">' + floatObj.name + "</span>";
|
||||
} else {
|
||||
return floatObj.name;
|
||||
}
|
||||
}
|
||||
case "errorUnion": {
|
||||
const errUnionObj = zigAnalysis.types[expr.errorUnion];
|
||||
case typeKinds.Int: {
|
||||
let intObj = typeObj;
|
||||
let name = intObj.name;
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">' + name + "</span>";
|
||||
} else {
|
||||
return name;
|
||||
}
|
||||
}
|
||||
case typeKinds.ComptimeInt:
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">comptime_int</span>';
|
||||
} else {
|
||||
return "comptime_int";
|
||||
}
|
||||
case typeKinds.ComptimeFloat:
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">comptime_float</span>';
|
||||
} else {
|
||||
return "comptime_float";
|
||||
}
|
||||
case typeKinds.Type:
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">type</span>';
|
||||
} else {
|
||||
return "type";
|
||||
}
|
||||
case typeKinds.Bool:
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">bool</span>';
|
||||
} else {
|
||||
return "bool";
|
||||
}
|
||||
case typeKinds.Void:
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">void</span>';
|
||||
} else {
|
||||
return "void";
|
||||
}
|
||||
case typeKinds.EnumLiteral:
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">(enum literal)</span>';
|
||||
} else {
|
||||
return "(enum literal)";
|
||||
}
|
||||
case typeKinds.NoReturn:
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">noreturn</span>';
|
||||
} else {
|
||||
return "noreturn";
|
||||
}
|
||||
case typeKinds.ErrorSet: {
|
||||
let errSetObj = typeObj;
|
||||
if (errSetObj.fields == null) {
|
||||
return '<span class="tok-type">anyerror</span>';
|
||||
} else if (errSetObj.fields.length == 0) {
|
||||
return "error{}";
|
||||
} else if (errSetObj.fields.length == 1) {
|
||||
return "error{" + errSetObj.fields[0].name + "}";
|
||||
} else {
|
||||
// throw "TODO";
|
||||
let html = "error{ " + errSetObj.fields[0].name;
|
||||
for (let i = 1; i < errSetObj.fields.length; i++) html += ", " + errSetObj.fields[i].name;
|
||||
html += " }";
|
||||
return html;
|
||||
}
|
||||
}
|
||||
|
||||
case typeKinds.ErrorUnion: {
|
||||
let errUnionObj = typeObj;
|
||||
let lhs = exprName(errUnionObj.lhs, opts);
|
||||
let rhs = exprName(errUnionObj.rhs, opts);
|
||||
return lhs + "!" + rhs;
|
||||
|
||||
}
|
||||
case "struct": {
|
||||
const struct_name = zigAnalysis.decls[expr.struct[0].val.typeRef.refPath[0].declRef].name;
|
||||
let struct_body = "";
|
||||
struct_body += struct_name + "{ ";
|
||||
for (let i = 0; i < expr.struct.length; i++) {
|
||||
const val = expr.struct[i].name
|
||||
const exprArg = zigAnalysis.exprs[expr.struct[i].val.expr.as.exprArg];
|
||||
let value_field = exprArg[Object.keys(exprArg)[0]];
|
||||
if (value_field instanceof Object) {
|
||||
value_field = zigAnalysis.decls[value_field[0].val.typeRef.refPath[0].declRef].name;
|
||||
};
|
||||
struct_body += "." + val + " = " + value_field;
|
||||
if (i !== expr.struct.length - 1) {
|
||||
struct_body += ", ";
|
||||
} else {
|
||||
struct_body += " ";
|
||||
case typeKinds.InferredErrorUnion: {
|
||||
let errUnionObj = typeObj;
|
||||
let payload = exprName(errUnionObj.payload, opts);
|
||||
return "!" + payload;
|
||||
}
|
||||
case typeKinds.Fn: {
|
||||
let fnObj = typeObj;
|
||||
let payloadHtml = "";
|
||||
if (opts.wantHtml) {
|
||||
if (fnObj.is_extern) {
|
||||
payloadHtml += "pub extern ";
|
||||
}
|
||||
}
|
||||
struct_body += "}";
|
||||
return struct_body;
|
||||
}
|
||||
case "typeOf_peer": {
|
||||
let payloadHtml = "@TypeOf("
|
||||
for (let i = 0; i < expr.typeOf_peer.length; i++) {
|
||||
let elem = zigAnalysis.exprs[expr.typeOf_peer[i]];
|
||||
payloadHtml += exprName(elem, {wantHtml: true, wantLink:true});
|
||||
if (i !== expr.typeOf_peer.length - 1) {
|
||||
payloadHtml += ", ";
|
||||
if (fnObj.has_lib_name) {
|
||||
payloadHtml += '"' + fnObj.lib_name + '" ';
|
||||
}
|
||||
}
|
||||
payloadHtml += ")";
|
||||
return payloadHtml;
|
||||
|
||||
}
|
||||
case "alignOf": {
|
||||
const alignRefArg = zigAnalysis.exprs[expr.alignOf];
|
||||
let payloadHtml = "@alignOf(" + exprName(alignRefArg, {wantHtml: true, wantLink:true}) + ")";
|
||||
return payloadHtml;
|
||||
}
|
||||
case "typeOf": {
|
||||
const typeRefArg = zigAnalysis.exprs[expr.typeOf];
|
||||
let payloadHtml = "@TypeOf(" + exprName(typeRefArg, {wantHtml: true, wantLink:true}) + ")";
|
||||
return payloadHtml;
|
||||
}
|
||||
case "typeInfo": {
|
||||
const typeRefArg = zigAnalysis.exprs[expr.typeInfo];
|
||||
let payloadHtml = "@typeInfo(" + exprName(typeRefArg, {wantHtml: true, wantLink:true}) + ")";
|
||||
return payloadHtml;
|
||||
}
|
||||
case "null": {
|
||||
return "null";
|
||||
}
|
||||
case "array": {
|
||||
let payloadHtml = ".{";
|
||||
for (let i = 0; i < expr.array.length; i++) {
|
||||
if (i != 0) payloadHtml += ", ";
|
||||
let elem = zigAnalysis.exprs[expr.array[i]];
|
||||
payloadHtml += exprName(elem, opts);
|
||||
}
|
||||
return payloadHtml + "}";
|
||||
}
|
||||
case "comptimeExpr": {
|
||||
return zigAnalysis.comptimeExprs[expr.comptimeExpr].code;
|
||||
}
|
||||
case "call": {
|
||||
let call = zigAnalysis.calls[expr.call];
|
||||
let payloadHtml = "";
|
||||
|
||||
|
||||
switch(Object.keys(call.func)[0]){
|
||||
default: throw "TODO";
|
||||
case "declRef":
|
||||
case "refPath": {
|
||||
payloadHtml += exprName(call.func, opts);
|
||||
break;
|
||||
}
|
||||
}
|
||||
payloadHtml += "(";
|
||||
|
||||
for (let i = 0; i < call.args.length; i++) {
|
||||
if (i != 0) payloadHtml += ", ";
|
||||
payloadHtml += exprName(call.args[i], opts);
|
||||
}
|
||||
|
||||
payloadHtml += ")";
|
||||
return payloadHtml;
|
||||
}
|
||||
case "as": {
|
||||
// @Check : this should be done in backend because there are legit @as() calls
|
||||
// const typeRefArg = zigAnalysis.exprs[expr.as.typeRefArg];
|
||||
const exprArg = zigAnalysis.exprs[expr.as.exprArg];
|
||||
// return "@as(" + exprName(typeRefArg, opts) +
|
||||
// ", " + exprName(exprArg, opts) + ")";
|
||||
return exprName(exprArg, opts);
|
||||
}
|
||||
case "declRef": {
|
||||
return zigAnalysis.decls[expr.declRef].name;
|
||||
}
|
||||
case "refPath": {
|
||||
return expr.refPath.map(x => exprName(x, opts)).join(".");
|
||||
}
|
||||
case "int": {
|
||||
return "" + expr.int;
|
||||
}
|
||||
case "float": {
|
||||
return "" + expr.float.toFixed(2);
|
||||
}
|
||||
case "float128": {
|
||||
return "" + expr.float128.toFixed(2);
|
||||
}
|
||||
case "undefined": {
|
||||
return "undefined";
|
||||
}
|
||||
case "string": {
|
||||
return "\"" + escapeHtml(expr.string) + "\"";
|
||||
}
|
||||
|
||||
case "anytype": {
|
||||
return "anytype";
|
||||
}
|
||||
|
||||
case "this":{
|
||||
return "@This()";
|
||||
}
|
||||
|
||||
case "type": {
|
||||
let name = "";
|
||||
|
||||
let typeObj = expr.type;
|
||||
if (typeof typeObj === 'number') typeObj = zigAnalysis.types[typeObj];
|
||||
switch (typeObj.kind) {
|
||||
default: throw "TODO";
|
||||
case typeKinds.Struct:
|
||||
{
|
||||
let structObj = (typeObj);
|
||||
return structObj;
|
||||
}
|
||||
case typeKinds.Enum:
|
||||
{
|
||||
let enumObj = (typeObj);
|
||||
return enumObj;
|
||||
}
|
||||
case typeKinds.Opaque:
|
||||
{
|
||||
let opaqueObj = (typeObj);
|
||||
|
||||
return opaqueObj.name;
|
||||
}
|
||||
case typeKinds.ComptimeExpr:
|
||||
{
|
||||
return "anyopaque";
|
||||
}
|
||||
case typeKinds.Array:
|
||||
{
|
||||
let arrayObj = typeObj;
|
||||
let name = "[";
|
||||
let lenName = exprName(arrayObj.len, opts);
|
||||
let sentinel = arrayObj.sentinel ? ":"+exprName(arrayObj.sentinel, opts) : "";
|
||||
// let is_mutable = arrayObj.is_multable ? "const " : "";
|
||||
|
||||
if (opts.wantHtml) {
|
||||
name +=
|
||||
'<span class="tok-number">' + lenName + sentinel + "</span>";
|
||||
} else {
|
||||
name += lenName + sentinel;
|
||||
}
|
||||
name += "]";
|
||||
// name += is_mutable;
|
||||
name += exprName(arrayObj.child, opts);
|
||||
return name;
|
||||
}
|
||||
case typeKinds.Optional:
|
||||
return "?" + exprName((typeObj).child, opts);
|
||||
case typeKinds.Pointer:
|
||||
{
|
||||
let ptrObj = (typeObj);
|
||||
let sentinel = ptrObj.sentinel ? ":"+exprName(ptrObj.sentinel, opts) : "";
|
||||
let is_mutable = !ptrObj.is_mutable ? "const " : "";
|
||||
let name = "";
|
||||
switch (ptrObj.size) {
|
||||
default:
|
||||
console.log("TODO: implement unhandled pointer size case");
|
||||
case pointerSizeEnum.One:
|
||||
name += "*";
|
||||
name += is_mutable;
|
||||
break;
|
||||
case pointerSizeEnum.Many:
|
||||
name += "[*";
|
||||
name += sentinel;
|
||||
name += "]";
|
||||
name += is_mutable;
|
||||
break;
|
||||
case pointerSizeEnum.Slice:
|
||||
if (ptrObj.is_ref) {
|
||||
name += "*";
|
||||
}
|
||||
name += "[";
|
||||
name += sentinel;
|
||||
name += "]";
|
||||
name += is_mutable;
|
||||
break;
|
||||
case pointerSizeEnum.C:
|
||||
name += "[*c";
|
||||
name += sentinel;
|
||||
name += "]";
|
||||
name += is_mutable;
|
||||
break;
|
||||
}
|
||||
// @check: after the major changes in arrays the consts are came from switch above
|
||||
// if (!ptrObj.is_mutable) {
|
||||
// if (opts.wantHtml) {
|
||||
// name += '<span class="tok-kw">const</span> ';
|
||||
// } else {
|
||||
// name += "const ";
|
||||
// }
|
||||
// }
|
||||
if (ptrObj.is_allowzero) {
|
||||
name += "allowzero ";
|
||||
}
|
||||
if (ptrObj.is_volatile) {
|
||||
name += "volatile ";
|
||||
}
|
||||
if (ptrObj.has_addrspace) {
|
||||
name += "addrspace(";
|
||||
name += "." + "";
|
||||
name += ") ";
|
||||
}
|
||||
if (ptrObj.has_align) {
|
||||
let align = exprName(ptrObj.align, opts);
|
||||
if (opts.wantHtml) {
|
||||
name += '<span class="tok-kw">align</span>(';
|
||||
} else {
|
||||
name += "align(";
|
||||
}
|
||||
if (opts.wantHtml) {
|
||||
name += '<span class="tok-number">' + align + '</span>';
|
||||
} else {
|
||||
name += align;
|
||||
}
|
||||
if (ptrObj.hostIntBytes != null) {
|
||||
name += ":";
|
||||
if (opts.wantHtml) {
|
||||
name += '<span class="tok-number">' + ptrObj.bitOffsetInHost + '</span>';
|
||||
} else {
|
||||
name += ptrObj.bitOffsetInHost;
|
||||
}
|
||||
name += ":";
|
||||
if (opts.wantHtml) {
|
||||
name += '<span class="tok-number">' + ptrObj.hostIntBytes + '</span>';
|
||||
} else {
|
||||
name += ptrObj.hostIntBytes;
|
||||
}
|
||||
}
|
||||
name += ") ";
|
||||
}
|
||||
//name += typeValueName(ptrObj.child, wantHtml, wantSubLink, null);
|
||||
name += exprName(ptrObj.child, opts);
|
||||
return name;
|
||||
}
|
||||
case typeKinds.Float:
|
||||
{
|
||||
let floatObj = (typeObj);
|
||||
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">' + floatObj.name + '</span>';
|
||||
} else {
|
||||
return floatObj.name;
|
||||
}
|
||||
}
|
||||
case typeKinds.Int:
|
||||
{
|
||||
let intObj = (typeObj);
|
||||
let name = intObj.name;
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">' + name + '</span>';
|
||||
} else {
|
||||
return name;
|
||||
}
|
||||
}
|
||||
case typeKinds.ComptimeInt:
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">comptime_int</span>';
|
||||
} else {
|
||||
return "comptime_int";
|
||||
}
|
||||
case typeKinds.ComptimeFloat:
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">comptime_float</span>';
|
||||
} else {
|
||||
return "comptime_float";
|
||||
}
|
||||
case typeKinds.Type:
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">type</span>';
|
||||
} else {
|
||||
return "type";
|
||||
}
|
||||
case typeKinds.Bool:
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">bool</span>';
|
||||
} else {
|
||||
return "bool";
|
||||
}
|
||||
case typeKinds.Void:
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">void</span>';
|
||||
} else {
|
||||
return "void";
|
||||
}
|
||||
case typeKinds.EnumLiteral:
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">(enum literal)</span>';
|
||||
} else {
|
||||
return "(enum literal)";
|
||||
}
|
||||
case typeKinds.NoReturn:
|
||||
if (opts.wantHtml) {
|
||||
return '<span class="tok-type">noreturn</span>';
|
||||
} else {
|
||||
return "noreturn";
|
||||
}
|
||||
case typeKinds.ErrorSet:
|
||||
{
|
||||
let errSetObj = (typeObj);
|
||||
if (errSetObj.fields == null) {
|
||||
return '<span class="tok-type">anyerror</span>';
|
||||
} else {
|
||||
// throw "TODO";
|
||||
let html = "error{" + errSetObj.fields[0].name + "}";
|
||||
return html;
|
||||
}
|
||||
}
|
||||
|
||||
case typeKinds.ErrorUnion:
|
||||
{
|
||||
let errUnionObj = (typeObj);
|
||||
let lhs = exprName(errUnionObj.lhs, opts);
|
||||
let rhs = exprName(errUnionObj.rhs, opts);
|
||||
return lhs + "!" + rhs;
|
||||
}
|
||||
case typeKinds.InferredErrorUnion:
|
||||
{
|
||||
let errUnionObj = (typeObj);
|
||||
let payload = exprName(errUnionObj.payload, opts);
|
||||
return "!" + payload;
|
||||
}
|
||||
case typeKinds.Fn:
|
||||
{
|
||||
let fnObj = (typeObj);
|
||||
let payloadHtml = "";
|
||||
if (opts.wantHtml) {
|
||||
if (fnObj.is_extern) {
|
||||
payloadHtml += "pub extern ";
|
||||
}
|
||||
if (fnObj.has_lib_name) {
|
||||
payloadHtml += "\"" + fnObj.lib_name +"\" ";
|
||||
}
|
||||
payloadHtml += '<span class="tok-kw">fn</span>';
|
||||
if (opts.fnDecl) {
|
||||
payloadHtml += ' <span class="tok-fn">';
|
||||
if (opts.linkFnNameDecl) {
|
||||
payloadHtml += '<a href="' + opts.linkFnNameDecl + '">' +
|
||||
escapeHtml(opts.fnDecl.name) + '</a>';
|
||||
} else {
|
||||
payloadHtml += escapeHtml(opts.fnDecl.name);
|
||||
}
|
||||
payloadHtml += '</span>';
|
||||
}
|
||||
} else {
|
||||
payloadHtml += 'fn ';
|
||||
}
|
||||
payloadHtml += '(';
|
||||
if (fnObj.params) {
|
||||
let fields = null;
|
||||
let isVarArgs = false;
|
||||
let fnNode = zigAnalysis.astNodes[fnObj.src];
|
||||
fields = fnNode.fields;
|
||||
isVarArgs = fnNode.varArgs;
|
||||
|
||||
for (let i = 0; i < fnObj.params.length; i += 1) {
|
||||
if (i != 0) {
|
||||
payloadHtml += ', ';
|
||||
}
|
||||
|
||||
payloadHtml += "<span class='argBreaker'><br> </span>"
|
||||
let value = fnObj.params[i];
|
||||
let paramValue = resolveValue({expr: value});
|
||||
|
||||
if (fields != null) {
|
||||
let paramNode = zigAnalysis.astNodes[fields[i]];
|
||||
|
||||
if (paramNode.varArgs) {
|
||||
payloadHtml += '...';
|
||||
continue;
|
||||
}
|
||||
|
||||
if (paramNode.noalias) {
|
||||
if (opts.wantHtml) {
|
||||
payloadHtml += '<span class="tok-kw">noalias</span> ';
|
||||
} else {
|
||||
payloadHtml += 'noalias ';
|
||||
}
|
||||
}
|
||||
|
||||
if (paramNode.comptime) {
|
||||
if (opts.wantHtml) {
|
||||
payloadHtml += '<span class="tok-kw">comptime</span> ';
|
||||
} else {
|
||||
payloadHtml += 'comptime ';
|
||||
}
|
||||
}
|
||||
|
||||
let paramName = paramNode.name;
|
||||
if (paramName != null) {
|
||||
// skip if it matches the type name
|
||||
if (!shouldSkipParamName(paramValue, paramName)) {
|
||||
payloadHtml += paramName + ': ';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (isVarArgs && i === fnObj.params.length - 1) {
|
||||
payloadHtml += '...';
|
||||
}
|
||||
else if ("alignOf" in value) {
|
||||
if (opts.wantHtml) {
|
||||
payloadHtml += '<a href="">';
|
||||
payloadHtml +=
|
||||
'<span class="tok-kw" style="color:lightblue;">'
|
||||
+ exprName(value, opts) + '</span>';
|
||||
payloadHtml += '</a>';
|
||||
} else {
|
||||
payloadHtml += exprName(value, opts);
|
||||
}
|
||||
|
||||
}
|
||||
else if ("typeOf" in value) {
|
||||
if (opts.wantHtml) {
|
||||
payloadHtml += '<a href="">';
|
||||
payloadHtml +=
|
||||
'<span class="tok-kw" style="color:lightblue;">'
|
||||
+ exprName(value, opts) + '</span>';
|
||||
payloadHtml += '</a>';
|
||||
} else {
|
||||
payloadHtml += exprName(value, opts);
|
||||
}
|
||||
|
||||
}
|
||||
else if ("typeOf_peer" in value) {
|
||||
if (opts.wantHtml) {
|
||||
payloadHtml += '<a href="">';
|
||||
payloadHtml +=
|
||||
'<span class="tok-kw" style="color:lightblue;">'
|
||||
+ exprName(value, opts) + '</span>';
|
||||
payloadHtml += '</a>';
|
||||
} else {
|
||||
payloadHtml += exprName(value, opts);
|
||||
}
|
||||
|
||||
}
|
||||
else if ("declRef" in value) {
|
||||
if (opts.wantHtml) {
|
||||
payloadHtml += '<a href="">';
|
||||
payloadHtml +=
|
||||
'<span class="tok-kw" style="color:lightblue;">'
|
||||
+ exprName(value, opts) + '</span>';
|
||||
payloadHtml += '</a>';
|
||||
} else {
|
||||
payloadHtml += exprName(value, opts);
|
||||
}
|
||||
|
||||
}
|
||||
else if ("call" in value) {
|
||||
if (opts.wantHtml) {
|
||||
payloadHtml += '<a href="">';
|
||||
payloadHtml +=
|
||||
'<span class="tok-kw" style="color:lightblue;">'
|
||||
+ exprName(value, opts) + '</span>';
|
||||
payloadHtml += '</a>';
|
||||
} else {
|
||||
payloadHtml += exprName(value, opts);
|
||||
}
|
||||
}
|
||||
else if ("refPath" in value) {
|
||||
if (opts.wantHtml) {
|
||||
payloadHtml += '<a href="">';
|
||||
payloadHtml +=
|
||||
'<span class="tok-kw" style="color:lightblue;">'
|
||||
+ exprName(value, opts) + '</span>';
|
||||
payloadHtml += '</a>';
|
||||
} else {
|
||||
payloadHtml += exprName(value, opts);
|
||||
}
|
||||
} else if ("type" in value) {
|
||||
let name = exprName(value, {
|
||||
wantHtml: false,
|
||||
wantLink: false,
|
||||
fnDecl: opts.fnDecl,
|
||||
linkFnNameDecl: opts.linkFnNameDecl,
|
||||
});
|
||||
payloadHtml += '<span class="tok-kw">' + name + '</span>';
|
||||
} else if ("binOpIndex" in value) {
|
||||
payloadHtml += exprName(value, opts);
|
||||
}else if ("comptimeExpr" in value) {
|
||||
let comptimeExpr = zigAnalysis.comptimeExprs[value.comptimeExpr].code;
|
||||
if (opts.wantHtml) {
|
||||
payloadHtml += '<span class="tok-kw">' + comptimeExpr + '</span>';
|
||||
} else {
|
||||
payloadHtml += comptimeExpr;
|
||||
}
|
||||
} else if (opts.wantHtml) {
|
||||
payloadHtml += '<span class="tok-kw">anytype</span>';
|
||||
} else {
|
||||
payloadHtml += 'anytype';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
payloadHtml += "<span class='argBreaker'>,<br></span>"
|
||||
payloadHtml += ') ';
|
||||
|
||||
if (fnObj.has_align) {
|
||||
let align = zigAnalysis.exprs[fnObj.align]
|
||||
payloadHtml += "align(" + exprName(align, opts) + ") ";
|
||||
}
|
||||
if (fnObj.has_cc) {
|
||||
let cc = zigAnalysis.exprs[fnObj.cc]
|
||||
if (cc) {
|
||||
payloadHtml += "callconv(." + cc.enumLiteral + ") ";
|
||||
}
|
||||
}
|
||||
|
||||
if (fnObj.is_inferred_error) {
|
||||
payloadHtml += "!";
|
||||
}
|
||||
if (fnObj.ret != null) {
|
||||
payloadHtml += exprName(fnObj.ret, opts);
|
||||
} else if (opts.wantHtml) {
|
||||
payloadHtml += '<span class="tok-kw">anytype</span>';
|
||||
} else {
|
||||
payloadHtml += 'anytype';
|
||||
}
|
||||
return payloadHtml;
|
||||
}
|
||||
// if (wantHtml) {
|
||||
// return escapeHtml(typeObj.name);
|
||||
// } else {
|
||||
// return typeObj.name;
|
||||
// }
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// Decide whether a parameter's name is redundant: true when the parameter is
// a single-item pointer whose pointee type's display name, lower-cased,
// equals the parameter name.
function shouldSkipParamName(typeRef, paramName) {
  const resolved = resolveValue({ expr: typeRef });
  if (!("type" in resolved)) return false;

  const typeObj = zigAnalysis.types[resolved.type];
  if (typeObj.kind !== typeKinds.Pointer) return false;
  if (getPtrSize(typeObj) !== pointerSizeEnum.One) return false;

  const pointee = resolveValue(typeObj.child);
  return typeValueName(pointee, false, true).toLowerCase() === paramName;
}
|
||||
|
||||
|
||||
// A pointer record with no explicit size is a single-item pointer ("One").
function getPtrSize(typeObj) {
  if (typeObj.size == null) {
    return pointerSizeEnum.One;
  }
  return typeObj.size;
}
|
||||
|
||||
|
||||
// Fill in the page header ("name (Kind)") for a type. The std root module is
// special-cased to display as "std"; error-set types additionally get their
// error table rendered.
function renderType(typeObj) {
  const isStdRoot =
    rootIsStd &&
    typeObj === zigAnalysis.types[zigAnalysis.packages[zigAnalysis.rootPkg].main];
  const name = isStdRoot ? "std" : exprName({ type: typeObj }, false, false);

  if (name != null && name != "") {
    domHdrName.innerText = name + " (" + zigAnalysis.typeKinds[typeObj.kind] + ")";
    domHdrName.classList.remove("hidden");
  }
  if (typeObj.kind == typeKinds.ErrorSet) {
    renderErrorSet(typeObj);
  }
}
|
||||
|
||||
|
||||
// Render the errors of an error set into the function-errors table.
// A null `fields` means the set is `anyerror`, which gets its own notice
// instead of a list.
//
// NOTE(review): the previous body contained merge-conflict residue — a copy
// of exprName's Fn-signature rendering was spliced into the docs branch,
// referencing out-of-scope variables (`payloadHtml`, `opts`, `fnObj`) that
// would throw a ReferenceError at runtime, and the else-arm clearing
// `descTdDom` was displaced. This restores the intact implementation that
// also appears later in the file.
function renderErrorSet(errSetType) {
  if (errSetType.fields == null) {
    // Inferred/any error set: nothing concrete to list.
    domFnErrorsAnyError.classList.remove("hidden");
  } else {
    let errorList = [];
    for (let i = 0; i < errSetType.fields.length; i += 1) {
      let errObj = errSetType.fields[i];
      errorList.push(errObj);
    }
    // Case-insensitive alphabetical order for display.
    errorList.sort(function (a, b) {
      return operatorCompare(a.name.toLowerCase(), b.name.toLowerCase());
    });

    resizeDomListDl(domListFnErrors, errorList.length);
    for (let i = 0; i < errorList.length; i += 1) {
      // The <dl> holds name/description pairs, hence the *2 stride.
      let nameTdDom = domListFnErrors.children[i * 2 + 0];
      let descTdDom = domListFnErrors.children[i * 2 + 1];
      nameTdDom.textContent = errorList[i].name;
      let docs = errorList[i].docs;
      if (docs != null) {
        descTdDom.innerHTML = markdown(docs);
      } else {
        descTdDom.textContent = "";
      }
    }
    domTableFnErrors.classList.remove("hidden");
  }
  domSectFnErrors.classList.remove("hidden");
}
|
||||
}
|
||||
}
|
||||
|
||||
// True when a parameter name merely repeats its type: the parameter must be
// a single-item pointer and the pointee type's lower-cased display name must
// match the parameter name exactly.
function shouldSkipParamName(typeRef, paramName) {
  const res = resolveValue({ expr: typeRef });
  if ("type" in res) {
    const t = zigAnalysis.types[res.type];
    const isSingleItemPtr =
      t.kind === typeKinds.Pointer && getPtrSize(t) === pointerSizeEnum.One;
    if (isSingleItemPtr) {
      const child = resolveValue(t.child);
      return typeValueName(child, false, true).toLowerCase() === paramName;
    }
  }
  return false;
}
|
||||
|
||||
// Resolve a pointer type's size classification, defaulting to single-item
// ("One") when the analysis data omits the field.
function getPtrSize(typeObj) {
  const size = typeObj.size;
  return size == null ? pointerSizeEnum.One : size;
}
|
||||
|
||||
// Populate the page header for a type and, for error-set types, render the
// error list as well. The std root module displays as "std".
function renderType(typeObj) {
  let name;
  if (
    rootIsStd &&
    typeObj === zigAnalysis.types[zigAnalysis.packages[zigAnalysis.rootPkg].main]
  ) {
    name = "std";
  } else {
    name = exprName({ type: typeObj }, false, false);
  }

  if (name != null && name != "") {
    const kindLabel = zigAnalysis.typeKinds[typeObj.kind];
    domHdrName.innerText = name + " (" + kindLabel + ")";
    domHdrName.classList.remove("hidden");
  }

  if (typeObj.kind == typeKinds.ErrorSet) {
    renderErrorSet(typeObj);
  }
}
|
||||
|
||||
// Render an error set's members into the function-errors table, sorted
// case-insensitively by name. A null `fields` means `anyerror`.
function renderErrorSet(errSetType) {
  if (errSetType.fields == null) {
    domFnErrorsAnyError.classList.remove("hidden");
  } else {
    const errorList = errSetType.fields.slice();
    errorList.sort((a, b) =>
      operatorCompare(a.name.toLowerCase(), b.name.toLowerCase())
    );

    resizeDomListDl(domListFnErrors, errorList.length);
    for (let i = 0; i < errorList.length; i += 1) {
      // Name and description cells alternate inside the <dl>.
      const nameTdDom = domListFnErrors.children[i * 2 + 0];
      const descTdDom = domListFnErrors.children[i * 2 + 1];
      nameTdDom.textContent = errorList[i].name;
      const docs = errorList[i].docs;
      if (docs != null) {
        descTdDom.innerHTML = markdown(docs);
      } else {
        descTdDom.textContent = "";
      }
    }
    domTableFnErrors.classList.remove("hidden");
  }
  domSectFnErrors.classList.remove("hidden");
}
|
||||
|
||||
// function allCompTimeFnCallsHaveTypeResult(typeIndex, value) {
|
||||
// let srcIndex = zigAnalysis.fns[value].src;
|
||||
// let calls = nodesToCallsMap[srcIndex];
|
||||
// if (calls == null) return false;
|
||||
// for (let i = 0; i < calls.length; i += 1) {
|
||||
// let call = zigAnalysis.calls[calls[i]];
|
||||
// if (call.result.type !== typeTypeId) return false;
|
||||
// }
|
||||
// return true;
|
||||
// }
|
||||
//
|
||||
// function allCompTimeFnCallsResult(calls) {
|
||||
// let firstTypeObj = null;
|
||||
// let containerObj = {
|
||||
// privDecls: [],
|
||||
// };
|
||||
// for (let callI = 0; callI < calls.length; callI += 1) {
|
||||
// let call = zigAnalysis.calls[calls[callI]];
|
||||
// if (call.result.type !== typeTypeId) return null;
|
||||
// let typeObj = zigAnalysis.types[call.result.value];
|
||||
// if (!typeKindIsContainer(typeObj.kind)) return null;
|
||||
// if (firstTypeObj == null) {
|
||||
// firstTypeObj = typeObj;
|
||||
// containerObj.src = typeObj.src;
|
||||
// } else if (firstTypeObj.src !== typeObj.src) {
|
||||
// return null;
|
||||
// }
|
||||
//
|
||||
// if (containerObj.fields == null) {
|
||||
// containerObj.fields = (typeObj.fields || []).concat([]);
|
||||
// } else for (let fieldI = 0; fieldI < typeObj.fields.length; fieldI += 1) {
|
||||
// let prev = containerObj.fields[fieldI];
|
||||
// let next = typeObj.fields[fieldI];
|
||||
// if (prev === next) continue;
|
||||
// if (typeof(prev) === 'object') {
|
||||
// if (prev[next] == null) prev[next] = typeObj;
|
||||
// } else {
|
||||
// containerObj.fields[fieldI] = {};
|
||||
// containerObj.fields[fieldI][prev] = firstTypeObj;
|
||||
// containerObj.fields[fieldI][next] = typeObj;
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// if (containerObj.pubDecls == null) {
|
||||
// containerObj.pubDecls = (typeObj.pubDecls || []).concat([]);
|
||||
// } else for (let declI = 0; declI < typeObj.pubDecls.length; declI += 1) {
|
||||
// let prev = containerObj.pubDecls[declI];
|
||||
// let next = typeObj.pubDecls[declI];
|
||||
// if (prev === next) continue;
|
||||
// // TODO instead of showing "examples" as the public declarations,
|
||||
// // do logic like this:
|
||||
// //if (typeof(prev) !== 'object') {
|
||||
// // let newDeclId = zigAnalysis.decls.length;
|
||||
// // prev = clone(zigAnalysis.decls[prev]);
|
||||
// // prev.id = newDeclId;
|
||||
// // zigAnalysis.decls.push(prev);
|
||||
// // containerObj.pubDecls[declI] = prev;
|
||||
// //}
|
||||
// //mergeDecls(prev, next, firstTypeObj, typeObj);
|
||||
// }
|
||||
// }
|
||||
// for (let declI = 0; declI < containerObj.pubDecls.length; declI += 1) {
|
||||
// let decl = containerObj.pubDecls[declI];
|
||||
// if (typeof(decl) === 'object') {
|
||||
// containerObj.pubDecls[declI] = containerObj.pubDecls[declI].id;
|
||||
// }
|
||||
// }
|
||||
// return containerObj;
|
||||
// }
|
||||
|
||||
// Render a `const` declaration: the prototype line goes into domFnProtoCode
// and the declaration's doc comment (if any) into domTldDocs.
//
// NOTE(review): merge residue had spliced ~200 lines of commented-out dead
// code (a duplicate of the allCompTimeFnCalls* comment block that already
// sits above this function) into the middle of the body; it is removed here.
// Behavior is unchanged.
function renderValue(decl) {
  let resolvedValue = resolveValue(decl.value);

  const renderOpts = { wantHtml: true, wantLink: true };
  const declName = escapeHtml(decl.name);
  const valueHtml = exprName(decl.value.expr, renderOpts);

  if (resolvedValue.expr.fieldRef) {
    // Field reference: annotate with the referenced declaration's type name.
    const declRef = decl.value.expr.refPath[0].declRef;
    const type = zigAnalysis.decls[declRef];
    domFnProtoCode.innerHTML =
      '<span class="tok-kw">const</span> ' +
      declName + ": " + type.name + " = " + valueHtml + ";";
  } else if (
    resolvedValue.expr.string !== undefined ||
    resolvedValue.expr.call !== undefined ||
    resolvedValue.expr.comptimeExpr
  ) {
    // Strings, calls, and comptime expressions: the annotation comes from
    // the resolved value expression itself.
    domFnProtoCode.innerHTML =
      '<span class="tok-kw">const</span> ' +
      declName + ": " + exprName(resolvedValue.expr, renderOpts) +
      " = " + valueHtml + ";";
  } else if (resolvedValue.expr.compileError) {
    // Compile errors carry no meaningful type annotation.
    domFnProtoCode.innerHTML =
      '<span class="tok-kw">const</span> ' +
      declName + " = " + valueHtml + ";";
  } else {
    domFnProtoCode.innerHTML =
      '<span class="tok-kw">const</span> ' +
      declName + ": " + exprName(resolvedValue.typeRef, renderOpts) +
      " = " + valueHtml + ";";
  }

  let docs = zigAnalysis.astNodes[decl.src].docs;
  if (docs != null) {
    domTldDocs.innerHTML = markdown(docs);
    domTldDocs.classList.remove("hidden");
  }

  domFnProto.classList.remove("hidden");
}
|
||||
|
||||
function renderVar(decl) {
|
||||
let declTypeRef = typeOfDecl(decl);
|
||||
domFnProtoCode.innerHTML =
|
||||
'<span class="tok-kw">var</span> ' +
|
||||
escapeHtml(decl.name) +
|
||||
": " +
|
||||
typeValueName(declTypeRef, true, true);
|
||||
|
||||
|
||||
function renderValue(decl) {
|
||||
let resolvedValue = resolveValue(decl.value)
|
||||
let docs = zigAnalysis.astNodes[decl.src].docs;
|
||||
if (docs != null) {
|
||||
domTldDocs.innerHTML = markdown(docs);
|
||||
domTldDocs.classList.remove("hidden");
|
||||
}
|
||||
|
||||
if (resolvedValue.expr.fieldRef) {
|
||||
const declRef = decl.value.expr.refPath[0].declRef;
|
||||
const type = zigAnalysis.decls[declRef];
|
||||
domFnProtoCode.innerHTML = '<span class="tok-kw">const</span> ' +
|
||||
escapeHtml(decl.name) + ': ' + type.name +
|
||||
" = " + exprName(decl.value.expr, {wantHtml: true, wantLink:true}) + ";";
|
||||
} else if (resolvedValue.expr.string !== undefined || resolvedValue.expr.call !== undefined || resolvedValue.expr.comptimeExpr) {
|
||||
domFnProtoCode.innerHTML = '<span class="tok-kw">const</span> ' +
|
||||
escapeHtml(decl.name) + ': ' + exprName(resolvedValue.expr, {wantHtml: true, wantLink:true}) +
|
||||
" = " + exprName(decl.value.expr, {wantHtml: true, wantLink:true}) + ";";
|
||||
} else if (resolvedValue.expr.compileError) {
|
||||
domFnProtoCode.innerHTML = '<span class="tok-kw">const</span> ' +
|
||||
escapeHtml(decl.name) + " = " + exprName(decl.value.expr, {wantHtml: true, wantLink:true}) + ";";
|
||||
}
|
||||
else {
|
||||
domFnProtoCode.innerHTML = '<span class="tok-kw">const</span> ' +
|
||||
escapeHtml(decl.name) + ': ' + exprName(resolvedValue.typeRef, {wantHtml: true, wantLink:true}) +
|
||||
" = " + exprName(decl.value.expr, {wantHtml: true, wantLink:true}) + ";";
|
||||
domFnProto.classList.remove("hidden");
|
||||
}
|
||||
|
||||
function categorizeDecls(
|
||||
decls,
|
||||
typesList,
|
||||
namespacesList,
|
||||
errSetsList,
|
||||
fnsList,
|
||||
varsList,
|
||||
valsList,
|
||||
testsList
|
||||
) {
|
||||
for (let i = 0; i < decls.length; i += 1) {
|
||||
let decl = zigAnalysis.decls[decls[i]];
|
||||
let declValue = resolveValue(decl.value);
|
||||
|
||||
if (decl.isTest) {
|
||||
testsList.push(decl);
|
||||
continue;
|
||||
}
|
||||
|
||||
let docs = zigAnalysis.astNodes[decl.src].docs;
|
||||
if (docs != null) {
|
||||
domTldDocs.innerHTML = markdown(docs);
|
||||
domTldDocs.classList.remove("hidden");
|
||||
}
|
||||
if (decl.kind === "var") {
|
||||
varsList.push(decl);
|
||||
continue;
|
||||
}
|
||||
|
||||
domFnProto.classList.remove("hidden");
|
||||
}
|
||||
|
||||
|
||||
// Render a `var` declaration's prototype line and its doc comment.
function renderVar(decl) {
  const typeRef = typeOfDecl(decl);
  const proto =
    '<span class="tok-kw">var</span> ' +
    escapeHtml(decl.name) +
    ': ' +
    typeValueName(typeRef, true, true);
  domFnProtoCode.innerHTML = proto;

  const docs = zigAnalysis.astNodes[decl.src].docs;
  if (docs != null) {
    domTldDocs.innerHTML = markdown(docs);
    domTldDocs.classList.remove("hidden");
  }

  domFnProto.classList.remove("hidden");
}
|
||||
|
||||
|
||||
|
||||
// Sort a list of declaration indices into category buckets. The seven list
// arguments are output parameters and are mutated in place.
function categorizeDecls(
  decls,
  typesList,
  namespacesList,
  errSetsList,
  fnsList,
  varsList,
  valsList,
  testsList
) {
  // Shared tail for const decls whose value is a type expression:
  // error sets, field-less structs (namespaces), and everything else.
  function pushTypeDecl(decl, typeIndex) {
    if (typeIsErrSet(typeIndex)) {
      errSetsList.push(decl);
    } else if (typeIsStructWithNoFields(typeIndex)) {
      namespacesList.push(decl);
    } else {
      typesList.push(decl);
    }
  }

  for (let i = 0; i < decls.length; i += 1) {
    const decl = zigAnalysis.decls[decls[i]];
    const declValue = resolveValue(decl.value);

    if (decl.isTest) {
      testsList.push(decl);
      continue;
    }

    if (decl.kind === "var") {
      varsList.push(decl);
      continue;
    }

    // Anything that is neither a test, a var, nor a const is ignored.
    if (decl.kind !== "const") continue;

    if ("type" in declValue.expr) {
      // We have the actual type expression at hand.
      const typeExpr = zigAnalysis.types[declValue.expr.type];
      if (typeExpr.kind == typeKinds.Fn) {
        // Functions returning `type` are treated as type constructors.
        const funcRetExpr = resolveValue({ expr: typeExpr.ret });
        const returnsType =
          "type" in funcRetExpr.expr && funcRetExpr.expr.type == typeTypeId;
        if (returnsType) {
          pushTypeDecl(decl, declValue.expr.type);
        } else {
          fnsList.push(decl);
        }
      } else {
        pushTypeDecl(decl, declValue.expr.type);
      }
    } else if ("typeRef" in declValue) {
      if ("type" in declValue.typeRef && declValue.typeRef == typeTypeId) {
        // We don't know what the type expression is, but we know it's a type.
        typesList.push(decl);
      } else {
        valsList.push(decl);
      }
    } else {
      valsList.push(decl);
    }
  }
}
|
||||
|
||||
|
||||
function renderContainer(container) {
|
||||
|
||||
let typesList = [];
|
||||
|
||||
let namespacesList = [];
|
||||
|
||||
let errSetsList = [];
|
||||
|
||||
let fnsList = [];
|
||||
|
||||
let varsList = [];
|
||||
|
||||
let valsList = [];
|
||||
|
||||
let testsList = [];
|
||||
|
||||
categorizeDecls(container.pubDecls,
|
||||
typesList, namespacesList, errSetsList,
|
||||
fnsList, varsList, valsList, testsList);
|
||||
if (curNav.showPrivDecls) categorizeDecls(container.privDecls,
|
||||
typesList, namespacesList, errSetsList,
|
||||
fnsList, varsList, valsList, testsList);
|
||||
|
||||
|
||||
typesList.sort(byNameProperty);
|
||||
namespacesList.sort(byNameProperty);
|
||||
errSetsList.sort(byNameProperty);
|
||||
fnsList.sort(byNameProperty);
|
||||
varsList.sort(byNameProperty);
|
||||
valsList.sort(byNameProperty);
|
||||
testsList.sort(byNameProperty);
|
||||
|
||||
if (container.src != null) {
|
||||
let docs = zigAnalysis.astNodes[container.src].docs;
|
||||
if (docs != null) {
|
||||
domTldDocs.innerHTML = markdown(docs);
|
||||
domTldDocs.classList.remove("hidden");
|
||||
}
|
||||
}
|
||||
|
||||
if (typesList.length !== 0) {
|
||||
window.x = typesList;
|
||||
resizeDomList(domListTypes, typesList.length, '<li><a href="#"></a></li>');
|
||||
for (let i = 0; i < typesList.length; i += 1) {
|
||||
let liDom = domListTypes.children[i];
|
||||
let aDom = liDom.children[0];
|
||||
let decl = typesList[i];
|
||||
aDom.textContent = decl.name;
|
||||
aDom.setAttribute('href', navLinkDecl(decl.name));
|
||||
}
|
||||
domSectTypes.classList.remove("hidden");
|
||||
}
|
||||
if (namespacesList.length !== 0) {
|
||||
resizeDomList(domListNamespaces, namespacesList.length, '<li><a href="#"></a></li>');
|
||||
for (let i = 0; i < namespacesList.length; i += 1) {
|
||||
let liDom = domListNamespaces.children[i];
|
||||
let aDom = liDom.children[0];
|
||||
let decl = namespacesList[i];
|
||||
aDom.textContent = decl.name;
|
||||
aDom.setAttribute('href', navLinkDecl(decl.name));
|
||||
}
|
||||
domSectNamespaces.classList.remove("hidden");
|
||||
}
|
||||
|
||||
if (errSetsList.length !== 0) {
|
||||
resizeDomList(domListErrSets, errSetsList.length, '<li><a href="#"></a></li>');
|
||||
for (let i = 0; i < errSetsList.length; i += 1) {
|
||||
let liDom = domListErrSets.children[i];
|
||||
let aDom = liDom.children[0];
|
||||
let decl = errSetsList[i];
|
||||
aDom.textContent = decl.name;
|
||||
aDom.setAttribute('href', navLinkDecl(decl.name));
|
||||
}
|
||||
domSectErrSets.classList.remove("hidden");
|
||||
}
|
||||
|
||||
if (fnsList.length !== 0) {
|
||||
resizeDomList(domListFns, fnsList.length, '<div><dt></dt><dd></dd></div>');
|
||||
|
||||
for (let i = 0; i < fnsList.length; i += 1) {
|
||||
let decl = fnsList[i];
|
||||
let trDom = domListFns.children[i];
|
||||
|
||||
let tdFnCode = trDom.children[0];
|
||||
let tdDesc = trDom.children[1];
|
||||
|
||||
let declType = resolveValue(decl.value);
|
||||
console.assert("type" in declType.expr);
|
||||
|
||||
tdFnCode.innerHTML = exprName(declType.expr,{
|
||||
wantHtml: true,
|
||||
wantLink: true,
|
||||
fnDecl: decl,
|
||||
linkFnNameDecl: navLinkDecl(decl.name),
|
||||
});
|
||||
|
||||
let docs = zigAnalysis.astNodes[decl.src].docs;
|
||||
if (docs != null) {
|
||||
tdDesc.innerHTML = shortDescMarkdown(docs);
|
||||
} else {
|
||||
tdDesc.textContent = "";
|
||||
}
|
||||
}
|
||||
domSectFns.classList.remove("hidden");
|
||||
}
|
||||
|
||||
let containerNode = zigAnalysis.astNodes[container.src];
|
||||
if (containerNode.fields && containerNode.fields.length > 0) {
|
||||
resizeDomList(domListFields, containerNode.fields.length, '<div></div>');
|
||||
|
||||
for (let i = 0; i < containerNode.fields.length; i += 1) {
|
||||
let fieldNode = zigAnalysis.astNodes[containerNode.fields[i]];
|
||||
let divDom = domListFields.children[i];
|
||||
let fieldName = (fieldNode.name);
|
||||
let docs = fieldNode.docs;
|
||||
let docsNonEmpty = docs != null && docs !== "";
|
||||
let extraPreClass = docsNonEmpty ? " fieldHasDocs" : "";
|
||||
|
||||
let html = '<div class="mobile-scroll-container"><pre class="scroll-item' + extraPreClass + '">' + escapeHtml(fieldName);
|
||||
|
||||
if (container.kind === typeKinds.Enum) {
|
||||
html += ' = <span class="tok-number">' + fieldName + '</span>';
|
||||
} else {
|
||||
let fieldTypeExpr = container.fields[i];
|
||||
html += ": ";
|
||||
let name = exprName(fieldTypeExpr, false, false);
|
||||
html += '<span class="tok-kw">'+ name +'</span>';
|
||||
let tsn = typeShorthandName(fieldTypeExpr);
|
||||
if (tsn) {
|
||||
html += '<span> ('+ tsn +')</span>';
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
html += ',</pre></div>';
|
||||
|
||||
if (docsNonEmpty) {
|
||||
html += '<div class="fieldDocs">' + markdown(docs) + '</div>';
|
||||
}
|
||||
divDom.innerHTML = html;
|
||||
}
|
||||
domSectFields.classList.remove("hidden");
|
||||
}
|
||||
|
||||
if (varsList.length !== 0) {
|
||||
resizeDomList(domListGlobalVars, varsList.length,
|
||||
'<tr><td><a href="#"></a></td><td></td><td></td></tr>');
|
||||
for (let i = 0; i < varsList.length; i += 1) {
|
||||
let decl = varsList[i];
|
||||
let trDom = domListGlobalVars.children[i];
|
||||
|
||||
let tdName = trDom.children[0];
|
||||
let tdNameA = tdName.children[0];
|
||||
let tdType = trDom.children[1];
|
||||
let tdDesc = trDom.children[2];
|
||||
|
||||
tdNameA.setAttribute('href', navLinkDecl(decl.name));
|
||||
tdNameA.textContent = decl.name;
|
||||
|
||||
tdType.innerHTML = typeValueName(typeOfDecl(decl), true, true);
|
||||
|
||||
let docs = zigAnalysis.astNodes[decl.src].docs;
|
||||
if (docs != null) {
|
||||
tdDesc.innerHTML = shortDescMarkdown(docs);
|
||||
} else {
|
||||
tdDesc.textContent = "";
|
||||
}
|
||||
}
|
||||
domSectGlobalVars.classList.remove("hidden");
|
||||
}
|
||||
|
||||
if (valsList.length !== 0) {
|
||||
resizeDomList(domListValues, valsList.length,
|
||||
'<tr><td><a href="#"></a></td><td></td><td></td></tr>');
|
||||
for (let i = 0; i < valsList.length; i += 1) {
|
||||
let decl = valsList[i];
|
||||
let trDom = domListValues.children[i];
|
||||
|
||||
let tdName = trDom.children[0];
|
||||
let tdNameA = tdName.children[0];
|
||||
let tdType = trDom.children[1];
|
||||
let tdDesc = trDom.children[2];
|
||||
|
||||
tdNameA.setAttribute('href', navLinkDecl(decl.name));
|
||||
tdNameA.textContent = decl.name;
|
||||
|
||||
tdType.innerHTML = exprName(walkResultTypeRef(decl.value),
|
||||
{wantHtml:true, wantLink:true});
|
||||
|
||||
let docs = zigAnalysis.astNodes[decl.src].docs;
|
||||
if (docs != null) {
|
||||
tdDesc.innerHTML = shortDescMarkdown(docs);
|
||||
} else {
|
||||
tdDesc.textContent = "";
|
||||
}
|
||||
}
|
||||
domSectValues.classList.remove("hidden");
|
||||
}
|
||||
|
||||
if (testsList.length !== 0) {
|
||||
resizeDomList(domListTests, testsList.length,
|
||||
'<tr><td><a href="#"></a></td><td></td><td></td></tr>');
|
||||
for (let i = 0; i < testsList.length; i += 1) {
|
||||
let decl = testsList[i];
|
||||
let trDom = domListTests.children[i];
|
||||
|
||||
let tdName = trDom.children[0];
|
||||
let tdNameA = tdName.children[0];
|
||||
let tdType = trDom.children[1];
|
||||
let tdDesc = trDom.children[2];
|
||||
|
||||
tdNameA.setAttribute('href', navLinkDecl(decl.name));
|
||||
tdNameA.textContent = decl.name;
|
||||
|
||||
tdType.innerHTML = exprName(walkResultTypeRef(decl.value),
|
||||
{wantHtml:true, wantLink:true});
|
||||
|
||||
let docs = zigAnalysis.astNodes[decl.src].docs;
|
||||
if (docs != null) {
|
||||
tdDesc.innerHTML = shortDescMarkdown(docs);
|
||||
} else {
|
||||
tdDesc.textContent = "";
|
||||
}
|
||||
}
|
||||
domSectTests.classList.remove("hidden");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
function operatorCompare(a, b) {
|
||||
if (a === b) {
|
||||
return 0;
|
||||
} else if (a < b) {
|
||||
return -1;
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
function detectRootIsStd() {
|
||||
let rootPkg = zigAnalysis.packages[zigAnalysis.rootPkg];
|
||||
if (rootPkg.table["std"] == null) {
|
||||
// no std mapped into the root package
|
||||
return false;
|
||||
}
|
||||
let stdPkg = zigAnalysis.packages[rootPkg.table["std"]];
|
||||
if (stdPkg == null) return false;
|
||||
return rootPkg.file === stdPkg.file;
|
||||
}
|
||||
|
||||
function indexTypeKinds() {
|
||||
let map = ({});
|
||||
for (let i = 0; i < zigAnalysis.typeKinds.length; i += 1) {
|
||||
map[zigAnalysis.typeKinds[i]] = i;
|
||||
}
|
||||
// This is just for debugging purposes, not needed to function
|
||||
let assertList = ["Type","Void","Bool","NoReturn","Int","Float","Pointer","Array","Struct",
|
||||
"ComptimeFloat","ComptimeInt","Undefined","Null","Optional","ErrorUnion","ErrorSet","Enum",
|
||||
"Union","Fn","BoundFn","Opaque","Frame","AnyFrame","Vector","EnumLiteral"];
|
||||
for (let i = 0; i < assertList.length; i += 1) {
|
||||
if (map[assertList[i]] == null) throw new Error("No type kind '" + assertList[i] + "' found");
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
function findTypeTypeId() {
|
||||
for (let i = 0; i < zigAnalysis.types.length; i += 1) {
|
||||
if (zigAnalysis.types[i].kind == typeKinds.Type) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
throw new Error("No type 'type' found");
|
||||
}
|
||||
|
||||
function updateCurNav() {
|
||||
|
||||
curNav = {
|
||||
showPrivDecls: false,
|
||||
pkgNames: [],
|
||||
pkgObjs: [],
|
||||
declNames: [],
|
||||
declObjs: [],
|
||||
callName: null,
|
||||
};
|
||||
curNavSearch = "";
|
||||
|
||||
if (location.hash[0] === '#' && location.hash.length > 1) {
|
||||
let query = location.hash.substring(1);
|
||||
if (query[0] === '*') {
|
||||
curNav.showPrivDecls = true;
|
||||
query = query.substring(1);
|
||||
}
|
||||
|
||||
let qpos = query.indexOf("?");
|
||||
let nonSearchPart;
|
||||
if (qpos === -1) {
|
||||
nonSearchPart = query;
|
||||
if (decl.kind === "const") {
|
||||
if ("type" in declValue.expr) {
|
||||
// We have the actual type expression at hand.
|
||||
const typeExpr = zigAnalysis.types[declValue.expr.type];
|
||||
if (typeExpr.kind == typeKinds.Fn) {
|
||||
const funcRetExpr = resolveValue({
|
||||
expr: typeExpr.ret,
|
||||
});
|
||||
if (
|
||||
"type" in funcRetExpr.expr &&
|
||||
funcRetExpr.expr.type == typeTypeId
|
||||
) {
|
||||
if (typeIsErrSet(declValue.expr.type)) {
|
||||
errSetsList.push(decl);
|
||||
} else if (typeIsStructWithNoFields(declValue.expr.type)) {
|
||||
namespacesList.push(decl);
|
||||
} else {
|
||||
typesList.push(decl);
|
||||
}
|
||||
} else {
|
||||
nonSearchPart = query.substring(0, qpos);
|
||||
curNavSearch = decodeURIComponent(query.substring(qpos + 1));
|
||||
fnsList.push(decl);
|
||||
}
|
||||
|
||||
let parts = nonSearchPart.split(";");
|
||||
curNav.pkgNames = decodeURIComponent(parts[0]).split(".");
|
||||
if (parts[1] != null) {
|
||||
curNav.declNames = decodeURIComponent(parts[1]).split(".");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function onHashChange() {
|
||||
updateCurNav();
|
||||
if (domSearch.value !== curNavSearch) {
|
||||
domSearch.value = curNavSearch;
|
||||
}
|
||||
render();
|
||||
if (imFeelingLucky) {
|
||||
imFeelingLucky = false;
|
||||
activateSelectedResult();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function findSubDecl(parentType, childName) {
|
||||
{
|
||||
// Generic functions
|
||||
if ("value" in parentType) {
|
||||
const rv = resolveValue(parentType.value);
|
||||
if ("type" in rv.expr) {
|
||||
const t = zigAnalysis.types[rv.expr.type];
|
||||
if (t.kind == typeKinds.Fn && t.generic_ret != null) {
|
||||
const rgr = resolveValue({expr: t.generic_ret});
|
||||
if ("type" in rgr.expr) {
|
||||
parentType = zigAnalysis.types[rgr.expr.type];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (!parentType.pubDecls) return null;
|
||||
for (let i = 0; i < parentType.pubDecls.length; i += 1) {
|
||||
let declIndex = parentType.pubDecls[i];
|
||||
let childDecl = zigAnalysis.decls[declIndex];
|
||||
if (childDecl.name === childName) {
|
||||
return childDecl;
|
||||
}
|
||||
}
|
||||
if (!parentType.privDecls) return null;
|
||||
for (let i = 0; i < parentType.privDecls.length; i += 1) {
|
||||
let declIndex = parentType.privDecls[i];
|
||||
let childDecl = zigAnalysis.decls[declIndex];
|
||||
if (childDecl.name === childName) {
|
||||
return childDecl;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
function computeCanonicalPackagePaths() {
|
||||
let list = new Array(zigAnalysis.packages.length);
|
||||
// Now we try to find all the packages from root.
|
||||
let rootPkg = zigAnalysis.packages[zigAnalysis.rootPkg];
|
||||
// Breadth-first to keep the path shortest possible.
|
||||
let stack = [{
|
||||
path: ([]),
|
||||
pkg: rootPkg,
|
||||
}];
|
||||
while (stack.length !== 0) {
|
||||
let item = (stack.shift());
|
||||
for (let key in item.pkg.table) {
|
||||
let childPkgIndex = item.pkg.table[key];
|
||||
if (list[childPkgIndex] != null) continue;
|
||||
let childPkg = zigAnalysis.packages[childPkgIndex];
|
||||
if (childPkg == null) continue;
|
||||
|
||||
let newPath = item.path.concat([key])
|
||||
list[childPkgIndex] = newPath;
|
||||
stack.push({
|
||||
path: newPath,
|
||||
pkg: childPkg,
|
||||
});
|
||||
}
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
|
||||
|
||||
function computeCanonDeclPaths() {
|
||||
let list = new Array(zigAnalysis.decls.length);
|
||||
canonTypeDecls = new Array(zigAnalysis.types.length);
|
||||
|
||||
for (let pkgI = 0; pkgI < zigAnalysis.packages.length; pkgI += 1) {
|
||||
if (pkgI === zigAnalysis.rootPkg && rootIsStd) continue;
|
||||
let pkg = zigAnalysis.packages[pkgI];
|
||||
let pkgNames = canonPkgPaths[pkgI];
|
||||
if (pkgNames === undefined) continue;
|
||||
|
||||
let stack = [{
|
||||
declNames: ([]),
|
||||
type: zigAnalysis.types[pkg.main],
|
||||
}];
|
||||
while (stack.length !== 0) {
|
||||
let item = (stack.shift());
|
||||
|
||||
if (isContainerType(item.type)) {
|
||||
let t = (item.type);
|
||||
|
||||
let len = t.pubDecls ? t.pubDecls.length : 0;
|
||||
for (let declI = 0; declI < len; declI += 1) {
|
||||
let mainDeclIndex = t.pubDecls[declI];
|
||||
if (list[mainDeclIndex] != null) continue;
|
||||
|
||||
let decl = zigAnalysis.decls[mainDeclIndex];
|
||||
let declVal = resolveValue(decl.value);
|
||||
let declNames = item.declNames.concat([decl.name]);
|
||||
list[mainDeclIndex] = {
|
||||
pkgNames: pkgNames,
|
||||
declNames: declNames,
|
||||
};
|
||||
if ("type" in declVal.expr) {
|
||||
let value = zigAnalysis.types[declVal.expr.type];
|
||||
if (declCanRepresentTypeKind(value.kind))
|
||||
{
|
||||
canonTypeDecls[declVal.type] = mainDeclIndex;
|
||||
}
|
||||
|
||||
if (isContainerType(value)) {
|
||||
stack.push({
|
||||
declNames: declNames,
|
||||
type:value,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
// Generic fun/ction
|
||||
if (value.kind == typeKinds.Fn && value.generic_ret != null) {
|
||||
let resolvedVal = resolveValue({ expr: value.generic_ret});
|
||||
if ("type" in resolvedVal.expr) {
|
||||
let generic_type = zigAnalysis.types[resolvedVal.expr.type];
|
||||
if (isContainerType(generic_type)){
|
||||
stack.push({
|
||||
declNames: declNames,
|
||||
type: generic_type,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
|
||||
function getCanonDeclPath(index) {
|
||||
if (canonDeclPaths == null) {
|
||||
canonDeclPaths = computeCanonDeclPaths();
|
||||
}
|
||||
//let cd = (canonDeclPaths);
|
||||
return canonDeclPaths[index];
|
||||
}
|
||||
|
||||
|
||||
function getCanonTypeDecl(index) {
|
||||
getCanonDeclPath(0);
|
||||
//let ct = (canonTypeDecls);
|
||||
return canonTypeDecls[index];
|
||||
}
|
||||
|
||||
|
||||
function escapeHtml(text) {
|
||||
return text.replace(/[&"<>]/g, function (m) {
|
||||
return escapeHtmlReplacements[m];
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
function shortDescMarkdown(docs) {
|
||||
const trimmed_docs = docs.trim();
|
||||
let index = trimmed_docs.indexOf('.');
|
||||
if (index < 0) {
|
||||
index = trimmed_docs.indexOf('\n');
|
||||
if (index < 0) {
|
||||
index = trimmed_docs.length;
|
||||
} else {
|
||||
if (typeIsErrSet(declValue.expr.type)) {
|
||||
errSetsList.push(decl);
|
||||
} else if (typeIsStructWithNoFields(declValue.expr.type)) {
|
||||
namespacesList.push(decl);
|
||||
} else {
|
||||
typesList.push(decl);
|
||||
}
|
||||
}
|
||||
} else if ("typeRef" in declValue) {
|
||||
if ("type" in declValue.typeRef && declValue.typeRef == typeTypeId) {
|
||||
// We don't know what the type expression is, but we know it's a type.
|
||||
typesList.push(decl);
|
||||
} else {
|
||||
valsList.push(decl);
|
||||
}
|
||||
} else {
|
||||
index += 1; // include the period
|
||||
valsList.push(decl);
|
||||
}
|
||||
const slice = trimmed_docs.slice(0, index);
|
||||
return markdown(slice);
|
||||
}
|
||||
}
|
||||
}
|
||||
function renderSourceFileLink(decl) {
|
||||
let srcNode = zigAnalysis.astNodes[decl.src];
|
||||
|
||||
return "<a style=\"float: right;\" href=\"" +
|
||||
sourceFileUrlTemplate.replace("{{file}}",
|
||||
zigAnalysis.files[srcNode.file]).replace("{{line}}", srcNode.line) + "\">[src]</a>";
|
||||
}
|
||||
|
||||
function renderContainer(container) {
|
||||
let typesList = [];
|
||||
|
||||
let namespacesList = [];
|
||||
|
||||
let errSetsList = [];
|
||||
|
||||
let fnsList = [];
|
||||
|
||||
let varsList = [];
|
||||
|
||||
let valsList = [];
|
||||
|
||||
let testsList = [];
|
||||
|
||||
categorizeDecls(
|
||||
container.pubDecls,
|
||||
typesList,
|
||||
namespacesList,
|
||||
errSetsList,
|
||||
fnsList,
|
||||
varsList,
|
||||
valsList,
|
||||
testsList
|
||||
);
|
||||
if (curNav.showPrivDecls)
|
||||
categorizeDecls(
|
||||
container.privDecls,
|
||||
typesList,
|
||||
namespacesList,
|
||||
errSetsList,
|
||||
fnsList,
|
||||
varsList,
|
||||
valsList,
|
||||
testsList
|
||||
);
|
||||
|
||||
typesList.sort(byNameProperty);
|
||||
namespacesList.sort(byNameProperty);
|
||||
errSetsList.sort(byNameProperty);
|
||||
fnsList.sort(byNameProperty);
|
||||
varsList.sort(byNameProperty);
|
||||
valsList.sort(byNameProperty);
|
||||
testsList.sort(byNameProperty);
|
||||
|
||||
if (container.src != null) {
|
||||
let docs = zigAnalysis.astNodes[container.src].docs;
|
||||
if (docs != null) {
|
||||
domTldDocs.innerHTML = markdown(docs);
|
||||
domTldDocs.classList.remove("hidden");
|
||||
}
|
||||
}
|
||||
|
||||
if (typesList.length !== 0) {
|
||||
window.x = typesList;
|
||||
resizeDomList(
|
||||
domListTypes,
|
||||
typesList.length,
|
||||
'<li><a href="#"></a></li>'
|
||||
);
|
||||
for (let i = 0; i < typesList.length; i += 1) {
|
||||
let liDom = domListTypes.children[i];
|
||||
let aDom = liDom.children[0];
|
||||
let decl = typesList[i];
|
||||
aDom.textContent = decl.name;
|
||||
aDom.setAttribute("href", navLinkDecl(decl.name));
|
||||
}
|
||||
domSectTypes.classList.remove("hidden");
|
||||
}
|
||||
if (namespacesList.length !== 0) {
|
||||
resizeDomList(
|
||||
domListNamespaces,
|
||||
namespacesList.length,
|
||||
'<li><a href="#"></a></li>'
|
||||
);
|
||||
for (let i = 0; i < namespacesList.length; i += 1) {
|
||||
let liDom = domListNamespaces.children[i];
|
||||
let aDom = liDom.children[0];
|
||||
let decl = namespacesList[i];
|
||||
aDom.textContent = decl.name;
|
||||
aDom.setAttribute("href", navLinkDecl(decl.name));
|
||||
}
|
||||
domSectNamespaces.classList.remove("hidden");
|
||||
}
|
||||
|
||||
function markdown(input) {
|
||||
const raw_lines = input.split('\n'); // zig allows no '\r', so we don't need to split on CR
|
||||
|
||||
const lines = [];
|
||||
if (errSetsList.length !== 0) {
|
||||
resizeDomList(
|
||||
domListErrSets,
|
||||
errSetsList.length,
|
||||
'<li><a href="#"></a></li>'
|
||||
);
|
||||
for (let i = 0; i < errSetsList.length; i += 1) {
|
||||
let liDom = domListErrSets.children[i];
|
||||
let aDom = liDom.children[0];
|
||||
let decl = errSetsList[i];
|
||||
aDom.textContent = decl.name;
|
||||
aDom.setAttribute("href", navLinkDecl(decl.name));
|
||||
}
|
||||
domSectErrSets.classList.remove("hidden");
|
||||
}
|
||||
|
||||
// PHASE 1:
|
||||
// Dissect lines and determine the type for each line.
|
||||
// Also computes indentation level and removes unnecessary whitespace
|
||||
if (fnsList.length !== 0) {
|
||||
resizeDomList(
|
||||
domListFns,
|
||||
fnsList.length,
|
||||
"<div><dt></dt><dd></dd></div>"
|
||||
);
|
||||
|
||||
let is_reading_code = false;
|
||||
let code_indent = 0;
|
||||
for (let line_no = 0; line_no < raw_lines.length; line_no++) {
|
||||
const raw_line = raw_lines[line_no];
|
||||
for (let i = 0; i < fnsList.length; i += 1) {
|
||||
let decl = fnsList[i];
|
||||
let trDom = domListFns.children[i];
|
||||
|
||||
const line = {
|
||||
indent: 0,
|
||||
raw_text: raw_line,
|
||||
text: raw_line.trim(),
|
||||
type: "p", // p, h1 … h6, code, ul, ol, blockquote, skip, empty
|
||||
ordered_number: -1, // NOTE: hack to make the type checker happy
|
||||
};
|
||||
let tdFnCode = trDom.children[0];
|
||||
let tdDesc = trDom.children[1];
|
||||
|
||||
if (!is_reading_code) {
|
||||
while ((line.indent < line.raw_text.length) && line.raw_text[line.indent] == ' ') {
|
||||
line.indent += 1;
|
||||
}
|
||||
let declType = resolveValue(decl.value);
|
||||
console.assert("type" in declType.expr);
|
||||
tdFnCode.innerHTML = exprName(declType.expr, {
|
||||
wantHtml: true,
|
||||
wantLink: true,
|
||||
fnDecl: decl,
|
||||
linkFnNameDecl: navLinkDecl(decl.name),
|
||||
}) + renderSourceFileLink(decl);
|
||||
|
||||
if (line.text.startsWith("######")) {
|
||||
line.type = "h6";
|
||||
line.text = line.text.substr(6);
|
||||
}
|
||||
else if (line.text.startsWith("#####")) {
|
||||
line.type = "h5";
|
||||
line.text = line.text.substr(5);
|
||||
}
|
||||
else if (line.text.startsWith("####")) {
|
||||
line.type = "h4";
|
||||
line.text = line.text.substr(4);
|
||||
}
|
||||
else if (line.text.startsWith("###")) {
|
||||
line.type = "h3";
|
||||
line.text = line.text.substr(3);
|
||||
}
|
||||
else if (line.text.startsWith("##")) {
|
||||
line.type = "h2";
|
||||
line.text = line.text.substr(2);
|
||||
}
|
||||
else if (line.text.startsWith("#")) {
|
||||
line.type = "h1";
|
||||
line.text = line.text.substr(1);
|
||||
}
|
||||
else if (line.text.startsWith("-")) {
|
||||
line.type = "ul";
|
||||
line.text = line.text.substr(1);
|
||||
}
|
||||
else if (line.text.match(/^\d+\..*$/)) { // if line starts with {number}{dot}
|
||||
const match = (line.text.match(/(\d+)\./));
|
||||
line.type = "ul";
|
||||
line.text = line.text.substr(match[0].length);
|
||||
line.ordered_number = Number(match[1].length);
|
||||
}
|
||||
else if (line.text == "```") {
|
||||
line.type = "skip";
|
||||
is_reading_code = true;
|
||||
code_indent = line.indent;
|
||||
}
|
||||
else if (line.text == "") {
|
||||
line.type = "empty";
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (line.text == "```") {
|
||||
is_reading_code = false;
|
||||
line.type = "skip";
|
||||
} else {
|
||||
line.type = "code";
|
||||
line.text = line.raw_text.substr(code_indent); // remove the indent of the ``` from all the code block
|
||||
}
|
||||
}
|
||||
let docs = zigAnalysis.astNodes[decl.src].docs;
|
||||
if (docs != null) {
|
||||
tdDesc.innerHTML = shortDescMarkdown(docs);
|
||||
} else {
|
||||
tdDesc.textContent = "";
|
||||
}
|
||||
}
|
||||
domSectFns.classList.remove("hidden");
|
||||
}
|
||||
|
||||
if (line.type != "skip") {
|
||||
lines.push(line);
|
||||
}
|
||||
let containerNode = zigAnalysis.astNodes[container.src];
|
||||
if (containerNode.fields && containerNode.fields.length > 0) {
|
||||
resizeDomList(domListFields, containerNode.fields.length, "<div></div>");
|
||||
|
||||
for (let i = 0; i < containerNode.fields.length; i += 1) {
|
||||
let fieldNode = zigAnalysis.astNodes[containerNode.fields[i]];
|
||||
let divDom = domListFields.children[i];
|
||||
let fieldName = fieldNode.name;
|
||||
let docs = fieldNode.docs;
|
||||
let docsNonEmpty = docs != null && docs !== "";
|
||||
let extraPreClass = docsNonEmpty ? " fieldHasDocs" : "";
|
||||
|
||||
let html =
|
||||
'<div class="mobile-scroll-container"><pre class="scroll-item' +
|
||||
extraPreClass +
|
||||
'">' +
|
||||
escapeHtml(fieldName);
|
||||
|
||||
if (container.kind === typeKinds.Enum) {
|
||||
html += ' = <span class="tok-number">' + fieldName + "</span>";
|
||||
} else {
|
||||
let fieldTypeExpr = container.fields[i];
|
||||
html += ": ";
|
||||
let name = exprName(fieldTypeExpr, false, false);
|
||||
html += '<span class="tok-kw">' + name + "</span>";
|
||||
let tsn = typeShorthandName(fieldTypeExpr);
|
||||
if (tsn) {
|
||||
html += "<span> (" + tsn + ")</span>";
|
||||
}
|
||||
}
|
||||
|
||||
// PHASE 2:
|
||||
// Render HTML from markdown lines.
|
||||
// Look at each line and emit fitting HTML code
|
||||
html += ",</pre></div>";
|
||||
|
||||
|
||||
function markdownInlines(innerText) {
|
||||
if (docsNonEmpty) {
|
||||
html += '<div class="fieldDocs">' + markdown(docs) + "</div>";
|
||||
}
|
||||
divDom.innerHTML = html;
|
||||
}
|
||||
domSectFields.classList.remove("hidden");
|
||||
}
|
||||
|
||||
// inline types:
|
||||
// **{INLINE}** : <strong>
|
||||
// __{INLINE}__ : <u>
|
||||
// ~~{INLINE}~~ : <s>
|
||||
// *{INLINE}* : <emph>
|
||||
// _{INLINE}_ : <emph>
|
||||
// `{TEXT}` : <code>
|
||||
// [{INLINE}]({URL}) : <a>
|
||||
//  : <img>
|
||||
// [[std;format.fmt]] : <a> (inner link)
|
||||
if (varsList.length !== 0) {
|
||||
resizeDomList(
|
||||
domListGlobalVars,
|
||||
varsList.length,
|
||||
'<tr><td><a href="#"></a></td><td></td><td></td></tr>'
|
||||
);
|
||||
for (let i = 0; i < varsList.length; i += 1) {
|
||||
let decl = varsList[i];
|
||||
let trDom = domListGlobalVars.children[i];
|
||||
|
||||
|
||||
|
||||
const formats = [
|
||||
{
|
||||
marker: "**",
|
||||
tag: "strong",
|
||||
},
|
||||
{
|
||||
marker: "~~",
|
||||
tag: "s",
|
||||
},
|
||||
{
|
||||
marker: "__",
|
||||
tag: "u",
|
||||
},
|
||||
{
|
||||
marker: "*",
|
||||
tag: "em",
|
||||
}
|
||||
];
|
||||
let tdName = trDom.children[0];
|
||||
let tdNameA = tdName.children[0];
|
||||
let tdType = trDom.children[1];
|
||||
let tdDesc = trDom.children[2];
|
||||
|
||||
|
||||
const stack = [];
|
||||
tdNameA.setAttribute("href", navLinkDecl(decl.name));
|
||||
tdNameA.textContent = decl.name;
|
||||
|
||||
let innerHTML = "";
|
||||
let currentRun = "";
|
||||
tdType.innerHTML = typeValueName(typeOfDecl(decl), true, true);
|
||||
|
||||
function flushRun() {
|
||||
if (currentRun != "") {
|
||||
innerHTML += escapeHtml(currentRun);
|
||||
}
|
||||
currentRun = "";
|
||||
let docs = zigAnalysis.astNodes[decl.src].docs;
|
||||
if (docs != null) {
|
||||
tdDesc.innerHTML = shortDescMarkdown(docs);
|
||||
} else {
|
||||
tdDesc.textContent = "";
|
||||
}
|
||||
}
|
||||
domSectGlobalVars.classList.remove("hidden");
|
||||
}
|
||||
|
||||
if (valsList.length !== 0) {
|
||||
resizeDomList(
|
||||
domListValues,
|
||||
valsList.length,
|
||||
'<tr><td><a href="#"></a></td><td></td><td></td></tr>'
|
||||
);
|
||||
for (let i = 0; i < valsList.length; i += 1) {
|
||||
let decl = valsList[i];
|
||||
let trDom = domListValues.children[i];
|
||||
|
||||
let tdName = trDom.children[0];
|
||||
let tdNameA = tdName.children[0];
|
||||
let tdType = trDom.children[1];
|
||||
let tdDesc = trDom.children[2];
|
||||
|
||||
tdNameA.setAttribute("href", navLinkDecl(decl.name));
|
||||
tdNameA.textContent = decl.name;
|
||||
|
||||
tdType.innerHTML = exprName(walkResultTypeRef(decl.value), {
|
||||
wantHtml: true,
|
||||
wantLink: true,
|
||||
});
|
||||
|
||||
let docs = zigAnalysis.astNodes[decl.src].docs;
|
||||
if (docs != null) {
|
||||
tdDesc.innerHTML = shortDescMarkdown(docs);
|
||||
} else {
|
||||
tdDesc.textContent = "";
|
||||
}
|
||||
}
|
||||
domSectValues.classList.remove("hidden");
|
||||
}
|
||||
|
||||
if (testsList.length !== 0) {
|
||||
resizeDomList(
|
||||
domListTests,
|
||||
testsList.length,
|
||||
'<tr><td><a href="#"></a></td><td></td><td></td></tr>'
|
||||
);
|
||||
for (let i = 0; i < testsList.length; i += 1) {
|
||||
let decl = testsList[i];
|
||||
let trDom = domListTests.children[i];
|
||||
|
||||
let tdName = trDom.children[0];
|
||||
let tdNameA = tdName.children[0];
|
||||
let tdType = trDom.children[1];
|
||||
let tdDesc = trDom.children[2];
|
||||
|
||||
tdNameA.setAttribute("href", navLinkDecl(decl.name));
|
||||
tdNameA.textContent = decl.name;
|
||||
|
||||
tdType.innerHTML = exprName(walkResultTypeRef(decl.value), {
|
||||
wantHtml: true,
|
||||
wantLink: true,
|
||||
});
|
||||
|
||||
let docs = zigAnalysis.astNodes[decl.src].docs;
|
||||
if (docs != null) {
|
||||
tdDesc.innerHTML = shortDescMarkdown(docs);
|
||||
} else {
|
||||
tdDesc.textContent = "";
|
||||
}
|
||||
}
|
||||
domSectTests.classList.remove("hidden");
|
||||
}
|
||||
}
|
||||
|
||||
function operatorCompare(a, b) {
|
||||
if (a === b) {
|
||||
return 0;
|
||||
} else if (a < b) {
|
||||
return -1;
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
function detectRootIsStd() {
|
||||
let rootPkg = zigAnalysis.packages[zigAnalysis.rootPkg];
|
||||
if (rootPkg.table["std"] == null) {
|
||||
// no std mapped into the root package
|
||||
return false;
|
||||
}
|
||||
let stdPkg = zigAnalysis.packages[rootPkg.table["std"]];
|
||||
if (stdPkg == null) return false;
|
||||
return rootPkg.file === stdPkg.file;
|
||||
}
|
||||
|
||||
function indexTypeKinds() {
|
||||
let map = {};
|
||||
for (let i = 0; i < zigAnalysis.typeKinds.length; i += 1) {
|
||||
map[zigAnalysis.typeKinds[i]] = i;
|
||||
}
|
||||
// This is just for debugging purposes, not needed to function
|
||||
let assertList = [
|
||||
"Type",
|
||||
"Void",
|
||||
"Bool",
|
||||
"NoReturn",
|
||||
"Int",
|
||||
"Float",
|
||||
"Pointer",
|
||||
"Array",
|
||||
"Struct",
|
||||
"ComptimeFloat",
|
||||
"ComptimeInt",
|
||||
"Undefined",
|
||||
"Null",
|
||||
"Optional",
|
||||
"ErrorUnion",
|
||||
"ErrorSet",
|
||||
"Enum",
|
||||
"Union",
|
||||
"Fn",
|
||||
"BoundFn",
|
||||
"Opaque",
|
||||
"Frame",
|
||||
"AnyFrame",
|
||||
"Vector",
|
||||
"EnumLiteral",
|
||||
];
|
||||
for (let i = 0; i < assertList.length; i += 1) {
|
||||
if (map[assertList[i]] == null)
|
||||
throw new Error("No type kind '" + assertList[i] + "' found");
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
function findTypeTypeId() {
|
||||
for (let i = 0; i < zigAnalysis.types.length; i += 1) {
|
||||
if (zigAnalysis.types[i].kind == typeKinds.Type) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
throw new Error("No type 'type' found");
|
||||
}
|
||||
|
||||
function updateCurNav() {
  // Reset navigation state, then re-derive it from the URL hash.
  // Hash grammar (as parsed below): #[*]pkg.path[;decl.path][?search]
  //   leading "*"  -> show private decls
  //   "?..."       -> search query (percent-encoded)
  curNav = {
    showPrivDecls: false,
    pkgNames: [],
    pkgObjs: [],
    declNames: [],
    declObjs: [],
    callName: null,
  };
  curNavSearch = "";

  if (location.hash[0] !== "#" || location.hash.length <= 1) return;

  let query = location.hash.substring(1);
  if (query[0] === "*") {
    curNav.showPrivDecls = true;
    query = query.substring(1);
  }

  const qpos = query.indexOf("?");
  let nonSearchPart;
  if (qpos === -1) {
    nonSearchPart = query;
  } else {
    nonSearchPart = query.substring(0, qpos);
    curNavSearch = decodeURIComponent(query.substring(qpos + 1));
  }

  const parts = nonSearchPart.split(";");
  curNav.pkgNames = decodeURIComponent(parts[0]).split(".");
  if (parts[1] != null) {
    curNav.declNames = decodeURIComponent(parts[1]).split(".");
  }
}
|
||||
|
||||
function onHashChange() {
  updateCurNav();
  // Keep the search box in sync with the search component of the hash,
  // showing the placeholder only when the box is empty.
  if (domSearch.value !== curNavSearch) {
    domSearch.value = curNavSearch;
    domSearchPlaceholder.classList.toggle("hidden", domSearch.value.length != 0);
  }
  render();
  // "I'm feeling lucky" is set by onSearchKeyDown when Enter changed the
  // hash; consume it once by activating the selected result.
  if (imFeelingLucky) {
    imFeelingLucky = false;
    activateSelectedResult();
  }
}
|
||||
|
||||
// Find the declaration named childName inside parentType, searching public
// declarations first, then private ones. Returns the decl object or null.
function findSubDecl(parentType, childName) {
  {
    // Generic functions: when the parent is a function value whose return
    // type is generic, resolve that return type and search inside it instead.
    if ("value" in parentType) {
      const rv = resolveValue(parentType.value);
      if ("type" in rv.expr) {
        const t = zigAnalysis.types[rv.expr.type];
        if (t.kind == typeKinds.Fn && t.generic_ret != null) {
          const rgr = resolveValue({ expr: t.generic_ret });
          if ("type" in rgr.expr) {
            parentType = zigAnalysis.types[rgr.expr.type];
          }
        }
      }
    }
  }

  // NOTE(review): removed unused locals (parsing_code / codetag / in_code)
  // that appear to be merge residue leaked from the markdown inline parser;
  // nothing in this function read them.

  if (!parentType.pubDecls) return null;
  for (let i = 0; i < parentType.pubDecls.length; i += 1) {
    const declIndex = parentType.pubDecls[i];
    const childDecl = zigAnalysis.decls[declIndex];
    if (childDecl.name === childName) {
      return childDecl;
    }
  }

  if (!parentType.privDecls) return null;
  for (let i = 0; i < parentType.privDecls.length; i += 1) {
    const declIndex = parentType.privDecls[i];
    const childDecl = zigAnalysis.decls[declIndex];
    if (childDecl.name === childName) {
      return childDecl;
    }
  }
  return null;
}
|
||||
|
||||
for (let i = 0; i < innerText.length; i++) {
|
||||
function computeCanonicalPackagePaths() {
|
||||
let list = new Array(zigAnalysis.packages.length);
|
||||
// Now we try to find all the packages from root.
|
||||
let rootPkg = zigAnalysis.packages[zigAnalysis.rootPkg];
|
||||
// Breadth-first to keep the path shortest possible.
|
||||
let stack = [
|
||||
{
|
||||
path: [],
|
||||
pkg: rootPkg,
|
||||
},
|
||||
];
|
||||
while (stack.length !== 0) {
|
||||
let item = stack.shift();
|
||||
for (let key in item.pkg.table) {
|
||||
let childPkgIndex = item.pkg.table[key];
|
||||
if (list[childPkgIndex] != null) continue;
|
||||
let childPkg = zigAnalysis.packages[childPkgIndex];
|
||||
if (childPkg == null) continue;
|
||||
|
||||
if (parsing_code && in_code) {
|
||||
if (innerText.substr(i, codetag.length) == codetag) {
|
||||
// remove leading and trailing whitespace if string both starts and ends with one.
|
||||
if (currentRun[0] == " " && currentRun[currentRun.length - 1] == " ") {
|
||||
currentRun = currentRun.substr(1, currentRun.length - 2);
|
||||
}
|
||||
flushRun();
|
||||
i += codetag.length - 1;
|
||||
in_code = false;
|
||||
parsing_code = false;
|
||||
innerHTML += "</code>";
|
||||
codetag = "";
|
||||
} else {
|
||||
currentRun += innerText[i];
|
||||
}
|
||||
continue;
|
||||
let newPath = item.path.concat([key]);
|
||||
list[childPkgIndex] = newPath;
|
||||
stack.push({
|
||||
path: newPath,
|
||||
pkg: childPkg,
|
||||
});
|
||||
}
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
// Compute, for every declaration index, its canonical navigation path
// ({pkgNames, declNames}); also populates the canonTypeDecls side table
// (type index -> decl index). Breadth-first so each decl keeps the shortest
// path at which it is first encountered.
function computeCanonDeclPaths() {
  let list = new Array(zigAnalysis.decls.length);
  canonTypeDecls = new Array(zigAnalysis.types.length);

  for (let pkgI = 0; pkgI < zigAnalysis.packages.length; pkgI += 1) {
    // When root IS std, skip the root package so paths resolve through std.
    if (pkgI === zigAnalysis.rootPkg && rootIsStd) continue;
    let pkg = zigAnalysis.packages[pkgI];
    let pkgNames = canonPkgPaths[pkgI];
    if (pkgNames === undefined) continue;

    let stack = [
      {
        declNames: [],
        type: zigAnalysis.types[pkg.main],
      },
    ];
    while (stack.length !== 0) {
      let item = stack.shift();

      if (isContainerType(item.type)) {
        let t = item.type;

        let len = t.pubDecls ? t.pubDecls.length : 0;
        for (let declI = 0; declI < len; declI += 1) {
          let mainDeclIndex = t.pubDecls[declI];
          if (list[mainDeclIndex] != null) continue;

          let decl = zigAnalysis.decls[mainDeclIndex];
          let declVal = resolveValue(decl.value);
          let declNames = item.declNames.concat([decl.name]);
          list[mainDeclIndex] = {
            pkgNames: pkgNames,
            declNames: declNames,
          };
          if ("type" in declVal.expr) {
            let value = zigAnalysis.types[declVal.expr.type];
            if (declCanRepresentTypeKind(value.kind)) {
              // BUGFIX(review): was `canonTypeDecls[declVal.type]`, but the
              // resolved value carries its type index at `declVal.expr.type`
              // (the same index used for the lookup two lines above);
              // `declVal.type` is not set anywhere in this path.
              canonTypeDecls[declVal.expr.type] = mainDeclIndex;
            }

            if (isContainerType(value)) {
              stack.push({
                declNames: declNames,
                type: value,
              });
            }

            // Generic function: also descend into its resolved return type.
            if (value.kind == typeKinds.Fn && value.generic_ret != null) {
              let resolvedVal = resolveValue({ expr: value.generic_ret });
              if ("type" in resolvedVal.expr) {
                let generic_type = zigAnalysis.types[resolvedVal.expr.type];
                if (isContainerType(generic_type)) {
                  stack.push({
                    declNames: declNames,
                    type: generic_type,
                  });
                }
              }
            }
          }
        }
      }
    }
  }
  return list;
}
|
||||
|
||||
if (innerText[i] == "`") {
|
||||
flushRun();
|
||||
if (!parsing_code) {
|
||||
innerHTML += "<code>";
|
||||
}
|
||||
parsing_code = true;
|
||||
codetag += "`";
|
||||
continue;
|
||||
}
|
||||
function getCanonDeclPath(index) {
  // Lazily compute and cache all canonical decl paths on first use.
  if (canonDeclPaths == null) {
    canonDeclPaths = computeCanonDeclPaths();
  }
  return canonDeclPaths[index];
}
|
||||
|
||||
if (parsing_code) {
|
||||
currentRun += innerText[i];
|
||||
in_code = true;
|
||||
} else {
|
||||
let any = false;
|
||||
for (let idx = (stack.length > 0 ? -1 : 0); idx < formats.length; idx++) {
|
||||
const fmt = idx >= 0 ? formats[idx] : stack[stack.length - 1];
|
||||
if (innerText.substr(i, fmt.marker.length) == fmt.marker) {
|
||||
flushRun();
|
||||
if (stack[stack.length - 1] == fmt) {
|
||||
stack.pop();
|
||||
innerHTML += "</" + fmt.tag + ">";
|
||||
} else {
|
||||
stack.push(fmt);
|
||||
innerHTML += "<" + fmt.tag + ">";
|
||||
}
|
||||
i += fmt.marker.length - 1;
|
||||
any = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!any) {
|
||||
currentRun += innerText[i];
|
||||
}
|
||||
}
|
||||
function getCanonTypeDecl(index) {
  // canonTypeDecls is filled as a side effect of computing decl paths, so
  // force that computation before reading the table.
  getCanonDeclPath(0);
  return canonTypeDecls[index];
}
|
||||
|
||||
function escapeHtml(text) {
  // Replace &, ", < and > via the shared replacement table.
  return text.replace(/[&"<>]/g, (ch) => escapeHtmlReplacements[ch]);
}
|
||||
|
||||
function shortDescMarkdown(docs) {
  // Render a short (<= 80 chars or first paragraph) markdown preview of a
  // doc comment, appending "..." when the text was truncated.
  const trimmed = docs.trim();
  let cutoff = trimmed.indexOf("\n\n");
  let truncated = false;

  // No paragraph break (or one past the limit): cut at 80 chars if needed.
  if (cutoff < 0 || cutoff > 80) {
    if (trimmed.length > 80) {
      cutoff = 80;
      truncated = true;
    } else {
      cutoff = trimmed.length;
    }
  }

  let preview = trimmed.slice(0, cutoff);
  if (truncated) preview += "...";
  return markdown(preview);
}
|
||||
|
||||
function markdown(input) {
|
||||
const raw_lines = input.split("\n"); // zig allows no '\r', so we don't need to split on CR
|
||||
|
||||
const lines = [];
|
||||
|
||||
// PHASE 1:
|
||||
// Dissect lines and determine the type for each line.
|
||||
// Also computes indentation level and removes unnecessary whitespace
|
||||
|
||||
let is_reading_code = false;
|
||||
let code_indent = 0;
|
||||
for (let line_no = 0; line_no < raw_lines.length; line_no++) {
|
||||
const raw_line = raw_lines[line_no];
|
||||
|
||||
const line = {
|
||||
indent: 0,
|
||||
raw_text: raw_line,
|
||||
text: raw_line.trim(),
|
||||
type: "p", // p, h1 … h6, code, ul, ol, blockquote, skip, empty
|
||||
ordered_number: -1, // NOTE: hack to make the type checker happy
|
||||
};
|
||||
|
||||
if (!is_reading_code) {
|
||||
while (
|
||||
line.indent < line.raw_text.length &&
|
||||
line.raw_text[line.indent] == " "
|
||||
) {
|
||||
line.indent += 1;
|
||||
}
|
||||
|
||||
if (line.text.startsWith("######")) {
|
||||
line.type = "h6";
|
||||
line.text = line.text.substr(6);
|
||||
} else if (line.text.startsWith("#####")) {
|
||||
line.type = "h5";
|
||||
line.text = line.text.substr(5);
|
||||
} else if (line.text.startsWith("####")) {
|
||||
line.type = "h4";
|
||||
line.text = line.text.substr(4);
|
||||
} else if (line.text.startsWith("###")) {
|
||||
line.type = "h3";
|
||||
line.text = line.text.substr(3);
|
||||
} else if (line.text.startsWith("##")) {
|
||||
line.type = "h2";
|
||||
line.text = line.text.substr(2);
|
||||
} else if (line.text.startsWith("#")) {
|
||||
line.type = "h1";
|
||||
line.text = line.text.substr(1);
|
||||
} else if (line.text.startsWith("-")) {
|
||||
line.type = "ul";
|
||||
line.text = line.text.substr(1);
|
||||
} else if (line.text.match(/^\d+\..*$/)) {
|
||||
// if line starts with {number}{dot}
|
||||
const match = line.text.match(/(\d+)\./);
|
||||
line.type = "ul";
|
||||
line.text = line.text.substr(match[0].length);
|
||||
line.ordered_number = Number(match[1].length);
|
||||
} else if (line.text == "```") {
|
||||
line.type = "skip";
|
||||
is_reading_code = true;
|
||||
code_indent = line.indent;
|
||||
} else if (line.text == "") {
|
||||
line.type = "empty";
|
||||
}
|
||||
} else {
|
||||
if (line.text == "```") {
|
||||
is_reading_code = false;
|
||||
line.type = "skip";
|
||||
} else {
|
||||
line.type = "code";
|
||||
line.text = line.raw_text.substr(code_indent); // remove the indent of the ``` from all the code block
|
||||
}
|
||||
}
|
||||
|
||||
if (line.type != "skip") {
|
||||
lines.push(line);
|
||||
}
|
||||
}
|
||||
|
||||
// PHASE 2:
|
||||
// Render HTML from markdown lines.
|
||||
// Look at each line and emit fitting HTML code
|
||||
|
||||
function markdownInlines(innerText) {
|
||||
// inline types:
|
||||
// **{INLINE}** : <strong>
|
||||
// __{INLINE}__ : <u>
|
||||
// ~~{INLINE}~~ : <s>
|
||||
// *{INLINE}* : <emph>
|
||||
// _{INLINE}_ : <emph>
|
||||
// `{TEXT}` : <code>
|
||||
// [{INLINE}]({URL}) : <a>
|
||||
//  : <img>
|
||||
// [[std;format.fmt]] : <a> (inner link)
|
||||
|
||||
const formats = [
|
||||
{
|
||||
marker: "**",
|
||||
tag: "strong",
|
||||
},
|
||||
{
|
||||
marker: "~~",
|
||||
tag: "s",
|
||||
},
|
||||
{
|
||||
marker: "__",
|
||||
tag: "u",
|
||||
},
|
||||
{
|
||||
marker: "*",
|
||||
tag: "em",
|
||||
},
|
||||
];
|
||||
|
||||
const stack = [];
|
||||
|
||||
let innerHTML = "";
|
||||
let currentRun = "";
|
||||
|
||||
function flushRun() {
|
||||
if (currentRun != "") {
|
||||
innerHTML += escapeHtml(currentRun);
|
||||
}
|
||||
currentRun = "";
|
||||
}
|
||||
|
||||
let parsing_code = false;
|
||||
let codetag = "";
|
||||
let in_code = false;
|
||||
|
||||
for (let i = 0; i < innerText.length; i++) {
|
||||
if (parsing_code && in_code) {
|
||||
if (innerText.substr(i, codetag.length) == codetag) {
|
||||
// remove leading and trailing whitespace if string both starts and ends with one.
|
||||
if (
|
||||
currentRun[0] == " " &&
|
||||
currentRun[currentRun.length - 1] == " "
|
||||
) {
|
||||
currentRun = currentRun.substr(1, currentRun.length - 2);
|
||||
}
|
||||
flushRun();
|
||||
|
||||
while (stack.length > 0) {
|
||||
const fmt = (stack.pop());
|
||||
innerHTML += "</" + fmt.tag + ">";
|
||||
}
|
||||
|
||||
return innerHTML;
|
||||
i += codetag.length - 1;
|
||||
in_code = false;
|
||||
parsing_code = false;
|
||||
innerHTML += "</code>";
|
||||
codetag = "";
|
||||
} else {
|
||||
currentRun += innerText[i];
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
function previousLineIs(type, line_no) {
|
||||
if (line_no > 0) {
|
||||
return (lines[line_no - 1].type == type);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
if (innerText[i] == "`") {
|
||||
flushRun();
|
||||
if (!parsing_code) {
|
||||
innerHTML += "<code>";
|
||||
}
|
||||
parsing_code = true;
|
||||
codetag += "`";
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
function nextLineIs(type, line_no) {
|
||||
if (line_no < (lines.length - 1)) {
|
||||
return (lines[line_no + 1].type == type);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function getPreviousLineIndent(line_no) {
|
||||
if (line_no > 0) {
|
||||
return lines[line_no - 1].indent;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function getNextLineIndent(line_no) {
|
||||
if (line_no < (lines.length - 1)) {
|
||||
return lines[line_no + 1].indent;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
let html = "";
|
||||
for (let line_no = 0; line_no < lines.length; line_no++) {
|
||||
const line = lines[line_no];
|
||||
|
||||
|
||||
|
||||
switch (line.type) {
|
||||
case "h1":
|
||||
case "h2":
|
||||
case "h3":
|
||||
case "h4":
|
||||
case "h5":
|
||||
case "h6":
|
||||
html += "<" + line.type + ">" + markdownInlines(line.text) + "</" + line.type + ">\n";
|
||||
break;
|
||||
|
||||
case "ul":
|
||||
case "ol":
|
||||
if (!previousLineIs("ul", line_no) || getPreviousLineIndent(line_no) < line.indent) {
|
||||
html += "<" + line.type + ">\n";
|
||||
}
|
||||
|
||||
html += "<li>" + markdownInlines(line.text) + "</li>\n";
|
||||
|
||||
if (!nextLineIs("ul", line_no) || getNextLineIndent(line_no) < line.indent) {
|
||||
html += "</" + line.type + ">\n";
|
||||
}
|
||||
break;
|
||||
|
||||
case "p":
|
||||
if (!previousLineIs("p", line_no)) {
|
||||
html += "<p>\n";
|
||||
}
|
||||
html += markdownInlines(line.text) + "\n";
|
||||
if (!nextLineIs("p", line_no)) {
|
||||
html += "</p>\n";
|
||||
}
|
||||
break;
|
||||
|
||||
case "code":
|
||||
if (!previousLineIs("code", line_no)) {
|
||||
html += "<pre><code>";
|
||||
}
|
||||
html += escapeHtml(line.text) + "\n";
|
||||
if (!nextLineIs("code", line_no)) {
|
||||
html += "</code></pre>\n";
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return html;
|
||||
}
|
||||
|
||||
function activateSelectedResult() {
  // Nothing to activate when the results panel is hidden.
  if (domSectSearchResults.classList.contains("hidden")) {
    return;
  }

  // Fall back to the first result when the cursor points at nothing.
  let li = domListSearchResults.children[curSearchIndex];
  if (li == null && domListSearchResults.children.length !== 0) {
    li = domListSearchResults.children[0];
  }
  if (li != null) {
    const anchor = li.children[0];
    location.href = anchor.getAttribute("href");
    curSearchIndex = -1;
  }
  domSearch.blur();
}
|
||||
|
||||
|
||||
function onSearchKeyDown(ev) {
  switch (getKeyString(ev)) {
    case "Enter": {
      // Detect whether this search changes anything; if not, activate the
      // selected result immediately instead of waiting for onHashChange.
      const before = getSearchTerms();
      startSearch();
      updateCurNav();
      const after = getSearchTerms();
      // we might have to wait for onHashChange to trigger
      imFeelingLucky = before.join(" ") !== after.join(" ");
      if (!imFeelingLucky) activateSelectedResult();

      ev.preventDefault();
      ev.stopPropagation();
      return;
    }
    case "Esc":
      domSearch.value = "";
      domSearch.blur();
      curSearchIndex = -1;
      ev.preventDefault();
      ev.stopPropagation();
      startSearch();
      return;
    case "Up":
      moveSearchCursor(-1);
      ev.preventDefault();
      ev.stopPropagation();
      return;
    case "Down":
      moveSearchCursor(1);
      ev.preventDefault();
      ev.stopPropagation();
      return;
    default:
      // Plain typing: reset the cursor and kick off a debounced search.
      if (ev.shiftKey || ev.ctrlKey || ev.altKey) return;
      curSearchIndex = -1;
      ev.stopPropagation();
      startAsyncSearch();
      return;
  }
}
|
||||
|
||||
|
||||
|
||||
function moveSearchCursor(dir) {
|
||||
if (curSearchIndex < 0 || curSearchIndex >= domListSearchResults.children.length) {
|
||||
if (dir > 0) {
|
||||
curSearchIndex = -1 + dir;
|
||||
} else if (dir < 0) {
|
||||
curSearchIndex = domListSearchResults.children.length + dir;
|
||||
}
|
||||
if (parsing_code) {
|
||||
currentRun += innerText[i];
|
||||
in_code = true;
|
||||
} else {
|
||||
curSearchIndex += dir;
|
||||
let any = false;
|
||||
for (
|
||||
let idx = stack.length > 0 ? -1 : 0;
|
||||
idx < formats.length;
|
||||
idx++
|
||||
) {
|
||||
const fmt = idx >= 0 ? formats[idx] : stack[stack.length - 1];
|
||||
if (innerText.substr(i, fmt.marker.length) == fmt.marker) {
|
||||
flushRun();
|
||||
if (stack[stack.length - 1] == fmt) {
|
||||
stack.pop();
|
||||
innerHTML += "</" + fmt.tag + ">";
|
||||
} else {
|
||||
stack.push(fmt);
|
||||
innerHTML += "<" + fmt.tag + ">";
|
||||
}
|
||||
i += fmt.marker.length - 1;
|
||||
any = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!any) {
|
||||
currentRun += innerText[i];
|
||||
}
|
||||
}
|
||||
if (curSearchIndex < 0) {
|
||||
curSearchIndex = 0;
|
||||
}
|
||||
if (curSearchIndex >= domListSearchResults.children.length) {
|
||||
curSearchIndex = domListSearchResults.children.length - 1;
|
||||
}
|
||||
renderSearchCursor();
|
||||
}
|
||||
flushRun();
|
||||
|
||||
while (stack.length > 0) {
|
||||
const fmt = stack.pop();
|
||||
innerHTML += "</" + fmt.tag + ">";
|
||||
}
|
||||
|
||||
return innerHTML;
|
||||
}
|
||||
|
||||
|
||||
function getKeyString(ev) {
|
||||
let name;
|
||||
let ignoreShift = false;
|
||||
switch (ev.which) {
|
||||
case 13:
|
||||
name = "Enter";
|
||||
break;
|
||||
case 27:
|
||||
name = "Esc";
|
||||
break;
|
||||
case 38:
|
||||
name = "Up";
|
||||
break;
|
||||
case 40:
|
||||
name = "Down";
|
||||
break;
|
||||
default:
|
||||
ignoreShift = true;
|
||||
name = (ev.key != null) ? ev.key : String.fromCharCode(ev.charCode || ev.keyCode);
|
||||
}
|
||||
if (!ignoreShift && ev.shiftKey) name = "Shift+" + name;
|
||||
if (ev.altKey) name = "Alt+" + name;
|
||||
if (ev.ctrlKey) name = "Ctrl+" + name;
|
||||
return name;
|
||||
function previousLineIs(type, line_no) {
|
||||
if (line_no > 0) {
|
||||
return lines[line_no - 1].type == type;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function onWindowKeyDown(ev) {
|
||||
switch (getKeyString(ev)) {
|
||||
case "Esc":
|
||||
if (!domHelpModal.classList.contains("hidden")) {
|
||||
domHelpModal.classList.add("hidden");
|
||||
ev.preventDefault();
|
||||
ev.stopPropagation();
|
||||
}
|
||||
break;
|
||||
case "s":
|
||||
domSearch.focus();
|
||||
domSearch.select();
|
||||
ev.preventDefault();
|
||||
ev.stopPropagation();
|
||||
startAsyncSearch();
|
||||
break;
|
||||
case "?":
|
||||
ev.preventDefault();
|
||||
ev.stopPropagation();
|
||||
showHelpModal();
|
||||
break;
|
||||
}
|
||||
function nextLineIs(type, line_no) {
|
||||
if (line_no < lines.length - 1) {
|
||||
return lines[line_no + 1].type == type;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function showHelpModal() {
|
||||
function getPreviousLineIndent(line_no) {
|
||||
if (line_no > 0) {
|
||||
return lines[line_no - 1].indent;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
function getNextLineIndent(line_no) {
|
||||
if (line_no < lines.length - 1) {
|
||||
return lines[line_no + 1].indent;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
let html = "";
|
||||
for (let line_no = 0; line_no < lines.length; line_no++) {
|
||||
const line = lines[line_no];
|
||||
|
||||
switch (line.type) {
|
||||
case "h1":
|
||||
case "h2":
|
||||
case "h3":
|
||||
case "h4":
|
||||
case "h5":
|
||||
case "h6":
|
||||
html +=
|
||||
"<" +
|
||||
line.type +
|
||||
">" +
|
||||
markdownInlines(line.text) +
|
||||
"</" +
|
||||
line.type +
|
||||
">\n";
|
||||
break;
|
||||
|
||||
case "ul":
|
||||
case "ol":
|
||||
if (
|
||||
!previousLineIs("ul", line_no) ||
|
||||
getPreviousLineIndent(line_no) < line.indent
|
||||
) {
|
||||
html += "<" + line.type + ">\n";
|
||||
}
|
||||
|
||||
html += "<li>" + markdownInlines(line.text) + "</li>\n";
|
||||
|
||||
if (
|
||||
!nextLineIs("ul", line_no) ||
|
||||
getNextLineIndent(line_no) < line.indent
|
||||
) {
|
||||
html += "</" + line.type + ">\n";
|
||||
}
|
||||
break;
|
||||
|
||||
case "p":
|
||||
if (!previousLineIs("p", line_no)) {
|
||||
html += "<p>\n";
|
||||
}
|
||||
html += markdownInlines(line.text) + "\n";
|
||||
if (!nextLineIs("p", line_no)) {
|
||||
html += "</p>\n";
|
||||
}
|
||||
break;
|
||||
|
||||
case "code":
|
||||
if (!previousLineIs("code", line_no)) {
|
||||
html += "<pre><code>";
|
||||
}
|
||||
html += escapeHtml(line.text) + "\n";
|
||||
if (!nextLineIs("code", line_no)) {
|
||||
html += "</code></pre>\n";
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return html;
|
||||
}
|
||||
|
||||
function activateSelectedResult() {
  // No-op while the results panel is hidden.
  if (domSectSearchResults.classList.contains("hidden")) return;

  const results = domListSearchResults.children;
  // Use the cursor's result, defaulting to the first result if the cursor
  // is not on any entry.
  let li = results[curSearchIndex];
  if (li == null && results.length !== 0) li = results[0];
  if (li != null) {
    location.href = li.children[0].getAttribute("href");
    curSearchIndex = -1;
  }
  domSearch.blur();
}
|
||||
|
||||
// hide the modal if it's visible or return to the previous result page and unfocus the search
|
||||
function onEscape(ev) {
  const modalOpen = !domHelpModal.classList.contains("hidden");
  if (modalOpen) {
    // First Esc just dismisses the help modal.
    domHelpModal.classList.add("hidden");
    ev.preventDefault();
    ev.stopPropagation();
  } else {
    // Otherwise clear + unfocus the search box and re-run the (now empty)
    // search to return to the previous result page.
    domSearch.value = "";
    domSearch.blur();
    domSearchPlaceholder.classList.remove("hidden");
    curSearchIndex = -1;
    ev.preventDefault();
    ev.stopPropagation();
    startSearch();
  }
}
|
||||
|
||||
function onSearchKeyDown(ev) {
  const key = getKeyString(ev);
  if (key === "Enter") {
    // Detect if this search changes anything; if not, jump straight to the
    // selected result instead of waiting for onHashChange to fire.
    const before = getSearchTerms();
    startSearch();
    updateCurNav();
    const after = getSearchTerms();
    // we might have to wait for onHashChange to trigger
    imFeelingLucky = before.join(" ") !== after.join(" ");
    if (!imFeelingLucky) activateSelectedResult();

    ev.preventDefault();
    ev.stopPropagation();
  } else if (key === "Esc") {
    onEscape(ev);
  } else if (key === "Up") {
    moveSearchCursor(-1);
    ev.preventDefault();
    ev.stopPropagation();
  } else if (key === "Down") {
    // TODO: make the page scroll down if the search cursor is out of the screen
    moveSearchCursor(1);
    ev.preventDefault();
    ev.stopPropagation();
  } else {
    if (ev.shiftKey || ev.ctrlKey || ev.altKey) return;
    curSearchIndex = -1;
    ev.stopPropagation();
    startAsyncSearch();
  }
}
|
||||
|
||||
function moveSearchCursor(dir) {
  const count = domListSearchResults.children.length;
  // When the cursor is outside the list, wrap in from the appropriate end;
  // otherwise just step by dir.
  if (curSearchIndex < 0 || curSearchIndex >= count) {
    if (dir > 0) {
      curSearchIndex = -1 + dir;
    } else if (dir < 0) {
      curSearchIndex = count + dir;
    }
  } else {
    curSearchIndex += dir;
  }
  // Clamp to the valid index range.
  if (curSearchIndex < 0) curSearchIndex = 0;
  if (curSearchIndex >= count) curSearchIndex = count - 1;
  renderSearchCursor();
}
|
||||
|
||||
function getKeyString(ev) {
  // Normalize a keyboard event into a string like "Ctrl+Shift+Up".
  // Legacy ev.which codes map to stable names; anything else falls back to
  // ev.key (or the character code) and suppresses the Shift prefix, since
  // shift already changed the produced character.
  const specialKeys = { 13: "Enter", 27: "Esc", 38: "Up", 40: "Down" };
  let name;
  let ignoreShift = false;
  if (ev.which in specialKeys) {
    name = specialKeys[ev.which];
  } else {
    ignoreShift = true;
    name =
      ev.key != null ? ev.key : String.fromCharCode(ev.charCode || ev.keyCode);
  }
  if (!ignoreShift && ev.shiftKey) name = "Shift+" + name;
  if (ev.altKey) name = "Alt+" + name;
  if (ev.ctrlKey) name = "Ctrl+" + name;
  return name;
}
|
||||
|
||||
function onWindowKeyDown(ev) {
  const key = getKeyString(ev);
  if (key === "Esc") {
    onEscape(ev);
  } else if (key === "s") {
    // Only hijack "s" when the help modal is closed and focus isn't already
    // in the search box.
    if (domHelpModal.classList.contains("hidden")) {
      if (ev.target == domSearch) return;
      domSearch.focus();
      domSearch.select();
      domDocs.scrollTo(0, 0);
      ev.preventDefault();
      ev.stopPropagation();
      startAsyncSearch();
    }
  } else if (key === "?") {
    ev.preventDefault();
    ev.stopPropagation();
    showHelpModal();
  }
}
|
||||
|
||||
function showHelpModal() {
  // Reveal the help modal, center it in the viewport, and focus it.
  // BUGFIX(review): the original assigned style.left and style.top twice with
  // equivalent expressions (merge duplication); assign each once.
  domHelpModal.classList.remove("hidden");
  domHelpModal.style.left =
    window.innerWidth / 2 - domHelpModal.clientWidth / 2 + "px";
  domHelpModal.style.top =
    window.innerHeight / 2 - domHelpModal.clientHeight / 2 + "px";
  domHelpModal.focus();
}
|
||||
domSearch.blur();
|
||||
}
|
||||
|
||||
// Cancel any pending debounced search.
// BUGFIX(review): the original (merge-corrupted) version nested a duplicate
// `clearAsyncSearch` inside itself, so the outer function defined the inner
// one and returned without ever clearing the timer; it also called
// clearTimeout twice. Collapsed to the single intended implementation.
function clearAsyncSearch() {
  if (searchTimer != null) {
    clearTimeout(searchTimer);
    searchTimer = null;
  }
}
|
||||
|
||||
// Debounce: (re)start the timer that will run startSearch after 100ms of
// keyboard inactivity.
// BUGFIX(review): the original (merge-corrupted) version nested a duplicate
// `startAsyncSearch` plus an empty `startSearch` stub inside itself, making
// the outer function a no-op and shadowing the real startSearch. Collapsed
// to the single intended implementation.
function startAsyncSearch() {
  clearAsyncSearch();
  searchTimer = setTimeout(startSearch, 100);
}
|
||||
function startSearch() {
  // Encode the current search box contents into the "?..." component of the
  // hash; onHashChange then performs the actual search/render.
  clearAsyncSearch();
  const oldHash = location.hash;
  const parts = oldHash.split("?");
  const suffix = domSearch.value === "" ? "" : "?" + domSearch.value;
  location.hash = parts.length === 1 ? oldHash + suffix : parts[0] + suffix;
}
|
||||
function getSearchTerms() {
|
||||
let newPart2 = domSearch.value === "" ? "" : "?" + domSearch.value;
|
||||
location.replace(parts.length === 1 ? oldHash + newPart2 : parts[0] + newPart2);
|
||||
}
|
||||
function getSearchTerms() {
  // Current search query split on whitespace, sorted for stable comparison.
  const terms = curNavSearch.trim().split(/[ \r\n\t]+/);
  terms.sort();
  return terms;
}
|
||||
function renderSearch() {
|
||||
}
|
||||
|
||||
function renderSearch() {
|
||||
let matchedItems = [];
|
||||
let ignoreCase = (curNavSearch.toLowerCase() === curNavSearch);
|
||||
let ignoreCase = curNavSearch.toLowerCase() === curNavSearch;
|
||||
let terms = getSearchTerms();
|
||||
|
||||
decl_loop: for (let declIndex = 0; declIndex < zigAnalysis.decls.length; declIndex += 1) {
|
||||
let canonPath = getCanonDeclPath(declIndex);
|
||||
if (canonPath == null) continue;
|
||||
decl_loop: for (
|
||||
let declIndex = 0;
|
||||
declIndex < zigAnalysis.decls.length;
|
||||
declIndex += 1
|
||||
) {
|
||||
let canonPath = getCanonDeclPath(declIndex);
|
||||
if (canonPath == null) continue;
|
||||
|
||||
let decl = zigAnalysis.decls[declIndex];
|
||||
let lastPkgName = canonPath.pkgNames[canonPath.pkgNames.length - 1];
|
||||
let fullPathSearchText = lastPkgName + "." + canonPath.declNames.join('.');
|
||||
let astNode = zigAnalysis.astNodes[decl.src];
|
||||
let fileAndDocs = "" //zigAnalysis.files[astNode.file];
|
||||
// TODO: understand what this piece of code is trying to achieve
|
||||
// also right now `files` are expressed as a hashmap.
|
||||
if (astNode.docs != null) {
|
||||
fileAndDocs += "\n" + astNode.docs;
|
||||
let decl = zigAnalysis.decls[declIndex];
|
||||
let lastPkgName = canonPath.pkgNames[canonPath.pkgNames.length - 1];
|
||||
let fullPathSearchText =
|
||||
lastPkgName + "." + canonPath.declNames.join(".");
|
||||
let astNode = zigAnalysis.astNodes[decl.src];
|
||||
let fileAndDocs = ""; //zigAnalysis.files[astNode.file];
|
||||
// TODO: understand what this piece of code is trying to achieve
|
||||
// also right now `files` are expressed as a hashmap.
|
||||
if (astNode.docs != null) {
|
||||
fileAndDocs += "\n" + astNode.docs;
|
||||
}
|
||||
let fullPathSearchTextLower = fullPathSearchText;
|
||||
if (ignoreCase) {
|
||||
fullPathSearchTextLower = fullPathSearchTextLower.toLowerCase();
|
||||
fileAndDocs = fileAndDocs.toLowerCase();
|
||||
}
|
||||
|
||||
let points = 0;
|
||||
for (let termIndex = 0; termIndex < terms.length; termIndex += 1) {
|
||||
let term = terms[termIndex];
|
||||
|
||||
// exact, case sensitive match of full decl path
|
||||
if (fullPathSearchText === term) {
|
||||
points += 4;
|
||||
continue;
|
||||
}
|
||||
let fullPathSearchTextLower = fullPathSearchText;
|
||||
if (ignoreCase) {
|
||||
fullPathSearchTextLower = fullPathSearchTextLower.toLowerCase();
|
||||
fileAndDocs = fileAndDocs.toLowerCase();
|
||||
// exact, case sensitive match of just decl name
|
||||
if (decl.name == term) {
|
||||
points += 3;
|
||||
continue;
|
||||
}
|
||||
// substring, case insensitive match of full decl path
|
||||
if (fullPathSearchTextLower.indexOf(term) >= 0) {
|
||||
points += 2;
|
||||
continue;
|
||||
}
|
||||
if (fileAndDocs.indexOf(term) >= 0) {
|
||||
points += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
let points = 0;
|
||||
for (let termIndex = 0; termIndex < terms.length; termIndex += 1) {
|
||||
let term = terms[termIndex];
|
||||
continue decl_loop;
|
||||
}
|
||||
|
||||
// exact, case sensitive match of full decl path
|
||||
if (fullPathSearchText === term) {
|
||||
points += 4;
|
||||
continue;
|
||||
}
|
||||
// exact, case sensitive match of just decl name
|
||||
if (decl.name == term) {
|
||||
points += 3;
|
||||
continue;
|
||||
}
|
||||
// substring, case insensitive match of full decl path
|
||||
if (fullPathSearchTextLower.indexOf(term) >= 0) {
|
||||
points += 2;
|
||||
continue;
|
||||
}
|
||||
if (fileAndDocs.indexOf(term) >= 0) {
|
||||
points += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
continue decl_loop;
|
||||
}
|
||||
|
||||
matchedItems.push({
|
||||
decl: decl,
|
||||
path: canonPath,
|
||||
points: points,
|
||||
});
|
||||
matchedItems.push({
|
||||
decl: decl,
|
||||
path: canonPath,
|
||||
points: points,
|
||||
});
|
||||
}
|
||||
|
||||
if (matchedItems.length !== 0) {
|
||||
resizeDomList(domListSearchResults, matchedItems.length, '<li><a href="#"></a></li>');
|
||||
matchedItems.sort(function (a, b) {
|
||||
let cmp = operatorCompare(b.points, a.points);
|
||||
if (cmp != 0) return cmp;
|
||||
return operatorCompare(a.decl.name, b.decl.name);
|
||||
});
|
||||
|
||||
matchedItems.sort(function(a, b) {
|
||||
let cmp = operatorCompare(b.points, a.points);
|
||||
if (cmp != 0) return cmp;
|
||||
return operatorCompare(a.decl.name, b.decl.name);
|
||||
});
|
||||
let searchTrimmed = false;
|
||||
const searchTrimResultsMaxItems = 200;
|
||||
if (searchTrimResults && matchedItems.length > searchTrimResultsMaxItems) {
|
||||
matchedItems = matchedItems.slice(0, searchTrimResultsMaxItems);
|
||||
searchTrimmed = true;
|
||||
}
|
||||
|
||||
for (let i = 0; i < matchedItems.length; i += 1) {
|
||||
let liDom = domListSearchResults.children[i];
|
||||
let aDom = liDom.children[0];
|
||||
let match = matchedItems[i];
|
||||
let lastPkgName = match.path.pkgNames[match.path.pkgNames.length - 1];
|
||||
aDom.textContent = lastPkgName + "." + match.path.declNames.join('.');
|
||||
aDom.setAttribute('href', navLink(match.path.pkgNames, match.path.declNames));
|
||||
}
|
||||
renderSearchCursor();
|
||||
// Build up the list of search results
|
||||
let matchedItemsHTML = "";
|
||||
|
||||
domSectSearchResults.classList.remove("hidden");
|
||||
for (let i = 0; i < matchedItems.length; i += 1) {
|
||||
const match = matchedItems[i];
|
||||
const lastPkgName = match.path.pkgNames[match.path.pkgNames.length - 1];
|
||||
|
||||
const text = lastPkgName + "." + match.path.declNames.join(".");
|
||||
const href = navLink(match.path.pkgNames, match.path.declNames);
|
||||
|
||||
matchedItemsHTML += "<li><a href=\"" + href + "\">" + text + "</a></li>";
|
||||
}
|
||||
|
||||
// Replace the search results using our newly constructed HTML string
|
||||
domListSearchResults.innerHTML = matchedItemsHTML;
|
||||
if (searchTrimmed) {
|
||||
domSectSearchAllResultsLink.classList.remove("hidden");
|
||||
}
|
||||
renderSearchCursor();
|
||||
|
||||
domSectSearchResults.classList.remove("hidden");
|
||||
} else {
|
||||
domSectSearchNoResults.classList.remove("hidden");
|
||||
domSectSearchNoResults.classList.remove("hidden");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function renderSearchCursor() {
|
||||
function renderSearchCursor() {
|
||||
for (let i = 0; i < domListSearchResults.children.length; i += 1) {
|
||||
let liDom = (domListSearchResults.children[i]);
|
||||
if (curSearchIndex === i) {
|
||||
liDom.classList.add("selected");
|
||||
} else {
|
||||
liDom.classList.remove("selected");
|
||||
}
|
||||
let liDom = domListSearchResults.children[i];
|
||||
if (curSearchIndex === i) {
|
||||
liDom.classList.add("selected");
|
||||
} else {
|
||||
liDom.classList.remove("selected");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// function indexNodesToCalls() {
|
||||
// let map = {};
|
||||
// for (let i = 0; i < zigAnalysis.calls.length; i += 1) {
|
||||
// let call = zigAnalysis.calls[i];
|
||||
// let fn = zigAnalysis.fns[call.fn];
|
||||
// if (map[fn.src] == null) {
|
||||
// map[fn.src] = [i];
|
||||
// } else {
|
||||
// map[fn.src].push(i);
|
||||
// }
|
||||
// }
|
||||
// return map;
|
||||
// }
|
||||
|
||||
|
||||
// function indexNodesToCalls() {
|
||||
// let map = {};
|
||||
// for (let i = 0; i < zigAnalysis.calls.length; i += 1) {
|
||||
// let call = zigAnalysis.calls[i];
|
||||
// let fn = zigAnalysis.fns[call.fn];
|
||||
// if (map[fn.src] == null) {
|
||||
// map[fn.src] = [i];
|
||||
// } else {
|
||||
// map[fn.src].push(i);
|
||||
// }
|
||||
// }
|
||||
// return map;
|
||||
// }
|
||||
|
||||
|
||||
|
||||
function byNameProperty(a, b) {
|
||||
function byNameProperty(a, b) {
|
||||
return operatorCompare(a.name, b.name);
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
})();
|
||||
|
||||
Vendored
BIN
Binary file not shown.
@@ -703,7 +703,7 @@ const PosixImpl = struct {
|
||||
const max_multiplier_bits = @bitSizeOf(usize);
|
||||
const fibonacci_multiplier = 0x9E3779B97F4A7C15 >> (64 - max_multiplier_bits);
|
||||
|
||||
const max_bucket_bits = @ctz(usize, buckets.len);
|
||||
const max_bucket_bits = @ctz(buckets.len);
|
||||
comptime assert(std.math.isPowerOfTwo(buckets.len));
|
||||
|
||||
const index = (address *% fibonacci_multiplier) >> (max_multiplier_bits - max_bucket_bits);
|
||||
@@ -721,7 +721,7 @@ const PosixImpl = struct {
|
||||
// then cut off the zero bits from the alignment to get the unique address.
|
||||
const addr = @ptrToInt(ptr);
|
||||
assert(addr & (alignment - 1) == 0);
|
||||
return addr >> @ctz(usize, alignment);
|
||||
return addr >> @ctz(alignment);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -140,7 +140,7 @@ const FutexImpl = struct {
|
||||
// - they both seem to mark the cache-line as modified regardless: https://stackoverflow.com/a/63350048
|
||||
// - `lock bts` is smaller instruction-wise which makes it better for inlining
|
||||
if (comptime builtin.target.cpu.arch.isX86()) {
|
||||
const locked_bit = @ctz(u32, @as(u32, locked));
|
||||
const locked_bit = @ctz(@as(u32, locked));
|
||||
return self.state.bitSet(locked_bit, .Acquire) == 0;
|
||||
}
|
||||
|
||||
|
||||
@@ -168,8 +168,8 @@ pub const DefaultRwLock = struct {
|
||||
const IS_WRITING: usize = 1;
|
||||
const WRITER: usize = 1 << 1;
|
||||
const READER: usize = 1 << (1 + @bitSizeOf(Count));
|
||||
const WRITER_MASK: usize = std.math.maxInt(Count) << @ctz(usize, WRITER);
|
||||
const READER_MASK: usize = std.math.maxInt(Count) << @ctz(usize, READER);
|
||||
const WRITER_MASK: usize = std.math.maxInt(Count) << @ctz(WRITER);
|
||||
const READER_MASK: usize = std.math.maxInt(Count) << @ctz(READER);
|
||||
const Count = std.meta.Int(.unsigned, @divFloor(@bitSizeOf(usize) - 1, 2));
|
||||
|
||||
pub fn tryLock(rwl: *DefaultRwLock) bool {
|
||||
|
||||
@@ -221,6 +221,30 @@ pub fn ArrayListAligned(comptime T: type, comptime alignment: ?u29) type {
|
||||
mem.copy(T, self.items[old_len..], items);
|
||||
}
|
||||
|
||||
/// Append an unaligned slice of items to the list. Allocates more
|
||||
/// memory as necessary. Only call this function if calling
|
||||
/// `appendSlice` instead would be a compile error.
|
||||
pub fn appendUnalignedSlice(self: *Self, items: []align(1) const T) Allocator.Error!void {
|
||||
try self.ensureUnusedCapacity(items.len);
|
||||
self.appendUnalignedSliceAssumeCapacity(items);
|
||||
}
|
||||
|
||||
/// Append the slice of items to the list, asserting the capacity is already
|
||||
/// enough to store the new items. **Does not** invalidate pointers.
|
||||
/// Only call this function if calling `appendSliceAssumeCapacity` instead
|
||||
/// would be a compile error.
|
||||
pub fn appendUnalignedSliceAssumeCapacity(self: *Self, items: []align(1) const T) void {
|
||||
const old_len = self.items.len;
|
||||
const new_len = old_len + items.len;
|
||||
assert(new_len <= self.capacity);
|
||||
self.items.len = new_len;
|
||||
@memcpy(
|
||||
@ptrCast([*]align(@alignOf(T)) u8, self.items.ptr + old_len),
|
||||
@ptrCast([*]const u8, items.ptr),
|
||||
items.len * @sizeOf(T),
|
||||
);
|
||||
}
|
||||
|
||||
pub const Writer = if (T != u8)
|
||||
@compileError("The Writer interface is only defined for ArrayList(u8) " ++
|
||||
"but the given type is ArrayList(" ++ @typeName(T) ++ ")")
|
||||
@@ -592,6 +616,29 @@ pub fn ArrayListAlignedUnmanaged(comptime T: type, comptime alignment: ?u29) typ
|
||||
mem.copy(T, self.items[old_len..], items);
|
||||
}
|
||||
|
||||
/// Append the slice of items to the list. Allocates more
|
||||
/// memory as necessary. Only call this function if a call to `appendSlice` instead would
|
||||
/// be a compile error.
|
||||
pub fn appendUnalignedSlice(self: *Self, allocator: Allocator, items: []align(1) const T) Allocator.Error!void {
|
||||
try self.ensureUnusedCapacity(allocator, items.len);
|
||||
self.appendUnalignedSliceAssumeCapacity(items);
|
||||
}
|
||||
|
||||
/// Append an unaligned slice of items to the list, asserting the capacity is enough
|
||||
/// to store the new items. Only call this function if a call to `appendSliceAssumeCapacity`
|
||||
/// instead would be a compile error.
|
||||
pub fn appendUnalignedSliceAssumeCapacity(self: *Self, items: []align(1) const T) void {
|
||||
const old_len = self.items.len;
|
||||
const new_len = old_len + items.len;
|
||||
assert(new_len <= self.capacity);
|
||||
self.items.len = new_len;
|
||||
@memcpy(
|
||||
@ptrCast([*]align(@alignOf(T)) u8, self.items.ptr + old_len),
|
||||
@ptrCast([*]const u8, items.ptr),
|
||||
items.len * @sizeOf(T),
|
||||
);
|
||||
}
|
||||
|
||||
pub const WriterContext = struct {
|
||||
self: *Self,
|
||||
allocator: Allocator,
|
||||
@@ -899,6 +946,14 @@ test "std.ArrayList/ArrayListUnmanaged.basic" {
|
||||
try testing.expect(list.pop() == 1);
|
||||
try testing.expect(list.items.len == 9);
|
||||
|
||||
var unaligned: [3]i32 align(1) = [_]i32{ 4, 5, 6 };
|
||||
list.appendUnalignedSlice(&unaligned) catch unreachable;
|
||||
try testing.expect(list.items.len == 12);
|
||||
try testing.expect(list.pop() == 6);
|
||||
try testing.expect(list.pop() == 5);
|
||||
try testing.expect(list.pop() == 4);
|
||||
try testing.expect(list.items.len == 9);
|
||||
|
||||
list.appendSlice(&[_]i32{}) catch unreachable;
|
||||
try testing.expect(list.items.len == 9);
|
||||
|
||||
@@ -941,6 +996,14 @@ test "std.ArrayList/ArrayListUnmanaged.basic" {
|
||||
try testing.expect(list.pop() == 1);
|
||||
try testing.expect(list.items.len == 9);
|
||||
|
||||
var unaligned: [3]i32 align(1) = [_]i32{ 4, 5, 6 };
|
||||
list.appendUnalignedSlice(a, &unaligned) catch unreachable;
|
||||
try testing.expect(list.items.len == 12);
|
||||
try testing.expect(list.pop() == 6);
|
||||
try testing.expect(list.pop() == 5);
|
||||
try testing.expect(list.pop() == 4);
|
||||
try testing.expect(list.items.len == 9);
|
||||
|
||||
list.appendSlice(a, &[_]i32{}) catch unreachable;
|
||||
try testing.expect(list.items.len == 9);
|
||||
|
||||
|
||||
+13
-13
@@ -91,7 +91,7 @@ pub fn IntegerBitSet(comptime size: u16) type {
|
||||
|
||||
/// Returns the total number of set bits in this bit set.
|
||||
pub fn count(self: Self) usize {
|
||||
return @popCount(MaskInt, self.mask);
|
||||
return @popCount(self.mask);
|
||||
}
|
||||
|
||||
/// Changes the value of the specified bit of the bit
|
||||
@@ -179,7 +179,7 @@ pub fn IntegerBitSet(comptime size: u16) type {
|
||||
pub fn findFirstSet(self: Self) ?usize {
|
||||
const mask = self.mask;
|
||||
if (mask == 0) return null;
|
||||
return @ctz(MaskInt, mask);
|
||||
return @ctz(mask);
|
||||
}
|
||||
|
||||
/// Finds the index of the first set bit, and unsets it.
|
||||
@@ -187,7 +187,7 @@ pub fn IntegerBitSet(comptime size: u16) type {
|
||||
pub fn toggleFirstSet(self: *Self) ?usize {
|
||||
const mask = self.mask;
|
||||
if (mask == 0) return null;
|
||||
const index = @ctz(MaskInt, mask);
|
||||
const index = @ctz(mask);
|
||||
self.mask = mask & (mask - 1);
|
||||
return index;
|
||||
}
|
||||
@@ -222,12 +222,12 @@ pub fn IntegerBitSet(comptime size: u16) type {
|
||||
|
||||
switch (direction) {
|
||||
.forward => {
|
||||
const next_index = @ctz(MaskInt, self.bits_remain);
|
||||
const next_index = @ctz(self.bits_remain);
|
||||
self.bits_remain &= self.bits_remain - 1;
|
||||
return next_index;
|
||||
},
|
||||
.reverse => {
|
||||
const leading_zeroes = @clz(MaskInt, self.bits_remain);
|
||||
const leading_zeroes = @clz(self.bits_remain);
|
||||
const top_bit = (@bitSizeOf(MaskInt) - 1) - leading_zeroes;
|
||||
self.bits_remain &= (@as(MaskInt, 1) << @intCast(ShiftInt, top_bit)) - 1;
|
||||
return top_bit;
|
||||
@@ -347,7 +347,7 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type {
|
||||
pub fn count(self: Self) usize {
|
||||
var total: usize = 0;
|
||||
for (self.masks) |mask| {
|
||||
total += @popCount(MaskInt, mask);
|
||||
total += @popCount(mask);
|
||||
}
|
||||
return total;
|
||||
}
|
||||
@@ -475,7 +475,7 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type {
|
||||
if (mask != 0) break mask;
|
||||
offset += @bitSizeOf(MaskInt);
|
||||
} else return null;
|
||||
return offset + @ctz(MaskInt, mask);
|
||||
return offset + @ctz(mask);
|
||||
}
|
||||
|
||||
/// Finds the index of the first set bit, and unsets it.
|
||||
@@ -486,7 +486,7 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type {
|
||||
if (mask.* != 0) break mask;
|
||||
offset += @bitSizeOf(MaskInt);
|
||||
} else return null;
|
||||
const index = @ctz(MaskInt, mask.*);
|
||||
const index = @ctz(mask.*);
|
||||
mask.* &= (mask.* - 1);
|
||||
return offset + index;
|
||||
}
|
||||
@@ -657,7 +657,7 @@ pub const DynamicBitSetUnmanaged = struct {
|
||||
var total: usize = 0;
|
||||
for (self.masks[0..num_masks]) |mask| {
|
||||
// Note: This is where we depend on padding bits being zero
|
||||
total += @popCount(MaskInt, mask);
|
||||
total += @popCount(mask);
|
||||
}
|
||||
return total;
|
||||
}
|
||||
@@ -795,7 +795,7 @@ pub const DynamicBitSetUnmanaged = struct {
|
||||
mask += 1;
|
||||
offset += @bitSizeOf(MaskInt);
|
||||
} else return null;
|
||||
return offset + @ctz(MaskInt, mask[0]);
|
||||
return offset + @ctz(mask[0]);
|
||||
}
|
||||
|
||||
/// Finds the index of the first set bit, and unsets it.
|
||||
@@ -808,7 +808,7 @@ pub const DynamicBitSetUnmanaged = struct {
|
||||
mask += 1;
|
||||
offset += @bitSizeOf(MaskInt);
|
||||
} else return null;
|
||||
const index = @ctz(MaskInt, mask[0]);
|
||||
const index = @ctz(mask[0]);
|
||||
mask[0] &= (mask[0] - 1);
|
||||
return offset + index;
|
||||
}
|
||||
@@ -1067,12 +1067,12 @@ fn BitSetIterator(comptime MaskInt: type, comptime options: IteratorOptions) typ
|
||||
|
||||
switch (direction) {
|
||||
.forward => {
|
||||
const next_index = @ctz(MaskInt, self.bits_remain) + self.bit_offset;
|
||||
const next_index = @ctz(self.bits_remain) + self.bit_offset;
|
||||
self.bits_remain &= self.bits_remain - 1;
|
||||
return next_index;
|
||||
},
|
||||
.reverse => {
|
||||
const leading_zeroes = @clz(MaskInt, self.bits_remain);
|
||||
const leading_zeroes = @clz(self.bits_remain);
|
||||
const top_bit = (@bitSizeOf(MaskInt) - 1) - leading_zeroes;
|
||||
const no_top_bit_mask = (@as(MaskInt, 1) << @intCast(ShiftInt, top_bit)) - 1;
|
||||
self.bits_remain &= no_top_bit_mask;
|
||||
|
||||
@@ -15,16 +15,16 @@ const testing = std.testing;
|
||||
/// var slice = a.slice(); // a slice of the 64-byte array
|
||||
/// var a_clone = a; // creates a copy - the structure doesn't use any internal pointers
|
||||
/// ```
|
||||
pub fn BoundedArray(comptime T: type, comptime capacity: usize) type {
|
||||
pub fn BoundedArray(comptime T: type, comptime buffer_capacity: usize) type {
|
||||
return struct {
|
||||
const Self = @This();
|
||||
buffer: [capacity]T = undefined,
|
||||
buffer: [buffer_capacity]T = undefined,
|
||||
len: usize = 0,
|
||||
|
||||
/// Set the actual length of the slice.
|
||||
/// Returns error.Overflow if it exceeds the length of the backing array.
|
||||
pub fn init(len: usize) error{Overflow}!Self {
|
||||
if (len > capacity) return error.Overflow;
|
||||
if (len > buffer_capacity) return error.Overflow;
|
||||
return Self{ .len = len };
|
||||
}
|
||||
|
||||
@@ -41,7 +41,7 @@ pub fn BoundedArray(comptime T: type, comptime capacity: usize) type {
|
||||
/// Adjust the slice's length to `len`.
|
||||
/// Does not initialize added items if any.
|
||||
pub fn resize(self: *Self, len: usize) error{Overflow}!void {
|
||||
if (len > capacity) return error.Overflow;
|
||||
if (len > buffer_capacity) return error.Overflow;
|
||||
self.len = len;
|
||||
}
|
||||
|
||||
@@ -69,7 +69,7 @@ pub fn BoundedArray(comptime T: type, comptime capacity: usize) type {
|
||||
|
||||
/// Check that the slice can hold at least `additional_count` items.
|
||||
pub fn ensureUnusedCapacity(self: Self, additional_count: usize) error{Overflow}!void {
|
||||
if (self.len + additional_count > capacity) {
|
||||
if (self.len + additional_count > buffer_capacity) {
|
||||
return error.Overflow;
|
||||
}
|
||||
}
|
||||
@@ -83,7 +83,7 @@ pub fn BoundedArray(comptime T: type, comptime capacity: usize) type {
|
||||
/// Increase length by 1, returning pointer to the new item.
|
||||
/// Asserts that there is space for the new item.
|
||||
pub fn addOneAssumeCapacity(self: *Self) *T {
|
||||
assert(self.len < capacity);
|
||||
assert(self.len < buffer_capacity);
|
||||
self.len += 1;
|
||||
return &self.slice()[self.len - 1];
|
||||
}
|
||||
@@ -236,7 +236,7 @@ pub fn BoundedArray(comptime T: type, comptime capacity: usize) type {
|
||||
pub fn appendNTimesAssumeCapacity(self: *Self, value: T, n: usize) void {
|
||||
const old_len = self.len;
|
||||
self.len += n;
|
||||
assert(self.len <= capacity);
|
||||
assert(self.len <= buffer_capacity);
|
||||
mem.set(T, self.slice()[old_len..self.len], value);
|
||||
}
|
||||
|
||||
@@ -275,7 +275,7 @@ test "BoundedArray" {
|
||||
try testing.expectEqualSlices(u8, &x, a.constSlice());
|
||||
|
||||
var a2 = a;
|
||||
try testing.expectEqualSlices(u8, a.constSlice(), a.constSlice());
|
||||
try testing.expectEqualSlices(u8, a.constSlice(), a2.constSlice());
|
||||
a2.set(0, 0);
|
||||
try testing.expect(a.get(0) != a2.get(0));
|
||||
|
||||
|
||||
+11
-4
@@ -1495,6 +1495,7 @@ pub const LibExeObjStep = struct {
|
||||
emit_h: bool = false,
|
||||
bundle_compiler_rt: ?bool = null,
|
||||
single_threaded: ?bool = null,
|
||||
stack_protector: ?bool = null,
|
||||
disable_stack_probing: bool,
|
||||
disable_sanitize_c: bool,
|
||||
sanitize_thread: bool,
|
||||
@@ -1896,13 +1897,12 @@ pub const LibExeObjStep = struct {
|
||||
/// When a binary cannot be ran through emulation or the option is disabled, a warning
|
||||
/// will be printed and the binary will *NOT* be ran.
|
||||
pub fn runEmulatable(exe: *LibExeObjStep) *EmulatableRunStep {
|
||||
assert(exe.kind == .exe or exe.kind == .text_exe);
|
||||
assert(exe.kind == .exe or exe.kind == .test_exe);
|
||||
|
||||
const run_step = EmulatableRunStep.create(exe.builder.fmt("run {s}", .{exe.step.name}), exe);
|
||||
const run_step = EmulatableRunStep.create(exe.builder, exe.builder.fmt("run {s}", .{exe.step.name}), exe);
|
||||
if (exe.vcpkg_bin_path) |path| {
|
||||
run_step.addPathDir(path);
|
||||
RunStep.addPathDirInternal(&run_step.step, exe.builder, path);
|
||||
}
|
||||
|
||||
return run_step;
|
||||
}
|
||||
|
||||
@@ -2826,6 +2826,13 @@ pub const LibExeObjStep = struct {
|
||||
if (self.disable_stack_probing) {
|
||||
try zig_args.append("-fno-stack-check");
|
||||
}
|
||||
if (self.stack_protector) |stack_protector| {
|
||||
if (stack_protector) {
|
||||
try zig_args.append("-fstack-protector");
|
||||
} else {
|
||||
try zig_args.append("-fno-stack-protector");
|
||||
}
|
||||
}
|
||||
if (self.red_zone) |red_zone| {
|
||||
if (red_zone) {
|
||||
try zig_args.append("-mred-zone");
|
||||
|
||||
@@ -171,6 +171,7 @@ fn printLiteral(out: anytype, val: anytype, indent: u8) !void {
|
||||
.Void,
|
||||
.Bool,
|
||||
.Int,
|
||||
.ComptimeInt,
|
||||
.Float,
|
||||
.Null,
|
||||
=> try out.print("{any}", .{val}),
|
||||
@@ -302,6 +303,7 @@ test "OptionsStep" {
|
||||
options.addOption(usize, "option1", 1);
|
||||
options.addOption(?usize, "option2", null);
|
||||
options.addOption(?usize, "option3", 3);
|
||||
options.addOption(comptime_int, "option4", 4);
|
||||
options.addOption([]const u8, "string", "zigisthebest");
|
||||
options.addOption(?[]const u8, "optional_string", null);
|
||||
options.addOption([2][2]u16, "nested_array", nested_array);
|
||||
@@ -314,6 +316,7 @@ test "OptionsStep" {
|
||||
\\pub const option1: usize = 1;
|
||||
\\pub const option2: ?usize = null;
|
||||
\\pub const option3: ?usize = 3;
|
||||
\\pub const option4: comptime_int = 4;
|
||||
\\pub const string: []const u8 = "zigisthebest";
|
||||
\\pub const optional_string: ?[]const u8 = null;
|
||||
\\pub const nested_array: [2][2]u16 = [2][2]u16 {
|
||||
|
||||
@@ -101,7 +101,7 @@ pub fn addPathDir(self: *RunStep, search_path: []const u8) void {
|
||||
}
|
||||
|
||||
/// For internal use only, users of `RunStep` should use `addPathDir` directly.
|
||||
fn addPathDirInternal(step: *Step, builder: *Builder, search_path: []const u8) void {
|
||||
pub fn addPathDirInternal(step: *Step, builder: *Builder, search_path: []const u8) void {
|
||||
const env_map = getEnvMapInternal(step, builder.allocator);
|
||||
|
||||
const key = "PATH";
|
||||
|
||||
@@ -21,6 +21,7 @@ output_dir: ?[]const u8,
|
||||
out_basename: []const u8,
|
||||
target: CrossTarget = CrossTarget{},
|
||||
output_file: build.GeneratedFile,
|
||||
use_stage1: ?bool = null,
|
||||
|
||||
pub fn create(builder: *Builder, source: build.FileSource) *TranslateCStep {
|
||||
const self = builder.allocator.create(TranslateCStep) catch unreachable;
|
||||
@@ -91,6 +92,19 @@ fn make(step: *Step) !void {
|
||||
try argv_list.append("-D");
|
||||
try argv_list.append(c_macro);
|
||||
}
|
||||
if (self.use_stage1) |stage1| {
|
||||
if (stage1) {
|
||||
try argv_list.append("-fstage1");
|
||||
} else {
|
||||
try argv_list.append("-fno-stage1");
|
||||
}
|
||||
} else if (self.builder.use_stage1) |stage1| {
|
||||
if (stage1) {
|
||||
try argv_list.append("-fstage1");
|
||||
} else {
|
||||
try argv_list.append("-fno-stage1");
|
||||
}
|
||||
}
|
||||
|
||||
try argv_list.append(self.source.getPath(self.builder));
|
||||
|
||||
|
||||
+4
-3
@@ -294,6 +294,8 @@ pub const Type = union(enum) {
|
||||
/// therefore must be kept in sync with the compiler implementation.
|
||||
pub const Struct = struct {
|
||||
layout: ContainerLayout,
|
||||
/// Only valid if layout is .Packed
|
||||
backing_integer: ?type = null,
|
||||
fields: []const StructField,
|
||||
decls: []const Declaration,
|
||||
is_tuple: bool,
|
||||
@@ -864,13 +866,12 @@ pub fn panicUnwrapError(st: ?*StackTrace, err: anyerror) noreturn {
|
||||
|
||||
pub fn panicOutOfBounds(index: usize, len: usize) noreturn {
|
||||
@setCold(true);
|
||||
std.debug.panic("attempt to index out of bound: index {d}, len {d}", .{ index, len });
|
||||
std.debug.panic("index out of bounds: index {d}, len {d}", .{ index, len });
|
||||
}
|
||||
|
||||
pub noinline fn returnError(maybe_st: ?*StackTrace) void {
|
||||
pub noinline fn returnError(st: *StackTrace) void {
|
||||
@setCold(true);
|
||||
@setRuntimeSafety(false);
|
||||
const st = maybe_st orelse return;
|
||||
addErrRetTraceAddr(st, @returnAddress());
|
||||
}
|
||||
|
||||
|
||||
+6
-2
@@ -20,7 +20,7 @@ pub const Tokenizer = tokenizer.Tokenizer;
|
||||
/// If linking gnu libc (glibc), the `ok` value will be true if the target
|
||||
/// version is greater than or equal to `glibc_version`.
|
||||
/// If linking a libc other than these, returns `false`.
|
||||
pub fn versionCheck(glibc_version: std.builtin.Version) type {
|
||||
pub fn versionCheck(comptime glibc_version: std.builtin.Version) type {
|
||||
return struct {
|
||||
pub const ok = blk: {
|
||||
if (!builtin.link_libc) break :blk false;
|
||||
@@ -263,7 +263,11 @@ const PThreadForkFn = if (builtin.zig_backend == .stage1)
|
||||
fn () callconv(.C) void
|
||||
else
|
||||
*const fn () callconv(.C) void;
|
||||
pub extern "c" fn pthread_key_create(key: *c.pthread_key_t, destructor: ?fn (value: *anyopaque) callconv(.C) void) c.E;
|
||||
pub extern "c" fn pthread_key_create(key: *c.pthread_key_t, destructor: ?PThreadKeyCreateFn) c.E;
|
||||
const PThreadKeyCreateFn = if (builtin.zig_backend == .stage1)
|
||||
fn (value: *anyopaque) callconv(.C) void
|
||||
else
|
||||
*const fn (value: *anyopaque) callconv(.C) void;
|
||||
pub extern "c" fn pthread_key_delete(key: c.pthread_key_t) c.E;
|
||||
pub extern "c" fn pthread_getspecific(key: c.pthread_key_t) ?*anyopaque;
|
||||
pub extern "c" fn pthread_setspecific(key: c.pthread_key_t, value: ?*anyopaque) c_int;
|
||||
|
||||
@@ -814,10 +814,10 @@ pub const sigset_t = u32;
|
||||
pub const empty_sigset: sigset_t = 0;
|
||||
|
||||
pub const SIG = struct {
|
||||
pub const ERR = @intToPtr(?Sigaction.sigaction_fn, maxInt(usize));
|
||||
pub const DFL = @intToPtr(?Sigaction.sigaction_fn, 0);
|
||||
pub const IGN = @intToPtr(?Sigaction.sigaction_fn, 1);
|
||||
pub const HOLD = @intToPtr(?Sigaction.sigaction_fn, 5);
|
||||
pub const ERR = @intToPtr(?Sigaction.handler_fn, maxInt(usize));
|
||||
pub const DFL = @intToPtr(?Sigaction.handler_fn, 0);
|
||||
pub const IGN = @intToPtr(?Sigaction.handler_fn, 1);
|
||||
pub const HOLD = @intToPtr(?Sigaction.handler_fn, 5);
|
||||
|
||||
/// block specified signal set
|
||||
pub const _BLOCK = 1;
|
||||
|
||||
@@ -609,9 +609,9 @@ pub const S = struct {
|
||||
pub const BADSIG = SIG.ERR;
|
||||
|
||||
pub const SIG = struct {
|
||||
pub const DFL = @intToPtr(?Sigaction.sigaction_fn, 0);
|
||||
pub const IGN = @intToPtr(?Sigaction.sigaction_fn, 1);
|
||||
pub const ERR = @intToPtr(?Sigaction.sigaction_fn, maxInt(usize));
|
||||
pub const DFL = @intToPtr(?Sigaction.handler_fn, 0);
|
||||
pub const IGN = @intToPtr(?Sigaction.handler_fn, 1);
|
||||
pub const ERR = @intToPtr(?Sigaction.handler_fn, maxInt(usize));
|
||||
|
||||
pub const BLOCK = 1;
|
||||
pub const UNBLOCK = 2;
|
||||
|
||||
@@ -670,9 +670,9 @@ pub const SIG = struct {
|
||||
pub const UNBLOCK = 2;
|
||||
pub const SETMASK = 3;
|
||||
|
||||
pub const DFL = @intToPtr(?Sigaction.sigaction_fn, 0);
|
||||
pub const IGN = @intToPtr(?Sigaction.sigaction_fn, 1);
|
||||
pub const ERR = @intToPtr(?Sigaction.sigaction_fn, maxInt(usize));
|
||||
pub const DFL = @intToPtr(?Sigaction.handler_fn, 0);
|
||||
pub const IGN = @intToPtr(?Sigaction.handler_fn, 1);
|
||||
pub const ERR = @intToPtr(?Sigaction.handler_fn, maxInt(usize));
|
||||
|
||||
pub const WORDS = 4;
|
||||
pub const MAXSIG = 128;
|
||||
|
||||
+2
-2
@@ -702,7 +702,7 @@ pub const T = struct {
|
||||
pub const CSETAF = 0x8002;
|
||||
pub const CSETAW = 0x8003;
|
||||
pub const CWAITEVENT = 0x8004;
|
||||
pub const CSBRK = 08005;
|
||||
pub const CSBRK = 0x8005;
|
||||
pub const CFLSH = 0x8006;
|
||||
pub const CXONC = 0x8007;
|
||||
pub const CQUERYCONNECTED = 0x8008;
|
||||
@@ -874,7 +874,7 @@ pub const S = struct {
|
||||
pub const IFDIR = 0o040000;
|
||||
pub const IFCHR = 0o020000;
|
||||
pub const IFIFO = 0o010000;
|
||||
pub const INDEX_DIR = 04000000000;
|
||||
pub const INDEX_DIR = 0o4000000000;
|
||||
|
||||
pub const IUMSK = 0o7777;
|
||||
pub const ISUID = 0o4000;
|
||||
|
||||
@@ -910,9 +910,9 @@ pub const winsize = extern struct {
|
||||
const NSIG = 32;
|
||||
|
||||
pub const SIG = struct {
|
||||
pub const DFL = @intToPtr(?Sigaction.sigaction_fn, 0);
|
||||
pub const IGN = @intToPtr(?Sigaction.sigaction_fn, 1);
|
||||
pub const ERR = @intToPtr(?Sigaction.sigaction_fn, maxInt(usize));
|
||||
pub const DFL = @intToPtr(?Sigaction.handler_fn, 0);
|
||||
pub const IGN = @intToPtr(?Sigaction.handler_fn, 1);
|
||||
pub const ERR = @intToPtr(?Sigaction.handler_fn, maxInt(usize));
|
||||
|
||||
pub const WORDS = 4;
|
||||
pub const MAXSIG = 128;
|
||||
|
||||
+6
-21
@@ -982,11 +982,11 @@ pub const winsize = extern struct {
|
||||
const NSIG = 33;
|
||||
|
||||
pub const SIG = struct {
|
||||
pub const DFL = @intToPtr(?Sigaction.sigaction_fn, 0);
|
||||
pub const IGN = @intToPtr(?Sigaction.sigaction_fn, 1);
|
||||
pub const ERR = @intToPtr(?Sigaction.sigaction_fn, maxInt(usize));
|
||||
pub const CATCH = @intToPtr(?Sigaction.sigaction_fn, 2);
|
||||
pub const HOLD = @intToPtr(?Sigaction.sigaction_fn, 3);
|
||||
pub const DFL = @intToPtr(?Sigaction.handler_fn, 0);
|
||||
pub const IGN = @intToPtr(?Sigaction.handler_fn, 1);
|
||||
pub const ERR = @intToPtr(?Sigaction.handler_fn, maxInt(usize));
|
||||
pub const CATCH = @intToPtr(?Sigaction.handler_fn, 2);
|
||||
pub const HOLD = @intToPtr(?Sigaction.handler_fn, 3);
|
||||
|
||||
pub const HUP = 1;
|
||||
pub const INT = 2;
|
||||
@@ -1119,26 +1119,11 @@ pub usingnamespace switch (builtin.cpu.arch) {
|
||||
sc_rsp: c_long,
|
||||
sc_ss: c_long,
|
||||
|
||||
sc_fpstate: fxsave64,
|
||||
sc_fpstate: *anyopaque, // struct fxsave64 *
|
||||
__sc_unused: c_int,
|
||||
sc_mask: c_int,
|
||||
sc_cookie: c_long,
|
||||
};
|
||||
|
||||
pub const fxsave64 = packed struct {
|
||||
fx_fcw: u16,
|
||||
fx_fsw: u16,
|
||||
fx_ftw: u8,
|
||||
fx_unused1: u8,
|
||||
fx_fop: u16,
|
||||
fx_rip: u64,
|
||||
fx_rdp: u64,
|
||||
fx_mxcsr: u32,
|
||||
fx_mxcsr_mask: u32,
|
||||
fx_st: [8][2]u64,
|
||||
fx_xmm: [16][2]u64,
|
||||
fx_unused3: [96]u8,
|
||||
};
|
||||
},
|
||||
else => struct {},
|
||||
};
|
||||
|
||||
@@ -879,10 +879,10 @@ pub const winsize = extern struct {
|
||||
const NSIG = 75;
|
||||
|
||||
pub const SIG = struct {
|
||||
pub const DFL = @intToPtr(?Sigaction.sigaction_fn, 0);
|
||||
pub const ERR = @intToPtr(?Sigaction.sigaction_fn, maxInt(usize));
|
||||
pub const IGN = @intToPtr(?Sigaction.sigaction_fn, 1);
|
||||
pub const HOLD = @intToPtr(?Sigaction.sigaction_fn, 2);
|
||||
pub const DFL = @intToPtr(?Sigaction.handler_fn, 0);
|
||||
pub const ERR = @intToPtr(?Sigaction.handler_fn, maxInt(usize));
|
||||
pub const IGN = @intToPtr(?Sigaction.handler_fn, 1);
|
||||
pub const HOLD = @intToPtr(?Sigaction.handler_fn, 2);
|
||||
|
||||
pub const WORDS = 4;
|
||||
pub const MAXSIG = 75;
|
||||
|
||||
+982
-257
@@ -1,14 +1,731 @@
|
||||
const std = @import("std.zig");
|
||||
const assert = std.debug.assert;
|
||||
const io = std.io;
|
||||
const mem = std.mem;
|
||||
const os = std.os;
|
||||
const File = std.fs.File;
|
||||
const fs = std.fs;
|
||||
|
||||
// CoffHeader.machine values
|
||||
// see https://msdn.microsoft.com/en-us/library/windows/desktop/ms680313(v=vs.85).aspx
|
||||
const IMAGE_FILE_MACHINE_I386 = 0x014c;
|
||||
const IMAGE_FILE_MACHINE_IA64 = 0x0200;
|
||||
const IMAGE_FILE_MACHINE_AMD64 = 0x8664;
|
||||
pub const CoffHeaderFlags = packed struct {
|
||||
/// Image only, Windows CE, and Microsoft Windows NT and later.
|
||||
/// This indicates that the file does not contain base relocations
|
||||
/// and must therefore be loaded at its preferred base address.
|
||||
/// If the base address is not available, the loader reports an error.
|
||||
/// The default behavior of the linker is to strip base relocations
|
||||
/// from executable (EXE) files.
|
||||
RELOCS_STRIPPED: u1 = 0,
|
||||
|
||||
/// Image only. This indicates that the image file is valid and can be run.
|
||||
/// If this flag is not set, it indicates a linker error.
|
||||
EXECUTABLE_IMAGE: u1 = 0,
|
||||
|
||||
/// COFF line numbers have been removed. This flag is deprecated and should be zero.
|
||||
LINE_NUMS_STRIPPED: u1 = 0,
|
||||
|
||||
/// COFF symbol table entries for local symbols have been removed.
|
||||
/// This flag is deprecated and should be zero.
|
||||
LOCAL_SYMS_STRIPPED: u1 = 0,
|
||||
|
||||
/// Obsolete. Aggressively trim working set.
|
||||
/// This flag is deprecated for Windows 2000 and later and must be zero.
|
||||
AGGRESSIVE_WS_TRIM: u1 = 0,
|
||||
|
||||
/// Application can handle > 2-GB addresses.
|
||||
LARGE_ADDRESS_AWARE: u1 = 0,
|
||||
|
||||
/// This flag is reserved for future use.
|
||||
RESERVED: u1 = 0,
|
||||
|
||||
/// Little endian: the least significant bit (LSB) precedes the
|
||||
/// most significant bit (MSB) in memory. This flag is deprecated and should be zero.
|
||||
BYTES_REVERSED_LO: u1 = 0,
|
||||
|
||||
/// Machine is based on a 32-bit-word architecture.
|
||||
@"32BIT_MACHINE": u1 = 0,
|
||||
|
||||
/// Debugging information is removed from the image file.
|
||||
DEBUG_STRIPPED: u1 = 0,
|
||||
|
||||
/// If the image is on removable media, fully load it and copy it to the swap file.
|
||||
REMOVABLE_RUN_FROM_SWAP: u1 = 0,
|
||||
|
||||
/// If the image is on network media, fully load it and copy it to the swap file.
|
||||
NET_RUN_FROM_SWAP: u1 = 0,
|
||||
|
||||
/// The image file is a system file, not a user program.
|
||||
SYSTEM: u1 = 0,
|
||||
|
||||
/// The image file is a dynamic-link library (DLL).
|
||||
/// Such files are considered executable files for almost all purposes,
|
||||
/// although they cannot be directly run.
|
||||
DLL: u1 = 0,
|
||||
|
||||
/// The file should be run only on a uniprocessor machine.
|
||||
UP_SYSTEM_ONLY: u1 = 0,
|
||||
|
||||
/// Big endian: the MSB precedes the LSB in memory. This flag is deprecated and should be zero.
|
||||
BYTES_REVERSED_HI: u1 = 0,
|
||||
};
|
||||
|
||||
pub const CoffHeader = extern struct {
|
||||
/// The number that identifies the type of target machine.
|
||||
machine: MachineType,
|
||||
|
||||
/// The number of sections. This indicates the size of the section table, which immediately follows the headers.
|
||||
number_of_sections: u16,
|
||||
|
||||
/// The low 32 bits of the number of seconds since 00:00 January 1, 1970 (a C run-time time_t value),
|
||||
/// which indicates when the file was created.
|
||||
time_date_stamp: u32,
|
||||
|
||||
/// The file offset of the COFF symbol table, or zero if no COFF symbol table is present.
|
||||
/// This value should be zero for an image because COFF debugging information is deprecated.
|
||||
pointer_to_symbol_table: u32,
|
||||
|
||||
/// The number of entries in the symbol table.
|
||||
/// This data can be used to locate the string table, which immediately follows the symbol table.
|
||||
/// This value should be zero for an image because COFF debugging information is deprecated.
|
||||
number_of_symbols: u32,
|
||||
|
||||
/// The size of the optional header, which is required for executable files but not for object files.
|
||||
/// This value should be zero for an object file. For a description of the header format, see Optional Header (Image Only).
|
||||
size_of_optional_header: u16,
|
||||
|
||||
/// The flags that indicate the attributes of the file.
|
||||
flags: CoffHeaderFlags,
|
||||
};
|
||||
|
||||
// OptionalHeader.magic values
|
||||
// see https://msdn.microsoft.com/en-us/library/windows/desktop/ms680339(v=vs.85).aspx
|
||||
pub const IMAGE_NT_OPTIONAL_HDR32_MAGIC = 0x10b;
|
||||
pub const IMAGE_NT_OPTIONAL_HDR64_MAGIC = 0x20b;
|
||||
|
||||
pub const DllFlags = packed struct {
|
||||
_reserved_0: u5 = 0,
|
||||
|
||||
/// Image can handle a high entropy 64-bit virtual address space.
|
||||
HIGH_ENTROPY_VA: u1 = 0,
|
||||
|
||||
/// DLL can be relocated at load time.
|
||||
DYNAMIC_BASE: u1 = 0,
|
||||
|
||||
/// Code Integrity checks are enforced.
|
||||
FORCE_INTEGRITY: u1 = 0,
|
||||
|
||||
/// Image is NX compatible.
|
||||
NX_COMPAT: u1 = 0,
|
||||
|
||||
/// Isolation aware, but do not isolate the image.
|
||||
NO_ISOLATION: u1 = 0,
|
||||
|
||||
/// Does not use structured exception (SE) handling. No SE handler may be called in this image.
|
||||
NO_SEH: u1 = 0,
|
||||
|
||||
/// Do not bind the image.
|
||||
NO_BIND: u1 = 0,
|
||||
|
||||
/// Image must execute in an AppContainer.
|
||||
APPCONTAINER: u1 = 0,
|
||||
|
||||
/// A WDM driver.
|
||||
WDM_DRIVER: u1 = 0,
|
||||
|
||||
/// Image supports Control Flow Guard.
|
||||
GUARD_CF: u1 = 0,
|
||||
|
||||
/// Terminal Server aware.
|
||||
TERMINAL_SERVER_AWARE: u1 = 0,
|
||||
};
|
||||
|
||||
pub const Subsystem = enum(u16) {
|
||||
/// An unknown subsystem
|
||||
UNKNOWN = 0,
|
||||
|
||||
/// Device drivers and native Windows processes
|
||||
NATIVE = 1,
|
||||
|
||||
/// The Windows graphical user interface (GUI) subsystem
|
||||
WINDOWS_GUI = 2,
|
||||
|
||||
/// The Windows character subsystem
|
||||
WINDOWS_CUI = 3,
|
||||
|
||||
/// The OS/2 character subsystem
|
||||
OS2_CUI = 5,
|
||||
|
||||
/// The Posix character subsystem
|
||||
POSIX_CUI = 7,
|
||||
|
||||
/// Native Win9x driver
|
||||
NATIVE_WINDOWS = 8,
|
||||
|
||||
/// Windows CE
|
||||
WINDOWS_CE_GUI = 9,
|
||||
|
||||
/// An Extensible Firmware Interface (EFI) application
|
||||
EFI_APPLICATION = 10,
|
||||
|
||||
/// An EFI driver with boot services
|
||||
EFI_BOOT_SERVICE_DRIVER = 11,
|
||||
|
||||
/// An EFI driver with run-time services
|
||||
EFI_RUNTIME_DRIVER = 12,
|
||||
|
||||
/// An EFI ROM image
|
||||
EFI_ROM = 13,
|
||||
|
||||
/// XBOX
|
||||
XBOX = 14,
|
||||
|
||||
/// Windows boot application
|
||||
WINDOWS_BOOT_APPLICATION = 16,
|
||||
};
|
||||
|
||||
pub const OptionalHeader = extern struct {
|
||||
magic: u16,
|
||||
major_linker_version: u8,
|
||||
minor_linker_version: u8,
|
||||
size_of_code: u32,
|
||||
size_of_initialized_data: u32,
|
||||
size_of_uninitialized_data: u32,
|
||||
address_of_entry_point: u32,
|
||||
base_of_code: u32,
|
||||
};
|
||||
|
||||
pub const OptionalHeaderPE32 = extern struct {
|
||||
magic: u16,
|
||||
major_linker_version: u8,
|
||||
minor_linker_version: u8,
|
||||
size_of_code: u32,
|
||||
size_of_initialized_data: u32,
|
||||
size_of_uninitialized_data: u32,
|
||||
address_of_entry_point: u32,
|
||||
base_of_code: u32,
|
||||
base_of_data: u32,
|
||||
image_base: u32,
|
||||
section_alignment: u32,
|
||||
file_alignment: u32,
|
||||
major_operating_system_version: u16,
|
||||
minor_operating_system_version: u16,
|
||||
major_image_version: u16,
|
||||
minor_image_version: u16,
|
||||
major_subsystem_version: u16,
|
||||
minor_subsystem_version: u16,
|
||||
win32_version_value: u32,
|
||||
size_of_image: u32,
|
||||
size_of_headers: u32,
|
||||
checksum: u32,
|
||||
subsystem: Subsystem,
|
||||
dll_flags: DllFlags,
|
||||
size_of_stack_reserve: u32,
|
||||
size_of_stack_commit: u32,
|
||||
size_of_heap_reserve: u32,
|
||||
size_of_heap_commit: u32,
|
||||
loader_flags: u32,
|
||||
number_of_rva_and_sizes: u32,
|
||||
};
|
||||
|
||||
pub const OptionalHeaderPE64 = extern struct {
|
||||
magic: u16,
|
||||
major_linker_version: u8,
|
||||
minor_linker_version: u8,
|
||||
size_of_code: u32,
|
||||
size_of_initialized_data: u32,
|
||||
size_of_uninitialized_data: u32,
|
||||
address_of_entry_point: u32,
|
||||
base_of_code: u32,
|
||||
image_base: u64,
|
||||
section_alignment: u32,
|
||||
file_alignment: u32,
|
||||
major_operating_system_version: u16,
|
||||
minor_operating_system_version: u16,
|
||||
major_image_version: u16,
|
||||
minor_image_version: u16,
|
||||
major_subsystem_version: u16,
|
||||
minor_subsystem_version: u16,
|
||||
win32_version_value: u32,
|
||||
size_of_image: u32,
|
||||
size_of_headers: u32,
|
||||
checksum: u32,
|
||||
subsystem: Subsystem,
|
||||
dll_flags: DllFlags,
|
||||
size_of_stack_reserve: u64,
|
||||
size_of_stack_commit: u64,
|
||||
size_of_heap_reserve: u64,
|
||||
size_of_heap_commit: u64,
|
||||
loader_flags: u32,
|
||||
number_of_rva_and_sizes: u32,
|
||||
};
|
||||
|
||||
pub const DebugDirectoryEntry = extern struct {
|
||||
characteristiccs: u32,
|
||||
time_date_stamp: u32,
|
||||
major_version: u16,
|
||||
minor_version: u16,
|
||||
@"type": u32,
|
||||
size_of_data: u32,
|
||||
address_of_raw_data: u32,
|
||||
pointer_to_raw_data: u32,
|
||||
};
|
||||
|
||||
pub const ImageDataDirectory = extern struct {
|
||||
virtual_address: u32,
|
||||
size: u32,
|
||||
};
|
||||
|
||||
pub const SectionHeader = extern struct {
|
||||
name: [8]u8,
|
||||
virtual_size: u32,
|
||||
virtual_address: u32,
|
||||
size_of_raw_data: u32,
|
||||
pointer_to_raw_data: u32,
|
||||
pointer_to_relocations: u32,
|
||||
pointer_to_linenumbers: u32,
|
||||
number_of_relocations: u16,
|
||||
number_of_linenumbers: u16,
|
||||
flags: SectionHeaderFlags,
|
||||
|
||||
pub fn getName(self: *align(1) const SectionHeader) ?[]const u8 {
|
||||
if (self.name[0] == '/') return null;
|
||||
const len = std.mem.indexOfScalar(u8, &self.name, @as(u8, 0)) orelse self.name.len;
|
||||
return self.name[0..len];
|
||||
}
|
||||
|
||||
pub fn getNameOffset(self: SectionHeader) ?u32 {
|
||||
if (self.name[0] != '/') return null;
|
||||
const len = std.mem.indexOfScalar(u8, &self.name, @as(u8, 0)) orelse self.name.len;
|
||||
const offset = std.fmt.parseInt(u32, self.name[1..len], 10) catch unreachable;
|
||||
return offset;
|
||||
}
|
||||
|
||||
/// Applicable only to section headers in COFF objects.
|
||||
pub fn getAlignment(self: SectionHeader) ?u16 {
|
||||
if (self.flags.ALIGN == 0) return null;
|
||||
return std.math.powi(u16, 2, self.flags.ALIGN - 1) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn isComdat(self: SectionHeader) bool {
|
||||
return self.flags.LNK_COMDAT == 0b1;
|
||||
}
|
||||
};
|
||||
|
||||
pub const SectionHeaderFlags = packed struct {
|
||||
_reserved_0: u3 = 0,
|
||||
|
||||
/// The section should not be padded to the next boundary.
|
||||
/// This flag is obsolete and is replaced by IMAGE_SCN_ALIGN_1BYTES.
|
||||
/// This is valid only for object files.
|
||||
TYPE_NO_PAD: u1 = 0,
|
||||
|
||||
_reserved_1: u1 = 0,
|
||||
|
||||
/// The section contains executable code.
|
||||
CNT_CODE: u1 = 0,
|
||||
|
||||
/// The section contains initialized data.
|
||||
CNT_INITIALIZED_DATA: u1 = 0,
|
||||
|
||||
/// The section contains uninitialized data.
|
||||
CNT_UNINITIALIZED_DATA: u1 = 0,
|
||||
|
||||
/// Reserved for future use.
|
||||
LNK_OTHER: u1 = 0,
|
||||
|
||||
/// The section contains comments or other information.
|
||||
/// The .drectve section has this type.
|
||||
/// This is valid for object files only.
|
||||
LNK_INFO: u1 = 0,
|
||||
|
||||
_reserverd_2: u1 = 0,
|
||||
|
||||
/// The section will not become part of the image.
|
||||
/// This is valid only for object files.
|
||||
LNK_REMOVE: u1 = 0,
|
||||
|
||||
/// The section contains COMDAT data.
|
||||
/// For more information, see COMDAT Sections (Object Only).
|
||||
/// This is valid only for object files.
|
||||
LNK_COMDAT: u1 = 0,
|
||||
|
||||
_reserved_3: u2 = 0,
|
||||
|
||||
/// The section contains data referenced through the global pointer (GP).
|
||||
GPREL: u1 = 0,
|
||||
|
||||
/// Reserved for future use.
|
||||
MEM_PURGEABLE: u1 = 0,
|
||||
|
||||
/// Reserved for future use.
|
||||
MEM_16BIT: u1 = 0,
|
||||
|
||||
/// Reserved for future use.
|
||||
MEM_LOCKED: u1 = 0,
|
||||
|
||||
/// Reserved for future use.
|
||||
MEM_PRELOAD: u1 = 0,
|
||||
|
||||
/// Takes on multiple values according to flags:
|
||||
/// pub const IMAGE_SCN_ALIGN_1BYTES: u32 = 0x100000;
|
||||
/// pub const IMAGE_SCN_ALIGN_2BYTES: u32 = 0x200000;
|
||||
/// pub const IMAGE_SCN_ALIGN_4BYTES: u32 = 0x300000;
|
||||
/// pub const IMAGE_SCN_ALIGN_8BYTES: u32 = 0x400000;
|
||||
/// pub const IMAGE_SCN_ALIGN_16BYTES: u32 = 0x500000;
|
||||
/// pub const IMAGE_SCN_ALIGN_32BYTES: u32 = 0x600000;
|
||||
/// pub const IMAGE_SCN_ALIGN_64BYTES: u32 = 0x700000;
|
||||
/// pub const IMAGE_SCN_ALIGN_128BYTES: u32 = 0x800000;
|
||||
/// pub const IMAGE_SCN_ALIGN_256BYTES: u32 = 0x900000;
|
||||
/// pub const IMAGE_SCN_ALIGN_512BYTES: u32 = 0xA00000;
|
||||
/// pub const IMAGE_SCN_ALIGN_1024BYTES: u32 = 0xB00000;
|
||||
/// pub const IMAGE_SCN_ALIGN_2048BYTES: u32 = 0xC00000;
|
||||
/// pub const IMAGE_SCN_ALIGN_4096BYTES: u32 = 0xD00000;
|
||||
/// pub const IMAGE_SCN_ALIGN_8192BYTES: u32 = 0xE00000;
|
||||
ALIGN: u4 = 0,
|
||||
|
||||
/// The section contains extended relocations.
|
||||
LNK_NRELOC_OVFL: u1 = 0,
|
||||
|
||||
/// The section can be discarded as needed.
|
||||
MEM_DISCARDABLE: u1 = 0,
|
||||
|
||||
/// The section cannot be cached.
|
||||
MEM_NOT_CACHED: u1 = 0,
|
||||
|
||||
/// The section is not pageable.
|
||||
MEM_NOT_PAGED: u1 = 0,
|
||||
|
||||
/// The section can be shared in memory.
|
||||
MEM_SHARED: u1 = 0,
|
||||
|
||||
/// The section can be executed as code.
|
||||
MEM_EXECUTE: u1 = 0,
|
||||
|
||||
/// The section can be read.
|
||||
MEM_READ: u1 = 0,
|
||||
|
||||
/// The section can be written to.
|
||||
MEM_WRITE: u1 = 0,
|
||||
};
|
||||
|
||||
pub const Symbol = struct {
|
||||
name: [8]u8,
|
||||
value: u32,
|
||||
section_number: SectionNumber,
|
||||
@"type": SymType,
|
||||
storage_class: StorageClass,
|
||||
number_of_aux_symbols: u8,
|
||||
|
||||
pub fn sizeOf() usize {
|
||||
return 18;
|
||||
}
|
||||
|
||||
pub fn getName(self: *const Symbol) ?[]const u8 {
|
||||
if (std.mem.eql(u8, self.name[0..4], "\x00\x00\x00\x00")) return null;
|
||||
const len = std.mem.indexOfScalar(u8, &self.name, @as(u8, 0)) orelse self.name.len;
|
||||
return self.name[0..len];
|
||||
}
|
||||
|
||||
pub fn getNameOffset(self: Symbol) ?u32 {
|
||||
if (!std.mem.eql(u8, self.name[0..4], "\x00\x00\x00\x00")) return null;
|
||||
const offset = std.mem.readIntLittle(u32, self.name[4..8]);
|
||||
return offset;
|
||||
}
|
||||
};
|
||||
|
||||
pub const SectionNumber = enum(u16) {
|
||||
/// The symbol record is not yet assigned a section.
|
||||
/// A value of zero indicates that a reference to an external symbol is defined elsewhere.
|
||||
/// A value of non-zero is a common symbol with a size that is specified by the value.
|
||||
UNDEFINED = 0,
|
||||
|
||||
/// The symbol has an absolute (non-relocatable) value and is not an address.
|
||||
ABSOLUTE = 0xffff,
|
||||
|
||||
/// The symbol provides general type or debugging information but does not correspond to a section.
|
||||
/// Microsoft tools use this setting along with .file records (storage class FILE).
|
||||
DEBUG = 0xfffe,
|
||||
_,
|
||||
};
|
||||
|
||||
pub const SymType = packed struct {
|
||||
complex_type: ComplexType,
|
||||
base_type: BaseType,
|
||||
};
|
||||
|
||||
pub const BaseType = enum(u8) {
|
||||
/// No type information or unknown base type. Microsoft tools use this setting
|
||||
NULL = 0,
|
||||
|
||||
/// No valid type; used with void pointers and functions
|
||||
VOID = 1,
|
||||
|
||||
/// A character (signed byte)
|
||||
CHAR = 2,
|
||||
|
||||
/// A 2-byte signed integer
|
||||
SHORT = 3,
|
||||
|
||||
/// A natural integer type (normally 4 bytes in Windows)
|
||||
INT = 4,
|
||||
|
||||
/// A 4-byte signed integer
|
||||
LONG = 5,
|
||||
|
||||
/// A 4-byte floating-point number
|
||||
FLOAT = 6,
|
||||
|
||||
/// An 8-byte floating-point number
|
||||
DOUBLE = 7,
|
||||
|
||||
/// A structure
|
||||
STRUCT = 8,
|
||||
|
||||
/// A union
|
||||
UNION = 9,
|
||||
|
||||
/// An enumerated type
|
||||
ENUM = 10,
|
||||
|
||||
/// A member of enumeration (a specified value)
|
||||
MOE = 11,
|
||||
|
||||
/// A byte; unsigned 1-byte integer
|
||||
BYTE = 12,
|
||||
|
||||
/// A word; unsigned 2-byte integer
|
||||
WORD = 13,
|
||||
|
||||
/// An unsigned integer of natural size (normally, 4 bytes)
|
||||
UINT = 14,
|
||||
|
||||
/// An unsigned 4-byte integer
|
||||
DWORD = 15,
|
||||
};
|
||||
|
||||
pub const ComplexType = enum(u8) {
|
||||
/// No derived type; the symbol is a simple scalar variable.
|
||||
NULL = 0,
|
||||
|
||||
/// The symbol is a pointer to base type.
|
||||
POINTER = 16,
|
||||
|
||||
/// The symbol is a function that returns a base type.
|
||||
FUNCTION = 32,
|
||||
|
||||
/// The symbol is an array of base type.
|
||||
ARRAY = 48,
|
||||
};
|
||||
|
||||
pub const StorageClass = enum(u8) {
|
||||
/// A special symbol that represents the end of function, for debugging purposes.
|
||||
END_OF_FUNCTION = 0xff,
|
||||
|
||||
/// No assigned storage class.
|
||||
NULL = 0,
|
||||
|
||||
/// The automatic (stack) variable. The Value field specifies the stack frame offset.
|
||||
AUTOMATIC = 1,
|
||||
|
||||
/// A value that Microsoft tools use for external symbols.
|
||||
/// The Value field indicates the size if the section number is IMAGE_SYM_UNDEFINED (0).
|
||||
/// If the section number is not zero, then the Value field specifies the offset within the section.
|
||||
EXTERNAL = 2,
|
||||
|
||||
/// The offset of the symbol within the section.
|
||||
/// If the Value field is zero, then the symbol represents a section name.
|
||||
STATIC = 3,
|
||||
|
||||
/// A register variable.
|
||||
/// The Value field specifies the register number.
|
||||
REGISTER = 4,
|
||||
|
||||
/// A symbol that is defined externally.
|
||||
EXTERNAL_DEF = 5,
|
||||
|
||||
/// A code label that is defined within the module.
|
||||
/// The Value field specifies the offset of the symbol within the section.
|
||||
LABEL = 6,
|
||||
|
||||
/// A reference to a code label that is not defined.
|
||||
UNDEFINED_LABEL = 7,
|
||||
|
||||
/// The structure member. The Value field specifies the n th member.
|
||||
MEMBER_OF_STRUCT = 8,
|
||||
|
||||
/// A formal argument (parameter) of a function. The Value field specifies the n th argument.
|
||||
ARGUMENT = 9,
|
||||
|
||||
/// The structure tag-name entry.
|
||||
STRUCT_TAG = 10,
|
||||
|
||||
/// A union member. The Value field specifies the n th member.
|
||||
MEMBER_OF_UNION = 11,
|
||||
|
||||
/// The Union tag-name entry.
|
||||
UNION_TAG = 12,
|
||||
|
||||
/// A Typedef entry.
|
||||
TYPE_DEFINITION = 13,
|
||||
|
||||
/// A static data declaration.
|
||||
UNDEFINED_STATIC = 14,
|
||||
|
||||
/// An enumerated type tagname entry.
|
||||
ENUM_TAG = 15,
|
||||
|
||||
/// A member of an enumeration. The Value field specifies the n th member.
|
||||
MEMBER_OF_ENUM = 16,
|
||||
|
||||
/// A register parameter.
|
||||
REGISTER_PARAM = 17,
|
||||
|
||||
/// A bit-field reference. The Value field specifies the n th bit in the bit field.
|
||||
BIT_FIELD = 18,
|
||||
|
||||
/// A .bb (beginning of block) or .eb (end of block) record.
|
||||
/// The Value field is the relocatable address of the code location.
|
||||
BLOCK = 100,
|
||||
|
||||
/// A value that Microsoft tools use for symbol records that define the extent of a function: begin function (.bf ), end function ( .ef ), and lines in function ( .lf ).
|
||||
/// For .lf records, the Value field gives the number of source lines in the function.
|
||||
/// For .ef records, the Value field gives the size of the function code.
|
||||
FUNCTION = 101,
|
||||
|
||||
/// An end-of-structure entry.
|
||||
END_OF_STRUCT = 102,
|
||||
|
||||
/// A value that Microsoft tools, as well as traditional COFF format, use for the source-file symbol record.
|
||||
/// The symbol is followed by auxiliary records that name the file.
|
||||
FILE = 103,
|
||||
|
||||
/// A definition of a section (Microsoft tools use STATIC storage class instead).
|
||||
SECTION = 104,
|
||||
|
||||
/// A weak external. For more information, see Auxiliary Format 3: Weak Externals.
|
||||
WEAK_EXTERNAL = 105,
|
||||
|
||||
/// A CLR token symbol. The name is an ASCII string that consists of the hexadecimal value of the token.
|
||||
/// For more information, see CLR Token Definition (Object Only).
|
||||
CLR_TOKEN = 107,
|
||||
};
|
||||
|
||||
pub const FunctionDefinition = struct {
|
||||
/// The symbol-table index of the corresponding .bf (begin function) symbol record.
|
||||
tag_index: u32,
|
||||
|
||||
/// The size of the executable code for the function itself.
|
||||
/// If the function is in its own section, the SizeOfRawData in the section header is greater or equal to this field,
|
||||
/// depending on alignment considerations.
|
||||
total_size: u32,
|
||||
|
||||
/// The file offset of the first COFF line-number entry for the function, or zero if none exists.
|
||||
pointer_to_linenumber: u32,
|
||||
|
||||
/// The symbol-table index of the record for the next function.
|
||||
/// If the function is the last in the symbol table, this field is set to zero.
|
||||
pointer_to_next_function: u32,
|
||||
|
||||
unused: [2]u8,
|
||||
};
|
||||
|
||||
pub const SectionDefinition = struct {
|
||||
/// The size of section data; the same as SizeOfRawData in the section header.
|
||||
length: u32,
|
||||
|
||||
/// The number of relocation entries for the section.
|
||||
number_of_relocations: u16,
|
||||
|
||||
/// The number of line-number entries for the section.
|
||||
number_of_linenumbers: u16,
|
||||
|
||||
/// The checksum for communal data. It is applicable if the IMAGE_SCN_LNK_COMDAT flag is set in the section header.
|
||||
checksum: u32,
|
||||
|
||||
/// One-based index into the section table for the associated section. This is used when the COMDAT selection setting is 5.
|
||||
number: u16,
|
||||
|
||||
/// The COMDAT selection number. This is applicable if the section is a COMDAT section.
|
||||
selection: ComdatSelection,
|
||||
|
||||
unused: [3]u8,
|
||||
};
|
||||
|
||||
pub const FileDefinition = struct {
|
||||
/// An ANSI string that gives the name of the source file.
|
||||
/// This is padded with nulls if it is less than the maximum length.
|
||||
file_name: [18]u8,
|
||||
|
||||
pub fn getFileName(self: *const FileDefinition) []const u8 {
|
||||
const len = std.mem.indexOfScalar(u8, &self.file_name, @as(u8, 0)) orelse self.file_name.len;
|
||||
return self.file_name[0..len];
|
||||
}
|
||||
};
|
||||
|
||||
pub const WeakExternalDefinition = struct {
|
||||
/// The symbol-table index of sym2, the symbol to be linked if sym1 is not found.
|
||||
tag_index: u32,
|
||||
|
||||
/// A value of IMAGE_WEAK_EXTERN_SEARCH_NOLIBRARY indicates that no library search for sym1 should be performed.
|
||||
/// A value of IMAGE_WEAK_EXTERN_SEARCH_LIBRARY indicates that a library search for sym1 should be performed.
|
||||
/// A value of IMAGE_WEAK_EXTERN_SEARCH_ALIAS indicates that sym1 is an alias for sym2.
|
||||
flag: WeakExternalFlag,
|
||||
|
||||
unused: [10]u8,
|
||||
};
|
||||
|
||||
// https://github.com/tpn/winsdk-10/blob/master/Include/10.0.16299.0/km/ntimage.h
|
||||
pub const WeakExternalFlag = enum(u32) {
|
||||
SEARCH_NOLIBRARY = 1,
|
||||
SEARCH_LIBRARY = 2,
|
||||
SEARCH_ALIAS = 3,
|
||||
ANTI_DEPENDENCY = 4,
|
||||
};
|
||||
|
||||
pub const ComdatSelection = enum(u8) {
|
||||
/// Not a COMDAT section.
|
||||
NONE = 0,
|
||||
|
||||
/// If this symbol is already defined, the linker issues a "multiply defined symbol" error.
|
||||
NODUPLICATES = 1,
|
||||
|
||||
/// Any section that defines the same COMDAT symbol can be linked; the rest are removed.
|
||||
ANY = 2,
|
||||
|
||||
/// The linker chooses an arbitrary section among the definitions for this symbol.
|
||||
/// If all definitions are not the same size, a "multiply defined symbol" error is issued.
|
||||
SAME_SIZE = 3,
|
||||
|
||||
/// The linker chooses an arbitrary section among the definitions for this symbol.
|
||||
/// If all definitions do not match exactly, a "multiply defined symbol" error is issued.
|
||||
EXACT_MATCH = 4,
|
||||
|
||||
/// The section is linked if a certain other COMDAT section is linked.
|
||||
/// This other section is indicated by the Number field of the auxiliary symbol record for the section definition.
|
||||
/// This setting is useful for definitions that have components in multiple sections
|
||||
/// (for example, code in one and data in another), but where all must be linked or discarded as a set.
|
||||
/// The other section this section is associated with must be a COMDAT section, which can be another
|
||||
/// associative COMDAT section. An associative COMDAT section's section association chain can't form a loop.
|
||||
/// The section association chain must eventually come to a COMDAT section that doesn't have IMAGE_COMDAT_SELECT_ASSOCIATIVE set.
|
||||
ASSOCIATIVE = 5,
|
||||
|
||||
/// The linker chooses the largest definition from among all of the definitions for this symbol.
|
||||
/// If multiple definitions have this size, the choice between them is arbitrary.
|
||||
LARGEST = 6,
|
||||
};
|
||||
|
||||
pub const DebugInfoDefinition = struct {
|
||||
unused_1: [4]u8,
|
||||
|
||||
/// The actual ordinal line number (1, 2, 3, and so on) within the source file, corresponding to the .bf or .ef record.
|
||||
linenumber: u16,
|
||||
|
||||
unused_2: [6]u8,
|
||||
|
||||
/// The symbol-table index of the next .bf symbol record.
|
||||
/// If the function is the last in the symbol table, this field is set to zero.
|
||||
/// It is not used for .ef records.
|
||||
pointer_to_next_function: u32,
|
||||
|
||||
unused_3: [2]u8,
|
||||
};
|
||||
|
||||
pub const MachineType = enum(u16) {
|
||||
Unknown = 0x0,
|
||||
@@ -77,25 +794,6 @@ pub const MachineType = enum(u16) {
|
||||
}
|
||||
};
|
||||
|
||||
// OptionalHeader.magic values
|
||||
// see https://msdn.microsoft.com/en-us/library/windows/desktop/ms680339(v=vs.85).aspx
|
||||
const IMAGE_NT_OPTIONAL_HDR32_MAGIC = 0x10b;
|
||||
const IMAGE_NT_OPTIONAL_HDR64_MAGIC = 0x20b;
|
||||
|
||||
// Image Characteristics
|
||||
pub const IMAGE_FILE_RELOCS_STRIPPED = 0x1;
|
||||
pub const IMAGE_FILE_DEBUG_STRIPPED = 0x200;
|
||||
pub const IMAGE_FILE_EXECUTABLE_IMAGE = 0x2;
|
||||
pub const IMAGE_FILE_32BIT_MACHINE = 0x100;
|
||||
pub const IMAGE_FILE_LARGE_ADDRESS_AWARE = 0x20;
|
||||
|
||||
// Section flags
|
||||
pub const IMAGE_SCN_CNT_INITIALIZED_DATA = 0x40;
|
||||
pub const IMAGE_SCN_MEM_READ = 0x40000000;
|
||||
pub const IMAGE_SCN_CNT_CODE = 0x20;
|
||||
pub const IMAGE_SCN_MEM_EXECUTE = 0x20000000;
|
||||
pub const IMAGE_SCN_MEM_WRITE = 0x80000000;
|
||||
|
||||
const IMAGE_NUMBEROF_DIRECTORY_ENTRIES = 16;
|
||||
const IMAGE_DEBUG_TYPE_CODEVIEW = 2;
|
||||
const DEBUG_DIRECTORY = 6;
|
||||
@@ -104,166 +802,87 @@ pub const CoffError = error{
|
||||
InvalidPEMagic,
|
||||
InvalidPEHeader,
|
||||
InvalidMachine,
|
||||
MissingPEHeader,
|
||||
MissingCoffSection,
|
||||
MissingStringTable,
|
||||
};
|
||||
|
||||
// Official documentation of the format: https://docs.microsoft.com/en-us/windows/win32/debug/pe-format
|
||||
pub const Coff = struct {
|
||||
in_file: File,
|
||||
allocator: mem.Allocator,
|
||||
data: []const u8 = undefined,
|
||||
is_image: bool = false,
|
||||
coff_header_offset: usize = 0,
|
||||
|
||||
coff_header: CoffHeader,
|
||||
pe_header: OptionalHeader,
|
||||
sections: std.ArrayListUnmanaged(Section) = .{},
|
||||
|
||||
guid: [16]u8,
|
||||
age: u32,
|
||||
|
||||
pub fn init(allocator: mem.Allocator, in_file: File) Coff {
|
||||
return Coff{
|
||||
.in_file = in_file,
|
||||
.allocator = allocator,
|
||||
.coff_header = undefined,
|
||||
.pe_header = undefined,
|
||||
.guid = undefined,
|
||||
.age = undefined,
|
||||
};
|
||||
}
|
||||
guid: [16]u8 = undefined,
|
||||
age: u32 = undefined,
|
||||
|
||||
pub fn deinit(self: *Coff) void {
|
||||
self.sections.deinit(self.allocator);
|
||||
self.allocator.free(self.data);
|
||||
}
|
||||
|
||||
pub fn loadHeader(self: *Coff) !void {
|
||||
/// Takes ownership of `data`.
|
||||
pub fn parse(self: *Coff, data: []const u8) !void {
|
||||
self.data = data;
|
||||
|
||||
const pe_pointer_offset = 0x3C;
|
||||
const pe_magic = "PE\x00\x00";
|
||||
|
||||
const in = self.in_file.reader();
|
||||
var stream = std.io.fixedBufferStream(self.data);
|
||||
const reader = stream.reader();
|
||||
try stream.seekTo(pe_pointer_offset);
|
||||
const coff_header_offset = try reader.readByte();
|
||||
try stream.seekTo(coff_header_offset);
|
||||
var buf: [4]u8 = undefined;
|
||||
try reader.readNoEof(&buf);
|
||||
self.is_image = mem.eql(u8, pe_magic, &buf);
|
||||
|
||||
var magic: [2]u8 = undefined;
|
||||
try in.readNoEof(magic[0..]);
|
||||
if (!mem.eql(u8, &magic, "MZ"))
|
||||
return error.InvalidPEMagic;
|
||||
|
||||
// Seek to PE File Header (coff header)
|
||||
try self.in_file.seekTo(pe_pointer_offset);
|
||||
const pe_magic_offset = try in.readIntLittle(u32);
|
||||
try self.in_file.seekTo(pe_magic_offset);
|
||||
|
||||
var pe_header_magic: [4]u8 = undefined;
|
||||
try in.readNoEof(pe_header_magic[0..]);
|
||||
if (!mem.eql(u8, &pe_header_magic, &[_]u8{ 'P', 'E', 0, 0 }))
|
||||
return error.InvalidPEHeader;
|
||||
|
||||
self.coff_header = CoffHeader{
|
||||
.machine = try in.readIntLittle(u16),
|
||||
.number_of_sections = try in.readIntLittle(u16),
|
||||
.timedate_stamp = try in.readIntLittle(u32),
|
||||
.pointer_to_symbol_table = try in.readIntLittle(u32),
|
||||
.number_of_symbols = try in.readIntLittle(u32),
|
||||
.size_of_optional_header = try in.readIntLittle(u16),
|
||||
.characteristics = try in.readIntLittle(u16),
|
||||
};
|
||||
|
||||
switch (self.coff_header.machine) {
|
||||
IMAGE_FILE_MACHINE_I386, IMAGE_FILE_MACHINE_AMD64, IMAGE_FILE_MACHINE_IA64 => {},
|
||||
else => return error.InvalidMachine,
|
||||
// Do some basic validation upfront
|
||||
if (self.is_image) {
|
||||
self.coff_header_offset = coff_header_offset + 4;
|
||||
const coff_header = self.getCoffHeader();
|
||||
if (coff_header.size_of_optional_header == 0) return error.MissingPEHeader;
|
||||
}
|
||||
|
||||
try self.loadOptionalHeader();
|
||||
}
|
||||
|
||||
fn readStringFromTable(self: *Coff, offset: usize, buf: []u8) ![]const u8 {
|
||||
if (self.coff_header.pointer_to_symbol_table == 0) {
|
||||
// No symbol table therefore no string table
|
||||
return error.MissingStringTable;
|
||||
}
|
||||
// The string table is at the end of the symbol table and symbols are 18 bytes long
|
||||
const string_table_offset = self.coff_header.pointer_to_symbol_table + (self.coff_header.number_of_symbols * 18) + offset;
|
||||
const in = self.in_file.reader();
|
||||
const old_pos = try self.in_file.getPos();
|
||||
|
||||
try self.in_file.seekTo(string_table_offset);
|
||||
defer {
|
||||
self.in_file.seekTo(old_pos) catch unreachable;
|
||||
}
|
||||
|
||||
const str = try in.readUntilDelimiterOrEof(buf, 0);
|
||||
return str orelse "";
|
||||
}
|
||||
|
||||
fn loadOptionalHeader(self: *Coff) !void {
|
||||
const in = self.in_file.reader();
|
||||
const opt_header_pos = try self.in_file.getPos();
|
||||
|
||||
self.pe_header.magic = try in.readIntLittle(u16);
|
||||
try self.in_file.seekTo(opt_header_pos + 16);
|
||||
self.pe_header.entry_addr = try in.readIntLittle(u32);
|
||||
try self.in_file.seekTo(opt_header_pos + 20);
|
||||
self.pe_header.code_base = try in.readIntLittle(u32);
|
||||
|
||||
// The header structure is different for 32 or 64 bit
|
||||
var num_rva_pos: u64 = undefined;
|
||||
if (self.pe_header.magic == IMAGE_NT_OPTIONAL_HDR32_MAGIC) {
|
||||
num_rva_pos = opt_header_pos + 92;
|
||||
|
||||
try self.in_file.seekTo(opt_header_pos + 28);
|
||||
const image_base32 = try in.readIntLittle(u32);
|
||||
self.pe_header.image_base = image_base32;
|
||||
} else if (self.pe_header.magic == IMAGE_NT_OPTIONAL_HDR64_MAGIC) {
|
||||
num_rva_pos = opt_header_pos + 108;
|
||||
|
||||
try self.in_file.seekTo(opt_header_pos + 24);
|
||||
self.pe_header.image_base = try in.readIntLittle(u64);
|
||||
} else return error.InvalidPEMagic;
|
||||
|
||||
try self.in_file.seekTo(num_rva_pos);
|
||||
|
||||
const number_of_rva_and_sizes = try in.readIntLittle(u32);
|
||||
if (number_of_rva_and_sizes != IMAGE_NUMBEROF_DIRECTORY_ENTRIES)
|
||||
return error.InvalidPEHeader;
|
||||
|
||||
for (self.pe_header.data_directory) |*data_dir| {
|
||||
data_dir.* = OptionalHeader.DataDirectory{
|
||||
.virtual_address = try in.readIntLittle(u32),
|
||||
.size = try in.readIntLittle(u32),
|
||||
};
|
||||
}
|
||||
// JK: we used to check for architecture here and throw an error if not x86 or derivative.
|
||||
// However I am willing to take a leap of faith and let aarch64 have a shot also.
|
||||
}
|
||||
|
||||
pub fn getPdbPath(self: *Coff, buffer: []u8) !usize {
|
||||
try self.loadSections();
|
||||
assert(self.is_image);
|
||||
|
||||
const header = blk: {
|
||||
if (self.getSection(".buildid")) |section| {
|
||||
break :blk section.header;
|
||||
} else if (self.getSection(".rdata")) |section| {
|
||||
break :blk section.header;
|
||||
if (self.getSectionByName(".buildid")) |hdr| {
|
||||
break :blk hdr;
|
||||
} else if (self.getSectionByName(".rdata")) |hdr| {
|
||||
break :blk hdr;
|
||||
} else {
|
||||
return error.MissingCoffSection;
|
||||
}
|
||||
};
|
||||
|
||||
const debug_dir = &self.pe_header.data_directory[DEBUG_DIRECTORY];
|
||||
const data_dirs = self.getDataDirectories();
|
||||
const debug_dir = data_dirs[DEBUG_DIRECTORY];
|
||||
const file_offset = debug_dir.virtual_address - header.virtual_address + header.pointer_to_raw_data;
|
||||
|
||||
const in = self.in_file.reader();
|
||||
try self.in_file.seekTo(file_offset);
|
||||
var stream = std.io.fixedBufferStream(self.data);
|
||||
const reader = stream.reader();
|
||||
try stream.seekTo(file_offset);
|
||||
|
||||
// Find the correct DebugDirectoryEntry, and where its data is stored.
|
||||
// It can be in any section.
|
||||
const debug_dir_entry_count = debug_dir.size / @sizeOf(DebugDirectoryEntry);
|
||||
var i: u32 = 0;
|
||||
blk: while (i < debug_dir_entry_count) : (i += 1) {
|
||||
const debug_dir_entry = try in.readStruct(DebugDirectoryEntry);
|
||||
const debug_dir_entry = try reader.readStruct(DebugDirectoryEntry);
|
||||
if (debug_dir_entry.type == IMAGE_DEBUG_TYPE_CODEVIEW) {
|
||||
for (self.sections.items) |*section| {
|
||||
const section_start = section.header.virtual_address;
|
||||
const section_size = section.header.misc.virtual_size;
|
||||
for (self.getSectionHeaders()) |*section| {
|
||||
const section_start = section.virtual_address;
|
||||
const section_size = section.virtual_size;
|
||||
const rva = debug_dir_entry.address_of_raw_data;
|
||||
const offset = rva - section_start;
|
||||
if (section_start <= rva and offset < section_size and debug_dir_entry.size_of_data <= section_size - offset) {
|
||||
try self.in_file.seekTo(section.header.pointer_to_raw_data + offset);
|
||||
try stream.seekTo(section.pointer_to_raw_data + offset);
|
||||
break :blk;
|
||||
}
|
||||
}
|
||||
@@ -271,19 +890,19 @@ pub const Coff = struct {
|
||||
}
|
||||
|
||||
var cv_signature: [4]u8 = undefined; // CodeView signature
|
||||
try in.readNoEof(cv_signature[0..]);
|
||||
try reader.readNoEof(cv_signature[0..]);
|
||||
// 'RSDS' indicates PDB70 format, used by lld.
|
||||
if (!mem.eql(u8, &cv_signature, "RSDS"))
|
||||
return error.InvalidPEMagic;
|
||||
try in.readNoEof(self.guid[0..]);
|
||||
self.age = try in.readIntLittle(u32);
|
||||
try reader.readNoEof(self.guid[0..]);
|
||||
self.age = try reader.readIntLittle(u32);
|
||||
|
||||
// Finally read the null-terminated string.
|
||||
var byte = try in.readByte();
|
||||
var byte = try reader.readByte();
|
||||
i = 0;
|
||||
while (byte != 0 and i < buffer.len) : (i += 1) {
|
||||
buffer[i] = byte;
|
||||
byte = try in.readByte();
|
||||
byte = try reader.readByte();
|
||||
}
|
||||
|
||||
if (byte != 0 and i == buffer.len)
|
||||
@@ -292,126 +911,232 @@ pub const Coff = struct {
|
||||
return @as(usize, i);
|
||||
}
|
||||
|
||||
pub fn loadSections(self: *Coff) !void {
|
||||
if (self.sections.items.len == self.coff_header.number_of_sections)
|
||||
return;
|
||||
|
||||
try self.sections.ensureTotalCapacityPrecise(self.allocator, self.coff_header.number_of_sections);
|
||||
|
||||
const in = self.in_file.reader();
|
||||
|
||||
var name: [32]u8 = undefined;
|
||||
|
||||
var i: u16 = 0;
|
||||
while (i < self.coff_header.number_of_sections) : (i += 1) {
|
||||
try in.readNoEof(name[0..8]);
|
||||
|
||||
if (name[0] == '/') {
|
||||
// This is a long name and stored in the string table
|
||||
const offset_len = mem.indexOfScalar(u8, name[1..], 0) orelse 7;
|
||||
|
||||
const str_offset = try std.fmt.parseInt(u32, name[1 .. offset_len + 1], 10);
|
||||
const str = try self.readStringFromTable(str_offset, &name);
|
||||
std.mem.set(u8, name[str.len..], 0);
|
||||
} else {
|
||||
std.mem.set(u8, name[8..], 0);
|
||||
}
|
||||
|
||||
self.sections.appendAssumeCapacity(Section{
|
||||
.header = SectionHeader{
|
||||
.name = name,
|
||||
.misc = SectionHeader.Misc{ .virtual_size = try in.readIntLittle(u32) },
|
||||
.virtual_address = try in.readIntLittle(u32),
|
||||
.size_of_raw_data = try in.readIntLittle(u32),
|
||||
.pointer_to_raw_data = try in.readIntLittle(u32),
|
||||
.pointer_to_relocations = try in.readIntLittle(u32),
|
||||
.pointer_to_line_numbers = try in.readIntLittle(u32),
|
||||
.number_of_relocations = try in.readIntLittle(u16),
|
||||
.number_of_line_numbers = try in.readIntLittle(u16),
|
||||
.characteristics = try in.readIntLittle(u32),
|
||||
},
|
||||
});
|
||||
}
|
||||
pub fn getCoffHeader(self: Coff) CoffHeader {
|
||||
return @ptrCast(*align(1) const CoffHeader, self.data[self.coff_header_offset..][0..@sizeOf(CoffHeader)]).*;
|
||||
}
|
||||
|
||||
pub fn getSection(self: *Coff, comptime name: []const u8) ?*Section {
|
||||
for (self.sections.items) |*sec| {
|
||||
if (mem.eql(u8, sec.header.name[0..name.len], name)) {
|
||||
return sec;
|
||||
pub fn getOptionalHeader(self: Coff) OptionalHeader {
|
||||
assert(self.is_image);
|
||||
const offset = self.coff_header_offset + @sizeOf(CoffHeader);
|
||||
return @ptrCast(*align(1) const OptionalHeader, self.data[offset..][0..@sizeOf(OptionalHeader)]).*;
|
||||
}
|
||||
|
||||
pub fn getOptionalHeader32(self: Coff) OptionalHeaderPE32 {
|
||||
assert(self.is_image);
|
||||
const offset = self.coff_header_offset + @sizeOf(CoffHeader);
|
||||
return @ptrCast(*align(1) const OptionalHeaderPE32, self.data[offset..][0..@sizeOf(OptionalHeaderPE32)]).*;
|
||||
}
|
||||
|
||||
pub fn getOptionalHeader64(self: Coff) OptionalHeaderPE64 {
|
||||
assert(self.is_image);
|
||||
const offset = self.coff_header_offset + @sizeOf(CoffHeader);
|
||||
return @ptrCast(*align(1) const OptionalHeaderPE64, self.data[offset..][0..@sizeOf(OptionalHeaderPE64)]).*;
|
||||
}
|
||||
|
||||
pub fn getImageBase(self: Coff) u64 {
|
||||
const hdr = self.getOptionalHeader();
|
||||
return switch (hdr.magic) {
|
||||
IMAGE_NT_OPTIONAL_HDR32_MAGIC => self.getOptionalHeader32().image_base,
|
||||
IMAGE_NT_OPTIONAL_HDR64_MAGIC => self.getOptionalHeader64().image_base,
|
||||
else => unreachable, // We assume we have validated the header already
|
||||
};
|
||||
}
|
||||
|
||||
pub fn getNumberOfDataDirectories(self: Coff) u32 {
|
||||
const hdr = self.getOptionalHeader();
|
||||
return switch (hdr.magic) {
|
||||
IMAGE_NT_OPTIONAL_HDR32_MAGIC => self.getOptionalHeader32().number_of_rva_and_sizes,
|
||||
IMAGE_NT_OPTIONAL_HDR64_MAGIC => self.getOptionalHeader64().number_of_rva_and_sizes,
|
||||
else => unreachable, // We assume we have validated the header already
|
||||
};
|
||||
}
|
||||
|
||||
pub fn getDataDirectories(self: *const Coff) []align(1) const ImageDataDirectory {
|
||||
const hdr = self.getOptionalHeader();
|
||||
const size: usize = switch (hdr.magic) {
|
||||
IMAGE_NT_OPTIONAL_HDR32_MAGIC => @sizeOf(OptionalHeaderPE32),
|
||||
IMAGE_NT_OPTIONAL_HDR64_MAGIC => @sizeOf(OptionalHeaderPE64),
|
||||
else => unreachable, // We assume we have validated the header already
|
||||
};
|
||||
const offset = self.coff_header_offset + @sizeOf(CoffHeader) + size;
|
||||
return @ptrCast([*]align(1) const ImageDataDirectory, self.data[offset..])[0..self.getNumberOfDataDirectories()];
|
||||
}
|
||||
|
||||
pub fn getSymtab(self: *const Coff) ?Symtab {
|
||||
const coff_header = self.getCoffHeader();
|
||||
if (coff_header.pointer_to_symbol_table == 0) return null;
|
||||
|
||||
const offset = coff_header.pointer_to_symbol_table;
|
||||
const size = coff_header.number_of_symbols * Symbol.sizeOf();
|
||||
return .{ .buffer = self.data[offset..][0..size] };
|
||||
}
|
||||
|
||||
pub fn getStrtab(self: *const Coff) ?Strtab {
|
||||
const coff_header = self.getCoffHeader();
|
||||
if (coff_header.pointer_to_symbol_table == 0) return null;
|
||||
|
||||
const offset = coff_header.pointer_to_symbol_table + Symbol.sizeOf() * coff_header.number_of_symbols;
|
||||
const size = mem.readIntLittle(u32, self.data[offset..][0..4]);
|
||||
return Strtab{ .buffer = self.data[offset..][0..size] };
|
||||
}
|
||||
|
||||
pub fn getSectionHeaders(self: *const Coff) []align(1) const SectionHeader {
|
||||
const coff_header = self.getCoffHeader();
|
||||
const offset = self.coff_header_offset + @sizeOf(CoffHeader) + coff_header.size_of_optional_header;
|
||||
return @ptrCast([*]align(1) const SectionHeader, self.data.ptr + offset)[0..coff_header.number_of_sections];
|
||||
}
|
||||
|
||||
pub fn getSectionName(self: *const Coff, sect_hdr: *align(1) const SectionHeader) []const u8 {
|
||||
const name = sect_hdr.getName() orelse blk: {
|
||||
const strtab = self.getStrtab().?;
|
||||
const name_offset = sect_hdr.getNameOffset().?;
|
||||
break :blk strtab.get(name_offset);
|
||||
};
|
||||
return name;
|
||||
}
|
||||
|
||||
pub fn getSectionByName(self: *const Coff, comptime name: []const u8) ?*align(1) const SectionHeader {
|
||||
for (self.getSectionHeaders()) |*sect| {
|
||||
if (mem.eql(u8, self.getSectionName(sect), name)) {
|
||||
return sect;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// Return an owned slice full of the section data
|
||||
pub fn getSectionData(self: *Coff, comptime name: []const u8, allocator: mem.Allocator) ![]u8 {
|
||||
const sec = for (self.sections.items) |*sec| {
|
||||
if (mem.eql(u8, sec.header.name[0..name.len], name)) {
|
||||
break sec;
|
||||
}
|
||||
} else {
|
||||
return error.MissingCoffSection;
|
||||
};
|
||||
const in = self.in_file.reader();
|
||||
try self.in_file.seekTo(sec.header.pointer_to_raw_data);
|
||||
const out_buff = try allocator.alloc(u8, sec.header.misc.virtual_size);
|
||||
try in.readNoEof(out_buff);
|
||||
pub fn getSectionDataAlloc(self: *const Coff, comptime name: []const u8, allocator: mem.Allocator) ![]u8 {
|
||||
const sec = self.getSectionByName(name) orelse return error.MissingCoffSection;
|
||||
const out_buff = try allocator.alloc(u8, sec.virtual_size);
|
||||
mem.copy(u8, out_buff, self.data[sec.pointer_to_raw_data..][0..sec.virtual_size]);
|
||||
return out_buff;
|
||||
}
|
||||
};
|
||||
|
||||
const CoffHeader = struct {
|
||||
machine: u16,
|
||||
number_of_sections: u16,
|
||||
timedate_stamp: u32,
|
||||
pointer_to_symbol_table: u32,
|
||||
number_of_symbols: u32,
|
||||
size_of_optional_header: u16,
|
||||
characteristics: u16,
|
||||
};
|
||||
pub const Symtab = struct {
|
||||
buffer: []const u8,
|
||||
|
||||
const OptionalHeader = struct {
|
||||
const DataDirectory = struct {
|
||||
virtual_address: u32,
|
||||
size: u32,
|
||||
fn len(self: Symtab) usize {
|
||||
return @divExact(self.buffer.len, Symbol.sizeOf());
|
||||
}
|
||||
|
||||
const Tag = enum {
|
||||
symbol,
|
||||
func_def,
|
||||
debug_info,
|
||||
weak_ext,
|
||||
file_def,
|
||||
sect_def,
|
||||
};
|
||||
|
||||
const Record = union(Tag) {
|
||||
symbol: Symbol,
|
||||
debug_info: DebugInfoDefinition,
|
||||
func_def: FunctionDefinition,
|
||||
weak_ext: WeakExternalDefinition,
|
||||
file_def: FileDefinition,
|
||||
sect_def: SectionDefinition,
|
||||
};
|
||||
|
||||
/// Lives as long as Symtab instance.
|
||||
fn at(self: Symtab, index: usize, tag: Tag) Record {
|
||||
const offset = index * Symbol.sizeOf();
|
||||
const raw = self.buffer[offset..][0..Symbol.sizeOf()];
|
||||
return switch (tag) {
|
||||
.symbol => .{ .symbol = asSymbol(raw) },
|
||||
.debug_info => .{ .debug_info = asDebugInfo(raw) },
|
||||
.func_def => .{ .func_def = asFuncDef(raw) },
|
||||
.weak_ext => .{ .weak_ext = asWeakExtDef(raw) },
|
||||
.file_def => .{ .file_def = asFileDef(raw) },
|
||||
.sect_def => .{ .sect_def = asSectDef(raw) },
|
||||
};
|
||||
}
|
||||
|
||||
fn asSymbol(raw: []const u8) Symbol {
|
||||
return .{
|
||||
.name = raw[0..8].*,
|
||||
.value = mem.readIntLittle(u32, raw[8..12]),
|
||||
.section_number = @intToEnum(SectionNumber, mem.readIntLittle(u16, raw[12..14])),
|
||||
.@"type" = @bitCast(SymType, mem.readIntLittle(u16, raw[14..16])),
|
||||
.storage_class = @intToEnum(StorageClass, raw[16]),
|
||||
.number_of_aux_symbols = raw[17],
|
||||
};
|
||||
}
|
||||
|
||||
fn asDebugInfo(raw: []const u8) DebugInfoDefinition {
|
||||
return .{
|
||||
.unused_1 = raw[0..4].*,
|
||||
.linenumber = mem.readIntLittle(u16, raw[4..6]),
|
||||
.unused_2 = raw[6..12].*,
|
||||
.pointer_to_next_function = mem.readIntLittle(u32, raw[12..16]),
|
||||
.unused_3 = raw[16..18].*,
|
||||
};
|
||||
}
|
||||
|
||||
fn asFuncDef(raw: []const u8) FunctionDefinition {
|
||||
return .{
|
||||
.tag_index = mem.readIntLittle(u32, raw[0..4]),
|
||||
.total_size = mem.readIntLittle(u32, raw[4..8]),
|
||||
.pointer_to_linenumber = mem.readIntLittle(u32, raw[8..12]),
|
||||
.pointer_to_next_function = mem.readIntLittle(u32, raw[12..16]),
|
||||
.unused = raw[16..18].*,
|
||||
};
|
||||
}
|
||||
|
||||
fn asWeakExtDef(raw: []const u8) WeakExternalDefinition {
|
||||
return .{
|
||||
.tag_index = mem.readIntLittle(u32, raw[0..4]),
|
||||
.flag = @intToEnum(WeakExternalFlag, mem.readIntLittle(u32, raw[4..8])),
|
||||
.unused = raw[8..18].*,
|
||||
};
|
||||
}
|
||||
|
||||
fn asFileDef(raw: []const u8) FileDefinition {
|
||||
return .{
|
||||
.file_name = raw[0..18].*,
|
||||
};
|
||||
}
|
||||
|
||||
fn asSectDef(raw: []const u8) SectionDefinition {
|
||||
return .{
|
||||
.length = mem.readIntLittle(u32, raw[0..4]),
|
||||
.number_of_relocations = mem.readIntLittle(u16, raw[4..6]),
|
||||
.number_of_linenumbers = mem.readIntLittle(u16, raw[6..8]),
|
||||
.checksum = mem.readIntLittle(u32, raw[8..12]),
|
||||
.number = mem.readIntLittle(u16, raw[12..14]),
|
||||
.selection = @intToEnum(ComdatSelection, raw[14]),
|
||||
.unused = raw[15..18].*,
|
||||
};
|
||||
}
|
||||
|
||||
const Slice = struct {
|
||||
buffer: []const u8,
|
||||
num: usize,
|
||||
count: usize = 0,
|
||||
|
||||
/// Lives as long as Symtab instance.
|
||||
fn next(self: *Slice) ?Symbol {
|
||||
if (self.count >= self.num) return null;
|
||||
const sym = asSymbol(self.buffer[0..Symbol.sizeOf()]);
|
||||
self.count += 1;
|
||||
self.buffer = self.buffer[Symbol.sizeOf()..];
|
||||
return sym;
|
||||
}
|
||||
};
|
||||
|
||||
fn slice(self: Symtab, start: usize, end: ?usize) Slice {
|
||||
const offset = start * Symbol.sizeOf();
|
||||
const llen = if (end) |e| e * Symbol.sizeOf() else self.buffer.len;
|
||||
const num = @divExact(llen - offset, Symbol.sizeOf());
|
||||
return Slice{ .buffer = self.buffer[offset..][0..llen], .num = num };
|
||||
}
|
||||
};
|
||||
|
||||
magic: u16,
|
||||
data_directory: [IMAGE_NUMBEROF_DIRECTORY_ENTRIES]DataDirectory,
|
||||
entry_addr: u32,
|
||||
code_base: u32,
|
||||
image_base: u64,
|
||||
};
|
||||
pub const Strtab = struct {
|
||||
buffer: []const u8,
|
||||
|
||||
const DebugDirectoryEntry = packed struct {
|
||||
characteristiccs: u32,
|
||||
time_date_stamp: u32,
|
||||
major_version: u16,
|
||||
minor_version: u16,
|
||||
@"type": u32,
|
||||
size_of_data: u32,
|
||||
address_of_raw_data: u32,
|
||||
pointer_to_raw_data: u32,
|
||||
};
|
||||
|
||||
pub const Section = struct {
|
||||
header: SectionHeader,
|
||||
};
|
||||
|
||||
const SectionHeader = struct {
|
||||
const Misc = union {
|
||||
physical_address: u32,
|
||||
virtual_size: u32,
|
||||
fn get(self: Strtab, off: u32) []const u8 {
|
||||
assert(off < self.buffer.len);
|
||||
return mem.sliceTo(@ptrCast([*:0]const u8, self.buffer.ptr + off), 0);
|
||||
}
|
||||
};
|
||||
|
||||
name: [32]u8,
|
||||
misc: Misc,
|
||||
virtual_address: u32,
|
||||
size_of_raw_data: u32,
|
||||
pointer_to_raw_data: u32,
|
||||
pointer_to_relocations: u32,
|
||||
pointer_to_line_numbers: u32,
|
||||
number_of_relocations: u16,
|
||||
number_of_line_numbers: u16,
|
||||
characteristics: u32,
|
||||
};
|
||||
|
||||
@@ -2,7 +2,7 @@ const math = @import("std").math;
|
||||
|
||||
// Reverse bit-by-bit a N-bit code.
|
||||
pub fn bitReverse(comptime T: type, value: T, N: usize) T {
|
||||
const r = @bitReverse(T, value);
|
||||
const r = @bitReverse(value);
|
||||
return r >> @intCast(math.Log2Int(T), @typeInfo(T).Int.bits - N);
|
||||
}
|
||||
|
||||
|
||||
@@ -355,7 +355,9 @@ test "ed25519 batch verification" {
|
||||
try Ed25519.verifyBatch(2, signature_batch);
|
||||
|
||||
signature_batch[1].sig = sig1;
|
||||
try std.testing.expectError(error.SignatureVerificationFailed, Ed25519.verifyBatch(signature_batch.len, signature_batch));
|
||||
// TODO https://github.com/ziglang/zig/issues/12240
|
||||
const sig_len = signature_batch.len;
|
||||
try std.testing.expectError(error.SignatureVerificationFailed, Ed25519.verifyBatch(sig_len, signature_batch));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -66,7 +66,7 @@ fn AesOcb(comptime Aes: anytype) type {
|
||||
var offset = [_]u8{0} ** 16;
|
||||
var i: usize = 0;
|
||||
while (i < full_blocks) : (i += 1) {
|
||||
xorWith(&offset, lt[@ctz(usize, i + 1)]);
|
||||
xorWith(&offset, lt[@ctz(i + 1)]);
|
||||
var e = xorBlocks(offset, a[i * 16 ..][0..16].*);
|
||||
aes_enc_ctx.encrypt(&e, &e);
|
||||
xorWith(&sum, e);
|
||||
@@ -129,7 +129,7 @@ fn AesOcb(comptime Aes: anytype) type {
|
||||
var es: [16 * wb]u8 align(16) = undefined;
|
||||
var j: usize = 0;
|
||||
while (j < wb) : (j += 1) {
|
||||
xorWith(&offset, lt[@ctz(usize, i + 1 + j)]);
|
||||
xorWith(&offset, lt[@ctz(i + 1 + j)]);
|
||||
offsets[j] = offset;
|
||||
const p = m[(i + j) * 16 ..][0..16].*;
|
||||
mem.copy(u8, es[j * 16 ..][0..16], &xorBlocks(p, offsets[j]));
|
||||
@@ -143,7 +143,7 @@ fn AesOcb(comptime Aes: anytype) type {
|
||||
}
|
||||
}
|
||||
while (i < full_blocks) : (i += 1) {
|
||||
xorWith(&offset, lt[@ctz(usize, i + 1)]);
|
||||
xorWith(&offset, lt[@ctz(i + 1)]);
|
||||
const p = m[i * 16 ..][0..16].*;
|
||||
var e = xorBlocks(p, offset);
|
||||
aes_enc_ctx.encrypt(&e, &e);
|
||||
@@ -193,7 +193,7 @@ fn AesOcb(comptime Aes: anytype) type {
|
||||
var es: [16 * wb]u8 align(16) = undefined;
|
||||
var j: usize = 0;
|
||||
while (j < wb) : (j += 1) {
|
||||
xorWith(&offset, lt[@ctz(usize, i + 1 + j)]);
|
||||
xorWith(&offset, lt[@ctz(i + 1 + j)]);
|
||||
offsets[j] = offset;
|
||||
const q = c[(i + j) * 16 ..][0..16].*;
|
||||
mem.copy(u8, es[j * 16 ..][0..16], &xorBlocks(q, offsets[j]));
|
||||
@@ -207,7 +207,7 @@ fn AesOcb(comptime Aes: anytype) type {
|
||||
}
|
||||
}
|
||||
while (i < full_blocks) : (i += 1) {
|
||||
xorWith(&offset, lt[@ctz(usize, i + 1)]);
|
||||
xorWith(&offset, lt[@ctz(i + 1)]);
|
||||
const q = c[i * 16 ..][0..16].*;
|
||||
var e = xorBlocks(q, offset);
|
||||
aes_dec_ctx.decrypt(&e, &e);
|
||||
|
||||
+16
-16
@@ -41,8 +41,8 @@ pub const Ghash = struct {
|
||||
pub fn init(key: *const [key_length]u8) Ghash {
|
||||
const h1 = mem.readIntBig(u64, key[0..8]);
|
||||
const h0 = mem.readIntBig(u64, key[8..16]);
|
||||
const h1r = @bitReverse(u64, h1);
|
||||
const h0r = @bitReverse(u64, h0);
|
||||
const h1r = @bitReverse(h1);
|
||||
const h0r = @bitReverse(h0);
|
||||
const h2 = h0 ^ h1;
|
||||
const h2r = h0r ^ h1r;
|
||||
|
||||
@@ -68,8 +68,8 @@ pub const Ghash = struct {
|
||||
hh.update(key);
|
||||
const hh1 = hh.y1;
|
||||
const hh0 = hh.y0;
|
||||
const hh1r = @bitReverse(u64, hh1);
|
||||
const hh0r = @bitReverse(u64, hh0);
|
||||
const hh1r = @bitReverse(hh1);
|
||||
const hh0r = @bitReverse(hh0);
|
||||
const hh2 = hh0 ^ hh1;
|
||||
const hh2r = hh0r ^ hh1r;
|
||||
|
||||
@@ -156,8 +156,8 @@ pub const Ghash = struct {
|
||||
y1 ^= mem.readIntBig(u64, msg[i..][0..8]);
|
||||
y0 ^= mem.readIntBig(u64, msg[i..][8..16]);
|
||||
|
||||
const y1r = @bitReverse(u64, y1);
|
||||
const y0r = @bitReverse(u64, y0);
|
||||
const y1r = @bitReverse(y1);
|
||||
const y0r = @bitReverse(y0);
|
||||
const y2 = y0 ^ y1;
|
||||
const y2r = y0r ^ y1r;
|
||||
|
||||
@@ -172,8 +172,8 @@ pub const Ghash = struct {
|
||||
const sy1 = mem.readIntBig(u64, msg[i..][16..24]);
|
||||
const sy0 = mem.readIntBig(u64, msg[i..][24..32]);
|
||||
|
||||
const sy1r = @bitReverse(u64, sy1);
|
||||
const sy0r = @bitReverse(u64, sy0);
|
||||
const sy1r = @bitReverse(sy1);
|
||||
const sy0r = @bitReverse(sy0);
|
||||
const sy2 = sy0 ^ sy1;
|
||||
const sy2r = sy0r ^ sy1r;
|
||||
|
||||
@@ -191,9 +191,9 @@ pub const Ghash = struct {
|
||||
z0h ^= sz0h;
|
||||
z1h ^= sz1h;
|
||||
z2h ^= sz2h;
|
||||
z0h = @bitReverse(u64, z0h) >> 1;
|
||||
z1h = @bitReverse(u64, z1h) >> 1;
|
||||
z2h = @bitReverse(u64, z2h) >> 1;
|
||||
z0h = @bitReverse(z0h) >> 1;
|
||||
z1h = @bitReverse(z1h) >> 1;
|
||||
z2h = @bitReverse(z2h) >> 1;
|
||||
|
||||
var v3 = z1h;
|
||||
var v2 = z1 ^ z2h;
|
||||
@@ -217,8 +217,8 @@ pub const Ghash = struct {
|
||||
y1 ^= mem.readIntBig(u64, msg[i..][0..8]);
|
||||
y0 ^= mem.readIntBig(u64, msg[i..][8..16]);
|
||||
|
||||
const y1r = @bitReverse(u64, y1);
|
||||
const y0r = @bitReverse(u64, y0);
|
||||
const y1r = @bitReverse(y1);
|
||||
const y0r = @bitReverse(y0);
|
||||
const y2 = y0 ^ y1;
|
||||
const y2r = y0r ^ y1r;
|
||||
|
||||
@@ -228,9 +228,9 @@ pub const Ghash = struct {
|
||||
var z0h = clmul(y0r, st.h0r);
|
||||
var z1h = clmul(y1r, st.h1r);
|
||||
var z2h = clmul(y2r, st.h2r) ^ z0h ^ z1h;
|
||||
z0h = @bitReverse(u64, z0h) >> 1;
|
||||
z1h = @bitReverse(u64, z1h) >> 1;
|
||||
z2h = @bitReverse(u64, z2h) >> 1;
|
||||
z0h = @bitReverse(z0h) >> 1;
|
||||
z1h = @bitReverse(z1h) >> 1;
|
||||
z2h = @bitReverse(z2h) >> 1;
|
||||
|
||||
// shift & reduce
|
||||
var v3 = z1h;
|
||||
|
||||
+184
-126
@@ -816,11 +816,11 @@ pub fn openSelfDebugInfo(allocator: mem.Allocator) anyerror!DebugInfo {
|
||||
/// TODO it's weird to take ownership even on error, rework this code.
|
||||
fn readCoffDebugInfo(allocator: mem.Allocator, coff_file: File) !ModuleDebugInfo {
|
||||
nosuspend {
|
||||
errdefer coff_file.close();
|
||||
defer coff_file.close();
|
||||
|
||||
const coff_obj = try allocator.create(coff.Coff);
|
||||
errdefer allocator.destroy(coff_obj);
|
||||
coff_obj.* = coff.Coff.init(allocator, coff_file);
|
||||
coff_obj.* = .{ .allocator = allocator };
|
||||
|
||||
var di = ModuleDebugInfo{
|
||||
.base_address = undefined,
|
||||
@@ -828,27 +828,42 @@ fn readCoffDebugInfo(allocator: mem.Allocator, coff_file: File) !ModuleDebugInfo
|
||||
.debug_data = undefined,
|
||||
};
|
||||
|
||||
try di.coff.loadHeader();
|
||||
try di.coff.loadSections();
|
||||
if (di.coff.getSection(".debug_info")) |sec| {
|
||||
// TODO convert to Windows' memory-mapped file API
|
||||
const file_len = math.cast(usize, try coff_file.getEndPos()) orelse math.maxInt(usize);
|
||||
const data = try coff_file.readToEndAlloc(allocator, file_len);
|
||||
try di.coff.parse(data);
|
||||
|
||||
if (di.coff.getSectionByName(".debug_info")) |sec| {
|
||||
// This coff file has embedded DWARF debug info
|
||||
_ = sec;
|
||||
// TODO: free the section data slices
|
||||
const debug_info_data = di.coff.getSectionData(".debug_info", allocator) catch null;
|
||||
const debug_abbrev_data = di.coff.getSectionData(".debug_abbrev", allocator) catch null;
|
||||
const debug_str_data = di.coff.getSectionData(".debug_str", allocator) catch null;
|
||||
const debug_line_data = di.coff.getSectionData(".debug_line", allocator) catch null;
|
||||
const debug_line_str_data = di.coff.getSectionData(".debug_line_str", allocator) catch null;
|
||||
const debug_ranges_data = di.coff.getSectionData(".debug_ranges", allocator) catch null;
|
||||
const debug_info = di.coff.getSectionDataAlloc(".debug_info", allocator) catch null;
|
||||
const debug_abbrev = di.coff.getSectionDataAlloc(".debug_abbrev", allocator) catch null;
|
||||
const debug_str = di.coff.getSectionDataAlloc(".debug_str", allocator) catch null;
|
||||
const debug_str_offsets = di.coff.getSectionDataAlloc(".debug_str_offsets", allocator) catch null;
|
||||
const debug_line = di.coff.getSectionDataAlloc(".debug_line", allocator) catch null;
|
||||
const debug_line_str = di.coff.getSectionDataAlloc(".debug_line_str", allocator) catch null;
|
||||
const debug_ranges = di.coff.getSectionDataAlloc(".debug_ranges", allocator) catch null;
|
||||
const debug_loclists = di.coff.getSectionDataAlloc(".debug_loclists", allocator) catch null;
|
||||
const debug_rnglists = di.coff.getSectionDataAlloc(".debug_rnglists", allocator) catch null;
|
||||
const debug_addr = di.coff.getSectionDataAlloc(".debug_addr", allocator) catch null;
|
||||
const debug_names = di.coff.getSectionDataAlloc(".debug_names", allocator) catch null;
|
||||
const debug_frame = di.coff.getSectionDataAlloc(".debug_frame", allocator) catch null;
|
||||
|
||||
var dwarf = DW.DwarfInfo{
|
||||
.endian = native_endian,
|
||||
.debug_info = debug_info_data orelse return error.MissingDebugInfo,
|
||||
.debug_abbrev = debug_abbrev_data orelse return error.MissingDebugInfo,
|
||||
.debug_str = debug_str_data orelse return error.MissingDebugInfo,
|
||||
.debug_line = debug_line_data orelse return error.MissingDebugInfo,
|
||||
.debug_line_str = debug_line_str_data,
|
||||
.debug_ranges = debug_ranges_data,
|
||||
.debug_info = debug_info orelse return error.MissingDebugInfo,
|
||||
.debug_abbrev = debug_abbrev orelse return error.MissingDebugInfo,
|
||||
.debug_str = debug_str orelse return error.MissingDebugInfo,
|
||||
.debug_str_offsets = debug_str_offsets,
|
||||
.debug_line = debug_line orelse return error.MissingDebugInfo,
|
||||
.debug_line_str = debug_line_str,
|
||||
.debug_ranges = debug_ranges,
|
||||
.debug_loclists = debug_loclists,
|
||||
.debug_rnglists = debug_rnglists,
|
||||
.debug_addr = debug_addr,
|
||||
.debug_names = debug_names,
|
||||
.debug_frame = debug_frame,
|
||||
};
|
||||
try DW.openDwarfDebugInfo(&dwarf, allocator);
|
||||
di.debug_data = PdbOrDwarf{ .dwarf = dwarf };
|
||||
@@ -863,7 +878,10 @@ fn readCoffDebugInfo(allocator: mem.Allocator, coff_file: File) !ModuleDebugInfo
|
||||
defer allocator.free(path);
|
||||
|
||||
di.debug_data = PdbOrDwarf{ .pdb = undefined };
|
||||
di.debug_data.pdb = try pdb.Pdb.init(allocator, path);
|
||||
di.debug_data.pdb = pdb.Pdb.init(allocator, path) catch |err| switch (err) {
|
||||
error.FileNotFound, error.IsDir => return error.MissingDebugInfo,
|
||||
else => return err,
|
||||
};
|
||||
try di.debug_data.pdb.parseInfoStream();
|
||||
try di.debug_data.pdb.parseDbiStream();
|
||||
|
||||
@@ -912,9 +930,15 @@ pub fn readElfDebugInfo(allocator: mem.Allocator, elf_file: File) !ModuleDebugIn
|
||||
var opt_debug_info: ?[]const u8 = null;
|
||||
var opt_debug_abbrev: ?[]const u8 = null;
|
||||
var opt_debug_str: ?[]const u8 = null;
|
||||
var opt_debug_str_offsets: ?[]const u8 = null;
|
||||
var opt_debug_line: ?[]const u8 = null;
|
||||
var opt_debug_line_str: ?[]const u8 = null;
|
||||
var opt_debug_ranges: ?[]const u8 = null;
|
||||
var opt_debug_loclists: ?[]const u8 = null;
|
||||
var opt_debug_rnglists: ?[]const u8 = null;
|
||||
var opt_debug_addr: ?[]const u8 = null;
|
||||
var opt_debug_names: ?[]const u8 = null;
|
||||
var opt_debug_frame: ?[]const u8 = null;
|
||||
|
||||
for (shdrs) |*shdr| {
|
||||
if (shdr.sh_type == elf.SHT_NULL) continue;
|
||||
@@ -926,12 +950,24 @@ pub fn readElfDebugInfo(allocator: mem.Allocator, elf_file: File) !ModuleDebugIn
|
||||
opt_debug_abbrev = try chopSlice(mapped_mem, shdr.sh_offset, shdr.sh_size);
|
||||
} else if (mem.eql(u8, name, ".debug_str")) {
|
||||
opt_debug_str = try chopSlice(mapped_mem, shdr.sh_offset, shdr.sh_size);
|
||||
} else if (mem.eql(u8, name, ".debug_str_offsets")) {
|
||||
opt_debug_str_offsets = try chopSlice(mapped_mem, shdr.sh_offset, shdr.sh_size);
|
||||
} else if (mem.eql(u8, name, ".debug_line")) {
|
||||
opt_debug_line = try chopSlice(mapped_mem, shdr.sh_offset, shdr.sh_size);
|
||||
} else if (mem.eql(u8, name, ".debug_line_str")) {
|
||||
opt_debug_line_str = try chopSlice(mapped_mem, shdr.sh_offset, shdr.sh_size);
|
||||
} else if (mem.eql(u8, name, ".debug_ranges")) {
|
||||
opt_debug_ranges = try chopSlice(mapped_mem, shdr.sh_offset, shdr.sh_size);
|
||||
} else if (mem.eql(u8, name, ".debug_loclists")) {
|
||||
opt_debug_loclists = try chopSlice(mapped_mem, shdr.sh_offset, shdr.sh_size);
|
||||
} else if (mem.eql(u8, name, ".debug_rnglists")) {
|
||||
opt_debug_rnglists = try chopSlice(mapped_mem, shdr.sh_offset, shdr.sh_size);
|
||||
} else if (mem.eql(u8, name, ".debug_addr")) {
|
||||
opt_debug_addr = try chopSlice(mapped_mem, shdr.sh_offset, shdr.sh_size);
|
||||
} else if (mem.eql(u8, name, ".debug_names")) {
|
||||
opt_debug_names = try chopSlice(mapped_mem, shdr.sh_offset, shdr.sh_size);
|
||||
} else if (mem.eql(u8, name, ".debug_frame")) {
|
||||
opt_debug_frame = try chopSlice(mapped_mem, shdr.sh_offset, shdr.sh_size);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -940,9 +976,15 @@ pub fn readElfDebugInfo(allocator: mem.Allocator, elf_file: File) !ModuleDebugIn
|
||||
.debug_info = opt_debug_info orelse return error.MissingDebugInfo,
|
||||
.debug_abbrev = opt_debug_abbrev orelse return error.MissingDebugInfo,
|
||||
.debug_str = opt_debug_str orelse return error.MissingDebugInfo,
|
||||
.debug_str_offsets = opt_debug_str_offsets,
|
||||
.debug_line = opt_debug_line orelse return error.MissingDebugInfo,
|
||||
.debug_line_str = opt_debug_line_str,
|
||||
.debug_ranges = opt_debug_ranges,
|
||||
.debug_loclists = opt_debug_loclists,
|
||||
.debug_rnglists = opt_debug_rnglists,
|
||||
.debug_addr = opt_debug_addr,
|
||||
.debug_names = opt_debug_names,
|
||||
.debug_frame = opt_debug_frame,
|
||||
};
|
||||
|
||||
try DW.openDwarfDebugInfo(&di, allocator);
|
||||
@@ -968,24 +1010,20 @@ fn readMachODebugInfo(allocator: mem.Allocator, macho_file: File) !ModuleDebugIn
|
||||
if (hdr.magic != macho.MH_MAGIC_64)
|
||||
return error.InvalidDebugInfo;
|
||||
|
||||
const hdr_base = @ptrCast([*]const u8, hdr);
|
||||
var ptr = hdr_base + @sizeOf(macho.mach_header_64);
|
||||
var ncmd: u32 = hdr.ncmds;
|
||||
const symtab = while (ncmd != 0) : (ncmd -= 1) {
|
||||
const lc = @ptrCast(*const std.macho.load_command, ptr);
|
||||
switch (lc.cmd) {
|
||||
.SYMTAB => break @ptrCast(*const std.macho.symtab_command, ptr),
|
||||
else => {},
|
||||
}
|
||||
ptr = @alignCast(@alignOf(std.macho.load_command), ptr + lc.cmdsize);
|
||||
} else {
|
||||
return error.MissingDebugInfo;
|
||||
var it = macho.LoadCommandIterator{
|
||||
.ncmds = hdr.ncmds,
|
||||
.buffer = mapped_mem[@sizeOf(macho.mach_header_64)..][0..hdr.sizeofcmds],
|
||||
};
|
||||
const symtab = while (it.next()) |cmd| switch (cmd.cmd()) {
|
||||
.SYMTAB => break cmd.cast(macho.symtab_command).?,
|
||||
else => {},
|
||||
} else return error.MissingDebugInfo;
|
||||
|
||||
const syms = @ptrCast(
|
||||
[*]const macho.nlist_64,
|
||||
@alignCast(@alignOf(macho.nlist_64), hdr_base + symtab.symoff),
|
||||
@alignCast(@alignOf(macho.nlist_64), &mapped_mem[symtab.symoff]),
|
||||
)[0..symtab.nsyms];
|
||||
const strings = @ptrCast([*]const u8, hdr_base + symtab.stroff)[0 .. symtab.strsize - 1 :0];
|
||||
const strings = mapped_mem[symtab.stroff..][0 .. symtab.strsize - 1 :0];
|
||||
|
||||
const symbols_buf = try allocator.alloc(MachoSymbol, syms.len);
|
||||
|
||||
@@ -1200,48 +1238,46 @@ pub const DebugInfo = struct {
|
||||
if (address < base_address) continue;
|
||||
|
||||
const header = std.c._dyld_get_image_header(i) orelse continue;
|
||||
// The array of load commands is right after the header
|
||||
var cmd_ptr = @intToPtr([*]u8, @ptrToInt(header) + @sizeOf(macho.mach_header_64));
|
||||
|
||||
var cmds = header.ncmds;
|
||||
while (cmds != 0) : (cmds -= 1) {
|
||||
const lc = @ptrCast(
|
||||
*macho.load_command,
|
||||
@alignCast(@alignOf(macho.load_command), cmd_ptr),
|
||||
);
|
||||
cmd_ptr += lc.cmdsize;
|
||||
if (lc.cmd != .SEGMENT_64) continue;
|
||||
var it = macho.LoadCommandIterator{
|
||||
.ncmds = header.ncmds,
|
||||
.buffer = @alignCast(@alignOf(u64), @intToPtr(
|
||||
[*]u8,
|
||||
@ptrToInt(header) + @sizeOf(macho.mach_header_64),
|
||||
))[0..header.sizeofcmds],
|
||||
};
|
||||
while (it.next()) |cmd| switch (cmd.cmd()) {
|
||||
.SEGMENT_64 => {
|
||||
const segment_cmd = cmd.cast(macho.segment_command_64).?;
|
||||
const rebased_address = address - base_address;
|
||||
const seg_start = segment_cmd.vmaddr;
|
||||
const seg_end = seg_start + segment_cmd.vmsize;
|
||||
|
||||
const segment_cmd = @ptrCast(
|
||||
*const std.macho.segment_command_64,
|
||||
@alignCast(@alignOf(std.macho.segment_command_64), lc),
|
||||
);
|
||||
if (rebased_address >= seg_start and rebased_address < seg_end) {
|
||||
if (self.address_map.get(base_address)) |obj_di| {
|
||||
return obj_di;
|
||||
}
|
||||
|
||||
const rebased_address = address - base_address;
|
||||
const seg_start = segment_cmd.vmaddr;
|
||||
const seg_end = seg_start + segment_cmd.vmsize;
|
||||
const obj_di = try self.allocator.create(ModuleDebugInfo);
|
||||
errdefer self.allocator.destroy(obj_di);
|
||||
|
||||
const macho_path = mem.sliceTo(std.c._dyld_get_image_name(i), 0);
|
||||
const macho_file = fs.cwd().openFile(macho_path, .{
|
||||
.intended_io_mode = .blocking,
|
||||
}) catch |err| switch (err) {
|
||||
error.FileNotFound => return error.MissingDebugInfo,
|
||||
else => return err,
|
||||
};
|
||||
obj_di.* = try readMachODebugInfo(self.allocator, macho_file);
|
||||
obj_di.base_address = base_address;
|
||||
|
||||
try self.address_map.putNoClobber(base_address, obj_di);
|
||||
|
||||
if (rebased_address >= seg_start and rebased_address < seg_end) {
|
||||
if (self.address_map.get(base_address)) |obj_di| {
|
||||
return obj_di;
|
||||
}
|
||||
|
||||
const obj_di = try self.allocator.create(ModuleDebugInfo);
|
||||
errdefer self.allocator.destroy(obj_di);
|
||||
|
||||
const macho_path = mem.sliceTo(std.c._dyld_get_image_name(i), 0);
|
||||
const macho_file = fs.cwd().openFile(macho_path, .{ .intended_io_mode = .blocking }) catch |err| switch (err) {
|
||||
error.FileNotFound => return error.MissingDebugInfo,
|
||||
else => return err,
|
||||
};
|
||||
obj_di.* = try readMachODebugInfo(self.allocator, macho_file);
|
||||
obj_di.base_address = base_address;
|
||||
|
||||
try self.address_map.putNoClobber(base_address, obj_di);
|
||||
|
||||
return obj_di;
|
||||
}
|
||||
}
|
||||
},
|
||||
else => {},
|
||||
};
|
||||
}
|
||||
|
||||
return error.MissingDebugInfo;
|
||||
@@ -1445,44 +1481,31 @@ pub const ModuleDebugInfo = switch (native_os) {
|
||||
if (hdr.magic != std.macho.MH_MAGIC_64)
|
||||
return error.InvalidDebugInfo;
|
||||
|
||||
const hdr_base = @ptrCast([*]const u8, hdr);
|
||||
var ptr = hdr_base + @sizeOf(macho.mach_header_64);
|
||||
var segptr = ptr;
|
||||
var ncmd: u32 = hdr.ncmds;
|
||||
var segcmd: ?*const macho.segment_command_64 = null;
|
||||
var symtabcmd: ?*const macho.symtab_command = null;
|
||||
|
||||
while (ncmd != 0) : (ncmd -= 1) {
|
||||
const lc = @ptrCast(*const std.macho.load_command, ptr);
|
||||
switch (lc.cmd) {
|
||||
.SEGMENT_64 => {
|
||||
segcmd = @ptrCast(
|
||||
*const std.macho.segment_command_64,
|
||||
@alignCast(@alignOf(std.macho.segment_command_64), ptr),
|
||||
);
|
||||
segptr = ptr;
|
||||
},
|
||||
.SYMTAB => {
|
||||
symtabcmd = @ptrCast(
|
||||
*const std.macho.symtab_command,
|
||||
@alignCast(@alignOf(std.macho.symtab_command), ptr),
|
||||
);
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
ptr = @alignCast(@alignOf(std.macho.load_command), ptr + lc.cmdsize);
|
||||
}
|
||||
var segcmd: ?macho.LoadCommandIterator.LoadCommand = null;
|
||||
var symtabcmd: ?macho.symtab_command = null;
|
||||
var it = macho.LoadCommandIterator{
|
||||
.ncmds = hdr.ncmds,
|
||||
.buffer = mapped_mem[@sizeOf(macho.mach_header_64)..][0..hdr.sizeofcmds],
|
||||
};
|
||||
while (it.next()) |cmd| switch (cmd.cmd()) {
|
||||
.SEGMENT_64 => segcmd = cmd,
|
||||
.SYMTAB => symtabcmd = cmd.cast(macho.symtab_command).?,
|
||||
else => {},
|
||||
};
|
||||
|
||||
if (segcmd == null or symtabcmd == null) return error.MissingDebugInfo;
|
||||
|
||||
// Parse symbols
|
||||
const strtab = @ptrCast(
|
||||
[*]const u8,
|
||||
hdr_base + symtabcmd.?.stroff,
|
||||
&mapped_mem[symtabcmd.?.stroff],
|
||||
)[0 .. symtabcmd.?.strsize - 1 :0];
|
||||
const symtab = @ptrCast(
|
||||
[*]const macho.nlist_64,
|
||||
@alignCast(@alignOf(macho.nlist_64), hdr_base + symtabcmd.?.symoff),
|
||||
@alignCast(
|
||||
@alignOf(macho.nlist_64),
|
||||
&mapped_mem[symtabcmd.?.symoff],
|
||||
),
|
||||
)[0..symtabcmd.?.nsyms];
|
||||
|
||||
// TODO handle tentative (common) symbols
|
||||
@@ -1496,25 +1519,21 @@ pub const ModuleDebugInfo = switch (native_os) {
|
||||
addr_table.putAssumeCapacityNoClobber(sym_name, sym.n_value);
|
||||
}
|
||||
|
||||
var opt_debug_line: ?*const macho.section_64 = null;
|
||||
var opt_debug_info: ?*const macho.section_64 = null;
|
||||
var opt_debug_abbrev: ?*const macho.section_64 = null;
|
||||
var opt_debug_str: ?*const macho.section_64 = null;
|
||||
var opt_debug_line_str: ?*const macho.section_64 = null;
|
||||
var opt_debug_ranges: ?*const macho.section_64 = null;
|
||||
|
||||
const sections = @ptrCast(
|
||||
[*]const macho.section_64,
|
||||
@alignCast(@alignOf(macho.section_64), segptr + @sizeOf(std.macho.segment_command_64)),
|
||||
)[0..segcmd.?.nsects];
|
||||
for (sections) |*sect| {
|
||||
// The section name may not exceed 16 chars and a trailing null may
|
||||
// not be present
|
||||
const name = if (mem.indexOfScalar(u8, sect.sectname[0..], 0)) |last|
|
||||
sect.sectname[0..last]
|
||||
else
|
||||
sect.sectname[0..];
|
||||
var opt_debug_line: ?macho.section_64 = null;
|
||||
var opt_debug_info: ?macho.section_64 = null;
|
||||
var opt_debug_abbrev: ?macho.section_64 = null;
|
||||
var opt_debug_str: ?macho.section_64 = null;
|
||||
var opt_debug_str_offsets: ?macho.section_64 = null;
|
||||
var opt_debug_line_str: ?macho.section_64 = null;
|
||||
var opt_debug_ranges: ?macho.section_64 = null;
|
||||
var opt_debug_loclists: ?macho.section_64 = null;
|
||||
var opt_debug_rnglists: ?macho.section_64 = null;
|
||||
var opt_debug_addr: ?macho.section_64 = null;
|
||||
var opt_debug_names: ?macho.section_64 = null;
|
||||
var opt_debug_frame: ?macho.section_64 = null;
|
||||
|
||||
for (segcmd.?.getSections()) |sect| {
|
||||
const name = sect.sectName();
|
||||
if (mem.eql(u8, name, "__debug_line")) {
|
||||
opt_debug_line = sect;
|
||||
} else if (mem.eql(u8, name, "__debug_info")) {
|
||||
@@ -1523,10 +1542,22 @@ pub const ModuleDebugInfo = switch (native_os) {
|
||||
opt_debug_abbrev = sect;
|
||||
} else if (mem.eql(u8, name, "__debug_str")) {
|
||||
opt_debug_str = sect;
|
||||
} else if (mem.eql(u8, name, "__debug_str_offsets")) {
|
||||
opt_debug_str_offsets = sect;
|
||||
} else if (mem.eql(u8, name, "__debug_line_str")) {
|
||||
opt_debug_line_str = sect;
|
||||
} else if (mem.eql(u8, name, "__debug_ranges")) {
|
||||
opt_debug_ranges = sect;
|
||||
} else if (mem.eql(u8, name, "__debug_loclists")) {
|
||||
opt_debug_loclists = sect;
|
||||
} else if (mem.eql(u8, name, "__debug_rnglists")) {
|
||||
opt_debug_rnglists = sect;
|
||||
} else if (mem.eql(u8, name, "__debug_addr")) {
|
||||
opt_debug_addr = sect;
|
||||
} else if (mem.eql(u8, name, "__debug_names")) {
|
||||
opt_debug_names = sect;
|
||||
} else if (mem.eql(u8, name, "__debug_frame")) {
|
||||
opt_debug_frame = sect;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1544,6 +1575,10 @@ pub const ModuleDebugInfo = switch (native_os) {
|
||||
.debug_info = try chopSlice(mapped_mem, debug_info.offset, debug_info.size),
|
||||
.debug_abbrev = try chopSlice(mapped_mem, debug_abbrev.offset, debug_abbrev.size),
|
||||
.debug_str = try chopSlice(mapped_mem, debug_str.offset, debug_str.size),
|
||||
.debug_str_offsets = if (opt_debug_str_offsets) |debug_str_offsets|
|
||||
try chopSlice(mapped_mem, debug_str_offsets.offset, debug_str_offsets.size)
|
||||
else
|
||||
null,
|
||||
.debug_line = try chopSlice(mapped_mem, debug_line.offset, debug_line.size),
|
||||
.debug_line_str = if (opt_debug_line_str) |debug_line_str|
|
||||
try chopSlice(mapped_mem, debug_line_str.offset, debug_line_str.size)
|
||||
@@ -1553,6 +1588,26 @@ pub const ModuleDebugInfo = switch (native_os) {
|
||||
try chopSlice(mapped_mem, debug_ranges.offset, debug_ranges.size)
|
||||
else
|
||||
null,
|
||||
.debug_loclists = if (opt_debug_loclists) |debug_loclists|
|
||||
try chopSlice(mapped_mem, debug_loclists.offset, debug_loclists.size)
|
||||
else
|
||||
null,
|
||||
.debug_rnglists = if (opt_debug_rnglists) |debug_rnglists|
|
||||
try chopSlice(mapped_mem, debug_rnglists.offset, debug_rnglists.size)
|
||||
else
|
||||
null,
|
||||
.debug_addr = if (opt_debug_addr) |debug_addr|
|
||||
try chopSlice(mapped_mem, debug_addr.offset, debug_addr.size)
|
||||
else
|
||||
null,
|
||||
.debug_names = if (opt_debug_names) |debug_names|
|
||||
try chopSlice(mapped_mem, debug_names.offset, debug_names.size)
|
||||
else
|
||||
null,
|
||||
.debug_frame = if (opt_debug_frame) |debug_frame|
|
||||
try chopSlice(mapped_mem, debug_frame.offset, debug_frame.size)
|
||||
else
|
||||
null,
|
||||
};
|
||||
|
||||
try DW.openDwarfDebugInfo(&di, allocator);
|
||||
@@ -1607,6 +1662,8 @@ pub const ModuleDebugInfo = switch (native_os) {
|
||||
.compile_unit_name = compile_unit.die.getAttrString(
|
||||
o_file_di,
|
||||
DW.AT.name,
|
||||
o_file_di.debug_str,
|
||||
compile_unit.*,
|
||||
) catch |err| switch (err) {
|
||||
error.MissingDebugInfo, error.InvalidDebugInfo => "???",
|
||||
},
|
||||
@@ -1647,7 +1704,7 @@ pub const ModuleDebugInfo = switch (native_os) {
|
||||
|
||||
switch (self.debug_data) {
|
||||
.dwarf => |*dwarf| {
|
||||
const dwarf_address = relocated_address + self.coff.pe_header.image_base;
|
||||
const dwarf_address = relocated_address + self.coff.getImageBase();
|
||||
return getSymbolFromDwarf(allocator, dwarf_address, dwarf);
|
||||
},
|
||||
.pdb => {
|
||||
@@ -1655,13 +1712,14 @@ pub const ModuleDebugInfo = switch (native_os) {
|
||||
},
|
||||
}
|
||||
|
||||
var coff_section: *coff.Section = undefined;
|
||||
var coff_section: *align(1) const coff.SectionHeader = undefined;
|
||||
const mod_index = for (self.debug_data.pdb.sect_contribs) |sect_contrib| {
|
||||
if (sect_contrib.Section > self.coff.sections.items.len) continue;
|
||||
const sections = self.coff.getSectionHeaders();
|
||||
if (sect_contrib.Section > sections.len) continue;
|
||||
// Remember that SectionContribEntry.Section is 1-based.
|
||||
coff_section = &self.coff.sections.items[sect_contrib.Section - 1];
|
||||
coff_section = §ions[sect_contrib.Section - 1];
|
||||
|
||||
const vaddr_start = coff_section.header.virtual_address + sect_contrib.Offset;
|
||||
const vaddr_start = coff_section.virtual_address + sect_contrib.Offset;
|
||||
const vaddr_end = vaddr_start + sect_contrib.Size;
|
||||
if (relocated_address >= vaddr_start and relocated_address < vaddr_end) {
|
||||
break sect_contrib.ModuleIndex;
|
||||
@@ -1677,11 +1735,11 @@ pub const ModuleDebugInfo = switch (native_os) {
|
||||
|
||||
const symbol_name = self.debug_data.pdb.getSymbolName(
|
||||
module,
|
||||
relocated_address - coff_section.header.virtual_address,
|
||||
relocated_address - coff_section.virtual_address,
|
||||
) orelse "???";
|
||||
const opt_line_info = try self.debug_data.pdb.getLineNumberInfo(
|
||||
module,
|
||||
relocated_address - coff_section.header.virtual_address,
|
||||
relocated_address - coff_section.virtual_address,
|
||||
);
|
||||
|
||||
return SymbolInfo{
|
||||
@@ -1727,7 +1785,7 @@ fn getSymbolFromDwarf(allocator: mem.Allocator, address: u64, di: *DW.DwarfInfo)
|
||||
if (nosuspend di.findCompileUnit(address)) |compile_unit| {
|
||||
return SymbolInfo{
|
||||
.symbol_name = nosuspend di.getSymbolName(address) orelse "???",
|
||||
.compile_unit_name = compile_unit.die.getAttrString(di, DW.AT.name) catch |err| switch (err) {
|
||||
.compile_unit_name = compile_unit.die.getAttrString(di, DW.AT.name, di.debug_str, compile_unit.*) catch |err| switch (err) {
|
||||
error.MissingDebugInfo, error.InvalidDebugInfo => "???",
|
||||
},
|
||||
.line_info = nosuspend di.getLineNumberInfo(allocator, compile_unit.*, address) catch |err| switch (err) {
|
||||
@@ -1816,7 +1874,7 @@ fn resetSegfaultHandler() void {
|
||||
return;
|
||||
}
|
||||
var act = os.Sigaction{
|
||||
.handler = .{ .sigaction = os.SIG.DFL },
|
||||
.handler = .{ .handler = os.SIG.DFL },
|
||||
.mask = os.empty_sigset,
|
||||
.flags = 0,
|
||||
};
|
||||
@@ -1976,7 +2034,7 @@ noinline fn showMyTrace() usize {
|
||||
/// For more advanced usage, see `ConfigurableTrace`.
|
||||
pub const Trace = ConfigurableTrace(2, 4, builtin.mode == .Debug);
|
||||
|
||||
pub fn ConfigurableTrace(comptime size: usize, comptime stack_frame_count: usize, comptime enabled: bool) type {
|
||||
pub fn ConfigurableTrace(comptime size: usize, comptime stack_frame_count: usize, comptime is_enabled: bool) type {
|
||||
return struct {
|
||||
addrs: [actual_size][stack_frame_count]usize = undefined,
|
||||
notes: [actual_size][]const u8 = undefined,
|
||||
@@ -1985,7 +2043,7 @@ pub fn ConfigurableTrace(comptime size: usize, comptime stack_frame_count: usize
|
||||
const actual_size = if (enabled) size else 0;
|
||||
const Index = if (enabled) usize else u0;
|
||||
|
||||
pub const enabled = enabled;
|
||||
pub const enabled = is_enabled;
|
||||
|
||||
pub const add = if (enabled) addNoInline else addNoOp;
|
||||
|
||||
|
||||
+383
-156
@@ -168,6 +168,11 @@ const CompileUnit = struct {
|
||||
is_64: bool,
|
||||
die: *Die,
|
||||
pc_range: ?PcRange,
|
||||
|
||||
str_offsets_base: usize,
|
||||
addr_base: usize,
|
||||
rnglists_base: usize,
|
||||
loclists_base: usize,
|
||||
};
|
||||
|
||||
const AbbrevTable = std.ArrayList(AbbrevTableEntry);
|
||||
@@ -205,6 +210,7 @@ const AbbrevAttr = struct {
|
||||
|
||||
const FormValue = union(enum) {
|
||||
Address: u64,
|
||||
AddrOffset: usize,
|
||||
Block: []u8,
|
||||
Const: Constant,
|
||||
ExprLoc: []u8,
|
||||
@@ -214,15 +220,46 @@ const FormValue = union(enum) {
|
||||
RefAddr: u64,
|
||||
String: []const u8,
|
||||
StrPtr: u64,
|
||||
StrOffset: usize,
|
||||
LineStrPtr: u64,
|
||||
LocListOffset: u64,
|
||||
RangeListOffset: u64,
|
||||
data16: [16]u8,
|
||||
|
||||
fn getString(fv: FormValue, di: DwarfInfo) ![]const u8 {
|
||||
switch (fv) {
|
||||
.String => |s| return s,
|
||||
.StrPtr => |off| return di.getString(off),
|
||||
.LineStrPtr => |off| return di.getLineString(off),
|
||||
else => return badDwarf(),
|
||||
}
|
||||
}
|
||||
|
||||
fn getUInt(fv: FormValue, comptime U: type) !U {
|
||||
switch (fv) {
|
||||
.Const => |c| {
|
||||
const int = try c.asUnsignedLe();
|
||||
return math.cast(U, int) orelse return badDwarf();
|
||||
},
|
||||
.SecOffset => |x| return math.cast(U, x) orelse return badDwarf(),
|
||||
else => return badDwarf(),
|
||||
}
|
||||
}
|
||||
|
||||
fn getData16(fv: FormValue) ![16]u8 {
|
||||
switch (fv) {
|
||||
.data16 => |d| return d,
|
||||
else => return badDwarf(),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const Constant = struct {
|
||||
payload: u64,
|
||||
signed: bool,
|
||||
|
||||
fn asUnsignedLe(self: *const Constant) !u64 {
|
||||
if (self.signed) return error.InvalidDebugInfo;
|
||||
fn asUnsignedLe(self: Constant) !u64 {
|
||||
if (self.signed) return badDwarf();
|
||||
return self.payload;
|
||||
}
|
||||
};
|
||||
@@ -251,21 +288,46 @@ const Die = struct {
|
||||
return null;
|
||||
}
|
||||
|
||||
fn getAttrAddr(self: *const Die, id: u64) !u64 {
|
||||
fn getAttrAddr(
|
||||
self: *const Die,
|
||||
di: *DwarfInfo,
|
||||
id: u64,
|
||||
compile_unit: CompileUnit,
|
||||
) error{ InvalidDebugInfo, MissingDebugInfo }!u64 {
|
||||
const form_value = self.getAttr(id) orelse return error.MissingDebugInfo;
|
||||
return switch (form_value.*) {
|
||||
FormValue.Address => |value| value,
|
||||
FormValue.AddrOffset => |index| {
|
||||
const debug_addr = di.debug_addr orelse return badDwarf();
|
||||
// addr_base points to the first item after the header, however we
|
||||
// need to read the header to know the size of each item. Empirically,
|
||||
// it may disagree with is_64 on the compile unit.
|
||||
// The header is 8 or 12 bytes depending on is_64.
|
||||
if (compile_unit.addr_base < 8) return badDwarf();
|
||||
|
||||
const version = mem.readInt(u16, debug_addr[compile_unit.addr_base - 4 ..][0..2], di.endian);
|
||||
if (version != 5) return badDwarf();
|
||||
|
||||
const addr_size = debug_addr[compile_unit.addr_base - 2];
|
||||
const seg_size = debug_addr[compile_unit.addr_base - 1];
|
||||
|
||||
const byte_offset = compile_unit.addr_base + (addr_size + seg_size) * index;
|
||||
if (byte_offset + addr_size > debug_addr.len) return badDwarf();
|
||||
switch (addr_size) {
|
||||
1 => return debug_addr[byte_offset],
|
||||
2 => return mem.readInt(u16, debug_addr[byte_offset..][0..2], di.endian),
|
||||
4 => return mem.readInt(u32, debug_addr[byte_offset..][0..4], di.endian),
|
||||
8 => return mem.readInt(u64, debug_addr[byte_offset..][0..8], di.endian),
|
||||
else => return badDwarf(),
|
||||
}
|
||||
},
|
||||
else => error.InvalidDebugInfo,
|
||||
};
|
||||
}
|
||||
|
||||
fn getAttrSecOffset(self: *const Die, id: u64) !u64 {
|
||||
const form_value = self.getAttr(id) orelse return error.MissingDebugInfo;
|
||||
return switch (form_value.*) {
|
||||
FormValue.Const => |value| value.asUnsignedLe(),
|
||||
FormValue.SecOffset => |value| value,
|
||||
else => error.InvalidDebugInfo,
|
||||
};
|
||||
return form_value.getUInt(u64);
|
||||
}
|
||||
|
||||
fn getAttrUnsignedLe(self: *const Die, id: u64) !u64 {
|
||||
@@ -284,22 +346,44 @@ const Die = struct {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn getAttrString(self: *const Die, di: *DwarfInfo, id: u64) ![]const u8 {
|
||||
pub fn getAttrString(
|
||||
self: *const Die,
|
||||
di: *DwarfInfo,
|
||||
id: u64,
|
||||
opt_str: ?[]const u8,
|
||||
compile_unit: CompileUnit,
|
||||
) error{ InvalidDebugInfo, MissingDebugInfo }![]const u8 {
|
||||
const form_value = self.getAttr(id) orelse return error.MissingDebugInfo;
|
||||
return switch (form_value.*) {
|
||||
FormValue.String => |value| value,
|
||||
FormValue.StrPtr => |offset| di.getString(offset),
|
||||
FormValue.LineStrPtr => |offset| di.getLineString(offset),
|
||||
else => error.InvalidDebugInfo,
|
||||
};
|
||||
switch (form_value.*) {
|
||||
FormValue.String => |value| return value,
|
||||
FormValue.StrPtr => |offset| return di.getString(offset),
|
||||
FormValue.StrOffset => |index| {
|
||||
const debug_str_offsets = di.debug_str_offsets orelse return badDwarf();
|
||||
if (compile_unit.str_offsets_base == 0) return badDwarf();
|
||||
if (compile_unit.is_64) {
|
||||
const byte_offset = compile_unit.str_offsets_base + 8 * index;
|
||||
if (byte_offset + 8 > debug_str_offsets.len) return badDwarf();
|
||||
const offset = mem.readInt(u64, debug_str_offsets[byte_offset..][0..8], di.endian);
|
||||
return getStringGeneric(opt_str, offset);
|
||||
} else {
|
||||
const byte_offset = compile_unit.str_offsets_base + 4 * index;
|
||||
if (byte_offset + 4 > debug_str_offsets.len) return badDwarf();
|
||||
const offset = mem.readInt(u32, debug_str_offsets[byte_offset..][0..4], di.endian);
|
||||
return getStringGeneric(opt_str, offset);
|
||||
}
|
||||
},
|
||||
FormValue.LineStrPtr => |offset| return di.getLineString(offset),
|
||||
else => return badDwarf(),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const FileEntry = struct {
|
||||
file_name: []const u8,
|
||||
dir_index: usize,
|
||||
mtime: usize,
|
||||
len_bytes: usize,
|
||||
path: []const u8,
|
||||
dir_index: u32 = 0,
|
||||
mtime: u64 = 0,
|
||||
size: u64 = 0,
|
||||
md5: [16]u8 = [1]u8{0} ** 16,
|
||||
};
|
||||
|
||||
const LineNumberProgram = struct {
|
||||
@@ -307,13 +391,14 @@ const LineNumberProgram = struct {
|
||||
file: usize,
|
||||
line: i64,
|
||||
column: u64,
|
||||
version: u16,
|
||||
is_stmt: bool,
|
||||
basic_block: bool,
|
||||
end_sequence: bool,
|
||||
|
||||
default_is_stmt: bool,
|
||||
target_address: u64,
|
||||
include_dirs: []const []const u8,
|
||||
include_dirs: []const FileEntry,
|
||||
|
||||
prev_valid: bool,
|
||||
prev_address: u64,
|
||||
@@ -344,12 +429,18 @@ const LineNumberProgram = struct {
|
||||
self.prev_end_sequence = undefined;
|
||||
}
|
||||
|
||||
pub fn init(is_stmt: bool, include_dirs: []const []const u8, target_address: u64) LineNumberProgram {
|
||||
pub fn init(
|
||||
is_stmt: bool,
|
||||
include_dirs: []const FileEntry,
|
||||
target_address: u64,
|
||||
version: u16,
|
||||
) LineNumberProgram {
|
||||
return LineNumberProgram{
|
||||
.address = 0,
|
||||
.file = 1,
|
||||
.line = 1,
|
||||
.column = 0,
|
||||
.version = version,
|
||||
.is_stmt = is_stmt,
|
||||
.basic_block = false,
|
||||
.end_sequence = false,
|
||||
@@ -372,18 +463,24 @@ const LineNumberProgram = struct {
|
||||
allocator: mem.Allocator,
|
||||
file_entries: []const FileEntry,
|
||||
) !?debug.LineInfo {
|
||||
if (self.prev_valid and self.target_address >= self.prev_address and self.target_address < self.address) {
|
||||
const file_entry = if (self.prev_file == 0) {
|
||||
return error.MissingDebugInfo;
|
||||
} else if (self.prev_file - 1 >= file_entries.len) {
|
||||
return error.InvalidDebugInfo;
|
||||
} else &file_entries[self.prev_file - 1];
|
||||
if (self.prev_valid and
|
||||
self.target_address >= self.prev_address and
|
||||
self.target_address < self.address)
|
||||
{
|
||||
const file_index = if (self.version >= 5) self.prev_file else i: {
|
||||
if (self.prev_file == 0) return missingDwarf();
|
||||
break :i self.prev_file - 1;
|
||||
};
|
||||
|
||||
const dir_name = if (file_entry.dir_index >= self.include_dirs.len) {
|
||||
return error.InvalidDebugInfo;
|
||||
} else self.include_dirs[file_entry.dir_index];
|
||||
if (file_index >= file_entries.len) return badDwarf();
|
||||
const file_entry = &file_entries[file_index];
|
||||
|
||||
const file_name = try fs.path.join(allocator, &[_][]const u8{ dir_name, file_entry.file_name });
|
||||
if (file_entry.dir_index >= self.include_dirs.len) return badDwarf();
|
||||
const dir_name = self.include_dirs[file_entry.dir_index].path;
|
||||
|
||||
const file_name = try fs.path.join(allocator, &[_][]const u8{
|
||||
dir_name, file_entry.path,
|
||||
});
|
||||
|
||||
return debug.LineInfo{
|
||||
.line = if (self.prev_line >= 0) @intCast(u64, self.prev_line) else 0,
|
||||
@@ -410,7 +507,7 @@ fn readUnitLength(in_stream: anytype, endian: std.builtin.Endian, is_64: *bool)
|
||||
if (is_64.*) {
|
||||
return in_stream.readInt(u64, endian);
|
||||
} else {
|
||||
if (first_32_bits >= 0xfffffff0) return error.InvalidDebugInfo;
|
||||
if (first_32_bits >= 0xfffffff0) return badDwarf();
|
||||
// TODO this cast should not be needed
|
||||
return @as(u64, first_32_bits);
|
||||
}
|
||||
@@ -487,6 +584,12 @@ fn parseFormValueRef(in_stream: anytype, endian: std.builtin.Endian, size: i32)
|
||||
fn parseFormValue(allocator: mem.Allocator, in_stream: anytype, form_id: u64, endian: std.builtin.Endian, is_64: bool) anyerror!FormValue {
|
||||
return switch (form_id) {
|
||||
FORM.addr => FormValue{ .Address = try readAddress(in_stream, endian, @sizeOf(usize) == 8) },
|
||||
FORM.addrx1 => return FormValue{ .AddrOffset = try in_stream.readInt(u8, endian) },
|
||||
FORM.addrx2 => return FormValue{ .AddrOffset = try in_stream.readInt(u16, endian) },
|
||||
FORM.addrx3 => return FormValue{ .AddrOffset = try in_stream.readInt(u24, endian) },
|
||||
FORM.addrx4 => return FormValue{ .AddrOffset = try in_stream.readInt(u32, endian) },
|
||||
FORM.addrx => return FormValue{ .AddrOffset = try nosuspend leb.readULEB128(usize, in_stream) },
|
||||
|
||||
FORM.block1 => parseFormValueBlock(allocator, in_stream, endian, 1),
|
||||
FORM.block2 => parseFormValueBlock(allocator, in_stream, endian, 2),
|
||||
FORM.block4 => parseFormValueBlock(allocator, in_stream, endian, 4),
|
||||
@@ -498,6 +601,11 @@ fn parseFormValue(allocator: mem.Allocator, in_stream: anytype, form_id: u64, en
|
||||
FORM.data2 => parseFormValueConstant(in_stream, false, endian, 2),
|
||||
FORM.data4 => parseFormValueConstant(in_stream, false, endian, 4),
|
||||
FORM.data8 => parseFormValueConstant(in_stream, false, endian, 8),
|
||||
FORM.data16 => {
|
||||
var buf: [16]u8 = undefined;
|
||||
if ((try nosuspend in_stream.readAll(&buf)) < 16) return error.EndOfFile;
|
||||
return FormValue{ .data16 = buf };
|
||||
},
|
||||
FORM.udata, FORM.sdata => {
|
||||
const signed = form_id == FORM.sdata;
|
||||
return parseFormValueConstant(in_stream, signed, endian, -1);
|
||||
@@ -522,6 +630,11 @@ fn parseFormValue(allocator: mem.Allocator, in_stream: anytype, form_id: u64, en
|
||||
|
||||
FORM.string => FormValue{ .String = try in_stream.readUntilDelimiterAlloc(allocator, 0, math.maxInt(usize)) },
|
||||
FORM.strp => FormValue{ .StrPtr = try readAddress(in_stream, endian, is_64) },
|
||||
FORM.strx1 => return FormValue{ .StrOffset = try in_stream.readInt(u8, endian) },
|
||||
FORM.strx2 => return FormValue{ .StrOffset = try in_stream.readInt(u16, endian) },
|
||||
FORM.strx3 => return FormValue{ .StrOffset = try in_stream.readInt(u24, endian) },
|
||||
FORM.strx4 => return FormValue{ .StrOffset = try in_stream.readInt(u32, endian) },
|
||||
FORM.strx => return FormValue{ .StrOffset = try nosuspend leb.readULEB128(usize, in_stream) },
|
||||
FORM.line_strp => FormValue{ .LineStrPtr = try readAddress(in_stream, endian, is_64) },
|
||||
FORM.indirect => {
|
||||
const child_form_id = try nosuspend leb.readULEB128(u64, in_stream);
|
||||
@@ -534,9 +647,11 @@ fn parseFormValue(allocator: mem.Allocator, in_stream: anytype, form_id: u64, en
|
||||
return await @asyncCall(frame, {}, parseFormValue, .{ allocator, in_stream, child_form_id, endian, is_64 });
|
||||
},
|
||||
FORM.implicit_const => FormValue{ .Const = Constant{ .signed = true, .payload = undefined } },
|
||||
|
||||
FORM.loclistx => return FormValue{ .LocListOffset = try nosuspend leb.readULEB128(u64, in_stream) },
|
||||
FORM.rnglistx => return FormValue{ .RangeListOffset = try nosuspend leb.readULEB128(u64, in_stream) },
|
||||
else => {
|
||||
return error.InvalidDebugInfo;
|
||||
//std.debug.print("unrecognized form id: {x}\n", .{form_id});
|
||||
return badDwarf();
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -554,9 +669,15 @@ pub const DwarfInfo = struct {
|
||||
debug_info: []const u8,
|
||||
debug_abbrev: []const u8,
|
||||
debug_str: []const u8,
|
||||
debug_str_offsets: ?[]const u8,
|
||||
debug_line: []const u8,
|
||||
debug_line_str: ?[]const u8,
|
||||
debug_ranges: ?[]const u8,
|
||||
debug_loclists: ?[]const u8,
|
||||
debug_rnglists: ?[]const u8,
|
||||
debug_addr: ?[]const u8,
|
||||
debug_names: ?[]const u8,
|
||||
debug_frame: ?[]const u8,
|
||||
// Filled later by the initializer
|
||||
abbrev_table_list: std.ArrayListUnmanaged(AbbrevTableHeader) = .{},
|
||||
compile_unit_list: std.ArrayListUnmanaged(CompileUnit) = .{},
|
||||
@@ -592,7 +713,7 @@ pub const DwarfInfo = struct {
|
||||
|
||||
fn scanAllFunctions(di: *DwarfInfo, allocator: mem.Allocator) !void {
|
||||
var stream = io.fixedBufferStream(di.debug_info);
|
||||
const in = &stream.reader();
|
||||
const in = stream.reader();
|
||||
const seekable = &stream.seekableStream();
|
||||
var this_unit_offset: u64 = 0;
|
||||
|
||||
@@ -609,29 +730,26 @@ pub const DwarfInfo = struct {
|
||||
const next_offset = unit_length + (if (is_64) @as(usize, 12) else @as(usize, 4));
|
||||
|
||||
const version = try in.readInt(u16, di.endian);
|
||||
if (version < 2 or version > 5) return error.InvalidDebugInfo;
|
||||
if (version < 2 or version > 5) return badDwarf();
|
||||
|
||||
var address_size: u8 = undefined;
|
||||
var debug_abbrev_offset: u64 = undefined;
|
||||
switch (version) {
|
||||
5 => {
|
||||
const unit_type = try in.readInt(u8, di.endian);
|
||||
if (unit_type != UT.compile) return error.InvalidDebugInfo;
|
||||
address_size = try in.readByte();
|
||||
debug_abbrev_offset = if (is_64)
|
||||
try in.readInt(u64, di.endian)
|
||||
else
|
||||
try in.readInt(u32, di.endian);
|
||||
},
|
||||
else => {
|
||||
debug_abbrev_offset = if (is_64)
|
||||
try in.readInt(u64, di.endian)
|
||||
else
|
||||
try in.readInt(u32, di.endian);
|
||||
address_size = try in.readByte();
|
||||
},
|
||||
if (version >= 5) {
|
||||
const unit_type = try in.readInt(u8, di.endian);
|
||||
if (unit_type != UT.compile) return badDwarf();
|
||||
address_size = try in.readByte();
|
||||
debug_abbrev_offset = if (is_64)
|
||||
try in.readInt(u64, di.endian)
|
||||
else
|
||||
try in.readInt(u32, di.endian);
|
||||
} else {
|
||||
debug_abbrev_offset = if (is_64)
|
||||
try in.readInt(u64, di.endian)
|
||||
else
|
||||
try in.readInt(u32, di.endian);
|
||||
address_size = try in.readByte();
|
||||
}
|
||||
if (address_size != @sizeOf(usize)) return error.InvalidDebugInfo;
|
||||
if (address_size != @sizeOf(usize)) return badDwarf();
|
||||
|
||||
const compile_unit_pos = try seekable.getPos();
|
||||
const abbrev_table = try di.getAbbrevTable(allocator, debug_abbrev_offset);
|
||||
@@ -640,11 +758,26 @@ pub const DwarfInfo = struct {
|
||||
|
||||
const next_unit_pos = this_unit_offset + next_offset;
|
||||
|
||||
var compile_unit: CompileUnit = undefined;
|
||||
|
||||
while ((try seekable.getPos()) < next_unit_pos) {
|
||||
const die_obj = (try di.parseDie(arena, in, abbrev_table, is_64)) orelse continue;
|
||||
var die_obj = (try di.parseDie(arena, in, abbrev_table, is_64)) orelse continue;
|
||||
const after_die_offset = try seekable.getPos();
|
||||
|
||||
switch (die_obj.tag_id) {
|
||||
TAG.compile_unit => {
|
||||
compile_unit = .{
|
||||
.version = version,
|
||||
.is_64 = is_64,
|
||||
.die = &die_obj,
|
||||
.pc_range = null,
|
||||
|
||||
.str_offsets_base = if (die_obj.getAttr(AT.str_offsets_base)) |fv| try fv.getUInt(usize) else 0,
|
||||
.addr_base = if (die_obj.getAttr(AT.addr_base)) |fv| try fv.getUInt(usize) else 0,
|
||||
.rnglists_base = if (die_obj.getAttr(AT.rnglists_base)) |fv| try fv.getUInt(usize) else 0,
|
||||
.loclists_base = if (die_obj.getAttr(AT.loclists_base)) |fv| try fv.getUInt(usize) else 0,
|
||||
};
|
||||
},
|
||||
TAG.subprogram, TAG.inlined_subroutine, TAG.subroutine, TAG.entry_point => {
|
||||
const fn_name = x: {
|
||||
var depth: i32 = 3;
|
||||
@@ -652,30 +785,30 @@ pub const DwarfInfo = struct {
|
||||
// Prevent endless loops
|
||||
while (depth > 0) : (depth -= 1) {
|
||||
if (this_die_obj.getAttr(AT.name)) |_| {
|
||||
const name = try this_die_obj.getAttrString(di, AT.name);
|
||||
const name = try this_die_obj.getAttrString(di, AT.name, di.debug_str, compile_unit);
|
||||
break :x try allocator.dupe(u8, name);
|
||||
} else if (this_die_obj.getAttr(AT.abstract_origin)) |_| {
|
||||
// Follow the DIE it points to and repeat
|
||||
const ref_offset = try this_die_obj.getAttrRef(AT.abstract_origin);
|
||||
if (ref_offset > next_offset) return error.InvalidDebugInfo;
|
||||
if (ref_offset > next_offset) return badDwarf();
|
||||
try seekable.seekTo(this_unit_offset + ref_offset);
|
||||
this_die_obj = (try di.parseDie(
|
||||
arena,
|
||||
in,
|
||||
abbrev_table,
|
||||
is_64,
|
||||
)) orelse return error.InvalidDebugInfo;
|
||||
)) orelse return badDwarf();
|
||||
} else if (this_die_obj.getAttr(AT.specification)) |_| {
|
||||
// Follow the DIE it points to and repeat
|
||||
const ref_offset = try this_die_obj.getAttrRef(AT.specification);
|
||||
if (ref_offset > next_offset) return error.InvalidDebugInfo;
|
||||
if (ref_offset > next_offset) return badDwarf();
|
||||
try seekable.seekTo(this_unit_offset + ref_offset);
|
||||
this_die_obj = (try di.parseDie(
|
||||
arena,
|
||||
in,
|
||||
abbrev_table,
|
||||
is_64,
|
||||
)) orelse return error.InvalidDebugInfo;
|
||||
)) orelse return badDwarf();
|
||||
} else {
|
||||
break :x null;
|
||||
}
|
||||
@@ -685,7 +818,7 @@ pub const DwarfInfo = struct {
|
||||
};
|
||||
|
||||
const pc_range = x: {
|
||||
if (die_obj.getAttrAddr(AT.low_pc)) |low_pc| {
|
||||
if (die_obj.getAttrAddr(di, AT.low_pc, compile_unit)) |low_pc| {
|
||||
if (die_obj.getAttr(AT.high_pc)) |high_pc_value| {
|
||||
const pc_end = switch (high_pc_value.*) {
|
||||
FormValue.Address => |value| value,
|
||||
@@ -693,7 +826,7 @@ pub const DwarfInfo = struct {
|
||||
const offset = try value.asUnsignedLe();
|
||||
break :b (low_pc + offset);
|
||||
},
|
||||
else => return error.InvalidDebugInfo,
|
||||
else => return badDwarf(),
|
||||
};
|
||||
break :x PcRange{
|
||||
.start = low_pc,
|
||||
@@ -738,29 +871,26 @@ pub const DwarfInfo = struct {
|
||||
const next_offset = unit_length + (if (is_64) @as(usize, 12) else @as(usize, 4));
|
||||
|
||||
const version = try in.readInt(u16, di.endian);
|
||||
if (version < 2 or version > 5) return error.InvalidDebugInfo;
|
||||
if (version < 2 or version > 5) return badDwarf();
|
||||
|
||||
var address_size: u8 = undefined;
|
||||
var debug_abbrev_offset: u64 = undefined;
|
||||
switch (version) {
|
||||
5 => {
|
||||
const unit_type = try in.readInt(u8, di.endian);
|
||||
if (unit_type != UT.compile) return error.InvalidDebugInfo;
|
||||
address_size = try in.readByte();
|
||||
debug_abbrev_offset = if (is_64)
|
||||
try in.readInt(u64, di.endian)
|
||||
else
|
||||
try in.readInt(u32, di.endian);
|
||||
},
|
||||
else => {
|
||||
debug_abbrev_offset = if (is_64)
|
||||
try in.readInt(u64, di.endian)
|
||||
else
|
||||
try in.readInt(u32, di.endian);
|
||||
address_size = try in.readByte();
|
||||
},
|
||||
if (version >= 5) {
|
||||
const unit_type = try in.readInt(u8, di.endian);
|
||||
if (unit_type != UT.compile) return badDwarf();
|
||||
address_size = try in.readByte();
|
||||
debug_abbrev_offset = if (is_64)
|
||||
try in.readInt(u64, di.endian)
|
||||
else
|
||||
try in.readInt(u32, di.endian);
|
||||
} else {
|
||||
debug_abbrev_offset = if (is_64)
|
||||
try in.readInt(u64, di.endian)
|
||||
else
|
||||
try in.readInt(u32, di.endian);
|
||||
address_size = try in.readByte();
|
||||
}
|
||||
if (address_size != @sizeOf(usize)) return error.InvalidDebugInfo;
|
||||
if (address_size != @sizeOf(usize)) return badDwarf();
|
||||
|
||||
const compile_unit_pos = try seekable.getPos();
|
||||
const abbrev_table = try di.getAbbrevTable(allocator, debug_abbrev_offset);
|
||||
@@ -770,12 +900,23 @@ pub const DwarfInfo = struct {
|
||||
const compile_unit_die = try allocator.create(Die);
|
||||
errdefer allocator.destroy(compile_unit_die);
|
||||
compile_unit_die.* = (try di.parseDie(allocator, in, abbrev_table, is_64)) orelse
|
||||
return error.InvalidDebugInfo;
|
||||
return badDwarf();
|
||||
|
||||
if (compile_unit_die.tag_id != TAG.compile_unit) return error.InvalidDebugInfo;
|
||||
if (compile_unit_die.tag_id != TAG.compile_unit) return badDwarf();
|
||||
|
||||
const pc_range = x: {
|
||||
if (compile_unit_die.getAttrAddr(AT.low_pc)) |low_pc| {
|
||||
var compile_unit: CompileUnit = .{
|
||||
.version = version,
|
||||
.is_64 = is_64,
|
||||
.pc_range = null,
|
||||
.die = compile_unit_die,
|
||||
.str_offsets_base = if (compile_unit_die.getAttr(AT.str_offsets_base)) |fv| try fv.getUInt(usize) else 0,
|
||||
.addr_base = if (compile_unit_die.getAttr(AT.addr_base)) |fv| try fv.getUInt(usize) else 0,
|
||||
.rnglists_base = if (compile_unit_die.getAttr(AT.rnglists_base)) |fv| try fv.getUInt(usize) else 0,
|
||||
.loclists_base = if (compile_unit_die.getAttr(AT.loclists_base)) |fv| try fv.getUInt(usize) else 0,
|
||||
};
|
||||
|
||||
compile_unit.pc_range = x: {
|
||||
if (compile_unit_die.getAttrAddr(di, AT.low_pc, compile_unit)) |low_pc| {
|
||||
if (compile_unit_die.getAttr(AT.high_pc)) |high_pc_value| {
|
||||
const pc_end = switch (high_pc_value.*) {
|
||||
FormValue.Address => |value| value,
|
||||
@@ -783,7 +924,7 @@ pub const DwarfInfo = struct {
|
||||
const offset = try value.asUnsignedLe();
|
||||
break :b (low_pc + offset);
|
||||
},
|
||||
else => return error.InvalidDebugInfo,
|
||||
else => return badDwarf(),
|
||||
};
|
||||
break :x PcRange{
|
||||
.start = low_pc,
|
||||
@@ -798,12 +939,7 @@ pub const DwarfInfo = struct {
|
||||
}
|
||||
};
|
||||
|
||||
try di.compile_unit_list.append(allocator, CompileUnit{
|
||||
.version = version,
|
||||
.is_64 = is_64,
|
||||
.pc_range = pc_range,
|
||||
.die = compile_unit_die,
|
||||
});
|
||||
try di.compile_unit_list.append(allocator, compile_unit);
|
||||
|
||||
this_unit_offset += next_offset;
|
||||
}
|
||||
@@ -824,7 +960,7 @@ pub const DwarfInfo = struct {
|
||||
// specified by DW_AT.low_pc or to some other value encoded
|
||||
// in the list itself.
|
||||
// If no starting value is specified use zero.
|
||||
var base_address = compile_unit.die.getAttrAddr(AT.low_pc) catch |err| switch (err) {
|
||||
var base_address = compile_unit.die.getAttrAddr(di, AT.low_pc, compile_unit.*) catch |err| switch (err) {
|
||||
error.MissingDebugInfo => @as(u64, 0), // TODO https://github.com/ziglang/zig/issues/11135
|
||||
else => return err,
|
||||
};
|
||||
@@ -852,7 +988,7 @@ pub const DwarfInfo = struct {
|
||||
}
|
||||
}
|
||||
}
|
||||
return error.MissingDebugInfo;
|
||||
return missingDwarf();
|
||||
}
|
||||
|
||||
/// Gets an already existing AbbrevTable given the abbrev_offset, or if not found,
|
||||
@@ -919,7 +1055,7 @@ pub const DwarfInfo = struct {
|
||||
) !?Die {
|
||||
const abbrev_code = try leb.readULEB128(u64, in_stream);
|
||||
if (abbrev_code == 0) return null;
|
||||
const table_entry = getAbbrevTableEntry(abbrev_table, abbrev_code) orelse return error.InvalidDebugInfo;
|
||||
const table_entry = getAbbrevTableEntry(abbrev_table, abbrev_code) orelse return badDwarf();
|
||||
|
||||
var result = Die{
|
||||
// Lives as long as the Die.
|
||||
@@ -956,7 +1092,7 @@ pub const DwarfInfo = struct {
|
||||
const in = &stream.reader();
|
||||
const seekable = &stream.seekableStream();
|
||||
|
||||
const compile_unit_cwd = try compile_unit.die.getAttrString(di, AT.comp_dir);
|
||||
const compile_unit_cwd = try compile_unit.die.getAttrString(di, AT.comp_dir, di.debug_line_str, compile_unit);
|
||||
const line_info_offset = try compile_unit.die.getAttrSecOffset(AT.stmt_list);
|
||||
|
||||
try seekable.seekTo(line_info_offset);
|
||||
@@ -964,18 +1100,25 @@ pub const DwarfInfo = struct {
|
||||
var is_64: bool = undefined;
|
||||
const unit_length = try readUnitLength(in, di.endian, &is_64);
|
||||
if (unit_length == 0) {
|
||||
return error.MissingDebugInfo;
|
||||
return missingDwarf();
|
||||
}
|
||||
const next_offset = unit_length + (if (is_64) @as(usize, 12) else @as(usize, 4));
|
||||
|
||||
const version = try in.readInt(u16, di.endian);
|
||||
if (version < 2 or version > 4) return error.InvalidDebugInfo;
|
||||
if (version < 2) return badDwarf();
|
||||
|
||||
var addr_size: u8 = if (is_64) 8 else 4;
|
||||
var seg_size: u8 = 0;
|
||||
if (version >= 5) {
|
||||
addr_size = try in.readByte();
|
||||
seg_size = try in.readByte();
|
||||
}
|
||||
|
||||
const prologue_length = if (is_64) try in.readInt(u64, di.endian) else try in.readInt(u32, di.endian);
|
||||
const prog_start_offset = (try seekable.getPos()) + prologue_length;
|
||||
|
||||
const minimum_instruction_length = try in.readByte();
|
||||
if (minimum_instruction_length == 0) return error.InvalidDebugInfo;
|
||||
if (minimum_instruction_length == 0) return badDwarf();
|
||||
|
||||
if (version >= 4) {
|
||||
// maximum_operations_per_instruction
|
||||
@@ -986,7 +1129,7 @@ pub const DwarfInfo = struct {
|
||||
const line_base = try in.readByteSigned();
|
||||
|
||||
const line_range = try in.readByte();
|
||||
if (line_range == 0) return error.InvalidDebugInfo;
|
||||
if (line_range == 0) return badDwarf();
|
||||
|
||||
const opcode_base = try in.readByte();
|
||||
|
||||
@@ -1004,36 +1147,120 @@ pub const DwarfInfo = struct {
|
||||
defer tmp_arena.deinit();
|
||||
const arena = tmp_arena.allocator();
|
||||
|
||||
var include_directories = std.ArrayList([]const u8).init(arena);
|
||||
try include_directories.append(compile_unit_cwd);
|
||||
var include_directories = std.ArrayList(FileEntry).init(arena);
|
||||
var file_entries = std.ArrayList(FileEntry).init(arena);
|
||||
|
||||
while (true) {
|
||||
const dir = try in.readUntilDelimiterAlloc(arena, 0, math.maxInt(usize));
|
||||
if (dir.len == 0) break;
|
||||
try include_directories.append(dir);
|
||||
if (version < 5) {
|
||||
try include_directories.append(.{ .path = compile_unit_cwd });
|
||||
|
||||
while (true) {
|
||||
const dir = try in.readUntilDelimiterAlloc(arena, 0, math.maxInt(usize));
|
||||
if (dir.len == 0) break;
|
||||
try include_directories.append(.{ .path = dir });
|
||||
}
|
||||
|
||||
while (true) {
|
||||
const file_name = try in.readUntilDelimiterAlloc(arena, 0, math.maxInt(usize));
|
||||
if (file_name.len == 0) break;
|
||||
const dir_index = try leb.readULEB128(u32, in);
|
||||
const mtime = try leb.readULEB128(u64, in);
|
||||
const size = try leb.readULEB128(u64, in);
|
||||
try file_entries.append(FileEntry{
|
||||
.path = file_name,
|
||||
.dir_index = dir_index,
|
||||
.mtime = mtime,
|
||||
.size = size,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
const FileEntFmt = struct {
|
||||
content_type_code: u8,
|
||||
form_code: u16,
|
||||
};
|
||||
{
|
||||
var dir_ent_fmt_buf: [10]FileEntFmt = undefined;
|
||||
const directory_entry_format_count = try in.readByte();
|
||||
if (directory_entry_format_count > dir_ent_fmt_buf.len) return badDwarf();
|
||||
for (dir_ent_fmt_buf[0..directory_entry_format_count]) |*ent_fmt| {
|
||||
ent_fmt.* = .{
|
||||
.content_type_code = try leb.readULEB128(u8, in),
|
||||
.form_code = try leb.readULEB128(u16, in),
|
||||
};
|
||||
}
|
||||
|
||||
const directories_count = try leb.readULEB128(usize, in);
|
||||
try include_directories.ensureUnusedCapacity(directories_count);
|
||||
{
|
||||
var i: usize = 0;
|
||||
while (i < directories_count) : (i += 1) {
|
||||
var e: FileEntry = .{ .path = &.{} };
|
||||
for (dir_ent_fmt_buf[0..directory_entry_format_count]) |ent_fmt| {
|
||||
const form_value = try parseFormValue(
|
||||
arena,
|
||||
in,
|
||||
ent_fmt.form_code,
|
||||
di.endian,
|
||||
is_64,
|
||||
);
|
||||
switch (ent_fmt.content_type_code) {
|
||||
LNCT.path => e.path = try form_value.getString(di.*),
|
||||
LNCT.directory_index => e.dir_index = try form_value.getUInt(u32),
|
||||
LNCT.timestamp => e.mtime = try form_value.getUInt(u64),
|
||||
LNCT.size => e.size = try form_value.getUInt(u64),
|
||||
LNCT.MD5 => e.md5 = try form_value.getData16(),
|
||||
else => continue,
|
||||
}
|
||||
}
|
||||
include_directories.appendAssumeCapacity(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var file_ent_fmt_buf: [10]FileEntFmt = undefined;
|
||||
const file_name_entry_format_count = try in.readByte();
|
||||
if (file_name_entry_format_count > file_ent_fmt_buf.len) return badDwarf();
|
||||
for (file_ent_fmt_buf[0..file_name_entry_format_count]) |*ent_fmt| {
|
||||
ent_fmt.* = .{
|
||||
.content_type_code = try leb.readULEB128(u8, in),
|
||||
.form_code = try leb.readULEB128(u16, in),
|
||||
};
|
||||
}
|
||||
|
||||
const file_names_count = try leb.readULEB128(usize, in);
|
||||
try file_entries.ensureUnusedCapacity(file_names_count);
|
||||
{
|
||||
var i: usize = 0;
|
||||
while (i < file_names_count) : (i += 1) {
|
||||
var e: FileEntry = .{ .path = &.{} };
|
||||
for (file_ent_fmt_buf[0..file_name_entry_format_count]) |ent_fmt| {
|
||||
const form_value = try parseFormValue(
|
||||
arena,
|
||||
in,
|
||||
ent_fmt.form_code,
|
||||
di.endian,
|
||||
is_64,
|
||||
);
|
||||
switch (ent_fmt.content_type_code) {
|
||||
LNCT.path => e.path = try form_value.getString(di.*),
|
||||
LNCT.directory_index => e.dir_index = try form_value.getUInt(u32),
|
||||
LNCT.timestamp => e.mtime = try form_value.getUInt(u64),
|
||||
LNCT.size => e.size = try form_value.getUInt(u64),
|
||||
LNCT.MD5 => e.md5 = try form_value.getData16(),
|
||||
else => continue,
|
||||
}
|
||||
}
|
||||
file_entries.appendAssumeCapacity(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var file_entries = std.ArrayList(FileEntry).init(arena);
|
||||
var prog = LineNumberProgram.init(
|
||||
default_is_stmt,
|
||||
include_directories.items,
|
||||
target_address,
|
||||
version,
|
||||
);
|
||||
|
||||
while (true) {
|
||||
const file_name = try in.readUntilDelimiterAlloc(arena, 0, math.maxInt(usize));
|
||||
if (file_name.len == 0) break;
|
||||
const dir_index = try leb.readULEB128(usize, in);
|
||||
const mtime = try leb.readULEB128(usize, in);
|
||||
const len_bytes = try leb.readULEB128(usize, in);
|
||||
try file_entries.append(FileEntry{
|
||||
.file_name = file_name,
|
||||
.dir_index = dir_index,
|
||||
.mtime = mtime,
|
||||
.len_bytes = len_bytes,
|
||||
});
|
||||
}
|
||||
|
||||
try seekable.seekTo(prog_start_offset);
|
||||
|
||||
const next_unit_pos = line_info_offset + next_offset;
|
||||
@@ -1043,7 +1270,7 @@ pub const DwarfInfo = struct {
|
||||
|
||||
if (opcode == LNS.extended_op) {
|
||||
const op_size = try leb.readULEB128(u64, in);
|
||||
if (op_size < 1) return error.InvalidDebugInfo;
|
||||
if (op_size < 1) return badDwarf();
|
||||
var sub_op = try in.readByte();
|
||||
switch (sub_op) {
|
||||
LNE.end_sequence => {
|
||||
@@ -1056,19 +1283,19 @@ pub const DwarfInfo = struct {
|
||||
prog.address = addr;
|
||||
},
|
||||
LNE.define_file => {
|
||||
const file_name = try in.readUntilDelimiterAlloc(arena, 0, math.maxInt(usize));
|
||||
const dir_index = try leb.readULEB128(usize, in);
|
||||
const mtime = try leb.readULEB128(usize, in);
|
||||
const len_bytes = try leb.readULEB128(usize, in);
|
||||
const path = try in.readUntilDelimiterAlloc(arena, 0, math.maxInt(usize));
|
||||
const dir_index = try leb.readULEB128(u32, in);
|
||||
const mtime = try leb.readULEB128(u64, in);
|
||||
const size = try leb.readULEB128(u64, in);
|
||||
try file_entries.append(FileEntry{
|
||||
.file_name = file_name,
|
||||
.path = path,
|
||||
.dir_index = dir_index,
|
||||
.mtime = mtime,
|
||||
.len_bytes = len_bytes,
|
||||
.size = size,
|
||||
});
|
||||
},
|
||||
else => {
|
||||
const fwd_amt = math.cast(isize, op_size - 1) orelse return error.InvalidDebugInfo;
|
||||
const fwd_amt = math.cast(isize, op_size - 1) orelse return badDwarf();
|
||||
try seekable.seekBy(fwd_amt);
|
||||
},
|
||||
}
|
||||
@@ -1119,7 +1346,7 @@ pub const DwarfInfo = struct {
|
||||
},
|
||||
LNS.set_prologue_end => {},
|
||||
else => {
|
||||
if (opcode - 1 >= standard_opcode_lengths.len) return error.InvalidDebugInfo;
|
||||
if (opcode - 1 >= standard_opcode_lengths.len) return badDwarf();
|
||||
const len_bytes = standard_opcode_lengths[opcode - 1];
|
||||
try seekable.seekBy(len_bytes);
|
||||
},
|
||||
@@ -1127,36 +1354,15 @@ pub const DwarfInfo = struct {
|
||||
}
|
||||
}
|
||||
|
||||
return error.MissingDebugInfo;
|
||||
return missingDwarf();
|
||||
}
|
||||
|
||||
fn getString(di: *DwarfInfo, offset: u64) ![]const u8 {
|
||||
if (offset > di.debug_str.len)
|
||||
return error.InvalidDebugInfo;
|
||||
const casted_offset = math.cast(usize, offset) orelse
|
||||
return error.InvalidDebugInfo;
|
||||
|
||||
// Valid strings always have a terminating zero byte
|
||||
if (mem.indexOfScalarPos(u8, di.debug_str, casted_offset, 0)) |last| {
|
||||
return di.debug_str[casted_offset..last];
|
||||
}
|
||||
|
||||
return error.InvalidDebugInfo;
|
||||
fn getString(di: DwarfInfo, offset: u64) ![]const u8 {
|
||||
return getStringGeneric(di.debug_str, offset);
|
||||
}
|
||||
|
||||
fn getLineString(di: *DwarfInfo, offset: u64) ![]const u8 {
|
||||
const debug_line_str = di.debug_line_str orelse return error.InvalidDebugInfo;
|
||||
if (offset > debug_line_str.len)
|
||||
return error.InvalidDebugInfo;
|
||||
const casted_offset = math.cast(usize, offset) orelse
|
||||
return error.InvalidDebugInfo;
|
||||
|
||||
// Valid strings always have a terminating zero byte
|
||||
if (mem.indexOfScalarPos(u8, debug_line_str, casted_offset, 0)) |last| {
|
||||
return debug_line_str[casted_offset..last];
|
||||
}
|
||||
|
||||
return error.InvalidDebugInfo;
|
||||
fn getLineString(di: DwarfInfo, offset: u64) ![]const u8 {
|
||||
return getStringGeneric(di.debug_line_str, offset);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1166,3 +1372,24 @@ pub fn openDwarfDebugInfo(di: *DwarfInfo, allocator: mem.Allocator) !void {
|
||||
try di.scanAllFunctions(allocator);
|
||||
try di.scanAllCompileUnits(allocator);
|
||||
}
|
||||
|
||||
/// This function is to make it handy to comment out the return and make it
|
||||
/// into a crash when working on this file.
|
||||
fn badDwarf() error{InvalidDebugInfo} {
|
||||
//std.os.abort(); // can be handy to uncomment when working on this file
|
||||
return error.InvalidDebugInfo;
|
||||
}
|
||||
|
||||
fn missingDwarf() error{MissingDebugInfo} {
|
||||
//std.os.abort(); // can be handy to uncomment when working on this file
|
||||
return error.MissingDebugInfo;
|
||||
}
|
||||
|
||||
fn getStringGeneric(opt_str: ?[]const u8, offset: u64) ![:0]const u8 {
|
||||
const str = opt_str orelse return badDwarf();
|
||||
if (offset > str.len) return badDwarf();
|
||||
const casted_offset = math.cast(usize, offset) orelse return badDwarf();
|
||||
// Valid strings always have a terminating zero byte
|
||||
const last = mem.indexOfScalarPos(u8, str, casted_offset, 0) orelse return badDwarf();
|
||||
return str[casted_offset..last :0];
|
||||
}
|
||||
|
||||
+12
-12
@@ -3,7 +3,7 @@ const io = std.io;
|
||||
const os = std.os;
|
||||
const math = std.math;
|
||||
const mem = std.mem;
|
||||
const debug = std.debug;
|
||||
const assert = std.debug.assert;
|
||||
const File = std.fs.File;
|
||||
const native_endian = @import("builtin").target.cpu.arch.endian();
|
||||
|
||||
@@ -387,7 +387,7 @@ pub const Header = struct {
|
||||
|
||||
const machine = if (need_bswap) blk: {
|
||||
const value = @enumToInt(hdr32.e_machine);
|
||||
break :blk @intToEnum(EM, @byteSwap(@TypeOf(value), value));
|
||||
break :blk @intToEnum(EM, @byteSwap(value));
|
||||
} else hdr32.e_machine;
|
||||
|
||||
return @as(Header, .{
|
||||
@@ -406,7 +406,7 @@ pub const Header = struct {
|
||||
}
|
||||
};
|
||||
|
||||
pub fn ProgramHeaderIterator(ParseSource: anytype) type {
|
||||
pub fn ProgramHeaderIterator(comptime ParseSource: anytype) type {
|
||||
return struct {
|
||||
elf_header: Header,
|
||||
parse_source: ParseSource,
|
||||
@@ -456,7 +456,7 @@ pub fn ProgramHeaderIterator(ParseSource: anytype) type {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn SectionHeaderIterator(ParseSource: anytype) type {
|
||||
pub fn SectionHeaderIterator(comptime ParseSource: anytype) type {
|
||||
return struct {
|
||||
elf_header: Header,
|
||||
parse_source: ParseSource,
|
||||
@@ -511,7 +511,7 @@ pub fn SectionHeaderIterator(ParseSource: anytype) type {
|
||||
pub fn int(is_64: bool, need_bswap: bool, int_32: anytype, int_64: anytype) @TypeOf(int_64) {
|
||||
if (is_64) {
|
||||
if (need_bswap) {
|
||||
return @byteSwap(@TypeOf(int_64), int_64);
|
||||
return @byteSwap(int_64);
|
||||
} else {
|
||||
return int_64;
|
||||
}
|
||||
@@ -522,7 +522,7 @@ pub fn int(is_64: bool, need_bswap: bool, int_32: anytype, int_64: anytype) @Typ
|
||||
|
||||
pub fn int32(need_bswap: bool, int_32: anytype, comptime Int64: anytype) Int64 {
|
||||
if (need_bswap) {
|
||||
return @byteSwap(@TypeOf(int_32), int_32);
|
||||
return @byteSwap(int_32);
|
||||
} else {
|
||||
return int_32;
|
||||
}
|
||||
@@ -872,14 +872,14 @@ pub const Elf_MIPS_ABIFlags_v0 = extern struct {
|
||||
};
|
||||
|
||||
comptime {
|
||||
debug.assert(@sizeOf(Elf32_Ehdr) == 52);
|
||||
debug.assert(@sizeOf(Elf64_Ehdr) == 64);
|
||||
assert(@sizeOf(Elf32_Ehdr) == 52);
|
||||
assert(@sizeOf(Elf64_Ehdr) == 64);
|
||||
|
||||
debug.assert(@sizeOf(Elf32_Phdr) == 32);
|
||||
debug.assert(@sizeOf(Elf64_Phdr) == 56);
|
||||
assert(@sizeOf(Elf32_Phdr) == 32);
|
||||
assert(@sizeOf(Elf64_Phdr) == 56);
|
||||
|
||||
debug.assert(@sizeOf(Elf32_Shdr) == 40);
|
||||
debug.assert(@sizeOf(Elf64_Shdr) == 64);
|
||||
assert(@sizeOf(Elf32_Shdr) == 40);
|
||||
assert(@sizeOf(Elf64_Shdr) == 64);
|
||||
}
|
||||
|
||||
pub const Auxv = switch (@sizeOf(usize)) {
|
||||
|
||||
+1
-1
@@ -57,7 +57,7 @@ pub fn values(comptime E: type) []const E {
|
||||
/// the total number of items which have no matching enum key (holes in the enum
|
||||
/// numbering). So for example, if an enum has values 1, 2, 5, and 6, max_unused_slots
|
||||
/// must be at least 3, to allow unused slots 0, 3, and 4.
|
||||
fn directEnumArrayLen(comptime E: type, comptime max_unused_slots: comptime_int) comptime_int {
|
||||
pub fn directEnumArrayLen(comptime E: type, comptime max_unused_slots: comptime_int) comptime_int {
|
||||
var max_value: comptime_int = -1;
|
||||
const max_usize: comptime_int = ~@as(usize, 0);
|
||||
const fields = std.meta.fields(E);
|
||||
|
||||
@@ -56,7 +56,7 @@ pub fn Channel(comptime T: type) type {
|
||||
pub fn init(self: *SelfChannel, buffer: []T) void {
|
||||
// The ring buffer implementation only works with power of 2 buffer sizes
|
||||
// because of relying on subtracting across zero. For example (0 -% 1) % 10 == 5
|
||||
assert(buffer.len == 0 or @popCount(usize, buffer.len) == 1);
|
||||
assert(buffer.len == 0 or @popCount(buffer.len) == 1);
|
||||
|
||||
self.* = SelfChannel{
|
||||
.buffer_len = 0,
|
||||
|
||||
+2
-2
@@ -195,7 +195,7 @@ pub fn format(
|
||||
}
|
||||
|
||||
if (comptime arg_state.hasUnusedArgs()) {
|
||||
const missing_count = arg_state.args_len - @popCount(ArgSetType, arg_state.used_args);
|
||||
const missing_count = arg_state.args_len - @popCount(arg_state.used_args);
|
||||
switch (missing_count) {
|
||||
0 => unreachable,
|
||||
1 => @compileError("unused argument in '" ++ fmt ++ "'"),
|
||||
@@ -380,7 +380,7 @@ const ArgState = struct {
|
||||
args_len: usize,
|
||||
|
||||
fn hasUnusedArgs(self: *@This()) bool {
|
||||
return @popCount(ArgSetType, self.used_args) != self.args_len;
|
||||
return @popCount(self.used_args) != self.args_len;
|
||||
}
|
||||
|
||||
fn nextArg(self: *@This(), arg_index: ?usize) ?usize {
|
||||
|
||||
@@ -36,7 +36,7 @@ pub fn convertEiselLemire(comptime T: type, q: i64, w_: u64) ?BiasedFp(f64) {
|
||||
}
|
||||
|
||||
// Normalize our significant digits, so the most-significant bit is set.
|
||||
const lz = @clz(u64, @bitCast(u64, w));
|
||||
const lz = @clz(@bitCast(u64, w));
|
||||
w = math.shl(u64, w, lz);
|
||||
|
||||
const r = computeProductApprox(q, w, float_info.mantissa_explicit_bits + 3);
|
||||
|
||||
+4
-2
@@ -877,8 +877,9 @@ pub const IterableDir = struct {
|
||||
/// a reference to the path.
|
||||
pub fn next(self: *Walker) !?WalkerEntry {
|
||||
while (self.stack.items.len != 0) {
|
||||
// `top` becomes invalid after appending to `self.stack`
|
||||
// `top` and `containing` become invalid after appending to `self.stack`
|
||||
var top = &self.stack.items[self.stack.items.len - 1];
|
||||
var containing = top;
|
||||
var dirname_len = top.dirname_len;
|
||||
if (try top.iter.next()) |base| {
|
||||
self.name_buffer.shrinkRetainingCapacity(dirname_len);
|
||||
@@ -899,10 +900,11 @@ pub const IterableDir = struct {
|
||||
.dirname_len = self.name_buffer.items.len,
|
||||
});
|
||||
top = &self.stack.items[self.stack.items.len - 1];
|
||||
containing = &self.stack.items[self.stack.items.len - 2];
|
||||
}
|
||||
}
|
||||
return WalkerEntry{
|
||||
.dir = top.iter.dir,
|
||||
.dir = containing.iter.dir,
|
||||
.basename = self.name_buffer.items[dirname_len..],
|
||||
.path = self.name_buffer.items,
|
||||
.kind = base.kind,
|
||||
|
||||
+1
-1
@@ -42,7 +42,7 @@ pub fn isSep(byte: u8) bool {
|
||||
|
||||
/// This is different from mem.join in that the separator will not be repeated if
|
||||
/// it is found at the end or beginning of a pair of consecutive paths.
|
||||
fn joinSepMaybeZ(allocator: Allocator, separator: u8, sepPredicate: fn (u8) bool, paths: []const []const u8, zero: bool) ![]u8 {
|
||||
fn joinSepMaybeZ(allocator: Allocator, separator: u8, comptime sepPredicate: fn (u8) bool, paths: []const []const u8, zero: bool) ![]u8 {
|
||||
if (paths.len == 0) return if (zero) try allocator.dupe(u8, &[1]u8{0}) else &[0]u8{};
|
||||
|
||||
// Find first non-empty path index.
|
||||
|
||||
@@ -1058,6 +1058,9 @@ test "walker" {
|
||||
std.debug.print("found unexpected path: {s}\n", .{std.fmt.fmtSliceEscapeLower(entry.path)});
|
||||
return err;
|
||||
};
|
||||
// make sure that the entry.dir is the containing dir
|
||||
var entry_dir = try entry.dir.openDir(entry.basename, .{});
|
||||
defer entry_dir.close();
|
||||
num_walked += 1;
|
||||
}
|
||||
try testing.expectEqual(expected_paths.kvs.len, num_walked);
|
||||
|
||||
+19
-20
@@ -30,13 +30,15 @@ pub fn hashPointer(hasher: anytype, key: anytype, comptime strat: HashStrategy)
|
||||
.DeepRecursive => hash(hasher, key.*, .DeepRecursive),
|
||||
},
|
||||
|
||||
.Slice => switch (strat) {
|
||||
.Shallow => {
|
||||
hashPointer(hasher, key.ptr, .Shallow);
|
||||
hash(hasher, key.len, .Shallow);
|
||||
},
|
||||
.Deep => hashArray(hasher, key, .Shallow),
|
||||
.DeepRecursive => hashArray(hasher, key, .DeepRecursive),
|
||||
.Slice => {
|
||||
switch (strat) {
|
||||
.Shallow => {
|
||||
hashPointer(hasher, key.ptr, .Shallow);
|
||||
},
|
||||
.Deep => hashArray(hasher, key, .Shallow),
|
||||
.DeepRecursive => hashArray(hasher, key, .DeepRecursive),
|
||||
}
|
||||
hash(hasher, key.len, .Shallow);
|
||||
},
|
||||
|
||||
.Many,
|
||||
@@ -53,17 +55,8 @@ pub fn hashPointer(hasher: anytype, key: anytype, comptime strat: HashStrategy)
|
||||
|
||||
/// Helper function to hash a set of contiguous objects, from an array or slice.
|
||||
pub fn hashArray(hasher: anytype, key: anytype, comptime strat: HashStrategy) void {
|
||||
switch (strat) {
|
||||
.Shallow => {
|
||||
for (key) |element| {
|
||||
hash(hasher, element, .Shallow);
|
||||
}
|
||||
},
|
||||
else => {
|
||||
for (key) |element| {
|
||||
hash(hasher, element, strat);
|
||||
}
|
||||
},
|
||||
for (key) |element| {
|
||||
hash(hasher, element, strat);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -193,8 +186,8 @@ fn typeContainsSlice(comptime K: type) bool {
|
||||
pub fn autoHash(hasher: anytype, key: anytype) void {
|
||||
const Key = @TypeOf(key);
|
||||
if (comptime typeContainsSlice(Key)) {
|
||||
@compileError("std.auto_hash.autoHash does not allow slices as well as unions and structs containing slices here (" ++ @typeName(Key) ++
|
||||
") because the intent is unclear. Consider using std.auto_hash.hash or providing your own hash function instead.");
|
||||
@compileError("std.hash.autoHash does not allow slices as well as unions and structs containing slices here (" ++ @typeName(Key) ++
|
||||
") because the intent is unclear. Consider using std.hash.autoHashStrat or providing your own hash function instead.");
|
||||
}
|
||||
|
||||
hash(hasher, key, .Shallow);
|
||||
@@ -359,6 +352,12 @@ test "testHash array" {
|
||||
try testing.expectEqual(h, hasher.final());
|
||||
}
|
||||
|
||||
test "testHash multi-dimensional array" {
|
||||
const a = [_][]const u32{ &.{ 1, 2, 3 }, &.{ 4, 5 } };
|
||||
const b = [_][]const u32{ &.{ 1, 2 }, &.{ 3, 4, 5 } };
|
||||
try testing.expect(testHash(a) != testHash(b));
|
||||
}
|
||||
|
||||
test "testHash struct" {
|
||||
const Foo = struct {
|
||||
a: u32 = 1,
|
||||
|
||||
@@ -143,9 +143,9 @@ pub const CityHash32 = struct {
|
||||
h = rotr32(h, 19);
|
||||
h = h *% 5 +% 0xe6546b64;
|
||||
g ^= b4;
|
||||
g = @byteSwap(u32, g) *% 5;
|
||||
g = @byteSwap(g) *% 5;
|
||||
h +%= b4 *% 5;
|
||||
h = @byteSwap(u32, h);
|
||||
h = @byteSwap(h);
|
||||
f +%= b0;
|
||||
const t: u32 = h;
|
||||
h = f;
|
||||
@@ -252,11 +252,11 @@ pub const CityHash64 = struct {
|
||||
|
||||
const u: u64 = rotr64(a +% g, 43) +% (rotr64(b, 30) +% c) *% 9;
|
||||
const v: u64 = ((a +% g) ^ d) +% f +% 1;
|
||||
const w: u64 = @byteSwap(u64, (u +% v) *% mul) +% h;
|
||||
const w: u64 = @byteSwap((u +% v) *% mul) +% h;
|
||||
const x: u64 = rotr64(e +% f, 42) +% c;
|
||||
const y: u64 = (@byteSwap(u64, (v +% w) *% mul) +% g) *% mul;
|
||||
const y: u64 = (@byteSwap((v +% w) *% mul) +% g) *% mul;
|
||||
const z: u64 = e +% f +% c;
|
||||
const a1: u64 = @byteSwap(u64, (x +% z) *% mul +% y) +% b;
|
||||
const a1: u64 = @byteSwap((x +% z) *% mul +% y) +% b;
|
||||
const b1: u64 = shiftmix((z +% a1) *% mul +% d +% h) *% mul;
|
||||
return b1 +% x;
|
||||
}
|
||||
|
||||
+11
-11
@@ -19,7 +19,7 @@ pub const Murmur2_32 = struct {
|
||||
for (@ptrCast([*]align(1) const u32, str.ptr)[0..(len >> 2)]) |v| {
|
||||
var k1: u32 = v;
|
||||
if (native_endian == .Big)
|
||||
k1 = @byteSwap(u32, k1);
|
||||
k1 = @byteSwap(k1);
|
||||
k1 *%= m;
|
||||
k1 ^= k1 >> 24;
|
||||
k1 *%= m;
|
||||
@@ -104,7 +104,7 @@ pub const Murmur2_64 = struct {
|
||||
for (@ptrCast([*]align(1) const u64, str.ptr)[0..@intCast(usize, len >> 3)]) |v| {
|
||||
var k1: u64 = v;
|
||||
if (native_endian == .Big)
|
||||
k1 = @byteSwap(u64, k1);
|
||||
k1 = @byteSwap(k1);
|
||||
k1 *%= m;
|
||||
k1 ^= k1 >> 47;
|
||||
k1 *%= m;
|
||||
@@ -117,7 +117,7 @@ pub const Murmur2_64 = struct {
|
||||
var k1: u64 = 0;
|
||||
@memcpy(@ptrCast([*]u8, &k1), @ptrCast([*]const u8, &str[@intCast(usize, offset)]), @intCast(usize, rest));
|
||||
if (native_endian == .Big)
|
||||
k1 = @byteSwap(u64, k1);
|
||||
k1 = @byteSwap(k1);
|
||||
h1 ^= k1;
|
||||
h1 *%= m;
|
||||
}
|
||||
@@ -184,7 +184,7 @@ pub const Murmur3_32 = struct {
|
||||
for (@ptrCast([*]align(1) const u32, str.ptr)[0..(len >> 2)]) |v| {
|
||||
var k1: u32 = v;
|
||||
if (native_endian == .Big)
|
||||
k1 = @byteSwap(u32, k1);
|
||||
k1 = @byteSwap(k1);
|
||||
k1 *%= c1;
|
||||
k1 = rotl32(k1, 15);
|
||||
k1 *%= c2;
|
||||
@@ -296,7 +296,7 @@ fn SMHasherTest(comptime hash_fn: anytype, comptime hashbits: u32) u32 {
|
||||
|
||||
var h = hash_fn(key[0..i], 256 - i);
|
||||
if (native_endian == .Big)
|
||||
h = @byteSwap(@TypeOf(h), h);
|
||||
h = @byteSwap(h);
|
||||
@memcpy(@ptrCast([*]u8, &hashes[i * hashbytes]), @ptrCast([*]u8, &h), hashbytes);
|
||||
}
|
||||
|
||||
@@ -310,8 +310,8 @@ test "murmur2_32" {
|
||||
var v0le: u32 = v0;
|
||||
var v1le: u64 = v1;
|
||||
if (native_endian == .Big) {
|
||||
v0le = @byteSwap(u32, v0le);
|
||||
v1le = @byteSwap(u64, v1le);
|
||||
v0le = @byteSwap(v0le);
|
||||
v1le = @byteSwap(v1le);
|
||||
}
|
||||
try testing.expectEqual(Murmur2_32.hash(@ptrCast([*]u8, &v0le)[0..4]), Murmur2_32.hashUint32(v0));
|
||||
try testing.expectEqual(Murmur2_32.hash(@ptrCast([*]u8, &v1le)[0..8]), Murmur2_32.hashUint64(v1));
|
||||
@@ -324,8 +324,8 @@ test "murmur2_64" {
|
||||
var v0le: u32 = v0;
|
||||
var v1le: u64 = v1;
|
||||
if (native_endian == .Big) {
|
||||
v0le = @byteSwap(u32, v0le);
|
||||
v1le = @byteSwap(u64, v1le);
|
||||
v0le = @byteSwap(v0le);
|
||||
v1le = @byteSwap(v1le);
|
||||
}
|
||||
try testing.expectEqual(Murmur2_64.hash(@ptrCast([*]u8, &v0le)[0..4]), Murmur2_64.hashUint32(v0));
|
||||
try testing.expectEqual(Murmur2_64.hash(@ptrCast([*]u8, &v1le)[0..8]), Murmur2_64.hashUint64(v1));
|
||||
@@ -338,8 +338,8 @@ test "murmur3_32" {
|
||||
var v0le: u32 = v0;
|
||||
var v1le: u64 = v1;
|
||||
if (native_endian == .Big) {
|
||||
v0le = @byteSwap(u32, v0le);
|
||||
v1le = @byteSwap(u64, v1le);
|
||||
v0le = @byteSwap(v0le);
|
||||
v1le = @byteSwap(v1le);
|
||||
}
|
||||
try testing.expectEqual(Murmur3_32.hash(@ptrCast([*]u8, &v0le)[0..4]), Murmur3_32.hashUint32(v0));
|
||||
try testing.expectEqual(Murmur3_32.hash(@ptrCast([*]u8, &v1le)[0..8]), Murmur3_32.hashUint64(v1));
|
||||
|
||||
+2
-2
@@ -479,7 +479,7 @@ const WasmPageAllocator = struct {
|
||||
@setCold(true);
|
||||
for (self.data) |segment, i| {
|
||||
const spills_into_next = @bitCast(i128, segment) < 0;
|
||||
const has_enough_bits = @popCount(u128, segment) >= num_pages;
|
||||
const has_enough_bits = @popCount(segment) >= num_pages;
|
||||
|
||||
if (!spills_into_next and !has_enough_bits) continue;
|
||||
|
||||
@@ -1185,7 +1185,7 @@ pub fn testAllocatorLargeAlignment(base_allocator: mem.Allocator) !void {
|
||||
const large_align = @as(u29, mem.page_size << 2);
|
||||
|
||||
var align_mask: usize = undefined;
|
||||
_ = @shlWithOverflow(usize, ~@as(usize, 0), @as(USizeShift, @ctz(u29, large_align)), &align_mask);
|
||||
_ = @shlWithOverflow(usize, ~@as(usize, 0), @as(USizeShift, @ctz(large_align)), &align_mask);
|
||||
|
||||
var slice = try allocator.alignedAlloc(u8, large_align, 500);
|
||||
try testing.expect(@ptrToInt(slice.ptr) & align_mask == @ptrToInt(slice.ptr));
|
||||
|
||||
@@ -7,7 +7,7 @@ const meta = std.meta;
|
||||
const math = std.math;
|
||||
|
||||
/// Creates a stream which allows for reading bit fields from another stream
|
||||
pub fn BitReader(endian: std.builtin.Endian, comptime ReaderType: type) type {
|
||||
pub fn BitReader(comptime endian: std.builtin.Endian, comptime ReaderType: type) type {
|
||||
return struct {
|
||||
forward_reader: ReaderType,
|
||||
bit_buffer: u7,
|
||||
|
||||
@@ -7,7 +7,7 @@ const meta = std.meta;
|
||||
const math = std.math;
|
||||
|
||||
/// Creates a stream which allows for writing bit fields to another stream
|
||||
pub fn BitWriter(endian: std.builtin.Endian, comptime WriterType: type) type {
|
||||
pub fn BitWriter(comptime endian: std.builtin.Endian, comptime WriterType: type) type {
|
||||
return struct {
|
||||
forward_writer: WriterType,
|
||||
bit_buffer: u8,
|
||||
|
||||
@@ -247,6 +247,27 @@ pub fn Reader(
|
||||
return bytes;
|
||||
}
|
||||
|
||||
/// Reads bytes into the bounded array, until
|
||||
/// the bounded array is full, or the stream ends.
|
||||
pub fn readIntoBoundedBytes(
|
||||
self: Self,
|
||||
comptime num_bytes: usize,
|
||||
bounded: *std.BoundedArray(u8, num_bytes),
|
||||
) !void {
|
||||
while (bounded.len < num_bytes) {
|
||||
const bytes_read = try self.read(bounded.unusedCapacitySlice());
|
||||
if (bytes_read == 0) return;
|
||||
bounded.len += bytes_read;
|
||||
}
|
||||
}
|
||||
|
||||
/// Reads at most `num_bytes` and returns as a bounded array.
|
||||
pub fn readBoundedBytes(self: Self, comptime num_bytes: usize) !std.BoundedArray(u8, num_bytes) {
|
||||
var result = std.BoundedArray(u8, num_bytes){};
|
||||
try self.readIntoBoundedBytes(num_bytes, &result);
|
||||
return result;
|
||||
}
|
||||
|
||||
/// Reads a native-endian integer
|
||||
pub fn readIntNative(self: Self, comptime T: type) !T {
|
||||
const bytes = try self.readBytesNoEof((@typeInfo(T).Int.bits + 7) / 8);
|
||||
|
||||
+1
-1
@@ -317,7 +317,7 @@ fn test_write_leb128(value: anytype) !void {
|
||||
const bytes_needed = bn: {
|
||||
if (@typeInfo(T).Int.bits <= 7) break :bn @as(u16, 1);
|
||||
|
||||
const unused_bits = if (value < 0) @clz(T, ~value) else @clz(T, value);
|
||||
const unused_bits = if (value < 0) @clz(~value) else @clz(value);
|
||||
const used_bits: u16 = (@typeInfo(T).Int.bits - unused_bits) + @boolToInt(t_signed);
|
||||
if (used_bits <= 7) break :bn @as(u16, 1);
|
||||
break :bn ((used_bits + 6) / 7);
|
||||
|
||||
+3
-3
@@ -1146,7 +1146,7 @@ pub fn ceilPowerOfTwoPromote(comptime T: type, value: T) std.meta.Int(@typeInfo(
|
||||
assert(value != 0);
|
||||
const PromotedType = std.meta.Int(@typeInfo(T).Int.signedness, @typeInfo(T).Int.bits + 1);
|
||||
const ShiftType = std.math.Log2Int(PromotedType);
|
||||
return @as(PromotedType, 1) << @intCast(ShiftType, @typeInfo(T).Int.bits - @clz(T, value - 1));
|
||||
return @as(PromotedType, 1) << @intCast(ShiftType, @typeInfo(T).Int.bits - @clz(value - 1));
|
||||
}
|
||||
|
||||
/// Returns the next power of two (if the value is not already a power of two).
|
||||
@@ -1212,7 +1212,7 @@ pub fn log2_int(comptime T: type, x: T) Log2Int(T) {
|
||||
if (@typeInfo(T) != .Int or @typeInfo(T).Int.signedness != .unsigned)
|
||||
@compileError("log2_int requires an unsigned integer, found " ++ @typeName(T));
|
||||
assert(x != 0);
|
||||
return @intCast(Log2Int(T), @typeInfo(T).Int.bits - 1 - @clz(T, x));
|
||||
return @intCast(Log2Int(T), @typeInfo(T).Int.bits - 1 - @clz(x));
|
||||
}
|
||||
|
||||
/// Return the log base 2 of integer value x, rounding up to the
|
||||
@@ -1548,7 +1548,7 @@ test "boolMask" {
|
||||
}
|
||||
|
||||
/// Return the mod of `num` with the smallest integer type
|
||||
pub fn comptimeMod(num: anytype, denom: comptime_int) IntFittingRange(0, denom - 1) {
|
||||
pub fn comptimeMod(num: anytype, comptime denom: comptime_int) IntFittingRange(0, denom - 1) {
|
||||
return @intCast(IntFittingRange(0, denom - 1), @mod(num, denom));
|
||||
}
|
||||
|
||||
|
||||
@@ -887,7 +887,7 @@ pub const Mutable = struct {
|
||||
|
||||
var sum: Limb = 0;
|
||||
for (r.limbs[0..r.len]) |limb| {
|
||||
sum += @popCount(Limb, limb);
|
||||
sum += @popCount(limb);
|
||||
}
|
||||
r.set(sum);
|
||||
}
|
||||
@@ -1520,7 +1520,7 @@ pub const Mutable = struct {
|
||||
) void {
|
||||
// 0.
|
||||
// Normalize so that y[t] > b/2
|
||||
const lz = @clz(Limb, y.limbs[y.len - 1]);
|
||||
const lz = @clz(y.limbs[y.len - 1]);
|
||||
const norm_shift = if (lz == 0 and y.toConst().isOdd())
|
||||
limb_bits // Force an extra limb so that y is even.
|
||||
else
|
||||
@@ -1917,7 +1917,7 @@ pub const Const = struct {
|
||||
|
||||
/// Returns the number of bits required to represent the absolute value of an integer.
|
||||
pub fn bitCountAbs(self: Const) usize {
|
||||
return (self.limbs.len - 1) * limb_bits + (limb_bits - @clz(Limb, self.limbs[self.limbs.len - 1]));
|
||||
return (self.limbs.len - 1) * limb_bits + (limb_bits - @clz(self.limbs[self.limbs.len - 1]));
|
||||
}
|
||||
|
||||
/// Returns the number of bits required to represent the integer in twos-complement form.
|
||||
@@ -1936,9 +1936,9 @@ pub const Const = struct {
|
||||
if (!self.positive) block: {
|
||||
bits += 1;
|
||||
|
||||
if (@popCount(Limb, self.limbs[self.limbs.len - 1]) == 1) {
|
||||
if (@popCount(self.limbs[self.limbs.len - 1]) == 1) {
|
||||
for (self.limbs[0 .. self.limbs.len - 1]) |limb| {
|
||||
if (@popCount(Limb, limb) != 0) {
|
||||
if (@popCount(limb) != 0) {
|
||||
break :block;
|
||||
}
|
||||
}
|
||||
@@ -3895,8 +3895,8 @@ fn llpow(r: []Limb, a: []const Limb, b: u32, tmp_limbs: []Limb) void {
|
||||
// The initial assignment makes the result end in `r` so an extra memory
|
||||
// copy is saved, each 1 flips the index twice so it's only the zeros that
|
||||
// matter.
|
||||
const b_leading_zeros = @clz(u32, b);
|
||||
const exp_zeros = @popCount(u32, ~b) - b_leading_zeros;
|
||||
const b_leading_zeros = @clz(b);
|
||||
const exp_zeros = @popCount(~b) - b_leading_zeros;
|
||||
if (exp_zeros & 1 != 0) {
|
||||
tmp1 = tmp_limbs;
|
||||
tmp2 = r;
|
||||
|
||||
@@ -8,7 +8,7 @@ inline fn mantissaOne(comptime T: type) comptime_int {
|
||||
}
|
||||
|
||||
/// Creates floating point type T from an unbiased exponent and raw mantissa.
|
||||
inline fn reconstructFloat(comptime T: type, exponent: comptime_int, mantissa: comptime_int) T {
|
||||
inline fn reconstructFloat(comptime T: type, comptime exponent: comptime_int, comptime mantissa: comptime_int) T {
|
||||
const TBits = @Type(.{ .Int = .{ .signedness = .unsigned, .bits = @bitSizeOf(T) } });
|
||||
const biased_exponent = @as(TBits, exponent + floatExponentMax(T));
|
||||
return @bitCast(T, (biased_exponent << floatMantissaBits(T)) | @as(TBits, mantissa));
|
||||
|
||||
+22
-19
@@ -267,7 +267,7 @@ pub fn zeroes(comptime T: type) T {
|
||||
return null;
|
||||
},
|
||||
.Struct => |struct_info| {
|
||||
if (@sizeOf(T) == 0) return T{};
|
||||
if (@sizeOf(T) == 0) return undefined;
|
||||
if (struct_info.layout == .Extern) {
|
||||
var item: T = undefined;
|
||||
set(u8, asBytes(&item), 0);
|
||||
@@ -424,6 +424,9 @@ test "zeroes" {
|
||||
|
||||
comptime var comptime_union = zeroes(C_union);
|
||||
try testing.expectEqual(@as(u8, 0), comptime_union.a);
|
||||
|
||||
// Ensure zero sized struct with fields is initialized correctly.
|
||||
_ = zeroes(struct { handle: void });
|
||||
}
|
||||
|
||||
/// Initializes all fields of the struct with their default value, or zero values if no default value is present.
|
||||
@@ -1316,7 +1319,7 @@ pub fn readIntNative(comptime T: type, bytes: *const [@divExact(@typeInfo(T).Int
|
||||
/// This function cannot fail and cannot cause undefined behavior.
|
||||
/// Assumes the endianness of memory is foreign, so it must byte-swap.
|
||||
pub fn readIntForeign(comptime T: type, bytes: *const [@divExact(@typeInfo(T).Int.bits, 8)]u8) T {
|
||||
return @byteSwap(T, readIntNative(T, bytes));
|
||||
return @byteSwap(readIntNative(T, bytes));
|
||||
}
|
||||
|
||||
pub const readIntLittle = switch (native_endian) {
|
||||
@@ -1345,7 +1348,7 @@ pub fn readIntSliceNative(comptime T: type, bytes: []const u8) T {
|
||||
/// The bit count of T must be evenly divisible by 8.
|
||||
/// Assumes the endianness of memory is foreign, so it must byte-swap.
|
||||
pub fn readIntSliceForeign(comptime T: type, bytes: []const u8) T {
|
||||
return @byteSwap(T, readIntSliceNative(T, bytes));
|
||||
return @byteSwap(readIntSliceNative(T, bytes));
|
||||
}
|
||||
|
||||
pub const readIntSliceLittle = switch (native_endian) {
|
||||
@@ -1427,7 +1430,7 @@ pub fn writeIntNative(comptime T: type, buf: *[(@typeInfo(T).Int.bits + 7) / 8]u
|
||||
/// the integer bit width must be divisible by 8.
|
||||
/// This function stores in foreign endian, which means it does a @byteSwap first.
|
||||
pub fn writeIntForeign(comptime T: type, buf: *[@divExact(@typeInfo(T).Int.bits, 8)]u8, value: T) void {
|
||||
writeIntNative(T, buf, @byteSwap(T, value));
|
||||
writeIntNative(T, buf, @byteSwap(value));
|
||||
}
|
||||
|
||||
pub const writeIntLittle = switch (native_endian) {
|
||||
@@ -1572,7 +1575,7 @@ pub const bswapAllFields = @compileError("bswapAllFields has been renamed to byt
|
||||
pub fn byteSwapAllFields(comptime S: type, ptr: *S) void {
|
||||
if (@typeInfo(S) != .Struct) @compileError("byteSwapAllFields expects a struct as the first argument");
|
||||
inline for (std.meta.fields(S)) |f| {
|
||||
@field(ptr, f.name) = @byteSwap(f.field_type, @field(ptr, f.name));
|
||||
@field(ptr, f.name) = @byteSwap(@field(ptr, f.name));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2749,14 +2752,14 @@ test "replaceOwned" {
|
||||
pub fn littleToNative(comptime T: type, x: T) T {
|
||||
return switch (native_endian) {
|
||||
.Little => x,
|
||||
.Big => @byteSwap(T, x),
|
||||
.Big => @byteSwap(x),
|
||||
};
|
||||
}
|
||||
|
||||
/// Converts a big-endian integer to host endianness.
|
||||
pub fn bigToNative(comptime T: type, x: T) T {
|
||||
return switch (native_endian) {
|
||||
.Little => @byteSwap(T, x),
|
||||
.Little => @byteSwap(x),
|
||||
.Big => x,
|
||||
};
|
||||
}
|
||||
@@ -2781,14 +2784,14 @@ pub fn nativeTo(comptime T: type, x: T, desired_endianness: Endian) T {
|
||||
pub fn nativeToLittle(comptime T: type, x: T) T {
|
||||
return switch (native_endian) {
|
||||
.Little => x,
|
||||
.Big => @byteSwap(T, x),
|
||||
.Big => @byteSwap(x),
|
||||
};
|
||||
}
|
||||
|
||||
/// Converts an integer which has host endianness to big endian.
|
||||
pub fn nativeToBig(comptime T: type, x: T) T {
|
||||
return switch (native_endian) {
|
||||
.Little => @byteSwap(T, x),
|
||||
.Little => @byteSwap(x),
|
||||
.Big => x,
|
||||
};
|
||||
}
|
||||
@@ -2800,7 +2803,7 @@ pub fn nativeToBig(comptime T: type, x: T) T {
|
||||
/// - The delta required to align the pointer is not a multiple of the pointee's
|
||||
/// type.
|
||||
pub fn alignPointerOffset(ptr: anytype, align_to: u29) ?usize {
|
||||
assert(align_to != 0 and @popCount(u29, align_to) == 1);
|
||||
assert(align_to != 0 and @popCount(align_to) == 1);
|
||||
|
||||
const T = @TypeOf(ptr);
|
||||
const info = @typeInfo(T);
|
||||
@@ -3249,13 +3252,13 @@ test "sliceAsBytes preserves pointer attributes" {
|
||||
try testing.expectEqual(in.alignment, out.alignment);
|
||||
}
|
||||
|
||||
/// Round an address up to the nearest aligned address
|
||||
/// Round an address up to the next (or current) aligned address.
|
||||
/// The alignment must be a power of 2 and greater than 0.
|
||||
pub fn alignForward(addr: usize, alignment: usize) usize {
|
||||
return alignForwardGeneric(usize, addr, alignment);
|
||||
}
|
||||
|
||||
/// Round an address up to the nearest aligned address
|
||||
/// Round an address up to the next (or current) aligned address.
|
||||
/// The alignment must be a power of 2 and greater than 0.
|
||||
pub fn alignForwardGeneric(comptime T: type, addr: T, alignment: T) T {
|
||||
return alignBackwardGeneric(T, addr + (alignment - 1), alignment);
|
||||
@@ -3287,25 +3290,25 @@ test "alignForward" {
|
||||
try testing.expect(alignForward(17, 8) == 24);
|
||||
}
|
||||
|
||||
/// Round an address up to the previous aligned address
|
||||
/// Round an address down to the previous (or current) aligned address.
|
||||
/// Unlike `alignBackward`, `alignment` can be any positive number, not just a power of 2.
|
||||
pub fn alignBackwardAnyAlign(i: usize, alignment: usize) usize {
|
||||
if (@popCount(usize, alignment) == 1)
|
||||
if (@popCount(alignment) == 1)
|
||||
return alignBackward(i, alignment);
|
||||
assert(alignment != 0);
|
||||
return i - @mod(i, alignment);
|
||||
}
|
||||
|
||||
/// Round an address up to the previous aligned address
|
||||
/// Round an address down to the previous (or current) aligned address.
|
||||
/// The alignment must be a power of 2 and greater than 0.
|
||||
pub fn alignBackward(addr: usize, alignment: usize) usize {
|
||||
return alignBackwardGeneric(usize, addr, alignment);
|
||||
}
|
||||
|
||||
/// Round an address up to the previous aligned address
|
||||
/// Round an address down to the previous (or current) aligned address.
|
||||
/// The alignment must be a power of 2 and greater than 0.
|
||||
pub fn alignBackwardGeneric(comptime T: type, addr: T, alignment: T) T {
|
||||
assert(@popCount(T, alignment) == 1);
|
||||
assert(@popCount(alignment) == 1);
|
||||
// 000010000 // example alignment
|
||||
// 000001111 // subtract 1
|
||||
// 111110000 // binary not
|
||||
@@ -3315,11 +3318,11 @@ pub fn alignBackwardGeneric(comptime T: type, addr: T, alignment: T) T {
|
||||
/// Returns whether `alignment` is a valid alignment, meaning it is
|
||||
/// a positive power of 2.
|
||||
pub fn isValidAlign(alignment: u29) bool {
|
||||
return @popCount(u29, alignment) == 1;
|
||||
return @popCount(alignment) == 1;
|
||||
}
|
||||
|
||||
pub fn isAlignedAnyAlign(i: usize, alignment: usize) bool {
|
||||
if (@popCount(usize, alignment) == 1)
|
||||
if (@popCount(alignment) == 1)
|
||||
return isAligned(i, alignment);
|
||||
assert(alignment != 0);
|
||||
return 0 == @mod(i, alignment);
|
||||
|
||||
+58
-19
@@ -764,7 +764,7 @@ const TagPayloadType = TagPayload;
|
||||
|
||||
///Given a tagged union type, and an enum, return the type of the union
|
||||
/// field corresponding to the enum tag.
|
||||
pub fn TagPayload(comptime U: type, tag: Tag(U)) type {
|
||||
pub fn TagPayload(comptime U: type, comptime tag: Tag(U)) type {
|
||||
comptime debug.assert(trait.is(.Union)(U));
|
||||
|
||||
const info = @typeInfo(U).Union;
|
||||
@@ -1024,28 +1024,13 @@ pub fn ArgsTuple(comptime Function: type) type {
|
||||
if (function_info.is_var_args)
|
||||
@compileError("Cannot create ArgsTuple for variadic function");
|
||||
|
||||
var argument_field_list: [function_info.args.len]std.builtin.Type.StructField = undefined;
|
||||
var argument_field_list: [function_info.args.len]type = undefined;
|
||||
inline for (function_info.args) |arg, i| {
|
||||
const T = arg.arg_type.?;
|
||||
@setEvalBranchQuota(10_000);
|
||||
var num_buf: [128]u8 = undefined;
|
||||
argument_field_list[i] = .{
|
||||
.name = std.fmt.bufPrint(&num_buf, "{d}", .{i}) catch unreachable,
|
||||
.field_type = T,
|
||||
.default_value = null,
|
||||
.is_comptime = false,
|
||||
.alignment = if (@sizeOf(T) > 0) @alignOf(T) else 0,
|
||||
};
|
||||
argument_field_list[i] = T;
|
||||
}
|
||||
|
||||
return @Type(.{
|
||||
.Struct = .{
|
||||
.is_tuple = true,
|
||||
.layout = .Auto,
|
||||
.decls = &.{},
|
||||
.fields = &argument_field_list,
|
||||
},
|
||||
});
|
||||
return CreateUniqueTuple(argument_field_list.len, argument_field_list);
|
||||
}
|
||||
|
||||
/// For a given anonymous list of types, returns a new tuple type
|
||||
@@ -1056,6 +1041,10 @@ pub fn ArgsTuple(comptime Function: type) type {
|
||||
/// - `Tuple(&[_]type {f32})` ⇒ `tuple { f32 }`
|
||||
/// - `Tuple(&[_]type {f32,u32})` ⇒ `tuple { f32, u32 }`
|
||||
pub fn Tuple(comptime types: []const type) type {
|
||||
return CreateUniqueTuple(types.len, types[0..types.len].*);
|
||||
}
|
||||
|
||||
fn CreateUniqueTuple(comptime N: comptime_int, comptime types: [N]type) type {
|
||||
var tuple_fields: [types.len]std.builtin.Type.StructField = undefined;
|
||||
inline for (types) |T, i| {
|
||||
@setEvalBranchQuota(10_000);
|
||||
@@ -1118,6 +1107,32 @@ test "Tuple" {
|
||||
TupleTester.assertTuple(.{ u32, f16, []const u8, void }, Tuple(&[_]type{ u32, f16, []const u8, void }));
|
||||
}
|
||||
|
||||
test "Tuple deduplication" {
|
||||
const T1 = std.meta.Tuple(&.{ u32, f32, i8 });
|
||||
const T2 = std.meta.Tuple(&.{ u32, f32, i8 });
|
||||
const T3 = std.meta.Tuple(&.{ u32, f32, i7 });
|
||||
|
||||
if (T1 != T2) {
|
||||
@compileError("std.meta.Tuple doesn't deduplicate tuple types.");
|
||||
}
|
||||
if (T1 == T3) {
|
||||
@compileError("std.meta.Tuple fails to generate different types.");
|
||||
}
|
||||
}
|
||||
|
||||
test "ArgsTuple forwarding" {
|
||||
const T1 = std.meta.Tuple(&.{ u32, f32, i8 });
|
||||
const T2 = std.meta.ArgsTuple(fn (u32, f32, i8) void);
|
||||
const T3 = std.meta.ArgsTuple(fn (u32, f32, i8) callconv(.C) noreturn);
|
||||
|
||||
if (T1 != T2) {
|
||||
@compileError("std.meta.ArgsTuple produces different types than std.meta.Tuple");
|
||||
}
|
||||
if (T1 != T3) {
|
||||
@compileError("std.meta.ArgsTuple produces different types for the same argument lists.");
|
||||
}
|
||||
}
|
||||
|
||||
/// TODO: https://github.com/ziglang/zig/issues/425
|
||||
pub fn globalOption(comptime name: []const u8, comptime T: type) ?T {
|
||||
if (!@hasDecl(root, name))
|
||||
@@ -1134,3 +1149,27 @@ test "isError" {
|
||||
try std.testing.expect(isError(math.absInt(@as(i8, -128))));
|
||||
try std.testing.expect(!isError(math.absInt(@as(i8, -127))));
|
||||
}
|
||||
|
||||
/// This function returns a function pointer for a given function signature.
|
||||
/// It's a helper to make code compatible to both stage1 and stage2.
|
||||
///
|
||||
/// **WARNING:** This function is deprecated and will be removed together with stage1.
|
||||
pub fn FnPtr(comptime Fn: type) type {
|
||||
return if (@import("builtin").zig_backend != .stage1)
|
||||
*const Fn
|
||||
else
|
||||
Fn;
|
||||
}
|
||||
|
||||
test "FnPtr" {
|
||||
var func: FnPtr(fn () i64) = undefined;
|
||||
|
||||
// verify that we can perform runtime exchange
|
||||
// and not have a function body in stage2:
|
||||
|
||||
func = std.time.timestamp;
|
||||
_ = func();
|
||||
|
||||
func = std.time.milliTimestamp;
|
||||
_ = func();
|
||||
}
|
||||
|
||||
@@ -459,7 +459,7 @@ pub fn MultiArrayList(comptime S: type) type {
|
||||
return self.bytes[0..capacityInBytes(self.capacity)];
|
||||
}
|
||||
|
||||
fn FieldType(field: Field) type {
|
||||
fn FieldType(comptime field: Field) type {
|
||||
return meta.fieldInfo(S, field).field_type;
|
||||
}
|
||||
|
||||
|
||||
+11
-11
@@ -475,10 +475,9 @@ pub fn abort() noreturn {
|
||||
|
||||
// Install default handler so that the tkill below will terminate.
|
||||
const sigact = Sigaction{
|
||||
.handler = .{ .sigaction = SIG.DFL },
|
||||
.mask = undefined,
|
||||
.flags = undefined,
|
||||
.restorer = undefined,
|
||||
.handler = .{ .handler = SIG.DFL },
|
||||
.mask = empty_sigset,
|
||||
.flags = 0,
|
||||
};
|
||||
sigaction(SIG.ABRT, &sigact, null) catch |err| switch (err) {
|
||||
error.OperationNotSupported => unreachable,
|
||||
@@ -953,6 +952,10 @@ pub const WriteError = error{
|
||||
OperationAborted,
|
||||
NotOpenForWriting,
|
||||
|
||||
/// The process cannot access the file because another process has locked
|
||||
/// a portion of the file. Windows-only.
|
||||
LockViolation,
|
||||
|
||||
/// This error occurs when no global event loop is configured,
|
||||
/// and reading from the file descriptor would block.
|
||||
WouldBlock,
|
||||
@@ -2648,6 +2651,7 @@ pub fn renameatW(
|
||||
.creation = windows.FILE_OPEN,
|
||||
.io_mode = .blocking,
|
||||
.filter = .any, // This function is supposed to rename both files and directories.
|
||||
.follow_symlinks = false,
|
||||
}) catch |err| switch (err) {
|
||||
error.WouldBlock => unreachable, // Not possible without `.share_access_nonblocking = true`.
|
||||
else => |e| return e,
|
||||
@@ -5443,11 +5447,7 @@ pub fn toPosixPath(file_path: []const u8) ![MAX_PATH_BYTES - 1:0]u8 {
|
||||
/// if this happens the fix is to add the error code to the corresponding
|
||||
/// switch expression, possibly introduce a new error in the error set, and
|
||||
/// send a patch to Zig.
|
||||
/// The self-hosted compiler is not fully capable of handle the related code.
|
||||
/// Until then, unexpected error tracing is disabled for the self-hosted compiler.
|
||||
/// TODO remove this once self-hosted is capable enough to handle printing and
|
||||
/// stack trace dumping.
|
||||
pub const unexpected_error_tracing = builtin.zig_backend == .stage1 and builtin.mode == .Debug;
|
||||
pub const unexpected_error_tracing = (builtin.zig_backend == .stage1 or builtin.zig_backend == .stage2_llvm) and builtin.mode == .Debug;
|
||||
|
||||
pub const UnexpectedError = error{
|
||||
/// The Operating System returned an undocumented error code.
|
||||
@@ -6251,7 +6251,7 @@ pub const CopyFileRangeError = error{
|
||||
NoSpaceLeft,
|
||||
Unseekable,
|
||||
PermissionDenied,
|
||||
FileBusy,
|
||||
SwapFile,
|
||||
} || PReadError || PWriteError || UnexpectedError;
|
||||
|
||||
var has_copy_file_range_syscall = std.atomic.Atomic(bool).init(true);
|
||||
@@ -6305,7 +6305,7 @@ pub fn copy_file_range(fd_in: fd_t, off_in: u64, fd_out: fd_t, off_out: u64, len
|
||||
.NOSPC => return error.NoSpaceLeft,
|
||||
.OVERFLOW => return error.Unseekable,
|
||||
.PERM => return error.PermissionDenied,
|
||||
.TXTBSY => return error.FileBusy,
|
||||
.TXTBSY => return error.SwapFile,
|
||||
// these may not be regular files, try fallback
|
||||
.INVAL => {},
|
||||
// support for cross-filesystem copy added in Linux 5.3, use fallback
|
||||
|
||||
+10
-10
@@ -1945,9 +1945,9 @@ pub const SIG = if (is_mips) struct {
|
||||
pub const SYS = 31;
|
||||
pub const UNUSED = SIG.SYS;
|
||||
|
||||
pub const ERR = @intToPtr(?Sigaction.sigaction_fn, maxInt(usize));
|
||||
pub const DFL = @intToPtr(?Sigaction.sigaction_fn, 0);
|
||||
pub const IGN = @intToPtr(?Sigaction.sigaction_fn, 1);
|
||||
pub const ERR = @intToPtr(?Sigaction.handler_fn, maxInt(usize));
|
||||
pub const DFL = @intToPtr(?Sigaction.handler_fn, 0);
|
||||
pub const IGN = @intToPtr(?Sigaction.handler_fn, 1);
|
||||
} else if (is_sparc) struct {
|
||||
pub const BLOCK = 1;
|
||||
pub const UNBLOCK = 2;
|
||||
@@ -1989,9 +1989,9 @@ pub const SIG = if (is_mips) struct {
|
||||
pub const PWR = LOST;
|
||||
pub const IO = SIG.POLL;
|
||||
|
||||
pub const ERR = @intToPtr(?Sigaction.sigaction_fn, maxInt(usize));
|
||||
pub const DFL = @intToPtr(?Sigaction.sigaction_fn, 0);
|
||||
pub const IGN = @intToPtr(?Sigaction.sigaction_fn, 1);
|
||||
pub const ERR = @intToPtr(?Sigaction.handler_fn, maxInt(usize));
|
||||
pub const DFL = @intToPtr(?Sigaction.handler_fn, 0);
|
||||
pub const IGN = @intToPtr(?Sigaction.handler_fn, 1);
|
||||
} else struct {
|
||||
pub const BLOCK = 0;
|
||||
pub const UNBLOCK = 1;
|
||||
@@ -2032,9 +2032,9 @@ pub const SIG = if (is_mips) struct {
|
||||
pub const SYS = 31;
|
||||
pub const UNUSED = SIG.SYS;
|
||||
|
||||
pub const ERR = @intToPtr(?Sigaction.sigaction_fn, maxInt(usize));
|
||||
pub const DFL = @intToPtr(?Sigaction.sigaction_fn, 0);
|
||||
pub const IGN = @intToPtr(?Sigaction.sigaction_fn, 1);
|
||||
pub const ERR = @intToPtr(?Sigaction.handler_fn, maxInt(usize));
|
||||
pub const DFL = @intToPtr(?Sigaction.handler_fn, 0);
|
||||
pub const IGN = @intToPtr(?Sigaction.handler_fn, 1);
|
||||
};
|
||||
|
||||
pub const kernel_rwf = u32;
|
||||
@@ -3377,7 +3377,7 @@ pub const cpu_count_t = std.meta.Int(.unsigned, std.math.log2(CPU_SETSIZE * 8));
|
||||
pub fn CPU_COUNT(set: cpu_set_t) cpu_count_t {
|
||||
var sum: cpu_count_t = 0;
|
||||
for (set) |x| {
|
||||
sum += @popCount(usize, x);
|
||||
sum += @popCount(x);
|
||||
}
|
||||
return sum;
|
||||
}
|
||||
|
||||
@@ -458,7 +458,7 @@ pub const Insn = packed struct {
|
||||
else
|
||||
ImmOrReg{ .imm = src };
|
||||
|
||||
const src_type = switch (imm_or_reg) {
|
||||
const src_type: u8 = switch (imm_or_reg) {
|
||||
.imm => K,
|
||||
.reg => X,
|
||||
};
|
||||
|
||||
@@ -3485,6 +3485,7 @@ pub const RiscV64 = enum(usize) {
|
||||
landlock_create_ruleset = 444,
|
||||
landlock_add_rule = 445,
|
||||
landlock_restrict_self = 446,
|
||||
memfd_secret = 447,
|
||||
process_mrelease = 448,
|
||||
futex_waitv = 449,
|
||||
set_mempolicy_home_node = 450,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
const std = @import("../std.zig");
|
||||
const builtin = @import("builtin");
|
||||
|
||||
pub const syscall_bits = switch (builtin.stage2_arch) {
|
||||
pub const syscall_bits = switch (builtin.cpu.arch) {
|
||||
.x86_64 => @import("plan9/x86_64.zig"),
|
||||
else => @compileError("more plan9 syscall implementations (needs more inline asm in stage2"),
|
||||
};
|
||||
|
||||
+1
-1
@@ -785,7 +785,7 @@ test "sigaction" {
|
||||
try testing.expect(signal_test_failed == false);
|
||||
// Check if the handler has been correctly reset to SIG_DFL
|
||||
try os.sigaction(os.SIG.USR1, null, &old_sa);
|
||||
try testing.expectEqual(os.SIG.DFL, old_sa.handler.sigaction);
|
||||
try testing.expectEqual(os.SIG.DFL, old_sa.handler.handler);
|
||||
}
|
||||
|
||||
test "dup & dup2" {
|
||||
|
||||
+3
-3
@@ -55,9 +55,9 @@ pub const Guid = extern struct {
|
||||
if (f.len == 0) {
|
||||
const fmt = std.fmt.fmtSliceHexLower;
|
||||
|
||||
const time_low = @byteSwap(u32, self.time_low);
|
||||
const time_mid = @byteSwap(u16, self.time_mid);
|
||||
const time_high_and_version = @byteSwap(u16, self.time_high_and_version);
|
||||
const time_low = @byteSwap(self.time_low);
|
||||
const time_mid = @byteSwap(self.time_mid);
|
||||
const time_high_and_version = @byteSwap(self.time_high_and_version);
|
||||
|
||||
return std.fmt.format(writer, "{:0>8}-{:0>4}-{:0>4}-{:0>2}{:0>2}-{:0>12}", .{
|
||||
fmt(std.mem.asBytes(&time_low)),
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user