flatten lib/std/special and improve "pkg inside another" logic

stage2: change logic for detecting whether the main package is inside
the std package. Previously it relied on realpath() which is not portable.
This uses resolve() which is how imports already work.

 * stage2: fix cleanup bug when creating Module
 * flatten lib/std/special/* to lib/*
   - this was motivated by making main_pkg_is_inside_std false for
     compiler_rt & friends.
 * rename "mini libc" to "universal libc"
This commit is contained in:
Andrew Kelley
2022-05-06 19:22:40 -07:00
parent 3b60ab4872
commit ec95e00e28
160 changed files with 115 additions and 126 deletions
+223
View File
@@ -0,0 +1,223 @@
const native_arch = @import("builtin").cpu.arch;
// Zig's own stack-probe routine (available only on x86 and x86_64)
//
// Contract: the number of bytes to probe is passed in %rax (x86_64) or
// %eax (i386). The routine touches the stack one 0x1000-byte page at a
// time, moving the stack pointer down as it goes and restoring it before
// returning. %rcx/%ecx is used as the countdown register and is saved and
// restored around the loop, so the routine is effectively clobber-free.
pub fn zig_probe_stack() callconv(.Naked) void {
@setRuntimeSafety(false);
// Versions of the Linux kernel before 5.1 treat any access below SP as
// invalid so let's update it on the go, otherwise we'll get a segfault
// instead of triggering the stack growth.
switch (native_arch) {
.x86_64 => {
// %rax = probe length, %rsp = stack pointer
// Loop `1:` probes whole pages (`orl $0, 16(%rsp)` touches the page);
// label `2:` probes the sub-page remainder, then `add %rax,%rsp`
// restores the original stack pointer before `ret`.
asm volatile (
\\ push %%rcx
\\ mov %%rax, %%rcx
\\ cmp $0x1000,%%rcx
\\ jb 2f
\\ 1:
\\ sub $0x1000,%%rsp
\\ orl $0,16(%%rsp)
\\ sub $0x1000,%%rcx
\\ cmp $0x1000,%%rcx
\\ ja 1b
\\ 2:
\\ sub %%rcx, %%rsp
\\ orl $0,16(%%rsp)
\\ add %%rax,%%rsp
\\ pop %%rcx
\\ ret
);
},
.i386 => {
// %eax = probe length, %esp = stack pointer
// Same structure as the x86_64 path, with 32-bit registers and an
// 8-byte (two-slot) displacement instead of 16.
asm volatile (
\\ push %%ecx
\\ mov %%eax, %%ecx
\\ cmp $0x1000,%%ecx
\\ jb 2f
\\ 1:
\\ sub $0x1000,%%esp
\\ orl $0,8(%%esp)
\\ sub $0x1000,%%ecx
\\ cmp $0x1000,%%ecx
\\ ja 1b
\\ 2:
\\ sub %%ecx, %%esp
\\ orl $0,8(%%esp)
\\ add %%eax,%%esp
\\ pop %%ecx
\\ ret
);
},
// Unsupported architectures fall through to `unreachable` below.
else => {},
}
// The x86/x86_64 paths return via the `ret` inside the asm, so execution
// only reaches this point when the routine is called on an unsupported
// target, which must never happen.
unreachable;
}
// Windows stack probe that leaves the stack pointer unchanged.
//
// x86/x86_64 contract: the probe length is passed in %eax/%rax. A cursor in
// %ecx/%rcx (initialized just above the saved registers via `lea`) walks
// down one 0x1000-byte page at a time; each page is touched with a `test`
// load. All registers used are pushed/popped around the loop and the asm
// returns via its own `ret`, so SP is unchanged on return.
fn win_probe_stack_only() void {
@setRuntimeSafety(false);
switch (native_arch) {
.x86_64 => {
asm volatile (
\\ push %%rcx
\\ push %%rax
\\ cmp $0x1000,%%rax
\\ lea 24(%%rsp),%%rcx
\\ jb 1f
\\ 2:
\\ sub $0x1000,%%rcx
\\ test %%rcx,(%%rcx)
\\ sub $0x1000,%%rax
\\ cmp $0x1000,%%rax
\\ ja 2b
\\ 1:
\\ sub %%rax,%%rcx
\\ test %%rcx,(%%rcx)
\\ pop %%rax
\\ pop %%rcx
\\ ret
);
},
.i386 => {
// 32-bit variant of the loop above; the `lea 12(%%esp)` offset
// accounts for the two pushes plus the return address.
asm volatile (
\\ push %%ecx
\\ push %%eax
\\ cmp $0x1000,%%eax
\\ lea 12(%%esp),%%ecx
\\ jb 1f
\\ 2:
\\ sub $0x1000,%%ecx
\\ test %%ecx,(%%ecx)
\\ sub $0x1000,%%eax
\\ cmp $0x1000,%%eax
\\ ja 2b
\\ 1:
\\ sub %%eax,%%ecx
\\ test %%ecx,(%%ecx)
\\ pop %%eax
\\ pop %%ecx
\\ ret
);
},
else => {},
}
// aarch64 variant, reached via the `else` arm of the switch above.
// `lsl x16, x15, #4` derives the byte count from x15, i.e. x15 is assumed
// to carry the allocation size divided by 16 — NOTE(review): confirm this
// against the Windows aarch64 __chkstk register convention.
if (comptime native_arch.isAARCH64()) {
// NOTE: page size hardcoded to 4096 for now
asm volatile (
\\ lsl x16, x15, #4
\\ mov x17, sp
\\1:
\\
\\ sub x17, x17, 4096
\\ subs x16, x16, 4096
\\ ldr xzr, [x17]
\\ b.gt 1b
\\
\\ ret
);
}
// Every supported path returns via the `ret` inside its asm block.
unreachable;
}
// Windows stack probe that behaves like alloca(): after probing, the stack
// pointer is moved down by the requested amount.
//
// Contract: %eax/%rax holds the requested size on entry. A cursor in
// %ecx/%rcx probes downward one 0x1000-byte page at a time with `test`
// loads. The epilogue then switches SP to the probed address, restores the
// caller's %ecx/%rcx, re-pushes the return address onto the new stack so
// `ret` still works, and leaves %eax/%rax holding the difference between
// the old and new stack pointers (`sub %%esp,%%eax`).
fn win_probe_stack_adjust_sp() void {
@setRuntimeSafety(false);
switch (native_arch) {
.x86_64 => {
asm volatile (
\\ push %%rcx
\\ cmp $0x1000,%%rax
\\ lea 16(%%rsp),%%rcx
\\ jb 1f
\\ 2:
\\ sub $0x1000,%%rcx
\\ test %%rcx,(%%rcx)
\\ sub $0x1000,%%rax
\\ cmp $0x1000,%%rax
\\ ja 2b
\\ 1:
\\ sub %%rax,%%rcx
\\ test %%rcx,(%%rcx)
\\
\\ lea 8(%%rsp),%%rax
\\ mov %%rcx,%%rsp
\\ mov -8(%%rax),%%rcx
\\ push (%%rax)
\\ sub %%rsp,%%rax
\\ ret
);
},
.i386 => {
// 32-bit variant with 4-byte stack slots instead of 8.
asm volatile (
\\ push %%ecx
\\ cmp $0x1000,%%eax
\\ lea 8(%%esp),%%ecx
\\ jb 1f
\\ 2:
\\ sub $0x1000,%%ecx
\\ test %%ecx,(%%ecx)
\\ sub $0x1000,%%eax
\\ cmp $0x1000,%%eax
\\ ja 2b
\\ 1:
\\ sub %%eax,%%ecx
\\ test %%ecx,(%%ecx)
\\
\\ lea 4(%%esp),%%eax
\\ mov %%ecx,%%esp
\\ mov -4(%%eax),%%ecx
\\ push (%%eax)
\\ sub %%esp,%%eax
\\ ret
);
},
else => {},
}
// Both supported paths return via the `ret` inside the asm; reaching this
// point means the routine was called on an unsupported architecture.
unreachable;
}
// Windows has a multitude of stack-probing functions with similar names and
// slightly different behaviours: some behave as alloca() and update the stack
// pointer after probing the stack, others do not.
//
// Function name | Adjusts the SP? |
// | x86 | x86_64 |
// ----------------------------------------
// _chkstk (_alloca) | yes | yes |
// __chkstk | yes | no |
// __chkstk_ms | no | no |
// ___chkstk (__alloca) | yes | yes |
// ___chkstk_ms | no | no |
// _chkstk (a.k.a. _alloca): probes the stack AND adjusts the stack pointer,
// on both x86 and x86_64 (see the table above).
pub fn _chkstk() callconv(.Naked) void {
@setRuntimeSafety(false);
@call(.{ .modifier = .always_inline }, win_probe_stack_adjust_sp, .{});
}
// __chkstk: adjusts the stack pointer on x86 but not on x86_64 (see the
// table above); on aarch64 it is a probe-only routine.
pub fn __chkstk() callconv(.Naked) void {
@setRuntimeSafety(false);
if (comptime native_arch.isAARCH64()) {
@call(.{ .modifier = .always_inline }, win_probe_stack_only, .{});
} else switch (native_arch) {
.i386 => @call(.{ .modifier = .always_inline }, win_probe_stack_adjust_sp, .{}),
.x86_64 => @call(.{ .modifier = .always_inline }, win_probe_stack_only, .{}),
// This symbol is only emitted for the architectures handled above.
else => unreachable,
}
}
// ___chkstk (a.k.a. __alloca): probes the stack AND adjusts the stack
// pointer, on both x86 and x86_64 (see the table above).
pub fn ___chkstk() callconv(.Naked) void {
@setRuntimeSafety(false);
@call(.{ .modifier = .always_inline }, win_probe_stack_adjust_sp, .{});
}
// __chkstk_ms: probe-only variant; never adjusts the stack pointer (see the
// table above).
pub fn __chkstk_ms() callconv(.Naked) void {
@setRuntimeSafety(false);
@call(.{ .modifier = .always_inline }, win_probe_stack_only, .{});
}
// ___chkstk_ms: probe-only variant; never adjusts the stack pointer (see the
// table above).
pub fn ___chkstk_ms() callconv(.Naked) void {
@setRuntimeSafety(false);
@call(.{ .modifier = .always_inline }, win_probe_stack_only, .{});
}