Updates all uses of StackFallbackAllocator

This commit is contained in:
Mason Remaley
2026-04-13 02:59:29 -07:00
committed by Andrew Kelley
parent a4d8e9608e
commit 8c96487bb9
21 changed files with 161 additions and 141 deletions
+3 -2
View File
@@ -54,8 +54,9 @@ return_label: Ir.Ref = undefined,
compound_assign_dummy: ?Ir.Ref = null,
fn fail(c: *CodeGen, comptime fmt: []const u8, args: anytype) error{ FatalError, OutOfMemory } {
var sf = std.heap.stackFallback(1024, c.comp.gpa);
const allocator = sf.get();
var sf_buf: [1024]u8 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(&sf_buf, c.comp.gpa);
const allocator = sf.allocator();
var buf: std.ArrayList(u8) = .empty;
defer buf.deinit(allocator);
+9 -6
View File
@@ -1761,8 +1761,9 @@ fn addToSearchPath(comp: *Compilation, include: Include, verbose: bool) !void {
try comp.search_path.append(comp.gpa, include);
}
fn removeDuplicateSearchPaths(comp: *Compilation, start: usize, verbose: bool) !void {
var sf = std.heap.stackFallback(1024, comp.gpa);
const allocator = sf.get();
var sf_buf: [1024]u8 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(&sf_buf, comp.gpa);
const allocator = sf.allocator();
var seen_includes: std.StringHashMapUnmanaged(void) = .empty;
defer seen_includes.deinit(allocator);
var seen_frameworks: std.StringHashMapUnmanaged(void) = .empty;
@@ -1976,8 +1977,9 @@ const FindInclude = struct {
) Allocator.Error!?Result {
const comp = find.comp;
var stack_fallback = std.heap.stackFallback(path_buf_stack_limit, comp.gpa);
const sfa = stack_fallback.get();
var stack_fallback_buf: [path_buf_stack_limit]u8 = undefined;
var stack_fallback: std.heap.StackFallbackAllocator = .init(&stack_fallback_buf, comp.gpa);
const sfa = stack_fallback.allocator();
const header_path = try std.fmt.allocPrint(sfa, format, args);
defer sfa.free(header_path);
find.comp.normalizePath(header_path);
@@ -2068,8 +2070,9 @@ pub fn findEmbed(
}
}
var stack_fallback = std.heap.stackFallback(path_buf_stack_limit, comp.gpa);
const sf_allocator = stack_fallback.get();
var stack_fallback_buf: [path_buf_stack_limit]u8 = undefined;
var stack_fallback: std.heap.StackFallbackAllocator = .init(&stack_fallback_buf, comp.gpa);
const sf_allocator = stack_fallback.allocator();
switch (include_type) {
.quotes, .cli => {
+9 -6
View File
@@ -947,8 +947,9 @@ fn addImacros(d: *Driver, path: []const u8) !void {
}
pub fn err(d: *Driver, fmt: []const u8, args: anytype) Compilation.Error!void {
var sf = std.heap.stackFallback(1024, d.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.get());
var sf_buf: [1024]u8 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(&sf_buf, d.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.allocator());
defer allocating.deinit();
Diagnostics.formatArgs(&allocating.writer, fmt, args) catch return error.OutOfMemory;
@@ -956,8 +957,9 @@ pub fn err(d: *Driver, fmt: []const u8, args: anytype) Compilation.Error!void {
}
pub fn warn(d: *Driver, fmt: []const u8, args: anytype) Compilation.Error!void {
var sf = std.heap.stackFallback(1024, d.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.get());
var sf_buf: [1024]u8 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(&sf_buf, d.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.allocator());
defer allocating.deinit();
Diagnostics.formatArgs(&allocating.writer, fmt, args) catch return error.OutOfMemory;
@@ -1101,8 +1103,9 @@ fn parseTarget(d: *Driver, arch_os_abi: []const u8, opt_cpu_features: ?[]const u
}
pub fn fatal(d: *Driver, comptime fmt: []const u8, args: anytype) error{ FatalError, OutOfMemory } {
var sf = std.heap.stackFallback(1024, d.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.get());
var sf_buf: [1024]u8 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(&sf_buf, d.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.allocator());
defer allocating.deinit();
Diagnostics.formatArgs(&allocating.writer, fmt, args) catch return error.OutOfMemory;
+18 -12
View File
@@ -215,8 +215,9 @@ fn checkIdentifierCodepointWarnings(p: *Parser, codepoint: u21, loc: Source.Loca
assert(codepoint >= 0x80);
const prev_total = p.diagnostics.total;
var sf = std.heap.stackFallback(1024, p.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.get());
var sf_buf: [1024]u8 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(&sf_buf, p.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.allocator());
defer allocating.deinit();
if (!char_info.isC99IdChar(codepoint)) {
@@ -429,8 +430,9 @@ pub fn err(p: *Parser, tok_i: TokenIndex, diagnostic: Diagnostic, args: anytype)
if (diagnostic.suppress_unless_version) |some| if (!p.comp.langopts.standard.atLeast(some)) return;
if (p.diagnostics.effectiveKind(diagnostic) == .off) return;
var sf = std.heap.stackFallback(1024, p.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.get());
var sf_buf: [1024]u8 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(&sf_buf, p.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.allocator());
defer allocating.deinit();
p.formatArgs(&allocating.writer, diagnostic.fmt, args) catch return error.OutOfMemory;
@@ -1537,8 +1539,9 @@ fn staticAssert(p: *Parser) Error!bool {
}
} else {
if (!res.val.toBool(p.comp)) {
var sf = std.heap.stackFallback(1024, gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.get());
var sf_buf: [1024]u8 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(&sf_buf, gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.allocator());
defer allocating.deinit();
if (p.staticAssertMessage(res_node, str, &allocating) catch return error.OutOfMemory) |message| {
@@ -4837,8 +4840,9 @@ fn gnuAsmStmt(p: *Parser, quals: Tree.GNUAssemblyQualifiers, asm_tok: TokenIndex
const expected_items = 8; // arbitrarily chosen, most assembly will have fewer than 8 inputs/outputs/constraints/names
const bytes_needed = expected_items * @sizeOf(Tree.Node.AsmStmt.Operand) + expected_items * 2 * @sizeOf(Node.Index);
var stack_fallback = std.heap.stackFallback(bytes_needed, gpa);
const allocator = stack_fallback.get();
var stack_fallback_buf: [bytes_needed]u8 = undefined;
var stack_fallback: std.heap.StackFallbackAllocator = .init(&stack_fallback_buf, gpa);
const allocator = stack_fallback.allocator();
var operands: std.ArrayList(Tree.Node.AsmStmt.Operand) = .empty;
defer operands.deinit(allocator);
@@ -9922,8 +9926,9 @@ fn primaryExpr(p: *Parser) Error!?Result {
if (p.func.pretty_ident) |some| {
qt = some.qt;
} else if (p.func.qt) |func_qt| {
var sf = std.heap.stackFallback(1024, gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.get());
var sf_buf: [1024]u8 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(&sf_buf, gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.allocator());
defer allocating.deinit();
func_qt.printNamed(p.tokSlice(p.func.name), p.comp, &allocating.writer) catch return error.OutOfMemory;
@@ -10212,8 +10217,9 @@ fn charLiteral(p: *Parser) Error!?Result {
};
const max_chars_expected = 4;
var sf = std.heap.stackFallback(max_chars_expected * @sizeOf(u32), gpa);
const allocator = sf.get();
var sf_buf: [max_chars_expected]u32 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(@ptrCast(&sf_buf), gpa);
const allocator = sf.allocator();
var chars: std.ArrayList(u32) = .empty;
defer chars.deinit(allocator);
+3 -2
View File
@@ -212,8 +212,9 @@ pub const Diagnostic = struct {
};
pub fn err(pp: *Preprocessor, tok_i: TokenIndex, diagnostic: Diagnostic, args: anytype) Compilation.Error!void {
var sf = std.heap.stackFallback(1024, pp.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.get());
var sf_buf: [1024]u8 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(&sf_buf, pp.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.allocator());
defer allocating.deinit();
Diagnostics.formatArgs(&allocating.writer, diagnostic.fmt, args) catch return error.OutOfMemory;
+9 -6
View File
@@ -1023,8 +1023,9 @@ fn err(pp: *Preprocessor, loc: anytype, diagnostic: Diagnostic, args: anytype) C
defer pp.diagnostics.state.suppress_system_headers = old_suppress_system;
if (diagnostic.show_in_system_headers) pp.diagnostics.state.suppress_system_headers = false;
var sf = std.heap.stackFallback(1024, pp.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.get());
var sf_buf: [1024]u8 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(&sf_buf, pp.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.allocator());
defer allocating.deinit();
Diagnostics.formatArgs(&allocating.writer, diagnostic.fmt, args) catch return error.OutOfMemory;
@@ -1052,8 +1053,9 @@ fn err(pp: *Preprocessor, loc: anytype, diagnostic: Diagnostic, args: anytype) C
}
fn fatal(pp: *Preprocessor, raw: RawToken, comptime fmt: []const u8, args: anytype) Compilation.Error {
var sf = std.heap.stackFallback(1024, pp.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.get());
var sf_buf: [1024]u8 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(&sf_buf, pp.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.allocator());
defer allocating.deinit();
Diagnostics.formatArgs(&allocating.writer, fmt, args) catch return error.OutOfMemory;
@@ -1074,8 +1076,9 @@ fn fatalNotFound(pp: *Preprocessor, tok: TokenWithExpansionLocs, filename: []con
pp.diagnostics.state.fatal_errors = true;
defer pp.diagnostics.state.fatal_errors = old;
var sf = std.heap.stackFallback(1024, pp.comp.gpa);
const allocator = sf.get();
var sf_buf: [1024]u8 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(&sf_buf, pp.comp.gpa);
const allocator = sf.allocator();
var buf: std.ArrayList(u8) = .empty;
defer buf.deinit(allocator);
+3 -2
View File
@@ -44,8 +44,9 @@ fn preprocessorHandler(_: *Pragma, pp: *Preprocessor, start_idx: TokenIndex) Pra
const diagnostic: Pragma.Diagnostic = .pragma_message;
var sf = std.heap.stackFallback(1024, pp.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.get());
var sf_buf: [1024]u8 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(&sf_buf, pp.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.allocator());
defer allocating.deinit();
Diagnostics.formatArgs(&allocating.writer, diagnostic.fmt, .{str}) catch return error.OutOfMemory;
+3 -2
View File
@@ -315,8 +315,9 @@ pub const Parser = struct {
if (p.errored) return;
if (p.comp.diagnostics.effectiveKind(diagnostic) == .off) return;
var sf = std.heap.stackFallback(1024, p.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.get());
var sf_buf: [1024]u8 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(&sf_buf, p.comp.gpa);
var allocating: std.Io.Writer.Allocating = .init(sf.allocator());
defer allocating.deinit();
formatArgs(&allocating.writer, diagnostic.fmt, args) catch return error.OutOfMemory;
+3 -2
View File
@@ -68,8 +68,9 @@ fn serializeFloat(comptime T: type, value: T, w: *std.Io.Writer) !void {
pub fn todo(c: *AsmCodeGen, msg: []const u8, tok: Tree.TokenIndex) Error {
const loc: Source.Location = c.tree.tokens.items(.loc)[tok];
var sf = std.heap.stackFallback(1024, c.comp.gpa);
const allocator = sf.get();
var sf_buf: [1024]u8 = undefined;
var sf: std.heap.StackFallbackAllocator = .init(&sf_buf, c.comp.gpa);
const allocator = sf.allocator();
var buf: std.ArrayList(u8) = .empty;
defer buf.deinit(allocator);
+6 -4
View File
@@ -894,8 +894,9 @@ pub fn resolve(allocator: Allocator, paths: []const []const u8) Allocator.Error!
pub fn resolveWindows(allocator: Allocator, paths: []const []const u8) Allocator.Error![]u8 {
// Avoid heap allocation when paths.len is <= @bitSizeOf(usize) * 2
// (we use `* 3` because stackFallback uses 1 usize as a length)
var bit_set_allocator_state = std.heap.stackFallback(@sizeOf(usize) * 3, allocator);
const bit_set_allocator = bit_set_allocator_state.get();
var buf: [3]usize = undefined;
var bit_set_allocator_state: std.heap.StackFallbackAllocator = .init(@ptrCast(&buf), allocator);
const bit_set_allocator = bit_set_allocator_state.allocator();
var relevant_paths = try std.bit_set.DynamicBitSetUnmanaged.initEmpty(bit_set_allocator, paths.len);
defer relevant_paths.deinit(bit_set_allocator);
@@ -1642,7 +1643,8 @@ fn windowsResolveAgainstCwd(
parsed: WindowsPath2(u8),
) ![]u8 {
// Space for 256 WTF-16 code units; potentially 3 WTF-8 bytes per WTF-16 code unit
var temp_allocator_state = std.heap.stackFallback(256 * 3, gpa);
var buf: [256 * 3]u8 = undefined;
var temp_allocator_state: std.heap.StackFallbackAllocator = .init(&buf, gpa);
return switch (parsed.kind) {
.drive_absolute,
.unc_absolute,
@@ -1668,7 +1670,7 @@ fn windowsResolveAgainstCwd(
}
},
.drive_relative => blk: {
const temp_allocator = temp_allocator_state.get();
const temp_allocator = temp_allocator_state.allocator();
const drive_cwd = drive_cwd: {
const parsed_cwd = parsePathWindows(u8, cwd);
+9 -6
View File
@@ -1776,8 +1776,9 @@ fn structInitExpr(
}
{
var sfba = std.heap.stackFallback(256, astgen.arena);
const sfba_allocator = sfba.get();
var sfba_buf: [256]u8 = undefined;
var sfba: std.heap.StackFallbackAllocator = .init(&sfba_buf, astgen.arena);
const sfba_allocator = sfba.allocator();
var duplicate_names: std.array_hash_map.Auto(Zir.NullTerminatedString, ArrayList(Ast.TokenIndex)) = .empty;
try duplicate_names.ensureTotalCapacity(sfba_allocator, @intCast(struct_init.ast.fields.len));
@@ -8405,8 +8406,9 @@ fn tunnelThroughClosure(
// Otherwise we need a tunnel. First, figure out the path of namespaces we
// are tunneling through. This is usually only going to be one or two, so
// use an SFBA to optimize for the common case.
var sfba = std.heap.stackFallback(@sizeOf(usize) * 2, astgen.arena);
var intermediate_tunnels = try sfba.get().alloc(*Scope.Namespace, num_tunnels - 1);
var sfba_buf: [2]usize = undefined;
var sfba: std.heap.StackFallbackAllocator = .init(@ptrCast(&sfba_buf), astgen.arena);
var intermediate_tunnels = try sfba.allocator().alloc(*Scope.Namespace, num_tunnels - 1);
const root_ns = ns: {
var i: usize = num_tunnels - 1;
@@ -12927,8 +12929,9 @@ fn scanContainer(
};
// The maps below are allocated into this SFBA to avoid using the GPA for small namespaces.
var sfba_state = std.heap.stackFallback(512, astgen.gpa);
const sfba = sfba_state.get();
var sfba_buf: [512]u8 = undefined;
var sfba_state: std.heap.StackFallbackAllocator = .init(&sfba_buf, astgen.gpa);
const sfba = sfba_state.allocator();
var names: std.AutoArrayHashMapUnmanaged(Zir.NullTerminatedString, NameEntry) = .empty;
var test_names: std.AutoArrayHashMapUnmanaged(Zir.NullTerminatedString, NameEntry) = .empty;
+3 -2
View File
@@ -427,8 +427,9 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
});
// For short initializers, track the names on the stack rather than going through gpa.
var sfba_state = std.heap.stackFallback(256, gpa);
const sfba = sfba_state.get();
var sfba_buf: [256]u8 = undefined;
var sfba_state: std.heap.StackFallbackAllocator = .init(&sfba_buf, gpa);
const sfba = sfba_state.allocator();
var field_names: std.AutoHashMapUnmanaged(Zoir.NullTerminatedString, Ast.TokenIndex) = .empty;
defer field_names.deinit(sfba);
+12 -12
View File
@@ -7638,9 +7638,9 @@ pub const Constant = enum(u32) {
std.math.big.int.calcToStringLimbsBufferLen(expected_limbs, 10)
]std.math.big.Limb,
};
var stack align(@alignOf(ExpectedContents)) =
std.heap.stackFallback(@sizeOf(ExpectedContents), data.builder.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), data.builder.gpa);
const allocator = stack.allocator();
const str = bigint.toStringAlloc(allocator, 10, undefined) catch return error.WriteFailed;
defer allocator.free(str);
try w.writeAll(str);
@@ -9209,9 +9209,9 @@ pub fn getIntrinsic(
fields: [expected_fields_len]Type,
},
};
var stack align(@max(@alignOf(std.heap.StackFallbackAllocator(0)), @alignOf(ExpectedContents))) =
std.heap.stackFallback(@sizeOf(ExpectedContents), self.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), self.gpa);
const allocator = stack.allocator();
const name = name: {
{
@@ -10607,9 +10607,9 @@ pub fn print(self: *Builder, w: *Writer) (Writer.Error || Allocator.Error)!void
std.math.big.int.calcToStringLimbsBufferLen(expected_limbs, 10)
]std.math.big.Limb,
};
var stack align(@alignOf(ExpectedContents)) =
std.heap.stackFallback(@sizeOf(ExpectedContents), self.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), self.gpa);
const allocator = stack.allocator();
const limbs = self.metadata_limbs.items[extra.limbs_index..][0..extra.limbs_len];
const bigint: std.math.big.int.Const = .{
@@ -11129,9 +11129,9 @@ fn bigIntConstAssumeCapacity(
const bits = type_item.data;
const ExpectedContents = [64 / @sizeOf(std.math.big.Limb)]std.math.big.Limb;
var stack align(@alignOf(ExpectedContents)) =
std.heap.stackFallback(@sizeOf(ExpectedContents), self.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), self.gpa);
const allocator = stack.allocator();
var limbs: []std.math.big.Limb = &.{};
defer allocator.free(limbs);
+12 -9
View File
@@ -1122,8 +1122,9 @@ fn scalarizeShuffleOneBlockPayload(l: *Legalize, orig_inst: Air.Inst.Index) Erro
//
// So we must first compute `out_idxs` and `in_idxs`.
var sfba_state = std.heap.stackFallback(512, gpa);
const sfba = sfba_state.get();
var sfba_buf: [512]u8 = undefined;
var sfba_state: std.heap.StackFallbackAllocator = .init(&sfba_buf, gpa);
const sfba = sfba_state.allocator();
const out_idxs_buf = try sfba.alloc(InternPool.Index, shuffle.mask.len);
defer sfba.free(out_idxs_buf);
@@ -1212,8 +1213,9 @@ fn scalarizeShuffleTwoBlockPayload(l: *Legalize, orig_inst: Air.Inst.Index) Erro
// %8 = br(%1, %7)
// })
var sfba_state = std.heap.stackFallback(512, gpa);
const sfba = sfba_state.get();
var sfba_buf: [512]u8 = undefined;
var sfba_state: std.heap.StackFallbackAllocator = .init(&sfba_buf, gpa);
const sfba = sfba_state.allocator();
const out_idxs_buf = try sfba.alloc(InternPool.Index, shuffle.mask.len);
defer sfba.free(out_idxs_buf);
@@ -2394,9 +2396,9 @@ fn packedStoreBlockPayload(l: *Legalize, orig_inst: Air.Inst.Index) Error!Air.In
}).toRef(),
.rhs = Air.internedToRef((keep_mask: {
const ExpectedContents = [std.math.big.int.calcTwosCompLimbCount(256)]std.math.big.Limb;
var stack align(@max(@alignOf(ExpectedContents), @alignOf(std.heap.StackFallbackAllocator(0)))) =
std.heap.stackFallback(@sizeOf(ExpectedContents), zcu.gpa);
const gpa = stack.get();
var buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&buf), zcu.gpa);
const gpa = stack.allocator();
var mask_big_int: std.math.big.int.Mutable = .{
.limbs = try gpa.alloc(
@@ -2489,8 +2491,9 @@ fn packedAggregateInitBlockPayload(l: *Legalize, orig_inst: Air.Inst.Index) Erro
const agg_ty = orig_ty_pl.ty.toType();
const agg_field_count = agg_ty.structFieldCount(zcu);
var sfba_state = std.heap.stackFallback(@sizeOf([4 * 32 + 2]Air.Inst.Index), gpa);
const sfba = sfba_state.get();
var sfba_buf: [4 * 32 + 2]Air.Inst.Index = undefined;
var sfba_state: std.heap.StackFallbackAllocator = .init(@ptrCast(&sfba_buf), gpa);
const sfba = sfba_state.allocator();
const inst_buf = try sfba.alloc(Air.Inst.Index, 4 * agg_field_count + 2);
defer sfba.free(inst_buf);
+3 -2
View File
@@ -882,8 +882,9 @@ pub fn fieldValue(val: Value, pt: Zcu.PerThread, index: usize) !Value {
else => unreachable,
};
// Avoid hitting gpa for accesses to small packed structs
var sfba_state = std.heap.stackFallback(128, zcu.comp.gpa);
const sfba = sfba_state.get();
var sfba_buf: [128]u8 = undefined;
var sfba_state: std.heap.StackFallbackAllocator = .init(&sfba_buf, zcu.comp.gpa);
const sfba = sfba_state.allocator();
const buf = try sfba.alloc(u8, @intCast((ty.bitSize(zcu) + 7) / 8));
defer sfba.free(buf);
int_val.writeToPackedMemory(zcu, buf, 0) catch |err| switch (err) {
+3 -2
View File
@@ -4841,8 +4841,9 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
{
const asm_source = unwrapped_asm.source;
var stack = std.heap.stackFallback(256, f.dg.gpa);
const allocator = stack.get();
var stack_buf: [256]u8 = undefined;
var stack: std.heap.StackFallbackAllocator = .init(&stack_buf, f.dg.gpa);
const allocator = stack.allocator();
const fixed_asm_source = try allocator.alloc(u8, asm_source.len);
defer allocator.free(fixed_asm_source);
+15 -25
View File
@@ -3605,11 +3605,9 @@ pub const Object = struct {
vals: [Builder.expected_fields_len]Builder.Constant,
fields: [Builder.expected_fields_len]Builder.Type,
};
var stack align(@max(
@alignOf(std.heap.StackFallbackAllocator(0)),
@alignOf(ExpectedContents),
)) = std.heap.stackFallback(@sizeOf(ExpectedContents), o.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), o.gpa);
const allocator = stack.allocator();
const vals = try allocator.alloc(Builder.Constant, elems.len);
defer allocator.free(vals);
const fields = try allocator.alloc(Builder.Type, elems.len);
@@ -3636,11 +3634,9 @@ pub const Object = struct {
vals: [Builder.expected_fields_len]Builder.Constant,
fields: [Builder.expected_fields_len]Builder.Type,
};
var stack align(@max(
@alignOf(std.heap.StackFallbackAllocator(0)),
@alignOf(ExpectedContents),
)) = std.heap.stackFallback(@sizeOf(ExpectedContents), o.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), o.gpa);
const allocator = stack.allocator();
const vals = try allocator.alloc(Builder.Constant, len_including_sentinel);
defer allocator.free(vals);
const fields = try allocator.alloc(Builder.Type, len_including_sentinel);
@@ -3668,11 +3664,9 @@ pub const Object = struct {
switch (aggregate.storage) {
.bytes, .elems => {
const ExpectedContents = [Builder.expected_fields_len]Builder.Constant;
var stack align(@max(
@alignOf(std.heap.StackFallbackAllocator(0)),
@alignOf(ExpectedContents),
)) = std.heap.stackFallback(@sizeOf(ExpectedContents), o.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), o.gpa);
const allocator = stack.allocator();
const vals = try allocator.alloc(Builder.Constant, vector_type.len);
defer allocator.free(vals);
@@ -3701,11 +3695,9 @@ pub const Object = struct {
vals: [Builder.expected_fields_len]Builder.Constant,
fields: [Builder.expected_fields_len]Builder.Type,
};
var stack align(@max(
@alignOf(std.heap.StackFallbackAllocator(0)),
@alignOf(ExpectedContents),
)) = std.heap.stackFallback(@sizeOf(ExpectedContents), o.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), o.gpa);
const allocator = stack.allocator();
const vals = try allocator.alloc(Builder.Constant, llvm_len);
defer allocator.free(vals);
const fields = try allocator.alloc(Builder.Type, llvm_len);
@@ -3779,11 +3771,9 @@ pub const Object = struct {
vals: [Builder.expected_fields_len]Builder.Constant,
fields: [Builder.expected_fields_len]Builder.Type,
};
var stack align(@max(
@alignOf(std.heap.StackFallbackAllocator(0)),
@alignOf(ExpectedContents),
)) = std.heap.stackFallback(@sizeOf(ExpectedContents), o.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), o.gpa);
const allocator = stack.allocator();
const vals = try allocator.alloc(Builder.Constant, llvm_len);
defer allocator.free(vals);
const fields = try allocator.alloc(Builder.Type, llvm_len);
+6 -10
View File
@@ -3530,11 +3530,9 @@ fn airDivFloor(self: *FuncGen, inst: Air.Inst.Index, fast: Builder.FastMathKind)
const inst_llvm_ty = try o.lowerType(inst_ty);
const ExpectedContents = [std.math.big.int.calcTwosCompLimbCount(256)]std.math.big.Limb;
var stack align(@max(
@alignOf(std.heap.StackFallbackAllocator(0)),
@alignOf(ExpectedContents),
)) = std.heap.stackFallback(@sizeOf(ExpectedContents), self.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), self.gpa);
const allocator = stack.allocator();
const scalar_bits = scalar_ty.intInfo(zcu).bits;
var smin_big_int: std.math.big.int.Mutable = .{
@@ -3616,11 +3614,9 @@ fn airMod(self: *FuncGen, inst: Air.Inst.Index, fast: Builder.FastMathKind) Allo
}
if (scalar_ty.isSignedInt(zcu)) {
const ExpectedContents = [std.math.big.int.calcTwosCompLimbCount(256)]std.math.big.Limb;
var stack align(@max(
@alignOf(std.heap.StackFallbackAllocator(0)),
@alignOf(ExpectedContents),
)) = std.heap.stackFallback(@sizeOf(ExpectedContents), self.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), self.gpa);
const allocator = stack.allocator();
const scalar_bits = scalar_ty.intInfo(zcu).bits;
var smin_big_int: std.math.big.int.Mutable = .{
+7 -6
View File
@@ -671,11 +671,12 @@ fn restoreState(func: *Func, state: State, deaths: []const Air.Inst.Index, compt
for (deaths) |death| try func.processDeath(death);
const ExpectedContents = [@typeInfo(RegisterManager.TrackedRegisters).array.len]RegisterLock;
var stack align(@max(@alignOf(ExpectedContents), @alignOf(std.heap.StackFallbackAllocator(0)))) =
if (opts.update_tracking) {} else std.heap.stackFallback(@sizeOf(ExpectedContents), func.gpa);
const stack_buf_len = if (opts.update_tracking) 0 else 1;
var stack_buf: [stack_buf_len]ExpectedContents = undefined;
var stack = if (opts.update_tracking) {} else std.heap.StackFallbackAllocator.init(@ptrCast(&stack_buf), func.gpa);
var reg_locks = if (opts.update_tracking) {} else try std.array_list.Managed(RegisterLock).initCapacity(
stack.get(),
stack.allocator(),
@typeInfo(ExpectedContents).array.len,
);
defer if (!opts.update_tracking) {
@@ -4807,9 +4808,9 @@ fn airCall(func: *Func, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
const ExpectedContents = extern struct {
vals: [expected_num_args][@sizeOf(MCValue)]u8 align(@alignOf(MCValue)),
};
var stack align(@max(@alignOf(ExpectedContents), @alignOf(std.heap.StackFallbackAllocator(0)))) =
std.heap.stackFallback(@sizeOf(ExpectedContents), func.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), func.gpa);
const allocator = stack.allocator();
const arg_tys = try allocator.alloc(Type, arg_refs.len);
defer allocator.free(arg_tys);
+22 -21
View File
@@ -173820,9 +173820,9 @@ fn genLazy(cg: *CodeGen, lazy_sym: link.File.LazySymbol) InnerError!void {
var err_temp = try cg.tempInit(err_ty, err_mcv);
const ExpectedContents = [32]Mir.Inst.Index;
var stack align(@max(@alignOf(ExpectedContents), @alignOf(std.heap.StackFallbackAllocator(0)))) =
std.heap.stackFallback(@sizeOf(ExpectedContents), cg.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), cg.gpa);
const allocator = stack.allocator();
const relocs = try allocator.alloc(Mir.Inst.Index, error_set_type.names.len);
defer allocator.free(relocs);
@@ -174220,11 +174220,12 @@ fn restoreState(self: *CodeGen, state: State, deaths: []const Air.Inst.Index, co
for (deaths) |death| try self.processDeath(death, .{ .emit_instructions = opts.emit_instructions });
const ExpectedContents = [@typeInfo(RegisterManager.TrackedRegisters).array.len]RegisterLock;
var stack align(@max(@alignOf(ExpectedContents), @alignOf(std.heap.StackFallbackAllocator(0)))) =
if (opts.update_tracking) {} else std.heap.stackFallback(@sizeOf(ExpectedContents), self.gpa);
const stack_buf_len = if (opts.update_tracking) 0 else 1;
var stack_buf: [stack_buf_len]ExpectedContents = undefined;
var stack = if (opts.update_tracking) {} else std.heap.StackFallbackAllocator.init(@ptrCast(&stack_buf), self.gpa);
var reg_locks = if (opts.update_tracking) {} else try std.array_list.Managed(RegisterLock).initCapacity(
stack.get(),
stack.allocator(),
@typeInfo(ExpectedContents).array.len,
);
defer if (!opts.update_tracking) {
@@ -175929,9 +175930,9 @@ fn airCall(self: *CodeGen, inst: Air.Inst.Index, modifier: std.builtin.CallModif
tys: [32][@sizeOf(Type)]u8 align(@alignOf(Type)),
vals: [32][@sizeOf(MCValue)]u8 align(@alignOf(MCValue)),
};
var stack align(@max(@alignOf(ExpectedContents), @alignOf(std.heap.StackFallbackAllocator(0)))) =
std.heap.stackFallback(@sizeOf(ExpectedContents), self.gpa);
const allocator = stack.get();
var stack_buf: [1]ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), self.gpa);
const allocator = stack.allocator();
const arg_tys = try allocator.alloc(Type, arg_refs.len);
defer allocator.free(arg_tys);
@@ -175985,9 +175986,9 @@ fn genCall(self: *CodeGen, info: union(enum) {
frame_indices: [32]FrameIndex,
reg_locks: [32][@sizeOf(?RegisterLock)]u8 align(@alignOf(?RegisterLock)),
};
var stack align(@max(@alignOf(ExpectedContents), @alignOf(std.heap.StackFallbackAllocator(0)))) =
std.heap.stackFallback(@sizeOf(ExpectedContents), self.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), self.gpa);
const allocator = stack.allocator();
const var_args = try allocator.alloc(Type, args.len - fn_info.param_types.len);
defer allocator.free(var_args);
@@ -176588,9 +176589,9 @@ fn lowerSwitchBr(
bigint_limbs: [std.math.big.int.calcTwosCompLimbCount(1 << 10)]std.math.big.Limb,
relocs: [1 << 6]Mir.Inst.Index,
};
var stack align(@max(@alignOf(ExpectedContents), @alignOf(std.heap.StackFallbackAllocator(0)))) =
std.heap.stackFallback(@sizeOf(ExpectedContents), cg.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), cg.gpa);
const allocator = stack.allocator();
const state = try cg.saveState();
@@ -181154,9 +181155,9 @@ fn resolveCallingConventionValues(
const ExpectedContents = extern struct {
param_types: [32][@sizeOf(Type)]u8 align(@alignOf(Type)),
};
var stack align(@max(@alignOf(ExpectedContents), @alignOf(std.heap.StackFallbackAllocator(0)))) =
std.heap.stackFallback(@sizeOf(ExpectedContents), cg.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), cg.gpa);
const allocator = stack.allocator();
const param_types = try allocator.alloc(Type, fn_info.param_types.len + var_args.len);
defer allocator.free(param_types);
@@ -188706,9 +188707,9 @@ const Select = struct {
}
const ExpectedContents = [std.math.big.int.calcTwosCompLimbCount(1 << 10)]std.math.big.Limb;
var stack align(@max(@alignOf(ExpectedContents), @alignOf(std.heap.StackFallbackAllocator(0)))) =
std.heap.stackFallback(@sizeOf(ExpectedContents), cg.gpa);
const allocator = stack.get();
var stack_buf: ExpectedContents = undefined;
var stack: std.heap.StackFallbackAllocator = .init(@ptrCast(&stack_buf), cg.gpa);
const allocator = stack.allocator();
var res_big_int: std.math.big.int.Mutable = .{
.limbs = try allocator.alloc(
std.math.big.Limb,
+3 -2
View File
@@ -2690,8 +2690,9 @@ pub fn ensureUnusedRelocCapacity(elf: *Elf, loc_si: Symbol.Index, len: usize) !v
const shndx = loc_si.shndx(elf);
const sh = shndx.get(elf);
if (sh.rela_si == .null) {
var stack = std.heap.stackFallback(32, gpa);
const allocator = stack.get();
var stack_buf: [32]u8 = undefined;
var stack: std.heap.StackFallbackAllocator = .init(&stack_buf, gpa);
const allocator = stack.allocator();
const rela_name =
try std.fmt.allocPrint(allocator, ".rela{s}", .{elf.sectionName(sh.si)});