Audit usages of toOwnedSlice (#32001)

Followup to #30769

I grepped for `try .*toOwnedSlice` and checked all of them by hand.

Fixes a bunch of memory leaks, and removes usages of `errdefer` and `var` in some places. I also switched array_list.Managed to ArrayList where it was convenient.

Reviewed-on: https://codeberg.org/ziglang/zig/pulls/32001
Reviewed-by: Andrew Kelley <andrew@ziglang.org>
This commit is contained in:
andrew.kraevskii
2026-04-22 19:35:46 +02:00
committed by Andrew Kelley
parent 03955476ad
commit bbab366b78
20 changed files with 198 additions and 182 deletions
+5 -2
View File
@@ -863,11 +863,14 @@ pub fn render(gpa: Allocator, nodes: []const Node) !std.zig.Ast {
.start = @as(u32, @intCast(ctx.buf.items.len)),
});
try ctx.buf.shrinkToLenSentinel(gpa);
try ctx.extra_data.shrinkToLen(gpa);
return .{
.source = try ctx.buf.toOwnedSliceSentinel(gpa, 0),
.source = ctx.buf.toOwnedSliceSentinelAssert(0),
.tokens = ctx.tokens.toOwnedSlice(),
.nodes = ctx.nodes.toOwnedSlice(),
.extra_data = try ctx.extra_data.toOwnedSlice(gpa),
.extra_data = ctx.extra_data.toOwnedSliceAssert(),
.errors = &.{},
.mode = .zig,
};
+5 -9
View File
@@ -311,17 +311,13 @@ pub fn endInput(p: *Parser) Allocator.Error!Document {
p.scratch_string.items.len = 0;
p.scratch_extra.items.len = 0;
var nodes = p.nodes.toOwnedSlice();
errdefer nodes.deinit(p.allocator);
const extra = try p.extra.toOwnedSlice(p.allocator);
errdefer p.allocator.free(extra);
const string_bytes = try p.string_bytes.toOwnedSlice(p.allocator);
errdefer p.allocator.free(string_bytes);
try p.extra.shrinkToLen(p.allocator);
try p.string_bytes.shrinkToLen(p.allocator);
return .{
.nodes = nodes,
.extra = extra,
.string_bytes = string_bytes,
.nodes = p.nodes.toOwnedSlice(),
.extra = p.extra.toOwnedSliceAssert(),
.string_bytes = p.string_bytes.toOwnedSliceAssert(),
};
}
+18 -15
View File
@@ -778,43 +778,46 @@ pub fn runPkgConfig(step: *Step, lib_name: []const u8) !PkgConfigResult {
else => return err,
};
var zig_cflags = std.array_list.Managed([]const u8).init(b.allocator);
defer zig_cflags.deinit();
var zig_libs = std.array_list.Managed([]const u8).init(b.allocator);
defer zig_libs.deinit();
var zig_cflags: std.ArrayList([]const u8) = .empty;
defer zig_cflags.deinit(b.allocator);
var zig_libs: std.ArrayList([]const u8) = .empty;
defer zig_libs.deinit(b.allocator);
var arg_it = mem.tokenizeAny(u8, stdout, " \r\n\t");
while (arg_it.next()) |arg| {
if (mem.eql(u8, arg, "-I")) {
const dir = arg_it.next() orelse return error.PkgConfigInvalidOutput;
try zig_cflags.appendSlice(&[_][]const u8{ "-I", dir });
try zig_cflags.appendSlice(b.allocator, &.{ "-I", dir });
} else if (mem.startsWith(u8, arg, "-I")) {
try zig_cflags.append(arg);
try zig_cflags.append(b.allocator, arg);
} else if (mem.eql(u8, arg, "-L")) {
const dir = arg_it.next() orelse return error.PkgConfigInvalidOutput;
try zig_libs.appendSlice(&[_][]const u8{ "-L", dir });
try zig_libs.appendSlice(b.allocator, &.{ "-L", dir });
} else if (mem.startsWith(u8, arg, "-L")) {
try zig_libs.append(arg);
try zig_libs.append(b.allocator, arg);
} else if (mem.eql(u8, arg, "-l")) {
const lib = arg_it.next() orelse return error.PkgConfigInvalidOutput;
try zig_libs.appendSlice(&[_][]const u8{ "-l", lib });
try zig_libs.appendSlice(b.allocator, &.{ "-l", lib });
} else if (mem.startsWith(u8, arg, "-l")) {
try zig_libs.append(arg);
try zig_libs.append(b.allocator, arg);
} else if (mem.eql(u8, arg, "-D")) {
const macro = arg_it.next() orelse return error.PkgConfigInvalidOutput;
try zig_cflags.appendSlice(&[_][]const u8{ "-D", macro });
try zig_cflags.appendSlice(b.allocator, &.{ "-D", macro });
} else if (mem.startsWith(u8, arg, "-D")) {
try zig_cflags.append(arg);
try zig_cflags.append(b.allocator, arg);
} else if (mem.startsWith(u8, arg, wl_rpath_prefix)) {
try zig_cflags.appendSlice(&[_][]const u8{ "-rpath", arg[wl_rpath_prefix.len..] });
try zig_cflags.appendSlice(b.allocator, &.{ "-rpath", arg[wl_rpath_prefix.len..] });
} else if (b.debug_pkg_config) {
return step.fail("unknown pkg-config flag '{s}'", .{arg});
}
}
try zig_cflags.shrinkToLen(b.allocator);
try zig_libs.shrinkToLen(b.allocator);
return .{
.cflags = try zig_cflags.toOwnedSlice(),
.libs = try zig_libs.toOwnedSlice(),
.cflags = zig_cflags.toOwnedSliceAssert(),
.libs = zig_libs.toOwnedSliceAssert(),
};
}
+2
View File
@@ -2699,7 +2699,9 @@ fn evalGeneric(run: *Run, spawn_options: process.SpawnOptions) !EvalGenericResul
try multi_reader.checkAnyError();
// TODO: this string can leak since alloc below can return error.
stdout_bytes = try multi_reader.toOwnedSlice(0);
// TODO: this string can leak since its allocated using gpa and `try child.wait(io)` below can fail.
stderr_bytes = try multi_reader.toOwnedSlice(1);
} else {
var stdout_reader = stdout.readerStreaming(io, &.{});
+14 -11
View File
@@ -879,16 +879,16 @@ fn parseAbbrevTable(di: *Dwarf, gpa: Allocator, offset: u64) !Abbrev.Table {
var fr: Reader = .fixed(di.section(.debug_abbrev).?);
fr.seek = cast(usize, offset) orelse return bad();
var abbrevs = std.array_list.Managed(Abbrev).init(gpa);
var abbrevs: std.ArrayList(Abbrev) = .empty;
defer {
for (abbrevs.items) |*abbrev| {
abbrev.deinit(gpa);
}
abbrevs.deinit();
abbrevs.deinit(gpa);
}
var attrs = std.array_list.Managed(Abbrev.Attr).init(gpa);
defer attrs.deinit();
var attrs: std.ArrayList(Abbrev.Attr) = .empty;
defer attrs.deinit(gpa);
while (true) {
const code = try fr.takeLeb128(u64);
@@ -900,7 +900,7 @@ fn parseAbbrevTable(di: *Dwarf, gpa: Allocator, offset: u64) !Abbrev.Table {
const attr_id = try fr.takeLeb128(u64);
const form_id = try fr.takeLeb128(u64);
if (attr_id == 0 and form_id == 0) break;
try attrs.append(.{
try attrs.append(gpa, .{
.id = attr_id,
.form_id = form_id,
.payload = switch (form_id) {
@@ -909,18 +909,18 @@ fn parseAbbrevTable(di: *Dwarf, gpa: Allocator, offset: u64) !Abbrev.Table {
},
});
}
try abbrevs.append(.{
try abbrevs.ensureUnusedCapacity(gpa, 1);
abbrevs.appendAssumeCapacity(.{
.code = code,
.tag_id = tag_id,
.has_children = has_children,
.attrs = try attrs.toOwnedSlice(),
.attrs = try attrs.toOwnedSlice(gpa),
});
}
return .{
.offset = offset,
.abbrevs = try abbrevs.toOwnedSlice(),
.abbrevs = try abbrevs.toOwnedSlice(gpa),
};
}
@@ -1204,10 +1204,13 @@ fn runLineNumberProgram(d: *Dwarf, gpa: Allocator, endian: Endian, compile_unit:
}
}{ .keys = line_table.keys() });
try directories.shrinkToLen(gpa);
try file_entries.shrinkToLen(gpa);
return .{
.line_table = line_table,
.directories = try directories.toOwnedSlice(gpa),
.files = try file_entries.toOwnedSlice(gpa),
.directories = directories.toOwnedSliceAssert(),
.files = file_entries.toOwnedSliceAssert(),
.version = version,
};
}
+24 -16
View File
@@ -83,8 +83,8 @@ pub fn parseDbiStream(self: *Pdb) !void {
const mod_info_size = header.mod_info_size;
const section_contrib_size = header.section_contribution_size;
var modules = std.array_list.Managed(Module).init(gpa);
errdefer modules.deinit();
var modules: std.ArrayList(Module) = .empty;
defer modules.deinit(gpa);
// Module Info Substream
var mod_info_offset: usize = 0;
@@ -113,11 +113,16 @@ pub fn parseDbiStream(self: *Pdb) !void {
this_record_len += march_forward_bytes;
}
try modules.append(.{
.mod_info = mod_info,
.module_name = try module_name.toOwnedSlice(),
.obj_file_name = try obj_file_name.toOwnedSlice(),
try modules.ensureUnusedCapacity(gpa, 1);
const module_name_slice = try module_name.toOwnedSlice();
errdefer gpa.free(module_name_slice);
const obj_file_name_slice = try obj_file_name.toOwnedSlice();
errdefer gpa.free(obj_file_name_slice);
modules.appendAssumeCapacity(.{
.mod_info = mod_info,
.module_name = module_name_slice,
.obj_file_name = obj_file_name_slice,
.populated = false,
.symbols = undefined,
.subsect_info = undefined,
@@ -131,8 +136,8 @@ pub fn parseDbiStream(self: *Pdb) !void {
}
// Section Contribution Substream
var sect_contribs = std.array_list.Managed(pdb.SectionContribEntry).init(gpa);
errdefer sect_contribs.deinit();
var sect_contribs: std.ArrayList(pdb.SectionContribEntry) = .empty;
defer sect_contribs.deinit(gpa);
var sect_cont_offset: usize = 0;
if (section_contrib_size != 0) {
@@ -144,7 +149,7 @@ pub fn parseDbiStream(self: *Pdb) !void {
sect_cont_offset += @sizeOf(u32);
}
while (sect_cont_offset != section_contrib_size) {
const entry = try sect_contribs.addOne();
const entry = try sect_contribs.addOne(gpa);
entry.* = try reader.takeStruct(pdb.SectionContribEntry, .little);
sect_cont_offset += @sizeOf(pdb.SectionContribEntry);
@@ -152,8 +157,11 @@ pub fn parseDbiStream(self: *Pdb) !void {
return error.InvalidDebugInfo;
}
self.modules = try modules.toOwnedSlice();
self.sect_contribs = try sect_contribs.toOwnedSlice();
try sect_contribs.shrinkToLen(gpa);
try modules.shrinkToLen(gpa);
self.sect_contribs = sect_contribs.toOwnedSliceAssert();
self.modules = modules.toOwnedSliceAssert();
}
pub fn parseIpiStream(self: *Pdb) !void {
@@ -1098,22 +1106,22 @@ const MsfStream = struct {
}
};
fn readSparseBitVector(reader: *Io.Reader, allocator: Allocator) ![]u32 {
fn readSparseBitVector(reader: *Io.Reader, gpa: Allocator) ![]u32 {
const num_words = try reader.takeInt(u32, .little);
var list = std.array_list.Managed(u32).init(allocator);
errdefer list.deinit();
var list: std.ArrayList(u32) = .empty;
defer list.deinit(gpa);
var word_i: u32 = 0;
while (word_i != num_words) : (word_i += 1) {
const word = try reader.takeInt(u32, .little);
var bit_i: u5 = 0;
while (true) : (bit_i += 1) {
if (word & (@as(u32, 1) << bit_i) != 0) {
try list.append(word_i * 32 + bit_i);
try list.append(gpa, word_i * 32 + bit_i);
}
if (bit_i == std.math.maxInt(u5)) break;
}
}
return try list.toOwnedSlice();
return try list.toOwnedSlice(gpa);
}
fn blockCountFromSize(size: u32, block_size: u32) u32 {
+5 -7
View File
@@ -196,19 +196,17 @@ pub fn parseTokens(
.zon => try parser.parseZon(),
}
const extra_data = try parser.extra_data.toOwnedSlice(gpa);
errdefer gpa.free(extra_data);
const errors = try parser.errors.toOwnedSlice(gpa);
errdefer gpa.free(errors);
try parser.extra_data.shrinkToLen(gpa);
try parser.errors.shrinkToLen(gpa);
// TODO experiment with compacting the MultiArrayList slices here
return Ast{
return .{
.source = source,
.mode = mode,
.tokens = tokens,
.nodes = parser.nodes.toOwnedSlice(),
.extra_data = extra_data,
.errors = errors,
.extra_data = parser.extra_data.toOwnedSliceAssert(),
.errors = parser.errors.toOwnedSliceAssert(),
};
}
+5 -2
View File
@@ -243,10 +243,13 @@ pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir {
}
}
try astgen.extra.shrinkToLen(gpa);
try astgen.string_bytes.shrinkToLen(gpa);
return .{
.instructions = if (fatal) .empty else astgen.instructions.toOwnedSlice(),
.string_bytes = try astgen.string_bytes.toOwnedSlice(gpa),
.extra = try astgen.extra.toOwnedSlice(gpa),
.string_bytes = astgen.string_bytes.toOwnedSliceAssert(),
.extra = astgen.extra.toOwnedSliceAssert(),
};
}
+6 -2
View File
@@ -397,9 +397,13 @@ pub const Wip = struct {
});
try wip.extra.appendSlice(gpa, @as([]const u32, @ptrCast(wip.root_list.items)));
wip.root_list.clearAndFree(gpa);
try wip.string_bytes.shrinkToLen(gpa);
try wip.extra.shrinkToLen(gpa);
return .{
.string_bytes = try wip.string_bytes.toOwnedSlice(gpa),
.extra = try wip.extra.toOwnedSlice(gpa),
.string_bytes = wip.string_bytes.toOwnedSliceAssert(),
.extra = wip.extra.toOwnedSliceAssert(),
};
}
+13 -21
View File
@@ -67,38 +67,30 @@ pub fn generate(gpa: Allocator, tree: Ast, options: Options) Allocator.Error!Zoi
}
if (zg.compile_errors.items.len > 0) {
const string_bytes = try zg.string_bytes.toOwnedSlice(gpa);
errdefer gpa.free(string_bytes);
const compile_errors = try zg.compile_errors.toOwnedSlice(gpa);
errdefer gpa.free(compile_errors);
const error_notes = try zg.error_notes.toOwnedSlice(gpa);
errdefer gpa.free(error_notes);
try zg.string_bytes.shrinkToLen(gpa);
try zg.compile_errors.shrinkToLen(gpa);
try zg.error_notes.shrinkToLen(gpa);
return .{
.nodes = .empty,
.extra = &.{},
.limbs = &.{},
.string_bytes = string_bytes,
.compile_errors = compile_errors,
.error_notes = error_notes,
.string_bytes = zg.string_bytes.toOwnedSliceAssert(),
.compile_errors = zg.compile_errors.toOwnedSliceAssert(),
.error_notes = zg.error_notes.toOwnedSliceAssert(),
};
} else {
assert(zg.error_notes.items.len == 0);
var nodes = zg.nodes.toOwnedSlice();
errdefer nodes.deinit(gpa);
const extra = try zg.extra.toOwnedSlice(gpa);
errdefer gpa.free(extra);
const limbs = try zg.limbs.toOwnedSlice(gpa);
errdefer gpa.free(limbs);
const string_bytes = try zg.string_bytes.toOwnedSlice(gpa);
errdefer gpa.free(string_bytes);
try zg.extra.shrinkToLen(gpa);
try zg.limbs.shrinkToLen(gpa);
try zg.string_bytes.shrinkToLen(gpa);
return .{
.nodes = nodes,
.extra = extra,
.limbs = limbs,
.string_bytes = string_bytes,
.nodes = zg.nodes.toOwnedSlice(),
.extra = zg.extra.toOwnedSliceAssert(),
.limbs = zg.limbs.toOwnedSliceAssert(),
.string_bytes = zg.string_bytes.toOwnedSliceAssert(),
.compile_errors = &.{},
.error_notes = &.{},
};
+10 -15
View File
@@ -1200,37 +1200,32 @@ pub const CObject = struct {
.end_block => |block| switch (@as(BlockId, @enumFromInt(block.id))) {
.Meta => {},
.Diag => {
try stack.items[stack.items.len - 2].sub_diags.ensureUnusedCapacity(gpa, 1);
try stack.items[stack.items.len - 1].src_ranges.shrinkToLen(gpa);
try stack.items[stack.items.len - 1].sub_diags.shrinkToLen(gpa);
var wip_diag = stack.pop().?;
errdefer wip_diag.deinit(gpa);
const src_ranges = try wip_diag.src_ranges.toOwnedSlice(gpa);
errdefer gpa.free(src_ranges);
const sub_diags = try wip_diag.sub_diags.toOwnedSlice(gpa);
errdefer {
for (sub_diags) |*sub_diag| sub_diag.deinit(gpa);
gpa.free(sub_diags);
}
try stack.items[stack.items.len - 1].sub_diags.append(gpa, .{
stack.items[stack.items.len - 1].sub_diags.appendAssumeCapacity(.{
.level = wip_diag.level,
.category = wip_diag.category,
.msg = wip_diag.msg,
.src_loc = wip_diag.src_loc,
.src_ranges = src_ranges,
.sub_diags = sub_diags,
.src_ranges = wip_diag.src_ranges.toOwnedSliceAssert(),
.sub_diags = wip_diag.sub_diags.toOwnedSliceAssert(),
});
},
_ => {},
},
};
assert(stack.items.len == 1);
try stack.items[0].sub_diags.shrinkToLen(gpa);
const bundle = try gpa.create(Bundle);
assert(stack.items.len == 1);
bundle.* = .{
.file_names = file_names,
.category_names = category_names,
.diags = try stack.items[0].sub_diags.toOwnedSlice(gpa),
.diags = stack.items[0].sub_diags.toOwnedSliceAssert(),
};
return bundle;
}
+17 -16
View File
@@ -170,26 +170,27 @@ pub fn generate(
const prologue = isel.instructions.items.len;
const epilogue = try isel.layout(param_it, is_sysv_var_args, saved_gra_len, saved_vra_len, mod);
const instructions = try isel.instructions.toOwnedSlice(gpa);
var mir: Mir = .{
try isel.instructions.shrinkToLen(gpa);
try isel.literals.shrinkToLen(gpa);
try isel.nav_relocs.shrinkToLen(gpa);
try isel.uav_relocs.shrinkToLen(gpa);
try isel.lazy_relocs.shrinkToLen(gpa);
try isel.global_relocs.shrinkToLen(gpa);
try isel.literal_relocs.shrinkToLen(gpa);
const instructions = isel.instructions.toOwnedSliceAssert();
return .{
.prologue = instructions[prologue..epilogue],
.body = instructions[0..prologue],
.epilogue = instructions[epilogue..],
.literals = &.{},
.nav_relocs = &.{},
.uav_relocs = &.{},
.lazy_relocs = &.{},
.global_relocs = &.{},
.literal_relocs = &.{},
.literals = isel.literals.toOwnedSliceAssert(),
.nav_relocs = isel.nav_relocs.toOwnedSliceAssert(),
.uav_relocs = isel.uav_relocs.toOwnedSliceAssert(),
.lazy_relocs = isel.lazy_relocs.toOwnedSliceAssert(),
.global_relocs = isel.global_relocs.toOwnedSliceAssert(),
.literal_relocs = isel.literal_relocs.toOwnedSliceAssert(),
};
errdefer mir.deinit(gpa);
mir.literals = try isel.literals.toOwnedSlice(gpa);
mir.nav_relocs = try isel.nav_relocs.toOwnedSlice(gpa);
mir.uav_relocs = try isel.uav_relocs.toOwnedSlice(gpa);
mir.lazy_relocs = try isel.lazy_relocs.toOwnedSlice(gpa);
mir.global_relocs = try isel.global_relocs.toOwnedSlice(gpa);
mir.literal_relocs = try isel.literal_relocs.toOwnedSlice(gpa);
return mir;
}
test {
+4 -5
View File
@@ -324,13 +324,12 @@ pub fn generate(
else => |e| return e,
};
var mir: Mir = .{
try function.mir_extra.shrinkToLen(gpa);
return .{
.instructions = function.mir_instructions.toOwnedSlice(),
.extra = &.{}, // fallible, so populated after errdefer
.extra = function.mir_extra.toOwnedSliceAssert(),
};
errdefer mir.deinit(gpa);
mir.extra = try function.mir_extra.toOwnedSlice(gpa);
return mir;
}
fn gen(self: *Self) !void {
+6 -7
View File
@@ -852,10 +852,13 @@ fn generateInner(cg: *CodeGen, any_returns: bool) InnerError!Mir {
try cg.addTag(.end);
try cg.addTag(.dbg_epilogue_begin);
var mir: Mir = .{
try cg.mir_extra.shrinkToLen(cg.gpa);
try cg.mir_locals.shrinkToLen(cg.gpa);
return .{
.instructions = cg.mir_instructions.toOwnedSlice(),
.extra = &.{}, // fallible so assigned after errdefer
.locals = &.{}, // fallible so assigned after errdefer
.extra = cg.mir_extra.toOwnedSliceAssert(),
.locals = cg.mir_locals.toOwnedSliceAssert(),
.prologue = if (cg.initial_stack_value == .none) .none else .{
.sp_local = cg.initial_stack_value.local.value,
.flags = .{ .stack_alignment = cg.stack_alignment },
@@ -867,10 +870,6 @@ fn generateInner(cg: *CodeGen, any_returns: bool) InnerError!Mir {
.func_tys = cg.mir_func_tys.move(),
.error_name_table_ref_count = cg.error_name_table_ref_count,
};
errdefer mir.deinit(cg.gpa);
mir.extra = try cg.mir_extra.toOwnedSlice(cg.gpa);
mir.locals = try cg.mir_locals.toOwnedSlice(cg.gpa);
return mir;
}
const CallWValues = struct {
+12 -15
View File
@@ -998,22 +998,19 @@ pub fn generate(
} },
});
var mir: Mir = .{
.instructions = .empty,
.extra = &.{},
.string_bytes = &.{},
.locals = &.{},
.table = &.{},
.frame_locs = .empty,
try function.mir_extra.shrinkToLen(gpa);
try function.mir_string_bytes.shrinkToLen(gpa);
try function.mir_locals.shrinkToLen(gpa);
try function.mir_table.shrinkToLen(gpa);
return .{
.instructions = function.mir_instructions.toOwnedSlice(),
.extra = function.mir_extra.toOwnedSliceAssert(),
.string_bytes = function.mir_string_bytes.toOwnedSliceAssert(),
.locals = function.mir_locals.toOwnedSliceAssert(),
.table = function.mir_table.toOwnedSliceAssert(),
.frame_locs = function.frame_locs.toOwnedSlice(),
};
errdefer mir.deinit(gpa);
mir.instructions = function.mir_instructions.toOwnedSlice();
mir.extra = try function.mir_extra.toOwnedSlice(gpa);
mir.string_bytes = try function.mir_string_bytes.toOwnedSlice(gpa);
mir.locals = try function.mir_locals.toOwnedSlice(gpa);
mir.table = try function.mir_table.toOwnedSlice(gpa);
mir.frame_locs = function.frame_locs.toOwnedSlice();
return mir;
}
pub fn getTmpMir(cg: *CodeGen) Mir {
+10 -9
View File
@@ -171,8 +171,8 @@ pub const Diags = struct {
) Allocator.Error!void {
const gpa = diags.gpa;
var context_lines = std.array_list.Managed([]const u8).init(gpa);
defer context_lines.deinit();
var context_lines: std.ArrayList([]const u8) = .empty;
defer context_lines.deinit(gpa);
var current_err: ?*Lld = null;
var lines = mem.splitSequence(u8, stderr, if (builtin.os.tag == .windows) "\r\n" else "\n");
@@ -181,16 +181,17 @@ pub const Diags = struct {
mem.eql(u8, line[0..prefix.len], prefix) and line[prefix.len] == ':')
{
if (current_err) |err| {
err.context_lines = try context_lines.toOwnedSlice();
err.context_lines = try context_lines.toOwnedSlice(gpa);
}
var split = mem.splitSequence(u8, line, "error: ");
_ = split.first();
const duped_msg = try std.fmt.allocPrint(gpa, "{s}: {s}", .{ prefix, split.rest() });
errdefer gpa.free(duped_msg);
try diags.lld.ensureUnusedCapacity(gpa, 1);
current_err = try diags.lld.addOne(gpa);
const duped_msg = try std.fmt.allocPrint(gpa, "{s}: {s}", .{ prefix, split.rest() });
current_err = diags.lld.addOneAssumeCapacity();
current_err.?.* = .{ .msg = duped_msg };
} else if (current_err != null) {
const context_prefix = ">>> ";
@@ -200,14 +201,14 @@ pub const Diags = struct {
}
if (trimmed.len > 0) {
const duped_line = try gpa.dupe(u8, trimmed);
try context_lines.append(duped_line);
try context_lines.ensureUnusedCapacity(gpa, 1);
context_lines.appendAssumeCapacity(try gpa.dupe(u8, trimmed));
}
}
}
if (current_err) |err| {
err.context_lines = try context_lines.toOwnedSlice();
err.context_lines = try context_lines.toOwnedSlice(gpa);
}
}
+5 -2
View File
@@ -107,9 +107,12 @@ pub fn parse(
try objects.append(gpa, object);
}
try objects.shrinkToLen(gpa);
try strtab.shrinkToLen(gpa);
return .{
.objects = try objects.toOwnedSlice(gpa),
.strtab = try strtab.toOwnedSlice(gpa),
.objects = objects.toOwnedSliceAssert(),
.strtab = strtab.toOwnedSliceAssert(),
};
}
+11 -5
View File
@@ -314,15 +314,21 @@ pub fn parse(
header.sections = &.{};
errdefer gpa.free(sections);
try strtab.shrinkToLen(gpa);
try nonlocal_esyms.shrinkToLen(gpa);
try nonlocal_versyms.shrinkToLen(gpa);
try nonlocal_symbols.shrinkToLen(gpa);
try verstrings.shrinkToLen(gpa);
return .{
.sections = sections,
.stat = header.stat,
.soname_index = header.soname_index,
.strtab = try strtab.toOwnedSlice(gpa),
.symtab = try nonlocal_esyms.toOwnedSlice(gpa),
.versyms = try nonlocal_versyms.toOwnedSlice(gpa),
.symbols = try nonlocal_symbols.toOwnedSlice(gpa),
.verstrings = try verstrings.toOwnedSlice(gpa),
.strtab = strtab.toOwnedSliceAssert(),
.symtab = nonlocal_esyms.toOwnedSliceAssert(),
.versyms = nonlocal_versyms.toOwnedSliceAssert(),
.symbols = nonlocal_symbols.toOwnedSliceAssert(),
.verstrings = verstrings.toOwnedSliceAssert(),
};
}
+6 -8
View File
@@ -211,9 +211,8 @@ pub const Value = union(enum) {
.float => return Value{ .float = math.lossyCast(f64, input) },
.@"struct" => |info| if (info.is_tuple) {
var list = std.array_list.Managed(Value).init(arena);
errdefer list.deinit();
try list.ensureTotalCapacityPrecise(info.fields.len);
var list: std.ArrayList(Value) = try .initCapacity(arena);
defer list.deinit();
inline for (info.fields) |field| {
if (try encode(arena, @field(input, field.name))) |value| {
@@ -221,7 +220,7 @@ pub const Value = union(enum) {
}
}
return Value{ .list = try list.toOwnedSlice() };
return Value{ .list = try list.toOwnedSlice(arena) };
} else {
var map = Map.init(arena);
errdefer map.deinit();
@@ -262,9 +261,8 @@ pub const Value = union(enum) {
return Value{ .string = try arena.dupe(u8, input) };
}
var list = std.array_list.Managed(Value).init(arena);
errdefer list.deinit();
try list.ensureTotalCapacityPrecise(input.len);
var list: std.ArrayList(Value) = .initCapacity(input.len);
defer list.deinit(arena);
for (input) |elem| {
if (try encode(arena, elem)) |value| {
@@ -275,7 +273,7 @@ pub const Value = union(enum) {
}
}
return Value{ .list = try list.toOwnedSlice() };
return Value{ .list = try list.toOwnedSlice(arena) };
},
else => {
@compileError("Unhandled type: {s}" ++ @typeName(@TypeOf(input)));
+20 -15
View File
@@ -337,20 +337,20 @@ const Action = enum {
close,
};
fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc {
var urls = std.StringHashMap(Token).init(allocator);
fn genToc(gpa: Allocator, tokenizer: *Tokenizer) !Toc {
var urls = std.StringHashMap(Token).init(gpa);
errdefer urls.deinit();
var header_stack_size: usize = 0;
var last_action: Action = .open;
var last_columns: ?u8 = null;
var toc_buf: Writer.Allocating = .init(allocator);
var toc_buf: Writer.Allocating = .init(gpa);
defer toc_buf.deinit();
const toc = &toc_buf.writer;
var nodes = std.array_list.Managed(Node).init(allocator);
var nodes = std.array_list.Managed(Node).init(gpa);
defer nodes.deinit();
try toc.writeByte('\n');
@@ -408,7 +408,7 @@ fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc {
header_stack_size += 1;
const urlized = try urlize(allocator, content);
const urlized = try urlize(gpa, content);
try nodes.append(Node{
.HeaderOpen = HeaderOpen{
.name = content,
@@ -450,7 +450,7 @@ fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc {
last_action = .close;
}
} else if (mem.eql(u8, tag_name, "see_also")) {
var list = std.array_list.Managed(SeeAlsoItem).init(allocator);
var list = std.array_list.Managed(SeeAlsoItem).init(gpa);
errdefer list.deinit();
while (true) {
@@ -465,7 +465,8 @@ fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc {
},
.separator => {},
.bracket_close => {
try nodes.append(Node{ .SeeAlso = try list.toOwnedSlice() });
try nodes.ensureUnusedCapacity(1);
nodes.appendAssumeCapacity(.{ .SeeAlso = try list.toOwnedSlice() });
break;
},
else => return parseError(tokenizer, see_also_tok, "invalid see_also token", .{}),
@@ -491,7 +492,7 @@ fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc {
try nodes.append(Node{
.Link = Link{
.url = try urlize(allocator, url_name),
.url = try urlize(gpa, url_name),
.name = name,
.token = name_tok,
},
@@ -592,9 +593,14 @@ fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc {
}
}
const nodes_slice = try nodes.toOwnedSlice();
errdefer gpa.free(nodes_slice);
const toc_slice = try toc_buf.toOwnedSlice();
errdefer gpa.free(toc_slice);
return .{
.nodes = try nodes.toOwnedSlice(),
.toc = try toc_buf.toOwnedSlice(),
.nodes = nodes_slice,
.toc = toc_slice,
.urls = urls,
};
}
@@ -617,12 +623,11 @@ fn urlize(gpa: Allocator, input: []const u8) ![]u8 {
return try buf.toOwnedSlice(gpa);
}
fn escapeHtml(allocator: Allocator, input: []const u8) ![]u8 {
var buf = std.array_list.Managed(u8).init(allocator);
defer buf.deinit();
fn escapeHtml(gpa: Allocator, input: []const u8) ![]u8 {
var buf: std.Io.Writer.Allocating = .init(gpa);
defer buf.deinit(gpa);
const out = buf.writer();
try writeEscaped(out, input);
try writeEscaped(&buf.writer, input);
return try buf.toOwnedSlice();
}