Skip to content

Commit 6e93c4e

Browse files
committed
new allocator interface after Andrew Kelley review
1 parent dc9648f commit 6e93c4e

File tree

6 files changed

+48
-58
lines changed

6 files changed

+48
-58
lines changed

lib/std/array_list.zig

-2
Original file line numberDiff line numberDiff line change
@@ -220,7 +220,6 @@ pub fn ArrayListAligned(comptime T: type, comptime alignment: ?u29) type {
220220
}
221221

222222
const new_memory = try self.allocator.reallocAtLeast(self.allocatedSlice(), better_capacity);
223-
assert(new_memory.len >= better_capacity);
224223
self.items.ptr = new_memory.ptr;
225224
self.capacity = new_memory.len;
226225
}
@@ -443,7 +442,6 @@ pub fn ArrayListAlignedUnmanaged(comptime T: type, comptime alignment: ?u29) typ
443442
}
444443

445444
const new_memory = try allocator.reallocAtLeast(self.allocatedSlice(), better_capacity);
446-
assert(new_memory.len >= better_capacity);
447445
self.items.ptr = new_memory.ptr;
448446
self.capacity = new_memory.len;
449447
}

lib/std/heap.zig

+17-19
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ usingnamespace if (comptime @hasDecl(c, "malloc_size")) struct {
2525
pub const supports_malloc_size = false;
2626
};
2727

28-
pub const c_allocator = mem.getAllocatorPtr(&c_allocator_state);
28+
pub const c_allocator = &c_allocator_state;
2929
var c_allocator_state = Allocator{
3030
.allocFn = cAlloc,
3131
.resizeFn = cResize,
@@ -38,7 +38,7 @@ fn cAlloc(self: *Allocator, len: usize, ptr_align: u29, len_align: u29) Allocato
3838
return ptr[0..len];
3939
}
4040
const full_len = init: {
41-
if (comptime supports_malloc_size) {
41+
if (supports_malloc_size) {
4242
const s = malloc_size(ptr);
4343
assert(s >= len);
4444
break :init s;
@@ -56,24 +56,23 @@ fn cResize(self: *Allocator, buf: []u8, new_len: usize, len_align: u29) Allocato
5656
if (new_len <= buf.len) {
5757
return mem.alignAllocLen(buf.len, new_len, len_align);
5858
}
59-
if (comptime supports_malloc_size) {
59+
if (supports_malloc_size) {
6060
const full_len = malloc_size(buf.ptr);
6161
if (new_len <= full_len) {
6262
return mem.alignAllocLen(full_len, new_len, len_align);
6363
}
6464
}
65-
// TODO: could we still use realloc? are there any cases where we can guarantee that realloc won't move memory?
6665
return error.OutOfMemory;
6766
}
6867

6968
/// This allocator makes a syscall directly for every allocation and free.
7069
/// Thread-safe and lock-free.
7170
pub const page_allocator = if (std.Target.current.isWasm())
72-
mem.getAllocatorPtr(&wasm_page_allocator_state)
71+
&wasm_page_allocator_state
7372
else if (std.Target.current.os.tag == .freestanding)
7473
root.os.heap.page_allocator
7574
else
76-
mem.getAllocatorPtr(&page_allocator_state);
75+
&page_allocator_state;
7776

7877
var page_allocator_state = Allocator{
7978
.allocFn = PageAllocator.alloc,
@@ -507,9 +506,9 @@ pub const FixedBufferAllocator = struct {
507506
return sliceContainsSlice(self.buffer, slice);
508507
}
509508

510-
// NOTE: this will not work in all cases, if the last allocation had an adjusted_index
511-
// then we won't be able to determine what the last allocation was. This is because
512-
// the alignForward operation done in alloc is not reversible.
509+
/// NOTE: this will not work in all cases, if the last allocation had an adjusted_index
510+
/// then we won't be able to determine what the last allocation was. This is because
511+
/// the alignForward operation done in alloc is not reversible.
513512
pub fn isLastAllocation(self: *FixedBufferAllocator, buf: []u8) bool {
514513
return buf.ptr + buf.len == self.buffer.ptr + self.end_index;
515514
}
@@ -546,7 +545,6 @@ pub const FixedBufferAllocator = struct {
546545

547546
var add = new_size - buf.len;
548547
if (add + self.end_index > self.buffer.len) {
549-
//add = self.buffer.len - self.end_index;
550548
return error.OutOfMemory;
551549
}
552550
self.end_index += add;
@@ -735,7 +733,7 @@ test "ArenaAllocator" {
735733

736734
var test_fixed_buffer_allocator_memory: [800000 * @sizeOf(u64)]u8 = undefined;
737735
test "FixedBufferAllocator" {
738-
var fixed_buffer_allocator = mem.sanityWrap(FixedBufferAllocator.init(test_fixed_buffer_allocator_memory[0..]));
736+
var fixed_buffer_allocator = mem.validationWrap(FixedBufferAllocator.init(test_fixed_buffer_allocator_memory[0..]));
739737

740738
try testAllocator(&fixed_buffer_allocator.allocator);
741739
try testAllocatorAligned(&fixed_buffer_allocator.allocator, 16);
@@ -802,8 +800,8 @@ test "ThreadSafeFixedBufferAllocator" {
802800
}
803801

804802
fn testAllocator(base_allocator: *mem.Allocator) !void {
805-
var sanityAllocator = mem.sanityWrap(base_allocator);
806-
const allocator = &sanityAllocator.allocator;
803+
var validationAllocator = mem.validationWrap(base_allocator);
804+
const allocator = &validationAllocator.allocator;
807805

808806
var slice = try allocator.alloc(*i32, 100);
809807
testing.expect(slice.len == 100);
@@ -833,8 +831,8 @@ fn testAllocator(base_allocator: *mem.Allocator) !void {
833831
}
834832

835833
fn testAllocatorAligned(base_allocator: *mem.Allocator, comptime alignment: u29) !void {
836-
var sanityAllocator = mem.sanityWrap(base_allocator);
837-
const allocator = &sanityAllocator.allocator;
834+
var validationAllocator = mem.validationWrap(base_allocator);
835+
const allocator = &validationAllocator.allocator;
838836

839837
// initial
840838
var slice = try allocator.alignedAlloc(u8, alignment, 10);
@@ -860,8 +858,8 @@ fn testAllocatorAligned(base_allocator: *mem.Allocator, comptime alignment: u29)
860858
}
861859

862860
fn testAllocatorLargeAlignment(base_allocator: *mem.Allocator) mem.Allocator.Error!void {
863-
var sanityAllocator = mem.sanityWrap(base_allocator);
864-
const allocator = &sanityAllocator.allocator;
861+
var validationAllocator = mem.validationWrap(base_allocator);
862+
const allocator = &validationAllocator.allocator;
865863

866864
//Maybe a platform's page_size is actually the same as or
867865
// very near usize?
@@ -892,8 +890,8 @@ fn testAllocatorLargeAlignment(base_allocator: *mem.Allocator) mem.Allocator.Err
892890
}
893891

894892
fn testAllocatorAlignedShrink(base_allocator: *mem.Allocator) mem.Allocator.Error!void {
895-
var sanityAllocator = mem.sanityWrap(base_allocator);
896-
const allocator = &sanityAllocator.allocator;
893+
var validationAllocator = mem.validationWrap(base_allocator);
894+
const allocator = &validationAllocator.allocator;
897895

898896
var debug_buffer: [1000]u8 = undefined;
899897
const debug_allocator = &FixedBufferAllocator.init(&debug_buffer).allocator;

lib/std/heap/arena_allocator.zig

+1-1
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ pub const ArenaAllocator = struct {
7777
}
7878
const result = cur_buf[adjusted_index..new_end_index];
7979
self.state.end_index = new_end_index;
80-
return result[0..mem.alignAllocLen(result.len, n, len_align)];
80+
return result;
8181
}
8282
}
8383
};

lib/std/heap/logging_allocator.zig

+1-1
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@ test "LoggingAllocator" {
7070
var fbs = std.io.fixedBufferStream(&log_buf);
7171

7272
var allocator_buf: [10]u8 = undefined;
73-
var fixedBufferAllocator = std.mem.sanityWrap(std.heap.FixedBufferAllocator.init(&allocator_buf));
73+
var fixedBufferAllocator = std.mem.validationWrap(std.heap.FixedBufferAllocator.init(&allocator_buf));
7474
const allocator = &loggingAllocator(&fixedBufferAllocator.allocator, fbs.outStream()).allocator;
7575

7676
var a = try allocator.alloc(u8, 10);

lib/std/mem.zig

+28-34
Original file line numberDiff line numberDiff line change
@@ -141,6 +141,9 @@ pub const Allocator = struct {
141141
const new_mem = try self.callAllocFn(new_len, new_alignment, len_align);
142142
@memcpy(new_mem.ptr, old_mem.ptr, std.math.min(new_len, old_mem.len));
143143
// DISABLED TO AVOID BUGS IN TRANSLATE C
144+
// use './zig build test-translate-c' to reproduce, some of the symbols in the
145+
// generated C code will be a sequence of 0xaa (the undefined value), meaning
146+
// it is printing data that has been freed
144147
//@memset(old_mem.ptr, undefined, old_mem.len);
145148
_ = self.shrinkBytes(old_mem, 0, 0);
146149
return new_mem;
@@ -214,18 +217,19 @@ pub const Allocator = struct {
214217
return self.allocWithOptions(Elem, n, null, sentinel);
215218
}
216219

220+
/// Deprecated: use `allocAdvanced`
217221
pub fn alignedAlloc(
218222
self: *Allocator,
219223
comptime T: type,
220224
/// null means naturally aligned
221225
comptime alignment: ?u29,
222226
n: usize,
223227
) Error![]align(alignment orelse @alignOf(T)) T {
224-
return self.alignedAlloc2(T, alignment, n, .exact);
228+
return self.allocAdvanced(T, alignment, n, .exact);
225229
}
226230

227-
const Exact = enum {exact,atLeast};
228-
pub fn alignedAlloc2(
231+
const Exact = enum {exact,at_least};
232+
pub fn allocAdvanced(
229233
self: *Allocator,
230234
comptime T: type,
231235
/// null means naturally aligned
@@ -234,7 +238,7 @@ pub const Allocator = struct {
234238
exact: Exact,
235239
) Error![]align(alignment orelse @alignOf(T)) T {
236240
const a = if (alignment) |a| blk: {
237-
if (a == @alignOf(T)) return alignedAlloc2(self, T, null, n, exact);
241+
if (a == @alignOf(T)) return allocAdvanced(self, T, null, n, exact);
238242
break :blk a;
239243
} else @alignOf(T);
240244

@@ -248,7 +252,10 @@ pub const Allocator = struct {
248252
// functions that heap-allocate their own frame with @Frame(func).
249253
const sizeOfT = if (alignment == null) @intCast(u29, @divExact(byte_count, n)) else @sizeOf(T);
250254
const byte_slice = try self.callAllocFn(byte_count, a, if (exact == .exact) @as(u29, 0) else sizeOfT);
251-
assert(if (exact == .exact) byte_slice.len == byte_count else byte_slice.len >= byte_count);
255+
switch (exact) {
256+
.exact => assert(byte_slice.len == byte_count),
257+
.at_least => assert(byte_slice.len >= byte_count),
258+
}
252259
@memset(byte_slice.ptr, undefined, byte_slice.len);
253260
if (alignment == null) {
254261
// This if block is a workaround (see comment above)
@@ -273,33 +280,31 @@ pub const Allocator = struct {
273280
break :t Error![]align(Slice.alignment) Slice.child;
274281
} {
275282
const old_alignment = @typeInfo(@TypeOf(old_mem)).Pointer.alignment;
276-
return self.alignedRealloc2(old_mem, old_alignment, new_n, .exact);
283+
return self.reallocAdvanced(old_mem, old_alignment, new_n, .exact);
277284
}
278285

279286
pub fn reallocAtLeast(self: *Allocator, old_mem: var, new_n: usize) t: {
280287
const Slice = @typeInfo(@TypeOf(old_mem)).Pointer;
281288
break :t Error![]align(Slice.alignment) Slice.child;
282289
} {
283290
const old_alignment = @typeInfo(@TypeOf(old_mem)).Pointer.alignment;
284-
return self.alignedRealloc2(old_mem, old_alignment, new_n, .atLeast);
291+
return self.reallocAdvanced(old_mem, old_alignment, new_n, .at_least);
285292
}
286293

287-
/// This is the same as `realloc`, except caller may additionally request
288-
/// a new alignment, which can be larger, smaller, or the same as the old
289-
/// allocation.
294+
// Deprecated: use `reallocAdvanced`
290295
pub fn alignedRealloc(
291296
self: *Allocator,
292297
old_mem: var,
293298
comptime new_alignment: u29,
294299
new_n: usize,
295300
) Error![]align(new_alignment) @typeInfo(@TypeOf(old_mem)).Pointer.child {
296-
return self.alignedRealloc2(old_mem, new_alignment, new_n, .exact);
301+
return self.reallocAdvanced(old_mem, new_alignment, new_n, .exact);
297302
}
298303

299304
/// This is the same as `realloc`, except caller may additionally request
300305
/// a new alignment, which can be larger, smaller, or the same as the old
301306
/// allocation.
302-
pub fn alignedRealloc2(
307+
pub fn reallocAdvanced(
303308
self: *Allocator,
304309
old_mem: var,
305310
comptime new_alignment: u29,
@@ -309,7 +314,7 @@ pub const Allocator = struct {
309314
const Slice = @typeInfo(@TypeOf(old_mem)).Pointer;
310315
const T = Slice.child;
311316
if (old_mem.len == 0) {
312-
return self.alignedAlloc2(T, new_alignment, new_n, exact);
317+
return self.allocAdvanced(T, new_alignment, new_n, exact);
313318
}
314319
if (new_n == 0) {
315320
self.free(old_mem);
@@ -392,24 +397,9 @@ pub const Allocator = struct {
392397
}
393398
};
394399

395-
/// Given a pointer to an allocator, return the *Allocator for it. `allocatorStatePtr` can
396-
/// either be a `*Allocator`, in which case it is returned as-is, otherwise, the address of
397-
/// the `allocator` field is returned.
398-
pub fn getAllocatorPtr(allocatorStatePtr: var) *Allocator {
399-
// allocator must be a pointer or else this function will return a copy of the allocator which
400-
// is not what this is for
401-
const T = @TypeOf(allocatorStatePtr);
402-
switch (@typeInfo(T)) {
403-
.Pointer => {},
404-
else => @compileError("getAllocatorPtr expects a pointer to an allocator but got: " ++ @typeName(T)),
405-
}
406-
if (T == *Allocator)
407-
return allocatorStatePtr;
408-
return &allocatorStatePtr.allocator;
409-
}
410-
411-
/// Detects and asserts if the std.mem.Allocator interface is violated
412-
pub fn SanityAllocator(comptime T: type) type { return struct {
400+
/// Detects and asserts if the std.mem.Allocator interface is violated by the caller
401+
/// or the allocator.
402+
pub fn ValidationAllocator(comptime T: type) type { return struct {
413403
const Self = @This();
414404
allocator: Allocator,
415405
underlying_allocator: T,
@@ -424,7 +414,8 @@ pub fn SanityAllocator(comptime T: type) type { return struct {
424414
}
425415
fn getUnderlyingAllocatorPtr(self: *@This()) *Allocator {
426416
if (T == *Allocator) return self.underlying_allocator;
427-
return getAllocatorPtr(&self.underlying_allocator);
417+
if (*T == *Allocator) return &self.underlying_allocator;
418+
return &self.underlying_allocator.allocator;
428419
}
429420
pub fn alloc(allocator: *Allocator, n: usize, ptr_align: u29, len_align: u29) Allocator.Error![]u8 {
430421
assert(n > 0);
@@ -436,6 +427,7 @@ pub fn SanityAllocator(comptime T: type) type { return struct {
436427

437428
const self = @fieldParentPtr(@This(), "allocator", allocator);
438429
const result = try self.getUnderlyingAllocatorPtr().callAllocFn(n, ptr_align, len_align);
430+
assert(mem.isAligned(@ptrToInt(result.ptr), ptr_align));
439431
if (len_align == 0) {
440432
assert(result.len == n);
441433
} else {
@@ -467,8 +459,8 @@ pub fn SanityAllocator(comptime T: type) type { return struct {
467459
};
468460
};}
469461

470-
pub fn sanityWrap(allocator: var) SanityAllocator(@TypeOf(allocator)) {
471-
return SanityAllocator(@TypeOf(allocator)).init(allocator);
462+
pub fn validationWrap(allocator: var) ValidationAllocator(@TypeOf(allocator)) {
463+
return ValidationAllocator(@TypeOf(allocator)).init(allocator);
472464
}
473465

474466
/// An allocator helper function. Adjusts an allocation length satisfy `len_align`.
@@ -2377,6 +2369,8 @@ test "alignForward" {
23772369
testing.expect(alignForward(17, 8) == 24);
23782370
}
23792371

2372+
/// Round an address down to the previous aligned address
2373+
/// Unlike `alignBackward`, `alignment` can be any positive number, not just a power of 2.
23802374
pub fn alignBackwardAnyAlign(i: usize, alignment: usize) usize {
23812375
if (@popCount(usize, alignment) == 1)
23822376
return alignBackward(i, alignment);

lib/std/testing.zig

+1-1
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ pub var allocator_instance = LeakCountAllocator.init(&base_allocator_instance.al
1111
pub const failing_allocator = &failing_allocator_instance.allocator;
1212
pub var failing_allocator_instance = FailingAllocator.init(&base_allocator_instance.allocator, 0);
1313

14-
pub var base_allocator_instance = std.mem.sanityWrap(std.heap.ThreadSafeFixedBufferAllocator.init(allocator_mem[0..]));
14+
pub var base_allocator_instance = std.mem.validationWrap(std.heap.ThreadSafeFixedBufferAllocator.init(allocator_mem[0..]));
1515
var allocator_mem: [2 * 1024 * 1024]u8 = undefined;
1616

1717
/// This function is intended to be used only in tests. It prints diagnostics to stderr

0 commit comments

Comments
 (0)