const std = @import("../std.zig");
const assert = std.debug.assert;
const math = std.math;
const mem = std.mem;
const Allocator = @This();
const builtin = @import("builtin");
/// The error set shared by all allocation functions in this interface.
pub const Error = error{OutOfMemory};
/// Type-erased pointer to the concrete allocator implementation's state.
ptr: *anyopaque,
/// Dispatch table holding the implementation's alloc/resize/free functions.
vtable: *const VTable,
/// The function table every concrete allocator implementation must supply.
pub const VTable = struct {
    /// Attempt to allocate `len` bytes aligned to `ptr_align`.
    /// When `len_align` is 0 the returned slice must be exactly `len` bytes;
    /// otherwise it may be longer, with a length that is a multiple of
    /// `len_align` (see the `allocBytes` tests in this file).
    /// `ret_addr` is the caller's return address, for allocators that record
    /// stack traces.
    alloc: std.meta.FnPtr(fn (ptr: *anyopaque, len: usize, ptr_align: u29, len_align: u29, ret_addr: usize) Error![]u8),
    /// Attempt to change the size of `buf` IN PLACE (never relocating it).
    /// Returns the new byte length on success, or null if the implementation
    /// cannot resize in place. `new_len` is never 0 (asserted in `init`);
    /// shrink requests must always succeed (see `shrinkBytes`).
    resize: std.meta.FnPtr(fn (ptr: *anyopaque, buf: []u8, buf_align: u29, new_len: usize, len_align: u29, ret_addr: usize) ?usize),
    /// Free `buf`, which was previously returned by `alloc` or adjusted by
    /// `resize`. Deallocation may not fail.
    free: std.meta.FnPtr(fn (ptr: *anyopaque, buf: []u8, buf_align: u29, ret_addr: usize) void),
};
/// Builds a type-erased `Allocator` from a concrete implementation pointer
/// and three comptime-known implementation functions. A vtable is generated
/// at comptime whose thunks cast the erased `*anyopaque` back to
/// `@TypeOf(pointer)` and inline-forward each call.
pub fn init(
    pointer: anytype,
    comptime allocFn: fn (ptr: @TypeOf(pointer), len: usize, ptr_align: u29, len_align: u29, ret_addr: usize) Error![]u8,
    comptime resizeFn: fn (ptr: @TypeOf(pointer), buf: []u8, buf_align: u29, new_len: usize, len_align: u29, ret_addr: usize) ?usize,
    comptime freeFn: fn (ptr: @TypeOf(pointer), buf: []u8, buf_align: u29, ret_addr: usize) void,
) Allocator {
    const Ptr = @TypeOf(pointer);
    const ptr_info = @typeInfo(Ptr);
    // The state must be a single-item pointer so the erased *anyopaque can be
    // cast back losslessly in the thunks below.
    assert(ptr_info == .Pointer); // Must be a pointer
    assert(ptr_info.Pointer.size == .One); // Must be a single-item pointer
    const alignment = ptr_info.Pointer.alignment;
    const gen = struct {
        fn allocImpl(ptr: *anyopaque, len: usize, ptr_align: u29, len_align: u29, ret_addr: usize) Error![]u8 {
            const self = @ptrCast(Ptr, @alignCast(alignment, ptr));
            return @call(.{ .modifier = .always_inline }, allocFn, .{ self, len, ptr_align, len_align, ret_addr });
        }
        fn resizeImpl(ptr: *anyopaque, buf: []u8, buf_align: u29, new_len: usize, len_align: u29, ret_addr: usize) ?usize {
            // Resizing to zero is not part of the resize contract; callers
            // must use `free` instead.
            assert(new_len != 0);
            const self = @ptrCast(Ptr, @alignCast(alignment, ptr));
            return @call(.{ .modifier = .always_inline }, resizeFn, .{ self, buf, buf_align, new_len, len_align, ret_addr });
        }
        fn freeImpl(ptr: *anyopaque, buf: []u8, buf_align: u29, ret_addr: usize) void {
            const self = @ptrCast(Ptr, @alignCast(alignment, ptr));
            @call(.{ .modifier = .always_inline }, freeFn, .{ self, buf, buf_align, ret_addr });
        }
        // One vtable instance is generated per unique (Ptr, allocFn, resizeFn,
        // freeFn) combination; it has static lifetime, so taking its address
        // below is safe.
        const vtable = VTable{
            .alloc = allocImpl,
            .resize = resizeImpl,
            .free = freeImpl,
        };
    };
    return .{
        .ptr = pointer,
        .vtable = &gen.vtable,
    };
}
/// Mixin providing a `resize` implementation for allocators that cannot grow
/// an allocation in place. Shrinking (or keeping the same length) always
/// succeeds trivially; any request to grow reports failure.
pub fn NoResize(comptime AllocatorType: type) type {
    return struct {
        /// Refuses growth; accepts shrink/equal-length requests by simply
        /// reporting the requested length back.
        pub fn noResize(
            self: *AllocatorType,
            buf: []u8,
            buf_align: u29,
            new_len: usize,
            len_align: u29,
            ret_addr: usize,
        ) ?usize {
            _ = self;
            _ = buf_align;
            _ = len_align;
            _ = ret_addr;
            if (new_len <= buf.len) return new_len;
            return null;
        }
    };
}
/// Mixin providing a `free` implementation that does nothing, for allocators
/// that never reclaim individual allocations (e.g. arena-style allocators).
pub fn NoOpFree(comptime AllocatorType: type) type {
    return struct {
        /// Intentionally a no-op; all parameters are ignored.
        pub fn noOpFree(
            self: *AllocatorType,
            buf: []u8,
            buf_align: u29,
            ret_addr: usize,
        ) void {
            _ = ret_addr;
            _ = buf_align;
            _ = buf;
            _ = self;
        }
    };
}
/// Mixin providing a `free` implementation that unconditionally panics, for
/// allocators on which freeing is a programming error and should fail loudly.
pub fn PanicFree(comptime AllocatorType: type) type {
    return struct {
        /// Always panics with a message naming the allocator type.
        pub fn panicFree(
            self: *AllocatorType,
            buf: []u8,
            buf_align: u29,
            ret_addr: usize,
        ) void {
            _ = self;
            _ = buf;
            _ = buf_align;
            _ = ret_addr;
            @panic("free is not a supported operation for the allocator: " ++ @typeName(AllocatorType));
        }
    };
}
/// Directly invokes the implementation's `alloc` through the vtable.
/// See `VTable.alloc` for the `len_align` contract. Most callers should use
/// the typed wrappers (`alloc`, `alignedAlloc`, ...) instead.
pub inline fn rawAlloc(self: Allocator, len: usize, ptr_align: u29, len_align: u29, ret_addr: usize) Error![]u8 {
    return self.vtable.alloc(self.ptr, len, ptr_align, len_align, ret_addr);
}
/// Directly invokes the implementation's `resize` through the vtable.
/// In-place only; returns the new byte length, or null if the implementation
/// cannot honor the request. `new_len` must not be 0 (asserted in `init`).
pub inline fn rawResize(self: Allocator, buf: []u8, buf_align: u29, new_len: usize, len_align: u29, ret_addr: usize) ?usize {
    return self.vtable.resize(self.ptr, buf, buf_align, new_len, len_align, ret_addr);
}
/// Directly invokes the implementation's `free` through the vtable.
/// Most callers should use the typed `free`/`destroy` wrappers instead.
pub inline fn rawFree(self: Allocator, buf: []u8, buf_align: u29, ret_addr: usize) void {
    return self.vtable.free(self.ptr, buf, buf_align, ret_addr);
}
/// Allocates a single `T` and returns a pointer to it; free with `destroy`.
/// For zero-sized types a dangling pointer is returned without calling into
/// the allocator implementation at all.
pub fn create(self: Allocator, comptime T: type) Error!*T {
    if (@sizeOf(T) == 0) return @as(*T, undefined);
    const slice = try self.allocAdvancedWithRetAddr(T, null, 1, .exact, @returnAddress());
    return &slice[0];
}
/// Frees a single-item pointer previously returned by `create`.
/// Zero-sized types were never actually allocated, so they are ignored.
pub fn destroy(self: Allocator, ptr: anytype) void {
    const info = @typeInfo(@TypeOf(ptr)).Pointer;
    const T = info.child;
    if (@sizeOf(T) == 0) return;
    // Round-trip through an integer to strip any const qualifier so the
    // bytes can be handed to rawFree.
    const non_const_ptr = @intToPtr([*]u8, @ptrToInt(ptr));
    self.rawFree(non_const_ptr[0..@sizeOf(T)], info.alignment, @returnAddress());
}
/// Allocates a slice of exactly `n` items of type `T` with natural alignment.
/// Caller owns the result; free with `free`.
pub fn alloc(self: Allocator, comptime T: type, n: usize) Error![]T {
    return self.allocAdvancedWithRetAddr(T, null, n, .exact, @returnAddress());
}
/// Allocates `n` items of `Elem` with optional comptime-known alignment and
/// optional sentinel terminator; the return type is computed accordingly by
/// `AllocWithOptionsPayload`.
pub fn allocWithOptions(
    self: Allocator,
    comptime Elem: type,
    n: usize,
    /// null means naturally aligned
    comptime optional_alignment: ?u29,
    comptime optional_sentinel: ?Elem,
) Error!AllocWithOptionsPayload(Elem, optional_alignment, optional_sentinel) {
    return self.allocWithOptionsRetAddr(Elem, n, optional_alignment, optional_sentinel, @returnAddress());
}
/// Same as `allocWithOptions`, but the caller supplies the return address to
/// attribute the allocation to (useful when wrapping this in another layer).
pub fn allocWithOptionsRetAddr(
    self: Allocator,
    comptime Elem: type,
    n: usize,
    /// null means naturally aligned
    comptime optional_alignment: ?u29,
    comptime optional_sentinel: ?Elem,
    return_address: usize,
) Error!AllocWithOptionsPayload(Elem, optional_alignment, optional_sentinel) {
    if (optional_sentinel) |sentinel| {
        // One extra element is allocated to hold the sentinel terminator.
        const ptr = try self.allocAdvancedWithRetAddr(Elem, optional_alignment, n + 1, .exact, return_address);
        ptr[n] = sentinel;
        return ptr[0..n :sentinel];
    } else {
        return self.allocAdvancedWithRetAddr(Elem, optional_alignment, n, .exact, return_address);
    }
}
/// Computes the return type for `allocWithOptions`: a sentinel-terminated
/// slice when a sentinel is requested, a plain slice otherwise, aligned to
/// the requested alignment (or the element's natural alignment).
fn AllocWithOptionsPayload(comptime Elem: type, comptime alignment: ?u29, comptime sentinel: ?Elem) type {
    if (sentinel) |s| {
        return [:s]align(alignment orelse @alignOf(Elem)) Elem;
    } else {
        return []align(alignment orelse @alignOf(Elem)) Elem;
    }
}
/// Allocates a naturally-aligned, sentinel-terminated slice of `n` items.
/// `result[n]` is set to `sentinel` (one extra element is allocated for it).
pub fn allocSentinel(
    self: Allocator,
    comptime Elem: type,
    n: usize,
    comptime sentinel: Elem,
) Error![:sentinel]Elem {
    return self.allocWithOptionsRetAddr(Elem, n, null, sentinel, @returnAddress());
}
/// Allocates exactly `n` items of `T` with the given comptime-known
/// alignment (null means natural alignment).
pub fn alignedAlloc(
    self: Allocator,
    comptime T: type,
    /// null means naturally aligned
    comptime alignment: ?u29,
    n: usize,
) Error![]align(alignment orelse @alignOf(T)) T {
    return self.allocAdvancedWithRetAddr(T, alignment, n, .exact, @returnAddress());
}
/// Allocates `n` items of `T` with the given alignment, with control over
/// whether the result must be exactly `n` items (`.exact`) or may be longer
/// (`.at_least`).
pub fn allocAdvanced(
    self: Allocator,
    comptime T: type,
    /// null means naturally aligned
    comptime alignment: ?u29,
    n: usize,
    exact: Exact,
) Error![]align(alignment orelse @alignOf(T)) T {
    return self.allocAdvancedWithRetAddr(T, alignment, n, exact, @returnAddress());
}
/// Whether a caller requires exactly the requested length (`.exact`) or will
/// accept a longer result if the allocator can provide one (`.at_least`).
pub const Exact = enum { exact, at_least };
/// Core typed allocation path backing `alloc`, `alignedAlloc`, `create`, etc.
pub fn allocAdvancedWithRetAddr(
    self: Allocator,
    comptime T: type,
    /// null means naturally aligned
    comptime alignment: ?u29,
    n: usize,
    exact: Exact,
    return_address: usize,
) Error![]align(alignment orelse @alignOf(T)) T {
    const a = if (alignment) |a| blk: {
        // Normalize: an explicit alignment equal to the natural one is
        // re-dispatched as null so both spellings share one instantiation.
        if (a == @alignOf(T)) return allocAdvancedWithRetAddr(self, T, null, n, exact, return_address);
        break :blk a;
    } else @alignOf(T);
    if (n == 0) {
        // Zero-length request: return a dangling but suitably-aligned pointer
        // without calling into the implementation (`free` skips it likewise).
        const ptr = comptime std.mem.alignBackward(std.math.maxInt(usize), a);
        return @intToPtr([*]align(a) T, ptr)[0..0];
    }
    const byte_count = math.mul(usize, @sizeOf(T), n) catch return Error.OutOfMemory;
    // `@divExact(byte_count, n)` equals `@sizeOf(T)`; the indirection appears
    // to avoid referencing @sizeOf(T) directly on this branch — presumably a
    // comptime-dependency workaround. NOTE(review): confirm against upstream.
    const size_of_T: usize = if (alignment == null) @divExact(byte_count, n) else @sizeOf(T);
    const len_align: u29 = switch (exact) {
        .exact => 0,
        // A failed cast (element too large for u29) degrades to exact-length
        // behavior (len_align = 0).
        .at_least => math.cast(u29, size_of_T) orelse 0,
    };
    const byte_slice = try self.rawAlloc(byte_count, a, len_align, return_address);
    switch (exact) {
        .exact => assert(byte_slice.len == byte_count),
        .at_least => assert(byte_slice.len >= byte_count),
    }
    // Fill new memory with `undefined` so use-before-init is catchable in
    // safe builds.
    @memset(byte_slice.ptr, undefined, byte_slice.len);
    if (alignment == null) {
        return @intToPtr([*]T, @ptrToInt(byte_slice.ptr))[0..@divExact(byte_slice.len, @sizeOf(T))];
    } else {
        return mem.bytesAsSlice(T, @alignCast(a, byte_slice));
    }
}
/// Requests a new length for `old_mem` WITHOUT relocating it. Returns the
/// resized slice (same pointer) on success, or null if the implementation
/// cannot resize in place. Resizing to 0 frees the memory and returns an
/// empty slice.
pub fn resize(self: Allocator, old_mem: anytype, new_n: usize) ?@TypeOf(old_mem) {
    const Slice = @typeInfo(@TypeOf(old_mem)).Pointer;
    const T = Slice.child;
    if (new_n == 0) {
        self.free(old_mem);
        return &[0]T{};
    }
    const old_byte_slice = mem.sliceAsBytes(old_mem);
    // Overflow in the byte count is treated as "cannot resize".
    const new_byte_count = math.mul(usize, @sizeOf(T), new_n) catch return null;
    const rc = self.rawResize(old_byte_slice, Slice.alignment, new_byte_count, 0, @returnAddress()) orelse return null;
    // len_align of 0 requires the implementation to return the exact length.
    assert(rc == new_byte_count);
    // In-place: reuse the original pointer with the new length.
    const new_byte_slice = old_byte_slice.ptr[0..new_byte_count];
    return mem.bytesAsSlice(T, new_byte_slice);
}
/// Grows or shrinks `old_mem`, preserving its contents; the memory may be
/// relocated. On success the old slice is invalid; on `error.OutOfMemory`
/// it remains valid. The result has exactly the requested length.
pub fn realloc(self: Allocator, old_mem: anytype, new_n: usize) t: {
    const Slice = @typeInfo(@TypeOf(old_mem)).Pointer;
    break :t Error![]align(Slice.alignment) Slice.child;
} {
    const old_alignment = @typeInfo(@TypeOf(old_mem)).Pointer.alignment;
    return self.reallocAdvancedWithRetAddr(old_mem, old_alignment, new_n, .exact, @returnAddress());
}
/// Like `realloc`, but the result may be longer than `new_n` items if the
/// implementation can provide extra capacity for free (`.at_least`).
pub fn reallocAtLeast(self: Allocator, old_mem: anytype, new_n: usize) t: {
    const Slice = @typeInfo(@TypeOf(old_mem)).Pointer;
    break :t Error![]align(Slice.alignment) Slice.child;
} {
    const old_alignment = @typeInfo(@TypeOf(old_mem)).Pointer.alignment;
    return self.reallocAdvancedWithRetAddr(old_mem, old_alignment, new_n, .at_least, @returnAddress());
}
/// Reallocates `old_mem`, additionally allowing the result's alignment to be
/// changed to `new_alignment`, and selecting exact vs at-least length.
pub fn reallocAdvanced(
    self: Allocator,
    old_mem: anytype,
    comptime new_alignment: u29,
    new_n: usize,
    exact: Exact,
) Error![]align(new_alignment) @typeInfo(@TypeOf(old_mem)).Pointer.child {
    return self.reallocAdvancedWithRetAddr(old_mem, new_alignment, new_n, exact, @returnAddress());
}
/// Core typed reallocation path. Tries, in order: fresh allocation (empty
/// input), free (zero target), in-place shrink, in-place grow via `resize`,
/// and finally allocate-copy-free.
pub fn reallocAdvancedWithRetAddr(
    self: Allocator,
    old_mem: anytype,
    comptime new_alignment: u29,
    new_n: usize,
    exact: Exact,
    return_address: usize,
) Error![]align(new_alignment) @typeInfo(@TypeOf(old_mem)).Pointer.child {
    const Slice = @typeInfo(@TypeOf(old_mem)).Pointer;
    const T = Slice.child;
    if (old_mem.len == 0) {
        // Nothing to preserve; behaves like a plain allocation.
        return self.allocAdvancedWithRetAddr(T, new_alignment, new_n, exact, return_address);
    }
    if (new_n == 0) {
        // Shrink-to-zero frees and returns a dangling, well-aligned pointer
        // (mirrors the n == 0 case in allocAdvancedWithRetAddr).
        self.free(old_mem);
        const ptr = comptime std.mem.alignBackward(std.math.maxInt(usize), new_alignment);
        return @intToPtr([*]align(new_alignment) T, ptr)[0..0];
    }
    const old_byte_slice = mem.sliceAsBytes(old_mem);
    const byte_count = math.mul(usize, @sizeOf(T), new_n) catch return Error.OutOfMemory;
    const len_align: u29 = switch (exact) {
        .exact => 0,
        // Cast failure (element too large for u29) degrades to exact length.
        .at_least => math.cast(u29, @as(usize, @sizeOf(T))) orelse 0,
    };
    // In-place paths are only valid if the existing pointer already satisfies
    // the (possibly stricter) requested alignment at runtime.
    if (mem.isAligned(@ptrToInt(old_byte_slice.ptr), new_alignment)) {
        if (byte_count <= old_byte_slice.len) {
            // Shrinking never fails (see shrinkBytes).
            const shrunk_len = self.shrinkBytes(old_byte_slice, Slice.alignment, byte_count, len_align, return_address);
            return mem.bytesAsSlice(T, @alignCast(new_alignment, old_byte_slice.ptr[0..shrunk_len]));
        }
        if (self.rawResize(old_byte_slice, Slice.alignment, byte_count, len_align, return_address)) |resized_len| {
            // Poison the newly-exposed tail beyond the requested size.
            @memset(old_byte_slice.ptr + byte_count, undefined, resized_len - byte_count);
            return mem.bytesAsSlice(T, @alignCast(new_alignment, old_byte_slice.ptr[0..resized_len]));
        }
    }
    // NOTE(review): this branch appears unreachable — a shrink with
    // compatible alignment is always handled by the in-place path above.
    // Confirm before relying on it.
    if (byte_count <= old_byte_slice.len and new_alignment <= Slice.alignment) {
        return error.OutOfMemory;
    }
    // Fallback: allocate a new block, copy the overlapping prefix, poison and
    // free the old block. Order matters: copy before poisoning the source.
    const new_mem = try self.rawAlloc(byte_count, new_alignment, len_align, return_address);
    @memcpy(new_mem.ptr, old_byte_slice.ptr, math.min(byte_count, old_byte_slice.len));
    @memset(old_byte_slice.ptr, undefined, old_byte_slice.len);
    self.rawFree(old_byte_slice, Slice.alignment, return_address);
    return mem.bytesAsSlice(T, @alignCast(new_alignment, new_mem));
}
/// Shrinks `old_mem` to `new_n` items, keeping its alignment. Cannot fail:
/// shrink requests must always be honored by the implementation.
pub fn shrink(self: Allocator, old_mem: anytype, new_n: usize) t: {
    const Slice = @typeInfo(@TypeOf(old_mem)).Pointer;
    break :t []align(Slice.alignment) Slice.child;
} {
    const old_alignment = @typeInfo(@TypeOf(old_mem)).Pointer.alignment;
    return self.alignedShrinkWithRetAddr(old_mem, old_alignment, new_n, @returnAddress());
}
/// Shrinks `old_mem` to `new_n` items while narrowing the slice's alignment
/// to `new_alignment` (which must not exceed the original alignment).
pub fn alignedShrink(
    self: Allocator,
    old_mem: anytype,
    comptime new_alignment: u29,
    new_n: usize,
) []align(new_alignment) @typeInfo(@TypeOf(old_mem)).Pointer.child {
    return self.alignedShrinkWithRetAddr(old_mem, new_alignment, new_n, @returnAddress());
}
/// Implementation of `shrink`/`alignedShrink` with an explicit return
/// address. Requires `new_n <= old_mem.len` and
/// `new_alignment <= Slice.alignment` (both asserted). Always succeeds and
/// returns a prefix of the original memory.
pub fn alignedShrinkWithRetAddr(
    self: Allocator,
    old_mem: anytype,
    comptime new_alignment: u29,
    new_n: usize,
    return_address: usize,
) []align(new_alignment) @typeInfo(@TypeOf(old_mem)).Pointer.child {
    const Slice = @typeInfo(@TypeOf(old_mem)).Pointer;
    const T = Slice.child;
    if (new_n == old_mem.len)
        return old_mem;
    if (new_n == 0) {
        // Shrink-to-zero frees and returns a dangling, well-aligned pointer.
        self.free(old_mem);
        const ptr = comptime std.mem.alignBackward(std.math.maxInt(usize), new_alignment);
        return @intToPtr([*]align(new_alignment) T, ptr)[0..0];
    }
    assert(new_n < old_mem.len);
    assert(new_alignment <= Slice.alignment);
    const byte_count = @sizeOf(T) * new_n;
    const old_byte_slice = mem.sliceAsBytes(old_mem);
    // Poison the discarded tail before handing the shrink to the allocator.
    @memset(old_byte_slice.ptr + byte_count, undefined, old_byte_slice.len - byte_count);
    _ = self.shrinkBytes(old_byte_slice, Slice.alignment, byte_count, 0, return_address);
    return old_mem[0..new_n];
}
/// Frees a slice previously allocated by this allocator. Accounts for a
/// sentinel element if the slice type carries one, and overwrites the bytes
/// with `undefined` before releasing them.
pub fn free(self: Allocator, memory: anytype) void {
    const Slice = @typeInfo(@TypeOf(memory)).Pointer;
    const bytes = mem.sliceAsBytes(memory);
    // A sentinel-terminated slice owns one extra element past `len`.
    const bytes_len = bytes.len + if (Slice.sentinel != null) @sizeOf(Slice.child) else 0;
    // Zero-length allocations were never handed to the implementation.
    if (bytes_len == 0) return;
    // Round-trip through an integer to strip const so the bytes can be
    // poisoned and passed to rawFree.
    const non_const_ptr = @intToPtr([*]u8, @ptrToInt(bytes.ptr));
    @memset(non_const_ptr, undefined, bytes_len);
    self.rawFree(non_const_ptr[0..bytes_len], Slice.alignment, @returnAddress());
}
/// Copies `m` into newly allocated memory. Caller owns the returned slice
/// and must release it with `free`.
pub fn dupe(allocator: Allocator, comptime T: type, m: []const T) ![]T {
    const copy = try allocator.alloc(T, m.len);
    mem.copy(T, copy, m);
    return copy;
}
/// Copies `m` into newly allocated memory and appends a null terminator.
/// Caller owns the returned slice and must release it with `free`.
pub fn dupeZ(allocator: Allocator, comptime T: type, m: []const T) ![:0]T {
    const copy = try allocator.alloc(T, m.len + 1);
    mem.copy(T, copy, m);
    copy[m.len] = 0;
    return copy[0..m.len :0];
}
/// Allocates raw bytes with a runtime-known `alignment`. Prefer `alloc` or
/// `alignedAlloc` when the alignment is comptime-known. See `VTable.alloc`
/// for the `len_align` contract. The returned memory is filled with
/// `undefined`.
pub fn allocBytes(
    self: Allocator,
    /// must be power of 2
    alignment: u29,
    byte_count: usize,
    len_align: u29,
    return_address: usize,
) Error![]u8 {
    const new_mem = try self.rawAlloc(byte_count, alignment, len_align, return_address);
    // Poison new memory so use-before-init is catchable in safe builds.
    @memset(new_mem.ptr, undefined, new_mem.len);
    return new_mem;
}
test "allocBytes" {
    const number_of_bytes: usize = 10;
    // Alignment is a runtime value here, exercising the non-comptime path.
    var runtime_alignment: u29 = 2;
    {
        // len_align 0: length must be exactly what was requested.
        const new_mem = try std.testing.allocator.allocBytes(runtime_alignment, number_of_bytes, 0, @returnAddress());
        defer std.testing.allocator.free(new_mem);
        try std.testing.expectEqual(number_of_bytes, new_mem.len);
        try std.testing.expect(mem.isAligned(@ptrToInt(new_mem.ptr), runtime_alignment));
    }
    runtime_alignment = 8;
    {
        const new_mem = try std.testing.allocator.allocBytes(runtime_alignment, number_of_bytes, 0, @returnAddress());
        defer std.testing.allocator.free(new_mem);
        try std.testing.expectEqual(number_of_bytes, new_mem.len);
        try std.testing.expect(mem.isAligned(@ptrToInt(new_mem.ptr), runtime_alignment));
    }
}
test "allocBytes non-zero len_align" {
    const number_of_bytes: usize = 10;
    var runtime_alignment: u29 = 1;
    var len_align: u29 = 2;
    {
        // Non-zero len_align: length may exceed the request but must be a
        // multiple of len_align.
        const new_mem = try std.testing.allocator.allocBytes(runtime_alignment, number_of_bytes, len_align, @returnAddress());
        defer std.testing.allocator.free(new_mem);
        try std.testing.expect(new_mem.len >= number_of_bytes);
        try std.testing.expect(new_mem.len % len_align == 0);
        try std.testing.expect(mem.isAligned(@ptrToInt(new_mem.ptr), runtime_alignment));
    }
    runtime_alignment = 16;
    // len_align need not be a power of two.
    len_align = 5;
    {
        const new_mem = try std.testing.allocator.allocBytes(runtime_alignment, number_of_bytes, len_align, @returnAddress());
        defer std.testing.allocator.free(new_mem);
        try std.testing.expect(new_mem.len >= number_of_bytes);
        try std.testing.expect(new_mem.len % len_align == 0);
        try std.testing.expect(mem.isAligned(@ptrToInt(new_mem.ptr), runtime_alignment));
    }
}
/// Raw-bytes counterpart of `reallocAdvancedWithRetAddr` with runtime-known
/// alignments. Tries, in order: fresh allocation (empty input), free (zero
/// target), in-place shrink, in-place grow via `resize`, and finally
/// allocate-copy-free.
pub fn reallocBytes(
    self: Allocator,
    /// Must be the same as what was returned from most recent call to `allocBytes`, `reallocBytes`, or `resize`.
    old_mem: []u8,
    /// alignment `old_mem` was allocated with
    old_alignment: u29,
    new_byte_count: usize,
    /// alignment requested for the new block (may differ from old_alignment)
    new_alignment: u29,
    len_align: u29,
    return_address: usize,
) Error![]u8 {
    if (old_mem.len == 0) {
        // Nothing to preserve; behaves like a plain allocation.
        return self.allocBytes(new_alignment, new_byte_count, len_align, return_address);
    }
    if (new_byte_count == 0) {
        // Shrink-to-zero: poison and free.
        @memset(old_mem.ptr, undefined, old_mem.len);
        self.rawFree(old_mem, old_alignment, return_address);
        return &[0]u8{};
    }
    // In-place paths require the existing pointer to already satisfy the
    // requested alignment at runtime.
    if (mem.isAligned(@ptrToInt(old_mem.ptr), new_alignment)) {
        if (new_byte_count <= old_mem.len) {
            // Shrinking never fails (see shrinkBytes).
            const shrunk_len = self.shrinkBytes(old_mem, old_alignment, new_byte_count, len_align, return_address);
            return old_mem.ptr[0..shrunk_len];
        }
        if (self.rawResize(old_mem, old_alignment, new_byte_count, len_align, return_address)) |resized_len| {
            assert(resized_len >= new_byte_count);
            // Poison the newly-exposed tail beyond the requested size.
            @memset(old_mem.ptr + new_byte_count, undefined, resized_len - new_byte_count);
            return old_mem.ptr[0..resized_len];
        }
    }
    // NOTE(review): mirrors the branch in reallocAdvancedWithRetAddr that
    // appears unreachable given the in-place paths above — confirm.
    if (new_byte_count <= old_mem.len and new_alignment <= old_alignment) {
        return error.OutOfMemory;
    }
    // Fallback: allocate, copy the overlapping prefix, poison and free the
    // old block. Order matters: copy before poisoning the source.
    const new_mem = try self.rawAlloc(new_byte_count, new_alignment, len_align, return_address);
    @memcpy(new_mem.ptr, old_mem.ptr, math.min(new_byte_count, old_mem.len));
    @memset(old_mem.ptr, undefined, old_mem.len);
    self.rawFree(old_mem, old_alignment, return_address);
    return new_mem;
}
test "reallocBytes" {
    var new_mem: []u8 = &.{};
    var new_byte_count: usize = 16;
    var runtime_alignment: u29 = 4;
    // empty old_mem: behaves as a fresh allocation (old alignment unused).
    {
        new_mem = try std.testing.allocator.reallocBytes(new_mem, undefined, new_byte_count, runtime_alignment, 0, @returnAddress());
        try std.testing.expectEqual(new_byte_count, new_mem.len);
        try std.testing.expect(mem.isAligned(@ptrToInt(new_mem.ptr), runtime_alignment));
    }
    // shrink with the same alignment.
    new_byte_count = 14;
    {
        new_mem = try std.testing.allocator.reallocBytes(new_mem, runtime_alignment, new_byte_count, runtime_alignment, 0, @returnAddress());
        try std.testing.expectEqual(new_byte_count, new_mem.len);
        try std.testing.expect(mem.isAligned(@ptrToInt(new_mem.ptr), runtime_alignment));
    }
    // shrink while relaxing the alignment (4 -> 2).
    runtime_alignment = 2;
    new_byte_count = 12;
    {
        new_mem = try std.testing.allocator.reallocBytes(new_mem, 4, new_byte_count, runtime_alignment, 0, @returnAddress());
        try std.testing.expectEqual(new_byte_count, new_mem.len);
        try std.testing.expect(mem.isAligned(@ptrToInt(new_mem.ptr), runtime_alignment));
    }
    // grow while tightening the alignment (2 -> 8).
    runtime_alignment = 8;
    new_byte_count = 32;
    {
        new_mem = try std.testing.allocator.reallocBytes(new_mem, 2, new_byte_count, runtime_alignment, 0, @returnAddress());
        try std.testing.expectEqual(new_byte_count, new_mem.len);
        try std.testing.expect(mem.isAligned(@ptrToInt(new_mem.ptr), runtime_alignment));
    }
    // shrink to zero: frees and yields an empty slice.
    new_byte_count = 0;
    {
        new_mem = try std.testing.allocator.reallocBytes(new_mem, runtime_alignment, new_byte_count, runtime_alignment, 0, @returnAddress());
        try std.testing.expectEqual(new_byte_count, new_mem.len);
    }
}
/// Shrinks `buf` to `new_len` bytes in place and returns the new length.
/// Shrinking may not fail: the implementation's `resize` must honor any
/// size reduction (hence `orelse unreachable`).
pub fn shrinkBytes(
    self: Allocator,
    /// Must be the same as what was returned from most recent call to `allocBytes`, `reallocBytes`, or `resize`.
    buf: []u8,
    buf_align: u29,
    new_len: usize,
    len_align: u29,
    return_address: usize,
) usize {
    assert(new_len <= buf.len);
    return self.rawResize(buf, buf_align, new_len, len_align, return_address) orelse unreachable;
}