diff --git a/arena.go b/arena.go
index 2fdf30c..eb23aaf 100644
--- a/arena.go
+++ b/arena.go
@@ -9,7 +9,8 @@ import (
 // Arena is an interface that describes a memory allocation arena.
 type Arena interface {
 	// Alloc allocates memory of the given size and returns a pointer to it.
-	Alloc(size int) unsafe.Pointer
+	// The alignment parameter specifies the alignment of the allocated memory.
+	Alloc(size, alignment uintptr) unsafe.Pointer
 
 	// Reset resets the arena's state, optionally releasing the memory.
 	// After invoking this method any pointer previously returned by Alloc becomes immediately invalid.
@@ -22,7 +23,7 @@ type Arena interface {
 func New[T any](a Arena) *T {
 	if a != nil {
 		var x T
-		if ptr := a.Alloc(int(unsafe.Sizeof(x))); ptr != nil {
+		if ptr := a.Alloc(unsafe.Sizeof(x), unsafe.Alignof(x)); ptr != nil {
 			return (*T)(ptr)
 		}
 	}
@@ -37,7 +38,7 @@ func MakeSlice[T any](a Arena, len, cap int) []T {
 	if a != nil {
 		var x T
 		bufSize := int(unsafe.Sizeof(x)) * cap
-		if ptr := (*T)(a.Alloc(bufSize)); ptr != nil {
+		if ptr := (*T)(a.Alloc(uintptr(bufSize), unsafe.Alignof(x))); ptr != nil {
 			s := unsafe.Slice(ptr, cap)
 			return s[:len]
 		}
diff --git a/concurrent_arena.go b/concurrent_arena.go
index 428d6b6..95a6459 100644
--- a/concurrent_arena.go
+++ b/concurrent_arena.go
@@ -19,9 +19,9 @@ func NewConcurrentArena(a Arena) Arena {
 }
 
 // Alloc satisfies the Arena interface.
-func (a *concurrentArena) Alloc(size int) unsafe.Pointer {
+func (a *concurrentArena) Alloc(size, alignment uintptr) unsafe.Pointer {
 	a.mtx.Lock()
-	ptr := a.a.Alloc(size)
+	ptr := a.a.Alloc(size, alignment)
 	a.mtx.Unlock()
 	return ptr
 }
diff --git a/slab_arena.go b/slab_arena.go
index fa63aaa..032260c 100644
--- a/slab_arena.go
+++ b/slab_arena.go
@@ -12,24 +12,30 @@ type slabArena struct {
 type slab struct {
 	ptr    unsafe.Pointer
-	offset int
-	size   int
+	offset uintptr
+	size   uintptr
 }
 
 func newSlab(size int) *slab {
-	return &slab{size: size}
+	return &slab{size: uintptr(size)}
 }
 
-func (s *slab) alloc(size int) (unsafe.Pointer, bool) {
+func (s *slab) alloc(size, alignment uintptr) (unsafe.Pointer, bool) {
 	if s.ptr == nil {
 		buf := make([]byte, s.size) // allocate slab buffer lazily
 		s.ptr = unsafe.Pointer(unsafe.SliceData(buf))
 	}
 
-	if s.availableBytes() < size {
+	alignOffset := uintptr(0)
+	for alignedPtr := uintptr(s.ptr) + s.offset; alignedPtr%alignment != 0; alignedPtr++ {
+		alignOffset++
+	}
+	allocSize := size + alignOffset
+
+	if s.availableBytes() < allocSize {
 		return nil, false
 	}
 
-	ptr := unsafe.Pointer(uintptr(s.ptr) + uintptr(s.offset))
-	s.offset += size
+	ptr := unsafe.Pointer(uintptr(s.ptr) + s.offset + alignOffset)
+	s.offset += allocSize
 	return ptr, true
 }
@@ -59,7 +65,7 @@ func (s *slab) zeroOutBuffer() {
 	}
 }
 
-func (s *slab) availableBytes() int {
+func (s *slab) availableBytes() uintptr {
 	return s.size - s.offset
 }
 
@@ -73,9 +79,9 @@ func NewSlabArena(slabSize, slabCount int) Arena {
 }
 
 // Alloc satisfies the Arena interface.
-func (a *slabArena) Alloc(size int) unsafe.Pointer {
+func (a *slabArena) Alloc(size, alignment uintptr) unsafe.Pointer {
 	for i := 0; i < len(a.slabs); i++ {
-		ptr, ok := a.slabs[i].alloc(size)
+		ptr, ok := a.slabs[i].alloc(size, alignment)
 		if ok {
 			return ptr
 		}
diff --git a/slab_arena_test.go b/slab_arena_test.go
index cad6aea..379c3cf 100644
--- a/slab_arena_test.go
+++ b/slab_arena_test.go
@@ -83,6 +83,16 @@ func TestSlabArenaReset(t *testing.T) {
 
 func TestSlabArenaAllocateSlice(t *testing.T) {}
 
+func TestSlabArenaMultipleTypes(t *testing.T) {
+	arena := NewSlabArena(8192, 1) // 8KB
+
+	var b = New[byte](arena)
+	var p = New[*int](arena)
+
+	require.Equal(t, byte(0), *b)
+	require.True(t, *p == nil)
+}
+
 func isSlabArenaPtr(a Arena, ptr unsafe.Pointer) bool {
 	sa := a.(*slabArena)
 	for _, s := range sa.slabs {
@@ -90,7 +100,7 @@ func isSlabArenaPtr(a Arena, ptr unsafe.Pointer) bool {
 			break
 		}
 		beginPtr := uintptr(s.ptr)
-		endPtr := uintptr(s.ptr) + uintptr(s.size)
+		endPtr := uintptr(s.ptr) + s.size
 
 		if uintptr(ptr) >= beginPtr && uintptr(ptr) < endPtr {
 			return true
@@ -114,7 +124,7 @@ func BenchmarkRuntimeNewObject(b *testing.B) {
 }
 
 func BenchmarkSlabArenaNewObject(b *testing.B) {
-	slabArena := NewSlabArena(1024*1024, 128) // 1Mb slab size (128 MB)
+	slabArena := NewSlabArena(2*1024*1024, 64) // 2MB slab size (128MB total)
 	a := newArenaAllocator[int](slabArena)
 
 	for _, objectCount := range []int{100, 1_000, 10_000, 100_000} {
@@ -131,7 +141,7 @@ func BenchmarkSlabArenaNewObject(b *testing.B) {
 }
 
 func BenchmarkConcurrentSlabArenaNewObject(b *testing.B) {
-	slabArena := NewSlabArena(1024*1024, 128) // 1Mb slab size (128 MB)
+	slabArena := NewSlabArena(2*1024*1024, 64) // 2MB slab size (128MB total)
 	a := newArenaAllocator[int](NewConcurrentArena(slabArena))
 
 	for _, objectCount := range []int{100, 1_000, 10_000, 100_000} {
@@ -162,7 +172,7 @@ func BenchmarkRuntimeMakeSlice(b *testing.B) {
 }
 
 func BenchmarkSlabArenaMakeSlice(b *testing.B) {
-	slabArena := NewSlabArena(1024*1024, 128) // 1Mb slab size (128 MB)
+	slabArena := NewSlabArena(2*1024*1024, 64) // 2MB slab size (128MB total)
 	a := newArenaAllocator[int](slabArena)
 
 	for _, objectCount := range []int{100, 1_000, 10_000, 100_000} {
@@ -179,7 +189,7 @@ func BenchmarkSlabArenaMakeSlice(b *testing.B) {
 }
 
 func BenchmarkConcurrentSlabArenaMakeSlice(b *testing.B) {
-	slabArena := NewSlabArena(1024*1024, 128) // 1Mb slab size (128 MB)
+	slabArena := NewSlabArena(2*1024*1024, 64) // 2MB slab size (128MB total)
 	a := newArenaAllocator[int](NewConcurrentArena(slabArena))
 
 	for _, objectCount := range []int{100, 1_000, 10_000, 100_000} {
diff --git a/slice_test.go b/slice_test.go
index b829531..d9e3d51 100644
--- a/slice_test.go
+++ b/slice_test.go
@@ -13,7 +13,7 @@ import (
 // It simply allocates memory using Go's built-in make function.
 type mockArena struct{}
 
-func (m *mockArena) Alloc(size int) unsafe.Pointer {
+func (m *mockArena) Alloc(size, _ uintptr) unsafe.Pointer {
 	return unsafe.Pointer(&make([]byte, size)[0])
 }
 
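Note on the padding computation in slab.alloc: the byte-by-byte counting loop in the diff is correct, but the same padding can be computed in a single step. Below is a minimal, illustrative sketch; alignPadding is a hypothetical helper, not part of this package, and it assumes a non-zero alignment (unsafe.Alignof always returns one).

```go
package main

import (
	"fmt"
	"unsafe"
)

// alignPadding reports how many padding bytes are needed so that
// base+offset becomes a multiple of alignment. For any non-zero
// alignment it yields the same value the counting loop in
// slab.alloc arrives at.
func alignPadding(base, offset, alignment uintptr) uintptr {
	return (alignment - (base+offset)%alignment) % alignment
}

func main() {
	var p *int
	// Pointers typically require 8-byte alignment on 64-bit platforms.
	fmt.Println(unsafe.Alignof(p)) // usually 8

	// Placing a *int right after a 1-byte allocation needs 7 bytes of padding.
	fmt.Println(alignPadding(0, 1, unsafe.Alignof(p)))
}
```

This byte-then-pointer sequence is the case TestSlabArenaMultipleTypes exercises: without the padding, the second allocation would hand back a misaligned *int slot.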