Skip to content

Commit

Permalink
move memory_block_header into class arena and get rid of next_superblock field
Browse files Browse the repository at this point in the history
make ptr_diff(), move_ptr(), get_arena_semispace_id_of_object(), mem_block_header() into static member functions of class arena
  • Loading branch information
stevenmeker committed Nov 16, 2024
1 parent acf641f commit 9f646a6
Show file tree
Hide file tree
Showing 4 changed files with 63 additions and 63 deletions.
74 changes: 37 additions & 37 deletions include/runtime/arena.h
Original file line number Diff line number Diff line change
Expand Up @@ -51,26 +51,49 @@ class arena {
// It is used before garbage collection.
void arena_swap_and_clear();

// Given two pointers to objects allocated in the same arena, return the number
// of bytes they are separated by within the virtual block of memory represented
// by the blocks of that arena. This difference will include blocks containing
// sentinel bytes. Undefined behavior will result if the pointers belong to
// different arenas.
static ssize_t ptr_diff(char *ptr1, char *ptr2);

// Given a starting pointer to an address allocated in an arena and a size in
// bytes, this function returns a pointer to an address allocated in the
// same arena after size bytes from the starting pointer.
//
// 1st argument: the starting pointer
// 2nd argument: the size in bytes to add to the starting pointer
// 3rd argument: the address of last allocated byte in the arena plus 1
// Return value: the address allocated in the arena after size bytes from the
// starting pointer, or 0 if this is equal to the 3rd argument.
static char *move_ptr(char *ptr, size_t size, char const *arena_end_ptr);

// Returns the ID of the semispace where the given address was allocated.
// The behavior is undefined if called with an address that has not been
// allocated within an arena.
static char get_arena_semispace_id_of_object(void *ptr);

private:
struct memory_block_header {
char *next_block;
char semispace;
};

void fresh_block();
static memory_block_header *mem_block_header(void *ptr);

// helper function for `kore_arena_alloc`. Do not call directly.
void *do_alloc_slow(size_t requested);

char *first_block;
char *block;
char *block_start;
char *block_end;
char *first_collection_block;
size_t num_blocks;
size_t num_collection_blocks;
char allocation_semispace_id;
};

using memory_block_header = struct {
char *next_block;
char *next_superblock;
char semispace;
char *first_block; // beginning of first block
char *block; // where allocations are being made in current block
char *block_start; // start of current block
char *block_end; // 1 past end of current block
char *first_collection_block; // beginning of other semispace
size_t num_blocks; // number of blocks in current semispace
size_t num_collection_blocks; // number of blocks in other semispace
char allocation_semispace_id; // id of current semispace
};

// Macro to define a new arena with the given ID. Supports IDs ranging from 0 to
Expand All @@ -91,11 +114,6 @@ extern thread_local bool time_for_collection;

size_t get_gc_threshold();

// Returns the ID of the semispace where the given address was allocated.
// The behavior is undefined if called with an address that has not been
// allocated within an arena.
char get_arena_semispace_id_of_object(void *);

// Allocates the requested number of bytes as a contiguous region and returns a
// pointer to the first allocated byte.
// If called with requested size greater than the maximum single allocation
Expand All @@ -112,24 +130,6 @@ inline void *arena::kore_arena_alloc(size_t requested) {
return result;
}

// Given a starting pointer to an address allocated in an arena and a size in
// bytes, this function returns a pointer to an address allocated in the
// same arena after size bytes from the starting pointer.
//
// 1st argument: the starting pointer
// 2nd argument: the size in bytes to add to the starting pointer
// 3rd argument: the address of last allocated byte in the arena plus 1
// Return value: the address allocated in the arena after size bytes from the
// starting pointer, or 0 if this is equal to the 3rd argument.
char *move_ptr(char *, size_t, char const *);

// Given two pointers to objects allocated in the same arena, return the number
// of bytes they are separated by within the virtual block of memory represented
// by the blocks of that arena. This difference will include blocks containing
// sentinel bytes. Undefined behavior will result if the pointers belong to
// different arenas.
ssize_t ptr_diff(char *, char *);

// Deallocates all the memory allocated for registered arenas.
void free_all_memory(void);
}
Expand Down
36 changes: 18 additions & 18 deletions runtime/alloc/arena.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,10 @@

extern size_t const VAR_BLOCK_SIZE = BLOCK_SIZE;

__attribute__((always_inline)) memory_block_header *
mem_block_header(void *ptr) {
__attribute__((always_inline)) arena::memory_block_header *
arena::mem_block_header(void *ptr) {
// NOLINTNEXTLINE(*-reinterpret-cast)
return reinterpret_cast<memory_block_header *>(
return reinterpret_cast<arena::memory_block_header *>(
((uintptr_t)(ptr)-1) & ~(BLOCK_SIZE - 1));
}

Expand All @@ -25,7 +25,7 @@ arena::get_arena_collection_semispace_id() const {
}

__attribute__((always_inline)) char
get_arena_semispace_id_of_object(void *ptr) {
arena::get_arena_semispace_id_of_object(void *ptr) {
return mem_block_header(ptr)->semispace;
}

Expand Down Expand Up @@ -97,7 +97,7 @@ void arena::fresh_block() {
if (block_start == nullptr) {
next_block = (char *)megabyte_malloc();
first_block = next_block;
auto *next_header = (memory_block_header *)next_block;
auto *next_header = (arena::memory_block_header *)next_block;
next_header->next_block = nullptr;
next_header->semispace = allocation_semispace_id;
num_blocks++;
Expand All @@ -117,7 +117,7 @@ void arena::fresh_block() {
allocation_semispace_id);
next_block = (char *)megabyte_malloc();
*(char **)block_start = next_block;
auto *next_header = (memory_block_header *)next_block;
auto *next_header = (arena::memory_block_header *)next_block;
next_header->next_block = nullptr;
next_header->semispace = allocation_semispace_id;
num_blocks++;
Expand All @@ -127,12 +127,12 @@ void arena::fresh_block() {
if (!*(char **)next_block && num_blocks >= get_gc_threshold()) {
time_for_collection = true;
}
block = next_block + sizeof(memory_block_header);
block = next_block + sizeof(arena::memory_block_header);
block_start = next_block;
block_end = next_block + BLOCK_SIZE;
MEM_LOG(
"New block at %p (remaining %zd)\n", block,
BLOCK_SIZE - sizeof(memory_block_header));
BLOCK_SIZE - sizeof(arena::memory_block_header));
}

#ifdef __MACH__
Expand All @@ -148,7 +148,7 @@ __attribute__((noinline)) void *arena::do_alloc_slow(size_t requested) {
MEM_LOG(
"Block at %p too small, %zd remaining but %zd needed\n", block,
block_end - block, requested);
if (requested > BLOCK_SIZE - sizeof(memory_block_header)) {
if (requested > BLOCK_SIZE - sizeof(arena::memory_block_header)) {
return malloc(requested);
}
fresh_block();
Expand Down Expand Up @@ -181,20 +181,20 @@ __attribute__((always_inline)) void arena::arena_swap_and_clear() {
}

__attribute__((always_inline)) void arena::arena_clear() {
block = first_block ? first_block + sizeof(memory_block_header) : nullptr;
block = first_block ? first_block + sizeof(arena::memory_block_header) : nullptr;
block_start = first_block;
block_end = first_block ? first_block + BLOCK_SIZE : nullptr;
}

__attribute__((always_inline)) char *arena::arena_start_ptr() const {
return first_block ? first_block + sizeof(memory_block_header) : nullptr;
return first_block ? first_block + sizeof(arena::memory_block_header) : nullptr;
}

__attribute__((always_inline)) char **arena::arena_end_ptr() {
return &block;
}

char *move_ptr(char *ptr, size_t size, char const *arena_end_ptr) {
char *arena::move_ptr(char *ptr, size_t size, char const *arena_end_ptr) {
char *next_ptr = ptr + size;
if (next_ptr == arena_end_ptr) {
return nullptr;
Expand All @@ -206,23 +206,23 @@ char *move_ptr(char *ptr, size_t size, char const *arena_end_ptr) {
if (!next_block) {
return nullptr;
}
return next_block + sizeof(memory_block_header);
return next_block + sizeof(arena::memory_block_header);
}

ssize_t ptr_diff(char *ptr1, char *ptr2) {
ssize_t arena::ptr_diff(char *ptr1, char *ptr2) {
if (MEM_BLOCK_START(ptr1) == MEM_BLOCK_START(ptr2)) {
return ptr1 - ptr2;
}
memory_block_header *hdr = mem_block_header(ptr2);
arena::memory_block_header *hdr = mem_block_header(ptr2);
ssize_t result = 0;
while (hdr != mem_block_header(ptr1) && hdr->next_block) {
if (ptr2) {
result += ((char *)hdr + BLOCK_SIZE) - ptr2;
ptr2 = nullptr;
} else {
result += (BLOCK_SIZE - sizeof(memory_block_header));
result += (BLOCK_SIZE - sizeof(arena::memory_block_header));
}
hdr = (memory_block_header *)hdr->next_block;
hdr = (arena::memory_block_header *)hdr->next_block;
}
if (hdr == mem_block_header(ptr1)) {
result += ptr1 - (char *)(hdr + 1);
Expand All @@ -240,5 +240,5 @@ ssize_t ptr_diff(char *ptr1, char *ptr2) {
size_t arena::arena_size() const {
return (num_blocks > num_collection_blocks ? num_blocks
: num_collection_blocks)
* (BLOCK_SIZE - sizeof(memory_block_header));
* (BLOCK_SIZE - sizeof(arena::memory_block_header));
}
12 changes: 6 additions & 6 deletions runtime/collect/collect.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -85,9 +85,9 @@ void migrate_once(block **block_ptr) {
return;
}
if (youngspace_collection_id()
== get_arena_semispace_id_of_object((void *)curr_block)
== arena::get_arena_semispace_id_of_object((void *)curr_block)
|| oldspace_collection_id()
== get_arena_semispace_id_of_object((void *)curr_block)) {
== arena::get_arena_semispace_id_of_object((void *)curr_block)) {
migrate(block_ptr);
}
}
Expand Down Expand Up @@ -255,7 +255,7 @@ static char *evacuate(char *scan_ptr, char **alloc_ptr) {
migrate_child(curr_block, layout_data->args, i, false);
}
}
return move_ptr(scan_ptr, get_size(hdr, layout_int), *alloc_ptr);
return arena::move_ptr(scan_ptr, get_size(hdr, layout_int), *alloc_ptr);
}

// Contains the decision logic for collecting the old generation.
Expand Down Expand Up @@ -325,9 +325,9 @@ void kore_collect(
// allocation pointer is invalid and does not actually point to the next
// address that would have been allocated at, according to the logic of
// kore_arena_alloc, which will have allocated a fresh memory block and put
// the allocation at the start of it. Thus, we use move_ptr with a size
// the allocation at the start of it. Thus, we use arena::move_ptr with a size
// of zero to adjust and get the true address of the allocation.
scan_ptr = move_ptr(previous_oldspace_alloc_ptr, 0, *old_alloc_ptr());
scan_ptr = arena::move_ptr(previous_oldspace_alloc_ptr, 0, *old_alloc_ptr());
} else {
scan_ptr = previous_oldspace_alloc_ptr;
}
Expand All @@ -340,7 +340,7 @@ void kore_collect(
}
#ifdef GC_DBG
ssize_t numBytesAllocedSinceLastCollection
= ptr_diff(current_alloc_ptr, last_alloc_ptr);
= arena::ptr_diff(current_alloc_ptr, last_alloc_ptr);
assert(numBytesAllocedSinceLastCollection >= 0);
fwrite(&numBytesAllocedSinceLastCollection, sizeof(ssize_t), 1, stderr);
last_alloc_ptr = *young_alloc_ptr();
Expand Down
4 changes: 2 additions & 2 deletions runtime/collect/migrate_collection.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@
void migrate_collection_node(void **node_ptr) {
string *curr_block = STRUCT_BASE(string, data, *node_ptr);
if (youngspace_collection_id()
!= get_arena_semispace_id_of_object((void *)curr_block)
!= arena::get_arena_semispace_id_of_object((void *)curr_block)
&& oldspace_collection_id()
!= get_arena_semispace_id_of_object((void *)curr_block)) {
!= arena::get_arena_semispace_id_of_object((void *)curr_block)) {
return;
}
uint64_t const hdr = curr_block->h.hdr;
Expand Down

0 comments on commit 9f646a6

Please sign in to comment.