Skip to content

Commit

Permalink
fix formatting
Browse files Browse the repository at this point in the history
  • Loading branch information
stevenmeker committed Nov 15, 2024
1 parent a379fc8 commit 5c92ee8
Show file tree
Hide file tree
Showing 2 changed files with 18 additions and 28 deletions.
22 changes: 9 additions & 13 deletions include/runtime/arena.h
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,10 @@ extern "C" {
// once.
class arena {
public:
arena(char id) : allocation_semispace_id(id) {}
arena(char id)
: allocation_semispace_id(id) { }
void *kore_arena_alloc(size_t requested);

// Returns the address of the first byte that belongs in the given arena.
// Returns 0 if nothing has been allocated ever in that arena.
char *arena_start_ptr() const;
Expand All @@ -27,7 +28,7 @@ class arena {
// return the total number of allocatable bytes currently in the arena in its
// active semispace.
size_t arena_size() const;

// Clears the current allocation space by setting its start back to its first
// block. It is used during garbage collection to effectively collect all of the
// arena.
Expand All @@ -38,21 +39,21 @@ class arena {
// Returns the address of the byte following the last newly allocated byte when
// the resize succeeds, returns 0 otherwise.
void *arena_resize_last_alloc(ssize_t increase);

// Returns the given arena's current collection semispace ID.
// Each arena has 2 semispace IDs: one equal to the arena ID and the other equal
// to the 1's complement of the arena ID. At any time one of these semispaces
// is used for allocation and the other is used for collection.
char get_arena_collection_semispace_id() const;

// Exchanges the current allocation and collection semispaces and clears the new
// current allocation semispace by setting its start back to its first block.
// It is used before garbage collection.
void arena_swap_and_clear();

private:
void fresh_block();

// helper function for `kore_arena_alloc`. Do not call directly.
void *do_alloc_slow(size_t requested);

Expand All @@ -74,8 +75,7 @@ using memory_block_header = struct {

// Macro to define a new arena with the given ID. Supports IDs ranging from 0 to
// 127.
#define REGISTER_ARENA(name, id) \
static thread_local arena name(id)
#define REGISTER_ARENA(name, id) static thread_local arena name(id)

#define MEM_BLOCK_START(ptr) \
((char *)(((uintptr_t)(ptr)-1) & ~(BLOCK_SIZE - 1)))
Expand All @@ -91,19 +91,16 @@ extern thread_local bool time_for_collection;

size_t get_gc_threshold();


// Returns the ID of the semispace where the given address was allocated.
// The behavior is undefined if called with an address that has not been
// allocated within an arena.
char get_arena_semispace_id_of_object(void *);


// Allocates the requested number of bytes as a contiguous region and returns a
// pointer to the first allocated byte.
// If called with requested size greater than the maximum single allocation
// size, the space is allocated in a general (not garbage collected) pool.
inline void
*arena::kore_arena_alloc(size_t requested) {
inline void *arena::kore_arena_alloc(size_t requested) {
if (block + requested > block_end) {
return do_alloc_slow(requested);
}
Expand All @@ -115,7 +112,6 @@ inline void
return result;
}


// Given a starting pointer to an address allocated in an arena and a size in
// bytes, this function returns a pointer to an address allocated in the
// same arena after size bytes from the starting pointer.
Expand Down
24 changes: 9 additions & 15 deletions runtime/alloc/arena.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -105,11 +105,10 @@ void arena::fresh_block() {
next_block = *(char **)block_start;
if (block != block_end) {
if (block_end - block == 8) {
*(uint64_t *)block
= NOT_YOUNG_OBJECT_BIT; // 8 bit sentinel value
*(uint64_t *)block = NOT_YOUNG_OBJECT_BIT; // 8 bit sentinel value
} else {
*(uint64_t *)block = block_end - block
- 8; // 16-bit or more sentinel value
*(uint64_t *)block
= block_end - block - 8; // 16-bit or more sentinel value
}
}
if (!next_block) {
Expand Down Expand Up @@ -145,8 +144,7 @@ bool gc_enabled = true;
thread_local bool gc_enabled = true;
#endif

__attribute__((noinline)) void *
arena::do_alloc_slow(size_t requested) {
__attribute__((noinline)) void *arena::do_alloc_slow(size_t requested) {
MEM_LOG(
"Block at %p too small, %zd remaining but %zd needed\n", block,
block_end - block, requested);
Expand Down Expand Up @@ -188,14 +186,11 @@ __attribute__((always_inline)) void arena::arena_clear() {
block_end = first_block ? first_block + BLOCK_SIZE : nullptr;
}

__attribute__((always_inline)) char *
arena::arena_start_ptr() const {
return first_block ? first_block + sizeof(memory_block_header)
: nullptr;
__attribute__((always_inline)) char *arena::arena_start_ptr() const {
return first_block ? first_block + sizeof(memory_block_header) : nullptr;
}

__attribute__((always_inline)) char **
arena::arena_end_ptr() {
__attribute__((always_inline)) char **arena::arena_end_ptr() {
return █
}

Expand Down Expand Up @@ -243,8 +238,7 @@ ssize_t ptr_diff(char *ptr1, char *ptr2) {
}

size_t arena::arena_size() const {
return (num_blocks > num_collection_blocks
? num_blocks
: num_collection_blocks)
return (num_blocks > num_collection_blocks ? num_blocks
: num_collection_blocks)
* (BLOCK_SIZE - sizeof(memory_block_header));
}

0 comments on commit 5c92ee8

Please sign in to comment.