in src/mem/corealloc.h [206:288]
/**
 * Carve a fresh slab's memory into `rsize`-byte objects and push them all
 * onto the metaslab's free queue.
 *
 * Consumes `slab_size` bytes starting at `bumpptr` and advances `bumpptr`
 * past the consumed region on return (it is an in/out parameter).
 *
 * @param bumpptr   Start of the unused slab memory; updated to `slab_end`.
 * @param meta      Metaslab whose `free_queue` receives the new objects.
 * @param rsize     Rounded object size; objects are placed at offsets
 *                  0, rsize, 2*rsize, ... within the slab.
 * @param slab_size Total bytes of slab memory to consume.
 * @param entropy   Source of the free-list key and (under
 *                  SNMALLOC_CHECK_CLIENT) the random permutation samples.
 *
 * With SNMALLOC_CHECK_CLIENT defined, objects are enqueued in a uniformly
 * random cyclic order; otherwise they are enqueued in address order.
 */
static SNMALLOC_FAST_PATH void alloc_new_list(
capptr::Chunk<void>& bumpptr,
Metaslab* meta,
size_t rsize,
size_t slab_size,
LocalEntropy& entropy)
{
// The last valid object starts at offset slab_size - rsize.  The "+ 1"
// makes the strict `<` comparisons below accept exactly that offset and
// no later one.
auto slab_end = pointer_offset(bumpptr, slab_size + 1 - rsize);
auto& key = entropy.get_free_list_key();
auto& b = meta->free_queue;
#ifdef SNMALLOC_CHECK_CLIENT
// Structure to represent the temporary list elements
struct PreAllocObject
{
capptr::AllocFull<PreAllocObject> next;
};
// The following code implements Sattolo's algorithm for generating
// random cyclic permutations. This implementation is in the opposite
// direction, so that the original space does not need initialising. This
// is described as outside-in without citation on Wikipedia, appears to be
// Folklore algorithm.
// Note the wide bounds on curr relative to each of the ->next fields;
// curr is not persisted once the list is built.
// Seed the cycle: the first object points to itself (with object-level
// AllocFull bounds applied via capptr_bound).
capptr::Chunk<PreAllocObject> curr =
pointer_offset(bumpptr, 0).template as_static<PreAllocObject>();
curr->next = Aal::capptr_bound<PreAllocObject, capptr::bounds::AllocFull>(
curr, rsize);
uint16_t count = 1;
// For each subsequent object, splice it into the cycle after a uniformly
// chosen existing element (indices 0..count-1); std::exchange performs the
// swap of ->next pointers in one step.
for (curr =
pointer_offset(curr, rsize).template as_static<PreAllocObject>();
curr.as_void() < slab_end;
curr =
pointer_offset(curr, rsize).template as_static<PreAllocObject>())
{
size_t insert_index = entropy.sample(count);
curr->next = std::exchange(
pointer_offset(bumpptr, insert_index * rsize)
.template as_static<PreAllocObject>()
->next,
Aal::capptr_bound<PreAllocObject, capptr::bounds::AllocFull>(
curr, rsize));
count++;
}
// Pick entry into space, and then build linked list by traversing cycle
// to the start. Use ->next to jump from Chunk to Alloc.
auto start_index = entropy.sample(count);
auto start_ptr = pointer_offset(bumpptr, start_index * rsize)
.template as_static<PreAllocObject>()
->next;
auto curr_ptr = start_ptr;
// Walk the whole cycle once (do/while terminates when we return to the
// entry point), adding each object to the free queue in permuted order.
do
{
b.add(
// Here begins our treatment of the heap as containing Wild pointers
freelist::Object::make<capptr::bounds::AllocWild>(
capptr_to_user_address_control(curr_ptr.as_void())),
key,
entropy);
curr_ptr = curr_ptr->next;
} while (curr_ptr != start_ptr);
#else
// Non-checked build: enqueue objects in simple address order, bounding
// each to its own rsize-byte extent as it is added.
auto p = bumpptr;
do
{
b.add(
// Here begins our treatment of the heap as containing Wild pointers
freelist::Object::make<capptr::bounds::AllocWild>(
capptr_to_user_address_control(
Aal::capptr_bound<void, capptr::bounds::AllocFull>(
p.as_void(), rsize))),
key);
p = pointer_offset(p, rsize);
} while (p < slab_end);
#endif
// This code consumes everything up to slab_end.
bumpptr = slab_end;
}