/*
* Disable purging (-1) so that all dirty pages remain committed, to
* make use-after-free tolerable.
*/
unsigned arena_ind = do_arena_create(-1, -1);
int flags = MALLOCX_ARENA(arena_ind) | MALLOCX_TCACHE_NONE;
size_t stashed_after = read_tcache_stashed_bytes(
arena_ind);
/*
* An edge case is the deallocation above triggering the
* tcache GC event, in which case the stashed pointers
* may get flushed immediately, before returning from
* free(). Treat these cases as checked already.
*/
if (stashed_after <= stashed_before) {
fake_abort_called = true;
}
}
/* Flush tcache (including stashed). */
assert_d_eq(mallctl("thread.tcache.flush", NULL, NULL, NULL, 0),
0, "Unexpected tcache flush failure");
}
expect_true(junked, "Aligned ptr not junked");
if (write_after_free) {
test_write_after_free_post();
}
for (unsigned i = 0; i < n_alloc; i++) {
allocated[i] = mallocx(sizeof(void *) * 8, flags);
assert_ptr_not_null(allocated[i], "Unexpected mallocx failure");
if (i > 0) {
/* Emulate a circular list. */
*(void **)allocated[i] = allocated[i - 1];
}
}
*(void **)allocated[0] = allocated[n_alloc - 1];
expect_true(check_allocated_intact(allocated, n_alloc),
"Allocated data corrupted");
for (unsigned i = 0; i < n_alloc; i++) {
free(allocated[i]);
}
/* Read-after-free */
expect_false(check_allocated_intact(allocated, n_alloc),
"Junk-filling not detected");
test_write_after_free_pre();
for (unsigned i = 0; i < n_alloc; i++) {
allocated[i] = mallocx(sizeof(void *), flags);
assert_ptr_not_null(allocated[i], "Unexpected mallocx failure");
*(void **)allocated[i] = (void *)(uintptr_t)i;
}
/* Write-after-free */
for (unsigned i = 0; i < n_alloc; i++) {
free(allocated[i]);
*(void **)allocated[i] = NULL;
}
test_write_after_free_post();
}
TEST_END
/*
 * Entry point: run the use-after-free detection test suite through the
 * jemalloc unit-test harness and propagate its exit status.
 */
int
main(void) {
	int exit_status = test(test_read_after_free, test_write_after_free,
	    test_use_after_free_integration);
	return exit_status;
}