void
gum_spinlock_free (GumSpinlock * spinlock)
{
  GumSpinlockImpl * self = (GumSpinlockImpl *) spinlock;

  gum_free_pages (self->code);
}
static void
test_thumb_relocator_fixture_teardown (TestThumbRelocatorFixture * fixture,
    gconstpointer data)
{
  gum_thumb_relocator_free (&fixture->rl);
  gum_thumb_writer_free (&fixture->tw);
  gum_free_pages (fixture->output);
}
static void
test_arm64_relocator_fixture_teardown (TestArm64RelocatorFixture * fixture,
    gconstpointer data)
{
  gum_arm64_relocator_free (&fixture->rl);
  gum_arm64_writer_free (&fixture->aw);
  gum_free_pages (fixture->output);
}
void
lowlevel_helpers_deinit (void)
{
  g_assert (clobber_test_function != NULL);

  gum_free_pages (GUM_FUNCPTR_TO_POINTER (clobber_test_function));
  clobber_test_function = NULL;
}
static void
test_relocator_fixture_teardown (TestRelocatorFixture * fixture,
    gconstpointer data)
{
  gum_x86_relocator_free (&fixture->rl);
  gum_x86_writer_free (&fixture->cw);
  gum_free_pages (fixture->output);
}
static gpointer
gum_capstone_malloc (gsize size)
{
  do
  {
    GumPool * head, * pool;
    GumBlock * block, * next_block;
    gsize aligned_block_size, pool_size, pages;
    gpointer pool_start, pool_end;

    /* First try to pop a block from an existing pool of this block size. */
    head = pools;
    pool = NULL;
    for (pool = pools; pool != NULL; pool = pool->next)
    {
      if (pool->block_size == size)
      {
        do
        {
          block = pool->free;
          if (block == NULL)
            break;
        }
        while (!g_atomic_pointer_compare_and_exchange (&pool->free, block,
            block->next));

        if (block != NULL)
          return GUM_BLOCK_TO_DATA_POINTER (block);
      }
    }

    /* No block available: allocate a fresh pool holding 100 blocks. */
    aligned_block_size = GUM_BLOCK_HEADER_SIZE + GUM_ALIGNED_SIZE (size);
    pool_size = GUM_POOL_HEADER_SIZE + (100 * aligned_block_size);
    pages = pool_size / page_size;
    if (pool_size % page_size != 0)
      pages++;

    pool_start = gum_alloc_n_pages (pages, GUM_PAGE_RW);
    pool_end = (guint8 *) pool_start + pool_size;
    pool = (GumPool *) pool_start;
    pool->block_size = size;
    block = (GumBlock *) ((guint8 *) pool_start + GUM_POOL_HEADER_SIZE);
    pool->free = block;

    /* Thread all of the new pool's blocks into its free list. */
    do
    {
      next_block = (GumBlock *) ((guint8 *) block + aligned_block_size);
      if (next_block == pool_end)
        next_block = NULL;
      block->pool = pool;
      block->next = next_block;
      block = next_block;
    }
    while (next_block != NULL);

    /*
     * Publish the pool; if another thread changed the pool list in the
     * meantime, discard ours and retry from the top.
     */
    pool->next = head;
    if (!g_atomic_pointer_compare_and_exchange (&pools, head, pool))
      gum_free_pages (pool);
  }
  while (TRUE);
}
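/*
 * Hypothetical sketch, not taken from the source: gum_capstone_malloc ()
 * above hands out blocks by popping pool->free with a lock-free CAS, so a
 * matching release path would push the block back onto the same list. The
 * function name and the assumption that GUM_BLOCK_TO_DATA_POINTER () maps
 * to block + GUM_BLOCK_HEADER_SIZE are illustrative only.
 */
static void
gum_capstone_free_sketch (gpointer mem)
{
  GumBlock * block, * prev_head;
  GumPool * pool;

  if (mem == NULL)
    return;

  /* Recover the block header preceding the data pointer. */
  block = (GumBlock *) ((guint8 *) mem - GUM_BLOCK_HEADER_SIZE);
  pool = block->pool;

  /* Push the block back onto its pool's lock-free free list. */
  do
  {
    prev_head = pool->free;
    block->next = prev_head;
  }
  while (!g_atomic_pointer_compare_and_exchange (&pool->free, prev_head,
      block));
}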
static void
test_memory_access_monitor_fixture_teardown (TestMAMonitorFixture * fixture,
    gconstpointer data)
{
  if (fixture->monitor != NULL)
    g_object_unref (fixture->monitor);

  gum_free_pages (GSIZE_TO_POINTER (fixture->range.base_address));
}
void
gum_metal_array_free (GumMetalArray * array)
{
  array->element_size = 0;
  array->capacity = 0;
  array->length = 0;

  gum_free_pages (array->data);
  array->data = NULL;
}
static void
test_stalker_fixture_teardown (TestStalkerFixture * fixture,
    gconstpointer data)
{
  g_object_unref (fixture->sink);
  g_object_unref (fixture->stalker);

  if (fixture->code != NULL)
    gum_free_pages (fixture->code);
}
static void
gum_page_pool_finalize (GObject * object)
{
  GumPagePool * self = GUM_PAGE_POOL (object);
  GumPagePoolPrivate * priv = GUM_PAGE_POOL_GET_PRIVATE (self);

  gum_free (priv->block_details);
  gum_free_pages (priv->pool);

  G_OBJECT_CLASS (gum_page_pool_parent_class)->finalize (object);
}
static void
gum_capstone_deinit (void)
{
  while (pools != NULL)
  {
    GumPool * next;

    next = pools->next;
    gum_free_pages (pools);
    pools = next;
  }
}
static guint8 *
test_stalker_fixture_dup_code (TestStalkerFixture * fixture,
    const guint8 * tpl_code, guint tpl_size)
{
  if (fixture->code != NULL)
    gum_free_pages (fixture->code);

  fixture->code = (guint8 *) gum_alloc_n_pages (
      (tpl_size / gum_query_page_size ()) + 1, GUM_PAGE_RWX);
  memcpy (fixture->code, tpl_code, tpl_size);

  return fixture->code;
}
void
gum_metal_array_ensure_capacity (GumMetalArray * self, guint capacity)
{
  guint size_in_bytes, page_size, size_in_pages;
  gpointer new_data;

  if (self->capacity >= capacity)
    return;

  /* Round the requested size up to whole pages. */
  size_in_bytes = capacity * self->element_size;
  page_size = gum_query_page_size ();
  size_in_pages = size_in_bytes / page_size;
  if (size_in_bytes % page_size != 0)
    size_in_pages++;

  /* Move the existing elements into the larger allocation. */
  new_data = gum_alloc_n_pages (size_in_pages, GUM_PAGE_RW);
  gum_memcpy (new_data, self->data, self->length * self->element_size);
  gum_free_pages (self->data);

  self->data = new_data;
  self->capacity = (size_in_pages * page_size) / self->element_size;
}
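/*
 * Hypothetical usage sketch, not taken from the source: given the fields
 * used above (data, length, capacity, element_size), appending an element
 * comes down to growing the backing pages on demand and handing back the
 * next free slot. The gum_metal_array_append_sketch name is invented here
 * for illustration.
 */
static gpointer
gum_metal_array_append_sketch (GumMetalArray * self)
{
  gpointer element;

  /* Grow to at least length + 1 elements, rounded up to whole pages. */
  gum_metal_array_ensure_capacity (self, self->length + 1);

  element = (guint8 *) self->data + (self->length * self->element_size);
  self->length++;

  return element;
}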
static void
gum_code_pages_unref (GumCodePages * self)
{
  self->ref_count--;
  if (self->ref_count == 0)
  {
    if (self->segment != NULL)
    {
      gum_code_segment_free (self->segment);
    }
    else
    {
      GumMemoryRange range;

      /* Drop the data pages and uncloak the range they occupied. */
      gum_free_pages (self->data);

      gum_query_page_allocation_range (self->data, self->size, &range);
      gum_cloak_remove_range (&range);
    }

    g_slice_free1 (self->allocator->pages_metadata_size, self);
  }
}
static void
gum_code_page_free (GumCodePage * self)
{
  gum_free_pages (self);
}