Example #1
/* I hold the allocation lock.  Normally called by collector.          */
void GC_check_heap_proc(void)
{
#   ifndef SMALL_CONFIG
      /* Ignore gcc no effect warning on the following.		*/
      GC_STATIC_ASSERT((sizeof(oh) & (GRANULE_BYTES - 1)) == 0);
      /* FIXME: Should we check for twice that alignment?	*/
#   endif
    GC_apply_to_all_blocks(GC_check_heap_block, (word)0);
}
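Every example on this page uses GC_STATIC_ASSERT to check a size or alignment invariant at compile time. For readers unfamiliar with the idiom, the following is a minimal sketch of how such a compile-time assertion is commonly written in pre-C11 C, using the negative-array-size trick; the macro name and definition here are illustrative assumptions, not bdwgc's actual definition.

/* Illustrative compile-time assertion (assumed definition, not bdwgc's).  */
/* If expr is false, the array type has a negative size and the            */
/* translation unit fails to compile; if expr is true, the expression is   */
/* a no-op at run time.  expr must be an integer constant expression.      */
#define MY_STATIC_ASSERT(expr) ((void)sizeof(char[(expr) ? 1 : -1]))

struct example_header {
    void *link;
    unsigned long size;
};

void check_example_layout(void)
{
    /* Compilation fails if the header size is not a multiple of 8 bytes. */
    MY_STATIC_ASSERT(sizeof(struct example_header) % 8 == 0);
}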
Example #2
/* We hold the allocator lock.  */
GC_INNER void GC_destroy_thread_local(GC_tlfs p)
{
    int k;

    /* We currently only do this from the thread itself or from */
    /* the fork handler for a child process.                    */
    GC_STATIC_ASSERT(THREAD_FREELISTS_KINDS <= MAXOBJKINDS);
    for (k = 0; k < THREAD_FREELISTS_KINDS; ++k) {
        if (k == (int)GC_n_kinds)
            break; /* kind is not created */
        return_freelists(p -> _freelists[k], GC_obj_kinds[k].ok_freelist);
    }
#   ifdef GC_GCJ_SUPPORT
        return_freelists(p -> gcj_freelists, (void **)GC_gcjobjfreelist);
#   endif
}
Example #3
/* We hold the allocator lock.  */
GC_INNER void GC_destroy_thread_local(GC_tlfs p)
{
    int i;

    /* We currently only do this from the thread itself or from */
    /* the fork handler for a child process.                    */
    GC_STATIC_ASSERT(PREDEFINED_KINDS >= THREAD_FREELISTS_KINDS);
    for (i = 0; i < THREAD_FREELISTS_KINDS; ++i) {
        return_freelists(p -> _freelists[i], GC_freelists[i]);
    }
#   ifdef GC_GCJ_SUPPORT
        return_freelists(p -> gcj_freelists, (void **)GC_gcjobjfreelist);
#   endif
#   ifdef ENABLE_DISCLAIM
        return_freelists(p -> finalized_freelists,
                         (void **)GC_finalized_objfreelist);
#   endif
}
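Both versions of GC_destroy_thread_local delegate the real work to return_freelists, which gives each per-granule thread-local free list back to the corresponding global free list. The sketch below is a hypothetical illustration of that pattern under simplified assumptions (objects linked through their first word, a fixed number of per-granule lists, no sentinel values on the local lists); it is not bdwgc's actual return_freelists.

#define NUM_GRANULE_LISTS 32  /* assumed number of per-granule free lists */

/* Hypothetical sketch: splice each thread-local free list fl[i] onto the */
/* front of the matching global free list gfl[i], leaving fl[i] empty.    */
static void return_freelists_sketch(void **fl, void **gfl)
{
    int i;

    for (i = 0; i < NUM_GRANULE_LISTS; ++i) {
        void *q = fl[i];

        if (q == 0) continue;
        /* Walk to the last object on the thread-local list. */
        while (*(void **)q != 0) {
            q = *(void **)q;
        }
        /* Chain the global list after it, then make the local list the */
        /* new global head.                                             */
        *(void **)q = gfl[i];
        gfl[i] = fl[i];
        fl[i] = 0;
    }
}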
Example #4
/* Will fail to do anything if we are out of memory.                    */
GC_INNER void GC_new_hblk(size_t gran, int kind)
{
  struct hblk *h;       /* the new heap block */
  GC_bool clear = GC_obj_kinds[kind].ok_init;

  GC_STATIC_ASSERT((sizeof (struct hblk)) == HBLKSIZE);

  if (GC_debugging_started) clear = TRUE;

  /* Allocate a new heap block */
    h = GC_allochblk(GRANULES_TO_BYTES(gran), kind, 0);
    if (h == 0) return;

  /* Mark all objects if appropriate. */
      if (IS_UNCOLLECTABLE(kind)) GC_set_hdr_marks(HDR(h));

  /* Build the free list */
      GC_obj_kinds[kind].ok_freelist[gran] =
        GC_build_fl(h, GRANULES_TO_WORDS(gran), clear,
                    GC_obj_kinds[kind].ok_freelist[gran]);
}
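GC_new_hblk obtains a fresh heap block and then threads its contents into a free list through GC_build_fl. The sketch below shows the general free-list-building idiom on a plain memory block, under the assumptions that each object is at least a pointer wide, suitably aligned, and linked through its first word; it is a simplified illustration, not the GC_build_fl implementation.

#include <stddef.h>
#include <string.h>

/* Illustrative sketch: carve `block_bytes` bytes at `block` into objects */
/* of `obj_bytes` bytes each, link them through their first word, and     */
/* return the head of the resulting list.  `tail` becomes the link of the */
/* last object, so an existing free list can be appended.                 */
static void *build_free_list_sketch(void *block, size_t block_bytes,
                                    size_t obj_bytes, int clear, void *tail)
{
    size_t n = block_bytes / obj_bytes;   /* objects that fit in the block */
    char *base = (char *)block;
    size_t i;

    if (n == 0) return tail;
    if (clear) memset(block, 0, block_bytes);
    /* Object i points at object i + 1; the last object points at `tail`. */
    for (i = 0; i + 1 < n; ++i) {
        *(void **)(base + i * obj_bytes) = base + (i + 1) * obj_bytes;
    }
    *(void **)(base + (n - 1) * obj_bytes) = tail;
    return base;
}

A caller would then install the returned head in the per-size free list, analogous to the assignment to GC_obj_kinds[kind].ok_freelist[gran] above.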
Example #5
STATIC void GC_init_explicit_typing(void)
{
    unsigned i;

    GC_STATIC_ASSERT(sizeof(struct LeafDescriptor) % sizeof(word) == 0);
    /* Set up object kind with simple indirect descriptor. */
      GC_eobjfreelist = (ptr_t *)GC_new_free_list_inner();
      GC_explicit_kind = GC_new_kind_inner(
                            (void **)GC_eobjfreelist,
                            (WORDS_TO_BYTES((word)-1) | GC_DS_PER_OBJECT),
                            TRUE, TRUE);
                /* Descriptors are in the last word of the object. */
      GC_typed_mark_proc_index = GC_new_proc_inner(GC_typed_mark_proc);
    /* Set up object kind with array descriptor. */
      GC_array_mark_proc_index = GC_new_proc_inner(GC_array_mark_proc);
      GC_array_kind = GC_new_kind_inner(GC_new_free_list_inner(),
                            GC_MAKE_PROC(GC_array_mark_proc_index, 0),
                            FALSE, TRUE);
      GC_bm_table[0] = GC_DS_BITMAP;
      for (i = 1; i < WORDSZ/2; i++) {
          GC_bm_table[i] = (((word)-1) << (WORDSZ - i)) | GC_DS_BITMAP;
      }
}
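The final loop fills GC_bm_table with bitmap-style mark descriptors: entry i has its top i bits set, and in the GC_DS_BITMAP convention the most significant bit of the bitmap corresponds to the first word of the object, so entry i describes an object whose first i words are pointers. The snippet below only demonstrates that bit pattern; the word type, WORDSZ and the bitmap tag are local stand-ins assumed for illustration, not taken from the collector's headers.

#include <stdio.h>

typedef unsigned long word;           /* stand-in for the collector's word   */
#define WORDSZ (8 * sizeof(word))     /* stand-in for the collector's WORDSZ */
#define DS_BITMAP_TAG ((word)1)       /* stand-in bitmap-descriptor tag      */

int main(void)
{
    unsigned i;

    /* Reproduce the table entries: top i bits set, tagged as a bitmap. */
    for (i = 1; i < 5; i++) {
        word d = (((word)-1) << (WORDSZ - i)) | DS_BITMAP_TAG;
        printf("entry %u: %#lx (first %u word(s) are pointers)\n", i, d, i);
    }
    return 0;
}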
Example #6
/* I hold the allocation lock.  Normally called by collector.           */
STATIC void GC_check_heap_proc(void)
{
  GC_STATIC_ASSERT((sizeof(oh) & (GRANULE_BYTES - 1)) == 0);
  /* FIXME: Should we check for twice that alignment?   */
  GC_apply_to_all_blocks(GC_check_heap_block, 0);
}