Example #1
File: Evac.c Project: DJacquard/ghc
void
evacuate_BLACKHOLE(StgClosure **p)
{
    bdescr *bd;
    uint32_t gen_no;
    StgClosure *q;
    const StgInfoTable *info;
    q = *p;

    // closure is required to be a heap-allocated BLACKHOLE
    ASSERT(HEAP_ALLOCED_GC(q));
    ASSERT(GET_CLOSURE_TAG(q) == 0);

    bd = Bdescr((P_)q);

    // blackholes can't be in a compact
    ASSERT((bd->flags & BF_COMPACT) == 0);

    // blackholes *can* be in a large object: when raiseAsync() creates an
    // AP_STACK the payload might be large enough to create a large object.
    // See #14497.
    if (bd->flags & BF_LARGE) {
        evacuate_large((P_)q);
        return;
    }
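    // already in to-space: we aren't copying it, but we still have to
    // check that it reached the desired generation (the write barrier;
    // see the same check in evacuate() below)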
    if (bd->flags & BF_EVACUATED) {
        if (bd->gen_no < gct->evac_gen_no) {
            gct->failed_to_evac = true;
            TICK_GC_FAILED_PROMOTION();
        }
        return;
    }
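    // the object is in a generation being compacted: mark it and push it
    // on the mark stack instead of copying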
    if (bd->flags & BF_MARKED) {
        if (!is_marked((P_)q,bd)) {
            mark((P_)q,bd);
            push_mark_stack((P_)q);
        }
        return;
    }
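    // not yet evacuated: bd->dest_no is the generation this block's
    // objects get copied to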
    gen_no = bd->dest_no;
    info = q->header.info;
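    // already evacuated (by us or by another GC thread): just follow the
    // forwarding pointer, as in evacuate() below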
    if (IS_FORWARDING_PTR(info))
    {
        StgClosure *e = (StgClosure*)UN_FORWARDING_PTR(info);
        *p = e;
        if (gen_no < gct->evac_gen_no) {  // optimisation
            if (Bdescr((P_)e)->gen_no < gct->evac_gen_no) {
                gct->failed_to_evac = true;
                TICK_GC_FAILED_PROMOTION();
            }
        }
        return;
    }

    ASSERT(INFO_PTR_TO_STRUCT(info)->type == BLACKHOLE);
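    // an ordinary heap BLACKHOLE: copy it; it is StgInd-sized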
    copy(p,info,q,sizeofW(StgInd),gen_no);
}
Example #2
File: Evac.c Project: 23Skidoo/ghc
REGPARM1 GNUC_ATTR_HOT void
evacuate(StgClosure **p)
{
  bdescr *bd = NULL;
  nat gen_no;
  StgClosure *q;
  const StgInfoTable *info;
  StgWord tag;

  q = *p;

loop:
  /* The tag and the pointer are split, to be merged after evacing */
  tag = GET_CLOSURE_TAG(q);
  q = UNTAG_CLOSURE(q);

  ASSERTM(LOOKS_LIKE_CLOSURE_PTR(q), "invalid closure, info=%p", q->header.info);

  if (!HEAP_ALLOCED_GC(q)) {

      if (!major_gc) return;

      info = get_itbl(q);
      switch (info->type) {

      case THUNK_STATIC:
          if (info->srt_bitmap != 0) {
              evacuate_static_object(THUNK_STATIC_LINK((StgClosure *)q), q);
          }
          return;

      case FUN_STATIC:
          if (info->srt_bitmap != 0) {
              evacuate_static_object(FUN_STATIC_LINK((StgClosure *)q), q);
          }
          return;

      case IND_STATIC:
          /* If q->saved_info != NULL, then it's a revertible CAF - it'll be
           * on the CAF list, so don't do anything with it here (we'll
           * scavenge it later).
           */
          evacuate_static_object(IND_STATIC_LINK((StgClosure *)q), q);
          return;

      case CONSTR_STATIC:
          evacuate_static_object(STATIC_LINK(info,(StgClosure *)q), q);
          return;

      case CONSTR_NOCAF_STATIC:
          /* no need to put these on the static linked list, they don't need
           * to be scavenged.
           */
          return;

      default:
          barf("evacuate(static): strange closure type %d", (int)(info->type));
      }
  }

  bd = Bdescr((P_)q);

  if ((bd->flags & (BF_LARGE | BF_MARKED | BF_EVACUATED)) != 0) {

      // pointer into to-space: just return it.  It might be a pointer
      // into a generation that we aren't collecting (> N), or it
      // might just be a pointer into to-space.  The latter doesn't
      // happen often, but allowing it makes certain things a bit
      // easier; e.g. scavenging an object is idempotent, so it's OK to
      // have an object on the mutable list multiple times.
      if (bd->flags & BF_EVACUATED) {
          // We aren't copying this object, so we have to check
          // whether it is already in the target generation.  (this is
          // the write barrier).
          if (bd->gen_no < gct->evac_gen_no) {
              gct->failed_to_evac = rtsTrue;
              TICK_GC_FAILED_PROMOTION();
          }
          return;
      }

      /* evacuate large objects by re-linking them onto a different list.
       */
      if (bd->flags & BF_LARGE) {
          evacuate_large((P_)q);
          return;
      }

      /* If the object is in a gen that we're compacting, then we
       * need to use an alternative evacuate procedure.
       */
      if (!is_marked((P_)q,bd)) {
          mark((P_)q,bd);
          push_mark_stack((P_)q);
      }
      return;
  }

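  // the generation that live objects in this block are evacuated to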
  gen_no = bd->dest_no;

  info = q->header.info;
  if (IS_FORWARDING_PTR(info))
  {
    /* Already evacuated, just return the forwarding address.
     * HOWEVER: if the requested destination generation (gct->evac_gen) is
     * older than the actual generation (because the object was
     * already evacuated to a younger generation) then we have to
     * set the gct->failed_to_evac flag to indicate that we couldn't
     * manage to promote the object to the desired generation.
     */
    /*
     * Optimisation: the check is fairly expensive, but we can often
     * shortcut it if either the required generation is 0, or the
     * current object (the EVACUATED) is in a high enough generation.
     * We know that an EVACUATED always points to an object in the
     * same or an older generation.  gen is the lowest generation that the
     * current object would be evacuated to, so we only do the full
     * check if gen is too low.
     */
      StgClosure *e = (StgClosure*)UN_FORWARDING_PTR(info);
      *p = TAG_CLOSURE(tag,e);
      if (gen_no < gct->evac_gen_no) {  // optimisation
          if (Bdescr((P_)e)->gen_no < gct->evac_gen_no) {
              gct->failed_to_evac = rtsTrue;
              TICK_GC_FAILED_PROMOTION();
          }
      }
      return;
  }

  switch (INFO_PTR_TO_STRUCT(info)->type) {

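  // WHITEHOLE: the closure is locked by another thread (e.g. it is in the
  // middle of being evacuated or updated); retry until it is unlocked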
  case WHITEHOLE:
      goto loop;

  // For ints and chars of low value, save space by replacing references to
  //    these closures with references to common, shared ones in the RTS.
  //
  // * Except when compiling into Windows DLLs which don't support cross-package
  //    data references very well.
  //
  case CONSTR_0_1:
  {
#if defined(COMPILING_WINDOWS_DLL)
      copy_tag_nolock(p,info,q,sizeofW(StgHeader)+1,gen_no,tag);
#else
      StgWord w = (StgWord)q->payload[0];
      if (info == Czh_con_info &&
          // unsigned, so always true:  (StgChar)w >= MIN_CHARLIKE &&
          (StgChar)w <= MAX_CHARLIKE) {
          *p =  TAG_CLOSURE(tag,
                            (StgClosure *)CHARLIKE_CLOSURE((StgChar)w)
                           );
      }
      else if (info == Izh_con_info &&
          (StgInt)w >= MIN_INTLIKE && (StgInt)w <= MAX_INTLIKE) {
          *p = TAG_CLOSURE(tag,
                             (StgClosure *)INTLIKE_CLOSURE((StgInt)w)
                             );
      }
      else {
          copy_tag_nolock(p,info,q,sizeofW(StgHeader)+1,gen_no,tag);
      }
#endif
      return;
  }

  case FUN_0_1:
  case FUN_1_0:
  case CONSTR_1_0:
      copy_tag_nolock(p,info,q,sizeofW(StgHeader)+1,gen_no,tag);
      return;

  case THUNK_1_0:
  case THUNK_0_1:
      copy(p,info,q,sizeofW(StgThunk)+1,gen_no);
      return;

  case THUNK_1_1:
  case THUNK_2_0:
  case THUNK_0_2:
#ifdef NO_PROMOTE_THUNKS
#error bitrotted
#endif
    copy(p,info,q,sizeofW(StgThunk)+2,gen_no);
    return;

  case FUN_1_1:
  case FUN_2_0:
  case FUN_0_2:
  case CONSTR_1_1:
  case CONSTR_2_0:
      copy_tag_nolock(p,info,q,sizeofW(StgHeader)+2,gen_no,tag);
      return;

  case CONSTR_0_2:
      copy_tag_nolock(p,info,q,sizeofW(StgHeader)+2,gen_no,tag);
      return;

  case THUNK:
      copy(p,info,q,thunk_sizeW_fromITBL(INFO_PTR_TO_STRUCT(info)),gen_no);
      return;

  case FUN:
  case CONSTR:
      copy_tag_nolock(p,info,q,sizeW_fromITBL(INFO_PTR_TO_STRUCT(info)),gen_no,tag);
      return;

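  // if the indirectee is a TSO, WHITEHOLE or blocking queue, the BLACKHOLE
  // is still under evaluation, so copy it as an StgInd; otherwise it already
  // points to a value, so shortcut the indirection and evacuate that instead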
  case BLACKHOLE:
  {
      StgClosure *r;
      const StgInfoTable *i;
      r = ((StgInd*)q)->indirectee;
      if (GET_CLOSURE_TAG(r) == 0) {
          i = r->header.info;
          if (IS_FORWARDING_PTR(i)) {
              r = (StgClosure *)UN_FORWARDING_PTR(i);
              i = r->header.info;
          }
          if (i == &stg_TSO_info
              || i == &stg_WHITEHOLE_info
              || i == &stg_BLOCKING_QUEUE_CLEAN_info
              || i == &stg_BLOCKING_QUEUE_DIRTY_info) {
              copy(p,info,q,sizeofW(StgInd),gen_no);
              return;
          }
          ASSERT(i != &stg_IND_info);
      }
      q = r;
      *p = r;
      goto loop;
  }

  case MUT_VAR_CLEAN:
  case MUT_VAR_DIRTY:
  case MVAR_CLEAN:
  case MVAR_DIRTY:
  case TVAR:
  case BLOCKING_QUEUE:
  case WEAK:
  case PRIM:
  case MUT_PRIM:
      copy(p,info,q,sizeW_fromITBL(INFO_PTR_TO_STRUCT(info)),gen_no);
      return;

  case BCO:
      copy(p,info,q,bco_sizeW((StgBCO *)q),gen_no);
      return;

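  // try to evaluate selector thunks during GC so the structure they select
  // from can be collected (avoids a well-known space leak)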
  case THUNK_SELECTOR:
      eval_thunk_selector(p, (StgSelector *)q, rtsTrue);
      return;

  case IND:
    // follow chains of indirections, don't evacuate them
    q = ((StgInd*)q)->indirectee;
    *p = q;
    goto loop;

  case RET_BCO:
  case RET_SMALL:
  case RET_BIG:
  case UPDATE_FRAME:
  case UNDERFLOW_FRAME:
  case STOP_FRAME:
  case CATCH_FRAME:
  case CATCH_STM_FRAME:
  case CATCH_RETRY_FRAME:
  case ATOMICALLY_FRAME:
    // shouldn't see these
    barf("evacuate: stack frame at %p\n", q);

  case PAP:
      copy(p,info,q,pap_sizeW((StgPAP*)q),gen_no);
      return;

  case AP:
      copy(p,info,q,ap_sizeW((StgAP*)q),gen_no);
      return;

  case AP_STACK:
      copy(p,info,q,ap_stack_sizeW((StgAP_STACK*)q),gen_no);
      return;

  case ARR_WORDS:
      // just copy the block
      copy(p,info,q,arr_words_sizeW((StgArrBytes *)q),gen_no);
      return;

  case MUT_ARR_PTRS_CLEAN:
  case MUT_ARR_PTRS_DIRTY:
  case MUT_ARR_PTRS_FROZEN:
  case MUT_ARR_PTRS_FROZEN0:
      // just copy the block
      copy(p,info,q,mut_arr_ptrs_sizeW((StgMutArrPtrs *)q),gen_no);
      return;

  case SMALL_MUT_ARR_PTRS_CLEAN:
  case SMALL_MUT_ARR_PTRS_DIRTY:
  case SMALL_MUT_ARR_PTRS_FROZEN:
  case SMALL_MUT_ARR_PTRS_FROZEN0:
      // just copy the block
      copy(p,info,q,small_mut_arr_ptrs_sizeW((StgSmallMutArrPtrs *)q),gen_no);
      return;

  case TSO:
      copy(p,info,q,sizeofW(StgTSO),gen_no);
      return;

  case STACK:
    {
      StgStack *stack = (StgStack *)q;

      /* To evacuate a small STACK, we need to adjust the stack pointer
       */
      {
          StgStack *new_stack;
          StgPtr r, s;
          rtsBool mine;

          mine = copyPart(p,(StgClosure *)stack, stack_sizeW(stack),
                          sizeofW(StgStack), gen_no);
          if (mine) {
              new_stack = (StgStack *)*p;
              move_STACK(stack, new_stack);
              for (r = stack->sp, s = new_stack->sp;
                   r < stack->stack + stack->stack_size;) {
                  *s++ = *r++;
              }
          }
          return;
      }
    }

  case TREC_CHUNK:
      copy(p,info,q,sizeofW(StgTRecChunk),gen_no);
      return;

  default:
    barf("evacuate: strange closure type %d", (int)(INFO_PTR_TO_STRUCT(info)->type));
  }

  barf("evacuate");
}