Example #1
0
/* Read tuning parameters from the environment.
 * GC_FREQUENCY controls how often a collection is considered (min 1).
 * MAX_LIVE_ESTIMATE seeds the max-live statistics so that no GC occurs
 * until the heap grows past the estimate; err on the low side for
 * faster runs. */
static void init_parameters(void) {
  const char *freq_str = getenv("GC_FREQUENCY");
  const char *est_str  = getenv("MAX_LIVE_ESTIMATE");
  int estimate = 0;

  if (freq_str != NULL)
    GC_FREQUENCY = atoi(freq_str);
  if (GC_FREQUENCY <= 0)
    GC_FREQUENCY = 1;  /* clamp: frequency must be positive */

  if (est_str != NULL)
    estimate = atoi(est_str);
  if (estimate <= 0)
    estimate = 0;      /* negative/garbage estimates count as "none" */

  UPDATE_STATS(heap_max_live_arr_bytes, estimate);
  UPDATE_STATS(heap_max_live_obj_bytes, estimate);
}
Example #2
0
/* Carve an 'offset'-byte chunk off the front of chunk 'p'.
 * If the remainder is at least MINSIZE, it becomes a new free chunk
 * linked at the BACK of its size bin; otherwise p keeps the slack.
 * Note each chunk's size is stored both in the header and in the last
 * word before the next chunk (boundary tag), hence the double store. */
static inline void split(mchunkptr p, size_t       offset)
{
  size_t       room = p->size - offset;
  if (room >= MINSIZE)
  {
    mbinptr   bn = size2bin(room);                  /* new bin */
    mchunkptr h  = &(bn->hd);                       /* its head */
    mchunkptr b  = h->bk;                           /* old back element */
    mchunkptr t = (mchunkptr)((char*)(p) + offset); /* remaindered chunk */
    
    /* set size (header and trailing boundary tag) */
    t->size = *((int*)((char*)(t) + room    - SIZE_SZ)) = room;

    /* link up */
    t->bk = b;  t->fd = h;  h->bk = b->fd = t;
    
    /* adjust maxbin (h == b means was empty) */
    if (h == b && bn > malloc_maxbin) malloc_maxbin = bn; 

    /* adjust size of chunk to be returned */
    p->size = *((int*)((char*)(p) + offset  - SIZE_SZ)) = offset;

    UPDATE_STATS ((++n_split, ++n_avail));
  }
}
Example #3
0
// Returns a global reference to the wrapped Java object.
// If the object has become "detached" this will re-attach
// it to ensure the Java object will not get collected.
jobject JavaObject::getJavaObject()
{
	if (useGlobalRefs) {
		ASSERT(javaObject_ != NULL);

		// We must always return a valid Java proxy reference.
		// Otherwise we risk crashing in the calling code.
		// If we are "detached" we will re-attach whenever the Java
		// proxy is requested.
		if (isDetached()) {
			attach(NULL);
		}

		return javaObject_;
	} else {
		if (isWeakRef_) {
			// Promote the reference-table entry back to strong;
			// one fewer wrapper counts as "detached".
			UPDATE_STATS(0, -1);
			jobject javaObject = ReferenceTable::clearWeakReference(refTableKey_);
			if (javaObject == NULL) {
				// The JVM already collected the object; nothing to recover.
				LOGE(TAG, "Java object reference has been invalidated.");
			}
			isWeakRef_ = false;
			// Re-arm the weak callback so we hear about the next JS collection.
			handle_.MakeWeak(this, DetachCallback);
			return javaObject;
		}
		return ReferenceTable::getReference(refTableKey_);
	}
}
Example #4
0
// Destructor: remove this wrapper from the statistics counters and
// release the JNI global reference if one is still held.
JavaObject::~JavaObject()
{
	// -1 from the total counter; also -1 from the "detached" counter
	// when the wrapper is currently detached.
	const int detachedDelta = isDetached() ? -1 : 0;
	UPDATE_STATS(-1, detachedDelta);

	if (javaObject_ == NULL) {
		return; // nothing wrapped, nothing to release
	}
	deleteGlobalRef();
}
Example #5
0
/*
 * Allocate 'length' bytes via the collector, register the appropriate
 * finalizer, occasionally force a collection, and update the heap
 * statistics.  'isArray' selects array-vs-object finalizers/counters.
 * Returns NULL if the underlying GC_malloc fails (fix: the original
 * registered a finalizer and bumped statistics even on failure).
 */
void *heapstats_alloc2(jsize length, jint isArray) {
  void *result;
  stat_t ttl;
  static int skipped=0;
  FLEX_MUTEX_DECLARE_STATIC(skipped_lock);
  /* do the allocation & register finalizer */
  result = GC_malloc(length);
  if (result == NULL)
    return NULL; /* don't register finalizers or count a failed allocation */
  GC_register_finalizer_no_order(result, isArray ?
				 heapstats_finalizer_array :
				 heapstats_finalizer_object,
				 (GC_PTR) ((ptroff_t) length), NULL, NULL);
  /* (sometimes) collect all dead objects */
  FLEX_MUTEX_LOCK(&skipped_lock);
  if (skipped ||
      0 == ((FETCH_STATS(heap_total_alloc_obj_count) +
	     FETCH_STATS(heap_total_alloc_arr_count)) % GC_FREQUENCY)) {
    /* only collect once the estimated max-live threshold would be
     * exceeded; otherwise remember we skipped so the next call
     * re-checks.  (Braces added: the else belongs to the inner if.) */
    if (isArray ?
	(FETCH_STATS(heap_current_live_arr_bytes)+length)
	> FETCH_STATS(heap_max_live_arr_bytes) :
	(FETCH_STATS(heap_current_live_obj_bytes)+length)
	> FETCH_STATS(heap_max_live_obj_bytes)) {
      GC_gcollect(); skipped = 0;
    } else {
      skipped = 1;
    }
  }
  FLEX_MUTEX_UNLOCK(&skipped_lock);
  if (isArray) {
    /* update total and current live */
    INCREMENT_STATS(heap_total_alloc_arr_count, 1);
    INCREMENT_STATS(heap_total_alloc_arr_bytes, length);
    INCREMENT_STATS(heap_current_live_arr_bytes, length);
    /* update max_live (UPDATE_STATS exposes _old_value_) */
    ttl = FETCH_STATS(heap_current_live_arr_bytes);
    UPDATE_STATS(heap_max_live_arr_bytes,
		 ttl > _old_value_ ? ttl : _old_value_);
  } else {
    /* update total and current live */
    INCREMENT_STATS(heap_total_alloc_obj_count, 1);
    INCREMENT_STATS(heap_total_alloc_obj_bytes, length);
    INCREMENT_STATS(heap_current_live_obj_bytes, length);
    /* update max_live */
    ttl = FETCH_STATS(heap_current_live_obj_bytes);
    UPDATE_STATS(heap_max_live_obj_bytes,
		 ttl > _old_value_ ? ttl : _old_value_);
  }
  /* done */
  return result;
}
/*
 * update_stats(class_id, nbytes)
 *
 * if ipgpc_gather_stats == TRUE
 * updates the statistics for the class pointed to by the input class_id
 * and the global ipgpc kstats
 * updates the last time the class was matched with the current hrtime value,
 * the number of packets, and the number of bytes with nbytes
 */
static void
update_stats(int class_id, uint_t nbytes)
{
	if (!ipgpc_gather_stats) {
		return;
	}

	/* update global ipgpc stats */
	BUMP_STATS(ipgpc_npackets);
	UPDATE_STATS(ipgpc_nbytes, nbytes);

	if (!ipgpc_cid_list[class_id].aclass.gather_stats) {
		return;
	}

	/* update per-class stats: match time, packet and byte counters */
	SET_STATS(ipgpc_cid_list[class_id].stats.last_match, gethrtime());
	BUMP_STATS(ipgpc_cid_list[class_id].stats.npackets);
	UPDATE_STATS(ipgpc_cid_list[class_id].stats.nbytes, nbytes);
}
Example #7
0
// Create an empty wrapper. A freshly constructed JavaObject holds no
// Java reference yet, so it begins life in the "detached" state.
JavaObject::JavaObject()
	: EventEmitter()
	, javaObject_(NULL)
	, refTableKey_(0)
	, isWeakRef_(false)
{
	// +1 on the total counter and +1 on the 'detached' counter.
	UPDATE_STATS(1, 1);
}
Example #8
0
// Destructor: update counters and release the Java-side reference,
// whether it is held directly (javaObject_) or via the reference table.
JavaObject::~JavaObject()
{
	// One fewer wrapper overall; one fewer detached wrapper if applicable.
	UPDATE_STATS(-1, isDetached() ? -1 : 0);

	const bool holdsReference = (javaObject_ != NULL) || (refTableKey_ > 0);
	if (holdsReference) {
		deleteGlobalRef();
	}
}
Example #9
0
/* Remove chunk 'p' from its doubly-linked free list.
 * Caller must guarantee p is currently linked into a bin. */
static inline void unlink(mchunkptr p)
{
  mchunkptr b = p->bk;  /* neighbour behind p */
  mchunkptr f = p->fd;  /* neighbour ahead of p */

  f->bk = b;  b->fd = f;

  UPDATE_STATS (--n_avail);
}
Example #10
0
// Attaches a Java object to this wrapper: leaves the "detached" state
// and takes a strong (global) reference so the JVM will not collect it.
// Passing NULL re-attaches the object already held in javaObject_
// (getJavaObject() calls attach(NULL) when detached).
void JavaObject::attach(jobject javaObject)
{
	UPDATE_STATS(0, -1); // one fewer detached wrapper

	// Let V8 notify us (DetachCallback) when the JS side is collectable.
	handle_.MakeWeak(this, DetachCallback);

	// Fix: only overwrite the wrapped object when one is supplied.
	// Unconditionally assigning wiped javaObject_ on attach(NULL),
	// leaving newGlobalRef() nothing to pin (see the guarded sibling
	// overload of attach in this file).
	if (javaObject) {
		javaObject_ = javaObject;
	}
	newGlobalRef();
}
Example #11
0
/* Return 'mem' (previously obtained from malloc/realloc) to the free
 * lists.  free(NULL) is a no-op, per the standard contract.
 * Coalescing is deferred: frontlink marks the bin dirty and malloc
 * consolidates later. */
void free(void* mem)
{
  if (mem != 0)
  {
    mchunkptr p = mem2chunk(mem);  /* recover chunk header from user ptr */
    UPDATE_STATS(do_free_stats(p));
    frontlink(p);
  }
}
// Move the wrapper into the "detached" state: the Java-side reference
// becomes weak so the JVM may collect the wrapped object.
void JavaObject::detach()
{
	UPDATE_STATS(0, 1); // one more detached wrapper

	// Keep JavaScript object around until finalization.
	handle_.ClearWeak();

	weakGlobalRef();
}
Example #13
0
// Detach the wrapper: the JS object is kept alive until finalization,
// but the Java-side reference is dropped entirely so the JVM can
// finalize the wrapped object.
void JavaObject::detach()
{
	UPDATE_STATS(0, 1); // one more detached wrapper

	// Keep JavaScript object around until finalization.
	handle_.ClearWeak();

	// Release reference to Java object so it can get finalized.
	deleteGlobalRef();
}
Example #14
0
// Construct a wrapper around 'javaObject'. The wrapper starts out
// detached; when a non-NULL object is supplied it is attached at once.
JavaObject::JavaObject(jobject javaObject)
	: EventEmitter()
	, javaObject_(NULL)
{
	UPDATE_STATS(1, 1); // +1 total, +1 detached

	if (javaObject == NULL) {
		return;
	}
	attach(javaObject);
}
Example #15
0
/*
 * realloc(mem, bytes): resize an allocation.
 * First tries to grow in place by consuming adjacent free chunks
 * (which also cleans up fragmentation around the chunk); on success
 * any surplus is split off.  Otherwise falls back to
 * malloc-copy-free.  Fix: the result of the fallback malloc is now
 * checked — the original called bcopy into a NULL destination on
 * allocation failure (undefined behavior).  On failure 0 is returned
 * and the original block remains valid, per realloc semantics.
 */
void* realloc(void* mem, size_t       bytes)
{
  if (mem == 0) 
    return malloc(bytes);
  else
  {
    size_t       nb      = request2size(bytes);
    mchunkptr    p       = mem2chunk(mem);
    size_t       oldsize = p->size;
    int          room;
    mchunkptr    nxt;

    UPDATE_STATS((++n_reallocs, requested_mem += bytes-oldsize));
    
    /* try to expand (even if already big enough), to clean up chunk */

    while (!inuse(nxt = next_chunk(p)))
    {
      UPDATE_STATS ((malloced_mem += nxt->size, ++n_consol));
      unlink(nxt);
      set_size(p, p->size + nxt->size);
    }

    room = p->size - nb;
    if (room >= 0)
    {
      split(p, nb);
      UPDATE_STATS(malloced_mem -= room);
      return chunk2mem(p);
    }
    else /* do the obvious: allocate, copy, free */
    {
      void* newmem;
      set_inuse(p);    /* don't let malloc consolidate us yet! */
      newmem = malloc(nb);
      if (newmem == 0)
        return 0;      /* allocation failed; 'mem' is left intact */
      bcopy(mem, newmem, oldsize - SIZE_SZ);
      free(mem);
      UPDATE_STATS(++n_reallocs_with_copy);
      return newmem;
    }
  }
}
// Attach a Java object: leave the "detached" state and take a strong
// global reference.  Passing NULL re-attaches the object already held.
void JavaObject::attach(jobject javaObject)
{
	// Either a new object into an empty wrapper, or NULL to re-attach.
	ASSERT((javaObject && javaObject_ == NULL) || javaObject == NULL);
	UPDATE_STATS(0, -1); // one fewer detached wrapper

	// Let V8 notify us (DetachCallback) when the JS side is collectable.
	handle_.MakeWeak(this, DetachCallback);

	if (javaObject) {
		javaObject_ = javaObject;
	}
	newGlobalRef();
}
Example #17
0
// Called when the JS side no longer strongly needs the object;
// downgrades the Java-side reference to weak.
void JavaObject::detach()
{
	// Re-arm the weak callback regardless of current state.
	handle_.MakeWeak(this, DetachCallback);

	if (isDetached()) {
		return; // already weak on the Java side; nothing more to do
	}

	UPDATE_STATS(0, 1); // one more detached wrapper

	weakGlobalRef();
}
// Wrap 'javaObject'. The wrapper starts out detached and immediately
// attaches when a non-NULL object is provided.
JavaObject::JavaObject(jobject javaObject)
	: EventEmitter()
	, javaObject_(NULL)
	, refTableKey_(0)
	, isWeakRef_(false)
{
	UPDATE_STATS(1, 1); // +1 total, +1 detached

	if (javaObject == NULL) {
		return;
	}
	attach(javaObject);
}
Example #19
0
/* Insert free chunk 'p' at the BACK of its size bin, raising
 * malloc_maxbin if the bin was previously empty.  Used for chunks
 * produced by consolidation (contrast frontlink, which also marks the
 * bin dirty). */
static inline void consollink(mchunkptr p)
{
  mbinptr   bn = size2bin(p->size);
  mchunkptr h  = &(bn->hd);  /* bin list head */
  mchunkptr b  = h->bk;      /* current last element */

  p->bk = b;  p->fd = h;  h->bk = b->fd = p;

  /* h == b means the bin was empty before this insert */
  if (h == b && bn > malloc_maxbin) malloc_maxbin = bn; 

  UPDATE_STATS(++n_avail);
}
// Destructor: update counters, release any Java-side reference, and
// dispose of the V8 persistent handle.
JavaObject::~JavaObject()
{
	// -1 total; -1 detached only when currently detached.
	UPDATE_STATS(-1, isDetached() ? -1 : 0);

	// Release the JNI reference whether held directly or via the
	// Java-side reference table.
	if (javaObject_ || refTableKey_ > 0) {
		deleteGlobalRef();
	}

	if (persistent().IsEmpty())
		return;
	assert(persistent().IsNearDeath());
	persistent().ClearWeak();
	persistent().Reset();
}
Example #21
0
/* Insert free chunk 'p' at the FRONT of its size bin and mark the bin
 * dirty so a later malloc pass will try to consolidate it. */
static inline void frontlink(mchunkptr p)
{
  mbinptr   bn = size2bin(p->size);
  mchunkptr h  = &(bn->hd);  /* bin list head */
  mchunkptr f  = h->fd;      /* current first element */

  p->bk = h;  p->fd = f;  f->bk = h->fd = p;

  /* h == f means the bin was empty before this insert */
  if (h == f && bn > malloc_maxbin) malloc_maxbin = bn;  

  bn->dirty = 1;  /* freshly freed, not yet consolidated */

  UPDATE_STATS(++n_avail);
}
Example #22
0
// Destructor: update the wrapper counters, release the Java-side
// reference, and reset the V8 persistent handle.
JavaObject::~JavaObject()
{
	// NOTE(review): other revisions of this destructor pass
	// `isDetached() ? -1 : 0` as the detached delta; here it is left
	// unchanged — confirm this is intentional.
	UPDATE_STATS(-1, 0); // remove one from total counter

	// If we have anything wrapped, get rid of it in JNI/JVM
	if (javaObject_ || refTableKey_ != 0) {
		DeleteJavaRef();
	}

	// Make sure we wipe the persistent, in case we called delete on the proxy and didn't get deleted as a result of the NativeObject WeakCallback
	if (persistent().IsEmpty())
		return;
	persistent().Reset();
}
// V8 reported that the JS wrapper is collectable. Keep the JS object
// alive but let the JVM collect the Java object by downgrading the
// Java-side reference to weak.
void JavaObject::detach()
{
	// WAIT A SECOND V8!!! DON'T KILL MY OBJECT YET! THE JVM MAY STILL WANT IT!
	persistent().ClearWeak();

	if (isDetached()) {
		return; // already weak on the Java side
	}

	// V8 says we don't need the object on the JS side
	// Let's make the object weak in the JVM now...
	UPDATE_STATS(0, 1); // one more detached wrapper
	weakGlobalRef();
}
Example #24
0
// Attaches the Java object to this native wrapper.
// This wrapper will create a global reference to the
// Java object and keep it from becoming collected by Dalvik
// until it is detached or made weak (weakGlobalRef()).
void JavaObject::attach(jobject javaObject)
{
	// Make sure we're wrapping something
	ASSERT(javaObject != NULL);
	UPDATE_STATS(0, -1); // subtract one from the 'detached' counter

	javaObject_ = javaObject;

	Ref(); // increment our reference counter to represent that we are...
	MakeJavaStrong(); // adding a strong reference to the the Java object we wrap in JVM-land
	// Now we should never truly kill the JS object unless the destructor is called for this Proxy explicitly,
	// or we get a notification from ReferenceTable that our Java object was GC'd and we therefore remove our reference!

	// NOTE(review): presumably registers a weak callback so V8 can tell
	// us when the JS side becomes collectable — see MakeJSWeak.
	MakeJSWeak();
}
// Attaches the Java object to this native wrapper.
// This wrapper will create a global reference to the
// Java object and keep it from becoming collected by Dalvik
// until it is detached or made weak (weakGlobalRef()).
void JavaObject::attach(jobject javaObject)
{
	// Either supply a new object to an empty wrapper, or NULL to re-attach.
	ASSERT((javaObject && javaObject_ == NULL) || javaObject == NULL);
	UPDATE_STATS(0, -1); // one fewer detached wrapper

	if (javaObject) {
		javaObject_ = javaObject;
	}
	// make strong ref to Java object in JVM
	newGlobalRef();

	// So let's mark this JS object as independent and weak so V8 can tell us
	// when the JS object is ready to be GCed, which is first step in it's death
	persistent().SetWeak(this, DetachCallback);
	persistent().MarkIndependent();
}
Example #26
0
// Promote the Java-side reference to strong so the JVM cannot collect
// the wrapped object. Two storage modes:
//  - useGlobalRefs: hold a JNI global ref directly in javaObject_;
//  - otherwise: store the object in the Java-side ReferenceTable and
//    keep only the table key (refTableKey_) here.
// On exit the wrapper is no longer weak and is counted as attached.
void JavaObject::MakeJavaStrong()
{
	if (useGlobalRefs) {
		ASSERT(javaObject_ != NULL);
		JNIEnv *env = JNIUtil::getJNIEnv();
		ASSERT(env != NULL);
		jobject globalRef = env->NewGlobalRef(javaObject_);
		if (isWeakRef_) { // if we're going from weak back to strong...
			env->DeleteWeakGlobalRef(javaObject_); // delete the weak ref we had
		}
		javaObject_ = globalRef;

		// When we're done we should always have an object, but no key
		ASSERT(refTableKey_ == 0);
		ASSERT(javaObject_ != NULL);
	} else {
		if (isWeakRef_) { // if we are weak, upgrade back to strong
			// Make sure we have a key
			ASSERT(refTableKey_ != 0);
			JNIEnv *env = JNIUtil::getJNIEnv();
			ASSERT(env != NULL);
			jobject stored = ReferenceTable::clearWeakReference(refTableKey_);
			if (stored == NULL) {
				// Sanity check. Did we get into a state where it was weak on Java, got GC'd but the C++ proxy didn't get deleted yet?
				LOGE(TAG, "!!! OH NO! We tried to move a weak Java object back to strong, but it's aleady been GC'd by JVM! We're in a bad state! Key: %d", refTableKey_);
			}
			env->DeleteLocalRef(stored);
		} else {
			// New entry, make sure we have no key, have an object, get a new key
			ASSERT(javaObject_ != NULL);
			ASSERT(refTableKey_ == 0); // make sure we haven't already stored something
			refTableKey_ = ReferenceTable::createReference(javaObject_); // make strong ref on Java side
			javaObject_ = NULL; // toss out the java object copy here, it's in ReferenceTable's HashMap
		}
		// When we're done we should always have a reference key, but no object
		ASSERT(refTableKey_ != 0);
		ASSERT(javaObject_ == NULL);
	}
	// no longer a weak reference
	isWeakRef_ = false;
	UPDATE_STATS(0, -1); // one less detached
}
Example #27
0
// Downgrade the Java-side reference to weak so the JVM may collect the
// wrapped object; the wrapper is then counted as "detached".
void JavaObject::MakeJavaWeak()
{
	// Make sure we're not trying to make a weak reference weak again!
	ASSERT(!isWeakRef_);
	if (useGlobalRefs) {
		JNIEnv *env = JNIUtil::getJNIEnv();
		ASSERT(env != NULL);
		ASSERT(javaObject_ != NULL);
		// Convert our global ref to a weak global ref
		jweak weakRef = env->NewWeakGlobalRef(javaObject_);
		env->DeleteGlobalRef(javaObject_);
		javaObject_ = weakRef;
	} else {
		// Reference-table mode: ask the Java side to weaken its entry.
		ASSERT(refTableKey_ != 0);
		ReferenceTable::makeWeakReference(refTableKey_);
	}

	UPDATE_STATS(0, 1); // add one to "detached" counter
	isWeakRef_ = true; // remember that our ref on Java side is weak
}
// Returns a global reference to the wrapped Java object.
// If the object has become "detached" this will re-attach
// it to ensure the Java object will not get collected.
jobject JavaObject::getJavaObject()
{
	if (useGlobalRefs) {
		ASSERT(javaObject_ != NULL);

		// We must always return a valid Java proxy reference.
		// Otherwise we risk crashing in the calling code.
		// If we are "detached" we will re-attach whenever the Java
		// proxy is requested.
		if (isDetached()) {
			attach(NULL);
		}

		return javaObject_;
	} else {
		if (isWeakRef_) { // Did JS side try to collect our object already?
			// OH SNAP, DON'T KILL OUR OBJECT YET JVM!
			// make reference strong again on Java side if we can...
			jobject javaObject = ReferenceTable::clearWeakReference(refTableKey_);
			UPDATE_STATS(0, -1); // no longer counted as detached
			if (javaObject == NULL) {
				// SHIT! Java collected it. ummmm, not much we can do here.
				// Maybe we can... Nope. It's gone. Live with it.
				LOGE(TAG, "Java object reference has been invalidated.");
			}

			isWeakRef_ = false; // not weak on Java side anymore

			// tell V8 to let us know when it thinks the JS object can be collected again
			persistent().SetWeak(this, DetachCallback);
			persistent().MarkIndependent();

			return javaObject;
		}
		return ReferenceTable::getReference(refTableKey_);
	}
}
Example #29
0
/*
 * malloc(bytes): best-fit allocator with deferred coalescing.
 * Search order:
 *   1. the bin for the padded request size, consolidating free
 *      neighbours of too-small chunks while traversing;
 *   2. any larger bin up to malloc_maxbin (first chunk is big enough);
 *   3. malloc_find_space(), which sweeps dirty bins and finally
 *      obtains memory from the system.
 * Returns 0 on allocation failure.
 */
void* malloc(size_t       bytes)
{
  size_t       nb  = request2size(bytes);  /* padded request size */
  mbinptr      b   = size2bin(nb);         /* corresponding bin */
  mchunkptr    hd  = &(b->hd);             /* head of its list */
  mchunkptr    p   = hd->fd;               /* chunk traverser */

  UPDATE_STATS((requested_mem+=bytes, ++n_malloc_bins));

  /* Try a (near) exact match in own bin */
  /* clean out unusable but consolidatable chunks in bin while traversing */

  while (p != hd)
  {
    UPDATE_STATS(++n_malloc_chunks);
    if (p->size >= nb)
      goto found;
    else    /* try to consolidate; same code as malloc_find_space */
    {
      mchunkptr nextp = p->fd;       /* save, in case of relinks */
      int consolidated = 0;          /* only unlink/relink if consolidated */
      
      mchunkptr t;

      while (!inuse(t = prev_chunk(p))) /* consolidate backward */
      {
        if (!consolidated) { consolidated = 1; unlink(p); }
        if (t == nextp) nextp = t->fd;
        unlink(t);
        set_size(t, t->size + p->size);
        p = t;
        UPDATE_STATS (++n_consol);
      }
      
      while (!inuse(t = next_chunk(p))) /* consolidate forward */
      {
        if (!consolidated) { consolidated = 1; unlink(p); }
        if (t == nextp) nextp = t->fd;
        unlink(t);
        set_size(p, p->size + t->size);
        UPDATE_STATS (++n_consol);
      }
      
      if (consolidated)
      {
        if (p->size >= nb)
        {
          /* make it safe to unlink again below */
          UPDATE_STATS(++n_avail);
          p->fd = p->bk = p;
          goto found;
        }
        else
          consollink(p);   /* re-insert the grown chunk into its new bin */
      }

      p = nextp;

    }
  }

  b->dirty = 0; /* true if got here */

  /*  Scan bigger bins for a victim */

  while (++b <= malloc_maxbin)
  {
    UPDATE_STATS(++n_malloc_bins);
    if ((p = b->hd.bk) != &(b->hd))    /* no need to check size */
      goto found;
  }

  /* Consolidate or sbrk */

  p = malloc_find_space(nb);

  if (p == 0) return 0; /* allocation failure */

 found:   /* Use what we found */

  unlink(p);
  split(p, nb);   /* give back any surplus beyond the request */
  UPDATE_STATS(do_malloc_stats(p));
  return chunk2mem(p);
}
Example #30
0
/*
 * malloc_find_space(nb): called when no bin directly holds a
 * big-enough chunk.  First lowers malloc_maxbin past empty bins, then
 * sweeps dirty bins from largest to smallest, consolidating adjacent
 * free chunks; returns a chunk of at least nb bytes, or falls back to
 * malloc_from_sys() to obtain more memory from the system.
 */
static mchunkptr malloc_find_space(size_t       nb)
{
  mbinptr b;

  /* first, re-adjust max used bin */

  while (malloc_maxbin >= FIRSTBIN && 
         malloc_maxbin->hd.bk == &(malloc_maxbin->hd))
  {
    malloc_maxbin->dirty = 0;
    --malloc_maxbin;
  }

  for (b = malloc_maxbin; b >= FIRSTBIN; --b)
  {
    UPDATE_STATS(++n_malloc_bins);

    if (b->dirty)
    {
      mchunkptr h = &(b->hd);         /* head of list */
      mchunkptr p = h->fd;            /* chunk traverser */

      while (p != h)
      {
        mchunkptr nextp = p->fd;       /* save, in case of relinks */
        int consolidated = 0;          /* only unlink/relink if consolidated */

        mchunkptr t;

        UPDATE_STATS(++n_malloc_chunks);

        while (!inuse(t = prev_chunk(p))) /* consolidate backward */
        {
          if (!consolidated) { consolidated = 1; unlink(p); }
          if (t == nextp) nextp = t->fd;
          unlink(t);
          set_size(t, t->size + p->size);
          p = t;
          UPDATE_STATS (++n_consol);
        }
        
        while (!inuse(t = next_chunk(p))) /* consolidate forward */
        {
          if (!consolidated) { consolidated = 1; unlink(p); }
          if (t == nextp) nextp = t->fd;
          unlink(t);
          set_size(p, p->size + t->size);
          UPDATE_STATS (++n_consol);
        }

       if (consolidated)
       {
          if (p->size >= nb)
          {
            /* make it safe to unlink in malloc */
            UPDATE_STATS(++n_avail);
            p->fd = p->bk = p;
            return p;
          }
          else
            consollink(p);   /* re-insert the grown chunk into its new bin */
        }

        p = nextp;

      }

      b->dirty = 0;   /* bin fully swept */

    }
  }

  /* nothing available - sbrk some more */

  return malloc_from_sys(nb);
}