Example no. 1
inline markOop markOopDesc::prototype_for_object(oop obj) {
#ifdef ASSERT
  markOop prototype_header = obj->klass()->prototype_header();
  assert(prototype_header == prototype() || prototype_header->has_bias_pattern(), "corrupt prototype header");
#endif
  return obj->klass()->prototype_header();
}
Example no. 2
inline void PSPromotionManager::promotion_trace_event(oop new_obj, oop old_obj,
        size_t obj_size,
        uint age, bool tenured,
        const PSPromotionLAB* lab) {
    // Skip if memory allocation failed
    if (new_obj != NULL) {
        const ParallelScavengeTracer* gc_tracer = PSScavenge::gc_tracer();

        if (lab != NULL) {
            // Promotion of object through newly allocated PLAB
            if (gc_tracer->should_report_promotion_in_new_plab_event()) {
                size_t obj_bytes = obj_size * HeapWordSize;
                size_t lab_size = lab->capacity();
                gc_tracer->report_promotion_in_new_plab_event(old_obj->klass(), obj_bytes,
                        age, tenured, lab_size);
            }
        } else {
            // Promotion of object directly to heap
            if (gc_tracer->should_report_promotion_outside_plab_event()) {
                size_t obj_bytes = obj_size * HeapWordSize;
                gc_tracer->report_promotion_outside_plab_event(old_obj->klass(), obj_bytes,
                        age, tenured);
            }
        }
    }
}
Example no. 3
void G1ParScanThreadState::report_promotion_event(InCSetState const dest_state,
        oop const old, size_t word_sz, uint age,
        HeapWord * const obj_ptr,
        const AllocationContext_t context) const {
    G1PLAB* alloc_buf = _plab_allocator->alloc_buffer(dest_state, context);
    if (alloc_buf->contains(obj_ptr)) {
        _g1h->_gc_tracer_stw->report_promotion_in_new_plab_event(old->klass(), word_sz, age,
                dest_state.value() == InCSetState::Old,
                alloc_buf->word_sz());
    } else {
        _g1h->_gc_tracer_stw->report_promotion_outside_plab_event(old->klass(), word_sz, age,
                dest_state.value() == InCSetState::Old);
    }
}
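Examples no. 2 and no. 3 share one shape: if the promoted object landed inside a promotion buffer (PLAB), report the in-PLAB event with the buffer size; otherwise report the outside-PLAB event. A minimal standalone C++ sketch of that branching follows; the PromotionTracer type and its methods are invented for illustration and are not HotSpot APIs.

#include <cstddef>
#include <cstdio>

// Illustrative stand-ins for the tracer calls above; not HotSpot APIs.
struct PromotionTracer {
  void report_in_new_plab(std::size_t obj_bytes, unsigned age, bool tenured,
                          std::size_t plab_bytes) {
    std::printf("promoted %zu bytes (age %u, tenured %d) into a %zu-byte PLAB\n",
                obj_bytes, age, (int) tenured, plab_bytes);
  }
  void report_outside_plab(std::size_t obj_bytes, unsigned age, bool tenured) {
    std::printf("promoted %zu bytes (age %u, tenured %d) directly to the heap\n",
                obj_bytes, age, (int) tenured);
  }
};

// The shared pattern: a non-null buffer pointer selects the in-PLAB event.
void trace_promotion(PromotionTracer& tracer, const void* plab, std::size_t plab_bytes,
                     std::size_t obj_bytes, unsigned age, bool tenured) {
  if (plab != nullptr) {
    tracer.report_in_new_plab(obj_bytes, age, tenured, plab_bytes);
  } else {
    tracer.report_outside_plab(obj_bytes, age, tenured);
  }
}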
Example no. 4
// Verify that the oop and its klass pointer are 8-byte aligned.
void ia_64_verify_oop(const char* message, oop o) {
  uintptr_t x = (uintptr_t) o;
  if (x & 7) fatal(message);
  if (o != NULL) {
    x = (uintptr_t) o->klass();
    if (x & 7) fatal(message);
  }
}
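The expression x & 7 tests the low three address bits, i.e. 8-byte alignment. A self-contained sketch of the same check, with an is_aligned helper of our own (not a HotSpot function):

#include <cassert>
#include <cstdint>

// True if p is aligned to `alignment` bytes (alignment must be a power of two).
static bool is_aligned(const void* p, std::uintptr_t alignment) {
  return (reinterpret_cast<std::uintptr_t>(p) & (alignment - 1)) == 0;
}

int main() {
  alignas(8) char buffer[16];
  assert(is_aligned(buffer, 8));        // 8-byte-aligned pointer passes
  assert(!is_aligned(buffer + 1, 8));   // off-by-one pointer has low bits set
  return 0;
}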
Example no. 5
  void print_object(outputStream* out, oop obj) {
#ifdef PRODUCT
    // Product builds print only the class name; full object printing
    // is reserved for non-product builds.
    klassOop k = obj->klass();
    const char* class_name = instanceKlass::cast(k)->external_name();
    out->print_cr("class name %s", class_name);
#else // PRODUCT
    obj->print_on(out);
#endif // PRODUCT
  }
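The same build-type gating can be sketched in plain C++ with NDEBUG standing in for HotSpot's PRODUCT define; Thing and print_thing are invented names for illustration.

#include <cstdio>

struct Thing {
  const char* type_name;
  int value;
};

void print_thing(const Thing& t) {
#ifdef NDEBUG
  // Release build: keep output minimal, like the PRODUCT branch above.
  std::printf("type %s\n", t.type_name);
#else
  // Debug build: print the full state.
  std::printf("type %s, value %d\n", t.type_name, t.value);
#endif
}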
Example no. 6
bool validate_lookup(oop receiver, symbolOop selector) {
 LookupKey key(receiver->klass(), selector);
 if (lookupCache::lookup(&key).is_empty()) {
   std->print_cr("Lookup error");
   key.print_on(std);
   std->cr();
   return false;
 }
 return true;
}
Example no. 7
template <bool nv, typename OopClosureType>
void ObjArrayKlass::oop_oop_iterate(oop obj, OopClosureType* closure) {
  assert (obj->is_array(), "obj must be array");
  objArrayOop a = objArrayOop(obj);

  if (Devirtualizer<nv>::do_metadata(closure)) {
    Devirtualizer<nv>::do_klass(closure, obj->klass());
  }

  oop_oop_iterate_elements<nv>(a, closure);
}
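The nv template parameter is HotSpot's devirtualization trick: when the concrete closure type is known at compile time, the closure calls bind statically instead of going through the vtable. A minimal sketch of the idea under invented names:

// A closure hierarchy with one virtual hook; names here are invented.
struct Closure {
  virtual void do_value(int v) = 0;
  virtual ~Closure() {}
};

struct SumClosure : Closure {
  int sum = 0;
  void do_value(int v) override { sum += v; }
};

// When nv is true the concrete closure type is known, so the qualified call
// below binds statically (and can inline); when false it uses the vtable.
template <bool nv, typename ClosureType>
void iterate_elements(const int* elems, int n, ClosureType* cl) {
  for (int i = 0; i < n; ++i) {
    if (nv) {
      cl->ClosureType::do_value(elems[i]);  // non-virtual, statically bound
    } else {
      cl->do_value(elems[i]);               // ordinary virtual dispatch
    }
  }
}

With SumClosure c, calling iterate_elements<true>(data, n, &c) gives the compiler a direct call it can inline, while iterate_elements<false>(data, n, &c) keeps the usual virtual dispatch.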
Example no. 8
static bool is_java_lang_ref_Reference_access(oop o, jlong offset) {
  if (offset == java_lang_ref_Reference::referent_offset && o != NULL) {
    Klass* k = o->klass();
    if (InstanceKlass::cast(k)->reference_type() != REF_NONE) {
      assert(InstanceKlass::cast(k)->is_subclass_of(SystemDictionary::Reference_klass()), "sanity");
      return true;
    }
  }
  return false;
}
Example no. 9
extern "C" void bad_compiled_vtable_index(JavaThread* thread, oop receiver, int index) {
  ResourceMark rm;
  HandleMark hm;
  klassOop klass = receiver->klass();
  instanceKlass* ik = instanceKlass::cast(klass);
  klassVtable* vt = ik->vtable();
  klass->print();
  fatal(err_msg("bad compiled vtable dispatch: receiver " INTPTR_FORMAT ", "
                "index %d (vtable length %d)",
                (address)receiver, index, vt->length()));
}
Example no. 10
// Return false if the entry could not be recorded on account
// of running out of space required to create a new entry.
bool KlassInfoTable::record_instance(const oop obj) {
  klassOop      k = obj->klass();
  KlassInfoEntry* elt = lookup(k);
  // elt may be NULL if it's a new klass for which we
  // could not allocate space for a new entry in the hashtable.
  if (elt != NULL) {
    elt->set_count(elt->count() + 1);
    elt->set_words(elt->words() + obj->size());
    return true;
  } else {
    return false;
  }
}
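The pattern here is a per-class histogram: look up (or fail to allocate) an entry keyed by the klass and accumulate an instance count and total word size. A minimal sketch with std::unordered_map; all names are illustrative.

#include <cstddef>
#include <string>
#include <unordered_map>

// Per-class accumulator, analogous to KlassInfoEntry's count and words fields.
struct ClassStats {
  std::size_t count = 0;
  std::size_t words = 0;
};

class ClassHistogram {
  std::unordered_map<std::string, ClassStats> table_;
public:
  // Record one instance of class_name occupying size_in_words words.
  // Unlike the hand-rolled hashtable above, unordered_map reports allocation
  // failure by throwing, so there is no NULL-entry case to surface.
  void record_instance(const std::string& class_name, std::size_t size_in_words) {
    ClassStats& e = table_[class_name];
    e.count += 1;
    e.words += size_in_words;
  }
};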
Example no. 11
static HeuristicsResult update_heuristics(oop o, bool allow_rebias) {
  markOop mark = o->mark();
  if (!mark->has_bias_pattern()) {
    return HR_NOT_BIASED;
  }

  // Heuristics to attempt to throttle the number of revocations.
  // Stages:
  // 1. Revoke the biases of all objects in the heap of this type,
  //    but allow rebiasing of those objects if unlocked.
  // 2. Revoke the biases of all objects in the heap of this type
  //    and don't allow rebiasing of these objects. Disable
  //    allocation of objects of that type with the bias bit set.
  Klass* k = o->klass();
  jlong cur_time = os::javaTimeMillis();
  jlong last_bulk_revocation_time = k->last_biased_lock_bulk_revocation_time();
  int revocation_count = k->biased_lock_revocation_count();
  if ((revocation_count >= BiasedLockingBulkRebiasThreshold) &&
      (revocation_count <  BiasedLockingBulkRevokeThreshold) &&
      (last_bulk_revocation_time != 0) &&
      (cur_time - last_bulk_revocation_time >= BiasedLockingDecayTime)) {
    // This is the first revocation we've seen in a while of an
    // object of this type since the last time we performed a bulk
    // rebiasing operation. The application is allocating objects in
    // bulk which are biased toward a thread and then handing them
    // off to another thread. We can cope with this allocation
    // pattern via the bulk rebiasing mechanism so we reset the
    // klass's revocation count rather than allow it to increase
    // monotonically. If we see the need to perform another bulk
    // rebias operation later, we will, and if subsequently we see
    // many more revocation operations in a short period of time we
    // will completely disable biasing for this type.
    k->set_biased_lock_revocation_count(0);
    revocation_count = 0;
  }

  // Make revocation count saturate just beyond BiasedLockingBulkRevokeThreshold
  if (revocation_count <= BiasedLockingBulkRevokeThreshold) {
    revocation_count = k->atomic_incr_biased_lock_revocation_count();
  }

  if (revocation_count == BiasedLockingBulkRevokeThreshold) {
    return HR_BULK_REVOKE;
  }

  if (revocation_count == BiasedLockingBulkRebiasThreshold) {
    return HR_BULK_REBIAS;
  }

  return HR_SINGLE_REVOKE;
}
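The heuristic is a saturating counter with time-based decay: the count resets if enough time has passed since the last bulk operation, saturates just past the revoke threshold, and the two thresholds select the bulk-rebias and bulk-revoke stages. A standalone sketch of the same shape; the thresholds and names below are illustrative stand-ins, not the actual HotSpot flags.

#include <chrono>

enum class Heuristic { SINGLE_REVOKE, BULK_REBIAS, BULK_REVOKE };

// Per-type state, standing in for the klass fields used above.
struct TypeRecord {
  int revocations = 0;
  std::chrono::steady_clock::time_point last_bulk{};  // default => never
};

// Illustrative thresholds, standing in for the BiasedLocking* flags.
constexpr int kRebiasThreshold = 20;
constexpr int kRevokeThreshold = 40;
constexpr std::chrono::seconds kDecay(25);

Heuristic update_heuristics(TypeRecord& rec) {
  const auto now = std::chrono::steady_clock::now();
  // Decay: a revocation long after the last bulk operation restarts the count.
  if (rec.revocations >= kRebiasThreshold &&
      rec.revocations <  kRevokeThreshold &&
      rec.last_bulk != std::chrono::steady_clock::time_point{} &&
      now - rec.last_bulk >= kDecay) {
    rec.revocations = 0;
  }
  // Saturate just beyond the revoke threshold.
  if (rec.revocations <= kRevokeThreshold) {
    ++rec.revocations;
  }
  if (rec.revocations == kRevokeThreshold) return Heuristic::BULK_REVOKE;
  if (rec.revocations == kRebiasThreshold) return Heuristic::BULK_REBIAS;
  return Heuristic::SINGLE_REVOKE;
}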
Example no. 12
oop InterpretedIC::does_not_understand(oop receiver, InterpretedIC* ic, frame* f) {
  memOop msg;
  symbolOop sel;
  {
    // message not understood...
    BlockScavenge bs; // make sure that no scavenge happens
    klassOop msgKlass = klassOop(Universe::find_global("Message"));
    oop obj = msgKlass->klass_part()->allocateObject();
    assert(obj->is_mem(), "just checkin'...");
    msg = memOop(obj);
    int nofArgs = ic->selector()->number_of_arguments();
    objArrayOop args = oopFactory::new_objArray(nofArgs);
    for (int i = 1; i <= nofArgs; i++) {
      args->obj_at_put(i, f->expr(nofArgs - i));
    }
    // for now: assume instance variables are there...
    // later: should check this or use a VM interface:
    // msg->set_receiver(recv);
    // msg->set_selector(ic->selector());
    // msg->set_arguments(args);
    msg->raw_at_put(2, receiver);
    msg->raw_at_put(3, ic->selector());
    msg->raw_at_put(4, args);
    sel = oopFactory::new_symbol("doesNotUnderstand:");
    if (interpreter_normal_lookup(receiver->klass(), sel).is_empty()) {
      // doesNotUnderstand: not found ==> process error
      { ResourceMark rm;
      std->print("LOOKUP ERROR\n");
      sel->print_value(); std->print(" not found\n");
      }
      if (DeltaProcess::active()->is_scheduler()) {
        DeltaProcess::active()->trace_stack();
        fatal("lookup error in scheduler");
      } else {
        DeltaProcess::active()->suspend(::lookup_error);
      }
      ShouldNotReachHere();
    }
  }
  // return marked result of doesNotUnderstand: message
  return Delta::call(receiver, sel, msg);

  // Solution: use an nmethod-like stub routine called from
  // within a (possibly one-element) PIC, in order to keep
  // the method selector in the IC. That stub does the
  // Delta::call and pops the right number of arguments,
  // which it takes from the selector (fetching the argument
  // count from the selector can be optimized).
}
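This is Smalltalk-style doesNotUnderstand: dispatch: when selector lookup fails, the runtime reifies the message (receiver, selector, arguments) and sends doesNotUnderstand: instead, failing hard only if that fallback is missing too. A minimal sketch of the pattern in plain C++; the Receiver and Message types are invented for illustration.

#include <functional>
#include <iostream>
#include <map>
#include <string>
#include <vector>

// A reified message: selector plus arguments, like the Message object above.
struct Message {
  std::string selector;
  std::vector<std::string> args;
};

struct Receiver {
  std::map<std::string, std::function<void(const Message&)>> methods;

  void send(const Message& msg) {
    auto it = methods.find(msg.selector);
    if (it != methods.end()) {
      it->second(msg);                    // normal dispatch
      return;
    }
    auto dnu = methods.find("doesNotUnderstand:");
    if (dnu != methods.end()) {
      dnu->second(msg);                   // fallback gets the reified message
    } else {
      std::cerr << "LOOKUP ERROR: " << msg.selector << " not found\n";
    }
  }
};

int main() {
  Receiver r;
  r.methods["doesNotUnderstand:"] = [](const Message& m) {
    std::cout << "doesNotUnderstand: " << m.selector << "\n";
  };
  r.send({"frobnicate:", {"42"}});        // handled by the fallback
  return 0;
}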
Example no. 13
template <bool nv, typename OopClosureType>
int ObjArrayKlass::oop_oop_iterate(oop obj, OopClosureType* closure) {
  assert (obj->is_array(), "obj must be array");
  objArrayOop a = objArrayOop(obj);

  // Get size before changing pointers.
  // Don't call size() or oop_size() since that is a virtual call.
  int size = a->object_size();
  if (Devirtualizer<nv>::do_metadata(closure)) {
    Devirtualizer<nv>::do_klass(closure, obj->klass());
  }

  oop_oop_iterate_elements<nv>(a, closure);

  return size;
}
Example no. 14
  // Return true if oop represents an object that is "visible"
  // to the java world.
  static inline bool visible_oop(oop o) {
    // the sentinel for deleted handles isn't visible
    if (o == JNIHandles::deleted_handle()) {
      return false;
    }

    // instance
    if (o->is_instance()) {
      // instance objects are visible
      if (o->klass() != SystemDictionary::Class_klass()) {
        return true;
      }
      if (java_lang_Class::is_primitive(o)) {
        return true;
      }
      // java.lang.Classes are visible
      Klass* k = java_lang_Class::as_Klass(o);
      if (k->is_klass()) {
        // if it's a class for an object, an object array, or
        // primitive (type) array then it's visible.
        if (k->is_instance_klass()) {
          return true;
        }
        if (k->is_objArray_klass()) {
          return true;
        }
        if (k->is_typeArray_klass()) {
          return true;
        }
      }
      return false;
    }
    // object arrays are visible if they aren't system object arrays
    if (o->is_objArray()) {
        return true;
    }
    // type arrays are visible
    if (o->is_typeArray()) {
      return true;
    }
    // everything else (Method*s, ...) aren't visible
    return false;
  };   // end of visible_oop()
Example no. 15
  void do_object(oop obj) {
    obj->oop_iterate_header(&resolve);
    obj->oop_iterate(&resolve);

    assert(obj->klass()->is_shared(), "Klass not pointing into shared space.");

    // If the object is a Java object or class which might (in the
    // future) contain a reference to a young gen object, add it to the
    // list.

    if (obj->is_klass() || obj->is_instance()) {
      if (obj->is_klass() ||
          obj->is_a(SystemDictionary::Class_klass()) ||
          obj->is_a(SystemDictionary::Throwable_klass())) {
        // Do nothing
      }
      else if (obj->is_a(SystemDictionary::String_klass())) {
        // immutable objects.
      } else {
        // someone added an object we hadn't accounted for.
        ShouldNotReachHere();
      }
    }
  }
Example no. 16
 // Testing
 static bool is_instance(oop obj) {
   return obj != NULL && obj->klass() == SystemDictionary::Class_klass();
 }
Example no. 17
PRIM_DECL_2(behaviorPrimitives::is_class_of, oop receiver, oop obj) {
  PROLOGUE_2("is_class_of", receiver, obj);
  ASSERT_RECEIVER;
  return obj->klass() == receiver ? trueObj : falseObj;
}
Example no. 18
static BiasedLocking::Condition revoke_bias(oop obj, bool allow_rebias, bool is_bulk, JavaThread* requesting_thread) {
  markOop mark = obj->mark();
  if (!mark->has_bias_pattern()) {
    if (TraceBiasedLocking) {
      ResourceMark rm;
      tty->print_cr("  (Skipping revocation of object of type %s because it's no longer biased)",
                    obj->klass()->external_name());
    }
    return BiasedLocking::NOT_BIASED;
  }

  uint age = mark->age();
  markOop   biased_prototype = markOopDesc::biased_locking_prototype()->set_age(age);
  markOop unbiased_prototype = markOopDesc::prototype()->set_age(age);

  if (TraceBiasedLocking && (Verbose || !is_bulk)) {
    ResourceMark rm;
    tty->print_cr("Revoking bias of object " INTPTR_FORMAT " , mark " INTPTR_FORMAT " , type %s , prototype header " INTPTR_FORMAT " , allow rebias %d , requesting thread " INTPTR_FORMAT,
                  p2i((void *)obj), (intptr_t) mark, obj->klass()->external_name(), (intptr_t) obj->klass()->prototype_header(), (allow_rebias ? 1 : 0), (intptr_t) requesting_thread);
  }

  JavaThread* biased_thread = mark->biased_locker();
  if (biased_thread == NULL) {
    // Object is anonymously biased. We can get here if, for
    // example, we revoke the bias due to an identity hash code
    // being computed for an object.
    if (!allow_rebias) {
      obj->set_mark(unbiased_prototype);
    }
    if (TraceBiasedLocking && (Verbose || !is_bulk)) {
      tty->print_cr("  Revoked bias of anonymously-biased object");
    }
    return BiasedLocking::BIAS_REVOKED;
  }

  // Handle case where the thread toward which the object was biased has exited
  bool thread_is_alive = false;
  if (requesting_thread == biased_thread) {
    thread_is_alive = true;
  } else {
    for (JavaThread* cur_thread = Threads::first(); cur_thread != NULL; cur_thread = cur_thread->next()) {
      if (cur_thread == biased_thread) {
        thread_is_alive = true;
        break;
      }
    }
  }
  if (!thread_is_alive) {
    if (allow_rebias) {
      obj->set_mark(biased_prototype);
    } else {
      obj->set_mark(unbiased_prototype);
    }
    if (TraceBiasedLocking && (Verbose || !is_bulk)) {
      tty->print_cr("  Revoked bias of object biased toward dead thread");
    }
    return BiasedLocking::BIAS_REVOKED;
  }

  // Thread owning bias is alive.
  // Check to see whether it currently owns the lock and, if so,
  // write down the needed displaced headers to the thread's stack.
  // Otherwise, restore the object's header either to the unlocked
  // or unbiased state.
  GrowableArray<MonitorInfo*>* cached_monitor_info = get_or_compute_monitor_info(biased_thread);
  BasicLock* highest_lock = NULL;
  for (int i = 0; i < cached_monitor_info->length(); i++) {
    MonitorInfo* mon_info = cached_monitor_info->at(i);
    if (mon_info->owner() == obj) {
      if (TraceBiasedLocking && Verbose) {
        tty->print_cr("   mon_info->owner (" PTR_FORMAT ") == obj (" PTR_FORMAT ")",
                      p2i((void *) mon_info->owner()),
                      p2i((void *) obj));
      }
      // Assume recursive case and fix up highest lock later
      markOop mark = markOopDesc::encode((BasicLock*) NULL);
      highest_lock = mon_info->lock();
      highest_lock->set_displaced_header(mark);
    } else {
      if (TraceBiasedLocking && Verbose) {
        tty->print_cr("   mon_info->owner (" PTR_FORMAT ") != obj (" PTR_FORMAT ")",
                      p2i((void *) mon_info->owner()),
                      p2i((void *) obj));
      }
    }
  }
  if (highest_lock != NULL) {
    // Fix up highest lock to contain displaced header and point
    // object at it
    highest_lock->set_displaced_header(unbiased_prototype);
    // Reset object header to point to displaced mark.
    // Must release storing the lock address for platforms without TSO
    // ordering (e.g. ppc).
    obj->release_set_mark(markOopDesc::encode(highest_lock));
    assert(!obj->mark()->has_bias_pattern(), "illegal mark state: stack lock used bias bit");
    if (TraceBiasedLocking && (Verbose || !is_bulk)) {
      tty->print_cr("  Revoked bias of currently-locked object");
    }
  } else {
    if (TraceBiasedLocking && (Verbose || !is_bulk)) {
      tty->print_cr("  Revoked bias of currently-unlocked object");
    }
    if (allow_rebias) {
      obj->set_mark(biased_prototype);
    } else {
      // Store the unlocked value into the object's header.
      obj->set_mark(unbiased_prototype);
    }
  }

  return BiasedLocking::BIAS_REVOKED;
}
Example no. 19
void KlassInfoTable::record_instance(const oop obj) {
  klassOop      k = obj->klass();
  KlassInfoEntry* elt = lookup(k);
  // Note: unlike Example no. 10, this version assumes lookup() always
  // succeeds; a NULL entry here would crash.
  elt->set_count(elt->count() + 1);
  elt->set_words(elt->words() + obj->size());
}
Example no. 20
static BiasedLocking::Condition bulk_revoke_or_rebias_at_safepoint(oop o,
                                                                   bool bulk_rebias,
                                                                   bool attempt_rebias_of_object,
                                                                   JavaThread* requesting_thread) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be done at safepoint");

  if (TraceBiasedLocking) {
    tty->print_cr("* Beginning bulk revocation (kind == %s) because of object "
                  INTPTR_FORMAT " , mark " INTPTR_FORMAT " , type %s",
                  (bulk_rebias ? "rebias" : "revoke"),
                  p2i((void *) o), (intptr_t) o->mark(), o->klass()->external_name());
  }

  jlong cur_time = os::javaTimeMillis();
  o->klass()->set_last_biased_lock_bulk_revocation_time(cur_time);


  Klass* k_o = o->klass();
  Klass* klass = k_o;

  if (bulk_rebias) {
    // Use the epoch in the klass of the object to implicitly revoke
    // all biases of objects of this data type and force them to be
    // reacquired. However, we also need to walk the stacks of all
    // threads and update the headers of lightweight locked objects
    // with biases to have the current epoch.

    // If the prototype header doesn't have the bias pattern, don't
    // try to update the epoch -- assume another VM operation came in
    // and reset the header to the unbiased state, which will
    // implicitly cause all existing biases to be revoked
    if (klass->prototype_header()->has_bias_pattern()) {
      int prev_epoch = klass->prototype_header()->bias_epoch();
      klass->set_prototype_header(klass->prototype_header()->incr_bias_epoch());
      int cur_epoch = klass->prototype_header()->bias_epoch();

      // Now walk all threads' stacks and adjust epochs of any biased
      // and locked objects of this data type we encounter
      for (JavaThread* thr = Threads::first(); thr != NULL; thr = thr->next()) {
        GrowableArray<MonitorInfo*>* cached_monitor_info = get_or_compute_monitor_info(thr);
        for (int i = 0; i < cached_monitor_info->length(); i++) {
          MonitorInfo* mon_info = cached_monitor_info->at(i);
          oop owner = mon_info->owner();
          markOop mark = owner->mark();
          if ((owner->klass() == k_o) && mark->has_bias_pattern()) {
            // We might have encountered this object already in the case of recursive locking
            assert(mark->bias_epoch() == prev_epoch || mark->bias_epoch() == cur_epoch, "error in bias epoch adjustment");
            owner->set_mark(mark->set_bias_epoch(cur_epoch));
          }
        }
      }
    }

    // At this point we're done. All we have to do is potentially
    // adjust the header of the given object to revoke its bias.
    revoke_bias(o, attempt_rebias_of_object && klass->prototype_header()->has_bias_pattern(), true, requesting_thread);
  } else {
    if (TraceBiasedLocking) {
      ResourceMark rm;
      tty->print_cr("* Disabling biased locking for type %s", klass->external_name());
    }

    // Disable biased locking for this data type. Not only will this
    // cause future instances to not be biased, but existing biased
    // instances will notice that this implicitly caused their biases
    // to be revoked.
    klass->set_prototype_header(markOopDesc::prototype());

    // Now walk all threads' stacks and forcibly revoke the biases of
    // any locked and biased objects of this data type we encounter.
    for (JavaThread* thr = Threads::first(); thr != NULL; thr = thr->next()) {
      GrowableArray<MonitorInfo*>* cached_monitor_info = get_or_compute_monitor_info(thr);
      for (int i = 0; i < cached_monitor_info->length(); i++) {
        MonitorInfo* mon_info = cached_monitor_info->at(i);
        oop owner = mon_info->owner();
        markOop mark = owner->mark();
        if ((owner->klass() == k_o) && mark->has_bias_pattern()) {
          revoke_bias(owner, false, true, requesting_thread);
        }
      }
    }

    // Must force the bias of the passed object to be forcibly revoked
    // as well to ensure guarantees to callers
    revoke_bias(o, false, true, requesting_thread);
  }

  if (TraceBiasedLocking) {
    tty->print_cr("* Ending bulk revocation");
  }

  BiasedLocking::Condition status_code = BiasedLocking::BIAS_REVOKED;

  if (attempt_rebias_of_object &&
      o->mark()->has_bias_pattern() &&
      klass->prototype_header()->has_bias_pattern()) {
    markOop new_mark = markOopDesc::encode(requesting_thread, o->mark()->age(),
                                           klass->prototype_header()->bias_epoch());
    o->set_mark(new_mark);
    status_code = BiasedLocking::BIAS_REVOKED_AND_REBIASED;
    if (TraceBiasedLocking) {
      tty->print_cr("  Rebiased object toward thread " INTPTR_FORMAT, (intptr_t) requesting_thread);
    }
  }

  assert(!o->mark()->has_bias_pattern() ||
         (attempt_rebias_of_object && (o->mark()->biased_locker() == requesting_thread)),
         "bug in bulk bias revocation");

  return status_code;
}
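The bulk-rebias path relies on epoch invalidation: bumping the epoch stored in the klass's prototype header implicitly invalidates every bias whose mark word carries a stale epoch, without touching each object individually; only currently-locked objects are walked and updated. A standalone sketch of the idea; the TypeInfo and ObjectBias types are invented for illustration.

#include <cassert>
#include <cstdint>

// Per-type epoch, like the bias epoch in the klass's prototype header.
struct TypeInfo {
  std::uint8_t epoch = 0;
};

// Per-object bias record, like the epoch bits in an object's mark word.
struct ObjectBias {
  std::uint8_t epoch;
  bool valid_against(const TypeInfo& t) const { return epoch == t.epoch; }
};

int main() {
  TypeInfo type;
  ObjectBias a{type.epoch};
  ObjectBias b{type.epoch};
  assert(a.valid_against(type) && b.valid_against(type));

  // Bulk rebias: one increment invalidates every outstanding bias of this
  // type without touching the objects themselves.
  ++type.epoch;
  assert(!a.valid_against(type) && !b.valid_against(type));

  // Currently-locked objects are walked and moved to the new epoch,
  // mirroring the thread-stack walk in the code above.
  a.epoch = type.epoch;
  assert(a.valid_against(type));
  return 0;
}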