bool LIR_OopMapGenerator::exception_handler_covers(CodeEmitInfo* info,
                                                   BlockBegin* handler)
{
  int bci = info->bci();
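  // Walk up the inlining chain; at each level translate bci to the caller's
  // bci so it can be checked against the handler ranges declared in that scope.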
  for (IRScope* scope = info->scope(); scope != NULL; bci = scope->caller_bci(), scope = scope->caller()) {
    if (scope == handler->scope()) {
      XHandlers* xh = scope->xhandlers();
      for (int i = 0; i < xh->number_of_handlers(); i++) {
        if (xh->handler_at(i)->handler_bci() == handler->bci()) {
          if (xh->handler_at(i)->covers(bci)) {
#ifndef PRODUCT
            if (TraceLIROopMaps) {
              tty->print_cr("    Found exception handler: block %d", handler->block_id());
            }
#endif
            return true;
          }
        }
      }
    }
  }
#ifndef PRODUCT
  if (TraceLIROopMaps) {
    tty->print_cr("  No exception handler found");
  }
#endif
  return false;
}
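// Select the patching stub kind for this site: bytecodes that may carry an
// appendix argument (invokedynamic/invokehandle) get a load_appendix patch,
// everything else patches in the class mirror.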
PatchingStub::PatchID LIR_Assembler::patching_id(CodeEmitInfo* info) {
  IRScope* scope = info->scope();
  Bytecodes::Code bc_raw = scope->method()->raw_code_at_bci(info->stack()->bci());
  if (Bytecodes::has_optional_appendix(bc_raw)) {
    return PatchingStub::load_appendix_id;
  }
  return PatchingStub::load_mirror_id;
}
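// Emit a non-safepoint debug info record at the pending PC offset, describing
// every scope in the inlining chain from the outermost caller down to the
// innermost scope.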
void LIR_Assembler::record_non_safepoint_debug_info() {
  int         pc_offset = _pending_non_safepoint_offset;
  ValueStack* vstack    = debug_info(_pending_non_safepoint);
  int         bci       = vstack->bci();

  DebugInformationRecorder* debug_info = compilation()->debug_info_recorder();
  assert(debug_info->recording_non_safepoints(), "sanity");

  debug_info->add_non_safepoint(pc_offset);

  // Visit scopes from oldest to youngest.
  for (int n = 0; ; n++) {
    int s_bci = bci;
    ValueStack* s = nth_oldest(vstack, n, s_bci);
    if (s == NULL)  break;
    IRScope* scope = s->scope();
    // Always pass false for reexecute since these ScopeDescs are never used for deopt
    debug_info->describe_scope(pc_offset, scope->method(), s->bci(), false/*reexecute*/);
  }

  debug_info->end_non_safepoint(pc_offset);
}
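// Build the ValueStack the caller sees when an inlined scope is popped: the
// expression stack and held monitors carry over from the current state, while
// locals and store state are restored from the saved caller state.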
ValueStack* ValueStack::pop_scope(bool should_eliminate_stores, int bci) {
  assert(_scope->caller() != NULL, "scope must have caller");
  IRScope* scope = _scope->caller();
  int max_stack = max_stack_size() - _scope->method()->max_stack();
  assert(max_stack >= 0, "stack underflow");
  ValueStack* res = new ValueStack(scope,
                                   scope->method()->max_locals(),
                                   max_stack);
  // Preserves stack and monitors. Restores local and store state from caller scope.
  res->_stack.appendAll(&_stack);
  res->_locks.appendAll(&_locks);
  ValueStack* caller = caller_state();
  if (caller != NULL) {
    for (int i = 0; i < caller->_locals.length(); i++) {
      res->_locals.at_put(i, caller->_locals.at(i));
      res->_stores.at_put(i, caller->_stores.at(i));
    }
    assert(res->_locals.length() == res->scope()->method()->max_locals(), "just checking");
    assert(res->_stores.length() == res->scope()->method()->max_locals(), "just checking");
  }
  assert(res->_stack.size() <= res->max_stack_size(), "stack overflow");
  if (EliminateStores && should_eliminate_stores) eliminate_stores(bci);
  return res;
}