void LIR_Assembler::emit_call(LIR_OpJavaCall* op) {
    verify_oop_map(op->info());

    if (os::is_MP()) {
        // must align call sites, otherwise they can't be updated atomically on MP hardware
        align_call(op->code());
    }

    // emit the static call stub stuff out of line
    emit_static_call_stub();
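    // stop here if the stub could not be emitted (the compilation has bailed out,
    // e.g. because the code buffer's stub section overflowed)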
    CHECK_BAILOUT();

    switch (op->code()) {
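    // static and dynamic (invokedynamic) calls are emitted as direct calls that are
    // resolved and patched through the static call stub emitted above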
    case lir_static_call:
    case lir_dynamic_call:
        call(op, relocInfo::static_call_type);
        break;
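    // optimized virtual call: the target method is statically bound, so a direct call is used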
    case lir_optvirtual_call:
        call(op, relocInfo::opt_virtual_call_type);
        break;
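    // virtual call dispatched through an inline cache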
    case lir_icvirtual_call:
        ic_call(op);
        break;
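    // virtual call dispatched through the receiver's vtable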
    case lir_virtual_call:
        vtable_call(op);
        break;
    default:
        fatal("unexpected op code: %s", op->name());
        break;
    }

    // JSR 292
    // Record if this method has MethodHandle invokes.
    if (op->is_method_handle_invoke()) {
        compilation()->set_has_method_handle_invokes(true);
    }

#if defined(X86) && defined(TIERED)
    // C2 leaves the x87 FPU stack dirty; clean it up after the call
    if (UseSSE < 2) {
        // free st(1)..st(7); keep st(0) only if it holds the call's floating-point result
        for (int i = 1; i <= 7; i++) {
            ffree(i);
        }
        if (!op->result_opr()->is_float_kind()) {
            ffree(0);
        }
    }
#endif // X86 && TIERED
}
Example #2
void LIR_Assembler::emit_call(LIR_OpJavaCall* op) {
  verify_oop_map(op->info());

  if (os::is_MP()) {
    // must align call sites, otherwise they can't be updated atomically on MP hardware
    align_call(op->code());
  }

  // emit the static call stub stuff out of line
  emit_static_call_stub();

  switch (op->code()) {
  case lir_static_call:
    call(op->addr(), relocInfo::static_call_type, op->info());
    break;
  case lir_optvirtual_call:
    call(op->addr(), relocInfo::opt_virtual_call_type, op->info());
    break;
  case lir_icvirtual_call:
    ic_call(op->addr(), op->info());
    break;
  case lir_virtual_call:
    vtable_call(op->vtable_offset(), op->info());
    break;
  default: ShouldNotReachHere();
  }
#if defined(X86) && defined(TIERED)
  // C2 leaves the x87 FPU stack dirty; clean it up after the call
  if (UseSSE < 2) {
    for (int i = 1; i <= 7; i++) {
      ffree(i);
    }
    if (!op->result_opr()->is_float_kind()) {
      ffree(0);
    }
  }
#endif // X86 && TIERED
}