// Canonicalize a shift instruction:
//  - shifting a constant 0 (int or long) folds to the constant 0,
//  - a constant shifted by a constant distance is folded outright; the
//    distance is masked to the low 5 (int) / 6 (long) bits as required for
//    the ishl/ishr/iushr/lshl/lshr/lushr bytecodes,
//  - a shift by a constant 0 canonicalizes to the unshifted operand.
//
// NOTE: the fix here merges what were two overlapping copies of this
// function (one truncated mid-body, one nested re-definition) into the
// single correct definition; no behavior beyond that repair is changed.
void Canonicalizer::do_ShiftOp(ShiftOp* x) {
  ValueType* t  = x->x()->type();
  ValueType* t2 = x->y()->type();
  if (t->is_constant()) {
    switch (t->tag()) {
    case intTag : if (t->as_IntConstant()->value() == 0)         { set_constant(0); return; } break;
    case longTag: if (t->as_LongConstant()->value() == (jlong)0) { set_constant(jlong_cast(0)); return; } break;
    default     : ShouldNotReachHere();
    }
    if (t2->is_constant()) {
      if (t->tag() == intTag) {
        int value = t->as_IntConstant()->value();
        int shift = t2->as_IntConstant()->value() & 31;  // only the low 5 bits of the distance are used
        jint mask = ~(~0 << (32 - shift));               // low (32 - shift) result bits, for the logical shift
        if (shift == 0) mask = ~0;                       // avoid the ill-defined '<< 32'
        switch (x->op()) {
          case Bytecodes::_ishl:  set_constant(value << shift); return;
          // NOTE: relies on the C++ compiler implementing '>>' on a signed
          // value as an arithmetic shift (true for all supported toolchains).
          case Bytecodes::_ishr:  set_constant(value >> shift); return;
          case Bytecodes::_iushr: set_constant((value >> shift) & mask); return;  // emulate logical shift
        }
      } else if (t->tag() == longTag) {
        jlong value = t->as_LongConstant()->value();
        int shift = t2->as_IntConstant()->value() & 63;  // only the low 6 bits of the distance are used
        jlong mask = ~(~jlong_cast(0) << (64 - shift));
        if (shift == 0) mask = ~jlong_cast(0);
        switch (x->op()) {
          case Bytecodes::_lshl:  set_constant(value << shift); return;
          case Bytecodes::_lshr:  set_constant(value >> shift); return;
          case Bytecodes::_lushr: set_constant((value >> shift) & mask); return;
        }
      }
    }
  }
  if (t2->is_constant()) {
    // Non-constant value shifted by a constant distance: a distance of 0 is
    // the identity; any other constant distance is left for the back end.
    switch (t2->tag()) {
      case intTag: if (t2->as_IntConstant()->value() == 0) set_canonical(x->x()); return;
      default    : ShouldNotReachHere();
    }
  }
}
// Canonicalize a two-operand arithmetic/logic instruction: simplify the
// case where both operands are the same node, constant-fold int/long
// operations, normalize constants onto the right operand, and simplify
// operations with a right-hand constant 0.
void Canonicalizer::do_Op2(Op2* x) {
  if (x->x() == x->y()) {
    // Both operands are the very same Instruction node (pointer equality),
    // so several operations simplify algebraically.
    switch (x->op()) {
    case Bytecodes::_isub: set_constant(0); return;              // x - x == 0
    case Bytecodes::_lsub: set_constant(jlong_cast(0)); return;  // x - x == 0L
    case Bytecodes::_iand: // fall through
    case Bytecodes::_land: // fall through
    case Bytecodes::_ior : // fall through
    case Bytecodes::_lor : set_canonical(x->x()); return;        // x & x == x | x == x
    case Bytecodes::_ixor: set_constant(0); return;              // x ^ x == 0
    case Bytecodes::_lxor: set_constant(jlong_cast(0)); return;  // x ^ x == 0L
    }
  }

  if (x->x()->type()->is_constant() && x->y()->type()->is_constant()) {
    // do constant folding for selected operations
    switch (x->type()->tag()) {
      case intTag:
        { jint a = x->x()->type()->as_IntConstant()->value();
          jint b = x->y()->type()->as_IntConstant()->value();
          switch (x->op()) {
            case Bytecodes::_iadd: set_constant(a + b); return;
            case Bytecodes::_isub: set_constant(a - b); return;
            case Bytecodes::_imul: set_constant(a * b); return;
            case Bytecodes::_idiv:
              // Division by 0 is not folded (must still throw at runtime).
              if (b != 0) {
                if (a == min_jint && b == -1) {
                  // Special-cased: Java defines min_jint / -1 == min_jint,
                  // while the C expression would overflow (UB / SIGFPE).
                  set_constant(min_jint);
                } else {
                  set_constant(a / b);
                }
                return;
              }
              break;
            case Bytecodes::_irem:
              if (b != 0) {
                if (a == min_jint && b == -1) {
                  // Java defines min_jint % -1 == 0; C would trap.
                  set_constant(0);
                } else {
                  set_constant(a % b);
                }
                return;
              }
              break;
            case Bytecodes::_iand: set_constant(a & b); return;
            case Bytecodes::_ior : set_constant(a | b); return;
            case Bytecodes::_ixor: set_constant(a ^ b); return;
          }
        }
        break;
      case longTag:
        { jlong a = x->x()->type()->as_LongConstant()->value();
          jlong b = x->y()->type()->as_LongConstant()->value();
          switch (x->op()) {
            case Bytecodes::_ladd: set_constant(a + b); return;
            case Bytecodes::_lsub: set_constant(a - b); return;
            case Bytecodes::_lmul: set_constant(a * b); return;
            case Bytecodes::_ldiv:
              if (b != 0) {
                // NOTE(review): argument order looks deliberate —
                // SharedRuntime::ldiv/lrem presumably take (divisor, dividend)
                // and handle the min_jlong / -1 case; confirm against the
                // SharedRuntime declaration before touching this.
                set_constant(SharedRuntime::ldiv(b, a));
                return;
              }
              break;
            case Bytecodes::_lrem:
              if (b != 0) {
                set_constant(SharedRuntime::lrem(b, a));
                return;
              }
              break;
            case Bytecodes::_land: set_constant(a & b); return;
            case Bytecodes::_lor : set_constant(a | b); return;
            case Bytecodes::_lxor: set_constant(a ^ b); return;
          }
        }
        break;
      // other cases not implemented (must be extremely careful with floats & doubles!)
    }
  }
  // make sure constant is on the right side, if any
  move_const_to_right(x);

  if (x->y()->type()->is_constant()) {
    // do constant folding for selected operations (right operand is 0)
    switch (x->type()->tag()) {
      case intTag:
        if (x->y()->type()->as_IntConstant()->value() == 0) {
          switch (x->op()) {
            case Bytecodes::_iadd: set_canonical(x->x()); return;  // x + 0 == x
            case Bytecodes::_isub: set_canonical(x->x()); return;  // x - 0 == x
            case Bytecodes::_imul: set_constant(0); return;        // x * 0 == 0
              // Note: for div and rem, make sure that C semantics
              //       corresponds to Java semantics!
            case Bytecodes::_iand: set_constant(0); return;        // x & 0 == 0
            case Bytecodes::_ior : set_canonical(x->x()); return;  // x | 0 == x
          }
        }
        break;
      case longTag:
        if (x->y()->type()->as_LongConstant()->value() == (jlong)0) {
          switch (x->op()) {
            case Bytecodes::_ladd: set_canonical(x->x()); return;
            case Bytecodes::_lsub: set_canonical(x->x()); return;
            case Bytecodes::_lmul: set_constant((jlong)0); return;
              // Note: for div and rem, make sure that C semantics
              //       corresponds to Java semantics!
            case Bytecodes::_land: set_constant((jlong)0); return;
            case Bytecodes::_lor : set_canonical(x->x()); return;
          }
        }
        break;
    }
  }
}
// Compare the operands of the current bytecode in the old and new method
// streams (_s_old / _s_new); c_old and c_new are the current (already
// matched) opcodes. Returns false as soon as any operand difference is
// detected, true if all operands are equivalent.
//
// Constant-pool-based operands are compared symbolically (class / name /
// signature symbols or pooled constant values) rather than by raw index,
// since the two methods have different constant pools. Branch offsets are
// handled in two modes: when _switchable_test is set, backward targets are
// checked against _bci_map immediately and forward targets are queued in
// _fwd_jmps for a later check; otherwise the raw offsets must match exactly.
bool MethodComparator::args_same(Bytecodes::Code c_old, Bytecodes::Code c_new) {
  // BytecodeStream returns the correct standard Java bytecodes for various "fast"
  // bytecode versions, so we don't have to bother about them here..
  switch (c_old) {
  case Bytecodes::_new : // fall through
  case Bytecodes::_anewarray : // fall through
  case Bytecodes::_multianewarray : // fall through
  case Bytecodes::_checkcast : // fall through
  case Bytecodes::_instanceof : {
    // Class operand: compare the class symbols without triggering resolution.
    u2 cpi_old = _s_old->get_index_u2();
    u2 cpi_new = _s_new->get_index_u2();
    if ((_old_cp->klass_at_noresolve(cpi_old) != _new_cp->klass_at_noresolve(cpi_new)))
      return false;
    // multianewarray additionally carries a dimension-count byte at bcp+3.
    if (c_old == Bytecodes::_multianewarray &&
        *(jbyte*)(_s_old->bcp() + 3) != *(jbyte*)(_s_new->bcp() + 3))
      return false;
    break;
  }

  case Bytecodes::_getstatic : // fall through
  case Bytecodes::_putstatic : // fall through
  case Bytecodes::_getfield : // fall through
  case Bytecodes::_putfield : // fall through
  case Bytecodes::_invokevirtual : // fall through
  case Bytecodes::_invokespecial : // fall through
  case Bytecodes::_invokestatic : // fall through
  case Bytecodes::_invokeinterface : {
    int cpci_old = _s_old->get_index_u2_cpcache();
    int cpci_new = _s_new->get_index_u2_cpcache();
    // Check if the names of classes, field/method names and signatures at these indexes
    // are the same. Indices which are really into constantpool cache (rather than constant
    // pool itself) are accepted by the constantpool query routines below.
    if ((_old_cp->klass_ref_at_noresolve(cpci_old) != _new_cp->klass_ref_at_noresolve(cpci_new)) ||
        (_old_cp->name_ref_at(cpci_old) != _new_cp->name_ref_at(cpci_new)) ||
        (_old_cp->signature_ref_at(cpci_old) != _new_cp->signature_ref_at(cpci_new)))
      return false;
    break;
  }

  case Bytecodes::_invokedynamic: {
    int cpci_old = _s_old->get_index_u4();
    int cpci_new = _s_new->get_index_u4();
    // Check if the names of classes, field/method names and signatures at these indexes
    // are the same. Indices which are really into constantpool cache (rather than constant
    // pool itself) are accepted by the constantpool query routines below.
    if ((_old_cp->name_ref_at(cpci_old) != _new_cp->name_ref_at(cpci_new)) ||
        (_old_cp->signature_ref_at(cpci_old) != _new_cp->signature_ref_at(cpci_new)))
      return false;
    // Map the cache indices back to constant pool indices so the
    // invokedynamic bootstrap information can be queried.
    int cpi_old = _old_cp->cache()->main_entry_at(cpci_old)->constant_pool_index();
    int cpi_new = _new_cp->cache()->main_entry_at(cpci_new)->constant_pool_index();
    // The bootstrap method reference must match...
    int bsm_old = _old_cp->invoke_dynamic_bootstrap_method_ref_index_at(cpi_old);
    int bsm_new = _new_cp->invoke_dynamic_bootstrap_method_ref_index_at(cpi_new);
    if (!pool_constants_same(bsm_old, bsm_new))
      return false;
    // ...as must every static bootstrap argument, pairwise.
    int cnt_old = _old_cp->invoke_dynamic_argument_count_at(cpi_old);
    int cnt_new = _new_cp->invoke_dynamic_argument_count_at(cpi_new);
    if (cnt_old != cnt_new)
      return false;
    for (int arg_i = 0; arg_i < cnt_old; arg_i++) {
      int idx_old = _old_cp->invoke_dynamic_argument_index_at(cpi_old, arg_i);
      int idx_new = _new_cp->invoke_dynamic_argument_index_at(cpi_new, arg_i);
      if (!pool_constants_same(idx_old, idx_new))
        return false;
    }
    break;
  }

  case Bytecodes::_ldc : // fall through
  case Bytecodes::_ldc_w : {
    // Bytecode_loadconstant abstracts over the narrow/wide index forms.
    Bytecode_loadconstant ldc_old(_s_old->method(), _s_old->bci());
    Bytecode_loadconstant ldc_new(_s_new->method(), _s_new->bci());
    int cpi_old = ldc_old.pool_index();
    int cpi_new = ldc_new.pool_index();
    if (!pool_constants_same(cpi_old, cpi_new))
      return false;
    break;
  }

  case Bytecodes::_ldc2_w : {
    // Two-word constant (long or double): tags must agree, then values.
    u2 cpi_old = _s_old->get_index_u2();
    u2 cpi_new = _s_new->get_index_u2();
    constantTag tag_old = _old_cp->tag_at(cpi_old);
    constantTag tag_new = _new_cp->tag_at(cpi_new);
    if (tag_old.value() != tag_new.value())
      return false;
    if (tag_old.is_long()) {
      if (_old_cp->long_at(cpi_old) != _new_cp->long_at(cpi_new))
        return false;
    } else {
      // Use jlong_cast to compare the bits rather than numerical values.
      // This makes a difference for NaN constants.
      if (jlong_cast(_old_cp->double_at(cpi_old)) != jlong_cast(_new_cp->double_at(cpi_new)))
        return false;
    }
    break;
  }

  case Bytecodes::_bipush :
    // Single immediate byte at bcp+1.
    if (_s_old->bcp()[1] != _s_new->bcp()[1])
      return false;
    break;

  case Bytecodes::_sipush :
    if (_s_old->get_index_u2() != _s_new->get_index_u2())
      return false;
    break;

  case Bytecodes::_aload : // fall through
  case Bytecodes::_astore : // fall through
  case Bytecodes::_dload : // fall through
  case Bytecodes::_dstore : // fall through
  case Bytecodes::_fload : // fall through
  case Bytecodes::_fstore : // fall through
  case Bytecodes::_iload : // fall through
  case Bytecodes::_istore : // fall through
  case Bytecodes::_lload : // fall through
  case Bytecodes::_lstore : // fall through
  case Bytecodes::_ret :
    // Local-variable index operand; wide-prefix status must match as well
    // since it changes the operand encoding.
    if (_s_old->is_wide() != _s_new->is_wide())
      return false;
    if (_s_old->get_index() != _s_new->get_index())
      return false;
    break;

  case Bytecodes::_goto : // fall through
  case Bytecodes::_if_acmpeq : // fall through
  case Bytecodes::_if_acmpne : // fall through
  case Bytecodes::_if_icmpeq : // fall through
  case Bytecodes::_if_icmpne : // fall through
  case Bytecodes::_if_icmplt : // fall through
  case Bytecodes::_if_icmpge : // fall through
  case Bytecodes::_if_icmpgt : // fall through
  case Bytecodes::_if_icmple : // fall through
  case Bytecodes::_ifeq : // fall through
  case Bytecodes::_ifne : // fall through
  case Bytecodes::_iflt : // fall through
  case Bytecodes::_ifge : // fall through
  case Bytecodes::_ifgt : // fall through
  case Bytecodes::_ifle : // fall through
  case Bytecodes::_ifnonnull : // fall through
  case Bytecodes::_ifnull : // fall through
  case Bytecodes::_jsr : {
    // 16-bit signed branch offset.
    int old_ofs = _s_old->bytecode().get_offset_s2(c_old);
    int new_ofs = _s_new->bytecode().get_offset_s2(c_new);
    if (_switchable_test) {
      int old_dest = _s_old->bci() + old_ofs;
      int new_dest = _s_new->bci() + new_ofs;
      if (old_ofs < 0 && new_ofs < 0) {
        // Both branch backward: destinations can be checked against the
        // bci map right away.
        if (! _bci_map->old_and_new_locations_same(old_dest, new_dest))
          return false;
      } else if (old_ofs > 0 && new_ofs > 0) {
        // Both branch forward: queue destinations for the later check.
        _fwd_jmps->append(old_dest);
        _fwd_jmps->append(new_dest);
      } else {
        // Directions disagree (or an offset is 0) - cannot match.
        return false;
      }
    } else {
      if (old_ofs != new_ofs)
        return false;
    }
    break;
  }

  case Bytecodes::_iinc :
    if (_s_old->is_wide() != _s_new->is_wide())
      return false;
    if (! _s_old->is_wide()) {
      // We could use get_index_u1 and get_constant_u1, but it's simpler to grab both bytes at once:
      if (Bytes::get_Java_u2(_s_old->bcp() + 1) != Bytes::get_Java_u2(_s_new->bcp() + 1))
        return false;
    } else {
      // We could use get_index_u2 and get_constant_u2, but it's simpler to grab all four bytes at once:
      if (Bytes::get_Java_u4(_s_old->bcp() + 1) != Bytes::get_Java_u4(_s_new->bcp() + 1))
        return false;
    }
    break;

  case Bytecodes::_goto_w : // fall through
  case Bytecodes::_jsr_w : {
    // Same as the short-branch case above, but with a 32-bit offset.
    int old_ofs = _s_old->bytecode().get_offset_s4(c_old);
    int new_ofs = _s_new->bytecode().get_offset_s4(c_new);
    if (_switchable_test) {
      int old_dest = _s_old->bci() + old_ofs;
      int new_dest = _s_new->bci() + new_ofs;
      if (old_ofs < 0 && new_ofs < 0) {
        if (! _bci_map->old_and_new_locations_same(old_dest, new_dest))
          return false;
      } else if (old_ofs > 0 && new_ofs > 0) {
        _fwd_jmps->append(old_dest);
        _fwd_jmps->append(new_dest);
      } else {
        return false;
      }
    } else {
      if (old_ofs != new_ofs)
        return false;
    }
    break;
  }

  case Bytecodes::_lookupswitch : // fall through
  case Bytecodes::_tableswitch : {
    if (_switchable_test) {
      // Switch operands begin at the next 4-byte-aligned address after the
      // opcode byte.
      address aligned_bcp_old = (address) round_to((intptr_t)_s_old->bcp() + 1, jintSize);
      address aligned_bcp_new = (address) round_to((intptr_t)_s_new->bcp() + 1, jintSize);
      // Default targets are queued for the later jump-target check.
      int default_old = (int) Bytes::get_Java_u4(aligned_bcp_old);
      int default_new = (int) Bytes::get_Java_u4(aligned_bcp_new);
      _fwd_jmps->append(_s_old->bci() + default_old);
      _fwd_jmps->append(_s_new->bci() + default_new);
      if (c_old == Bytecodes::_lookupswitch) {
        int npairs_old = (int) Bytes::get_Java_u4(aligned_bcp_old + jintSize);
        int npairs_new = (int) Bytes::get_Java_u4(aligned_bcp_new + jintSize);
        if (npairs_old != npairs_new)
          return false;
        for (int i = 0; i < npairs_old; i++) {
          // Each entry is a (match value, offset) pair: match values must be
          // identical, offsets are queued for the later check.
          int match_old = (int) Bytes::get_Java_u4(aligned_bcp_old + (2+2*i)*jintSize);
          int match_new = (int) Bytes::get_Java_u4(aligned_bcp_new + (2+2*i)*jintSize);
          if (match_old != match_new)
            return false;
          int ofs_old = (int) Bytes::get_Java_u4(aligned_bcp_old + (2+2*i+1)*jintSize);
          int ofs_new = (int) Bytes::get_Java_u4(aligned_bcp_new + (2+2*i+1)*jintSize);
          _fwd_jmps->append(_s_old->bci() + ofs_old);
          _fwd_jmps->append(_s_new->bci() + ofs_new);
        }
      } else if (c_old == Bytecodes::_tableswitch) {
        // Bounds must agree, then each per-index offset is queued.
        int lo_old = (int) Bytes::get_Java_u4(aligned_bcp_old + jintSize);
        int lo_new = (int) Bytes::get_Java_u4(aligned_bcp_new + jintSize);
        if (lo_old != lo_new)
          return false;
        int hi_old = (int) Bytes::get_Java_u4(aligned_bcp_old + 2*jintSize);
        int hi_new = (int) Bytes::get_Java_u4(aligned_bcp_new + 2*jintSize);
        if (hi_old != hi_new)
          return false;
        for (int i = 0; i < hi_old - lo_old + 1; i++) {
          int ofs_old = (int) Bytes::get_Java_u4(aligned_bcp_old + (3+i)*jintSize);
          int ofs_new = (int) Bytes::get_Java_u4(aligned_bcp_new + (3+i)*jintSize);
          _fwd_jmps->append(_s_old->bci() + ofs_old);
          _fwd_jmps->append(_s_new->bci() + ofs_new);
        }
      }
    } else { // !_switchable_test, can use fast rough compare
      // Offsets must be bit-identical, so a whole-instruction memcmp works.
      int len_old = _s_old->instruction_size();
      int len_new = _s_new->instruction_size();
      if (len_old != len_new)
        return false;
      if (memcmp(_s_old->bcp(), _s_new->bcp(), len_old) != 0)
        return false;
    }
    break;
  }
  }

  // Bytecodes not listed above carry no operands that need comparison.
  return true;
}
inline void OrderAccess::store(volatile jdouble* p, jdouble v) { Atomic::store(jlong_cast(v), (volatile jlong*)p); }