static bool match(UnsafeRawOp* x,
                  Instruction** base,
                  Instruction** index,
                  int*          log2_scale) {
  Instruction* instr_to_unpin = NULL;
  ArithmeticOp* root = x->base()->as_ArithmeticOp();
  if (root == NULL) return false;
  // Limit ourselves to addition for now
  if (root->op() != Bytecodes::_ladd) return false;
  // Try to find shift or scale op
  if (match_index_and_scale(root->y(), index, log2_scale, &instr_to_unpin)) {
    *base = root->x();
  } else if (match_index_and_scale(root->x(), index, log2_scale, &instr_to_unpin)) {
    *base = root->y();
  } else if (root->y()->as_Convert() != NULL) {
    Convert* convert = root->y()->as_Convert();
    if (convert->op() == Bytecodes::_i2l && convert->value()->type() == intType) {
      // pick base and index, setting scale at 1
      *base  = root->x();
      *index = convert->value();
      *log2_scale = 0;
    } else {
      return false;
    }
  } else {
    // doesn't match any expected sequences
    return false;
  }
  // Typically the addition is pinned, as it is the result of an
  // inlined routine. We want to unpin it so as to avoid the long
  // addition, but in order to do so we must still ensure that the
  // operands are pinned, as they may be computed arbitrarily before
  // the Unsafe op completes (even if the Unsafe op is pinned). At
  // this point we do not really need to pin Unsafe raw or object
  // gets.
  if (root->is_pinned()) {
    if (root->pin_state() == Instruction::PinInlineReturnValue) {
      assert(x->is_pinned(), "All unsafe raw ops should be pinned");
      root->unpin(Instruction::PinInlineReturnValue);
      (*base)->pin();
      (*index)->pin();
    } else {
      // can't safely unpin this instruction
      return false;
    }
  }

  if (PrintUnsafeOptimization && instr_to_unpin != NULL) {
    tty->print_cr("pin_state = 0x%x", instr_to_unpin->pin_state());
    instr_to_unpin->print();
  }
  return true;
}
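// ---------------------------------------------------------------------------
// Standalone sketch (not HotSpot code): the matcher above decomposes the
// 64-bit address expression of an Unsafe raw access into a
// (base, index, log2_scale) triple so the back end can use a scaled-index
// addressing mode instead of materializing the long addition; the pinning
// dance afterwards keeps the operands evaluated before the access once the
// add itself is unpinned. The Node type and the toy_* helpers below are
// hypothetical stand-ins for the C1 Instruction hierarchy, and the
// leaf-as-scale-1 shortcut stands in for the separate i2l case handled by
// the original.
#include <cstdio>

struct Node {
  enum Kind { Leaf, Add, Shl } kind;
  Node* x;     // left operand (Add/Shl)
  Node* y;     // right operand (Add only)
  int   shift; // shift amount (Shl only)
  Node(Kind k, Node* a = NULL, Node* b = NULL, int s = 0)
    : kind(k), x(a), y(b), shift(s) {}
};

// View n as (index << log2_scale); a bare leaf counts as scale 1 (log2 == 0).
static bool toy_match_index_and_scale(Node* n, Node** index, int* log2_scale) {
  if (n->kind == Node::Shl)  { *index = n->x; *log2_scale = n->shift; return true; }
  if (n->kind == Node::Leaf) { *index = n;    *log2_scale = 0;        return true; }
  return false;
}

// Mirror of match(): accept only base + (index << scale), trying both
// operand orders of the commutative add.
static bool toy_match(Node* addr, Node** base, Node** index, int* log2_scale) {
  if (addr->kind != Node::Add) return false;
  if (toy_match_index_and_scale(addr->y, index, log2_scale)) { *base = addr->x; return true; }
  if (toy_match_index_and_scale(addr->x, index, log2_scale)) { *base = addr->y; return true; }
  return false;
}

int main() {
  Node base(Node::Leaf), idx(Node::Leaf);
  Node scaled(Node::Shl, &idx, NULL, 3);    // idx << 3, i.e. scale 8
  Node addr(Node::Add, &base, &scaled);     // base + (idx << 3)

  Node* b; Node* i; int s;
  if (toy_match(&addr, &b, &i, &s))
    printf("matched: base=%p index=%p log2_scale=%d\n", (void*)b, (void*)i, s);
  return 0;
}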
Example No. 2
void Canonicalizer::do_StoreField     (StoreField*      x) {
  // If a value is going to be stored into a field or array, some of
  // the conversions emitted by javac are unneeded because the fields
  // are packed to their natural size.
  Convert* conv = x->value()->as_Convert();
  if (conv) {
    Value value = NULL;
    BasicType type = x->field()->type()->basic_type();
    switch (conv->op()) {
    case Bytecodes::_i2b: if (type == T_BYTE)  value = conv->value(); break;
    case Bytecodes::_i2s: if (type == T_SHORT || type == T_BYTE) value = conv->value(); break;
    case Bytecodes::_i2c: if (type == T_CHAR  || type == T_BYTE)  value = conv->value(); break;
    }
    // limit this optimization to current block
    if (value != NULL && in_current_block(conv)) {
      set_canonical(new StoreField(x->obj(), x->offset(), x->field(), value, x->is_static(),
                                   x->state_before(), x->needs_patching()));
      return;
    }
  }

}
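// ---------------------------------------------------------------------------
// Standalone sketch (not HotSpot code) of why do_StoreField can drop an
// explicit i2b/i2s/i2c in front of a store to a byte/short/char field: the
// store itself only writes the field's natural width, so the narrowing has
// already been paid for. store_byte is a hypothetical stand-in for the
// byte-wide store the back end emits, not a HotSpot routine. The original
// additionally restricts the rewrite to conversions in the current block,
// presumably so the replacement value is known to be available at the store.
#include <cassert>
#include <cstdint>

// Only the low 8 bits of the source register ever reach a byte field.
static void store_byte(int8_t* field, int value_in_register) {
  *field = (int8_t)value_in_register;   // truncates to the low 8 bits
}

int main() {
  int v = 0x1F234;            // an int whose upper bits are dropped either way
  int8_t with_conv, without_conv;

  store_byte(&with_conv, (int8_t)v);    // javac-style: i2b, then putfield
  store_byte(&without_conv, v);         // canonicalized: store the raw int

  assert(with_conv == without_conv);    // the field holds the same packed bits
  return 0;
}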
// Scan one block's instructions and set summary flags (floats/doubles, calls,
// slow cases, class initialization) and per-local access counts
// (a standalone sketch of the idea follows this function).
void ScanBlocks::scan_block(BlockBegin* block, ScanResult* desc, bool live_only) {
  for (Instruction* n = block; n != NULL; n = n->next()) {
    if (live_only && !n->is_pinned() && (n->use_count() == 0)) {
      // don't look at unused instructions because no code is emitted for them
      continue;
    }

    ValueTag tag = n->type()->tag();
    if (tag == floatTag) desc->set_has_floats(true);
    else if (tag == doubleTag) desc->set_has_doubles(true);
    if (n->as_StateSplit() != NULL) {
      if (n->as_Invoke() != NULL) {
        desc->set_has_calls(true);
      } else if (n->as_NewArray() || n->as_NewInstance() || n->as_AccessMonitor()) {
        desc->set_has_slow_cases(true);
      } else if (n->as_Intrinsic() != NULL) {
        Intrinsic* i = n->as_Intrinsic();
        if (i->id() == methodOopDesc::_arraycopy) desc->set_has_slow_cases(true);
        if (!i->preserves_state()) desc->set_has_calls(true);
      }
    } else if (n->as_AccessField() != NULL) {
      AccessField* af = n->as_AccessField();
      if (!af->is_initialized() || !af->is_loaded()) desc->set_has_class_init(true);
    } else if (n->as_AccessLocal() != NULL) {
      AccessLocal* local = n->as_AccessLocal();
      StoreLocal* store = n->as_StoreLocal();
      int use_count = 0;
      if (store != NULL) {
        if (!store->is_eliminated()) {
          use_count = 1;
        }
      } else {
        use_count = n->use_count();
      }
      if (use_count > 0) {
        ValueType* type = local->type();
        assert(local->has_offset(), "must have had offset allocated");
        accumulate_access(in_words(local->offset()), tag, use_count);
      }
    }
#ifdef SPARC
    else {
      if (n->as_Convert() != NULL) {
        Convert* conv = n->as_Convert();
        switch (conv->op()) {
          case Bytecodes::_l2f: 
          case Bytecodes::_l2d: 
          case Bytecodes::_f2l: 
          case Bytecodes::_d2l: 
          case Bytecodes::_d2i: { desc->set_has_calls(true); break; }
        }
      } else if (n->as_ArithmeticOp() != NULL) {
        ArithmeticOp* arith = n->as_ArithmeticOp();
        switch (arith->op()) {
          case Bytecodes::_lrem:  
          case Bytecodes::_ldiv:  
          case Bytecodes::_lmul: 
          case Bytecodes::_drem: 
          case Bytecodes::_frem: { desc->set_has_calls(true); break; }
        }
      }
    }
#endif
  }
}
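// ---------------------------------------------------------------------------
// Standalone sketch (not HotSpot code) of the flag-accumulation idea above:
// walk a flat list of toy instructions once and summarize which features the
// block uses, so later phases can consult the summary without rescanning.
// The Op, Instr and ScanSummary types are hypothetical stand-ins for the C1
// Instruction and ScanResult API, and the single use_count test stands in
// for the live_only/pinned check in the original.
#include <cstdio>
#include <vector>

enum Op { op_int_add, op_float_add, op_double_mul, op_call, op_new_object, op_load_local };

struct Instr { Op op; int local_index; int use_count; };

struct ScanSummary {
  bool has_floats, has_doubles, has_calls, has_slow_cases;
  std::vector<int> local_access_count;
  ScanSummary() : has_floats(false), has_doubles(false),
                  has_calls(false), has_slow_cases(false) {}
};

static void summarize_block(const std::vector<Instr>& block, ScanSummary* desc) {
  for (size_t i = 0; i < block.size(); i++) {
    const Instr& n = block[i];
    if (n.use_count == 0) continue;                   // unused: no code emitted
    switch (n.op) {
      case op_float_add:  desc->has_floats     = true; break;
      case op_double_mul: desc->has_doubles    = true; break;
      case op_call:       desc->has_calls      = true; break;
      case op_new_object: desc->has_slow_cases = true; break;  // needs runtime help
      case op_load_local:
        if ((int)desc->local_access_count.size() <= n.local_index)
          desc->local_access_count.resize(n.local_index + 1, 0);
        desc->local_access_count[n.local_index] += n.use_count;
        break;
      default: break;
    }
  }
}

int main() {
  std::vector<Instr> block;
  Instr i1 = { op_load_local, 0, 2 };  block.push_back(i1);
  Instr i2 = { op_float_add, -1, 1 };  block.push_back(i2);
  Instr i3 = { op_call,      -1, 1 };  block.push_back(i3);

  ScanSummary desc;
  summarize_block(block, &desc);
  printf("floats=%d calls=%d accesses(local 0)=%d\n",
         desc.has_floats, desc.has_calls, desc.local_access_count[0]);
  return 0;
}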