// Best-effort validation that this frame plausibly is a live interpreter
// frame.  Used when walking possibly-corrupt stacks (e.g. from the error
// reporter), so every check is a heuristic: a "true" result means "looks
// sane", not "guaranteed valid".  Checks are ordered cheapest-first and
// each one gates the pointer dereferences performed by the later ones.
// Returns false as soon as any sanity check fails.
bool frame::is_interpreted_frame_valid(JavaThread* thread) const {
// QQQ
#ifdef CC_INTERP
  // C++ interpreter: no checks implemented; fall through to "true".
#else
  assert(is_interpreted_frame(), "Not an interpreted frame");
  // These are reasonable sanity checks:
  // fp and sp must be non-null and word-aligned before we trust them at all.
  if (fp() == 0 || (intptr_t(fp()) & (wordSize-1)) != 0) {
    return false;
  }
  if (sp() == 0 || (intptr_t(sp()) & (wordSize-1)) != 0) {
    return false;
  }
  // The interpreter's initial expression-stack slot must not lie below sp.
  if (fp() + interpreter_frame_initial_sp_offset < sp()) {
    return false;
  }
  // These are hacks to keep us out of trouble.
  // The problem with these is that they mask other problems
  if (fp() <= sp()) {        // this attempts to deal with unsigned comparison above
    return false;
  }

  // do some validation of frame elements

  // first the method

  Method* m = *interpreter_frame_method_addr();

  // validate the method we'd find in this potential sender
  if (!m->is_valid_method()) return false;

  // stack frames shouldn't be much larger than max_stack elements
  // (1024 words of slop covers fixed frame overhead — heuristic bound)
  if (fp() - sp() > 1024 + m->max_stack()*Interpreter::stackElementSize) {
    return false;
  }

  // validate bci/bcx: the stored bytecode pointer/index must map to a
  // legal bci within this method.
  intptr_t bcx = interpreter_frame_bcx();
  if (m->validate_bci_from_bcx(bcx) < 0) {
    return false;
  }

  // validate ConstantPoolCache*: must be non-null and live in metaspace.
  ConstantPoolCache* cp = *interpreter_frame_cache_addr();
  if (cp == NULL || !cp->is_metaspace_object()) return false;

  // validate locals: the locals pointer must lie between fp and the
  // thread's stack base (locals sit above the frame on a downward stack).
  address locals = (address) *interpreter_frame_locals_addr();

  if (locals > thread->stack_base() || locals < (address) fp()) return false;

  // We'd have to be pretty unlucky to be mislead at this point
#endif // CC_INTERP
  return true;
}
bool BytecodePrinter::check_cp_cache_index(int i, int& cp_index, outputStream* st) { ConstantPool* constants = method()->constants(); int ilimit = constants->length(), climit = 0; Bytecodes::Code code = raw_code(); ConstantPoolCache* cache = constants->cache(); // If rewriter hasn't run, the index is the cp_index if (cache == NULL) { cp_index = i; return true; } //climit = cache->length(); // %%% private! size_t size = cache->size() * HeapWordSize; size -= sizeof(ConstantPoolCache); size /= sizeof(ConstantPoolCacheEntry); climit = (int) size; #ifdef ASSERT { const int CPCACHE_INDEX_TAG = ConstantPool::CPCACHE_INDEX_TAG; if (i >= CPCACHE_INDEX_TAG && i < climit + CPCACHE_INDEX_TAG) { i -= CPCACHE_INDEX_TAG; } else { st->print_cr(" CP[%d] missing bias?", i); return false; } } #endif //ASSERT if (i >= 0 && i < climit) { cp_index = cache->entry_at(i)->constant_pool_index(); } else { st->print_cr(" not in CP[*]?", i); return false; } return true; }
// Fast-path entry for trivial accessor methods (aload_0; getfield; return)
// in the Zero C++ interpreter: reads the field directly and replaces the
// receiver in the caller's stack slot with the result, avoiding a full
// interpreter frame.  Falls back to normal_entry() whenever the fast path
// is not safe (safepoint pending, null receiver, unresolved field).
// Returns 0, meaning no deoptimized frames were pushed.
int CppInterpreter::accessor_entry(Method* method, intptr_t UNUSED, TRAPS) {
  JavaThread *thread = (JavaThread *) THREAD;
  ZeroStack *stack = thread->zero_stack();
  intptr_t *locals = stack->sp();

  // Drop into the slow path if we need a safepoint check
  if (SafepointSynchronize::do_call_back()) {
    return normal_entry(method, 0, THREAD);
  }

  // Load the object pointer and drop into the slow path
  // if we have a NullPointerException
  oop object = LOCALS_OBJECT(0);
  if (object == NULL) {
    return normal_entry(method, 0, THREAD);
  }

  // Read the field index from the bytecode, which looks like this:
  //  0:  aload_0
  //  1:  getfield
  //  2:    index
  //  3:    index
  //  4:  ireturn/areturn
  // NB this is not raw bytecode: index is in machine order
  u1 *code = method->code_base();
  assert(code[0] == Bytecodes::_aload_0 &&
         code[1] == Bytecodes::_getfield &&
         (code[4] == Bytecodes::_ireturn ||
          code[4] == Bytecodes::_areturn), "should do");
  u2 index = Bytes::get_native_u2(&code[2]);

  // Get the entry from the constant pool cache, and drop into
  // the slow path if it has not been resolved
  ConstantPoolCache* cache = method->constants()->cache();
  ConstantPoolCacheEntry* entry = cache->entry_at(index);
  if (!entry->is_resolved(Bytecodes::_getfield)) {
    return normal_entry(method, 0, THREAD);
  }

  // Get the result and push it onto the stack
  // Two-word results (long/double) need one extra stack slot beyond the
  // receiver's slot; all other states reuse the receiver's slot as-is,
  // hence the deliberately default-less switch below.
  switch (entry->flag_state()) {
  case ltos:
  case dtos:
    stack->overflow_check(1, CHECK_0);
    stack->alloc(wordSize);
    break;
  }
  if (entry->is_volatile()) {
    // Volatile fields: use the acquire-semantics accessors.
    // entry->f2_as_index() is the field's byte offset within the object.
    switch (entry->flag_state()) {
    case ctos:
      SET_LOCALS_INT(object->char_field_acquire(entry->f2_as_index()), 0);
      break;

    case btos:
      SET_LOCALS_INT(object->byte_field_acquire(entry->f2_as_index()), 0);
      break;

    case stos:
      SET_LOCALS_INT(object->short_field_acquire(entry->f2_as_index()), 0);
      break;

    case itos:
      SET_LOCALS_INT(object->int_field_acquire(entry->f2_as_index()), 0);
      break;

    case ltos:
      SET_LOCALS_LONG(object->long_field_acquire(entry->f2_as_index()), 0);
      break;

    case ftos:
      SET_LOCALS_FLOAT(object->float_field_acquire(entry->f2_as_index()), 0);
      break;

    case dtos:
      SET_LOCALS_DOUBLE(object->double_field_acquire(entry->f2_as_index()), 0);
      break;

    case atos:
      SET_LOCALS_OBJECT(object->obj_field_acquire(entry->f2_as_index()), 0);
      break;

    default:
      ShouldNotReachHere();
    }
  }
  else {
    // Non-volatile fields: plain loads are sufficient.
    switch (entry->flag_state()) {
    case ctos:
      SET_LOCALS_INT(object->char_field(entry->f2_as_index()), 0);
      break;

    case btos:
      SET_LOCALS_INT(object->byte_field(entry->f2_as_index()), 0);
      break;

    case stos:
      SET_LOCALS_INT(object->short_field(entry->f2_as_index()), 0);
      break;

    case itos:
      SET_LOCALS_INT(object->int_field(entry->f2_as_index()), 0);
      break;

    case ltos:
      SET_LOCALS_LONG(object->long_field(entry->f2_as_index()), 0);
      break;

    case ftos:
      SET_LOCALS_FLOAT(object->float_field(entry->f2_as_index()), 0);
      break;

    case dtos:
      SET_LOCALS_DOUBLE(object->double_field(entry->f2_as_index()), 0);
      break;

    case atos:
      SET_LOCALS_OBJECT(object->obj_field(entry->f2_as_index()), 0);
      break;

    default:
      ShouldNotReachHere();
    }
  }

  // No deoptimized frames on the stack
  return 0;
}