/*static*/ NamespaceSet* NamespaceSet::_create(MMgc::GC* gc, uint32_t count)
{
    // Should be impossible since ABC only allows U30 values.
    AvmAssert(count <= 0x7fffffff);
    // The base object already holds one Namespacep slot; allocate room
    // for the remaining count-1 entries (zero extra for count 0 or 1).
    const size_t extraBytes = (count > 0 ? count - 1 : 0) * sizeof(Namespacep);
    NamespaceSet* set = new (gc, extraBytes) NamespaceSet;
    // The count is stored shifted left by one; the low bit is a flag bit.
    set->_countAndFlags = count << 1;
    return set;
}
void CallStackNode::exit() { // m_env might be null (for fake CallStackNode), be careful AvmAssert(m_core != NULL); m_core->callStack = m_next; m_next = NULL; m_core = NULL; // so the dtor doesn't call exit() again }
// OSR is supported generally only in runmode RM_mixed. We don't support // methods with try/catch blocks because of the complexity of establishing // a new ExceptionFrame and jmp_buf. We also don't support methods for which // a previous compilation attempt failed, or for which failure can be predicted. // // We must only OSR methods that will execute with a BugCompatibility object // such that interpreter/compiler divergences are corrected. Builtin methods // are invoked with bug compatibility inherited from the innermost non-builtin // function on the call chain, and thus may vary from call to call. Non-builtins // should always execute with bug compatibility taken from the AbcEnv to which // the method belongs, which will thus remain invariant. We can therefore only OSR // non-builtin methods. bool OSR::isSupported(const AbcEnv* abc_env, const MethodInfo* m, MethodSignaturep ms) { AvmCore* core = abc_env->core(); AvmAssert(core == m->pool()->core); AvmAssert(abc_env->pool() == m->pool()); AvmAssert(abc_env->codeContext() != NULL); AvmAssert(abc_env->codeContext()->bugCompatibility() != NULL); return (!m->hasExceptions() && // method does not have a try block core->config.runmode == RM_mixed && // mixed runmode, allowing both interpreter and JIT core->config.osr_threshold != 0 && // OSR is enabled (may be disabled by host) !m->hasFailedJit() && // no previous attempt to compile the method has failed !CodegenLIR::jitWillFail(ms) && // fast-fail predictor says JIT success is possible !m->pool()->isBuiltin && // the method is not a builtin (ABC baked into application) abc_env->codeContext()->bugCompatibility()->bugzilla539094); // bug compatibility permits OSR }
void BigInteger::setFromBigInteger(const BigInteger* from, int32 offset, int32 amount) { numWords = amount; AvmAssert(numWords <= kMaxBigIntegerBufferSize); memcpy( (byte*)wordBuffer, (byte*)&(from->wordBuffer[offset]), amount*sizeof(uint32)); }
PoolObject* NativeInitializer::parseBuiltinABC(Domain* domain)
{
    AvmAssert(domain != NULL);
    // Wrap the baked-in ABC bytes in a const-data script buffer, then hand it
    // to the core's ABC parser.
    ScriptBuffer code = ScriptBuffer(new (core->GetGC()) ConstDataScriptBufferImpl(abcData, abcDataLen));
    return core->parseActionBlock(code,
                                  0,      // start
                                  NULL,   // toplevel
                                  domain,
                                  this,
                                  ApiUtils::getLargestAPI(core)); // active api
}
MathClass::MathClass(VTable* cvtable)
    : ClassClosure(cvtable)
{
    AvmAssert(traits()->getSizeOfInstance() == sizeof(MathClass));
    // Seed the random-number state used by this class.
    MathUtils::initRandom(&seed);
    // TODO: does ES4 Math have a prototype object?
}
// Choose the CreateInstanceProc a class should inherit, based on the class
// object found at the innermost position of its init scope (expected to be
// the base type's class object).
/*static*/ CreateInstanceProc ClassClosure::calcCreateInstanceProc(VTable* cvtable)
{
    VTable* ivtable = cvtable->ivtable;
    if (ivtable && ivtable->base)
    {
        ScopeChain* scope = cvtable->init->scope();
        if (scope->getSize())
        {
            // The innermost scope entry must be an object, else the ABC is corrupt.
            Atom baseAtom = scope->getScope(scope->getSize()-1);
            if (!AvmCore::isObject(baseAtom))
                cvtable->toplevel()->throwVerifyError(kCorruptABCError);

            ScriptObject* base = AvmCore::atomToScriptObject(baseAtom);
            // make sure scope object is base type's class object
            AvmAssert(base->traits()->itraits == cvtable->traits->itraits->base);

            if (base->traits()->itraits->isAbstractBase)
            {
                // If we get here, it means that we descend from an abstract base class,
                // but don't have a native createInstanceProc of our own; in that case, we
                // should just create a plain old ScriptObject. (Note that this can
                // happen for abstract and abstract-restricted; for the latter, we will do
                // a second check in checkForRestrictedInheritance() and may reject it anyway.)
                goto create_normal;
            }

            // ...otherwise, we're done: inherit the base class object's proc.
            ClassClosure* base_cc = base->toClassClosure();
            AvmAssert(base_cc != NULL);
            CreateInstanceProc proc = base_cc->m_createInstanceProc;

            // If the proc is SemiSealedArrayObject, revert back to normal Array,
            // and let checkForRestrictedInheritance() choose the proper proc:
            // we might be a dynamic subclass of a non-dynamic subclass of Array.
            if (proc == SemiSealedArrayObject::createInstanceProc)
                proc = ArrayClass::createInstanceProc;

            // Bugzilla 688486: don't use unsubclassed-specialized
            // instance creator to create subclassed instances.
            if (proc == ArrayClass::createUnsubclassedInstanceProc)
                proc = ArrayClass::createInstanceProc;

            return proc;
        }
    }

// Fallback: no base class object available -- create a plain ScriptObject.
create_normal:
    return ClassClosure::createScriptObjectProc;
}
void PoolObject::resolveQName(uint32_t index, Multiname &m, const Toplevel* toplevel) const
{
    // Index 0 and anything beyond the multiname pool size are invalid.
    if (index == 0 || index >= constantMnCount)
    {
        if (toplevel)
            toplevel->throwVerifyError(kCpoolIndexRangeError,
                                       core->toErrorString(index),
                                       core->toErrorString(constantMnCount));
        AvmAssert(!"unhandled verify error");
    }

    parseMultiname(cpool_mn[index], m);

    // Caller requires a fully-qualified name; anything else is a verify error.
    if (!m.isQName())
    {
        if (toplevel)
            toplevel->throwVerifyError(kCpoolEntryWrongTypeError, core->toErrorString(index));
        AvmAssert(!"unhandled verify error");
    }
}
void WordcodeEmitter::emitAbsJump(const uint8_t *new_pc) { code_start = new_pc; // When performing a jump: // - require that backpatches and labels no longer reference the old // code vector; those sets must both be empty. (We could clear out // labels, alternatively, but that appears not to be required.) // - recompute all the exception information, and require that none of it // has been consumed -- this is the only thing that makes sense, and appears // to be the view the verifier sanctions. (A full definition for the // semantics of abs_jump is sorely needed.) AvmAssert(!exceptions_consumed); AvmAssert(backpatches == NULL); AvmAssert(labels == NULL); computeExceptionFixups(); }
Token Lexer::xmlName()
{
    AvmAssert( isXmlNameStart(*idx) );
    // Remember where the name begins, then consume subsequent name characters.
    mark = idx;
    while (isXmlNameSubsequent(*idx))
        idx++;
    // Intern the scanned [mark, idx) slice as the token's string value.
    val.s = compiler->intern(mark, uint32_t(idx - mark));
    return T_XmlName;
}
int ScriptObject::nextNameIndex(int index)
{
    AvmAssert(index >= 0);
    // Without a hashtable there are no dynamic properties to enumerate.
    return traits()->needsHashtable() ? getTable()->next(index) : 0;
}
Atom FASTCALL DictionaryObject::getKeyFromObject(Atom key) const
{
    AvmAssert(AvmCore::isObject(key));

    ScriptObject* obj = AvmCore::atomToScriptObject(key);
    AvmAssert(Traits::getBuiltinType(obj->traits()) != BUILTIN_qName);
    AvmAssert(MMgc::GC::Size(obj) >= sizeof(ScriptObject));
    (void)obj;  // obj exists only for the assertions above

    // This commented-out code probably pertains to Bugzilla 507699:
    // "Dictionary key of Xml type are not matching."
    //
    // FIXME: this doesn't work, need to convert back to an XMLObject
    // on the way out or intern XMLObject's somehow
    //if(AvmCore::isXML(key))
    //  key = AvmCore::genericObjectToAtom(AvmCore::atomToXML(key));

    return key;
}
// Run a shell command via the C library. On Windows CE there is no system();
// the call is asserted unreachable and reports 0.
int VMPI_system(const char *command)
{
#ifdef UNDER_CE
    AvmAssert(0);
    return 0;
#else
    return system( command );
#endif
}
// Remove and return the first element of the sequence.
// Precondition: the sequence is non-empty.
template<class T>
T SeqBuilder<T>::dequeue()
{
    AvmAssert(items != NULL);
    T result = items->hd;
    items = items->tl;
    if (items == NULL)
        last = NULL;  // list is now empty, so the tail pointer must be reset too
    return result;
}
// Construct a class object. The instance-creation proc for the class is
// chosen (and possibly restricted) by checkForRestrictedInheritance(), then
// temporarily replaced so that instance creation re-checks the still-null
// prototype.
ClassClosure::ClassClosure(VTable* cvtable)
    : ScriptObject(cvtable, NULL)
    , m_createInstanceProc(checkForRestrictedInheritance(cvtable->ivtable, cvtable->ivtable->createInstanceProc))
    // NB: prototype is null right now, but we expect our subclass to
    // initialize it in their ctor (or, at a minimum, before it attempts
    // to create any instances).
{
    AvmAssert(traits()->getSizeOfInstance() >= sizeof(ClassClosure));
    // All callers of this ctor must have a non-null ivtable.
    AvmAssert(cvtable->ivtable != NULL);
    // Route instance creation through the null-prototype-aware proc until the
    // prototype has been set up (presumably that proc restores the real one --
    // TODO confirm against reinitNullPrototypeCreateInstanceProc).
    cvtable->ivtable->createInstanceProc = ClassClosure::reinitNullPrototypeCreateInstanceProc;
    AvmAssert(m_createInstanceProc != reinitNullPrototypeCreateInstanceProc);
    // don't assert here any more: MethodClosure descends
    //AvmAssert(cvtable->traits->itraits != NULL);
    //AvmAssert(ivtable() != NULL);
}
void ConsistencyChecker::checkPhiOperandsJumpToCurrentBlock() { List<BasicBlock*, LIST_GCObjects>* basicBlocks = _functionToCheck->getBasicBlocks(); TessaAssert(basicBlocks != NULL); for (size_t i = 0; i < basicBlocks->size(); i++) { BasicBlock* currentBasicBlock = basicBlocks->get(i); if (currentBasicBlock->getPredecessors()->size() > 1) { List<PhiInstruction*, LIST_GCObjects>* phiInstructions = currentBasicBlock->getPhiInstructions(); for (size_t j = 0; j < phiInstructions->size(); j++) { PhiInstruction* phiInstruction = phiInstructions->get(j); AvmAssert(allPhiOperandsArePredecessors(phiInstruction)); AvmAssert(phiOperandsExistsInPredecessorBlock(phiInstruction)); } } } }
ArrayObject::ArrayObject(VTable *vtable, ScriptObject* proto, uint32 capacity)
    : ScriptObject(vtable, proto, 0)
    , m_denseArr(capacity)
{
    SAMPLE_FRAME("Array", core());
    AvmAssert(traits()->getSizeOfInstance() >= sizeof(ArrayObject));
    // A fresh array is empty and has no low hashtable entry.
    m_lowHTentry = NO_LOW_HTENTRY;
    m_length = 0;
}
void CallStackNode::init(AvmCore* core, uint64_t functionId, int32_t lineno)
{
    AvmAssert(core != NULL);
    AvmAssert(functionId != 0);

    // Identity of the frame being recorded.
    m_functionId = functionId;
    m_linenum = lineno;

    // Fields not supplied by this init flavor start out null.
    m_info = NULL;
    m_env = NULL;
    m_fakename = NULL;
    m_eip = NULL;
    m_filename = NULL;
    m_framep = NULL;
    m_traits = NULL;

    // Push this node onto the core's call stack and compute its depth.
    m_core = core;
    m_next = core->callStack;
    core->callStack = this;
    m_depth = m_next ? (m_next->m_depth + 1) : 1;
}
int DictionaryObject::nextNameIndex(int index) { AvmAssert(index >= 0); // hht could be null if you break in debugger in a subclasses constructor before super // has been called -- let's do it in all builds, it's better than crashing. HeapHashtable* hht = getHeapHashtable(); return hht ? hht->next(index) : 0; }
void DomainMgr::addNamedScriptEnvs(AbcEnv* abcEnv, const GCList<ScriptEnv>& envs)
{
    // A ScriptEnv is only worth registering if its MethodInfo is present in
    // the Pool's or Domain's loaded-script maps; anything filtered out as
    // unreachable will never be looked up, so we skip it. We can't rely on
    // lookup by name (scripts all tend to be named "global"), so we build a
    // temporary identity set of the reachable MethodInfos instead. (Parent
    // Domains need not be consulted: we want loaded, not cached.)
    PoolObject* pool = abcEnv->pool();
    DomainEnv* domainEnv = abcEnv->domainEnv();
    Domain* domain = domainEnv->domain();

    // There is no generic "set" type, so a hashtable serves as one
    // (a bit more memory, but short-lived and a better average lookup time
    // than using List<>).
    HeapHashtable* reachable = HeapHashtable::create(core->GetGC());
    for (StMNHTMethodInfoIterator it(pool->m_loadedScripts); it.next(); )
    {
        if (!it.key())
            continue;
        Atom const a = AvmCore::genericObjectToAtom(it.value());
        reachable->add(a, a);
    }
    for (StMNHTMethodInfoIterator it(domain->m_loadedScripts); it.next(); )
    {
        if (!it.key())
            continue;
        Atom const a = AvmCore::genericObjectToAtom(it.value());
        reachable->add(a, a);
    }

    for (uint32_t i = 0, n = envs.length(); i < n; ++i)
    {
        ScriptEnv* se = envs[i];
        AvmAssert(se->abcEnv() == abcEnv);
        MethodInfo* mi = se->method;
        AvmAssert(domainEnv->m_scriptEnvMap->get(mi) == NULL);
        // Only register envs whose MethodInfo was found in a loaded-script map.
        if (reachable->get(AvmCore::genericObjectToAtom(mi)) != undefinedAtom)
            domainEnv->m_scriptEnvMap->add(mi, se);
    }
    delete reachable;
}
// Resolve the constant-pool multiname at `index` to a Traits*. Throws a
// verify error (via toplevel, when available) for out-of-range indices,
// unknown classes, or void where void is not allowed.
Traits* PoolObject::resolveTypeName(uint32 index, const Toplevel* toplevel, bool allowVoid/*=false*/) const
{
    // only save the type name for now. verifier will resolve to traits
    if (index == 0)
    {
        // Index 0 is represented as a null Traits (presumably the "any" type
        // -- TODO confirm against the ABC spec).
        return NULL;
    }

    // check contents is a multiname. in the cpool, and type system, kObjectType means multiname.
    if (index >= constantMnCount)
    {
        if (toplevel)
            toplevel->throwVerifyError(kCpoolIndexRangeError, core->toErrorString(index), core->toErrorString(constantMnCount));
        AvmAssert(!"unhandled verify error");
    }

    Multiname m;
    parseMultiname(cpool_mn[index], m);
    Traits* t = getTraits(m, toplevel);
    if(m.isParameterizedType())
    {
        // Parameterized type (e.g. Vector.<T>): resolve the type parameter
        // recursively, then resolve the instantiated parameterized type.
        Traits* param_traits = resolveTypeName(m.getTypeParameter(), toplevel);
        t = resolveParameterizedType(toplevel, t, param_traits);
    }
    if (!t)
    {
#ifdef AVMPLUS_VERBOSE
        // Only log when we can't throw a catchable verify error.
        if (!toplevel || !toplevel->verifyErrorClass())
            core->console << "class not found: " << m << " index=" << index << "\n";
#endif
        if (toplevel)
            toplevel->throwVerifyError(kClassNotFoundError, core->toErrorString(&m));
        AvmAssert(!"unhandled verify error");
    }
    if (!allowVoid && t == VOID_TYPE)
    {
        if (toplevel)
            toplevel->throwVerifyError(kIllegalVoidError);
        AvmAssert(!"unhandled verify error");
    }
    return t;
}
Atom DictionaryObject::nextValue(int index)
{
    AvmAssert(index > 0);
    HeapHashtable* hht = getHeapHashtable();
    // A null/undefined key at this slot means there is no live entry;
    // report nullStringAtom in that case, otherwise the stored value.
    Atom key = hht->keyAt(index);
    if (AvmCore::isNullOrUndefined(key))
        return nullStringAtom;
    return hht->valueAt(index);
}
void NativeInitializer::fillInMethods(const NativeMethodInfo* _methodEntry) { while (_methodEntry->method_id != -1) { // if we overwrite a native method mapping, something is hosed AvmAssert(methods[_methodEntry->method_id] == NULL); methods[_methodEntry->method_id] = _methodEntry; _methodEntry++; } }
void MethodInfo::setInterpImpl()
{
    MethodSignaturep ms = getMethodSignature();
    // Number-returning methods go through the FPR entry point; all others
    // through the GPR entry point.
    if (ms->returnTraitsBT() == BUILTIN_number)
        _implFPR = avmplus::interpFPR;
    else
        _implGPR = avmplus::interpGPR;
    AvmAssert(isInterpreted());
    // Typed arguments require coercion before entering the interpreter.
    if (hasTypedArgs(ms))
        _invoker = MethodEnv::coerceEnter_interp;
    else
        _invoker = MethodEnv::coerceEnter_interp_nocoerce;
}
// Find a script by name alone across this Domain's base chain, returning its
// MethodInfo and (via nsFound) the namespace it was found under. Results are
// memoized into m_cachedScripts as they are discovered.
MethodInfo* DomainMgr::findScriptInDomainByNameOnlyImpl(Domain* domain, Stringp name, Namespace*& nsFound)
{
    MethodInfo* mi = NULL;
    // First, look bottom-up to find the first cached instance.
    for (uint32_t i = 0, n = domain->m_baseCount; i < n; ++i)
    {
        Domain* d = domain->m_bases[i];
        if ((mi = (MethodInfo*)d->m_cachedScripts->getName(name, &nsFound)) != NULL)
        {
            // if (cacheIfFound) -- always true here
            {
                if (i > 0)
                {
                    // Found in a base's cache (i == 0 appears to be this
                    // domain itself, per the assert): memoize here too.
                    AvmAssert(d != domain);
                    domain->m_cachedScripts->add(name, nsFound, mi);
                }
            }
            return mi;
        }
    }
    // No instance ever cached, so look top-down to find the first loaded instance.
    for (uint32_t i = domain->m_baseCount; i > 0; --i)
    {
        Domain* d = domain->m_bases[i-1];
        if ((mi = (MethodInfo*)d->m_loadedScripts->getName(name, &nsFound)) != NULL)
        {
            // if (cacheIfFound) -- always true here
            {
                if (i > 1)
                {
                    // Cache in the Domain that actually loaded it...
                    AvmAssert(d != domain);
                    d->m_cachedScripts->add(name, nsFound, mi);
                }
                // ...and always in the querying Domain.
                domain->m_cachedScripts->add(name, nsFound, mi);
            }
            return mi;
        }
    }
    return NULL;
}
void NativeInitializer::fillInClasses(const NativeClassInfo* _classEntry) { while (_classEntry->class_id != -1) { // if we overwrite a native class mapping, something is hosed AvmAssert(classes[_classEntry->class_id] == NULL); classes[_classEntry->class_id] = _classEntry; _classEntry++; } }
// Verify that currentBugCompatibility() is as OSR expected when method is called. void OSR::checkBugCompatibility(MethodEnv* env) { // A method can be OSR'd if it is not a builtin. We ignore builtins here. if (!env->method->pool()->isBuiltin) { const BugCompatibility* abcBugCompatibility = env->abcEnv()->codeContext()->bugCompatibility(); const BugCompatibility* dynamicBugCompatibility = env->core()->currentBugCompatibility(); // Verify that currentBugCompatibility() agrees with what the JIT would have assumed. AvmAssert(dynamicBugCompatibility == abcBugCompatibility); } }
MethodClosureClass::MethodClosureClass(VTable* cvtable)
    : ClassClosure(cvtable)
{
    // Register this class object on the Toplevel before anything can ask for it.
    Toplevel* top = this->toplevel();
    top->methodClosureClass = this;
    AvmAssert(traits()->getSizeOfInstance() == sizeof(MethodClosureClass));
    // The prototype is an empty function created via the Function class.
    prototype = top->functionClass->createEmptyFunction();
}
// These take no arguments void WordcodeEmitter::emitOp0(const uint8_t *pc, WordOpcode opcode) { #ifdef _DEBUG AvmAssert(wopAttrs[opcode].width == 1); #endif // _DEBUG (void)pc; CHECK(1); *dest++ = NEW_OPCODE(opcode); #ifdef AVMPLUS_PEEPHOLE_OPTIMIZER peep(opcode, dest-1); #endif }
// Resolve this native method's signature and install its thunk as the
// generic impl32 entry point.
void NativeMethod::verify(Toplevel *toplevel)
{
    AvmAssert(declaringTraits->isResolved());
    resolveSignature(toplevel);
    // Function-pointer type pun via a union: the thunk is written through one
    // member and read back through the other to reinterpret it with the
    // impl32 signature. (NOTE(review): this is the deliberate alternative to
    // a direct cast between incompatible function-pointer types.)
    union {
        Atom (*impl32)(MethodEnv*, int, uint32 *);
        AvmThunkNativeThunker thunker;
    } u;
    u.thunker = this->thunker;
    this->impl32 = u.impl32;
}