bool
ModuleGenerator::convertOutOfRangeBranchesToThunks()
{
    masm_.haltingAlign(CodeAlignment);

    // Create thunks for callsites that have gone out of range. Use a map to
    // create one thunk for each callee since there is often high reuse.

    OffsetMap alreadyThunked(cx_);
    if (!alreadyThunked.init())
        return false;

    for (; lastPatchedCallsite_ < masm_.callSites().length(); lastPatchedCallsite_++) {
        const CallSiteAndTarget& cs = masm_.callSites()[lastPatchedCallsite_];
        if (!cs.isInternal())
            continue;

        uint32_t callerOffset = cs.returnAddressOffset();
        MOZ_RELEASE_ASSERT(callerOffset < INT32_MAX);

        if (funcIsDefined(cs.targetIndex())) {
            uint32_t calleeOffset = funcEntry(cs.targetIndex());
            MOZ_RELEASE_ASSERT(calleeOffset < INT32_MAX);

            if (uint32_t(abs(int32_t(calleeOffset) - int32_t(callerOffset))) < JumpRange()) {
                masm_.patchCall(callerOffset, calleeOffset);
                continue;
            }
        }

        OffsetMap::AddPtr p = alreadyThunked.lookupForAdd(cs.targetIndex());
        if (!p) {
            Offsets offsets;
            offsets.begin = masm_.currentOffset();
            uint32_t thunkOffset = masm_.thunkWithPatch().offset();
            if (masm_.oom())
                return false;
            offsets.end = masm_.currentOffset();

            if (!module_->codeRanges.emplaceBack(CodeRange::CallThunk, offsets))
                return false;
            if (!module_->callThunks.emplaceBack(thunkOffset, cs.targetIndex()))
                return false;
            if (!alreadyThunked.add(p, cs.targetIndex(), offsets.begin))
                return false;
        }

        masm_.patchCall(callerOffset, p->value());
    }

    // Create thunks for jumps to stubs. Stubs are always generated at the end
    // so unconditionally thunk all existing jump sites.

    for (JumpTarget target : MakeEnumeratedRange(JumpTarget::Limit)) {
        if (masm_.jumpSites()[target].empty())
            continue;

        for (uint32_t jumpSite : masm_.jumpSites()[target]) {
            RepatchLabel label;
            label.use(jumpSite);
            masm_.bind(&label);
        }

        Offsets offsets;
        offsets.begin = masm_.currentOffset();
        uint32_t thunkOffset = masm_.thunkWithPatch().offset();
        if (masm_.oom())
            return false;
        offsets.end = masm_.currentOffset();

        if (!module_->codeRanges.emplaceBack(CodeRange::Inline, offsets))
            return false;
        if (!jumpThunks_[target].append(thunkOffset))
            return false;
    }

    // Unlike callsites, which need to be persisted in the Module, we can
    // simply flush jump sites after each patching pass.
    masm_.clearJumpSites();

    return true;
}
bool
ModuleGenerator::patchCallSites(TrapExitOffsetArray* maybeTrapExits)
{
    masm_.haltingAlign(CodeAlignment);

    // Create far jumps for calls that have relative offsets that may otherwise
    // go out of range. Far jumps are created for two cases: direct calls
    // between function definitions and calls to trap exits by trap out-of-line
    // paths. Far jump code is shared when possible to reduce bloat. This
    // method is called both between function bodies (at a frequency determined
    // by the ISA's jump range) and once at the very end of a module's codegen
    // after all possible calls/traps have been emitted.

    OffsetMap existingCallFarJumps;
    if (!existingCallFarJumps.init())
        return false;

    EnumeratedArray<Trap, Trap::Limit, Maybe<uint32_t>> existingTrapFarJumps;

    for (; lastPatchedCallsite_ < masm_.callSites().length(); lastPatchedCallsite_++) {
        const CallSiteAndTarget& cs = masm_.callSites()[lastPatchedCallsite_];
        uint32_t callerOffset = cs.returnAddressOffset();
        MOZ_RELEASE_ASSERT(callerOffset < INT32_MAX);

        switch (cs.kind()) {
          case CallSiteDesc::Dynamic:
          case CallSiteDesc::Symbolic:
            break;
          case CallSiteDesc::Func: {
            if (funcIsCompiled(cs.funcIndex())) {
                uint32_t calleeOffset = funcCodeRange(cs.funcIndex()).funcNonProfilingEntry();
                MOZ_RELEASE_ASSERT(calleeOffset < INT32_MAX);

                if (uint32_t(abs(int32_t(calleeOffset) - int32_t(callerOffset))) < JumpRange()) {
                    masm_.patchCall(callerOffset, calleeOffset);
                    break;
                }
            }

            OffsetMap::AddPtr p = existingCallFarJumps.lookupForAdd(cs.funcIndex());
            if (!p) {
                Offsets offsets;
                offsets.begin = masm_.currentOffset();
                uint32_t jumpOffset = masm_.farJumpWithPatch().offset();
                offsets.end = masm_.currentOffset();
                if (masm_.oom())
                    return false;

                if (!metadata_->codeRanges.emplaceBack(CodeRange::FarJumpIsland, offsets))
                    return false;
                if (!existingCallFarJumps.add(p, cs.funcIndex(), offsets.begin))
                    return false;

                // Record calls' far jumps in metadata since they must be
                // repatched at runtime when profiling mode is toggled.
                if (!metadata_->callThunks.emplaceBack(jumpOffset, cs.funcIndex()))
                    return false;
            }

            masm_.patchCall(callerOffset, p->value());
            break;
          }
          case CallSiteDesc::TrapExit: {
            if (maybeTrapExits) {
                uint32_t calleeOffset = (*maybeTrapExits)[cs.trap()].begin;
                MOZ_RELEASE_ASSERT(calleeOffset < INT32_MAX);

                if (uint32_t(abs(int32_t(calleeOffset) - int32_t(callerOffset))) < JumpRange()) {
                    masm_.patchCall(callerOffset, calleeOffset);
                    break;
                }
            }

            if (!existingTrapFarJumps[cs.trap()]) {
                Offsets offsets;
                offsets.begin = masm_.currentOffset();
                masm_.append(TrapFarJump(cs.trap(), masm_.farJumpWithPatch()));
                offsets.end = masm_.currentOffset();
                if (masm_.oom())
                    return false;

                if (!metadata_->codeRanges.emplaceBack(CodeRange::FarJumpIsland, offsets))
                    return false;

                existingTrapFarJumps[cs.trap()] = Some(offsets.begin);
            }

            masm_.patchCall(callerOffset, *existingTrapFarJumps[cs.trap()]);
            break;
          }
        }
    }

    return true;
}
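// ---------------------------------------------------------------------------
// A minimal, self-contained sketch of the far-jump-island sharing done above,
// written against plain standard containers rather than the SpiderMonkey types
// (OffsetMap, MacroAssembler, CodeRange, ...). JUMP_RANGE, emitFarJump() and
// patchCall() below are hypothetical stand-ins, not the real API; the point is
// only to show the pattern: patch in-range calls directly to their callee, and
// emit one shared island per out-of-range callee, reusing it for every later
// call site that targets the same callee.

#include <cstdint>
#include <cstdlib>
#include <unordered_map>
#include <vector>

namespace farjump_sketch {

constexpr uint32_t JUMP_RANGE = 1 << 25;  // e.g. a +/-32MB direct-branch reach

struct CallSite { uint32_t callerOffset; uint32_t calleeIndex; };

// Hypothetical stand-in: append a fixed-size "island" and return its offset.
static uint32_t emitFarJump(std::vector<uint8_t>& code, uint32_t /*calleeIndex*/) {
    uint32_t islandOffset = uint32_t(code.size());
    code.resize(code.size() + 8);  // pretend an island is 8 bytes of code
    return islandOffset;
}

// Hypothetical stand-in: a real assembler would rewrite the call displacement.
static void patchCall(std::vector<uint8_t>& /*code*/, uint32_t /*caller*/, uint32_t /*target*/) {
}

static void patchCalls(std::vector<uint8_t>& code,
                       const std::vector<CallSite>& sites,
                       const std::vector<uint32_t>& calleeEntry)
{
    // One shared island per callee index, mirroring existingCallFarJumps.
    std::unordered_map<uint32_t, uint32_t> islandFor;

    for (const CallSite& cs : sites) {
        uint32_t callee = calleeEntry[cs.calleeIndex];

        // In range: patch the call to target the callee's entry directly.
        if (uint32_t(std::abs(int32_t(callee) - int32_t(cs.callerOffset))) < JUMP_RANGE) {
            patchCall(code, cs.callerOffset, callee);
            continue;
        }

        // Out of range: emit the island once, then reuse it for later sites.
        auto it = islandFor.find(cs.calleeIndex);
        if (it == islandFor.end())
            it = islandFor.emplace(cs.calleeIndex, emitFarJump(code, cs.calleeIndex)).first;
        patchCall(code, cs.callerOffset, it->second);
    }
}

} // namespace farjump_sketch

int main() {
    using namespace farjump_sketch;
    std::vector<uint8_t> code(64 * 1024 * 1024);    // pretend code region
    std::vector<uint32_t> calleeEntry = { 0x100 };  // one callee entry point
    std::vector<CallSite> sites = {
        { 0x200, 0 },      // near its callee: patched directly
        { 0x3f00000, 0 },  // far from callee 0: gets a shared island
        { 0x3f00040, 0 },  // reuses the same island
    };
    patchCalls(code, sites, calleeEntry);
    return 0;
}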