// Top-level driver for one SIC (simple inlining compiler) compilation.
// Runs the whole pipeline — flag setup, code-graph generation, BB-level
// optimizations, register allocation, code emission — and returns the
// freshly built nmethod. Statement order matters throughout: the
// FlagSetting objects are RAII guards whose effect lasts for the scope of
// this function, and several later phases depend on earlier ones (see the
// inline "must come before/after" notes).
// NOTE(review): the (void*) casts on integer lprintf arguments appear to be
// a historical convention for passing varargs uniformly — confirm against
// lprintf's declaration before changing any of them.
nmethod* SICompiler::compile() {
  EventMarker em("SIC-compiling %#lx %#lx", L->selector(), NULL);
  ShowCompileInMonitor sc(L->selector(), "SIC", recompilee != NULL);

  // cannot recompile uncommon branches in DI nmethods & top nmethod yet
  FlagSetting fs2(SICDeferUncommonBranches,
                  SICDeferUncommonBranches && diLink == NULL &&
                  L->adeps->length() == 0 &&
                  L->selector() != VMString[DO_IT]);
  // don't use uncommon traps when recompiling because of trap
  useUncommonTraps = SICDeferUncommonBranches && !currentProcess->isUncommon();
  // don't inline into doIt
  FlagSetting fs3(Inline, Inline && L->selector() != VMString[DO_IT]);
# if TARGET_ARCH != I386_ARCH // no FastMapTest possible on I386
  // don't use fast map loads if this nmethod trapped a lot
  FlagSetting fs4(FastMapTest,
                  FastMapTest &&
                  (recompilee == NULL ||
                   recompilee->flags.trapCount < MapLoadTrapLimit));
# endif
  FlagSetting fs5(PrintCompilation, PrintCompilation || PrintSICCompilation);
  timer t;
  FlagSetting fs6(verifyOften, SICDebug || CheckAssertions);

  // Only pay for the timer when some form of reporting will consume it.
  if(PrintCompilation || PrintLongCompilation ||
     PrintCompilationStatistics || VMSICLongProfiling) {
    t.start();
  }
  if (PrintCompilation || PrintSICCode) {
    lprintf("*SIC-%s%scompiling %s%s: (SICCompilationCount=%d)",
            currentProcess->isUncommon() ? "uncommon-" : "",
            recompilee ? "re" : "",
            sprintName( (methodMap*) method()->map(), L->selector()),
            sprintValueMethod( L->receiver ),
            (void*)SICCompilationCount);
  }

  // Phase 1: build the intermediate representation.
  topScope->genCode();
  buildBBs();

  if (verifyOften) bbIterator->verify(false);

  bbIterator->eliminateUnreachableNodes(); // needed for removeUptoMerge to work

  // compute exposed blocks and up-level accessed vars
  bbIterator->computeExposedBlocks();
  bbIterator->computeUplevelAccesses();

  // make defs & uses and insert flush nodes for uplevel-accessed vars
  bbIterator->makeUses();

  // added verify here cause want to catch unreachable merge preds
  // before elimination -- dmu
  if (verifyOften) bbIterator->verify();

  // Phase 2: optional optimization passes, each followed by verification
  // when verifyOften is set.
  if (SICLocalCopyPropagate) {
    bbIterator->localCopyPropagate();
    if (verifyOften) bbIterator->verify();
  }
  if (SICGlobalCopyPropagate) {
    bbIterator->globalCopyPropagate();
    if (verifyOften) bbIterator->verify();
  }
  if (SICEliminateUnneededNodes) {
    bbIterator->eliminateUnneededResults();
    if (verifyOften) bbIterator->verify();
  }

  // do after CP to exploit common type test source regs
  if (SICOptimizeTypeTests) {
    bbIterator->computeDominators();
    bbIterator->optimizeTypeTests();
    if (verifyOften) bbIterator->verify();
  }

  // Phase 3: register and stack allocation.
  // allocate the temp (i.e. volatile) registers
  bbIterator->allocateTempRegisters();
  // allocate the callee-saved (i.e. non-volatile) registers
  SICAllocator* a = theAllocator;
  a->allocate(bbIterator->globals, topScope->incoming);
  stackLocCount = a->stackTemps;

  // make sure frame size is aligned properly
  int32 frame_size_so_far = frameSize();
  stackLocCount += roundTo(frame_size_so_far, frame_word_alignment) - frame_size_so_far;

  // compute the register masks for inline caches
  bbIterator->computeMasks(stackLocCount, nonRegisterArgCount());
  topScope->computeMasks(regStringToMask(topScope->incoming),
                         stackLocCount, nonRegisterArgCount());

  if (PrintSICCode) {
    print_code(false);
    lprintf("\n\n");
  }

  // Phase 4: code generation.
  topScope->describe();  // must come before gen to set scopeInfo
  genHelper = new SICGenHelper;
  bbIterator->gen();
  assert(theAssembler->verifyLabels(), "undefined labels");
  rec->generate();
  topScope->fixupBlocks();  // must be after rec->gen to know offsets
  if (vscopes) computeMarkers();  // ditto

  // Phase 5: package the result into an nmethod.
  nmethod* nm = new_nmethod(this, false);
  if (theAssembler->lastBackpatch >= theAssembler->instsEnd)
    fatal("dangling branch");
  em.event.args[1] = nm;

  fint ms = IntervalTimer::dont_use_any_timer ? 0 : t.millisecs();

  // Reporting: long-compilation message is only printed when the quick
  // PrintCompilation line was suppressed and the pause exceeded the limit.
  if (PrintCompilation || PrintLongCompilation) {
    if (!PrintCompilation && PrintLongCompilation && ms >= MaxCompilePause) {
      lprintf("*SIC-%s%scompiling ",
              currentProcess->isUncommon() ? "uncommon-" : "",
              recompilee ? "re" : "");
      methodMap* mm = method() ? (methodMap*) method()->map() : NULL;
      printName(mm, L->selector());
      lprintf(": %#lx (%ld ms; level %ld)\n", nm, (void*)ms, (void*)nm->level());
    } else if (PrintCompilation) {
      lprintf(": %#lx (%ld ms; level %ld v%d)\n",
              (void*)nm, (void*)ms, (void*)nm->level(), (void*)nm->version());
    }
  }
  if (SICDebug && estimatedSize() > inlineLimit[NmInstrLimit]) {
    float rat = (float)estimatedSize() / (float)nm->instsLen();
    // NOTE(review): *(void**)&rat reinterprets the float's bits as a pointer
    // to squeeze it through the varargs call — an old punning trick; verify
    // it still matches lprintf's %4.2f handling on this target.
    lprintf("*est. size = %ld, true size = %ld, ratio = %4.2f\n",
            (void*)estimatedSize(), (void*)nm->instsLen(), *(void**)&rat);
  }
  if (PrintCompilationStatistics) {
    static fint counter = 0;
    lprintf("\n*SIC-time= |%ld| ms; to/co/sc/lo/de= |%ld|%ld|%ld|%ld|%ld| %ld|%ld|%ld| %ld |",
            (void*)ms,
            (void*) (nm->instsLen() + nm->scopes->length() +
                     nm->locsLen() + nm->depsLen),
            (void*)nm->instsLen(), (void*)nm->scopes->length(),
            (void*)nm->locsLen(), (void*)nm->depsLen,
            (void*)BasicNode::currentID, (void*)bbIterator->bbCount,
            (void*)ncodes, (void*)counter++);
  }
# if GENERATE_DEBUGGING_AIDS
  if (CheckAssertions) {
    // nm->verify();
  }
# endif
  return nm;
}
// Top-level driver for one (Delta) compilation: selects the trace banner,
// runs IR generation, the optimization passes, allocation, and code
// emission (new or old backend), and returns the finished nmethod.
// Statement order is load-bearing: FlagSetting/TraceTime are RAII guards
// scoped to this function, and theMacroAssm is a global that is set,
// finalized, and cleared in sequence here.
nmethod* Compiler::compile() {
  NewBackendGuard guard;

  // Progress dots: one '.' every PrintProgress-th compilation.
  if ((PrintProgress > 0) && (nofCompilations % PrintProgress == 0)) std->print(".");

  // Pick the banner text describing this kind of compile.
  const char* compiling;
  if (DeltaProcess::active()->isUncommon()) {
    compiling = recompilee ? "Uncommon-Recompiling " : "Uncommon-Compiling ";
  } else {
    if (_uses_inlining_database) {
      compiling = recompilee ? "Recompiling (database)" : "Compiling (database)";
    } else {
      compiling = recompilee ? "Recompiling " : "Compiling ";
    }
  }
  EventMarker em("%s%#lx %#lx", compiling, key->selector(), NULL);

  // don't use uncommon traps when recompiling because of trap
  useUncommonTraps = DeferUncommonBranches && !is_uncommon_compile();
  if (is_uncommon_compile()) reporter->report_uncommon(false);
  if (recompilee && recompilee->isUncommonRecompiled()) reporter->report_uncommon(true);
  // don't use counters when compiling from DB
  FlagSetting fs(UseRecompilation, UseRecompilation && !is_database_compile());

  bool should_trace = _uses_inlining_database ? PrintInliningDatabaseCompilation : PrintCompilation;
  TraceTime t(compiling, should_trace);

  if (should_trace || PrintCode) {
    print_key(std);
    if (PrintCode || PrintInlining) std->print("\n");
  }

  // Phase 1: build the intermediate representation.
  topScope->genCode();
  fixupNLRTestPoints();
  buildBBs();

  if (PrintCode) print_code(false);
  if (verifyOften) bbIterator->verify();

  // compute escaping blocks and up-level accessed vars
  bbIterator->computeEscapingBlocks();
  bbIterator->computeUplevelAccesses();
  if (verifyOften) bbIterator->verify();
  //if (PrintCode) print_code(false);

  // construct def & use information
  bbIterator->makeUses();
  if (verifyOften) bbIterator->verify();
  //if (PrintCode) print_code(false);

  // Phase 2: optional optimization passes, each re-verified when
  // verifyOften is set.
  if (LocalCopyPropagate) {
    bbIterator->localCopyPropagate();
    if (verifyOften) bbIterator->verify();
  }
  //if (PrintCode) print_code(false);
  if (GlobalCopyPropagate) {
    bbIterator->globalCopyPropagate();
    if (verifyOften) bbIterator->verify();
  }
  //if (PrintCode) print_code(false);
  if (BruteForcePropagate) {
    bbIterator->bruteForceCopyPropagate();
    if (verifyOften) bbIterator->verify();
  }
  //if (PrintCode) print_code(false);
  if (EliminateUnneededNodes) {
    bbIterator->eliminateUnneededResults();
    if (verifyOften) bbIterator->verify();
  }
  //if (PrintCode) print_code(false);
  if (OptimizeIntegerLoops) {
    // run after copy propagation so that loop increment is easier to recognize
    // also run after eliminateUnneededResults so that cpInfo is set for eliminated PRegs
    topScope->optimizeLoops();
    if (verifyOften) bbIterator->verify();
  }
  //if (PrintCode) print_code(false);

  // compute existence & format of run-time context objects and blocks
  computeBlockInfo();

  // allocate floats
  _totalNofFloatTemporaries = topScope->allocateFloatTemporaries(0);

  // HACK: Fix preallocation
  // Necessary because a few primitives (allocateContext/Closure) need self or
  // the previous context after calling a primitive; i.e., self or the previous
  // context should not be allocated to a register. Currently not working correctly
  // -> allocated to stack as a temporary fix for the problem.
  theAllocator->preAllocate(topScope->self()->preg());

  // Phase 3: register allocation.
  bbIterator->localAlloc();  // allocate regs within basic blocks
  theAllocator->allocate(bbIterator->globals);

  if (PrintCode) print_code(false);
#ifdef ASSERT
  bbIterator->verify();
#endif

  if (PrintDebugInfoGeneration) {
    std->cr();
    std->cr();
    std->print_cr("Start of debugging info.");
  }
  topScope->generateDebugInfo();  // must come before gen to set scopeInfo
  topScope->generateDebugInfoForNonInlinedBlocks();

  // Phase 4: generate machine code via the selected backend.
  theMacroAssm = new MacroAssembler(_code);
  if (UseNewBackend) {
    PRegMapping* mapping = new PRegMapping(theMacroAssm, topScope->nofArguments(), 6, topScope->nofTemporaries());
    CodeGenerator* cgen = new CodeGenerator(theMacroAssm, mapping);
    cgen->initialize(topScope);
    bbIterator->apply(cgen);
    cgen->finalize(topScope);
  } else {
    // use a node visitor to generate code
    OldCodeGenerator* cgen = new OldCodeGenerator();
    bbIterator->apply(cgen);
  }
  theMacroAssm->finalize();
  theMacroAssm = NULL;

  // Label verification: unconditional in ASSERT builds, gated on
  // verifyOften otherwise (the #ifndef pair opens/closes the if-block).
#ifndef ASSERT
  if (verifyOften) {
#endif
    bool ok = bbIterator->verifyLabels();
    if (!ok) print_code(false);
#ifndef ASSERT
  }
#endif

  rec->generate();  // write debugging info

  // Phase 5: package the result.
  nmethod* nm = new_nmethod(this);  // construct new nmethod
  em.event.args[1] = nm;
  if (PrintAssemblyCode) Disassembler::decode(nm);
  reporter->finish_reporting();
  if (should_trace) {
    lprintf(": %#lx (%d bytes; level %ld v%d)\n", nm, nm->instsLen(), nm->level(), nm->version());
    flush_logFile();
  }
  if (verifyOften) nm->verify();
  if (PrintDebugInfo) nm->print_inlining(std, true);
  return nm;
}