//------------------------------------------------------------------------ // MorphAllocObjNodes: Morph each GT_ALLOCOBJ node either into an // allocation helper call or stack allocation. // // Notes: // Runs only over the blocks having bbFlags BBF_HAS_NEWOBJ set. void ObjectAllocator::MorphAllocObjNodes() { BasicBlock* block; foreach_block(comp, block) { const bool basicBlockHasNewObj = (block->bbFlags & BBF_HAS_NEWOBJ) == BBF_HAS_NEWOBJ; #ifndef DEBUG if (!basicBlockHasNewObj) { continue; } #endif // DEBUG for (GenTreeStmt* stmt = block->firstStmt(); stmt; stmt = stmt->gtNextStmt) { GenTree* stmtExpr = stmt->gtStmtExpr; GenTree* op2 = nullptr; bool canonicalAllocObjFound = false; if (stmtExpr->OperGet() == GT_ASG && stmtExpr->TypeGet() == TYP_REF) { op2 = stmtExpr->gtGetOp2(); if (op2->OperGet() == GT_ALLOCOBJ) { canonicalAllocObjFound = true; } } if (canonicalAllocObjFound) { assert(basicBlockHasNewObj); //------------------------------------------------------------------------ // We expect the following expression tree at this point // * GT_STMT void (top level) // | /--* GT_ALLOCOBJ ref // \--* GT_ASG ref // \--* GT_LCL_VAR ref //------------------------------------------------------------------------ GenTree* op1 = stmtExpr->gtGetOp1(); assert(op1->OperGet() == GT_LCL_VAR); assert(op1->TypeGet() == TYP_REF); assert(op2 != nullptr); assert(op2->OperGet() == GT_ALLOCOBJ); GenTreeAllocObj* asAllocObj = op2->AsAllocObj(); unsigned int lclNum = op1->AsLclVar()->GetLclNum(); if (IsObjectStackAllocationEnabled() && CanAllocateLclVarOnStack(lclNum)) { op2 = MorphAllocObjNodeIntoStackAlloc(asAllocObj, block, stmt); } else { op2 = MorphAllocObjNodeIntoHelperCall(asAllocObj); } // Propagate flags of op2 to its parent. stmtExpr->gtOp.gtOp2 = op2; stmtExpr->gtFlags |= op2->gtFlags & GTF_ALL_EFFECT; } #ifdef DEBUG else { // We assume that GT_ALLOCOBJ nodes are always present in the // canonical form. 
comp->fgWalkTreePre(&stmt->gtStmtExpr, AssertWhenAllocObjFoundVisitor); } #endif // DEBUG } } }
void Rationalizer::DoPhase() { DBEXEC(TRUE, SanityCheck()); comp->compCurBB = nullptr; comp->fgOrder = Compiler::FGOrderLinear; BasicBlock* firstBlock = comp->fgFirstBB; for (BasicBlock* block = comp->fgFirstBB; block != nullptr; block = block->bbNext) { comp->compCurBB = block; m_block = block; // Establish the first and last nodes for the block. This is necessary in order for the LIR // utilities that hang off the BasicBlock type to work correctly. GenTreeStmt* firstStatement = block->firstStmt(); if (firstStatement == nullptr) { // No statements in this block; skip it. block->MakeLIR(nullptr, nullptr); continue; } GenTreeStmt* lastStatement = block->lastStmt(); // Rewrite intrinsics that are not supported by the target back into user calls. // This needs to be done before the transition to LIR because it relies on the use // of fgMorphArgs, which is designed to operate on HIR. Once this is done for a // particular statement, link that statement's nodes into the current basic block. // // This walk also clears the GTF_VAR_USEDEF bit on locals, which is not necessary // in the backend. 
GenTree* lastNodeInPreviousStatement = nullptr; for (GenTreeStmt* statement = firstStatement; statement != nullptr; statement = statement->getNextStmt()) { assert(statement->gtStmtList != nullptr); assert(statement->gtStmtList->gtPrev == nullptr); assert(statement->gtStmtExpr != nullptr); assert(statement->gtStmtExpr->gtNext == nullptr); SplitData splitData; splitData.root = statement; splitData.block = block; splitData.thisPhase = this; comp->fgWalkTreePost(&statement->gtStmtExpr, [](GenTree** use, Compiler::fgWalkData* walkData) -> Compiler::fgWalkResult { GenTree* node = *use; if (node->OperGet() == GT_INTRINSIC && Compiler::IsIntrinsicImplementedByUserCall(node->gtIntrinsic.gtIntrinsicId)) { RewriteIntrinsicAsUserCall(use, walkData); } else if (node->OperIsLocal()) { node->gtFlags &= ~GTF_VAR_USEDEF; } return Compiler::WALK_CONTINUE; }, &splitData, true); GenTree* firstNodeInStatement = statement->gtStmtList; if (lastNodeInPreviousStatement != nullptr) { lastNodeInPreviousStatement->gtNext = firstNodeInStatement; } firstNodeInStatement->gtPrev = lastNodeInPreviousStatement; lastNodeInPreviousStatement = statement->gtStmtExpr; } block->MakeLIR(firstStatement->gtStmtList, lastStatement->gtStmtExpr); // Rewrite HIR nodes into LIR nodes. for (GenTreeStmt *statement = firstStatement, *nextStatement; statement != nullptr; statement = nextStatement) { nextStatement = statement->getNextStmt(); // If this statement has correct offset information, change it into an IL offset // node and insert it into the LIR. 
if (statement->gtStmtILoffsx != BAD_IL_OFFSET) { assert(!statement->IsPhiDefnStmt()); statement->SetOper(GT_IL_OFFSET); statement->gtNext = nullptr; statement->gtPrev = nullptr; BlockRange().InsertBefore(statement->gtStmtList, statement); } m_statement = statement; comp->fgWalkTreePost(&statement->gtStmtExpr, [](GenTree** use, Compiler::fgWalkData* walkData) -> Compiler::fgWalkResult { return reinterpret_cast<Rationalizer*>(walkData->pCallbackData) ->RewriteNode(use, *walkData->parentStack); }, this, true); } assert(BlockRange().CheckLIR(comp)); } comp->compRationalIRForm = true; }
//------------------------------------------------------------------------
// DoPhase: Run the rationalization phase: move every basic block's
//    statement node lists into the block's LIR range and rewrite nodes
//    with no LIR representation via a pre/post-order tree visitor.
//
// Notes:
//    On exit, comp->fgOrder is FGOrderLinear and comp->compRationalIRForm
//    is set.

void Rationalizer::DoPhase()
{
    // Visitor that performs both rewriting steps in a single execution-order
    // walk of each statement's tree.
    class RationalizeVisitor final : public GenTreeVisitor<RationalizeVisitor>
    {
        Rationalizer& m_rationalizer;

    public:
        enum
        {
            ComputeStack      = true,
            DoPreOrder        = true,
            DoPostOrder       = true,
            UseExecutionOrder = true,
        };

        RationalizeVisitor(Rationalizer& rationalizer)
            : GenTreeVisitor<RationalizeVisitor>(rationalizer.comp), m_rationalizer(rationalizer)
        {
        }

        // Rewrite intrinsics that are not supported by the target back into user calls.
        // This needs to be done before the transition to LIR because it relies on the use
        // of fgMorphArgs, which is designed to operate on HIR. Once this is done for a
        // particular statement, link that statement's nodes into the current basic block.
        fgWalkResult PreOrderVisit(GenTree** use, GenTree* user)
        {
            GenTree* const node = *use;
            if (node->OperGet() == GT_INTRINSIC &&
                Compiler::IsIntrinsicImplementedByUserCall(node->gtIntrinsic.gtIntrinsicId))
            {
                m_rationalizer.RewriteIntrinsicAsUserCall(use, this->m_ancestors);
            }

            return Compiler::WALK_CONTINUE;
        }

        // Rewrite HIR nodes into LIR nodes.
        fgWalkResult PostOrderVisit(GenTree** use, GenTree* user)
        {
            return m_rationalizer.RewriteNode(use, this->m_ancestors);
        }
    };

    DBEXEC(TRUE, SanityCheck());

    comp->compCurBB = nullptr;
    comp->fgOrder   = Compiler::FGOrderLinear;

    RationalizeVisitor visitor(*this);
    for (BasicBlock* block = comp->fgFirstBB; block != nullptr; block = block->bbNext)
    {
        comp->compCurBB = block;
        m_block         = block;

        // Capture the statement list before MakeLIR resets the block to an
        // empty LIR range; the loop below re-inserts each statement's nodes.
        GenTreeStmt* firstStatement = block->firstStmt();
        block->MakeLIR(nullptr, nullptr);

        // Establish the first and last nodes for the block. This is necessary in order for the LIR
        // utilities that hang off the BasicBlock type to work correctly.
        if (firstStatement == nullptr)
        {
            // No statements in this block; skip it.
            continue;
        }

        for (GenTreeStmt *statement = firstStatement, *nextStatement; statement != nullptr; statement = nextStatement)
        {
            // Each statement's execution-order list must be well-formed and
            // not yet linked to any neighboring statement.
            assert(statement->gtStmtList != nullptr);
            assert(statement->gtStmtList->gtPrev == nullptr);
            assert(statement->gtStmtExpr != nullptr);
            assert(statement->gtStmtExpr->gtNext == nullptr);

            // Append this statement's nodes to the block's LIR range.
            BlockRange().InsertAtEnd(LIR::Range(statement->gtStmtList, statement->gtStmtExpr));

            // Capture the successor and detach the statement from the
            // statement list before the node is potentially repurposed below.
            nextStatement = statement->getNextStmt();
            statement->gtNext = nullptr;
            statement->gtPrev = nullptr;

            // If this statement has correct offset information, change it into an IL offset
            // node and insert it into the LIR.
            if (statement->gtStmtILoffsx != BAD_IL_OFFSET)
            {
                assert(!statement->IsPhiDefnStmt());
                statement->SetOper(GT_IL_OFFSET);

                BlockRange().InsertBefore(statement->gtStmtList, statement);
            }

            // NOTE(review): m_block was already set for this block before the
            // statement loop; this re-assignment looks redundant unless
            // RewriteNode can change m_block during the walk — confirm.
            m_block = block;
            visitor.WalkTree(&statement->gtStmtExpr, nullptr);
        }

        assert(BlockRange().CheckLIR(comp, true));
    }

    comp->compRationalIRForm = true;
}