CloogInput *Cloog::buildCloogInput() {
  CloogDomain *Context =
      cloog_domain_from_isl_set(isl_set_copy(S->getContext()));
  CloogUnionDomain *Statements = buildCloogUnionDomain();
  CloogInput *Input = cloog_input_alloc(Context, Statements);
  return Input;
}
CloogInput *Cloog::buildCloogInput() {
  // XXX: We do not copy the context of the scop, but use an unconstrained
  // context. This 'hack' is necessary as the context may contain bounds on
  // parameters such as [n] -> {: 0 <= n < 2^32}. Such large integers cause
  // CLooG to construct a clast whose expressions include them, and these
  // expressions can possibly not be evaluated correctly with i64 types. The
  // CLooG-based code generation backend, however, cannot derive types
  // automatically and just assumes i64 types. Hence, it would break or
  // generate incorrect code.
  //
  // This hack does not remove all possibilities of incorrectly generated
  // code, but it ensures that for most problems the issue does not show up.
  // The correct solution will be to automatically derive the minimal types
  // for each expression. This could be added to CLooG, and it will be
  // available in the isl-based code generation.
  isl_set *EmptyContext = isl_set_universe(S->getParamSpace());
  CloogDomain *Context = cloog_domain_from_isl_set(EmptyContext);
  CloogUnionDomain *Statements = buildCloogUnionDomain();

  isl_set *ScopContext = S->getContext();

  for (unsigned i = 0; i < isl_set_dim(ScopContext, isl_dim_param); i++) {
    isl_id *id = isl_set_get_dim_id(ScopContext, isl_dim_param, i);
    Statements = cloog_union_domain_set_name(Statements, CLOOG_PARAM, i,
                                             isl_id_get_name(id));
    isl_id_free(id);
  }

  isl_set_free(ScopContext);

  CloogInput *Input = cloog_input_alloc(Context, Statements);

  return Input;
}
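// Hedged illustration, not part of the backend itself: the kind of context
// the comment above refers to, built with plain isl calls. `Ctx` stands for
// an isl_ctx obtained elsewhere, and `illustrateContextHack` is a name made
// up for this sketch (requires <isl/set.h> and <isl/space.h>). A bounded set
// like the one below makes CLooG emit clast expressions containing 2^32,
// which the fixed-i64 backend cannot handle safely; the unconstrained
// universe set over the same parameter space is what buildCloogInput()
// hands to CLooG instead.
static void illustrateContextHack(isl_ctx *Ctx) {
  // Context with a 32-bit bound on the parameter n, as in the XXX comment.
  isl_set *Bounded =
      isl_set_read_from_str(Ctx, "[n] -> { : 0 <= n < 4294967296 }");

  // Unconstrained context over the same parameter space.
  isl_set *Universe = isl_set_universe(isl_set_get_space(Bounded));

  isl_set_free(Bounded);
  isl_set_free(Universe);
}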
bool JSONImporter::runOnScop(Scop &S) {
  const Dependences &D =
      getAnalysis<DependenceInfo>().getDependences(Dependences::AL_Statement);
  const DataLayout &DL = S.getFunction().getParent()->getDataLayout();

  std::string FileName = ImportDir + "/" + getFileName(S);
  std::string FunctionName = S.getFunction().getName();
  errs() << "Reading JScop '" << S.getNameStr() << "' in function '"
         << FunctionName << "' from '" << FileName << "'.\n";
  ErrorOr<std::unique_ptr<MemoryBuffer>> result =
      MemoryBuffer::getFile(FileName);
  std::error_code ec = result.getError();

  if (ec) {
    errs() << "File could not be read: " << ec.message() << "\n";
    return false;
  }

  Json::Reader reader;
  Json::Value jscop;

  bool parsingSuccessful = reader.parse(result.get()->getBufferStart(), jscop);

  if (!parsingSuccessful) {
    errs() << "JSCoP file could not be parsed\n";
    return false;
  }

  isl_set *OldContext = S.getContext();
  isl_set *NewContext =
      isl_set_read_from_str(S.getIslCtx(), jscop["context"].asCString());

  for (unsigned i = 0; i < isl_set_dim(OldContext, isl_dim_param); i++) {
    isl_id *id = isl_set_get_dim_id(OldContext, isl_dim_param, i);
    NewContext = isl_set_set_dim_id(NewContext, isl_dim_param, i, id);
  }

  isl_set_free(OldContext);
  S.setContext(NewContext);

  StatementToIslMapTy NewSchedule;

  int index = 0;

  for (ScopStmt &Stmt : S) {
    Json::Value schedule = jscop["statements"][index]["schedule"];
    isl_map *m = isl_map_read_from_str(S.getIslCtx(), schedule.asCString());
    isl_space *Space = Stmt.getDomainSpace();

    // Copy the old tuple id. This is necessary to retain the user pointer,
    // which stores the reference to the ScopStmt this schedule belongs to.
    m = isl_map_set_tuple_id(m, isl_dim_in,
                             isl_space_get_tuple_id(Space, isl_dim_set));
    for (unsigned i = 0; i < isl_space_dim(Space, isl_dim_param); i++) {
      isl_id *id = isl_space_get_dim_id(Space, isl_dim_param, i);
      m = isl_map_set_dim_id(m, isl_dim_param, i, id);
    }
    isl_space_free(Space);
    NewSchedule[&Stmt] = m;
    index++;
  }

  if (!D.isValidSchedule(S, &NewSchedule)) {
    errs() << "JScop file contains a schedule that changes the "
           << "dependences. Use -disable-polly-legality to continue anyways\n";
    for (StatementToIslMapTy::iterator SI = NewSchedule.begin(),
                                       SE = NewSchedule.end();
         SI != SE; ++SI)
      isl_map_free(SI->second);
    return false;
  }

  auto ScheduleMap = isl_union_map_empty(S.getParamSpace());
  for (ScopStmt &Stmt : S) {
    if (NewSchedule.find(&Stmt) != NewSchedule.end())
      ScheduleMap = isl_union_map_add_map(ScheduleMap, NewSchedule[&Stmt]);
    else
      ScheduleMap = isl_union_map_add_map(ScheduleMap, Stmt.getSchedule());
  }

  S.setSchedule(ScheduleMap);

  int statementIdx = 0;
  for (ScopStmt &Stmt : S) {
    int memoryAccessIdx = 0;
    for (MemoryAccess *MA : Stmt) {
      Json::Value accesses = jscop["statements"][statementIdx]["accesses"]
                                  [memoryAccessIdx]["relation"];
      isl_map *newAccessMap =
          isl_map_read_from_str(S.getIslCtx(), accesses.asCString());
      isl_map *currentAccessMap = MA->getAccessRelation();

      if (isl_map_dim(newAccessMap, isl_dim_param) !=
          isl_map_dim(currentAccessMap, isl_dim_param)) {
        errs() << "JScop file changes the number of parameter dimensions\n";
        isl_map_free(currentAccessMap);
        isl_map_free(newAccessMap);
        return false;
      }

      isl_id *OutId = isl_map_get_tuple_id(currentAccessMap, isl_dim_out);
      newAccessMap = isl_map_set_tuple_id(newAccessMap, isl_dim_out, OutId);

      if (MA->isArrayKind()) {
        // We keep the old alignment, thus we cannot allow accesses to memory
        // locations that were not accessed before if the alignment of the
        // access is not the default alignment.
        bool SpecialAlignment = true;
        if (LoadInst *LoadI = dyn_cast<LoadInst>(MA->getAccessInstruction())) {
          SpecialAlignment =
              DL.getABITypeAlignment(LoadI->getType()) != LoadI->getAlignment();
        } else if (StoreInst *StoreI =
                       dyn_cast<StoreInst>(MA->getAccessInstruction())) {
          SpecialAlignment =
              DL.getABITypeAlignment(StoreI->getValueOperand()->getType()) !=
              StoreI->getAlignment();
        }

        if (SpecialAlignment) {
          isl_set *newAccessSet = isl_map_range(isl_map_copy(newAccessMap));
          isl_set *currentAccessSet =
              isl_map_range(isl_map_copy(currentAccessMap));
          bool isSubset = isl_set_is_subset(newAccessSet, currentAccessSet);
          isl_set_free(newAccessSet);
          isl_set_free(currentAccessSet);

          if (!isSubset) {
            errs() << "JScop file changes the accessed memory\n";
            isl_map_free(currentAccessMap);
            isl_map_free(newAccessMap);
            return false;
          }
        }
      }

      // We need to copy the isl_ids for the parameter dimensions to the new
      // map. Without doing this the current map would have different ids
      // than the new one, even though both are named identically.
      for (unsigned i = 0; i < isl_map_dim(currentAccessMap, isl_dim_param);
           i++) {
        isl_id *id = isl_map_get_dim_id(currentAccessMap, isl_dim_param, i);
        newAccessMap = isl_map_set_dim_id(newAccessMap, isl_dim_param, i, id);
      }

      // Copy the old tuple id. This is necessary to retain the user pointer,
      // which stores the reference to the ScopStmt this access belongs to.
      isl_id *Id = isl_map_get_tuple_id(currentAccessMap, isl_dim_in);
      newAccessMap = isl_map_set_tuple_id(newAccessMap, isl_dim_in, Id);

      if (!isl_map_has_equal_space(currentAccessMap, newAccessMap)) {
        errs() << "JScop file contains access function with incompatible "
               << "dimensions\n";
        isl_map_free(currentAccessMap);
        isl_map_free(newAccessMap);
        return false;
      }

      auto NewAccessDomain = isl_map_domain(isl_map_copy(newAccessMap));
      auto CurrentAccessDomain = isl_map_domain(isl_map_copy(currentAccessMap));

      NewAccessDomain =
          isl_set_intersect_params(NewAccessDomain, S.getContext());
      CurrentAccessDomain =
          isl_set_intersect_params(CurrentAccessDomain, S.getContext());

      if (isl_set_is_subset(CurrentAccessDomain, NewAccessDomain) ==
          isl_bool_false) {
        errs() << "Mapping not defined for all iteration domain elements\n";
        isl_set_free(CurrentAccessDomain);
        isl_set_free(NewAccessDomain);
        isl_map_free(currentAccessMap);
        isl_map_free(newAccessMap);
        return false;
      }

      isl_set_free(CurrentAccessDomain);
      isl_set_free(NewAccessDomain);

      if (!isl_map_is_equal(newAccessMap, currentAccessMap)) {
        // Statistics.
        ++NewAccessMapFound;
        newAccessStrings.push_back(accesses.asCString());
        MA->setNewAccessRelation(newAccessMap);
      } else {
        isl_map_free(newAccessMap);
      }
      isl_map_free(currentAccessMap);
      memoryAccessIdx++;
    }
    statementIdx++;
  }

  return false;
}
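// Hedged sketch, not taken from the sources above: the shape of a JScop
// document as implied by the fields runOnScop() actually reads, namely a
// "context" set string and, per statement, a "schedule" string plus an
// "accesses" array whose entries carry a "relation" string. Statement and
// array names (Stmt_for_body, MemRef_A, MemRef_B) are illustrative only, and
// the order of statements and accesses must match the iteration order of the
// loops above, since they are matched purely by index.
static const char *ExampleJScop = R"json({
  "context": "[n] -> { : n >= 0 }",
  "statements": [
    {
      "schedule": "[n] -> { Stmt_for_body[i] -> [i] }",
      "accesses": [
        { "relation": "[n] -> { Stmt_for_body[i] -> MemRef_A[i] }" },
        { "relation": "[n] -> { Stmt_for_body[i] -> MemRef_B[i] }" }
      ]
    }
  ]
})json";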
/// @brief Generate LLVM-IR for the SCoP @p S.
bool runOnScop(Scop &S) override {
  AI = &getAnalysis<IslAstInfo>();

  // Check if we created an isl_ast root node; otherwise exit.
  isl_ast_node *AstRoot = AI->getAst();
  if (!AstRoot)
    return false;

  LI = &getAnalysis<LoopInfoWrapperPass>().getLoopInfo();
  DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();
  SE = &getAnalysis<ScalarEvolutionWrapperPass>().getSE();
  DL = &S.getRegion().getEntry()->getParent()->getParent()->getDataLayout();
  RI = &getAnalysis<RegionInfoPass>().getRegionInfo();
  Region *R = &S.getRegion();
  assert(!R->isTopLevelRegion() && "Top level regions are not supported");

  ScopAnnotator Annotator;
  Annotator.buildAliasScopes(S);

  simplifyRegion(R, DT, LI, RI);
  assert(R->isSimple());
  BasicBlock *EnteringBB = S.getRegion().getEnteringBlock();
  assert(EnteringBB);
  PollyIRBuilder Builder = createPollyIRBuilder(EnteringBB, Annotator);

  IslNodeBuilder NodeBuilder(Builder, Annotator, this, *DL, *LI, *SE, *DT, S);

  // Only build the run-time condition and parameters _after_ having
  // introduced the conditional branch. This is important as the conditional
  // branch will guard the original scop from new induction variables that
  // the SCEVExpander may introduce while code generating the parameters and
  // which may introduce scalar dependences that prevent us from correctly
  // code generating this scop.
  BasicBlock *StartBlock = executeScopConditionally(S, this, Builder.getTrue());
  auto SplitBlock = StartBlock->getSinglePredecessor();

  // First generate code for the hoisted invariant loads and, transitively,
  // the parameters they reference. Afterwards, generate code for the
  // remaining parameters, which might reference the hoisted loads. Finally,
  // build the runtime check, which might reference both hoisted loads and
  // parameters. If the hoisting fails we have to bail and execute the
  // original code.
  Builder.SetInsertPoint(SplitBlock->getTerminator());
  if (!NodeBuilder.preloadInvariantLoads()) {
    auto *FalseI1 = Builder.getFalse();
    auto *SplitBBTerm = Builder.GetInsertBlock()->getTerminator();
    SplitBBTerm->setOperand(0, FalseI1);
    auto *StartBBTerm = StartBlock->getTerminator();
    Builder.SetInsertPoint(StartBBTerm);
    Builder.CreateUnreachable();
    StartBBTerm->eraseFromParent();
    isl_ast_node_free(AstRoot);
  } else {
    NodeBuilder.addParameters(S.getContext());

    Value *RTC = buildRTC(Builder, NodeBuilder.getExprBuilder());
    Builder.GetInsertBlock()->getTerminator()->setOperand(0, RTC);
    Builder.SetInsertPoint(&StartBlock->front());

    NodeBuilder.create(AstRoot);

    NodeBuilder.finalizeSCoP(S);
    fixRegionInfo(EnteringBB->getParent(), R->getParent());
  }

  verifyGeneratedFunction(S, *EnteringBB->getParent());

  // Mark the function such that we run additional cleanup passes on this
  // function (e.g. mem2reg to rediscover phi nodes).
  Function *F = EnteringBB->getParent();
  F->addFnAttr("polly-optimized");
  return true;
}