void ASTCondTerm::Print(Vector<char>& trunk) const
{
    trunk.Pop().Push<3>("`-");
    switch( sel.ptr->ID() ){
    case ASTBaseTerm_:
        std::printf("%sBaseTerm %p\n", trunk.Data(), sel.base);
        trunk.Back(1) = ' ';
        trunk.Back(2) = ' ';
        sel.base->Print(trunk);
        break;
    case ASTCondExpr_:
        std::printf("%sCondExpr %p\n", trunk.Data(), sel.cond);
        trunk.Back(1) = ' ';
        trunk.Back(2) = ' ';
        sel.cond->Print(trunk);
        break;
    default:
        std::printf("ClassID %d\n", sel.ptr->ID());
        assert(0);
        break;
    }
    trunk.Pop(3).Push('\0');
}
void ASTPathElemExpr::Print(Vector<char>& trunk) const
{
    trunk.Pop().Push<3>("|-");
    if( index != 0 ){
        std::printf("%sIndexOp %p ", trunk.Data(), index);
        assert(index->intv);
        index->intv->Print();
        std::putchar('\n');
    }
    trunk.Back(2) = '`'; // the one before last one
    switch( sel.ptr->ID() ){
    case ASTModelElemExpr_:
        std::printf("%sModelElemExpr %p ", trunk.Data(), sel.model);
        if( sel.model->ident ){
            assert(sel.model->ident);
            sel.model->ident->Print();
            std::putchar('\n');
        }
        trunk.Back(1) = ' ';
        trunk.Back(2) = ' ';
        sel.model->Print(trunk);
        break;
    case ASTFuncExpr_:
        std::printf("%sFuncExpr %p ", trunk.Data(), sel.func);
        assert(sel.func->ident);
        sel.func->ident->Print();
        std::putchar('\n');
        trunk.Back(1) = ' ';
        trunk.Back(2) = ' ';
        sel.func->Print(trunk);
        break;
    case ASTRecurPathExpr_:
        std::printf("%sRecurPathExpr %p %d\n", trunk.Data(), sel.path, sel.path->plusplus);
        trunk.Back(1) = ' ';
        trunk.Back(2) = ' ';
        sel.path->Print(trunk);
        break;
    case ASTConstExpr_:
        std::printf("%sConstExpr %p\n", trunk.Data(), sel.constt);
        trunk.Back(1) = ' ';
        trunk.Back(2) = ' ';
        assert(sel.constt);
        sel.constt->Print(trunk);
        break;
    default:
        std::printf("ClassID: %d\n", sel.ptr->ID());
        assert(0);
        break;
    }
    trunk.Pop(3).Push('\0');
}
void ASTConstExpr::Print(Vector<char>& trunk) const
{
    trunk.Pop().Push<3>("`-");
    switch( sel.ptr->ID() ){
    case ASTComplexLit_:
        std::printf("%sComplexLit %p\n", trunk.Data(), sel.complex_);
        trunk.Back(1) = ' ';
        trunk.Back(2) = ' ';
        sel.complex_->Print(trunk);
        break;
    case ASTVarExpr_:
        std::printf("%sVarExpr %p ", trunk.Data(), sel.var);
        assert(sel.var->ident);
        sel.var->ident->Print();
        std::putchar('\n');
        break;
    case ASTMetaVarExpr_:
        std::printf("%sMetaVarExpr %p ", trunk.Data(), sel.meta);
        assert(sel.meta->ident);
        sel.meta->ident->Print();
        std::putchar('\n');
        break;
    default:
        std::printf("ClassID %d\n", sel.ptr->ID());
        assert(0);
        break;
    }
    trunk.Pop(3).Push('\0');
}
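// Hedged illustration (not part of the sources above): the Print methods keep a
// shared "trunk" buffer of prefix characters ("|-", "`-", spaces) that is pushed
// before recursing and restored afterwards, which is what produces the indented
// tree output. A minimal self-contained sketch of the same idea, using
// std::string in place of the project's Vector<char>:
#include <cstdio>
#include <string>

struct DemoNode {
    const char *name;
    DemoNode *left, *right;

    void Print(std::string &trunk, bool last) const {
        std::printf("%s%s%s\n", trunk.c_str(), last ? "`-" : "|-", name);
        trunk += last ? "  " : "| ";       // extend the prefix for children
        if (left)  left->Print(trunk, right == nullptr);
        if (right) right->Print(trunk, true);
        trunk.erase(trunk.size() - 2);     // restore the prefix on the way out
    }
};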
// compute the points in the CFG reachable from the entry point.
void GetEntryReachable(BlockCFG *cfg)
{
  // worklist items are reachable points whose outgoing edges have
  // not been examined
  Vector<PPoint> worklist;

  PPoint entry = cfg->GetEntryPoint();
  entry_reach_table->Insert(entry);
  worklist.PushBack(entry);

  while (!worklist.Empty()) {
    PPoint back = worklist.Back();
    worklist.PopBack();

    const Vector<PEdge*>& outgoing = cfg->GetOutgoingEdges(back);
    for (size_t oind = 0; oind < outgoing.Size(); oind++) {
      PEdge *edge = outgoing[oind];
      PPoint next = edge->GetTarget();

      // already did this target
      if (entry_reach_table->Lookup(next))
        continue;

      entry_reach_table->Insert(next);
      worklist.PushBack(next);
    }
  }
}
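// Hedged sketch (the names below are illustrative, not from this codebase): the
// same worklist pattern as GetEntryReachable, written against a plain adjacency
// list. Each point enters the reachable set exactly once, so the traversal runs
// in O(points + edges).
#include <cstddef>
#include <set>
#include <vector>

std::set<std::size_t> ReachableFrom(const std::vector<std::vector<std::size_t> > &succ,
                                    std::size_t entry)
{
    std::set<std::size_t> reached;
    std::vector<std::size_t> worklist;
    reached.insert(entry);
    worklist.push_back(entry);
    while (!worklist.empty()) {
        std::size_t point = worklist.back();
        worklist.pop_back();
        for (std::size_t i = 0; i < succ[point].size(); i++) {
            std::size_t next = succ[point][i];
            if (reached.insert(next).second)   // newly reached
                worklist.push_back(next);
        }
    }
    return reached;
}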
XdbInfo& GetDatabaseInfo(const uint8_t *name, bool do_create)
{
  Assert(!cleared_databases);
  String *name_str = String::Make((const char*) name);

  for (size_t dind = 0; dind < databases.Size(); dind++) {
    if (databases[dind].name == name_str) {
      XdbInfo &info = databases[dind];

      // create the database if we previously did a non-create access.
      if (do_create && !info.xdb->Exists()) {
        info.xdb->Create();
        if (info.xdb->HasError()) {
          logout << "ERROR: Corrupt database " << (const char*) name << endl;
          info.xdb->Truncate();
        }
      }

      return info;
    }
  }

  Xdb *xdb = new Xdb((const char*) name, do_create, false, false);
  if (xdb->HasError()) {
    logout << "ERROR: Corrupt database " << (const char*) name << endl;
    xdb->Truncate();
  }

  XdbInfo info;
  info.name = name_str;
  info.xdb = xdb;
  databases.PushBack(info);

  return databases.Back();
}
// marks the points in cfg which are isomorphic to points in the loop_cfg
// invoked by cfg at the specified edge. code in a syntactic loop body
// will be reflected in CFGs for both the loop and its parent if it may
// reach both the recursive loop edge and a loop exit point. this common
// code will be isomorphic between the two CFGs.
void GetLoopIsomorphicPoints(BlockCFG *cfg, PEdge *loop_edge,
                             BlockCFG *loop_cfg)
{
  // mapping from points in cfg to isomorphic points in loop_cfg.
  PPointListHash remapping;

  // worklist items are isomorphic points whose outgoing edges have not
  // been examined.
  Vector<PPoint> worklist;

  PPoint target = loop_edge->GetTarget();
  remapping.Insert(target, loop_cfg->GetEntryPoint());
  cfg->AddLoopIsomorphic(target);
  worklist.PushBack(target);

  while (!worklist.Empty()) {
    PPoint cfg_point = worklist.Back();
    worklist.PopBack();

    PPoint loop_point = remapping.LookupSingle(cfg_point);

    const Vector<PEdge*> &cfg_outgoing = cfg->GetOutgoingEdges(cfg_point);
    const Vector<PEdge*> &loop_outgoing = loop_cfg->GetOutgoingEdges(loop_point);

    for (size_t eind = 0; eind < cfg_outgoing.Size(); eind++) {
      PEdge *edge = cfg_outgoing[eind];
      PPoint target = edge->GetTarget();

      // check for an existing remapping entry. some isomorphic points have
      // multiple incoming edges. we don't need to check all such incoming
      // edges; if any edge is isomorphic, they all will be.
      if (remapping.Lookup(target, false))
        continue;

      // look for an equivalent outgoing edge from the loop.
      PPoint loop_target = 0;
      for (size_t lind = 0; lind < loop_outgoing.Size(); lind++) {
        PEdge *loop_edge = loop_outgoing[lind];
        if (PEdge::CompareInner(edge, loop_edge) == 0) {
          loop_target = loop_edge->GetTarget();
          break;
        }
      }

      if (!loop_target) {
        Assert(edge->IsAssume());
        continue;
      }

      remapping.Insert(target, loop_target);
      cfg->AddLoopIsomorphic(target);
      worklist.PushBack(target);
    }
  }
}
void ASTRecurPathExpr::Print(Vector<char>& trunk) const
{
    trunk.Pop().Push<3>("`-");
    std::printf("%sPathExpr %p\n", trunk.Data(), path);
    trunk.Back(1) = ' ';
    trunk.Back(2) = ' ';
    path->Print(trunk);
    trunk.Pop(3).Push('\0');
}
void ASTCondExpr::Print(Vector<char>& trunk) const
{
    trunk.Pop().Push<3>("`-");
    std::printf("%sOrExpr %p\n", trunk.Data(), or_);
    trunk.Back(1) = ' ';
    trunk.Back(2) = ' ';
    or_->Print(trunk);
    trunk.Pop(3).Push('\0');
}
// fill in the body of loophead. points in the body are those which
// are reachable from loophead over non-backedges, and which themselves
// reach a backedge for loophead. note that in the case of loop nesting,
// a point may be contained in the body of multiple loops.
// if any irreducible edges are found (edges incoming to a body point
// other than loophead whose source is not in the body), those edges
// are added to irreducible_edges
void GetLoopBody(BlockCFG *cfg, PPoint loophead,
                 Vector<PEdge*> *irreducible_edges)
{
  Vector<PPoint> *body_list = body_list_table->Lookup(loophead, true);
  Assert(body_list->Empty());

  // worklist items are points which reach a loop backedge but whose
  // incoming edges have not yet been examined.
  Vector<PPoint> worklist;

  const Vector<PEdge*> &head_incoming = cfg->GetIncomingEdges(loophead);
  for (size_t iind = 0; iind < head_incoming.Size(); iind++) {
    PEdge *edge = head_incoming[iind];
    PPoint source = edge->GetSource();

    if (backedge_table->Lookup(edge)) {
      Assert(reach_table->Lookup(PPointPair(loophead, source)));
      if (!body_table->Insert(PPointPair(loophead, source))) {
        body_list->PushBack(source);
        worklist.PushBack(source);
      }
    }
  }

  // this should only be called on loops that have actual backedges
  Assert(!worklist.Empty());

  while (!worklist.Empty()) {
    PPoint back = worklist.Back();
    worklist.PopBack();

    if (back == loophead)
      continue;

    const Vector<PEdge*> &incoming = cfg->GetIncomingEdges(back);
    for (size_t iind = 0; iind < incoming.Size(); iind++) {
      PEdge *edge = incoming[iind];
      PPoint source = edge->GetSource();

      if (reach_table->Lookup(PPointPair(loophead, source))) {
        if (!body_table->Insert(PPointPair(loophead, source))) {
          body_list->PushBack(source);
          worklist.PushBack(source);
        }
      }
      else if (entry_reach_table->Lookup(source)) {
        // the source is not reachable from the loophead.
        // this is an irreducible edge.
        irreducible_edges->PushBack(edge);
      }
    }
  }
}
void ASTBaseTerm::Print(Vector<char>& trunk) const
{
    trunk.Pop().Push<3>("|-");
    std::printf("%sPathExpr %p\n", trunk.Data(), lpath);
    trunk.Back(1) = ' ';
    lpath->Print(trunk);
    trunk.Back(1) = '-';
    std::printf("%sRelOp %p ", trunk.Data(), op);
    op->op->Print();
    std::putchar('\n');
    trunk.Back(2) = '`';
    std::printf("%sPathExpr %p\n", trunk.Data(), rpath);
    trunk.Back(1) = ' ';
    trunk.Back(2) = ' ';
    rpath->Print(trunk);
    trunk.Pop(3).Push('\0');
}
void ASTAndExpr::Print(Vector<char>& trunk) const
{
    trunk.Pop().Push<3>("|-");
    for(std::size_t i = 0; i + 1 < terms.Size(); ++i){
        std::printf("%sCondTerm %p\n", trunk.Data(), terms[i]);
        trunk.Back(1) = ' ';
        terms[i]->Print(trunk);
    }
    if( ! terms.Empty() ){
        trunk.Back(2) = '`';
        std::printf("%sCondTerm %p\n", trunk.Data(), terms.Back());
        trunk.Back(1) = ' ';
        trunk.Back(2) = ' ';
        terms.Back()->Print(trunk);
    }else{
        assert(0);
    }
    trunk.Pop(3).Push('\0');
}
bool LuaScript::PushLuaFunction(lua_State* L, const String& functionName)
{
    Vector<String> splitNames = functionName.Split('.');

    String currentName = splitNames.Front();
    lua_getglobal(L, currentName.CString());

    if (splitNames.Size() > 1)
    {
        for (unsigned i = 0; i < splitNames.Size() - 1; ++i)
        {
            if (i)
            {
                currentName = currentName + "." + splitNames[i];
                lua_getfield(L, -1, splitNames[i].CString());
                lua_replace(L, -2);
            }
            if (!lua_istable(L, -1))
            {
                lua_pop(L, 1);
                lua_pushstring(L, ("Could not find Lua table: Table name = '" + currentName + "'").CString());
                return false;
            }
        }

        currentName = currentName + "." + splitNames.Back();
        lua_getfield(L, -1, splitNames.Back().CString());
        lua_replace(L, -2);
    }

    if (!lua_isfunction(L, -1))
    {
        lua_pop(L, 1);
        lua_pushstring(L, ("Could not find Lua function: Function name = '" + currentName + "'").CString());
        return false;
    }

    return true;
}
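// Hedged usage sketch (the call site and function name below are hypothetical;
// error handling is simplified): dotted names resolve through nested tables, so
// "mylib.util.clamp" first looks up the global "mylib", then the fields "util"
// and "clamp", leaving either the function or an error message on the Lua stack.
void CallClampExample(LuaScript* script, lua_State* L)
{
    if (!script->PushLuaFunction(L, "mylib.util.clamp"))
    {
        lua_pop(L, 1);           // pop the pushed error message
        return;
    }
    lua_pushnumber(L, 5.0);
    lua_pushnumber(L, 0.0);
    lua_pushnumber(L, 1.0);
    lua_pcall(L, 3, 1, 0);       // 3 arguments, 1 result
    lua_pop(L, 1);               // discard the result (or the error message)
}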
void ASTFuncExpr::Print(Vector<char>& trunk) const
{
    trunk.Pop().Push<3>("|-");
    for(std::size_t i = 0; i + 1 < paths.Size(); ++i){
        std::printf("%sPathExpr %p\n", trunk.Data(), paths[i]);
        trunk.Back(1) = ' ';
        paths[i]->Print(trunk);
    }
    if( ! paths.Empty() ){
        trunk.Back(2) = '`';
        std::printf("%sPathExpr %p\n", trunk.Data(), paths.Back());
        trunk.Back(1) = ' ';
        trunk.Back(2) = ' ';
        paths.Back()->Print(trunk);
    }else{
        assert(0);
    }
    trunk.Pop(3).Push('\0');
}
void ASTNodeCastExpr::Print(Vector<char>& trunk) const
{
    trunk.Pop().Push<3>("|-");
    for(std::size_t i = 0; i < paths.Size(); ++i){
        trunk.Back(1) = '-';
        std::printf("%sPathDecl %p ", trunk.Data(), paths[i]);
        assert(paths[i]->ident);
        paths[i]->ident->Print();
        std::putchar('\n');
        trunk.Back(1) = ' ';
        paths[i]->Print(trunk);
    }
    trunk.Back(2) = '`';
    std::printf("%sCondExpr %p\n", trunk.Data(), cond);
    trunk.Back(1) = ' ';
    trunk.Back(2) = ' ';
    cond->Print(trunk);
    trunk.Pop(3).Push('\0');
}
bool LuaScript::PushScriptFunction(const String& functionName, bool silentIfNotFound)
{
    Vector<String> splitedNames = functionName.Split('.');

    String currentName = splitedNames.Front();
    lua_getglobal(luaState_, currentName.CString());

    if (splitedNames.Size() > 1)
    {
        if (!lua_istable(luaState_, -1))
        {
            LOGERROR("Could not find Lua table: Table name = '" + currentName + "'");
            return false;
        }

        for (unsigned i = 1; i < splitedNames.Size() - 1; ++i)
        {
            currentName = currentName + "." + splitedNames[i];
            lua_getfield(luaState_, -1, splitedNames[i].CString());
            if (!lua_istable(luaState_, -1))
            {
                LOGERROR("Could not find Lua table: Table name = '" + currentName + "'");
                return false;
            }
        }

        currentName = currentName + "." + splitedNames.Back().CString();
        lua_getfield(luaState_, -1, splitedNames.Back().CString());
    }

    if (!lua_isfunction(luaState_, -1))
    {
        if (!silentIfNotFound)
            LOGERROR("Could not find Lua function: Function name = '" + currentName + "'");
        return false;
    }

    return true;
}
void ASTPathExpr::Print(Vector<char>& trunk) const
{
    trunk.Pop().Push<3>("|-");
    for(std::size_t i = 0; i + 1 < elems.Size(); ++i){
        trunk.Back(1) = '-';
        std::printf("%sPathElemExpr %p\n", trunk.Data(), elems[i]);
        trunk.Back(1) = ' ';
        elems[i]->Print(trunk);
    }
    trunk.Back(1) = '-';
    if( ! elems.Empty() ){
        trunk.Back(2) = '`'; // the one before last one
        std::printf("%sPathElemExpr %p\n", trunk.Data(), elems.Back());
        trunk.Back(2) = ' ';
        trunk.Back(1) = ' ';
        assert(elems.Back());
        elems.Back()->Print(trunk);
    }else{
        assert(0);
    }
    trunk.Pop(3).Push('\0');
}
Variant Spline::LinearInterpolation(const Vector<Variant>& knots, float t) const
{
    if (knots.Size() < 2)
        return Variant::EMPTY;
    else
    {
        if (t >= 1.f)
            return knots.Back();

        int originIndex = Clamp((int)(t * (knots.Size() - 1)), 0, (int)(knots.Size() - 2));
        t = fmodf(t * (knots.Size() - 1), 1.f);
        return LinearInterpolation(knots[originIndex], knots[originIndex + 1], t);
    }
}
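// Hedged worked example (values are illustrative): with 4 knots and t = 0.6,
// t * (knots.Size() - 1) = 1.8, so originIndex = Clamp(1, 0, 2) = 1 and the
// local parameter becomes fmodf(1.8f, 1.f) = 0.8f, i.e. the result lies 80% of
// the way from knots[1] to knots[2].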
void ASTModelElemExpr::Print(Vector<char>& trunk) const
{
    trunk.Pop().Push<3>("|-");
    if( type ){
        if( label == 0 && node == 0 ){
            trunk.Back(2) = '`';
        }
        std::printf("%sTypeCastExpr %p ", trunk.Data(), type);
        assert(type->ident);
        type->ident->Print();
        std::putchar('\n');
    }
    if( label ){
        if( node == 0 ){
            trunk.Back(2) = '`';
        }
        std::printf("%sNodeLabelExpr %p ", trunk.Data(), label);
        assert(label->ident);
        label->ident->Print();
        std::putchar('\n');
    }
    if( node ){
        trunk.Back(2) = '`';
        std::printf("%sNodeCastExpr %p\n", trunk.Data(), node);
        trunk.Back(1) = ' ';
        trunk.Back(2) = ' ';
        node->Print(trunk);
    }
    assert( type || label || node || ident );
    trunk.Pop(3).Push('\0');
}
// get the set of points reachable from loophead over paths
// that do not go through a backedge. if loophead itself is
// reachable, it is irreducible and those new edges to it are added
// as backedges. return value is true iff the loop is irreducible.
bool GetLoopReachable(BlockCFG *cfg, PPoint loophead)
{
  // worklist items are points in reach_table whose outgoing edges
  // have not been examined
  Vector<PPoint> worklist;

  if (!entry_reach_table->Lookup(loophead))
    return false;

  reach_table->Insert(PPointPair(loophead, loophead));
  worklist.PushBack(loophead);

  bool found_irreducible = false;

  while (!worklist.Empty()) {
    PPoint back = worklist.Back();
    worklist.PopBack();

    const Vector<PEdge*>& outgoing = cfg->GetOutgoingEdges(back);
    for (size_t oind = 0; oind < outgoing.Size(); oind++) {
      PEdge *edge = outgoing[oind];
      PPoint next = edge->GetTarget();

      if (backedge_table->Lookup(edge))
        continue;

      if (next == loophead) {
        // we're in an irreducible loop. add the new edge to backedge_table.
        backedge_table->Insert(edge);
        found_irreducible = true;
        continue;
      }

      if (!reach_table->Insert(PPointPair(loophead, next)))
        worklist.PushBack(next);
    }
  }

  return found_irreducible;
}
void TopoSortCFG(BlockCFG *cfg)
{
  // can't topo sort a CFG that might have loops.
  Assert(cfg->GetLoopHeadCount() == 0);

  // map from old CFG points to the new points in the topo order. we can only
  // add a new point once we've added all its predecessors.
  PPointListHash remapping;

  // points in the remapping, in the order to add them to the CFG.
  Vector<Location*> new_points;

  // map from new points back to original CFG points.
  Vector<PPoint> old_points;

  // worklist items are the points where the sources of incoming edges have
  // already been added to the remapping, but the point itself has not.
  Vector<PPoint> worklist;

  PPoint entry_point = cfg->GetEntryPoint();
  PPoint exit_point = cfg->GetExitPoint();

  // seed the worklist.
  worklist.PushBack(entry_point);

  while (!worklist.Empty()) {
    // pick the point from the worklist with the minimum line number.
    // if there is code like:
    //   if (x)
    //     a;
    //   else
    //     b;
    // we could add either a or b to the remapping first, but we want to add
    // a first. the ordering of points is used for naming loops, and we want
    // this ordering to be deterministic and map back to the code predictably.
    size_t best_index = 0;
    size_t best_line = cfg->GetPointLocation(worklist[0])->Line();

    for (size_t ind = 1; ind < worklist.Size(); ind++) {
      size_t new_line = cfg->GetPointLocation(worklist[ind])->Line();
      if (new_line < best_line) {
        best_index = ind;
        best_line = new_line;
      }
    }

    PPoint point = worklist[best_index];
    worklist[best_index] = worklist.Back();
    worklist.PopBack();

    Assert(!remapping.Lookup(point, false));

    Location *loc = cfg->GetPointLocation(point);
    loc->IncRef();
    new_points.PushBack(loc);
    old_points.PushBack(point);
    remapping.Insert(point, new_points.Size());

    const Vector<PEdge*> &outgoing = cfg->GetOutgoingEdges(point);
    for (size_t oind = 0; oind < outgoing.Size(); oind++) {
      PEdge *edge = outgoing[oind];
      PPoint target = edge->GetTarget();

      // this can happen if there are multiple edges from the worklist point
      // to the target, e.g. 'if (x) {}'. not going to happen much.
      if (worklist.Contains(target))
        continue;

      Assert(!remapping.Lookup(target, false));

      // we can add the target to the worklist if it has no incoming edges
      // from points not in the remapping.
      bool missing_incoming = false;

      const Vector<PEdge*> &incoming = cfg->GetIncomingEdges(target);
      for (size_t iind = 0; iind < incoming.Size(); iind++) {
        PEdge *edge = incoming[iind];
        PPoint source = edge->GetSource();

        if (!remapping.Lookup(source, false)) {
          missing_incoming = true;
          break;
        }
      }

      if (!missing_incoming)
        worklist.PushBack(target);
    }
  }

  // this assert will fail if either the CFG contains cycles, or if there are
  // nodes unreachable from the start. neither of these cases should be
  // possible here.
  Assert(new_points.Size() == cfg->GetPointCount());
  Assert(old_points.Size() == cfg->GetPointCount());

  // remap all the edges. this is also done so that the edges will be
  // in topological order according to their source points.
  Vector<PEdge*> new_edges;
  for (size_t pind = 0; pind < old_points.Size(); pind++) {
    const Vector<PEdge*> &edges = cfg->GetOutgoingEdges(old_points[pind]);
    for (size_t eind = 0; eind < edges.Size(); eind++) {
      PEdge *edge = edges[eind];
      PPoint new_source = remapping.LookupSingle(edge->GetSource());
      PPoint new_target = remapping.LookupSingle(edge->GetTarget());
      PEdge *new_edge = PEdge::ChangeEdge(edge, new_source, new_target);
      new_edges.PushBack(new_edge);
    }
  }

  // clear out the initial CFG.
  cfg->ClearBody();

  // add the new points, edges, annotations.
  for (size_t pind = 0; pind < new_points.Size(); pind++)
    cfg->AddPoint(new_points[pind]);
  for (size_t eind = 0; eind < new_edges.Size(); eind++)
    cfg->AddEdge(new_edges[eind]);

  // set the new entry point. this had better be the first point in the order.
  PPoint new_entry_point = remapping.LookupSingle(entry_point);
  Assert(new_entry_point == 1);
  cfg->SetEntryPoint(new_entry_point);

  if (exit_point) {
    // set the new exit point. this had better be the last point in the order.
    PPoint new_exit_point = remapping.LookupSingle(exit_point);
    Assert(new_exit_point == new_points.Size());
    cfg->SetExitPoint(new_exit_point);
  }
}
void TrimUnreachable(BlockCFG *cfg, bool flatten_skips)
{
  // can't flatten skips if there might be loops in the CFG.
  Assert(!flatten_skips || cfg->GetLoopHeadCount() == 0);

  // receives the locations of the new points and edges of the CFG. we will
  // fill these in, then replace wholesale the old points/edges on the CFG.
  Vector<Location*> new_points;
  Vector<PEdge*> new_edges;
  Vector<LoopHead> new_loop_heads;

  Vector<PPoint> worklist;

  // get the set of points reachable from CFG entry.
  // worklist items are points in entry_reachable whose outgoing edges
  // have not been examined.
  PPointHash entry_reachable;

  PPoint entry = cfg->GetEntryPoint();
  entry_reachable.Insert(entry);
  worklist.PushBack(entry);

  while (!worklist.Empty()) {
    PPoint back = worklist.Back();
    worklist.PopBack();

    const Vector<PEdge*> &outgoing = cfg->GetOutgoingEdges(back);
    for (size_t oind = 0; oind < outgoing.Size(); oind++) {
      PEdge *edge = outgoing[oind];
      PPoint next = edge->GetTarget();

      if (!entry_reachable.Lookup(next)) {
        entry_reachable.Insert(next);
        worklist.PushBack(next);
      }
    }
  }

  // get the set of points which reach the CFG exit.
  // worklist items are points in exit_reaches whose incoming edges
  // have not been examined.
  PPointHash exit_reaches;

  PPoint exit = cfg->GetExitPoint();
  exit_reaches.Insert(exit);
  worklist.PushBack(exit);

  while (!worklist.Empty()) {
    PPoint back = worklist.Back();
    worklist.PopBack();

    const Vector<PEdge*> &incoming = cfg->GetIncomingEdges(back);
    for (size_t iind = 0; iind < incoming.Size(); iind++) {
      PEdge *edge = incoming[iind];
      PPoint prev = edge->GetSource();

      if (!exit_reaches.Lookup(prev)) {
        exit_reaches.Insert(prev);
        worklist.PushBack(prev);
      }
    }
  }

  // make sure we include the entry regardless of whether the function
  // has a path from entry to exit.
  exit_reaches.Insert(entry);
  if (flatten_skips)
    exit_reaches.Insert(FollowSkipEdges(cfg, entry));

  // map from old points to corresponding new points. only defined for
  // points that are in both entry_reachable and exit_reaches,
  // and that do not have outgoing skip edges (if flatten_skips is set).
  PPointListHash remapping;

  // map from some old p0 to another old p1 where p0 connects to p1 by
  // skip edges and p1 has no outgoing skips. empty if flatten_skips is
  // not set. only defined if remapping is defined for p1.
  PPointListHash skip_remapping;

  for (PPoint point = 1; point <= cfg->GetPointCount(); point++) {
    if (entry_reachable.Lookup(point) && exit_reaches.Lookup(point)) {
      // if this is just the source of some skip edges flatten them out.
      // the target of the skips will be defined by remapping since
      // there can be only one outgoing skip edge from a point and
      // thus all paths from point pass through target_point; if point
      // reaches the exit then so does target_point.
      if (flatten_skips) {
        PPoint target_point = FollowSkipEdges(cfg, point);
        if (target_point != point) {
          skip_remapping.Insert(point, target_point);
          // don't add anything to remapping for point
          continue;
        }
      }

      Location *loc = cfg->GetPointLocation(point);
      loc->IncRef();
      new_points.PushBack(loc);
      PPoint new_point = new_points.Size();

      remapping.Insert(point, new_point);
    }
  }

  for (size_t eind = 0; eind < cfg->GetEdgeCount(); eind++) {
    PEdge *edge = cfg->GetEdge(eind);
    PPoint source = edge->GetSource();
    PPoint target = edge->GetTarget();

    if (skip_remapping.Lookup(source, false))
      continue;

    // flatten any skips after the target point
    Vector<PPoint> *skip_target_list = skip_remapping.Lookup(target, false);
    if (skip_target_list) {
      Assert(skip_target_list->Size() == 1);
      target = skip_target_list->At(0);
    }

    Vector<PPoint> *new_source_list = remapping.Lookup(source, false);
    Vector<PPoint> *new_target_list = remapping.Lookup(target, false);

    if (new_source_list && new_target_list) {
      Assert(new_source_list->Size() == 1);
      Assert(new_target_list->Size() == 1);
      PPoint new_source = new_source_list->At(0);
      PPoint new_target = new_target_list->At(0);

      PEdge *new_edge = PEdge::ChangeEdge(edge, new_source, new_target);
      new_edges.PushBack(new_edge);
    }
  }

  for (size_t lind = 0; lind < cfg->GetLoopHeadCount(); lind++) {
    const LoopHead &head = cfg->GetLoopHead(lind);

    // don't check skip_remapping because we don't allow skip flattening
    // when the CFG still has loops in it
    Vector<PPoint> *new_point_list = remapping.Lookup(head.point, false);

    if (new_point_list) {
      Assert(new_point_list->Size() == 1);
      LoopHead new_head(new_point_list->At(0), head.end_location);
      if (head.end_location)
        head.end_location->IncRef();
      new_loop_heads.PushBack(new_head);
    }
  }

  // clear out the initial CFG.
  cfg->ClearBody();

  // add the new points, edges, loop heads.
  for (size_t pind = 0; pind < new_points.Size(); pind++)
    cfg->AddPoint(new_points[pind]);
  for (size_t eind = 0; eind < new_edges.Size(); eind++)
    cfg->AddEdge(new_edges[eind]);
  for (size_t lind = 0; lind < new_loop_heads.Size(); lind++)
    cfg->AddLoopHead(new_loop_heads[lind].point,
                     new_loop_heads[lind].end_location);

  // set the new entry and exit points of the CFG.

  // the entry may be connected to skip edges
  Vector<PPoint> *skip_entry_list = skip_remapping.Lookup(entry, false);
  if (skip_entry_list) {
    Assert(skip_entry_list->Size() == 1);
    entry = skip_entry_list->At(0);
  }
  PPoint new_entry = remapping.LookupSingle(entry);

  PPoint new_exit = 0;
  Vector<PPoint> *new_exit_list = remapping.Lookup(exit, false);
  if (new_exit_list) {
    Assert(new_exit_list->Size() == 1);
    new_exit = new_exit_list->At(0);
  }

  cfg->SetEntryPoint(new_entry);
  cfg->SetExitPoint(new_exit);
}
void ExtractPropertyInfo(const String& functionName, const String& declaration,
    Vector<PropertyInfo>& propertyInfos)
{
    String propertyName = functionName.Substring(4);
    PropertyInfo* info = 0;

    for (unsigned k = 0; k < propertyInfos.Size(); ++k)
    {
        if (propertyInfos[k].name_ == propertyName)
        {
            info = &propertyInfos[k];
            break;
        }
    }

    if (!info)
    {
        propertyInfos.Resize(propertyInfos.Size() + 1);
        info = &propertyInfos.Back();
        info->name_ = propertyName;
    }

    if (functionName.Contains("get_"))
    {
        info->read_ = true;

        // Extract type from the return value
        Vector<String> parts = declaration.Split(' ');
        if (parts.Size())
        {
            if (parts[0] != "const")
                info->type_ = parts[0];
            else if (parts.Size() > 1)
                info->type_ = parts[1];
        }

        // If get method has parameters, it is indexed
        if (!declaration.Contains("()"))
        {
            info->indexed_ = true;
            info->type_ += "[]";
        }

        // Sanitate the reference operator away
        info->type_.Replace("&", "");
    }

    if (functionName.Contains("set_"))
    {
        info->write_ = true;

        if (info->type_.Empty())
        {
            // Extract type from parameters
            unsigned begin = declaration.Find(',');
            if (begin == String::NPOS)
                begin = declaration.Find('(');
            else
                info->indexed_ = true;

            if (begin != String::NPOS)
            {
                ++begin;
                unsigned end = declaration.Find(')');
                if (end != String::NPOS)
                {
                    info->type_ = declaration.Substring(begin, end - begin);
                    // Sanitate const & reference operator away
                    info->type_.Replace("const ", "");
                    info->type_.Replace("&in", "");
                    info->type_.Replace("&", "");
                }
            }
        }
    }
}
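// Hedged worked example (the declaration string is made up): for
// functionName "get_name" with declaration "const String& get_name() const",
// propertyName becomes "name", read_ is set, the type is taken from the token
// after "const" ("String&"), the "()" check leaves indexed_ false, and the
// trailing Replace("&", "") reduces the recorded type to "String".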
// determine whether loophead is a reducible loop with backedges in cfg.
// fill in dominate_table with the points dominated by loophead,
// and add as backedges any edge going to loophead which is itself
// dominated by loophead. return true if any backedges were found.
bool GetLoopBackedges(BlockCFG *cfg, PPoint loophead)
{
  // compute the nodes reachable from the entry point other than
  // through start. the dominated points are the dual of this set.

  // points reachable from the start according to the above criteria
  PPointHash reachable;

  // worklist items are points in reachable whose outgoing edges have
  // not been examined
  Vector<PPoint> worklist;

  if (!entry_reach_table->Lookup(loophead))
    return false;

  PPoint entry = cfg->GetEntryPoint();
  reachable.Insert(entry);
  worklist.PushBack(entry);

  while (!worklist.Empty()) {
    PPoint back = worklist.Back();
    worklist.PopBack();

    const Vector<PEdge*>& outgoing = cfg->GetOutgoingEdges(back);
    for (size_t oind = 0; oind < outgoing.Size(); oind++) {
      PEdge *edge = outgoing[oind];
      PPoint next = edge->GetTarget();

      if (next == loophead)
        continue;

      // already did this target
      if (reachable.Lookup(next))
        continue;

      reachable.Insert(next);
      worklist.PushBack(next);
    }
  }

  // compute the set of dominated points. this is the difference
  // between the points reachable from the CFG entry, and the points
  // in the reach table we just computed.
  for (PPoint point = 1; point <= cfg->GetPointCount(); point++) {
    if (!reachable.Lookup(point) && entry_reach_table->Lookup(point))
      dominate_table->Insert(PPointPair(loophead, point));
  }

  // backedges on the loophead are incoming edges whose source is
  // dominated by the loophead
  bool found_backedge = false;

  const Vector<PEdge*> &incoming = cfg->GetIncomingEdges(loophead);
  for (size_t eind = 0; eind < incoming.Size(); eind++) {
    PEdge *edge = incoming[eind];
    if (dominate_table->Lookup(PPointPair(loophead, edge->GetSource()))) {
      backedge_table->Insert(edge);
      found_backedge = true;
    }
  }

  return found_backedge;
}
void SplitLoops(BlockCFG *base_cfg, Vector<BlockCFG*> *result_cfg_list)
{
  // get the CFG which will eventually become the loop-free outer function CFG.
  BlockCFG *func_cfg;

  if (base_cfg->GetId()->Kind() == B_FunctionWhole) {
    // make an ID for the outer function body.
    Variable *function_info = base_cfg->GetId()->BaseVar();
    function_info->IncRef();
    BlockId *outer_id = BlockId::Make(B_Function, function_info);

    // make the function CFG by cloning the base CFG with the new ID.
    func_cfg = BlockCFG::Make(outer_id);
    CopyCFGLocationsVariables(base_cfg, func_cfg);
    CopyCFGPointsEdges(base_cfg, func_cfg);
  }
  else if (base_cfg->GetId()->Kind() == B_Function) {
    // this call came from a recursive invocation of SplitLoops after we
    // removed an irreducible loop from the function.
    func_cfg = base_cfg;
  }
  else {
    // just destructively update the original CFG.
    base_cfg->IncRef();
    func_cfg = base_cfg;
  }

  // add a new entry point with a skip edge to the original entry point.
  // loop splitting breaks if the entry point is marked as a loop head.
  PPoint entry = func_cfg->GetEntryPoint();
  Location *loc = func_cfg->GetPointLocation(entry);
  loc->IncRef();
  PPoint new_entry = func_cfg->AddPoint(loc);
  PEdge *skip_edge = PEdge::MakeSkip(new_entry, entry);
  func_cfg->AddEdge(skip_edge);
  func_cfg->SetEntryPoint(new_entry);

  // setup the tables we need to do loop splitting.
  SetupTables();

  // compute the points reachable from the entry point.
  GetEntryReachable(func_cfg);

  // the real loops in the program with back edges.
  Vector<PPoint> loops;

  for (size_t lind = 0; lind < func_cfg->GetLoopHeadCount(); lind++) {
    PPoint head = func_cfg->GetLoopHead(lind).point;
    if (GetLoopBackedges(func_cfg, head))
      loops.PushBack(head);
  }

  // compute reachability and check for irreducible loops.
  for (size_t lind = 0; lind < func_cfg->GetLoopHeadCount(); lind++) {
    const LoopHead &head = func_cfg->GetLoopHead(lind);
    if (GetLoopReachable(func_cfg, head.point)) {
      // loop is irreducible.

      // get the loop's irreducible edges.
      Vector<PEdge*> irreducible_edges;
      GetLoopBody(func_cfg, head.point, &irreducible_edges);
      Assert(!irreducible_edges.Empty());

      // clone the loop's body and remove the irreducible edges.
      ReduceLoop(func_cfg, head.point, irreducible_edges);

      // try again on the modified CFG.
      CleanupTables();
      SplitLoops(func_cfg, result_cfg_list);
      return;
    }
  }

  // there are no irreducible loops at this point so this should
  // never have any entries added.
  Vector<PEdge*> irreducible_edges;

  // compute loop bodies.
  for (size_t lind = 0; lind < loops.Size(); lind++) {
    PPoint head = loops[lind];
    GetLoopBody(func_cfg, head, &irreducible_edges);
    Assert(irreducible_edges.Empty());
  }

  // construct a tree of all the loops. loop A contains loop B
  // if A != B and the head of B is in the body of A.
  PPointListHash loop_tree;

  // split off all the loops in the CFG. make sure we split inner loops
  // before outer, so that the Loop edges on inner loops will appear in
  // the split body for outer loops.
  while (!loops.Empty()) {
    // find a candidate loop to split. this is one whose loop children
    // have already been split off and are no longer in the loops list.
    PPoint loophead = 0;

    for (size_t lind = 0; lind < loops.Size(); lind++) {
      bool is_viable = true;

      for (size_t xlind = 0; xlind < loops.Size(); xlind++) {
        if (xlind == lind)
          continue;
        Assert(loops[lind] != loops[xlind]);

        if (body_table->Lookup(PPointPair(loops[lind], loops[xlind]))) {
          is_viable = false;
          break;
        }
      }

      if (is_viable) {
        loophead = loops[lind];
        loops[lind] = loops.Back();
        loops.PopBack();
        break;
      }
    }
    Assert(loophead);

    BlockCFG *loop_cfg = SplitSingleLoop(loophead, loops, func_cfg);
    result_cfg_list->PushBack(loop_cfg);
  }

  // clear out the loopheads, we don't want them around anymore.
  func_cfg->ClearLoopHeads();

  // trim unreachable points in the function CFG (i.e. bodies of loops that
  // now redirect to point zero), collapse skips and topo sort.
  TrimUnreachable(func_cfg, true);
  TopoSortCFG(func_cfg);

  result_cfg_list->PushBack(func_cfg);
  CleanupTables();

  // fill in any loop parents for the inner loop CFGs, and make sure the
  // result CFGs are ordered correctly, with inner loops before outer loops
  // and the outer function.
  for (size_t cind = 0; cind < result_cfg_list->Size(); cind++) {
    BlockCFG *cfg = result_cfg_list->At(cind);

    for (size_t eind = 0; eind < cfg->GetEdgeCount(); eind++) {
      if (PEdgeLoop *edge = cfg->GetEdge(eind)->IfLoop()) {
        BlockId *target_id = edge->GetLoopId();
        bool found_target = false;

        for (size_t xcind = 0; xcind < cind; xcind++) {
          BlockCFG *xcfg = result_cfg_list->At(xcind);

          if (xcfg->GetId() == target_id) {
            found_target = true;

            cfg->GetId()->IncRef();
            BlockPPoint where(cfg->GetId(), edge->GetSource());
            xcfg->AddLoopParent(where);

            // mark the isomorphic points in the parent CFG.
            GetLoopIsomorphicPoints(cfg, edge, xcfg);
            break;
          }
        }
        Assert(found_target);
      }
    }
  }

  // assign the final names to the various loop CFGs.
  FillLoopNames(func_cfg, "loop", *result_cfg_list);
}
void Node::SetObjectAttributeAnimation(const String& name, ValueAnimation* attributeAnimation,
    WrapMode wrapMode, float speed)
{
    Vector<String> names = name.Split('/');

    // Only attribute name
    if (names.Size() == 1)
        SetAttributeAnimation(name, attributeAnimation, wrapMode, speed);
    else
    {
        // Name must be in the following format: "#0/#1/@component#0/attribute"
        Node* node = this;

        unsigned i = 0;
        for (; i < names.Size() - 1; ++i)
        {
            if (names[i].Front() != '#')
                break;

            unsigned index = ToInt(names[i].Substring(1, names[i].Length() - 1));
            node = node->GetChild(index);
            if (!node)
            {
                LOGERROR("Could not find node by name " + name);
                return;
            }
        }

        if (i == names.Size() - 1)
        {
            node->SetAttributeAnimation(names.Back(), attributeAnimation, wrapMode, speed);
            return;
        }

        if (i != names.Size() - 2 || names[i].Front() != '@')
        {
            LOGERROR("Invalid name " + name);
            return;
        }

        String componentName = names[i].Substring(1, names[i].Length() - 1);
        Vector<String> componentNames = componentName.Split('#');
        if (componentNames.Size() == 1)
        {
            Component* component = node->GetComponent(StringHash(componentNames.Front()));
            if (!component)
            {
                LOGERROR("Could not find component by name " + name);
                return;
            }

            component->SetAttributeAnimation(names.Back(), attributeAnimation, wrapMode, speed);
        }
        else
        {
            unsigned index = ToInt(componentNames[1]);
            PODVector<Component*> components;
            node->GetComponents(components, StringHash(componentNames.Front()));
            if (index >= components.Size())
            {
                LOGERROR("Could not find component by name " + name);
                return;
            }

            components[index]->SetAttributeAnimation(names.Back(), attributeAnimation, wrapMode, speed);
        }
    }
}
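// Hedged usage sketch (the node layout, component index and animation object are
// hypothetical): the slash-separated path selects children by "#index" and a
// component by "@TypeName" or "@TypeName#index" before the attribute name.
void AnimateChildLightRange(Node* root, ValueAnimation* anim)
{
    // animate the "Range" attribute of the second Light component on the
    // first child of root's first child
    root->SetObjectAttributeAnimation("#0/#0/@Light#1/Range", anim, WM_LOOP, 1.0f);
}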
void Polyhedron::Clip(const Plane& plane, Vector<Vector3>& clippedVertices, Vector<Vector3>& outFace)
{
    clippedVertices.Clear();

    for (size_t i = 0; i < faces.Size(); ++i)
    {
        Vector<Vector3>& face = faces[i];
        Vector3 lastVertex;
        float lastDistance = 0.0f;

        outFace.Clear();

        for (size_t j = 0; j < face.Size(); ++j)
        {
            float distance = plane.Distance(face[j]);
            if (distance >= 0.0f)
            {
                if (lastDistance < 0.0f)
                {
                    float t = lastDistance / (lastDistance - distance);
                    Vector3 clippedVertex = lastVertex + t * (face[j] - lastVertex);
                    outFace.Push(clippedVertex);
                    clippedVertices.Push(clippedVertex);
                }

                outFace.Push(face[j]);
            }
            else
            {
                if (lastDistance >= 0.0f && j != 0)
                {
                    float t = lastDistance / (lastDistance - distance);
                    Vector3 clippedVertex = lastVertex + t * (face[j] - lastVertex);
                    outFace.Push(clippedVertex);
                    clippedVertices.Push(clippedVertex);
                }
            }

            lastVertex = face[j];
            lastDistance = distance;
        }

        // Recheck the distances of the last and first vertices and add the final clipped vertex if applicable
        float distance = plane.Distance(face[0]);
        if ((lastDistance < 0.0f && distance >= 0.0f) || (lastDistance >= 0.0f && distance < 0.0f))
        {
            float t = lastDistance / (lastDistance - distance);
            Vector3 clippedVertex = lastVertex + t * (face[0] - lastVertex);
            outFace.Push(clippedVertex);
            clippedVertices.Push(clippedVertex);
        }

        // Do not keep faces which are less than triangles
        if (outFace.Size() < 3)
            outFace.Clear();

        face = outFace;
    }

    // Remove empty faces
    for (size_t i = faces.Size() - 1; i < faces.Size(); --i)
    {
        if (faces[i].IsEmpty())
            faces.Erase(i);
    }

    // Create a new face from the clipped vertices. First remove duplicates
    for (size_t i = 0; i < clippedVertices.Size(); ++i)
    {
        for (size_t j = clippedVertices.Size() - 1; j > i; --j)
        {
            if (clippedVertices[j].Equals(clippedVertices[i]))
                clippedVertices.Erase(j);
        }
    }

    if (clippedVertices.Size() > 3)
    {
        outFace.Clear();

        // Start with the first vertex
        outFace.Push(clippedVertices.Front());
        clippedVertices.Erase(0);

        while (!clippedVertices.IsEmpty())
        {
            // Then add the vertex which is closest to the last added
            const Vector3& lastAdded = outFace.Back();
            float bestDistance = M_INFINITY;
            size_t bestIndex = 0;

            for (size_t i = 0; i < clippedVertices.Size(); ++i)
            {
                float distance = (clippedVertices[i] - lastAdded).LengthSquared();
                if (distance < bestDistance)
                {
                    bestDistance = distance;
                    bestIndex = i;
                }
            }

            outFace.Push(clippedVertices[bestIndex]);
            clippedVertices.Erase(bestIndex);
        }

        faces.Push(outFace);
    }
}
void DecalSet::GetFace(Vector<PODVector<DecalVertex> >& faces, Drawable* target, unsigned batchIndex,
    unsigned i0, unsigned i1, unsigned i2, const unsigned char* positionData, const unsigned char* normalData,
    const unsigned char* skinningData, unsigned positionStride, unsigned normalStride, unsigned skinningStride,
    const Frustum& frustum, const Vector3& decalNormal, float normalCutoff)
{
    bool hasNormals = normalData != 0;
    bool hasSkinning = skinned_ && skinningData != 0;

    const Vector3& v0 = *((const Vector3*)(&positionData[i0 * positionStride]));
    const Vector3& v1 = *((const Vector3*)(&positionData[i1 * positionStride]));
    const Vector3& v2 = *((const Vector3*)(&positionData[i2 * positionStride]));

    // Calculate unsmoothed face normal if no normal data
    Vector3 faceNormal = Vector3::ZERO;
    if (!hasNormals)
    {
        Vector3 dist1 = v1 - v0;
        Vector3 dist2 = v2 - v0;
        faceNormal = (dist1.CrossProduct(dist2)).Normalized();
    }

    const Vector3& n0 = hasNormals ? *((const Vector3*)(&normalData[i0 * normalStride])) : faceNormal;
    const Vector3& n1 = hasNormals ? *((const Vector3*)(&normalData[i1 * normalStride])) : faceNormal;
    const Vector3& n2 = hasNormals ? *((const Vector3*)(&normalData[i2 * normalStride])) : faceNormal;
    const unsigned char* s0 = hasSkinning ? &skinningData[i0 * skinningStride] : (const unsigned char*)0;
    const unsigned char* s1 = hasSkinning ? &skinningData[i1 * skinningStride] : (const unsigned char*)0;
    const unsigned char* s2 = hasSkinning ? &skinningData[i2 * skinningStride] : (const unsigned char*)0;

    // Check if face is angled too far away from the decal normal
    if (decalNormal.DotProduct((n0 + n1 + n2) / 3.0f) < normalCutoff)
        return;

    // Check if face is culled completely by any of the planes
    for (unsigned i = PLANE_FAR; i < NUM_FRUSTUM_PLANES; --i)
    {
        const Plane& plane = frustum.planes_[i];
        if (plane.Distance(v0) < 0.0f && plane.Distance(v1) < 0.0f && plane.Distance(v2) < 0.0f)
            return;
    }

    faces.Resize(faces.Size() + 1);
    PODVector<DecalVertex>& face = faces.Back();

    if (!hasSkinning)
    {
        face.Reserve(3);
        face.Push(DecalVertex(v0, n0));
        face.Push(DecalVertex(v1, n1));
        face.Push(DecalVertex(v2, n2));
    }
    else
    {
        const float* bw0 = (const float*)s0;
        const float* bw1 = (const float*)s1;
        const float* bw2 = (const float*)s2;
        const unsigned char* bi0 = s0 + sizeof(float) * 4;
        const unsigned char* bi1 = s1 + sizeof(float) * 4;
        const unsigned char* bi2 = s2 + sizeof(float) * 4;
        unsigned char nbi0[4];
        unsigned char nbi1[4];
        unsigned char nbi2[4];

        // Make sure all bones are found and that there is room in the skinning matrices
        if (!GetBones(target, batchIndex, bw0, bi0, nbi0) || !GetBones(target, batchIndex, bw1, bi1, nbi1) ||
            !GetBones(target, batchIndex, bw2, bi2, nbi2))
            return;

        face.Reserve(3);
        face.Push(DecalVertex(v0, n0, bw0, nbi0));
        face.Push(DecalVertex(v1, n1, bw1, nbi1));
        face.Push(DecalVertex(v2, n2, bw2, nbi2));
    }
}
void ProcessPreprocessedFile(istream &in, const char *input_file)
{
  Assert(g_working_directory && g_base_directory);

  // read our entire input into a buffer.
  Buffer file_buf;
  ReadInStream(in, &file_buf);

  // table with the contents read so far for each file.
  HashTable<String*,FileData,String> file_table;

  // name of the original file which was being parsed, from which we will
  // get whether this is a C or a C++ file.
  const char *base_file = NULL;

  char *pos = (char*) file_buf.base;
  FileData *cur_data = NULL;

  while (*pos) {
    if (*pos == '#' && *(pos+1) == ' ') {
      // found a preprocessor line directive.
      // currently we just parse lines of the format '# line "file" ...'

      // eat the '#'
      pos++;

      char *end_pos = NULL;
      long line = strtol(pos, &end_pos, 10);
      Assert(line >= 1);
      Assert(end_pos);

      char *start_quote = strchr(end_pos, '"');
      Assert(start_quote);
      char *end_quote = strchr(start_quote + 1, '"');
      Assert(end_quote);
      char *end_line = strchr(pos, '\n');
      Assert(end_line && end_line > end_quote);

      if (base_file == NULL) {
        // just mark the first # line directive we see as the base file.
        base_file = start_quote + 1;
      }

      *end_quote = 0;
      String *file = String::Make(NormalizeFile(start_quote + 1));

      Vector<FileData> *entries = file_table.Lookup(file, true);

      if (entries->Empty()) {
        entries->PushBack(FileData());
        cur_data = &entries->Back();
        cur_data->contents = new Buffer();
        cur_data->cur_line = 1;
      }
      else {
        Assert(entries->Size() == 1);
        cur_data = &entries->Back();
      }

      // insert enough newlines so that we've caught up with the # line.
      while ((long) cur_data->cur_line < line) {
        cur_data->contents->Append("\n", 1);
        cur_data->cur_line++;
      }

      // in some cases the # line directive will actually rewind the
      // apparent line to an earlier line, e.g.:
      //   # 250 "foo.c"
      //   something
      //   else
      //   # 250 "foo.c"
      //   finally
      // in this case we'll replace the earlier newlines with spaces,
      // getting the string 'something else finally' at line 250.
      char *last_pos = (char*) cur_data->contents->pos - 1;
      while ((long) cur_data->cur_line > line) {
        while (*last_pos != '\n')
          last_pos--;
        *last_pos = ' ';
        cur_data->cur_line--;
      }

      pos = end_line + 1;
      continue;
    }

    if (cur_data == NULL) {
      // we can get here if the input is not actually preprocessed.
      // make data for the input file itself.
      Assert(input_file);
      String *file = String::Make(NormalizeFile(input_file));

      Vector<FileData> *entries = file_table.Lookup(file, true);
      Assert(entries->Empty());

      entries->PushBack(FileData());
      cur_data = &entries->Back();
      cur_data->contents = new Buffer();
      cur_data->cur_line = 1;
    }

    char *end_line = strchr(pos, '\n');
    if (end_line == NULL) {
      cur_data->contents->Append(pos, strlen(pos));
      break;
    }

    cur_data->contents->Append(pos, end_line - pos + 1);
    cur_data->cur_line++;
    pos = end_line + 1;
  }

  // figure out which of the files we read need to be added to the dbs.
  Transaction *query = new Transaction();
  HashIterate(file_table)
    QueryFileData(query, file_table.ItKey(), file_table.ItValueSingle());
  SubmitTransaction(query);

  // add those files we found in the query.
  Transaction *dump = new Transaction();
  HashIterate(file_table)
    DumpFileData(query, dump, file_table.ItKey(), file_table.ItValueSingle());
  SubmitTransaction(dump);

  delete query;
  delete dump;

  HashIterate(file_table)
    delete file_table.ItValueSingle().contents;
}
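// Hedged illustration (file names and line numbers are made up): given
// preprocessor output such as
//   # 10 "foo.c"
//   int x;
//   # 12 "foo.c"
//   int y;
// the loop above accumulates a per-file buffer in which "int x;" sits on
// line 10 and "int y;" on line 12, padding with newlines when a directive
// jumps forward and joining lines with spaces when it rewinds.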