void MT::CollectStats () { GiSTpath path; path.MakeRoot (); GiSTnode *node = ReadNode (path); if (!node->IsLeaf()) { int maxLevel = node->Level(); double *radii = new double[maxLevel]; int *pages = new int[maxLevel]; for (int i=0; i<maxLevel; i++) { pages[i] = 0; radii[i] = 0; } TruePredicate truePredicate; GiSTlist<GiSTentry*> list = node->Search(truePredicate); // retrieve all the entries in this node double overlap = ((MTnode *)node)->Overlap(); double totalOverlap = overlap; delete node; while (!list.IsEmpty()) { GiSTentry *entry = list.RemoveFront (); path.MakeChild (entry->Ptr()); node = ReadNode (path); overlap = ((MTnode *)node)->Overlap(); totalOverlap += overlap; pages[node->Level()]++; radii[node->Level()] += ((MTkey *) entry->Key())->MaxRadius(); GiSTlist<GiSTentry*> newlist; if (!node->IsLeaf()) { newlist = node->Search(truePredicate); // recurse to next level } while (!newlist.IsEmpty()) { list.Append (newlist.RemoveFront ()); } path.MakeParent (); delete entry; delete node; } // output the results cout << "Level:\tPages:\tAverage_Radius:"<<endl; int totalPages = 1; // for the root for (int i=maxLevel-1; i>=0; i--) { totalPages += pages[i]; cout << i << ":\t" << pages[i] << "\t" << radii[i]/pages[i] << endl; } cout << "TotalPages:\t" << totalPages << endl; cout << "LeafPages:\t" << pages[0] << endl; cout << "TotalOverlap:\t" << (float)totalOverlap << endl; delete []radii; delete []pages; } else { delete node; } }
// Recursively rebuild one BSP subtree from the already-positioned stream.
// Returns NULL when the node record cannot be read; otherwise the caller
// owns the returned subtree.
CBSPNode *CGBSFileType::ReadNode(FILE *pFile, CBSPNode *pParent)
{
	SGBSFileNodeInfo info;
	if (fread(&info, sizeof(info), 1, pFile) != 1) {
		return NULL;   // truncated file: no node here
	}
	CPlane plane(CVector(info.vNormal[0], info.vNormal[1], info.vNormal[2]), info.vDist);
	CBSPNode *pNode = new CBSPNode(pParent, plane, info.nContent);
	if (pNode->content == CONTENT_NODE) {
		// interior node: the two children follow immediately in the stream
		pNode->pChild[0] = ReadNode(pFile, pNode);
		pNode->pChild[1] = ReadNode(pFile, pNode);
	}
	return pNode;
}
// Insert (key, rid) into a leaf page, keeping its keys sorted.
// The record locator is appended to the last data page; the leaf then stores
// the key together with that (page, slot) locator.
// Returns OK or the first error code reported by a helper.
t_rc INXM_IndexHandle::InsertIntoLeaf(STORM_PageHandle leafPageHandle, void *key, const REM_RecordID &rid) {
	t_rc rc;

	/* Find correct point for the new key. */
	INXM_InitPageHeader initPageHeader;
	LoadInitHeaders(leafPageHandle, initPageHeader);

	INXM_Node node;
	int insertPoint = 0;
	rc = ReadNode(leafPageHandle, insertPoint, node);
	if (rc != OK) { return rc; }
	while (insertPoint < initPageHeader.nItems && KeyCmp(node.key, key) < 0) {
		insertPoint++;
		rc = ReadNode(leafPageHandle, insertPoint, node);
		if (rc != OK) { return rc; }
	}

	/* Open needed space at correct point. */
	int point = INXM_INITPAGEHEADER_SIZE + INXM_NODEPAGEHEADER_SIZE + insertPoint*(INXM_NODE_SIZE+this->inxmFileHeader.attrLength);
	char *leafData;
	rc = leafPageHandle.GetDataPtr(&leafData);
	if (rc != OK) { return rc; }
	/* fix: the shifted region overlaps its destination, so memmove must be
	 * used — memcpy on overlapping buffers is undefined behavior. */
	memmove(&leafData[point + INXM_NODE_SIZE+this->inxmFileHeader.attrLength], &leafData[point],
	        (initPageHeader.nItems-insertPoint)*(INXM_NODE_SIZE+this->inxmFileHeader.attrLength));

	/* Write new data. */
	int slot;
	STORM_PageHandle lastDataPageHandle;
	rc = this->sfh.GetPage(this->inxmFileHeader.lastDataPage, lastDataPageHandle);
	if (rc != OK) { return rc; }
	rc = WriteData(lastDataPageHandle, rid, slot);
	if (rc != OK) { return rc; }

	/* Write new node. */
	rc = WriteNode(leafPageHandle, insertPoint, key, this->inxmFileHeader.lastDataPage, slot);
	if (rc != OK) { return rc; }
	return(OK);
}
// Insert a separator key into an internal (non-leaf) node after a child
// split.
//   parentPage  - page holding the internal node
//   left_index  - slot at which the new key belongs
//   key         - separator key to insert
//   rightPageID - page id of the new right child produced by the split
t_rc INXM_IndexHandle::InsertIntoNoLeaf(STORM_PageHandle parentPage, int left_index, void* key, int rightPageID) {
	INXM_InitPageHeader parentInitPageHeader;
	INXM_NodePageHeader parentNodePageHeader;
	LoadNodeHeaders(parentPage, parentInitPageHeader, parentNodePageHeader);

	// Past-the-end insert: only the node's rightmost-child ("next") pointer
	// needs to change, no entries have to move.
	if (left_index == parentInitPageHeader.nItems) {
		parentNodePageHeader.next = rightPageID;
		UpdateNodeHeaders(parentPage, parentInitPageHeader, parentNodePageHeader);
		return(OK);
	}

	// Grow the node by one slot (presumably WriteNode bumps nItems — TODO
	// confirm against its definition) ...
	WriteNode(parentPage, key, 0, 0);

	INXM_Node runNode;
	// ... then shift the existing entries one slot to the right, highest
	// slot first so nothing is overwritten before it is copied.
	for (int i = parentInitPageHeader.nItems; i > left_index; i--) {
		//Hack! we use the old nItems. We didnt update the header.
		ReadNode(parentPage, i-1, runNode);
		EditNode(parentPage, i, runNode);
	}

	// Store the new separator in the freed slot; its child pointer is the
	// new right page.
	runNode.key = key;
	runNode.left = rightPageID;
	runNode.slot = 0;
	EditNode(parentPage, left_index, runNode);
	return(OK);
}
// adjust the keys of node, which is used during the final phase of the BulkLoad algorithm void MT::AdjKeys (GiSTnode *node) { if (node->Path().IsRoot()) { return; } GiSTpath parentPath = node->Path(); parentPath.MakeParent (); GiSTnode *parentNode = ReadNode (parentPath); GiSTentry *parentEntry = parentNode->SearchPtr(node->Path().Page()); // parent entry assert (parentEntry != NULL); GiSTentry *unionEntry = node->Union(); unionEntry->SetPtr(node->Path().Page()); ((MTkey *) unionEntry->Key())->distance = ((MTkey *) parentEntry->Key())->distance; // necessary to keep track of the distance from the parent if (!parentEntry->IsEqual(*unionEntry)) { // replace this entry parentNode->DeleteEntry(parentEntry->Position()); parentNode->Insert(*unionEntry); WriteNode (parentNode); AdjKeys (parentNode); } delete unionEntry; delete parentEntry; delete parentNode; }
void GiST::Delete(const GiSTpredicate& pred) { GiSTcursor *c=Search(pred); int condensed; GiSTentry *e; do { if(c==NULL) return; e=c->Next(); GiSTpath path=c->Path(); delete c; if(e==NULL) return; // Read in the node that this belongs to GiSTnode *node=ReadNode(path); node->DeleteEntry(e->Position()); WriteNode(node); condensed=CondenseTree(node); delete node; if(condensed) { ShortenTree(); // because the tree changed, we need to search all over again! // XXX - this is inefficient! users may want to avoid condensing. c=Search(pred); } } while(e!=NULL); }
// Load and return the parent of `node`; ownership of the returned node
// passes to the caller, who must destroy it.
MTnode * MT::ParentNode (MTnode *node)
{
	GiSTpath parentPath = node->Path();
	parentPath.MakeParent ();
	return (MTnode *) ReadNode (parentPath);
}
bool MeshLoaderB3D::Load(const char* file) { _file = new lfFile; if( !_file->Open(file) ) return false; _mesh = new Mesh; lfStr head = ReadChunk(); int nB3DVersion = _file->ReadInt(); Sys_Printf("load b3d file %s, version %s %d\n", file, head.c_str(), nB3DVersion); while( CheckSize() ) { lfStr t = ReadChunk(); if (t == "TEXS") ReadTexs(); else if (t == "BRUS") ReadBrus(); else if (t == "NODE") _mesh->SetJoint(ReadNode()); ExitChunk(); } delete _file; _file = NULL; // The MESH chunk describes a mesh. // A mesh only has one VRTS chunk, but potentially many TRIS chunks. srfTriangles_t* tri = _mesh->GetGeometries(0); tri->numIndices = _indices.size(); _indices.set_free_when_destroyed(false); tri->indices = _indices.pointer(); return true; }
// split this M-tree into a list of trees having height level, which is used in the "splitting" phase of the BulkLoad algorithm
// nCreated is the number of created subtrees,
// level is the split level for the tree,
// children is the list of the parents of each subtree,
// name is the root for the subtrees names
// the return value is the list of splitted subtrees's names
GiSTlist<char *> * MT::SplitTree (int *nCreated, int level, GiSTlist<MTentry *> *parentEntries, const char *name)
{
	// Phase 1: descend level by level until the current frontier holds the
	// nodes sitting exactly at the requested split level.
	GiSTlist<MTnode *> *oldList = new GiSTlist<MTnode *>;  // upper level nodes
	MTnode *node = new MTnode;  // this is because the first operation on node is a delete
	GiSTpath path;
	path.MakeRoot ();
	oldList->Append((MTnode *) ReadNode(path));  // insert the root
	do {  // build the roots list
		GiSTlist<MTnode *> *newList = new GiSTlist<MTnode *>;  // lower level nodes
		while (!oldList->IsEmpty()) {
			delete node;  // delete the old node created by ReadNode
			node = oldList->RemoveFront();  // retrieve next node to be examined
			path = node->Path();
			for (int i=0; i<node->NumEntries(); i++) {  // append all its children to the new list
				path.MakeChild ((*node)[i].Ptr()->Ptr());
				newList->Append((MTnode *)ReadNode(path));
				path.MakeParent ();
			}
		}
		delete oldList;
		oldList = newList;
	} while (node->Level() > level);  // stop if we're at the split level
	delete node;

	// Phase 2: turn every frontier node into the root of a brand-new M-tree
	// file named "<name>.<i>", remembering its original parent entry.
	GiSTlist<char *> *newTreeNames = new GiSTlist<char *>;  // this is the results list
	while (!oldList->IsEmpty()) {  // now append each sub-tree to its root
		char newName[50];
		sprintf (newName, "%s.%i", name, ++(*nCreated));
		unlink (newName);  // if this M-tree already exists, delete it
		MT *newTree = new MT;
		newTree->Create(newName);  // create a new M-tree
		path.MakeRoot ();
		MTnode *rootNode = (MTnode *) newTree->ReadNode(path);  // read the root of the new tree
		node = oldList->RemoveFront();
		newTree->Append(rootNode, (MTnode *)node->Copy());  // append the current node to the root of new tree
		parentEntries->Append(node->ParentEntry());  // insert the original parent entry into the list
		newTreeNames->Append(strdup(newName));  // insert the new M-tree name into the list
		delete node;
		delete rootNode;
		delete newTree;
	}
	delete oldList;
	return newTreeNames;
}
// Parse one <pack> XML node: its attributes, dependency wiring, the list of
// contained <file> entries, and the <rev> revision log.
// Returns false when the type-specific ReadNode(XML) overload or a file
// entry fails.
bool cPack::ReadNode(const CStdString & node)
{
	Stamina::SXML XML;
	// Load the pack info...
	XML.loadSource(node);
	this->info = XML.getText("pack/info");
	// Prepare to read the attributes
	XML.prepareNode("pack");
	this->name = XML.getAttrib("name");
	this->url = XML.getAttrib("url");
	this->title = XML.getAttrib("title");
	this->dlUrl = XML.getAttrib("dlURL");
	this->path = XML.getAttrib("path");
	this->hidden = XML.getAttrib("hidden") == "1";
	this->required = XML.getAttrib("required") == "1";
	this->isNew = XML.getAttrib("new") == "1";
	this->needRestart = XML.getAttrib("restart") == "1";
	this->noinstall = XML.getAttrib("noinstall") == "1";
	this->recommended = XML.getAttrib("recommended") == "1";
	this->additional = XML.getAttrib("additional") == "1";
	this->target = cFile::ParseTarget(XML, cFile::targetAuto);
	this->searchPath = XML.getAttrib("searchpath");
	// this->system = XML.getAttrib("system") == "1";
	// The updater pack and anything living in the temp dir defaults to the
	// temporary target.
	if (this->target == cFile::targetAuto && ((this->name == "UPD2") || (this->GetPath().find("%konnektTemp%") == 0))) {
		this->target = cFile::targetTemp;
	}
	CStdString type = XML.getAttrib("type");
	this->type = type=="gz" ? nPackType::gz : type=="zip" ? nPackType::zip : type=="" ? nPackType::none : nPackType::unknown;
	type = XML.getAttrib("depends");
	if (!type.empty())
		Owner()->updatePrepare->AddDependants(this , type);
	// Non-temporary packs implicitly depend on the updater pack "UPD2".
	if (!this->IsTempTarget()) {
		Owner()->updatePrepare->AddDependant(this , "UPD2");
	}
	// The rest depends on the concrete pack type...
	if (!ReadNode(XML)) return false;
	CStdString item;
	// ... and now the contained files
	XML.loadSource(XML.getContent("pack").c_str());
	while (!(item = XML.getNode("file")).empty()) {
		XML.prepareNode("file" , true);
		Stamina::SXML::Position bck = XML.pos;  // ReadFileNode may move the cursor
		if (!this->ReadFileNode(XML)) break;
		XML.pos = bck;
		XML.next();
	}
	/* revLog */
	XML.restart();
	while (!(item = XML.getNode("rev")).empty()) {
		XML.prepareNode("rev" , true);
		revLog.push_back(cRevItem(XML));
		XML.next();
	}
	return true;
}
// Parse an entire VRML1 stream into this base object.
// Returns false if the processor is not positioned on a VRML1 file, if any
// top-level node fails to parse, or if the stream ends with a reported
// error rather than a clean EOF.
bool WRL1BASE::Read( WRLPROC& proc )
{
	if( proc.GetVRMLType() != VRML_V1 )
	{
		#ifdef DEBUG_VRML1
		do {
			std::ostringstream ostr;
			ostr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ << "\n";
			ostr << " * [BUG] no open file or file is not a VRML1 file";
			wxLogTrace( MASK_VRML, "%s\n", ostr.str().c_str() );
		} while( 0 );
		#endif

		return false;
	}

	// Note: according to the VRML1 specification, a file may contain
	// only one grouping node at the top level. The following code
	// supports non-conformant VRML1 files by processing all top level
	// nodes as if the vrml1_base were the equivalent of a vrml1_separator
	while( proc.Peek() )
	{
		size_t line, column;
		proc.GetFilePosData( line, column );  // remembered only for diagnostics

		if( !ReadNode( proc, this, NULL ) )
		{
			#if defined( DEBUG_VRML1 ) && ( DEBUG_VRML1 > 1 )
			do {
				std::ostringstream ostr;
				ostr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ << "\n";
				ostr << " * [INFO] bad file format; unexpected eof at line ";
				ostr << line << ", column " << column;
				wxLogTrace( MASK_VRML, "%s\n", ostr.str().c_str() );
			} while( 0 );
			#endif

			return false;
		}
	}

	// Peek() returned false: distinguish clean EOF from a stream error.
	if( !proc.eof() )
	{
		#if defined( DEBUG_VRML1 ) && ( DEBUG_VRML1 > 1 )
		do {
			std::ostringstream ostr;
			ostr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ << "\n";
			ostr << proc.GetError();
			wxLogTrace( MASK_VRML, "%s\n", ostr.str().c_str() );
		} while( 0 );
		#endif

		return false;
	}

	return true;
}
// Run a range query over the whole tree, starting from the root.
// `pages` (out) accumulates the number of nodes touched by the search.
GiSTlist<MTentry *> MT::RangeSearch (const MTquery& query, int *pages)
{
	GiSTpath rootPath;
	rootPath.MakeRoot ();
	MTnode *rootNode = (MTnode *) ReadNode (rootPath);
	GiSTlist<MTentry *> results = rootNode->RangeSearch(query, pages);
	delete rootNode;
	return results;
}
// return root level+1 (the height of the tree) // this is used in the "splitting" phase of the BulkLoad algorithm int MT::TreeHeight () const { GiSTpath path; path.MakeRoot (); GiSTnode *root = ReadNode (path); int i = root->Level(); delete root; return (i+1); }
// Load a GBS file: header, optional geometric (polygon) payload, then the
// BSP tree stored at m_Header.dwDataOffset.
//   pFileName      - file to open (binary)
//   ppBSPNode      - (optional out) receives the root of the loaded BSP tree
//   pGeometricData - (optional out) receives the polygon list, if present
// Returns false only when the file cannot be opened.
bool CGBSFileType::Load(const char *pFileName,CBSPNode **ppBSPNode,std::vector<CPolygon *> *pGeometricData)
{
	FILE *pFile=fopen(pFileName,"rb");
	if(pFile==NULL){return false;}
	// NOTE(review): bHeaderOk is computed but never consulted, and if the
	// header fread fails entirely, m_Header is used uninitialized by the
	// fseek below — confirm whether a bad header should abort the load.
	bool bHeaderOk=false;
	if(fread(&m_Header,sizeof(m_Header),1,pFile)==1)
	{
		if(memcmp(&m_Header.sMagic,GBS_FILE_MAGIC,GBS_FILE_MAGIC_LENGTH)==0 && m_Header.dwVersion<=GBS_FILE_VERSION)
		{
			bHeaderOk=true;
		}
		else
		{
			// unrecognized header: reset to defaults (data offset 0, no flags)
			m_Header.dwVersion=GBS_FILE_VERSION;
			m_Header.dwDataOffset=0;
			m_Header.dwFlags=0;
			memcpy(m_Header.sMagic,GBS_FILE_MAGIC,GBS_FILE_MAGIC_LENGTH);
		}
		// optional polygon payload precedes the BSP data
		if(pGeometricData && m_Header.dwFlags&GBS_FILE_FLAG_CONTAINS_GEOMETRIC_DATA)
		{
			unsigned int x=0,dwPolygonCount=0;
			if(fread(&dwPolygonCount,sizeof(dwPolygonCount),1,pFile)==1)
			{
				for(x=0;x<dwPolygonCount;x++)
				{
					unsigned int v=0,dwVertexCount=0;
					if(fread(&dwVertexCount,sizeof(dwVertexCount),1,pFile)!=1){break;}
					CPolygon *pPolygon=new CPolygon;
					pPolygon->m_nVertexes=dwVertexCount;
					if(pPolygon->m_nVertexes)
					{
						pPolygon->m_pVertexes=new CVector[pPolygon->m_nVertexes];
						for(v=0;v<dwVertexCount;v++)
						{
							if(fread(pPolygon->m_pVertexes[v].c,sizeof(pPolygon->m_pVertexes[v].c),1,pFile)!=1)
							{
								break;
							}
						}
						pPolygon->CalcPlane();
					}
					// NOTE(review): the polygon is stored even when a vertex
					// read broke out early — verify partial polygons are OK.
					pGeometricData->push_back(pPolygon);
				}
			}
		}
	}
	// jump to the BSP tree and deserialize it recursively
	fseek(pFile,m_Header.dwDataOffset,SEEK_SET);
	if(ppBSPNode){(*ppBSPNode)=ReadNode(pFile,NULL);}
	fclose(pFile);
	pFile=NULL;
	return true;
}
// Diagnostic dump: print the root page's headers and nodes, then the last
// data page's headers and data records.  Return codes of the helpers are
// intentionally ignored — this is debug-only output.
void INXM_IndexHandle::debug() {
	/* Read root page. */
	INXM_InitPageHeader initPageHeader;
	INXM_NodePageHeader nodePageHeader;
	LoadNodeHeaders(this->inxmFileHeader.treeRoot, initPageHeader, nodePageHeader);

	printf("Root stats\n");
	printf("initHeader: is_data:%d, nItems: %d \n", initPageHeader.isData, initPageHeader.nItems);
	printf("nodeHeader: isLeaf:%d, next: %d, parent: %d \n", nodePageHeader.isLeaf, nodePageHeader.next, nodePageHeader.parent);

	INXM_Node node;
	int runner = 0;
	// walk all node slots of the root page
	ReadNode(this->inxmFileHeader.treeRoot, runner, node);
	while (runner < initPageHeader.nItems) {
		// NOTE: node.key is printed as int — assumes an integer-keyed index
		printf("node %d::: key:%d, left:%d, slot:%d\n", runner, *(int *)node.key, node.left, node.slot);
		runner++;
		ReadNode(this->inxmFileHeader.treeRoot, runner, node);
	}

	/* Read lastData page. */
	runner = 0;
	STORM_PageHandle lastDataPageHandle;
	this->sfh.GetPage(this->inxmFileHeader.lastDataPage, lastDataPageHandle);

	INXM_Data data;
	INXM_InitPageHeader lastDataInitPageHeader;
	LoadInitHeaders(lastDataPageHandle, lastDataInitPageHeader);

	printf("Last Data stats\n");
	printf("initHeader: is_data:%d, nItems: %d \n", lastDataInitPageHeader.isData, lastDataInitPageHeader.nItems);

	// walk all data slots of the last data page
	ReadData(lastDataPageHandle, runner, data);
	while (runner < lastDataInitPageHeader.nItems) {
		printf("Data %d::: pageID:%d, slot:%d, nextPageID:%d, nextSlot:%d\n", runner, data.pageID, data.slot, data.nextPageID, data.nextSlot);
		runner++;
		ReadData(lastDataPageHandle, runner, data);
	}
}
// Collapse the root when it is an internal node with a single child: the
// child is rewritten as the new root and its old page is released.
void GiST::ShortenTree()
{
	GiSTpath path;
	// Shorten the tree if necessary (This should only be done if root actually changed!)
	path.MakeRoot();
	GiSTnode *root=ReadNode(path);
	if(!root->IsLeaf()&&root->NumEntries()==1) {
		path.MakeChild((*root)[0]->Ptr());
		GiSTnode *child=ReadNode(path);
		store->Deallocate(path.Page());   // free the child's old page; its contents move to the root
		child->SetSibling(0);             // the new root has no sibling
		child->Path().MakeRoot();
		WriteNode(child);                 // rewrite the child at the root position
		delete child;
	}
	delete root;
}
// Deserialize one Node (and, recursively, its children) from the stream.
// The node's serialized id is recorded in `map` so other parts of the file
// can be resolved back to this Node later.
void Model::ReadNode(std::fstream &file, Node * node, std::map<unsigned int, Node*> &map)
{
	unsigned int data;
	READ_FILE(data);             // the node's id
	map[data] = node;
	READ_SZ_FILE(node->meshes);  // presumably reads a count and resizes the vector — TODO confirm macro
	// NOTE(review): this guard reuses `data` (the node id), not the mesh
	// count — verify it is not meant to be node->meshes.size() > 0.
	if (data>0)
		READ_FILE_EX(&node->meshes[0], node->meshes.size() * sizeof(unsigned int));
	READ_SZ_FILE(node->children);
	for (unsigned i = 0; i < node->children.size(); i++)
		ReadNode(file, &node->children[i], map);
}
// Convenience overload: pin the page identified by pageID and read the node
// stored at `slot` into `node`.
t_rc INXM_IndexHandle::ReadNode(int pageID, int slot, INXM_Node &node) {
	STORM_PageHandle pageHandle;
	t_rc rc = this->sfh.GetPage(pageID, pageHandle);
	if (rc != OK) {
		return rc;
	}
	// delegate to the page-handle overload
	return ReadNode(pageHandle, slot, node);
}
/*
 * Read one branch of a Newick-format tree from fv: either a parenthesized
 * internal node or a tip, optionally followed by ":length" and a bracketed
 * "[parameter]".  Returns the node, or NULL on error (treeErrorMsg set).
 */
TNode *ReadBranch(FILE *fv, TTree *tree, int numNames, char **names)
{
	char ch;
	double len, param=0.0;
	TNode *node;

	ch=ReadToNextChar(fv);
	if (ch=='(') {	/* is a node */
		node=ReadNode(fv, tree, numNames, names, 1);
		ReadUntil(fv, ')', "Closing bracket");
		if (treeError)
			return NULL;
	} else {	/* is a tip */
		node=ReadTip(fv, ch, tree, numNames, names);
	}

	ch=ReadToNextChar(fv);
	if (ch==':') {
		/* once any branch lacks a length, none may have one (and vice versa) */
		if (tree->lengths==0) {
			sprintf(treeErrorMsg, "Some branches don't have branch lengths");
			return NULL;
		} else
			tree->lengths=1;
		if (fscanf(fv, "%lf", &len)!=1) {
			sprintf(treeErrorMsg, "Unable to read branch length");
			return NULL;
		}
		ch=ReadToNextChar(fv);
		if (ch=='[') {
			/* fix: the source had a mangled "&" here ("¶m"); %lf needs
			 * the address of param */
			if (fscanf(fv, "%lf", &param)!=1) {
				sprintf(treeErrorMsg, "Unable to read branch parameter");
				return NULL;
			}
			ReadUntil(fv, ']', "Close square bracket");
		} else
			ungetc(ch, fv);
	} else {
		if (tree->lengths==1) {
			sprintf(treeErrorMsg, "Some branches don't have branch lengths");
			return NULL;
		} else
			tree->lengths=0;
		len=0.0;
		ungetc(ch, fv);
	}
	node->length0=len;
	node->param=param;
	return node;
}
// Propagate key changes upward: make the parent's entry for `node` equal to
// the union of node's entries, splitting the parent when the replacement no
// longer fits, and recursing towards the root.
// If `parent` is non-NULL it points at the caller's already-loaded parent
// node (ownership stays with the caller); when NULL the parent is read —
// and freed — here.
void GiST::AdjustKeys (GiSTnode *node, GiSTnode **parent)
{
	if (node->Path().IsRoot()) {
		return;  // the root has no parent entry
	}
	GiSTnode *P;
	// Read in node's parent
	if (parent == NULL) {
		GiSTpath parent_path = node->Path();
		parent_path.MakeParent ();
		P = ReadNode (parent_path);
		parent = &P;
	} else {
		P = *parent;
	}
	// Get the old entry pointing to node
	GiSTentry *entry = P->SearchPtr(node->Path().Page());
	assert (entry != NULL);
	// Get union of node
	GiSTentry *actual = node->Union();
	WriteNode(node);  // added by myself for the splitted = false;
	actual->SetPtr(node->Path().Page());
	if (!entry->IsEqual(*actual)) {
		// replace the stale entry at the same position
		int pos = entry->Position();
		P->DeleteEntry(pos);
		P->InsertBefore(*actual, pos);
		// A split may be necessary.
		// XXX: should we do Forced Reinsert here too?
		if (P->IsOverFull(*store)) {
			Split (parent, *actual);
			// after the split, node may hang off a relocated parent page:
			// rebuild its path from the (possibly new) parent path
			GiSTpage page = node->Path().Page();
			node->Path() = P->Path();
			node->Path().MakeChild(page);
		} else {
			WriteNode (P);
			AdjustKeys (P, NULL);  // the parent's own entry may now be stale
		}
	}
	if (parent == &P) {  // we loaded the parent ourselves, so free it
		delete P;
	}
	delete actual;
	delete entry;
}
// Descend from `page` towards the leaves, always following the child with
// the minimum insertion penalty, until reaching the requested level (or a
// leaf).  The returned node is owned by the caller.
GiSTnode* GiST::ChooseSubtree(GiSTpage page, const GiSTentry &entry, int level)
{
	GiSTpath path;
	while (true) {
		path.MakeChild(page);
		GiSTnode *node = ReadNode(path);
		if (node->Level() == level || node->IsLeaf())
			return node;
		page = node->SearchMinPenalty(entry);
		delete node;   // not the target level: free it and keep descending
	}
}
/*
======================
ReadNodeList

Parses and creates an AINodeList_t from a token list
The token list pointer is modified to point to the beginning of the next node text block after reading
======================
*/
AIGenericNode_t *ReadNodeList( pc_token_list **tokenlist )
{
	AINodeList_t *list;
	pc_token_list *current = *tokenlist;

	// a node list is a brace-delimited sequence of child nodes
	if ( !expectToken( "{", &current, true ) )
	{
		return nullptr;
	}

	list = allocNode( AINodeList_t );

	while ( Q_stricmp( current->token.string, "}" ) )
	{
		AIGenericNode_t *node = ReadNode( &current );

		// capacity check happens after the read so the rejected node can be freed
		if ( node && list->numNodes >= MAX_NODE_LIST )
		{
			Log::Warn( "Max selector children limit exceeded at line %d", (*tokenlist)->token.line );
			FreeNode( node );
			FreeNodeList( list );
			*tokenlist = current;
			return nullptr;
		}
		else if ( node )
		{
			list->list[ list->numNodes ] = node;
			list->numNodes++;
		}

		// a failed child parse aborts the whole list
		if ( !node )
		{
			*tokenlist = current;
			FreeNodeList( list );
			return nullptr;
		}

		// tolerate a missing closing brace at end of input
		if ( !current )
		{
			*tokenlist = current;
			return ( AIGenericNode_t * ) list;
		}
	}

	// skip past the closing "}"
	*tokenlist = current->next;
	return ( AIGenericNode_t * ) list;
}
// Recursively populate the tree control from a catalog XML element:
// <Group> elements become folder items (recursed into), <Data> elements
// become leaf items carrying a DBItemInfo payload.
//   lvRootNode - XML element whose children are read
//   parent     - tree item the new items are appended under
//   cat        - (optional) catalog that collects the created item infos
void DBTreeCtrl::ReadNode(tinyxml2::XMLElement *lvRootNode, wxTreeItemId parent, DBCatalog *cat)
{
	wxString str, str2;
	long ival;
	int level;
	// NOTE(review): child->Attribute(...) returns null when an attribute is
	// missing — confirm the catalog schema guarantees these attributes.
	tinyxml2::XMLElement *child = lvRootNode->FirstChildElement();
	while (child)
	{
		if (strcmp(child->Name(), "Group") == 0)
		{
			str = child->Attribute("Name");
			wxTreeItemId item = AppendItem(parent, str, 1);
			DBItemInfo* item_data = new DBItemInfo;
			item_data->type = 1;//group;
			SetItemData(item, item_data);
			ReadNode(child, item, cat);  // recurse into the group
		}
		if (strcmp(child->Name(), "Data") == 0)
		{
			DBItemInfo* item_data = new DBItemInfo;
			str = child->Attribute("Name");
			str2 = child->Attribute("Description");
			// the lower-cased "Name Description" string is used for searching
			item_data->desc = str + wxT(" ") + str2;
			item_data->desc.MakeLower();
			item_data->name = str + wxT(" ") + str2;
			if (cat)
				item_data->cat_name = cat->name;
			wxTreeItemId item = AppendItem(parent, item_data->name, 0);
			item_data->type = 2;//data;
			str = child->Attribute("DataURL");
			item_data->data_url = str;
			str = child->Attribute("SampleID");
			str.ToLong(&ival);  // conversion failure leaves ival unchanged
			item_data->sample_id = ival;
			str = child->Attribute("AnnotationURL");
			item_data->annotation_url = str;
			ReadAnnotations(str, item_data->annotations);
			SetItemData(item, item_data);
			if (cat)
				cat->items.push_back(item_data);
		}
		child = child->NextSiblingElement();
	}
}
// Mount the volume: validate the superblock, initialize the block
// allocator, load the root directory node, and publish it to the VFS.
// Returns B_OK on success or the first error encountered.
status_t Volume::Mount(fs_volume* fsVolume)
{
	fFSVolume = fsVolume;

	// load the superblock
	Block block;
	if (!block.GetReadable(this, kCheckSumFSSuperBlockOffset / B_PAGE_SIZE))
		RETURN_ERROR(B_ERROR);

	SuperBlock* superBlock = (SuperBlock*)block.Data();
	if (!superBlock->Check(fTotalBlocks))
		RETURN_ERROR(B_BAD_DATA);

	// copy the volume name
	fName = strdup(superBlock->Name());
	if (fName == NULL)
		RETURN_ERROR(B_NO_MEMORY);

	// init the block allocator
	status_t error = fBlockAllocator->Init(superBlock->BlockBitmap(), superBlock->FreeBlocks());
	if (error != B_OK)
		RETURN_ERROR(error);

	// load the root directory
	Node* rootNode;
	error = ReadNode(superBlock->RootDirectory(), rootNode);
	if (error != B_OK)
		RETURN_ERROR(error);

	// the root node must actually be a directory
	fRootDirectory = dynamic_cast<Directory*>(rootNode);
	if (fRootDirectory == NULL) {
		delete rootNode;
		RETURN_ERROR(B_BAD_DATA);
	}

	// hand the root node to the VFS layer
	error = PublishNode(fRootDirectory, 0);
	if (error != B_OK) {
		delete fRootDirectory;
		fRootDirectory = NULL;
		return error;
	}

	return B_OK;
}
// append the subtree rooted at from to the node to, which is used in the "append" phase of the BulkLoad algorithm void MT::Append (MTnode *to, MTnode *from) { GiSTlist<MTnode *> *oldList = new GiSTlist<MTnode *>; // upper level nodes to append oldList->Append(from); GiSTlist<GiSTpath> pathList; pathList.Append (to->Path()); MTnode *node = new MTnode, *newNode = NULL; MT *fromTree = (MT *) from->Tree(); do { GiSTlist<MTnode *> *newList = new GiSTlist<MTnode *>; // lower level nodes to append while (!oldList->IsEmpty()) { delete node; node = oldList->RemoveFront(); GiSTpath path = pathList.RemoveFront (); newNode = (MTnode *) ReadNode (path); // node to be appended for (int i=0; i<node->NumEntries(); i++) { MTentry *entry = (MTentry *) (*node)[i].Ptr()->Copy(); if (node->Level() > 0) { // if node isn't a leaf, we've to allocate its children GiSTpath nodePath = node->Path(); nodePath.MakeChild (entry->Ptr()); newList->Append((MTnode *) fromTree->ReadNode(nodePath)); entry->SetPtr(Store()->Allocate()); // allocate its child in the inserted tree path.MakeChild (entry->Ptr()); MTnode *childNode = (MTnode *) CreateNode (); childNode->Path() = path; childNode->SetTree(this); WriteNode (childNode); // write the empty node delete childNode; pathList.Append (path); path.MakeParent (); } newNode->Insert(*entry); delete entry; } newNode->SetLevel(node->Level()); WriteNode (newNode); // write the node delete newNode; } delete oldList; oldList = newList; } while (node->Level() > 0); // until we reach the leaves' level delete node; delete oldList; }
// Parse a NODE chunk: read the node name and its transform (translation,
// scale, rotation quaternion), then recursively consume the sub-chunks
// (MESH/BONE/ANIM/KEYS/NODE).  The joint-building code is currently
// commented out, so only the side effects of the Read* helpers remain.
void MeshLoaderB3D::ReadNode()
{
	//Joint* joint = new Joint;
	idStr str;
	readString(_file, str);    // node name
	PrintTree(str.c_str());

	// transform: translation, scale, rotation (x, y, z, w)
	idVec3 t;
	idVec3 s;
	_file->ReadVec3(t);
	_file->ReadVec3(s);
	idQuat r;
	_file->ReadFloat(r.x);
	_file->ReadFloat(r.y);
	_file->ReadFloat(r.z);
	_file->ReadFloat(r.w);

	//joint->name = str;
	//joint->position = t;
	//joint->scale = s;
	//joint->rotation = r;

	// dispatch on the sub-chunks until this chunk is exhausted
	while( CheckSize() ){
		idStr t = ReadChunk();
		if( t=="MESH" ){
			ReadMesh();
		}else if( t=="BONE" ){
			ReadBone();
		}else if( t=="ANIM" ){
			ReadAnim();
		}else if( t=="KEYS" ){
			ReadKey();
		}else if( t=="NODE" ){
			ReadNode();
			//Joint* child = ReadNode();
			//Sys_Printf("parent %s children %s\n", joint->name.c_str(), child->name.c_str());
			//joint->children.push_back(child);
			//child->parent = joint;
		}
		ExitChunk();
	}
}
// Print the node at `path` to os, then recurse into every child of an
// internal node (depth-first dump of the whole subtree).
void GiST::DumpNode (ostream& os, GiSTpath path) const
{
	GiSTnode *node = ReadNode(path);
	node->Print(os);
	if (!node->IsLeaf()) {
		TruePredicate alwaysTrue;
		GiSTlist<GiSTentry*> entries = node->Search(alwaysTrue);  // every entry of this node
		while (!entries.IsEmpty()) {
			GiSTentry *entry = entries.RemoveFront();
			path.MakeChild(entry->Ptr());
			DumpNode (os, path);
			path.MakeParent();
			delete entry;
		}
	}
	delete node;
}
// Read every node table in the pack file, following the chain of node
// blocks (each block ends with the file position of the next one; a failed
// read of that pointer terminates the chain).  The original file position
// is restored before returning.
// Returns TRUE when the chain was fully consumed, FALSE when there are no
// nodes or a read failed mid-block (via ReturnOnFalse).
BOOL CPackFiles::ReadNodes(void)
{
	filePos		iPosition;
	int			i;
	int			iNumFiles;
	BOOL		bResult;

	iPosition = miPosition;  // remember the current position so it can be restored
	if (miNodes != 0)
	{
		for (;;)
		{
			if (miNodes != iPosition)
			{
				mcFile.Seek(miNodes, EFSO_SET);  // jump to the next node block
			}
			ReturnOnFalse(mcFile.ReadInt(&iNumFiles));
			for (i = 0; i < iNumFiles; i++)
			{
				ReturnOnFalse(ReadNode());
			}
			miNextNodesPtr = miPosition;
			// read the position of the next block; failure ends the chain
			bResult = mcFile.ReadLong(&miNodes);
			if (!bResult)
			{
				break;
			}
		}
		mcFile.Seek(iPosition);
		miPosition = iPosition;
		return TRUE;
	}
	else
	{
		return FALSE;
	}
}
// Recursively verify the M-tree invariants below `path`: each entry's
// distance-to-parent plus its covering radius must fit inside the parent's
// radius, and the stored parent distance must equal the recomputed object
// distance.  Errors are reported on cout.
// Returns TRUE iff the whole subtree is consistent.
BOOL MT::CheckNode (GiSTpath path, MTentry *parentEntry)
{
	MTnode *node = (MTnode *) ReadNode (path);
	BOOL ret = TRUE;
	for (int i=0; i<node->NumEntries() && ret; i++) {
		MTentry *nextEntry = (MTentry *) (*node)[i].Ptr();
		// the root has no parent entry, so these checks are skipped there
		if (parentEntry!=NULL &&
		    (nextEntry->Key()->distance+nextEntry->MaxRadius() > parentEntry->MaxRadius() ||
		     nextEntry->Key()->distance != nextEntry->object().distance(parentEntry->object()))) {
			cout << "Error with entry " << nextEntry << "in " << node;
			ret = FALSE;
		}
		if (!node->IsLeaf()) {  // descend into this entry's child
			path.MakeChild (nextEntry->Ptr());
			ret &= CheckNode (path, nextEntry);
			path.MakeParent ();
		}
	}
	delete node;
	return ret;
}
// Descend from rootPageID to the leaf page that should contain `key`,
// following the child pointer of the first entry whose key is >= `key`
// (or the node's rightmost "next" pointer when every key is smaller).
// On success leafPageHandle is pinned to the leaf.
t_rc INXM_IndexHandle::FindLeaf(int rootPageID, void *key, STORM_PageHandle &leafPageHandle) {
	t_rc rc;

	int nodeRunner = rootPageID;
	INXM_Node node;

	INXM_InitPageHeader initPageHeader;
	INXM_NodePageHeader nodePageHeader;
	/* fix: this first LoadNodeHeaders result was previously ignored, unlike
	 * the identical call inside the loop below. */
	rc = LoadNodeHeaders(nodeRunner, initPageHeader, nodePageHeader);
	if (rc != OK) { return rc; }

	while (!nodePageHeader.isLeaf) {
		/* find the first entry whose key is >= the search key */
		int keyRunner = 0;
		while (keyRunner < initPageHeader.nItems) {
			rc = ReadNode(nodeRunner, keyRunner, node);
			if (rc != OK) { return rc; }
			if (KeyCmp(key,node.key) > 0) {
				keyRunner++;
			} else {
				break;
			}
		}
		nodeRunner = node.left;
		/* every key was smaller: follow the rightmost child pointer */
		if (keyRunner == initPageHeader.nItems) {
			nodeRunner = nodePageHeader.next;
		}
		rc = LoadNodeHeaders(nodeRunner, initPageHeader, nodePageHeader);
		if (rc != OK) { return rc; }
	}

	rc = this->sfh.GetPage(nodeRunner, leafPageHandle);
	if (rc != OK) { return rc; }
	return(OK);
}