/* Rebuild the word tree as a new tree ordered by addNode's key.
   Every node of the original tree is visited and inserted into sortedTree;
   the traversal order of the original tree does not affect the result. */
struct tnode *sortTree(struct tnode *root, struct tnode *sortedTree)
{
    if (root != NULL) {
        sortedTree = addNode(sortedTree, root);
        sortTree(root->left, sortedTree);
        sortTree(root->right, sortedTree);
    }
    return sortedTree;
}
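The addNode helper used above is not part of the snippet. Below is a minimal sketch, assuming struct tnode carries a word, an occurrence count, and left/right links, and assuming the rebuilt tree is keyed on the count (so the "after sort" printout orders words by frequency); the real helper and node layout may differ.

#include <stdlib.h>

/* Assumed node layout; the actual struct tnode in the program may differ. */
struct tnode {
    char *word;           /* pointer to the word text     */
    int count;            /* number of occurrences        */
    struct tnode *left;   /* children in the rebuilt tree */
    struct tnode *right;
};

/* Insert src into the rebuilt tree keyed on count and return the
   (possibly new) root. Nodes are copied so the original word tree
   is left untouched. */
struct tnode *addNode(struct tnode *sortedTree, struct tnode *src) {
    if (sortedTree == NULL) {
        sortedTree = (struct tnode *) malloc(sizeof(struct tnode));
        sortedTree->word = src->word;
        sortedTree->count = src->count;
        sortedTree->left = sortedTree->right = NULL;
    } else if (src->count < sortedTree->count) {
        sortedTree->left = addNode(sortedTree->left, src);
    } else {
        sortedTree->right = addNode(sortedTree->right, src);
    }
    return sortedTree;
}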
Status CanonicalQuery::init(LiteParsedQuery* lpq,
                            const MatchExpressionParser::WhereCallback& whereCallback,
                            MatchExpression* root) {
    _pq.reset(lpq);

    // Normalize, sort and validate tree.
    root = normalizeTree(root);
    sortTree(root);
    _root.reset(root);
    Status validStatus = isValid(root, *_pq);
    if (!validStatus.isOK()) {
        return validStatus;
    }

    // Validate the projection if there is one.
    if (!_pq->getProj().isEmpty()) {
        ParsedProjection* pp;
        Status projStatus =
            ParsedProjection::make(_pq->getProj(), _root.get(), &pp, whereCallback);
        if (!projStatus.isOK()) {
            return projStatus;
        }
        _proj.reset(pp);
    }

    return Status::OK();
}
EarthquakeSet::EarthquakeSet(const char* earthquakeFileName,double scaleFactor)
	:treePointIndices(0),
	 pointRadius(1.0f),highlightTime(1.0),currentTime(0.0)
	{
	/* Check the earthquake file name's extension: */
	if(Misc::hasCaseExtension(earthquakeFileName,".anss"))
		{
		/* Read an earthquake database snapshot in "readable" ANSS format: */
		loadANSSFile(earthquakeFileName,scaleFactor);
		}
	else
		{
		/* Read an earthquake event file in space- or comma-separated format: */
		loadCSVFile(earthquakeFileName,scaleFactor);
		}

	/* Create a temporary kd-tree to sort the events for back-to-front traversal: */
	Geometry::ArrayKdTree<Geometry::ValuedPoint<Point,int> > sortTree(events.size());
	Geometry::ValuedPoint<Point,int>* stPtr=sortTree.accessPoints();
	int i=0;
	for(std::vector<Event>::const_iterator eIt=events.begin();eIt!=events.end();++eIt,++stPtr,++i)
		{
		*stPtr=eIt->position;
		stPtr->value=i;
		}
	sortTree.releasePoints(8);

	/* Retrieve the sorted event indices: */
	treePointIndices=new int[events.size()];
	stPtr=sortTree.accessPoints();
	for(int i=0;i<sortTree.getNumNodes();++i,++stPtr)
		treePointIndices[i]=stPtr->value;
	}
Status CanonicalQuery::init(OperationContext* opCtx,
                            std::unique_ptr<QueryRequest> qr,
                            bool canHaveNoopMatchNodes,
                            std::unique_ptr<MatchExpression> root,
                            std::unique_ptr<CollatorInterface> collator) {
    _qr = std::move(qr);
    _collator = std::move(collator);

    _canHaveNoopMatchNodes = canHaveNoopMatchNodes;

    // Normalize, sort and validate tree.
    _root = MatchExpression::optimize(std::move(root));
    sortTree(_root.get());
    Status validStatus = isValid(_root.get(), *_qr);
    if (!validStatus.isOK()) {
        return validStatus;
    }

    // Validate the projection if there is one.
    if (!_qr->getProj().isEmpty()) {
        ParsedProjection* pp;
        Status projStatus = ParsedProjection::make(opCtx, _qr->getProj(), _root.get(), &pp);
        if (!projStatus.isOK()) {
            return projStatus;
        }
        _proj.reset(pp);
    }

    if (_proj && _proj->wantSortKey() && _qr->getSort().isEmpty()) {
        return Status(ErrorCodes::BadValue, "cannot use sortKey $meta projection without a sort");
    }

    return Status::OK();
}
Status CanonicalQuery::init(LiteParsedQuery* lpq,
                            const ExtensionsCallback& extensionsCallback,
                            MatchExpression* root) {
    _pq.reset(lpq);

    _hasNoopExtensions = extensionsCallback.hasNoopExtensions();
    _isIsolated = LiteParsedQuery::isQueryIsolated(lpq->getFilter());

    // Normalize, sort and validate tree.
    root = normalizeTree(root);
    sortTree(root);
    _root.reset(root);
    Status validStatus = isValid(root, *_pq);
    if (!validStatus.isOK()) {
        return validStatus;
    }

    // Validate the projection if there is one.
    if (!_pq->getProj().isEmpty()) {
        ParsedProjection* pp;
        Status projStatus =
            ParsedProjection::make(_pq->getProj(), _root.get(), &pp, extensionsCallback);
        if (!projStatus.isOK()) {
            return projStatus;
        }
        _proj.reset(pp);
    }

    if (_proj && _proj->wantSortKey() && _pq->getSort().isEmpty()) {
        return Status(ErrorCodes::BadValue, "cannot use sortKey $meta projection without a sort");
    }

    return Status::OK();
}
void ZProf::dumpToFile( char *file, char *rootName ) {
    sortTree();

    // FIND the root
    ZProf *root = 0;
    ZProf *i;
    for( i = ZProf::heapHead; i; i=i->heapNext ) {
        if( !strcmp(i->identString,rootName) ) {
            root = i;
            break;
        }
    }

    zprofDumpFile = fopen( file, "wt" );
    fprintf( zprofDumpFile, "parent%% tot%% tot count avg name\n" );

    // There can be multiple top level entry points
    for( i = ZProf::heapHead; i; i=i->heapNext ) {
        if( ! i->parent || i == root ) {
            zprofDumpRecurse( i, 0, root );
        }
    }

    fclose( zprofDumpFile );
}
// static
void CanonicalQuery::sortTree(MatchExpression* tree) {
    for (size_t i = 0; i < tree->numChildren(); ++i) {
        sortTree(tree->getChild(i));
    }
    std::vector<MatchExpression*>* children = tree->getChildVector();
    if (NULL != children) {
        std::sort(children->begin(), children->end(), matchExpressionLessThan);
    }
}
// static
void CanonicalQuery::sortTree(MatchExpression* tree) {
    for (size_t i = 0; i < tree->numChildren(); ++i) {
        sortTree(tree->getChild(i));
    }
    std::vector<MatchExpression*>* children = tree->getChildVector();
    if (NULL != children) {
        std::sort(children->begin(), children->end(), OperatorAndFieldNameComparison);
    }
}
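The comparators referenced above (matchExpressionLessThan, OperatorAndFieldNameComparison) are defined elsewhere in the MongoDB sources and are not shown in these snippets. The following self-contained sketch only illustrates the same pattern of recursively sorting each node's children with std::sort and a custom comparator; the Node type and its fields are hypothetical, not MongoDB's MatchExpression API.

#include <algorithm>
#include <string>
#include <vector>

// Hypothetical tree node, standing in for an expression node type.
struct Node {
    std::string op;              // operator name, e.g. "$and", "$eq"
    std::string field;           // field path the node applies to
    std::vector<Node*> children; // sub-expressions
};

// Order siblings by operator name first, then by field path.
static bool nodeLessThan(const Node* lhs, const Node* rhs) {
    if (lhs->op != rhs->op)
        return lhs->op < rhs->op;
    return lhs->field < rhs->field;
}

// Recursively sort every level of the tree, mirroring the shape of
// CanonicalQuery::sortTree above: children first, then this level.
static void sortNodeTree(Node* tree) {
    for (Node* child : tree->children)
        sortNodeTree(child);
    std::sort(tree->children.begin(), tree->children.end(), nodeLessThan);
}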
int backend(AST *tree, char *filename)
{
    FILE *fp = NULL;
    u8 *bin = NULL;
    u32 binlen;
    u32 p2mlen;

    printf("*** Compiler back end ***\n");

    if (mode & MODE_SORT) {
        printf("Sorting by game title... ");
        sortTree(tree);
        printf("Done.\n");
    }

    printf("Translating to BIN... ");
    if ((bin = translateTree(tree)) == NULL) {
        fprintf(stderr, "Failed.\n");
        return -1;
    }
    binlen = *(u32*) bin + 8;
    printf("Done (%s).\n", sizeToStr(binlen));

    printf("%s file '%s'... ", mode & MODE_BIN ? "Writing to" : "Creating P2M", filename);
    if ((fp = fopen(filename, "wb")) == NULL) {
        fprintf(stderr, "Failed.\n");
        return -2;
    }
    if ((mode & MODE_BIN ? writeToFile(fp, bin, 0, binlen) : createP2mFile(fp, bin, binlen, &p2mlen)) < 0) {
        fprintf(stderr, "Failed.\n");
        return -3;
    }
    free(bin);
    fclose(fp);

    if (mode & MODE_BIN)
        printf("Done.\n");
    else
        printf("Done (%s).\n", sizeToStr(p2mlen));

    return 0;
}
int main(int argc, char **argv){
    struct tnode *root;
    char word[MAXWORD];
    int nLine = 1;

    root = NULL;
    while(getword(word) != EOF){
        if(*word == '\n')
            nLine++;
        if(isalpha(word[0]))
            root = addtree(root, word, nLine);
    }

    printf("before sort: \n\n");
    treeprint(root);

    struct tnode *sortedTree = sortTree(root, NULL);

    printf("\nafter sort: \n\n");
    treeprint(sortedTree);

    return 0;
}
Status CanonicalQuery::init(std::unique_ptr<QueryRequest> qr,
                            const ExtensionsCallback& extensionsCallback,
                            MatchExpression* root,
                            std::unique_ptr<CollatorInterface> collator) {
    _qr = std::move(qr);
    _collator = std::move(collator);

    _hasNoopExtensions = extensionsCallback.hasNoopExtensions();
    _isIsolated = QueryRequest::isQueryIsolated(_qr->getFilter());

    // Normalize, sort and validate tree.
    root = normalizeTree(root);
    sortTree(root);
    _root.reset(root);
    Status validStatus = isValid(root, *_qr);
    if (!validStatus.isOK()) {
        return validStatus;
    }

    // Validate the projection if there is one.
    if (!_qr->getProj().isEmpty()) {
        ParsedProjection* pp;
        Status projStatus =
            ParsedProjection::make(_qr->getProj(), _root.get(), &pp, extensionsCallback);
        if (!projStatus.isOK()) {
            return projStatus;
        }
        _proj.reset(pp);
    }

    if (_proj && _proj->wantSortKey() && _qr->getSort().isEmpty()) {
        return Status(ErrorCodes::BadValue, "cannot use sortKey $meta projection without a sort");
    }

    return Status::OK();
}
Status CanonicalQuery::normalize(MatchExpression* root) {
    _root.reset(normalizeTree(root));
    sortTree(_root.get());
    return isValid(_root.get());
}
bool dgCollisionConvexHull::Create (dgInt32 count, dgInt32 strideInBytes, const dgFloat32* const vertexArray, dgFloat32 tolerance)
{
	dgInt32 stride = strideInBytes / sizeof (dgFloat32);
	dgStack<dgFloat64> buffer(3 * 2 * count);
	for (dgInt32 i = 0; i < count; i ++) {
		buffer[i * 3 + 0] = vertexArray[i * stride + 0];
		buffer[i * 3 + 1] = vertexArray[i * stride + 1];
		buffer[i * 3 + 2] = vertexArray[i * stride + 2];
	}

	dgConvexHull3d* convexHull = new (GetAllocator()) dgConvexHull3d (GetAllocator(), &buffer[0], 3 * sizeof (dgFloat64), count, tolerance);
	if (!convexHull->GetCount()) {
		// this is a degenerate hull; add some thickness to build a thick plane
		delete convexHull;

		dgStack<dgVector> tmp(3 * count);
		for (dgInt32 i = 0; i < count; i ++) {
			tmp[i][0] = dgFloat32 (buffer[i*3 + 0]);
			tmp[i][1] = dgFloat32 (buffer[i*3 + 1]);
			tmp[i][2] = dgFloat32 (buffer[i*3 + 2]);
			tmp[i][3] = dgFloat32 (0.0f);
		}

		dgObb sphere;
		sphere.SetDimensions (&tmp[0][0], sizeof (dgVector), count);

		dgInt32 index = 0;
		dgFloat32 size = dgFloat32 (1.0e10f);
		for (dgInt32 i = 0; i < 3; i ++) {
			if (sphere.m_size[i] < size) {
				index = i;
				size = sphere.m_size[i];
			}
		}
		dgVector normal (dgFloat32 (0.0f), dgFloat32 (0.0f), dgFloat32 (0.0f), dgFloat32 (0.0f));
		normal[index] = dgFloat32 (1.0f);
		dgVector step = sphere.RotateVector (normal.Scale3 (dgFloat32 (0.05f)));
		for (dgInt32 i = 0; i < count; i ++) {
			dgVector p1 (tmp[i] + step);
			dgVector p2 (tmp[i] - step);

			buffer[i * 3 + 0] = p1.m_x;
			buffer[i * 3 + 1] = p1.m_y;
			buffer[i * 3 + 2] = p1.m_z;
			buffer[(i + count) * 3 + 0] = p2.m_x;
			buffer[(i + count) * 3 + 1] = p2.m_y;
			buffer[(i + count) * 3 + 2] = p2.m_z;
		}
		count *= 2;
		convexHull = new (GetAllocator()) dgConvexHull3d (GetAllocator(), &buffer[0], 3 * sizeof (dgFloat64), count, tolerance);
		if (!convexHull->GetCount()) {
			delete convexHull;
			return false;
		}
	}

	// check for degenerated faces
	for (bool success = false; !success; ) {
		success = true;
		const dgBigVector* const hullVertexArray = convexHull->GetVertexPool();

		dgStack<dgInt8> mask(convexHull->GetVertexCount());
		memset (&mask[0], 1, mask.GetSizeInBytes());
		for (dgConvexHull3d::dgListNode* node = convexHull->GetFirst(); node; node = node->GetNext()) {
			dgConvexHull3DFace& face = node->GetInfo();
			const dgBigVector& p0 = hullVertexArray[face.m_index[0]];
			const dgBigVector& p1 = hullVertexArray[face.m_index[1]];
			const dgBigVector& p2 = hullVertexArray[face.m_index[2]];
			dgBigVector p1p0 (p1 - p0);
			dgBigVector p2p0 (p2 - p0);
			dgBigVector normal (p2p0 * p1p0);
			dgFloat64 mag2 = normal % normal;
			if (mag2 < dgFloat64 (1.0e-6f * 1.0e-6f)) {
				success = false;
				dgInt32 index = -1;
				dgBigVector p2p1 (p2 - p1);
				dgFloat64 dist10 = p1p0 % p1p0;
				dgFloat64 dist20 = p2p0 % p2p0;
				dgFloat64 dist21 = p2p1 % p2p1;
				if ((dist10 >= dist20) && (dist10 >= dist21)) {
					index = 2;
				} else if ((dist20 >= dist10) && (dist20 >= dist21)) {
					index = 1;
				} else if ((dist21 >= dist10) && (dist21 >= dist20)) {
					index = 0;
				}
				dgAssert (index != -1);
				mask[face.m_index[index]] = 0;
			}
		}
		if (!success) {
			dgInt32 count = 0;
			dgInt32 vertexCount = convexHull->GetVertexCount();
			for (dgInt32 i = 0; i < vertexCount; i ++) {
				if (mask[i]) {
					buffer[count * 3 + 0] = hullVertexArray[i].m_x;
					buffer[count * 3 + 1] = hullVertexArray[i].m_y;
					buffer[count * 3 + 2] = hullVertexArray[i].m_z;
					count ++;
				}
			}
			delete convexHull;
			convexHull = new (GetAllocator()) dgConvexHull3d (GetAllocator(), &buffer[0], 3 * sizeof (dgFloat64), count, tolerance);
		}
	}

	dgAssert (convexHull);
	dgInt32 vertexCount = convexHull->GetVertexCount();
	if (vertexCount < 4) {
		delete convexHull;
		return false;
	}

	const dgBigVector* const hullVertexArray = convexHull->GetVertexPool();

	dgPolyhedra polyhedra (GetAllocator());
	polyhedra.BeginFace();
	for (dgConvexHull3d::dgListNode* node = convexHull->GetFirst(); node; node = node->GetNext()) {
		dgConvexHull3DFace& face = node->GetInfo();
		polyhedra.AddFace (face.m_index[0], face.m_index[1], face.m_index[2]);
	}
	polyhedra.EndFace();

	if (vertexCount > 4) {
		// bool edgeRemoved = false;
		// while (RemoveCoplanarEdge (polyhedra, hullVertexArray)) {
		//	edgeRemoved = true;
		// }
		// if (edgeRemoved) {
		//	if (!CheckConvex (polyhedra, hullVertexArray)) {
		//		delete convexHull;
		//		return false;
		//	}
		// }
		while (RemoveCoplanarEdge (polyhedra, hullVertexArray));
	}

	dgStack<dgInt32> vertexMap(vertexCount);
	memset (&vertexMap[0], -1, vertexCount * sizeof (dgInt32));

	dgInt32 mark = polyhedra.IncLRU();
	dgPolyhedra::Iterator iter (polyhedra);
	for (iter.Begin(); iter; iter ++) {
		dgEdge* const edge = &iter.GetNode()->GetInfo();
		if (edge->m_mark != mark) {
			if (vertexMap[edge->m_incidentVertex] == -1) {
				vertexMap[edge->m_incidentVertex] = m_vertexCount;
				m_vertexCount ++;
			}
			dgEdge* ptr = edge;
			do {
				ptr->m_mark = mark;
				ptr->m_userData = m_edgeCount;
				m_edgeCount ++;
				ptr = ptr->m_twin->m_next;
			} while (ptr != edge);
		}
	}

	m_vertex = (dgVector*) m_allocator->Malloc (dgInt32 (m_vertexCount * sizeof (dgVector)));
	m_simplex = (dgConvexSimplexEdge*) m_allocator->Malloc (dgInt32 (m_edgeCount * sizeof (dgConvexSimplexEdge)));
	m_vertexToEdgeMapping = (const dgConvexSimplexEdge**) m_allocator->Malloc (dgInt32 (m_vertexCount * sizeof (dgConvexSimplexEdge*)));

	for (dgInt32 i = 0; i < vertexCount; i ++) {
		if (vertexMap[i] != -1) {
			m_vertex[vertexMap[i]] = hullVertexArray[i];
			m_vertex[vertexMap[i]].m_w = dgFloat32 (0.0f);
		}
	}
	delete convexHull;

	vertexCount = m_vertexCount;
	mark = polyhedra.IncLRU();
	for (iter.Begin(); iter; iter ++) {
		dgEdge* const edge = &iter.GetNode()->GetInfo();
		if (edge->m_mark != mark) {
			dgEdge *ptr = edge;
			do {
				ptr->m_mark = mark;
				dgConvexSimplexEdge* const simplexPtr = &m_simplex[ptr->m_userData];
				simplexPtr->m_vertex = vertexMap[ptr->m_incidentVertex];
				simplexPtr->m_next = &m_simplex[ptr->m_next->m_userData];
				simplexPtr->m_prev = &m_simplex[ptr->m_prev->m_userData];
				simplexPtr->m_twin = &m_simplex[ptr->m_twin->m_userData];
				ptr = ptr->m_twin->m_next;
			} while (ptr != edge);
		}
	}

	m_faceCount = 0;
	dgStack<char> faceMarks (m_edgeCount);
	memset (&faceMarks[0], 0, m_edgeCount * sizeof (dgInt8));

	dgStack<dgConvexSimplexEdge*> faceArray (m_edgeCount);
	for (dgInt32 i = 0; i < m_edgeCount; i ++) {
		dgConvexSimplexEdge* const face = &m_simplex[i];
		if (!faceMarks[i]) {
			dgConvexSimplexEdge* ptr = face;
			do {
				dgAssert ((ptr - m_simplex) >= 0);
				faceMarks[dgInt32 (ptr - m_simplex)] = '1';
				ptr = ptr->m_next;
			} while (ptr != face);

			faceArray[m_faceCount] = face;
			m_faceCount ++;
		}
	}
	m_faceArray = (dgConvexSimplexEdge **) m_allocator->Malloc(dgInt32 (m_faceCount * sizeof(dgConvexSimplexEdge *)));
	memcpy (m_faceArray, &faceArray[0], m_faceCount * sizeof(dgConvexSimplexEdge *));

	if (vertexCount > DG_CONVEX_VERTEX_CHUNK_SIZE) {
		// create a face structure for support vertex
		dgStack<dgConvexBox> boxTree (vertexCount);
		dgTree<dgVector,dgInt32> sortTree(GetAllocator());
		dgStack<dgTree<dgVector,dgInt32>::dgTreeNode*> vertexNodeList(vertexCount);

		dgVector minP ( dgFloat32 (1.0e15f),  dgFloat32 (1.0e15f),  dgFloat32 (1.0e15f), dgFloat32 (0.0f));
		dgVector maxP (-dgFloat32 (1.0e15f), -dgFloat32 (1.0e15f), -dgFloat32 (1.0e15f), dgFloat32 (0.0f));
		for (dgInt32 i = 0; i < vertexCount; i ++) {
			const dgVector& p = m_vertex[i];
			vertexNodeList[i] = sortTree.Insert (p, i);
			minP.m_x = dgMin (p.m_x, minP.m_x);
			minP.m_y = dgMin (p.m_y, minP.m_y);
			minP.m_z = dgMin (p.m_z, minP.m_z);
			maxP.m_x = dgMax (p.m_x, maxP.m_x);
			maxP.m_y = dgMax (p.m_y, maxP.m_y);
			maxP.m_z = dgMax (p.m_z, maxP.m_z);
		}

		boxTree[0].m_box[0] = minP;
		boxTree[0].m_box[1] = maxP;
		boxTree[0].m_leftBox = -1;
		boxTree[0].m_rightBox = -1;
		boxTree[0].m_vertexStart = 0;
		boxTree[0].m_vertexCount = vertexCount;
		dgInt32 boxCount = 1;

		dgInt32 stack = 1;
		dgInt32 stackBoxPool[64];
		stackBoxPool[0] = 0;

		while (stack) {
			stack --;
			dgInt32 boxIndex = stackBoxPool[stack];
			dgConvexBox& box = boxTree[boxIndex];
			if (box.m_vertexCount > DG_CONVEX_VERTEX_CHUNK_SIZE) {
				dgVector median (dgFloat32 (0.0f), dgFloat32 (0.0f), dgFloat32 (0.0f), dgFloat32 (0.0f));
				dgVector varian (dgFloat32 (0.0f), dgFloat32 (0.0f), dgFloat32 (0.0f), dgFloat32 (0.0f));
				for (dgInt32 i = 0; i < box.m_vertexCount; i ++) {
					dgVector& p = vertexNodeList[box.m_vertexStart + i]->GetInfo();
					minP.m_x = dgMin (p.m_x, minP.m_x);
					minP.m_y = dgMin (p.m_y, minP.m_y);
					minP.m_z = dgMin (p.m_z, minP.m_z);
					maxP.m_x = dgMax (p.m_x, maxP.m_x);
					maxP.m_y = dgMax (p.m_y, maxP.m_y);
					maxP.m_z = dgMax (p.m_z, maxP.m_z);
					median += p;
					varian += p.CompProduct3 (p);
				}

				varian = varian.Scale3 (dgFloat32 (box.m_vertexCount)) - median.CompProduct3(median);
				dgInt32 index = 0;
				dgFloat64 maxVarian = dgFloat64 (-1.0e10f);
				for (dgInt32 i = 0; i < 3; i ++) {
					if (varian[i] > maxVarian) {
						index = i;
						maxVarian = varian[i];
					}
				}
				dgVector center = median.Scale3 (dgFloat32 (1.0f) / dgFloat32 (box.m_vertexCount));
				dgFloat32 test = center[index];

				dgInt32 i0 = 0;
				dgInt32 i1 = box.m_vertexCount - 1;
				do {
					for (; i0 <= i1; i0 ++) {
						dgFloat32 val = vertexNodeList[box.m_vertexStart + i0]->GetInfo()[index];
						if (val > test) {
							break;
						}
					}
					for (; i1 >= i0; i1 --) {
						dgFloat32 val = vertexNodeList[box.m_vertexStart + i1]->GetInfo()[index];
						if (val < test) {
							break;
						}
					}
					if (i0 < i1) {
						dgSwap(vertexNodeList[box.m_vertexStart + i0], vertexNodeList[box.m_vertexStart + i1]);
						i0++;
						i1--;
					}
				} while (i0 <= i1);

				if (i0 == 0){
					i0 = box.m_vertexCount / 2;
				}
				if (i0 >= (box.m_vertexCount - 1)){
					i0 = box.m_vertexCount / 2;
				}

				{
					dgVector minP ( dgFloat32 (1.0e15f),  dgFloat32 (1.0e15f),  dgFloat32 (1.0e15f), dgFloat32 (0.0f));
					dgVector maxP (-dgFloat32 (1.0e15f), -dgFloat32 (1.0e15f), -dgFloat32 (1.0e15f), dgFloat32 (0.0f));
					for (dgInt32 i = i0; i < box.m_vertexCount; i ++) {
						const dgVector& p = vertexNodeList[box.m_vertexStart + i]->GetInfo();
						minP.m_x = dgMin (p.m_x, minP.m_x);
						minP.m_y = dgMin (p.m_y, minP.m_y);
						minP.m_z = dgMin (p.m_z, minP.m_z);
						maxP.m_x = dgMax (p.m_x, maxP.m_x);
						maxP.m_y = dgMax (p.m_y, maxP.m_y);
						maxP.m_z = dgMax (p.m_z, maxP.m_z);
					}

					box.m_rightBox = boxCount;
					boxTree[boxCount].m_box[0] = minP;
					boxTree[boxCount].m_box[1] = maxP;
					boxTree[boxCount].m_leftBox = -1;
					boxTree[boxCount].m_rightBox = -1;
					boxTree[boxCount].m_vertexStart = box.m_vertexStart + i0;
					boxTree[boxCount].m_vertexCount = box.m_vertexCount - i0;
					stackBoxPool[stack] = boxCount;
					stack ++;
					boxCount ++;
				}

				{
					dgVector minP ( dgFloat32 (1.0e15f),  dgFloat32 (1.0e15f),  dgFloat32 (1.0e15f), dgFloat32 (0.0f));
					dgVector maxP (-dgFloat32 (1.0e15f), -dgFloat32 (1.0e15f), -dgFloat32 (1.0e15f), dgFloat32 (0.0f));
					for (dgInt32 i = 0; i < i0; i ++) {
						const dgVector& p = vertexNodeList[box.m_vertexStart + i]->GetInfo();
						minP.m_x = dgMin (p.m_x, minP.m_x);
						minP.m_y = dgMin (p.m_y, minP.m_y);
						minP.m_z = dgMin (p.m_z, minP.m_z);
						maxP.m_x = dgMax (p.m_x, maxP.m_x);
						maxP.m_y = dgMax (p.m_y, maxP.m_y);
						maxP.m_z = dgMax (p.m_z, maxP.m_z);
					}

					box.m_leftBox = boxCount;
					boxTree[boxCount].m_box[0] = minP;
					boxTree[boxCount].m_box[1] = maxP;
					boxTree[boxCount].m_leftBox = -1;
					boxTree[boxCount].m_rightBox = -1;
					boxTree[boxCount].m_vertexStart = box.m_vertexStart;
					boxTree[boxCount].m_vertexCount = i0;
					stackBoxPool[stack] = boxCount;
					stack ++;
					boxCount ++;
				}
			}
		}

		for (dgInt32 i = 0; i < m_vertexCount; i ++) {
			m_vertex[i] = vertexNodeList[i]->GetInfo();
			vertexNodeList[i]->GetInfo().m_w = dgFloat32 (i);
		}

		m_supportTreeCount = boxCount;
		m_supportTree = (dgConvexBox*) m_allocator->Malloc(dgInt32 (boxCount * sizeof(dgConvexBox)));
		memcpy (m_supportTree, &boxTree[0], boxCount * sizeof(dgConvexBox));

		for (dgInt32 i = 0; i < m_edgeCount; i ++) {
			dgConvexSimplexEdge* const ptr = &m_simplex[i];
			dgTree<dgVector,dgInt32>::dgTreeNode* const node = sortTree.Find(ptr->m_vertex);
			dgInt32 index = dgInt32 (node->GetInfo().m_w);
			ptr->m_vertex = dgInt16 (index);
		}
	}

	for (dgInt32 i = 0; i < m_edgeCount; i ++) {
		dgConvexSimplexEdge* const edge = &m_simplex[i];
		m_vertexToEdgeMapping[edge->m_vertex] = edge;
	}

	SetVolumeAndCG ();
	return true;
}
void AllMusic::resync()
{
    m_done_loading = false;

    QString aquery = "SELECT music_songs.song_id, music_artists.artist_name, music_comp_artists.artist_name AS compilation_artist, "
                     "music_albums.album_name, music_songs.name, music_genres.genre, music_songs.year, "
                     "music_songs.track, music_songs.length, CONCAT_WS('/', "
                     "music_directories.path, music_songs.filename) AS filename, "
                     "music_songs.rating, music_songs.numplays, music_songs.lastplay, music_albums.compilation, "
                     "music_songs.format "
                     "FROM music_songs "
                     "LEFT JOIN music_directories ON music_songs.directory_id=music_directories.directory_id "
                     "LEFT JOIN music_artists ON music_songs.artist_id=music_artists.artist_id "
                     "LEFT JOIN music_albums ON music_songs.album_id=music_albums.album_id "
                     "LEFT JOIN music_artists AS music_comp_artists ON music_albums.artist_id=music_comp_artists.artist_id "
                     "LEFT JOIN music_genres ON music_songs.genre_id=music_genres.genre_id "
                     "ORDER BY music_songs.song_id;";

    QString filename, artist, album, title, compartist;

    MSqlQuery query(MSqlQuery::InitCon());
    if (!query.exec(aquery))
        MythDB::DBError("AllMusic::resync", query);

    m_root_node->clear();
    m_all_music.clear();

    m_numPcs = query.size() * 2;
    m_numLoaded = 0;

    if (query.isActive() && query.size() > 0)
    {
        while (query.next())
        {
            filename = query.value(9).toString();
            if (!filename.contains("://"))
                filename = m_startdir + filename;

            Metadata *temp = new Metadata(
                filename,
                query.value(1).toString(),     // artist
                query.value(2).toString(),     // compilation artist
                query.value(3).toString(),     // album
                query.value(4).toString(),     // title
                query.value(5).toString(),     // genre
                query.value(6).toInt(),        // year
                query.value(7).toInt(),        // track no.
                query.value(8).toInt(),        // length
                query.value(0).toInt(),        // id
                query.value(10).toInt(),       // rating
                query.value(11).toInt(),       // playcount
                query.value(12).toDateTime(),  // lastplay
                (query.value(13).toInt() > 0), // compilation
                query.value(14).toString());   // format

            // Don't delete temp, as PtrList now owns it
            m_all_music.append(temp);

            // compute max/min playcount,lastplay for all music
            if (query.at() == 0)
            {
                // first song
                m_playcountMin = m_playcountMax = temp->PlayCount();
                m_lastplayMin = m_lastplayMax = temp->LastPlay().toTime_t();
            }
            else
            {
                int playCount = temp->PlayCount();
                double lastPlay = temp->LastPlay().toTime_t();

                m_playcountMin = min(playCount, m_playcountMin);
                m_playcountMax = max(playCount, m_playcountMax);
                m_lastplayMin = min(lastPlay, m_lastplayMin);
                m_lastplayMax = max(lastPlay, m_lastplayMax);
            }
            m_numLoaded++;
        }
    }
    else
    {
        VERBOSE(VB_IMPORTANT, "MythMusic hasn't found any tracks! "
                              "That's ok with me if it's ok with you.");
    }

    // To find this data quickly, build a map
    // (a map to pointers!)
    music_map.clear();
    MetadataPtrList::iterator it = m_all_music.begin();
    for (; it != m_all_music.end(); ++it)
        music_map[(*it)->ID()] = *it;

    // Build a tree to reflect current state of
    // the metadata. Once built, sort it.
    buildTree();
    sortTree();

    m_done_loading = true;
}