// Registers a new Top-Level Object data item under the given name.
// Returns false when the name is already registered; otherwise stores the
// new entry and returns true.
BOOL AddMQ2Data(PCHAR szName, fMQData Function)
{
    // Duplicate names are rejected outright.
    if (FindMQ2Data(szName))
        return false;

    unsigned long slot = MQ2DataItems.GetUnused();

    PMQ2DATAITEM pItem = new MQ2DATAITEM;
    strcpy_s(pItem->Name, szName);
    pItem->Function = Function;

    MQ2DataItems[slot] = pItem;
    // The map stores slot+1 — presumably so 0 can act as a "not found"
    // sentinel on lookup; confirm against the lookup code.
    MQ2DataMap[szName] = slot + 1;
    return true;
}
// Called once, when the plugin is to shutdown PLUGIN_API VOID ShutdownPlugin(VOID) { DebugSpewAlways("Shutting down MQ2CustomBinds"); RemoveCommand("/custombind"); //SaveCustomBinds(); for (unsigned long N = 0 ; N < CustomBinds.Size ; N++) if (PCUSTOMBIND pBind=CustomBinds[N]) { RemoveMQ2KeyBind(pBind->Name); } CustomBinds.Cleanup(); // Remove commands, macro parameters, hooks, etc. // RemoveMQ2Benchmark(bmMyBenchmark); // RemoveParm("$myparm(x)"); // RemoveCommand("/mycommand"); // RemoveXMLFile("MQUI_MyXMLFile.xml"); }
// Tears down the MQ2 data-item registry. Cleanup() is expected to release
// the MQ2DATAITEM entries allocated in AddMQ2Data — confirm in the
// container's implementation.
void ShutdownMQ2Data() { MQ2DataItems.Cleanup(); }
/**@function to build a index first and then do search operation with the given query objects * * @param dataFileName raw data file name. * @param numPivot number of pivots in an index node. * @param setA the value of A in the incremental pivot selection. * @param setN the value of N in the incremental pivot selection. * @param singlePivotFanout fanout of a pivot. * @param fftscale use for pcaonfft pivot selection method. * @param pivotSelectionMethod the pivot selection selection method: random, fft, center, pcaonfft, pca. * @param partitionMethod data partition method: balanced, clusteringkmeans, clusteringboundary. * @param maxLeafSize maximum number of children in leaf nodes. * @param dataType data type, one of "protein", "dna", "vector", "image", "msms","string". * @param initialSize size of smallest database. * @param finalSize size of largest database. * @param stepSize step size of databases. * @param bucket whether bucketing will be used, 1: use. * @param fragmentLength fragment length, only meaningful for sequences. * @param dim dimension of vector data to load or number of features to be loaded. * @param maxR maximum radius for partition. * @param indexType [list,mvptree],the type of the index built later. * @param fftOpt 0:use old version FFT, 1: use new version FFT but don't choose the third point got from FFT as the second pivot, 2:use new version FFT and choose the third point got from FFT as the second pivot. Default 0. * @param indexFileName name of the file storing the index structure built before. * @param queryFileName query file name. * @param firstQuery offset of first query to be used in the query file, start from 0, inclusive, default 0. * @param lastQuery offset of last query to be used in the query file, exclusive, default 1. * @param minRadius maximum search radius, default 10. * @param maxRadius minimum search radius, default 0. * @param step step size for search radii, default 1. 
* @param verify 1 if search results are to be verified against a linear scan and 0 otherwise, default 0. * @param resultsFileName name of the file to store the result information of the search operation. * */ void buildAndSearch(char *&dataFileName,int &numPivot,int setE,int setC,int &singlePivotFanout,int fftscale,char *&pivotSelectionMethod,char *&partitionMethod,int &maxLeafSize,char *&dataType,int initialSize,int finalSize,int stepSize,bool bucket,int &fragmentLength,int dim,double maxR,char *&indexType,int fftOpt,char* queryFileName,int firstQuery,int lastQuery,double maxRadius,double minRadius,double step,bool verify,char *resultsFileName,int buildMode,char *indexName,int searchMode,double MF_maxRadius,double MF_middleProportion,bool putInfo,int cacheHeight,int sThreadNum,bool cacheAll,int bThreadNum,double trisectionRadius,char* selectOptimalPivots,char* partitionByOnePivot,int numberOfChildrenPathSearchingThreads) { vector< shared_ptr<CMetricData> > *rawData=0; CMetricDistance *metric=0; CPivotSelectionMethod *psm=0; CPartitionMethod *pm=0; CIndex *index =0; int indexNum = (finalSize - initialSize) / stepSize + 1; double *loadDataTimes=new double[indexNum]; double *buildTimes=new double[indexNum]; #ifdef _WIN32 char *newDataFileName="./SourceFiles/util/data/"; #else char *newDataFileName="../data/"; #endif joinCharArray(newDataFileName,dataFileName); #ifdef _WIN32 char *newQueryFile="./SourceFiles/util/data/"; #else char *newQueryFile="../data/"; #endif joinCharArray(newQueryFile,queryFileName); #ifdef _WIN32 char *newIndexName="./SourceFiles/util/database/"; #else char *newIndexName="../database/"; #endif joinCharArray(newIndexName,indexName); #ifdef _WIN32 char *resultFile="./SourceFiles/util/result/"; #else char *resultFile="../result/"; #endif joinCharArray(resultFile,resultsFileName); ofstream output(resultFile,ofstream::app); vector<shared_ptr<CMetricData> > *queryList = NULL; stringstream newSize; for (int size = initialSize, i = 0; (size <= 
finalSize) & (i < indexNum); size += stepSize, i++) { #ifdef __GNUC__ clock_t beginTime = times(NULL); #else GetThreadTimes(GetCurrentThread(),&ftDummy,&ftDummy,&kernalStartTime,&userStartTime); #endif loadDataByType(dataType,rawData,newDataFileName,size,dim,fragmentLength); loadDataByType(dataType,queryList,newQueryFile,lastQuery,dim,fragmentLength); #ifdef __GNUC__ clock_t endTime = times(NULL); loadDataTimes[i]= (endTime-beginTime)/100.0; #else GetThreadTimes(GetCurrentThread(),&ftDummy,&ftDummy,&kernalEndTime,&userEndTime); loadDataTimes[i]=((userEndTime.dwLowDateTime + kernalEndTime.dwLowDateTime) - (userStartTime.dwLowDateTime + kernalStartTime.dwLowDateTime)) / 10000000.00; #endif #ifdef __GNUC__ CMemMonitor *memoryMonitor = new CMemMonitor(); memoryMonitor->getMemorySize(); output<<"=================Loading Data Used Time And Memory================="<<endl; output<<"loadDataTime: "<<loadDataTimes[i]<<" s"<<endl; output<<"WorkingSetSize: "<<memoryMonitor->resMemorySize <<" MB / PeakWorkingSetSize: "<<memoryMonitor->peekResMemorySize<<" MB "<<endl; /*output<<"swapOutSize: "<<memoryMonitor->swapMemorySize<<" MB"<<endl;*/ output<<"MemoryUsage: "<<memoryMonitor->virtMemorySize <<" MB/ PeakMemoryUsage: "<< memoryMonitor->peekVirtMemorySize <<" MB "<<endl; output<<"==================================================================="<<endl<<endl; #else GetProcessMemoryInfo(GetCurrentProcess(),&pmc,sizeof(pmc)); output<<"=================Loading Data Used Time And Memory================="<<endl; output<<"loadDataTime: "<<loadDataTimes[i]<<" s"<<endl; output<<"WorkingSetSize: "<<pmc.WorkingSetSize/1048576.0 <<" MB / PeakWorkingSetSize: "<<pmc.PeakWorkingSetSize/1048576.0<<" MB "<<endl; output<<"PagefileUsage: "<<pmc.PagefileUsage/1048576.0<<" MB/ PeakPagefileUsage: "<< pmc.PeakPagefileUsage/1048576.0<<" MB "<<endl; output<<"MemoryUsage: "<<pmc.PagefileUsage/1048576.0 + pmc.WorkingSetSize/1048576.0<<" MB/ PeakMemoryUsage: "<< pmc.PeakPagefileUsage/1048576.0 + 
pmc.PeakWorkingSetSize/1048576.0 <<" MB "<<endl; output<<"==================================================================="<<endl<<endl; #endif getMetricByType(metric,dataType); CountedMetric *cmetric=new CountedMetric(metric); getPivotSelectionMethod(pivotSelectionMethod,psm,fftscale,setE,setC) ; getPartitionMethod(partitionMethod,pm); getIndexType(metric,cmetric,rawData,psm,pm,index,dataType,indexType,newIndexName,numPivot,singlePivotFanout,maxLeafSize,MF_maxRadius,MF_middleProportion,buildMode,bThreadNum,trisectionRadius,selectOptimalPivots,partitionByOnePivot); int distanceFunctionCounterBeforSearch=0,distanceFunctionCounterAfterSearch=0; distanceFunctionCounterBeforSearch=dynamic_cast<CountedMetric*>(index->getMetric())->getCounter(); #ifdef _WIN32 GetThreadTimes(GetCurrentThread(),&ftDummy,&ftDummy,&kernalStartTime,&userStartTime); index->bulkLoad(*rawData,buildMode,bThreadNum); GetThreadTimes(GetCurrentThread(),&ftDummy,&ftDummy,&kernalEndTime,&userEndTime); buildTimes[i] = ((userEndTime.dwLowDateTime + kernalEndTime.dwLowDateTime) - (userStartTime.dwLowDateTime + kernalStartTime.dwLowDateTime))/10000000.00; distanceFunctionCounterAfterSearch=dynamic_cast<CountedMetric*>(index->getMetric())->getCounter(); GetProcessMemoryInfo(GetCurrentProcess(),&pmc,sizeof(pmc)); output<<"=================Building index Used Time And Memory================="<<endl; output<<"datasize: "<<size<<endl; output<<"distCalculateTimes: "<<distanceFunctionCounterAfterSearch-distanceFunctionCounterBeforSearch<<endl; output<<"buildTime: "<<buildTimes[i]<<" s"<<endl; output<<"WorkingSetSize: "<<pmc.WorkingSetSize/1048576.0 <<" MB / PeakWorkingSetSize: "<<pmc.PeakWorkingSetSize/1048576.0<<" MB "<<endl; output<<"PagefileUsage: "<<pmc.PagefileUsage/1048576.0<<" MB/ PeakPagefileUsage: "<< pmc.PeakPagefileUsage/1048576.0<<" MB "<<endl; output<<"MemoryUsage: "<<pmc.PagefileUsage/1048576.0 + pmc.WorkingSetSize/1048576.0<<" MB/ PeakMemoryUsage: "<< pmc.PeakPagefileUsage/1048576.0 + 
pmc.PeakWorkingSetSize/1048576.0 <<" MB "<<endl; output<<"==================================================================="<<endl<<endl; #else beginTime = times(NULL); index->bulkLoad(*rawData,buildMode,bThreadNum); endTime = times(NULL); buildTimes[i]= (endTime-beginTime)/100.0; distanceFunctionCounterAfterSearch=dynamic_cast<CountedMetric*>(index->getMetric())->getCounter(); output<<"=================Building index Used Time And Memory================="<<endl; output<<"datasize: "<<size<<endl; output<<"distCalculateTimes: "<<distanceFunctionCounterAfterSearch-distanceFunctionCounterBeforSearch<<endl; output<<"buildTime: "<<buildTimes[i]<<" s"<<endl; output<<"WorkingSetSize: "<<memoryMonitor->resMemorySize <<" MB / PeakWorkingSetSize: "<<memoryMonitor->peekResMemorySize<<" MB "<<endl; //output<<"swapOutSize: "<<memoryMonitor->swapMemorySize<<" MB"; output<<"MemoryUsage: "<<memoryMonitor->virtMemorySize <<" MB/ PeakMemoryUsage: "<< memoryMonitor->peekVirtMemorySize <<" MB "<<endl; output<<"==================================================================="<<endl<<endl; delete memoryMonitor; #endif if(putInfo==1) getMVPIndexStructureInfo(newIndexName,dataType,resultsFileName); _search(queryList,cmetric,resultFile,newIndexName,newDataFileName,dim,fragmentLength,verify,dataType,maxRadius,minRadius,step,searchMode,firstQuery,lastQuery,size,cacheHeight,cacheAll,sThreadNum,numberOfChildrenPathSearchingThreads); } free(resultFile); }