Example #1
//------------------------------------------------------------------------------
void Solver::HashTable::Fill
(
    const IDList& particleIDs
)
{
    // clear all buckets
    for (unsigned int i = 0; i < mDomain.Dimensions.X*mDomain.Dimensions.Y; i++)
    {
        mBuckets[i].clear();
    }

    // fill buckets
    IDList::const_iterator i = particleIDs.begin();
    IDList::const_iterator e = particleIDs.end();

    for (; i != e; i++)
    {
        unsigned int hash = computeHash
                            (
                                Vector2f(&mPositions[2*(*i)]),
                                mDomain
                            );
        mBuckets[hash].push_back(*i);
    }

}
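Fill() drops each particle into the bucket of its grid cell. The project's computeHash is not shown above, so the following is only a minimal sketch, assuming a uniform grid hashed in row-major order over mDomain.Dimensions; the Vec2i/Domain layouts and the parameter names are placeholders, not the original types:

// Sketch only -- assumed types and layout, not the project's actual computeHash.
struct Vec2i  { unsigned int X, Y; };
struct Domain { Vec2i Dimensions; float OriginX, OriginY, CellSize; };

unsigned int computeHash(float px, float py, const Domain& d)
{
    unsigned int cx = static_cast<unsigned int>((px - d.OriginX) / d.CellSize);
    unsigned int cy = static_cast<unsigned int>((py - d.OriginY) / d.CellSize);
    if (cx >= d.Dimensions.X) cx = d.Dimensions.X - 1;   // clamp to the grid
    if (cy >= d.Dimensions.Y) cy = d.Dimensions.Y - 1;
    return cy * d.Dimensions.X + cx;                     // row-major bucket index
}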
Example #2
bool QueueEditor::EditEntry(int ID, bool bSmartOrder, EEditAction eAction, int iOffset, const char* szText)
{
	IDList cIDList;
	cIDList.clear();
	cIDList.push_back(ID);
	return EditList(&cIDList, NULL, mmID, bSmartOrder, eAction, iOffset, szText);
}
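EditEntry only wraps the single ID into an IDList so the general, list-based EditList API does the actual work. A standalone sketch of that wrapping pattern; the typedef is an assumption (NZBGet-style code usually declares IDList as std::vector<int>) and MakeSingleIDList is not part of the project:

// Sketch only: assumed typedef plus a hypothetical helper.
#include <vector>
typedef std::vector<int> IDList;

static IDList MakeSingleIDList(int iID)
{
	IDList cIDList;
	cIDList.push_back(iID); // one-element list, ready for the list-based API
	return cIDList;
}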
Example #3
bool QueueEditor::LockedEditEntry(DownloadQueue* pDownloadQueue, int ID, bool bSmartOrder, EEditAction eAction, int iOffset, const char* szText)
{
	IDList cIDList;
	cIDList.clear();
	cIDList.push_back(ID);
	return InternEditList(pDownloadQueue, &cIDList, bSmartOrder, eAction, iOffset, szText);
}
Example #4
void UnitsDialog::loadConversionTable( ConversionTable *table, Unit::Type type )
{
	UnitList unitList;
	database->loadUnits( &unitList, type );

	QStringList unitNames;
	IDList unitIDs; // We need to store these in the table, so rows and cols are identified by unitID, not name.
	table->clear();
	for ( UnitList::const_iterator unit_it = unitList.constBegin(); unit_it != unitList.constEnd(); ++unit_it ) {
		unitNames.append( ( *unit_it ).name() );
		unitIDs.append( ( *unit_it ).id() ); // append the element
	}

	// Resize the table
	table->resize( unitNames.count(), unitNames.count() );

	// Set the table labels, and id's
	table->setRowLabels( unitNames );
	table->setColumnLabels( unitNames );
	table->setUnitIDs( unitIDs );


	// Load and Populate the data into the table
	UnitRatioList ratioList;
	database->loadUnitRatios( &ratioList, type );
	for ( UnitRatioList::const_iterator ratio_it = ratioList.constBegin(); ratio_it != ratioList.constEnd(); ++ratio_it ) {
		table->setRatio( ( *ratio_it ).unitId1(), ( *ratio_it ).unitId2(), ( *ratio_it ).ratio() );
	}
}
Example #5
// Searches IDList idL for std::string s; returns its position, or -1 if not found.
inline int FindInIDList(IDList& idL,const std::string& s)
{
//   int ix=0;
  for(IDList::iterator i=idL.begin(); i != idL.end(); ++i)//, ++ix) 
    if( *i==s) 
      {
	return i - idL.begin();
      }

  return -1;
}
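FindInIDList is a hand-rolled linear search. An equivalent sketch using <algorithm> (an alternative formulation, not the original code) expresses the same behaviour with std::find and std::distance and avoids the commented-out index bookkeeping; it also works for containers without random-access iterators:

// Alternative sketch: same result via std::find/std::distance.
#include <algorithm>
#include <iterator>
#include <string>
inline int FindInIDListAlt(IDList& idL, const std::string& s)
{
  IDList::iterator it = std::find(idL.begin(), idL.end(), s);
  if (it == idL.end()) return -1;
  return static_cast<int>(std::distance(idL.begin(), it));
}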
Example #6
static IDList allIDs(QSqlDatabase db, const QString& tableName)
{
    IDList ret;
    QSqlQuery query(QString("SELECT id FROM %1").arg(tableName), db);

    while(query.next()) {
        ret.append(query.record().value("id").toULongLong());
    }

    return ret;
}
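A hypothetical call site for allIDs; the default connection and the "events" table name are placeholders, not taken from the original project:

// Illustration only: collect every id of a hypothetical "events" table
// through Qt's default database connection.
IDList ids = allIDs(QSqlDatabase::database(), QString("events"));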
Example #7
File: md.cpp Project: Fran89/seiscomp3
// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
AmplitudeProcessor::IDList
AmplitudeProcessor_Md::capabilityParameters(Capability cap) const {

	if ( cap == MeasureType ) {
		IDList params;
		params.push_back("AbsMax");
		params.push_back("MinMax");
		return params;
	}

	return AmplitudeProcessor::capabilityParameters(cap);
}
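A hypothetical caller of capabilityParameters; proc is a placeholder processor instance, the AmplitudeProcessor:: qualification of MeasureType is assumed, and IDList is assumed to behave like a std::vector<std::string>, consistent with the push_back calls above:

// Illustration only (hypothetical free function): print the options reported
// for the MeasureType capability.
#include <iostream>

void printMeasureTypes(const AmplitudeProcessor_Md& proc) {
	AmplitudeProcessor::IDList params = proc.capabilityParameters(AmplitudeProcessor::MeasureType);
	for ( size_t i = 0; i < params.size(); ++i )
		std::cout << params[i] << std::endl;
}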
Example #8
void CSVPointListExporter::exportFromDataBase()
{
    if(!QFile::exists(sourseDataBaseFile_))
    {
        qWarning() << sourseDataBaseFile_ << "not exists";
        return;
    }

    SqlPointListReader reader(sourseDataBaseFile_, sourseTableName_);

    if(!reader.open())
    {
        qWarning() << sourseDataBaseFile_ << "not open";
        return;
    }

    QFile targetFile(targetFileName_);
    if(!targetFile.open(QFile::WriteOnly | QIODevice::Text))
    {
        qWarning() << targetFileName_ << "can't open for writing";
        return; // nothing can be written if the target file failed to open
    }

    QTextStream targetFileStream(&targetFile);
    const IDList idList = reader.readAllItems();

    for(int i = 0; i < idList.count(); i++)
    {
        const ID& id = idList.at(i);
        const PointList pointList = reader.read(id);
        for(int j = 0; j < pointList.count(); j++)
        {
            const Point& point = pointList.at(j);
            targetFileStream << pointList.id() << ";" << point;

            const bool isLast = ((i + 1) == idList.count())
                    && ((j + 1) == pointList.count());

            if(!isLast)
            {
                targetFileStream << '\n';
            }
        }
    }

    targetFile.flush();
    targetFile.close();
}
Example #9
void BaseSemanticWalker::declare(const IDList& ids, Type* type) {
  IDList::const_iterator it;
  for (it = ids.begin(); it != ids.end(); ++it) {
    try {
      _symtable->insertSymbol(
        Symbol((*it)->getText(),
              type,
              _symtable->currentScope(),
              _symtable->unit(),
              (*it)->getLine(),
              (*it)->getColumn()));
    } catch (const RedeclarationException& e) {
      report((*it)->getLine(), (*it)->getColumn(),
            std::string("redeclaração: ") + e.symbol().lexeme());
    }
  }
}
Example #10
void ExternalWrenchesAndTorquesEstimator::estimateExternalWrenchAndInternalJoints(RobotJointStatus & joint_status, RobotSensorStatus & sensor_status)
{
    //Temporary workaround: wholeBodyDynamicsStatesInterface needs all the DOF present in the dynamical model
    if( sensors->getSensorNumber(wbi::SENSOR_ENCODER) != robot_estimation_model->getNrOfDOFs() )
    {
        IDList list = sensors->getSensorList(wbi::SENSOR_ENCODER);

        std::cerr << "Available encoders: " << list.toString() << std::endl;

        std::cerr << "yarpWholeBodyDynamicsEstimator::run() error: " <<
                     sensors->getSensorNumber(wbi::SENSOR_ENCODER) << " joint sensors are available, while " <<
                     robot_estimation_model->getNrOfDOFs() << " joints are present in the model " << std::endl;
        assert(false);
        return;
    }

    ///< \todo improve robustness: what if a sensor dies or stop working? interface should warn the user
    {
        resizeAll(sensors->getSensorNumber(SENSOR_ENCODER));
        resizeFTs(sensors->getSensorNumber(SENSOR_FORCE_TORQUE));
        resizeIMUs(sensors->getSensorNumber(SENSOR_IMU));

        ///< Read encoders
        omega_used_IMU  = sensor_status.omega_imu;

        domega_used_IMU = sensor_status.domega_imu;

        ddp_used_IMU = sensor_status.proper_ddp_imu;

        ///< Read skin contacts
        readSkinContacts();

        ///< Estimate joint torque sensors from force/torque measurements
        estimateExternalForcesAndJointTorques(joint_status,sensor_status);

        ///< Filter obtained joint torque measures
        // \todo reintroduce the filter ?
        joint_status.setJointTorquesYARP(tauJ);// tauJFilt->filt(tauJ);  ///< low pass filter


    }

    return;
}
Example #11
int CacheBase::Truncate(std::string path, off_t offset)
{
	pf_log[W_DEBUG] << "Truncating \"" << path << "\" at " << offset;

	pf_stat stat = GetAttr(path);
	stat.size = (size_t)offset;
	stat.ctime = time(NULL);
	stat.mtime = stat.ctime;
	stat.meta_mtime = stat.ctime;

	IDList idlist;
	idlist.insert(environment.my_id.Get());

	SetAttr(path, stat, idlist);

	FileContent& file = content_list.GetFile(path);
	file.Truncate(offset);

	return 0;
}
Example #12
  int Find( const std::string& name)
  {
    String_abbref_eq strAbbrefEq_name(name);

    // search keyword
    IDList::iterator f=std::find_if(listName.begin(),
			       listName.end(),
			       strAbbrefEq_name);
    if( f == listName.end()) return -1;

    // Note: there might be duplicate extra keyword names, return first one
//     // continue search (for ambiguity)
//     IDList::iterator ff=std::find_if(f+1,
// 				listName.end(),
// 				strAbbrefEq_name);
//     if( ff != listName.end())
//       {
// 	throw GDLException("Ambiguous keyword abbreviation in _EXTRA: "+name);
//       }
    return std::distance( listName.begin(),f);
  }
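Find honours IDL-style keyword abbreviation through the String_abbref_eq predicate, which is defined elsewhere in GDL. A hedged sketch of what such an abbreviation predicate could look like (AbbrevEq is illustrative only, not GDL's actual functor; it assumes <string> is already included, as in the code above):

  // Sketch only: a name matches if it is a leading abbreviation of the keyword.
  struct AbbrevEq
  {
    std::string prefix;
    explicit AbbrevEq( const std::string& p) : prefix( p) {}
    bool operator()( const std::string& s) const
    {
      return s.compare( 0, prefix.size(), prefix) == 0;
    }
  };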
Example #13
void CacheBase::Write(std::string path, const char* buf, size_t size, off_t off)
{
	FileContent& file = content_list.GetFile(path);
	FileChunk chunk(buf, off, size);
	file.SetChunk(chunk);

	/* No need to lock cache, we don't touch its members */
	pf_stat stat = GetAttr(path);
	time_t now = time(NULL);
	stat.meta_mtime = now;
	stat.mtime = now;
	stat.ctime = now;
	IDList idlist;
	idlist.insert(environment.my_id.Get());
	if(off + (off_t)size > (off_t)stat.size)
	{
		stat.size = (size_t)off + size;
		SetAttr(path, stat, idlist);
	}

	//content_list.RefreshPeersRef(path);
}
Example #14
NZBInfo* PrePostProcessor::MergeGroups(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo)
{
	int iAddedGroupID = 0;

	// merge(1): find ID of any file in new nzb-file
	for (FileQueue::iterator it = pDownloadQueue->GetFileQueue()->begin(); it != pDownloadQueue->GetFileQueue()->end(); it++)
	{
		FileInfo* pFileInfo = *it;
		if (pFileInfo->GetNZBInfo() == pNZBInfo)
		{
			iAddedGroupID = pFileInfo->GetID();
			break;
		}
	}

	// merge(2): check if queue has other nzb-files with the same name
	if (iAddedGroupID > 0)
	{
		for (FileQueue::iterator it = pDownloadQueue->GetFileQueue()->begin(); it != pDownloadQueue->GetFileQueue()->end(); it++)
		{
			FileInfo* pFileInfo = *it;
			if (pFileInfo->GetNZBInfo() != pNZBInfo &&
				!strcmp(pFileInfo->GetNZBInfo()->GetName(), pNZBInfo->GetName()))
			{
				// file found, do merging

				IDList cIDList;
				cIDList.push_back(pFileInfo->GetID());
				cIDList.push_back(iAddedGroupID);

				g_pQueueCoordinator->GetQueueEditor()->LockedEditList(pDownloadQueue, &cIDList, false, QueueEditor::eaGroupMerge, 0, NULL);

				return pFileInfo->GetNZBInfo();
			}
		}
	}

	return pNZBInfo;
}
Example #15
bool CKeyMgr::CanCharacterControlObject( HOBJECT hChar, HOBJECT hObj )
{
#ifndef _CLIENTBUILD

	if( !hChar || !hObj || !IsCharacter( hChar ))
		return false;

	// If there are no keys that control this object the Character shouldn't be able to control it.
	
	KeyControlMap::iterator iter = m_mapKeyControl.find( hObj );
	if( iter == m_mapKeyControl.end() )
		return false;

	CCharacter *pChar = (CCharacter*)g_pLTServer->HandleToObject( hChar );
	if( !pChar )
		return false;

	IDList *pCharKeyList = pChar->GetKeyList();
	uint8 nDummy;

	IDList& idList = (*iter).second.m_IDList;

	// Make sure the character has all the keys that are needed to control the object...

	for( uint8 i = 0; i < idList.m_IDArray.size(); ++i )
	{
		if( !pCharKeyList->Have( idList.m_IDArray[i], nDummy ))
		{
			return false;
		}
	}

#endif

	return true;
}
Example #16
//retrieve the candidate result set for each variable signature (var_sig) in _query.
void VSTree::retrieve(SPARQLquery& _query)
{
	Util::logging("IN retrieve");

	//debug
//	{
//	    VNode* temp_ptr = this->getLeafNodeByEntityID(473738);
//	    stringstream _ss;
//
//	    for (int i=0;i<temp_ptr->getChildNum();i++)
//	        if (temp_ptr->getChildEntry(i).getEntityId() == 473738)
//	        {
//	            _ss << "entity id=473738 entry sig:" << endl;
//	            _ss << "entity id=473738 leaf node line: " << temp_ptr->getFileLine() << endl;
//	            _ss << Signature::BitSet2str(temp_ptr->getChildEntry(i).getEntitySig().entityBitSet) << endl;
//	            break;
//	        }
//
//	    _ss << "leaf node sig:" << endl;
//	    _ss << Signature::BitSet2str(temp_ptr->getEntry().getEntitySig().entityBitSet) << endl;
//
//	    temp_ptr = temp_ptr->getFather(*(this->node_buffer));
//	    while (temp_ptr != NULL)
//	    {
//	        _ss << "line=" << temp_ptr->getFileLine() << endl;
//	        _ss << Signature::BitSet2str(temp_ptr->getEntry().getEntitySig().entityBitSet) << endl;
//	        temp_ptr = temp_ptr->getFather(*(this->node_buffer));
//	    }
//	    Util::logging(_ss.str());
//	}

    vector<BasicQuery*>& queryList = _query.getBasicQueryVec();
    // enumerate each BasicQuery and retrieve their variables' mapping entity in the VSTree.
    vector<BasicQuery*>::iterator iter=queryList.begin();
    for(; iter != queryList.end(); iter++)
    {
        int varNum = (*iter)->getVarNum();
        for (int i = 0; i < varNum; i++)
        {
            //debug
        	{
        		std::stringstream _ss;
        		_ss << "retrieve of var: " << i << endl;
        		Util::logging(_ss.str());
        	}
			bool flag = (*iter)->isLiteralVariable(i);
            const EntityBitSet& entityBitSet = (*iter)->getVarBitSet(i);
            IDList* idListPtr = &( (*iter)->getCandidateList(i) );
            this->retrieveEntity(entityBitSet, idListPtr);
#ifdef DEBUG_VSTREE
			stringstream _ss;
			_ss << "total num: " << this->entry_num << endl;
			_ss << "candidate num: " << idListPtr->size() << endl;
			_ss << endl;
			_ss << "isExist 473738: " << (idListPtr->isExistID(473738)?"true":"false") <<endl;
			_ss << "isExist 473472: " << (idListPtr->isExistID(473472)?"true":"false") <<endl;
			_ss << "isExist 473473: " << (idListPtr->isExistID(473473)?"true":"false") <<endl;
			Util::logging(_ss.str());
#endif

			//the basic query should end if one non-literal var has no candidates
			if(idListPtr->size() == 0 && !flag)
			{
				break;
			}
        }
    }
	Util::logging("OUT retrieve");
}
Example #17
void UpdateData::addGUID(const IDList& id)
{
	m_guidList.insert(id.begin(), id.end());
}
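addGUID copies the whole range into m_guidList, and the two-iterator insert suggests a set-like member, so duplicate IDs collapse. A hypothetical call; the vector-like IDList interface, the numeric ID values, and the helper function are all assumptions:

	// Illustration only: the repeated id ends up stored once in m_guidList.
	void addSomeGuids(UpdateData& updateData)
	{
		IDList ids;
		ids.push_back(1001);
		ids.push_back(1002);
		ids.push_back(1001); // duplicate, deduplicated by the set insert
		updateData.addGUID(ids);
	}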
Example #18
int main(int argc, char* argv[])
{
    struct rlimit r;
    getrlimit(RLIMIT_NOFILE, &r);
    cout << "current rlimit: " << r.rlim_cur << endl;
    r.rlim_cur = 1024 * 16;
    setrlimit(RLIMIT_NOFILE, &r);
    cout << "change rlimit to: " << r.rlim_cur << endl;

    struct timeval startTime, endTime;

    gettimeofday(&startTime, NULL);

    int numFiles=1;
    vector<string> pairNameVector, distNameVector;

    FILE **distFileList, **pairFileList;
    unsigned int ** inPairArray;
    float ** inDistArray;

    string inFileName = "", outFileName = "";

    printf("\n----------------------------------------------------------------------\n");
    printf("                  AVERAGE CLUSTERING GENETIC DISTANCES                  \n");

    float endLevel = -1;
    unsigned long long maxNumEdges=0;
    int i, iteration = 0;
    int numReads=0;

    getOptions(argc, argv, inFileName, numReads, numFiles, endLevel, outFileName);
    if (endLevel < 0 || endLevel > 1)
        endLevel = 1/log2((double)numReads);

    getDistNameList(inFileName, pairNameVector, distNameVector, numFiles);

    FILE * outFile = NULL;
    FILE * outFile1 = NULL;

    if (outFileName.length() > 0) {
        string outFileName1 = outFileName;
        outFileName1.append("_matlab");
        cout << outFileName << endl;
        cout << outFileName1 << endl;
        outFile = fopen(outFileName.c_str(), "w");
        outFile1 = fopen(outFileName1.c_str(), "w");
        if(outFile==NULL || outFile1==NULL)
        {
            cout<<"Error: Cannot open output file" << endl;
            cout << outFileName << endl;
            cout << outFileName1 << endl;
            exit(-1);
        }
    }
    FILE * mergeFile = NULL;
    string mergeFileName;

    mergeFileName=inFileName;
    mergeFileName.append("_Align_Merge");
    mergeFile = fopen(mergeFileName.c_str(), "w");

    if(pairNameVector.size()==0)
    {
        cout<<"Error: No distance file loaded."<<endl;
        exit(-1);
    }

    int fileId;

    pairFileList=(FILE**)malloc(sizeof(FILE*)*pairNameVector.size());
    distFileList=(FILE**)malloc(sizeof(FILE*)*distNameVector.size());

    if(distFileList==NULL || pairFileList==NULL)
    {
        cout<<"Error: Not enough memory" << endl;
        exit(-1);
    }

    for(fileId = 0; fileId < numFiles; ++fileId)
    {
        pairFileList[fileId]=fopen(pairNameVector[fileId].c_str(),"rb");
        if(pairFileList[fileId]==NULL)
        {
            cout<<"Error: Cannot open file" << pairNameVector[fileId].c_str() << endl;
            exit(-1);
        }
    }

    for(fileId = 0; fileId < numFiles; ++fileId)
    {
        distFileList[fileId]=fopen(distNameVector[fileId].c_str(),"rb");
        if(distFileList[fileId]==NULL)
        {
            cout<<"Error: Cannot open file" << distNameVector[fileId].c_str() << endl;
            exit(-1);
        }
    }

    if(numFiles!=distNameVector.size() || numFiles <= 0)
    {
        cout<<"Error: invalid number of files!EXIT..."<<endl;
        exit(-1);
    }
    cout<<"Use "<<numFiles<<" distance file(s)."<<endl;
    cout<<"endLevel: " << endLevel << endl;

    unsigned long long totalNumPairs = 0;

    multimap<float, DistPair> nodeMap;
    multimap<float, DistPair>::iterator iter;
    unsigned int idX, idY;
    float dist;
    inPairArray = (unsigned int **) malloc(sizeof(unsigned int*) * numFiles);
    inDistArray = (float **) malloc(sizeof(float*) * numFiles);
    int * indices = (int*) malloc(sizeof(int) * numFiles);
    int * readSizes = (int*) malloc(sizeof(int) * numFiles);
    bool * EOFTags = (bool*) malloc(sizeof(bool) * numFiles);
    bool suc;

    for(fileId=0; fileId<numFiles; fileId++)
    {
        // initialize
        inPairArray[fileId] = (unsigned int*) malloc(sizeof(unsigned int) * BUF_SIZE * 2);
        inDistArray[fileId] = (float*) malloc(sizeof(float) * BUF_SIZE);
        indices[fileId] = 0;
        readSizes[fileId] = 0;
        EOFTags[fileId] = false;

        // add the first pair of each file to the nodeMap
        suc = loadAPair(readSizes[fileId], indices[fileId], EOFTags[fileId], idX, idY, dist, pairFileList[fileId], distFileList[fileId], inPairArray[fileId], inDistArray[fileId]);

        if (suc)
            nodeMap.insert(pair<float, DistPair>(dist,DistPair(idX,idY,fileId)));
    }

    unsigned int minExactIndex;
    TreeNode *nodeX=0, *nodeY=0;
    vector<float>::iterator minExactIter;
    float minExactDist, minInexactDist;
    LinkMap::iterator mapIter;

    vActiveNodes.resize(2*numReads+1);
    vExactNodes.resize(2*numReads+1);
    vExactDist.resize(2*numReads+1);
    vInexactDist.resize(2*numReads+1);

    leaves=(TreeNode**)malloc(sizeof(TreeNode*)*numReads);

    if(leaves==0)
    {
        cout<<"Error: Not enough memory" << endl;
        exit(-1);
    }

    for(i = 0; i < numReads; ++i) {
        vActiveNodes[i] = leaves[i] = new TreeNode(i);
        vExactNodes[i] = 0;
    }

    for(i = numReads+1; i < 2*numReads+1; ++i) {
        vActiveNodes[i] = 0;
        vExactNodes[i] = 0;
    }

    newId = numReads;

    fill(vExactDist.begin(), vExactDist.end(), 1.0f);
    fill(vInexactDist.begin(), vInexactDist.end(), 1.0f);

    maxNumEdges = numReads;
    cout << "numReads: " << numReads << "\tmaxNumEdges: " << maxNumEdges << endl;

    //while (!nodeMap.empty())
    while (!allLoaded)
    {
        // the clustering can't continue and the edges exceed the capacity of the RAM
        if (totalNumEdges < maxNumEdges)
        {
            while(totalNumEdges <= maxNumEdges && !nodeMap.empty())
            {
                // get the first item in the nodeMap
                iter = nodeMap.begin();
                fileId = iter->second.fileId;

                // write to output
                idX = iter->second.idX;
                idY =  iter->second.idY;
                dist = iter->first;

                if (outFile != NULL) {
                    fprintf(outFile, "%d %d %f\n", idX, idY, dist);
                    fprintf(outFile1, "%d %d %f\n", idX+1, idY+1, dist);
                }

                if (dist < endLevel || fabs(dist-endLevel) < EPSILON)
                    absorb(idX, idY, dist);

                // remove the current item from the nodeMap
                nodeMap.erase(iter);

                suc = loadAPair(readSizes[fileId], indices[fileId], EOFTags[fileId], idX, idY, dist, pairFileList[fileId], distFileList[fileId], inPairArray[fileId], inDistArray[fileId]);
                if (suc)
                    nodeMap.insert(pair<float, DistPair>(dist,DistPair(idX,idY,fileId)));
            }

            if (dist > lambda)
                lambda = dist;
        }
        else
        {
            maxNumEdges *= 2;
            cout << "new maxNumEdges: " << maxNumEdges << "\tnewId: " << newId << "\tlambda: " << lambda << endl;

            if (maxNumEdges > UPPER_BOUND_NUM_EDGES)
            {
                /*
                cout << "LOOK AHEAD: current node: " << newId << "\tnum edges: " << totalNumEdges << endl;

                while(!nodeMap.empty())
                {
                	// get the first item in the nodeMap
                	iter = nodeMap.begin();
                	fileId = iter->second.fileId;

                	// write to output
                	idX = iter->second.idX;
                	idY =  iter->second.idY;
                	dist = iter->first;

                	lookAhead(idX, idY, dist);
                	// remove the current item from the nodeMap
                	nodeMap.erase(iter);

                	suc = loadAPair(readSizes[fileId], indices[fileId], EOFTags[fileId], idX, idY, dist, pairFileList[fileId], distFileList[fileId], inPairArray[fileId], inDistArray[fileId]);
                	if (suc)
                		nodeMap.insert(pair<float, DistPair>(dist,DistPair(idX,idY,fileId)));
                }
                */
                endLevel = lambda;
                allLoaded = true;
                cout << "new endLevel: " << endLevel << endl;
            }
        }

        if (nodeMap.empty())
            allLoaded = true;

        updateAllMin(endLevel);

        minInexactDist = *min_element(vInexactDist.begin(), vInexactDist.end());
        minExactIter = min_element(vExactDist.begin(),vExactDist.end());
        minExactDist = *minExactIter;

        //cout << "lambda_" << iteration << " = " << lambda << "\t" << lambda/2 << "\t" << minInexactDist << endl;

        while ((minExactDist < 1.0f) && (minExactDist < minInexactDist || fabs(minExactDist-minInexactDist) < EPSILON))
        {
            minExactIndex = minExactIter-vExactDist.begin();

            nodeX = vActiveNodes[minExactIndex]->topParent;
            nodeY = vExactNodes[minExactIndex]->topParent;

            merge(nodeX, nodeY, minExactDist, endLevel);
            fprintf(mergeFile, "%d %d %.6f\n", nodeX->ID+1, nodeY->ID+1, minExactDist);

            minInexactDist = *min_element(vInexactDist.begin(), vInexactDist.end());
            minExactIter = min_element(vExactDist.begin(),vExactDist.end());
            minExactDist = *minExactIter;
        }

        //cout << "cannot progress: " << newId << "\t" << totalNumEdges << "\t" << minInexactDist << "\t" << minExactDist << "\t" << minExactDist - lambda/2 << "\n";
        ++iteration;
    }

    cout << "DONE!" << endl;
    cout << "current node: " << newId << "\tnum unlinked: " << totalUnlinked << endl;

    // get root nodes and orphan nodes
    NodeSet roots;
    IDList orphanNodes;
    TreeNode *aLeaf = 0;

    for(i=0; i< numReads; ++i)
    {
        aLeaf = leaves[i];
        if(aLeaf->parent==0)  // find nodes with no parent
            orphanNodes.push_back(i);
        else
            roots.insert(aLeaf->topParent);
    }

    // print output to files

    string clusterListName, clusterName;

    clusterListName=inFileName;
    clusterListName.append(".Cluster_List");
    clusterName=inFileName;
    clusterName.append(".Cluster");

    printClusters(roots, orphanNodes, clusterListName, clusterName, endLevel);

    // clear memory
    emptyTree(roots);
    roots.clear();
    orphanNodes.clear();
    free(leaves);
    vActiveNodes.clear();
    vExactNodes.clear();
    vExactDist.clear();

    // clean up
    for(fileId=0; fileId<numFiles; ++fileId) {
        free(inDistArray[fileId]);
        free(inPairArray[fileId]);
    }
    free(inDistArray);
    free(inPairArray);
    free(indices);
    free(readSizes);
    free(EOFTags);
    free(pairFileList);
    free(distFileList);

    gettimeofday(&endTime, NULL);
    long elapsedTime = (endTime.tv_sec - startTime.tv_sec) * 1000u + (endTime.tv_usec - startTime.tv_usec) / 1.e3 + 0.5;

    if (outFile != NULL) {
        fclose(outFile);
        fclose(outFile1);
    }
    fclose(mergeFile);

    printf("totalNumPairs: %llu\n", totalNumPairs);
    printf("Time taken: %.3f s\n", elapsedTime/1.e3);
    printf("\n----------------------------------------------------------------------\n");
    return 0;
}
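The elapsed-time arithmetic above folds seconds and microseconds into rounded milliseconds; a small hedged helper (not part of the original program) that makes the intent explicit:

// Sketch only: the same millisecond computation, factored into a helper.
#include <sys/time.h>

static long elapsedMillis(const struct timeval& start, const struct timeval& end)
{
    double ms = (end.tv_sec - start.tv_sec) * 1000.0
              + (end.tv_usec - start.tv_usec) / 1000.0;
    return (long)(ms + 0.5);   // round to the nearest millisecond
}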
Example #19
int main(int argc, char* argv[])
{   	
	struct timeval startTime, endTime;
	
	gettimeofday(&startTime, NULL);
	
	string inFileName = "", cutoffFileName = "";
		
	printf("\n----------------------------------------------------------------------\n");
	printf("                  DENDROGRAM CUTTING GENETIC DISTANCES                  \n");

	int i, numReads;

	getOptions(argc, argv, inFileName, cutoffFileName, numReads);
	
	FILE * cutoffFile = NULL;
	vector<float> cutoffs;
	float cutoff;
	cutoffFile = fopen(cutoffFileName.c_str(),"r");
	while(!feof(cutoffFile)) {
		fscanf(cutoffFile, "%f\n", &cutoff);
		cutoffs.push_back(cutoff);
		cout << cutoff << endl;
	}
	fclose(cutoffFile);	

	leaves=(TreeNode**)malloc(sizeof(TreeNode*)*(2*numReads+1));

	if(leaves==0)
	{   
		cout<<"Error: Not enough memory" << endl;
		exit(-1);
	}
	
	for(i = 0; i < numReads; ++i) 
		leaves[i] = new TreeNode(i);	
	
	for(i = numReads+1; i < 2*numReads+1; ++i) 
		leaves[i] = 0;
	
	int idX, idY;
	float dist;		
	string mergeFileName=inFileName;
	mergeFileName.append("_Merge");	
	
	FILE * mergeFile;
	mergeFile = fopen(mergeFileName.c_str(), "r");
	
	newId = numReads;	
	while ( fscanf(mergeFile, "%d %d %f", &idX, &idY, &dist) == 3 ) 
		merge(leaves[idX-1], leaves[idY-1], dist);	
			
	fclose(mergeFile);
	cout << "DONE!" << endl;
	cout << "current node: " << newId << endl;	

	// get root nodes and orphan nodes
	NodeSet roots;
	IDList orphanNodes;	
	TreeNode *aLeaf = 0;
     	
	for(i=0; i< numReads; ++i)
	{   
		aLeaf = leaves[i];
		if(aLeaf->parent==0)  // find nodes with no parent
			orphanNodes.push_back(i); 				
		else
			roots.insert(aLeaf->topParent); 										
	}

 	// print output to files
 	
	string clusterListName, clusterName;

	clusterListName=inFileName;
	clusterListName.append(".Cluster_List");
	clusterName=inFileName;
	clusterName.append(".Cluster");

	printClusters(roots, orphanNodes, clusterListName, clusterName, cutoffs);		
	
	// clear memory
	emptyTree(roots);	
	roots.clear();	
	orphanNodes.clear();	
	free(leaves);
	
	gettimeofday(&endTime, NULL);	
	long elapsedTime = (endTime.tv_sec - startTime.tv_sec) * 1000u + (endTime.tv_usec - startTime.tv_usec) / 1.e3 + 0.5;

	printf("Time taken: %.3f s\n", elapsedTime/1.e3);
	printf("\n----------------------------------------------------------------------\n");
	return 0;
}
Example #20
int printClusters(NodeSet roots, IDList orphanNodes,
	string clusterListName, string clusterName, 
	vector<float> cutoffs)
{   
	TreeNode *tempNode = 0;
	NodeSetIter setIter;

	NodeList nodeList, tempList;
	NodeListIter nodeIt, tempIt;
	
	IDList OTU;
	IDListIter it;
	
	unsigned int size, numOTUs;
	FILE *clusterListFile, *clusterFile;

	clusterListFile = fopen(clusterListName.c_str(),"wb");
	clusterFile = fopen(clusterName.c_str(),"wb");
	if(clusterListFile == NULL|| clusterFile == NULL)
	{   
		cout << "Cannot open output files. Skipped" << endl;
		return 0;
	}
	printf("\n");

	vector<float>::iterator c;
	float distLevel;
	for(c = cutoffs.begin(); c != cutoffs.end(); c++)
	{   
		distLevel = *(c);
		numOTUs = 0;
		nodeList.clear();
		
		// extract the valid nodes for each distance level
		for(setIter=roots.begin(); setIter!=roots.end(); ++setIter)
		{   
			tempNode=0;
			if(*setIter != 0)
			{   
				if((*setIter)->dist < distLevel || fabs((*setIter)->dist-distLevel) < EPSILON)
				{   
					nodeList.push_front(*setIter);
					continue;
				}

				tempList.push_front(*setIter);
				while (tempList.size()!=0)
				{   
					tempIt=tempList.begin();
					tempNode=(*tempIt);
					tempList.pop_front();

					if (tempNode->left->dist < distLevel || fabs(tempNode->left->dist-distLevel) < EPSILON)
						nodeList.push_front(tempNode->left);						
					else
						tempList.push_front(tempNode->left);

					if (tempNode->right->dist < distLevel || fabs(tempNode->right->dist-distLevel) < EPSILON)
						nodeList.push_front(tempNode->right);
					else
						tempList.push_front(tempNode->right);					
				}
			}
			tempList.clear();
		}

		fprintf(clusterListFile," %.6f ", distLevel);
		fprintf(clusterFile," %.6f ", distLevel);
		
		// write the nodeList to file
		tempList.clear();
		for(nodeIt=nodeList.begin(); nodeIt!=nodeList.end(); ++nodeIt)
		{   
			// clean up and initialize
			fprintf(clusterFile,"|");
			tempNode=0;			
			size=0;
			OTU.clear();
			
			tempList.push_front(*nodeIt);
			
			while(tempList.size()!=0)
			{   
				tempIt=tempList.begin();
				tempNode=(*tempIt);
				tempList.pop_front();
				
				if(tempNode->left==0 && tempNode->right==0)
				{   
					OTU.push_back(tempNode->ID);
					size+=tempNode->numMembers;
				}				
				if (tempNode->right!=0)
					tempList.push_front(tempNode->right);
				if(tempNode->left!=0 )
					tempList.push_front(tempNode->left);
				
			}
			tempList.clear();					
			// print to clusterFile
			it=OTU.begin();
			fprintf(clusterFile,"%u",(*it));
			++it;
			for(;it!=OTU.end(); ++it)
				fprintf(clusterFile," %u",(*it));
			
			fprintf(clusterListFile, "%d ", size);	
			++numOTUs;			
		}
		
		for (it=orphanNodes.begin(); it != orphanNodes.end(); ++it) {
			fprintf(clusterFile,"|%u",(*it));
			fprintf(clusterListFile, "1 ");
		}
		numOTUs += orphanNodes.size();
		
		fprintf(clusterFile,"|\n");
		fprintf(clusterListFile, "\n");		
		printf("Dist: %.6f. numOTUs: %u. numSingletons: %lu\n", distLevel, numOTUs, orphanNodes.size());
	}
	
	printf("\n");
	OTU.clear();    
	fclose(clusterListFile);
	fclose(clusterFile);
	return 1;
}
Example #21
File: BinRpc.cpp Project: Bootz/nzbm
void EditQueueBinCommand::Execute()
{
	SNZBEditQueueRequest EditQueueRequest;
	if (!ReceiveRequest(&EditQueueRequest, sizeof(EditQueueRequest)))
	{
		return;
	}

	int iNrIDEntries = ntohl(EditQueueRequest.m_iNrTrailingIDEntries);
	int iNrNameEntries = ntohl(EditQueueRequest.m_iNrTrailingNameEntries);
	int iNameEntriesLen = ntohl(EditQueueRequest.m_iTrailingNameEntriesLen);
	int iAction = ntohl(EditQueueRequest.m_iAction);
	int iMatchMode = ntohl(EditQueueRequest.m_iMatchMode);
	int iOffset = ntohl(EditQueueRequest.m_iOffset);
	int iTextLen = ntohl(EditQueueRequest.m_iTextLen);
	bool bSmartOrder = ntohl(EditQueueRequest.m_bSmartOrder);
	unsigned int iBufLength = ntohl(EditQueueRequest.m_iTrailingDataLength);

	if (iNrIDEntries * sizeof(int32_t) + iTextLen + iNameEntriesLen != iBufLength)
	{
		error("Invalid struct size");
		return;
	}

	char* pBuf = (char*)malloc(iBufLength);

	// Read from the socket until nothing remains
	char* pBufPtr = pBuf;
	int NeedBytes = iBufLength;
	int iResult = 0;
	while (NeedBytes > 0)
	{
		iResult = recv(m_iSocket, pBufPtr, NeedBytes, 0);
		// Did the recv succeed?
		if (iResult <= 0)
		{
			error("invalid request");
			break;
		}
		pBufPtr += iResult;
		NeedBytes -= iResult;
	}
	bool bOK = NeedBytes == 0;

	if (iNrIDEntries <= 0 && iNrNameEntries <= 0)
	{
		SendBoolResponse(false, "Edit-Command failed: no IDs/Names specified");
		free(pBuf); // release the trailing-data buffer allocated above
		return;
	}

	if (bOK)
	{
		char* szText = iTextLen > 0 ? pBuf : NULL;
		int32_t* pIDs = (int32_t*)(pBuf + iTextLen);
		char* pNames = (pBuf + iTextLen + iNrIDEntries * sizeof(int32_t));

		IDList cIDList;
		NameList cNameList;

		if (iNrIDEntries > 0)
		{
			cIDList.reserve(iNrIDEntries);
			for (int i = 0; i < iNrIDEntries; i++)
			{
				cIDList.push_back(ntohl(pIDs[i]));
			}
		}

		if (iNrNameEntries > 0)
		{
			cNameList.reserve(iNrNameEntries);
			for (int i = 0; i < iNrNameEntries; i++)
			{
				cNameList.push_back(pNames);
				pNames += strlen(pNames) + 1;
			}
		}

		if (iAction < eRemoteEditActionPostMoveOffset)
		{
			bOK = g_pQueueCoordinator->GetQueueEditor()->EditList(
				iNrIDEntries > 0 ? &cIDList : NULL,
				iNrNameEntries > 0 ? &cNameList : NULL,
				(QueueEditor::EMatchMode)iMatchMode, bSmartOrder, (QueueEditor::EEditAction)iAction, iOffset, szText);
		}
		else
		{
			bOK = g_pPrePostProcessor->QueueEditList(&cIDList, (PrePostProcessor::EEditAction)iAction, iOffset);
		}
	}

	free(pBuf);

	if (bOK)
	{
		SendBoolResponse(true, "Edit-Command completed successfully");
	}
	else
	{
#ifndef HAVE_REGEX_H
		if ((QueueEditor::EMatchMode)iMatchMode == QueueEditor::mmRegEx)
		{
			SendBoolResponse(false, "Edit-Command failed: the program was compiled without RegEx-support");
			return;
		}
#endif
		SendBoolResponse(false, "Edit-Command failed");
	}
}
Example #22
 void Add( const std::string& k, BaseGDL** const val)
 {
   listName.push_back(k);
   listEnv.push_back(val);
 }
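Add keeps listName and listEnv index-aligned, which is why Find (Example #12) can return one index that is valid for both parallel lists. A hypothetical extra member built on that invariant (GetKW is not part of the original class; the operator[] access on listEnv is an assumption):

 // Illustration only: resolve a keyword name to its value slot via the shared index.
 BaseGDL** GetKW( const std::string& name)
 {
   int ix = Find( name);            // same index is valid for listName and listEnv
   if( ix < 0) return NULL;
   return listEnv[ ix];             // assumes listEnv supports operator[]
 }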
Example #23
int main(int argc, char* argv[])
{ 
	struct rlimit r;
	getrlimit(RLIMIT_NOFILE, &r);
	cout << "current rlimit: " << r.rlim_cur << endl;
	r.rlim_cur = 2048;
	setrlimit(RLIMIT_NOFILE, &r);
	cout << "change rlimit to: " << r.rlim_cur << endl;
	  
	struct timeval startTime, endTime;
	
	gettimeofday(&startTime, NULL);
	
	int numFiles=1;
	vector<string> pairNameVector, distNameVector;

	FILE **distFileList, **pairFileList;
	unsigned int ** inPairArray;
	float ** inDistArray;

	string inFileName = "";
		
	printf("\n----------------------------------------------------------------------\n");
	printf("                  COMPLETE CLUSTERING GENETIC DISTANCES                  \n");


	float stepSize = 0.01f, endLevel = 0.10f;	
	int i;
	int numReads=0;
		
	getOptions(argc, argv, inFileName, numReads, numFiles, endLevel, stepSize);
	getDistNameList(inFileName, pairNameVector, distNameVector, numFiles);

	if(pairNameVector.size()==0)
	{   
		cout<<"Error: No distance file loaded."<<endl;
		exit(-1);
	}
	
	int fileId;
	
	pairFileList=(FILE**)malloc(sizeof(FILE*)*pairNameVector.size());
	distFileList=(FILE**)malloc(sizeof(FILE*)*distNameVector.size());
	
	if(distFileList==NULL || pairFileList==NULL)
	{   
		cout<<"Error: Not enough memory" << endl;
		exit(-1);
	}

	for(fileId = 0; fileId < numFiles; ++fileId)
	{
		pairFileList[fileId]=fopen(pairNameVector[fileId].c_str(),"rb");
		if(pairFileList[fileId]==NULL)
		{   
			cout<<"Error: Cannot open file" << pairNameVector[fileId].c_str() << endl;
			exit(-1);
		}
	}
	
	for(fileId = 0; fileId < numFiles; ++fileId)
	{
		distFileList[fileId]=fopen(distNameVector[fileId].c_str(),"rb");
		if(distFileList[fileId]==NULL)
		{   
			cout<<"Error: Cannot open file" << distNameVector[fileId].c_str() << endl;
			exit(-1);
		}
	}

	if(numFiles!=distNameVector.size() || numFiles <= 0)
	{   
		cout<<"Error: invalid number of files!EXIT..."<<endl;
		exit(-1);
	}
	cout<<"Use "<<numFiles<<" distance file(s)."<<endl;
	
	unsigned long long totalNumPairs = 0;

	multimap<float, DistPair> nodeMap;
	multimap<float, DistPair>::iterator iter;
	unsigned int idX, idY;
	float dist;
	inPairArray = (unsigned int **) malloc(sizeof(unsigned int*) * numFiles);
	inDistArray = (float **) malloc(sizeof(float*) * numFiles);
	int * indices = (int*) malloc(sizeof(int) * numFiles);
	int * readSizes = (int*) malloc(sizeof(int) * numFiles);
	bool * EOFTags = (bool*) malloc(sizeof(bool) * numFiles);
	bool suc;

	for(fileId=0; fileId<numFiles; fileId++)
	{   
		// initialize
		inPairArray[fileId] = (unsigned int*) malloc(sizeof(unsigned int) * BUF_SIZE * 2);
		inDistArray[fileId] = (float*) malloc(sizeof(float) * BUF_SIZE);
		indices[fileId] = 0;
		readSizes[fileId] = 0;
		EOFTags[fileId] = false;				

		// add the first pair of each file to the nodeMap
		suc = loadAPair(readSizes[fileId], indices[fileId], EOFTags[fileId], idX, idY, dist, pairFileList[fileId], distFileList[fileId], inPairArray[fileId], inDistArray[fileId]);				
	
		if (suc)
			nodeMap.insert(pair<float, DistPair>(dist,DistPair(idX,idY,fileId)));
	}		
				

	LinkMap::iterator mapIter;	
						

	vActiveNodes.resize(2*numReads+1);

	leaves=(TreeNode**)malloc(sizeof(TreeNode*)*numReads);

	if(leaves==0)
	{   
		cout<<"Error: Not enough memory" << endl;
		exit(-1);
	}
	
	for(i = 0; i < numReads; ++i) {
		vActiveNodes[i] = leaves[i] = new TreeNode(i);	
	}
	
	for(i = numReads+1; i < 2*numReads+1; ++i) {
		vActiveNodes[i] = 0;
	}
	
	newId = numReads;	
	
	cout << "numReads: " << numReads << "\tmaxNumEdges: " << MAX_NUM_EDGES << endl;
	cout << "endLevel: " << endLevel << endl;
			
	while(totalNumEdges < MAX_NUM_EDGES && !nodeMap.empty())
	{   
		// get the first item in the nodeMap
		iter = nodeMap.begin();
		fileId = iter->second.fileId;        

		// write to output
		idX = iter->second.idX;
		idY =  iter->second.idY;
		dist = iter->first;
		
		absorb(idX, idY, dist);	
		
		// remove the current item from the nodeMap
		nodeMap.erase(iter);
			
		suc = loadAPair(readSizes[fileId], indices[fileId], EOFTags[fileId], idX, idY, dist, pairFileList[fileId], distFileList[fileId], inPairArray[fileId], inDistArray[fileId]);				

		if (suc) 
			nodeMap.insert(pair<float, DistPair>(dist,DistPair(idX,idY,fileId)));	
	}
			
	while(!nodeMap.empty())
	{				
		// get the first item in the nodeMap
		iter = nodeMap.begin();
		fileId = iter->second.fileId;        

		// write to output
		idX = iter->second.idX;
		idY =  iter->second.idY;
		dist = iter->first;

		lookAhead(idX, idY, dist);	
		// remove the current item from the nodeMap
		nodeMap.erase(iter);
			
		suc = loadAPair(readSizes[fileId], indices[fileId], EOFTags[fileId], idX, idY, dist, pairFileList[fileId], distFileList[fileId], inPairArray[fileId], inDistArray[fileId]);				

		if (suc) 
			nodeMap.insert(pair<float, DistPair>(dist,DistPair(idX,idY,fileId)));	
	}
	
	cout << "DONE!" << endl;
	cout << "current node: " << newId << "\tnum unlinked: " << totalUnlinked << endl;	

	// get root nodes and orphan nodes
	NodeSet roots;
	IDList orphanNodes;	
	TreeNode *aLeaf = 0;
     	
	for(i=0; i< numReads; ++i)
	{   
		aLeaf = leaves[i];
		if(aLeaf->parent==0)  // find nodes with no parent
			orphanNodes.push_back(i); 				
		else
			roots.insert(aLeaf->topParent); 										
	}

 	// print output to files
 	
	string clusterListName, clusterName;

	clusterListName=inFileName;
	clusterListName.append(".Cluster_List");
	clusterName=inFileName;
	clusterName.append(".Cluster");

	printClusters(roots, orphanNodes, clusterListName, clusterName, stepSize, endLevel);			
	
	// clear memory
	emptyTree(roots);	
	roots.clear();	
	orphanNodes.clear();	
	free(leaves);
	vActiveNodes.clear();
	
	// clean up
	for(fileId=0; fileId<numFiles; ++fileId) {
		free(inDistArray[fileId]);
		free(inPairArray[fileId]);
	}
	free(inDistArray);
	free(inPairArray);
	free(indices);
	free(readSizes);
	free(EOFTags);
	free(pairFileList);
	free(distFileList);	
	
	gettimeofday(&endTime, NULL);	
	long elapsedTime = (endTime.tv_sec - startTime.tv_sec) * 1000u + (endTime.tv_usec - startTime.tv_usec) / 1.e3 + 0.5;
	
	printf("totalNumPairs: %llu\n", totalNumPairs);
	printf("Time taken: %.3f s\n", elapsedTime/1.e3);
	printf("\n----------------------------------------------------------------------\n");
	return 0;
}
Example #24
bool QueueEditor::EditGroup(DownloadQueue* pDownloadQueue, FileInfo* pFileInfo, EEditAction eAction, int iOffset, const char* szText)
{
	IDList cIDList;
	cIDList.clear();

	// collecting files belonging to group
	for (FileQueue::iterator it = pDownloadQueue->GetFileQueue()->begin(); it != pDownloadQueue->GetFileQueue()->end(); it++)
	{
		FileInfo* pFileInfo2 = *it;
		if (pFileInfo2->GetNZBInfo() == pFileInfo->GetNZBInfo())
		{
			cIDList.push_back(pFileInfo2->GetID());
		}
	}

	if (eAction == eaGroupMoveOffset)
	{
		// calculating offset in terms of files
		FileList cGroupList;
		BuildGroupList(pDownloadQueue, &cGroupList);
		unsigned int iNum = 0;
		for (FileList::iterator it = cGroupList.begin(); it != cGroupList.end(); it++, iNum++)
		{
			FileInfo* pGroupInfo = *it;
			if (pGroupInfo->GetNZBInfo() == pFileInfo->GetNZBInfo())
			{
				break;
			}
		}
		int iFileOffset = 0;
		if (iOffset > 0)
		{
			if (iNum + iOffset >= cGroupList.size() - 1)
			{
				eAction = eaGroupMoveBottom;
			}
			else
			{
				for (unsigned int i = iNum + 2; i < cGroupList.size() && iOffset > 0; i++, iOffset--)
				{
					iFileOffset += FindFileInfoEntry(pDownloadQueue, cGroupList[i]) - FindFileInfoEntry(pDownloadQueue, cGroupList[i-1]);
				}
			}
		}
		else
		{
			if (iNum + iOffset <= 0)
			{
				eAction = eaGroupMoveTop;
			}
			else
			{
				for (unsigned int i = iNum; i > 0 && iOffset < 0; i--, iOffset++)
				{
					iFileOffset -= FindFileInfoEntry(pDownloadQueue, cGroupList[i]) - FindFileInfoEntry(pDownloadQueue, cGroupList[i-1]);
				}
			}
		}
		iOffset = iFileOffset;
	}
	else if (eAction == eaGroupDelete)
	{
		pFileInfo->GetNZBInfo()->SetDeleted(true);
		pFileInfo->GetNZBInfo()->SetCleanupDisk(CanCleanupDisk(pDownloadQueue, pFileInfo->GetNZBInfo()));
	}

	EEditAction GroupToFileMap[] = { (EEditAction)0, eaFileMoveOffset, eaFileMoveTop, eaFileMoveBottom, 
		eaFilePause, eaFileResume, eaFileDelete, eaFilePauseAllPars, eaFilePauseExtraPars, eaFileSetPriority, eaFileReorder,
		eaFileMoveOffset, eaFileMoveTop, eaFileMoveBottom, eaFilePause, eaFileResume, eaFileDelete, 
		eaFilePauseAllPars, eaFilePauseExtraPars, eaFileSetPriority,
		(EEditAction)0, (EEditAction)0, (EEditAction)0 };

	return InternEditList(pDownloadQueue, &cIDList, true, GroupToFileMap[eAction], iOffset, szText);
}