/**
 * Greedily extends a primary chain of co-linear seed matches.
 *
 * Starting from head_node's coordinates, walks the singly linked seed list
 * via next_node and appends every (ref_ind, read_ind) anchor whose
 * read-gap / reference-gap ratio lies in (0.70, 1.30) to `chain`.
 * Accepted anchors and duplicate-coordinate nodes are unlinked from the
 * list (through previous_node) and freed.  The walk stops when the
 * reference gap exceeds what the remaining read could plausibly span.
 *
 * @param head_node     first anchor of the chain; only its coordinates are
 *                      read (copied into a local cursor, never modified)
 * @param chain         output: (reference_index, read_index) anchor pairs
 * @param next_node     first candidate node to examine
 * @param previous_node node immediately preceding next_node in the list
 * @param readlen       read length, used to bound the reference gap
 */
void create_primary_chain_from_list(node *head_node, vector<pair<int, int> >& chain, node *next_node, node *previous_node,  int readlen)
{
        int range1, range2;
        int max_reference_delta;

        // Local cursor holding the most recently accepted anchor.  A stack
        // copy is used (the original heap-allocated it and could leak on an
        // exception) so head_node itself is never touched.
        node current_node;
        current_node.ref_ind = head_node->ref_ind;
        current_node.read_ind = head_node->read_ind;
        current_node.next = head_node->next;

        while(next_node != NULL)
        {
                range1 = next_node->read_ind - current_node.read_ind;  // gap on the read
                range2 = next_node->ref_ind - current_node.ref_ind;    // gap on the reference

                // Stop once the reference gap exceeds the remaining reach of
                // the read (with 30% slack), capped by SEEDTUPLEDIST.
                // NOTE(review): the double result of min() is truncated here.
                max_reference_delta = min(1.30 * readlen - current_node.read_ind, SEEDTUPLEDIST);
                if(range2 > max_reference_delta)
                {
                        break;
                }
                if(range1 < 0)
                {
                        // Candidate lies behind the cursor on the read: skip
                        // it but leave it in the list for other chains.
                        previous_node = next_node;
                        next_node = next_node->next;
                        continue;
                }
                else if(range2 == 0 || range1 == 0)
                {
                        // Duplicate coordinate on either axis: unlink and
                        // free the redundant node.
                        previous_node->next = next_node->next;
                        delete next_node;
                        next_node = previous_node->next;
                }
                else {
                        float diff = (1.0 * range1) / range2;

                        // Accept anchors that are roughly diagonal.
                        if(diff > 0.70 && diff < 1.30)
                        {
                                current_node.ref_ind = next_node->ref_ind;
                                current_node.read_ind = next_node->read_ind;
                                current_node.next = next_node->next;

                                chain.push_back(make_pair(next_node->ref_ind, next_node->read_ind));
                                // Consume the node: unlink, free, and resume
                                // scanning from its successor.
                                previous_node->next = next_node->next;
                                delete next_node;
                                next_node = previous_node->next;
                        }
                        else {
                                previous_node = next_node;
                                next_node = next_node->next;
                        }
                }
        }
}
Exemple #2
0
 /// Returns the [min, max) shard-key range for the given chunk document.
 /// The chunk document itself is not consulted; the bounds are passed through.
 virtual pair<BSONObj, BSONObj> rangeFor( const BSONObj& chunkDoc,
                                          const BSONObj& min,
                                          const BSONObj& max ) const
 {
     return pair<BSONObj, BSONObj>( min, max );
 }
/* Load a grammar file and store it as map<string, vector<list<string> > >.
 *
 * A line starting with an alphabetic character introduces a new nonterminal
 * head symbol; every following non-alphabetic-start line is one production
 * body for that head, split on whitespace into a list of symbols.
 * Exits the process if the file cannot be opened.
 * */
void Parser:: inputGrammar(char* input_path){

	/* inputGrammar save as map<string, vector<list<string> >> */
	Nonterminal* head;
	string head_symbol;
	vector<list<string> > body;   // production bodies collected for the current head

	// open file
	ifstream input_file(input_path, ios::in);
	if( !input_file ){
		cout << input_path << " could not be opened" << endl;
		exit(1);
	}

	// read input line by line
	for ( string line; getline(input_file, line); ){

		// Strip a trailing '\r' left by Windows line endings.  (The old
		// code called line.substr() and discarded the result — a no-op.)
		if( !line.empty() && line[line.length()-1] == '\r' ){
			line.erase(line.length()-1);
		}

		// Skip blank lines instead of recording them as empty productions.
		if( line.empty() ){
			continue;
		}

		// is Nonterminal
		if ( isalpha(line[0]) ){
			// Flush the productions collected for the previous head.
			if( !body.empty() ){
				grammar.insert(make_pair(head_symbol, body));
			}

			head_symbol = line;
			head = new Nonterminal( head_symbol );

			// non_terminals vector
			non_terminals.push_back(head);
			// non_terminals map for search
			non_terminals_map.insert(make_pair(line, head));
			body.clear();

		}// is terminal
		else{
			list<string> right_hand_side;

			// Copy the line into a mutable buffer for strtok.  The buffer
			// pointer is kept separate from the token pointer so it can be
			// freed afterwards (the old code overwrote its only pointer
			// with strtok's return value and leaked the buffer each line).
			char* buffer = new char[line.length() + 1];
			strcpy(buffer, line.c_str() );

			// split by whitespace (spaces and tabs)
			for( char* tok = strtok(buffer, " \t"); tok != NULL; tok = strtok(NULL, " \t") ){
				right_hand_side.push_back(string(tok));
			}
			delete [] buffer;
			body.push_back(right_hand_side);
		}
	}

	// insert the productions of the final head (skip if the file was empty)
	if( !head_symbol.empty() ){
		grammar.insert(make_pair(head_symbol, body));
	}
}
Exemple #4
0
// Registers a controller on this junction together with the control-type
// string describing how it operates.
void Junction::addJunctionController(Controller *controller, const std::string &controlType)
{
    const std::pair<Controller *, std::string> entry(controller, controlType);
    junctionControllerVector.push_back(entry);
}
Exemple #5
0
 // Rebuilds this keyframe's covisibility links: counts how many map points
 // each other keyframe shares with this one, connects those above a
 // threshold (or at least the best one), and refreshes the weight-ordered
 // neighbour lists.  Also adopts a spanning-tree parent on first connection.
 void KeyFrame::UpdateConnections()
 {
     map<KeyFrame*,int> KFcounter;   // shared-observation count per keyframe
     
     vector<MapPoint*> vpMP;
     
     {
         // Snapshot the map-point vector under the feature mutex so the
         // counting loop below can run without holding the lock.
         boost::mutex::scoped_lock lockMPs(mMutexFeatures);
         vpMP = mvpMapPoints;
     }
     
     //For all map points in keyframe check in which other keyframes are they seen
     //Increase counter for those keyframes
     for(vector<MapPoint*>::iterator vit=vpMP.begin(), vend=vpMP.end(); vit!=vend; vit++)
     {
         MapPoint* pMP = *vit;
         
         // Skip empty slots and points flagged as bad.
         if(!pMP)
             continue;
         
         if(pMP->isBad())
             continue;
         
         map<KeyFrame*,size_t> observations = pMP->GetObservations();
         
         for(map<KeyFrame*,size_t>::iterator mit=observations.begin(), mend=observations.end(); mit!=mend; mit++)
         {
             // Do not count this keyframe's own observation of the point.
             if(mit->first->mnId==mnId)
                 continue;
             KFcounter[mit->first]++;
         }
     }
     
     // No shared observations at all: leave existing connections untouched.
     if(KFcounter.empty())
         return;
     
     //If the counter is greater than threshold add connection
     //In case no keyframe counter is over threshold add the one with maximum counter
     int nmax=0;
     KeyFrame* pKFmax=NULL;
     int th = 15;   // minimum shared-point count required for a connection
     
     vector<pair<int,KeyFrame*> > vPairs;   // (weight, keyframe) pairs passing th
     vPairs.reserve(KFcounter.size());
     for(map<KeyFrame*,int>::iterator mit=KFcounter.begin(), mend=KFcounter.end(); mit!=mend; mit++)
     {
         if(mit->second>nmax)
         {
             nmax=mit->second;
             pKFmax=mit->first;
         }
         if(mit->second>=th)
         {
             vPairs.push_back(make_pair(mit->second,mit->first));
             // Mirror the link on the other keyframe as well.
             (mit->first)->AddConnection(this,mit->second);
         }
     }
     
     // Fallback: nothing passed the threshold, so connect at least the
     // keyframe with the highest shared count.
     if(vPairs.empty())
     {
         vPairs.push_back(make_pair(nmax,pKFmax));
         pKFmax->AddConnection(this,nmax);
     }
     
     // Sort ascending by weight, then push_front so the lists end up in
     // descending-weight order.
     sort(vPairs.begin(),vPairs.end());
     list<KeyFrame*> lKFs;
     list<int> lWs;
     for(size_t i=0; i<vPairs.size();i++)
     {
         lKFs.push_front(vPairs[i].second);
         lWs.push_front(vPairs[i].first);
     }
     
     {
         // Publish the new connection state under the connections mutex.
         boost::mutex::scoped_lock lockCon(mMutexConnections);
         
         // mspConnectedKeyFrames = spConnectedKeyFrames;
         mConnectedKeyFrameWeights = KFcounter;
         mvpOrderedConnectedKeyFrames = vector<KeyFrame*>(lKFs.begin(),lKFs.end());
         mvOrderedWeights = vector<int>(lWs.begin(), lWs.end());
         
         // First connection of a non-initial keyframe: adopt the most
         // covisible keyframe as its parent in the spanning tree.
         if(mbFirstConnection && mnId!=0)
         {
             mpParent = mvpOrderedConnectedKeyFrames.front();
             mpParent->AddChild(this);
             mbFirstConnection = false;
         }
         
     }
 }
Exemple #6
0
/**
@brief
Reads the memory block sizes and maximum counts from the memory-pool
configuration file and passes them to initMessageBlockManager.
@return 0 on success, -1 if the file is missing or malformed.
*/
int readMBconfig(void)
{
	filename_t fileName(Config::instance()->process.memconf);

	FILE *fp = NULL;
	fp = fopen( fileName, "rt" );
	if( fp == NULL )
	{
		PAS_ERROR1( "Memory Pool Config File:  [%s]  not found", fileName.toStr() );
		return -1;
	}

	// A map is used because the key holds the block size and the value the
	// maximum count, and duplicate entries in the config file (e.g. 16K,
	// 16K) are automatically ignored on insert — exactly what we want.
	mapint mapSizes;
	int	lineCount = 0;
	int	maxBlockSize =0;
	while( !feof(fp) )
	{
		char line[1024] = "\0";
		char *pResult = fgets( line, sizeof(line), fp );
		if( pResult == NULL )
			break;
		lineCount++;
		
		StrSplit MBSpliter( 2, sizeof(line) );
		MBSpliter.split( line );

		// skip comment
		if( MBSpliter.fldVal(0)[0] == '#' )
			continue;

		// A valid line has exactly two fields: "<size><unit> <count>".
		if( MBSpliter.numFlds() != 2 )
		{
			PAS_ERROR2( "Memory Pool Config File: Error in line [%d], file[%s]", lineCount, fileName.toStr() );
			fclose( fp );	// was leaked on this error path
			return -1;
		}

		char *pLeftItem = MBSpliter.fldVal( 0 );
		char *pRightItem = MBSpliter.fldVal( 1 );
		int nLeftLength = strlen( pLeftItem );

		int nBlockSize = 0;
		int nMaxCount = strtol( pRightItem, NULL, 10 );

		// The size field must carry a K/k (kilobyte) or M/m (megabyte)
		// unit suffix; the suffix is stripped before conversion.
		if( pLeftItem[nLeftLength-1] == 'K' || pLeftItem[nLeftLength-1] == 'k' )
		{
			pLeftItem[nLeftLength-1] = '\0';
			nBlockSize = strtol( pLeftItem, NULL, 10 );
			nBlockSize = nBlockSize * 1024;
		}

		else if( pLeftItem[nLeftLength-1] == 'M' || pLeftItem[nLeftLength-1] == 'm' )
		{
			pLeftItem[nLeftLength-1] = '\0';
			nBlockSize = strtol( pLeftItem, NULL, 10 );
			nBlockSize = nBlockSize * 1024 * 1024;
		}

		// Reject lines with a missing or invalid unit suffix.
		else
		{
			PAS_ERROR2( "Memory Pool Config File: Error in line [%d], file[%s]", lineCount, fileName.toStr());
			fclose( fp );	// was leaked on this error path
			return -1;
		}

		// Block sizes must be listed in non-decreasing order.
		if (nBlockSize < maxBlockSize)
		{
			PAS_ERROR2( "Memory Pool Config File: Block Size must be bigger than the previous size. Error in line [%d], file[%s]", lineCount, fileName.toStr());
			fclose( fp );	// was leaked on this error path
			return -1;
			
		}

		maxBlockSize = nBlockSize;
		
		mapSizes.insert( make_pair(nBlockSize, nMaxCount) );
		
	}

	fclose( fp );

	initMessageBlockManager( mapSizes );

	return 0;
}
Exemple #7
0
void SimpleXML::addAttrib(const string& aName, const string& aData) throw(SimpleXMLException) {
    if(current == &root)
        throw SimpleXMLException("No tag is currently selected");

    current->attribs.push_back(make_pair(aName, aData));
}
Exemple #8
0
/**
 * Reads one geometry element ("Pline", "Pline Multiple" or "Line") from a
 * MIF file stream and fills vCoorXYLst with its (X, Y) coordinate pairs.
 * Leading blank lines and non-element lines are skipped; the element's
 * trailing attribute line is consumed before returning.
 *
 * @param fMifFile    open MIF input stream, positioned before an element
 * @param vCoorXYLst  output: cleared, then filled with coordinate pairs
 * @return RET_SUCCESS on success, RET_FAILED on EOF or malformed input.
 */
int ReadMifOneElement(ifstream& fMifFile, vector<pair<double, double> >& vCoorXYLst)
{
	int iLineLoop, iPntLoop;
	int iLineNum = 0;
	int iPntNum = 0;
	string strTok = " ";  
	string strPline = "Pline";
	string strLine = "Line";
	string strMultiple = "Multiple";
	double dX, dY;
	char  acLine[1024];
	vector<string> vFirstLineOfEle;

	vCoorXYLst.clear();

	// Skip blank lines until the element's first non-empty line.
	do {
		if (fMifFile.eof()) {
			// guard: without this the old loop spun forever at EOF,
			// since getline() keeps returning an empty buffer there.
			return RET_FAILED;
		}
		memset(acLine, 0, 1024 );
		fMifFile.getline(acLine, 1024);
	} while( strcmp(acLine,"") == 0);

	vFirstLineOfEle.clear();
	vFirstLineOfEle = SplitString(acLine, strTok, "");   

	if( 0 == vFirstLineOfEle.size())
	{
		//log
		return RET_FAILED;
	}

	// Scan forward until a "Pline"/"Line" header appears (tolerates the
	// different MIF layout variants).
	while (vFirstLineOfEle[0] != strPline && vFirstLineOfEle[0] != strLine)
	{
		if (fMifFile.eof()) {
			return RET_FAILED;
		}

		do {
			memset(acLine, 0, 1024 );
			fMifFile.getline(acLine, 1024);
			if (fMifFile.eof()) {
				return RET_FAILED;
			}
		} while( strcmp(acLine,"") == 0);

		vFirstLineOfEle.clear();
		vFirstLineOfEle = SplitString(acLine, strTok, "");   

		if( 0 == vFirstLineOfEle.size())
		{
			//log
			return RET_FAILED;
		}
	}

	if( vFirstLineOfEle[0] == strPline )
	{
		if( vFirstLineOfEle.size() < 2 )
		{
			// guard: "Pline" must carry an argument (count or "Multiple")
			return RET_FAILED;
		}
		if( vFirstLineOfEle[1] == strMultiple)
		{
			if( vFirstLineOfEle.size() < 3 )
			{
				// guard: "Pline Multiple" must carry the section count
				return RET_FAILED;
			}
			iLineNum = StringToInt(vFirstLineOfEle[2]);

			for( iLineLoop = 0; iLineLoop < iLineNum; iLineLoop++ )
			{
				// Each section starts with its own point count.
				memset(acLine, 0, 1024 );
				fMifFile.getline(acLine, 1024);

				vFirstLineOfEle.clear();
				vFirstLineOfEle = SplitString(acLine, strTok, "");
				if( vFirstLineOfEle.empty() )
				{
					// guard: missing point-count line
					return RET_FAILED;
				}
				iPntNum = StringToInt(vFirstLineOfEle[0]);
				for( iPntLoop = 0; iPntLoop < iPntNum; iPntLoop++ )
				{
					memset(acLine, 0, 1024 );
					fMifFile.getline(acLine, 1024);

					vFirstLineOfEle.clear();
					vFirstLineOfEle = SplitString(acLine, strTok, "");   
					if( vFirstLineOfEle.size() < 2 )
					{
						// guard: a coordinate line needs X and Y
						return RET_FAILED;
					}

					dX = StringToDouble(vFirstLineOfEle[0]);
					dY = StringToDouble(vFirstLineOfEle[1]);
					vCoorXYLst.push_back(make_pair(dX,dY));				
				}
			}
		}
		else
		{
			iPntNum = StringToInt(vFirstLineOfEle[1]);

			for( iPntLoop = 0; iPntLoop < iPntNum; iPntLoop++ )
			{
				memset(acLine, 0, 1024 );
				fMifFile.getline(acLine, 1024);

				vFirstLineOfEle.clear();
				vFirstLineOfEle = SplitString(acLine, strTok, "");   
				if( vFirstLineOfEle.size() < 2 )
				{
					// guard: a coordinate line needs X and Y
					return RET_FAILED;
				}

				dX = StringToDouble(vFirstLineOfEle[0]);
				dY = StringToDouble(vFirstLineOfEle[1]);
				vCoorXYLst.push_back(make_pair(dX,dY));				
			}
		}
	}
	else if( vFirstLineOfEle[0] == strLine )
	{
		if( vFirstLineOfEle.size() < 5 )
		{
			// guard: "Line x1 y1 x2 y2" needs four coordinates
			return RET_FAILED;
		}
		dX = StringToDouble(vFirstLineOfEle[1]);
		dY = StringToDouble(vFirstLineOfEle[2]);
		vCoorXYLst.push_back(make_pair(dX,dY));

		dX = StringToDouble(vFirstLineOfEle[3]);
		dY = StringToDouble(vFirstLineOfEle[4]);
		vCoorXYLst.push_back(make_pair(dX,dY));
	}
	else
	{
		//log
		return RET_FAILED;
	}

	// Consume the element's trailing attribute/pen line.
	fMifFile.getline(acLine, 1024);


	return RET_SUCCESS;
}
bool MeshTopologyTests::testEntityConstraints()
{
  bool success = true;

  // make two simple meshes
  MeshTopologyPtr mesh2D = makeRectMesh(0.0, 0.0, 2.0, 1.0,
                                        2, 1);
  MeshTopologyPtr mesh3D = makeHexMesh(0.0, 0.0, 0.0, 2.0, 4.0, 3.0,
                                       2, 2, 1);

  unsigned vertexDim = 0;
  unsigned edgeDim = 1;
  unsigned faceDim = 2;

  // first, check that unconstrained edges and faces are unconstrained

  set< unsigned > boundaryEdges;
  set< unsigned > internalEdges;

  for (unsigned cellIndex=0; cellIndex<mesh2D->cellCount(); cellIndex++)
  {
    CellPtr cell = mesh2D->getCell(cellIndex);
    unsigned sideCount = cell->getSideCount();

    for (unsigned sideOrdinal=0; sideOrdinal<sideCount; sideOrdinal++)
    {
      unsigned edgeIndex = cell->entityIndex(edgeDim, sideOrdinal);
      unsigned numCells = mesh2D->getActiveCellCount(edgeDim,edgeIndex);
      if (numCells == 1)   // boundary edge
      {
        boundaryEdges.insert(edgeIndex);
      }
      else if (numCells == 2)
      {
        internalEdges.insert(edgeIndex);
      }
      else
      {
        success = false;
        cout << "testEntityConstraints: In initial 2D mesh, edge " << edgeIndex << " has active cell count of " << numCells << ".\n";
      }
    }
  }
  if (internalEdges.size() != 1)
  {
    success = false;
    cout << "testEntityConstraints: In initial 2D mesh, there are " << internalEdges.size() << " internal edges (expected 1).\n";
  }
  for (set<unsigned>::iterator edgeIt=internalEdges.begin(); edgeIt != internalEdges.end(); edgeIt++)
  {
    unsigned edgeIndex = *edgeIt;
    unsigned constrainingEntityIndex = mesh2D->getConstrainingEntity(edgeDim,edgeIndex).first;
    if (constrainingEntityIndex != edgeIndex)
    {
      success = false;
      cout << "testEntityConstraints: In initial 2D mesh, internal edge is constrained by a different edge.\n";
    }
  }

  set<unsigned> boundaryFaces;
  set<unsigned> internalFaces;
  map<unsigned, vector<unsigned> > faceToEdges;
  for (unsigned cellIndex=0; cellIndex<mesh3D->cellCount(); cellIndex++)
  {
    CellPtr cell = mesh3D->getCell(cellIndex);
    unsigned sideCount = cell->getSideCount();

    for (unsigned sideOrdinal=0; sideOrdinal<sideCount; sideOrdinal++)
    {
      unsigned faceIndex = cell->entityIndex(faceDim, sideOrdinal);
      unsigned numCells = mesh3D->getActiveCellCount(faceDim,faceIndex);
      if (numCells == 1)   // boundary face
      {
        boundaryFaces.insert(faceIndex);
      }
      else if (numCells == 2)
      {
        internalFaces.insert(faceIndex);
      }
      else
      {
        success = false;
        cout << "testEntityConstraints: In initial 3D mesh, face " << faceIndex << " has active cell count of " << numCells << ".\n";
      }

      if (faceToEdges.find(faceIndex) == faceToEdges.end())
      {
        CellTopoPtr faceTopo = cell->topology()->getSubcell(faceDim, sideOrdinal);
        unsigned numEdges = faceTopo->getSubcellCount(edgeDim);
        vector<unsigned> edgeIndices(numEdges);
        for (unsigned edgeOrdinal=0; edgeOrdinal<numEdges; edgeOrdinal++)
        {
          edgeIndices[edgeOrdinal] = mesh3D->getFaceEdgeIndex(faceIndex, edgeOrdinal);
        }
      }
    }
  }

  if (internalFaces.size() != 4)
  {
    success = false;
    cout << "testEntityConstraints: In initial 3D mesh, there are " << internalFaces.size() << " internal faces (expected 4).\n";
  }
  for (set<unsigned>::iterator faceIt=internalFaces.begin(); faceIt != internalFaces.end(); faceIt++)
  {
    unsigned faceIndex = *faceIt;
    unsigned constrainingEntityIndex = mesh3D->getConstrainingEntity(faceDim,faceIndex).first;
    if (constrainingEntityIndex != faceIndex)
    {
      success = false;
      cout << "testEntityConstraints: In initial 3D mesh, internal face is constrained by a different face.\n";
    }
  }

  // now, make a single refinement in each mesh:
  unsigned cellToRefine2D = 0, cellToRefine3D = 3;
  mesh2D->refineCell(cellToRefine2D, RefinementPattern::regularRefinementPatternQuad(), mesh2D->cellCount());
  mesh3D->refineCell(cellToRefine3D, RefinementPattern::regularRefinementPatternHexahedron(), mesh3D->cellCount());

//  printMeshInfo(mesh2D);

  // figure out which faces/edges were refined and add the corresponding

  map<unsigned,pair<IndexType,unsigned> > expectedEdgeConstraints2D;
  set<unsigned> refinedEdges;
  for (set<unsigned>::iterator edgeIt=boundaryEdges.begin(); edgeIt != boundaryEdges.end(); edgeIt++)
  {
    set<unsigned> children = mesh2D->getChildEntitiesSet(edgeDim, *edgeIt);
    if (children.size() > 0)
    {
      refinedEdges.insert(*edgeIt);
      boundaryEdges.insert(children.begin(), children.end());
    }
  }
  for (set<unsigned>::iterator edgeIt=internalEdges.begin(); edgeIt != internalEdges.end(); edgeIt++)
  {
    set<unsigned> children = mesh2D->getChildEntitiesSet(edgeDim, *edgeIt);
    if (children.size() > 0)
    {
      refinedEdges.insert(*edgeIt);
      internalEdges.insert(children.begin(), children.end());
      for (set<unsigned>::iterator childIt = children.begin(); childIt != children.end(); childIt++)
      {
        unsigned childIndex = *childIt;
        expectedEdgeConstraints2D[childIndex] = make_pair(*edgeIt, edgeDim);
      }
    }
  }
  // 1 quad refined: expect 4 refined edges
  if (refinedEdges.size() != 4)
  {
    success = false;
    cout << "After initial refinement, 2D mesh has " << refinedEdges.size() << " refined edges (expected 4).\n";
  }
  checkConstraints(mesh2D, edgeDim, expectedEdgeConstraints2D);

  set<unsigned> refinedFaces;
  map<unsigned,pair<IndexType,unsigned> > expectedFaceConstraints3D;
  map<unsigned,pair<IndexType,unsigned> > expectedEdgeConstraints3D;

  for (set<unsigned>::iterator faceIt=boundaryFaces.begin(); faceIt != boundaryFaces.end(); faceIt++)
  {
    set<unsigned> children = mesh3D->getChildEntitiesSet(faceDim, *faceIt);
    if (children.size() > 0)
    {
      refinedFaces.insert(*faceIt);
      boundaryFaces.insert(children.begin(), children.end());
    }
  }

  for (set<unsigned>::iterator faceIt=internalFaces.begin(); faceIt != internalFaces.end(); faceIt++)
  {
    vector<unsigned> children = mesh3D->getChildEntities(faceDim, *faceIt);
    if (children.size() > 0)
    {
      refinedFaces.insert(*faceIt);
      internalFaces.insert(children.begin(), children.end());
      for (unsigned childOrdinal = 0; childOrdinal < children.size(); childOrdinal++)
      {
        unsigned childIndex = children[childOrdinal];
        expectedFaceConstraints3D[childIndex] = make_pair(*faceIt, faceDim);
        unsigned numEdges = 4;
        unsigned internalEdgeCount = 0; // for each child of a quad, we expect to have 2 internal edges
        for (unsigned edgeOrdinal=0; edgeOrdinal<numEdges; edgeOrdinal++)
        {
          unsigned edgeIndex = mesh3D->getFaceEdgeIndex(childIndex, edgeOrdinal);
          unsigned activeCellCount = mesh3D->getActiveCellCount(edgeDim, edgeIndex);
          if (activeCellCount==2)
          {
            internalEdgeCount++;
            expectedEdgeConstraints3D[edgeIndex] = make_pair(*faceIt, faceDim);
          }
          else if (activeCellCount==1)     // hanging edge
          {
            if (! mesh3D->entityHasParent(edgeDim, edgeIndex))
            {
              cout << "Hanging edge with edgeIndex " << edgeIndex << " (in face " << childIndex << ") does not have a parent edge.\n";
              cout << "Edge vertices:\n";
              mesh3D->printEntityVertices(edgeDim, edgeIndex);
              cout << "Face vertices:\n";
              mesh3D->printEntityVertices(faceDim, childIndex);
              success = false;
            }
            else
            {
              unsigned edgeParentIndex = mesh3D->getEntityParent(edgeDim, edgeIndex);
              expectedEdgeConstraints3D[edgeIndex] = make_pair(edgeParentIndex, edgeDim);
            }
          }
          else
          {
            cout << "Unexpected number of active cells: " << activeCellCount << endl;
          }
        }
        if (internalEdgeCount != 2)
        {
          cout << "Expected internalEdgeCount to be 2; was " << internalEdgeCount << endl;
          success = false;
        }
      }
    }
  }
  // 1 hex refined: expect 6 refined faces
  if (refinedFaces.size() != 6)
  {
    success = false;
    cout << "After initial refinement, 3D mesh has " << refinedFaces.size() << " refined faces (expected 6).\n";
  }
  if (! checkConstraints(mesh3D, faceDim, expectedFaceConstraints3D, "refined 3D mesh") )
  {
    cout << "Failed face constraint check for refined 3D mesh." << endl;
    success = false;
  }
  if (! checkConstraints(mesh3D, edgeDim, expectedEdgeConstraints3D, "refined 3D mesh") )
  {
    cout << "Failed edge constraint check for refined 3D mesh." << endl;
    success = false;
  }

  // now, we refine one of the children of the refined cells in each mesh, to produce a 2-level constraint
  set<unsigned> edgeChildren2D;
  set<unsigned> cellsForEdgeChildren2D;
  for (map<unsigned,pair<IndexType,unsigned> >::iterator edgeConstraint=expectedEdgeConstraints2D.begin();
       edgeConstraint != expectedEdgeConstraints2D.end(); edgeConstraint++)
  {
    edgeChildren2D.insert(edgeConstraint->first);
    unsigned cellIndex = mesh2D->getActiveCellIndices(edgeDim, edgeConstraint->first).begin()->first;
    cellsForEdgeChildren2D.insert(cellIndex);
//    cout << "cellsForEdgeChildren2D: " << cellIndex << endl;
  }

  // one of these has (1,0) as one of its vertices.  Let's figure out which one:
  unsigned vertexIndex;
  if (! mesh2D->getVertexIndex(makeVertex(1, 0), vertexIndex) )
  {
    cout << "Error: vertex not found.\n";
    success = false;
  }

  vector< pair<unsigned,unsigned> > cellsForVertex = mesh2D->getActiveCellIndices(vertexDim, vertexIndex);
  if (cellsForVertex.size() != 2)
  {
    cout << "cellsForVertex should have 2 entries; has " << cellsForVertex.size() << endl;
    success = false;
  }
  unsigned childCellForVertex, childCellConstrainedEdge;
  set<unsigned> childNewlyConstrainingEdges; // the two interior edges that we break
  for (vector< pair<unsigned,unsigned> >::iterator cellIt=cellsForVertex.begin(); cellIt != cellsForVertex.end(); cellIt++)
  {
//    cout << "cellsForVertex: " << cellIt->first << endl;
    if ( cellsForEdgeChildren2D.find( cellIt->first ) != cellsForEdgeChildren2D.end() )
    {
      // found match
      childCellForVertex = cellIt->first;
      // now, figure out which of the "edgeChildren2D" is shared by this cell:
      CellPtr cell = mesh2D->getCell(childCellForVertex);
      unsigned numEdges = cell->getSideCount();
      for (unsigned edgeOrdinal=0; edgeOrdinal<numEdges; edgeOrdinal++)
      {
        unsigned edgeIndex = cell->entityIndex(edgeDim, edgeOrdinal);
        if (edgeChildren2D.find(edgeIndex) != edgeChildren2D.end())
        {
          childCellConstrainedEdge = edgeIndex;
        }
        else if ( mesh2D->getActiveCellCount(edgeDim, edgeIndex) == 2 )
        {
          childNewlyConstrainingEdges.insert(edgeIndex);
        }
      }
    }
  }
  if (childNewlyConstrainingEdges.size() != 2)
  {
    cout << "Expected 2 newly constraining edges after 2nd refinement of 2D mesh, but found " << childNewlyConstrainingEdges.size() << endl;
    success = false;
  }

  // refine the cell that matches (1,0):
  mesh2D->refineCell(childCellForVertex, RefinementPattern::regularRefinementPatternQuad(), mesh2D->cellCount());

  // now, fix the expected edge constraints, then check them...
  set<unsigned> childEdges = mesh2D->getChildEntitiesSet(edgeDim, childCellConstrainedEdge);
  if (childEdges.size() != 2)
  {
    cout << "Expected 2 child edges, but found " << childEdges.size() << ".\n";
    success = false;
  }
  for (set<unsigned>::iterator edgeIt = childEdges.begin(); edgeIt != childEdges.end(); edgeIt++)
  {
    expectedEdgeConstraints2D[*edgeIt] = expectedEdgeConstraints2D[childCellConstrainedEdge];
  }
  expectedEdgeConstraints2D.erase(childCellConstrainedEdge);
  for (set<unsigned>::iterator edgeIt = childNewlyConstrainingEdges.begin(); edgeIt != childNewlyConstrainingEdges.end(); edgeIt++)
  {
    set<unsigned> newChildEdges = mesh2D->getChildEntitiesSet(edgeDim, *edgeIt);
    for (set<unsigned>::iterator newEdgeIt = newChildEdges.begin(); newEdgeIt != newChildEdges.end(); newEdgeIt++)
    {
      expectedEdgeConstraints2D[*newEdgeIt] = make_pair(*edgeIt,edgeDim);
    }
  }

  if (! checkConstraints(mesh2D, edgeDim, expectedEdgeConstraints2D, "twice-refined 2D mesh") )
  {
    cout << "Failed constraint check for twice-refined 2D mesh." << endl;
    success = false;
  }

  // now, do a second level of refinement for 3D mesh
  // one of these has (1,2,0) as one of its vertices.  Let's figure out which one:
  if (! mesh3D->getVertexIndex(makeVertex(1, 2, 0), vertexIndex) )
  {
    cout << "Error: vertex not found.\n";
    success = false;
  }

  cellsForVertex = mesh3D->getActiveCellIndices(vertexDim, vertexIndex);
  if (cellsForVertex.size() != 4)
  {
    cout << "cellsForVertex should have 4 entries; has " << cellsForVertex.size() << endl;
    success = false;
  }

  vector<unsigned> justCellsForVertex;
  for (vector< pair<unsigned,unsigned> >::iterator entryIt = cellsForVertex.begin(); entryIt != cellsForVertex.end(); entryIt++)
  {
    justCellsForVertex.push_back(entryIt->first);
  }
  vector<unsigned> childCellIndices = mesh3D->getCell(cellToRefine3D)->getChildIndices(mesh3D);
  std::sort(childCellIndices.begin(), childCellIndices.end());
  vector<unsigned> matches(childCellIndices.size() + cellsForVertex.size());
  vector<unsigned>::iterator matchEnd = std::set_intersection(justCellsForVertex.begin(), justCellsForVertex.end(), childCellIndices.begin(), childCellIndices.end(), matches.begin());
  matches.resize(matchEnd-matches.begin());

  if (matches.size() != 1)
  {
    cout << "matches should have exactly one entry, but has " << matches.size();
    success = false;
  }
  unsigned childCellIndex = matches[0];
  CellPtr childCell = mesh3D->getCell(childCellIndex);
  set<unsigned> childInteriorUnconstrainedFaces;
  set<unsigned> childInteriorConstrainedFaces;
  unsigned faceCount = childCell->getSideCount();
  for (unsigned faceOrdinal=0; faceOrdinal<faceCount; faceOrdinal++)
  {
    unsigned faceIndex = childCell->entityIndex(faceDim, faceOrdinal);
    if (mesh3D->getActiveCellCount(faceDim, faceIndex) == 1)
    {
      // that's an interior constrained face, or a boundary face
      if (expectedFaceConstraints3D.find(faceIndex) != expectedFaceConstraints3D.end())
      {
        // constrained face
        childInteriorConstrainedFaces.insert(faceIndex);
      }
    }
    else if (mesh3D->getActiveCellCount(faceDim, faceIndex) == 2)
    {
      // an interior unconstrained face
      childInteriorUnconstrainedFaces.insert(faceIndex);
    }
    else
    {
      cout << "Error: unexpected active cell count.  Expected 1 or 2, but was " << mesh3D->getActiveCellCount(faceDim, faceIndex) << endl;
      success = false;
    }
  }
//  Camellia::print("childInteriorUnconstrainedFaces", childInteriorUnconstrainedFaces);
//  Camellia::print("childInteriorConstrainedFaces", childInteriorConstrainedFaces);

  mesh3D->refineCell(childCellIndex, RefinementPattern::regularRefinementPatternHexahedron(), mesh3D->cellCount());

  // update expected face and edge constraints
//  set<unsigned> edgeConstraintsToDrop;
  for (set<unsigned>::iterator faceIt=childInteriorConstrainedFaces.begin(); faceIt != childInteriorConstrainedFaces.end(); faceIt++)
  {
    unsigned faceIndex = *faceIt;
    set<unsigned> newChildFaces = mesh3D->getChildEntitiesSet(faceDim, faceIndex);
    for (set<unsigned>::iterator newChildIt=newChildFaces.begin(); newChildIt != newChildFaces.end(); newChildIt++)
    {
      unsigned newChildIndex = *newChildIt;
      expectedFaceConstraints3D[newChildIndex] = expectedFaceConstraints3D[faceIndex];
//      cout << "Expecting two-level face constraint: face " << newChildIndex << " constrained by face " << expectedFaceConstraints3D[newChildIndex].first << endl;
    }
    unsigned numEdges = mesh3D->getSubEntityCount(faceDim, faceIndex, edgeDim);
    set<IndexType> childEdgesOnParentBoundary;
    for (unsigned edgeOrdinal=0; edgeOrdinal<numEdges; edgeOrdinal++)
    {
      unsigned edgeIndex = mesh3D->getSubEntityIndex(faceDim, faceIndex, edgeDim, edgeOrdinal);
      set<unsigned> newChildEdges = mesh3D->getChildEntitiesSet(edgeDim, edgeIndex);
      for (set<unsigned>::iterator newChildIt=newChildEdges.begin(); newChildIt != newChildEdges.end(); newChildIt++)
      {
        unsigned newChildIndex = *newChildIt;
        expectedEdgeConstraints3D[newChildIndex] = expectedEdgeConstraints3D[edgeIndex];
//        cout << "Expecting two-level edge constraint: edge " << newChildIndex << " constrained by ";
//        cout << typeString(expectedEdgeConstraints3D[newChildIndex].second) << " " << expectedEdgeConstraints3D[newChildIndex].first << endl;
        childEdgesOnParentBoundary.insert(newChildIndex);
//        edgeConstraintsToDrop.insert(edgeIndex);
      }
    }

    for (set<unsigned>::iterator newChildIt=newChildFaces.begin(); newChildIt != newChildFaces.end(); newChildIt++)
    {
      unsigned newChildFaceIndex = *newChildIt;
      int numEdges = mesh3D->getSubEntityCount(faceDim, newChildFaceIndex, edgeDim);
      for (unsigned edgeOrdinal=0; edgeOrdinal<numEdges; edgeOrdinal++)
      {
        unsigned newChildEdgeIndex = mesh3D->getSubEntityIndex(faceDim, newChildFaceIndex, edgeDim, edgeOrdinal);
        if (childEdgesOnParentBoundary.find(newChildEdgeIndex) == childEdgesOnParentBoundary.end())
        {
          expectedEdgeConstraints3D[newChildEdgeIndex] = expectedFaceConstraints3D[faceIndex];
        }
      }
    }

    expectedFaceConstraints3D.erase(faceIndex);
  }
//  for (set<unsigned>::iterator edgeToDropIt=edgeConstraintsToDrop.begin(); edgeToDropIt != edgeConstraintsToDrop.end(); edgeToDropIt++) {
//    expectedEdgeConstraints3D.erase(*edgeToDropIt);
//  }
  for (set<unsigned>::iterator faceIt=childInteriorUnconstrainedFaces.begin(); faceIt != childInteriorUnconstrainedFaces.end(); faceIt++)
  {
    unsigned faceIndex = *faceIt;
    set<unsigned> newChildFaces = mesh3D->getChildEntitiesSet(faceDim, faceIndex);
    for (set<unsigned>::iterator newChildIt=newChildFaces.begin(); newChildIt != newChildFaces.end(); newChildIt++)
    {
      unsigned newChildIndex = *newChildIt;
      expectedFaceConstraints3D[newChildIndex] = make_pair(faceIndex, faceDim);
    }
    expectedFaceConstraints3D.erase(faceIndex);
    unsigned numEdges = mesh3D->getSubEntityCount(faceDim, faceIndex, edgeDim);
    set<IndexType> childEdgesOnParentBoundary;
    for (unsigned edgeOrdinal=0; edgeOrdinal<numEdges; edgeOrdinal++)
    {
      unsigned edgeIndex = mesh3D->getSubEntityIndex(faceDim, faceIndex, edgeDim, edgeOrdinal);
      set<unsigned> newChildEdges = mesh3D->getChildEntitiesSet(edgeDim, edgeIndex);
      for (set<unsigned>::iterator newChildIt=newChildEdges.begin(); newChildIt != newChildEdges.end(); newChildIt++)
      {
        unsigned newChildIndex = *newChildIt;
        if (expectedEdgeConstraints3D.find(newChildIndex) == expectedEdgeConstraints3D.end())   // only impose edge constraint if there is not one already present
        {
          expectedEdgeConstraints3D[newChildIndex] = make_pair(edgeIndex,edgeDim);
        }
        childEdgesOnParentBoundary.insert(newChildIndex);
      }
    }
    for (set<unsigned>::iterator newChildIt=newChildFaces.begin(); newChildIt != newChildFaces.end(); newChildIt++)
    {
      unsigned newChildFaceIndex = *newChildIt;
      int numEdges = mesh3D->getSubEntityCount(faceDim, newChildFaceIndex, edgeDim);
      for (unsigned edgeOrdinal=0; edgeOrdinal<numEdges; edgeOrdinal++)
      {
        unsigned newChildEdgeIndex = mesh3D->getSubEntityIndex(faceDim, newChildFaceIndex, edgeDim, edgeOrdinal);
        if (childEdgesOnParentBoundary.find(newChildEdgeIndex) == childEdgesOnParentBoundary.end())
        {
          if (expectedEdgeConstraints3D.find(newChildEdgeIndex) == expectedEdgeConstraints3D.end())   // only impose edge constraint if there is not one already present
          {
            expectedEdgeConstraints3D[newChildEdgeIndex] = make_pair(faceIndex, faceDim);
          }
        }
      }
    }
  }

  if (! checkConstraints(mesh3D, edgeDim, expectedEdgeConstraints3D, "twice-refined 3D mesh") )
  {
    cout << "Failed edge constraint check for twice-refined 3D mesh." << endl;
    success = false;
  }

  if (! checkConstraints(mesh3D, faceDim, expectedFaceConstraints3D, "twice-refined 3D mesh") )
  {
    cout << "Failed face constraint check for twice-refined 3D mesh." << endl;
    success = false;
  }

  return success;
}
void AddConnection::bfs(int x1, int y1, int x2, int y2, Component*** a, BFSOut &outx)
{
	// Breadth-first search on a 15-pixel grid from (x1, y1) to (x2, y2),
	// recording each reached cell's predecessor in outx.arr so the caller
	// can reconstruct the path.  outx.check reports whether the target
	// (x2, y2) was reached.
	//
	// vis is indexed [x][y] (1400 x 780); ifc/oth mirror the component
	// array `a` and are indexed [row][col] (780 x 1400).
	int** vis = new int*[1400];
	for (int i = 0; i < 1400; i++) {
		vis[i] = new int[780];
		for (int j = 0; j < 780; j++) vis[i][j] = 0;
	}
	int** ifc = new int*[780];
	int** oth = new int*[780];
	for (int i = 0; i < 780; i++) {
		ifc[i] = new int[1400];
		oth[i] = new int[1400];
		// Zero-fill both grids: the classification pass below leaves some
		// cells untouched, and the original code read them uninitialized.
		for (int j = 0; j < 1400; j++) { ifc[i][j] = 0; oth[i][j] = 0; }
	}
	// Classify every occupied cell: ifc marks source pins driven by a
	// different output than the starting component's; oth marks occupied
	// cells with no source pin.
	for (int i = 0; i < 780; i++)
		for (int j = 0; j < 1400; j++) {
			if (a[i][j] != NULL) {
				if (a[i][j]->getSourcePin() != NULL) {
					if (a[y1][x1 - 15]->getSourcePin() == NULL) {
						if (a[i][j]->getSourcePin() == (a[y1][x1 - 15])->GetOutputPin())
							ifc[i][j] = 0;
						else
							ifc[i][j] = 1;
						oth[i][j] = 0;
					}
				}
				else { oth[i][j] = 1; }
			}
		}
	queue< pair<int, int> > qu;
	qu.push(make_pair(x1, y1));
	while (!qu.empty())
	{
		pair<int, int> pr = qu.front();
		vis[pr.first][pr.second] = 1;
		qu.pop();

		// Neighbours one grid step (15 px) away, in the original's order:
		// +x, +y, -x, -y.
		const int dx[4] = { 15, 0, -15, 0 };
		const int dy[4] = { 0, 15, 0, -15 };
		bool reached = false;
		for (int d = 0; d < 4; d++) {
			int nx = pr.first + dx[d];
			int ny = pr.second + dy[d];
			if (isvalid(nx, ny, vis, ifc, oth, pr.first, pr.second, x2, y2, x1, y1))
			{
				qu.push(make_pair(nx, ny));
				outx.arr[nx][ny] = make_pair(pr.first, pr.second);
				vis[nx][ny] = 1;
				// The just-pushed cell is the queue's back; stop early when
				// it is the target (matches the original's qu.back() test).
				if (nx == x2 && ny == y2) { reached = true; break; }
			}
		}
		if (reached)
			break;
	}

	outx.check = (vis[x2][y2] == 1);
	// Free ALL rows.  The original freed only the first 780 of vis's 1400
	// rows, leaking the remaining 620 on every call.
	for (int i = 0; i < 1400; i++) delete[] vis[i];
	for (int i = 0; i < 780; i++) { delete[] ifc[i]; delete[] oth[i]; }
	delete[] vis; delete[] ifc; delete[] oth;
}
 // Double rolling hash of the half-open substring [l, r): each component
 // shifts the prefix hash at l up by the substring length (via the power
 // tables p/q) and subtracts the prefix hash at r, reduced mod moda/modb.
 pair<ll,ll> query(int l, int r) {
   const int len = r - l;
   ll ha = (ll)(((__int128)p[len] * a[l] - a[r] + moda) % moda);
   ll hb = (ll)(((__int128)q[len] * b[l] - b[r] + modb) % modb);
   return make_pair(ha, hb);
 }
bool AddConnection::ReadActionParameters()
{
	// Collects the two endpoints of a new connection from the user: the
	// source output pin (first click, or the component pre-selected at
	// UI.u_GfxInfo if one exists there) and the destination input pin
	// (second click), then runs bfs() to verify a routable path exists.
	// Returns false on ESCAPE or when no valid path is found.

	//Get a Pointer to the Input / Output Interfaces
	Output* pOut = pManager->GetOutput();
	Input* pIn = pManager->GetInput();
	pOut->FlushKeyQueue();
	pOut->ClearStatusBar();
	//Print Action Message

	pOut->PrintMsg("Adding Connection : Click to add the first edge ");
	pOut->UpdateBuffer();
	if ( pManager->GetComponent( UI.u_GfxInfo.x1 , UI.u_GfxInfo.y1 )==NULL)
	{
		// No component pre-selected: poll clicks until the user picks a
		// component exposing an output pin, or presses ESCAPE.
		do {
			if (pIn->GetKeyPressed() == ESCAPE)
				return false;
			if (pIn->GetPointClicked(Cx, Cy) == LEFT_CLICK && (pManager->GetArr()[Cy][Cx]) != NULL) {
				if (((pManager->GetArr()[Cy][Cx])->GetOutputPinCoordinates().first != 0
					&& (pManager->GetArr()[Cy][Cx])->GetOutputPinCoordinates().second != 0)) {
					GInfo.x1 = (pManager->GetArr()[Cy][Cx])->GetOutputPinCoordinates().first;
					GInfo.y1 = (pManager->GetArr()[Cy][Cx])->GetOutputPinCoordinates().second;
					break;
				}
				else {
					pOut->ClearStatusBar();
					// Typo fixed: "vaild" -> "valid".
					pOut->PrintMsg("Please choose a valid Gate or Switch  ");
					pOut->UpdateBuffer();
				}
			}
		} while (true);
	}
	else
	{
		// Use the pre-selected component's output pin directly.
		GInfo.x1 = (pManager->GetComponent( UI.u_GfxInfo.x1,UI.u_GfxInfo.y1))->GetOutputPinCoordinates( ).first;
		GInfo.y1 = (pManager->GetComponent(UI.u_GfxInfo.x1,UI.u_GfxInfo.y1))->GetOutputPinCoordinates( ).second;
	}
	pOut->ClearStatusBar();
	pOut->PrintMsg("Adding Connection : Click to add the second edge ");
	pOut->UpdateBuffer();
	// Second endpoint: must be a component with an input pin at the
	// clicked (magnetized) location.
	do {
		if (pIn->GetKeyPressed() == ESCAPE)
			return false;
		if (pIn->GetPointClicked(Cx, Cy) == LEFT_CLICK && (pManager->GetArr()[Cy][Cx]) != NULL)
		{
			pOut->Magnetize(Cx, Cy);
			if (pManager->GetArr()[Cy][Cx]->GetInputPinCoordinates(make_pair(Cx, Cy)) != NULL) {

				GInfo.x2 = (pManager->GetComponent( Cx , Cy ))->GetInputPinCoordinates( make_pair( Cx , Cy ) )->first;
				GInfo.y2 = (pManager->GetComponent( Cx , Cy ))->GetInputPinCoordinates( make_pair( Cx , Cy ) )->second;
				break;
			}
			else
			{
				pOut->ClearStatusBar();
				// Grammar fixed: "You choosed" -> "You chose".
				pOut->PrintMsg("You chose an invalid Component, please choose a Gate or Led !!");
				pOut->UpdateBuffer();
			}
		}
	} while (true);
	// Verify a routable path exists between the two pins.
	bfs(GInfo.x1, GInfo.y1, GInfo.x2, GInfo.y2, pManager->GetArr(), outx);
	if (outx.check)
		return true;
	else {
		pManager->GetOutput()->ClearStatusBar();
		pManager->GetOutput()->PrintMsg("There is no valid path");
		pManager->GetOutput()->UpdateBuffer();
		return false;
	}
}
Exemple #13
0
/// @brief Populates `aData` from a parsed animation XML document.
/// @param aData      Output: sheet name plus per-animation frame/sprite data.
/// @param animXmlDoc Parsed XML whose root child carries a "spriteSheet"
///                   attribute and one child node per animation.
/// @param sheet      Sprite sheet used to resolve sprite names to draw rects.
/// @return false (with a message on stderr) if any referenced sprite is
///         missing from `sheet`; true otherwise.
bool ResourceLoader::loadAnimData(AnimationData& aData, const pugi::xml_document &animXmlDoc, const SpriteSheet* sheet)
{
	pugi::xml_node animationsXml = animXmlDoc.first_child();
	aData.sheetName = animationsXml.attribute("spriteSheet").as_string();
	// Iterate through all animations
	for (auto& animXml : animationsXml.children()) {
		string name = animXml.attribute("name").value();
		// One child element per cell/frame.
		int frameNum = (int)std::distance(animXml.children().begin(), animXml.children().end());
		// emplace() keeps an existing entry for `name` untouched and returns it.
		AnimationData::anim& a = aData.animations.emplace(make_pair(name, AnimationData::anim(frameNum))).first->second;
		a.name = name;
		a.loops = animXml.attribute("loops").as_uint();
		// Iterate through cells in the current animation
		int cellIndex = 0;
		for (auto& cellXml : animXml.children()) {
			AnimationData::frame& f = a.frames[cellIndex];
			// NOTE(review): 30900 presumably converts the file's delay units
			// to the engine's time base -- confirm against the consumer.
			// The max(1, ...) floors the delay at one tick.
			f.delay = max(1, cellXml.attribute("delay").as_int() * 30900);
			std::multimap<int, AnimationData::sprite> zList;
			// Iterate through sprites in the current cell
			for (auto& spriteXml : cellXml.children()) {
				int z = spriteXml.attribute("z").as_int();
				std::pair<int, AnimationData::sprite> smap(z, {});
				auto& s = smap.second;
				string spriteName = spriteXml.attribute("name").as_string();
				const auto& it = sheet->sprites.find(spriteName);
				if (it == sheet->sprites.end()) {
					// Couldn't find the requested sprite!
					std::cerr << "ERROR: couldn't find sprite \"" << spriteName << "\" in sheet \"" << sheet->imageName << "\"\n";
					return false;
				}
				// Get draw rect from sprite object, and offset data from anim file
				// (offsets are recentred on the sprite's midpoint).
				s.draw = { it->second.left, it->second.top, it->second.width, it->second.height };
				s.offset.x = spriteXml.attribute("x").as_float() - (int)(s.draw.width / 2.0f);
				s.offset.y = spriteXml.attribute("y").as_float() - (int)(s.draw.height / 2.0f);
				// Does it need to be flipped?
				if (spriteXml.attribute("flipH").as_bool())
				{
					s.flipH = true;
				}
				if (spriteXml.attribute("flipV").as_bool())
				{
					s.flipV = true;
				}
				// Use an associative container to keep the sprites in this frame in z-order
				zList.insert(smap);
			}
			// Create our vertex array from the collected rect data
			// (four vertices per sprite: top-left, top-right, bottom-right,
			// bottom-left).
			f.sprites.resize(zList.size() * 4);
			int i = 0;
			for (auto z : zList) {
				auto& s = z.second;
				f.sprites[i].texCoords = { s.draw.left, s.draw.top };
				f.sprites[i].position = { s.offset.x, s.offset.y };
				f.sprites[i + 1].texCoords = { s.draw.left + s.draw.width, s.draw.top };
				f.sprites[i + 1].position = { s.draw.width + s.offset.x, s.offset.y };
				f.sprites[i + 2].texCoords = { s.draw.left + s.draw.width, s.draw.top + s.draw.height };
				f.sprites[i + 2].position = { s.draw.width + s.offset.x, s.draw.height + s.offset.y };
				f.sprites[i + 3].texCoords = { s.draw.left, s.draw.top + s.draw.height };
				f.sprites[i + 3].position = { s.offset.x, s.draw.height + s.offset.y };
				// Flips swap positions only (texCoords stay fixed), mirroring
				// the quad in place.
				if (s.flipH)
				{
					std::swap(f.sprites[i].position, f.sprites[i + 1].position);
					std::swap(f.sprites[i + 2].position, f.sprites[i + 3].position);
				}
				if (s.flipV)
				{
					std::swap(f.sprites[i].position, f.sprites[i + 3].position);
					std::swap(f.sprites[i + 1].position, f.sprites[i + 2].position);
				}
				i += 4;
			}
			cellIndex++;
		}
	}
	return true;
}
void refine_kmer_index(vector<pair<int, int> >& kmer_index, vector<pair<int, vector<pair<int, int> > > >& primary_chain,
                                string read, int dir, vector<reference_index>& refindex, int index)
{
	// Builds a linked list of (ref_ind, read_ind) seed pairs from
	// kmer_index, then greedily grows a primary chain starting at every
	// node via create_primary_chain_from_list().  Chains with more than
	// one seed are appended to primary_chain, tagged with
	// index * MULTIPLIER + dir.
	// NOTE(review): refindex is accepted for interface compatibility but
	// is not used by this function.

	// Guard: the original dereferenced kmer_index[0] unconditionally,
	// which is out of bounds on an empty input.
	if (kmer_index.empty())
		return;

	int readlen = read.length();

	// Head of the seed list.
	node *head = new node;
	head->ref_ind = kmer_index[0].first;
	head->read_ind = kmer_index[0].second;
	head->next = NULL;
	node *current_node = head;

	// Append the remaining seeds in order.  (The original also declared
	// ~18 locals and a dead post-loop branch that never executed; both
	// removed.)
	for (size_t i = 1; i < kmer_index.size(); i++)
	{
		node *next_node = new node;
		next_node->ref_ind = kmer_index[i].first;
		next_node->read_ind = kmer_index[i].second;
		next_node->next = NULL;
		current_node->next = next_node;
		current_node = next_node;
	}

	// Start a chain at every node.  create_primary_chain_from_list() may
	// unlink and delete downstream nodes; the walk below frees whatever
	// remains as it advances, so no nodes leak.
	node *tmp_node;
	current_node = head;
	while (current_node != NULL)
	{
		vector<pair<int, int> > chain;
		chain.push_back(make_pair(current_node->ref_ind, current_node->read_ind));

		create_primary_chain_from_list(current_node, chain, current_node->next, current_node, readlen);
		if (chain.size() > 1)
		{
			primary_chain.push_back(make_pair(index * MULTIPLIER + dir, chain));
		}

		tmp_node = current_node;
		current_node = current_node->next;
		delete tmp_node;
		tmp_node = NULL;
	}
}
Exemple #15
0
Table* read_from_userprofile() {
	// Loads "userprofile.csv" into a newly allocated Table.  The first CSV
	// row supplies the column labels (paired with the fixed storage types
	// below); every subsequent row becomes one Record.
	// Returns NULL if the file cannot be opened -- the original fell off
	// the end of a non-void function here, which is undefined behaviour.
	static const int kNumCols = 19;
	// Field names used for record entries, in CSV column order.
	static const char* kNames[kNumCols] = {
		"userID", "latitude", "longitude", "smoker", "drink_level",
		"dress_preference", "ambience", "transport", "marital_status",
		"hijos", "birth_year", "interest", "personality", "religion",
		"activity", "color", "weight", "budget", "height"
	};
	// Storage type for each column, same order as kNames.
	static const decltype(Table::varchar) kTypes[kNumCols] = {
		Table::varchar,   // userID
		Table::floating,  // latitude
		Table::integer,   // longitude -- NOTE(review): read_from_geoplaces2
		                  // stores longitude as floating; confirm intended
		Table::varchar,   // smoker
		Table::varchar,   // drink_level
		Table::varchar,   // dress_preference
		Table::varchar,   // ambience
		Table::varchar,   // transport
		Table::varchar,   // marital_status
		Table::varchar,   // hijos
		Table::integer,   // birth_year
		Table::varchar,   // interest
		Table::varchar,   // personality
		Table::varchar,   // religion
		Table::varchar,   // activity
		Table::varchar,   // color
		Table::integer,   // weight
		Table::varchar,   // budget
		Table::floating   // height
	};

	ifstream myfile ("userprofile.csv");
	if (!myfile.is_open())
		return NULL;

	// Header row: each cell's text becomes the column label.  The last
	// column is terminated by '\n' instead of ','.
	Table::ColumnList columns;
	for (int c = 0; c < kNumCols; c++) {
		String header;
		getline( myfile, header, (c == kNumCols - 1) ? '\n' : ',' );
		columns.push_back(make_pair(header, kTypes[c]));
	}

	Table* geo = new Table(columns); // Create geo using the defined columns

	// Data rows.  NOTE(review): like the original, this loops on
	// myfile.good(), so a trailing newline yields one final record of
	// empty fields -- confirm whether that is intended before changing it.
	while ( myfile.good() )
	{
		vector<pair<string, string> > entries; // Entries for the record to be placed in the table
		for (int c = 0; c < kNumCols; c++) {
			string value;
			getline( myfile, value, (c == kNumCols - 1) ? '\n' : ',' );
			entries.push_back(make_pair(string(kNames[c]), value));
		}

		Record add(entries); // Create record to add to table
		geo->insert(add); // Insert add record into geo
	}
	myfile.close();
	return geo;
}
Exemple #16
0
// Copies `param` into `param_split`, inserting a SplitLayer after any top
// blob that is consumed more than once (as a bottom of several layers,
// and/or as a loss), so that each consumer reads its own copy of the blob.
void InsertSplits(const NetParameter& param, NetParameter* param_split) {
  // Initialize by copying from the input NetParameter.
  param_split->CopyFrom(param);
  param_split->clear_layer();
  // blob name -> (layer idx, top idx) of the layer that most recently produced it.
  map<string, pair<int_tp, int_tp> > blob_name_to_last_top_idx;
  // (layer idx, bottom idx) -> producing (layer idx, top idx).
  map<pair<int_tp, int_tp>, pair<int_tp, int_tp> > bottom_idx_to_source_top_idx;
  // (layer idx, top idx) -> number of consumers (bottom uses plus loss use).
  map<pair<int_tp, int_tp>, int_tp> top_idx_to_bottom_count;
  map<pair<int_tp, int_tp>, float> top_idx_to_loss_weight;
  // Running counter handing each consumer a distinct split-output index.
  map<pair<int_tp, int_tp>, int_tp> top_idx_to_bottom_split_idx;
  map<int_tp, string> layer_idx_to_layer_name;
  // First pass: count how many times each top blob is consumed.
  for (int_tp i = 0; i < param.layer_size(); ++i) {
    const LayerParameter& layer_param = param.layer(i);
    layer_idx_to_layer_name[i] = layer_param.name();
    for (int_tp j = 0; j < layer_param.bottom_size(); ++j) {
      const string& blob_name = layer_param.bottom(j);
      if (blob_name_to_last_top_idx.find(blob_name) ==
          blob_name_to_last_top_idx.end()) {
        LOG(FATAL) << "Unknown bottom blob '" << blob_name << "' (layer '"
                   << layer_param.name() << "', bottom index " << j << ")";
      }
      const pair<int_tp, int_tp>& bottom_idx = make_pair(i, j);
      const pair<int_tp, int_tp>& top_idx =
          blob_name_to_last_top_idx[blob_name];
      bottom_idx_to_source_top_idx[bottom_idx] = top_idx;
      ++top_idx_to_bottom_count[top_idx];
    }
    // Record this layer's tops as the latest producers of their blob names.
    for (int_tp j = 0; j < layer_param.top_size(); ++j) {
      const string& blob_name = layer_param.top(j);
      blob_name_to_last_top_idx[blob_name] = make_pair(i, j);
    }
    // a use of a top blob as a loss should be handled similarly to the use of
    // a top blob as a bottom blob to another layer.
    const int_tp last_loss =
        std::min(layer_param.loss_weight_size(), layer_param.top_size());
    for (int_tp j = 0; j < last_loss; ++j) {
      const string& blob_name = layer_param.top(j);
      const pair<int_tp, int_tp>& top_idx =
          blob_name_to_last_top_idx[blob_name];
      top_idx_to_loss_weight[top_idx] = layer_param.loss_weight(j);
      // A nonzero loss weight counts as one extra consumer of the blob.
      if (top_idx_to_loss_weight[top_idx]) {
        ++top_idx_to_bottom_count[top_idx];
      }
    }
  }
  // Second pass: rebuild the net, rewriting bottoms and appending split layers.
  for (int_tp i = 0; i < param.layer_size(); ++i) {
    LayerParameter* layer_param = param_split->add_layer();
    layer_param->CopyFrom(param.layer(i));
    // Replace any shared bottom blobs with split layer outputs.
    for (int_tp j = 0; j < layer_param->bottom_size(); ++j) {
      const pair<int_tp, int_tp>& top_idx =
          bottom_idx_to_source_top_idx[make_pair(i, j)];
      const int_tp split_count = top_idx_to_bottom_count[top_idx];
      if (split_count > 1) {
        const string& layer_name = layer_idx_to_layer_name[top_idx.first];
        const string& blob_name = layer_param->bottom(j);
        // The post-increment hands each consumer the next split output.
        layer_param->set_bottom(j, SplitBlobName(layer_name,
            blob_name, top_idx.second, top_idx_to_bottom_split_idx[top_idx]++));
        while (layer_param->bottom_quantizer_size() <= j) {
          layer_param->add_bottom_quantizer();
        }
        // Need to preserve the original blob name for quantization purposes
        if (!layer_param->bottom_quantizer(j).has_name()) {
          layer_param->mutable_bottom_quantizer(j)->set_name(blob_name);
        }
      }
    }
    // Create split layer for any top blobs used by other layer as bottom
    // blobs more than once.
    for (int_tp j = 0; j < layer_param->top_size(); ++j) {
      const pair<int_tp, int_tp>& top_idx = make_pair(i, j);
      const int_tp split_count = top_idx_to_bottom_count[top_idx];
      if (split_count > 1) {
        const string& layer_name = layer_idx_to_layer_name[i];
        const string& blob_name = layer_param->top(j);
        LayerParameter* split_layer_param = param_split->add_layer();
        const float loss_weight = top_idx_to_loss_weight[top_idx];
        const QuantizerParameter* ref_quant_param = nullptr;
        if (layer_param->bottom_quantizer_size() > j) {
          ref_quant_param = &(layer_param->bottom_quantizer(j));
        }
        ConfigureSplitLayer(layer_name, blob_name, j, split_count,
            loss_weight, split_layer_param, layer_param->top_data_type(),
            layer_param->top_data_type(), ref_quant_param);
        // The split layer now carries the loss; consume one split output
        // for it and clear the weight on the original layer.
        if (loss_weight) {
          layer_param->clear_loss_weight();
          top_idx_to_bottom_split_idx[top_idx]++;
        }
      }
    }
  }
}
Exemple #17
0
Table* read_from_geoplaces2() {
	// Loads "geoplaces2.csv" into a newly allocated Table.  The first CSV
	// row supplies the column labels (paired with the fixed storage types
	// below); every subsequent row becomes one Record.
	// Returns NULL if the file cannot be opened -- the original fell off
	// the end of a non-void function here, which is undefined behaviour.
	static const int kNumCols = 21;
	// Field names used for record entries, in CSV column order.
	static const char* kNames[kNumCols] = {
		"placeID", "latitude", "longitude", "the_geom_meter", "name",
		"address", "city", "state", "country", "fax", "zip", "alcohol",
		"smoking_area", "dress_code", "accessibility", "price", "url",
		"Rambience", "franchise", "area", "other_services"
	};
	// Storage type for each column, same order as kNames.
	static const decltype(Table::varchar) kTypes[kNumCols] = {
		Table::integer,   // placeID
		Table::floating,  // latitude
		Table::floating,  // longitude
		Table::varchar,   // the_geom_meter
		Table::varchar,   // name
		Table::varchar,   // address
		Table::varchar,   // city
		Table::varchar,   // state
		Table::varchar,   // country
		Table::varchar,   // fax
		Table::varchar,   // zip
		Table::varchar,   // alcohol
		Table::varchar,   // smoking_area
		Table::varchar,   // dress_code
		Table::varchar,   // accessibility
		Table::varchar,   // price
		Table::varchar,   // url
		Table::varchar,   // Rambience
		Table::varchar,   // franchise
		Table::varchar,   // area
		Table::varchar    // other_services
	};

	ifstream myfile ("geoplaces2.csv");
	if (!myfile.is_open())
		return NULL;

	// Header row: each cell's text becomes the column label.  The last
	// column is terminated by '\n' instead of ','.
	Table::ColumnList columns;
	for (int c = 0; c < kNumCols; c++) {
		String header;
		getline( myfile, header, (c == kNumCols - 1) ? '\n' : ',' );
		columns.push_back(make_pair(header, kTypes[c]));
	}

	Table* geo = new Table(columns); // Create geo using the defined columns

	// Data rows.  NOTE(review): like the original, this loops on
	// myfile.good(), so a trailing newline yields one final record of
	// empty fields -- confirm whether that is intended before changing it.
	while ( myfile.good() )
	{
		vector<pair<string, string> > entries; // Entries for the record to be placed in the table
		for (int c = 0; c < kNumCols; c++) {
			string value;
			getline( myfile, value, (c == kNumCols - 1) ? '\n' : ',' );
			entries.push_back(make_pair(string(kNames[c]), value));
		}

		Record add(entries); // Create record to add to table
		geo->insert(add);  // Insert add record into geo
	}

	myfile.close();
	return geo;
}
    // Drive a client write batch to completion: repeatedly target the
    // remaining ops to shards, dispatch one child batch per shard host,
    // collect per-shard responses, refresh targeting metadata when stale,
    // and finally assemble the aggregate client response.
    // Aborts the batch after kMaxRoundsWithoutProgress consecutive rounds
    // in which no op completed and no metadata changed.
    // NOTE(review): _targeter/_dispatcher/_resolver/_stats are members
    // wired up elsewhere in BatchWriteExec -- confirm against the class.
    void BatchWriteExec::executeBatch( const BatchedCommandRequest& clientRequest,
                                       BatchedCommandResponse* clientResponse ) {

        LOG( 4 ) << "starting execution of write batch of size "
                 << static_cast<int>( clientRequest.sizeWriteOps() )
                 << " for " << clientRequest.getNS() << endl;

        BatchWriteOp batchOp;
        batchOp.initClientRequest( &clientRequest );

        // Current batch status
        bool refreshedTargeter = false;
        int rounds = 0;
        int numCompletedOps = 0;
        int numRoundsWithoutProgress = 0;

        // Each iteration of this loop is one targeting + send/recv round.
        while ( !batchOp.isFinished() ) {

            //
            // Get child batches to send using the targeter
            //
            // Targeting errors can be caused by remote metadata changing (the collection could have
            // been dropped and recreated, for example with a new shard key).  If a remote metadata
            // change occurs *before* a client sends us a batch, we need to make sure that we don't
            // error out just because we're staler than the client - otherwise mongos will be have
            // unpredictable behavior.
            //
            // (If a metadata change happens *during* or *after* a client sends us a batch, however,
            // we make no guarantees about delivery.)
            //
            // For this reason, we don't record targeting errors until we've refreshed our targeting
            // metadata at least once *after* receiving the client batch - at that point, we know:
            //
            // 1) our new metadata is the same as the metadata when the client sent a batch, and so
            //    targeting errors are real.
            // OR
            // 2) our new metadata is a newer version than when the client sent a batch, and so
            //    the metadata must have changed after the client batch was sent.  We don't need to
            //    deliver in this case, since for all the client knows we may have gotten the batch
            //    exactly when the metadata changed.
            //

            OwnedPointerVector<TargetedWriteBatch> childBatchesOwned;
            vector<TargetedWriteBatch*>& childBatches = childBatchesOwned.mutableVector();

            // If we've already had a targeting error, we've refreshed the metadata once and can
            // record target errors definitively.
            bool recordTargetErrors = refreshedTargeter;
            Status targetStatus = batchOp.targetBatch( *_targeter,
                                                       recordTargetErrors,
                                                       &childBatches );
            if ( !targetStatus.isOK() ) {
                // Don't do anything until a targeter refresh
                _targeter->noteCouldNotTarget();
                refreshedTargeter = true;
                ++_stats->numTargetErrors;
                dassert( childBatches.size() == 0u );
            }

            //
            // Send all child batches
            //

            size_t numSent = 0;
            size_t numToSend = childBatches.size();
            bool remoteMetadataChanging = false;
            // Inner loop: a dispatch pass sends at most one batch per host,
            // so several passes may be needed to drain childBatches.
            while ( numSent != numToSend ) {

                // Collect batches out on the network, mapped by endpoint
                OwnedHostBatchMap ownedPendingBatches;
                OwnedHostBatchMap::MapType& pendingBatches = ownedPendingBatches.mutableMap();

                //
                // Send side
                //

                // Get as many batches as we can at once
                for ( vector<TargetedWriteBatch*>::iterator it = childBatches.begin();
                    it != childBatches.end(); ++it ) {

                    //
                    // Collect the info needed to dispatch our targeted batch
                    //

                    TargetedWriteBatch* nextBatch = *it;
                    // If the batch is NULL, we sent it previously, so skip
                    if ( nextBatch == NULL ) continue;

                    // Figure out what host we need to dispatch our targeted batch
                    ConnectionString shardHost;
                    Status resolveStatus = _resolver->chooseWriteHost( nextBatch->getEndpoint()
                                                                           .shardName,
                                                                       &shardHost );
                    if ( !resolveStatus.isOK() ) {

                        ++_stats->numResolveErrors;

                        // Record a resolve failure
                        // TODO: It may be necessary to refresh the cache if stale, or maybe just
                        // cancel and retarget the batch
                        WriteErrorDetail error;
                        buildErrorFrom( resolveStatus, &error );

                        LOG( 4 ) << "unable to send write batch to " << shardHost.toString()
                                 << causedBy( resolveStatus.toString() ) << endl;

                        batchOp.noteBatchError( *nextBatch, error );

                        // We're done with this batch
                        // Clean up when we can't resolve a host
                        delete *it;
                        *it = NULL;
                        --numToSend;
                        continue;
                    }

                    // If we already have a batch for this host, wait until the next time
                    OwnedHostBatchMap::MapType::iterator pendingIt = pendingBatches.find( shardHost );
                    if ( pendingIt != pendingBatches.end() ) continue;

                    //
                    // We now have all the info needed to dispatch the batch
                    //

                    BatchedCommandRequest request( clientRequest.getBatchType() );
                    batchOp.buildBatchRequest( *nextBatch, &request );

                    // Internally we use full namespaces for request/response, but we send the
                    // command to a database with the collection name in the request.
                    NamespaceString nss( request.getNS() );
                    request.setNS( nss.coll() );

                    LOG( 4 ) << "sending write batch to " << shardHost.toString() << ": "
                             << request.toString() << endl;

                    _dispatcher->addCommand( shardHost, nss.db(), request );

                    // Indicate we're done by setting the batch to NULL
                    // We'll only get duplicate hostEndpoints if we have broadcast and non-broadcast
                    // endpoints for the same host, so this should be pretty efficient without
                    // moving stuff around.
                    *it = NULL;

                    // Recv-side is responsible for cleaning up the nextBatch when used
                    pendingBatches.insert( make_pair( shardHost, nextBatch ) );
                }

                // Send them all out
                _dispatcher->sendAll();
                numSent += pendingBatches.size();

                //
                // Recv side
                //

                while ( _dispatcher->numPending() > 0 ) {

                    // Get the response
                    ConnectionString shardHost;
                    BatchedCommandResponse response;
                    Status dispatchStatus = _dispatcher->recvAny( &shardHost, &response );

                    // Get the TargetedWriteBatch to find where to put the response
                    dassert( pendingBatches.find( shardHost ) != pendingBatches.end() );
                    TargetedWriteBatch* batch = pendingBatches.find( shardHost )->second;

                    if ( dispatchStatus.isOK() ) {

                        TrackedErrors trackedErrors;
                        trackedErrors.startTracking( ErrorCodes::StaleShardVersion );

                        LOG( 4 ) << "write results received from " << shardHost.toString() << ": "
                                 << response.toString() << endl;

                        // Dispatch was ok, note response
                        batchOp.noteBatchResponse( *batch, response, &trackedErrors );

                        // Note if anything was stale
                        const vector<ShardError*>& staleErrors =
                            trackedErrors.getErrors( ErrorCodes::StaleShardVersion );

                        if ( staleErrors.size() > 0 ) {
                            noteStaleResponses( staleErrors, _targeter );
                            ++_stats->numStaleBatches;
                        }

                        // Remember if the shard is actively changing metadata right now
                        if ( isShardMetadataChanging( staleErrors ) ) {
                            remoteMetadataChanging = true;
                        }

                        // Remember that we successfully wrote to this shard
                        // NOTE: This will record lastOps for shards where we actually didn't update
                        // or delete any documents, which preserves old behavior but is conservative
                        _stats->noteWriteAt( shardHost,
                                             response.isLastOpSet() ? 
                                             response.getLastOp() : OpTime(),
                                             response.isElectionIdSet() ?
                                             response.getElectionId() : OID());
                    }
                    else {

                        // Error occurred dispatching, note it

                        stringstream msg;
                        msg << "write results unavailable from " << shardHost.toString()
                            << causedBy( dispatchStatus.toString() );

                        WriteErrorDetail error;
                        buildErrorFrom( Status( ErrorCodes::RemoteResultsUnavailable, msg.str() ),
                                        &error );

                        LOG( 4 ) << "unable to receive write results from " << shardHost.toString()
                                 << causedBy( dispatchStatus.toString() ) << endl;

                        batchOp.noteBatchError( *batch, error );
                    }
                }
            }

            // One full send/receive round finished.
            ++rounds;
            ++_stats->numRounds;

            // If we're done, get out
            if ( batchOp.isFinished() )
                break;

            // MORE WORK TO DO

            //
            // Refresh the targeter if we need to (no-op if nothing stale)
            //

            bool targeterChanged = false;
            Status refreshStatus = _targeter->refreshIfNeeded( &targeterChanged );

            if ( !refreshStatus.isOK() ) {

                // It's okay if we can't refresh, we'll just record errors for the ops if
                // needed.
                warning() << "could not refresh targeter" << causedBy( refreshStatus.reason() )
                          << endl;
            }

            //
            // Ensure progress is being made toward completing the batch op
            //

            int currCompletedOps = batchOp.numWriteOpsIn( WriteOpState_Completed );
            if ( currCompletedOps == numCompletedOps && !targeterChanged
                 && !remoteMetadataChanging ) {
                ++numRoundsWithoutProgress;
            }
            else {
                numRoundsWithoutProgress = 0;
            }
            numCompletedOps = currCompletedOps;

            // Stalled: nothing completed and nothing changed for too many
            // rounds -- abort the whole batch with a NoProgressMade error.
            if ( numRoundsWithoutProgress > kMaxRoundsWithoutProgress ) {

                stringstream msg;
                msg << "no progress was made executing batch write op in " << clientRequest.getNS()
                    << " after " << kMaxRoundsWithoutProgress << " rounds (" << numCompletedOps
                    << " ops completed in " << rounds << " rounds total)";

                WriteErrorDetail error;
                buildErrorFrom( Status( ErrorCodes::NoProgressMade, msg.str() ), &error );
                batchOp.abortBatch( error );
                break;
            }
        }

        batchOp.buildClientResponse( clientResponse );

        LOG( 4 ) << "finished execution of write batch"
                 << ( clientResponse->isErrDetailsSet() ? " with write errors" : "")
                 << ( clientResponse->isErrDetailsSet() &&
                      clientResponse->isWriteConcernErrorSet() ? " and" : "" )
                 << ( clientResponse->isWriteConcernErrorSet() ? " with write concern error" : "" )
                 << " for " << clientRequest.getNS() << endl;
    }
// Validate a list of send-coins recipients and, if everything checks out,
// create and commit a wallet transaction paying them.
// Returns an early status code (OK for an empty list, or one of the
// validation/failure codes) as soon as any check fails; the transaction
// itself is built and committed under cs_main/cs_wallet.
WalletModel::SendCoinsReturn WalletModel::sendCoins(const QList<SendCoinsRecipient> &recipients)
{
    qint64 total = 0;
    QSet<QString> setAddress;
    QString hex;

    // Nothing to send.
    if(recipients.empty())
    {
        return OK;
    }

    // Pre-check input data for validity
    foreach(const SendCoinsRecipient &rcp, recipients)
    {
        if(!validateAddress(rcp.address))
        {
            return InvalidAddress;
        }
        setAddress.insert(rcp.address);

        // Reject zero or negative amounts.
        if(rcp.amount <= 0)
        {
            return InvalidAmount;
        }
        total += rcp.amount;
    }

    // Each destination address may appear at most once.
    if(recipients.size() > setAddress.size())
    {
        return DuplicateAddress;
    }

    if(total > getBalance())
    {
        return AmountExceedsBalance;
    }

    // Separate check so the fee amount can be reported back to the caller.
    if((total + nTransactionFee) > getBalance())
    {
        return SendCoinsReturn(AmountWithFeeExceedsBalance, nTransactionFee);
    }

    {
        LOCK2(cs_main, wallet->cs_wallet);

        // Sendmany
        std::vector<std::pair<CScript, int64> > vecSend;
        foreach(const SendCoinsRecipient &rcp, recipients)
        {
            CScript scriptPubKey;
            scriptPubKey.SetDestination(CBitcoinAddress(rcp.address.toStdString()).Get());
            vecSend.push_back(make_pair(scriptPubKey, rcp.amount));
        }

        CWalletTx wtx;
        CReserveKey keyChange(wallet);
        int64 nFeeRequired = 0;
        std::string strFailReason;
        bool fCreated = wallet->CreateTransaction(vecSend, wtx, keyChange, nFeeRequired, strFailReason);

        if(!fCreated)
        {
            // Distinguish "not enough for the fee" from other creation errors.
            if((total + nFeeRequired) > wallet->GetBalance())
            {
                return SendCoinsReturn(AmountWithFeeExceedsBalance, nFeeRequired);
            }
            emit message(tr("Send Coins"), QString::fromStdString(strFailReason),
                         CClientUIInterface::MSG_ERROR);
            return TransactionCreationFailed;
        }
        // Give the user a chance to reject the required fee.
        if(!uiInterface.ThreadSafeAskFee(nFeeRequired))
        {
            return Aborted;
        }
        if(!wallet->CommitTransaction(wtx, keyChange))
        {
            return TransactionCommitFailed;
        }
        // Remember the committed transaction's hash (hex form).
        hex = QString::fromStdString(wtx.GetHash().GetHex());
    }
Exemple #20
0
// Coin-control-aware variant of sendCoins: validates the recipient list,
// computes the spendable balance from the coins selected/allowed by
// coinControl (instead of getBalance()), then creates and commits the
// wallet transaction under cs_main/cs_wallet.
WalletModel::SendCoinsReturn WalletModel::sendCoins(const QList<SendCoinsRecipient> &recipients, const CCoinControl *coinControl)
{
    qint64 total = 0;
    QSet<QString> setAddress;
    QString hex;

    // Nothing to send.
    if(recipients.empty())
    {
        return OK;
    }

    // Pre-check input data for validity
    foreach(const SendCoinsRecipient &rcp, recipients)
    {
        if(!validateAddress(rcp.address))
        {
            return InvalidAddress;
        }
        setAddress.insert(rcp.address);

        // Enforce the network's minimum output amount (dust threshold).
        if(rcp.amount < MIN_TXOUT_AMOUNT)
        {
            return InvalidAmount;
        }
        total += rcp.amount;
    }

    // Each destination address may appear at most once.
    if(recipients.size() > setAddress.size())
    {
        return DuplicateAddress;
    }

    // we do not use getBalance() here, because some coins could be locked or coin control could be active
    int64 nBalance = 0;
    std::vector<COutput> vCoins;
    wallet->AvailableCoins(vCoins, true, coinControl);
    BOOST_FOREACH(const COutput& out, vCoins)
        nBalance += out.tx->vout[out.i].nValue;

    if(total > nBalance) 
    {
        return AmountExceedsBalance;
    }

    // Separate check so the fee amount can be reported back to the caller.
    if((total + nTransactionFee) > nBalance)
    {
        return SendCoinsReturn(AmountWithFeeExceedsBalance, nTransactionFee);
    }

    {
        LOCK2(cs_main, wallet->cs_wallet);

        // Sendmany
        std::vector<std::pair<CScript, int64> > vecSend;
        foreach(const SendCoinsRecipient &rcp, recipients)
        {
            CScript scriptPubKey;
            scriptPubKey.SetDestination(CBitcoinAddress(rcp.address.toStdString()).Get());
            vecSend.push_back(make_pair(scriptPubKey, rcp.amount));
        }

        CWalletTx wtx;
        CReserveKey keyChange(wallet);
        int64 nFeeRequired = 0;
        // NOTE(review): this overload passes coinControl (no fail-reason
        // string) -- confirm against the wallet's CreateTransaction API.
        bool fCreated = wallet->CreateTransaction(vecSend, wtx, keyChange, nFeeRequired, coinControl);

        if(!fCreated)
        {
            // Distinguish "not enough for the fee" from other creation errors.
            if((total + nFeeRequired) > nBalance)
            {
                return SendCoinsReturn(AmountWithFeeExceedsBalance, nFeeRequired);
            }
            return TransactionCreationFailed;
        }
        // Give the user a chance to reject the required fee.
        if(!ThreadSafeAskFee(nFeeRequired, tr("Sending...").toStdString()))
        {
            return Aborted;
        }
        if(!wallet->CommitTransaction(wtx, keyChange))
        {
            return TransactionCommitFailed;
        }
        // Remember the committed transaction's hash (hex form).
        hex = QString::fromStdString(wtx.GetHash().GetHex());
    }
Exemple #21
0
void SimpleXML::addChildAttrib(const string& aName, const string& aData) throw(SimpleXMLException) {
    checkChildSelected();

    (*currentChild)->attribs.push_back(make_pair(aName, aData));
}
//get candicate query id,category
// Tokenizes the (normalized) user query, collects candidate query ids per
// term from terms2qIDs_, pulls the matching query records out of
// queryIdata_ (skipping texts longer than 30 bytes), and fills qProperty
// with the results plus category/related-keyword data via GetProperty().
// Returns the term map built from the query; an empty map is returned for
// an empty input query.
StrToIntMap indexEngine::search(const std::string& userQuery
                                ,queryProperty& qProperty)
{
    std::string nstr = userQuery;
    if(0 != userQuery.length())
    {
        Normalize::normalize(nstr);
        // Reset any results left over from a previous search.
        qProperty.cqIdList.clear();
        qProperty.cQuery.clear();
        qProperty.cCategory.clear();
        qProperty.rsKeywords.clear();

        StrToIntMap termsMap;
        StrToIntMapIter termsMapIter;

        tokenTerms(nstr,termsMap);
        IdToQListIter termsQidIter;

        //get candicate query id
        for(termsMapIter = termsMap.begin(); termsMapIter != termsMap.end(); ++termsMapIter)
        {
            termsQidIter = terms2qIDs_.find(termsMapIter->second);
            if(terms2qIDs_.end() != termsQidIter)
            {
                qProperty.cqIdList.insert(make_pair(termsMapIter->second,termsQidIter->second));
            }
        }

        IdToQueryIter queryIter;
        //get candicate query data
        for(termsQidIter = qProperty.cqIdList.begin(); termsQidIter != qProperty.cqIdList.end(); ++termsQidIter)
        {
            for(std::size_t i = 0; i < termsQidIter->second.size(); ++i)
            {
                queryIter = queryIdata_.find(termsQidIter->second[i]);
                if(queryIdata_.end() != queryIter)
                {
                    // Skip overly long query texts (> 30 bytes).
                    if(queryIter->second.text.size() > 30)
                        continue;
                    qProperty.cQuery.insert(make_pair(termsQidIter->second[i],queryIter->second));
                }
            }
        }
        GetProperty(userQuery,qProperty.cCategory,qProperty.rsKeywords);
//	std::cout << "termsmap.size" << termsMap.size() << "\tcandicateqid.size()"
//		<< candicateQid.size() <<"\tcandicate query.size()"
//		<< candicateQuery.size()<< std::endl;
        //get candicate category
        /*	std::string str = userQuery;
        	Normalize::normalize(str);
            std::size_t qID = hash_query(str);
            QueryCateMapIter cateIter;
            cateIter = query2Cate_.find(qID);
            if(query2Cate_.end() != cateIter)
            {
                candicateCate.insert(make_pair(qID,cateIter->second));
            }*/
        return termsMap;
    }
    // Empty input query: nothing was searched, so the counts logged below
    // are whatever the caller left in qProperty.
    LOG(INFO) << "User query terms num:" << qProperty.cqIdList.size()
              << "\tCandicate query num:" << qProperty.cQuery.size()
              << "\tCandicate category num:" << qProperty.cCategory.size();
    // Bug fix: this function previously fell off the end here without a
    // return statement, which is undefined behavior for a value-returning
    // function. Return an empty term map for the empty-query path.
    return StrToIntMap();
}
Exemple #23
0
// E-step of an EM pass over the corpus cache.
// For each cached sentence pair sp (m target positions, n source
// positions, max span length l) this:
//   1. sums alpha-weighted phrase-pair probabilities into per-span
//      target_probs[j][jlen],
//   2. runs forward/backward dynamic programs over target segmentations,
//   3. runs a Viterbi pass to recover the best segmentation (debug only),
//   4. distributes fractional counts back onto each phrase pair in sp.
// Finally alpha_ is re-estimated from the accumulated fractional counts.
// NOTE(review): sp(i,ilen,j,jlen) appears to return a pointer to a
// phrase-pair entry or null -- confirm against CorpusCache's definition.
void
MP2::
expectation(CorpusCache& cache){
    double& alpha=alpha_;
    double alphaCount=0;
    for(auto& sp: cache){
        vector<vector<double>> target_probs(sp.m,vector<double>(sp.l,0.0));
        // alpha is rescaled by the sentence-pair size while sp is being
        // processed, and restored at the bottom of the loop body.
        alpha/=sp.n*sp.l;
        for(int j=0;j<sp.m;j++){
            for(int jlen=0;jlen<sp.l;jlen++){
                for(int i=0;i<sp.n;i++){
                    for(int ilen=0;ilen<sp.l;ilen++){
                        if(sp(i,ilen,j,jlen)!=(void*)0){
                            target_probs[j][jlen]+=
                            (sp(i,ilen,j,jlen)->prob*alpha);
                            // Sanity check: individual phrase-pair
                            // probabilities should never exceed 1.
                            if(sp(i,ilen,j,jlen)->prob>1){
                                cerr<<"wth prob>1 : "
                                <<sp(i,ilen,j,jlen)->prob<<endl;
                            }
                        }
                    }
                }
                //cout<<target_probs[j][jlen]<<" ";
            }
            //cout<<endl;
        }
        //cout<<endl;

        // Sanity check: summed span probabilities should stay <= 1.
        for(int j=0;j<sp.m;j++){
            for(int jlen=0;jlen<sp.l;jlen++){
                if(target_probs[j][jlen]>1)
                    cerr<<"error :"<<target_probs[j][jlen]<<endl;
            }
        }

        vector<double> forward(sp.m,0.0),backward(sp.m,0.0);
        //forward[i] is the posterior probability of target words of 1...i+1
        for(int i=0;i<sp.l&&i<sp.m;i++)
            forward[i]=target_probs[0][i];
        for(int i=1;i<(int)forward.size();i++){
            for(int j=1;j<=sp.l&&i-j>=0;j++){
                forward[i]+=forward[i-j]*target_probs[i-j+1][j-1];
            }
        }
        //backward[i] is the posterior probability of target words of i+1...m
        for(int i=0;i<sp.l&&i<sp.m;i++)
            backward[sp.m-i-1]=target_probs[sp.m-i-1][i];
        for(int i=sp.m-2;i>=0;i--){
            for(int j=1;j<=sp.l&&i+j<sp.m;j++){
                backward[i]+=target_probs[i][j-1]*backward[i+j];
            }
        }

        //viterbi
        // viterbi[i] = (best probability of segmenting words 0..i,
        //               start index of the previous segment, -1 at start).
        vector<pair<double,int> > viterbi(sp.m,pair<double,int>(0.0,0));
        for(int i=0;i<sp.l&&i<sp.m;i++)
            viterbi[i]=pair<double,int>(target_probs[0][i],-1);
        for(int i=1;i<(int)forward.size();i++){
            for(int j=1;j<=sp.l&&i-j>=0;j++){
                if(viterbi[i-j].first*target_probs[i-j+1][j-1]>viterbi[i].first)
                    viterbi[i]=
                    make_pair(viterbi[i-j].first*target_probs[i-j+1][j-1],i-j);
            }
        }

        // Backtrack the Viterbi pointers to produce the best segmentation
        // (only used for the commented-out debug print below).
        int pos=sp.m-1;
        string sequence="";
        while(pos>=0){
            sequence=to_string(pos)+" "+sequence;
            pos=viterbi[pos].second;
        }
        //cout<<"best seg:"<<sequence<<endl;

        //make sure forward[sp.m-1]==backward[0];
        assert(backward[0]>0);
        if(abs(forward[sp.m-1]-backward[0])>=1e-5*backward[0])
            cerr<<forward[sp.m-1]<<", "<<backward[0]<<endl;
        assert(abs(forward[sp.m-1]-backward[0])<1e-5*backward[0]);
        //cerr<<"backward[0]:"<<backward[0]<<endl;
        //collect fractional count for each phrase pair
        //fraccount=forward[j]*backward[j+jlen]*p(t|s)/backward[0];

        for(int j=0;j<sp.m;j++){
            for(int jlen=0;jlen<sp.l&&j+jlen+1<=sp.m;jlen++){
                double segprob=0;
                double before=1;
                double after=1;
                if(j>0)before=forward[j-1];
                if(j+jlen+1<sp.m)after=backward[j+jlen+1];

                segprob=before*after*target_probs[j][jlen]/backward[0];

                if(segprob>1||segprob<=0){
                    //cerr<<"segprob "<<segprob<<","<<j<<","<<jlen<<endl;
                }
                if(segprob<=0)continue;
                for(int i=0;i<sp.n;i++){
                    for(int ilen=0;ilen<sp.l&&ilen+i+1<=sp.n;ilen++){
                        if(sp(i,ilen,j,jlen)!=(void*)0){
                            double count=sp(i,ilen,j,jlen)->prob*segprob*alpha
                            /target_probs[j][jlen];
                            sp(i,ilen,j,jlen)->count+=count;
                            alphaCount+=count;
                            if(count>1)
                                cerr<<i<<","<<ilen<<","<<j
                                <<","<<jlen<<" ["<<sp.m<<","<<sp.n<<"]"
                                <<",count "<<count<<endl;
                        }
                    }
                }
            }
        }
        // Undo the per-sentence alpha rescaling applied above.
        alpha*=sp.n*sp.l;
    }
    //cerr<<alphaCount<<","<<cache.size()<<endl;
    alpha=alphaCount/(alphaCount+cache.size());
}
// Build the in-memory index from a tab-separated corpus file.
// Each corpus line is split on '\t' into four fields; lines that do not
// yield exactly four fields are skipped. Per line this:
//   - fills a QueryData record (text, hits, result counts),
//   - tokenizes the text into term ids,
//   - registers the hashed query id under each term id in terms2qIDs_,
//   - stores the record in queryIdata_ keyed by the hashed query id.
// NOTE(review): field [1] is split off but never used -- presumably
// intentional; confirm against the corpus format.
void indexEngine::indexing(const std::string& corpus_pth)
{
    ifstream ifOrigin_data; //file stream to load data from corpus
    ifOrigin_data.open(corpus_pth.c_str());
    if(!ifOrigin_data.is_open())
        //std::cerr << "Open corpus files failed!" << std::endl;
        LOG(ERROR) << "Open indexing corpus files error!";

    std::string sLine = "";
    std::string sData = "";
    vector<string> splitDat;

    while(getline(ifOrigin_data,sLine))
    {
        splitDat.clear();

        //normalize
        Normalize::normalize(sLine);

        //get detail data from original corpus
        // Peel off the first three tab-separated fields; the remainder of
        // the line becomes the fourth field.
        for(unsigned int i = 0; i < 3; ++i)
        {
            std::size_t pos = sLine.find('\t');
            if(std::string::npos != pos)
            {
                sData = sLine.substr(0,pos);
                sLine.assign(sLine,pos+1,sLine.length()-1);
                splitDat.push_back(sData);
            }
        }
        splitDat.push_back(sLine);

        //check data: malformed lines (fewer than 3 tabs) are dropped
        if(4 != splitDat.size())
            continue;
        QueryData qDat;
        qDat.text = splitDat[0]; //query text
        qDat.hits = atoi(splitDat[2].c_str()); //query times
        qDat.counts = atoi(splitDat[3].c_str());//results numbers

        //get term id
        StrToIntMap termsVector;
        tokenTerms(qDat.text,termsVector);

        std::size_t queryID = izenelib::util::izene_hashing(qDat.text);
        vector<std::size_t> queryIdVector;
        vector<std::size_t> termsIdVector;

        IdToQListIter termsQueryIter;
        queryIdVector.push_back(queryID);
        StrToIntMapIter termsIter;
        //assign hash id for every terms
        for(termsIter = termsVector.begin(); termsIter != termsVector.end(); ++termsIter)
        {
            termsIdVector.push_back(termsIter->second);
            //find terms in dictornary,termsID.v
            termsQueryIter = terms2qIDs_.find(termsIter->second);
            if(termsQueryIter != terms2qIDs_.end())
            {
                // Term already indexed: append this query id to its list.
                termsQueryIter->second.push_back(queryID);
            }
            else
            {
                // First time we see this term: start its query-id list.
                terms2qIDs_.insert(make_pair(termsIter->second,queryIdVector));
            }
        }

        //complete data for one query
        qDat.tid = termsIdVector;
        queryIdata_.insert(make_pair(queryID,qDat));

        //flush to disk file
        /* ofQueryDat << queryID << "\t" <<qDat.text << "\t" << qDat.hits
           << "\t" << qDat.counts;
         for(unsigned int i = 0; i < qDat.tid.size(); ++i)
         {
        	ofQueryDat << "\t" << qDat.tid[i];
        	//cout << "terms id :" << qDat.tid[i] << ",";
         }
         ofQueryDat << std::endl;


        //merge the searching times
        boost::unordered_map<std::size_t,QueryData>::iterator queryIter;
        queryIter = queryIdata_.find(queryID);
        if(queryIdata_.end() != queryIter && queryIter->second.text == qDat.text)
        {
        	std::cout << "queryIter->seoncd.text:" << queryIter->second.text <<
        		"\t qDat.text:" << qDat.text << std::endl;
        	std::cout << "queryID:" << queryID << "\tqueryIter->first:" << queryIter->first << std::endl;
        	std::cout << "\t text:" << qDat.text << std::endl;
        	queryIter->second.hits += qDat.hits;
        	std::cout  << "test--hits:" << queryIter->second.hits << "\t qdat.hits:" << qDat.hits << std::endl;
        }
        else
        	queryIdata_.insert(make_pair(queryID,qDat));
        queryIdata_.insert(make_pair(queryID,qDat));*/
    }
    ifOrigin_data.close();//file stream close
    //ofQueryDat.close();
}
Exemple #25
0
// One-time setup for TokenAnalyze: populate the reserved-word table, the
// operator/delimiter symbol table, and the token-type display-name table.
void TokenAnalyze::initialize()
{
	/********** Reserved-word table (13 entries) **********/
	reservedWord["const"]  = CONSTTK;
	reservedWord["int"]    = INTTK;
	reservedWord["char"]   = CHARTK;
	reservedWord["void"]   = VOIDTK;
	reservedWord["main"]   = MAINTK;
	reservedWord["if"]     = IFTK;
	reservedWord["else"]   = ELSETK;
	reservedWord["do"]     = DOTK;
	reservedWord["while"]  = WHILETK;
	reservedWord["for"]    = FORTK;
	reservedWord["scanf"]  = SCANFTK;
	reservedWord["printf"] = PRINTFTK;
	reservedWord["return"] = RETURNTK;

	/********** Operator / delimiter table **********/
	ssym["+"]  = PLUS;
	ssym["-"]  = MINU;
	ssym["*"]  = MULT;
	ssym["/"]  = DIV;
	ssym["<"]  = LSS;
	ssym["<="] = LEQ;
	ssym[">"]  = GRE;
	ssym[">="] = GEQ;
	ssym["=="] = EQL;
	ssym["!="] = NEQ;
	ssym["="]  = ASSIGN;
	ssym[";"]  = SEMICN;
	ssym[","]  = COMMA;
	ssym["("]  = LPARENT;
	ssym[")"]  = RPARENT;
	ssym["["]  = LBRACK;
	ssym["]"]  = RBRACK;
	ssym["{"]  = LBRACE;
	ssym["}"]  = RBRACE;

	/********** Token-type display names (for diagnostics/output) **********/
	token_typename[IDEN]     = "IDEN";
	token_typename[INTCON]   = "INTCON";
	token_typename[CHARCON]  = "CHARCON";
	token_typename[STRCON]   = "STRCON";
	token_typename[CONSTTK]  = "CONSTTK";
	token_typename[INTTK]    = "INTTK";
	token_typename[CHARTK]   = "CHARTK";
	token_typename[VOIDTK]   = "VOIDTK";
	token_typename[MAINTK]   = "MAINTK";
	token_typename[IFTK]     = "IFTK";
	token_typename[ELSETK]   = "ELSETK";
	token_typename[DOTK]     = "DOTK";
	token_typename[WHILETK]  = "WHILETK";
	token_typename[FORTK]    = "FORTK";
	token_typename[SCANFTK]  = "SCANFTK";
	token_typename[PRINTFTK] = "PRINTFTK";
	token_typename[RETURNTK] = "RETURNTK";
	token_typename[PLUS]     = "PLUS";
	token_typename[MINU]     = "MINU";
	token_typename[MULT]     = "MULT";
	token_typename[DIV]      = "DIV";
	token_typename[LSS]      = "LSS";
	token_typename[LEQ]      = "LEQ";
	token_typename[GRE]      = "GRE";
	token_typename[GEQ]      = "GEQ";
	token_typename[EQL]      = "EQL";
	token_typename[NEQ]      = "NEQ";
	token_typename[ASSIGN]   = "ASSIGN";
	token_typename[SEMICN]   = "SEMICN";
	token_typename[COMMA]    = "COMMA";
	token_typename[LPARENT]  = "LPARENT";
	token_typename[RPARENT]  = "RPARENT";
	token_typename[LBRACK]   = "LBRACK";
	token_typename[RBRACK]   = "RBRACK";
	token_typename[LBRACE]   = "LBRACE";
	token_typename[RBRACE]   = "RBRACE";
}
bool indexEngine::open()
{
    if(0 == dict_pth_.length())
        return false;
    std::string termId_pth =  dict_pth_ + "/termId.v";
    std::string queryDat_pth = dict_pth_ + "/queryDat.v";

    ifstream finTermsId;
    ifstream finQueryDat;

    std::string sLine = "";
    std::string s = "";

    //open term id dictionary if exist , load data
    finTermsId.open(termId_pth.c_str());
    if(finTermsId.is_open())
    {
        vector<std::size_t> queryIdVector;
        vector<std::string> context;
        std::size_t termID;
        //std::size_t queryID;
        while(getline(finTermsId,sLine))
        {
            queryIdVector.clear();
            context.clear();
            if(0 >= sLine.length())
                continue;
            boost::split(context,sLine,boost::is_any_of("\t"));
            if(2 > context.size())
                continue;
            termID = boost::lexical_cast<std::size_t>(context[0]);
            for(std::size_t it = 1; it < context.size()-1; ++it)
            {
                std::size_t id = boost::lexical_cast<std::size_t>(context[it]);
                queryIdVector.push_back(id);
            }
            terms2qIDs_.insert(make_pair(termID,queryIdVector));
        }
    }
//	std::cout << "读入terms的大小:" << terms2qIDs_.size() << std::endl;
    finTermsId.close();
    //open query data exist if exist , load data
    finQueryDat.open(queryDat_pth.c_str());
    if(finQueryDat.is_open())
    {
        QueryData qDat;
        std::size_t queryID;
        std::size_t tID;
        sLine = "";
        s = "";
        vector<std::size_t> termsIdVector;
        vector<std::string> context;
        while(getline(finQueryDat,sLine))
        {
            termsIdVector.clear();
            qDat.tid.clear();
            context.clear();
            if(0>= sLine.length())
                continue;
            boost::split(context,sLine,boost::is_any_of("\t"));
            if( 5 > context.size())
                continue;
            queryID = boost::lexical_cast<std::size_t>(context[0]);
            qDat.text = context[1];
            qDat.hits = boost::lexical_cast<std::size_t>(context[2]);
            qDat.counts = boost::lexical_cast<std::size_t>(context[3]);
            for(vector<std::string>::iterator it = context.begin()+4; it != context.end(); ++it)
            {
                tID = boost::lexical_cast<std::size_t>(*it);
                termsIdVector.push_back(tID);
            }
            qDat.tid = termsIdVector;
            queryIdata_.insert(make_pair(queryID,qDat));
        }

    }
//	std::cout << "读入query id 的大小:" << queryIdata_.size() << std::endl;
    finQueryDat.close();

    //load forbid keywords list
    ifstream finForbid;
    std::string forbid_pth = dict_pth_ + "/forbidden.v";
    finForbid.open(forbid_pth.c_str());
    if(finForbid.is_open())
    {
        sLine = "";
        forbidList_.clear();
        while(getline(finForbid,sLine))
        {
            if(0 == sLine.size())
                continue;
            forbidList_.insert(sLine);
        }
    }
    finForbid.close();

    //load rskeywords form TaoBao
    ifstream finRskeywords;
    string rs_pth = dict_pth_ + "/HotRskeywords.v";
    finRskeywords.open(rs_pth.c_str());
    if(finRskeywords.is_open()) {
        vector<std::string> context;
        vector<std::string> rskeywords;
        sLine = "";
        std::size_t hsId;
        while(getline(finRskeywords,sLine))
        {
            rskeywords.clear();
            if(0 >= sLine.length())
                continue;
            boost::split(context,sLine,boost::is_any_of("\t"));
            if(2 > context.size())
                continue;
            hsId = boost::lexical_cast<std::size_t>(context[0]);
            boost::unordered_map<std::size_t,vector<std::string> >::iterator rsIter;
            //std::cout << "key:" << context[0] << "\t hash:" << hsId << std::endl;
            rsIter = rsKeyTaoBao_.find(hsId);
            if(rsIter != rsKeyTaoBao_.end())
                continue;
            for(std::size_t i = 1; i < context.size(); ++i)
                if(context[i].size() != 0)
                    rskeywords.push_back(context[i]);
            rsKeyTaoBao_.insert(make_pair(hsId,rskeywords));
        }
    }
    finRskeywords.close();
    std::cout << "load rskeywords size:" << rsKeyTaoBao_.size() << std::endl;

    //load category dictonary
    ifstream finQuery2Cate;
    string cate_pth = dict_pth_ + "/query2Cate.v";
    finQuery2Cate.open(cate_pth.c_str());
    if(finQuery2Cate.is_open())
    {
        vector<std::string> context;
        vector<std::string> cate;
        sLine = "";
        std::size_t qID;
        while(getline(finQuery2Cate,sLine))
        {
            cate.clear();
            context.clear();
            if(0 >= sLine.length())
                continue;
            std::size_t pos = sLine.find('\t');
            if(std::string::npos == pos)
            {
                continue;
            }
            else
            {
                string ss = sLine.substr(0,pos);
                sLine.assign(sLine,pos+1,sLine.length()-1);
                qID = boost::lexical_cast<std::size_t>(ss);
                if( std::string::npos != sLine.find(","))
                {
                    boost::split(context,sLine,boost::is_any_of(","));
                    // if(1 > context.size())
                    //    continue;
                    for(std::size_t id = 0; id < context.size(); ++id)
                        cate.push_back(context[id]);
                }
                else
                    cate.push_back(sLine);
            }
            query2Cate_.insert(make_pair(qID,cate));
        }

    }
    finQuery2Cate.close();
    std::cout << "读入的category size:" << query2Cate_.size() <<  std::endl;

    LOG(INFO) << "Load terms dictionary size:" << terms2qIDs_.size()
              << "\tLoad keywords size:" << queryIdata_.size()
              << "\tLoad category size:" << query2Cate_.size()
              << "\tLoad forbidden keywords size:" << forbidList_.size();

    if(0 == terms2qIDs_.size() || 0 == queryIdata_.size())
    {
        isNeedflush = true;
        return true;
    }
    else
        return false; // load dictionary successfully!
}
    /**
     * Module execution function. Saves interesting files recorded on the 
     * blackboard to a user-specified output directory.
     *
     * @returns TskModule::OK on success if all files saved, TskModule::FAIL if one or more files were not saved
     */
    TSK_MODULE_EXPORT TskModule::Status report()
    {
        const std::string MSG_PREFIX = "SaveInterestingFilesModule::report : ";
        TskModule::Status status = TskModule::OK;

        try
        {
            if (outputFolderPath.empty())
            {
                // Initialization failed; the reason was already logged in initialize().
                return TskModule::FAIL;
            }

            // Collect the interesting-file-set hits from the blackboard, grouped by set name.
            FileSets fileSets;
            FileSetHits fileSetHits;
            std::vector<TskBlackboardArtifact> hitArtifacts = TskServices::Instance().getBlackboard().getArtifacts(TSK_INTERESTING_FILE_HIT);
            for (std::vector<TskBlackboardArtifact>::iterator hit = hitArtifacts.begin(); hit != hitArtifacts.end(); ++hit)
            {
                // Look for the set name attribute of this artifact.
                bool foundSetName = false;
                std::vector<TskBlackboardAttribute> attributes = hit->getAttributes();
                for (std::vector<TskBlackboardAttribute>::iterator attribute = attributes.begin(); attribute != attributes.end(); ++attribute)
                {
                    if (attribute->getAttributeTypeID() == TSK_SET_NAME)
                    {
                        foundSetName = true;

                        // Record the set name and description once per file set
                        // (map keys are unique, so duplicates are ignored) ...
                        fileSets.insert(make_pair(attribute->getValueString(), attribute->getContext()));

                        // ... and drop the artifact into a multimap so that all of the
                        // file hits for a given set can be pulled back out later as an
                        // iterator range.
                        fileSetHits.insert(make_pair(attribute->getValueString(), *hit));
                    }
                }

                if (!foundSetName)
                {
                    // Log the error and move on to the next artifact.
                    std::stringstream msg;
                    msg << MSG_PREFIX << "failed to find TSK_SET_NAME attribute for TSK_INTERESTING_FILE_HIT artifact with id '" << hit->getArtifactID() << "', skipping artifact";
                    LOGERROR(msg.str());
                }
            }

            // Write the interesting files out to the output directory, one file set at a time.
            for (map<std::string, std::string>::const_iterator fileSet = fileSets.begin(); fileSet != fileSets.end(); ++fileSet)
            {
                // All hits for this set, as an iterator range over the multimap.
                FileSetHitsRange fileSetHitsRange = fileSetHits.equal_range(fileSet->first);

                // Save the files corresponding to the file hit artifacts.
                saveFiles(fileSet->first, fileSet->second, fileSetHitsRange);
            }
        }
        catch (TskException &ex)
        {
            std::stringstream msg;
            msg << MSG_PREFIX << "TskException: " << ex.message();
            LOGERROR(msg.str());
            status = TskModule::FAIL;
        }
        catch (Poco::Exception &ex)
        {
            std::stringstream msg;
            msg << MSG_PREFIX << "Poco::Exception: " << ex.displayText();
            LOGERROR(msg.str());
            status = TskModule::FAIL;
        }
        catch (std::exception &ex)
        {
            std::stringstream msg;
            msg << MSG_PREFIX << "std::exception: " << ex.what();
            LOGERROR(msg.str());
            status = TskModule::FAIL;
        }
        catch (...)
        {
            LOGERROR(MSG_PREFIX + "unrecognized exception");
            status = TskModule::FAIL;
        }

        return status;
    }
// Exemple #28
// 0
/**
 * Loads "rating_final.csv" into a newly allocated Table.
 *
 * The first line of the file supplies the five column names (comma separated,
 * last one newline terminated); every following line becomes one Record.
 *
 * NOTE(review): the header-reading variable names (latitude, longitude,
 * the_geom_meter, ...) look copied from a "geoplaces" loader, while the record
 * fields are keyed userID/placeID/rating/food_rating/service_rating -- the
 * runtime behavior is unchanged here, but confirm the intended schema.
 *
 * @returns pointer to the new Table (caller owns and must delete it),
 *          or NULL if the file could not be opened.
 */
Table* read_from_rating_final() {

  ifstream myfile ("rating_final.csv");
  if (!myfile.is_open())
  {
    // BUGFIX: the original fell off the end of the function without a return
    // statement when the file was missing (undefined behavior for callers).
    return NULL;
  }

  Table::ColumnList columns;

  // Header row: read the five column names and declare the column types.
  String placeID;
  getline( myfile, placeID, ',' );
  columns.push_back(make_pair(placeID, Table::varchar));

  String latitude;
  getline( myfile, latitude, ',' );
  columns.push_back(make_pair(latitude, Table::integer));

  String longitude;
  getline( myfile, longitude, ',' );
  columns.push_back(make_pair(longitude, Table::integer));

  String the_geom_meter;
  getline( myfile, the_geom_meter, ',' );
  columns.push_back(make_pair(the_geom_meter, Table::integer));

  String name;
  getline( myfile, name, '\n' );
  columns.push_back(make_pair(name, Table::integer));

  Table* geo = new Table(columns); // Create geo using the defined columns

  // Data rows. BUGFIX: drive the loop off the success of the first getline
  // instead of while(myfile.good()) -- the old pattern executed one extra
  // iteration at EOF and inserted a spurious record of empty strings.
  string userID;
  while ( getline( myfile, userID, ',' ) )
  {
    vector<pair<string, string> > entries; // Entries for the record to be placed in the table

    entries.push_back(make_pair(string("userID"), userID));

    string placeIDval;
    getline( myfile, placeIDval, ',' );
    entries.push_back(make_pair(string("placeID"), placeIDval));

    string rating;
    getline( myfile, rating, ',' );
    entries.push_back(make_pair(string("rating"), rating));

    string food_rating;
    getline( myfile, food_rating, ',' );
    entries.push_back(make_pair(string("food_rating"), food_rating));

    string service_rating;
    getline( myfile, service_rating, '\n' );
    entries.push_back(make_pair(string("service_rating"), service_rating));

    Record add(entries); // Create record to add to table

    geo->insert(add); // Insert add record into geo
  }
  myfile.close();
  return geo;
}
/* Build the parsing tree for a token stream with a table-driven,
 * stack-based (LL) parse.
 *
 * tokens: the input token sequence, in source order; a "$" end-of-input
 *         sentinel token is appended internally.
 *
 * State stack conventions:
 *   - "S"       : start symbol (pushed first)
 *   - "@"       : depth marker; popping it decrements treeIndex so siblings
 *                 produced by a reduction all share the same tree depth
 *   - "epsilon" : empty production, skipped without consuming input
 *
 * Side effects: fills the member map parsingTree (insertion order keyed by
 * `order`) and calls outputParingTree() at the end.  On a table miss the
 * parse aborts the whole process via exit(1).
 *
 * NOTE(review): the "$" sentinel Token is heap-allocated and never freed;
 * ownership of the caller's Token* elements is also unclear from here.
 * */
void Parser:: createParsingTree(vector<Token*> tokens){

	stack<string> stateStack;	// grammar symbols still to be matched/expanded
	stack<Token*> inputStack;	// remaining input, top = next token
	stateStack.push("S");
	int treeIndex = 0;		// current depth in the parsing tree
	string treeValue;		// grammar symbol being recorded in the tree
	int order = 0;			// global insertion order of tree nodes
	int find_return;		// production index returned by findTable()

	// Push the input onto a stack in reverse, below a "$" end marker,
	// so the first token ends up on top.
	inputStack.push( new Token("$", "SP", 0) );
	for(vector<Token*>::reverse_iterator itr = tokens.rbegin(); itr != tokens.rend(); ++itr ){
		inputStack.push( (*itr) );
	}

	// loop while state stack  not empty
	while( !stateStack.empty() ){

		// Depth marker: a reduction has been fully expanded, go back up one level.
		if( stateStack.top().compare("@") == 0 ){
			--treeIndex;
			stateStack.pop();
			continue;
		}
		// skip epsilon (empty production, consumes no input)
		if( stateStack.top().compare("epsilon") == 0){
			stateStack.pop();
			continue;
		}
		
		treeValue = stateStack.top();
	
		// terminal insert to tree (records the matched token's symbol/category)
		if( isTerminal( stateStack.top() ) ){
			parsingTree.insert( make_pair( order++ ,Node(treeIndex, treeValue, inputStack.top() -> getSymbol(), inputStack.top() -> getCatergory() ) ) );
		}
		// non_terminal insert to tree (no category; token column is lookahead only)
		else{
			parsingTree.insert( make_pair( order++ ,Node(treeIndex, treeValue, inputStack.top() -> getSymbol(), " " ) ) );
		}

		// Terminal on top: match it against the input and consume both.
		if( isTerminal(stateStack.top())){
			stateStack.pop();
			inputStack.pop();
		}
		// Non-terminal with no entry in the parsing table: syntax error, abort.
		else if( (find_return = findTable(stateStack.top(),  (*inputStack.top()) ) ) == -1  ){
			cout << "Line: " << inputStack.top() -> getLineNumber() << " Parsing Error!" << endl;
			exit(1);
		}
		else{
			// Reduce: replace the non-terminal with the chosen production's
			// right-hand side (pushed in reverse so it pops left-to-right).
			map<string, vector<list<string> > > :: iterator itr =  grammar.find( stateStack.top() );
			stateStack.pop();	
		
			treeIndex += 1;
			// "@" marker restores treeIndex once this production is consumed.
			stateStack.push("@");
			for(list<string>:: reverse_iterator itr2 = itr->second[find_return].rbegin(); \
							itr2 != itr->second[find_return].rend(); ++itr2 ){
				stateStack.push( (*itr2) );
			}
		}
	}
	outputParingTree();
}
// Collect candidate alignment chains between one read and every reference,
// in both orientations.
//
// Phase 1 scans the read once with a rolling hash and records each KMER-length
// substring's (hash, read_position) in kmer_list; runs of identical hashes
// (same-character repeats) are partially collapsed.  Phase 2 looks each k-mer
// up in the global forward index (basic_index.index) and buckets the resulting
// (ref_pos, read_pos) anchor pairs per reference; phase 3 does the same with
// the reverse index (basic_index.revind) unless SINGLE == 1.  Each non-empty
// bucket is sorted and handed to refine_kmer_index(), which appends candidate
// chains to primary_chain.
//
// Parameters:
//   read          - read sequence; characters that map_value() rejects
//                   (e.g. 'N') reset the rolling hash
//   read_name     - read identifier (only referenced by commented-out logging)
//   dir           - orientation tag forwarded to refine_kmer_index for the
//                   forward pass (the reverse pass passes the global FR)
//   refindex      - reference sequences (.ref = forward, .rev = reverse)
//   primary_chain - output; refine_kmer_index appends per-reference chains
void read_vs_reference(string& read, string& read_name, int dir, vector<reference_index>& refindex, 
				vector<pair<int, vector<pair<int, int> > > >& primary_chain)
{
	time_t start, end;
	vector<pair<long, int> > kmer_list;	// (rolling hash, read position) per k-mer
	//unordered_map<long, int> kmer_list;
	//unordered_map<long, int> kmer_map;
	//vector<pair<int, vector<pair<int, int> > > > primary_chain;

	//cout << "\t readseq: " << read_name << " with length = " << read.length() 
	//		<< " comparing to " << refindex.size() << " references" << endl;
	
	time(&start);

	bool flag = true;	// true => rolling hash must be (re)seeded from scratch
	long hash_key = 0;
	int map_val; 
	int readlen = read.length();
	long prehashval = -1;	// previous k-mer hash, for repeat suppression
	int prehashcnt = 0;	// run length of the current repeated hash

	// --- Phase 1: rolling-hash scan of the read ---------------------------
	for(int k = 0; k < read.length(); k++)
	{
		if(flag == true)
		{
			if(k + KMER > read.length())
				break;
			// Seed the hash with the first KMER-1 characters; the loop body
			// below then appends the KMER-th.  NOTE(review): the local `end`
			// here shadows the outer time_t `end` declared above.
			for(int l = k, end = k, i = KMER - 2; l < end + KMER - 1; l++, k++, i--)
			{
				map_val = map_value(read.at(l));
				if(map_val == -1)
					break;

				hash_key += base_power_kmer[i][map_val];
			
				//cout << "For character " << read.at(k) << ", at position = " << k <<
				//	", the hash_key = " << hash_key << endl;
			}
		}	

		map_val = map_value(read.at(k));
		if(map_val == -1)
		{
			// Invalid character (e.g. 'N'): discard the window and reseed.
			//cout << "Encountered invalid character N ########################" << endl;
			flag = true;
			hash_key = 0;
			continue;
		}
		else
			flag = false;

		hash_key = hash_key * BASE + map_val;
		/*	
		if(kmer_map.find(hash_key) == kmer_map.end())
		{
			kmer_map[hash_key] = 1;//k - KMER + 1;
			kmer_list.push_back(make_pair(hash_key, k - KMER + 1));
		}
		else
		{
			kmer_map[hash_key] += 1;//-1;//need work here
		
			if(kmer_list[kmer_list.size() - 1].first != hash_key)
				kmer_list.push_back(make_pair(hash_key, k - KMER + 1));
			else
				kmer_list[kmer_list.size() - 1].second = k - KMER + 1;
			
		}
		//cout << "For character " << read.at(k) << ", at position = " << k <<
		//		", the hash_key = " << hash_key << endl;
	 	*/
		// Record the k-mer; after more than two consecutive identical hashes
		// (a same-character repeat) only the last position is kept.
	 	if(prehashval != hash_key)
		{
                	kmer_list.push_back(make_pair(hash_key, k - KMER + 1));
			prehashval = hash_key;
			prehashcnt = 1;
		}
		else
		{
			if(prehashcnt > 2)//suppress same character repeat
                                kmer_list[kmer_list.size() - 1].second = k - KMER + 1;
			else
				kmer_list.push_back(make_pair(hash_key, k - KMER + 1));

			prehashcnt += 1;
		}
		
		// Slide the window: remove the contribution of the outgoing character.
		map_val = map_value(read.at(k - KMER + 1));
		hash_key -= base_power_kmer[KMER - 1][map_val];
	}

	// --- Phase 2: forward-strand lookup and per-reference bucketing -------
	{
		cout << "Starting KMER Chain Analysis for " << dir << endl;
		vector<vector<pair<int, int> > > kmer_index;	// one anchor bucket per reference
		for(int i = 0; i < refindex.size(); i++)
		{
			vector<pair<int, int> > kmer_pair;
			kmer_index.push_back(kmer_pair);
		}

		int interval = 1, index, reflen;
		int kmer_ref_loc, kmer_read_loc;

		for(int k = 0; k < kmer_list.size(); k++)
		{
			if(basic_index.index[kmer_list[k].first] == -1)
				continue;
			//vector<int> pos = basic_index.position[basic_index.index[kmer_list[k].first]];
			int ref_location;
			//for(int i = 0; i < pos.size(); i++)
			for(vector<int>::iterator it = basic_index.position[basic_index.index[kmer_list[k].first]].begin(); 
				it != basic_index.position[basic_index.index[kmer_list[k].first]].end(); ++it) 
			{
				ref_location = *it;
				// Negative entries are reference delimiters: -(index+1) switches
				// the current reference and its length.  NOTE(review): `index`
				// and `reflen` stay uninitialized until the first such marker --
				// presumably the position list always starts with one; confirm.
				if(ref_location < 0)//if(pos[i] < 0)
				{
					index = abs(ref_location) - 1;//abs(pos[i]) - 1;
					reflen = refindex[index].ref.length();
					continue;
				}
				kmer_ref_loc = ref_location;// pos[i];
				kmer_read_loc = kmer_list[k].second;
				// NOTE(review): && binds tighter than ||, so this reads
				// CIRCULAR == 0 || (in-range-check); that grouping looks
				// intended (skip the bounds check for circular references).
				if(CIRCULAR == 0 || kmer_ref_loc - kmer_read_loc / 1.3 >= 0 &&
					kmer_ref_loc + (readlen - kmer_read_loc) / 1.3 < reflen)
				{
					//cout << "index = " << index << endl;
					//assert(index >= 0 && index < refindex.size());
					kmer_index[index].push_back(make_pair(kmer_ref_loc, kmer_read_loc));
				}
			}
		}

		// Sort each reference's anchors and extract chains.
		for(int index = 0; index < refindex.size(); index++)
		{
			if(kmer_index[index].size() == 0)
				continue;
			sort(kmer_index[index].begin(), kmer_index[index].end(), compare_function);
			/*for(int i = 0; i < kmer_index[index].size(); i++)
			{
				cout << "first = " << kmer_index[index][i].first << ", and second = " << kmer_index[index][i].second << endl;
			}*/
			refine_kmer_index(kmer_index[index], primary_chain, read, dir, refindex, index);
			kmer_index[index].clear();
		}
	}

	// Single-strand mode: skip the reverse-complement pass.
	if(SINGLE == 1)
	{
		kmer_list.clear();
		return;
	}

	// --- Phase 3: reverse-strand lookup (mirrors phase 2, coordinates flipped)
	{
		cout << "Starting Reverse Analysis for " << FR << endl;
		vector<vector<pair<int, int> > > kmer_index;
		kmer_index.clear();
		for(int i = 0; i < refindex.size(); i++)
		{
			vector<pair<int, int> > kmer_pair;
			kmer_index.push_back(kmer_pair);
		}

		int interval = 1, index, reflen;
		int kmer_ref_loc, kmer_read_loc;

		for(int k = 0; k < kmer_list.size(); k++)
		{
			if(basic_index.revind[kmer_list[k].first] == -1)
				continue;
			//vector<int> pos = basic_index.position[basic_index.revind[kmer_list[k].first]];
			int ref_location;
			//for(int i = 0; i < pos.size(); i++)
			for(vector<int>::iterator it = basic_index.position[basic_index.revind[kmer_list[k].first]].begin();
                                it != basic_index.position[basic_index.revind[kmer_list[k].first]].end(); ++it)
			{
				ref_location = *it;
				if(ref_location < 0)//if(pos[i] < 0)
				{
					index = abs(ref_location) - 1;//index = abs(pos[i]) - 1;
					reflen = refindex[index].rev.length();
					continue;
				}
				// Map both coordinates onto the reverse strand.
				kmer_ref_loc = reflen - KMER - ref_location;
				//kmer_ref_loc = reflen - KMER - pos[i];
				kmer_read_loc = readlen - KMER - kmer_list[k].second;
				// NOTE(review): same || / && precedence as the forward pass.
				if(CIRCULAR == 0 || kmer_ref_loc - kmer_read_loc / 1.3 >= 0 &&
					kmer_ref_loc + (readlen - kmer_read_loc) / 1.3 < reflen)
				{
					//cout << "index = " << index << endl;
					//assert(index >= 0 && index < refindex.size());
					kmer_index[index].push_back(make_pair(kmer_ref_loc, kmer_read_loc));
				}
			}
		}

		for(int index = 0; index < refindex.size(); index++)
		{
			if(kmer_index[index].size() == 0)
				continue;
			sort(kmer_index[index].begin(), kmer_index[index].end(), compare_function);
			/*for(int i = 0; i < kmer_index[index].size(); i++)
			{
				cout << "first = " << kmer_index[index][i].first << ", and second = " << kmer_index[index][i].second << endl;
			}*/
			refine_kmer_index(kmer_index[index], primary_chain, read, FR, refindex, index);
			kmer_index[index].clear();
		}
	}



	time(&end);
	//cout << "Total time taken inside read_vs_reference = " << difftime(end, start) << endl;
	//cout << "size of the candidate idices = " << primary_chain.size() << endl << endl;

	//kmer_map.clear();	
	kmer_list.clear();
	
	return;
}