Example #1
bool PropertyBag::get(const string &key, ivec2 &vec, size_t instance) const {
	string _s;
	
	if (!get(key, _s, instance)) {
		return false;
	}
	
	string s = toLowerCase(_s);
	
	boost::char_separator<char> delimeters("(,)\t\n");
	boost::tokenizer<boost::char_separator<char> > tokens(s, delimeters);
	
	boost::tokenizer<boost::char_separator<char> >::const_iterator i = tokens.begin();
	
	if (i == tokens.end())
		return false;
		
	if (string(*i) != "&ivec2")
		return false;
	
	// Make sure both components are actually present before dereferencing,
	// so a malformed value cannot advance the iterator past the end.
	if (++i == tokens.end())
		return false;
	vec.x = stoi(*i);
	
	if (++i == tokens.end())
		return false;
	vec.y = stoi(*i);
	
	return true;
}
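PropertyBag, ivec2, and toLowerCase belong to the surrounding engine and are not shown here, but the tokenizing step can be exercised on its own. Below is a minimal, self-contained sketch (the "&ivec2(3, 4)" value string is hypothetical) showing how the same boost::char_separator splits such a value into the tag and its two components:

#include <boost/tokenizer.hpp>
#include <iostream>
#include <string>

int main() {
	// Hypothetical property value in the format the getter above expects.
	std::string s = "&ivec2(3, 4)";
	
	// Same separator set as the getter: parentheses, commas, tabs, newlines.
	boost::char_separator<char> delimiters("(,)\t\n");
	boost::tokenizer<boost::char_separator<char> > tokens(s, delimiters);
	
	// Prints: "&ivec2", "3", " 4" -- stoi skips the leading space on the last token.
	for (boost::tokenizer<boost::char_separator<char> >::const_iterator i = tokens.begin();
	     i != tokens.end(); ++i) {
		std::cout << "token: '" << *i << "'" << std::endl;
	}
	return 0;
}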
Example #2
// -----------------------------------------------------------------------------
//
// -----------------------------------------------------------------------------
int DxReader::readFile()
{
  std::stringstream ss;
  VoxelDataContainer* m = getVoxelDataContainer();
  if (NULL == m)
  {
    ss.clear();
    ss << "DataContainer Pointer was NULL and Must be valid." << __FILE__ << "("<<__LINE__<<")";
    addErrorMessage(getHumanLabel(), ss.str(), -5);
    setErrorCondition(-5);
    return -1;
  }

  std::string delimeters(", ;\t"); /* delimiter characters used to split each line */
  std::vector<std::string> tokens; /* vector to store the split data */

  int error, spin; /* dummy variables */

  bool finished_header, finished_data;
  finished_header = true;
  finished_data = false;
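  // finished_header starts as true because readHeader() (Example #3) is
  // presumably called before this function and has already consumed the header.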
  size_t index = 0;

  size_t totalPoints = m->getTotalPoints();

  // Remove the array that we are about to create. The superclass's 'execute()' method runs a
  // 'dataCheck()' before this function, which leaves an array named m_GrainIdsArrayName of
  // size 1 rather than the size we are about to read; recreating it over that would raise
  // error -501, so get rid of the existing array first.
  m->removeCellData(m_GrainIdsArrayName);
  // Rerun the data check in order to allocate the array to store the data from the .dx file.
//  dataCheck(false, totalPoints, m->getNumFieldTuples(), m->getNumEnsembleTuples());
  CREATE_NON_PREREQ_DATA(m, DREAM3D, CellData, GrainIds, ss, int32_t, Int32ArrayType, 0, totalPoints, 1)


  if (getErrorCondition() < 0)
  {
    m_InStream.close();
    return -1;
  }

  for (std::string line; std::getline(m_InStream, line);)
  {

    // Get the remaining lines of the header and ignore
    tokens.clear();
    error = 0;
    tokenize(line, tokens, delimeters);

    size_t total = m->getTotalPoints();
    if( index == total || ( finished_header && tokens.size() != 0 && tokens[0] == "attribute") )
    {
      finished_data = true;
    }

    // Allocate the DataArray at this point:
    if(finished_header && !finished_data)
    {
      for (size_t in_spins = 0; in_spins < tokens.size(); in_spins++)
      {
        error += sscanf(tokens[in_spins].c_str(), "%d", &spin);
        m_GrainIds[index] =  spin;
        ++index;
      }
    }
  }

  if(index != static_cast<size_t>(m->getTotalPoints()))
  {
    ss.clear();
    ss << "ERROR: data size does not match header dimensions" << std::endl;
    ss << "\t" << index << "\t" << m->getTotalPoints() << std::endl;
    setErrorCondition(-495);
    addErrorMessage(getHumanLabel(), ss.str(), getErrorCondition());
    m_InStream.close();
    return getErrorCondition();
  }

  tokens.clear();
  m_InStream.close();


  // Find the unique set of grain ids
//  std::set<int32_t> grainIdSet;
//  for (int64_t i = 0; i < totalPoints; ++i)
//  {
//    grainIdSet.insert(m_GrainIds[i]);
//  }
//  for (std::set<int32_t>::iterator iter = grainIdSet.begin(); iter != grainIdSet.end(); ++iter )
//  {
//    std::cout << "Grain ID: " << (*iter) << std::endl;
//  }

  notifyStatusMessage("Complete");
  return 0;
}
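Examples #2, #3, and #4 all call a tokenize(line, tokens, delimiters) helper that is not reproduced in these snippets. The following is a minimal sketch with the signature those calls imply, splitting a string on any character in the delimiter set; the real project's helper may differ in detail:

#include <string>
#include <vector>

// Appends to 'tokens' every maximal run of characters in 'str' that contains
// no character from 'delimiters'. Signature inferred from the calls above.
void tokenize(const std::string& str, std::vector<std::string>& tokens,
              const std::string& delimiters)
{
  // Skip any leading delimiters, then alternate between token runs and delimiter runs.
  std::string::size_type lastPos = str.find_first_not_of(delimiters, 0);
  std::string::size_type pos = str.find_first_of(delimiters, lastPos);

  while (std::string::npos != pos || std::string::npos != lastPos)
  {
    tokens.push_back(str.substr(lastPos, pos - lastPos));
    lastPos = str.find_first_not_of(delimiters, pos);
    pos = str.find_first_of(delimiters, lastPos);
  }
}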
Example #3
//-----------------------------------------------------------------------------
//
// -----------------------------------------------------------------------------
int DxReader::readHeader()
{
  VoxelDataContainer* m = getVoxelDataContainer();
  std::stringstream ss;
  int error = 0;

  std::string line;
  std::string delimeters(", ;\t"); /* delimiter characters used to split each line */
  std::vector<std::string> tokens; /* vector to store the split data */

  getline(m_InStream, line, '\n');
  tokenize(line, tokens, delimeters);

  // Process the header information and look for the std::string "counts"
  // Then read the data size after that
  size_t pos1 = 0;
  while (pos1 == 0)
  { // continue until we find the keyword
    for (size_t i = 0; i < tokens.size(); i++)
    {
      if(tokens[i] == "counts")
      {
        pos1 = i;
      }
    }
    // Read the next line of the header if we did not find the keyword
    // in the line
    if(pos1 == 0)
    {
      tokens.clear();
      getline(m_InStream, line, '\n');
      tokenize(line, tokens, delimeters);
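      // A line of 20 tokens is presumably already a data line (the DX writer
      // emits 20 values per line, as noted further down), which means the
      // header never contained the "counts" keyword.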
      if(tokens.size() == 20)
      {
        ss.clear();
        ss << "ERROR: Unable to read data dimensions from the header" << std::endl;
        addErrorMessage(getHumanLabel(), ss.str(), -7);
        setErrorCondition(-499);
        m_InStream.close();
        return -499;
      }
    }
  }

  int nx = 0;
  int ny = 0;
  int nz = 0;

  if(pos1 != 0)
  {
    error = 0;
    error += sscanf(tokens[pos1 + 1].c_str(), "%d", &nz);
    error += sscanf(tokens[pos1 + 2].c_str(), "%d", &ny);
    error += sscanf(tokens[pos1 + 3].c_str(), "%d", &nx);
    tokens.clear();
    // The dimensions listed in the DX file are always one greater
    // than the actual dimensions
    nx--;
    ny--;
    nz--;
  }

//  std::cout << "INFO: DX data dimensions: " << std::endl;
//  std::cout << "nz= " << nz << std::endl;
//  std::cout << "ny= " << ny << std::endl;
//  std::cout << "nx= " << nx << std::endl;

  //The DX file has a unique format of 20 entries on each line. I have
  //no idea who initiated this insanity but I am about to perpetuate it.
  //
  //The simplest thing to do is to read the entire dataset into one
  //long vector and then read that vector to assign values to the grid

  //  ADR:  6 Sep 08; time to make the input much more general!
  //  equivalent to list-directed input in Fortran, actually !!

  pos1 = 0;
  while (pos1 == 0)
  { // continue until we find the keyword
    for (size_t i = 0; i < tokens.size(); i++)
    {
      if(tokens[i] == "items")
      {
        pos1 = i;
      }
    }
    // Read the next line of the header if we did not find the keyword
    // in the line
    if(pos1 == 0)
    {
      tokens.clear();
      getline(m_InStream, line, '\n');
      tokenize(line, tokens, delimeters);
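      // As above, 20 tokens means we have run into the data without finding
      // the "items" keyword, so treat the header as malformed.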
      if(tokens.size() == 20)
      {
        ss.clear();
        ss << "ERROR: Unable to locate the last header line" << std::endl;
        addErrorMessage(getHumanLabel(), ss.str(), -8);
        setErrorCondition(-496);
        m_InStream.close();
        return -496;
      }
    }
  } // when we get here, we are looking at data
  int points = 0;
  if(pos1 != 0)
  {
    error = 0;
    error += sscanf(tokens[pos1 + 1].c_str(), "%d", &points);
    tokens.clear();
  }
  m->setDimensions(nx, ny, nz);
//  std::cout << "Compare no. points " << points << " with x*y*z: " << nx * ny * nz << std::endl;
  return error;
}
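To make the keyword scan above concrete, here is a small self-contained sketch that runs the same "counts" search on a hypothetical OpenDX-style header line (a plain whitespace split is enough for this sample; real headers may vary):

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

int main()
{
  // Hypothetical OpenDX-style header line.
  std::string line = "object 1 class gridpositions counts 118 118 118";

  std::vector<std::string> tokens;
  std::istringstream iss(line);
  for (std::string t; iss >> t;) { tokens.push_back(t); }

  // Locate "counts" the same way readHeader() does, then read the three values
  // that follow and subtract one, since the counts in the file are one greater
  // than the actual grid dimensions.
  size_t pos1 = 0;
  for (size_t i = 0; i < tokens.size(); i++)
  {
    if (tokens[i] == "counts") { pos1 = i; }
  }
  if (pos1 != 0)
  {
    int nz = std::stoi(tokens[pos1 + 1]) - 1;
    int ny = std::stoi(tokens[pos1 + 2]) - 1;
    int nx = std::stoi(tokens[pos1 + 3]) - 1;
    std::cout << "nx=" << nx << " ny=" << ny << " nz=" << nz << std::endl; // 117 117 117
  }
  return 0;
}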
Example #4
/**
 * Reads a legacy PH file: a header line with the grid dimensions, two more
 * header lines that are ignored, and then the spin values.
 * @param FileName path of the PH file to read
 * @param data vector that receives the spin values
 * @param nx X Dimension
 * @param ny Y Dimension
 * @param nz Z Dimension
 * @return 0 on success, -1 on error
 */
int  ReadPHFile(std::string FileName, std::vector<int> &data, int &nx, int &ny, int &nz)
{
  std::string line;
  line.resize(1024);

  std::string delimeters(", ;\t"); /* delimiter characters used to split each line */
  std::vector<std::string> tokens; /* vector to store the split data */
  //std::vector<int> data; /* vector to store the data */

  int error, spin; /* dummy variables */
  //int nx, ny, nz;

  std::ifstream InFile;
  InFile.open(FileName.c_str(), std::ios_base::binary);
  if (!InFile)
  {
    std::cout << "Failed to open: " << FileName << std::endl;
    return -1;
  }

  getline(InFile, line);

  tokenize(line, tokens, delimeters);

  // Process the header information from the PH file: the first line must
  // contain the three grid dimensions.
  error = 0;
  if (tokens.size() >= 3)
  {
    error += sscanf(tokens[0].c_str(), "%d", &nx);
    error += sscanf(tokens[1].c_str(), "%d", &ny);
    error += sscanf(tokens[2].c_str(), "%d", &nz);
  }
  if (error != 3) // all three dimensions must parse; sscanf returns the count converted
  {
    std::cout << "Error parsing Dimensions from ph file. The line that is being parsed was \n'" <<
       line << "'" <<  std::endl;
    return -1;
  }
  tokens.clear();

  //  cout << "INFO: PH file grid size: " << nx << "\t" << ny << "\t" << nz << endl;;

  //MCgrid3D* grid = new grid(nx,ny,nz);

  // Get the remaining two lines of the header and ignore
  getline(InFile, line, '\n');
  getline(InFile, line, '\n');

  //The PH file has a unique format of 20 entries on each line. I have
  //no idea who initiated this insanity but I am about to perpetuate it.
  //
  //The simplest thing to do is to read the entire dataset into one
  //long vector and then read that vector to assign values to the grid

  while (getline(InFile, line, '\n'))
  {
    tokens.clear();
    error = 0;
    tokenize(line, tokens, delimeters);
    //        cout << line << endl;
    //        for(int i=0; i < tokens.size(); i++ )
    //              cout << setw(6) << tokens[i];
    //        cout << endl;

    for (size_t in_spins = 0; in_spins < tokens.size(); in_spins++)
    {
      error += sscanf(tokens[in_spins].c_str(), "%d", &spin);
      data.push_back(spin);
    }
    //        if(error != 20)
    //              {
    //                cout << "ERROR: Invalid number of line entries in PH file" << endl;
    //              }
  }

  tokens.clear();

  InFile.close();
  return 0;
}
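For completeness, a minimal caller for ReadPHFile() might look like the following ("grains.ph" is a hypothetical path, and the function above is assumed to be linked in):

#include <iostream>
#include <string>
#include <vector>

int main()
{
  std::vector<int> data;
  int nx = 0, ny = 0, nz = 0;

  // ReadPHFile() is the function defined above.
  if (ReadPHFile("grains.ph", data, nx, ny, nz) < 0)
  {
    std::cerr << "Failed to read PH file" << std::endl;
    return 1;
  }

  // Report what was read; callers typically check data.size() against the
  // dimensions from the header.
  std::cout << "Read " << data.size() << " spins for a "
            << nx << "x" << ny << "x" << nz << " grid" << std::endl;
  return 0;
}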