Example #1
0
int HdfProcessor::getPointsNumber(const Group& group) const {
	DataSet dataSet = group.openDataSet(COORDINATES_DATASET_NAMES[Step::X]);
	DataSpace dataSpace = dataSet.getSpace();
	int pointsNumber = dataSpace.getSimpleExtentNpoints();
	dataSet.close();
	return pointsNumber;
}
Example #2
0
void Weather::load(const std::string& name)
{
	std::cout << "Loading " << name << std::endl;
	H5File file(name, H5F_ACC_RDONLY);
	DataSet dataset = file.openDataSet("weather_data");
	std::cout << "Number of attributes: " << dataset.getNumAttrs() << std::endl;
	dataset.openAttribute("resolution").read(PredType::NATIVE_UINT, &resolution);
	//float bounds[4];
	dataset.openAttribute("bounds").read(PredType::NATIVE_DOUBLE, &bounds);

	std::cout << "Resolution: " << resolution << std::endl;
	std::cout << "Bounds: " << bounds.minx << "," << bounds.miny << "," << bounds.maxx << "," << bounds.maxy << std::endl;
	DataSpace ds = dataset.getSpace();
	int dim = ds.getSimpleExtentNdims();
	std::cout << "Dimensions: " << dim << std::endl;
	hsize_t dims_out[3];
	ds.getSimpleExtentDims(dims_out, NULL);
	std::cout << "Size: " << dims_out[0] << "," << dims_out[1] << "," << dims_out[2] << std::endl;
	dimX = dims_out[1];
	dimY = dims_out[2];
	numScenarios = dims_out[0];
	std::cout << "Size: " << dims_out[0] * dims_out[1] * dims_out[2] << std::endl;
	std::cout << "Dataset typeclass: " << dataset.getTypeClass() << "," << H5T_COMPOUND << std::endl;
	std::cout << "Dataset size: " << dataset.getInMemDataSize() << "," << H5T_COMPOUND << std::endl;


	CompType mtype2(sizeof(WeatherData));
	mtype2.insertMember("wind_xcomp", HOFFSET(WeatherData, wind_xcomp), PredType::NATIVE_FLOAT);
	mtype2.insertMember("wind_ycomp", HOFFSET(WeatherData, wind_ycomp), PredType::NATIVE_FLOAT);
	mtype2.insertMember("hs", HOFFSET(WeatherData, hs), PredType::NATIVE_FLOAT);
	mtype2.insertMember("light", HOFFSET(WeatherData, light), PredType::NATIVE_CHAR);
	//WeatherData wd[106938134];

	try {
		weatherData = (WeatherData *)malloc(ds.getSimpleExtentNpoints() * sizeof(WeatherData));
		dataset.read(weatherData, mtype2);
		std::cout << "Finished" << std::endl;


		//size_t ix = i1*dims_out[1] * dims_out[2] + i2 * dims_out[2] + i3;
		//printf("%f %f %f %d\n", wd[ix].wind_xcomp, wd[ix].wind_ycomp, wd[ix].hs, wd[ix].light);
	}
	catch (int e)
	{
		std::cout << "An exception occurred. Exception Nr. " << e << '\n';
	}
}
Example #3
0
/**
   Read synapse list from hdf5 dataset and for each cell store the
   total Gbar for each synapse type (ampa, nmda, gaba).
 */
void cell_syn_stat(const DataSet& syndataset,
                   set<string, comparator>& cellset,
                   map<string, double>& cell_ampa_map,
                   map<string, double>& cell_nmda_map,
                   map<string, double>& cell_gaba_map){
    DataSpace dataspace;
    try{
        dataspace = syndataset.getSpace();
    } catch (DataSetIException error){
        error.printError();
        return;
    }
    hsize_t len = dataspace.getSimpleExtentNpoints();
    syn_t * gbar_dataset = (syn_t*)calloc(len, sizeof(syn_t));
    assert(gbar_dataset != NULL);
    syndataset.read(gbar_dataset, syndataset.getDataType());
    // This block reads in all the synapses and sums up the gbar on
    // all compartments for each cell
    for (hsize_t ii = 0; ii < len; ++ii){
        map<string, double> * sum_map;
        // Update total Gbar for each cell
        if (string(gbar_dataset[ii].type, 4) == "ampa"){
            sum_map = &cell_ampa_map;
        } else if (string(gbar_dataset[ii].type, 4) == "nmda"){
            sum_map = &cell_nmda_map;
        } else if (string(gbar_dataset[ii].type, 4) == "gaba"){
            sum_map = &cell_gaba_map;
        } else {
            cerr << "Error: unrecognized synapse type '" << gbar_dataset[ii].type << " on " << gbar_dataset[ii].dest << endl;
            continue;
        }
        string comp_path(gbar_dataset[ii].dest);
        string cellname = comp_path.substr(0, comp_path.rfind('/'));
        cellset.insert(cellname);
        map<string, double>::iterator it = sum_map->find(cellname);
        if (it == sum_map->end()){
            sum_map->insert(pair<string, double>(cellname, gbar_dataset[ii].Gbar));
        } else {
            it->second += gbar_dataset[ii].Gbar;
        }
    }
    free(gbar_dataset);    
}
Example #4
0
bool Wrapper_i_hdf::priv_init(int fr_count)
{ 
  if(locked_)
    return false;
  
  try
  {
    
    
    H5File * file = new H5File( file_name_, H5F_ACC_RDONLY );  
    if(two_d_data_)
    {
      const string nop_str = "number-of-planes";
      Group g = file->openGroup("/");
      Attr_list_hdf atr_list(&g);
      if(!atr_list.contains_attr(nop_str))
	throw logic_error("wrapper_i_hdf: number-of-planes not found in file");
      atr_list.get_value(nop_str,frame_count_);
    }
    else
    {
      /**
	 @todo deal with this better, don't have any data 
       */
      frame_count_ = 1;
    }
    

    

    if(fr_count != 0)
    {
      if(fr_count + start_ > frame_count_)
	throw runtime_error("wrapper_i_hdf: asking for too many frames");
      frame_count_ = fr_count;
      
    }
    
    
    
    // logic to set up data maps and data storage
    int i_count =0;
    int f_count =0;
    int c_count =0;
    
    for(set<pair<D_TYPE,int> >::iterator it = data_types_.begin();
	it != data_types_.end();++it)
    {
      D_TYPE cur = (*it).first;
      
      switch(v_type(cur))
      {
      case V_INT:
	data_i_.push_back(vector<int*>(frame_count_));
	d_mapi_.set_lookup(cur,i_count++);
	break;
      case V_FLOAT:
	data_f_.push_back(vector<float*>(frame_count_));
	d_mapf_.set_lookup(cur,f_count++);
	break;
      case V_COMPLEX:
	data_c_.push_back(vector<complex<float>*>(frame_count_));
	d_mapc_.set_lookup(cur,c_count++);
	break;
      case V_STRING:
      case V_BOOL:
      case V_GUID:
      case V_TIME:
      case V_UINT:
      case V_ERROR:
	throw logic_error("wrapper_i_hdf: The data type should not have been " + VT2str_s(v_type(cur)));
      }
    }

    frame_c_.reserve(frame_count_);
    if(two_d_data_)
      frame_zdata_.resize(frame_count_);
    

    // set the size of the md_store
    set_Md_store_size(frame_count_);
    
    // fill in data
    // assume that the frames run from 0 to frame_count_
    for(unsigned int j = 0; j<frame_count_;++j)
    {
      string frame_name = format_name(j+start_);
      Group * frame = new Group(file->openGroup(frame_name));
      
      Attr_list_hdf g_attr_list(frame);
      
      set_Md_store(j,new Md_store(g_attr_list));
      
      
      if(two_d_data_)
      {
	if(!g_attr_list.contains_attr("z-position"))
	  throw logic_error("wrapper_i_hdf: z-position not found");
	g_attr_list.get_value("z-position",frame_zdata_[j]);
      }
      
      
      for(set<pair<D_TYPE,int> >::iterator it = data_types_.begin();
	  it != data_types_.end();++it)
      {
	
	if(two_d_data_ && ((*it).first)==utilities::D_ZPOS)
	  continue;

	// ***************	
	DataSet * dset = new DataSet(frame->openDataSet(format_dset_name((*it).first,(*it).second)));
	// ***************	
	DataSpace dspace = dset-> getSpace();
	dspace.selectAll();
	int part_count = dspace.getSimpleExtentNpoints();
	
	// if the first data set for this frame set the number of particles
	if(frame_c_.size()==j)
	  frame_c_.push_back(part_count);
	// if the part_count is less than a previous dataset, set the
	// number of particles to be the smaller number.  This
	// shouldn't result in memory leaks as the bare arrays are
	// never returned
	else if(frame_c_.at(j) > part_count)
	  frame_c_.at(j) = part_count;
	// if the current set has more than a previous set, keep the
	// old value.  these checks are a kludge, need to deal with
	// this better at the level of writing out the data
	else if(frame_c_.at(j) < part_count)
	  continue;
	// if(frame_c_.at(j) != part_count)
	//   throw runtime_error("wrapper_i_hdf: data sets different sizes");
	D_TYPE cur_type = (*it).first;
	
	switch(v_type(cur_type))
	{
	case V_INT:
	  data_i_.at(d_mapi_(cur_type)).at(j) = new int [part_count];
	  dset->read(data_i_.at(d_mapi_(cur_type)).at(j),PredType::NATIVE_INT);
	  
	  break;
	case V_FLOAT:
	  data_f_.at(d_mapf_(cur_type)).at(j) = new float [part_count];
	  dset->read(data_f_.at(d_mapf_(cur_type)).at(j),PredType::NATIVE_FLOAT);
	  break;
	case V_COMPLEX:
	  throw logic_error("not implemented yet");
	  
	  break;
	case V_STRING:
	case V_BOOL:
	case V_GUID:
	case V_TIME:
	case V_UINT:
	case V_ERROR:
	  throw logic_error("wrapper_i_hdf: The data type should not have been " + VT2str_s(v_type(cur_type)));
      	}
	
	// clean up hdf stuff
	dset->close();
	delete dset;
	dset = NULL;
      }
      frame->close();
      
      delete frame;
      frame = NULL;
      
    }
    file->close();
    

    delete file;
    
    file = NULL;
    
    // shift all of the z by the minimum to start at zero
    if(two_d_data_)
    {
      float min = frame_zdata_[0];
      for(unsigned int j = 0; j<frame_count_;++j)
	if(frame_zdata_[j]<min)
	  min = frame_zdata_[j];
      for(unsigned int j = 0; j<frame_count_;++j)
	frame_zdata_[j] -= min ;
    }


  }
  catch(Exception & e)
  {
    // clean up data if it died
    e.printError();
    
    throw runtime_error("wrapper_i_hdf: constructor error");
    
  }
  
  for(unsigned int j= 0; j<frame_count_;++j)
    total_part_count_ += frame_c_.at(j);

  return true;
  
}
/**
 * Read the full contents of dataset `dset_name` into `data` and its
 * per-axis extents into `dims`.
 *
 * The dataset is opened from the file when no group is open (or when
 * the name is absolute, i.e. starts with '/'), otherwise from the
 * currently open group.  `mtype` is the in-memory element type; its
 * type class must match the dataset's on-disk class.
 *
 * @throws runtime_error if the wrapper is closed, the dataset cannot
 *         be opened, or the type classes do not match
 * @throws logic_error if neither a file nor a group is available
 */
void Generic_wrapper_hdf::get_dset_priv(vector<T> & data,std::vector<unsigned int> & dims, const std::string & dset_name,const DataType & mtype) const
{
  if (!(wrapper_open_))
    throw runtime_error("wrapper must be open to read a dataset");

  dims.clear();
  data.clear();

  // open the data set from the file or the current group
  DataSet dset;
  try
  {
  if(!group_open_ || dset_name[0] == '/')
  {
    if(file_)
      try
      {
        dset = file_->openDataSet(dset_name);
      }
      // FIX: catch by const reference (was non-const reference)
      catch(const FileIException &e)
      {
        throw runtime_error(e.getDetailMsg());
      }

    else
      throw runtime_error("there is no open file");

  }
  else if(group_)
  {
    dset = group_->openDataSet(dset_name);
  }
  else
    throw logic_error("generic_wrapper_hdf:: can't read from a closed group");
  }
  catch(const Exception &e )
  {
    // translate any remaining HDF error; the runtime_errors thrown
    // above are not H5 Exceptions and propagate past this handler
    std::string er_msg = "error opening hdf \n" + e.getDetailMsg();
    throw runtime_error(er_msg);
  }

  // the in-memory and on-disk type classes must agree
  H5T_class_t dset_class_t = dset.getTypeClass();
  H5T_class_t mem_class_t = mtype.getClass();

  if(dset_class_t != mem_class_t)
    throw runtime_error("Data type miss-match");

  // NOTE(review): integer signedness is deliberately NOT checked (a
  // disabled check comparing IntType signs existed here) -- a signed/
  // unsigned mismatch is silently accepted.

  // get the data space and select everything in it
  DataSpace dataspace = dset.getSpace();
  dataspace.selectAll();
  // get the rank
  hsize_t rank = dataspace.getSimpleExtentNdims();

  // get the dimensionality
  vector <hsize_t> tdims;
  tdims.resize(rank);
  dataspace.getSimpleExtentDims(tdims.data(),NULL);

  // copy to the return vector
  dims.resize(rank);
  for(hsize_t j = 0; j<rank;++j)
    dims[j] = (unsigned int)tdims[j];

  // size the output buffer to the total number of entries and read
  hsize_t total = dataspace.getSimpleExtentNpoints();
  data.resize(total);
  dset.read( data.data(), mtype, dataspace, dataspace );
}