Example No. 1
File: fclib.c Project: xhub/fclib
/** read problem info */
static struct fclib_info* read_problem_info (hid_t id)
{
  struct fclib_info *info;
  H5T_class_t class_id;
  hsize_t dim;
  size_t size;

  MM (info = malloc (sizeof (struct fclib_info)));

  if (H5LTfind_dataset (id, "title"))
  {
    IO (H5LTget_dataset_info  (id, "title", &dim, &class_id, &size));
    MM (info->title = malloc (sizeof (char [size])));
    IO (H5LTread_dataset_string (id, "title", info->title));
  }
  else info->title = NULL;

  if (H5LTfind_dataset (id, "description"))
  {
    IO (H5LTget_dataset_info  (id, "description", &dim, &class_id, &size));
    MM (info->description = malloc (sizeof (char [size])));
    IO (H5LTread_dataset_string (id, "description", info->description));
  }
  else info->description = NULL;

  if (H5LTfind_dataset (id, "math_info"))
  {
    IO (H5LTget_dataset_info  (id, "math_info", &dim, &class_id, &size));
    MM (info->math_info = malloc (sizeof (char [size])));
    IO (H5LTread_dataset_string (id, "math_info", info->math_info));
  }
  else info->math_info = NULL;

  return info;
}
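
The MM and IO wrappers used above are fclib-internal checking macros whose definitions are not part of this snippet. Purely as an illustration (these are assumed stand-ins, not fclib's real macros), they could be implemented along these lines:

#include <stdio.h>
#include <stdlib.h>

/* Assumed stand-ins for fclib's MM/IO macros: bail out with a message
 * when a malloc returns NULL or an HDF5 call returns a negative status. */
#define MM(expr)                                          \
  do {                                                    \
    if (!(expr)) {                                        \
      fprintf (stderr, "out of memory: %s\n", #expr);     \
      exit (EXIT_FAILURE);                                \
    }                                                     \
  } while (0)

#define IO(expr)                                          \
  do {                                                    \
    if ((expr) < 0) {                                     \
      fprintf (stderr, "HDF5 call failed: %s\n", #expr);  \
      exit (EXIT_FAILURE);                                \
    }                                                     \
  } while (0)
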
Example No. 2
template <typename Dtype>
void SGDSolver<Dtype>::RestoreSolverStateFromHDF5(const string& state_file) {
#ifdef USE_HDF5
  hid_t file_hid = H5Fopen(state_file.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT);
  CHECK_GE(file_hid, 0) << "Couldn't open solver state file " << state_file;
  this->iter_ = hdf5_load_int(file_hid, "iter");
  if (H5LTfind_dataset(file_hid, "learned_net")) {
    string learned_net = hdf5_load_string(file_hid, "learned_net");
    this->net_->CopyTrainedLayersFrom(learned_net);
  }
  this->current_step_ = hdf5_load_int(file_hid, "current_step");
  hid_t history_hid = H5Gopen2(file_hid, "history", H5P_DEFAULT);
  CHECK_GE(history_hid, 0) << "Error reading history from " << state_file;
  int state_history_size = hdf5_get_num_links(history_hid);
  CHECK_EQ(state_history_size, history_.size())
      << "Incorrect length of history blobs.";
  for (int i = 0; i < history_.size(); ++i) {
    ostringstream oss;
    oss << i;
    hdf5_load_nd_dataset<Dtype>(history_hid, oss.str().c_str(), 0,
                                kMaxBlobAxes, history_[i].get());
  }
  H5Gclose(history_hid);
  H5Fclose(file_hid);
#else
  LOG(FATAL) << "RestoreSolverStateFromHDF5 requires hdf5;"
             << " compile with USE_HDF5.";
#endif  // USE_HDF5
}
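
hdf5_load_int, hdf5_load_string, hdf5_get_num_links and hdf5_load_nd_dataset are Caffe helper functions that are not shown in this snippet. Assuming the integer helper is a thin wrapper over the H5LT calls seen in the other examples, a minimal sketch (with plain asserts instead of Caffe's CHECK macros) might look like:

#include <assert.h>
#include "hdf5.h"
#include "hdf5_hl.h"

/* Assumed sketch of an hdf5_load_int-style helper: verify that the
 * dataset exists, then read it as a single native int. */
static int load_int_dataset(hid_t loc_id, const char *dataset_name)
{
    int value = 0;
    herr_t status;

    /* The dataset must exist before we try to read it. */
    assert(H5LTfind_dataset(loc_id, dataset_name));
    status = H5LTread_dataset_int(loc_id, dataset_name, &value);
    assert(status >= 0);
    return value;
}
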
Example No. 3
/**
 * Execute the algorithm.
 */
void LoadSassena::exec()
{
  //auto gws=boost::dynamic_pointer_cast<API::WorkspaceGroup>(getProperty("OutputWorkspace"));
  //API::WorkspaceGroup_sptr gws=getProperty("OutputWorkspace");
  API::Workspace_sptr ows=getProperty("OutputWorkspace");

  API::WorkspaceGroup_sptr gws=boost::dynamic_pointer_cast<API::WorkspaceGroup>(ows);
  if(gws && API::AnalysisDataService::Instance().doesExist( gws->name() ) )
  {
    //gws->deepRemoveAll(); // remove workspace members
    API::AnalysisDataService::Instance().deepRemoveGroup( gws->name() );
  }
  else
  {
    gws = boost::make_shared<API::WorkspaceGroup>();
    setProperty("OutputWorkspace", boost::dynamic_pointer_cast<API::Workspace>(gws));
  }

  //populate m_validSets
  int nvalidSets = 4;
  const char* validSets[] = { "fq", "fq0", "fq2", "fqt"};
  for(int iSet=0; iSet<nvalidSets; iSet++) this->m_validSets.push_back( validSets[iSet] );

  //open the HDF5 file for reading
  m_filename = this->getPropertyValue("Filename");
  hid_t h5file = H5Fopen(m_filename.c_str(),H5F_ACC_RDONLY,H5P_DEFAULT);
  if( h5file < 0)
  {
    this->g_log.error("Cannot open "+m_filename);
    throw Kernel::Exception::FileError("Unable to open:" , m_filename);
  }

  //find out the sassena version used
  char cversion[16];
  if ( H5LTget_attribute_string( h5file, "/", "sassena_version", cversion ) < 0 )
  {
    this->g_log.error("Unable to read Sassena version");
  }
  //const std::string version(cversion);
  //determine which loader protocol to use based on the version
  //to be done at a later time, maybe implement a Version class
  std::vector<int> sorting_indexes;
  const MantidVec qvmod = this->loadQvectors( h5file, gws, sorting_indexes);
  //iterate over the valid sets
  std::string setName;
  for(std::vector<std::string>::const_iterator it=this->m_validSets.begin(); it!=this->m_validSets.end(); ++it){
    setName = *it;
    if(H5LTfind_dataset(h5file,setName.c_str())==1)
    {
      if(setName == "fq" || setName == "fq0" || setName == "fq2")
        this->loadFQ( h5file, gws, setName, qvmod, sorting_indexes);
      else if(setName == "fqt")
        this->loadFQT( h5file, gws, setName, qvmod, sorting_indexes);
    }
    else
      this->g_log.information("Dataset "+setName+" not present in file");
  }// end of iterate over the valid sets

  H5Fclose(h5file);
} // end of LoadSassena::exec()
Example No. 4
herr_t H5IMmake_palette( hid_t loc_id,
                        const char *pal_name,
                        const hsize_t *pal_dims,
                        const unsigned char *pal_data )

{

    int has_pal;

    /* check the arguments */
    if (pal_name == NULL) 
      return -1;

    /* Check if the dataset already exists */
    has_pal = H5LTfind_dataset( loc_id, pal_name );

    /* It exists. Return */
    if ( has_pal == 1 )
        return 0;

    /* Make the palette dataset. */
    if ( H5LTmake_dataset( loc_id, pal_name, 2, pal_dims, H5T_NATIVE_UCHAR, pal_data ) <  0 )
        return -1;

    /* Attach the attribute "CLASS" to the >>palette<< dataset*/
    if ( H5LTset_attribute_string( loc_id, pal_name, "CLASS", PALETTE_CLASS ) < 0)
        return -1;

    /* Attach the attribute "PAL_VERSION" to the >>palette<< dataset*/
    if ( H5LTset_attribute_string( loc_id, pal_name, "PAL_VERSION", "1.2" ) < 0)
        return -1;

    return 0;

}
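
A short usage sketch for the function above: create a new file and store a 256 by 3 grayscale palette in it (the file name, dataset name and ramp data are illustrative, not taken from the HDF5 sources):

#include "hdf5.h"
#include "hdf5_hl.h"

int main(void)
{
    hsize_t pal_dims[2] = { 256, 3 };   /* 256 entries, RGB triplets */
    unsigned char pal[256 * 3];
    hid_t file_id;
    int i;

    /* Fill a grayscale ramp: R = G = B = palette index. */
    for (i = 0; i < 256; i++)
        pal[3 * i] = pal[3 * i + 1] = pal[3 * i + 2] = (unsigned char) i;

    file_id = H5Fcreate("palette_demo.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    if (file_id < 0)
        return 1;

    /* Creates the palette dataset and tags it with CLASS/PAL_VERSION. */
    if (H5IMmake_palette(file_id, "gray_pal", pal_dims, pal) < 0) {
        H5Fclose(file_id);
        return 1;
    }

    H5Fclose(file_id);
    return 0;
}
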
Example No. 5
void hdf5_load_nd_dataset_helper(
    hid_t file_id, const char* dataset_name_, int min_dim, int max_dim,
    Blob<Dtype>* blob) {
  // Verify that the dataset exists.
  CHECK(H5LTfind_dataset(file_id, dataset_name_))
      << "Failed to find HDF5 dataset " << dataset_name_;
  // Verify that the number of dimensions is in the accepted range.
  herr_t status;
  int ndims;
  status = H5LTget_dataset_ndims(file_id, dataset_name_, &ndims);
  CHECK_GE(status, 0) << "Failed to get dataset ndims for " << dataset_name_;
  CHECK_GE(ndims, min_dim);
  CHECK_LE(ndims, max_dim);

  // Verify that the data format is what we expect: float or double.
  std::vector<hsize_t> dims(ndims);
  H5T_class_t class_;
  status = H5LTget_dataset_info(
      file_id, dataset_name_, dims.data(), &class_, NULL);
  CHECK_GE(status, 0) << "Failed to get dataset info for " << dataset_name_;
  CHECK_EQ(class_, H5T_FLOAT) << "Expected float or double data";

  vector<int> blob_dims(dims.size());
  for (int i = 0; i < dims.size(); ++i) {
    blob_dims[i] = dims[i];
  }
  blob->Reshape(blob_dims);
}
Example No. 6
int_f
h5ltfind_dataset_c(hid_t_f *loc_id,
                    size_t_f *namelen,
                    _fcd name)
{
    hid_t   c_loc_id;
    char    *c_name = NULL;
    herr_t  ret;

    /*
    * Convert FORTRAN name to C name
    */
    c_name = (char *)HD5f2cstring(name, (size_t)*namelen);
    if (c_name == NULL) return -1;

    /*
    * Call the H5LTfind_dataset function.
    */
    c_loc_id = (hid_t)*loc_id;

    ret = H5LTfind_dataset(c_loc_id, c_name);

    if(c_name!=NULL)
       HDfree(c_name);

    return ret;

}
Example No. 7
File: hdf5.cpp Project: jofeu/caffe
void hdf5_load_nd_dataset_helper(
    hid_t file_id, const char* dataset_name_, int min_dim, int max_dim,
    Blob<Dtype>* blob) {
  // Verify that the dataset exists.
  CHECK(H5LTfind_dataset(file_id, dataset_name_))
      << "Failed to find HDF5 dataset " << dataset_name_;
  // Verify that the number of dimensions is in the accepted range.
  herr_t status;
  int ndims;
  status = H5LTget_dataset_ndims(file_id, dataset_name_, &ndims);
  CHECK_GE(status, 0) << "Failed to get dataset ndims for " << dataset_name_;
  CHECK_GE(ndims, min_dim);
  CHECK_LE(ndims, max_dim);

  // Verify that the data format is what we expect: float or double.
  std::vector<hsize_t> dims(ndims);
  H5T_class_t class_;
  status = H5LTget_dataset_info(
      file_id, dataset_name_, dims.data(), &class_, NULL);
  CHECK_GE(status, 0) << "Failed to get dataset info for " << dataset_name_;
  // Blocks around the "LOG" macros avoid "initialization of ... is skipped
  // by case label" errors on MSVC.
  switch (class_) {
    case H5T_FLOAT: {
      LOG_FIRST_N(INFO, 1) << "Datatype class: H5T_FLOAT";
      break;
    }
    case H5T_INTEGER: {
      LOG_FIRST_N(INFO, 1) << "Datatype class: H5T_INTEGER";
      break;
    }
    case H5T_TIME:
    { LOG(FATAL) << "Unsupported datatype class: H5T_TIME"; }
    case H5T_STRING:
    { LOG(FATAL) << "Unsupported datatype class: H5T_STRING"; }
    case H5T_BITFIELD:
    { LOG(FATAL) << "Unsupported datatype class: H5T_BITFIELD"; }
    case H5T_OPAQUE:
    { LOG(FATAL) << "Unsupported datatype class: H5T_OPAQUE"; }
    case H5T_COMPOUND:
    { LOG(FATAL) << "Unsupported datatype class: H5T_COMPOUND"; }
    case H5T_REFERENCE:
    { LOG(FATAL) << "Unsupported datatype class: H5T_REFERENCE"; }
    case H5T_ENUM:
    { LOG(FATAL) << "Unsupported datatype class: H5T_ENUM"; }
    case H5T_VLEN:
    { LOG(FATAL) << "Unsupported datatype class: H5T_VLEN"; }
    case H5T_ARRAY:
    { LOG(FATAL) << "Unsupported datatype class: H5T_ARRAY"; }
    default:
    { LOG(FATAL) << "Datatype class unknown"; }
  }

  vector<int> blob_dims(dims.size());
  for (int i = 0; i < dims.size(); ++i) {
    blob_dims[i] = dims[i];
  }
  blob->Reshape(blob_dims);
}
Example No. 8
/*-------------------------------------------------------------*/
static hid_t findDependentField(pNXVcontext self,
		hid_t inFieldID,char *dpData)
{
	char *pPtr;
	hid_t fieldID, groupID;
	char fname[512], newPath[1024], groupName[1024];

	/*
		get at enclosing group
  */
	memset(groupName,0,sizeof(groupName));
	H5Iget_name(inFieldID,groupName,sizeof(groupName));
	pPtr = strrchr(groupName,'/');
	*pPtr = '\0';
	pPtr = NULL;

	pPtr = strchr(dpData,'/');

	if(pPtr != NULL){
		if(pPtr == dpData){
			/* absolute path */
			if(H5LTpath_valid(self->fileID,dpData,1)){
				fieldID = H5Oopen(self->fileID,dpData,H5P_DEFAULT);
				return fieldID;
			} else {
				return -1;
			}
		} else {
			/* relative path further along the path */
			snprintf(newPath,sizeof(newPath), "%s/%s", groupName, dpData);
			if(H5LTpath_valid(self->fileID,newPath,1)){
				fieldID = H5Oopen(self->fileID,newPath,H5P_DEFAULT);
				return fieldID;
			} else {
				return -1;
			}
		}
	} else {
		/* path within the group */
		groupID = H5Oopen(self->fileID, groupName,H5P_DEFAULT);
		if(H5LTfind_dataset(groupID,dpData)){
			fieldID = H5Dopen(groupID,dpData,H5P_DEFAULT);
			H5Oclose(groupID);
			return fieldID;
		} else {
			H5Oclose(groupID);
			return -1;
		}
	}

	return -1;
}
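
The guard used for absolute and relative paths here is H5LTpath_valid, which verifies that every link in the path resolves before H5Oopen is attempted. A stripped-down sketch of the same pattern (the helper name is made up):

#include "hdf5.h"
#include "hdf5_hl.h"

/* Open an object only when the whole path resolves to an existing object;
 * otherwise return a negative handle, as findDependentField does above. */
static hid_t open_if_valid(hid_t file_id, const char *path)
{
    if (H5LTpath_valid(file_id, path, 1) > 0)
        return H5Oopen(file_id, path, H5P_DEFAULT);
    return -1;
}
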
Example No. 9
int_f
nh5ltfind_dataset_c(hid_t_f *loc_id,
                    int_f *namelen,
                    _fcd name)
{
 hid_t   c_loc_id;
 char    *c_name;
 int     c_namelen;
 herr_t  ret;

/*
 * Convert FORTRAN name to C name
 */
 c_namelen = (int)*namelen;
 c_name = (char *)HD5f2cstring(name, c_namelen);
 if (c_name == NULL) return -1;

/*
 * Call the H5LTfind_dataset function.
 */
 c_loc_id = (hid_t)*loc_id;

 ret = H5LTfind_dataset(c_loc_id, c_name);

 HDfree(c_name);

 return ret;

}
Example No. 10
herr_t H5IMmake_palettef(hid_t loc_id,
                          const char *pal_name,
                          const hsize_t *pal_dims,
                          int_f *pal_data)

{

 hid_t did;                /* dataset ID */
 hid_t sid;                /* space ID */
 int   has_pal;

 /* Check if the dataset already exists */
 has_pal = H5LTfind_dataset(loc_id, pal_name);

 /* It exists. Return */
 if(has_pal == 1)
  return 0;

/*-------------------------------------------------------------------------
 * create and write the dataset
 *-------------------------------------------------------------------------
 */

 /* create the data space for the dataset. */
 if((sid = H5Screate_simple(2, pal_dims, NULL)) < 0)
  return -1;

 /* create the dataset as H5T_NATIVE_UCHAR */
 if((did = H5Dcreate2(loc_id, pal_name, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
  return -1;

 /* write with memory type H5T_NATIVE_INT */
 if(pal_data) {
  if(sizeof(int_f) == sizeof(int)) {
      if(H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, pal_data) < 0)
       return -1;
  } else if(sizeof(int_f) == sizeof(long)) {
      if(H5Dwrite(did, H5T_NATIVE_LONG, H5S_ALL, H5S_ALL, H5P_DEFAULT, pal_data) < 0)
       return -1;
  } else if(sizeof(int_f) == sizeof(long long)) {
      if(H5Dwrite(did, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT, pal_data) < 0)
       return -1;
  } else
      return -1;
 }

 /* close */
 if(H5Dclose(did) < 0)
  return -1;
 if(H5Sclose(sid) < 0)
  return -1;

/*-------------------------------------------------------------------------
 * attach the specification attributes
 *-------------------------------------------------------------------------
 */

 /* Attach the attribute "CLASS" to the >>palette<< dataset*/
 if(H5LTset_attribute_string(loc_id, pal_name, "CLASS", PALETTE_CLASS) < 0)
  return -1;

 /* Attach the attribute "PAL_VERSION" to the >>palette<< dataset*/
 if(H5LTset_attribute_string(loc_id, pal_name, "PAL_VERSION", "1.2") < 0)
  return -1;

 return 0;

}
Example No. 11
template <typename Dtype>
void HDF5GeneralDataLayer<Dtype>::LoadGeneralHDF5FileData(const char* filename) {
  DLOG(INFO) << "Loading the general HDF5 file " << filename;
  hid_t file_id = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT);
  if (file_id < 0) {
    LOG(ERROR) << "Failed opening HDF5 file" << filename;
  }

  HDF5GeneralDataParameter data_param = this->layer_param_.hdf5_general_data_param();
  int fieldNum = data_param.field_size();
  hdf_blobs_.resize(fieldNum);

  const int MIN_DATA_DIM = 1;
  const int MAX_DATA_DIM = 4;
  for(int i = 0; i < fieldNum; ++i){
	  //LOG(INFO) << "Data type: " << data_param.datatype(i).data();
	  if(i < data_param.datatype_size() && 
      strcmp(data_param.datatype(i).data(), "int8") == 0){
		  
      // We take out the io functions here
      const char* dataset_name_ = data_param.field(i).data();
      hdf_blobs_[i] = shared_ptr<Blob<Dtype> >(new Blob<Dtype>());

		  CHECK(H5LTfind_dataset(file_id, dataset_name_))
        << "Failed to find HDF5 dataset " << dataset_name_;
      // Verify that the number of dimensions is in the accepted range.
      herr_t status;
      int ndims;
      status = H5LTget_dataset_ndims(file_id, dataset_name_, &ndims);
      CHECK_GE(status, 0) << "Failed to get dataset ndims for " << dataset_name_;
      CHECK_GE(ndims, MIN_DATA_DIM);
      CHECK_LE(ndims, MAX_DATA_DIM);
      
      // Verify that the data format is what we expect: int8
      std::vector<hsize_t> dims(ndims);
      H5T_class_t class_;
      status = H5LTget_dataset_info(file_id, dataset_name_, dims.data(), &class_, NULL);
      CHECK_GE(status, 0) << "Failed to get dataset info for " << dataset_name_;
      CHECK_EQ(class_, H5T_INTEGER) << "Expected integer data";

      vector<int> blob_dims(dims.size());
      for (int j = 0; j < dims.size(); ++j) {
        blob_dims[j] = dims[j];
      }
      hdf_blobs_[i]->Reshape(blob_dims);
      std::cout<<"Trying to allocate memories!\n";
		  int* buffer_data = new int[hdf_blobs_[i]->count()];
      std::cout<<"Memories loaded!!!\n";
		  status = H5LTread_dataset_int(file_id, dataset_name_, buffer_data);
		  CHECK_GE(status, 0) << "Failed to read int8 dataset " << dataset_name_;

		  Dtype* target_data = hdf_blobs_[i]->mutable_cpu_data();
		  for(int j = 0; j < hdf_blobs_[i]->count(); j++){
			  //LOG(INFO) << Dtype(buffer_data[j]);
			  target_data[j] = Dtype(buffer_data[j]);
		  }
		  delete[] buffer_data;

	  }else{
      // The dataset is still the float32 datatype
      hdf_blobs_[i] = shared_ptr<Blob<Dtype> >(new Blob<Dtype>());
		  hdf5_load_nd_dataset(file_id, data_param.field(i).data(),
        MIN_DATA_DIM, MAX_DATA_DIM, hdf_blobs_[i].get());
	  }
  }

  herr_t status = H5Fclose(file_id);
  CHECK_GE(status, 0) << "Failed to close HDF5 file " << filename;

  for(int i = 1; i < fieldNum; ++i){
	  CHECK_EQ(hdf_blobs_[0]->num(), hdf_blobs_[i]->num());
  }
  data_permutation_.clear();
  data_permutation_.resize(hdf_blobs_[0]->shape(0));
  for (int i = 0; i < hdf_blobs_[0]->shape(0); i++)
    data_permutation_[i] = i;
  //TODO: DATA SHUFFLE
  //LOG(INFO) << "Successully loaded " << data_blob_.num() << " rows";
}
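
Since the int8 branch above only needs the raw element values, an alternative worth noting (a sketch under the assumption that the dataset really holds 8-bit integers; not the layer's actual code) is to read straight into a char buffer with H5LTread_dataset_char and skip the temporary int array:

#include <stdlib.h>
#include "hdf5.h"
#include "hdf5_hl.h"

/* Assumed helper: read an int8 dataset of n elements into a freshly
 * allocated char buffer; returns NULL if allocation or the read fails. */
static char *read_int8_dataset(hid_t file_id, const char *name, size_t n)
{
    char *buf = (char *) malloc(n);
    if (buf == NULL)
        return NULL;
    if (H5LTread_dataset_char(file_id, name, buf) < 0) {
        free(buf);
        return NULL;
    }
    return buf;
}
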
Example No. 12
herr_t H5IMunlink_palette( hid_t loc_id,
                          const char *image_name,
                          const char *pal_name )
{
    hid_t       did;
    hid_t       atid;
    hid_t       aid;
    H5T_class_t aclass;
    int         ok_pal, has_pal;

    /* check the arguments */
    if(image_name == NULL) 
      return -1;
    if(pal_name == NULL) 
      return -1;

    /* Try to find the palette dataset */
    has_pal = H5LTfind_dataset( loc_id, pal_name );

    /* It does not exist. Return */
    if ( has_pal == 0 )
        return -1;

    /* The image dataset may or may not have the attribute "PALETTE".
    * Try to open it; if it exists and really holds a palette reference,
    * the attribute is deleted, which detaches the palette from the image.
    */

    /* First we get the image id */
    if((did = H5Dopen2(loc_id, image_name, H5P_DEFAULT)) < 0)
        return -1;

    /* Try to find the attribute "PALETTE" on the >>image<< dataset */
    ok_pal = H5LT_find_attribute(did, "PALETTE");

    /* It does not exist. Nothing to do */
    if(ok_pal == 0)
        return -1;

    /* The attribute exists, open it */
    else if(ok_pal ==  1)
    {
        if((aid = H5Aopen(did, "PALETTE", H5P_DEFAULT)) < 0)
            goto out;

        if((atid = H5Aget_type(aid)) < 0)
            goto out;

        if((aclass = H5Tget_class(atid)) < 0)
            goto out;

        /* Check if it is really a reference */
        if(aclass == H5T_REFERENCE)
        {
            /* Delete the attribute */
            if(H5Adelete(did, "PALETTE") < 0)
                goto out;

        }  /* H5T_REFERENCE */

        if(H5Tclose(atid) < 0)
            goto out;

        /* Close the attribute. */
        if(H5Aclose(aid) < 0)
            goto out;

    } /* ok_pal */

    /* Close the image dataset. */
    if(H5Dclose(did) < 0)
        return -1;

    return 0;

out:
    H5Dclose( did );
    return -1;
}
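
Note that H5IMunlink_palette only deletes the PALETTE reference attribute on the image dataset; the palette dataset itself stays in the file. A sketch of the full round trip with the related H5IM calls (image size, names and zero-filled data are illustrative):

#include <string.h>
#include "hdf5.h"
#include "hdf5_hl.h"

int main(void)
{
    unsigned char image[64 * 64];
    unsigned char pal[256 * 3];
    hsize_t pal_dims[2] = { 256, 3 };
    hid_t file_id;

    memset(image, 0, sizeof image);
    memset(pal, 0, sizeof pal);

    file_id = H5Fcreate("unlink_demo.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    if (file_id < 0)
        return 1;

    /* 8-bit indexed image plus a palette, then attach and detach it. */
    H5IMmake_image_8bit(file_id, "img", 64, 64, image);
    H5IMmake_palette(file_id, "pal", pal_dims, pal);
    H5IMlink_palette(file_id, "img", "pal");     /* adds the PALETTE attribute */
    H5IMunlink_palette(file_id, "img", "pal");   /* removes it again */

    H5Fclose(file_id);
    return 0;
}
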
Example No. 13
/*--------------------------------------------------------------*/
int NXVvalidateGroup(pNXVcontext self, hid_t groupID,
	xmlNodePtr groupNode)
{
		hash_table namesSeen, baseNames;
		xmlNodePtr cur = NULL;
		xmlChar *name = NULL, *myClass = NULL;
		xmlChar *target = NULL;
		hid_t childID;
		char fName[256], childName[512], nxdlChildPath[512], childPath[512];
		char mynxdlPath[512];
		char *savedNXDLPath, *pPtr;
		SecondPassData spd;
		hsize_t idx = 0;

		/*
			manage nxdlPath, xmlGetNodePath does not work
		*/
		savedNXDLPath = self->nxdlPath;
		myClass = xmlGetProp(groupNode,(xmlChar *)"type");
		if(self->nxdlPath == NULL) {
			snprintf(mynxdlPath,sizeof(mynxdlPath),"/%s", (char *) myClass);
		} else {
			snprintf(mynxdlPath,sizeof(mynxdlPath),"%s/%s",
				self->nxdlPath, (char *) myClass);
		}
		self->nxdlPath = mynxdlPath;

		/*
			tell what we are doing
		*/
		H5Iget_name(groupID,fName,sizeof(fName));
		NXVsetLog(self,"sev","debug");
		NXVsetLog(self,"message","Validating group");
		NXVsetLog(self,"nxdlPath",self->nxdlPath);
		NXVsetLog(self,"dataPath",fName);
		NXVlog(self);


		validateGroupAttributes(self, groupID, groupNode);
		hash_construct_table(&namesSeen,100);

		/* first pass */
		cur = groupNode->xmlChildrenNode;
		while(cur != NULL){
			if(xmlStrcmp(cur->name,(xmlChar *) "group") == 0){
					childID = findGroup(self, groupID, cur);
					if(childID >= 0){
							H5Iget_name(childID, childName,sizeof(childName));
							/*
								we have to get at the HDF5 name. There may be no
								name in the NXDL, but a suitable group has been found
								by NXclass.
							*/
							pPtr = strrchr(childName,'/');
							if(pPtr != NULL){
								hash_insert(pPtr+1,strdup(""),&namesSeen);
							} else {
								hash_insert(childName,strdup(""),&namesSeen);
							}
							NXVvalidateGroup(self,childID,cur);
					} else {
						name = xmlGetProp(cur,(xmlChar *)"type");
						snprintf(nxdlChildPath,sizeof(nxdlChildPath),"%s/%s",
							self->nxdlPath, (char *)name);
						xmlFree(name);
						NXVsetLog(self,"dataPath",fName);
						NXVsetLog(self,"nxdlPath", nxdlChildPath);
						if(!isOptional(cur)){
							NXVsetLog(self,"sev","error");
							NXVsetLog(self,"message","Required group missing");
							NXVlog(self);
							self->errCount++;
						} else {
							NXVsetLog(self,"sev","warnopt");
							NXVsetLog(self,"message","Optional group missing");
							NXVlog(self);
						}
					}
			}
			if(xmlStrcmp(cur->name,(xmlChar *) "field") == 0){
					name = xmlGetProp(cur,(xmlChar *)"name");
					if(H5LTfind_dataset(groupID,(char *)name) ) {
						childID = H5Dopen(groupID,(char *)name,H5P_DEFAULT);
					} else {
						childID = -1;
					}
					snprintf(childPath,sizeof(childPath),"%s/%s",
						fName,name);
					if(childID < 0){
						NXVsetLog(self,"dataPath",childPath);
						snprintf(nxdlChildPath,sizeof(nxdlChildPath),
							"%s/%s", self->nxdlPath, name);
						NXVsetLog(self,"nxdlPath", nxdlChildPath);
						if(!isOptional(cur)){
									NXVsetLog(self,"sev","error");
									NXVsetLog(self,"message","Required field missing");
									NXVlog(self);
									self->errCount++;
						} else {
							NXVsetLog(self,"sev","warnopt");
							NXVsetLog(self,"message","Optional field missing");
							NXVlog(self);
						}
					} else {
						if(xmlStrcmp(name,(xmlChar *)"depends_on") == 0){
							/*
								This must be validated from the field level, as
								it might point to fields which are not in the
								application definition.
							*/
							validateDependsOn(self,groupID,childID);
						} else {
							NXVvalidateField(self,childID, cur);
						}
						hash_insert((char *)name,strdup(""),&namesSeen);
					}
					xmlFree(name);
			}
			if(xmlStrcmp(cur->name,(xmlChar *) "link") == 0){
				name = xmlGetProp(cur,(xmlChar *)"name");
				target = xmlGetProp(cur,(xmlChar *)"target");
				hash_insert((char *)name,strdup(""),&namesSeen);
				validateLink(self,groupID,name, target);
				xmlFree(name);
				xmlFree(target);
			}
			cur = cur->next;
		}

		/*
			Second pass: search the HDF5 group for additional
			stuff which have not checked yet. Most of the hard work
			is in the SecondPassIterator.
		*/
		hash_construct_table(&baseNames,100);
		NXVloadBaseClass(self,&baseNames,(char *)myClass);
		spd.baseNames = &baseNames;
		spd.namesSeen = &namesSeen;
		spd.self = self;
		NXVsetLog(self,"nxdlPath", mynxdlPath);
		H5Literate(groupID, H5_INDEX_NAME, H5_ITER_INC, &idx,
			SecondPassIterator, &spd);

		/*
			clean up
		*/
		hash_free_table(&namesSeen,free);
		hash_free_table(&baseNames,free);
		xmlFree(myClass);
		/*
			restore my paths...
		*/
		self->nxdlPath = savedNXDLPath;
		return 0;
	}
Example No. 14
void rdiff(const char *name, hid_t f1, hid_t f2) {
  hid_t g1 = H5Gopen(f1, name, H5P_DEFAULT);
  hid_t g2 = H5Gopen(f2, name, H5P_DEFAULT);
  if (g1 >= 0 && g2 >= 0) {
    int n1 = H5Aget_num_attrs(g1);
    for (int i = 0; i < n1; i++) {
      char aname[MAXNAME];
      hid_t a1 = H5Aopen_idx(g1, i);
      assert(H5Aget_name(a1, MAXNAME, aname) < MAXNAME);
      H5Aclose(a1);
      if (!H5LTfind_attribute(g2, aname)) {
	printf("Only in %s[%s%s]\n", file1, name, aname);
	continue;
      }
      int d1, d2;
      H5LTget_attribute_ndims(f1, name, aname, &d1);
      H5LTget_attribute_ndims(f2, name, aname, &d2);
      assert(d1 <= 1 && d2 <= 1);
      hsize_t dims1, dims2;
      H5T_class_t t1, t2;
      size_t ts1, ts2;
      H5LTget_attribute_info(f1, name, aname, &dims1, &t1, &ts1);
      H5LTget_attribute_info(f2, name, aname, &dims2, &t2, &ts2);
      assert(t1 == t2);
      assert(t1 == H5T_INTEGER || t1 == H5T_FLOAT || t1 == H5T_STRING);
      if (t1 == H5T_INTEGER) {
	assert(d1==0 || (dims1 == 1 && dims2 == 1));
	assert(ts1 == 4 && ts2 == 4);
	int v1, v2;
	H5LTget_attribute_int(f1, name, aname, &v1);
	H5LTget_attribute_int(f2, name, aname, &v2);
	if (v1 != v2) {
	  printf("%s[%s%s]=%d %s[%s%s]=%d\n", file1, name, aname, v1, file2, name, aname, v2);
	}
      }
      if (t1 == H5T_FLOAT) {
	assert(d1==0 || (dims1 == 1 && dims2 == 1));
	assert(ts1 == 4 && ts2 == 4);
	float v1, v2;
	H5LTget_attribute_float(f1, name, aname, &v1);
	H5LTget_attribute_float(f2, name, aname, &v2);
	if (v1 != v2) {
	  printf("%s[%s%s]=%g %s[%s%s]=%g\n", file1, name, aname, v1, file2, name, aname, v2);
	}
      }
      if (t1 == H5T_STRING) {
	assert(ts1 < 256 && ts2 < 256);
	char buf1[256];
	char buf2[256];
	H5LTget_attribute_string(f1, name, aname, buf1);
	H5LTget_attribute_string(f2, name, aname, buf2);
	if (strcmp(buf1, buf2)) {
	  printf("%s[%s%s]=%s %s[%s%s]=%s\n", file1, name, aname, buf1, file2, name, aname, buf2);
	}
      }
    }
    int n2 = H5Aget_num_attrs(g2);
    for (int i = 0; i < n2; i++) {
      char aname[MAXNAME];
      hid_t a2 = H5Aopen_idx(g2, i);
      assert(H5Aget_name(a2, MAXNAME, aname) < MAXNAME);
      H5Aclose(a2);
      if (!H5LTfind_attribute(g1, aname)) {
	printf("Only in %s[%s%s]\n", file2, name, aname);
	continue;
      }
    }

    hsize_t nobj;
    H5Gget_num_objs(g1, &nobj);
    for (int i = 0; i < nobj; i++) {
      char oname[MAXNAME];
      assert(H5Gget_objname_by_idx(g1, i, oname, MAXNAME) < MAXNAME);
      int otype = H5Gget_objtype_by_idx(g1, i);
      assert(otype == H5G_DATASET);
      if (!H5LTfind_dataset(g2, oname)) {
	printf("Only in %s[%s%s]\n", file1, name, oname);
	continue;
      }
      hsize_t dims1[2], dims2[2];
      H5T_class_t t1, t2;
      size_t ts1, ts2;
      H5LTget_dataset_info(g1, oname, dims1, &t1, &ts1);
      H5LTget_dataset_info(g2, oname, dims2, &t2, &ts2);
      if (dims1[0] != dims2[0] || dims1[1] != dims2[1]) {
	printf("%s[%s%s](%d,%d) != %s[%s%s](%d,%d)\n", 
	       file1, name, oname, dims1[1], dims1[0],
	       file2, name, oname, dims2[1], dims2[0]);
	continue;
      }
      float *data1 = malloc(dims1[0]*dims1[1]*sizeof(float));
      float *data2 = malloc(dims1[0]*dims1[1]*sizeof(float));
      H5LTread_dataset_float(g1, oname, data1);
      H5LTread_dataset_float(g2, oname, data2);
      float maxdiff = 0;
      for (int i = dims1[0]*dims1[1]-1; i >= 0; i--) {
	float d = data1[i] - data2[i];
	if (d < 0) d = -d;
	if (d > maxdiff) maxdiff = d;
      }
      printf("max |%s[%s%s] - %s[%s%s]| = %g\n",
	     file1, name, oname, file2, name, oname, maxdiff);
      free(data1); free(data2);
    }
    H5Gget_num_objs(g2, &nobj);
    for (int i = 0; i < nobj; i++) {
      char oname[MAXNAME];
      assert(H5Gget_objname_by_idx(g2, i, oname, MAXNAME) < MAXNAME);
      int otype = H5Gget_objtype_by_idx(g2, i);
      assert(otype == H5G_DATASET);
      if (!H5LTfind_dataset(g1, oname)) {
	printf("Only in %s[%s%s]\n", file2, name, oname);
	continue;
      }
    }
    H5Gclose(g1);
    H5Gclose(g2);
  } else if (g1 >= 0) {
    printf("Only in %s:%s\n", file1, name);
    H5Gclose(g1);
  } else if (g2 >= 0) {
    printf("Only in %s:%s\n", file2, name);
    H5Gclose(g2);
  } else {
    printf("Group %s does not exist in either file.\n", name);
  }
}
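
file1, file2 and MAXNAME are defined outside this snippet. Assuming they are the two input file names and a name-buffer size, a hypothetical driver that feeds rdiff the root group could look like this (the declarations below only mirror those globals so the sketch is self-contained):

#include <stdio.h>
#include "hdf5.h"

#define MAXNAME 256
static const char *file1, *file2;

void rdiff(const char *name, hid_t f1, hid_t f2);   /* defined above */

int main(int argc, char **argv)
{
    if (argc != 3) {
        fprintf(stderr, "usage: %s file1.h5 file2.h5\n", argv[0]);
        return 1;
    }
    file1 = argv[1];
    file2 = argv[2];

    /* Open both files read-only and compare their root groups. */
    hid_t f1 = H5Fopen(file1, H5F_ACC_RDONLY, H5P_DEFAULT);
    hid_t f2 = H5Fopen(file2, H5F_ACC_RDONLY, H5P_DEFAULT);
    if (f1 < 0 || f2 < 0) {
        fprintf(stderr, "could not open input files\n");
        return 1;
    }

    rdiff("/", f1, f2);

    H5Fclose(f1);
    H5Fclose(f2);
    return 0;
}
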
Example No. 15
File: fclib.c Project: xhub/fclib
/** read matrix */
struct fclib_matrix* read_matrix (hid_t id)
{
  struct fclib_matrix *mat;

  MM (mat = malloc (sizeof (struct fclib_matrix)));
 
  IO (H5LTread_dataset_int (id, "nzmax", &mat->nzmax));
  IO (H5LTread_dataset_int (id, "m", &mat->m));
  IO (H5LTread_dataset_int (id, "n", &mat->n));
  IO (H5LTread_dataset_int (id, "nz", &mat->nz));

  if (mat->nz >= 0) /* triplet */
  {
    MM (mat->p = malloc (sizeof (int [mat->nz])));
    MM (mat->i = malloc (sizeof (int [mat->nz])));
    IO (H5LTread_dataset_int (id, "p", mat->p));
    IO (H5LTread_dataset_int (id, "i", mat->i));
  }
  else if (mat->nz == -1) /* csc */
  {
    MM (mat->p = malloc (sizeof (int [mat->n+1])));
    MM (mat->i = malloc (sizeof (int [mat->nzmax])));
    IO (H5LTread_dataset_int (id, "p", mat->p));
    IO (H5LTread_dataset_int (id, "i", mat->i));
  }
  else if (mat->nz == -2) /* csr */
  {
    MM (mat->p = malloc (sizeof (int [mat->m+1])));
    MM (mat->i = malloc (sizeof (int [mat->nzmax])));
    IO (H5LTread_dataset_int (id, "p", mat->p));
    IO (H5LTread_dataset_int (id, "i", mat->i));
  }
  else ASSERT (0, "ERROR: unknown sparse matrix type => fclib_matrix->nz = %d\n", mat->nz);

  MM (mat->x = malloc (sizeof (double [mat->nzmax])));
  IO (H5LTread_dataset_double (id, "x", mat->x));

  if (H5LTfind_dataset (id, "conditioning"))
  {
    H5T_class_t class_id;
    hsize_t dim;
    size_t size;

    MM (mat->info = malloc (sizeof (struct fclib_matrix_info)));
    if (H5LTfind_dataset (id, "comment"))
    {
      IO (H5LTget_dataset_info  (id, "comment", &dim, &class_id, &size));
      MM (mat->info->comment = malloc (sizeof (char [size])));
      IO (H5LTread_dataset_string (id, "comment", mat->info->comment));
    }
    else mat->info->comment = NULL;
    IO (H5LTread_dataset_double (id, "conditioning", &mat->info->conditioning));
    IO (H5LTread_dataset_double (id, "determinant", &mat->info->determinant));
    IO (H5LTread_dataset_int (id, "rank", &mat->info->rank));
  }
  else
  {
    mat->info = NULL;
  }

  return mat;
}
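
For orientation, read_matrix uses the nz dataset to select the storage scheme: nz >= 0 means triplet storage with nz entries, nz == -1 compressed sparse column, nz == -2 compressed sparse row. A hedged write-side sketch for the CSC case, using the same dataset names the reader expects (the 2 by 2 identity matrix is made up; this is not fclib's writer):

#include "hdf5.h"
#include "hdf5_hl.h"

/* Write a 2x2 identity matrix in CSC form using the dataset names
 * that read_matrix expects. Sketch only, not fclib's writer. */
static int write_csc_identity(hid_t id)
{
    int     nzmax = 2, m = 2, n = 2, nz = -1;      /* nz == -1 marks CSC */
    int     p[3]  = { 0, 1, 2 };                   /* column pointers, n+1 long */
    int     i[2]  = { 0, 1 };                      /* row indices, nzmax long */
    double  x[2]  = { 1.0, 1.0 };                  /* nonzero values */
    hsize_t one = 1, np = 3, ni = 2, nx = 2;

    if (H5LTmake_dataset_int(id, "nzmax", 1, &one, &nzmax) < 0) return -1;
    if (H5LTmake_dataset_int(id, "m",     1, &one, &m)     < 0) return -1;
    if (H5LTmake_dataset_int(id, "n",     1, &one, &n)     < 0) return -1;
    if (H5LTmake_dataset_int(id, "nz",    1, &one, &nz)    < 0) return -1;
    if (H5LTmake_dataset_int(id, "p",     1, &np,  p)      < 0) return -1;
    if (H5LTmake_dataset_int(id, "i",     1, &ni,  i)      < 0) return -1;
    if (H5LTmake_dataset_double(id, "x",  1, &nx,  x)      < 0) return -1;
    return 0;
}
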