/**
 * Write a hyperslab into the dataset.
 *
 * @param [in] HDF5_Dataset_id - Dataset id
 * @param [in] Position        - Position in the dataset
 * @param [in] Size            - Size of the hyperslab
 * @param [in] Data
 * @throw ios::failure
 */
void THDF5_File::WriteHyperSlab(const hid_t HDF5_Dataset_id,
                                const TDimensionSizes & Position,
                                const TDimensionSizes & Size,
                                const float * Data)
{
  // Select hyperslab
  const int MatrixRank = 3;
  hsize_t ElementCount[MatrixRank] = {Size.Z, Size.Y, Size.X};
  hsize_t Offset[MatrixRank]       = {Position.Z, Position.Y, Position.X};

  herr_t status;
  hid_t  HDF5_Filespace, HDF5_Memspace;

  // Select hyperslab in the file.
  HDF5_Filespace = H5Dget_space(HDF5_Dataset_id);
  status = H5Sselect_hyperslab(HDF5_Filespace, H5S_SELECT_SET, Offset, 0, ElementCount, NULL);
  if (status < 0) {
    char ErrorMessage[256];
    sprintf(ErrorMessage, HDF5_ERR_FMT_CouldNotWriteTo, "");
    throw ios::failure(ErrorMessage);
  }

  // Assign memspace
  HDF5_Memspace = H5Screate_simple(MatrixRank, ElementCount, NULL);

  status = H5Dwrite(HDF5_Dataset_id, H5T_NATIVE_FLOAT, HDF5_Memspace, HDF5_Filespace, H5P_DEFAULT, Data);
  if (status < 0) {
    char ErrorMessage[256];
    sprintf(ErrorMessage, HDF5_ERR_FMT_CouldNotWriteTo, "");
    throw ios::failure(ErrorMessage);
  }

  H5Sclose(HDF5_Memspace);
  H5Sclose(HDF5_Filespace);
}// end of WriteHyperSlab
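/* Hypothetical usage sketch for THDF5_File::WriteHyperSlab above; it is not part of
 * the original sources. It assumes the project's THDF5_File header is included, an
 * already-created 3-D float dataset, and that TDimensionSizes can be constructed
 * from X, Y, Z extents; the names and sizes are illustrative only. */
void ExampleWriteBlock(THDF5_File & file, const hid_t datasetId, const float * block)
{
  const TDimensionSizes position(0, 0, 0);   // where the block starts in the dataset
  const TDimensionSizes size(64, 64, 64);    // extent of the block being written

  // Any HDF5 failure surfaces as ios::failure, so catch it at the call site.
  try {
    file.WriteHyperSlab(datasetId, position, size, block);
  } catch (const std::ios::failure & e) {
    std::cerr << "Hyperslab write failed: " << e.what() << std::endl;
  }
}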
bool save_quad_bc(hid_t parent_group_id, JudyArray<Boundary *> &bcs)
{
    herr_t status;

    // create main group
    hid_t group_id = H5Gcreate(parent_group_id, "quad", 0);

    // count attribute
    hid_t dataspace_id = H5Screate(H5S_SCALAR);
    hid_t attr_count = H5Acreate(group_id, "count", H5T_NATIVE_UINT32, dataspace_id, H5P_DEFAULT);
    uint count = bcs.count();
    status = H5Awrite(attr_count, H5T_NATIVE_UINT32, &count);
    H5Aclose(attr_count);

    hsize_t dims = Quad::NUM_VERTICES;
    hid_t elem_dataspace_id = H5Screate_simple(1, &dims, NULL);
    hid_t marker_dataspace_id = H5Screate(H5S_SCALAR);

    // dump vertices
    for (uint i = 0; i < count; i++) {
        char name[256];
        sprintf(name, "%u", i);

        // the dataset
        hid_t dataset_id = H5Dcreate(group_id, name, H5T_NATIVE_UINT32, elem_dataspace_id, H5P_DEFAULT);
        status = H5Dwrite(dataset_id, H5T_NATIVE_UINT32, H5S_ALL, H5S_ALL, H5P_DEFAULT, bcs[i]->get_vertices());

        // marker attribute
        hid_t attr_marker = H5Acreate(dataset_id, "marker", H5T_NATIVE_UINT32, marker_dataspace_id, H5P_DEFAULT);
        uint marker = bcs[i]->get_marker();
        status = H5Awrite(attr_marker, H5T_NATIVE_UINT32, &marker);
        H5Aclose(attr_marker);

        status = H5Dclose(dataset_id);
    }

    H5Sclose(marker_dataspace_id);
    H5Sclose(elem_dataspace_id);
    H5Sclose(dataspace_id);
    status = H5Gclose(group_id);        // close the group

    return status >= 0;
}
void seissol::checkpoint::h5::Fault::write(int timestepFault)
{
	EPIK_TRACER("CheckPointFault_write");
	SCOREP_USER_REGION("CheckPointFault_write", SCOREP_USER_REGION_TYPE_FUNCTION);

	if (numSides() == 0)
		return;

	logInfo(rank()) << "Writing fault check point.";

	// Create array with all pointers
	EPIK_USER_REG(r_write_fault, "checkpoint_write_fault");
	SCOREP_USER_REGION_DEFINE(r_write_fault);
	EPIK_USER_START(r_write_fault);
	SCOREP_USER_REGION_BEGIN(r_write_fault, "checkpoint_write_fault", SCOREP_USER_REGION_TYPE_COMMON);

	// Attributes
	checkH5Err(H5Awrite(m_h5timestepFault[odd()], H5T_NATIVE_INT, &timestepFault));

	// Set memory and file space
	hsize_t fStart[2] = {fileOffset(), 0};
	hsize_t count[2] = {numSides(), numBndGP()};
	hid_t h5memSpace = H5Screate_simple(2, count, 0L);
	checkH5Err(h5memSpace);
	checkH5Err(H5Sselect_all(h5memSpace));
	checkH5Err(H5Sselect_hyperslab(m_h5fSpaceData, H5S_SELECT_SET, fStart, 0L, count, 0L));

	for (unsigned int i = 0; i < NUM_VARIABLES; i++) {
		checkH5Err(H5Dwrite(m_h5data[odd()][i], H5T_NATIVE_DOUBLE, h5memSpace, m_h5fSpaceData,
				h5XferList(), data(i)));
	}

	checkH5Err(H5Sclose(h5memSpace));

	EPIK_USER_END(r_write_fault);
	SCOREP_USER_REGION_END(r_write_fault);

	// Finalize the checkpoint
	finalizeCheckpoint();

	logInfo(rank()) << "Writing fault check point. Done.";
}
herr_t H5TBOwrite_elements( hid_t dataset_id, hid_t mem_type_id, hsize_t nrecords, const void *coords, const void *data ) { hsize_t count[1]; hid_t space_id; hid_t mem_space_id; /* Get the dataspace handle */ if ( (space_id = H5Dget_space( dataset_id )) < 0 ) goto out; /* Define a selection of points in the dataset */ if ( H5Sselect_elements(space_id, H5S_SELECT_SET, (size_t)nrecords, (const hsize_t *)coords) < 0 ) goto out; /* Create a memory dataspace handle */ count[0] = nrecords; if ( (mem_space_id = H5Screate_simple( 1, count, NULL )) < 0 ) goto out; if ( H5Dwrite( dataset_id, mem_type_id, mem_space_id, space_id, H5P_DEFAULT, data ) < 0 ) goto out; /* Terminate access to the memory dataspace */ if ( H5Sclose( mem_space_id ) < 0 ) goto out; /* Terminate access to the dataspace */ if ( H5Sclose( space_id ) < 0 ) goto out; return 0; out: return -1; }
/* ------- begin -------------------------- writeMPI_p.c ----- */ void writeMPI_p(int task) { /* Writes output on indata file, MPI group, one task at once */ const char routineName[] = "writeMPI_p"; hsize_t offset[] = {0, 0, 0, 0}; hsize_t count[] = {1, 1, 1, 1}; hsize_t dims[4]; hid_t file_dspace, mem_dspace; dims[0] = 1; if (( mem_dspace = H5Screate_simple(1, dims, NULL) ) < 0) HERR(routineName); offset[0] = mpi.ix; offset[1] = mpi.iy; if (( file_dspace = H5Dget_space(io.in_mpi_tm) ) < 0) HERR(routineName); if (( H5Sselect_hyperslab(file_dspace, H5S_SELECT_SET, offset, NULL, count, NULL) ) < 0) HERR(routineName); if (( H5Dwrite(io.in_mpi_tm, H5T_NATIVE_INT, mem_dspace, file_dspace, H5P_DEFAULT, &mpi.rank) ) < 0) HERR(routineName); if (( H5Dwrite(io.in_mpi_tn, H5T_NATIVE_INT, mem_dspace, file_dspace, H5P_DEFAULT, &task) ) < 0) HERR(routineName); if (( H5Dwrite(io.in_mpi_it, H5T_NATIVE_INT, mem_dspace, file_dspace, H5P_DEFAULT, &mpi.niter[0]) ) < 0) HERR(routineName); if (( H5Dwrite(io.in_mpi_conv, H5T_NATIVE_INT, mem_dspace, file_dspace, H5P_DEFAULT, &mpi.convergence[0]) ) < 0) HERR(routineName); if (( H5Dwrite(io.in_mpi_zc, H5T_NATIVE_INT, mem_dspace, file_dspace, H5P_DEFAULT, &mpi.zcut_hist[0]) ) < 0) HERR(routineName); if (( H5Dwrite(io.in_mpi_dm, H5T_NATIVE_DOUBLE, mem_dspace, file_dspace, H5P_DEFAULT, &mpi.dpopsmax[0]) ) < 0) HERR(routineName); if (( H5Sclose(file_dspace) ) < 0) HERR(routineName); if (( H5Sclose(mem_dspace) ) < 0) HERR(routineName); dims[0] = mpi.niter[0]; if (( mem_dspace = H5Screate_simple(1, dims, NULL) ) < 0) HERR(routineName); offset[0] = mpi.ix; offset[1] = mpi.iy; count[2] = mpi.niter[0]; if (( file_dspace = H5Dget_space(io.in_mpi_dmh) ) < 0) HERR(routineName); if (( H5Sselect_hyperslab(file_dspace, H5S_SELECT_SET, offset, NULL, count, NULL) ) < 0) HERR(routineName); if (( H5Dwrite(io.in_mpi_dmh, H5T_NATIVE_DOUBLE, mem_dspace, file_dspace, H5P_DEFAULT, mpi.dpopsmax_hist[0]) ) < 0) HERR(routineName); if (( H5Sclose(file_dspace) ) < 0) HERR(routineName); if (( H5Sclose(mem_dspace) ) < 0) HERR(routineName); return; }
void hdf5_dataset::write(hdf5_datatype const& type, void const* buffer) { herr_t status = H5Dwrite( get_id(), type.get_id(), H5S_ALL, H5S_ALL, H5P_DEFAULT, buffer ); if(status < 0) { boost::serialization::throw_exception( hdf5_archive_exception( hdf5_archive_exception::hdf5_archive_dataset_write_error ) ); } }
static int Write_hdf5(void *buf, size_t nbytes) { hid_t dsid; herr_t n1, n2; char dsname[256]; static int n = 0; if (dspc == -1) { hsize_t dims = nbytes; dspc = H5Screate_simple(1, &dims, &dims); } sprintf(dsname, "data_%07d", n++); dsid = H5Dcreate(fid, dsname, H5T_NATIVE_UCHAR, dspc, H5P_DEFAULT); if (dsid < 0) return 0; n1 = H5Dwrite(dsid, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf); n2 = H5Dclose(dsid); if (n1 < 0 || n2 < 0) return 0; return nbytes; }
int create_new_dataset(H5block *d)
{
    hid_t file_id, dataset_id, dataspace_id, dcpl, datatype;
    herr_t status;

    file_id = H5Fopen(d->name, H5F_ACC_RDWR, H5P_DEFAULT);

    hsize_t dims[2];
    dims[0] = d->x_index_dim;
    dims[1] = d->y_index_dim;
    dataspace_id = H5Screate_simple(2, dims, NULL);

    datatype = H5Tcopy(H5T_NATIVE_FLOAT);
    status = H5Tset_order(datatype, H5T_ORDER_LE);

    char buffer[50];
    sprintf(buffer, "/dset%ld", d->ticks);

    dataset_id = H5Dcreate(file_id, buffer, datatype, dataspace_id, H5P_DEFAULT);

    status = H5Dwrite(dataset_id, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, d->field);

    status = H5Dclose(dataset_id);
    status = H5Tclose(datatype);
    status = H5Sclose(dataspace_id);
    status = H5Fclose(file_id);

    return status;
}
void testCreateStringDataset() { const char * data[STR_DSET_LEN] = {"You have to", "live", "life", "to the limit"}; hid_t file, memtype, dset; hsize_t size = STR_DSET_LEN; herr_t status; HDF5WriterBase writer; string h5Filename = moose::random_string( 10 ); file = H5Fcreate(h5Filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); dset = writer.createStringDataset(file, STR_DSET_NAME, size, size); assert(dset >= 0); memtype = H5Tcopy(H5T_C_S1); status = H5Tset_size(memtype, H5T_VARIABLE); assert(status >= 0); status = H5Dwrite(dset, memtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, data); assert(status >= 0); status = H5Dclose(dset); H5Tclose(memtype); H5Fclose(file); }
herr_t ASDF_write_quakeml(hid_t loc_id, const char *quakeml_string)
{
  hsize_t dims[1] = {strlen(quakeml_string)};
  hsize_t maxdims[1] = {H5S_UNLIMITED};
  hid_t space_id, dcpl_id, array_id;

  CHK_H5(space_id = H5Screate_simple(1, dims, maxdims));
  CHK_H5(dcpl_id = H5Pcreate(H5P_DATASET_CREATE));
  CHK_H5(H5Pset_chunk(dcpl_id, 1, dims));

  CHK_H5(array_id = H5Dcreate(loc_id, "/QuakeML", H5T_STD_I8LE, space_id,
                              H5P_DEFAULT, dcpl_id, H5P_DEFAULT));
  CHK_H5(H5Dwrite(array_id, H5T_STD_I8LE, H5S_ALL, H5S_ALL, H5P_DEFAULT, quakeml_string));

  CHK_H5(H5Dclose(array_id));
  CHK_H5(H5Sclose(space_id));

  return 0; // Success
}
herr_t writeDouble2d(hid_t file_id,const char *dsName, void * matrix, int DIM_X, int DIM_Y) { hid_t dataset_id,dataspace_id; hsize_t dims[] = {DIM_Y,DIM_X}; /*DIM_Y corresponds to number of rows, DIM_X to columns*/ herr_t status; /* Create the data space for the dataset. */ dataspace_id = H5Screate_simple(2, dims, NULL); /* Create the dataset. */ dataset_id = H5Dcreate(file_id, dsName, H5T_IEEE_F64LE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); status = H5Sclose(dataspace_id); /*Write the dataset*/ status = H5Dwrite(dataset_id, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, matrix); /* End access to the dataset and release resources used by it. */ status = H5Dclose(dataset_id); return status; }
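/* Hypothetical usage sketch for writeDouble2d above; it is not part of the original
 * code. The file name and the 2x3 layout are illustrative assumptions: DIM_Y is the
 * number of rows, DIM_X the number of columns, and the matrix is passed as one
 * contiguous row-major block. */
#include "hdf5.h"

static herr_t example_writeDouble2d(void)
{
    double matrix[2][3] = { {1.0, 2.0, 3.0},
                            {4.0, 5.0, 6.0} };   /* 2 rows (DIM_Y), 3 columns (DIM_X) */

    hid_t file_id = H5Fcreate("example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    if (file_id < 0) return -1;

    herr_t status = writeDouble2d(file_id, "/matrix", matrix, 3, 2);  /* DIM_X = 3, DIM_Y = 2 */
    H5Fclose(file_id);
    return status;
}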
asynStatus NDFileHDF5AttributeDataset::writeAttributeDataset(hdf5::When_t whenToSave,
                                                             hsize_t *offsets,
                                                             NDAttribute *ndAttr,
                                                             int flush, int indexed)
{
  asynStatus status = asynSuccess;
  char * stackbuf[MAX_ATTRIBUTE_STRING_SIZE];
  void* pDatavalue = stackbuf;
  int ret;

  // Check if the attribute is meant to be saved at this time
  if (whenToSave_ == whenToSave) {
    // Extend the dataset as required to store the data
    if (indexed == -1){
      extendDataSet(offsets);
    } else {
      extendIndexDataSet(offsets[indexed]);
    }
    // Find the data based on datatype
    ret = ndAttr->getValue(ndAttr->getDataType(), pDatavalue, MAX_ATTRIBUTE_STRING_SIZE);
    if (ret == ND_ERROR) {
      memset(pDatavalue, 0, MAX_ATTRIBUTE_STRING_SIZE);
    }
    // Work with the HDF5 library to select a suitable hyperslab (one element) and write the new data to it
    H5Dset_extent(dataset_, dims_);
    filespace_ = H5Dget_space(dataset_);

    // Select the hyperslab
    H5Sselect_hyperslab(filespace_, H5S_SELECT_SET, offset_, NULL, elementSize_, NULL);

    // Write the data to the hyperslab.
    H5Dwrite(dataset_, datatype_, memspace_, filespace_, H5P_DEFAULT, pDatavalue);

    // Check if we are being asked to flush
    if (flush == 1){
      status = this->flushDataset();
    }

    H5Sclose(filespace_);
    nextRecord_++;
  }

  return status;
}
void saveParticleComp_Int(int *data, char *fileName, char *dataName, int totalCnt, int cnt, int offSet)
{
    int i, j, k;
    int myrank, nTasks;
    MPI_Comm_rank(MPI_COMM_WORLD, &myrank);
    MPI_Comm_size(MPI_COMM_WORLD, &nTasks);

    hid_t file_id, dset_id, plist_id, tic_id;
    herr_t status;
    hid_t total_file_space, subfilespace, filespace, memspace, ticspace;
    hsize_t dimsf[1], count[1], offset[1];

    plist_id = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_mpio(plist_id, MPI_COMM_WORLD, MPI_INFO_NULL);
    // H5Pset_fclose_degree(plist_id, H5F_CLOSE_SEMI);
    // MPI_Barrier(MPI_COMM_WORLD);

    file_id = H5Fopen(fileName, H5F_ACC_RDWR, plist_id);
    H5Pclose(plist_id);

    dimsf[0] = totalCnt;
    filespace = H5Screate_simple(1, dimsf, NULL);

    count[0] = cnt;
    offset[0] = offSet;
    memspace = H5Screate_simple(1, count, NULL);

    dset_id = H5Dcreate2(file_id, dataName, H5T_NATIVE_INT, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    subfilespace = H5Dget_space(dset_id);
    H5Sselect_hyperslab(subfilespace, H5S_SELECT_SET, offset, NULL, count, NULL);

    plist_id = H5Pcreate(H5P_DATASET_XFER);
    H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT);
    status = H5Dwrite(dset_id, H5T_NATIVE_INT, memspace, subfilespace, plist_id, data);
    H5Pclose(plist_id);

    H5Sclose(subfilespace);
    H5Dclose(dset_id);
    H5Sclose(memspace);
    H5Sclose(filespace);
    H5Fclose(file_id);
}
void F77_FUNC_(pwhdf_open_file,PWHDF_OPEN_FILE)(const char* fname, const int* length)
{
  char * hfname = (char *) malloc((*length) + 1);
  memcpy(hfname, fname, *length);
  hfname[*length] = '\0';

  if (h_file >= 0) H5Fclose(h_file);
  h_file = H5Fcreate(hfname, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

  /* implements version 1.00 hdf5 format */
  int version[] = {1, 10};
  hsize_t dim = 2;
  hid_t dataspace = H5Screate_simple(1, &dim, NULL);
  hid_t dataset = H5Dcreate(h_file, "version", H5T_NATIVE_INT, dataspace, H5P_DEFAULT);
  herr_t ret = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, version);
  H5Sclose(dataspace);
  H5Dclose(dataset);
  free(hfname);
}
void dumpToH5(int Ni, int Nj, int Nk, int is, int js, int ks, int ie, int je, int ke, float ***f, char *format, ...) { char filename[1024]; va_list ap; va_start(ap, format); vsprintf(filename, format, ap); hid_t file, dataset, filespace, memspace; hsize_t dimsm[3] = { Ni, Nj, Nk }; hsize_t start[3] = { is, js, ks }; hsize_t count[3] = { 1-is+ie, 1-js+je, 1-ks+ke }; memspace = H5Screate_simple(3, dimsm, 0); filespace = H5Screate_simple(3, count, 0); file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); dataset = H5Dcreate(file, "Data", H5T_NATIVE_FLOAT, filespace, H5P_DEFAULT); H5Sselect_hyperslab(memspace, H5S_SELECT_SET, start, 0, count, 0); H5Dwrite(dataset, H5T_NATIVE_FLOAT, memspace, filespace, H5P_DEFAULT, f[0][0]); H5Dclose(dataset); H5Sclose(filespace); H5Sclose(memspace); H5Fclose(file); }
//-*****************************************************************************
void WriteReferences( hid_t iParent,
                      const std::string& iRefName,
                      size_t iNumRefs,
                      const void *iRefs )
{
    hsize_t dims[1];
    dims[0] = iNumRefs;

    hid_t dspaceId = H5Screate_simple( 1, dims, NULL );
    DspaceCloser dspaceCloser( dspaceId );

    hid_t dsetId = H5Dcreate2( iParent, iRefName.c_str(),
                               H5T_STD_REF_OBJ, dspaceId,
                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT );
    DsetCloser dsetCloser( dsetId );

    herr_t status = H5Dwrite( dsetId, H5T_STD_REF_OBJ,
                              H5S_ALL, H5S_ALL, H5P_DEFAULT, iRefs );
    ABCA_ASSERT( status >= 0, "Couldn't write reference: " << iRefName );
}
herr_t Compartment::saveHdf5(hid_t group) const
{
    hid_t dataspace, dataset;
    herr_t status;
    hsize_t dims[1];
    dims[0] = 4;

    dataspace = H5Screate_simple(1, dims, NULL);
    // dataset = H5Dcreate(group, id().c_str(), H5T_NATIVE_INT, dataspace,
    //                     H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    dataset = H5Dcreate2(group, id().c_str(), H5T_NATIVE_INT, dataspace,
                         H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    int boundaries[4] = { x0_, y0_, x1_, y1_ };
    status = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &(boundaries));
    status = H5Dclose(dataset);
    status = H5Sclose(dataspace);
    return status;
}
int write_data_to_hdf5_file (int nx, int ny, double **data, hid_t file)
{
    hid_t   dataset;              /* file and dataset handles */
    hid_t   datatype, dataspace;  /* handles */
    hsize_t dimsf[2];             /* dataset dimensions */
    herr_t  status;

    dimsf[0] = nx;
    dimsf[1] = ny;
    dataspace = H5Screate_simple (RANK, dimsf, NULL);

    /*
     * Define datatype for the data in the file.
     * We will store little endian DOUBLE numbers.
     */
    datatype = H5Tcopy (H5T_NATIVE_DOUBLE);
    status = H5Tset_order (datatype, H5T_ORDER_LE);

    /*
     * Create a new dataset within the file using defined dataspace and
     * datatype and default dataset creation properties.
     */
    dataset = H5Dcreate (file, "Temperature", datatype, dataspace, H5P_DEFAULT);

    /*
     * Write the data to the dataset using default transfer properties.
     * Pass the first element of the underlying block rather than the
     * row-pointer array itself (this assumes `data` was allocated as one
     * contiguous nx*ny block addressed through row pointers).
     */
    status = H5Dwrite (dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &data[0][0]);

    /*
     * Close/release resources.
     */
    H5Sclose (dataspace);
    H5Tclose (datatype);
    H5Dclose (dataset);

    return 0;
}
/*-------------------------------------------------------------------------
 * Function:    create_dataset
 *
 * Purpose:     Creates a square dataset with square chunks, registers a
 *              stupid compress/uncompress pair for counting I/O, and
 *              initializes the dataset. The chunk size is in bytes, the
 *              dataset size is in terms of chunks.
 *
 * Return:      void
 *
 * Programmer:  Robb Matzke
 *              Thursday, May 14, 1998
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
static void
create_dataset (void)
{
    hid_t       file, space, dcpl, dset;
    hsize_t     size[2];
    signed char *buf;

    /* The file */
    file = H5Fcreate (FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_g);

    /* The data space */
    size[0] = size[1] = DS_SIZE * CH_SIZE;
    space = H5Screate_simple (2, size, size);

    /* The storage layout and compression */
    dcpl = H5Pcreate (H5P_DATASET_CREATE);
    size[0] = size[1] = CH_SIZE;
    H5Pset_chunk (dcpl, 2, size);
#ifdef H5_WANT_H5_V1_4_COMPAT
    H5Zregister (FILTER_COUNTER, "counter", counter);
#else /* H5_WANT_H5_V1_4_COMPAT */
    H5Zregister (H5Z_COUNTER);
#endif /* H5_WANT_H5_V1_4_COMPAT */
    H5Pset_filter (dcpl, FILTER_COUNTER, 0, 0, NULL);

    /* The dataset */
    dset = H5Dcreate (file, "dset", H5T_NATIVE_SCHAR, space, dcpl);
    assert (dset >= 0);

    /* The data */
    buf = calloc (1, SQUARE (DS_SIZE*CH_SIZE));
    H5Dwrite (dset, H5T_NATIVE_SCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);
    free (buf);

    /* Close */
    H5Dclose (dset);
    H5Sclose (space);
    H5Pclose (dcpl);
    H5Fclose (file);
}
/** Create the DS for uniform data. */
void NSDFWriter::createUniformMap()
{
    // Create the container for all the DS
    // TODO: make a common function like `mkdir -p` to avoid repeating this
    htri_t exists;
    herr_t status;
    hid_t uniformMapContainer = require_group(filehandle_, MAPUNIFORMSRC);

    // Create the DS themselves
    for (map< string, vector < unsigned int > >::iterator ii = classFieldToSrcIndex_.begin();
         ii != classFieldToSrcIndex_.end(); ++ii){
        vector < string > pathTokens;
        tokenize(ii->first, "/", pathTokens);
        string className = pathTokens[0];
        string fieldName = pathTokens[1];
        hid_t container = require_group(uniformMapContainer, className);
        char ** sources = (char **)calloc(ii->second.size(), sizeof(char*));
        for (unsigned int jj = 0; jj < ii->second.size(); ++jj){
            sources[jj] = (char*)calloc(src_[ii->second[jj]].path().length()+1, sizeof(char));
            strcpy(sources[jj], src_[ii->second[jj]].path().c_str());
        }
        hid_t ds = createStringDataset(container, fieldName,
                                       (hsize_t)ii->second.size(), (hsize_t)ii->second.size());
        hid_t memtype = H5Tcopy(H5T_C_S1);
        status = H5Tset_size(memtype, H5T_VARIABLE);
        assert(status >= 0);
        status = H5Dwrite(ds, memtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, sources);
#ifndef NDEBUG
        cout << "Write dataset: status=" << status << endl;
#endif
        assert(status >= 0);
        for (unsigned int jj = 0; jj < ii->second.size(); ++jj){
            free(sources[jj]);
        }
        free(sources);
        status = H5DSset_scale(ds, "source");
        status = H5DSattach_scale(classFieldToUniform_[ii->first], ds, 0);
        status = H5DSset_label(classFieldToUniform_[ii->first], 0, "source");
        status = H5Dclose(ds);
        status = H5Tclose(memtype);
    }
}
int FTI_WriteElements(hid_t dataspace, hid_t dataType, hid_t dataset,
                      hsize_t *count, hsize_t *offset, hsize_t ranks, void *ptr)
{
    char str[FTI_BUFS];
    herr_t status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, offset, NULL, count, NULL);

    hsize_t *dims_in = (hsize_t*) malloc(sizeof(hsize_t) * ranks);
    memcpy(dims_in, count, ranks * sizeof(hsize_t));
    hid_t memspace = H5Screate_simple(ranks, dims_in, NULL);

    hsize_t *offset_in = (hsize_t*) calloc(ranks, sizeof(hsize_t));
    status = H5Sselect_hyperslab(memspace, H5S_SELECT_SET, offset_in, NULL, count, NULL);
    status = H5Dwrite(dataset, dataType, memspace, dataspace, H5P_DEFAULT, ptr);
    if (status < 0) {
        free(offset_in);
        free(dims_in);
        free(offset);
        free(count);
        sprintf(str, "Dataset could not be written");
        FTI_Print(str, FTI_EROR);
        return FTI_NSCS;
    }

    free(offset_in);
    free(dims_in);
    return FTI_SCES;
}
/**
 * \brief Writes an entire set of region definitions to the file overwriting
 *        any existing regions data.
 * \param[in] file_id  The HDF file reference id
 * \param[in] n        Number of regions to write
 * \param[in] regions  The regions data in a contiguous array
 * \returns Status code
 * \retval 1 Failure
 * \retval 0 Success
 */
int ch5m_regn_set_all(hid_t file_id, int n, int *regions) {
  hid_t group_id = ch5_gnrc_open_or_create_group(file_id, CH5_REGN_GROUP_NAME);
  if (group_id < 0) return 1;

  hid_t dset_id = ch5_gnrc_open_or_create_chunked_dset(group_id,
    CH5_REGN_DSET_NAME, H5T_STD_U32LE, 2,
    (hsize_t[2]){ n,                   2 },
    (hsize_t[2]){ H5S_UNLIMITED,       2 },
    (hsize_t[2]){ CH5_REGN_CHUNK_SIZE, 2 },
    1);
  if (dset_id < 0) {
    H5Gclose(group_id);
    return 1;
  }

  herr_t status = H5Dwrite(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
    H5P_DEFAULT, regions);

  H5Dclose(dset_id);
  H5Gclose(group_id);

  return (status < 0);
}
/** * \brief Writes an entire set of points to the file overwriting any existing * points data. * \param[in] file_id The HDF file reference id * \param[in] n Number of points to write * \param[in] points The points data in a contiguous array * \returns Status code * \retval 1 Failure * \retval 0 Success */ int ch5m_pnts_set_all(hid_t file_id, int n, float *points) { hid_t group_id = ch5_gnrc_open_or_create_group(file_id, CH5_POINTS_GROUP_NAME); if (group_id < 0) return 1; hid_t dset_id = ch5_gnrc_open_or_create_chunked_dset(group_id, CH5_POINTS_DSET_NAME, H5T_IEEE_F32LE, 2, (hsize_t[2]){ n, 3 }, (hsize_t[2]){ H5S_UNLIMITED, 3 }, (hsize_t[2]){ CH5_POINTS_CHUNK_SIZE, 3 }, 1); if (dset_id < 0) { H5Gclose(group_id); return 1; } herr_t status = H5Dwrite(dset_id, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points); H5Dclose(dset_id); return (status < 0); }
int cxi_write_dataset_slice(CXI_Dataset * dataset,unsigned int slice, void * data, hid_t datatype){ if(!dataset){ return -1; } if(!data){ return -1; } if(slice >= dataset->dimensions[0]){ return -1; } if(dataset->handle < 0){ return -1; } hid_t s = H5Dget_space(dataset->handle); if(s < 0){ return -1; } hsize_t *start; start = malloc(sizeof(hsize_t)*dataset->dimension_count); hsize_t *count; count = malloc(sizeof(hsize_t)*dataset->dimension_count); for(int i =0;i<dataset->dimension_count;i++){ start[i] = 0; count[i] = dataset->dimensions[i]; } start[0] = slice; count[0] = 1; hid_t memspace = H5Screate_simple (dataset->dimension_count, count, NULL); H5Sselect_hyperslab(s, H5S_SELECT_SET, start, NULL, count, NULL); H5Dwrite(dataset->handle,datatype,memspace,s,H5P_DEFAULT,data); H5Sclose(s); free(start); free(count); return 0; }
herr_t H5ARRAYwrite_records( hid_t dataset_id, hid_t type_id, const int rank, hsize_t *start, hsize_t *step, hsize_t *count, const void *data ) { hid_t space_id; hid_t mem_space_id; /* Create a simple memory data space */ if ( (mem_space_id = H5Screate_simple( rank, count, NULL )) < 0 ) return -3; /* Get the file data space */ if ( (space_id = H5Dget_space( dataset_id )) < 0 ) return -4; /* Define a hyperslab in the dataset */ if ( rank != 0 && H5Sselect_hyperslab( space_id, H5S_SELECT_SET, start, step, count, NULL) < 0 ) return -5; if ( H5Dwrite( dataset_id, type_id, mem_space_id, space_id, H5P_DEFAULT, data ) < 0 ) return -6; /* Terminate access to the dataspace */ if ( H5Sclose( mem_space_id ) < 0 ) return -7; if ( H5Sclose( space_id ) < 0 ) return -8; /* Everything went smoothly */ return 0; }
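/* Hypothetical usage sketch for H5ARRAYwrite_records above; it is not part of the
 * original code. It assumes an already-open 1-D dataset of native ints with at least
 * start + count elements; the offsets and values are illustrative only. */
#include "hdf5.h"

static herr_t example_write_records(hid_t dataset_id)
{
    int     data[4]  = {10, 20, 30, 40};
    hsize_t start[1] = {8};   /* write at element offset 8 */
    hsize_t step[1]  = {1};   /* contiguous stride */
    hsize_t count[1] = {4};   /* four records */

    /* Returns 0 on success, a negative code otherwise. */
    return H5ARRAYwrite_records(dataset_id, H5T_NATIVE_INT, 1,
                                start, step, count, data);
}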
int luaC_h5_write_string(lua_State *L) { const char *dsetnm = luaL_checkstring(L, 1); const char *string = luaL_checkstring(L, 2); if (PresentFile < 0) { luaL_error(L, "no open file"); } hsize_t size = strlen(string); hid_t fspc = H5Screate(H5S_SCALAR); hid_t strn = H5Tcopy(H5T_C_S1); H5Tset_size(strn, size); hid_t dset = H5Dcreate(PresentFile, dsetnm, strn, fspc, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); H5Dwrite(dset, strn, fspc, fspc, H5P_DEFAULT, string); H5Dclose(dset); H5Tclose(strn); H5Sclose(fspc); return 0; }
herr_t ASDF_write_provenance_data(hid_t loc_id, const char *provenance_string)
{
  hsize_t dims[1] = {strlen(provenance_string)};
  hsize_t maxdims[1] = {H5S_UNLIMITED};
  hid_t array_id, group_id, space_id, dcpl_id;

  CHK_H5(space_id = H5Screate_simple(1, dims, maxdims));
  CHK_H5(dcpl_id = H5Pcreate(H5P_DATASET_CREATE));
  CHK_H5(H5Pset_chunk(dcpl_id, 1, dims));

  CHK_H5(group_id = H5Gcreate(loc_id, "Provenance", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT));
  CHK_H5(array_id = H5Dcreate(group_id, "373da5fe_d424_4f44_9bca_4334d77ed10b", H5T_STD_I8LE,
                              space_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT));
  CHK_H5(H5Dwrite(array_id, H5T_STD_I8LE, H5S_ALL, H5S_ALL, H5P_DEFAULT, provenance_string));

  CHK_H5(H5Dclose(array_id));
  CHK_H5(H5Gclose(group_id));
  CHK_H5(H5Sclose(space_id));

  return 0; // Success
}
/* open twist# */ void F77_FUNC_(pwhdf_open_twist,PWHDF_OPEN_TWIST)(const int* ik, const double *xk, const int* nband, const int* nspin) { char twistname[16]; sprintf(twistname,"twist%i",(*ik)-1); if(h_twist>=0) H5Gclose(h_twist); h_twist = H5Gcreate(h_main,twistname,0); /* write twist_angle */ hsize_t dim=3; hid_t dataspace= H5Screate_simple(1, &dim, NULL); hid_t dataset= H5Dcreate(h_twist, "twist_angle", H5T_NATIVE_DOUBLE, dataspace, H5P_DEFAULT); hid_t ret = H5Dwrite(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,xk); H5Sclose(dataspace); H5Dclose(dataset); if((xk[0]*xk[0]+xk[1]*xk[1]+xk[2]*xk[2]) < 1e-12) is_gamma=1; else is_gamma=0; /* create band#/spin# groups so that H5Gopen can be used */ for(int ib=0; ib<*nband; ib++) { char bandname[16]; sprintf(bandname,"band%i",ib); hid_t h_band = H5Gcreate(h_twist,bandname,0); for(int ispin=0; ispin<*nspin; ispin++) { char spinname[16]; sprintf(spinname,"spin%i",ispin); hid_t h_spin = H5Gcreate(h_band,spinname,0); H5Gclose(h_spin); } H5Gclose(h_band); } }
template <typename T>
void BigArray<T>::setMatrix(unsigned long startingRow, unsigned long startingCol,
                            const T* M, const unsigned long M_rows, const unsigned long M_cols)
{
    if(startingRow >= this->numrows || startingCol >= this->numcols)
        throw gException(Exception_Index_Out_of_Bound);

    if(startingRow+M_rows > this->numrows || startingCol+M_cols > this->numcols)
        throw gException(Exception_Index_Out_of_Bound);

    std::string errorString("Error writing matrix data");

    hsize_t dims[2] = {M_cols, M_rows};
    hid_t memspace = H5Screate_simple(2, dims, NULL);
    CHECK_HDF5_ERR(memspace, errorString)

    hsize_t count[2] = {1, 1};
    hsize_t stride[2] = {1, 1};
    hsize_t block[2] = {dims[0], dims[1]};
    hsize_t offset[2] = {startingCol, startingRow};

    // Select hyperslab in the file.
    hid_t filespace = H5Dget_space(dset_id);
    CHECK_HDF5_ERR(filespace, errorString)

    herr_t status;
    status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, stride, count, block);
    CHECK_HDF5_ERR(status, errorString)

    status = H5Dwrite(dset_id, getHdfType<T>(), memspace, filespace, plist_id, M);
    CHECK_HDF5_ERR(status, errorString)

    status = H5Sclose(memspace);
    CHECK_HDF5_ERR(status, errorString)

    status = H5Sclose(filespace);
    CHECK_HDF5_ERR(status, errorString)
}
/*-------------------------------------------------------------------------
 * Function:    gent_named_vl
 *
 * Purpose:     Generate a variable length named datatype for a dataset in LOC_ID
 *
 *-------------------------------------------------------------------------
 */
static void gent_named_vl(hid_t loc_id)
{
    hid_t   sid, did, tid;
    hsize_t dims[1] = {2};
    hvl_t   buf[2];

    /* allocate and initialize VL dataset to write */
    buf[0].len = 1;
    buf[0].p = HDmalloc(1 * sizeof(int));
    ((int *)buf[0].p)[0] = 1;
    buf[1].len = 2;
    buf[1].p = HDmalloc(2 * sizeof(int));
    ((int *)buf[1].p)[0] = 2;
    ((int *)buf[1].p)[1] = 3;

    /* create dataspace */
    sid = H5Screate_simple(1, dims, NULL);

    /* create datatype */
    tid = H5Tvlen_create(H5T_NATIVE_INT);

    /* create named datatype */
    H5Tcommit2(loc_id, "vl", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    /* create dataset */
    did = H5Dcreate2(loc_id, DATASET_NAMED_VL, tid, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    /* write */
    H5Dwrite(did, tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);

    /* close */
    H5Dvlen_reclaim(tid, sid, H5P_DEFAULT, buf);
    H5Sclose(sid);
    H5Dclose(did);
    H5Tclose(tid);
}