Example #1
//--------------------------------------------------------------------------
// Function:	DSetCreatPropList::setFletcher32
///\brief	Enables the Fletcher32 checksum (EDC) filter for this property list.
///
///\exception	H5::PropListIException
// Programmer	Binh-Minh Ribler - 2000
//--------------------------------------------------------------------------
void DSetCreatPropList::setFletcher32() const
{
   herr_t ret_value = H5Pset_fletcher32(id);
   if( ret_value < 0 )
   {
      throw PropListIException("DSetCreatPropList::setFletcher32",
                "H5Pset_fletcher32 failed");
   }
}
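
The C++ wrapper above forwards to the C routine H5Pset_fletcher32(). Before enabling the filter it can be worth confirming that Fletcher32 is actually compiled into the library; the sketch below uses H5Zfilter_avail() for that check. It is illustrative only, and the helper name enable_fletcher32_if_available is not taken from any of the examples.

#include "hdf5.h"

/* Minimal sketch: enable the Fletcher32 filter only if it is available.
 * `dcpl` is assumed to be a dataset creation property list owned by the caller. */
static herr_t enable_fletcher32_if_available(hid_t dcpl)
{
    htri_t avail = H5Zfilter_avail(H5Z_FILTER_FLETCHER32);
    if (avail < 0)
        return -1;     /* the availability query itself failed */
    if (avail == 0)
        return 0;      /* filter not compiled in; skip it */
    return H5Pset_fletcher32(dcpl);
}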
Example #2
/*-------------------------------------------------------------------------
 * Function:	test_filters_endianess
 *
 * Purpose:	Create a dataset with the Fletcher32 filter.
 *	        This function is used to create the test file `test_filters.h5',
 *              which has a dataset with the "fletcher32" I/O filter.  This
 *              dataset will be used to verify the correct behavior of the
 *              library in the "dsets" test.
 *
 * Return:	Success:	0
 *
 *		Failure:	-1
 *
 * Programmer:  Pedro Vicente <*****@*****.**>
 *              Thursday, March 25, 2004
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_filters_endianess(void)
{
#if defined H5_HAVE_FILTER_FLETCHER32
    hid_t     fid = -1;              /* file ID */
    hid_t     dsid = -1;             /* dataset ID */
    hid_t     sid = -1;              /* dataspace ID */
    hid_t     dcpl = -1;             /* dataset creation property list ID */
    hsize_t   dims[1] = {20};        /* dataspace dimensions */
    hsize_t   chunk_dims[1] = {10};  /* chunk dimensions */
    int       buf[20];
    int       rank = 1;
    int       i;

    for(i = 0; i < 20; i++)
        buf[i] = 1;

    /* create a file using default properties */
    if((fid = H5Fcreate(TESTFILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto error;

    /* create a data space */
    if((sid = H5Screate_simple(rank, dims, NULL)) < 0) goto error;

    /* create dcpl  */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk(dcpl, rank, chunk_dims) < 0) goto error;

    if(H5Pset_fletcher32(dcpl) < 0) goto error;

    /* create a dataset */
    if((dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) goto error;

    if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) goto error;

    /* close */
    if(H5Pclose(dcpl) < 0) goto error;
    if(H5Dclose(dsid) < 0) goto error;
    if(H5Sclose(sid) < 0) goto error;
    if(H5Fclose(fid) < 0) goto error;

#endif /* H5_HAVE_FILTER_FLETCHER32 */
    return 0;

#if defined H5_HAVE_FILTER_FLETCHER32
error:
    H5E_BEGIN_TRY {
        H5Pclose(dcpl);
        H5Dclose(dsid);
        H5Sclose(sid);
        H5Fclose(fid);
    } H5E_END_TRY;
    return -1;
#endif /* H5_HAVE_FILTER_FLETCHER32 */
} /* end test_filters_endianess() */
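
For reference, a minimal sketch of how the file written above might be read back with checksum verification: H5Pset_edc_check() on a dataset transfer property list controls whether the stored Fletcher32 checksum is verified during H5Dread(). The file name "test_filters.h5" and dataset name "dset" follow the example; the helper name and buffer size are assumptions.

static herr_t read_with_edc_check(void)
{
    hid_t fid = -1, dsid = -1, dxpl = -1;
    int   buf[20];

    if ((fid = H5Fopen("test_filters.h5", H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) goto error;
    if ((dsid = H5Dopen2(fid, "dset", H5P_DEFAULT)) < 0) goto error;

    /* transfer property list with error detection (checksum verification) enabled */
    if ((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0) goto error;
    if (H5Pset_edc_check(dxpl, H5Z_ENABLE_EDC) < 0) goto error;

    /* the read fails if the stored Fletcher32 checksum does not match */
    if (H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, buf) < 0) goto error;

    if (H5Pclose(dxpl) < 0) goto error;
    if (H5Dclose(dsid) < 0) goto error;
    if (H5Fclose(fid) < 0) goto error;
    return 0;

error:
    H5E_BEGIN_TRY {
        H5Pclose(dxpl);
        H5Dclose(dsid);
        H5Fclose(fid);
    } H5E_END_TRY;
    return -1;
}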
Example #3
File: hdf5.c  Project: leobago/fti
int FTI_WriteHDF5Var(FTIT_dataset *FTI_DataVar)
{
    int j;
    hsize_t dimLength[32];
    char str[FTI_BUFS];
    int res;
    hid_t dcpl;

    for (j = 0; j < FTI_DataVar->rank; j++) {
        dimLength[j] = FTI_DataVar->dimLength[j];
    }

    dcpl = H5Pcreate (H5P_DATASET_CREATE);
    res = H5Pset_fletcher32 (dcpl);
    res = H5Pset_chunk (dcpl, FTI_DataVar->rank, dimLength);

    hid_t dataspace = H5Screate_simple( FTI_DataVar->rank, dimLength, NULL);
    hid_t dataset = H5Dcreate2 ( FTI_DataVar->h5group->h5groupID, FTI_DataVar->name,FTI_DataVar->type->h5datatype, dataspace,  H5P_DEFAULT, dcpl , H5P_DEFAULT);

    // If the data resides on the host (CPU) side,
    // just write it to the file and return.
#ifdef GPUSUPPORT    
    if ( !FTI_DataVar->isDevicePtr ){
#endif
        res = H5Dwrite(dataset,FTI_DataVar->type->h5datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, FTI_DataVar->ptr);  
        if (res < 0) {
            sprintf(str, "Dataset #%d could not be written", FTI_DataVar->id);
            FTI_Print(str, FTI_EROR);
            return FTI_NSCS;
        }

        res = H5Pclose (dcpl);
        if (res < 0) {
            sprintf(str, "Property list of dataset #%d could not be closed", FTI_DataVar->id);
            FTI_Print(str, FTI_EROR);
            return FTI_NSCS;
        }

        res = H5Dclose(dataset);
        if (res < 0) {
            sprintf(str, "Dataset #%d could not be closed", FTI_DataVar->id);
            FTI_Print(str, FTI_EROR);
            return FTI_NSCS;
        }
        res = H5Sclose(dataspace);
        if (res < 0) {
            sprintf(str, "Dataspace of dataset #%d could not be closed", FTI_DataVar->id);
            FTI_Print(str, FTI_EROR);
            return FTI_NSCS;
        }
        return FTI_SCES;
#ifdef GPUSUPPORT        
    }

    // This code is only executed in the GPU case.

    hsize_t *count = (hsize_t*) malloc (sizeof(hsize_t)*FTI_DataVar->rank); 
    hsize_t *offset= (hsize_t*) calloc (FTI_DataVar->rank,sizeof(hsize_t)); 

    if ( !count || !offset ){
        free(count);
        free(offset);
        sprintf(str, "Could not allocate count and offset arrays");
        FTI_Print(str, FTI_EROR);
        return FTI_NSCS;
    }


    hsize_t separator;
    hsize_t fetchBytes = FTI_getHostBuffSize();
    fetchBytes = FTI_calculateCountDim(FTI_DataVar->eleSize, fetchBytes, count, FTI_DataVar->rank, dimLength, &separator);
    sprintf(str, "GPU-Device Message: will fetch %llu bytes per stream request", (unsigned long long) fetchBytes);
    FTI_Print(str,FTI_DBUG);


    FTIT_data_prefetch prefetcher;
    prefetcher.fetchSize = fetchBytes;
    prefetcher.totalBytesToFetch = FTI_DataVar->size;
    prefetcher.isDevice = FTI_DataVar->isDevicePtr;
    prefetcher.dptr = FTI_DataVar->devicePtr;
    size_t bytesToWrite;
    FTI_InitPrefetcher(&prefetcher);
    unsigned char *basePtr = NULL;


    if ( FTI_Try(FTI_getPrefetchedData(&prefetcher, &bytesToWrite, &basePtr), "Fetch next memory block from GPU to write to HDF5") !=  FTI_SCES){
        free(offset);
        free(count);
        return FTI_NSCS;
    }

    while( basePtr  ){
        res = FTI_WriteElements( dataspace, FTI_DataVar->type->h5datatype, dataset, count, offset, FTI_DataVar->rank , basePtr);
        if (res != FTI_SCES ) {
            free(offset);
            free(count);
            sprintf(str, "Dataset #%d could not be written", FTI_DataVar->id);
            FTI_Print(str, FTI_EROR);
            return FTI_NSCS;
        }
        FTI_AdvanceOffset(separator, offset, count, dimLength, FTI_DataVar->rank);

        if ( FTI_Try(FTI_getPrefetchedData(&prefetcher, &bytesToWrite, &basePtr), 
                    "Fetch next memory block from GPU to write to HDF5") !=  FTI_SCES){
            free(offset);
            free(count);
            return FTI_NSCS;
        }

    }


    res = H5Dclose(dataset);
    if (res < 0) {
        free(offset);
        free(count);
        sprintf(str, "Dataset #%d could not be closed", FTI_DataVar->id);
        FTI_Print(str, FTI_EROR);
        return FTI_NSCS;
    }
    res = H5Sclose(dataspace);
    if (res < 0) {
        free(offset);
        free(count);
        sprintf(str, "Dataspace of dataset #%d could not be closed", FTI_DataVar->id);
        FTI_Print(str, FTI_EROR);
        return FTI_NSCS;
    }
    free(offset);
    free(count);
    return FTI_SCES;
#endif
}
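
Note that the routine above does not check the return values of H5Pcreate(), H5Pset_fletcher32(), or H5Pset_chunk(). A small, hypothetical helper (not part of FTI) that builds the same kind of checksummed, chunked dataset creation property list with full error checking could look like this:

/* Hypothetical helper: create a chunked DCPL with the Fletcher32 filter,
 * checking every return value.  Returns the property list identifier on
 * success or a negative value on failure. */
static hid_t make_checksummed_dcpl(int rank, const hsize_t *chunk_dims)
{
    hid_t dcpl = H5Pcreate(H5P_DATASET_CREATE);
    if (dcpl < 0)
        return -1;
    if (H5Pset_chunk(dcpl, rank, chunk_dims) < 0 ||
        H5Pset_fletcher32(dcpl) < 0) {
        H5Pclose(dcpl);
        return -1;
    }
    return dcpl;
}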
Example #4
int apply_filters(const char* name,    /* object name from traverse list */
                  int rank,            /* rank of dataset */
                  hsize_t *dims,       /* dimensions of dataset */
                  size_t msize,        /* size of type */
                  hid_t dcpl_id,       /* dataset creation property list */
                  pack_opt_t *options, /* repack options */
                  int *has_filter)     /* (OUT) object NAME has a filter */


{
    int          nfilters;       /* number of filters in DCPL */
    hsize_t      chsize[64];     /* chunk size in elements */
    H5D_layout_t layout;
    int          i;
    pack_info_t  obj;

    *has_filter = 0;

    if (rank==0) /* scalar dataset, do not apply */
        return 0;

   /*-------------------------------------------------------------------------
    * initialize the assignment object
    *-------------------------------------------------------------------------
    */
    init_packobject(&obj);

   /*-------------------------------------------------------------------------
    * find options
    *-------------------------------------------------------------------------
    */
    if (aux_assign_obj(name,options,&obj)==0)
        return 0;

    /* get information about input filters */
    if ((nfilters = H5Pget_nfilters(dcpl_id))<0)
        return -1;

   /*-------------------------------------------------------------------------
    * check if we have filters in the pipeline
    * we want to replace them with the input filters
    * only remove if we are inserting new ones
    *-------------------------------------------------------------------------
    */
    if (nfilters && obj.nfilters )
    {
        *has_filter = 1;
        if (H5Premove_filter(dcpl_id,H5Z_FILTER_ALL)<0)
            return -1;
    }

   /*-------------------------------------------------------------------------
    * check if there is an existing chunk;
    * read it only if no layout was requested
    *-------------------------------------------------------------------------
    */
    if (obj.layout == -1 )
    {
        if ((layout = H5Pget_layout(dcpl_id))<0)
            return -1;

        if (layout == H5D_CHUNKED)
        {
            if ((rank = H5Pget_chunk(dcpl_id,NELMTS(chsize),chsize/*out*/))<0)
                return -1;
            obj.layout = H5D_CHUNKED;
            obj.chunk.rank = rank;
            for ( i = 0; i < rank; i++)
                obj.chunk.chunk_lengths[i] = chsize[i];
        }
    }

    /*-------------------------------------------------------------------------
    * the type of filter and additional parameter
    * type can be one of the filters
    * H5Z_FILTER_NONE        0 , uncompress if compressed
    * H5Z_FILTER_DEFLATE     1 , deflation like gzip
    * H5Z_FILTER_SHUFFLE     2 , shuffle the data
    * H5Z_FILTER_FLETCHER32  3 , fletcher32 checksum of EDC
    * H5Z_FILTER_SZIP        4 , szip compression
    * H5Z_FILTER_NBIT        5 , nbit compression
    * H5Z_FILTER_SCALEOFFSET 6 , scaleoffset compression
    *-------------------------------------------------------------------------
    */

    if (obj.nfilters)
    {

   /*-------------------------------------------------------------------------
    * filters require CHUNK layout; if we do not have one define a default
    *-------------------------------------------------------------------------
    */
        if (obj.layout==-1)
        {

            /* stripmine info */
            hsize_t sm_size[H5S_MAX_RANK]; /*stripmine size */
            hsize_t sm_nbytes;             /*bytes per stripmine */

            obj.chunk.rank = rank;

            /*
            * determine the strip mine size. The strip mine is
            * a hyperslab whose size is manageable.
            */



            sm_nbytes = msize;
            for ( i = rank; i > 0; --i)
            {
                hsize_t size = H5TOOLS_BUFSIZE / sm_nbytes;
                if ( size == 0) /* datum size > H5TOOLS_BUFSIZE */
                    size = 1;
                sm_size[i - 1] = MIN(dims[i - 1], size);
                sm_nbytes *= sm_size[i - 1];
                assert(sm_nbytes > 0);

            }

            for ( i = 0; i < rank; i++)
            {
                obj.chunk.chunk_lengths[i] = sm_size[i];
            }

        }

        for ( i=0; i<obj.nfilters; i++)
        {
            switch (obj.filter[i].filtn)
            {
            default:
                break;

           /*-------------------------------------------------------------------------
            * H5Z_FILTER_DEFLATE       1 , deflation like gzip
            *-------------------------------------------------------------------------
            */
            case H5Z_FILTER_DEFLATE:
                {
                    unsigned     aggression;     /* the deflate level */

                    aggression = obj.filter[i].cd_values[0];
                    /* set up for deflated data */
                    if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                        return -1;
                    if(H5Pset_deflate(dcpl_id,aggression)<0)
                        return -1;
                }
                break;

           /*-------------------------------------------------------------------------
            * H5Z_FILTER_SZIP       4 , szip compression
            *-------------------------------------------------------------------------
            */
            case H5Z_FILTER_SZIP:
                {
                    unsigned  options_mask;
                    unsigned  pixels_per_block;

                    options_mask     = obj.filter[i].cd_values[0];
                    pixels_per_block = obj.filter[i].cd_values[1];

                    /* set up for szip data */
                    if(H5Pset_chunk(dcpl_id,obj.chunk.rank,obj.chunk.chunk_lengths)<0)
                        return -1;
                    if (H5Pset_szip(dcpl_id,options_mask,pixels_per_block)<0)
                        return -1;

                }
                break;

           /*-------------------------------------------------------------------------
            * H5Z_FILTER_SHUFFLE    2 , shuffle the data
            *-------------------------------------------------------------------------
            */
            case H5Z_FILTER_SHUFFLE:
                if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                    return -1;
                if (H5Pset_shuffle(dcpl_id)<0)
                    return -1;
                break;

           /*-------------------------------------------------------------------------
            * H5Z_FILTER_FLETCHER32 3 , fletcher32 checksum of EDC
            *-------------------------------------------------------------------------
            */
            case H5Z_FILTER_FLETCHER32:
                if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                    return -1;
                if (H5Pset_fletcher32(dcpl_id)<0)
                    return -1;
                break;
           /*-------------------------------------------------------------------------
            * H5Z_FILTER_NBIT        5 , nbit compression
            *-------------------------------------------------------------------------
            */
            case H5Z_FILTER_NBIT:
                if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                    return -1;
                if (H5Pset_nbit(dcpl_id)<0)
                    return -1;
                break;
            /*-------------------------------------------------------------------------
             * H5Z_FILTER_SCALEOFFSET 6 , scale+offset compression
             *-------------------------------------------------------------------------
             */

            case H5Z_FILTER_SCALEOFFSET:
                {
                    H5Z_SO_scale_type_t scale_type;
                    int                 scale_factor;

                    scale_type   = (H5Z_SO_scale_type_t)obj.filter[i].cd_values[0];
                    scale_factor = obj.filter[i].cd_values[1];

                    if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                        return -1;
                    if (H5Pset_scaleoffset(dcpl_id,scale_type,scale_factor)<0)
                        return -1;
                }
                break;
            } /* switch */
        }/*i*/

    } /* obj.nfilters */

    /*-------------------------------------------------------------------------
    * layout
    *-------------------------------------------------------------------------
    */

    if (obj.layout>=0)
    {
        /* a layout was defined */
        if (H5Pset_layout(dcpl_id, obj.layout)<0)
            return -1;

        if (H5D_CHUNKED == obj.layout)
        {
            if(H5Pset_chunk(dcpl_id, obj.chunk.rank, obj.chunk.chunk_lengths)<0)
                return -1;
        }
        else if (H5D_COMPACT == obj.layout)
        {
            if (H5Pset_alloc_time(dcpl_id, H5D_ALLOC_TIME_EARLY)<0)
                return -1;
        }
        /* remove filters for the H5D_CONTIGUOUS case */
        else if (H5D_CONTIGUOUS == obj.layout)
        {
            if (H5Premove_filter(dcpl_id,H5Z_FILTER_ALL)<0)
                return -1;
        }

    }

 return 0;
}
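
apply_filters() rewrites the output filter pipeline. To merely inspect an existing pipeline, for instance to confirm whether Fletcher32 is already applied, H5Pget_nfilters() can be paired with H5Pget_filter2(), passing NULL for the output arguments that are not needed. The helper name below is illustrative; this is a sketch, not part of h5repack.

/* Sketch: return 1 if the DCPL already carries the Fletcher32 filter,
 * 0 if not, and -1 on error. */
static int dcpl_has_fletcher32(hid_t dcpl_id)
{
    int nfilters = H5Pget_nfilters(dcpl_id);
    int i;

    if (nfilters < 0)
        return -1;

    for (i = 0; i < nfilters; i++) {
        /* only the filter identifier is of interest here */
        H5Z_filter_t filtn = H5Pget_filter2(dcpl_id, (unsigned)i,
                                            NULL, NULL, NULL, (size_t)0, NULL, NULL);
        if (filtn < 0)
            return -1;
        if (filtn == H5Z_FILTER_FLETCHER32)
            return 1;
    }
    return 0;
}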
Example #5
File: H5ARRAY.c  Project: 87/PyTables
herr_t H5ARRAYmake( hid_t loc_id,
		    const char *dset_name,
		    const char *obversion,
		    const int rank,
		    const hsize_t *dims,
		    int   extdim,
		    hid_t type_id,
		    hsize_t *dims_chunk,
		    void  *fill_data,
		    int   compress,
		    char  *complib,
		    int   shuffle,
		    int   fletcher32,
		    const void *data)
{

 hid_t   dataset_id, space_id;
 hsize_t *maxdims = NULL;
 hid_t   plist_id = 0;
 unsigned int cd_values[6];
 int     chunked = 0;
 int     i;

 /* Check whether the array has to be chunked or not */
 if (dims_chunk) {
   chunked = 1;
 }

 if(chunked) {
   maxdims = malloc(rank*sizeof(hsize_t));
   if(!maxdims) return -1;

   for(i=0;i<rank;i++) {
     if (i == extdim) {
       maxdims[i] = H5S_UNLIMITED;
     }
     else {
       maxdims[i] = dims[i] < dims_chunk[i] ? dims_chunk[i] : dims[i];
     }
   }
 }

 /* Create the data space for the dataset. */
 if ( (space_id = H5Screate_simple( rank, dims, maxdims )) < 0 )
   return -1;

 if (chunked) {
   /* Modify dataset creation properties, i.e. enable chunking  */
   plist_id = H5Pcreate (H5P_DATASET_CREATE);
   if ( H5Pset_chunk ( plist_id, rank, dims_chunk ) < 0 )
     return -1;

   /* Set the fill value using a struct as the data type. */
   if (fill_data) {
       if ( H5Pset_fill_value( plist_id, type_id, fill_data ) < 0 )
	 return -1;
   }
   else {
     if ( H5Pset_fill_time(plist_id, H5D_FILL_TIME_ALLOC) < 0 )
       return -1;
   }

   /*
      Dataset creation property list is modified to use filters
   */

   /* Fletcher must be first */
   if (fletcher32) {
     if ( H5Pset_fletcher32( plist_id) < 0 )
       return -1;
   }
   /* Then shuffle (not if blosc is activated) */
   if ((shuffle) && (strcmp(complib, "blosc") != 0)) {
     if ( H5Pset_shuffle( plist_id) < 0 )
       return -1;
   }
   /* Finally compression */
   if (compress) {
     cd_values[0] = compress;
     cd_values[1] = (int)(atof(obversion) * 10);
     if (extdim <0)
       cd_values[2] = CArray;
     else
       cd_values[2] = EArray;

     /* The default compressor in HDF5 (zlib) */
     if (strcmp(complib, "zlib") == 0) {
       if ( H5Pset_deflate( plist_id, compress) < 0 )
	 return -1;
     }
     /* The Blosc compressor does accept parameters */
     else if (strcmp(complib, "blosc") == 0) {
       cd_values[4] = compress;
       cd_values[5] = shuffle;
       if ( H5Pset_filter( plist_id, FILTER_BLOSC, H5Z_FLAG_OPTIONAL, 6, cd_values) < 0 )
	 return -1;
     }
     /* The LZO compressor does accept parameters */
     else if (strcmp(complib, "lzo") == 0) {
       if ( H5Pset_filter( plist_id, FILTER_LZO, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
	 return -1;
     }
     /* The bzip2 compressor does accept parameters */
     else if (strcmp(complib, "bzip2") == 0) {
       if ( H5Pset_filter( plist_id, FILTER_BZIP2, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
	 return -1;
     }
     else {
       /* Compression library not supported */
       fprintf(stderr, "Compression library not supported\n");
       return -1;
     }
   }

   /* Create the (chunked) dataset */
   if ((dataset_id = H5Dcreate(loc_id, dset_name, type_id,
			       space_id, plist_id )) < 0 )
     goto out;
 }
 else {  			/* Not chunked case */
   /* Create the dataset. */
   if ((dataset_id = H5Dcreate(loc_id, dset_name, type_id,
			       space_id, H5P_DEFAULT )) < 0 )
     goto out;
 }

 /* Write the dataset only if there is data to write */

 if (data)
 {
   if ( H5Dwrite( dataset_id, type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, data ) < 0 )
   goto out;
 }

 /* Terminate access to the data space. */
 if ( H5Sclose( space_id ) < 0 )
  return -1;

 /* End access to the property list */
 if (plist_id)
   if ( H5Pclose( plist_id ) < 0 )
     goto out;

 /* Release resources */
 if (maxdims)
   free(maxdims);

 return dataset_id;

out:
 H5Dclose( dataset_id );
 H5Sclose( space_id );
 if (maxdims)
   free(maxdims);
 if (dims_chunk)
   free(dims_chunk);
 return -1;

}
Example #6
/*+++++++++++++++++++++++++
.IDENTifer   PYTABLE_make_array
.PURPOSE     create extensible HDF5 dataset
.INPUT/OUTPUT
  call as    stat = PYTABLE_make_array( locID, dset_name, title, rank, dims,
			                extdim, typeID, dims_chunk, fill_data,
			                compress, shuffle, fletcher32, buff );
     input:
            hid_t locID           :  HDF5 identifier of file or group
	    char *dset_name       :  name of dataset
	    char *title           :
	    int rank              :  number of dimensions
	    hsize_t *dims         :  size of each dimension
	    int extdim            :  index of the extendable dimension
	    hid_t typeID          :  data type (HDF5 identifier)
	    hsize_t *dims_chunk   :  chunk sizes
	    void *fill_data       :  Fill value for data
	    unsigned int compress :  compression level (zero for no compression)
	    bool shuffle          :  shuffle data for better compression
	    bool fletcher32       :  add a Fletcher32 checksum filter
	    void *buffer          :  buffer with data to write (or NULL)
	    
.RETURNS     A negative value is returned on failure. 
.COMMENTS    none
-------------------------*/
herr_t PYTABLE_make_array( hid_t locID, const char *dset_name, 
			   const char *title, const int rank, 
			   const hsize_t *dims, int extdim, hid_t typeID,
			   const hsize_t *dims_chunk, void *fill_data,
			   unsigned int compress, bool shuffle, 
			   bool fletcher32, const void *buffer )
{
     register int ni;

     hid_t   dataID = -1, spaceID = -1;
     herr_t  stat;

/* check if the array has to be chunked or not */
     if ( dims_chunk != NULL ) {
	  hid_t   plistID;

	  hsize_t *maxdims = (hsize_t *) malloc( rank * sizeof(hsize_t) );
	  if ( maxdims == NULL )
	       NADC_GOTO_ERROR( NADC_ERR_ALLOC, "maxdims" );

	  for ( ni = 0; ni < rank; ni++ ) {
	       if ( ni == extdim )
		    maxdims[ni] = H5S_UNLIMITED;
	       else
		    maxdims[ni] = 
			 dims[ni] < dims_chunk[ni] ? dims_chunk[ni] : dims[ni];
	  }
	  spaceID = H5Screate_simple( rank, dims, maxdims );
	  free( maxdims );
	  if ( spaceID < 0 ) NADC_GOTO_ERROR( NADC_ERR_HDF_SPACE, "" );

	  /* Modify dataset creation properties, i.e. enable chunking  */
	  plistID = H5Pcreate( H5P_DATASET_CREATE );
	  if ( H5Pset_chunk( plistID, rank, dims_chunk ) < 0 ) goto done;

          /* set the fill value using a struct as the data type */
	  if ( fill_data != NULL 
	       && H5Pset_fill_value( plistID, typeID, fill_data ) < 0 )
	       goto done;

          /* dataset creation property list is modified to use */
          /* fletcher must be first */
	  if ( fletcher32 ) {
	       if ( H5Pset_fletcher32( plistID ) < 0 ) goto done;
	  }
          /* then shuffle */
	  if ( shuffle ) {
	       if ( H5Pset_shuffle( plistID ) < 0 ) goto done;
	  }
          /* finally compression */
	  if ( compress > 0 ) {
	       if ( H5Pset_deflate( plistID, compress ) < 0 ) goto done;
	  }
          /* create the (chunked) dataset */
	  dataID = H5Dcreate( locID, dset_name, typeID, spaceID, 
			      H5P_DEFAULT, plistID, H5P_DEFAULT );
	  if ( dataID < 0 ) 
	       NADC_GOTO_ERROR( NADC_ERR_HDF_DATA, dset_name );

          /* end access to the property list */
	  if ( H5Pclose( plistID ) < 0 ) goto done;
     } else {
	  spaceID = H5Screate_simple( rank, dims, NULL );
	  if ( spaceID < 0 ) return -1;

          /* create the dataset (not chunked) */
	  dataID = H5Dcreate( locID, dset_name, typeID, spaceID, 
			      H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT );
	  if ( dataID < 0 ) 
	       NADC_GOTO_ERROR( NADC_ERR_HDF_DATA, dset_name );
     }
/*
 * write the data
 */
     stat = H5Dwrite( dataID, typeID, H5S_ALL, H5S_ALL, H5P_DEFAULT, buffer );
     if ( stat < 0 ) NADC_GOTO_ERROR( NADC_ERR_HDF_WR, "" );

     (void) H5Dclose( dataID );
     (void) H5Sclose( spaceID );
/*
 * Set the conforming array attributes
 *
 * attach the CLASS attribute
 */
     (void) H5LTset_attribute_string( locID, dset_name, "CLASS", 
				      PY_ARRAY_CLASS );

/* attach the EXTDIM attribute in case of enlargeable arrays */
     (void) H5LTset_attribute_int( locID, dset_name, "EXTDIM", &extdim, 1 );

/* attach the FLAVOR attribute */
     (void) H5LTset_attribute_string( locID, dset_name, "FLAVOR", 
				      PY_ARRAY_FLAVOR );
/* attach the VERSION attribute */
     (void) H5LTset_attribute_string( locID, dset_name, "VERSION", 
				      PY_ARRAY_VERSION );

/* attach the TITLE attribute */
     (void) H5LTset_attribute_string( locID, dset_name, "TITLE", title );

     return 0;
 done:
     if ( dataID > 0 ) (void) H5Dclose( dataID );
     if ( spaceID > 0 ) (void) H5Sclose( spaceID );
     return -1;
}
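
A hedged usage sketch based only on the prototype documented above: the dataset name, dimensions, chunk shape, and compression level are invented for illustration, and <stdbool.h> is assumed, as in the original file.

/* Create a 2-D float array, extendable along dimension 0, chunked 64x128,
 * with Fletcher32 + shuffle + deflate level 6.  `locID` is assumed to be an
 * open HDF5 file or group identifier. */
static herr_t write_example_array(hid_t locID, const float *buff)
{
    const hsize_t dims[2]       = { 256, 128 };
    const hsize_t dims_chunk[2] = { 64, 128 };

    return PYTABLE_make_array(locID, "example", "example array",
                              2, dims, 0 /* extdim */, H5T_NATIVE_FLOAT,
                              dims_chunk, NULL /* fill_data */,
                              6 /* compress */, true /* shuffle */,
                              true /* fletcher32 */, buff);
}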
Example #7
File: H5VLARRAY.c  Project: r0k3/PyTables
herr_t H5VLARRAYmake( hid_t loc_id,
                      const char *dset_name,
                      const char *obversion,
                      const int rank,
                      const hsize_t *dims,
                      hid_t type_id,
                      hsize_t chunk_size,
                      void  *fill_data,
                      int   compress,
                      char  *complib,
                      int   shuffle,
                      int   fletcher32,
                      const void *data)
{

    hvl_t   vldata;
    hid_t   dataset_id, space_id, datatype, tid1;
    hsize_t dataset_dims[1];
    hsize_t maxdims[1] = { H5S_UNLIMITED };
    hsize_t dims_chunk[1];
    hid_t   plist_id;
    unsigned int cd_values[6];

    if (data)
        /* if there is data, one row will be filled initially */
        dataset_dims[0] = 1;
    else
        /* no data, so no rows in the dataset initially */
        dataset_dims[0] = 0;

    dims_chunk[0] = chunk_size;

    /* Fill the vldata structure with the data to write */
    /* This is currently not used */
    vldata.p = (void *)data;
    vldata.len = 1;        /* Only one array type to save */

    /* Create a VL datatype */
    if (rank == 0) {
        datatype = H5Tvlen_create(type_id);
    }
    else {
        tid1 = H5Tarray_create(type_id, rank, dims);
        datatype = H5Tvlen_create(tid1);
        H5Tclose( tid1 );   /* Release resources */
    }

    /* The dataspace */
    space_id = H5Screate_simple( 1, dataset_dims, maxdims );

    /* Modify dataset creation properties, i.e. enable chunking  */
    plist_id = H5Pcreate (H5P_DATASET_CREATE);
    if ( H5Pset_chunk ( plist_id, 1, dims_chunk ) < 0 )
        return -1;

    /*
       Dataset creation property list is modified to use
    */

    /* Fletcher must be first */
    if (fletcher32) {
        if ( H5Pset_fletcher32( plist_id) < 0 )
            return -1;
    }
    /* Then shuffle (blosc shuffles inplace) */
    if (shuffle && (strcmp(complib, "blosc") != 0)) {
        if ( H5Pset_shuffle( plist_id) < 0 )
            return -1;
    }
    /* Finally compression */
    if (compress) {
        cd_values[0] = compress;
        cd_values[1] = (int)(atof(obversion) * 10);
        cd_values[2] = VLArray;
        /* The default compressor in HDF5 (zlib) */
        if (strcmp(complib, "zlib") == 0) {
            if ( H5Pset_deflate( plist_id, compress) < 0 )
                return -1;
        }
        /* The Blosc compressor does accept parameters */
        else if (strcmp(complib, "blosc") == 0) {
            cd_values[4] = compress;
            cd_values[5] = shuffle;
            if ( H5Pset_filter( plist_id, FILTER_BLOSC, H5Z_FLAG_OPTIONAL, 6, cd_values) < 0 )
                return -1;
        }
        /* The LZO compressor does accept parameters */
        else if (strcmp(complib, "lzo") == 0) {
            if ( H5Pset_filter( plist_id, FILTER_LZO, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
                return -1;
        }
        /* The bzip2 compressor does accept parameters */
        else if (strcmp(complib, "bzip2") == 0) {
            if ( H5Pset_filter( plist_id, FILTER_BZIP2, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
                return -1;
        }
        else {
            /* Compression library not supported */
            fprintf(stderr, "Compression library not supported\n");
            return -1;
        }
    }

    /* Create the dataset. */
    if ((dataset_id = H5Dcreate(loc_id, dset_name, datatype, space_id,
                                H5P_DEFAULT, plist_id, H5P_DEFAULT )) < 0 )
        goto out;

    /* Write the dataset only if there is data to write */
    if (data)
        if ( H5Dwrite( dataset_id, datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, &vldata ) < 0 )
            goto out;

    /* Terminate access to the data space. */
    if ( H5Sclose( space_id ) < 0 )
        return -1;

    /* Release the datatype in the case that it is not an atomic type */
    if ( H5Tclose( datatype ) < 0 )
        return -1;

    /* End access to the property list */
    if ( H5Pclose( plist_id ) < 0 )
        goto out;

    return dataset_id;

out:

    return -1;

}
Example #8
herr_t H5TBOmake_table( const char *table_title,
                        hid_t loc_id,
                        const char *dset_name,
                        char *version,
                        const char *class_,
                        hid_t type_id,
                        hsize_t nrecords,
                        hsize_t chunk_size,
                        void  *fill_data,
                        int compress,
                        char *complib,
                        int shuffle,
                        int fletcher32,
                        const void *data )
{

 hid_t   dataset_id;
 hid_t   space_id;
 hid_t   plist_id;
 hsize_t dims[1];
 hsize_t dims_chunk[1];
 hsize_t maxdims[1] = { H5S_UNLIMITED };
 unsigned int cd_values[7];
 int     blosc_compcode;
 char    *blosc_compname = NULL;

 dims[0]       = nrecords;
 dims_chunk[0] = chunk_size;

 /* Create a simple data space with unlimited size */
 if ( (space_id = H5Screate_simple( 1, dims, maxdims )) < 0 )
  return -1;

 /* Modify dataset creation properties, i.e. enable chunking  */
 plist_id = H5Pcreate (H5P_DATASET_CREATE);
 if ( H5Pset_chunk ( plist_id, 1, dims_chunk ) < 0 )
  return -1;

 /* Set the fill value using a struct as the data type. */
 if ( fill_data)
   {
     if ( H5Pset_fill_value( plist_id, type_id, fill_data ) < 0 )
       return -1;
   }
 else {
   if ( H5Pset_fill_time(plist_id, H5D_FILL_TIME_ALLOC) < 0 )
     return -1;
 }

 /*
  Dataset creation property list is modified to use filters
  */

 /* Fletcher must be first */
 if (fletcher32) {
   if ( H5Pset_fletcher32( plist_id) < 0 )
     return -1;
 }
 /* Then shuffle (blosc shuffles inplace) */
 if ((shuffle && compress) && (strncmp(complib, "blosc", 5) != 0)) {
   if ( H5Pset_shuffle( plist_id) < 0 )
     return -1;
 }
 /* Finally compression */
 if ( compress )
 {
   cd_values[0] = compress;
   cd_values[1] = (int)(atof(version) * 10);
   cd_values[2] = Table;
   /* The default compressor in HDF5 (zlib) */
   if (strcmp(complib, "zlib") == 0) {
     if ( H5Pset_deflate( plist_id, compress) < 0 )
       return -1;
   }
   /* The Blosc compressor does accept parameters */
   else if (strcmp(complib, "blosc") == 0) {
     cd_values[4] = compress;
     cd_values[5] = shuffle;
     if ( H5Pset_filter( plist_id, FILTER_BLOSC, H5Z_FLAG_OPTIONAL, 6, cd_values) < 0 )
       return -1;
   }
   /* The Blosc compressor can use other compressors */
   else if (strncmp(complib, "blosc:", 6) == 0) {
     cd_values[4] = compress;
     cd_values[5] = shuffle;
     blosc_compname = complib + 6;
     blosc_compcode = blosc_compname_to_compcode(blosc_compname);
     cd_values[6] = blosc_compcode;
     if ( H5Pset_filter( plist_id, FILTER_BLOSC, H5Z_FLAG_OPTIONAL, 7, cd_values) < 0 )
       return -1;
   }
   /* The LZO compressor does accept parameters */
   else if (strcmp(complib, "lzo") == 0) {
     if ( H5Pset_filter( plist_id, FILTER_LZO, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
       return -1;
   }
   /* The bzip2 compressor does accept parameters */
   else if (strcmp(complib, "bzip2") == 0) {
     if ( H5Pset_filter( plist_id, FILTER_BZIP2, H5Z_FLAG_OPTIONAL, 3, cd_values) < 0 )
       return -1;
   }
   else {
     /* Compression library not supported */
     return -1;
   }

 }

 /* Create the dataset. */
 if ( (dataset_id = H5Dcreate( loc_id, dset_name, type_id, space_id,
                               H5P_DEFAULT, plist_id, H5P_DEFAULT )) < 0 )
  goto out;

 /* Only write if there is something to write */
 if ( data )
 {

 /* Write data to the dataset. */
 if ( H5Dwrite( dataset_id, type_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, data ) < 0 )
  goto out;

 }

 /* Terminate access to the data space. */
 if ( H5Sclose( space_id ) < 0 )
  goto out;

 /* End access to the property list */
 if ( H5Pclose( plist_id ) < 0 )
  goto out;

 /* Return the object unique ID for future references */
 return dataset_id;

/* error zone, gracefully close */
out:
 H5E_BEGIN_TRY {
  H5Dclose(dataset_id);
  H5Sclose(space_id);
  H5Pclose(plist_id);
 } H5E_END_TRY;
 return -1;

}