Example #1
/*
 * Generate an HDF5 file with groups, datasets, and attributes for testing the options:
 *	-l N (--links=N): Set the threshold for # of links when printing information for small groups.
 *	-m N (--dims=N): Set the threshold for the # of dimension sizes when printing information for small datasets.
 *	-a N (--numattrs=N): Set the threshold for the # of attributes when printing information for small # of attributes.
 */
static void
gen_threshold_file(const char *fname)
{
    hid_t fid;				/* File ID */
    hid_t sid0, sid1, sid2, sid3, sid4;	/* Dataspace IDs */
    hid_t did;				/* Dataset ID */
    hid_t attr_id;			/* Attribute ID */
    hid_t gid;				/* Group ID */
    hsize_t two_dims[] = {2, 5};	/* Dimension array */
    hsize_t one_dims[] = {6};		/* Dimension array */
    hsize_t zero_dims[] = {0};		/* Dimension array */
    char name[30];			/* Name */
    unsigned i;				/* Local index variable */

    /* Create file */
    if((fid = H5Fcreate(fname, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;

    /* Create 1-D dataspace with zero dimension size */
    if((sid0 = H5Screate_simple(1, zero_dims, NULL)) < 0)
        goto error;

    /* Create 1-D dataspace with non-zero dimension size*/
    if((sid1 = H5Screate_simple(1, one_dims, NULL)) < 0)
        goto error;

    /* Create 2-D dataspace */
    if((sid2 = H5Screate_simple(2, two_dims, NULL)) < 0)
        goto error;

    /* Create scalar dataspace */
    if((sid3 = H5Screate(H5S_SCALAR)) < 0)
        goto error;

    /* Create null dataspace */
    if((sid4 = H5Screate(H5S_NULL)) < 0)
        goto error;

    /* Create an attribute for the root group */
    if((attr_id = H5Acreate2(fid, "attr", H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;
    if(H5Aclose(attr_id) < 0)
        goto error;

    /* Create 1-D dataset with zero dimension size for the root group */
    if((did = H5Dcreate2(fid, "zero_dset", H5T_NATIVE_UCHAR, sid0, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;

    /* Create 11 attributes for the dataset */
    for(i = 1; i <= (THRES_NUM+1); i++) {
        sprintf(name, "%s%d", THRES_ATTR_NAME,i);
        if((attr_id = H5Acreate2(did, name, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT)) < 0)
            goto error;
        if(H5Aclose(attr_id) < 0)
            goto error;
    }
    if(H5Dclose(did) < 0)
        goto error;

    /* Create dataset with scalar dataspace for the root group */
    if((did = H5Dcreate2(fid, "scalar_dset", H5T_NATIVE_UCHAR, sid3, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;
    if(H5Dclose(did) < 0)
        goto error;

    /* Create dataset with null dataspace for the root group */
    if((did = H5Dcreate2(fid, "null_dset", H5T_NATIVE_UCHAR, sid4, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;
    if(H5Dclose(did) < 0)
        goto error;

    /* Create 2-D dataset for the root group */
    if((did = H5Dcreate2(fid, "dset", H5T_NATIVE_UCHAR, sid2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;

    /* Create 10 attributes for the 2-D dataset */
    for(i = 1; i <= THRES_NUM; i++) {
        sprintf(name, "%s%d", THRES_ATTR_NAME,i);
        if((attr_id = H5Acreate2(did, name, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT)) < 0)
            goto error;
        if(H5Aclose(attr_id) < 0)
            goto error;
    }
    if(H5Dclose(did) < 0)
        goto error;

    /* Create first group */
    if((gid = H5Gcreate2(fid, "group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;

    /* Create an attribute for the group */
    if((attr_id = H5Acreate2(gid, "ATTR", H5T_NATIVE_INT, sid3, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;

    /* Close attribute */
    if(H5Aclose(attr_id) < 0)
        goto error;

    /* Create 10 1-D datasets with non-zero dimension size for the group */
    for(i = 1; i <= THRES_NUM; i++) {
        /* set up dataset name */
        sprintf(name, "%s%d", THRES_DSET_NAME,i);

        /* Create the dataset */
        if((did = H5Dcreate2(gid, name, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
            goto error;

        /* Close the dataset */
        if(H5Dclose(did) < 0)
            goto error;
    }

    /* Close the group */
    if(H5Gclose(gid) < 0)
        goto error;


    /* Create second group */
    if((gid = H5Gcreate2(fid, "group2", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;

    /* Create 25 attributes for the group */
    for(i = 1; i <= THRES_NUM_25; i++) {
        /* Set up attribute name */
        sprintf(name, "%s%d", THRES_ATTR_GRP_NAME,i);

        /* Create the attribute */
        if((attr_id = H5Acreate2(gid, name, H5T_NATIVE_INT, sid2, H5P_DEFAULT, H5P_DEFAULT)) < 0)
            goto error;

        /* Close the attribute */
        if(H5Aclose(attr_id) < 0)
            goto error;
    }

    /* Close the group */
    if(H5Gclose(gid) < 0)
        goto error;

    /* Create third group */
    if((gid = H5Gcreate2(fid, "group3", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;

    /* Create 9 1-D datasets with non-zero dimension size for the group */
    for(i = 1; i < THRES_NUM; i++) {
        /* set up dataset name */
        sprintf(name, "%s%d", THRES_DSET_NAME,i);

        /* Create the dataset */
        if((did = H5Dcreate2(gid, name, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
            goto error;

        /* Close the dataset */
        if(H5Dclose(did) < 0)
            goto error;
    }

    /* Close the group */
    if(H5Gclose(gid) < 0)
        goto error;


    /* Close dataspaces */
    if(H5Sclose(sid0) < 0)
        goto error;
    if(H5Sclose(sid1) < 0)
        goto error;
    if(H5Sclose(sid2) < 0)
        goto error;
    if(H5Sclose(sid3) < 0)
        goto error;
    if(H5Sclose(sid4) < 0)
        goto error;

    /* Close file */
    if(H5Fclose(fid) < 0)
        goto error;

error:
    H5E_BEGIN_TRY {
        H5Gclose(gid);
        H5Aclose(attr_id);
        H5Dclose(did);
        H5Sclose(sid0);
        H5Sclose(sid1);
        H5Sclose(sid2);
        H5Sclose(sid3);
        H5Sclose(sid4);
        H5Fclose(fid);
    } H5E_END_TRY;

} /* gen_threshold_file() */
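
The generator above uses the standard HDF5 cleanup idiom: every failing call jumps to a single error label, and the H5E_BEGIN_TRY / H5E_END_TRY pair silences the errors produced when closing identifiers that may never have been opened. A minimal, self-contained sketch of the same pattern (the file and dataset names here are hypothetical, not part of the test above):

#include "hdf5.h"

/* Sketch of the goto-error cleanup idiom; "sketch.h5" and "dset" are
 * hypothetical names used only for illustration. */
static int
create_small_file(void)
{
    hid_t fid = -1, sid = -1, did = -1;
    hsize_t dims[1] = {6};

    if((fid = H5Fcreate("sketch.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;
    if((sid = H5Screate_simple(1, dims, NULL)) < 0)
        goto error;
    if((did = H5Dcreate2(fid, "dset", H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;

    if(H5Dclose(did) < 0) goto error;
    if(H5Sclose(sid) < 0) goto error;
    if(H5Fclose(fid) < 0) goto error;
    return 0;

error:
    /* Closing an id that was never opened fails, but the errors are suppressed */
    H5E_BEGIN_TRY {
        H5Dclose(did);
        H5Sclose(sid);
        H5Fclose(fid);
    } H5E_END_TRY;
    return -1;
}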
Example #2
/*
 * Generate an HDF5 file, using the latest library format, with
 * NUM_GRPS groups and NUM_ATTRS attributes on the dataset.
 *
 */
static void
gen_newgrat_file(const char *fname)
{
    hid_t fcpl; 	/* File creation property */
    hid_t fapl; 	/* File access property */
    hid_t fid;		/* File id */
    hid_t gid;		/* Group id */
    hid_t tid;		/* Datatype id */
    hid_t sid; 		/* Dataspace id */
    hid_t attr_id; 	/* Attribute id */
    hid_t did;		/* Dataset id */
    char name[30];	/* Group name */
    char attrname[30];	/* Attribute name */
    int  i;		/* Local index variable */

    /* Get a copy file access property list */
    if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0)
        goto error;

    /* Set to use latest library format */
    if(H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0)
        goto error;

    /* Get a copy of file creation property list */
    if((fcpl = H5Pcreate(H5P_FILE_CREATE)) < 0)
        goto error;

    /* Set file space handling strategy */
    if(H5Pset_file_space(fcpl, H5F_FILE_SPACE_ALL_PERSIST, (hsize_t)0) < 0)
        goto error;

    /* Create file */
    if((fid = H5Fcreate(fname, H5F_ACC_TRUNC, fcpl, fapl)) < 0)
        goto error;

    /* Create NUM_GRPS groups in the root group */
    for(i = 1; i <= NUM_GRPS; i++) {
        sprintf(name, "%s%d", GROUP_NAME,i);
        if((gid = H5Gcreate2(fid, name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
            goto error;
        if(H5Gclose(gid) < 0)
            goto error;
    } /* end for */

    /* Create a datatype to commit and use */
    if((tid = H5Tcopy(H5T_NATIVE_INT)) < 0)
        goto error;

    /* Create dataspace for dataset */
    if((sid = H5Screate(H5S_SCALAR)) < 0)
        goto error;

    /* Create dataset */
    if((did = H5Dcreate2(fid, DATASET_NAME, tid, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;

    /* Create NUM_ATTRS for the dataset */
    for(i = 1; i <= NUM_ATTRS; i++) {
        sprintf(attrname, "%s%d", ATTR_NAME,i);
        if((attr_id = H5Acreate2(did, attrname, tid, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0)
            goto error;
        if(H5Aclose(attr_id) < 0)
            goto error;
    } /* end for */

    /* Close dataset, dataspace, datatype, file */
    if(H5Dclose(did) < 0)
        goto error;
    if(H5Sclose(sid) < 0)
        goto error;
    if(H5Tclose(tid) < 0)
        goto error;
    if(H5Fclose(fid) < 0)
        goto error;

error:
    H5E_BEGIN_TRY {
        H5Aclose(attr_id);
        H5Dclose(did);
        H5Tclose(tid);
        H5Sclose(sid);
        H5Gclose(gid);
        H5Fclose(fid);
    } H5E_END_TRY;

} /* gen_newgrat_file() */
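
gen_newgrat_file() relies on two property lists: a file access property list that pins both library version bounds to H5F_LIBVER_LATEST (forcing the newer group and attribute storage), and a file creation property list that selects a file space handling strategy. A stripped-down sketch of just the libver part, with a hypothetical file name:

#include "hdf5.h"

/* Sketch: create a file that uses the latest on-disk format.
 * "latest.h5" is a hypothetical name. */
static hid_t
create_latest_format_file(void)
{
    hid_t fapl = -1, fid = -1;

    if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0)
        goto error;
    /* Force both the lower and upper format bounds to "latest" */
    if(H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0)
        goto error;
    if((fid = H5Fcreate("latest.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
        goto error;
    H5Pclose(fapl);
    return fid;        /* caller closes the file with H5Fclose() */

error:
    H5E_BEGIN_TRY {
        H5Pclose(fapl);
        H5Fclose(fid);
    } H5E_END_TRY;
    return -1;
}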
Example #3
/** Read/write a hyperslab of data, performing dimension remapping
 * and data rescaling as needed.
 */
static int mirw_hyperslab_icv(int opcode,
                              mihandle_t volume,
                              mitype_t buffer_data_type,
                              const misize_t start[],
                              const misize_t count[],
                              void *buffer)
{
  hid_t dset_id = -1;
  hid_t mspc_id = -1;
  hid_t fspc_id = -1;
  hid_t buffer_type_id = -1;
  int result = MI_ERROR;
  hsize_t hdf_start[MI2_MAX_VAR_DIMS];
  hsize_t hdf_count[MI2_MAX_VAR_DIMS];
  int dir[MI2_MAX_VAR_DIMS];  /* Direction vector in file order */
  hsize_t ndims;
  int slice_ndims;
  int n_different = 0;
  double volume_valid_min, volume_valid_max;
  misize_t buffer_size;
  void *temp_buffer=NULL;
  size_t icount[MI2_MAX_VAR_DIMS];
  int idir[MI2_MAX_VAR_DIMS];
  int imap[MI2_MAX_VAR_DIMS];
  double *image_slice_max_buffer=NULL;
  double *image_slice_min_buffer=NULL;
  int scaling_needed=0;
  char path[MI2_MAX_PATH];
  
  hsize_t image_slice_start[MI2_MAX_VAR_DIMS];
  hsize_t image_slice_count[MI2_MAX_VAR_DIMS];
  hsize_t image_slice_length=0;
  hsize_t total_number_of_slices=0;
  hsize_t i;
  int j;


  /* Disallow write operations to anything but the highest resolution.
   */
  if (opcode == MIRW_OP_WRITE && volume->selected_resolution != 0) {
    return MI_LOG_ERROR(MI2_MSG_GENERIC,"Trying to write to a volume thumbnail");
  }
  
  sprintf(path, MI_ROOT_PATH "/image/%d/image", volume->selected_resolution);
  /*printf("Using:%s\n",path);*/
  
  /* Open the dataset with the specified path
  */
  MI_CHECK_HDF_CALL(dset_id = H5Dopen1(volume->hdf_id, path),"H5Dopen1");
  if (dset_id < 0) {
    return (MI_ERROR);
  }

  MI_CHECK_HDF_CALL(fspc_id = H5Dget_space(dset_id),"H5Dget_space");
  if (fspc_id < 0) {
    goto cleanup;
  }

 
  buffer_type_id = mitype_to_hdftype(buffer_data_type, TRUE);
  if(buffer_type_id<0)
  {
    goto cleanup;
  }
  
  ndims = volume->number_of_dims;
  
  if (ndims == 0) {
    /* A scalar volume is possible but extremely unlikely, not to
     * mention useless!
     */
    mspc_id = H5Screate(H5S_SCALAR);
    hdf_count[0]=1; 
  } else {

    n_different = mitranslate_hyperslab_origin(volume, start, count, hdf_start, hdf_count, dir);

    mspc_id = H5Screate_simple(ndims, hdf_count, NULL);
    
    if (mspc_id < 0) {
      fprintf(stderr,"H5Screate_simple: Fail %s:%d\n",__FILE__,__LINE__);
      goto cleanup;
    }
  }
  
  miget_hyperslab_size_hdf(buffer_type_id, ndims, hdf_count, &buffer_size);

  MI_CHECK_HDF_CALL(result = H5Sselect_hyperslab(fspc_id, H5S_SELECT_SET, hdf_start, NULL,
                               hdf_count, NULL),"H5Sselect_hyperslab");
  if (result < 0) {
    goto cleanup;
  }

  if((result=miget_volume_valid_range( volume, &volume_valid_max, &volume_valid_min))<0)
  {
    goto cleanup;
  }

#ifdef _DEBUG
  printf("mirw_hyperslab_icv:Volume:%lx valid_max:%f valid_min:%f scaling:%d n_different:%d\n",(long int)(volume),volume_valid_max,volume_valid_min,volume->has_slice_scaling,n_different);
#endif  
  
  if(volume->has_slice_scaling)
  {
    hid_t image_max_fspc_id;
    hid_t image_min_fspc_id;
    hid_t scaling_mspc_id;
    total_number_of_slices=1;
    image_slice_length=1;
    scaling_needed=1;

    image_max_fspc_id=H5Dget_space(volume->imax_id);
    image_min_fspc_id=H5Dget_space(volume->imin_id);

    if ( image_max_fspc_id < 0 || image_min_fspc_id < 0 ) {
      /* The image-max and/or image-min dataspace could not be opened */
      result = MI_ERROR;
      goto cleanup;
    }

    slice_ndims = H5Sget_simple_extent_ndims ( image_max_fspc_id );
    if(slice_ndims<0)
    {
      /*TODO: report read error somehow*/
      fprintf(stderr,"H5Sget_simple_extent_ndims: Fail %s:%d\n",__FILE__,__LINE__);
      goto cleanup;
    }

    if ( (hsize_t)slice_ndims > ndims ) { /*Can this really happen?*/
      slice_ndims = ndims;
    }

    for ( j = 0; j < slice_ndims; j++ ) {
      image_slice_count[j] = hdf_count[j];
      image_slice_start[j] = hdf_start[j];
      
      if(hdf_count[j]>1) /*avoid zero sized dimensions?*/
        total_number_of_slices*=hdf_count[j];
    }
    
    for (i = slice_ndims; i < ndims; i++ ) {
      if(hdf_count[i]>1) /*avoid zero sized dimensions?*/
        image_slice_length*=hdf_count[i];
      
      image_slice_count[i] = 0;
      image_slice_start[i] = 0;
    }
    
    image_slice_max_buffer=malloc(total_number_of_slices*sizeof(double));
    if(!image_slice_max_buffer)
    {
      result=MI_ERROR;
      MI_LOG_ERROR(MI2_MSG_OUTOFMEM,total_number_of_slices*sizeof(double));
      goto cleanup;
    }
    
    image_slice_min_buffer=malloc(total_number_of_slices*sizeof(double));
    
    if(!image_slice_min_buffer)
    {
      result=MI_ERROR;
      MI_LOG_ERROR(MI2_MSG_OUTOFMEM,total_number_of_slices*sizeof(double));
      goto cleanup;
    }
    
    scaling_mspc_id = H5Screate_simple(slice_ndims, image_slice_count, NULL);
    
    if( (result=H5Sselect_hyperslab(image_max_fspc_id, H5S_SELECT_SET, image_slice_start, NULL, image_slice_count, NULL))>=0 )
    {
      if((result=H5Dread(volume->imax_id, H5T_NATIVE_DOUBLE, scaling_mspc_id, image_max_fspc_id, H5P_DEFAULT,image_slice_max_buffer))<0)
      {
        MI_LOG_ERROR(MI2_MSG_HDF5,"H5Dread");
        goto cleanup;
      }
    } else {
      MI_LOG_ERROR(MI2_MSG_HDF5,"H5Sselect_hyperslab");
      goto cleanup;
    }
    
    if((result=H5Sselect_hyperslab(image_min_fspc_id, H5S_SELECT_SET, image_slice_start, NULL, image_slice_count, NULL))>=0 )
    {
      if((result=H5Dread(volume->imin_id, H5T_NATIVE_DOUBLE, scaling_mspc_id, image_min_fspc_id, H5P_DEFAULT,image_slice_min_buffer))<0)
      {
        MI_LOG_ERROR(MI2_MSG_HDF5,"H5Dread");
        goto cleanup;
      }
    } else {
      /*TODO: report read error somehow*/
      MI_LOG_ERROR(MI2_MSG_HDF5,"H5Sselect_hyperslab");
      goto cleanup;
    }
    H5Sclose(scaling_mspc_id);
    H5Sclose(image_max_fspc_id);
    H5Sclose(image_min_fspc_id);
  } else {
    slice_ndims=0;
    total_number_of_slices=1;
    image_slice_max_buffer=malloc(sizeof(double));
    image_slice_min_buffer=malloc(sizeof(double));
    miget_volume_range(volume,image_slice_max_buffer,image_slice_min_buffer);
    image_slice_length=1;
    /*it produces unity scaling*/
    scaling_needed=(*image_slice_max_buffer!=volume_valid_max) || (*image_slice_min_buffer!=volume_valid_min);
    for (i = 0; i < ndims; i++) {
      image_slice_length *= hdf_count[i];
    }
#ifdef _DEBUG    
    printf("mirw_hyperslab_icv:Real max:%f min:%f\n",*image_slice_max_buffer,*image_slice_min_buffer);
#endif    
  }
  
  //A hack to disable interslice scaling when it is not needed according to MINC1 specs
  if( volume->volume_type==MI_TYPE_FLOAT    || volume->volume_type==MI_TYPE_DOUBLE || 
      volume->volume_type==MI_TYPE_FCOMPLEX || volume->volume_type==MI_TYPE_DCOMPLEX )
  {
    scaling_needed=0;
  } 
#ifdef _DEBUG  
  printf("mirw_hyperslab_icv:Slice_ndim:%d total_number_of_slices:%d image_slice_length:%d scaling_needed:%d\n",slice_ndims,total_number_of_slices,image_slice_length,scaling_needed);
#endif

  if (opcode == MIRW_OP_READ) 
  {
    MI_CHECK_HDF_CALL(result = H5Dread(dset_id, buffer_type_id, mspc_id, fspc_id, H5P_DEFAULT, buffer),"H5Dread");
    if(result<0)
    {
      goto cleanup;
    }
    
    if(scaling_needed)
    {
      switch(buffer_data_type)
      {
        case MI_TYPE_FLOAT:
#ifdef _DEBUG  
          printf("Descaling  float\n");
#endif
          APPLY_DESCALING(float,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
          break;
        case MI_TYPE_DOUBLE:
#ifdef _DEBUG  
          printf("Descaling  double\n");
#endif
          APPLY_DESCALING(double,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
          break;
        case MI_TYPE_INT:
#ifdef _DEBUG  
          printf("Descaling  int\n");
#endif
          APPLY_DESCALING(int,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
          break;
        case MI_TYPE_UINT:
#ifdef _DEBUG  
          printf("Descaling  uint\n");
#endif
          APPLY_DESCALING(unsigned int,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
          break;
        case MI_TYPE_SHORT:
#ifdef _DEBUG  
          printf("Descaling  short\n");
#endif
          APPLY_DESCALING(short,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
          break;
        case MI_TYPE_USHORT:
#ifdef _DEBUG  
          printf("Descaling  ushort\n");
#endif
          APPLY_DESCALING(unsigned short,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
          break;
        case MI_TYPE_BYTE:
#ifdef _DEBUG  
          printf("Descaling  byte\n");
#endif
          APPLY_DESCALING(char,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
          break;
        case MI_TYPE_UBYTE:
#ifdef _DEBUG  
          printf("Descaling  ubyte\n");
#endif
          APPLY_DESCALING(unsigned char,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
          break;
        default:
          /*TODO: report unsupported conversion*/
          result=MI_ERROR;
          goto cleanup;
      } 
    } else {
#ifdef _DEBUG  
      printf("Descaling  not needed!\n");
#endif
    }
    
    if (n_different != 0 ) {
      for (i = 0; i < ndims; i++) {
        icount[i] = count[i];
      }
      restructure_array(ndims, buffer, icount, H5Tget_size(buffer_type_id),volume->dim_indices, dir);
      /*TODO: check if we managed to restructure the array*/
      result=0;
    }
  } else { /*opcode != MIRW_OP_READ*/

    volume->is_dirty = TRUE; /* Mark as modified. */
    
    if (n_different != 0 ) {
      /* Invert before calling */
      for (i = 0; i < ndims; i++) {
        icount[volume->dim_indices[i]] = count[i];
        idir[volume->dim_indices[i]] = dir[i];
        /* this one was correct the original way*/
        imap[volume->dim_indices[i]] = i;

      }
    }
    if(scaling_needed || n_different != 0) 
    {
      /*create temporary copy, to be destroyed*/
      temp_buffer=malloc(buffer_size);
      if(!temp_buffer)
      {
        MI_LOG_ERROR(MI2_MSG_OUTOFMEM,buffer_size);
        result=MI_ERROR; /*TODO: error code?*/
        goto cleanup;
      }
      memcpy(temp_buffer,buffer,buffer_size);

      if (n_different != 0 )
        restructure_array(ndims, temp_buffer, icount, H5Tget_size(buffer_type_id), imap, idir);

      if(scaling_needed)
      {
        switch(buffer_data_type)
        {
          case MI_TYPE_FLOAT:
#ifdef _DEBUG  
            printf("scaling  float\n");
#endif
            APPLY_SCALING(float,temp_buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
            break;
          case MI_TYPE_DOUBLE:
#ifdef _DEBUG  
            printf("scaling  double\n");
#endif
            APPLY_SCALING(double,temp_buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
            break;
          case MI_TYPE_INT:
#ifdef _DEBUG  
            printf("scaling  int\n");
#endif
            APPLY_SCALING(int,temp_buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
            break;
          case MI_TYPE_UINT:
#ifdef _DEBUG  
            printf("scaling  unsigned int\n");
#endif
            APPLY_SCALING(unsigned int,temp_buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
            break;
          case MI_TYPE_SHORT:
#ifdef _DEBUG  
            printf("scaling  short\n");
#endif
            APPLY_SCALING(short,temp_buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
            break;
          case MI_TYPE_USHORT:
#ifdef _DEBUG  
            printf("scaling  unsigned short\n");
#endif
            APPLY_SCALING(unsigned short,temp_buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
            break;
          case MI_TYPE_BYTE:
#ifdef _DEBUG
            printf("scaling  char\n");
#endif
            APPLY_SCALING(char,temp_buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
            break;
          case MI_TYPE_UBYTE:
#ifdef _DEBUG
            printf("scaling  unsigned char\n");
#endif
            APPLY_SCALING(unsigned char,temp_buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max);
            break;
          default:
            /*TODO: report unsupported conversion*/
            result=MI_ERROR;
            goto cleanup;
        }
      }
      MI_CHECK_HDF_CALL(result = H5Dwrite(dset_id, buffer_type_id, mspc_id, fspc_id, H5P_DEFAULT, temp_buffer),"H5Dwrite");
    } else {
      MI_CHECK_HDF_CALL(result = H5Dwrite(dset_id, buffer_type_id, mspc_id, fspc_id, H5P_DEFAULT, buffer),"H5Dwrite");
    }
    
    if(result<0)
    {
      goto cleanup;
    }
  }
      
cleanup:

  if (buffer_type_id >= 0) {
    H5Tclose(buffer_type_id);
  }
  if (mspc_id >= 0) {
    H5Sclose(mspc_id);
  }
  if (fspc_id >= 0) {
    H5Sclose(fspc_id);
  }
  if ( dset_id >=0 ) {
    H5Dclose(dset_id);
  }
  
  if(temp_buffer!=NULL)
  {
    free(temp_buffer);
  }
  if(image_slice_min_buffer!=NULL)
  {
    free(image_slice_min_buffer);
  }
  if(image_slice_max_buffer!=NULL)
  {
    free(image_slice_max_buffer);
  }
  return (result);
}
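
The APPLY_SCALING / APPLY_DESCALING macros used above implement a per-slice linear mapping between the volume's valid voxel range (volume_valid_min..volume_valid_max) and the per-slice real range held in image_slice_min_buffer / image_slice_max_buffer. Their definitions are not shown here; the sketch below shows what the read-side descaling amounts to for one element type, assuming the conventional MINC linear scaling and a buffer laid out as consecutive slices:

#include <stddef.h>

/* Sketch only: map stored voxel values to real values, slice by slice,
 * assuming real = (voxel - valid_min) * (slice_max - slice_min)
 *                 / (valid_max - valid_min) + slice_min             */
static void
descale_slices_double(double *buf, size_t slice_length, size_t n_slices,
                      const double *slice_min, const double *slice_max,
                      double valid_min, double valid_max)
{
    size_t s, k;
    double denom = valid_max - valid_min;

    if (denom == 0.0)
        return;                      /* degenerate valid range: nothing to do */

    for (s = 0; s < n_slices; s++) {
        double scale  = (slice_max[s] - slice_min[s]) / denom;
        double offset = slice_min[s];
        double *p = buf + s * slice_length;

        for (k = 0; k < slice_length; k++)
            p[k] = (p[k] - valid_min) * scale + offset;
    }
}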
Example #4
/** Read/write a hyperslab of data, performing dimension remapping
 * and data rescaling as needed. Data in the range (data_min..data_max) is mapped to the full range of buffer_data_type.
 */
static int mirw_hyperslab_normalized(int opcode,
                              mihandle_t volume,
                              mitype_t buffer_data_type,
                              const misize_t start[],
                              const misize_t count[],
                              double data_min,
                              double data_max,
                              void *buffer)
{
  hid_t dset_id = -1;
  hid_t mspc_id = -1;
  hid_t fspc_id = -1;
  hid_t volume_type_id = -1;
  hid_t buffer_type_id = -1;
  int result = MI_ERROR;
  hsize_t hdf_start[MI2_MAX_VAR_DIMS];
  hsize_t hdf_count[MI2_MAX_VAR_DIMS];
  int dir[MI2_MAX_VAR_DIMS];  /* Direction vector in file order */
  hsize_t ndims;
  int slice_ndims;
  int n_different = 0;
  double volume_valid_min, volume_valid_max;
  misize_t buffer_size;
  misize_t input_buffer_size;
  double *temp_buffer=NULL;
  size_t icount[MI2_MAX_VAR_DIMS];
  int idir[MI2_MAX_VAR_DIMS];
  int imap[MI2_MAX_VAR_DIMS];
  double *image_slice_max_buffer=NULL;
  double *image_slice_min_buffer=NULL;

  char path[MI2_MAX_PATH];
  
  hsize_t image_slice_start[MI2_MAX_VAR_DIMS];
  hsize_t image_slice_count[MI2_MAX_VAR_DIMS];
  hsize_t image_slice_length=0;
  hsize_t total_number_of_slices=0;
  hsize_t i;
  int j;


  /* Disallow write operations to anything but the highest resolution.
   */
  if (opcode == MIRW_OP_WRITE && volume->selected_resolution != 0) {
    /*TODO: report error that we are not dealing with the right image here*/
    return (MI_ERROR);
  }
  
  sprintf(path, MI_ROOT_PATH "/image/%d/image", volume->selected_resolution);

  /* Open the dataset with the specified path
  */
  MI_CHECK_HDF_CALL(dset_id = H5Dopen1(volume->hdf_id, path),"H5Dopen1");
  if (dset_id < 0) {
    return (MI_ERROR);
  }

  MI_CHECK_HDF_CALL(fspc_id = H5Dget_space(dset_id),"H5Dget_space");
  if (fspc_id < 0) {
    /*TODO: report can't get dataset*/
    goto cleanup;
  }
  buffer_type_id = mitype_to_hdftype(buffer_data_type,TRUE);
  if(buffer_type_id<0)
  {
    goto cleanup;
  }
  
  MI_CHECK_HDF_CALL(volume_type_id = H5Tcopy ( H5T_NATIVE_DOUBLE ),"H5Tcopy");
  if(volume_type_id<0)
  {
    fprintf(stderr,"H5Tcopy: Fail %s:%d\n",__FILE__,__LINE__);
    goto cleanup;
  }
  
  ndims = volume->number_of_dims;
  
  if (ndims == 0) {
    /* A scalar volume is possible but extremely unlikely, not to
     * mention useless!
     */
    mspc_id = H5Screate(H5S_SCALAR);
  } else {

    n_different = mitranslate_hyperslab_origin(volume,start,count, hdf_start,hdf_count,dir);

    MI_CHECK_HDF_CALL(mspc_id = H5Screate_simple(ndims, hdf_count, NULL),"H5Screate_simple");
    
    if (mspc_id < 0) {
      goto cleanup;
    }
  }
  
  miget_hyperslab_size_hdf(volume_type_id,ndims,hdf_count,&buffer_size);
  miget_hyperslab_size_hdf(buffer_type_id,ndims,hdf_count,&input_buffer_size);

  MI_CHECK_HDF_CALL(result = H5Sselect_hyperslab(fspc_id, H5S_SELECT_SET, hdf_start, NULL,
                               hdf_count, NULL),"H5Sselect_hyperslab");
  if (result < 0) {
    goto cleanup;
  }

  miget_volume_valid_range( volume, &volume_valid_max, &volume_valid_min);

#ifdef _DEBUG
  printf("mirw_hyperslab_normalized:Volume:%x valid_max:%f valid_min:%f scaling:%d\n",volume,volume_valid_max,volume_valid_min,volume->has_slice_scaling);
#endif  
  
  if(volume->has_slice_scaling && 
    !(volume->volume_type==MI_TYPE_FLOAT    || volume->volume_type==MI_TYPE_DOUBLE || 
      volume->volume_type==MI_TYPE_FCOMPLEX || volume->volume_type==MI_TYPE_DCOMPLEX) )
  {
    hid_t image_max_fspc_id;
    hid_t image_min_fspc_id;
    hid_t scaling_mspc_id;
    total_number_of_slices=1;
    image_slice_length=1;

    MI_CHECK_HDF_CALL(image_max_fspc_id=H5Dget_space(volume->imax_id),"H5Dget_space");
    MI_CHECK_HDF_CALL(image_min_fspc_id=H5Dget_space(volume->imin_id),"H5Dget_space");

    if ( image_max_fspc_id < 0 || image_min_fspc_id<0 ) {
      result=MI_ERROR;
      goto cleanup;
    }

    MI_CHECK_HDF_CALL(slice_ndims = H5Sget_simple_extent_ndims ( image_max_fspc_id ),"H5Sget_simple_extent_ndims");
    if(slice_ndims<0)
    {
      goto cleanup;
    }

    if ( (hsize_t)slice_ndims > ndims ) { /*Can this really happen?*/
      slice_ndims = ndims;
    }

    for ( j = 0; j < slice_ndims; j++ ) {
      image_slice_count[j] = hdf_count[j];
      image_slice_start[j] = hdf_start[j];
      
      if(hdf_count[j]>1) /*avoid zero sized dimensions?*/
        total_number_of_slices*=hdf_count[j];
    }
    
    for (i = slice_ndims; i < ndims; i++ ) {
      if(hdf_count[i]>1) /*avoid zero sized dimensions?*/
        image_slice_length*=hdf_count[i];
      
      image_slice_count[i] = 0;
      image_slice_start[i] = 0;
    }
    
    image_slice_max_buffer=malloc(total_number_of_slices*sizeof(double));
    image_slice_min_buffer=malloc(total_number_of_slices*sizeof(double));
    /*TODO check for allocation failure ?*/
    
    MI_CHECK_HDF_CALL(scaling_mspc_id = H5Screate_simple(slice_ndims, image_slice_count, NULL),"H5Screate_simple");
    
    if( (result=H5Sselect_hyperslab(image_max_fspc_id, H5S_SELECT_SET, image_slice_start, NULL, image_slice_count, NULL))>=0 )
    {
      if( ( result=H5Dread(volume->imax_id, H5T_NATIVE_DOUBLE, scaling_mspc_id, image_max_fspc_id, H5P_DEFAULT,image_slice_max_buffer))<0)
      {
        MI_LOG_ERROR(MI2_MSG_HDF5,"H5Dread");
        goto cleanup;
      }
    } else {
      MI_LOG_ERROR(MI2_MSG_HDF5,"H5Sselect_hyperslab");
      goto cleanup;
    }
    
    if( (result=H5Sselect_hyperslab(image_min_fspc_id, H5S_SELECT_SET, image_slice_start, NULL, image_slice_count, NULL))>=0 )
    {
      if( (result=H5Dread(volume->imin_id, H5T_NATIVE_DOUBLE, scaling_mspc_id, image_min_fspc_id, H5P_DEFAULT,image_slice_min_buffer))<0)
      {
        MI_LOG_ERROR(MI2_MSG_HDF5,"H5Dread");
        goto cleanup;
      }
    } else {
      MI_LOG_ERROR(MI2_MSG_HDF5,"H5Sselect_hyperslab");
      goto cleanup;
    }
    H5Sclose(scaling_mspc_id);
    H5Sclose(image_max_fspc_id);
    H5Sclose(image_min_fspc_id);
    
  } else {
    slice_ndims=0;
    total_number_of_slices=1;
    image_slice_max_buffer=malloc(sizeof(double));
    image_slice_min_buffer=malloc(sizeof(double));
    miget_volume_range( volume,image_slice_max_buffer,image_slice_min_buffer );
    image_slice_length=1;
    for (i = 0; i < ndims; i++) {
      image_slice_length *= hdf_count[i];
    }
#ifdef _DEBUG
    printf("mirw_hyperslab_normalized:Real max:%f min:%f\n",*image_slice_max_buffer,*image_slice_min_buffer);
#endif
  }

#ifdef _DEBUG  
  printf("mirw_hyperslab_normalized:Slice_ndim:%d total_number_of_slices:%d image_slice_length:%d\n",slice_ndims,total_number_of_slices,image_slice_length);
  printf("mirw_hyperslab_normalized:data min:%f data max:%f buffer_data_type:%d\n",data_min,data_max,buffer_data_type);
#endif

  /*Allocate temporary Buffer*/
  temp_buffer=(double*)malloc(buffer_size);
  if(!temp_buffer)
  {
    MI_LOG_ERROR(MI2_MSG_OUTOFMEM,buffer_size);
    result=MI_ERROR;
    goto cleanup;
  }
  
  if (opcode == MIRW_OP_READ) 
  {
    MI_CHECK_HDF_CALL(result = H5Dread(dset_id, volume_type_id, mspc_id, fspc_id, H5P_DEFAULT, temp_buffer),"H5Dread");
    if(result<0)
    {
      goto cleanup;
    }
    
    /*WARNING: floating point types will be normalized between 0.0 and 1.0*/
    switch(buffer_data_type)
    {
      case MI_TYPE_FLOAT:
        APPLY_DESCALING_NORM(float,temp_buffer,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max,data_min,data_max,0.0f,1.0f);
        break;
      case MI_TYPE_DOUBLE:
        APPLY_DESCALING_NORM(double,temp_buffer,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max,data_min,data_max,0.0,1.0);
        break;
      case MI_TYPE_INT:
        APPLY_DESCALING_NORM(int,temp_buffer,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max,data_min,data_max,INT_MIN,INT_MAX);
        break;
      case MI_TYPE_UINT:
        APPLY_DESCALING_NORM(unsigned int,temp_buffer,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max,data_min,data_max,0,UINT_MAX);
        break;
      case MI_TYPE_SHORT:
        APPLY_DESCALING_NORM(short,temp_buffer,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max,data_min,data_max,SHRT_MIN,SHRT_MAX);
        break;
      case MI_TYPE_USHORT:
        APPLY_DESCALING_NORM(unsigned short,temp_buffer,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max,data_min,data_max,0,USHRT_MAX);
        break;
      case MI_TYPE_BYTE:
        APPLY_DESCALING_NORM(char,temp_buffer,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max,data_min,data_max,SCHAR_MIN,SCHAR_MAX);
        break;
      case MI_TYPE_UBYTE:
        APPLY_DESCALING_NORM(unsigned char,temp_buffer,buffer,image_slice_length,total_number_of_slices,image_slice_min_buffer,image_slice_max_buffer,volume_valid_min,volume_valid_max,data_min,data_max,0,UCHAR_MAX);
        break;
      default:
        /*TODO: report unsupported conversion*/
        result=MI_ERROR;
        goto cleanup;
    }
    
    if (n_different != 0 ) {
      for (i = 0; i < ndims; i++) {
         icount[i] = count[i];
      }
      restructure_array(ndims, buffer, icount, H5Tget_size(buffer_type_id),volume->dim_indices, dir);
      /*TODO: check if we managed to restructure the array*/
      result=0;
    }
  } else { /*opcode != MIRW_OP_READ*/
Example #5
int
main (int argc, char *argv[])
{
  inp_t input_params;
  mdl_t source_mdl;
  net_t network;
  int cell_index;

  int verbose = 1;
  char *input_file;

  /* Parse options and command line arguments. Display help
     message if no (or more than one) argument is given. */

    {
      int opt;

      static struct option longopts[] = {
          {"help", no_argument, NULL, 'h'},
          {"version", no_argument, NULL, 'V'},
          {"verbose", no_argument, NULL, 'v'},
          {"quiet", no_argument, NULL, 'q'},
          {0, 0, 0, 0}
      };

      while ((opt = getopt_long (argc, argv, "hVvq", longopts, NULL)) != -1)
        {
          switch (opt)
            {
            case 'h':
              usage ();
              return EXIT_SUCCESS;
              break;
            case 'V':
              version ();
              return EXIT_SUCCESS;
              break;
            case 'v':
              verbose = 2;
              break;
            case 'q':
              verbose = 0;
              break;
            default:
              usage ();
              return EXIT_FAILURE;
            }
        };
      argc -= optind;
      argv += optind;
      if (argc != 1)
        {
          usage ();
          return EXIT_FAILURE;
        }
      input_file = argv[0];
    }

  /* Read the input file */
  if( read_input_file_names (input_file, &input_params.files, verbose) != EXIT_SUCCESS )
    {
      return EXIT_FAILURE;
    }

  /* Read the chemical network file */
  if( read_network (input_params.files.chem_file, &network, verbose) != EXIT_SUCCESS )
    {
      return EXIT_FAILURE;
    }

  /* Read the input file */
  if( read_input (input_file, &input_params, &network, verbose) != EXIT_SUCCESS )
    {
      return EXIT_FAILURE;
    }

  /* Read the source model file */
  if( read_source (input_params.files.source_file, &source_mdl, &input_params,
               verbose) != EXIT_SUCCESS )
    {
      return EXIT_FAILURE;
    }

  // HDF5 file, datatype and dataspace identifiers
  hid_t       fid, datatype, dataspace, dataset, tsDataset, tsDataspace,  speciesDataset, speciesDataspace, speciesType;
  datatype = H5Tcopy(H5T_NATIVE_DOUBLE);

  hsize_t     dimsf[ ROUTE_DATASET_RANK ]={  source_mdl.n_cells, source_mdl.ts.n_time_steps, input_params.output.n_output_species, N_OUTPUT_ROUTES };
  fid = H5Fcreate( "astrochem_output.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); 
  dataspace = H5Screate_simple( ABUNDANCE_DATASET_RANK, dimsf, NULL);

  // Add attributes
  hid_t simpleDataspace = H5Screate(H5S_SCALAR);
  hid_t attrType = H5Tcopy(H5T_C_S1);
  H5Tset_size ( attrType, MAX_CHAR_FILENAME );
  H5Tset_strpad(attrType,H5T_STR_NULLTERM);
  hid_t attrNetwork = H5Acreate( fid, "chem_file", attrType, simpleDataspace, H5P_DEFAULT, H5P_DEFAULT);
  H5Awrite( attrNetwork, attrType, input_params.files.chem_file);
  H5Aclose( attrNetwork );
  hid_t attrModel = H5Acreate( fid, "source_file", attrType, simpleDataspace, H5P_DEFAULT, H5P_DEFAULT);
  H5Awrite( attrModel, attrType, input_params.files.source_file);
  H5Aclose( attrModel );

  H5Tclose( attrType );
  H5Sclose( simpleDataspace );

  // Define chunk property
  hsize_t     chunk_dims[ ROUTE_DATASET_RANK ] = { 1, 1, input_params.output.n_output_species, N_OUTPUT_ROUTES };
  hid_t prop_id = H5Pcreate(H5P_DATASET_CREATE);
  H5Pset_chunk(prop_id, ABUNDANCE_DATASET_RANK , chunk_dims);

  // Create dataset
  dataset = H5Dcreate(fid, "Abundances", datatype, dataspace, H5P_DEFAULT, prop_id, H5P_DEFAULT);

  int i;
  hid_t dataspaceRoute, route_t_datatype, r_t_datatype, route_prop_id, routeGroup;
  hid_t routeDatasets[ input_params.output.n_output_species ];
  if (input_params.output.trace_routes)
    {

      // Create route dataspace
      dataspaceRoute = H5Screate_simple( ROUTE_DATASET_RANK, dimsf, NULL);

      // Create route datatype
      r_t_datatype = H5Tcreate (H5T_COMPOUND, sizeof(r_t));
      H5Tinsert( r_t_datatype, "reaction_number", HOFFSET(r_t, reaction_no ), H5T_NATIVE_INT);
      H5Tinsert( r_t_datatype, "reaction_rate", HOFFSET(r_t, rate), H5T_NATIVE_DOUBLE);

      route_t_datatype = H5Tcreate (H5T_COMPOUND, sizeof(rout_t));
      H5Tinsert( route_t_datatype, "formation_rate", HOFFSET(rout_t, formation ), r_t_datatype );
      H5Tinsert( route_t_datatype, "destruction_rate", HOFFSET(rout_t, destruction ), r_t_datatype );

      // Define route chunk property
      route_prop_id = H5Pcreate(H5P_DATASET_CREATE);
      H5Pset_chunk( route_prop_id, ROUTE_DATASET_RANK, chunk_dims);


      // Create each named route dataset
      routeGroup = H5Gcreate( fid, "Routes", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT );
      char routeName[7] = "route_";
      char tempName[ MAX_CHAR_SPECIES + sizeof( routeName ) ];
      for( i = 0; i < input_params.output.n_output_species ; i++ )
        {
          strcpy( tempName, routeName );
          strcat( tempName, network.species[input_params.output.output_species_idx[i]].name );
          routeDatasets[i] = H5Dcreate( routeGroup, tempName, route_t_datatype, dataspaceRoute, H5P_DEFAULT, route_prop_id, H5P_DEFAULT);
        }
    }
  // Timesteps and species
  hsize_t n_ts = source_mdl.ts.n_time_steps;
  hsize_t n_species =  input_params.output.n_output_species ;
  tsDataspace = H5Screate_simple( 1, &n_ts, NULL);
  speciesDataspace = H5Screate_simple( 1, &n_species, NULL);
  speciesType = H5Tcopy (H5T_C_S1);
  H5Tset_size (speciesType, MAX_CHAR_SPECIES );
  H5Tset_strpad(speciesType,H5T_STR_NULLTERM);

  // Create ts and species datasets
  tsDataset = H5Dcreate(fid, "TimeSteps", datatype, tsDataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  speciesDataset = H5Dcreate(fid, "Species", speciesType, speciesDataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  double* convTs = (double*) malloc( sizeof(double)* source_mdl.ts.n_time_steps );
  for( i=0; i< source_mdl.ts.n_time_steps; i++ )
    {
      convTs[i] = source_mdl.ts.time_steps[i] / CONST_MKSA_YEAR;
    }

  H5Dwrite( tsDataset, datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, convTs );


  char speciesName [ input_params.output.n_output_species ][ MAX_CHAR_SPECIES ];
  for( i = 0; i < input_params.output.n_output_species ; i++ )
    {
      strcpy( speciesName[i], network.species[input_params.output.output_species_idx[i]].name );
    }

  H5Dwrite( speciesDataset, speciesType, H5S_ALL, H5S_ALL, H5P_DEFAULT, speciesName );

  free( convTs );
  H5Dclose( tsDataset );
  H5Dclose( speciesDataset );
  H5Tclose( speciesType );
  H5Sclose( tsDataspace );
  H5Sclose( speciesDataspace );


#ifdef HAVE_OPENMP
  /*Initialize lock*/
  omp_init_lock(&lock);


  /* Solve the ODE system for each cell. */
    {
#pragma omp parallel for schedule (dynamic, 1)
#endif
      for (cell_index = 0; cell_index < source_mdl.n_cells; cell_index++)
        {
          if (verbose >= 1)
            fprintf (stdout, "Computing abundances in cell %d...\n",
                     cell_index);
          if( full_solve ( fid, dataset, routeDatasets, dataspace, dataspaceRoute, datatype, route_t_datatype, cell_index, &input_params, source_mdl.mode,
                       &source_mdl.cell[cell_index], &network, &source_mdl.ts, verbose) != EXIT_SUCCESS )
            {
	      exit (EXIT_FAILURE);
            }
          if (verbose >= 1)
            fprintf (stdout, "Done with cell %d.\n", cell_index);
        }
#ifdef HAVE_OPENMP
    }


  /*Finished lock mechanism, destroy it*/
  omp_destroy_lock(&lock);
#endif

  /*
   * Close/release hdf5 resources.
   */
  if (input_params.output.trace_routes)
    {


      for( i = 0; i <  input_params.output.n_output_species ; i++ )
        {
          H5Dclose(routeDatasets[i] );
        }
      H5Sclose(dataspaceRoute);
      H5Gclose(routeGroup);
      H5Pclose(route_prop_id);
      H5Tclose(r_t_datatype);
      H5Tclose(route_t_datatype);
    }
  H5Dclose(dataset);
  H5Pclose(prop_id);
  H5Sclose(dataspace);
  H5Tclose(datatype);

  H5Fclose(fid);

  free_input (&input_params);
  free_mdl (&source_mdl);
  free_network (&network);
  return (EXIT_SUCCESS);
}
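
The route datasets in the program above are written with a nested compound datatype assembled from H5Tcreate(H5T_COMPOUND, ...), H5Tinsert() and the HOFFSET macro. A minimal standalone sketch of that mechanism, using a hypothetical pair_t struct instead of the program's r_t / rout_t types:

#include <stdlib.h>
#include "hdf5.h"

/* Hypothetical record type, used only to illustrate H5Tinsert/HOFFSET */
typedef struct {
    int    reaction_no;
    double rate;
} pair_t;

int
main(void)
{
    pair_t  rec = { 42, 1.5e-9 };
    hsize_t one = 1;
    hid_t   fid, tid, sid, did;

    /* Build an HDF5 compound type whose member offsets match pair_t */
    tid = H5Tcreate(H5T_COMPOUND, sizeof(pair_t));
    H5Tinsert(tid, "reaction_number", HOFFSET(pair_t, reaction_no), H5T_NATIVE_INT);
    H5Tinsert(tid, "reaction_rate",   HOFFSET(pair_t, rate),        H5T_NATIVE_DOUBLE);

    fid = H5Fcreate("compound_sketch.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    sid = H5Screate_simple(1, &one, NULL);
    did = H5Dcreate2(fid, "pair", tid, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    /* One record is written straight from the C struct */
    H5Dwrite(did, tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, &rec);

    H5Dclose(did);
    H5Sclose(sid);
    H5Tclose(tid);
    H5Fclose(fid);
    return EXIT_SUCCESS;
}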
Example #6
/** Read/write a hyperslab of data.  This is the simplified function
 * which performs no value conversion.  It is much more efficient than
 * mirw_hyperslab_icv()
 */
static int mirw_hyperslab_raw(int opcode,
                              mihandle_t volume,
                              mitype_t midatatype,
                              const misize_t start[],
                              const misize_t count[],
                              void *buffer)
{
  hid_t dset_id = -1;
  hid_t mspc_id = -1;
  hid_t fspc_id = -1;
  hid_t type_id = -1;
  int result = MI_ERROR;
  hsize_t hdf_start[MI2_MAX_VAR_DIMS];
  hsize_t hdf_count[MI2_MAX_VAR_DIMS];
  int dir[MI2_MAX_VAR_DIMS];  /* Direction vector in file order */
  int ndims;
  int n_different = 0;
  misize_t buffer_size;
  void *temp_buffer=NULL;
  char path[MI2_MAX_PATH];
  size_t icount[MI2_MAX_VAR_DIMS];

  /* Disallow write operations to anything but the highest resolution.
   */
  if (opcode == MIRW_OP_WRITE && volume->selected_resolution != 0) {
    return MI_LOG_ERROR(MI2_MSG_GENERIC,"Trying to write to a volume thumbnail");
  }

  sprintf(path, MI_ROOT_PATH "/image/%d/image", volume->selected_resolution);
  /*printf("Using:%s\n",path);*/
  
  /* Open the dataset with the specified path
  */
  MI_CHECK_HDF_CALL(dset_id = H5Dopen1(volume->hdf_id, path),"H5Dopen1");
  if (dset_id < 0) {
    return (MI_ERROR);
  }

  MI_CHECK_HDF_CALL(fspc_id = H5Dget_space(dset_id),"H5Dget_space");
  if (fspc_id < 0) {
    /*TODO: report can't get dataset*/
    goto cleanup;
  }

  if (midatatype == MI_TYPE_UNKNOWN) {
    type_id = H5Tcopy(volume->mtype_id);
  } else {
    type_id = mitype_to_hdftype(midatatype, TRUE);
  }

  ndims = volume->number_of_dims;

  if (ndims == 0) {
    /* A scalar volume is possible but extremely unlikely, not to
     * mention useless!
     */
    mspc_id = H5Screate(H5S_SCALAR);
  } else {

    n_different = mitranslate_hyperslab_origin(volume, start, count, hdf_start, hdf_count, dir);

    MI_CHECK_HDF_CALL(mspc_id = H5Screate_simple(ndims, hdf_count, NULL),"H5Screate_simple");
    if (mspc_id < 0) {
      goto cleanup;
    }
  }

  MI_CHECK_HDF_CALL(result = H5Sselect_hyperslab(fspc_id, H5S_SELECT_SET, hdf_start, NULL,
                               hdf_count, NULL),"H5Sselect_hyperslab");
  if (result < 0) {
    goto cleanup;
  }

  miget_hyperslab_size_hdf(type_id,ndims,hdf_count,&buffer_size);
  
  
  if (opcode == MIRW_OP_READ) {
    MI_CHECK_HDF_CALL(result = H5Dread(dset_id, type_id, mspc_id, fspc_id, H5P_DEFAULT,buffer),"H5Dread");
    
    /* Restructure the array after reading the data in file orientation.
     */
    if (n_different != 0) {
      int i;

      for (i = 0; i < ndims; i++) {
        icount[i] = count[i];
      }
      restructure_array(ndims, buffer, icount, H5Tget_size(type_id),
                        volume->dim_indices, dir);
    }
  } else {

    volume->is_dirty = TRUE; /* Mark as modified. */

    /* Restructure array before writing to file.
     * TODO: use temporary buffer for that!
     */

    if (n_different != 0) {
      int idir[MI2_MAX_VAR_DIMS];
      int imap[MI2_MAX_VAR_DIMS];
      int i;

      /* Invert before calling */
      for (i = 0; i < ndims; i++) {
        icount[volume->dim_indices[i]] = count[i];

        idir[volume->dim_indices[i]] = dir[i];

        // this one was correct the original way
        imap[volume->dim_indices[i]] = i;

      }

      /*Use temporary array to preserve input data*/
      temp_buffer=malloc(buffer_size);
      if(temp_buffer==NULL)
      {
        /*TODO: report memory error*/
        result=MI_ERROR;
        goto cleanup;
      }
      
      memcpy(temp_buffer,buffer,buffer_size);
      
      restructure_array(ndims, temp_buffer, icount, H5Tget_size(type_id),
                        imap, idir);
      MI_CHECK_HDF_CALL(result = H5Dwrite(dset_id, type_id, mspc_id, fspc_id, H5P_DEFAULT,
                      temp_buffer),"H5Dwrite");
    } else {
      MI_CHECK_HDF_CALL(result = H5Dwrite(dset_id, type_id, mspc_id, fspc_id, H5P_DEFAULT,
                        buffer),"H5Dwrite");
    }

  }

cleanup:

  if (type_id >= 0) {
    H5Tclose(type_id);
  }
  if (mspc_id >= 0) {
    H5Sclose(mspc_id);
  }
  if (fspc_id >= 0) {
    H5Sclose(fspc_id);
  }
  if ( dset_id >=0 ) {
    H5Dclose(dset_id);
  }
  if ( temp_buffer!= NULL) {
    free( temp_buffer );
  }
  return (result);
}
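
Stripped of the MINC bookkeeping (dimension remapping, thumbnails, the dirty flag), the raw path reduces to the standard HDF5 hyperslab read: open the dataset, select a block in its file dataspace, create a memory dataspace of the same shape, and H5Dread into the caller's buffer. A minimal sketch, assuming a 3-D dataset at "/image" inside a hypothetical file "vol.h5":

#include "hdf5.h"

/* Sketch: read a start/count block of a 3-D dataset into `buffer`.
 * "vol.h5" and "/image" are hypothetical names. */
static int
read_block(const hsize_t start[3], const hsize_t count[3], int *buffer)
{
    hid_t fid = -1, did = -1, fspc = -1, mspc = -1;
    int status = -1;

    if ((fid = H5Fopen("vol.h5", H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
        goto cleanup;
    if ((did = H5Dopen2(fid, "/image", H5P_DEFAULT)) < 0)
        goto cleanup;
    if ((fspc = H5Dget_space(did)) < 0)
        goto cleanup;
    /* Select the requested block in the file... */
    if (H5Sselect_hyperslab(fspc, H5S_SELECT_SET, start, NULL, count, NULL) < 0)
        goto cleanup;
    /* ...and describe an in-memory buffer of the same shape */
    if ((mspc = H5Screate_simple(3, count, NULL)) < 0)
        goto cleanup;
    if (H5Dread(did, H5T_NATIVE_INT, mspc, fspc, H5P_DEFAULT, buffer) < 0)
        goto cleanup;
    status = 0;

cleanup:
    if (mspc >= 0) H5Sclose(mspc);
    if (fspc >= 0) H5Sclose(fspc);
    if (did >= 0)  H5Dclose(did);
    if (fid >= 0)  H5Fclose(fid);
    return status;
}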
Example #7
op_dat op_decl_dat_hdf5(op_set set, int dim, char const *type, char const *file, char const *name)
{
  //create new communicator
  int my_rank, comm_size;
  MPI_Comm_dup(MPI_COMM_WORLD, &OP_MPI_HDF5_WORLD);
  MPI_Comm_rank(OP_MPI_HDF5_WORLD, &my_rank);
  MPI_Comm_size(OP_MPI_HDF5_WORLD, &comm_size);

  //MPI variables
  MPI_Info info  = MPI_INFO_NULL;

  //HDF5 APIs definitions
  hid_t       file_id; //file identifier
  hid_t plist_id;  //property list identifier
  hid_t dset_id; //dataset identifier
  hid_t       dataspace; //data space identifier
  hid_t       memspace; //memory space identifier

  hsize_t count[2]; //hyperslab selection parameters
  hsize_t offset[2];
  hid_t attr;   //attribute identifier

  //Set up file access property list with parallel I/O access
  plist_id = H5Pcreate(H5P_FILE_ACCESS);
  H5Pset_fapl_mpio(plist_id, OP_MPI_HDF5_WORLD, info);

  file_id = H5Fopen(file, H5F_ACC_RDONLY, plist_id );
  H5Pclose(plist_id);


  /*find element size of this dat with available attributes*/
  size_t dat_size = 0;
  //open existing data set
  dset_id = H5Dopen(file_id, name, H5P_DEFAULT);
  //get OID of the attribute
  attr = H5Aopen(dset_id, "size", H5P_DEFAULT);
  //read attribute
  H5Aread(attr,H5T_NATIVE_INT,&dat_size);
  H5Aclose(attr);
  H5Dclose(dset_id);


  /*find dim with available attributes*/
  int dat_dim = 0;
  //open existing data set
  dset_id = H5Dopen(file_id, name, H5P_DEFAULT);
  //get OID of the attribute
  attr = H5Aopen(dset_id, "dim", H5P_DEFAULT);
  //read attribute
  H5Aread(attr,H5T_NATIVE_INT,&dat_dim);
  H5Aclose(attr);
  H5Dclose(dset_id);
  if(dat_dim != dim)
  {
    printf("dat.dim %d in file %s and dim %d do not match\n",dat_dim,file,dim);
    MPI_Abort(OP_MPI_HDF5_WORLD, 2);
  }

  /*find type with available attributes*/
  dataspace= H5Screate(H5S_SCALAR);
  hid_t  atype = H5Tcopy(H5T_C_S1);
  H5Tset_size(atype, 10);
  //open existing data set
  dset_id = H5Dopen(file_id, name, H5P_DEFAULT);
  //get OID of the attribute
  attr = H5Aopen(dset_id, "type", H5P_DEFAULT);
  //read attribute
  char typ[10];
  H5Aread(attr,atype,typ);
  H5Aclose(attr);
  H5Sclose(dataspace);
  H5Dclose(dset_id);
  if(strcmp(typ,type) != 0)
  {
    printf("dat.type %s in file %s and type %s do not match\n",typ,file,type);
    MPI_Abort(OP_MPI_HDF5_WORLD, 2);
  }


  /*read in dat in hyperslabs*/

  //Open the existing dataset with default properties.
  dset_id = H5Dopen(file_id, name, H5P_DEFAULT);

  //Each process defines dataset in memory and reads from a hyperslab in the file.
  int disp = 0;
  int* sizes = (int *)xmalloc(sizeof(int)*comm_size);
  MPI_Allgather(&(set->size), 1, MPI_INT, sizes, 1, MPI_INT, OP_MPI_HDF5_WORLD);
  for(int i = 0; i<my_rank; i++)disp = disp + sizes[i];
  free(sizes);

  count[0] = set->size;
  count[1] = dim;
  offset[0] = disp;
  offset[1] = 0;
  memspace = H5Screate_simple(2, count, NULL);

  //Select hyperslab in the file.
  dataspace = H5Dget_space(dset_id);
  H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, offset, NULL, count, NULL);

  //Create property list for collective dataset read.
  plist_id = H5Pcreate(H5P_DATASET_XFER);
  H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);

  //initialize data buffer and read in data
  char* data = 0;
  if(strcmp(type,"double") == 0)
  {
    data = (char *)xmalloc(set->size*dim*sizeof(double));
    H5Dread(dset_id, H5T_NATIVE_DOUBLE, memspace, dataspace, plist_id, data);

    if(dat_size != dim*sizeof(double))
    {
      printf("dat.size %lu in file %s and %d*sizeof(double) do not match\n",dat_size,file,dim);
      MPI_Abort(OP_MPI_HDF5_WORLD, 2);
    }
    else
      dat_size = sizeof(double);

  }else if(strcmp(type,"float") == 0)
  {
    data = (char *)xmalloc(set->size*dim*sizeof(float));
    H5Dread(dset_id, H5T_NATIVE_FLOAT, memspace, dataspace, plist_id, data);

    if(dat_size != dim*sizeof(float))
    {
      printf("dat.size %lu in file %s and %d*sizeof(float) do not match\n",dat_size,file,dim);
      MPI_Abort(OP_MPI_HDF5_WORLD, 2);
    }
    else
      dat_size = sizeof(float);

  }else if(strcmp(type,"int") == 0)
  {
    data = (char *)xmalloc(set->size*dim*sizeof(int));
    H5Dread(dset_id, H5T_NATIVE_INT, memspace, dataspace, plist_id, data);

    if(dat_size != dim*sizeof(int))
    {
      printf("dat.size %lu in file %s and %d*sizeof(int) do not match\n",dat_size,file,dim);
      MPI_Abort(OP_MPI_HDF5_WORLD, 2);
    }
    else
      dat_size = sizeof(int);
  }else
  {
    printf("unknown type\n");
    MPI_Abort(OP_MPI_HDF5_WORLD, 2);
  }

  H5Pclose(plist_id);
  H5Sclose(memspace);
  H5Sclose(dataspace);
  H5Dclose(dset_id);

  H5Fclose(file_id);
  MPI_Comm_free(&OP_MPI_HDF5_WORLD);

  return op_decl_dat(set, dim, type, dat_size, data, name );
}
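
The parallel read above hinges on each rank computing its own row offset: every rank contributes its local set->size through MPI_Allgather, and rank r starts at the sum of the sizes of ranks 0..r-1. A self-contained sketch of just that offset calculation, with a made-up per-rank row count:

#include <stdio.h>
#include <stdlib.h>
#include <mpi.h>

int
main(int argc, char **argv)
{
    int my_rank, comm_size, i;
    int local_size, disp = 0;   /* local row count and this rank's starting row */
    int *sizes;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &my_rank);
    MPI_Comm_size(MPI_COMM_WORLD, &comm_size);

    local_size = 100 + my_rank;               /* hypothetical per-rank row count */

    /* Gather every rank's local size, then sum the ranks before this one */
    sizes = (int *)malloc(sizeof(int) * comm_size);
    MPI_Allgather(&local_size, 1, MPI_INT, sizes, 1, MPI_INT, MPI_COMM_WORLD);
    for (i = 0; i < my_rank; i++)
        disp += sizes[i];
    free(sizes);

    /* offset[0] = disp and count[0] = local_size would feed the hyperslab selection */
    printf("rank %d: offset[0] = %d, count[0] = %d\n", my_rank, disp, local_size);

    MPI_Finalize();
    return 0;
}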
Example #8
int main(void)
{
    hid_t	file_id, prop_id, memspace_id, type_id;
    hid_t	group_id;
    hid_t	dataset_id, dataspace_id;
    herr_t	status;
    hsize_t	dims[1];
    hsize_t	maxdims[1];
    float	data[NPOINTS];
    float	floatval;
    unsigned	numdataobj	= 0;
    unsigned	i, j;
    char	name[80];
    hsize_t	start[1]			= {0};
    hsize_t	stride[1]			= {1};
    hsize_t	count[1]			= {1};

    /* Create a file */
    file_id = H5Fcreate(FILEN, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

    /* Create a dataset to hold the number of data objects */
    /* Create the data space */
    dataspace_id = H5Screate(H5S_SCALAR);

    /* Create dataset */
    dataset_id = H5Dcreate2(file_id, "/NumDataObj",
                                    H5T_NATIVE_UINT, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    /* Write value to NumDataObj dataset */
    status = H5Dwrite(dataset_id, H5T_NATIVE_UINT, H5S_ALL,
            H5S_ALL, H5P_DEFAULT, &numdataobj);

    /* Close the identifiers */
    status = H5Dclose(dataset_id);
    status = H5Sclose(dataspace_id);

    /* Create extendible arrays */
    /* Set up for extendible dataset */
    prop_id = H5Pcreate(H5P_DATASET_CREATE);
    dims[0] = CHUNK_SIZE;
    status = H5Pset_chunk(prop_id, 1, dims);

    /* Create dataspace */
    dims[0]=1;
    maxdims[0]=H5S_UNLIMITED;
    dataspace_id = H5Screate_simple(1, dims, maxdims);

    for(i=0; i<NEXTARRAYS; i++)
    {
        /* Create dataset */
        sprintf(name, "/ExtArray%06d", i);
        dataset_id = H5Dcreate2(file_id, name,
                H5T_NATIVE_FLOAT, dataspace_id, H5P_DEFAULT, prop_id, H5P_DEFAULT);

        /* Close the identifier */
        status = H5Dclose(dataset_id);
    }

    /* Close the identifiers */
    status = H5Sclose(dataspace_id);
    status = H5Pclose(prop_id);

    /* Create group to hold data object data arrays */
    group_id = H5Gcreate2(file_id, "/DataArray", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    H5Gclose(group_id);

    for(j=0; j<NDATAOBJECTS; j++)
    {
        /* Removed print statement as it would lock system resources on Windows */
        /*
         * printf("\rWriting Object #%d of %d", j+1, NDATAOBJECTS);
         * fflush(stdout);
         */
        floatval = (float)j;

        /* Create group to hold data arrays for this object */
        sprintf(name, "/DataArray/%06d", j);
        group_id = H5Gcreate2(file_id, name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        if(group_id < 0) {
            fprintf(stderr, "Failed to create DataArray group.\n");
            status = H5Fclose(file_id);
            return -1;
        }

        /* Loop over data arrays */
        for(i=0; i<NDATAARRAYS; i++)
        {
            /* Create dataspace */
            dims[0]=NPOINTS;
            maxdims[0]=NPOINTS;
            dataspace_id = H5Screate_simple(1 ,dims, maxdims);

            /* Create dataset */
            sprintf(name, "DataArray%06d", i);
            dataset_id = H5Dcreate2(group_id, name,
                    H5T_NATIVE_FLOAT, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
            if(dataset_id < 0) {
                fprintf(stderr, "Failed to create DataArray dataset.\n");
                status = H5Fclose(file_id);
                return -1;
            }

            /* Write the data array data */
            status = H5Dwrite(dataset_id, H5T_NATIVE_FLOAT, H5S_ALL,
                    H5S_ALL, H5P_DEFAULT, data);
            if(status < 0) {
                fprintf(stderr, "Failed to write DataArray dataset.\n");
                status = H5Fclose(file_id);
                return -1;
            }

            /* Close the identifiers */
            status = H5Dclose(dataset_id);
            status = H5Sclose(dataspace_id);
        }

        /* Open NumDataObj dataset */
        dataset_id = H5Dopen2(file_id, "/NumDataObj", H5P_DEFAULT);
        if(dataset_id < 0) {
            fprintf(stderr, "Failed to open NumDataObj dataset.\n");
            status = H5Fclose(file_id);
            return -1;
        }

        /* Write value to NumDataObj dataset */
        numdataobj = j + 1;
        status = H5Dwrite(dataset_id, H5T_NATIVE_UINT, H5S_ALL,
                H5S_ALL, H5P_DEFAULT, &numdataobj);
        if(status < 0) {
            fprintf(stderr, "Failed to write NumDataObj dataset.\n");
            status = H5Fclose(file_id);
            return -1;
        }

        /* Close identifiers */
        status = H5Dclose(dataset_id);
        status = H5Gclose(group_id);

        /* Extend the extendible array datasets */
        for(i = 0; i < NEXTARRAYS; i++) {
            /* Open extendable dataset */
            sprintf(name, "/ExtArray%06d", i);
            dataset_id = H5Dopen2(file_id, name, H5P_DEFAULT);
            if(dataset_id < 0) {
                fprintf(stderr, "Failed to open ExtArray dataset.\n");
                status = H5Fclose(file_id);
                return -1;
            } /* end if */

            /* Extend the dataset by one element */
            dims[0] = (hsize_t)j + 1;
            status = H5Dset_extent(dataset_id, dims);
            if(status < 0) {
                fprintf(stderr, "Failed to extend DataArray dataset.\n");
                status = H5Fclose(file_id);
                return -1;
            } /* end if */

            /* Select element and write value to the extended dataset */
            dims[0] = 1;
            memspace_id = H5Screate_simple(1, dims, dims);
            dataspace_id = H5Dget_space(dataset_id);
            type_id = H5Dget_type(dataset_id);

            start[0] = 0;
            status = H5Sselect_hyperslab(memspace_id, H5S_SELECT_SET,
                    start, stride, count, NULL);
            start[0] = (hssize_t)j;
            status = H5Sselect_hyperslab(dataspace_id, H5S_SELECT_SET,
                    start, stride, count, NULL);
            status = H5Dwrite(dataset_id, type_id, memspace_id,
                    dataspace_id, H5P_DEFAULT, &floatval);
            if(status < 0)
            {
                fprintf(stderr, "Failed to write DataArray dataset.\n");
                status = H5Fclose(file_id);
                return -1;
            }

            /* Close identifiers */
            status = H5Tclose(type_id);
            status = H5Sclose(dataspace_id);
            status = H5Sclose(memspace_id);
            status = H5Dclose(dataset_id);
        }
    }


    /* Close the file */
    status = H5Fclose(file_id);

    printf("\n");

    return 0;
}
Example #9
0
/*-------------------------------------------------------------------------
 * Function:    add_records
 *
 * Purpose:     Writes a specified number of records to random datasets in
 *              the SWMR test file.
 *
 * Parameters:  hid_t fid
 *              The file ID of the SWMR HDF5 file
 *
 *              unsigned verbose
 *              Whether or not to emit verbose console messages
 *
 *              unsigned long nrecords
 *              # of records to write to the datasets
 *
 *              unsigned long flush_count
 *              # of records to write before flushing the file to disk
 *
 * Return:      Success:    0
 *              Failure:    -1
 *
 *-------------------------------------------------------------------------
 */
static int
add_records(hid_t fid, unsigned verbose, unsigned long nrecords, unsigned long flush_count)
{
    hid_t tid;                          /* Datatype ID for records */
    hid_t mem_sid;                      /* Memory dataspace ID */
    hsize_t start[2] = {0, 0};          /* Hyperslab selection values */
    hsize_t count[2] = {1, 1};          /* Hyperslab selection values */
    symbol_t record;                    /* The record to add to the dataset */
    H5AC_cache_config_t mdc_config_orig; /* Original metadata cache configuration */
    H5AC_cache_config_t mdc_config_cork; /* Corked metadata cache configuration */
    unsigned long rec_to_flush;         /* # of records left to write before flush */
    volatile int dummy;                 /* Dummy variable for busy sleep */
    hsize_t dim[2] = {1,0};             /* Dataspace dimensions */
    unsigned long u, v;                 /* Local index variables */

    HDassert(fid >= 0);

    /* Reset the record */
    /* (record's 'info' field might need to change for each record written, also) */
    HDmemset(&record, 0, sizeof(record));

    /* Create a dataspace for the record to add */
    if((mem_sid = H5Screate(H5S_SCALAR)) < 0)
        return -1;

    /* Create datatype for appending records */
    if((tid = create_symbol_datatype()) < 0)
        return -1;

    /* Get the current metadata cache configuration, and set up the corked
     * configuration */
    mdc_config_orig.version = H5AC__CURR_CACHE_CONFIG_VERSION;
    if(H5Fget_mdc_config(fid, &mdc_config_orig) < 0)
        return -1;
    HDmemcpy(&mdc_config_cork, &mdc_config_orig, sizeof(mdc_config_cork));
    mdc_config_cork.evictions_enabled = FALSE;
    mdc_config_cork.incr_mode = H5C_incr__off;
    mdc_config_cork.flash_incr_mode = H5C_flash_incr__off;
    mdc_config_cork.decr_mode = H5C_decr__off;

    /* Add records to random datasets, according to frequency distribution */
    rec_to_flush = flush_count;
    for(u = 0; u < nrecords; u++) {
        symbol_info_t *symbol;  /* Symbol to write record to */
        hid_t file_sid;         /* Dataset's space ID */
        hid_t aid;              /* Attribute ID */

        /* Get a random dataset, according to the symbol distribution */
        symbol = choose_dataset();

        /* Cork the metadata cache, to prevent the object header from being
         * flushed before the data has been written */
        /*if(H5Fset_mdc_config(fid, &mdc_config_cork) < 0)
            return(-1);*/

        /* If this is the first time the dataset has been opened, extend it and
         * add the sequence attribute */
        if(symbol->nrecords == 0) {
            symbol->nrecords = nrecords / 5;
            dim[1] = symbol->nrecords;

            if(H5Dset_extent(symbol->dsid, dim) < 0)
                return -1;

            if((file_sid = H5Screate(H5S_SCALAR)) < 0)
                return -1;
            if((aid = H5Acreate2(symbol->dsid, "seq", H5T_NATIVE_ULONG, file_sid, H5P_DEFAULT, H5P_DEFAULT)) < 0)
                return -1;
            if(H5Sclose(file_sid) < 0)
                return -1;
        } /* end if */
        else if((aid = H5Aopen(symbol->dsid, "seq", H5P_DEFAULT)) < 0)
            return -1;

        /* Get the coordinate to write */
        start[1] = (hsize_t)HDrandom() % symbol->nrecords;

        /* Set the record's ID (equal to its position) */
        record.rec_id = start[1];

        /* Get the dataset's dataspace */
        if((file_sid = H5Dget_space(symbol->dsid)) < 0)
            return -1;

        /* Choose a random record in the dataset */
        if(H5Sselect_hyperslab(file_sid, H5S_SELECT_SET, start, NULL, count, NULL) < 0)
            return -1;

        /* Write record to the dataset */
        if(H5Dwrite(symbol->dsid, tid, mem_sid, file_sid, H5P_DEFAULT, &record) < 0)
            return -1;

        /* Write the sequence number attribute.  Since we synchronize the random
         * number seed, the readers will always generate the same sequence of
         * randomly chosen datasets and offsets.  Therefore, and because of the
         * flush dependencies on the object header, the reader will be
         * guaranteed to see the written data if the sequence attribute is >=u.
         */
        if(H5Awrite(aid, H5T_NATIVE_ULONG, &u) < 0)
            return -1;

        /* Close the attribute */
        if(H5Aclose(aid) < 0)
            return -1;

        /* Uncork the metadata cache */
        /*if(H5Fset_mdc_config(fid, &mdc_config_orig) < 0)
            return(-1);*/

        /* Close the dataset's dataspace */
        if(H5Sclose(file_sid) < 0)
            return -1;

        /* Check for flushing file */
        if(flush_count > 0) {
            /* Decrement count of records to write before flushing */
            rec_to_flush--;

            /* Check for counter being reached */
            if(0 == rec_to_flush) {
                /* Flush contents of file */
                if(H5Fflush(fid, H5F_SCOPE_GLOBAL) < 0)
                    return -1;

                /* Reset flush counter */
                rec_to_flush = flush_count;
            } /* end if */
        } /* end if */

#ifdef OUT
        /* Busy wait, to let readers catch up */
        /* If this is removed, also remove the BUSY_WAIT symbol
         * at the top of the file.
         */
        dummy = 0;
        for(v=0; v<BUSY_WAIT; v++)
            dummy++;
        if((unsigned long)dummy != v)
            return -1;
#endif /* OUT */

    } /* end for */

    /* Close the memory dataspace */
    if(H5Sclose(mem_sid) < 0)
        return -1;

    /* Close the datatype */
    if(H5Tclose(tid) < 0)
        return -1;

    /* Emit informational message */
    if(verbose)
        fprintf(stderr, "Closing datasets\n");

    /* Close the datasets */
    for(u = 0; u < NLEVELS; u++)
        for(v = 0; v < symbol_count[u]; v++)
            if(H5Dclose(symbol_info[u][v].dsid) < 0)
                return -1;

    return 0;
}
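/*
 * Editor's sketch (assumption, not part of the original test file): one way a
 * SWMR writer could open the pre-created test file and drive add_records().
 * The function name run_writer_sketch and the hard-coded verbose flag are
 * illustrative; the latest-format bounds and H5F_ACC_SWMR_WRITE flag are the
 * usual requirements for SWMR writing in HDF5 1.10+.
 */
static int
run_writer_sketch(const char *filename, unsigned long nrecords, unsigned long flush_count)
{
    hid_t fapl;     /* File access property list */
    hid_t fid;      /* File ID */

    /* SWMR writing normally requires the latest file-format bounds */
    if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0)
        return -1;
    if(H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0)
        return -1;

    /* Open the (already created) test file for SWMR writing */
    if((fid = H5Fopen(filename, H5F_ACC_RDWR | H5F_ACC_SWMR_WRITE, fapl)) < 0)
        return -1;

    /* Append records, flushing every flush_count records */
    if(add_records(fid, 1, nrecords, flush_count) < 0)
        return -1;

    if(H5Pclose(fapl) < 0)
        return -1;
    if(H5Fclose(fid) < 0)
        return -1;
    return 0;
}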
Example #10
0
op_map op_decl_map_hdf5(op_set from, op_set to, int dim, char const *file, char const *name)
{
  //create new communicator
  int my_rank, comm_size;
  MPI_Comm_dup(MPI_COMM_WORLD, &OP_MPI_HDF5_WORLD);
  MPI_Comm_rank(OP_MPI_HDF5_WORLD, &my_rank);
  MPI_Comm_size(OP_MPI_HDF5_WORLD, &comm_size);

  //MPI variables
  MPI_Info info  = MPI_INFO_NULL;

  //HDF5 APIs definitions
  hid_t       file_id; //file identifier
  hid_t plist_id;  //property list identifier
  hid_t dset_id; //dataset identifier
  hid_t       dataspace; //data space identifier
  hid_t       memspace; //memory space identifier

  hsize_t count[2]; //hyperslab selection parameters
  hsize_t offset[2];

  //Set up file access property list with parallel I/O access
  plist_id = H5Pcreate(H5P_FILE_ACCESS);
  H5Pset_fapl_mpio(plist_id, OP_MPI_HDF5_WORLD, info);

  file_id = H5Fopen(file, H5F_ACC_RDONLY, plist_id );
  H5Pclose(plist_id);

  /*find total size of this map by reading attributes*/
  int g_size;
  //open existing data set
  dset_id = H5Dopen(file_id, name, H5P_DEFAULT);
  //get OID of the attribute
  hid_t attr = H5Aopen(dset_id, "size", H5P_DEFAULT);
  //read attribute
  H5Aread(attr,H5T_NATIVE_INT,&g_size);
  H5Aclose(attr);
  H5Dclose(dset_id);

  //calculate local size of set for this mpi process
  int l_size = compute_local_size (g_size, comm_size, my_rank);

  //check if size is accurate
  if(from->size != l_size)
  {
    printf("map from set size %d in file %s and size %d do not match on rank %d\n",
        l_size,file,from->size, my_rank);
    MPI_Abort(OP_MPI_HDF5_WORLD, 2);
  }

  /*find dim with available attributes*/
  int map_dim = 0;
  //open existing data set
  dset_id = H5Dopen(file_id, name, H5P_DEFAULT);
  //get OID of the attribute
  attr = H5Aopen(dset_id, "dim", H5P_DEFAULT);
  //read attribute
  H5Aread(attr,H5T_NATIVE_INT,&map_dim);
  H5Aclose(attr);
  H5Dclose(dset_id);
  if(map_dim != dim)
  {
    printf("map.dim %d in file %s and dim %d do not match\n",map_dim,file,dim);
    MPI_Abort(OP_MPI_HDF5_WORLD, 2);
  }

  /*find type with available attributes*/
  dataspace= H5Screate(H5S_SCALAR);
  hid_t  atype = H5Tcopy(H5T_C_S1);
  H5Tset_size(atype, 10);
  //open existing data set
  dset_id = H5Dopen(file_id, name, H5P_DEFAULT);
  //get OID of the attribute
  attr = H5Aopen(dset_id, "type", H5P_DEFAULT);
  //read attribute
  char typ[10];
  H5Aread(attr,atype,typ);
  H5Aclose(attr);
  H5Sclose(dataspace);
  H5Dclose(dset_id);

  /*read in map in hyperslabs*/

  //Create the dataset with default properties and close dataspace.
  dset_id = H5Dopen(file_id, name, H5P_DEFAULT);

  //Each process defines dataset in memory and reads from a hyperslab in the file.
  int disp = 0;
  int* sizes = (int *)xmalloc(sizeof(int)*comm_size);
  MPI_Allgather(&l_size, 1, MPI_INT, sizes, 1, MPI_INT, OP_MPI_HDF5_WORLD);
  for(int i = 0; i<my_rank; i++)disp = disp + sizes[i];
  free(sizes);

  count[0] = l_size;
  count[1] = dim;
  offset[0] = disp;
  offset[1] = 0;
  memspace = H5Screate_simple(2, count, NULL);

  //Select hyperslab in the file.
  dataspace = H5Dget_space(dset_id);
  H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, offset, NULL, count, NULL);

  //Create property list for collective dataset write.
  plist_id = H5Pcreate(H5P_DATASET_XFER);
  H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);

  //initialize data buffer and read data
  int* map = 0;
  if(strcmp(typ,"int") == 0)
  {
    map = (int *)xmalloc(sizeof(int)*l_size*dim);
    H5Dread(dset_id, H5T_NATIVE_INT, memspace, dataspace, plist_id, map);
  }
  else if (strcmp(typ,"long") == 0)
  {
    map = (int *)xmalloc(sizeof(long)*l_size*dim);
    H5Dread(dset_id, H5T_NATIVE_LONG, memspace, dataspace, plist_id, map);
  }
  else if (strcmp(typ,"long long") == 0)
  {
    map = (int *)xmalloc(sizeof(long long)*l_size*dim);
    H5Dread(dset_id, H5T_NATIVE_LLONG, memspace, dataspace, plist_id, map);
  }
  else
  {
    printf("unknown type\n");
    MPI_Abort(OP_MPI_HDF5_WORLD, 2);
  }

  H5Pclose(plist_id);
  H5Sclose(memspace);
  H5Sclose(dataspace);
  H5Dclose(dset_id);

  H5Fclose(file_id);
  MPI_Comm_free(&OP_MPI_HDF5_WORLD);

  return op_decl_map(from, to, dim, map, name);
}
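/*
 * Editor's sketch (assumption, not the original OP2 helper): compute_local_size()
 * is only referenced above. A plausible implementation is a plain block
 * partition of g_size across comm_size ranks, with the remainder spread over
 * the lowest-numbered ranks, which is consistent with how the displacements
 * are accumulated from MPI_Allgather above.
 */
static int compute_local_size(int g_size, int comm_size, int my_rank)
{
  int base = g_size / comm_size;   /* every rank gets at least this many */
  int rem  = g_size % comm_size;   /* first 'rem' ranks get one extra element */
  return (my_rank < rem) ? base + 1 : base;
}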
/*-------------------------------------------------------------------------
 * Function:    gent_att_compound_vlstr
 *
 * Purpose:     Generate a dataset and a group.
 *              Both have an attribute with a compound datatype consisting
 *              of a variable length string and an integer.
 *
 *-------------------------------------------------------------------------
 */
static void gent_att_compound_vlstr(hid_t loc_id)
{
    typedef struct { /* Compound structure for the attribute */
        int i;
        char *v;
    } s1;
    hsize_t dim[1] = {1};	/* Dimension size */
    hid_t sid = -1; 		/* Dataspace ID */
    hid_t tid = -1; 		/* Datatype ID */
    hid_t aid = -1; 		/* Attribute ID */
    hid_t did = -1; 		/* Dataset ID */
    hid_t gid = -1; 		/* Group ID */
    hid_t vl_str_tid = -1;	/* Variable length datatype ID */
    hid_t cmpd_tid = -1;	/* Compound datatype ID */
    hid_t null_sid = -1;	/* Null dataspace ID */
    s1 buf;                 /* Buffer */

    buf.i = 9;
    buf.v = "ThisIsAString";

    /* Create an integer datatype */
    if((tid = H5Tcopy(H5T_NATIVE_INT)) < 0)
        goto error;

    /* Create a variable length string */
    if((vl_str_tid = H5Tcopy(H5T_C_S1)) < 0)
        goto error;
    if(H5Tset_size(vl_str_tid, H5T_VARIABLE) < 0)
        goto error;

    /* Create a compound datatype with a variable length string and an integer */
    if((cmpd_tid = H5Tcreate(H5T_COMPOUND, sizeof(s1))) < 0)
        goto error;
    if(H5Tinsert(cmpd_tid, "i", HOFFSET(s1, i), tid) < 0)
        goto error;
    if(H5Tinsert(cmpd_tid, "v", HOFFSET(s1, v), vl_str_tid) < 0)
        goto error;

    /* Create a dataset */
    if((null_sid = H5Screate(H5S_NULL)) < 0)
        goto error;
    if((did = H5Dcreate2(loc_id, DATASET_ATTR, H5T_NATIVE_INT, null_sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;

    /* Attach an attribute with the compound datatype to the dataset */
    if((sid = H5Screate_simple(1, dim, dim)) < 0)
        goto error;
    if((aid = H5Acreate2(did, ATTR, cmpd_tid, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;

    /* Write the attribute */
    buf.i = 9;
    buf.v = "ThisIsAString";
    if(H5Awrite(aid, cmpd_tid, &buf) < 0)
        goto error;

    /* Close the dataset and its attribute */
    if(H5Dclose(did) < 0)
        goto error;
    if(H5Aclose(aid) < 0)
        goto error;

    /* Create a group */
    if((gid = H5Gcreate2(loc_id, GROUP_ATTR, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;

    /* Attach an attribute with the compound datatype to the group */
    if((aid = H5Acreate2(gid, ATTR, cmpd_tid, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;
    if(H5Awrite(aid, cmpd_tid, &buf) < 0)
        goto error;

    /* Close the group and its attribute */
    if(H5Aclose(aid) < 0)
        goto error;
    if(H5Gclose(gid) < 0)
        goto error;

    /* Close dataspaces */
    if(H5Sclose(sid) < 0)
        goto error;
    if(H5Sclose(null_sid) < 0)
        goto error;

    /* Close datatypes */
    if(H5Tclose(tid) < 0)
        goto error;
    if(H5Tclose(vl_str_tid) < 0)
        goto error;
    if(H5Tclose(cmpd_tid) < 0)
        goto error;

error:
    H5E_BEGIN_TRY {
        H5Tclose(tid);
        H5Tclose(vl_str_tid);
        H5Tclose(cmpd_tid);
        H5Sclose(null_sid);
        H5Sclose(sid);
        H5Dclose(did);
        H5Aclose(aid);
        H5Gclose(gid);
    } H5E_END_TRY;

} /* gent_att_compound_vlstr() */
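/*
 * Editor's sketch (assumption): reading the compound/variable-length-string
 * attribute written above back into memory. The DATASET_ATTR and ATTR macros
 * and the included headers are assumed to be the same as in the generator;
 * the memory datatype is rebuilt the same way. Strings allocated by the
 * library for the VL field are released with H5Dvlen_reclaim() (H5Treclaim()
 * in HDF5 1.12+).
 */
static herr_t read_att_compound_vlstr(hid_t loc_id)
{
    typedef struct { int i; char *v; } s1;  /* Same layout as the attribute */
    s1 rbuf;
    hid_t did = -1, aid = -1, sid = -1;
    hid_t vl_str_tid = -1, cmpd_tid = -1;
    herr_t ret = -1;

    /* Rebuild the memory datatype: { int i; variable-length string v; } */
    if((vl_str_tid = H5Tcopy(H5T_C_S1)) < 0) goto done;
    if(H5Tset_size(vl_str_tid, H5T_VARIABLE) < 0) goto done;
    if((cmpd_tid = H5Tcreate(H5T_COMPOUND, sizeof(s1))) < 0) goto done;
    if(H5Tinsert(cmpd_tid, "i", HOFFSET(s1, i), H5T_NATIVE_INT) < 0) goto done;
    if(H5Tinsert(cmpd_tid, "v", HOFFSET(s1, v), vl_str_tid) < 0) goto done;

    /* Open the dataset's attribute and read it */
    if((did = H5Dopen2(loc_id, DATASET_ATTR, H5P_DEFAULT)) < 0) goto done;
    if((aid = H5Aopen(did, ATTR, H5P_DEFAULT)) < 0) goto done;
    if(H5Aread(aid, cmpd_tid, &rbuf) < 0) goto done;

    printf("i = %d, v = %s\n", rbuf.i, rbuf.v);

    /* Free the buffer the library allocated for the variable-length string */
    if((sid = H5Aget_space(aid)) < 0) goto done;
    if(H5Dvlen_reclaim(cmpd_tid, sid, H5P_DEFAULT, &rbuf) < 0) goto done;
    ret = 0;

done:
    H5E_BEGIN_TRY {
        H5Sclose(sid);
        H5Aclose(aid);
        H5Dclose(did);
        H5Tclose(vl_str_tid);
        H5Tclose(cmpd_tid);
    } H5E_END_TRY;

    return ret;
}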
Example #12
0
int main (int argc, char ** argv)  
{
    char        filename [256]; 
    int         rank, size, gidx, i, j, k,l;
    MPI_Comm    comm_dummy = MPI_COMM_WORLD;  /* MPI_Comm is defined through adios_read.h */
    enum ADIOS_DATATYPES attr_type;
    void      * data = NULL;
    uint64_t    start[] = {0,0,0,0,0,0,0,0,0,0};
    uint64_t    count[MAX_DIMS], hcount[MAX_DIMS], bytes_read = 0;
    herr_t      h5_err;
    char        h5name[256],aname[256],fname[256];
    int         dims [MAX_DIMS];
    int         h5rank[MAX_DIMS];
    int         h5i, level;
    hid_t       grp_id [GMAX+1], space_id, dataset_id;
    hid_t       memspace_id, dataspace_id, att_id;
    char        ** grp_name;
    hid_t       type_id;
    hid_t       h5_type_id;
    hsize_t     adims;

    if (argc < 2) {
        printf("Usage: %s <BP-file> <HDF5-file>\n", argv[0]);
        return 1;
    }

    MPI_Init(&argc, &argv);
    h5_err = H5Eset_auto(NULL, NULL );
    ADIOS_FILE * f = adios_fopen (argv[1], comm_dummy);
    HDF5_FILE = H5Fcreate(argv[2],H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

    /* create the complex types for HDF5 */
    complex_real_id = H5Tcreate (H5T_COMPOUND, sizeof (complex_real_t));
    H5Tinsert (complex_real_id, "real", HOFFSET(complex_real_t,re), H5T_NATIVE_FLOAT);
    H5Tinsert (complex_real_id, "imaginary", HOFFSET(complex_real_t,im), H5T_NATIVE_FLOAT);

    complex_double_id = H5Tcreate (H5T_COMPOUND, sizeof (complex_double_t));
    H5Tinsert (complex_double_id, "real", HOFFSET(complex_double_t,re), H5T_NATIVE_DOUBLE);
    H5Tinsert (complex_double_id, "imaginary", HOFFSET(complex_double_t,im), H5T_NATIVE_DOUBLE);

    if (f == NULL) {
        if (DEBUG) printf ("%s\n", adios_errmsg());
	return -1;
    }
    /* For all groups */
    for (gidx = 0; gidx < f->groups_count; gidx++) {
        if (DEBUG) printf("Group %s:\n", f->group_namelist[gidx]);
        ADIOS_GROUP * g = adios_gopen (f, f->group_namelist[gidx]);
        if (g == NULL) {
            if (DEBUG) printf ("%s\n", adios_errmsg());
            return -1;
        }
/* First create all of the groups */
        grp_id [0] = HDF5_FILE;
        for (i = 0; i < g->vars_count; i++) {
             ADIOS_VARINFO * v = adios_inq_var_byid (g, i);
             strcpy(h5name,g->var_namelist[i]);
             grp_name = bp_dirparser (h5name, &level);
             for (j = 0; j < level-1; j++) {
                grp_id [j + 1] = H5Gopen (grp_id [j], grp_name [j]);
                if (grp_id [j + 1] < 0) {
                   grp_id [j + 1] = H5Gcreate (grp_id [j], grp_name [j], 0);
                }
             }
             for (j=1; j<level; j++) {
                  H5Gclose(grp_id[j]);
             }
        }
/* Now we can write data into these scalars */        
        /* For all variables */
        if (DEBUG) printf("  Variables=%d:\n", g->vars_count);
        for (i = 0; i < g->vars_count; i++) {
             ADIOS_VARINFO * v = adios_inq_var_byid (g, i);

            uint64_t total_size = adios_type_size (v->type, v->value);
            for (j = 0; j < v->ndim; j++)
                total_size *= v->dims[j];
            strcpy(h5name,g->var_namelist[i]);
            if (DEBUG) printf("    %-9s  %s", adios_type_to_string(v->type), g->var_namelist[i]);
            h5_err = bp_getH5TypeId (v->type, &h5_type_id);
            if (v->type==adios_string) H5Tset_size(h5_type_id,strlen(v->value)); 
            if (v->ndim == 0) {
                /* Scalars do not need to be read in, we get it from the metadata
                   when using adios_inq_var */
                if (DEBUG) printf(" = %s\n", value_to_string(v->type, v->value, 0));
                 // add the hdf5 dataset, these are scalars
                for (h5i = 0;h5i<MAX_DIMS;h5i++) 
                   count[0] = 0;
                count[0] = 1; // we are writing just 1 element, RANK=1
                h5_err = bp_getH5TypeId (v->type, &h5_type_id);
                H5LTmake_dataset(HDF5_FILE,h5name,1,count,h5_type_id,v->value);
            } else {

                    h5_err = readVar(g, v,  h5name);
            }
            adios_free_varinfo (v);
        } /* variables */

        /* For all attributes */
        if (DEBUG) printf("  Attributes=%d:\n", g->attrs_count);
        for (i = 0; i < g->attrs_count; i++) {
            enum ADIOS_DATATYPES atype;
            int  asize;
	    void *adata;
            adios_get_attr_byid (g, i, &atype, &asize, &adata);
            grp_name = bp_dirparser (g->attr_namelist[i], &level);
            strcpy(aname,grp_name[level-1]); 
            // the name of the attribute is the last in the array
            // we then need to concat the rest together
            strcpy(fname,"/");
            for (j=0;j<level-1;j++) {
              strcat(fname,grp_name[j]); 
            }
            h5_err = bp_getH5TypeId (atype, &h5_type_id);

            // let's create the attribute
            adims = 1;
            if (atype==adios_string) H5Tset_size(h5_type_id,strlen(adata)); 
            space_id = H5Screate(H5S_SCALAR); // just a scalar
            att_id = H5Acreate(HDF5_FILE, g->attr_namelist[i], h5_type_id, space_id,H5P_DEFAULT);
            h5_err = H5Awrite(att_id, h5_type_id, adata);
            h5_err = H5Aclose(att_id);
            h5_err = H5Sclose(space_id);

            if (DEBUG) printf("    %-9s  %s = %s\n", adios_type_to_string(atype), 
                    g->attr_namelist[i], value_to_string(atype, adata, 0));
            free(adata);
        } /* attributes */

        adios_gclose (g);
    } /* groups */

    adios_fclose (f);
    h5_err =  H5Fclose(HDF5_FILE);

    MPI_Finalize();
    return 0;
}
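/*
 * Editor's sketch (assumption): bp_dirparser() used above comes from the
 * ADIOS utilities and is not shown here. A helper with the behaviour the
 * converter relies on -- split a '/'-separated variable path into its
 * components and report the number of levels -- could look like the
 * following. The name split_path_sketch and the 32-level capacity are
 * illustrative only.
 */
#include <stdlib.h>
#include <string.h>

static char **split_path_sketch(const char *path, int *level)
{
    char  *copy  = strdup(path);                      /* strtok modifies its input */
    char **parts = (char **)malloc(32 * sizeof(char *));
    int    n     = 0;
    char  *tok;

    for (tok = strtok(copy, "/"); tok != NULL && n < 32; tok = strtok(NULL, "/"))
        parts[n++] = strdup(tok);                     /* keep each path component */

    free(copy);
    *level = n;
    return parts;   /* caller frees each entry and then the array */
}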
Example #13
0
int main( int argc, char ** argv ) {

 hid_t       file_id,group_id,header_id; 
 hid_t       hdf5_dataspace,hdf5_attribute;
 hsize_t     pdims[2];
 hsize_t     mdims[1];
 hsize_t     tdims[1];
 hsize_t     fdims[1];
 float       positions[NPARTS*NDIMS];
 float       velocities[NPARTS*NDIMS];
 int         IDs[NPARTS];
 int         nFiles=1;
 int         Npart[NTYPES]={0,0,0,0,NPARTS,0};
 int         Npart_hw[NTYPES]={0,0,0,0,0,0};
#ifdef HAS_CORE
 double      core_fraction=0.1;
 double      disk_mass=(1.0-core_fraction)*(500*400/1.0e10);
 double      core_mass=disk_mass*core_fraction/(1.0-core_fraction);
 double      mass_per=disk_mass/(NPARTS-1);
#else
 double      disk_mass=(500*400/1.0e10);
 double      mass_per=disk_mass/(NPARTS);
#endif
 //double      Massarr[NTYPES]={0.0,0.0,0.0,0.0,mass_per,0.0};
 float      masses[NPARTS];
 herr_t      status;
 double      redshift=0.0;
 double      time=0.0;
 double      boxsize=0.0;
 double      dzero=0.0;
 double      done=1.0;
 int         izero=0;
 int         ione=1;
 double      distance;
 double      radius=0.77;
 double      base_vel=0.0;
 double      r,rscaled,a,v,cosine,sine,con,posx,posy;
 double      rotate_factor=1.0;
 double      G=6.67e-8;  // cm^3 g^-1 s^-2
 char        filename[80];

 int i,j,k;

  seed_by_time(0);

  if (argc>1) {
      sscanf(argv[1],"%s",filename);
  } else {
      strcpy(filename,"gal_test.hdf5");
  }


#ifdef HAS_CORE
  masses[0]=core_mass;
  for(i=1;i<NPARTS;i++) {
      masses[i]=mass_per;
  }
#else
  for(i=0;i<NPARTS;i++) {
      masses[i]=mass_per;
  }
#endif


  positions[0]=0.0;
  positions[1]=0.0;
  positions[2]=0.0;
  for(i=1;i<NPARTS;i++) {
    distance=2.0*radius;
    while(distance>radius) {
        distance=0.0;
        for(j=0;j<NDIMS;j++) {
           positions[3*i+j] = radius*(1.0-2.0*(double)rand()/(double)RAND_MAX);
           distance+=pow(positions[3*i+j],2.0);
        }
        distance=sqrt(distance);
    }
  }
/*   random velocity
  for(i=0;i<NPARTS;i++) {
    for(j=0;j<NDIMS;j++) {
       velocities[3*i+j] = base_vel*(1.0-2.0*(double)rand()/(double)RAND_MAX);
    }
  }
*/


  velocities[0]=0.0;
  velocities[1]=0.0;
  velocities[2]=0.0;
        for(i=1;i<NPARTS;i++) {
                posx = positions[NDIMS*i];
                posy = positions[NDIMS*i+1];
                r=sqrt(pow(posx,2.0)+pow(posy,2.0));
#ifdef HAS_CORE
                con = G*(disk_mass+core_mass)*1.99e43*pow((r/radius),3.0);
#else
                con = G*(disk_mass)*1.99e43*pow((r/radius),3.0);
#endif
                rscaled=r*3.089e21;    // convert to cm
                if(r>0.0) {
                        v=sqrt(con/rscaled)*1.0e-5;
                        cosine=posx/r;
                        sine=posy/r;
                        velocities[NDIMS*i+0]=v*rotate_factor*(-sine);
                        velocities[NDIMS*i+1]=v*rotate_factor*cosine;
                        velocities[NDIMS*i+2]=0.0;
                }
        }


  for(i=0;i<NPARTS;i++) {
    IDs[i]=i;
  }

	// EXAMPLE("make a dataset");
  
 file_id = H5Fcreate (filename,
      H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); 
 group_id = H5Gcreate(file_id, "PartType4", 0);
 header_id = H5Gcreate(file_id, "Header", 0);
   
 pdims[0] = NPARTS;
 pdims[1] = NDIMS;
 mdims[0] = NPARTS;
 tdims[0] = NTYPES;
 fdims[0] = 1;

  hdf5_dataspace = H5Screate(H5S_SIMPLE);
  H5Sset_extent_simple(hdf5_dataspace, 1, tdims, NULL);
  hdf5_attribute = H5Acreate(header_id, "NumPart_ThisFile", H5T_NATIVE_INT, hdf5_dataspace, H5P_DEFAULT);
  H5Awrite(hdf5_attribute, H5T_NATIVE_INT, Npart);
  H5Aclose(hdf5_attribute);
  H5Sclose(hdf5_dataspace);

  hdf5_dataspace = H5Screate(H5S_SIMPLE);
  H5Sset_extent_simple(hdf5_dataspace, 1, tdims, NULL);
  hdf5_attribute = H5Acreate(header_id, "NumPart_Total", H5T_NATIVE_UINT, hdf5_dataspace, H5P_DEFAULT);
  H5Awrite(hdf5_attribute, H5T_NATIVE_UINT, Npart);
  H5Aclose(hdf5_attribute);
  H5Sclose(hdf5_dataspace);

  hdf5_dataspace = H5Screate(H5S_SIMPLE);
  H5Sset_extent_simple(hdf5_dataspace, 1, tdims, NULL);
  hdf5_attribute = H5Acreate(header_id, "NumPart_Total_HW", H5T_NATIVE_UINT, hdf5_dataspace, H5P_DEFAULT);
  H5Awrite(hdf5_attribute, H5T_NATIVE_UINT, Npart_hw);
  H5Aclose(hdf5_attribute);
  H5Sclose(hdf5_dataspace);

/*
  hdf5_dataspace = H5Screate(H5S_SIMPLE);
  H5Sset_extent_simple(hdf5_dataspace, 1, tdims, NULL);
  hdf5_attribute = H5Acreate(header_id, "MassTable", H5T_NATIVE_DOUBLE, hdf5_dataspace, H5P_DEFAULT);
  H5Awrite(hdf5_attribute, H5T_NATIVE_DOUBLE, Massarr);
  H5Aclose(hdf5_attribute);
  H5Sclose(hdf5_dataspace);
*/

  hdf5_dataspace = H5Screate(H5S_SCALAR);
  hdf5_attribute = H5Acreate(header_id, "Time", H5T_NATIVE_DOUBLE, hdf5_dataspace, H5P_DEFAULT);
  H5Awrite(hdf5_attribute, H5T_NATIVE_DOUBLE, &time);
  H5Aclose(hdf5_attribute);
  H5Sclose(hdf5_dataspace);


  hdf5_dataspace = H5Screate(H5S_SCALAR);
  hdf5_attribute = H5Acreate(header_id, "Redshift", H5T_NATIVE_DOUBLE, hdf5_dataspace, H5P_DEFAULT);
  H5Awrite(hdf5_attribute, H5T_NATIVE_DOUBLE, &redshift);
  H5Aclose(hdf5_attribute);
  H5Sclose(hdf5_dataspace);

  hdf5_dataspace = H5Screate(H5S_SCALAR);
  hdf5_attribute = H5Acreate(header_id, "BoxSize", H5T_NATIVE_DOUBLE, hdf5_dataspace, H5P_DEFAULT);
  H5Awrite(hdf5_attribute, H5T_NATIVE_DOUBLE, &boxsize);
  H5Aclose(hdf5_attribute);
  H5Sclose(hdf5_dataspace);

 hdf5_dataspace = H5Screate(H5S_SCALAR);
 hdf5_attribute = H5Acreate(header_id, "NumFilesPerSnapshot", H5T_NATIVE_INT,
          hdf5_dataspace, H5P_DEFAULT);
 H5Awrite(hdf5_attribute, H5T_NATIVE_INT, &nFiles);
 H5Aclose(hdf5_attribute);
 H5Sclose(hdf5_dataspace);

  hdf5_dataspace = H5Screate(H5S_SCALAR);
  hdf5_attribute = H5Acreate(header_id, "Omega0", H5T_NATIVE_DOUBLE, hdf5_dataspace, H5P_DEFAULT);
  H5Awrite(hdf5_attribute, H5T_NATIVE_DOUBLE, &dzero);
  H5Aclose(hdf5_attribute);
  H5Sclose(hdf5_dataspace);

  hdf5_dataspace = H5Screate(H5S_SCALAR);
  hdf5_attribute = H5Acreate(header_id, "OmegaLambda", H5T_NATIVE_DOUBLE, hdf5_dataspace, H5P_DEFAULT);
  H5Awrite(hdf5_attribute, H5T_NATIVE_DOUBLE, &dzero);
  H5Aclose(hdf5_attribute);
  H5Sclose(hdf5_dataspace);

  hdf5_dataspace = H5Screate(H5S_SCALAR);
  hdf5_attribute = H5Acreate(header_id, "HubbleParam", H5T_NATIVE_DOUBLE, hdf5_dataspace, H5P_DEFAULT);
  H5Awrite(hdf5_attribute, H5T_NATIVE_DOUBLE, &done);
  H5Aclose(hdf5_attribute);
  H5Sclose(hdf5_dataspace);

  hdf5_dataspace = H5Screate(H5S_SCALAR);
  hdf5_attribute = H5Acreate(header_id, "Flag_Sfr", H5T_NATIVE_INT, hdf5_dataspace, H5P_DEFAULT);
  H5Awrite(hdf5_attribute, H5T_NATIVE_INT, &izero);
  H5Aclose(hdf5_attribute);
  H5Sclose(hdf5_dataspace);

  hdf5_dataspace = H5Screate(H5S_SCALAR);
  hdf5_attribute = H5Acreate(header_id, "Flag_Cooling", H5T_NATIVE_INT, hdf5_dataspace, H5P_DEFAULT);
  H5Awrite(hdf5_attribute, H5T_NATIVE_INT, &izero);
  H5Aclose(hdf5_attribute);
  H5Sclose(hdf5_dataspace);

  hdf5_dataspace = H5Screate(H5S_SCALAR);
  hdf5_attribute = H5Acreate(header_id, "Flag_StellarAge", H5T_NATIVE_INT, hdf5_dataspace, H5P_DEFAULT);
  H5Awrite(hdf5_attribute, H5T_NATIVE_INT, &izero);
  H5Aclose(hdf5_attribute);
  H5Sclose(hdf5_dataspace);

  hdf5_dataspace = H5Screate(H5S_SCALAR);
  hdf5_attribute = H5Acreate(header_id, "Flag_Metals", H5T_NATIVE_INT, hdf5_dataspace, H5P_DEFAULT);
  H5Awrite(hdf5_attribute, H5T_NATIVE_INT, &izero);
  H5Aclose(hdf5_attribute);
  H5Sclose(hdf5_dataspace);

  hdf5_dataspace = H5Screate(H5S_SCALAR);
  hdf5_attribute = H5Acreate(header_id, "Flag_Feedback", H5T_NATIVE_INT, hdf5_dataspace, H5P_DEFAULT);
  H5Awrite(hdf5_attribute, H5T_NATIVE_INT, &izero);
  H5Aclose(hdf5_attribute);
  H5Sclose(hdf5_dataspace);

  hdf5_dataspace = H5Screate(H5S_SIMPLE);
  H5Sset_extent_simple(hdf5_dataspace, 1, tdims, NULL);
  hdf5_attribute = H5Acreate(header_id, "Flag_Entropy_ICs", H5T_NATIVE_UINT, hdf5_dataspace, H5P_DEFAULT);
  H5Awrite(hdf5_attribute, H5T_NATIVE_UINT, Npart_hw);
  H5Aclose(hdf5_attribute);
  H5Sclose(hdf5_dataspace);


 status = H5LTmake_dataset(file_id,"PartType4/Coordinates",2,
            pdims,H5T_NATIVE_FLOAT,positions);
 status = H5LTmake_dataset(file_id,"PartType4/ParticleIDs",1,
            mdims,H5T_NATIVE_UINT,IDs);
 status = H5LTmake_dataset(file_id,"PartType4/Velocities",2,
            pdims,H5T_NATIVE_FLOAT,velocities);
 status = H5LTmake_dataset(file_id,"PartType4/Masses",1,
            mdims,H5T_NATIVE_FLOAT,masses);

 status = H5Fclose (file_id);

	// PASSED();

 return 0;


}
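/*
 * Editor's sketch (assumption): seed_by_time() is called at the top of the
 * example above but not defined in the snippet. A minimal version seeds the
 * C PRNG from the wall clock; the offset parameter lets callers vary the seed
 * within the same second.
 */
#include <stdlib.h>
#include <time.h>

static void seed_by_time(int offset)
{
    srand((unsigned int)time(NULL) + (unsigned int)offset);
}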
Example #14
0
int
main (void)
{
    hid_t       file, space, dset, attr;            /* Handles */
    herr_t      status;
    hsize_t     dims[2] = {DIM0, DIM1};
    double      wdata[DIM0][DIM1],                  /* Write buffer */
                **rdata;                            /* Read buffer */
    int         ndims,
                i, j;

    /*
     * Initialize data.
     */
    for (i=0; i<DIM0; i++)
        for (j=0; j<DIM1; j++)
            wdata[i][j] = (double) i / (j + 0.5) + j;

    /*
     * Create a new file using the default properties.
     */
    file = H5Fcreate (FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

    /*
     * Create dataset with a scalar dataspace.
     */
    space = H5Screate (H5S_SCALAR);
    dset = H5Dcreate (file, DATASET, H5T_STD_I32LE, space, H5P_DEFAULT);
    status = H5Sclose (space);

    /*
     * Create dataspace.  Setting maximum size to NULL sets the maximum
     * size to be the current size.
     */
    space = H5Screate_simple (2, dims, NULL);

    /*
     * Create the attribute and write the floating point data to it.
     * In this example we will save the data as 64 bit little endian
     * IEEE floating point numbers, regardless of the native type.  The
     * HDF5 library automatically converts between different floating
     * point types.
     */
    attr = H5Acreate (dset, ATTRIBUTE, H5T_IEEE_F64LE, space, H5P_DEFAULT);
    status = H5Awrite (attr, H5T_NATIVE_DOUBLE, wdata[0]);

    /*
     * Close and release resources.
     */
    status = H5Aclose (attr);
    status = H5Dclose (dset);
    status = H5Sclose (space);
    status = H5Fclose (file);


    /*
     * Now we begin the read section of this example.  Here we assume
     * the attribute has the same name and rank, but can have any size.
     * Therefore we must allocate a new array to read in data using
     * malloc().
     */

    /*
     * Open file, dataset, and attribute.
     */
    file = H5Fopen (FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
    dset = H5Dopen (file, DATASET);
    attr = H5Aopen_name (dset, ATTRIBUTE);

    /*
     * Get dataspace and allocate memory for read buffer.  This is a
     * two dimensional attribute so the dynamic allocation must be done
     * in steps.
     */
    space = H5Aget_space (attr);
    ndims = H5Sget_simple_extent_dims (space, dims, NULL);

    /*
     * Allocate array of pointers to rows.
     */
    rdata = (double **) malloc (dims[0] * sizeof (double *));

    /*
     * Allocate space for floating point data.
     */
    rdata[0] = (double *) malloc (dims[0] * dims[1] * sizeof (double));

    /*
     * Set the rest of the pointers to rows to the correct addresses.
     */
    for (i=1; i<dims[0]; i++)
        rdata[i] = rdata[0] + i * dims[1];

    /*
     * Read the data.
     */
    status = H5Aread (attr, H5T_NATIVE_DOUBLE, rdata[0]);

    /*
     * Output the data to the screen.
     */
    printf ("%s:\n", ATTRIBUTE);
    for (i=0; i<dims[0]; i++) {
        printf (" [");
        for (j=0; j<dims[1]; j++)
            printf (" %6.4f", rdata[i][j]);
        printf ("]\n");
    }

    /*
     * Close and release resources.
     */
    free (rdata[0]);
    free (rdata);
    status = H5Aclose (attr);
    status = H5Dclose (dset);
    status = H5Sclose (space);
    status = H5Fclose (file);

    return 0;
}
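/*
 * Editor's note: the example above uses the pre-1.8 signatures (H5Dcreate,
 * H5Acreate, H5Dopen, H5Aopen_name), which need the deprecated-symbol
 * mappings. A sketch of the same create calls with the HDF5 1.8+ versioned
 * APIs, assuming the same FILE, DATASET, ATTRIBUTE, DIM0 and DIM1 macros:
 */
static herr_t create_with_v18_api(void)
{
    hid_t   file, space, dset, attr;
    hsize_t dims[2] = {DIM0, DIM1};

    if((file = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        return -1;

    /* Scalar dataset, created with the explicit v2 signature */
    if((space = H5Screate(H5S_SCALAR)) < 0)
        return -1;
    if((dset = H5Dcreate2(file, DATASET, H5T_STD_I32LE, space,
                          H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        return -1;
    if(H5Sclose(space) < 0)
        return -1;

    /* 2-D attribute, created with the explicit v2 signature */
    if((space = H5Screate_simple(2, dims, NULL)) < 0)
        return -1;
    if((attr = H5Acreate2(dset, ATTRIBUTE, H5T_IEEE_F64LE, space,
                          H5P_DEFAULT, H5P_DEFAULT)) < 0)
        return -1;

    /* ... write with H5Awrite as in the example above ... */

    H5Aclose(attr);
    H5Sclose(space);
    H5Dclose(dset);
    H5Fclose(file);
    return 0;
}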
Example #15
0
void op_write_hdf5(char const * file_name)
{
  printf("Writing to %s\n",file_name);

  //declare timers
  double cpu_t1, cpu_t2, wall_t1, wall_t2;
  double time;
  double max_time;
  op_timers(&cpu_t1, &wall_t1); //timer start for hdf5 file write

  //create new communicator
  int my_rank, comm_size;
  MPI_Comm_dup(MPI_COMM_WORLD, &OP_MPI_HDF5_WORLD);
  MPI_Comm_rank(OP_MPI_HDF5_WORLD, &my_rank);
  MPI_Comm_size(OP_MPI_HDF5_WORLD, &comm_size);

  //MPI variables
  MPI_Info info  = MPI_INFO_NULL;

  //HDF5 APIs definitions
  hid_t       file_id; //file identifier
  hid_t plist_id;  //property list identifier
  hid_t dset_id = 0; //dataset identifier
  hid_t       dataspace; //data space identifier
  hid_t       memspace; //memory space identifier

  hsize_t     dimsf[2]; // dataset dimensions
  hsize_t count[2]; //hyperslab selection parameters
  hsize_t offset[2];

  //Set up file access property list with parallel I/O access
  plist_id = H5Pcreate(H5P_FILE_ACCESS);
  H5Pset_fapl_mpio(plist_id, OP_MPI_HDF5_WORLD, info);

  //Create a new file collectively and release property list identifier.
  file_id = H5Fcreate(file_name, H5F_ACC_TRUNC, H5P_DEFAULT, plist_id);
  H5Pclose(plist_id);

  /*loop over all the op_sets and write them to file*/
  for(int s=0; s<OP_set_index; s++) {
    op_set set=OP_set_list[s];

    //Create the dataspace for the dataset.
    hsize_t dimsf_set[] = {1};
    dataspace = H5Screate_simple(1, dimsf_set, NULL);

    //Create the dataset with default properties and close dataspace.
    dset_id = H5Dcreate(file_id, set->name, H5T_NATIVE_INT, dataspace,
        H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    //Create property list for collective dataset write.
    plist_id = H5Pcreate(H5P_DATASET_XFER);
    H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);

    int size = 0;
    int* sizes = (int *)xmalloc(sizeof(int)*comm_size);
    MPI_Allgather(&set->size, 1, MPI_INT, sizes, 1, MPI_INT, OP_MPI_HDF5_WORLD);
    for(int i = 0; i<comm_size; i++)size = size + sizes[i];

    //write data
    H5Dwrite(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, plist_id, &size);
    H5Sclose(dataspace);
    H5Pclose(plist_id);
    H5Dclose(dset_id);
  }


  /*loop over all the op_maps and write them to file*/
  for(int m=0; m<OP_map_index; m++) {
    op_map map=OP_map_list[m];

    //find total size of map
    int* sizes = (int *)xmalloc(sizeof(int)*comm_size);
    int g_size = 0;
    MPI_Allgather(&map->from->size, 1, MPI_INT, sizes, 1, MPI_INT, OP_MPI_HDF5_WORLD);
    for(int i = 0; i<comm_size; i++)g_size = g_size + sizes[i];

    //Create the dataspace for the dataset.
    dimsf[0] = g_size;
    dimsf[1] = map->dim;
    dataspace = H5Screate_simple(2, dimsf, NULL);

    //Create the dataset with default properties and close dataspace.
    if(sizeof(map->map[0]) == sizeof(int))
      dset_id = H5Dcreate(file_id, map->name, H5T_NATIVE_INT, dataspace,
          H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    else if(sizeof(map->map[0]) == sizeof(long))
      dset_id = H5Dcreate(file_id, map->name, H5T_NATIVE_LONG, dataspace,
          H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    else if(sizeof(map->map[0]) == sizeof(long long))
      dset_id = H5Dcreate(file_id, map->name, H5T_NATIVE_LLONG, dataspace,
          H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    H5Sclose(dataspace);


    //Each process defines dataset in memory and writes it to a hyperslab
    //in the file.
    int disp = 0;
    for(int i = 0; i<my_rank; i++)disp = disp + sizes[i];
    count[0] = map->from->size;
    count[1] = dimsf[1];
    offset[0] = disp;
    offset[1] = 0;
    memspace = H5Screate_simple(2, count, NULL);

    //Select hyperslab in the file.
    dataspace = H5Dget_space(dset_id);
    H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, offset, NULL, count, NULL);

    //Create property list for collective dataset write.
    plist_id = H5Pcreate(H5P_DATASET_XFER);
    H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);

    //write data
    if(sizeof(map->map[0]) == sizeof(int))
      H5Dwrite(dset_id, H5T_NATIVE_INT, memspace, dataspace, plist_id, map->map);
    else if(sizeof(map->map[0]) == sizeof(long))
      H5Dwrite(dset_id, H5T_NATIVE_LONG, memspace, dataspace, plist_id, map->map);
    else if(sizeof(map->map[0]) == sizeof(long long))
      H5Dwrite(dset_id, H5T_NATIVE_LLONG, memspace, dataspace, plist_id, map->map);

    H5Pclose(plist_id);
    H5Sclose(memspace);
    H5Sclose(dataspace);
    H5Dclose(dset_id);

    free(sizes);

    /*attach attributes to map*/

    //open existing data set
    dset_id = H5Dopen(file_id, map->name, H5P_DEFAULT);
    //create the data space for the attribute
    hsize_t dims = 1;
    dataspace = H5Screate_simple(1, &dims, NULL);

    //Create an int attribute - size
    hid_t attribute = H5Acreate(dset_id, "size", H5T_NATIVE_INT, dataspace,
        H5P_DEFAULT, H5P_DEFAULT);
    //Write the attribute data.
    H5Awrite(attribute, H5T_NATIVE_INT, &g_size);
    //Close the attribute.
    H5Aclose(attribute);

    //Create an int attribute - dimension
    attribute = H5Acreate(dset_id, "dim", H5T_NATIVE_INT, dataspace,
        H5P_DEFAULT, H5P_DEFAULT);
    //Write the attribute data.
    H5Awrite(attribute, H5T_NATIVE_INT, &map->dim);
    //Close the attribute.
    H5Aclose(attribute);
    H5Sclose(dataspace);

    //Create a string attribute - type
    dataspace= H5Screate(H5S_SCALAR);
    hid_t atype = H5Tcopy(H5T_C_S1);
    H5Tset_size(atype, 10);
    attribute = H5Acreate(dset_id, "type", atype, dataspace,
        H5P_DEFAULT, H5P_DEFAULT);

    if(sizeof(map->map[0]) == sizeof(int))
      H5Awrite(attribute, atype, "int");
    if(sizeof(map->map[0]) == sizeof(long))
      H5Awrite(attribute, atype, "long");
    if(sizeof(map->map[0]) == sizeof(long long))
      H5Awrite(attribute, atype, "long long");

    H5Aclose(attribute);
    //Close the dataspace
    H5Sclose(dataspace);
    //Close the dataset.
    H5Dclose(dset_id);
  }

  /*loop over all the op_dats and write them to file*/
  for(int d=0; d<OP_dat_index; d++) {
    op_dat dat=OP_dat_list[d];

    //find total size of dat
    int* sizes = (int *)xmalloc(sizeof(int)*comm_size);
    int g_size = 0;
    MPI_Allgather(&dat->set->size, 1, MPI_INT, sizes, 1, MPI_INT, OP_MPI_HDF5_WORLD);
    for(int i = 0; i<comm_size; i++)g_size = g_size + sizes[i];

    //Create the dataspace for the dataset.
    dimsf[0] = g_size;
    dimsf[1] = dat->dim;
    dataspace = H5Screate_simple(2, dimsf, NULL);

    //Create the dataset with default properties and close dataspace.
    if(strcmp(dat->type,"double")==0)
      dset_id = H5Dcreate(file_id, dat->name, H5T_NATIVE_DOUBLE, dataspace,
          H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    else if(strcmp(dat->type,"float")==0)
      dset_id = H5Dcreate(file_id, dat->name, H5T_NATIVE_FLOAT, dataspace,
          H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    else if(strcmp(dat->type,"int")==0)
      dset_id = H5Dcreate(file_id, dat->name, H5T_NATIVE_INT, dataspace,
          H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    else printf("Unknown type\n");

    H5Sclose(dataspace);

    //Each process defines dataset in memory and writes it to a hyperslab
    //in the file.
    int disp = 0;
    for(int i = 0; i<my_rank; i++)disp = disp + sizes[i];
    count[0] = dat->set->size;
    count[1] = dimsf[1];
    offset[0] = disp;
    offset[1] = 0;
    memspace = H5Screate_simple(2, count, NULL);

    //Select hyperslab in the file.
    dataspace = H5Dget_space(dset_id);
    H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, offset, NULL, count, NULL);

    //Create property list for collective dataset write.
    plist_id = H5Pcreate(H5P_DATASET_XFER);
    H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);

    //write data
    if(strcmp(dat->type,"double") == 0)
      H5Dwrite(dset_id, H5T_NATIVE_DOUBLE, memspace, dataspace, plist_id, dat->data);
    else if(strcmp(dat->type,"float") == 0)
      H5Dwrite(dset_id, H5T_NATIVE_FLOAT, memspace, dataspace, plist_id, dat->data);
    else if(strcmp(dat->type,"int") == 0)
      H5Dwrite(dset_id, H5T_NATIVE_INT, memspace, dataspace, plist_id, dat->data);
    else printf("Unknown type\n");

    H5Pclose(plist_id);
    H5Sclose(memspace);
    H5Sclose(dataspace);
    H5Dclose(dset_id);
    free(sizes);


    /*attach attributes to dat*/

    //open existing data set
    dset_id = H5Dopen(file_id, dat->name, H5P_DEFAULT);
    //create the data space for the attribute
    hsize_t dims = 1;
    dataspace = H5Screate_simple(1, &dims, NULL);

    //Create an int attribute - size
    hid_t attribute = H5Acreate(dset_id, "size", H5T_NATIVE_INT, dataspace,
        H5P_DEFAULT, H5P_DEFAULT);
    //Write the attribute data.
    H5Awrite(attribute, H5T_NATIVE_INT, &dat->size);
    //Close the attribute.
    H5Aclose(attribute);

    //Create an int attribute - dimension
    attribute = H5Acreate(dset_id, "dim", H5T_NATIVE_INT, dataspace,
        H5P_DEFAULT, H5P_DEFAULT);
    //Write the attribute data.
    H5Awrite(attribute, H5T_NATIVE_INT, &dat->dim);
    H5Aclose(attribute);
    H5Sclose(dataspace);

    //Create a string attribute - type
    dataspace= H5Screate(H5S_SCALAR);
    hid_t atype = H5Tcopy(H5T_C_S1);
    H5Tset_size(atype, 10);
    attribute = H5Acreate(dset_id, "type", atype, dataspace,
        H5P_DEFAULT, H5P_DEFAULT);
    H5Awrite(attribute, atype, dat->type);
    H5Aclose(attribute);

    //Close the dataspace.
    H5Sclose(dataspace);
    H5Dclose(dset_id);
  }

  H5Fclose(file_id);

  op_timers(&cpu_t2, &wall_t2);  //timer stop for hdf5 file write
  //compute hdf5 file write time
  time = wall_t2-wall_t1;
  MPI_Reduce(&time, &max_time, 1, MPI_DOUBLE, MPI_MAX, MPI_ROOT, OP_MPI_HDF5_WORLD);
  //print performance results
  if(my_rank == MPI_ROOT)
  {
    printf("Max hdf5 file write time = %lf\n\n",max_time);
  }
  MPI_Comm_free(&OP_MPI_HDF5_WORLD);

}
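/*
 * Editor's sketch (assumption): xmalloc() is used throughout these OP2
 * routines but not shown. A common definition is a malloc() wrapper that
 * aborts on allocation failure instead of returning NULL; OP2's actual
 * helper may differ.
 */
#include <stdio.h>
#include <stdlib.h>

static void *xmalloc(size_t nbytes)
{
  void *p = malloc(nbytes);
  if (p == NULL && nbytes > 0) {
    fprintf(stderr, "xmalloc: failed to allocate %lu bytes\n",
            (unsigned long)nbytes);
    exit(EXIT_FAILURE);   /* fail loudly rather than return NULL */
  }
  return p;
}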
Example #16
0
int gal2gad2(NbodyModel *theModel, const char * prefix, const char * path,
         double length, double mass, double velocity) {
#ifdef HAS_HDF5
    int n=theModel->n;
    hid_t       file_id,group_id,header_id; 
    hid_t       hdf5_dataspace,hdf5_attribute;
    hsize_t     pdims[2];
    hsize_t     mdims[1];
    hsize_t     tdims[1];
    hsize_t     fdims[1];
    float       *positions;
    float       *velocities;
    int         *IDs;
    int         nFiles=1;
    int         Npart[6]={0,0,0,0,n,0};
    int         Npart_hw[6]={0,0,0,0,0,0};
    float       *masses;
    herr_t      status;
    int         i_zero=0;
    double      d_zero=0.0;
    double      d_one=1.0;
    int i;
    char hdf5file[100];
    char paramfile[100];
    FILE * pfile;

    sprintf(hdf5file,"%s%s.hdf5",path,prefix);
    sprintf(paramfile,"%s%s.param",path,prefix);
    positions = (float *)malloc(sizeof(float)*3*n);
    velocities = (float *)malloc(sizeof(float)*3*n);
    masses = (float *)malloc(sizeof(float)*n);
    IDs = (int *)malloc(sizeof(int)*n);

    printf("HDF5FILE BEING GENERATED\n");

    for(i=0;i<n;i++) {
        positions[i*3+0] = (float)theModel->x[i];
        positions[i*3+1] = (float)theModel->y[i];
        positions[i*3+2] = (float)theModel->z[i];
        velocities[i*3+0] = (float)theModel->vx[i];
        velocities[i*3+1] = (float)theModel->vy[i];
        velocities[i*3+2] = (float)theModel->vz[i];
        masses[i] = (float)theModel->mass[i];
        IDs[i]=i;
    }

    file_id = H5Fcreate (hdf5file,
        H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); 
    group_id = H5Gcreate1(file_id, "PartType4", 0);
    header_id = H5Gcreate1(file_id, "Header", 0);
    pdims[0] = n;
    pdims[1] = 3;
    mdims[0] = n;
    tdims[0] = 6;
    fdims[0] = 1;

    hdf5_dataspace = H5Screate(H5S_SIMPLE);
    H5Sset_extent_simple(hdf5_dataspace, 1, tdims, NULL);
    hdf5_attribute = H5Acreate1(header_id, "NumPart_ThisFile",
        H5T_NATIVE_INT, hdf5_dataspace, H5P_DEFAULT);
    H5Awrite(hdf5_attribute, H5T_NATIVE_INT, Npart);
    H5Aclose(hdf5_attribute);
    H5Sclose(hdf5_dataspace);

    hdf5_dataspace = H5Screate(H5S_SIMPLE);
    H5Sset_extent_simple(hdf5_dataspace, 1, tdims, NULL);
    hdf5_attribute = H5Acreate1(header_id, "NumPart_Total",
        H5T_NATIVE_UINT, hdf5_dataspace, H5P_DEFAULT);
    H5Awrite(hdf5_attribute, H5T_NATIVE_UINT, Npart);
    H5Aclose(hdf5_attribute);
    H5Sclose(hdf5_dataspace);
      
    hdf5_dataspace = H5Screate(H5S_SIMPLE);
    H5Sset_extent_simple(hdf5_dataspace, 1, tdims, NULL);
    hdf5_attribute = H5Acreate1(header_id, "NumPart_Total_HW",
        H5T_NATIVE_UINT, hdf5_dataspace, H5P_DEFAULT);
    H5Awrite(hdf5_attribute, H5T_NATIVE_UINT, Npart_hw);
    H5Aclose(hdf5_attribute);
    H5Sclose(hdf5_dataspace);

    hdf5_dataspace = H5Screate(H5S_SCALAR);
    hdf5_attribute = H5Acreate1(header_id, "Time",
        H5T_NATIVE_DOUBLE, hdf5_dataspace, H5P_DEFAULT);
    H5Awrite(hdf5_attribute, H5T_NATIVE_DOUBLE, &time);
    H5Aclose(hdf5_attribute);
    H5Sclose(hdf5_dataspace);


    hdf5_dataspace = H5Screate(H5S_SCALAR);
    hdf5_attribute = H5Acreate1(header_id, "Redshift",
        H5T_NATIVE_DOUBLE, hdf5_dataspace, H5P_DEFAULT);
    H5Awrite(hdf5_attribute, H5T_NATIVE_DOUBLE, &d_zero);
    H5Aclose(hdf5_attribute);
    H5Sclose(hdf5_dataspace);

    hdf5_dataspace = H5Screate(H5S_SCALAR);
    hdf5_attribute = H5Acreate1(header_id, "BoxSize",
        H5T_NATIVE_DOUBLE, hdf5_dataspace, H5P_DEFAULT);
    H5Awrite(hdf5_attribute, H5T_NATIVE_DOUBLE, &d_zero);
    H5Aclose(hdf5_attribute);
    H5Sclose(hdf5_dataspace);
      
    hdf5_dataspace = H5Screate(H5S_SCALAR);
    hdf5_attribute = H5Acreate1(header_id, "NumFilesPerSnapshot",
        H5T_NATIVE_INT, hdf5_dataspace, H5P_DEFAULT);
    H5Awrite(hdf5_attribute, H5T_NATIVE_INT, &nFiles);
    H5Aclose(hdf5_attribute);
    H5Sclose(hdf5_dataspace);

    hdf5_dataspace = H5Screate(H5S_SCALAR);
    hdf5_attribute = H5Acreate1(header_id, "Omega0",
        H5T_NATIVE_DOUBLE, hdf5_dataspace, H5P_DEFAULT);
    H5Awrite(hdf5_attribute, H5T_NATIVE_DOUBLE, &d_zero);
    H5Aclose(hdf5_attribute);
    H5Sclose(hdf5_dataspace);

    hdf5_dataspace = H5Screate(H5S_SCALAR);
    hdf5_attribute = H5Acreate1(header_id, "OmegaLambda",
        H5T_NATIVE_DOUBLE, hdf5_dataspace, H5P_DEFAULT);
    H5Awrite(hdf5_attribute, H5T_NATIVE_DOUBLE, &d_zero);
    H5Aclose(hdf5_attribute);
    H5Sclose(hdf5_dataspace);

    hdf5_dataspace = H5Screate(H5S_SCALAR);
    hdf5_attribute = H5Acreate1(header_id, "HubbleParam",
        H5T_NATIVE_DOUBLE, hdf5_dataspace, H5P_DEFAULT);
    H5Awrite(hdf5_attribute, H5T_NATIVE_DOUBLE, &d_one);
    H5Aclose(hdf5_attribute);
    H5Sclose(hdf5_dataspace);

    hdf5_dataspace = H5Screate(H5S_SCALAR);
    hdf5_attribute = H5Acreate1(header_id, "Flag_Sfr",
        H5T_NATIVE_INT, hdf5_dataspace, H5P_DEFAULT);
    H5Awrite(hdf5_attribute, H5T_NATIVE_INT, &i_zero);
    H5Aclose(hdf5_attribute);
    H5Sclose(hdf5_dataspace);

    hdf5_dataspace = H5Screate(H5S_SCALAR);
    hdf5_attribute = H5Acreate1(header_id, "Flag_Cooling",
        H5T_NATIVE_INT, hdf5_dataspace, H5P_DEFAULT);
    H5Awrite(hdf5_attribute, H5T_NATIVE_INT, &i_zero);
    H5Aclose(hdf5_attribute);
    H5Sclose(hdf5_dataspace);

    hdf5_dataspace = H5Screate(H5S_SCALAR);
    hdf5_attribute = H5Acreate1(header_id, "Flag_StellarAge",
        H5T_NATIVE_INT, hdf5_dataspace, H5P_DEFAULT);
    H5Awrite(hdf5_attribute, H5T_NATIVE_INT, &i_zero);
    H5Aclose(hdf5_attribute);
    H5Sclose(hdf5_dataspace);

    hdf5_dataspace = H5Screate(H5S_SCALAR);
    hdf5_attribute = H5Acreate1(header_id, "Flag_Metals",
        H5T_NATIVE_INT, hdf5_dataspace, H5P_DEFAULT);
    H5Awrite(hdf5_attribute, H5T_NATIVE_INT, &i_zero);
    H5Aclose(hdf5_attribute);
    H5Sclose(hdf5_dataspace);

    hdf5_dataspace = H5Screate(H5S_SCALAR);
    hdf5_attribute = H5Acreate1(header_id, "Flag_Feedback",
        H5T_NATIVE_INT, hdf5_dataspace, H5P_DEFAULT);
    H5Awrite(hdf5_attribute, H5T_NATIVE_INT, &i_zero);
    H5Aclose(hdf5_attribute);
    H5Sclose(hdf5_dataspace);
      
    hdf5_dataspace = H5Screate(H5S_SIMPLE);
    H5Sset_extent_simple(hdf5_dataspace, 1, tdims, NULL);
    hdf5_attribute = H5Acreate1(header_id, "Flag_Entropy_ICs",
        H5T_NATIVE_UINT, hdf5_dataspace, H5P_DEFAULT);
    H5Awrite(hdf5_attribute, H5T_NATIVE_UINT, Npart_hw);
    H5Aclose(hdf5_attribute);
    H5Sclose(hdf5_dataspace);
      
    status = H5LTmake_dataset(file_id,"PartType4/Coordinates",2,
        pdims,H5T_NATIVE_FLOAT,positions);
    status = H5LTmake_dataset(file_id,"PartType4/ParticleIDs",1,
        mdims,H5T_NATIVE_UINT,IDs);
    status = H5LTmake_dataset(file_id,"PartType4/Velocities",2,
        pdims,H5T_NATIVE_FLOAT,velocities);
    status = H5LTmake_dataset(file_id,"PartType4/Masses",1,
        mdims,H5T_NATIVE_FLOAT,masses);

    status = H5Fclose (file_id);

    free(positions);
    free(velocities);
    free(masses);
    free(IDs);

    
    pfile = fopen(paramfile,"w");
    fprintf(pfile,"InitCondFile\t%s\n",prefix);
    fprintf(pfile,"OutputDir\tout\n");
    fprintf(pfile,"EnergyFile\tenergy.txt\n");
    fprintf(pfile,"InfoFile\tinfo.txt\n");
    fprintf(pfile,"TimingsFile\ttimings.txt\n");
    fprintf(pfile,"CpuFile\tcpu.txt\n");
    fprintf(pfile,"RestartFile\trestart\n");
    fprintf(pfile,"SnapshotFileBase\tsnapshot\n");
    fprintf(pfile,"OutputListFilename\toutput_list.txt\n");
    fprintf(pfile,"ICFormat\t3\n");
    fprintf(pfile,"SnapFormat\t3\n");
    fprintf(pfile,"TypeOfTimestepCriterion\t0\n");
    fprintf(pfile,"OutputListOn\t0\n");
    fprintf(pfile,"PeriodicBoundariesOn\t0\n");
    fprintf(pfile,"TimeBegin\t0.0\n");
    fprintf(pfile,"TimeMax\t%le\n",theModel->tFinal);
    fprintf(pfile,"Omega0\t0\n");
    fprintf(pfile,"OmegaLambda\t0\n");
    fprintf(pfile,"OmegaBaryon\t0\n");
    fprintf(pfile,"HubbleParam\t1.0\n");
    fprintf(pfile,"BoxSize\t0\n");
    fprintf(pfile,"TimeBetSnapshot\t%le\n",theModel->tFinal/100); //change this
    fprintf(pfile,"TimeOfFirstSnapshot\t0\n");
    fprintf(pfile,"CpuTimeBetRestartFile\t300.0\n");
    fprintf(pfile,"TimeBetStatistics\t0.1\n");
    fprintf(pfile,"NumFilesPerSnapshot\t1\n");
    fprintf(pfile,"NumFilesWrittenInParallel\t1\n");
    fprintf(pfile,"ErrTolIntAccuracy\t0.0025\n");
    fprintf(pfile,"CourantFac\t0.15\n");
    fprintf(pfile,"MaxSizeTimestep\t0.01\n");
    fprintf(pfile,"MinSizeTimestep\t0.0\n");
    fprintf(pfile,"ErrTolTheta\t0.05\n");
    fprintf(pfile,"TypeOfOpeningCriterion\t1\n");
    fprintf(pfile,"ErrTolForceAcc\t0.0005\n");
    fprintf(pfile,"TreeDomainUpdateFrequency\t0.01\n");
    fprintf(pfile,"DesNumNgb\t50\n");
    fprintf(pfile,"MaxNumNgbDeviation\t2\n");
    fprintf(pfile,"ArtBulkViscConst\t0.8\n");
    fprintf(pfile,"InitGasTemp\t0\n");
    fprintf(pfile,"MinGasTemp\t0\n");
    fprintf(pfile,"PartAllocFactor\t1.5\n");
    fprintf(pfile,"TreeAllocFactor\t0.8\n");
    fprintf(pfile,"BufferSize\t25\n");
    fprintf(pfile,"UnitLength_in_cm\t%le  \n",length);
    fprintf(pfile,"UnitMass_in_g\t%le    \n",mass);
    fprintf(pfile,"UnitVelocity_in_cm_per_s\t%le  \n",velocity);
    fprintf(pfile,"GravityConstantInternal\t0\n");
    fprintf(pfile,"MinGasHsmlFractional\t0.25\n");
    fprintf(pfile,"SofteningGas\t0\n");
    fprintf(pfile,"SofteningHalo\t1.0\n");
    fprintf(pfile,"SofteningDisk\t0.4\n");
    fprintf(pfile,"SofteningBulge\t0\n");
    fprintf(pfile,"SofteningStars\t1.0e-2\n");
    fprintf(pfile,"SofteningBndry\t0\n");
    fprintf(pfile,"SofteningGasMaxPhys\t0\n");
    fprintf(pfile,"SofteningHaloMaxPhys\t1.0\n");
    fprintf(pfile,"SofteningDiskMaxPhys\t0.4\n");
    fprintf(pfile,"SofteningBulgeMaxPhys\t0\n");
    fprintf(pfile,"SofteningStarsMaxPhys\t1.0e-2\n");
    fprintf(pfile,"SofteningBndryMaxPhys\t0\n");
    fprintf(pfile,"MaxRMSDisplacementFac\t0.2\n");
    fprintf(pfile,"TimeLimitCPU\t36000\n");
    fprintf(pfile,"ResubmitOn\t0\n");
    fprintf(pfile,"ResubmitCommand\tmy-scriptfile\n");
    fprintf(pfile,"ComovingIntegrationOn\t0\n");
    fclose(pfile);
    return 0;
#else
    printf("ATTEMTPING TO RUN GAL2GAD2 ROUTINE WITHOUT HDF5 SUPPORT!\n");
    printf("EXITING!\n");
    exit(0);
    return 1;
#endif
}
Example #17
0
int
main (void)
{
    hid_t       file, space, dtype, dset, attr;     /* Handles */
    herr_t      status;
    hsize_t     dims[1] = {DIM0};
    size_t      len;
    char        wdata[DIM0*LEN],                    /* Write buffer */
                *rdata,                             /* Read buffer */
                str[LEN] = "OPAQUE",
                *tag;
    int         ndims,
                i, j;

    /*
     * Initialize data.
     */
    for (i=0; i<DIM0; i++) {
        for (j=0; j<LEN-1; j++)
            wdata[j + i * LEN] = str[j];
        wdata[LEN - 1 + i * LEN] = (char) i + '0';
    }

    /*
     * Create a new file using the default properties.
     */
    file = H5Fcreate (FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

    /*
     * Create dataset with a null dataspace.
     */
    space = H5Screate (H5S_NULL);
    dset = H5Dcreate (file, DATASET, H5T_STD_I32LE, space, H5P_DEFAULT,
                H5P_DEFAULT, H5P_DEFAULT);
    status = H5Sclose (space);

    /*
     * Create opaque datatype and set the tag to something appropriate.
     * For this example we will write and view the data as a character
     * array.
     */
    dtype = H5Tcreate (H5T_OPAQUE, LEN);
    status = H5Tset_tag (dtype, "Character array");

    /*
     * Create dataspace.  Setting maximum size to NULL sets the maximum
     * size to be the current size.
     */
    space = H5Screate_simple (1, dims, NULL);

    /*
     * Create the attribute and write the opaque data to it.
     */
    attr = H5Acreate (dset, ATTRIBUTE, dtype, space, H5P_DEFAULT, H5P_DEFAULT);
    status = H5Awrite (attr, dtype, wdata);

    /*
     * Close and release resources.
     */
    status = H5Aclose (attr);
    status = H5Dclose (dset);
    status = H5Sclose (space);
    status = H5Tclose (dtype);
    status = H5Fclose (file);


    /*
     * Now we begin the read section of this example.  Here we assume
     * the attribute has the same name and rank, but can have any size.
     * Therefore we must allocate a new array to read in data using
     * malloc().
     */

    /*
     * Open file, dataset, and attribute.
     */
    file = H5Fopen (FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
    dset = H5Dopen (file, DATASET, H5P_DEFAULT);
    attr = H5Aopen (dset, ATTRIBUTE, H5P_DEFAULT);

    /*
     * Get datatype and properties for the datatype.  Note that H5Tget_tag
     * allocates space for the string in tag, so we must remember to free() it
     * later.
     */
    dtype = H5Aget_type (attr);
    len = H5Tget_size (dtype);
    tag = H5Tget_tag (dtype);

    /*
     * Get dataspace and allocate memory for read buffer.
     */
    space = H5Aget_space (attr);
    ndims = H5Sget_simple_extent_dims (space, dims, NULL);
    rdata = (char *) malloc (dims[0] * len);

    /*
     * Read the data.
     */
    status = H5Aread (attr, dtype, rdata);

    /*
     * Output the data to the screen.
     */
    printf ("Datatype tag for %s is: \"%s\"\n", ATTRIBUTE, tag);
    for (i=0; i<dims[0]; i++) {
        printf ("%s[%u]: ",ATTRIBUTE,i);
        for (j=0; j<len; j++)
            printf ("%c", rdata[j + i * len]);
        printf ("\n");
    }

    /*
     * Close and release resources.
     */
    free (rdata);
    free (tag);
    status = H5Aclose (attr);
    status = H5Dclose (dset);
    status = H5Sclose (space);
    status = H5Tclose (dtype);
    status = H5Fclose (file);

    return 0;
}