Example #1
void NSDFWriter::flush()
{
    // Update the `tend` attribute on every write because we cannot tell
    // whether the simulation is finishing or merely paused.
    writeScalarAttr<string>(filehandle_, "tend", iso_time(NULL));
    
    // append all uniform data
    for (map< string, hid_t>::iterator it = classFieldToUniform_.begin();
         it != classFieldToUniform_.end(); ++it){
        map< string, vector < unsigned int > >::iterator idxit = classFieldToSrcIndex_.find(it->first);
        if (idxit == classFieldToSrcIndex_.end()){
            cerr << "Error: NSDFWriter::flush - could not find entry for " << it->first <<endl;
            break;
        }
        if (data_.size() == 0 || data_[0].size() == 0){
            break;
        }        
        double * buffer = (double*)calloc(idxit->second.size() * steps_, sizeof(double));
        for (unsigned int ii = 0; ii < idxit->second.size(); ++ii){
            for (unsigned int jj = 0; jj < steps_; ++jj){
                buffer[ii * steps_ + jj] = data_[idxit->second[ii]][jj];
            }
            data_[idxit->second[ii]].clear();
        }
        
        hid_t filespace = H5Dget_space(it->second);
        if (filespace < 0){
            free(buffer);
            break;
        }
        hsize_t dims[2];
        hsize_t maxdims[2];
        // retrieve current dataset dimensions
        herr_t status = H5Sget_simple_extent_dims(filespace, dims, maxdims);        
        hsize_t newdims[] = {dims[0], dims[1] + steps_}; // new column count
        status = H5Dset_extent(it->second, newdims); // extend dataset to new column count
        H5Sclose(filespace);
        filespace = H5Dget_space(it->second); // get the updated filespace 
        hsize_t start[2] = {0, dims[1]};
        dims[1] = steps_; // change dims for memspace & hyperslab
        hid_t memspace = H5Screate_simple(2, dims, NULL);
        H5Sselect_hyperslab(filespace, H5S_SELECT_SET, start, NULL, dims, NULL);
        status = H5Dwrite(it->second, H5T_NATIVE_DOUBLE,  memspace, filespace, H5P_DEFAULT, buffer);
        H5Sclose(memspace);
        H5Sclose(filespace);
        free(buffer);
    }
    
    // append all event data
    for (unsigned int ii = 0; ii < eventSrc_.size(); ++ii){
        appendToDataset(getEventDataset(eventSrc_[ii], eventSrcFields_[ii]),
                        events_[ii]);
        events_[ii].clear();
    }
    // flush HDF5 nodes.
    HDF5DataWriter::flush();
}
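
The loop above is the standard HDF5 recipe for growing an extensible dataset: read the current extent, enlarge it with H5Dset_extent, re-fetch the filespace, select a hyperslab over the newly added region, and write into it. The appendToDataset() helper used for the event data here (and throughout the remaining examples) is not shown on this page; the following is only a minimal sketch, assuming a 1-D extensible dataset of doubles, of what such a helper could look like.

// Hypothetical helper, not taken from the source: append a vector of
// doubles to a 1-D extensible dataset using the extend-and-hyperslab pattern.
#include <vector>
#include <hdf5.h>

herr_t appendToDataset(hid_t dataset, const std::vector<double>& data)
{
    if (data.empty()){
        return 0;
    }
    hid_t filespace = H5Dget_space(dataset);
    if (filespace < 0){
        return -1;
    }
    hsize_t size;
    H5Sget_simple_extent_dims(filespace, &size, NULL); // current length
    H5Sclose(filespace);
    hsize_t newsize = size + data.size();
    herr_t status = H5Dset_extent(dataset, &newsize);  // grow the dataset
    if (status < 0){
        return status;
    }
    filespace = H5Dget_space(dataset);                 // refreshed extent
    hsize_t start = size;
    hsize_t count = data.size();
    H5Sselect_hyperslab(filespace, H5S_SELECT_SET, &start, NULL, &count, NULL);
    hid_t memspace = H5Screate_simple(1, &count, NULL);
    status = H5Dwrite(dataset, H5T_NATIVE_DOUBLE, memspace, filespace,
                      H5P_DEFAULT, &data[0]);
    H5Sclose(memspace);
    H5Sclose(filespace);
    return status;
}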
Example #2
void HDF5DataWriter::flush()
{
    if (filehandle_ < 0){
        cerr << "HDF5DataWriter::flush() - "
                "Filehandle invalid. Cannot write data." << endl;
        return;
    }

    for (unsigned int ii = 0; ii < datasets_.size(); ++ii){
        herr_t status = appendToDataset(datasets_[ii], data_[ii]);
        data_[ii].clear();
        if (status < 0){
            cerr << "Warning: appending data for object " << src_[ii]
                 << " returned status " << status << endl;
        }
    }
    HDF5WriterBase::flush();
    H5Fflush(filehandle_, H5F_SCOPE_LOCAL);
}
Example #3
void HDF5DataWriter::flush()
{
    if (filehandle_ < 0){
        cerr << "HDF5DataWriter::flush() - Filehandle invalid. Cannot write data." << endl;
        return;
    }
    for (map < string, vector < double > >::iterator ii = datamap_.begin(); ii != datamap_.end(); ++ ii){
        hid_t dataset = nodemap_[ii->first];
        if (dataset < 0){
            dataset = get_dataset(ii->first);
            nodemap_[ii->first] = dataset;
        }
        herr_t status = appendToDataset(dataset, ii->second);
        if (status < 0){
            cerr << "Warning: appending data for object " << ii->first << " returned status " << status << endl;                
        }
        ii->second.clear();        
    }
    H5Fflush(filehandle_, H5F_SCOPE_LOCAL);
}
Example #4
/**
   Write data to datasets in the HDF5 file. Clear all data in the table
   objects associated with this object. */
void HDF5DataWriter::process(const Eref & e, ProcPtr p)
{
    if (filehandle_ < 0){
        return;
    }
    // cout << "HDF5DataWriter::process: currentTime=" << p->currTime << endl;
    requestOut()->send(e, recvDataBuf()->getFid());
    for (map<string, vector < double > >:: iterator data_it = datamap_.begin(); data_it != datamap_.end(); ++data_it){        
        string path = data_it->first;
        // if (data_it->second.size() >= flushLimit_){
        map < string, hid_t >::iterator node_it = nodemap_.find(path);
        assert (node_it != nodemap_.end());
        if (node_it->second < 0){
            nodemap_[path] = get_dataset(path);
        }
        herr_t status = appendToDataset(nodemap_[path], data_it->second);
        if (status < 0){
            cerr << "Warning: appending data for object " << data_it->first << " returned status " << status << endl;                
        }
        data_it->second.clear();
    }    
}
Example #5
/**
   Write data to datasets in the HDF5 file. Clear all data in the table
   objects associated with this object. */
void HDF5DataWriter::process(const Eref & e, ProcPtr p)
{
    if (filehandle_ < 0){
        return;
    }

    vector <double> dataBuf;
    requestOut()->send(e, &dataBuf);
    for (unsigned int ii = 0; ii < dataBuf.size(); ++ii){
        data_[ii].push_back(dataBuf[ii]);
    }
    ++steps_;
    if (steps_ >= flushLimit_){
        steps_ = 0;
        for (unsigned int ii = 0; ii < datasets_.size(); ++ii){
            herr_t status = appendToDataset(datasets_[ii], data_[ii]);
            data_[ii].clear();
            if (status < 0){
                cerr << "Warning: appending data for object " << src_[ii]
                     << " returned status " << status << endl;
            }
        }
    }
}
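
Note that H5Dset_extent, and hence the append pattern used throughout these examples, only works on chunked datasets created with an unlimited maximum dimension. The get_dataset() and getEventDataset() helpers referenced above are not shown on this page; the sketch below, with an assumed name and signature, illustrates the kind of dataset creation they would have to perform.

// Hypothetical helper, not taken from the source: create an empty 1-D,
// chunked, unlimited dataset of doubles so that it can later be extended.
#include <hdf5.h>

hid_t create_extensible_dataset(hid_t filehandle, const char * path,
                                hsize_t chunksize)
{
    hsize_t dims = 0;                    // start with zero elements
    hsize_t maxdims = H5S_UNLIMITED;     // allow the dataset to grow
    hid_t dataspace = H5Screate_simple(1, &dims, &maxdims);
    hid_t create_props = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(create_props, 1, &chunksize); // chunking is required for extensible datasets
    hid_t dataset = H5Dcreate2(filehandle, path, H5T_NATIVE_DOUBLE, dataspace,
                               H5P_DEFAULT, create_props, H5P_DEFAULT);
    H5Pclose(create_props);
    H5Sclose(dataspace);
    return dataset;
}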