void medAbstractDatabaseImporter::importFile ( void )
{
    QMutexLocker locker ( &d->mutex );

    /* The idea of this algorithm can be summarized in 3 steps:
     * 1. Get a list of all the files that will (try to) be imported or indexed
     * 2. Filter out files that cannot be read, that cannot be written afterwards, or that are already in the db
     * 3. Fill the files' metadata, write them to the db, and populate the db tables
     *
     * Note that depending on the input files, they might be aggregated by volume.
     */

    // 1) Obtain a list of all the files that are going to be processed.
    // This flattens the tree structure (if the input is a directory)
    // and puts all the files in one single list.
    QStringList fileList = getAllFilesToBeProcessed ( d->file );

    // Files that pass the filters named above are grouped
    // by volume in this map and will be written to the db afterwards.
    // The key is the name of the aggregated file for the volume.
    QMap<QString, QStringList> imagesGroupedByVolume;
    QMap<QString, QString> imagesGroupedByPatient;
    QMap<QString, QString> imagesGroupedBySeriesId;

    int currentFileNumber = 0; // used only for calculating progress

    // If importing, and depending on the input files, they might be aggregated,
    // that is: files corresponding to the same volume will be written
    // in a single output meta file (e.g. .mha).
    // This map stores a unique id per volume and its volume number.
    QMap<QString, int> volumeUniqueIdToVolumeNumber;
    int volumeNumber = 1;

    // 2) Select (by filtering) files to be imported
    //
    // In this first loop we read the headers of all the images to be imported
    // and check that there is no problem reading the file or the header,
    // or selecting a proper format to store the new file afterwards.
    // New files are NOT written to the medInria database yet; they are stored in a map and written in a later step.
    QString tmpPatientId;
    QString currentPatientId = "";
    QString patientID;

    QString tmpSeriesUid;
    QString currentSeriesUid = "-1";
    QString currentSeriesId = "";

    bool atLeastOneImportSucceeded = false;

    foreach ( QString file, fileList )
    {
        if ( d->isCancelled ) // check if the user cancelled the process
            break;

        emit progress ( this, ( ( qreal ) currentFileNumber / ( qreal ) fileList.count() ) * 50.0 ); //TODO: does reading and filtering really represent 50% of the importing process?
        currentFileNumber++;

        QFileInfo fileInfo ( file );

        if (fileInfo.size() != 0)
        {
            dtkSmartPointer<medAbstractData> medData;

            // 2.1) Try reading the file information: just the header, not the whole file
            bool readOnlyImageInformation = true;
            medData = tryReadImages ( QStringList ( fileInfo.filePath() ), readOnlyImageInformation );

            if ( !medData )
            {
                qWarning() << "Reader was unable to read: " << fileInfo.filePath();
                continue;
            }

            // 2.2) Fill missing metadata
            populateMissingMetadata ( medData, medMetaDataKeys::SeriesID.getFirstValue(medData) );
            QString patientName = medMetaDataKeys::PatientName.getFirstValue(medData).simplified();
            QString birthDate   = medMetaDataKeys::BirthDate.getFirstValue(medData);
            tmpPatientId = patientName + birthDate;

            if (tmpPatientId != currentPatientId)
            {
                currentPatientId = tmpPatientId;
                patientID = getPatientID(patientName, birthDate);
            }

            medData->setMetaData ( medMetaDataKeys::PatientID.key(), QStringList() << patientID );

            tmpSeriesUid = medMetaDataKeys::SeriesDicomID.getFirstValue(medData);

            if (tmpSeriesUid != currentSeriesUid)
            {
                currentSeriesUid = tmpSeriesUid;
                currentSeriesId = medMetaDataKeys::SeriesID.getFirstValue(medData);
            }
            else
                medData->setMetaData ( medMetaDataKeys::SeriesID.key(), QStringList() << currentSeriesId );

            // 2.3) Generate a unique id for each volume:
            // all images of the same volume should share the same id
            QString volumeId = generateUniqueVolumeId ( medData );

            // check whether the image belongs to a new volume
            if ( !volumeUniqueIdToVolumeNumber.contains ( volumeId ) )
            {
                volumeUniqueIdToVolumeNumber[volumeId] = volumeNumber;
                volumeNumber++;
            }

            // 2.3) a) Determine the future file name and path based on patient/study/series/image,
            // i.e. where the imported image will be written
            QString imageFileName = determineFutureImageFileName ( medData, volumeUniqueIdToVolumeNumber[volumeId] );

#ifdef Q_OS_WIN32
            if ( (medStorage::dataLocation() + "/" + imageFileName).length() > 255 )
            {
                emit showError ( tr ( "Your database path is too long" ), 5000 );
                emit dataImported(medDataIndex(), d->uuid);
                emit failure ( this );
                return;
            }
#endif

            // 2.3) b) Find the proper extension according to the type of the data,
            // i.e. in which format the file will be written in our database
            QString futureExtension = determineFutureImageExtensionByDataType ( medData );

            // we only care whether we can write the image if we are importing
            if (!d->indexWithoutImporting && futureExtension.isEmpty())
            {
                emit showError(tr("Could not save file due to unhandled data type: ") + medData->identifier(), 5000);
                continue;
            }

            imageFileName = imageFileName + futureExtension;

            // 2.3) c) Add the image to a map so that everything is written to medInria's database in a later step.
            // First check whether the patient/study/series/image path already exists in the database.
            // Should we emit a message otherwise ???
            if ( !checkIfExists ( medData, fileInfo.fileName() ) )
            {
                imagesGroupedByVolume[imageFileName] << fileInfo.filePath();
                imagesGroupedByPatient[imageFileName] = patientID;
                imagesGroupedBySeriesId[imageFileName] = currentSeriesId;
            }
        }
        else
        {
            QString error = QString(tr("Could not read empty file: ") + fileInfo.completeBaseName());
            qWarning() << __FUNCTION__ << error;
            emit showError(error, 5000);
        }
    }

    // check whether the user cancelled or something failed
    if ( d->isCancelled )
    {
        emit showError (tr ( "User cancelled import process" ), 5000 );
        emit dataImported(medDataIndex(), d->uuid);
        emit cancelled ( this );
        return;
    }

    // from now on the process cannot be cancelled
    emit disableCancel ( this );

    // 3) Re-read the selected files, re-populate them with missing metadata,
    // then write them to the medInria db and populate the db tables
    QMap<QString, QStringList>::const_iterator it    = imagesGroupedByVolume.begin();
    QMap<QString, QString>::const_iterator     itPat = imagesGroupedByPatient.begin();
    QMap<QString, QString>::const_iterator     itSer = imagesGroupedBySeriesId.begin();

    // 3.1) First check that, after the filtering, we still have something to import:
    // maybe there were problems with all the files, or they were already in the database
    if ( it == imagesGroupedByVolume.end() )
    {
        // TODO we know which of the two errors occurred; we could make this error message better...
        emit showError (tr ( "No compatible image found or all of them had been already imported." ), 5000 );
        emit dataImported(medDataIndex(), d->uuid);
        emit failure ( this );
        return;
    }
    else
        qDebug() << "Chosen directory contains " << imagesGroupedByVolume.size() << " files";

    int imagesCount = imagesGroupedByVolume.count(); // used only to calculate progress
    int currentImageIndex = 0;                       // used only to calculate progress

    medDataIndex index; // stores the last volume's index, to be emitted on success

    // final loop: re-read, re-populate and write to db
    for ( ; it != imagesGroupedByVolume.end(); it++ )
    {
        emit progress ( this, ( ( qreal ) currentImageIndex / ( qreal ) imagesCount ) * 50.0 + 50.0 ); // 50? I do not think that reading all the headers is half the job...
        currentImageIndex++;

        QString aggregatedFileName = it.key();  // note that this file might aggregate more than one input file
        QStringList filesPaths = it.value();    // input files being aggregated; there might be one or many
        patientID = itPat.value();
        QString seriesID = itSer.value();

        //qDebug() << currentImageIndex << ": " << aggregatedFileName << "with " << filesPaths.size() << " files";

        dtkSmartPointer<medAbstractData> imagemedData;

        QFileInfo imagefileInfo ( filesPaths[0] );

        // 3.2) Try to read the whole image, not just the header
        bool readOnlyImageInformation = false;
        imagemedData = tryReadImages ( filesPaths, readOnlyImageInformation );

        if ( imagemedData )
        {
            // 3.3) a) re-populate missing metadata;
            // if there is no SeriesDescription, we use the tag Series Instance UID (specific and required)
            populateMissingMetadata ( imagemedData, medMetaDataKeys::SeriesDicomID.getFirstValue(imagemedData) );
            imagemedData->setMetaData ( medMetaDataKeys::PatientID.key(), QStringList() << patientID );
            imagemedData->setMetaData ( medMetaDataKeys::SeriesID.key(), QStringList() << seriesID );

            // 3.3) b) now we are able to add some more metadata
            addAdditionalMetaData ( imagemedData, aggregatedFileName, filesPaths );
        }
        else
        {
            qWarning() << "Could not repopulate data!";
            emit showError (tr ( "Could not read data: " ) + filesPaths[0], 5000 );
            emit dataImported(medDataIndex(), d->uuid);
            emit failure(this);
            return;
        }

        if ( !d->indexWithoutImporting )
        {
            // create the location where the file will be stored
            QFileInfo fileInfo ( medStorage::dataLocation() + aggregatedFileName );
            if ( !fileInfo.dir().exists() && !medStorage::mkpath ( fileInfo.dir().path() ) )
            {
                qDebug() << "Cannot create directory: " << fileInfo.dir().path();
                continue;
            }

            // now write the file
            bool writeSuccess = tryWriteImage ( fileInfo.filePath(), imagemedData );

            if ( !writeSuccess )
            {
                emit showError (tr ( "Could not save data file: " ) + filesPaths[0], 5000 );
                continue;
            }
        }

        atLeastOneImportSucceeded = true;

        // and finally we populate the database
        QFileInfo aggregatedFileNameFileInfo ( aggregatedFileName );
        QString pathToStoreThumbnails = aggregatedFileNameFileInfo.dir().path() + "/" + aggregatedFileNameFileInfo.completeBaseName() + "/";
        index = this->populateDatabaseAndGenerateThumbnails ( imagemedData, pathToStoreThumbnails );

        if (!d->uuid.isNull())
        {
            emit dataImported(index, d->uuid);
        }
        else
        {
            emit dataImported(index);
        }

        itPat++;
        itSer++;
    } // end of the final loop

    if ( !atLeastOneImportSucceeded )
    {
        emit progress ( this, 100 );
        emit dataImported(medDataIndex(), d->uuid);
        emit failure(this);
        return;
    }

    d->index = index;

    emit progress ( this, 100 );
    emit success ( this );
}
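
// ---------------------------------------------------------------------------
// Illustration (not part of the importer): a minimal, self-contained sketch of
// the volume-grouping bookkeeping used in step 2.3 above. The volume ids, file
// names and the "patient/study/series_N.mha" pattern are hypothetical; in the
// real code they come from generateUniqueVolumeId() and
// determineFutureImageFileName().
// ---------------------------------------------------------------------------
#include <QMap>
#include <QString>
#include <QStringList>
#include <QList>
#include <QDebug>

static void volumeGroupingSketch()
{
    QMap<QString, int> volumeUniqueIdToVolumeNumber;  // volume id -> incremental number
    QMap<QString, QStringList> imagesGroupedByVolume; // aggregated file -> source slices
    int volumeNumber = 1;

    // Hypothetical (volumeId, sourceFile) pairs standing in for the scanned input.
    struct Slice { QString volumeId; QString path; };
    const QList<Slice> slices = {
        { "volA", "/in/slice01.dcm" },
        { "volA", "/in/slice02.dcm" },
        { "volB", "/in/other01.dcm" },
    };

    for (const Slice &s : slices)
    {
        if (!volumeUniqueIdToVolumeNumber.contains(s.volumeId))
            volumeUniqueIdToVolumeNumber[s.volumeId] = volumeNumber++;

        // Slices of the same volume share the volume number, hence the same key.
        QString aggregated = QString("patient/study/series_%1.mha")
                                 .arg(volumeUniqueIdToVolumeNumber[s.volumeId]);
        imagesGroupedByVolume[aggregated] << s.path;
    }

    qDebug() << imagesGroupedByVolume;
    // series_1.mha collects the two "volA" slices, series_2.mha the single "volB" slice
}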
void medAbstractDatabaseImporter::importData()
{
    QMutexLocker locker ( &d->mutex );

    if ( !d->data )
    {
        emit failure ( this );
        emit dataImported(medDataIndex(), d->uuid);
        return;
    }

    populateMissingMetadata(d->data, "EmptySeries");

    if ( !d->data->hasMetaData ( medMetaDataKeys::FilePaths.key() ) )
        d->data->addMetaData ( medMetaDataKeys::FilePaths.key(), QStringList() << "data created internally" );

    // Information about the application and its version
    QString attachedInfoApp = QString("generated with " + QString(PROJECT_NAME) + " " + QString(MEDINRIA_VERSION));
    d->data->setMetaData(medMetaDataKeys::Description.key(), attachedInfoApp);

    QString size = "";
    if ( medAbstractImageData *imagedata = dynamic_cast<medAbstractImageData*> ( d->data.data() ) )
        size = QString::number ( imagedata->zDimension() );
    d->data->setMetaData ( medMetaDataKeys::Size.key(), size );

    QString patientName = medMetaDataKeys::PatientName.getFirstValue(d->data).simplified();
    QString birthDate   = medMetaDataKeys::BirthDate.getFirstValue(d->data);
    QString seriesId    = medMetaDataKeys::SeriesID.getFirstValue(d->data);

    QString patientId = getPatientID(patientName, birthDate);

    d->data->setMetaData ( medMetaDataKeys::PatientID.key(), QStringList() << patientId );

    bool writeSuccess = true;
    QString thumb_dir;

    if ( !d->indexWithoutImporting )
    {
        QString subDirName = "/" + patientId;
        QString imageFileNameBase = subDirName + "/" + seriesId;

        QDir dir ( medStorage::dataLocation() + subDirName );
        if ( !dir.exists() )
        {
            if ( !medStorage::mkpath ( medStorage::dataLocation() + subDirName ) )
            {
                qWarning() << "Unable to create directory for images";
                emit failure ( this );
                emit dataImported(medDataIndex(), d->uuid);
                return;
            }
        }

        QString extension = determineFutureImageExtensionByDataType ( d->data );
        QString imageFileName = imageFileNameBase + extension;

        // write the file
        writeSuccess = tryWriteImage ( medStorage::dataLocation() + imageFileName, d->data );

        if ( !writeSuccess )
        {
            // when creating empty patients or studies, we still need to populate the database
            qWarning() << "Unable to write image " + imageFileName;
            qWarning() << "Either there is nothing to write or a problem occurred when writing.";
        }
        else
        {
            d->data->setMetaData ( "FileName", imageFileName );
        }

        QFileInfo seriesInfo ( imageFileName );
        thumb_dir = seriesInfo.dir().path() + "/" + seriesInfo.completeBaseName() + "/";
    }

    // Now, populate the database
    medDataIndex index = this->populateDatabaseAndGenerateThumbnails ( d->data, thumb_dir );

    emit progress(this, 100);
    emit success(this);

    if (d->uuid == "")
        emit dataImported(index);
    else
        emit dataImported(index, d->uuid);
}
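
// ---------------------------------------------------------------------------
// Illustration (not part of the importer): how importData() lays files out on
// disk. The values below are hypothetical stand-ins for patientId, seriesId,
// the chosen extension and medStorage::dataLocation(); the thumb_dir
// derivation is the same QFileInfo arithmetic used above.
// ---------------------------------------------------------------------------
#include <QFileInfo>
#include <QDir>
#include <QString>
#include <QDebug>

static void importDataLayoutSketch()
{
    QString dataLocation  = "/home/user/.medInria"; // assumed database root
    QString imageFileName = "/pat_42/ser_7.mha";    // imageFileNameBase + extension

    QFileInfo seriesInfo(imageFileName);
    QString thumbDir = seriesInfo.dir().path() + "/" + seriesInfo.completeBaseName() + "/";

    qDebug() << dataLocation + imageFileName; // "/home/user/.medInria/pat_42/ser_7.mha"
    qDebug() << thumbDir;                     // "/pat_42/ser_7/" -> passed as thumb_dir
}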
int PET::write( int options )
{
    int fr;
    ofstream outfile;
    char outbuff[100];
    string patid( getPatientID() );
    string outname( getPatientID() );
    string patname( _prefix );

    cout << "PET::write(): " << _numFrame << " frames of " << _numSlice << " slices." << endl;

    if ( options & OPT_VOLU )
    {
        // Using one output file.
        // string outname( _prefix + "." );
        // string outname( + "." );
        outname.append( "." );
        outname.append( (options & OPT_ANAL) ? "img" : "raw" );
        outfile.open( outname.c_str(), ios::binary );
        if ( !outfile.is_open() )
            error( "Cannot open output file", outname );

        // Write the Analyze header if required.
        if ( options & OPT_ANAL )
            analyze->writePET( this, outname, options, 0 );
    }

    // Run through each frame and write it to disk.
    std::transform( patname.begin(), patname.end(), patname.begin(), (int(*)(int)) toupper );
    for ( fr = 0; fr < _numFrame; fr++ )
    {
        if ( !( options & OPT_VOLU ) )
        {
            // New output file per frame if not writing to one file.
            // Changed this 4/2/07 ahc. Can't remember why I was using patid
            // rather than the abbreviated name for Analyze files.
            // sprintf( outbuff, "%s_%02d.%s",
            //          patid.c_str(), fr, (options & OPT_ANAL) ? "img" : "raw" );
            sprintf( outbuff, "%s_%02d.%s",
                     patname.c_str(), fr, (options & OPT_ANAL) ? "img" : "raw" );
            outfile.open( outbuff, ios::binary );
            if ( !outfile.is_open() )
                error( "Cannot open output file", outbuff );

            // Write the Analyze header if required.
            if ( options & OPT_ANAL )
                analyze->writePET( this, outbuff, options, fr );
        }

        // Call the virtual fn. for the specific PET slices.
        for ( int sl = 0; sl < _numSlice; sl++ )
        {
            writeSliceData( fr, sl, outfile, options );
        }

        if ( !( options & OPT_VOLU ) )
            outfile.close();
        cout << ((fr + 1) % 10) << flush;
    }
    cout << endl;

    if ( outfile.is_open() )
        outfile.close();

    return 0;
}
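
// ---------------------------------------------------------------------------
// Illustration (not part of PET): the per-frame file-name scheme PET::write()
// uses when OPT_VOLU is not set. The prefix and frame count are hypothetical;
// with OPT_ANAL the extension is "img", otherwise "raw".
// ---------------------------------------------------------------------------
#include <algorithm>
#include <cctype>
#include <cstdio>
#include <iostream>
#include <string>

static void petFrameNamingSketch()
{
    std::string patname = "subj01"; // stands in for _prefix
    bool analyzeFormat = true;      // stands in for (options & OPT_ANAL)
    int numFrame = 3;               // stands in for _numFrame

    // Same uppercase transform PET::write() applies to the prefix.
    std::transform( patname.begin(), patname.end(), patname.begin(),
                    [](unsigned char c) { return static_cast<char>(std::toupper(c)); } );

    char outbuff[100];
    for ( int fr = 0; fr < numFrame; fr++ )
    {
        // One output file per frame: SUBJ01_00.img, SUBJ01_01.img, SUBJ01_02.img
        std::snprintf( outbuff, sizeof(outbuff), "%s_%02d.%s",
                       patname.c_str(), fr, analyzeFormat ? "img" : "raw" );
        std::cout << outbuff << std::endl;
    }
}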