void CryptTask::calculateMd5( const QString& path,char * result )
{
	emit titleUpdate( tr( "Calculating md5sum" ) ) ;
	emit disableCancel() ;

	zuluCryptMD5_CTX ctx ;
	zuluCryptMD5_Init( &ctx ) ;

	QByteArray p = path.toLatin1() ;

	int fd = open( p.constData(),O_RDONLY ) ;

	if( fd != -1 ){

		struct stat st ;
		fstat( fd,&st ) ;

		void * map = mmap( 0,st.st_size,PROT_READ,MAP_PRIVATE,fd,0 ) ;

		if( map != MAP_FAILED ){

			zuluCryptMD5_Update( &ctx,map,st.st_size ) ;
			munmap( map,st.st_size ) ;

			/*
			 * an MD5 digest is 16 bytes,keep it in an unsigned buffer to
			 * avoid sign extension when formatting the bytes as hex below.
			 */
			unsigned char digest[ 16 ] ;
			zuluCryptMD5_Final( digest,&ctx ) ;

			/*
			 * each byte becomes two hex characters,so the caller's buffer
			 * must hold at least 33 bytes( 32 characters + NUL terminator ).
			 */
			for( int i = 0 ; i < 16 ; i++ ){
				snprintf( &result[ i * 2 ],3,"%02x",( unsigned int )digest[ i ] ) ;
			}
		}

		close( fd ) ;
	}

	emit enableCancel() ;
}
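/*
 * A minimal, self contained sketch of the hex formatting step above, kept
 * outside of CryptTask. The helper name digestToHex(), the buffer sizes and
 * the fake digest value are illustrative assumptions, not part of the
 * original code: a 16 byte MD5 digest always expands to 32 hex characters
 * plus a terminating NUL, so callers of calculateMd5() are assumed to pass
 * a result buffer of at least 33 bytes.
 */
#include <stdio.h>
#include <stddef.h>

static void digestToHex( const unsigned char digest[ 16 ],char * out,size_t outSize )
{
	/* refuse to write a partial result if the buffer is too small */
	if( outSize < 33 ){
		if( outSize > 0 ){
			out[ 0 ] = '\0' ;
		}
		return ;
	}
	for( int i = 0 ; i < 16 ; i++ ){
		/* "%02x" with an unsigned byte never produces more than 2 characters */
		snprintf( &out[ i * 2 ],3,"%02x",( unsigned int )digest[ i ] ) ;
	}
}

/* example: a fake digest of 0x00..0x0f prints as "000102030405060708090a0b0c0d0e0f" */
static void digestToHexExample( void )
{
	unsigned char digest[ 16 ] ;
	for( int i = 0 ; i < 16 ; i++ ){
		digest[ i ] = ( unsigned char )i ;
	}
	char hex[ 33 ] ;
	digestToHex( digest,hex,sizeof( hex ) ) ;
	printf( "%s\n",hex ) ;
}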
void medAbstractDatabaseImporter::importFile ( void )
{
    QMutexLocker locker ( &d->mutex );

    /* The idea of this algorithm can be summarized in 3 steps:
     * 1. Get a list of all the files that will (try to) be imported or indexed
     * 2. Filter out files that cannot be read, that cannot be written afterwards, or that are already in the db
     * 3. Fill in the files' metadata, write them to the db, and populate the db tables
     *
     * Note that depending on the input files, they might be aggregated by volume.
     */

    // 1) Obtain a list of all the files that are going to be processed.
    // This flattens the tree structure (if the input is a directory)
    // and puts all the files in one single list.
    QStringList fileList = getAllFilesToBeProcessed ( d->file );

    // Files that pass the filters named above are grouped
    // by volume in this map and will be written to the db afterwards.
    // The key is the name of the aggregated file for the volume.
    QMap<QString, QStringList> imagesGroupedByVolume;
    QMap<QString, QString> imagesGroupedByPatient;
    QMap<QString, QString> imagesGroupedBySeriesId;

    int currentFileNumber = 0; // used only for calculating progress

    // If importing, and depending on the input files, they might be aggregated,
    // that is: files corresponding to the same volume will be written
    // in a single output meta file (e.g. .mha).
    // This map stores a unique id per volume and its volume number.
    QMap<QString, int> volumeUniqueIdToVolumeNumber;
    int volumeNumber = 1;

    // 2) Select (by filtering) the files to be imported
    //
    // In this first loop we read the headers of all the images to be imported
    // and check that there is no problem reading the file, reading the header,
    // or selecting a proper format to store the new file afterwards.
    // New files are NOT written to the medInria database yet; they are stored in a map and written in a later step.
    QString tmpPatientId;
    QString currentPatientId = "";
    QString patientID;

    QString tmpSeriesUid;
    QString currentSeriesUid = "-1";
    QString currentSeriesId = "";

    bool atLeastOneImportSucceeded = false;

    foreach ( QString file, fileList )
    {
        if ( d->isCancelled ) // check if the user cancelled the process
            break;

        emit progress ( this, ( ( qreal ) currentFileNumber / ( qreal ) fileList.count() ) * 50.0 ); //TODO: does reading and filtering really represent 50% of the importing process?
        currentFileNumber++;

        QFileInfo fileInfo ( file );

        if (fileInfo.size() != 0)
        {
            dtkSmartPointer<medAbstractData> medData;

            // 2.1) Try reading the file information: just the header, not the whole file
            bool readOnlyImageInformation = true;
            medData = tryReadImages ( QStringList ( fileInfo.filePath() ), readOnlyImageInformation );

            if ( !medData )
            {
                qWarning() << "Reader was unable to read: " << fileInfo.filePath();
                continue;
            }

            // 2.2) Fill in missing metadata
            populateMissingMetadata ( medData, medMetaDataKeys::SeriesID.getFirstValue(medData));
            QString patientName = medMetaDataKeys::PatientName.getFirstValue(medData).simplified();
            QString birthDate = medMetaDataKeys::BirthDate.getFirstValue(medData);
            tmpPatientId = patientName + birthDate;

            if(tmpPatientId != currentPatientId)
            {
                currentPatientId = tmpPatientId;
                patientID = getPatientID(patientName, birthDate);
            }

            medData->setMetaData ( medMetaDataKeys::PatientID.key(), QStringList() << patientID );

            tmpSeriesUid = medMetaDataKeys::SeriesDicomID.getFirstValue(medData);

            if (tmpSeriesUid != currentSeriesUid)
            {
                currentSeriesUid = tmpSeriesUid;
                currentSeriesId = medMetaDataKeys::SeriesID.getFirstValue(medData);
            }
            else
                medData->setMetaData ( medMetaDataKeys::SeriesID.key(), QStringList() << currentSeriesId );

            // 2.3) Generate a unique id for each volume:
            // all images of the same volume should share the same id
            QString volumeId = generateUniqueVolumeId ( medData );

            // check whether the image belongs to a new volume
            if ( !volumeUniqueIdToVolumeNumber.contains ( volumeId ) )
            {
                volumeUniqueIdToVolumeNumber[volumeId] = volumeNumber;
                volumeNumber++;
            }

            // 2.4) a) Determine the future file name and path based on patient/study/series/image,
            // i.e. where we will write the imported image
            QString imageFileName = determineFutureImageFileName ( medData, volumeUniqueIdToVolumeNumber[volumeId] );
#ifdef Q_OS_WIN32
            if ( (medStorage::dataLocation() + "/" + imageFileName).length() > 255 )
            {
                emit showError ( tr ( "Your database path is too long" ), 5000 );
                emit dataImported(medDataIndex(), d->uuid);
                emit failure ( this );
                return;
            }
#endif

            // 2.4) b) Find the proper extension according to the type of the data,
            // i.e. in which format we will write the file in our database
            QString futureExtension = determineFutureImageExtensionByDataType ( medData );

            // whether we can write the image only matters if we are importing
            if (!d->indexWithoutImporting && futureExtension.isEmpty())
            {
                emit showError(tr("Could not save file due to unhandled data type: ") + medData->identifier(), 5000);
                continue;
            }

            imageFileName = imageFileName + futureExtension;

            // 2.4) c) Add the image to a map so that all images can be written to medInria's database in a later step.
            // First check whether the patient/study/series/image path already exists in the database.
            // Should we emit a message otherwise ???
            if ( !checkIfExists ( medData, fileInfo.fileName() ) )
            {
                imagesGroupedByVolume[imageFileName] << fileInfo.filePath();
                imagesGroupedByPatient[imageFileName] = patientID;
                imagesGroupedBySeriesId[imageFileName] = currentSeriesId;
            }
        }
        else
        {
            QString error = QString(tr("Could not read empty file: ") + fileInfo.completeBaseName());
            qWarning() << __FUNCTION__ << error;
            emit showError(error, 5000);
        }
    }

    // some checks to see if the user cancelled or something failed
    if ( d->isCancelled )
    {
        emit showError (tr ( "User cancelled import process" ), 5000 );
        emit dataImported(medDataIndex(), d->uuid);
        emit cancelled ( this );
        return;
    }

    // from now on the process cannot be cancelled
    emit disableCancel ( this );

    // 3) Re-read the selected files and re-populate them with the missing metadata,
    // then write them to the medInria db and populate the db tables
    QMap<QString, QStringList>::const_iterator it = imagesGroupedByVolume.begin();
    QMap<QString, QString>::const_iterator itPat = imagesGroupedByPatient.begin();
    QMap<QString, QString>::const_iterator itSer = imagesGroupedBySeriesId.begin();

    // 3.1) First check whether, after the filtering, we still have something to import:
    // maybe we had problems with all the files, or they were already in the database
    if ( it == imagesGroupedByVolume.end() )
    {
        // TODO we know which of the two errors occurred, so we could make this message more precise...
        emit showError (tr ( "No compatible image found or all of them had been already imported." ), 5000 );
        emit dataImported(medDataIndex(), d->uuid);
        emit failure ( this );
        return;
    }
    else
        qDebug() << "Chosen directory contains " << imagesGroupedByVolume.size() << " files";

    int imagesCount = imagesGroupedByVolume.count(); // used only to calculate progress
    int currentImageIndex = 0;                       // used only to calculate progress

    medDataIndex index; // stores the last volume's index, to be emitted on success

    // final loop: re-read, re-populate and write to the db
    for ( ; it != imagesGroupedByVolume.end(); it++ )
    {
        emit progress ( this, ( ( qreal ) currentImageIndex / ( qreal ) imagesCount ) * 50.0 + 50.0 ); // 50? I do not think that reading all the headers is half the job...
        currentImageIndex++;

        QString aggregatedFileName = it.key(); // note that this file might aggregate more than one input file
        QStringList filesPaths = it.value();   // input files being aggregated, might be only one or many
        patientID = itPat.value();
        QString seriesID = itSer.value();

        //qDebug() << currentImageIndex << ": " << aggregatedFileName << "with " << filesPaths.size() << " files";

        dtkSmartPointer<medAbstractData> imagemedData;

        QFileInfo imagefileInfo ( filesPaths[0] );

        // 3.2) Try to read the whole image, not just the header
        bool readOnlyImageInformation = false;
        imagemedData = tryReadImages ( filesPaths, readOnlyImageInformation );

        if ( imagemedData )
        {
            // 3.3) a) re-populate missing metadata
            // if there is no SeriesDescription, we use the Series Instance UID tag (specific and required)
            populateMissingMetadata ( imagemedData, medMetaDataKeys::SeriesDicomID.getFirstValue(imagemedData));
            imagemedData->setMetaData ( medMetaDataKeys::PatientID.key(), QStringList() << patientID );
            imagemedData->setMetaData ( medMetaDataKeys::SeriesID.key(), QStringList() << seriesID );

            // 3.3) b) now we are able to add some more metadata
            addAdditionalMetaData ( imagemedData, aggregatedFileName, filesPaths );
        }
        else
        {
            qWarning() << "Could not repopulate data!";
            emit showError (tr ( "Could not read data: " ) + filesPaths[0], 5000 );
            emit dataImported(medDataIndex(), d->uuid);
            emit failure(this);
            return;
        }

        if ( !d->indexWithoutImporting )
        {
            // create the location where the file will be stored
            QFileInfo fileInfo ( medStorage::dataLocation() + aggregatedFileName );

            if ( !fileInfo.dir().exists() && !medStorage::mkpath ( fileInfo.dir().path() ) )
            {
                qDebug() << "Cannot create directory: " << fileInfo.dir().path();
                continue;
            }

            // now write the file
            bool writeSuccess = tryWriteImage ( fileInfo.filePath(), imagemedData );

            if ( !writeSuccess )
            {
                emit showError (tr ( "Could not save data file: " ) + filesPaths[0], 5000 );
                continue;
            }
        }

        atLeastOneImportSucceeded = true;

        // and finally we populate the database
        QFileInfo aggregatedFileNameFileInfo ( aggregatedFileName );
        QString pathToStoreThumbnails = aggregatedFileNameFileInfo.dir().path() + "/" + aggregatedFileNameFileInfo.completeBaseName() + "/";
        index = this->populateDatabaseAndGenerateThumbnails ( imagemedData, pathToStoreThumbnails );

        if(!d->uuid.isNull())
        {
            emit dataImported(index, d->uuid);
        }
        else
        {
            emit dataImported(index);
        }

        itPat++;
        itSer++;
    } // end of the final loop

    if ( !atLeastOneImportSucceeded )
    {
        emit progress ( this,100 );
        emit dataImported(medDataIndex(), d->uuid);
        emit failure(this);
        return;
    }

    d->index = index;

    emit progress ( this,100 );
    emit success ( this );
}
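/*
 * A minimal, self contained sketch of the grouping idea used in step 2 above:
 * files that share the same volume key end up in the same QStringList, and each
 * list is later written as a single aggregated file. The makeVolumeKey() helper
 * and the hard coded metadata values and paths below are illustrative assumptions
 * only; the real importer derives its key inside generateUniqueVolumeId() from
 * the image metadata, which is not shown here.
 */
#include <QMap>
#include <QString>
#include <QStringList>
#include <QDebug>

// hypothetical key builder: one key per (patient, study, series) triple
static QString makeVolumeKey ( const QString & patient, const QString & study, const QString & series )
{
    return patient + "/" + study + "/" + series;
}

static void volumeGroupingSketch()
{
    QMap<QString, QStringList> imagesGroupedByVolume;

    // two slices of the same series end up under the same key...
    imagesGroupedByVolume[ makeVolumeKey ( "patientA", "study1", "seriesT1" ) ] << "/tmp/slice_001.dcm";
    imagesGroupedByVolume[ makeVolumeKey ( "patientA", "study1", "seriesT1" ) ] << "/tmp/slice_002.dcm";

    // ...while a slice from another series gets its own entry
    imagesGroupedByVolume[ makeVolumeKey ( "patientA", "study1", "seriesT2" ) ] << "/tmp/slice_101.dcm";

    // the final write loop then runs once per aggregated volume, as in step 3 above
    for ( QMap<QString, QStringList>::const_iterator it = imagesGroupedByVolume.begin(); it != imagesGroupedByVolume.end(); ++it )
    {
        qDebug() << it.key() << "aggregates" << it.value().size() << "file(s)";
    }
}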