void KVDataSetManager::Print(Option_t* opt) const
{
   //Print list of datasets.
   //If opt="" (default) all datasets are shown with full information.
   //If opt="available" only available datasets are shown, each with a number which can
   //be used with GetAvailableDataSet(Int_t) in order to retrieve the corresponding dataset.

   TString option(opt);
   option.ToUpper();

   if (option.BeginsWith("AVAIL")) {
      if (fNavailable == 0) {
         cout << " *** No available datasets ***" << endl;
      }
      else {
         // numbered list: index is the argument for GetAvailableDataSet(Int_t)
         for (int idx = 1; idx <= fNavailable; ++idx)
            cout << "\t" << idx << ". " << GetAvailableDataSet(idx)->GetTitle() << endl;
      }
      return;
   }

   // default: full 'ls' listing of every known dataset
   if (!fDataSets.GetSize()) return;
   TIter next_dataset(&fDataSets);
   KVDataSet* dataset;
   while ((dataset = (KVDataSet*) next_dataset())) dataset->ls();
}
//_____________________________________ void KVINDRAReconIdent::EndRun(void) { //At the end of each run we: // write the tree into the new file // close the file // copy the file into the required repository (see InitRun) // update the available runlist fIdentFile->cd(); gDataAnalyser->WriteBatchInfo(fIdentTree); GetRawData()->CloneTree(-1,"fast"); //copy raw data tree to file GetGeneData()->CloneTree(-1,"fast"); //copy pulser & laser (gene) tree to file fIdentFile->Write(); //add file to repository // get dataset to which we must associate new run KVDataSet* OutputDataset = gDataRepositoryManager->GetDataSet( gDataSet->GetDataSetEnv("ReconIdent.DataAnalysisTask.OutputRepository", gDataRepository->GetName()), gDataSet->GetName() ); OutputDataset->CommitRunfile("ident", gIndra->GetCurrentRunNumber(), fIdentFile); fIdentFile = 0; fIdentTree = 0; }
//_____________________________________ void KVINDRAReconRoot::InitRun(void) { //When each run is opened, we create a new ROOT file for the identified events we //are going to generate from the reconstructed events we are reading. // By default this file will be written in the same data repository as the recon data file we are reading. // This can be changed by setting the environment variable(s): // // ReconRoot.DataAnalysisTask.OutputRepository: [name of repository] // [name of dataset].ReconRoot.DataAnalysisTask.OutputRepository: [name of repository] // // If no value is set for the current dataset (second variable), the value of the // first variable will be used. If neither is defined, the new file will be written in the same repository as // the recon file (if possible, i.e. if repository is not remote). // get dataset to which we must associate new run KVDataSet* OutputDataset = gDataRepositoryManager->GetDataSet( gDataSet->GetDataSetEnv("ReconRoot.DataAnalysisTask.OutputRepository", gDataRepository->GetName()), gDataSet->GetName() ); //create new ROOT file for identified events fRunNumber = gIndra->GetCurrentRunNumber(); fIdentFile = OutputDataset->NewRunfile("root", fRunNumber); fIdentTree = new TTree("ReconstructedEvents", Form("%s : %s : fully-identified & calibrated events created from recon data", gIndraDB->GetRun(fRunNumber)->GetName(), gIndraDB->GetRun(fRunNumber)->GetTitle()) ); #if ROOT_VERSION_CODE > ROOT_VERSION(5,25,4) #if ROOT_VERSION_CODE < ROOT_VERSION(5,26,1) // The TTree::OptimizeBaskets mechanism is disabled, as for ROOT versions < 5.26/00b // this lead to a memory leak fIdentTree->SetAutoFlush(0); #endif #endif //leaves for reconstructed events fIdentTree->Branch("INDRAReconEvent", "KVINDRAReconEvent", GetEventReference(), 10000000, 0)->SetAutoDelete(kFALSE); Info("InitRun", "Created identified/calibrated data tree %s : %s", fIdentTree->GetName(), fIdentTree->GetTitle()); // initialise identifications gIndra->InitializeIDTelescopes(); // 
print status of identifications gIndra->PrintStatusOfIDTelescopes(); // print status of calibrations gIndra->PrintCalibStatusOfDetectors(); for(register int i=0; i<15; i++) Acodes[i]=0; for(register int i=0; i<4; i++) Astatus[i]=0; }
//_____________________________________ void KVINDRAReconRoot::EndRun(void) { //At the end of each run we: // write the tree into the new file // close the file // copy the file into the required repository (see InitRun) // update the available runlist fIdentFile->cd(); gDataAnalyser->WriteBatchInfo(fIdentTree); GetRawData()->CloneTree(-1,"fast"); //copy raw data tree to file GetGeneData()->CloneTree(-1,"fast"); //copy pulser & laser (gene) tree to file fIdentFile->Write(); //add file to repository // get dataset to which we must associate new run KVDataSet* OutputDataset = gDataRepositoryManager->GetDataSet( gDataSet->GetDataSetEnv("ReconRoot.DataAnalysisTask.OutputRepository", gDataRepository->GetName()), gDataSet->GetName() ); OutputDataset->CommitRunfile("root", gIndra->GetCurrentRunNumber(), fIdentFile); fIdentFile = 0; fIdentTree = 0; cout << endl << "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++" << endl; cout << endl << " BILAN DES COURSES " << endl << endl;; int ntot=0; for(register int i=0; i<4; i++)ntot+=Astatus[i]; for(register int i=0; i<4; i++){ cout << " Status" <<i<< "\t" << Astatus[i] <<"\t"<<setprecision(2)<<100.*Astatus[i]/(1.*ntot)<<" %"<< endl; } cout << endl << " Total all status : " << ntot <<endl<<endl; ntot=0; for(register int i=0; i<15; i++)ntot+=Acodes[i]; for(register int i=0; i<15; i++){ cout << " Code" <<i<< "\t" << Acodes[i] <<"\t"<<setprecision(2)<<100.*Acodes[i]/(1.*ntot)<<" %"<< endl; } cout << endl << " Total all codes : " << ntot <<endl; cout << endl << "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++" << endl; }
void KVINDRARawDataReconstructor::EndRun()
{
   // End-of-run housekeeping: delete the working event, write the three trees
   // (reconstructed, raw, gene) to the output file, then commit the file to
   // the output data repository.

   SafeDelete(recev);
   cout << endl << " *** Number of reconstructed INDRA events : "
        << nb_recon << " ***" << endl << endl;
   file->cd();   // make output file current directory before writing trees
   gDataAnalyser->WriteBatchInfo(tree);
   tree->Write();//write tree to file
   rawtree->Write();
   genetree->Write();
   // get dataset to which we must associate new run
   KVDataSet* OutputDataset =
      gDataRepositoryManager->GetDataSet(gDataSet->GetOutputRepository(taskname), gDataSet->GetName());
   //add new file to repository
   OutputDataset->CommitRunfile(datatype.Data(), fRunNumber, file);
}
Bool_t KVDataSetManager::ReadDataSetList()
{
   //Initialise list of all known datasets from informations in $KVROOT/KVFIles/.kvrootrc
   //(and user's .kvrootrc)

   // the "DataSet" resource is a whitespace-separated list of dataset names
   KVString manip_list = gEnv->GetValue("DataSet", "");
   fDataSets.Clear();

   TObjArray* manips = manip_list.Tokenize(" ");
   TIter next_name(manips);
   TObjString* obj_name;
   while ((obj_name = (TObjString*) next_name())) {
      const TString& name = obj_name->GetString();
      // each dataset is configured by "[name].DataSet.*" resources
      KVDataSet* dataset = NewDataSet();
      dataset->SetName(name.Data());
      dataset->SetTitle(gEnv->GetValue(Form("%s.DataSet.Title", name.Data()), "Experimental dataset"));
      // repository subdirectory defaults to the dataset name itself
      dataset->SetDataPathSubdir(gEnv->GetValue(Form("%s.DataSet.RepositoryDir", name.Data()), name.Data()));
      dataset->SetUserGroups(gEnv->GetValue(Form("%s.DataSet.UserGroup", name.Data()), ""));
      dataset->SetRepository(fRepository);
      fDataSets.Add(dataset);
   }
   delete manips;
   return kTRUE;
}
void KVFAZIARawDataReconstructor::InitRun()
{
   // Creates new ROOT file with TTree for reconstructed events.
   // By default this file will be written in the same data repository as the raw data file we are reading.
   // This can be changed by setting the environment variable(s):
   //
   //     Reconstruction.DataAnalysisTask.OutputRepository:     [name of repository]
   //     [name of dataset].Reconstruction.DataAnalysisTask.OutputRepository:         [name of repository]
   //
   // If no value is set for the current dataset (second variable), the value of the
   // first variable will be used. If neither is defined, the new file will be written in the same repository as
   // the raw file (if possible, i.e. if repository is not remote).

   // Create new KVReconstructedEvent filled with KVFAZIAReconNuc object
   // used to reconstruct & store events
   // (only created once: reused from run to run, reset by EndRun via SafeDelete)
   if (!recev)
      recev = new KVReconstructedEvent(50, "KVFAZIAReconNuc");

   // get dataset to which we must associate new run
   KVDataSet* OutputDataset =
      gDataRepositoryManager->GetDataSet(gDataSet->GetOutputRepository(taskname), gDataSet->GetName());
   file = OutputDataset->NewRunfile(datatype.Data(), GetCurrentRunNumber());

   std::cout << "Writing \"" << datatype.Data() << "\" events in ROOT file " << file->GetName() << std::endl;

   //tree for reconstructed events
   tree = new TTree("ReconstructedEvents",
                    Form("%s : %s : %s events created from raw data",
                         gFaziaDB->GetRun(GetCurrentRunNumber())->GetName(),
                         gFaziaDB->GetRun(GetCurrentRunNumber())->GetTitle(),
                         datatype.Data())
                   );

   //leaves for reconstructed events
   KVEvent::MakeEventBranch(tree, "FAZIAReconEvent", "KVReconstructedEvent", &recev);

   Info("InitRun", "Created reconstructed data tree %s : %s", tree->GetName(), tree->GetTitle());

   // event counter incremented during reconstruction, reported in EndRun
   nb_recon = 0;
}
void KVFAZIARawDataReconstructor::EndRun() { SafeDelete(recev); std::cout << std::endl << " *** Number of reconstructed FAZIA events : " << nb_recon << " ***" << std::endl << std::endl; file->cd(); gDataAnalyser->WriteBatchInfo(tree); tree->Write();//write tree to file // get dataset to which we must associate new run KVDataSet* OutputDataset = gDataRepositoryManager->GetDataSet(gDataSet->GetOutputRepository(taskname), gDataSet->GetName()); //add new file to repository OutputDataset->CommitRunfile(datatype.Data(), GetCurrentRunNumber(), file); ProcInfo_t pid; if (gSystem->GetProcInfo(&pid) == 0) { std::cout << " ------------- Process infos -------------" << std::endl; printf(" CpuSys = %f s. CpuUser = %f s. ResMem = %f MB VirtMem = %f MB\n", pid.fCpuSys, pid.fCpuUser, pid.fMemResident / 1024., pid.fMemVirtual / 1024.); } }
Bool_t KVDataSetManager::ReadAvailableDatasetsFile()
{
   //Opens and reads file containing cached info on available datasets, and sets
   //the availability of the concerned datasets.
   //Returns kTRUE if all goes well.
   //Returns kFALSE if no cache exists or if file cannot be opened.

   if(OpenAvailableDatasetsFile()){
      Info("ReadAvailableDataSetsFile", "Reading cached information in file %s",
            fCacheFileName.Data());
      // recount from scratch: previously the counter was never reset, so a
      // second read of the cache file would double-count available datasets
      fNavailable = 0;
      //read file
      TString line;
      line.ReadLine(fDatasets);
      while (fDatasets.good()) {
         TObjArray *toks = line.Tokenize(": ,");
         // skip blank/malformed lines: unconditional At(0) dereference would
         // crash on an empty token array
         if (toks->GetEntries() < 1) {
            delete toks;
            line.ReadLine(fDatasets);
            continue;
         }
         //first entry is dataset name
         TString datasetname = ((TObjString *) toks->At(0))->String();
         KVDataSet *dataset = GetDataSet(datasetname.Data());
         if(dataset) { //check dataset is known to local version of KaliVeda
            //in case of remote repository, there may be datasets in the remote repository which are not defined here
            if (toks->GetEntries() > 1 && dataset->CheckUserCanAccess()) {
               //AVAILABLE DATASET
               dataset->SetAvailable();
               fNavailable++;
               //each following entry is a subdirectory name
               //('register' qualifier removed: deprecated C++11, ill-formed since C++17)
               for (int i = 1; i < toks->GetEntries(); i++) {
                  dataset->AddAvailableDataType(((TObjString *) toks->At(i))->String().Data());
               }
            } else {
               //UNAVAILABLE DATASET (no subdirs)
               dataset->SetAvailable(kFALSE);
            }
         }
         delete toks;
         line.ReadLine(fDatasets);
      }
      //close file
      fDatasets.close();
      fDatasets.clear();

      if (fNavailable) {
         //now set up array of available datasets' indices:
         //fIndex[i] = position in fDataSets of i-th available dataset
         TIter next(&fDataSets);
         if (fIndex) delete[]fIndex;
         fIndex = new Int_t[fNavailable];
         Int_t i = 0, j = 0;
         KVDataSet *ds;
         while ((ds = (KVDataSet *) next())) {
            if (ds->IsAvailable()) {
               fIndex[i] = j;
               i++;
            }
            j++;
         }
      }
      //all is OK
      return kTRUE;
   }
   //we could not find/open the cache file
   return kFALSE;
}
void KVDataSetManager::CheckAvailability() { //Check availability of datasets in repository associated to this data set manager // //If caching is activated for the parent repository, i.e. if // // [repository name].DataRepository.CacheAvailable: yes // //then instead of directly checking the existence of the directories for each dataset, //we use the cached information written in the file //KVBase::WorkingRepository()/[repository name].available.datasets //unless (1) it doesn't exist, or (2) the file is older than the maximum //cache time (in seconds) defined by // // [repository name].DataRepository.MaxCacheSeconds: // //In either of these 2 cases, we check the existence of the directories and update/ //create the cache file. // //If the repository appears to be empty (perhaps because we are using a remote access //protocol to check it, and the protocol has some problems...), then as a last resort we //we will use the cache if it exists, whatever its age. if( fCacheAvailable ) { //caching of dataset availability is activated if( CheckCacheStatus() ) { //cache file exists and is not out of date if( ReadAvailableDatasetsFile() ) return; } } // print (repository-dependent) warning/informational message if(fRepository) fRepository->PrintAvailableDatasetsUpdateWarning(); //open temporary file ofstream tmp_file; TString tmp_file_path = fCacheFileName; KVBase::OpenTempFile(tmp_file_path, tmp_file); fNavailable = 0; if (fDataSets.GetSize()) { TIter next(&fDataSets); KVDataSet *ds; while ((ds = (KVDataSet *) next())) { //The results of this check are written in $KVROOT/KVFiles/[repository name].available.datasets //This file may be read by KVRemoteDataSetManager::CheckAvailability when this //data repository is accessed as a remote data repository from a remote machine. //In this case we do not want the identity of the user to influence the contents of the file. //Therefore even for 'unavailable' datasets we write the available datatypes (if any) //in the file. 
tmp_file << ds->GetName() << " : "; ds->CheckAvailable(); tmp_file << ds->GetAvailableDataTypes() << endl; if (ds->IsAvailable()) { fNavailable++; } } //close temp file tmp_file.close(); //if datasets are found, then we copy the temporary file to KVFiles directory, //overwriting any previous version. if no datasets were found, we try the cache //file (if it exists) if(fNavailable && fRepository){//if no repository is associated, no need to keep file TString runlist=KVBase::GetWORKDIRFilePath(fCacheFileName.Data()); gSystem->CopyFile(tmp_file_path, runlist, kTRUE); //set access permissions to 664 gSystem->Chmod(runlist.Data(), CHMODE(6,6,4)); } //delete temp file gSystem->Unlink(tmp_file_path); if(!fNavailable){ //no datasets found when checking file system ? //can we rely on the cache file ? ReadAvailableDatasetsFile(); } else { //now set up array of available datasets' indices if (fIndex) delete[]fIndex; fIndex = new Int_t[fNavailable]; next.Reset(); Int_t i, j; i = j = 0; while ((ds = (KVDataSet *) next())) { if (ds->IsAvailable()) { fIndex[i] = j; i++; } j++; } } } }
void KVIVReconIdent::InitRun(void)
{
   //When each run is opened, we create a new ROOT file for the identified events we
   //are going to generate from the reconstructed events we are reading.
   // By default this file will be written in the same data repository as the recon data file we are reading.
   // This can be changed by setting the environment variable(s):
   //
   //     ReconIdent.DataAnalysisTask.OutputRepository:     [name of repository]
   //     [name of dataset].ReconIdent.DataAnalysisTask.OutputRepository:         [name of repository]
   //
   // If no value is set for the current dataset (second variable), the value of the
   // first variable will be used. If neither is defined, the new file will be written in the same repository as
   // the recon file (if possible, i.e. if repository is not remote).

   // get dataset to which we must associate new run
   KVDataSet* OutputDataset =
      gDataRepositoryManager->GetDataSet(
         gDataSet->GetDataSetEnv("ReconIdent.DataAnalysisTask.OutputRepository", gDataRepository->GetName()),
         gDataSet->GetName() );
   //create new ROOT file for identified events
   fRunNumber = gIndra->GetCurrentRunNumber();
   fIdentFile = OutputDataset->NewRunfile("ident", fRunNumber);

   fIdentTree = new TTree("ReconstructedEvents",
                          Form("%s : %s : ident events created from recon data",
                               gIndraDB->GetRun(fRunNumber)->GetName(),
                               gIndraDB->GetRun(fRunNumber)->GetTitle())
                         );
#if ROOT_VERSION_CODE > ROOT_VERSION(5,25,4)
#if ROOT_VERSION_CODE < ROOT_VERSION(5,26,1)
   // The TTree::OptimizeBaskets mechanism is disabled, as for ROOT versions < 5.26/00b
   // this lead to a memory leak
   fIdentTree->SetAutoFlush(0);
#endif
#endif
   //leaves for reconstructed events:
   //branch name & class are copied from the first branch of the input (recon) chain,
   //so the output tree uses the same event class as the data being read
   TBranch *recon_br = (TBranch *)fChain->GetListOfBranches()->First();
   fIdentTree->Branch(recon_br->GetName(), recon_br->GetClassName(),
                      GetEventReference(), 10000000, 0)->SetAutoDelete(kFALSE);

   // set flag if this branch contains a KVIVReconEvent object
   fIsIVevent = TClass::GetClass(recon_br->GetClassName())->InheritsFrom("KVIVReconEvent");

   Info("InitRun", "Created identified/calibrated data tree %s : %s",
        fIdentTree->GetName(), fIdentTree->GetTitle());

   // initialise identifications (both INDRA and VAMOS arrays)
   gIndra->InitializeIDTelescopes();
   gVamos->InitializeIDTelescopes();

   cout << endl <<setw(20)<<""<<"----------------------"<<endl;
   cout <<setw(20)<<""<<"| STATUS FOR INDRA |"<<endl;
   cout <<setw(20)<<""<<"----------------------"<<endl<<endl;
   // print status of identifications
   gIndra->PrintStatusOfIDTelescopes();
   // print status of calibrations
   gIndra->PrintCalibStatusOfDetectors();

   cout << endl <<setw(20)<<""<<"----------------------"<<endl;
   cout <<setw(20)<<""<<"| STATUS FOR VAMOS |"<<endl;
   cout <<setw(20)<<""<<"----------------------"<<endl<<endl;
   // print status of identifications
   gVamos->PrintStatusOfIDTelescopes();
   // print status of calibrations
   gVamos->PrintCalibStatusOfDetectors();
}
void KVINDRARawDataReconstructor::InitRun()
{
   // Creates new ROOT file with TTree for reconstructed/calibrated events.
   // By default this file will be written in the same data repository as the raw data file we are reading.
   // This can be changed by setting the environment variable(s):
   //
   //     Reconstruction.DataAnalysisTask.OutputRepository:     [name of repository]
   //     [name of dataset].Reconstruction.DataAnalysisTask.OutputRepository:         [name of repository]
   //
   // If no value is set for the current dataset (second variable), the value of the
   // first variable will be used. If neither is defined, the new file will be written in the same repository as
   // the raw file (if possible, i.e. if repository is not remote).

   // Create new KVINDRAReconEvent used to reconstruct & store events
   // The condition used to seed new reconstructed particles (see KVReconstructedEvent::AnalyseTelescopes)
   // is set by reading the value of the environment variables:
   //     Reconstruction.DataAnalysisTask.ParticleSeedCond:        [all/any]
   //     [name of dataset].Reconstruction.DataAnalysisTask.ParticleSeedCond:        [all/any]
   // If no value is set for the current dataset (second variable), the value of the
   // first variable will be used.
   if (!recev)
      recev = new KVINDRAReconEvent;
   recev->SetPartSeedCond(gDataSet->GetDataSetEnv("Reconstruction.DataAnalysisTask.ParticleSeedCond"));

   // get dataset to which we must associate new run
   KVDataSet* OutputDataset =
      gDataRepositoryManager->GetDataSet(gDataSet->GetOutputRepository(taskname), gDataSet->GetName());
   file = OutputDataset->NewRunfile(datatype.Data(), fRunNumber);

   cout << "Writing \"" << datatype.Data() << "\" events in ROOT file " << file->GetName() << endl;

   //tree for raw data
   rawtree = new TTree("RawData", Form("%s : %s : raw data",
                                       gIndraDB->GetRun(fRunNumber)->GetName(),
                                       gIndraDB->GetRun(fRunNumber)->GetTitle()));
   rawtree->Branch("RunNumber", &fRunNumber, "RunNumber/I");
   rawtree->Branch("EventNumber", &fEventNumber, "EventNumber/I");

   // the format of the raw data tree must be "arrays" : we depend on it in KVINDRAReconDataAnalyser
   // in order to read the raw data and set the detector acquisition parameters
   TString raw_opt = "arrays";
   GetRawDataReader()->SetUserTree(rawtree, raw_opt.Data());
   Info("InitRun", "Created raw data tree (%s : %s). Format: %s",
        rawtree->GetName(), rawtree->GetTitle(), raw_opt.Data());
#if ROOT_VERSION_CODE > ROOT_VERSION(5,25,4)
#if ROOT_VERSION_CODE < ROOT_VERSION(5,26,1)
   // The TTree::OptimizeBaskets mechanism is disabled, as for ROOT versions < 5.26/00b
   // this lead to a memory leak
   rawtree->SetAutoFlush(0);
#endif
#endif

   //tree for reconstructed events
   tree = new TTree("ReconstructedEvents", Form("%s : %s : %s events created from raw data",
                                                gIndraDB->GetRun(fRunNumber)->GetName(),
                                                gIndraDB->GetRun(fRunNumber)->GetTitle(),
                                                datatype.Data())
                   );
#if ROOT_VERSION_CODE > ROOT_VERSION(5,25,4)
#if ROOT_VERSION_CODE < ROOT_VERSION(5,26,1)
   // The TTree::OptimizeBaskets mechanism is disabled, as for ROOT versions < 5.26/00b
   // this lead to a memory leak
   tree->SetAutoFlush(0);
#endif
#endif
   //leaves for reconstructed events
   KVEvent::MakeEventBranch(tree, "INDRAReconEvent", "KVINDRAReconEvent", &recev);

   Info("InitRun", "Created reconstructed data tree %s : %s", tree->GetName(), tree->GetTitle());

   //tree for gene data
   genetree = new TTree("GeneData", Form("%s : %s : gene data",
                                         gIndraDB->GetRun(fRunNumber)->GetName(),
                                         gIndraDB->GetRun(fRunNumber)->GetTitle()));
   //we add to the 'gene tree' a branch for every acquisition parameter of the detector
   genetree->Branch("RunNumber", &fRunNumber, "RunNumber/I");
   genetree->Branch("EventNumber", &fEventNumber, "EventNumber/I");
   KVACQParam* acqpar;
   TIter next_acqpar(gIndra->GetACQParams());
   while ((acqpar = (KVACQParam*)next_acqpar())) {
      // each branch reads directly from the acquisition parameter's data ("/S" = 16-bit short)
      genetree->Branch(acqpar->GetName(), *(acqpar->ConnectData()), Form("%s/S", acqpar->GetName()));
   }
#if ROOT_VERSION_CODE > ROOT_VERSION(5,25,4)
#if ROOT_VERSION_CODE < ROOT_VERSION(5,26,1)
   // The TTree::OptimizeBaskets mechanism is disabled, as for ROOT versions < 5.26/00b
   // this lead to a memory leak
   genetree->SetAutoFlush(0);
#endif
#endif
   Info("InitRun", "Created pulser/laser data tree (%s : %s) for %d parameters",
        genetree->GetName(), genetree->GetTitle(), genetree->GetNbranches());

   //initialise number of reconstructed events
   nb_recon = 0;
}