//this function is mainly there for debugging
bool QgsRelief::exportFrequencyDistributionToCsv( const QString& file )
{
  int nCellsX, nCellsY;
  GDALDatasetH inputDataset = openInputFile( nCellsX, nCellsY );
  if ( inputDataset == NULL )
  {
    return false;
  }

  //open first raster band for reading (elevation raster is always single band)
  GDALRasterBandH elevationBand = GDALGetRasterBand( inputDataset, 1 );
  if ( elevationBand == NULL )
  {
    GDALClose( inputDataset );
    return false;
  }

  //1. get minimum and maximum of elevation raster -> 252 elevation classes
  int minOk, maxOk;
  double minMax[2];
  minMax[0] = GDALGetRasterMinimum( elevationBand, &minOk );
  minMax[1] = GDALGetRasterMaximum( elevationBand, &maxOk );
  if ( !minOk || !maxOk )
  {
    GDALComputeRasterMinMax( elevationBand, true, minMax );
  }

  //2. go through raster cells and get frequency of classes
  //store elevation frequency in 252 elevation classes
  double frequency[252];
  double frequencyClassRange = ( minMax[1] - minMax[0] ) / 252.0;
  //initialize to zero
  for ( int i = 0; i < 252; ++i )
  {
    frequency[i] = 0;
  }

  float* scanLine = ( float * ) CPLMalloc( sizeof( float ) * nCellsX );
  int elevationClass = -1;

  for ( int i = 0; i < nCellsY; ++i )
  {
    GDALRasterIO( elevationBand, GF_Read, 0, i, nCellsX, 1,
                  scanLine, nCellsX, 1, GDT_Float32, 0, 0 );
    for ( int j = 0; j < nCellsX; ++j )
    {
      elevationClass = frequencyClassForElevation( scanLine[j], minMax[0], frequencyClassRange );
      if ( elevationClass >= 0 )
      {
        frequency[elevationClass] += 1.0;
      }
    }
  }
  CPLFree( scanLine );

  //log10 transformation for all frequency values
  //(classes with zero counts become -inf, acceptable for a debug dump)
  for ( int i = 0; i < 252; ++i )
  {
    frequency[i] = log10( frequency[i] );
  }

  //write out frequency values to csv file for debugging
  QFile outFile( file );
  if ( !outFile.open( QIODevice::WriteOnly ) )
  {
    return false;
  }
  QTextStream outstream( &outFile );
  for ( int i = 0; i < 252; ++i )
  {
    outstream << QString::number( i ) + ',' + QString::number( frequency[i] ) << endl;
  }
  outFile.close();
  return true;
}
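frequencyClassForElevation() is defined elsewhere in QgsRelief; a minimal sketch of the kind of binning it presumably performs (an assumption for illustration, not the actual QGIS implementation):

// Hypothetical sketch only: maps an elevation value to one of the 252 classes
// used above, returning -1 for values that fall outside the class range.
int frequencyClassForElevation_sketch( float value, double minElevation, double classRange )
{
  int elevationClass = ( int )( ( value - minElevation ) / classRange );
  if ( elevationClass < 0 || elevationClass > 251 )
  {
    return -1;
  }
  return elevationClass;
}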
int main(int argc, char **argv) {
    if (argc == 1)
        PrintHelpInfo();
    else {
        if (!ReadParameter(argc, argv)) {
            std::cerr << "Bad Parameters.\n";
            return 1;
        }
        ReadConfig(configFileName);
        if (compress) {
            // Compress
            db_compress::Compressor compressor(outputFileName, schema, config);
            int iter_cnt = 0;
            while (1) {
                std::cout << "Iteration " << ++iter_cnt << " Starts\n";
                std::ifstream inFile(inputFileName);
                std::string str;
                int tuple_cnt = 0;
                while (std::getline(inFile, str)) {
                    std::stringstream sstream(str);
                    std::string item;
                    db_compress::Tuple tuple(schema.attr_type.size());

                    size_t count = 0;
                    while (std::getline(sstream, item, ',')) {
                        AppendAttr(&tuple, item, attr_type[count], count);
                        ++count;
                    }
                    // The last item might be an empty string
                    if (!str.empty() && str[str.length() - 1] == ',') {
                        AppendAttr(&tuple, "", attr_type[count], count);
                        ++count;
                    }
                    if (count != attr_type.size()) {
                        std::cerr << "File Format Error!\n";
                    }

                    compressor.ReadTuple(tuple);
                    if (!compressor.RequireFullPass() &&
                        ++tuple_cnt >= NonFullPassStopPoint) {
                        break;
                    }
                }
                compressor.EndOfData();
                if (!compressor.RequireMoreIterations())
                    break;
            }
        } else {
            // Decompress
            db_compress::Decompressor decompressor(inputFileName, schema);
            std::ofstream outFile(outputFileName);
            decompressor.Init();
            while (decompressor.HasNext()) {
                db_compress::Tuple tuple(attr_type.size());
                decompressor.ReadNextTuple(&tuple);
                for (size_t i = 0; i < attr_type.size(); ++i) {
                    std::string str = ExtractAttr(tuple, attr_type[i], i);
                    outFile << str << (i == attr_type.size() - 1 ? '\n' : ',');
                }
            }
        }
    }
    return 0;
}
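The per-line parsing above (std::getline on ',' plus the trailing-comma check) is the part most worth isolating; here is a self-contained sketch of the same splitting rule, with a hypothetical helper name splitCsvLine and no db_compress dependencies:

#include <sstream>
#include <string>
#include <vector>

// Hypothetical helper (not part of db_compress): splits one CSV line on ','.
// Like the loop above, a trailing ',' yields a final empty field, because
// std::getline does not report the empty token after the last delimiter.
std::vector<std::string> splitCsvLine(const std::string &line) {
    std::vector<std::string> fields;
    std::stringstream sstream(line);
    std::string item;
    while (std::getline(sstream, item, ','))
        fields.push_back(item);
    if (!line.empty() && line[line.length() - 1] == ',')
        fields.push_back("");
    return fields;
}

For example, splitCsvLine("1,2,") returns {"1", "2", ""}, matching how the compressor loop counts attributes.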
int createSimplePoem(const std::string inFileName)
{
    if (!checkOutputTxtFile(inFileName))
        return -1;

    // read all lines of the input file
    std::ifstream inFile(inFileName);
    std::string str;
    std::vector<std::string>* strVect_p = new std::vector<std::string>;
    while (std::getline(inFile, str)) {
        strVect_p->push_back(str);
    }
    inFile.close();

    // shuffle the sentences (note: std::random_shuffle was removed in C++17,
    // so this code assumes an older language standard)
    std::srand(unsigned(std::time(NULL)));
    std::random_shuffle(strVect_p->begin(), strVect_p->end(), myrandom);

    std::vector<std::string>* poemVect_p = new std::vector<std::string>;
    bool outOfMatches = false;

    // keep pairing sentences that share a word until the poem has 10 lines
    // or no further matches can be found
    while ((int)poemVect_p->size() < 10) {
        int strSize = (int)strVect_p->size();
        bool isMatch = false;
        for (int iter = 0; iter < strSize - 1; iter++) {
            std::string sentence1 = strVect_p->at(iter);
            std::vector<std::string>* words1_p = new std::vector<std::string>;
            getWordsFromSentence(sentence1, words1_p);

            // only look at sentences after 'iter', so a sentence is never
            // compared with itself and the erase order below stays valid
            for (int iter2 = iter + 1; iter2 < strSize; iter2++) {
                std::string sentence2 = strVect_p->at(iter2);
                std::vector<std::string>* words2_p = new std::vector<std::string>;
                getWordsFromSentence(sentence2, words2_p);

                for (int wordIter1 = 0; wordIter1 < (int)words1_p->size(); wordIter1++) {
                    std::string word1 = words1_p->at(wordIter1);
                    for (int wordIter2 = 0; wordIter2 < (int)words2_p->size(); wordIter2++) {
                        std::string word2 = words2_p->at(wordIter2);
                        if (word1.find(word2) != std::string::npos ||
                            word2.find(word1) != std::string::npos) {
                            isMatch = true;
                            break;
                        }
                    }
                    if (isMatch)
                        break;
                }

                if (isMatch) {
                    poemVect_p->push_back(sentence1);
                    poemVect_p->push_back(sentence2);
                    // erase the higher index first so the lower one stays valid
                    strVect_p->erase(strVect_p->begin() + iter2);
                    strVect_p->erase(strVect_p->begin() + iter);
                } else if (iter == strSize - 2) {
                    outOfMatches = true;
                }

                words2_p->clear();
                delete words2_p;
                if (isMatch)
                    break;
            }

            words1_p->clear();
            delete words1_p;
            if (isMatch)
                break;
        }
        if (outOfMatches)
            break;
    }

    // write the poem next to the input file
    std::string outFileName = inFileName;
    outFileName.replace(outFileName.find(".txt"), 4, "");
    outFileName = outFileName + "_SimplePoem.txt";
    std::ofstream outFile(outFileName);
    for (int iter = 0; iter < (int)poemVect_p->size(); iter++) {
        outFile << poemVect_p->at(iter) << std::endl;
    }
    outFile.close();

    poemVect_p->clear();
    delete poemVect_p;
    strVect_p->clear();
    delete strVect_p;
    return 0;
}
int main()
{
    // write a short text header followed by the byte values 0..254
    std::ofstream outFile("fstreamtest.out", std::ios::out);
    outFile << "output data from fstreamtest";
    for (unsigned char a = 0; a < 255; ++a) {
        outFile << a;
    }
    outFile.close();

    std::ifstream inFile;
    inFile.open("fstreamtest.out", std::ios::in | std::ios::binary);
    char a;
    unsigned char b;
    // skip over the header; note that operator>> skips whitespace characters
    for (int i = 0; i < 28; ++i) {
        inFile >> a;
    }
    // loop on the stream state itself so the last successful get() is not
    // followed by a bogus extra iteration at end-of-file
    while (inFile.get(a)) {
        std::cout << "Read in character: " << (unsigned int)(unsigned char)a
                  << " " << a << std::endl;
    }
    inFile.close();

    outFile.clear();
    outFile.open("fstreamtest2.out", std::ios::out);
    outFile.write("abcd", 4);
    outFile.put('e');
    outFile.write("fghi", 4);
    outFile.close();

    inFile.clear();
    inFile.open("fstreamtest2.out", std::ios::in | std::ios::binary);
    // the file should contain exactly "abcdefghi"
    const char* expected = "abcdefghi";
    for (int i = 0; expected[i] != '\0'; ++i) {
        inFile >> a;
        if (a != expected[i]) {
            std::cout << "Error reading character " << expected[i]
                      << ", was instead: " << a << std::endl;
        }
    }
    inFile.close();
    inFile.clear();

    std::cout << "Now reading other input file to see what wonderful goodness we can discover" << std::endl;
    inFile.open("fstreamtest.input", std::ios::in | std::ios::binary);
    inFile.seekg(27);
    inFile.read(&a, 1);
    b = a;
    if (correctValue(27) != b) {
        std::cout << "Read in invalid value. Read in " << (int)b
                  << ", expected " << (int)correctValue(27) << std::endl;
    } else {
        std::cout << "Character 27 read in correctly\n";
    }
    std::cout << "The following two lines should be identical\n";
    std::cout << "Current position: 28\n";
    std::cout << "Current position: " << inFile.tellg() << std::endl;

    inFile.seekg(1, std::ios::cur);
    inFile.read(&a, 1);
    b = a;
    if (correctValue(29) != b) {
        std::cout << "Read in invalid value. Read in " << (int)b
                  << ", expected " << (int)correctValue(29) << std::endl;
    } else {
        std::cout << "Character 29 read in correctly\n";
    }
    std::cout << "The following two lines should be identical\n";
    std::cout << "Current position: 30\n";
    std::cout << "Current position: " << inFile.tellg() << std::endl;

    return 0;
}
void testEcalClusterToolsFWLite()
{
    std::cout << "opening file" << std::endl;
    TFile *file = TFile::Open("rfio:///castor/cern.ch/cms/store/user/meridian/meridian/SingleGammaPt35_DSZS_V1/SingleGammaPt35_DSZS_V1/00b02d884670d693cb397a1e0af88088/SingleGammaPt35_cfi_py_GEN_SIM_DIGI_L1_DIGI2RAW_RAW2DIGI_RECO_1.root");

    TH1F iEtaiEtaEB("iEtaiEtaEB","iEtaiEtaEB",100,0.,0.03);
    TH1F iEtaiPhiEB("iEtaiPhiEB","iEtaiPhiEB",100,0.,0.03);
    TH1F iPhiiPhiEB("iPhiiPhiEB","iPhiiPhiEB",100,0.,0.03);
    TH1F iEtaiEtaEE("iEtaiEtaEE","iEtaiEtaEE",100,0.,0.03);
    TH1F iEtaiPhiEE("iEtaiPhiEE","iEtaiPhiEE",100,0.,0.03);
    TH1F iPhiiPhiEE("iPhiiPhiEE","iPhiiPhiEE",100,0.,0.03);

    fwlite::Event ev(file);

    CaloTopology *topology = new CaloTopology();
    EcalBarrelHardcodedTopology* ebTopology = new EcalBarrelHardcodedTopology();
    EcalEndcapHardcodedTopology* eeTopology = new EcalEndcapHardcodedTopology();
    topology->setSubdetTopology(DetId::Ecal, EcalBarrel, ebTopology);
    topology->setSubdetTopology(DetId::Ecal, EcalEndcap, eeTopology);

    for (ev.toBegin(); !ev.atEnd(); ++ev) {
        fwlite::Handle<reco::BasicClusterCollection> pEBClusters;
        pEBClusters.getByLabel(ev, "hybridSuperClusters", "hybridBarrelBasicClusters");
        const reco::BasicClusterCollection *ebClusters = pEBClusters.ptr();

        fwlite::Handle<reco::BasicClusterCollection> pEEClusters;
        pEEClusters.getByLabel(ev, "multi5x5BasicClusters", "multi5x5EndcapBasicClusters");
        const reco::BasicClusterCollection *eeClusters = pEEClusters.ptr();

        fwlite::Handle<EcalRecHitCollection> pEBRecHits;
        pEBRecHits.getByLabel(ev, "reducedEcalRecHitsEB", "");
        const EcalRecHitCollection *ebRecHits = pEBRecHits.ptr();

        fwlite::Handle<EcalRecHitCollection> pEERecHits;
        pEERecHits.getByLabel(ev, "reducedEcalRecHitsEE", "");
        const EcalRecHitCollection *eeRecHits = pEERecHits.ptr();

        std::cout << "========== BARREL ==========" << std::endl;
        for (reco::BasicClusterCollection::const_iterator it = ebClusters->begin(); it != ebClusters->end(); ++it) {
            std::cout << "----- new cluster -----" << std::endl;
            std::cout << "----------------- size: " << (*it).size() << " energy: " << (*it).energy() << std::endl;
            std::cout << "e1x3..................... " << EcalClusterTools::e1x3( *it, ebRecHits, topology ) << std::endl;
            std::cout << "e3x1..................... " << EcalClusterTools::e3x1( *it, ebRecHits, topology ) << std::endl;
            std::cout << "e1x5..................... " << EcalClusterTools::e1x5( *it, ebRecHits, topology ) << std::endl;
            //std::cout << "e5x1..................... " << EcalClusterTools::e5x1( *it, ebRecHits, topology ) << std::endl;
            std::cout << "e2x2..................... " << EcalClusterTools::e2x2( *it, ebRecHits, topology ) << std::endl;
            std::cout << "e3x3..................... " << EcalClusterTools::e3x3( *it, ebRecHits, topology ) << std::endl;
            std::cout << "e4x4..................... " << EcalClusterTools::e4x4( *it, ebRecHits, topology ) << std::endl;
            std::cout << "e5x5..................... " << EcalClusterTools::e5x5( *it, ebRecHits, topology ) << std::endl;
            std::cout << "e2x5Right................ " << EcalClusterTools::e2x5Right( *it, ebRecHits, topology ) << std::endl;
            std::cout << "e2x5Left................. " << EcalClusterTools::e2x5Left( *it, ebRecHits, topology ) << std::endl;
            std::cout << "e2x5Top.................. " << EcalClusterTools::e2x5Top( *it, ebRecHits, topology ) << std::endl;
            std::cout << "e2x5Bottom............... " << EcalClusterTools::e2x5Bottom( *it, ebRecHits, topology ) << std::endl;
            std::cout << "e2x5Max.................. " << EcalClusterTools::e2x5Max( *it, ebRecHits, topology ) << std::endl;
            std::cout << "eMax..................... " << EcalClusterTools::eMax( *it, ebRecHits ) << std::endl;
            std::cout << "e2nd..................... " << EcalClusterTools::e2nd( *it, ebRecHits ) << std::endl;

            std::vector<float> vEta = EcalClusterTools::energyBasketFractionEta( *it, ebRecHits );
            std::cout << "energyBasketFractionEta..";
            for (size_t i = 0; i < vEta.size(); ++i) {
                std::cout << " " << vEta[i];
            }
            std::cout << std::endl;

            std::vector<float> vPhi = EcalClusterTools::energyBasketFractionPhi( *it, ebRecHits );
            std::cout << "energyBasketFractionPhi..";
            for (size_t i = 0; i < vPhi.size(); ++i) {
                std::cout << " " << vPhi[i];
            }
            std::cout << std::endl;

            std::vector<float> vLocCov = EcalClusterTools::localCovariances( *it, ebRecHits, topology );
            std::cout << "local covariances........ " << vLocCov[0] << " " << vLocCov[1] << " " << vLocCov[2] << std::endl;

            if ((*it).energy() < 10) continue;
            iEtaiEtaEB.Fill(TMath::Sqrt(vLocCov[0]));
            iEtaiPhiEB.Fill(TMath::Sqrt(vLocCov[1]));
            iPhiiPhiEB.Fill(TMath::Sqrt(vLocCov[2]));
        }

        std::cout << "========== ENDCAPS ==========" << std::endl;
        for (reco::BasicClusterCollection::const_iterator it = eeClusters->begin(); it != eeClusters->end(); ++it) {
            std::cout << "----- new cluster -----" << std::endl;
            std::cout << "----------------- size: " << (*it).size() << " energy: " << (*it).energy() << std::endl;
            std::cout << "e1x3..................... " << EcalClusterTools::e1x3( *it, eeRecHits, topology ) << std::endl;
            std::cout << "e3x1..................... " << EcalClusterTools::e3x1( *it, eeRecHits, topology ) << std::endl;
            std::cout << "e1x5..................... " << EcalClusterTools::e1x5( *it, eeRecHits, topology ) << std::endl;
            //std::cout << "e5x1..................... " << EcalClusterTools::e5x1( *it, eeRecHits, topology ) << std::endl;
            std::cout << "e2x2..................... " << EcalClusterTools::e2x2( *it, eeRecHits, topology ) << std::endl;
            std::cout << "e3x3..................... " << EcalClusterTools::e3x3( *it, eeRecHits, topology ) << std::endl;
            std::cout << "e4x4..................... " << EcalClusterTools::e4x4( *it, eeRecHits, topology ) << std::endl;
            std::cout << "e5x5..................... " << EcalClusterTools::e5x5( *it, eeRecHits, topology ) << std::endl;
            std::cout << "e2x5Right................ " << EcalClusterTools::e2x5Right( *it, eeRecHits, topology ) << std::endl;
            std::cout << "e2x5Left................. " << EcalClusterTools::e2x5Left( *it, eeRecHits, topology ) << std::endl;
            std::cout << "e2x5Top.................. " << EcalClusterTools::e2x5Top( *it, eeRecHits, topology ) << std::endl;
            std::cout << "e2x5Bottom............... " << EcalClusterTools::e2x5Bottom( *it, eeRecHits, topology ) << std::endl;
            std::cout << "eMax..................... " << EcalClusterTools::eMax( *it, eeRecHits ) << std::endl;
            std::cout << "e2nd..................... " << EcalClusterTools::e2nd( *it, eeRecHits ) << std::endl;

            std::vector<float> vLocCov = EcalClusterTools::localCovariances( *it, eeRecHits, topology );
            std::cout << "local covariances........ " << vLocCov[0] << " " << vLocCov[1] << " " << vLocCov[2] << std::endl;

            if ((*it).energy() < 10) continue;
            iEtaiEtaEE.Fill(TMath::Sqrt(vLocCov[0]));
            iEtaiPhiEE.Fill(TMath::Sqrt(vLocCov[1]));
            iPhiiPhiEE.Fill(TMath::Sqrt(vLocCov[2]));
        }
    }

    //Writing OutFile
    TFile outFile("locCov.root","RECREATE");
    outFile.cd();
    iEtaiEtaEB.Write();
    iEtaiPhiEB.Write();
    iPhiiPhiEB.Write();
    iEtaiEtaEE.Write();
    iEtaiPhiEE.Write();
    iPhiiPhiEE.Write();
    outFile.Write();
    outFile.Close();
    delete topology;
}
int ExportOGG::Export(AudacityProject *project,
                      int numChannels,
                      wxString fName,
                      bool selectionOnly,
                      double t0,
                      double t1,
                      MixerSpec *mixerSpec,
                      Tags *metadata,
                      int WXUNUSED(subformat))
{
   double rate = project->GetRate();
   TrackList *tracks = project->GetTracks();
   double quality = (gPrefs->Read(wxT("/FileFormats/OggExportQuality"), 50)/(float)100.0);

   wxLogNull logNo;            // temporarily disable wxWidgets error messages
   int updateResult = eProgressSuccess;
   int eos = 0;

   FileIO outFile(fName, FileIO::Output);

   if (!outFile.IsOpened()) {
      wxMessageBox(_("Unable to open target file for writing"));
      return false;
   }

   // All the Ogg and Vorbis encoding data
   ogg_stream_state stream;
   ogg_page         page;
   ogg_packet       packet;

   vorbis_info      info;
   vorbis_comment   comment;
   vorbis_dsp_state dsp;
   vorbis_block     block;

   // Encoding setup
   vorbis_info_init(&info);
   vorbis_encode_init_vbr(&info, numChannels, int(rate + 0.5), quality);

   // Retrieve tags
   if (!FillComment(project, &comment, metadata)) {
      return false;
   }

   // Set up analysis state and auxiliary encoding storage
   vorbis_analysis_init(&dsp, &info);
   vorbis_block_init(&dsp, &block);

   // Set up packet->stream encoder. According to the encoder example,
   // a random serial number makes it more likely that you can make
   // chained streams with concatenation.
   srand(time(NULL));
   ogg_stream_init(&stream, rand());

   // First we need to write the required headers:
   //    1. The Ogg bitstream header, which contains codec setup params
   //    2. The Vorbis comment header
   //    3. The bitstream codebook.
   //
   // After we create those our responsibility is complete, libvorbis will
   // take care of any other ogg bitstream constraints (again, according
   // to the example encoder source)
   ogg_packet bitstream_header;
   ogg_packet comment_header;
   ogg_packet codebook_header;

   vorbis_analysis_headerout(&dsp, &comment, &bitstream_header, &comment_header,
                             &codebook_header);

   // Place these headers into the stream
   ogg_stream_packetin(&stream, &bitstream_header);
   ogg_stream_packetin(&stream, &comment_header);
   ogg_stream_packetin(&stream, &codebook_header);

   // Flushing these headers now guarantees that audio data will
   // start on a new page, which apparently makes streaming easier
   while (ogg_stream_flush(&stream, &page)) {
      outFile.Write(page.header, page.header_len);
      outFile.Write(page.body, page.body_len);
   }

   int numWaveTracks;
   WaveTrack **waveTracks;
   tracks->GetWaveTracks(selectionOnly, &numWaveTracks, &waveTracks);
   Mixer *mixer = CreateMixer(numWaveTracks, waveTracks,
                              tracks->GetTimeTrack(),
                              t0, t1,
                              numChannels, SAMPLES_PER_RUN, false,
                              rate, floatSample, true, mixerSpec);
   delete [] waveTracks;

   ProgressDialog *progress = new ProgressDialog(wxFileName(fName).GetName(),
      selectionOnly ?
      _("Exporting the selected audio as Ogg Vorbis") :
      _("Exporting the entire project as Ogg Vorbis"));

   while (updateResult == eProgressSuccess && !eos) {
      float **vorbis_buffer = vorbis_analysis_buffer(&dsp, SAMPLES_PER_RUN);
      sampleCount samplesThisRun = mixer->Process(SAMPLES_PER_RUN);

      if (samplesThisRun == 0) {
         // Tell the library that we wrote 0 bytes - signalling the end.
         vorbis_analysis_wrote(&dsp, 0);
      }
      else {
         for (int i = 0; i < numChannels; i++) {
            float *temp = (float *)mixer->GetBuffer(i);
            memcpy(vorbis_buffer[i], temp, sizeof(float)*SAMPLES_PER_RUN);
         }

         // tell the encoder how many samples we have
         vorbis_analysis_wrote(&dsp, samplesThisRun);
      }

      // I don't understand what this call does, so here is the comment
      // from the example, verbatim:
      //
      //    vorbis does some data preanalysis, then divvies up blocks
      //    for more involved (potentially parallel) processing. Get
      //    a single block for encoding now
      while (vorbis_analysis_blockout(&dsp, &block) == 1) {

         // analysis, assume we want to use bitrate management
         vorbis_analysis(&block, NULL);
         vorbis_bitrate_addblock(&block);

         while (vorbis_bitrate_flushpacket(&dsp, &packet)) {

            // add the packet to the bitstream
            ogg_stream_packetin(&stream, &packet);

            // From vorbis-tools-1.0/oggenc/encode.c:
            //   If we've gone over a page boundary, we can do actual output,
            //   so do so (for however many pages are available).
            while (!eos) {
               int result = ogg_stream_pageout(&stream, &page);
               if (!result) {
                  break;
               }

               outFile.Write(page.header, page.header_len);
               outFile.Write(page.body, page.body_len);

               if (ogg_page_eos(&page)) {
                  eos = 1;
               }
            }
         }
      }

      updateResult = progress->Update(mixer->MixGetCurrentTime()-t0, t1-t0);
   }

   delete progress;
   delete mixer;

   ogg_stream_clear(&stream);
   vorbis_block_clear(&block);
   vorbis_dsp_clear(&dsp);
   vorbis_info_clear(&info);
   vorbis_comment_clear(&comment);

   outFile.Close();

   return updateResult;
}
int main(int ac, char** av){
    if(ac < 2){
        std::cout << "usage: ./signalAcceptance inputFile[s]" << std::endl;
        return -1;
    }

    // category histograms: all events, after preselection, and after photon selection
    TH1F* allCategory = new TH1F("allCategory","all Category",19,0.5,19.5);
    TH1F* preselCategory = new TH1F("preselCategory","presel Category",19,0.5,19.5);
    TH1F* photonCategory = new TH1F("photonCategory","reco photon Category",19,0.5,19.5);

    // the three category histograms share the same bin labels
    // (bins 5 and 15 carry no text)
    const char* categoryLabels[19] = {
        "Total", "AllHad", "1 lepton", "2 leptons", "",
        "1 e", "2 e", "1 #mu", "2 #mu", "1 e 1 #mu",
        "1 e, 3 jets", "1 e, 3 jets 1 b", "1 e, 3 jets 1 b, MET", "1 e, 3 jets 1 b, MET, 1 pho", "",
        "1 #mu, 3 jets", "1 #mu, 3 jets 1 b", "1 #mu, 3 jets 1 b, MET", "1 #mu, 3 jets 1 b, MET, 1 pho"};
    TH1F* categoryHists[3] = {allCategory, preselCategory, photonCategory};
    for(int h = 0; h < 3; h++){
        for(int bin = 1; bin <= 19; bin++){
            if(bin == 15) continue; // bin 15 has no label
            categoryHists[h]->GetXaxis()->SetBinLabel(bin, categoryLabels[bin-1]);
        }
    }

    TH1F* VisAllCategory = new TH1F("VisAllCategory","all Category, Vis",15,0.5,15.5);
    TH1F* VisPreselCategory = new TH1F("VisPreselCategory","presel Category, Vis",15,0.5,15.5);
    TH1F* VisPhotonCategory = new TH1F("VisPhotonCategory","reco photon Category, Vis",15,0.5,15.5);

    TH1F* dROtherGen = new TH1F("dROtherGen", "dROtherGen", 800, 0.0, 4.0);
    TH1F* parentage = new TH1F("parentage","parentage",30, 0, 30);
    TH1F* dptOverpt = new TH1F("dptOverpt","dptOverpt", 400, -2.0, 2.0);
    TH1F* dRrecoGen = new TH1F("dRrecoGen","dRrecoGen", 200, 0.0, 0.2);
    TH1F* dPhiRecoGen = new TH1F("dPhiRecoGen","dPhiRecoGen", 400, 0.0, 0.2);
    TH1F* dEtaRecoGen = new TH1F("dEtaRecoGen","dEtaRecoGen", 800, -0.2, 0.2);
    TH1F* dRGenNearJet = new TH1F("dRGenNearJet","dRGenNearJet", 200, 0.0, 1.0);
    TH1F* dPhiGenNearJet = new TH1F("dPhiGenNearJet","dPhiGenNearJet", 100, 0.0, 0.5);
    TH1F* dEtaGenNearJet = new TH1F("dEtaGenNearJet","dEtaGenNearJet", 200, -0.5, 0.5);
    //TH1F* dRGenNextNearJet = new TH1F("dRGenNextNearJet","dRGenNextNearJet", 600, 0.0, 6.0);
    //TH1F* dPhiGenNextNearJet = new TH1F("dPhiGenNextNearJet","dPhiGenNextNearJet", 300, 0.0, 3.0);
    //TH1F* dEtaGenNextNearJet = new TH1F("dEtaGenNextNearJet","dEtaGenNextNearJet", 600, -3.0, 3.0);

    // object selector
    Selector* selectorLoose = new Selector();
    // create event selectors here
    EventPick* evtPickLoose = new EventPick("LoosePhotonID");
    // do not do jet to photon dR cleaning
    evtPickLoose->veto_pho_jet_dR = 0.0;

    EventTree* tree = new EventTree(ac-1, av+1);
    double PUweight = 1.0;

    Long64_t nEntr = tree->GetEntries();
    for(Long64_t entry = 0; entry < nEntr; entry++){
        if(entry%10000 == 0) std::cout << "processing entry " << entry << " out of " << nEntr << std::endl;
        tree->GetEntry(entry);
        doJER(tree);
        selectorLoose->process_objects(tree);
        evtPickLoose->process_event(tree, selectorLoose, PUweight);

        // fill the histograms
        fillCategory(tree, allCategory, PUweight);
        if(evtPickLoose->passPreSel) fillCategory(tree, preselCategory, PUweight);
        if(evtPickLoose->passAll) fillCategory(tree, photonCategory, PUweight);

        // fill histograms for gen photons passing the acceptance cuts defined in the analysis
        bool inAcc = false;
        for(int mcInd = 0; mcInd < tree->nMC_; ++mcInd){
            if(tree->mcPID->at(mcInd) == 22 &&
               (tree->mcParentage->at(mcInd)==2 || tree->mcParentage->at(mcInd)==10 || tree->mcParentage->at(mcInd)==26) &&
               tree->mcPt->at(mcInd) > 25 &&
               fabs(tree->mcEta->at(mcInd)) < 1.4442){
                inAcc = true;
            }
        }
        if(inAcc){
            fillCategory(tree, VisAllCategory, PUweight);
            if(evtPickLoose->passPreSel) fillCategory(tree, VisPreselCategory, PUweight);
            if(evtPickLoose->passAll) fillCategory(tree, VisPhotonCategory, PUweight);
        }

        // have at least one good photon
        if(!evtPickLoose->passAll) continue;
        // test
        //if(overlapMadGraph(tree)) continue;

        int phoInd = evtPickLoose->Photons.at(0);

        // experiment with delta R cuts for photons
        for(int mcInd = 0; mcInd < tree->nMC_; ++mcInd){
            bool etetamatch = dR(tree->mcEta->at(mcInd), tree->mcPhi->at(mcInd), tree->phoEta_->at(phoInd), tree->phoPhi_->at(phoInd)) < 0.2 &&
                (fabs(tree->phoEt_->at(phoInd) - tree->mcPt->at(mcInd)) / tree->mcPt->at(mcInd)) < 1.0;
            if(etetamatch && tree->mcPID->at(mcInd) == 22){
                // test
                if(!(tree->mcParentage->at(mcInd)==2 || tree->mcParentage->at(mcInd)==10 || tree->mcParentage->at(mcInd)==26)) continue;

                // fill histograms for matched photon candidate
                parentage->Fill( tree->mcParentage->at(mcInd) );
                dptOverpt->Fill( (tree->phoEt_->at(phoInd) - tree->mcPt->at(mcInd)) / tree->mcPt->at(mcInd) );
                dRrecoGen->Fill( dR(tree->mcEta->at(mcInd), tree->mcPhi->at(mcInd), tree->phoEta_->at(phoInd), tree->phoPhi_->at(phoInd)) );
                dPhiRecoGen->Fill( dPhi( tree->phoPhi_->at(phoInd) - tree->mcPhi->at(mcInd) ) );
                dEtaRecoGen->Fill( tree->phoEta_->at(phoInd) - tree->mcEta->at(mcInd) );

                int closestGenInd = secondMinDrIndex( mcInd, tree );
                if(dR(tree->mcEta->at(mcInd), tree->mcPhi->at(mcInd), tree->mcEta->at(closestGenInd), tree->mcPhi->at(closestGenInd)) < 0.01){
                    std::cout << "closest PID " << tree->mcPID->at(closestGenInd) << " MomPID " << tree->mcMomPID->at(closestGenInd) << std::endl;
                    std::cout << "photon mother PID " << tree->mcMomPID->at(mcInd) << std::endl;
                }
                dROtherGen->Fill( dR(tree->mcEta->at(mcInd), tree->mcPhi->at(mcInd), tree->mcEta->at(closestGenInd), tree->mcPhi->at(closestGenInd)) );

                int closestJetInd = minDrIndex( tree->mcEta->at(mcInd), tree->mcPhi->at(mcInd), tree->jetEta_, tree->jetPhi_ );
                dRGenNearJet->Fill( dR(tree->mcEta->at(mcInd), tree->mcPhi->at(mcInd), tree->jetEta_->at(closestJetInd), tree->jetPhi_->at(closestJetInd) ) );
                dPhiGenNearJet->Fill( dPhi( tree->jetPhi_->at(closestJetInd) - tree->mcPhi->at(mcInd) ) );
                dEtaGenNearJet->Fill( tree->jetEta_->at(closestJetInd) - tree->mcEta->at(mcInd) );

                //closestJetInd = secondMinDrIndex( tree->mcEta->at(mcInd), tree->mcPhi->at(mcInd), tree->jetEta_, tree->jetPhi_ );
                //dRGenNextNearJet->Fill( dR(tree->mcEta->at(mcInd), tree->mcPhi->at(mcInd), tree->jetEta_->at(closestJetInd), tree->jetPhi_->at(closestJetInd) ) );
                //dPhiGenNextNearJet->Fill( dPhi( tree->jetPhi_->at(closestJetInd) - tree->mcPhi->at(mcInd) ) );
                //dEtaGenNextNearJet->Fill( tree->jetEta_->at(closestJetInd) - tree->mcEta->at(mcInd) );
            }
        }
    }

    evtPickLoose->print_cutflow();

    // write histograms
    TFile outFile("signalAcc.root","RECREATE");
    saveHist(allCategory, &outFile);
    saveHist(preselCategory, &outFile);
    saveHist(photonCategory, &outFile);
    saveHist(VisAllCategory, &outFile);
    saveHist(VisPreselCategory, &outFile);
    saveHist(VisPhotonCategory, &outFile);

    saveHist(dROtherGen, &outFile);
    saveHist(parentage, &outFile);
    saveHist(dptOverpt, &outFile);
    saveHist(dRrecoGen, &outFile);
    saveHist(dPhiRecoGen, &outFile);
    saveHist(dEtaRecoGen, &outFile);
    saveHist(dRGenNearJet, &outFile);
    saveHist(dPhiGenNearJet, &outFile);
    saveHist(dEtaGenNearJet, &outFile);
    //saveHist(dRGenNextNearJet, &outFile);
    //saveHist(dPhiGenNextNearJet, &outFile);
    //saveHist(dEtaGenNextNearJet, &outFile);

    outFile.Close();
    delete tree;
    return 0;
}
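dR() and dPhi() are helper functions defined elsewhere in this analysis code; below is a minimal sketch of the usual definitions they presumably follow (an assumption for illustration, not the original implementation; the histogram ranges starting at 0 suggest dPhi returns an absolute value).

#include <cmath>

// Hypothetical sketch of the standard HEP helpers assumed above:
// dPhi wraps an azimuthal difference into [-pi, pi] and returns its magnitude,
// and dR is the eta-phi distance sqrt(dEta^2 + dPhi^2).
double dPhi_sketch(double dphi)
{
    const double pi = 3.14159265358979323846;
    while (dphi >  pi) dphi -= 2.0 * pi;
    while (dphi < -pi) dphi += 2.0 * pi;
    return std::fabs(dphi);
}

double dR_sketch(double eta1, double phi1, double eta2, double phi2)
{
    double deta = eta1 - eta2;
    double dphi = dPhi_sketch(phi1 - phi2);
    return std::sqrt(deta * deta + dphi * dphi);
}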
CC_FILE_ERROR MascaretFilter::saveToFile(ccHObject* entity, QString filename, SaveParameters& parameters)
{
	if (!entity || filename.isEmpty())
		return CC_FERR_BAD_ARGUMENT;

	//look for valid profiles
	std::vector<ccPolyline*> profiles;
	try
	{
		//get all polylines
		std::vector<ccPolyline*> candidates;
		if (entity->isA(CC_TYPES::POLY_LINE))
		{
			candidates.push_back(static_cast<ccPolyline*>(entity));
		}
		else if (entity->isA(CC_TYPES::HIERARCHY_OBJECT))
		{
			for (unsigned i=0; i<entity->getChildrenNumber(); ++i)
				if (entity->getChild(i) && entity->getChild(i)->isA(CC_TYPES::POLY_LINE))
					candidates.push_back(static_cast<ccPolyline*>(entity->getChild(i)));
		}

		//then keep the valid profiles only
		for (size_t i=0; i<candidates.size(); ++i)
		{
			ccPolyline* poly = candidates[i];
			if (	!poly->hasMetaData(ccPolyline::MetaKeyUpDir())
				||	!poly->hasMetaData(ccPolyline::MetaKeyAbscissa())
				||	!poly->hasMetaData(ccPolyline::MetaKeyPrefixCenter()+".x")
				||	!poly->hasMetaData(ccPolyline::MetaKeyPrefixCenter()+".y")
				||	!poly->hasMetaData(ccPolyline::MetaKeyPrefixCenter()+".z")
				||	!poly->hasMetaData(ccPolyline::MetaKeyPrefixDirection()+".x")
				||	!poly->hasMetaData(ccPolyline::MetaKeyPrefixDirection()+".y")
				||	!poly->hasMetaData(ccPolyline::MetaKeyPrefixDirection()+".z") )
			{
				ccLog::Warning(QString("[Mascaret] Polyline '%1' is not a valid profile (missing meta-data)").arg(poly->getName()));
				break;
			}
			else
			{
				profiles.push_back(poly);
			}
		}
	}
	catch (const std::bad_alloc&)
	{
		return CC_FERR_NOT_ENOUGH_MEMORY;
	}

	if (profiles.empty())
		return CC_FERR_NO_SAVE;

	//open ASCII file for writing
	QFile file(filename);
	if (!file.open(QIODevice::WriteOnly | QIODevice::Text))
		return CC_FERR_WRITING;

	QTextStream outFile(&file);
	outFile.setRealNumberPrecision(12);

	//ask some parameters
	SaveMascaretFileDlg smfDlg;
	if (!smfDlg.exec())
		return CC_FERR_CANCELED_BY_USER;

	QString biefName = smfDlg.biefNameLineEdit->text();
	QString type("T"); //B or T --> ask the user
	switch (smfDlg.typeComboBox->currentIndex())
	{
	case 0:
		type = "B"; //bathy
		break;
	case 1:
		type = "T"; //topo
		break;
	default:
		assert(false);
	}

	//sanitize the 'bief' (reach) name
	biefName = MakeMascaretName(biefName);

	//sort the sections by their abscissa
	if (profiles.size() > 1)
	{
		for (size_t i=0; i<profiles.size()-1; ++i)
		{
			size_t smallestIndex = i;
			double smallestAbscissa = profiles[i]->getMetaData(ccPolyline::MetaKeyAbscissa()).toDouble();
			for (size_t j=i+1; j<profiles.size(); ++j)
			{
				double a = profiles[j]->getMetaData(ccPolyline::MetaKeyAbscissa()).toDouble();
				if (a < smallestAbscissa)
				{
					smallestAbscissa = a;
					smallestIndex = j;
				}
			}
			if (i != smallestIndex)
			{
				std::swap(profiles[i],profiles[smallestIndex]);
			}
		}
	}

	CC_FILE_ERROR result = CC_FERR_NO_SAVE;

	//for each profile
	for (size_t i=0; i<profiles.size(); ++i)
	{
		ccPolyline* poly = profiles[i];
		unsigned vertCount = poly ? poly->size() : 0;
		if (vertCount < 2)
		{
			//invalid size
			ccLog::Warning(QString("[Mascaret] Polyline '%1' does not have enough vertices").arg(poly->getName()));
			continue;
		}

		//decode meta-data
		bool ok = true;
		int upDir = 2;
		double absc = 0.0;
		CCVector3d Cd(0,0,0);
		CCVector3d Ud(0,0,0);
		while (true) //fake loop for easy break
		{
			upDir = poly->getMetaData(ccPolyline::MetaKeyUpDir()).toInt(&ok);
			if (!ok)
				break;
			absc = poly->getMetaData(ccPolyline::MetaKeyAbscissa()).toDouble(&ok);
			if (!ok)
				break;
			Cd.x = poly->getMetaData(ccPolyline::MetaKeyPrefixCenter()+".x").toDouble(&ok);
			if (!ok)
				break;
			Cd.y = poly->getMetaData(ccPolyline::MetaKeyPrefixCenter()+".y").toDouble(&ok);
			if (!ok)
				break;
			Cd.z = poly->getMetaData(ccPolyline::MetaKeyPrefixCenter()+".z").toDouble(&ok);
			if (!ok)
				break;
			Ud.x = poly->getMetaData(ccPolyline::MetaKeyPrefixDirection()+".x").toDouble(&ok);
			if (!ok)
				break;
			Ud.y = poly->getMetaData(ccPolyline::MetaKeyPrefixDirection()+".y").toDouble(&ok);
			if (!ok)
				break;
			Ud.z = poly->getMetaData(ccPolyline::MetaKeyPrefixDirection()+".z").toDouble(&ok);
			break;
		}

		if (!ok)
		{
			ccLog::Warning(QString("[Mascaret] At least one meta-data entry of polyline '%1' is invalid!").arg(poly->getName()));
			continue;
		}

		QString profileName = poly->getName();
		profileName = MakeMascaretName(profileName);

		CCVector3 C = CCVector3::fromArray(Cd.u);
		CCVector3 U = CCVector3::fromArray(Ud.u);
		U.normalize();

		//write header
		outFile << "PROFIL " << biefName << " " << profileName << " " << absc;
#define SAVE_AS_GEO_MASCARET
#ifdef SAVE_AS_GEO_MASCARET
		int xDir = upDir == 2 ? 0 : upDir+1;
		int yDir = xDir == 2 ? 0 : xDir+1;
		//for "geo"-mascaret, we add some more information:
		// - first point
		{
			const CCVector3* firstP = poly->getPoint(0);
			CCVector3d firstPg = poly->toGlobal3d(*firstP);
			outFile << " ";
			outFile << firstPg.u[xDir] << " " << firstPg.u[yDir];
		}
		// - last point
		{
			const CCVector3* lastP = poly->getPoint(vertCount-1);
			CCVector3d lastPg = poly->toGlobal3d(*lastP);
			outFile << " ";
			outFile << lastPg.u[xDir] << " " << lastPg.u[yDir];
		}
		// - profile/path intersection point
		{
			outFile << " AXE ";
			CCVector3d Cdg = poly->toGlobal3d(Cd);
			outFile << Cdg.u[xDir] << " " << Cdg.u[yDir];
		}
#endif
		outFile << endl;

		//check the abscissa values order (must be increasing!)
		bool inverted = false;
		{
			const CCVector3* P0 = poly->getPoint(0);
			//convert to 'local' coordinate system
			CCVector2 Q0;
			ToLocalAbscissa(*P0, C, U, upDir, Q0);

			const CCVector3* P1 = poly->getPoint(vertCount-1);
			//convert to 'local' coordinate system
			CCVector2 Q1;
			ToLocalAbscissa(*P1, C, U, upDir, Q1);

			inverted = (Q1.x < Q0.x);
		}

		for (unsigned j=0; j<vertCount; ++j)
		{
			const CCVector3* P = poly->getPoint(inverted ? vertCount-1-j : j);

			//convert to 'local' coordinate system
			CCVector2 Q;
			ToLocalAbscissa(*P, C, U, upDir, Q);

			outFile << Q.x << " " << Q.y << " " << type;
#ifdef SAVE_AS_GEO_MASCARET
			{
				//for "geo"-mascaret, we add some more information:
				// - real coordinates of the point
				outFile << " ";
				CCVector3d Pg = poly->toGlobal3d(*P);
				outFile << Pg.u[xDir] << " " << Pg.u[yDir];
			}
#endif
			outFile << endl;
		}

		result = CC_FERR_NO_ERROR;
	}

	file.close();

	return result;
}
void Write_Settings(void)
{
    std::ofstream outFile("Options.txt");
    outFile << "Resolution: \n";
    outFile << Resolutions[Sel_Resolution] << std::endl;
}
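A matching reader is not shown; a minimal sketch of what one could look like for this two-line layout (hypothetical name Read_Resolution_Setting; it assumes Resolutions[Sel_Resolution] was written as a single line of text):

#include <fstream>
#include <string>

// Hypothetical counterpart (not part of the original code): reads back the
// "Options.txt" layout written above, i.e. a "Resolution: " header line
// followed by the stored resolution value on its own line.
std::string Read_Resolution_Setting(void)
{
    std::ifstream inFile("Options.txt");
    std::string header, value;
    std::getline(inFile, header); // "Resolution: "
    std::getline(inFile, value);  // the resolution written by Write_Settings
    return value;                 // empty if the file is missing or incomplete
}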