void ImageItem::paint(QPainter *ppainter, const QStyleOptionGraphicsItem *option, QWidget *widget) { ppainter->save(); if (isSelected()) ppainter->setOpacity(Const::SELECTION_OPACITY); else ppainter->setOpacity(qreal(opacity())/100); QPointF point = rect().topLeft(); QImage img; if (m_scale && !image().isNull()){ img = image().scaled(rect().width(), rect().height(), keepAspectRatio() ? Qt::KeepAspectRatio : Qt::IgnoreAspectRatio, Qt::SmoothTransformation); } else { img = image(); } qreal shiftHeight = rect().height() - img.height(); qreal shiftWidth = rect().width() - img.width(); if (m_center){ if (shiftHeight<0 || shiftWidth<0){ qreal cutX = 0; qreal cutY = 0; qreal cutWidth = img.width(); qreal cutHeigth = img.height(); if (shiftWidth > 0){ point.setX(point.x()+shiftWidth/2); } else { cutX = abs(shiftWidth/2); cutWidth += shiftWidth; } if (shiftHeight > 0){ point.setY(point.x()+shiftHeight/2); } else { cutY = abs(shiftHeight/2); cutHeigth += shiftHeight; } img = img.copy(cutX,cutY,cutWidth,cutHeigth); } else { point.setX(point.x()+shiftWidth/2); point.setY(point.y()+shiftHeight/2); } } if (img.isNull() && itemMode()==DesignMode){ QString text; ppainter->setFont(transformToSceneFont(QFont("Arial",10))); if (!datasource().isEmpty() && !field().isEmpty()) text = datasource()+"."+field(); else text = tr("Image"); ppainter->drawText(rect().adjusted(4,4,-4,-4), Qt::AlignCenter, text ); } else { ppainter->drawImage(point,img); } ItemDesignIntf::paint(ppainter,option,widget); ppainter->restore(); }
double terrama2::core::RiscoDeFogo::XYLinhaCol(double x, double y, const std::string& path, const std::string& filename) const { //const auto& prec = *(precipitacao.rbegin()+i); std::shared_ptr<te::da::DataSource> datasource(te::da::DataSourceFactory::make("GDAL", "file://"+path+filename)); //RAII for open/closing the datasource terrama2::core::OpenClose<std::shared_ptr<te::da::DataSource> > openClose(datasource); std::shared_ptr<te::da::DataSourceTransactor> transactor(datasource->getTransactor()); std::shared_ptr<te::da::DataSet> dataset(transactor->getDataSet(filename)); std::shared_ptr<te::rst::Raster> raster(dataset->getRaster(0)); te::rst::Grid* grid = raster->getGrid(); te::rst::Band* band = raster->getBand(0); double colD, rowD; grid->geoToGrid(x, y, colD, rowD); int col = std::round(colD); int row = std::round(rowD); double value; band->getValue(col, row, value); return value; }
int main(int argc, char *argv[]) { mapreduce::specification spec; int prime_limit = 10000; if (argc > 1) prime_limit = std::max(1, atoi(argv[1])); if (argc > 2) spec.map_tasks = std::max(1, atoi(argv[2])); if (argc > 3) spec.reduce_tasks = atoi(argv[3]); else spec.reduce_tasks = std::max(1U, std::thread::hardware_concurrency()); prime_calculator::job::datasource_type datasource(0, prime_limit, prime_limit/spec.reduce_tasks); std::cout <<"\nCalculating Prime Numbers in the range 0 .. " << prime_limit << " ..." <<std::endl; prime_calculator::job job(datasource, spec); mapreduce::results result; #ifdef _DEBUG job.run<mapreduce::schedule_policy::sequential<prime_calculator::job> >(result); #else job.run<mapreduce::schedule_policy::cpu_parallel<prime_calculator::job> >(result); #endif std::cout <<"\nMapReduce finished in " << result.job_runtime.count() << " with " << std::distance(job.begin_results(), job.end_results()) << " results" << std::endl; for (auto it=job.begin_results(); it!=job.end_results(); ++it) std::cout << it->second <<" "; return 0; }
void terrama2::core::erasePreviousResult(DataManagerPtr dataManager, DataSeriesId dataSeriesId, std::shared_ptr<te::dt::TimeInstantTZ> startTime)
{
  // Removes the rows produced by a previous analysis execution (matching
  // execution_date == startTime) from the output table, so a re-run of the
  // analysis does not duplicate results. Only implemented for POSTGIS providers.
  auto outputDataSeries = dataManager->findDataSeries(dataSeriesId);
  if(!outputDataSeries)
  {
    TERRAMA2_LOG_ERROR() << QObject::tr("Invalid output data series for analysis.");
    return;
  }

  auto outputDataProvider = dataManager->findDataProvider(outputDataSeries->dataProviderId);
  if(!outputDataProvider)
  {
    TERRAMA2_LOG_ERROR() << QObject::tr("Invalid output data provider for analysis.");
    return;
  }

  if(outputDataProvider->dataProviderType == "POSTGIS")
  {
    // ROBUSTNESS: the original indexed datasetList[0] unconditionally.
    if(outputDataSeries->datasetList.empty())
    {
      TERRAMA2_LOG_ERROR() << QObject::tr("Invalid output data series for analysis.");
      return;
    }
    auto dataset = outputDataSeries->datasetList[0];
    std::string tableName = getTableNameProperty(dataset);

    std::shared_ptr<te::da::DataSource> datasource(te::da::DataSourceFactory::make("POSTGIS", outputDataProvider->uri));
    // RAII for open/closing the datasource
    terrama2::core::OpenClose<std::shared_ptr<te::da::DataSource> > openClose(datasource);
    if(!datasource->isOpened())
    {
      QString errMsg = QObject::tr("DataProvider could not be opened.");
      TERRAMA2_LOG_ERROR() << errMsg;
      throw Exception() << ErrorDescription(errMsg);
    }

    // get a transactor to interact to the data source
    std::shared_ptr<te::da::DataSourceTransactor> transactor(datasource->getTransactor());

    // Only issue the DELETE if the table exists, schema-qualified or not.
    auto dataSetNames = transactor->getDataSetNames();
    if(std::find(dataSetNames.cbegin(), dataSetNames.cend(), tableName) != dataSetNames.cend()
       || std::find(dataSetNames.cbegin(), dataSetNames.cend(), "public."+tableName) != dataSetNames.cend())
      // NOTE(review): tableName/startTime come from TerraMA² configuration, not
      // end users, but this string-built SQL would still be safer parameterized.
      transactor->execute("delete from " + tableName + " where execution_date = '" + startTime->toString() + "'");
  }
  else
  {
    // BUGFIX: message grammar ("not implement" -> "not implemented").
    QString errMsg = QObject::tr("Removing old results not implemented for this dataseries format.");
    TERRAMA2_LOG_ERROR() << errMsg;
  }
}
Options AttributeFilter::getDefaultOptions()
{
    // Sub-options attached to the "dimension" option: the assigned value plus
    // the OGR-style source selectors (geometry / query / layer / datasource).
    pdal::Options dimensionSubOpts;
    dimensionSubOpts.add(pdal::Option("value", "0", ""));
    dimensionSubOpts.add(pdal::Option("geometry", "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", ""));
    dimensionSubOpts.add(pdal::Option("query", "", ""));
    dimensionSubOpts.add(pdal::Option("layer", "", ""));
    dimensionSubOpts.add(pdal::Option("datasource", "", ""));

    // Default dimension to filter on is "Classification".
    pdal::Option dimension("dimension", "Classification", "");
    dimension.setOptions(dimensionSubOpts);

    Options defaults;
    defaults.add(dimension);
    return defaults;
}
int main(int argc, char *argv[]) { terrama2::core::RiscoDeFogo* riscodefogo = new terrama2::core::RiscoDeFogo(); terrama2::core::TerraMA2Init terrama2Init("", 0); { double x = -52.1958; double y = -21.700; std::vector<std::string> precipitacao = { "S10648241_201703191200.tif", "S10648241_201703201200.tif", "S10648241_201703211200.tif", "S10648241_201703221200.tif", "S10648241_201703231200.tif", "S10648241_201703241200.tif", "S10648241_201703251200.tif", "S10648241_201703261200.tif", "S10648241_201703271200.tif", "S10648241_201703281200.tif", "S10648241_201703291200.tif", "S10648241_201703301200.tif", "S10648241_201703311200.tif", "S10648241_201704011200.tif", "S10648241_201704021200.tif", "S10648241_201704031200.tif", "S10648241_201704041200.tif", "S10648241_201704051200.tif", "S10648241_201704061200.tif", "S10648241_201704071200.tif", "S10648241_201704081200.tif", "S10648241_201704091200.tif", "S10648241_201704101200.tif", "S10648241_201704111200.tif", "S10648241_201704121200.tif", "S10648241_201704131200.tif", "S10648241_201704141200.tif", "S10648241_201704151200.tif", "S10648241_201704161200.tif", "S10648241_201704171200.tif", "S10648241_201704181200.tif", "S10648241_201704191200.tif", "S10648241_201704201200.tif", "S10648241_201704211200.tif", "S10648241_201704221200.tif", "S10648241_201704231200.tif", "S10648241_201704241200.tif", "S10648241_201704251200.tif", "S10648241_201704261200.tif", "S10648241_201704271200.tif", "S10648241_201704281200.tif", "S10648241_201704291200.tif", "S10648241_201704301200.tif", "S10648241_201705011200.tif", "S10648241_201705021200.tif", "S10648241_201705031200.tif", "S10648241_201705041200.tif", "S10648241_201705051200.tif", "S10648241_201705061200.tif", "S10648241_201705071200.tif", "S10648241_201705081200.tif", "S10648241_201705091200.tif", "S10648241_201705101200.tif", "S10648241_201705111200.tif", "S10648241_201705121200.tif", "S10648241_201705131200.tif", "S10648241_201705141200.tif", "S10648241_201705151200.tif", 
"S10648241_201705161200.tif", "S10648241_201705171200.tif", "S10648241_201705181200.tif", "S10648241_201705191200.tif", "S10648241_201705201200.tif", "S10648241_201705211200.tif", "S10648241_201705221200.tif", "S10648241_201705231200.tif", "S10648241_201705241200.tif", "S10648241_201705251200.tif", "S10648241_201705261200.tif", "S10648241_201705271200.tif", "S10648241_201705281200.tif", "S10648241_201705291200.tif", "S10648241_201705301200.tif", "S10648241_201705311200.tif", "S10648241_201706011200.tif", "S10648241_201706021200.tif", "S10648241_201706031200.tif", "S10648241_201706041200.tif", "S10648241_201706051200.tif", "S10648241_201706061200.tif", "S10648241_201706071200.tif", "S10648241_201706081200.tif", "S10648241_201706091200.tif", "S10648241_201706101200.tif", "S10648241_201706111200.tif", "S10648241_201706121200.tif", "S10648241_201706131200.tif", "S10648241_201706141200.tif", "S10648241_201706151200.tif", "S10648241_201706161200.tif", "S10648241_201706171200.tif", "S10648241_201706181200.tif", "S10648241_201706191200.tif", "S10648241_201706201200.tif", "S10648241_201706211200.tif", "S10648241_201706221200.tif", "S10648241_201706231200.tif", "S10648241_201706241200.tif", "S10648241_201706251200.tif", "S10648241_201706261200.tif", "S10648241_201706271200.tif", "S10648241_201706281200.tif", "S10648241_201706291200.tif", "S10648241_201706301200.tif", "S10648241_201707011200.tif", "S10648241_201707021200.tif", "S10648241_201707031200.tif", "S10648241_201707041200.tif", "S10648241_201707051200.tif", "S10648241_201707061200.tif", "S10648241_201707071200.tif", "S10648241_201707081200.tif", "S10648241_201707091200.tif", "S10648241_201707101200.tif", "S10648241_201707111200.tif", "S10648241_201707121200.tif", "S10648241_201707131200.tif", "S10648241_201707141200.tif", "S10648241_201707151200.tif", "S10648241_201707161200.tif" }; std::string humidity = TERRAMA2_DATA_DIR+"/dados_amb/saida/umidade/"; std::string nameFileHumidity = "UMRS201707161820.tif"; std::string 
temperature = TERRAMA2_DATA_DIR+"/dados_amb/saida/temperatura/"; std::string nameFileTemperature = "TEMP201707161820.tif"; std::string result = TERRAMA2_DATA_DIR+"/dados_amb/saida/resultado_Script_Guilherme3/"; std::string nameFileResult = "result201707162120.tif"; std::string landcover = TERRAMA2_DATA_DIR+"/dados_amb/entrada/rf/landcover/"; std::string nameFileLand = "landcover_2012.tif"; std::string gabarito = TERRAMA2_DATA_DIR+"/dados_amb/entrada/"; std::string nameFileGab = "RF.20170716.tif"; double prec1 = 0; double prec2 = 0; double prec3 = 0; double prec4 = 0; double prec5 = 0; double prec6_10 = 0; double prec11_15 = 0; double prec16_30 = 0; double prec31_60 = 0; double prec61_90 = 0; double prec91_120 = 0; std::string path = TERRAMA2_DATA_DIR+"/dados_amb/saida/precipitacao/"; for(int i = 0; i < 120; ++i) { const auto& prec = *(precipitacao.rbegin()+i); std::shared_ptr<te::da::DataSource> datasource(te::da::DataSourceFactory::make("GDAL", "file://"+path+prec)); //RAII for open/closing the datasource terrama2::core::OpenClose<std::shared_ptr<te::da::DataSource> > openClose(datasource); std::shared_ptr<te::da::DataSourceTransactor> transactor(datasource->getTransactor()); std::shared_ptr<te::da::DataSet> dataset(transactor->getDataSet(prec)); std::shared_ptr<te::rst::Raster> raster(dataset->getRaster(0)); te::rst::Grid* grid = raster->getGrid(); te::rst::Band* band = raster->getBand(0); double colD, rowD; grid->geoToGrid(x, y, colD, rowD); int col = std::round(colD); int row = std::round(rowD); double value; band->getValue(col, row, value); int dia = i+1; if(dia == 1) prec1 += value; if(dia == 2) prec2 += value; if(dia == 3) prec3 += value; if(dia == 4) prec4 += value; if(dia == 5) prec5 += value; if(dia > 5 && dia <= 10) prec6_10 += value; if(dia > 10 && dia <= 15) prec11_15 += value; if(dia > 15 && dia <= 30) prec16_30 += value; if(dia > 30 && dia <= 60) prec31_60 += value; if(dia > 60 && dia <= 90) prec61_90 += value; if(dia > 90 && dia <= 120) prec91_120 
+= value; } // 2 - Calc Precipitation factors double fp1 = exp(-0.14 * prec1); double fp2 = exp(-0.07 * prec2); double fp3 = exp(-0.04 * prec3); double fp4 = exp(-0.03 * prec4); double fp5 = exp(-0.02 * prec5); double fp6_10 = exp(-0.01 * prec6_10); double fp11_15 = exp(-0.008 * prec11_15); double fp16_30 = exp(-0.004 * prec16_30); double fp31_60 = exp(-0.002 * prec31_60); double fp61_90 = exp(-0.001 * prec61_90); double fp91_120 = exp(-0.0007 * prec91_120); //3 - calc days of dryness double pse = 105. * fp1 * fp2 * fp3 * fp4 * fp5 * fp6_10 * fp11_15 * fp16_30 * fp31_60 * fp61_90 * fp91_120; //4 - basic fire hazard double tipo_vegetacao = riscodefogo->XYLinhaCol(x, y, landcover, nameFileLand); std::cout << "tipo_vegetacao: " << tipo_vegetacao << std::endl; double a = riscodefogo->valorAVegetacao(tipo_vegetacao); std::cout << "A: " << a << std::endl; std::cout << "prec1: " << prec1 << "\t" << "prec2: " << prec2 << "\t" << "prec3: " << prec3 << "\t" << "prec4: " << prec4 << "\t" << "prec5: " << prec5 << "\t" << "prec6_10: " << prec6_10 << "\t" << "prec11_15: " << prec11_15 << "\t" << "prec16_30: " << prec16_30 << "\t" << "prec31_60: " << prec31_60 << "\t" << "prec61_90: " << prec61_90 << "\t" << "prec91_120: " << prec91_120 << std::endl; // Adjustment double PSE = riscodefogo->maxPSE(tipo_vegetacao, pse); std::cout << "PSE: " << PSE << std::endl; double rb = 0.9 * (1. 
+ std::sin((a*PSE-90.)*3.1416/180.)) * 0.5; if(rb > 0.9) rb = 0.9; std::cout << "RB: " << rb << std::endl; //5 - humidity factor double ur = riscodefogo->XYLinhaCol(x,y, humidity, nameFileHumidity); std::cout << "UR: " << ur << std::endl; double fu = ur * -0.006 + 1.3; std::cout << "FU: " << fu << std::endl; //6 - factor temperature double tempMax = riscodefogo->XYLinhaCol(x,y,temperature, nameFileTemperature); std::cout << "TEMPMAX: " << tempMax << "\t" << tempMax-273.15 << std::endl; double ft = (tempMax-273.15) * 0.02 + 0.4; std::cout << "FT: " << ft << std::endl; //7 - generate observatory risk double rf = rb * ft * fu; if(rf > 1) rf = 1; std::cout << "RF: " << rf << std::endl; double res = riscodefogo->XYLinhaCol(x,y, result, nameFileResult); std::cout << "RESULT: " << res << std::endl; double gab = riscodefogo->XYLinhaCol(x,y,gabarito, nameFileGab); std::cout << "GABARITO: " << gab << std::endl; } return 0; }
// Manual test: reads DCP data stored in PostGIS and trims the source table so
// that only the rows belonging to the most recently fetched values remain.
int main(int argc, char* argv[])
{
  terrama2::core::initializeTerraMA();

  // NOTE(review): this test is deliberately disabled — the early return below
  // makes everything inside the following block unreachable.
  std::cout << "NOT WORKING" << std::endl;
  return 1;

  {
    // Build the PostGIS connection URI from the build-time database settings.
    QUrl uri;
    uri.setScheme("postgis");
    uri.setHost(QString::fromStdString(TERRAMA2_DATABASE_HOST));
    uri.setPort(std::stoi(TERRAMA2_DATABASE_PORT));
    uri.setUserName(QString::fromStdString(TERRAMA2_DATABASE_USERNAME));
    uri.setPassword(QString::fromStdString(TERRAMA2_DATABASE_PASSWORD));
    uri.setPath(QString::fromStdString("/"+TERRAMA2_DATABASE_DBNAME));

    // DataProvider information (raw pointer ownership is transferred to the Ptr).
    terrama2::core::DataProvider* dataProvider = new terrama2::core::DataProvider();
    terrama2::core::DataProviderPtr dataProviderPtr(dataProvider);
    dataProvider->uri = uri.url().toStdString();
    dataProvider->intent = terrama2::core::DataProviderIntent::COLLECTOR_INTENT;
    dataProvider->dataProviderType = "POSTGIS";
    dataProvider->active = true;

    // DataSeries information
    terrama2::core::DataSeries* dataSeries = new terrama2::core::DataSeries();
    terrama2::core::DataSeriesPtr dataSeriesPtr(dataSeries);
    auto& semanticsManager = terrama2::core::SemanticsManager::getInstance();
    dataSeries->semantics = semanticsManager.getSemantics("DCP-postgis");

    // DataSet information: which table/column holds the DCP measurements.
    terrama2::core::DataSetDcp* dataSet = new terrama2::core::DataSetDcp();
    dataSet->active = true;
    dataSet->format.emplace("table_name", "inpe");
    dataSet->format.emplace("timestamp_property", "datetime");
    dataSeries->datasetList.emplace_back(dataSet);

    // accessing data
    terrama2::core::DataAccessorDcpPostGIS accessor(dataProviderPtr, dataSeriesPtr);

    // Filter that only fetches the most recent value per DCP.
    terrama2::core::Filter filter;
    filter.lastValue = true;
    terrama2::core::DcpSeriesPtr dcpSeries = accessor.getDcpSeries(filter);

    std::cout << "\nLast data timestamp: " << accessor.lastDateTime()->toString() << std::endl;

    // Exactly one DCP series is expected for this fixture.
    assert(dcpSeries->dcpSeriesMap().size() == 1);
    auto datasetSeries = (*dcpSeries->dcpSeriesMap().begin()).second;
    std::shared_ptr<te::da::DataSet> teDataSet = datasetSeries.syncDataSet->dataset();
    std::string tableName = dataSet->format["table_name"];

    // creates a DataSource to the data and filters the dataset,
    // also joins if the DCP comes from separated files
    std::shared_ptr<te::da::DataSource> datasource(te::da::DataSourceFactory::make("POSTGIS"));
    std::map<std::string, std::string> connInfo {{"PG_HOST", uri.host().toStdString()},
                                                 {"PG_PORT", std::to_string(uri.port())},
                                                 {"PG_USER", uri.userName().toStdString()},
                                                 {"PG_PASSWORD", uri.password().toStdString()},
                                                 {"PG_DB_NAME", uri.path().section("/", 1, 1).toStdString()},
                                                 {"PG_CONNECT_TIMEOUT", "4"},
                                                 {"PG_CLIENT_ENCODING", "UTF-8"}
                                                };
    datasource->setConnectionInfo(connInfo);

    // RAII for open/closing the datasource
    terrama2::core::OpenClose<std::shared_ptr<te::da::DataSource>> openClose(datasource);

    // get a transactor to interact to the data source
    std::shared_ptr<te::da::DataSourceTransactor> transactor(datasource->getTransactor());

    auto primaryKey = datasetSeries.teDataSetType->getPrimaryKey();
    assert(primaryKey);
    // A non-empty dataset is required: the SQL built below would be malformed
    // (and pop_back would eat the '(') with zero rows.
    assert(!teDataSet->isEmpty());
    auto pkName = primaryKey->getName();

    // Delete every row whose primary key is NOT in the freshly fetched dataset,
    // i.e. keep only the latest values.
    std::string sql("DELETE FROM " +tableName+" WHERE "+pkName+" NOT IN (");
    auto pos = datasetSeries.teDataSetType->getPropertyPosition(pkName);
    teDataSet->moveBeforeFirst();
    while (teDataSet->moveNext())
    {
      sql+=std::to_string(teDataSet->getInt32(pos))+",";
    }
    sql.pop_back();  // drop the trailing comma
    sql+=")";

    transactor->execute(sql);
    std::cout << "dataset size: " << teDataSet->size() << std::endl;
  }

  terrama2::core::finalizeTerraMA();
  return 0;
}
///////////////////////////////////////////////////////////////////////////////
// Open a ShapeDatasource (all shp files in a directory)
//
// pszNewName may name either a single .shp file or a directory; for a
// directory, every *.shp file inside it is opened as a layer.
// fTestOpen == true means "probe only": failures return S_FALSE instead of an
// error HRESULT, and no message boxes are shown.
HRESULT CShapeDataSource::Open(const char *pszNewName, bool fUpdate, bool fTestOpen)
{
    _ASSERTE(0 == m_Layers.size());
    _ASSERTE(NULL != pszNewName);

    struct _stat stat;

    m_strName = pszNewName;
    m_fUpdatable = fUpdate;

    // Is the given path a directory or a regular file?
    os_string strPath (pszNewName);

    // strip a trailing '\' or '/', if any
    size_t pos = strPath.find_last_of("\\/");
    if (os_npos != pos && pos == strPath.size()-1)
        strPath.replace(pos, 1, "");

    if (0 != _stat(strPath.c_str(), &stat) || (!(stat.st_mode & _S_IFDIR) && !(stat.st_mode & _S_IFREG)))
    {
        if (fTestOpen)
            return S_FALSE;
        return TRIASHP_E_BADFILENAME;
    }

    // Build a list of filenames we figure are Shape files.
    if (stat.st_mode & _S_IFREG)
    {
        // given name relates to a file
        if (S_OK != OpenFile(pszNewName, fUpdate, fTestOpen))
        {
            if (fTestOpen)
                return S_FALSE;
            return TRIASHP_E_OPENFILE_FAILED;
        }
        return S_OK;
    }
    else
    {
        vector<os_string> Candidates;
        vector<os_string> Failed;
        os_path dir (pszNewName);

        RETURN_FAILED_HRESULT(ReadDir (pszNewName, Candidates));
        for (vector<os_string>::iterator it = Candidates.begin(); it != Candidates.end(); ++it)
        {
            os_path path (*it);
            // skip anything that is not a .shp file (case-insensitive compare)
            if (stricmp (path.extension().c_str(), g_cbShp))
                continue;

            // build the fully qualified file name from the directory part
            if (dir.has_directory())
                path.directory(dir.directory());
            if (dir.has_drive())
                path.drive(*dir.drive().c_str());

            if (S_OK != OpenFile (os_string(path).c_str(), fUpdate, fTestOpen) && !fTestOpen)
                Failed.push_back(os_string(path));
        }

        // Nothing could be opened (read-only mode): inform the user once.
        if (!fTestOpen && 0 == m_Layers.size() && !fUpdate)
        {
            if (!m_fDeniedToOpen)
            {
                os_path datasource(strPath);
                VMessageBox(IDS_DBENGINE_DESC, MB_ICONEXCLAMATION|MB_OK, IDS_COULDNOTOPENSHAPE, datasource.base().c_str());
                m_fDeniedToOpen = true;
            }
            return TRIASHP_W_NOFILESFOUND;
        }

        // Error handling: some files failed — ask the user (once) whether to
        // continue with the files that did open.
        if (Failed.size() > 0)
        {
            int iResult = IDNO;
            if (!m_fDeniedToOpen)
            {
                os_path datasource(strPath);
                if (1 == Failed.size())
                {
                    iResult = VMessageBox(IDS_DBENGINE_DESC, MB_ICONQUESTION|MB_YESNO, IDS_COULDNOTOPENFILE, Failed[0].c_str(), datasource.base().c_str());
                }
                else
                {
                    // concatenate the failed file names for the message box
                    os_string failed;
                    for (vector<os_string>::iterator it = Failed.begin(); it != Failed.end(); ++it)
                    {
                        failed += " ";
                        failed += *it;
                        failed += '\n';
                    }
                    iResult = VMessageBox(IDS_DBENGINE_DESC, MB_ICONQUESTION|MB_YESNO, IDS_COULDNOTOPENFILES, failed.c_str(), datasource.base().c_str());
                }
                m_fDeniedToOpen = true;
            }
            if (IDNO == iResult)
            {
                m_Layers.clear();
                return TRIASHP_E_OPENFILE_FAILED;
            }
        }
    }

    // database is now open
    m_fOpened = true;
    return (m_Layers.size() > 0 || fUpdate) ? S_OK : S_FALSE;
}
int main(int argc, char **argv) { std::cout << "MapReduce Wordcount Application"; if (argc < 2) { std::cerr << "Usage: wordcount directory [num_map_tasks]\n"; return 1; } mapreduce::specification spec; spec.input_directory = argv[1]; std::cout << "\n" << std::max(1,(int)boost::thread::hardware_concurrency()) << " CPU cores"; std::cout << "\n" << typeid(wordcount::job).name() << "\n"; mapreduce::results result; wordcount::job::datasource_type datasource(spec); try { if (argc > 2) spec.map_tasks = atoi(argv[2]); if (argc > 3) spec.reduce_tasks = atoi(argv[3]); else spec.reduce_tasks = std::max(1U,boost::thread::hardware_concurrency()); std::cout << "\nRunning Parallel WordCount MapReduce..."; wordcount::job job(datasource, spec); job.run<mapreduce::schedule_policy::cpu_parallel<wordcount::job> >(result); std::cout << "\nMapReduce Finished."; std::cout << std::endl << "\nMapReduce statistics:"; std::cout << "\n MapReduce job runtime : " << result.job_runtime << " seconds, of which..."; std::cout << "\n Map phase runtime : " << result.map_runtime << " seconds"; std::cout << "\n Reduce phase runtime : " << result.reduce_runtime << " seconds"; std::cout << "\n\n Map:"; std::cout << "\n Total Map keys : " << result.counters.map_keys_executed; std::cout << "\n Map keys processed : " << result.counters.map_keys_completed; std::cout << "\n Map key processing errors : " << result.counters.map_key_errors; std::cout << "\n Number of Map Tasks run (in parallel) : " << result.counters.actual_map_tasks; std::cout << "\n Fastest Map key processed in : " << *std::min_element(result.map_times.begin(), result.map_times.end()) << " seconds"; std::cout << "\n Slowest Map key processed in : " << *std::max_element(result.map_times.begin(), result.map_times.end()) << " seconds"; std::cout << "\n Average time to process Map keys : " << std::accumulate(result.map_times.begin(), result.map_times.end(), boost::posix_time::time_duration()) / result.map_times.size() << " seconds"; std::cout << 
"\n\n Reduce:"; std::cout << "\n Total Reduce keys : " << result.counters.reduce_keys_executed; std::cout << "\n Reduce keys processed : " << result.counters.reduce_keys_completed; std::cout << "\n Reduce key processing errors : " << result.counters.reduce_key_errors; std::cout << "\n Number of Reduce Tasks run (in parallel): " << result.counters.actual_reduce_tasks; std::cout << "\n Number of Result Files : " << result.counters.num_result_files; if (result.reduce_times.size() > 0) { std::cout << "\n Fastest Reduce key processed in : " << *std::min_element(result.reduce_times.begin(), result.reduce_times.end()) << " seconds"; std::cout << "\n Slowest Reduce key processed in : " << *std::max_element(result.reduce_times.begin(), result.reduce_times.end()) << " seconds"; std::cout << "\n Average time to process Reduce keys : " << std::accumulate(result.reduce_times.begin(), result.reduce_times.end(), boost::posix_time::time_duration()) / result.map_times.size() << " seconds"; } wordcount::job::const_result_iterator it = job.begin_results(); wordcount::job::const_result_iterator ite = job.end_results(); if (it != ite) { typedef std::list<wordcount::job::keyvalue_t> frequencies_t; frequencies_t frequencies; frequencies.push_back(*it); frequencies_t::reverse_iterator it_smallest = frequencies.rbegin(); for (++it; it!=ite; ++it) { if (frequencies.size() < 10) // show top 10 { frequencies.push_back(*it); if (it->second < it_smallest->second) it_smallest = frequencies.rbegin(); } else if (it->second > it_smallest->second) { *it_smallest = *it; it_smallest = std::min_element(frequencies.rbegin(), frequencies.rend(), mapreduce::detail::less_2nd<wordcount::job::keyvalue_t>); } } frequencies.sort(mapreduce::detail::greater_2nd<wordcount::job::keyvalue_t>); std::cout << "\n\nMapReduce results:"; for (frequencies_t::const_iterator freq=frequencies.begin(); freq!=frequencies.end(); ++freq) printf("\n%.*s\t%d", freq->first.second, freq->first.first, freq->second); } } catch 
(std::exception &e) { std::cout << std::endl << "Error: " << e.what(); } return 0; }