int main(int argc, char *argv[]) {

  int status=0; // 0 = pass, failures are incremented
  bool success = true;

#ifdef ALBANY_DEBUG
  Teuchos::GlobalMPISession mpiSession(&argc, &argv);
#else // bypass printing process startup info
  Teuchos::GlobalMPISession mpiSession(&argc, &argv, NULL);
#endif

  Kokkos::initialize(argc, argv);

#ifdef ALBANY_FLUSH_DENORMALS
  _MM_SET_FLUSH_ZERO_MODE(_MM_FLUSH_ZERO_ON);
  _MM_SET_DENORMALS_ZERO_MODE(_MM_DENORMALS_ZERO_ON);
#endif

#ifdef ALBANY_CHECK_FPE
  // Catch FPEs. Follow Main_SolveT.cpp's approach to checking for floating
  // point exceptions.
  //_mm_setcsr(_MM_MASK_MASK &~ (_MM_MASK_OVERFLOW | _MM_MASK_INVALID | _MM_MASK_DIV_ZERO) );
  _MM_SET_EXCEPTION_MASK(_MM_GET_EXCEPTION_MASK() & ~_MM_MASK_INVALID);
#endif

  using Teuchos::RCP;
  using Teuchos::rcp;

  RCP<Teuchos::FancyOStream> out(Teuchos::VerboseObjectBase::getDefaultOStream());

  // Command-line argument for input file
  Albany::CmdLineArgs cmd;
  cmd.parse_cmdline(argc, argv, *out);

  try {
    RCP<Teuchos::Time> totalTime =
      Teuchos::TimeMonitor::getNewTimer("Albany: ***Total Time***");
    RCP<Teuchos::Time> setupTime =
      Teuchos::TimeMonitor::getNewTimer("Albany: Setup Time");
    Teuchos::TimeMonitor totalTimer(*totalTime); //start timer
    Teuchos::TimeMonitor setupTimer(*setupTime); //start timer

    RCP<const Teuchos_Comm> comm =
      Tpetra::DefaultPlatform::getDefaultPlatform().getComm();

    // Connect vtune for performance profiling
    if (cmd.vtune) {
      Albany::connect_vtune(comm->getRank());
    }

    Albany::SolverFactory slvrfctry(cmd.xml_filename, comm);
    RCP<Epetra_Comm> appComm = Albany::createEpetraCommFromTeuchosComm(comm);
    RCP<Albany::Application> app;
    const RCP<Thyra::ModelEvaluator<double> > solver =
      slvrfctry.createThyraSolverAndGetAlbanyApp(app, appComm, appComm);

    // Explicitly stop the setup timer; the total timer keeps running through the solve.
    setupTimer.~TimeMonitor();

    // PHX::InitializeKokkosDevice();

    Teuchos::ParameterList &solveParams =
      slvrfctry.getAnalysisParameters().sublist("Solve", /*mustAlreadyExist =*/ false);
    // By default, request the sensitivities if not explicitly disabled
    solveParams.get("Compute Sensitivities", true);

    Teuchos::Array<Teuchos::RCP<const Thyra::VectorBase<double> > > thyraResponses;
    Teuchos::Array<Teuchos::Array<Teuchos::RCP<const Thyra::MultiVectorBase<double> > > > thyraSensitivities;

    // The PoissonSchrodinger_SchroPo and PoissonSchroMosCap1D tests seg fault as albanyApp is null -
    // For now, do not resize the response vectors. FIXME sort out this issue.
    if (Teuchos::nonnull(app))
      Piro::PerformSolveBase(*solver, solveParams, thyraResponses, thyraSensitivities,
                             app->getAdaptSolMgr()->getSolObserver());
    else
      Piro::PerformSolveBase(*solver, solveParams, thyraResponses, thyraSensitivities);

    Teuchos::Array<Teuchos::RCP<const Epetra_Vector> > responses;
    Teuchos::Array<Teuchos::Array<Teuchos::RCP<const Epetra_MultiVector> > > sensitivities;
    epetraFromThyra(appComm, thyraResponses, thyraSensitivities, responses, sensitivities);

    const int num_p = solver->Np(); // Number of *vectors* of parameters
    const int num_g = solver->Ng(); // Number of *vectors* of responses

    *out << "Finished eval of first model: Params, Responses "
         << std::setprecision(12) << std::endl;

    Teuchos::ParameterList& parameterParams =
      slvrfctry.getParameters().sublist("Problem").sublist("Parameters");
    int num_param_vecs = (parameterParams.isType<int>("Number")) ?
      int(parameterParams.get("Number", 0) > 0) :
      parameterParams.get("Number of Parameter Vectors", 0);

    const Thyra::ModelEvaluatorBase::InArgs<double> nominal = solver->getNominalValues();
    double norm2;
    for (int i=0; i<num_p; i++) {
      const Teuchos::RCP<const Epetra_Vector> p_init =
        epetraVectorFromThyra(appComm, nominal.get_p(i));
      if (i < num_param_vecs)
        p_init->Print(*out << "\nParameter vector " << i << ":\n");
      else { // distributed parameters, we print only 2-norm
        p_init->Norm2(&norm2);
        *out << "\nDistributed Parameter " << i << ": " << norm2 << " (two-norm)\n" << std::endl;
      }
    }

    for (int i=0; i<num_g-1; i++) {
      const RCP<const Epetra_Vector> g = responses[i];
      bool is_scalar = true;

      if (app != Teuchos::null)
        is_scalar = app->getResponse(i)->isScalarResponse();

      if (is_scalar) {
        g->Print(*out << "\nResponse vector " << i << ":\n");

        if (num_p == 0) {
          // Just calculate regression data
          status += slvrfctry.checkSolveTestResults(i, 0, g.get(), NULL);
        }
        else {
          for (int j=0; j<num_p; j++) {
            const RCP<const Epetra_MultiVector> dgdp = sensitivities[i][j];
            if (Teuchos::nonnull(dgdp)) {
              if (j < num_param_vecs) {
                dgdp->Print(*out << "\nSensitivities (" << i << "," << j << "): \n");
                status += slvrfctry.checkSolveTestResults(i, j, g.get(), dgdp.get());
              }
              else {
                // Distributed parameters: report only the two-norm of each sensitivity column
                const Epetra_Map serial_map(-1, 1, 0, dgdp.get()->Comm());
                Epetra_MultiVector norms(serial_map, dgdp->NumVectors());
                // RCP<Albany::ScalarResponseFunction> response = rcp_dynamic_cast<Albany::ScalarResponseFunction>(app->getResponse(i));
                // int numResponses = response->numResponses();
                *out << "\nSensitivities (" << i << "," << j << ") for Distributed Parameters: (two-norm)\n";
                *out << " ";
                for (int ir=0; ir<dgdp->NumVectors(); ++ir) {
                  (*dgdp)(ir)->Norm2(&norm2);
                  (*norms(ir))[0] = norm2;
                  *out << " " << norm2;
                }
                *out << "\n" << std::endl;
                status += slvrfctry.checkSolveTestResults(i, j, g.get(), &norms);
              }
            }
          }
        }
      }
    }

    // Create debug output object
    Teuchos::ParameterList &debugParams =
      slvrfctry.getParameters().sublist("Debug Output", true);
    bool writeToMatrixMarketSoln = debugParams.get("Write Solution to MatrixMarket", false);
    bool writeToMatrixMarketDistrSolnMap = debugParams.get("Write Distributed Solution and Map to MatrixMarket", false);
    bool writeToCoutSoln = debugParams.get("Write Solution to Standard Output", false);

    const RCP<const Epetra_Vector> xfinal = responses.back();
    double mnv;
    xfinal->MeanValue(&mnv);
    *out << "Main_Solve: MeanValue of final solution " << mnv << std::endl;
    *out << "\nNumber of Failed Comparisons: " << status << std::endl;

    if (writeToCoutSoln == true)
      std::cout << "xfinal: " << *xfinal << std::endl;

#ifdef ALBANY_PERIDIGM
#if defined(ALBANY_EPETRA)
    if (Teuchos::nonnull(LCM::PeridigmManager::self())) {
      *out << std::setprecision(12)
           << "\nPERIDIGM-ALBANY OPTIMIZATION-BASED COUPLING FINAL FUNCTIONAL VALUE = "
           << LCM::PeridigmManager::self()->obcEvaluateFunctional() << "\n" << std::endl;
    }
#endif
#endif

    if (debugParams.get<bool>("Analyze Memory", false))
      Albany::printMemoryAnalysis(std::cout, comm);

    if (writeToMatrixMarketSoln == true) {
      // create serial map that puts the whole solution on processor 0
      int numMyElements = (xfinal->Comm().MyPID() == 0) ?
        app->getDiscretization()->getMap()->NumGlobalElements() : 0;
      const Epetra_Map serial_map(-1, numMyElements, 0, xfinal->Comm());

      // create importer from parallel map to serial map and populate serial solution xfinal_serial
      Epetra_Import importOperator(serial_map, *app->getDiscretization()->getMap());
      Epetra_Vector xfinal_serial(serial_map);
      xfinal_serial.Import(*app->getDiscretization()->getSolutionField(), importOperator, Insert);

      // writing to MatrixMarket file
      EpetraExt::MultiVectorToMatrixMarketFile("xfinal.mm", xfinal_serial);
    }

    if (writeToMatrixMarketDistrSolnMap == true) {
      // writing to MatrixMarket file
      EpetraExt::MultiVectorToMatrixMarketFile("xfinal_distributed.mm", *xfinal);
      EpetraExt::BlockMapToMatrixMarketFile("xfinal_distributed_map.mm",
                                            *app->getDiscretization()->getMap());
    }
  }
  TEUCHOS_STANDARD_CATCH_STATEMENTS(true, std::cerr, success);
  if (!success) status += 10000;

  Teuchos::TimeMonitor::summarize(*out, false, true, false /*zero timers*/);

  Kokkos::finalize_all();

  return status;
}
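
//***********************************************************
// The driver below is a second, stand-alone main() and cannot be linked into
// the same executable as the one above; it appears to be the stochastic-
// Galerkin coupled-system driver (AlbanySG), which couples several Albany
// models through a Piro::Epetra network model and reports the mean, standard
// deviation, and polynomial-chaos expansion of the coupling responses.
//***********************************************************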
int main(int argc, char *argv[]) {

  int status=0; // 0 = pass, failures are incremented
  bool success = true;
  Teuchos::GlobalMPISession mpiSession(&argc,&argv);

  using Teuchos::RCP;
  using Teuchos::rcp;

  RCP<Teuchos::FancyOStream> out(Teuchos::VerboseObjectBase::getDefaultOStream());

  //***********************************************************
  // Command-line argument for input file
  //***********************************************************
  Albany::CmdLineArgs cmd;
  cmd.parse_cmdline(argc, argv, *out);
  std::string xmlfilename_coupled = cmd.xml_filename;

  try {
    RCP<Teuchos::Time> totalTime =
      Teuchos::TimeMonitor::getNewTimer("AlbanySG: ***Total Time***");
    RCP<Teuchos::Time> setupTime =
      Teuchos::TimeMonitor::getNewTimer("AlbanySG: Setup Time");
    Teuchos::TimeMonitor totalTimer(*totalTime); //start timer

    //***********************************************************
    // Set up coupled solver first to setup comm's
    //***********************************************************
    Teuchos::RCP<Epetra_Comm> globalComm =
      Albany::createEpetraCommFromMpiComm(Albany_MPI_COMM_WORLD);

    // Connect vtune for performance profiling
    if (cmd.vtune) {
      Albany::connect_vtune(globalComm->MyPID());
    }

    Albany::SolverFactory coupled_slvrfctry(
      xmlfilename_coupled,
      Albany::createTeuchosCommFromEpetraComm(globalComm));
    Teuchos::ParameterList& coupledParams = coupled_slvrfctry.getParameters();
    Teuchos::ParameterList& coupledSystemParams =
      coupledParams.sublist("Coupled System");
    Teuchos::Array<std::string> model_filenames =
      coupledSystemParams.get<Teuchos::Array<std::string> >("Model XML Files");
    int num_models = model_filenames.size();
    Teuchos::Array< RCP<Albany::Application> > apps(num_models);
    Teuchos::Array< RCP<EpetraExt::ModelEvaluator> > models(num_models);
    Teuchos::Array< RCP<Teuchos::ParameterList> > piroParams(num_models);
    Teuchos::RCP<Teuchos::ParameterList> coupledPiroParams =
      Teuchos::rcp(&(coupledParams.sublist("Piro")), false);
    Teuchos::RCP<Piro::Epetra::StokhosSolver> coupledSolver =
      Teuchos::rcp(new Piro::Epetra::StokhosSolver(coupledPiroParams, globalComm));
    Teuchos::RCP<const Epetra_Comm> app_comm = coupledSolver->getSpatialComm();

    // Set up each model
    Teuchos::Array< Teuchos::RCP<NOX::Epetra::Observer> > observers(num_models);
    for (int m=0; m<num_models; m++) {
      Albany::SolverFactory slvrfctry(
        model_filenames[m],
        Albany::createTeuchosCommFromEpetraComm(app_comm));
      models[m] = slvrfctry.createAlbanyAppAndModel(apps[m], app_comm);
      Teuchos::ParameterList& appParams = slvrfctry.getParameters();
      piroParams[m] = Teuchos::rcp(&(appParams.sublist("Piro")), false);
      observers[m] = Teuchos::rcp(new Albany_NOXObserver(apps[m]));
    }

    // Setup network model
    std::string network_name =
      coupledSystemParams.get("Network Model", "Param To Response");
    RCP<Piro::Epetra::AbstractNetworkModel> network_model;
    if (network_name == "Param To Response")
      network_model = rcp(new Piro::Epetra::ParamToResponseNetworkModel);
    else if (network_name == "Reactor Network")
      network_model = rcp(new Albany::ReactorNetworkModel(1));
    else
      TEUCHOS_TEST_FOR_EXCEPTION(
        true, std::logic_error, "Invalid network model name " << network_name);
    RCP<EpetraExt::ModelEvaluator> coupledModel =
      rcp(new Piro::Epetra::NECoupledModelEvaluator(models, piroParams,
                                                    network_model, coupledPiroParams,
                                                    globalComm, observers));
    coupledSolver->setup(coupledModel);

    // Solve coupled system
    EpetraExt::ModelEvaluator::InArgs inArgs = coupledSolver->createInArgs();
    EpetraExt::ModelEvaluator::OutArgs outArgs = coupledSolver->createOutArgs();
    for (int i=0; i<inArgs.Np(); i++)
      if (inArgs.supports(EpetraExt::ModelEvaluator::IN_ARG_p_sg, i))
        inArgs.set_p_sg(i, coupledSolver->get_p_sg_init(i));
    for (int i=0; i<outArgs.Ng(); i++)
      if (outArgs.supports(EpetraExt::ModelEvaluator::OUT_ARG_g_sg, i)) {
        RCP<Stokhos::EpetraVectorOrthogPoly> g_sg = coupledSolver->create_g_sg(i);
        outArgs.set_g_sg(i, g_sg);
      }
    coupledSolver->evalModel(inArgs, outArgs);

    // Print results
    bool printResponse =
      coupledSystemParams.get("Print Response Expansion", true);
    int idx = outArgs.Ng()-1;
    Teuchos::RCP<Stokhos::EpetraVectorOrthogPoly> g_sg = outArgs.get_g_sg(idx);
    Teuchos::RCP<Stokhos::SGModelEvaluator> sg_model = coupledSolver->get_sg_model();
    Teuchos::RCP<Stokhos::EpetraVectorOrthogPoly> g_sg_local =
      //sg_model->import_solution_poly(*(g_sg->getBlockVector()));
      g_sg;
    Epetra_Vector g_mean(*(g_sg->coefficientMap()));
    Epetra_Vector g_std_dev(*(g_sg->coefficientMap()));
    g_sg->computeMean(g_mean);
    g_sg->computeStandardDeviation(g_std_dev);
    RCP<Epetra_Vector> g_mean_local = rcp(&g_mean, false);
    RCP<Epetra_Vector> g_std_dev_local = rcp(&g_std_dev, false);
    if (g_mean.Map().DistributedGlobal()) {
      Epetra_LocalMap local_map(g_mean.GlobalLength(), 0, g_mean.Map().Comm());
      g_mean_local = rcp(new Epetra_Vector(local_map));
      g_std_dev_local = rcp(new Epetra_Vector(local_map));
      Epetra_Import importer(local_map, g_mean.Map());
      g_mean_local->Import(g_mean, importer, Insert);
      g_std_dev_local->Import(g_std_dev, importer, Insert);
    }
    out->precision(16);
    *out << std::endl
         << "Final value of coupling variables:" << std::endl
         << "Mean:" << std::endl << *g_mean_local << std::endl
         << "Std. Dev.:" << std::endl << *g_std_dev_local << std::endl;
    if (printResponse)
      *out << "PCE:" << std::endl << *g_sg_local << std::endl;

    status += coupled_slvrfctry.checkSGTestResults(
      0, g_sg_local, g_mean_local.get(), g_std_dev_local.get());
    *out << "\nNumber of Failed Comparisons: " << status << std::endl;
  }
  TEUCHOS_STANDARD_CATCH_STATEMENTS(true, std::cerr, success);
  if (!success) status += 10000;

  Teuchos::TimeMonitor::summarize(*out, false, true, false /*zero timers*/);

  return status;
}
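
// Example invocation for the coupled driver above (a sketch; the executable
// and input-deck names are illustrative and depend on the build and problem):
//   mpirun -np 4 ./AlbanySG coupled_input.xml
// The input XML file is taken from the command line by Albany::CmdLineArgs
// (cmd.xml_filename above); the exact flags accepted depend on that class.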