void
test_octet_seq (CORBA::Object_ptr object)
{
  ACE_Sample_History history (niterations);

  Test::octet_load ol (sz);
  ol.length (sz);

  ACE_hrtime_t test_start = ACE_OS::gethrtime ();
  for (int i = 0; i < niterations; ++i)
    {
      ACE_hrtime_t start = ACE_OS::gethrtime ();
      Test::Timestamp start_time = static_cast<Test::Timestamp> (start);

      // Build the DII request by hand: add the in arguments, set the
      // return type, then invoke.
      CORBA::Request_var request = object->_request ("test_octet_method");
      request->add_in_arg ("octet_load") <<= ol;
      request->add_in_arg ("send_time") <<= start_time;
      request->set_return_type (CORBA::_tc_ulonglong);
      request->invoke ();

      ACE_hrtime_t now = ACE_OS::gethrtime ();
      history.sample (now - start);
    }
  ACE_hrtime_t test_end = ACE_OS::gethrtime ();

  ACE_DEBUG ((LM_DEBUG, "test finished\n"));

  ACE_DEBUG ((LM_DEBUG, "High resolution timer calibration...."));
  ACE_High_Res_Timer::global_scale_factor_type gsf =
    ACE_High_Res_Timer::global_scale_factor ();
  ACE_DEBUG ((LM_DEBUG, "done\n"));

  if (do_dump_history)
    {
      history.dump_samples (ACE_TEXT("HISTORY"), gsf);
    }

  ACE_Basic_Stats stats;
  history.collect_basic_stats (stats);
  stats.dump_results (ACE_TEXT("Total"), gsf);

  ACE_Throughput_Stats::dump_throughput (ACE_TEXT("Total"), gsf,
                                         test_end - test_start,
                                         stats.samples_count ());
}
void
ACE_Sample_History::collect_basic_stats (ACE_Basic_Stats &stats) const
{
  for (size_t i = 0; i != this->sample_count_; ++i)
    {
      stats.sample (this->samples_[i]);
    }
}
void
Client_Task::accumulate_and_dump (ACE_Basic_Stats &totals,
                                  const ACE_TCHAR *msg,
                                  ACE_High_Res_Timer::global_scale_factor_type gsf)
{
  totals.accumulate (this->latency_);
  this->latency_.dump_results (msg, gsf);
}
void
test_octet_seq (Test::Roundtrip_ptr roundtrip)
{
  ACE_Sample_History history (niterations);

  Test::octet_load ol (sz);
  ol.length (sz);

  ACE_hrtime_t test_start = ACE_OS::gethrtime ();
  for (int i = 0; i < niterations; ++i)
    {
      ACE_hrtime_t start = ACE_OS::gethrtime ();

      (void) roundtrip->test_octet_method (ol, start);

      ACE_hrtime_t now = ACE_OS::gethrtime ();
      history.sample (now - start);
    }
  ACE_hrtime_t test_end = ACE_OS::gethrtime ();

  ACE_DEBUG ((LM_DEBUG, "Octet test finished\n"));

  ACE_DEBUG ((LM_DEBUG, "High resolution timer calibration...."));
  ACE_High_Res_Timer::global_scale_factor_type gsf =
    ACE_High_Res_Timer::global_scale_factor ();
  ACE_DEBUG ((LM_DEBUG, "done\n"));

  if (do_dump_history)
    {
      history.dump_samples (ACE_TEXT("HISTORY"), gsf);
    }

  ACE_Basic_Stats stats;
  history.collect_basic_stats (stats);
  stats.dump_results (ACE_TEXT("Total"), gsf);

  ACE_Throughput_Stats::dump_throughput (ACE_TEXT("Total"), gsf,
                                         test_end - test_start,
                                         stats.samples_count ());
}
int
Client_Task::run_test (void)
{
  ACE_hrtime_t test_start = 0;
  ACE_hrtime_t test_end = 0;

  try
    {
      test_start = ACE_OS::gethrtime ();

      this->roundtrip_->start_test ();
      this->svc ();
      this->roundtrip_->end_test ();

      test_end = ACE_OS::gethrtime ();
    }
  catch (const CORBA::Exception&)
    {
      return 0;
    }

  // High resolution timer calibration
  ACE_High_Res_Timer::global_scale_factor_type gsf =
    ACE_High_Res_Timer::global_scale_factor ();

  ACE_Basic_Stats totals;
  this->accumulate_and_dump (totals, ACE_TEXT("Task"), gsf);

  totals.dump_results (ACE_TEXT("Total"), gsf);

  ACE_Throughput_Stats::dump_throughput (ACE_TEXT("Total"), gsf,
                                         test_end - test_start,
                                         totals.samples_count ());
  return 1;
}
void
LogNotifyConsumer::evaluateTiming ()
{
  ACE_hrtime_t testEnd = ACE_OS::gethrtime ();
  ACE_DEBUG ((LM_DEBUG, "Logging client test finished\n"));

  ACE_DEBUG ((LM_DEBUG, "High resolution timer calibration...."));
  ACE_UINT32 gsf = ACE_High_Res_Timer::global_scale_factor ();
  ACE_DEBUG ((LM_DEBUG, "done\n"));

  //history_->dump_samples ("HISTORY", gsf);

  ACE_Basic_Stats stats;
  history_->collect_basic_stats (stats);
  stats.dump_results ("Total", gsf);

  ACE_Throughput_Stats::dump_throughput ("Logging client total", gsf,
                                         testEnd - testStart_,
                                         stats.samples_count ());

  delete history_;
  history_ = NULL;
}
void
Continuous_Worker::print_stats (ACE_Sample_History &history,
                                ACE_hrtime_t test_end)
{
  ACE_GUARD (TAO_SYNCH_MUTEX,
             mon,
             this->synchronizers_.worker_lock_);

  if (individual_continuous_worker_stats)
    {
      ACE_DEBUG ((LM_DEBUG,
                  "\n************ Statistics for thread %t ************\n\n"));
      ACE_DEBUG ((LM_DEBUG,
                  "Iterations = %d\n",
                  history.sample_count ()));

      if (do_dump_history)
        {
          history.dump_samples (ACE_TEXT("HISTORY"), gsf);
        }

      ACE_Basic_Stats stats;
      history.collect_basic_stats (stats);
      stats.dump_results (ACE_TEXT("Total"), gsf);

      ACE_Throughput_Stats::dump_throughput (ACE_TEXT("Total"), gsf,
                                             test_end - test_start,
                                             stats.samples_count ());
    }

  history.collect_basic_stats (this->collective_stats_);

  ACE_hrtime_t elapsed_time_for_current_thread = test_end - test_start;
  if (elapsed_time_for_current_thread > this->time_for_test_)
    this->time_for_test_ = elapsed_time_for_current_thread;
}
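// NOTE: several of the excerpts in this section read file-scope test
// state (gsf, test_start, niterations, sz, do_dump_history, ...) that
// the surrounding test files declare elsewhere. Below is a minimal
// sketch of those declarations, assuming the usual TAO
// performance-test conventions; the names mirror the usage above, and
// the initial values are illustrative placeholders normally set by
// parse_args() and by the timer calibration at startup.
static int niterations = 1000;                 // iteration count
static CORBA::ULong sz = 1024;                 // payload size
static int do_dump_history = 0;                // dump raw samples?
static int individual_continuous_worker_stats = 0;
static ACE_hrtime_t test_start = 0;            // set before workers run
static ACE_High_Res_Timer::global_scale_factor_type gsf = 0; // calibrated once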
void
Control::join (Federated_Test::Peer_ptr peer)
{
  {
    ACE_GUARD (TAO_SYNCH_MUTEX, ace_mon, this->mutex_);
    if (this->peers_count_ == this->peers_expected_)
      return;

    this->peers_[this->peers_count_++] =
      Federated_Test::Peer::_duplicate (peer);

    if (this->peers_count_ < this->peers_expected_)
      return;
  }

  /// Automatically shutdown the ORB
  ACE_Utils::Auto_Functor<CORBA::ORB,ORB_Shutdown> orb_shutdown (
    this->orb_.in ());

  /// Automatically shutdown the peers
  typedef ACE_Utils::Auto_Functor<Federated_Test::Peer,
                                  Shutdown<Federated_Test::Peer> > Peer_Shutdown;
  ACE_Auto_Basic_Array_Ptr<Peer_Shutdown> peer_shutdown (
    new Peer_Shutdown[this->peers_count_]);

  size_t i;
  for (i = 0; i != this->peers_count_; ++i)
    {
      peer_shutdown[i].reset (this->peers_[i].in ());
    }

  ACE_DEBUG ((LM_DEBUG, "Control (%P|%t) Building the federation\n"));

  /// Build the EC federation
  for (i = 0; i != this->peers_count_; ++i)
    {
      for (size_t j = 0; j != this->peers_count_; ++j)
        {
          if (i != j)
            {
              this->peers_[j]->connect (this->peers_[i].in ());
            }
        }
    }

  /// ... run the test(s) ...
  for (i = 0; i != this->peers_count_; ++i)
    {
      /// ... automatically release the object references ...
      ACE_Auto_Basic_Array_Ptr<Federated_Test::Loopback_var> loopbacks (
        new Federated_Test::Loopback_var[2 * this->peers_count_]);

      /// ... and automatically disconnect the loopbacks ...
      typedef Auto_Disconnect<Federated_Test::Loopback> Loopback_Disconnect;
      ACE_Auto_Basic_Array_Ptr<auto_ptr<Loopback_Disconnect> > disconnects (
        new auto_ptr<Loopback_Disconnect>[2 * this->peers_count_]);

      ACE_DEBUG ((LM_DEBUG,
                  "Control (%P|%t) Running test for peer %d\n",
                  i));

      CORBA::Long experiment_id = 128 + i;
      CORBA::Long base_event_type = ACE_ES_EVENT_UNDEFINED;

      size_t lcount = 0;
      size_t j;
      for (j = 0; j != this->peers_count_; ++j)
        {
          if (j != i)
            {
              loopbacks[lcount] =
                this->peers_[j]->setup_loopback (experiment_id,
                                                 base_event_type);
              ACE_auto_ptr_reset (disconnects[lcount],
                                  new Loopback_Disconnect (
                                    loopbacks[lcount].in ()));
              lcount++;

              loopbacks[lcount] =
                this->peers_[j]->setup_loopback (experiment_id,
                                                 base_event_type + 2);
              ACE_auto_ptr_reset (disconnects[lcount],
                                  new Loopback_Disconnect (
                                    loopbacks[lcount].in ()));
              lcount++;
            }
        }

      Federated_Test::Experiment_Results_var results =
        this->peers_[i]->run_experiment (experiment_id,
                                         this->iterations_);

      ACE_Sample_History history (results->length ());
      for (CORBA::ULong k = 0; k != results->length (); ++k)
        history.sample (results[k]);

      // We use a fake scale factor because the peer already converted
      // to microseconds...
      const ACE_UINT32 fake_scale_factor = 1;

      ACE_Basic_Stats stats;
      history.collect_basic_stats (stats);
      stats.dump_results (ACE_TEXT("Total"), fake_scale_factor);

      if (this->do_dump_history_)
        {
          history.dump_samples (ACE_TEXT("HISTORY"), fake_scale_factor);
        }
    }
}
void
Paced_Worker::print_stats (ACE_hrtime_t test_end)
{
  ACE_GUARD (TAO_SYNCH_MUTEX,
             mon,
             this->synchronizers_.worker_lock_);

  CORBA::ULong missed_total_deadlines =
    this->missed_start_deadlines_ + this->missed_end_deadlines_;

  CORBA::ULong made_total_deadlines =
    this->history_.max_samples () - missed_total_deadlines;

  ACE_DEBUG ((LM_DEBUG,
              "\n************ Statistics for thread %t ************\n\n"));
  ACE_DEBUG ((LM_DEBUG,
              "Priority = %d/%d; Rate = %d/sec; Iterations = %d; ",
              this->CORBA_priority_,
              this->native_priority_,
              this->rate_,
              this->history_.max_samples ()));

  if (count_missed_end_deadlines)
    ACE_DEBUG ((LM_DEBUG,
                "Deadlines made/missed[start,end]/%% = %d/%d[%d,%d]/%.2f%%; Effective Rate = %.2f\n",
                made_total_deadlines,
                missed_total_deadlines,
                this->missed_start_deadlines_,
                this->missed_end_deadlines_,
                made_total_deadlines * 100 / (double) this->history_.max_samples (),
                made_total_deadlines / to_seconds (test_end - test_start, gsf)));
  else
    ACE_DEBUG ((LM_DEBUG,
                "Deadlines made/missed/%% = %d/%d/%.2f%%; Effective Rate = %.2f\n",
                made_total_deadlines,
                missed_total_deadlines,
                made_total_deadlines * 100 / (double) this->history_.max_samples (),
                made_total_deadlines / to_seconds (test_end - test_start, gsf)));

  if (do_dump_history)
    {
      this->history_.dump_samples (ACE_TEXT("HISTORY"), gsf);
    }

  ACE_Basic_Stats stats;
  this->history_.collect_basic_stats (stats);
  stats.dump_results (ACE_TEXT("Total"), gsf);

  ACE_Throughput_Stats::dump_throughput (ACE_TEXT("Total"), gsf,
                                         test_end - test_start,
                                         stats.samples_count ());

  if (print_missed_invocations)
    {
      ACE_DEBUG ((LM_DEBUG, "\nMissed start invocations are: "));
      for (CORBA::ULong j = 0; j != this->missed_start_deadlines_; ++j)
        {
          ACE_DEBUG ((LM_DEBUG, "%d ", this->missed_start_invocations_[j]));
        }
      ACE_DEBUG ((LM_DEBUG, "\n"));

      if (count_missed_end_deadlines)
        {
          ACE_DEBUG ((LM_DEBUG, "\nMissed end invocations are: "));
          for (CORBA::ULong j = 0; j != this->missed_end_deadlines_; ++j)
            {
              ACE_DEBUG ((LM_DEBUG, "%d ", this->missed_end_invocations_[j]));
            }
          ACE_DEBUG ((LM_DEBUG, "\n"));
        }
    }
}
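// Both Paced_Worker::print_stats above and Worker::print_stats below
// call a to_seconds() helper that is not shown in this section. The
// following is a minimal sketch, assuming ACE's convention that
// dividing raw high-resolution ticks by the global scale factor
// yields microseconds; ACE_HR_SCALE_CONVERSION then converts
// microseconds to seconds.
static double
to_seconds (ACE_hrtime_t hrtime,
            ACE_High_Res_Timer::global_scale_factor_type gsf)
{
  double seconds =
    static_cast<double> (ACE_UINT64_DBLCAST_ADAPTER (hrtime / gsf));
  seconds /= ACE_HR_SCALE_CONVERSION; // usecs -> seconds
  return seconds;
}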
int
Latency_Query_Client::run (void)
{
  // CORBA::DefinitionKind dk;
  CORBA::AttributeMode am;

  try
    {
      for (int j = 0; j < 100; ++j)
        {
          am = this->attr_->mode ();

          if (am != CORBA::ATTR_NORMAL)
            {
              return -1;
            }
        }

      ACE_Sample_History history (this->iterations_);

      ACE_hrtime_t test_start = ACE_OS::gethrtime ();
      for (CORBA::ULong i = 0; i < this->iterations_; ++i)
        {
          ACE_hrtime_t start = ACE_OS::gethrtime ();

          am = this->attr_->mode ();

          ACE_hrtime_t now = ACE_OS::gethrtime ();
          history.sample (now - start);
        }
      ACE_hrtime_t test_end = ACE_OS::gethrtime ();

      if (this->debug_)
        {
          ACE_DEBUG ((LM_DEBUG, "test finished\n"));

          ACE_DEBUG ((LM_DEBUG, "High resolution timer calibration...."));
          ACE_High_Res_Timer::global_scale_factor_type gsf =
            ACE_High_Res_Timer::global_scale_factor ();
          ACE_DEBUG ((LM_DEBUG, "done\n"));

          if (this->do_dump_history_)
            {
              history.dump_samples (ACE_TEXT("HISTORY"), gsf);
            }

          ACE_Basic_Stats stats;
          history.collect_basic_stats (stats);
          stats.dump_results (ACE_TEXT("Total"), gsf);

          ACE_Throughput_Stats::dump_throughput (ACE_TEXT("Total"), gsf,
                                                 test_end - test_start,
                                                 stats.samples_count ());
        }
    }
  catch (const CORBA::Exception& ex)
    {
      ex._tao_print_exception ("Latency_Query_Client::run:");
      return -1;
    }

  return 0;
}
int
main (int argc, char * argv[])
{
  Server server (argc, argv);

  SparrowMotion_var motion = server.resolveName<SparrowMotion> ("Motion");
  Odometry_var odometry = server.resolveName<Odometry> ("Odometry");

  PositionIDL startPos, current;
  int ticks = 0;
  int niterations = 10;
  if (argc > 1)
    niterations = ACE_OS::atoi (argv[1]);

  cout << "start test" << endl;

  try
    {
      ACE_Sample_History history (niterations);

      ACE_hrtime_t test_start = ACE_OS::gethrtime ();
      for (int i = 0; i < niterations; ++i)
        {
          motion->setLRPower (0, 0);
          ACE_OS::sleep (ACE_Time_Value (1, (int) floor (rand1 () * 100000.)));

          startPos = odometry->getPosition ();
          cout << "start position:" << startPos << endl;

          ACE_hrtime_t start = ACE_OS::gethrtime ();
          motion->setLRPower (10000, 10000);

          // Spin until the odometry reports that the robot actually moved.
          do
            {
              current = odometry->getWaitPosition ();
              ++ticks;
            }
          while (startPos.point.x == current.point.x &&
                 startPos.point.y == current.point.y &&
                 startPos.heading == current.heading);

          ACE_hrtime_t now = ACE_OS::gethrtime ();
          history.sample (now - start);
        }
      motion->setLRPower (0, 0);
      ACE_hrtime_t test_end = ACE_OS::gethrtime ();

      ACE_DEBUG ((LM_DEBUG, "test finished\n"));

      ACE_DEBUG ((LM_DEBUG, "High resolution timer calibration...."));
      ACE_UINT32 gsf = ACE_High_Res_Timer::global_scale_factor ();
      ACE_DEBUG ((LM_DEBUG, "done\n"));

      history.dump_samples ("HISTORY", gsf);

      ACE_Basic_Stats stats;
      history.collect_basic_stats (stats);
      stats.dump_results ("Total", gsf);

      ACE_Throughput_Stats::dump_throughput ("Total", gsf,
                                             test_end - test_start,
                                             stats.samples_count ());

      // data evaluation
      cout << "ticks: " << ticks << endl;
      cout << "end position: " << current << endl;
    }
  catch (const Miro::Exception & e)
    {
      cerr << "Miro exception thrown:" << e << endl;
      return 1;
    }

  return 0;
}
int
ACE_TMAIN (int argc, ACE_TCHAR *argv[])
{
  int priority =
    (ACE_Sched_Params::priority_min (ACE_SCHED_FIFO)
     + ACE_Sched_Params::priority_max (ACE_SCHED_FIFO)) / 2;
  ACE_OS::sched_params (ACE_Sched_Params (ACE_SCHED_FIFO,
                                          priority,
                                          ACE_SCOPE_PROCESS));

  int n = 50000;
  int insertion = 1;

  try
    {
      CORBA::ORB_var orb = CORBA::ORB_init (argc, argv);

      ACE_Get_Opt get_opt (argc, argv, ACE_TEXT("dien:"));
      int opt;
      while ((opt = get_opt ()) != EOF)
        {
          switch (opt)
            {
            case 'd':
              TAO_debug_level++;
              break;
            case 'i':
              insertion = 1;
              break;
            case 'e':
              insertion = 0;
              break;
            case 'n':
              n = ACE_OS::atoi (get_opt.opt_arg ());
              break;
            case '?':
            default:
              ACE_DEBUG ((LM_DEBUG,
                          "Usage: %s "
                          "-d (debug) "
                          "-i (time insertion) "
                          "-e (time extraction) "
                          "-n <num> "
                          "\n",
                          argv[0]));
              return -1;
            }
        }

      CORBA::Boolean result = 0;
      int j;

      {
        CORBA::Object_var obj =
          orb->string_to_object ("corbaloc:iiop:localhost:1234/Foo/Bar");

        Param_Test_var param_test =
          TAO::Narrow_Utils<Param_Test>::unchecked_narrow (obj.in ());

        TAO_Stub *stub = param_test->_stubobj ();
        stub->type_id = CORBA::string_dup ("IDL:Param_Test:1.0");

        ACE_Sample_History history (n);

        ACE_hrtime_t test_start = ACE_OS::gethrtime ();
        for (j = 0; j != n; ++j)
          {
            CORBA::Any any;

            if (insertion == 1)
              {
                // Time only the insertion (<<=); the matching
                // extraction runs outside the sampled region.
                ACE_hrtime_t start = ACE_OS::gethrtime ();
                any <<= param_test.in ();
                ACE_hrtime_t now = ACE_OS::gethrtime ();
                history.sample (now - start);

                Param_Test_ptr o;
                result = any >>= o;
              }
            else
              {
                // Time only the extraction (>>=).
                any <<= param_test.in ();

                Param_Test_ptr o;
                ACE_hrtime_t start = ACE_OS::gethrtime ();
                result = any >>= o;
                ACE_hrtime_t now = ACE_OS::gethrtime ();
                history.sample (now - start);
              }
          }
        ACE_hrtime_t test_end = ACE_OS::gethrtime ();

        if (insertion == 1)
          {
            ACE_DEBUG ((LM_DEBUG, "Objref insertion test finished\n"));
          }
        else
          {
            ACE_DEBUG ((LM_DEBUG, "Objref extraction test finished\n"));
          }

        ACE_DEBUG ((LM_DEBUG, "High resolution timer calibration...."));
        ACE_High_Res_Timer::global_scale_factor_type gsf =
          ACE_High_Res_Timer::global_scale_factor ();
        ACE_DEBUG ((LM_DEBUG, "done\n"));

        ACE_Basic_Stats stats;
        history.collect_basic_stats (stats);
        stats.dump_results (ACE_TEXT("Total"), gsf);

        ACE_Throughput_Stats::dump_throughput (ACE_TEXT("Total"), gsf,
                                               test_end - test_start,
                                               stats.samples_count ());
      }
    }
  // The excerpt was truncated here; closed with the same catch/return
  // pattern the other mains in this section use.
  catch (const CORBA::Exception& ex)
    {
      ex._tao_print_exception ("Exception caught:");
      return 1;
    }
  return 0;
}
void
test_i::end_test (void)
{
  // Record end time.
  this->test_end_ = ACE_OS::gethrtime ();

  if (do_dump_history)
    {
      this->inter_arrival_times_->dump_samples (ACE_TEXT("Inter-arrival times"),
                                                gsf);
    }

  ACE_Basic_Stats stats;
  this->inter_arrival_times_->collect_basic_stats (stats);

  // Raw ticks -> usecs (via gsf) -> msecs.
  ACE_DEBUG ((LM_DEBUG,
              "Max Latency = %6d ",
              stats.max_ / gsf / 1000));

  ACE_DEBUG ((LM_DEBUG,
              "Invocations expected / received / missed / missed %% = %6d / %6d / %6d / %5.2f\n",
              this->iterations_,
              this->number_of_invocations_received_,
              this->iterations_ - this->number_of_invocations_received_,
              (this->iterations_ - this->number_of_invocations_received_) /
              (double) this->iterations_ * 100));

  if (print_missed_invocations)
    {
      ACE_DEBUG ((LM_DEBUG, "\nFollowing invocations were never received:\n"));
      for (CORBA::ULong i = 0; i < this->iterations_; ++i)
        {
          if (this->invocations_received_[i] == 0)
            {
              ACE_DEBUG ((LM_DEBUG, "%d ", i));
            }
        }
      ACE_DEBUG ((LM_DEBUG, "\n"));
    }

  if (TAO_debug_level > 0)
    {
      ACE_DEBUG ((LM_DEBUG,
                  "Session id ends %d\n",
                  this->session_id_));

      stats.dump_results (ACE_TEXT("Inter-arrival times"), gsf);

      ACE_Throughput_Stats::dump_throughput (ACE_TEXT("Inter-arrival times"),
                                             gsf,
                                             this->test_end_ - this->test_start_,
                                             stats.samples_count ());
    }

  this->session_id_ = -1;
  delete this->inter_arrival_times_;
}
void
Worker::print_stats (void)
{
  CORBA::ULong missed_total_deadlines =
    this->missed_start_deadlines_ + this->missed_end_deadlines_;

  CORBA::ULong made_total_deadlines =
    this->iterations_ - missed_total_deadlines;

  ACE_DEBUG ((LM_DEBUG,
              "\n************ Statistics ************\n\n"));

  //
  // Sender-side stats for PACED invocations are not too relevant
  // since we are doing oneway calls.
  //
  if (this->test_type_ == ::Protocols::Sender_Controller::PACED)
    {
      ACE_DEBUG ((LM_DEBUG,
                  "Rate = %d/sec; Iterations = %d; ",
                  this->invocation_rate_,
                  this->iterations_));

      if (this->count_missed_end_deadlines_)
        ACE_DEBUG ((LM_DEBUG,
                    "Deadlines made/missed[start,end]/%% = %d/%d[%d,%d]/%.2f%%; Effective Rate = %.2f\n",
                    made_total_deadlines,
                    missed_total_deadlines,
                    this->missed_start_deadlines_,
                    this->missed_end_deadlines_,
                    made_total_deadlines * 100 / (double) this->iterations_,
                    made_total_deadlines / to_seconds (this->test_end_ - this->test_start_, gsf)));
      else
        ACE_DEBUG ((LM_DEBUG,
                    "Deadlines made/missed/%% = %d/%d/%.2f%%; Effective Rate = %.2f\n",
                    made_total_deadlines,
                    missed_total_deadlines,
                    made_total_deadlines * 100 / (double) this->iterations_,
                    made_total_deadlines / to_seconds (this->test_end_ - this->test_start_, gsf)));

      if (this->print_missed_invocations_)
        {
          ACE_DEBUG ((LM_DEBUG, "\nMissed start invocations are:\n"));
          for (CORBA::ULong j = 0; j < this->missed_start_deadlines_; ++j)
            {
              ACE_DEBUG ((LM_DEBUG, "%d ", this->missed_start_invocations_[j]));
            }
          ACE_DEBUG ((LM_DEBUG, "\n"));

          if (this->count_missed_end_deadlines_)
            {
              ACE_DEBUG ((LM_DEBUG, "\nMissed end invocations are:\n"));
              for (CORBA::ULong j = 0; j < this->missed_end_deadlines_; ++j)
                {
                  ACE_DEBUG ((LM_DEBUG, "%d ", this->missed_end_invocations_[j]));
                }
              ACE_DEBUG ((LM_DEBUG, "\n"));
            }
        }
    }

  // Individual calls are relevant for the PACED and LATENCY tests.
  if (this->test_type_ == ::Protocols::Sender_Controller::PACED ||
      this->test_type_ == ::Protocols::Sender_Controller::LATENCY)
    {
      if (this->do_dump_history_)
        {
          this->history_.dump_samples (ACE_TEXT("HISTORY"), gsf);
        }

      ACE_Basic_Stats stats;
      this->history_.collect_basic_stats (stats);
      stats.dump_results (ACE_TEXT("Total"), gsf);

      ACE_Throughput_Stats::dump_throughput (ACE_TEXT("Total"), gsf,
                                             this->test_end_ - this->test_start_,
                                             this->iterations_);
    }
  else
    {
      // Otherwise report raw bandwidth instead of per-call statistics.
      ACE_hrtime_t elapsed_time = this->test_end_ - this->test_start_;
      double seconds = to_seconds (elapsed_time, gsf);

      ACE_hrtime_t bits = this->iterations_;
      bits *= this->message_size_ * 8;

      ACE_DEBUG ((LM_DEBUG,
                  "%Q bits sent in %5.1f seconds at a rate of %5.2f Mbps\n",
                  bits,
                  seconds,
                  bits / seconds / 1000 / 1000));
    }
}
int
main (int argc, char * argv[])
{
  // Parameters to be passed to the services
  Sparrow::Parameters * parameters = Sparrow::Parameters::instance ();

  // Config file processing
  Miro::ConfigDocument * config = new Miro::ConfigDocument (argc, argv);
  config->setSection ("Sparrow99");
  config->getParameters ("sparrowBoard", *parameters);
  delete config;

#ifdef DEBUG
  cout << "sparrow parameters:" << endl << *parameters << endl;
#endif

  // Initialize server daemon.
  Service service;
  cout << "initialized" << endl;

  Miro::PositionIDL startPos, current;

  service.reactorTask.open (NULL);

  int ticks = 0;
  int niterations = 10;
  if (argc > 1)
    niterations = ACE_OS::atoi (argv[1]);

  cout << "start test" << endl;

  try
    {
      ACE_Sample_History history (niterations);

      ACE_hrtime_t test_start = ACE_OS::gethrtime ();
      for (int i = 0; i < niterations; ++i)
        {
          service.connection.setPower (0, 0);
          ACE_OS::sleep (ACE_Time_Value (1, (int) floor (rand1 () * 100000.)));

          startPos = service.odometryImpl->getPosition ();
          cout << "start position:" << startPos << endl;

          ACE_hrtime_t start = ACE_OS::gethrtime ();
          service.connection.setPower (10000, 10000);

          do
            {
              current = service.odometryImpl->getWaitPosition ();
              ++ticks;
            }
          while (startPos.point.x == current.point.x &&
                 startPos.point.y == current.point.y &&
                 startPos.heading == current.heading);

          ACE_hrtime_t now = ACE_OS::gethrtime ();
          history.sample (now - start);
        }
      service.connection.setPower (0, 0);
      ACE_hrtime_t test_end = ACE_OS::gethrtime ();

      ACE_DEBUG ((LM_DEBUG, "test finished\n"));

      ACE_DEBUG ((LM_DEBUG, "High resolution timer calibration...."));
      ACE_UINT32 gsf = ACE_High_Res_Timer::global_scale_factor ();
      ACE_DEBUG ((LM_DEBUG, "done\n"));

      history.dump_samples ("HISTORY", gsf);

      ACE_Basic_Stats stats;
      history.collect_basic_stats (stats);
      stats.dump_results ("Total", gsf);

      ACE_Throughput_Stats::dump_throughput ("Total", gsf,
                                             test_end - test_start,
                                             stats.samples_count ());

      // data evaluation
      cout << "ticks: " << ticks << endl;
      cout << "end position: " << current << endl;
    }
  catch (const Miro::Exception & e)
    {
      cerr << "Miro exception thrown:" << e << endl;
      return 1;
    }

  service.reactorTask.cancel ();
  return 0;
}
int
ACE_TMAIN (int argc, ACE_TCHAR *argv[])
{
  int priority =
    (ACE_Sched_Params::priority_min (ACE_SCHED_FIFO)
     + ACE_Sched_Params::priority_max (ACE_SCHED_FIFO)) / 2;

  // Enable FIFO scheduling, e.g., RT scheduling class on Solaris.
  if (ACE_OS::sched_params (ACE_Sched_Params (ACE_SCHED_FIFO,
                                              priority,
                                              ACE_SCOPE_PROCESS)) != 0)
    {
      if (ACE_OS::last_error () == EPERM)
        {
          ACE_DEBUG ((LM_DEBUG,
                      "client (%P|%t): user is not superuser, "
                      "test runs in time-shared class\n"));
        }
      else
        ACE_ERROR ((LM_ERROR,
                    "client (%P|%t): sched_params failed\n"));
    }

  try
    {
      CORBA::ORB_var orb = CORBA::ORB_init (argc, argv);

      if (parse_args (argc, argv) != 0)
        return 1;

      CORBA::Object_var object = orb->string_to_object (ior);

      Test::Roundtrip_var roundtrip =
        Test::Roundtrip::_narrow (object.in ());

      if (CORBA::is_nil (roundtrip.in ()))
        {
          ACE_ERROR_RETURN ((LM_ERROR,
                             "Nil Test::Roundtrip reference <%s>\n",
                             ior),
                            1);
        }

      /// Begin the test
      ACE_DEBUG ((LM_DEBUG, "Starting threads\n"));

      Client_Task task0 (data_type, sz, roundtrip.in (), niterations);
      Client_Task task1 (data_type, sz, roundtrip.in (), niterations);
      Client_Task task2 (data_type, sz, roundtrip.in (), niterations);
      Client_Task task3 (data_type, sz, roundtrip.in (), niterations);

      ACE_hrtime_t test_start = ACE_OS::gethrtime ();

      task0.activate (THR_NEW_LWP | THR_JOINABLE);
      task1.activate (THR_NEW_LWP | THR_JOINABLE);
      task2.activate (THR_NEW_LWP | THR_JOINABLE);
      task3.activate (THR_NEW_LWP | THR_JOINABLE);

      // All four tasks share the default thread manager, so waiting on
      // task0's manager joins every thread.
      task0.thr_mgr ()->wait ();

      ACE_hrtime_t test_end = ACE_OS::gethrtime ();
      ACE_DEBUG ((LM_DEBUG, "Threads finished\n"));

      ACE_DEBUG ((LM_DEBUG, "High resolution timer calibration...."));
      ACE_High_Res_Timer::global_scale_factor_type gsf =
        ACE_High_Res_Timer::global_scale_factor ();
      ACE_DEBUG ((LM_DEBUG, "done\n"));

      ACE_Basic_Stats totals;
      task0.accumulate_and_dump (totals, ACE_TEXT("Task[0]"), gsf);
      task1.accumulate_and_dump (totals, ACE_TEXT("Task[1]"), gsf);
      task2.accumulate_and_dump (totals, ACE_TEXT("Task[2]"), gsf);
      task3.accumulate_and_dump (totals, ACE_TEXT("Task[3]"), gsf);

      totals.dump_results (ACE_TEXT("Total"), gsf);

      ACE_Throughput_Stats::dump_throughput (ACE_TEXT("Total"), gsf,
                                             test_end - test_start,
                                             totals.samples_count ());

      if (do_shutdown)
        {
          roundtrip->shutdown ();
        }
    }
  catch (const CORBA::Exception& ex)
    {
      ex._tao_print_exception ("Exception caught:");
      return 1;
    }
  return 0;
}
int
ACE_TMAIN (int argc, ACE_TCHAR *argv[])
{
  int priority =
    (ACE_Sched_Params::priority_min (ACE_SCHED_FIFO)
     + ACE_Sched_Params::priority_max (ACE_SCHED_FIFO)) / 2;

  // Enable FIFO scheduling, e.g., RT scheduling class on Solaris.
  if (ACE_OS::sched_params (ACE_Sched_Params (ACE_SCHED_FIFO,
                                              priority,
                                              ACE_SCOPE_PROCESS)) != 0)
    {
      if (ACE_OS::last_error () == EPERM)
        {
          ACE_DEBUG ((LM_DEBUG,
                      "client (%P|%t): user is not superuser, "
                      "test runs in time-shared class\n"));
        }
      else
        ACE_ERROR ((LM_ERROR,
                    "client (%P|%t): sched_params failed\n"));
    }

  try
    {
      CORBA::ORB_var orb = CORBA::ORB_init (argc, argv);

      if (parse_args (argc, argv) != 0)
        return 1;

      CORBA::Object_var object = orb->string_to_object (ior);

      Test::Roundtrip_var roundtrip =
        Test::Roundtrip::_narrow (object.in ());

      if (CORBA::is_nil (roundtrip.in ()))
        {
          ACE_ERROR_RETURN ((LM_ERROR,
                             "Nil Test::Roundtrip reference <%s>\n",
                             ior),
                            1);
        }

      // Warm up the connection before sampling.
      for (int j = 0; j < 100; ++j)
        {
          ACE_hrtime_t start = 0;
          (void) roundtrip->test_method (start);
        }

      ACE_Sample_History history (niterations);

      ACE_hrtime_t test_start = ACE_OS::gethrtime ();
      for (int i = 0; i < niterations; ++i)
        {
          ACE_hrtime_t start = ACE_OS::gethrtime ();

          (void) roundtrip->test_method (start);

          ACE_hrtime_t now = ACE_OS::gethrtime ();
          history.sample (now - start);
        }
      ACE_hrtime_t test_end = ACE_OS::gethrtime ();

      ACE_DEBUG ((LM_DEBUG, "test finished\n"));

      ACE_DEBUG ((LM_DEBUG, "High resolution timer calibration...."));
      ACE_High_Res_Timer::global_scale_factor_type gsf =
        ACE_High_Res_Timer::global_scale_factor ();
      ACE_DEBUG ((LM_DEBUG, "done\n"));

      if (do_dump_history)
        {
          history.dump_samples (ACE_TEXT("HISTORY"), gsf);
        }

      ACE_Basic_Stats stats;
      history.collect_basic_stats (stats);
      stats.dump_results (ACE_TEXT("Total"), gsf);

      ACE_Throughput_Stats::dump_throughput (ACE_TEXT("Total"), gsf,
                                             test_end - test_start,
                                             stats.samples_count ());

      if (do_shutdown)
        {
          roundtrip->shutdown ();
        }
    }
  catch (const CORBA::Exception& ex)
    {
      ex._tao_print_exception ("Exception caught:");
      return 1;
    }
  return 0;
}
void
MyImpl::RoundTripClient_exec_i::start ()
{
  ///////////// Start test /////////////////////////////////////////
  // Let's try to start tests from here...
  Benchmark::LatencyTest_var lt = context_->get_connection_latency ();

  ACE_DEBUG ((LM_INFO,
              "MyImpl::RoundTripClient_exec::start obtain obj ref\n"));

  CORBA::Long test_data = 0L;

  // Warm up the system
  int i = 0;
  for (i = 0; i < 100; i++)
    lt->makeCall (test_data);

  ACE_DEBUG ((LM_DEBUG, "MyImpl::start: warmed up the system\n"));

  ACE_Sample_History history (niterations);

  ACE_hrtime_t test_start = ACE_OS::gethrtime ();
  for (i = 0; i < niterations; ++i)
    {
      ACE_hrtime_t start = ACE_OS::gethrtime ();

      // Test value to be sent to the server
      long test = 0;
      (void) lt->makeCall (test);

      ACE_hrtime_t now = ACE_OS::gethrtime ();
      history.sample (now - start);
    }
  ACE_hrtime_t test_end = ACE_OS::gethrtime ();

  ACE_DEBUG ((LM_DEBUG, "test finished\n"));

  ACE_DEBUG ((LM_DEBUG, "High resolution timer calibration...."));
  ACE_High_Res_Timer::global_scale_factor_type gsf =
    ACE_High_Res_Timer::global_scale_factor ();
  ACE_DEBUG ((LM_DEBUG, "done\n"));

  ACE_Env_Value<int> envar (ACE_TEXT("CIAO_DUMP_SAMPLE_HISTORY"), 0);
  if (envar != 0)
    {
      history.dump_samples (ACE_TEXT("HISTORY"), gsf);
    }

  ACE_Basic_Stats stats;
  history.collect_basic_stats (stats);
  stats.dump_results (ACE_TEXT("Total"), gsf);

  ACE_Throughput_Stats::dump_throughput (ACE_TEXT("Total"), gsf,
                                         test_end - test_start,
                                         stats.samples_count ());
}