/// Slot called when the user drags a selection rectangle on the 2-D density plot.
/// (x, y) is the anchor corner; width/height may be negative depending on drag
/// direction, hence the qMin/qMax normalization below. Updates the FRET
/// parameter ranges, the on-plot selection rectangle, and the 1-D histograms.
void ALEXHistogramsWidget::selectRangeRect(double x, double y, double width, double height)
{
    qDebug()<<"selectRangeRect x="<<x<<" y="<<y<<" width="<<width<<" height="<<height;
    // Store the normalized selection as the analysis ranges.
    // S/P presumably map to the stoichiometry (y) and proximity ratio (x) axes — TODO confirm.
    ad->FRETparams.MinS=qMin(y,y+height);
    ad->FRETparams.MaxS=qMax(y,y+height);
    ad->FRETparams.MinP=qMin(x,x+width);
    ad->FRETparams.MaxP=qMax(x,x+width);
    // Move the visible selection rectangle and refresh its fill alpha from the UI.
    if (pltSelectionRect) {
        pltSelectionRect->set_bottomleftrectangle(x,y,width,height);
//        pltSelectionRect->set_alpha(ui->spinBoxAlpha->value());
        QColor fillColor=pltSelectionRect->get_fillColor();
        fillColor.setAlpha(ui->spinBoxAlpha->value());
        pltSelectionRect->set_fillColor(fillColor);
        ui->widPlotDensity->update_plot();
    }
//    qDebug()<<ad->FRETparams.toString();
    writeStatistics();
    plotHistogramPR();
    plotHistogramS();
    // For plot mode 1, replace the vertical range overlay marking the selected
    // P interval (old graph is deleted first, then a fresh one is added).
    if(ui->comboBoxPlot->currentIndex()==1) {
        ui->widPlot->get_plotter()->deleteGraph(vRangeGraphTgTr);
        JKQTPverticalRange* gr = new JKQTPverticalRange(ui->widPlot->get_plotter());
        gr->set_rangeMin(ad->FRETparams.MinP);
        gr->set_rangeMax(ad->FRETparams.MaxP);
        gr->set_invertedRange(true);    // invertedRange: presumably shades outside the interval — verify against JKQTP docs
        QColor col = QColor(Qt::lightGray);
        col.setAlpha(ui->spinBoxAlpha->value());
        gr->set_fillColor(col);
        gr->set_style(Qt::NoPen);
        gr->set_plotCenterLine(false);
        vRangeGraphTgTr = ui->widPlot->addGraph(gr);    // remember handle for the next deleteGraph()
        ui->widPlot->update();
    }
}
/// Slot called with an explicit selection box (two corners per axis).
/// Normalizes the endpoints into the FRET parameter ranges and refreshes
/// the statistics readout and both 1-D histograms.
void ALEXHistogramsWidget::selectRange(double xmin, double xmax, double ymin, double ymax)
{
    qDebug("selectRange");

    // The caller may pass the endpoints in either order; sort each pair.
    ad->FRETparams.MinP = qMin(xmin, xmax);
    ad->FRETparams.MaxP = qMax(xmin, xmax);
    ad->FRETparams.MinS = qMin(ymin, ymax);
    ad->FRETparams.MaxS = qMax(ymin, ymax);

    // Refresh everything that depends on the selected ranges.
    writeStatistics();
    plotHistogramPR();
    plotHistogramS();
}
void ALEXHistogramsWidget::on_pushButtonEval_clicked() { qDebug("ALEXHistogramsWidget::evaluate"); if(ad->isEmpty()||ad->isEmptyBurst()) {AlexEvalLog::warning("no data"); return;} QApplication::setOverrideCursor(QCursor(Qt::WaitCursor)); getFRETparameters(); int res=analysisFRET(ad->burstVectorDual,ad->FRETparams); if (res) AlexEvalLog::warning("Error in FRET analysis"); writeStatistics(); drawPlots(); QApplication::restoreOverrideCursor(); }
/// Emits the tail of the JSON document: closes the data array, then writes
/// the optional totals/extremes sections, the "rows" counter, the
/// rows_before_limit information and (if enabled) query statistics, and
/// finally the closing brace. The exact byte sequence is part of the output
/// format, so the write order here must not change.
void JSONRowOutputStream::writeSuffix()
{
    writeChar('\n', *ostr);
    writeCString("\t]", *ostr);          // close the data array
    writeTotals();                       // presumably a no-op when no totals are present — TODO confirm
    writeExtremes();                     // likewise for extremes
    writeCString(",\n\n", *ostr);
    writeCString("\t\"rows\": ", *ostr);
    writeIntText(row_count, *ostr);
    writeRowsBeforeLimitAtLeast();
    if (write_statistics)
        writeStatistics();
    writeChar('\n', *ostr);
    writeCString("}\n", *ostr);
    ostr->next();                        // flush the underlying buffer
}
void CSoundCardRepeaterTXRXThread::getStatistics() { if (m_rptState == DSRS_VALID) m_radioCount++; if (m_rptState == DSRS_NETWORK) m_networkCount++; if (m_tx) m_transmitCount++; m_timeCount++; if ((m_timeCount % DSTAR_TICKS_PER_SEC) == 0U) { wxDateTime dateTime = wxDateTime::Now(); unsigned int hour = dateTime.GetHour(); if (hour != m_lastHour) { writeStatistics(); m_lastHour = hour; } } }
/// Main loop of the sound card TX/RX thread. Waits until the thread has been
/// fully configured, then repeatedly: receives radio and network data, runs
/// the repeater state machine, polls the network, drives the heartbeat and
/// active outputs, honours the external disable switch, transmits, and
/// gathers statistics. On exit it writes final statistics and tears down all
/// owned resources.
void CSoundCardRepeaterTXRXThread::run()
{
    // Wait here until we have the essentials to run
    while (!m_killed && (m_soundcard == NULL || m_protocolHandler == NULL || m_rptCallsign.IsEmpty() || m_rptCallsign.IsSameAs(wxT(" ")) || m_controller == NULL))
        ::wxMilliSleep(500UL);        // 1/2 sec

    if (m_killed)
        return;

    m_stopped = false;

    m_controller->setActive(false);
    m_controller->setRadioTransmit(false);

    m_pollTimer.start();

    // Remember the current hour so getStatistics() can detect hour rollovers.
    wxDateTime dateTime = wxDateTime::Now();
    m_lastHour = dateTime.GetHour();

    m_inBuffer.clear();

    wxLogMessage(wxT("Starting the sound card transmitter and receiver thread"));

    unsigned int count = 0U;

    wxStopWatch timer;

    while (!m_killed) {
        timer.Start();

        // Process the incoming D-Star transmission
        receiveRadio();

        // Process network traffic
        receiveNetwork();

        repeaterStateMachine();

        // Send the network poll if needed and restart the timer
        if (m_pollTimer.hasExpired()) {
#if defined(__WINDOWS__)
            m_protocolHandler->writePoll(wxT("win_sound-") + VERSION);
#else
            m_protocolHandler->writePoll(wxT("linux_sound-") + VERSION);
#endif
            m_pollTimer.reset();
        }

        // Clock the heartbeat output every one second
        // (50 loop iterations -- presumably 20 ms per tick; TODO confirm)
        count++;
        if (count == 50U) {
            m_controller->setHeartbeat();
            count = 0U;
        }

        // Set the output state
        if (m_tx || (m_activeHangTimer.isRunning() && !m_activeHangTimer.hasExpired())) {
            m_controller->setActive(true);
        } else {
            m_controller->setActive(false);
            m_activeHangTimer.stop();
        }

        // Check the shutdown state, state changes are done here to bypass the state machine which is
        // frozen when m_disable is asserted
        m_disable = m_controller->getDisable();
        if (m_disable) {
            if (m_rptState != DSRS_SHUTDOWN) {
                // Entering shutdown: stop all timers, drop buffered data and
                // force both outputs off.
                m_watchdogTimer.stop();
                m_activeHangTimer.stop();
                m_hangTimer.stop();
                m_networkBuffer.clear();
                m_bitBuffer.clear();
                m_networkRun = 0U;
                m_networkStarted = false;
                m_controller->setActive(false);
                m_controller->setRadioTransmit(false);
                m_rptState = DSRS_SHUTDOWN;
            }
        } else {
            if (m_rptState == DSRS_SHUTDOWN) {
                // Leaving shutdown: return to listening and reset the network link.
                m_watchdogTimer.stop();
                m_hangTimer.stop();
                m_rptState = DSRS_LISTENING;
                m_protocolHandler->reset();
            }
        }

        // Send the output data
        if (m_networkStarted)
            transmitNetwork();
        else if (m_networkRun >= NETWORK_RUN_FRAME_COUNT)
            transmitNetwork();
        else
            transmit();

        getStatistics();

        // Advance all software clocks by the measured loop duration.
        unsigned int ms = timer.Time();
        clock(ms);
    }

    writeStatistics();

    wxLogMessage(wxT("Stopping the sound card transmitter and receiver thread"));

    m_controller->setActive(false);
    m_controller->setRadioTransmit(false);
    m_controller->close();
    delete m_controller;

    m_soundcard->close();
    delete m_soundcard;

    delete m_audioDelay;
    delete m_pttDelay;

    if (m_reader != NULL) {
        m_reader->close();
        delete m_reader;
    }

    m_protocolHandler->close();
    delete m_protocolHandler;

#if defined(TX_TO_WAV_FILE)
    if (m_writer != NULL) {
        m_writer->close();
        delete m_writer;
    }
#endif
}
/**
 * trains the model using the patterns returned by the given query or the default query if none is given
 *
 * @param optimizer  the Shark optimizer to drive training; if it is an
 *                   SVM_Optimizer the iterations argument is ignored and a
 *                   single SVM optimization is performed instead
 * @param errFct     error function used for training feedback and the returned error
 * @param iterations number of optimization passes; 0 selects the early-stopping path
 * @return           the error computed by errFct after training (for the
 *                   early-stopping path this is the validation error)
 * @throws MachineLearningException on feature/model dimension mismatch, SQL
 *                   failures, Shark errors, or any other std::exception
 */
double Trainer::train(Optimizer& optimizer, ErrorFunction& errFct, size_t iterations) throw(MachineLearningException) {
    double error = 0;
    if(TRAINING_OUTPUT)
        writeHeader("Normal trainer", optimizer, errFct, iterations);
//    std::cout << "Query: \n" << query << std::endl;
    try {
        // Iterative models need the feature count to match the input layer.
        if(nFeatures() != model.getInputDimension() && model.iterativeTraining()) {
            std::cerr << "Number of features: " << nFeatures() << "\nModel input size: " << model.getInputDimension() << std::endl;
            throw MachineLearningException("Number of selected features is not equal to the model's input size");
        }
        Array<double> in, target;
        Array<double> out(model.getOutputDimension());
        size_t nRows = readDatabase(in, target);
        // do the actual training
        optimizer.init(model.getModel());
        // check if we are dealing with an svm, in this case the iterations argument is ignored
        if(SVM_Optimizer* svmOpt = dynamic_cast<SVM_Optimizer*>(&optimizer)){
            // Dispatch on the concrete SVM wrapper type; exactly one of these
            // casts is expected to succeed (asserted below).
            MyMultiClassSVM* csvm = dynamic_cast<MyMultiClassSVM*>(&model);
            MyEpsilon_SVM* esvm = dynamic_cast<MyEpsilon_SVM*>(&model);
            if(csvm) {
                optimizeDistorted(*svmOpt, csvm->getSVM(), errFct, in, target);
            } else if(esvm) {
                optimizeDistorted(*svmOpt, esvm->getSVM(), errFct, in, target);
            }
            assert_true(esvm || csvm) << "Trying to call SVM_Optimizer with a non Epsilon- or MultiClassSVM model";
            error = errFct.error(model.getModel(), in, target);
        } else if(iterations != 0) {
            // Fixed number of optimization passes over the full data set.
            for(size_t i = 0; i < iterations; ++i) {
                optimizeDistorted(optimizer, model.getModel(), errFct, in, target);
                if(TRAINING_OUTPUT)
                    writeStatistics(i, in, target, errFct, -1.0);
            }
            error = errFct.error(model.getModel(), in, target);
        } else
            // Early stopping; batch count scales with data size (>= 1 batch).
//            error = this->earlyStopping(optimizer, errFct, in, target, 10);
            error = this->myEarlyStopping(optimizer, errFct, in, target, 10, std::max(static_cast<size_t>(1),nRows/1000));
        // Evaluate the trained model on the training set and report the
        // misclassification rate.
        size_t misClass = 0;
        for(size_t i = 0; i < in.dim(0); ++i ) {
            model.model(in.subarr(i,i), out);
//std::cout << in.subarr(i,i) << round(out(0)) << std::endl;
//std::cout << oneOfNtoIdx(target.subarr(i,i)) << " - " << oneOfNtoIdx(out) << std::endl;
            if(model.usesOneOfNCoding()) {
                if(oneOfNtoIdx(target.subarr(i,i)) != oneOfNtoIdx(out))
                    ++misClass;
            } else if(target.subarr(i,i)(0) != round(out(0)))
                ++misClass;
        }
        LOG(INFO) << "Missclassification rate: " << (double(misClass)/in.dim(0)) * 100.0 << "%\n";
    } catch(Kompex::SQLiteException& sqle) {
        // Translate third-party exceptions into the module's own type.
        const std::string err = "\nSQL query for training data failed\n" ;
        LOG(ERROR) << err << std::endl;
        sqle.Show();
        throw ml::MachineLearningException(err);
    } catch(SharkException& se) {
        LOG(ERROR) << se.what() << std::endl;
        throw ml::MachineLearningException("Shark error");
    }catch (std::exception& exception) {
        const std::string err = "\nQuery for data failed\n" ;
        LOG(ERROR) << err << exception.what() << std::endl;
        throw ml::MachineLearningException(err);
    }
    return error;
}
/**
 * Early-stopping training on shuffled mini-batches.
 *
 * Splits off validationSize percent of the patterns as a fixed validation
 * set, partitions the remainder into nBatches batches (the first
 * nTrain%nBatches batches take one extra pattern), and trains epoch by
 * epoch in a freshly shuffled batch order. Training stops when the
 * EarlyStopping criteria (GL/TP/PQ/UP) fire, or after 10000 epochs.
 *
 * @param optimizer       optimizer driving each batch update
 * @param errFct          error function for both training and validation error
 * @param in              input patterns (one row per pattern)
 * @param target          target values, row-aligned with in
 * @param validationSize  percentage (0-100) of patterns used for validation
 * @param nBatches        number of training batches per epoch
 * @return                the validation error of the last completed epoch
 */
double Trainer::myEarlyStopping(Optimizer& optimizer, ErrorFunction& errFct, Array<double>& in, Array<double>& target, size_t validationSize, size_t nBatches) {
    size_t n = in.dim(0); // the number of training patterns
    // validationSize is interpreted as a percentage of all patterns
    size_t nVal = double(n) / 100 * validationSize;
    size_t nTrain = n - nVal;
    // generate a vector containing the indices for all training patterns
    std::vector<size_t> trainIndices(n);
    for(size_t i = 0; i < n; ++i)
        trainIndices[i] = i;
    std::random_shuffle(trainIndices.begin(), trainIndices.end());
    // copy validation patterns to a new array since they can be used always in the same order
    Array<double> valData, valTarget;
    for(size_t i = 0; i < nVal; ++i) {
        valData.append_rows(in.subarr(trainIndices[i],trainIndices[i])[0]);
        valTarget.append_rows(target.subarr(trainIndices[i], trainIndices[i])[0]);
    }
    // create one Array for each batch
    size_t batchsize = nTrain/nBatches;
    size_t bulge = nTrain%nBatches; // the first `bulge' batches take one extra pattern
    std::vector<Array<double>> trainBatchesData, trainBatchesTarget;
    trainBatchesData.reserve(nBatches);
    trainBatchesTarget.reserve(nBatches);
    size_t cnt = nVal; // skip past the indices already consumed by the validation set
    for(size_t i = 0; i < nBatches; ++i) {
        // seed the batch with its first pattern, then append the rest
        trainBatchesData.push_back(in.subarr(trainIndices[cnt],trainIndices[cnt]));
        trainBatchesTarget.push_back(target.subarr(trainIndices[cnt], trainIndices[cnt]));
        ++cnt;
        for(size_t j = 1; j < batchsize + ((i < bulge) ? 1 : 0); ++j) {
            trainBatchesData[i].append_rows(in.subarr(trainIndices[cnt],trainIndices[cnt])[0]);
            trainBatchesTarget[i].append_rows(target.subarr(trainIndices[cnt], trainIndices[cnt])[0]);
            ++cnt;
        }
    }
    // NOTE: trainIndices is repurposed here — from now on it enumerates
    // batches (0..nBatches-1), not individual patterns.
    trainIndices.resize(nBatches);
    for(size_t i = 0; i < nBatches; ++i)
        trainIndices[i] = i;
//    double err = DBL_MAX;
//    size_t cnt = 0;
    size_t striplen = 5;
//    MyModel* bestModel;
    EarlyStopping estop(striplen);//, worsen(1);
    double trainErr = 0, valErr = 0;
    int epoch = 0;
    for(; epoch < 10000; ++epoch) {
        // permute training data
        std::random_shuffle(trainIndices.begin(), trainIndices.end());
        trainErr = 0;
        //perform online training
        for(size_t i = 0; i < nBatches; ++i) {
            optimizeDistorted(optimizer, model.getModel(), errFct, trainBatchesData[trainIndices[i]], trainBatchesTarget[trainIndices[i]]);
            trainErr += errFct.error(model.getModel(), trainBatchesData[trainIndices[i]], trainBatchesTarget[trainIndices[i]]);
        }
        trainErr /= nBatches; // average per-batch training error
//        trainErr = errFct.error(model, in, target);
        valErr = errFct.error(model.getModel(), valData, valTarget);
//        std::cout << epoch << ": " << trainErr << " - " << valErr << std::endl;
        /* implement rollback only if needed
        worsen.update(trainErr, valErr);
        if(!worsen.one_of_all( 1.0, 1.0, 1.0, 3)) {
            Mode;
        }
        */
        estop.update(trainErr, valErr);
        if(TRAINING_OUTPUT)
            writeStatistics(epoch, in, target, errFct, valErr);
//        std::cout << "GL " << estop.GL(12.0) << "\nTP " << estop.TP(0.5) << "\nPQ " << estop.PQ(15.0) << "\nUP " << estop.UP(5) << std::endl;
        // Stop as soon as any of the configured stopping criteria triggers.
        if(estop.one_of_all(GL, TP, PQ, UP)) {
            LOG(INFO) << "Early stopping after " << epoch << " iterations\n";
            break;
        }
    }
    if(epoch == 10000) {
        LOG(INFO) << "Unstopped!\n";
    }
    LOG(INFO) << "Train error " << trainErr << std::endl;
    return valErr;
}