// Metropolis-Hastings move: propose rescaling the whole tree (all node times
// and recombination edges) by a log-uniform factor, jointly rescaling rho and
// theta so the per-branch expected event counts stay comparable.
// Returns 1 if the proposal is accepted, 0 if it is rejected (state restored).
int MoveScaleTree::move() {
    numcalls++;
    // Propose to change all nodes and recedges by a scaled amount
    RecTree * rectree=param->getRecTree();
    // Prior of the current state (tree prior + rho prior + theta prior).
    double lprior=rectree->prior(param)+param->logPriorOfRho()+param->logPriorOfTheta();
    // update the tree
    // logscale is uniform on [-ScaleTreeSize, +ScaleTreeSize], so the
    // multiplicative factor 'scale' is log-uniform around 1.
    double logscale=param->getScaleTreeSize() *(gsl_rng_uniform(rng) * 2.0 -1.0);
    double scale =exp(logscale);
    dlog(1)<<"Proposing to scale tree TMRCA by "<<scale<<"...";
    // Apply the proposal: stretch the tree, shrink the rates by the same factor.
    scaleTree(scale);
    param->setRho(param->getRho()/scale);
    param->setTheta(param->getTheta()/scale);
    // Prior of the proposed state.
    double newlprior=rectree->prior(param) + param->logPriorOfRho() + param->logPriorOfTheta();
    // Accept with probability min(1, exp(newlprior - lprior + J)), where
    // J = (2*numRecEdge + N - 3)*logscale is the log-Jacobian of scaling the
    // edge endpoints and internal node times (the likelihood itself is
    // invariant under this joint rescaling, so it cancels).
    if(log(gsl_rng_uniform(rng))>newlprior-lprior+(2.0*rectree->numRecEdge()+rectree->getN()-3.0)*logscale) {
        dlog(1)<<" Rejected!"<<endl;
        // Rejected: undo the proposal exactly (inverse scale on all three).
        scaleTree(1.0/scale);
        param->setRho(param->getRho()*scale);
        param->setTheta(param->getTheta()*scale);
#if defined DEBUG
        //test its still ok
        // Sanity check that the revert restored every per-site log-likelihood.
        // NOTE(review): 'store' and 'll' are not declared in this visible span;
        // presumably they are captured earlier under the same DEBUG guard —
        // confirm against the full file.
        int tmp=0;
        for (int i=0; i<param->getData()->getL(); i++)
            if(fround(param->getLLsite(i),5)!=fround(store[i],5)) {
                tmp=1;
                cout<<"Site "<<i<<" has ll before "<<store[i]<<" and after "<<param->getLLsite(i)<<" ";
                // List every recombination edge affecting the mismatched site.
                for (int j=0; j<rectree->numRecEdge(); j++) {
                    if (rectree->getRecEdge(j)->affectsSite(i)) cout<<j<<" ";
                }
                cout<<endl;
            }
        if(fround(ll,5)!=fround(param->getLL(),5)) {
            cout<<"Total ll before "<<ll<<" and after "<<param->getLL()<<endl;
        }
        // testTree throws a C-string message on inconsistency.
        try {
            param->testTree();
        } catch(char * x) {
            cout<<x<<endl<<"Movescaletree restore: broke the log liks"<<endl;
            exit(1);
        }
        if(tmp==1) {
            cerr<<"Problem replacing after move"<<endl;
            throw("Move not reversed correctly");
        }
#endif
        return(0);
    }
    else dlog(1)<<" Accepted!"<<endl;// accept the modified tree
    numaccept++;
    return(1);
}
void TreeCanvas::zoomToFit(void) { QMutexLocker locker(&layoutMutex); if (root != NULL) { BoundingBox bb; bb = root->getBoundingBox(); QWidget* p = parentWidget(); if (p) { double newXScale = static_cast<double>(p->width()) / (bb.right - bb.left + Layout::extent); double newYScale = static_cast<double>(p->height()) / (root->getShape()->depth() * Layout::dist_y + 2*Layout::extent); int scale0 = static_cast<int>(std::min(newXScale, newYScale)*100); if (scale0<LayoutConfig::minScale) scale0 = LayoutConfig::minScale; if (scale0>LayoutConfig::maxAutoZoomScale) scale0 = LayoutConfig::maxAutoZoomScale; if (!smoothScrollAndZoom) { scaleTree(scale0); } else { zoomTimeLine.stop(); int zoomCurrent = static_cast<int>(scale*100); int targetZoom = scale0; targetZoom = std::min(std::max(targetZoom, LayoutConfig::minScale), LayoutConfig::maxAutoZoomScale); zoomTimeLine.setFrameRange(zoomCurrent,targetZoom); zoomTimeLine.start(); } } } }
// Handle mouse-wheel input: Shift+vertical-wheel zooms the tree around the
// cursor position (unless auto-zoom is active); any wheel event without Shift
// is ignored so it propagates to the scroll area.
void TreeCanvas::wheelEvent(QWheelEvent* event) {
  const bool shiftHeld = (event->modifiers() & Qt::ShiftModifier) != 0;
  if (!shiftHeld) {
    event->ignore();
    return;
  }
  event->accept();
  if (autoZoom || event->orientation() != Qt::Vertical)
    return;
  // One wheel notch (delta 120) changes the zoom by 30 percentage points.
  const double zoomStep = ceil(static_cast<double>(event->delta())/4.0);
  scaleTree(scale*100 + zoomStep, event->x(), event->y());
}
// React to the one-shot "layout finished" timer: apply (or animate toward)
// the pending target scale, repaint, and disarm the timer. Other timers are
// ignored.
void TreeCanvas::timerEvent(QTimerEvent* e) {
  if (e->timerId() != layoutDoneTimerId)
    return;
  if (!smoothScrollAndZoom) {
    scaleTree(targetScale);
  } else {
    // Animate from the current zoom percentage to the clamped target.
    zoomTimeLine.stop();
    const int fromZoom = static_cast<int>(scale*100);
    int toZoom = targetScale;
    toZoom = std::max(toZoom, LayoutConfig::minScale);
    toZoom = std::min(toZoom, LayoutConfig::maxAutoZoomScale);
    zoomTimeLine.setFrameRange(fromZoom, toZoom);
    zoomTimeLine.start();
  }
  QWidget::update();
  killTimer(layoutDoneTimerId);
  layoutDoneTimerId = 0;
}
// After the tree at 'treeidx' has been trained, re-evaluate it on every
// training sample, update the per-sample boosting weights according to the
// configured boosting type (Discrete/Real/Gentle AdaBoost or LogitBoost),
// renormalize them, and rebuild 'sidx' with the samples that survive weight
// trimming (those whose weight is at least the trim threshold).
void updateWeightsAndTrim( int treeidx, vector<int>& sidx )
{
    int i, n = (int)w->sidx.size();
    int nvars = (int)varIdx.size();
    double sumw = 0., C = 1.;
    // One scratch buffer holds n doubles (per-sample predictions) followed by
    // nvars floats (the current sample's feature vector).
    cv::AutoBuffer<double> buf(n + nvars);
    double* result = buf.data();
    float* sbuf = (float*)(result + n);
    Mat sample(1, nvars, CV_32F, sbuf);
    // Discrete AdaBoost votes; the other flavors sum raw tree outputs.
    int predictFlags = bparams.boostType == Boost::DISCRETE ? (PREDICT_MAX_VOTE | RAW_OUTPUT) : PREDICT_SUM;
    predictFlags |= COMPRESSED_INPUT;
    // Evaluate only the newly trained tree on every working-set sample.
    for( i = 0; i < n; i++ )
    {
        w->data->getSample(varIdx, w->sidx[i], sbuf );
        result[i] = predictTrees(Range(treeidx, treeidx+1), sample, predictFlags);
    }

    // now update weights and other parameters for each type of boosting
    if( bparams.boostType == Boost::DISCRETE )
    {
        // Discrete AdaBoost:
        //   weak_eval[i] (=f(x_i)) is in {-1,1}
        //   err = sum(w_i*(f(x_i) != y_i))/sum(w_i)
        //   C = log((1-err)/err)
        //   w_i *= exp(C*(f(x_i) != y_i))
        double err = 0.;
        for( i = 0; i < n; i++ )
        {
            int si = w->sidx[i];
            double wval = w->sample_weights[si];
            sumw += wval;
            // Accumulate the weighted misclassification rate.
            err += wval*(result[i] != w->cat_responses[si]);
        }
        if( sumw != 0 )
            err /= sumw;
        C = -log_ratio( err );
        double scale = std::exp(C);

        // Up-weight the misclassified samples by exp(C).
        sumw = 0;
        for( i = 0; i < n; i++ )
        {
            int si = w->sidx[i];
            double wval = w->sample_weights[si];
            if( result[i] != w->cat_responses[si] )
                wval *= scale;
            sumw += wval;
            w->sample_weights[si] = wval;
        }

        // Bake the tree's voting coefficient C into its leaf values.
        scaleTree(roots[treeidx], C);
    }
    else if( bparams.boostType == Boost::REAL || bparams.boostType == Boost::GENTLE )
    {
        // Real AdaBoost:
        //   weak_eval[i] = f(x_i) = 0.5*log(p(x_i)/(1-p(x_i))), p(x_i)=P(y=1|x_i)
        //   w_i *= exp(-y_i*f(x_i))

        // Gentle AdaBoost:
        //   weak_eval[i] = f(x_i) in [-1,1]
        //   w_i *= exp(-y_i*f(x_i))
        for( i = 0; i < n; i++ )
        {
            int si = w->sidx[i];
            // Responses must be exactly -1 or +1 here.
            CV_Assert( std::abs(w->ord_responses[si]) == 1 );
            double wval = w->sample_weights[si]*std::exp(-result[i]*w->ord_responses[si]);
            sumw += wval;
            w->sample_weights[si] = wval;
        }
    }
    else if( bparams.boostType == Boost::LOGIT )
    {
        // LogitBoost:
        //   weak_eval[i] = f(x_i) in [-z_max,z_max]
        //   sum_response = F(x_i).
        //   F(x_i) += 0.5*f(x_i)
        //   p(x_i) = exp(F(x_i))/(exp(F(x_i)) + exp(-F(x_i))=1/(1+exp(-2*F(x_i)))
        //   reuse weak_eval: weak_eval[i] <- p(x_i)
        //   w_i = p(x_i)*(1 - p(x_i))
        //   z_i = ((y_i+1)/2 - p(x_i))/(p(x_i)*(1 - p(x_i)))
        //   store z_i to the data->data_root as the new target responses
        const double lb_weight_thresh = FLT_EPSILON;
        const double lb_z_max = 10.;

        for( i = 0; i < n; i++ )
        {
            int si = w->sidx[i];
            // Running ensemble response F(x_i), updated by half the new output.
            sumResult[i] += 0.5*result[i];
            double p = 1./(1 + std::exp(-2*sumResult[i]));
            // Floor the weight so near-certain samples keep a tiny influence.
            double wval = std::max( p*(1 - p), lb_weight_thresh ), z;
            w->sample_weights[si] = wval;
            sumw += wval;
            // Replace the response with the working target z_i, clipped to
            // +/- lb_z_max to keep the regression targets bounded.
            if( w->ord_responses[si] > 0 )
            {
                z = 1./p;
                w->ord_responses[si] = std::min(z, lb_z_max);
            }
            else
            {
                z = 1./(1-p);
                w->ord_responses[si] = -std::min(z, lb_z_max);
            }
        }
    }
    else
        CV_Error(CV_StsNotImplemented, "Unknown boosting type");

    /*if( bparams.boostType != Boost::LOGIT )
    {
        double err = 0;
        for( i = 0; i < n; i++ )
        {
            sumResult[i] += result[i]*C;
            if( bparams.boostType != Boost::DISCRETE )
                err += sumResult[i]*w->ord_responses[w->sidx[i]] < 0;
            else
                err += sumResult[i]*w->cat_responses[w->sidx[i]] < 0;
        }
        printf("%d trees. C=%.2f, training error=%.1f%%, working set size=%d (out of %d)\n", (int)roots.size(), C, err*100./n, (int)sidx.size(), n);
    }*/

    // renormalize weights
    if( sumw > FLT_EPSILON )
        normalizeWeights();

    // Weight trimming disabled (rate outside (0,1)) -> keep sidx unchanged.
    if( bparams.weightTrimRate <= 0. || bparams.weightTrimRate >= 1. )
        return;

    // Sort the weights to find the smallest threshold that retains at least
    // weightTrimRate of the total weight mass.
    for( i = 0; i < n; i++ )
        result[i] = w->sample_weights[w->sidx[i]];
    std::sort(result, result + n);

    // as weight trimming occurs immediately after updating the weights,
    // where they are renormalized, we assume that the weight sum = 1.
    sumw = 1. - bparams.weightTrimRate;

    for( i = 0; i < n; i++ )
    {
        double wval = result[i];
        if( sumw <= 0 )
            break;
        sumw -= wval;
    }

    // Samples whose weight falls below the threshold are dropped from the
    // working set for the next iteration.
    double threshold = i < n ? result[i] : DBL_MAX;
    sidx.clear();

    for( i = 0; i < n; i++ )
    {
        int si = w->sidx[i];
        if( w->sample_weights[si] >= threshold )
            sidx.push_back(si);
    }
}