void TrajectoryClassifier::diff_using_bfs(std::vector<IndexType>& labels,
                                          std::vector<IndexType>& centerVtxId,
                                          IndexType centerFrame)
{
    SampleSet& set = SampleSet::get_instance();
    IndexType max_label = *max_element(labels.begin(), labels.end());
    vector< std::set<IndexType> > label_bucket(max_label + 1);
    //IndexType centerFrame = 10;

    for (int i = 0; i < labels.size(); i++) {
        label_bucket[labels[i]].insert(i);
    }

    IndexType new_label = max_label;
    Loggger << "max label before post:" << new_label << endl;

    for (IndexType l = 0; l < label_bucket.size(); l++) {
        std::set<IndexType>& idx_set = label_bucket[l];
        if (idx_set.size() == 0) {
            continue;
        }

        IndexType idx_set_size = idx_set.size();
        Matrix3X vtx_data;
        vtx_data.setZero(3, idx_set_size);

        size_t i = 0;
        for (std::set<IndexType>::iterator iter = idx_set.begin(); iter != idx_set.end(); iter++, i++) {
            IndexType a = *iter;
            vtx_data(0, i) = set[centerFrame][centerVtxId[a]].x();
            vtx_data(1, i) = set[centerFrame][centerVtxId[a]].y();
            vtx_data(2, i) = set[centerFrame][centerVtxId[a]].z();
        }

#ifdef USE_RADIUS_NEAREST
        ScalarType rad = set[centerFrame].box_diag();
        BFSClassifier<ScalarType> classifier(vtx_data, rad);
#else
        ScalarType rad = set[centerFrame].box_diag() / 10;
        BFSClassifier<ScalarType> classifier(vtx_data, rad, 10);
#endif
        classifier.run();

        int* sep_label = classifier.get_class_label();
        i = 0;
        for (std::set<IndexType>::iterator iter = idx_set.begin(); iter != idx_set.end(); iter++, i++) {
            if (sep_label[i] == 0) continue;
            labels[*iter] = new_label + sep_label[i];
        }
        new_label += classifier.get_num_of_class() - 1;
    }

    Loggger << "max label after post:" << new_label << endl;
}
int main(int argc, char **argv)
{
    ros::init(argc, argv, "cascade_classifier");
    ros::NodeHandle nh;
    ros::NodeHandle pnh("~");
    vision::CascadeClassifier classifier(nh, pnh);
    ros::spin();
}
KeyDetectionResult KeyFinder::keyOfChromagram(Workspace& workspace, const Parameters& params) const
{
    KeyDetectionResult result;

    // working copy of chromagram
    Chromagram* ch = new Chromagram(*workspace.chroma);
    ch->reduceToOneOctave();

    // get harmonic change signal and segment
    Segmentation segmenter;
    std::vector<unsigned int> segmentBoundaries = segmenter.getSegmentationBoundaries(ch, params);
    segmentBoundaries.push_back(ch->getHops()); // sentinel

    // get key estimates for each segment
    KeyClassifier classifier(
        params.getSimilarityMeasure(),
        params.getToneProfile(),
        params.getOffsetToC(),
        params.getCustomToneProfile()
    );

    std::vector<float> keyWeights(24); // TODO: not ideal using int cast of key_t enum. Hash?

    for (int s = 0; s < (signed) segmentBoundaries.size() - 1; s++) {
        KeyDetectionResultSegment segment;
        segment.firstHop = segmentBoundaries[s];
        segment.lastHop  = segmentBoundaries[s + 1] - 1;

        // collapse segment's time dimension
        std::vector<float> segmentChroma(ch->getBands(), 0.0);
        for (unsigned int hop = segment.firstHop; hop <= segment.lastHop; hop++) {
            for (unsigned int band = 0; band < ch->getBands(); band++) {
                float value = ch->getMagnitude(hop, band);
                segmentChroma[band] += value;
                segment.energy += value;
            }
        }

        segment.chromaVector = segmentChroma;
        segment.key = classifier.classify(segmentChroma);
        if (segment.key != SILENCE)
            keyWeights[segment.key] += segment.energy;
        result.segments.push_back(segment);
    }

    delete ch;

    // get global key
    result.globalKeyEstimate = SILENCE;
    float mostCommonKeyWeight = 0.0;
    for (int k = 0; k < (signed) keyWeights.size(); k++) {
        if (keyWeights[k] > mostCommonKeyWeight) {
            mostCommonKeyWeight = keyWeights[k];
            result.globalKeyEstimate = (key_t) k;
        }
    }

    return result;
}
void CSG::classifyFaceGroups(const V2Set & /* shared_edges */,
                             VertexClassification &vclass,
                             carve::mesh::MeshSet<3> *poly_a,
                             const carve::geom::RTreeNode<3, carve::mesh::Face<3> *> *poly_a_rtree,
                             FLGroupList &a_loops_grouped,
                             const detail::LoopEdges & /* a_edge_map */,
                             carve::mesh::MeshSet<3> *poly_b,
                             const carve::geom::RTreeNode<3, carve::mesh::Face<3> *> *poly_b_rtree,
                             FLGroupList &b_loops_grouped,
                             const detail::LoopEdges & /* b_edge_map */,
                             CSG::Collector &collector)
{
    ClassifyFaceGroups classifier(collector, hooks);

#if defined(CARVE_DEBUG)
    std::cerr << "initial groups: " << a_loops_grouped.size() << " a groups" << std::endl;
    std::cerr << "initial groups: " << b_loops_grouped.size() << " b groups" << std::endl;
#endif

    performClassifyFaceGroups(
        a_loops_grouped, b_loops_grouped,
        vclass,
        poly_a, poly_a_rtree,
        poly_b, poly_b_rtree,
        classifier, collector, hooks);
}
void online_test(const vector<vector<MatrixXd> >& vec_data, const VectorXi& vec_label,
                 const vector<MatrixXd>& weight_0, const MatrixXd& bias_0,
                 const vector<MatrixXd>& weight_1, const MatrixXd& bias_1,
                 const vector<MatrixXd>& weight_2, const MatrixXd& bias_2,
                 const MatrixXd& weight_class, const MatrixXd& bias_class,
                 const int& num_kerns1, const int& num_kerns2, const int& num_kerns3,
                 const int& kern_size, const int& pool_size)
{
    vector<vector<MatrixXd> > test_data;
    copy(vec_data.begin(), vec_data.begin() + 1, back_inserter(test_data));

    VectorXi test_label;
    test_label = vec_label.segment(0, 1200);

    ConvPoolLayer *p_conv0 = new ConvPoolLayer(test_data, num_kerns1, kern_size, "same", "tanh", weight_0, bias_0);
    Pool *p_pool0 = new Pool(p_conv0->batch_maps_activated, pool_size, pool_size);

    ConvPoolLayer *p_conv1 = new ConvPoolLayer(p_pool0->output_batch_pooled, num_kerns2, kern_size, "same", "tanh", weight_1, bias_1);
    Pool *p_pool1 = new Pool(p_conv1->batch_maps_activated, pool_size, pool_size);

    ConvPoolLayer *p_conv2 = new ConvPoolLayer(p_pool1->output_batch_pooled, num_kerns3, kern_size, "same", "tanh", weight_2, bias_2);
    Pool *p_pool2 = new Pool(p_conv2->batch_maps_activated, pool_size, pool_size);

    MatrixXd feature_vectors;
    get_feature_vector(p_pool2->output_batch_pooled, feature_vectors);

    // classifier.input = feature_vectors;
    Softmax classifier(feature_vectors, 9, test_label, 1, 1, weight_class, bias_class);
    classifier.calculation_output();
    cout << classifier.m.transpose() << endl;

    int accuracy = 0;
    for (int i = 0; i < test_label.size(); i++) {
        if (classifier.m(i) == test_label(i)) {
            accuracy++;
        }
    }
    cout << "accuracy : " << accuracy << endl;
}
int main(int argc, char *argv[])
{
    std::string filenameImage("small.png");
    double errorThreshold = 10.;
    int channel = 0;
    double gamma = 1.0;
    std::string histogramsTraining("training/training.dat");
    std::string outputPath("");

    // initialize LPIP detector
    LPIPDetector detector(filenameImage, channel, errorThreshold, 3, gamma);

    std::cout << "Detecting LISOs in the image..." << std::endl;
    detector.detect(outputPath);
    std::vector<LISO> lisoSet = detector.getLisoSet();

    // save LISO map
    cv::Mat1f lisoMap = detector.getLisoMap();
    std::stringstream path1;
    path1 << outputPath << "liso_map.png";
    std::cout << "Save LISO map to " << path1.str() << std::endl;
    imwrite(path1.str(), lisoMap * 255);

    // compute features and save histograms as images
    Trainer trainer;
    trainer.computeFeatures(lisoSet, lisoMap);

    std::cout << "Computing RQ Histogram..." << std::endl;
    Classifier classifier(histogramsTraining);
    classifier.createHistRQ(100, 50, lisoSet, false, "RQ_histogram");
    classifier.createHistRQ(100, 50, lisoSet, true, "RQ_histogram_weighted");

    return 0;
}
/**
 * Reimplemented from UMLWidget::saveToXMI to save
 * classifierwidget data either to 'interfacewidget' or 'classwidget'
 * XMI element.
 */
void ClassifierWidget::saveToXMI(QDomDocument & qDoc, QDomElement & qElement)
{
    QDomElement conceptElement;
    UMLClassifier *umlc = classifier();
    QString tagName = umlc->isInterface() ? "interfacewidget" : "classwidget";
    conceptElement = qDoc.createElement(tagName);
    UMLWidget::saveToXMI(qDoc, conceptElement);

    conceptElement.setAttribute("showoperations", visualProperty(ShowOperations));
    conceptElement.setAttribute("showpubliconly", visualProperty(ShowPublicOnly));
    conceptElement.setAttribute("showopsigs", m_operationSignature);
    conceptElement.setAttribute("showpackage", visualProperty(ShowPackage));
    conceptElement.setAttribute("showscope", visualProperty(ShowVisibility));

    if (!umlc->isInterface()) {
        conceptElement.setAttribute("showattributes", visualProperty(ShowAttributes));
        conceptElement.setAttribute("showattsigs", m_attributeSignature);
    }
    if (umlc->isInterface() || umlc->isAbstract()) {
        conceptElement.setAttribute("drawascircle", visualProperty(DrawAsCircle));
    }
    qElement.appendChild(conceptElement);
}
void CSG::halfClassifyFaceGroups(const V2Set & /* shared_edges */,
                                 VertexClassification &vclass,
                                 carve::mesh::MeshSet<3> *poly_a,
                                 const carve::geom::RTreeNode<3, carve::mesh::Face<3> *> *poly_a_rtree,
                                 FLGroupList &a_loops_grouped,
                                 const detail::LoopEdges & /* a_edge_map */,
                                 carve::mesh::MeshSet<3> *poly_b,
                                 const carve::geom::RTreeNode<3, carve::mesh::Face<3> *> *poly_b_rtree,
                                 FLGroupList &b_loops_grouped,
                                 const detail::LoopEdges & /* b_edge_map */,
                                 std::list<std::pair<FaceClass, carve::mesh::MeshSet<3> *> > &b_out)
{
    HalfClassifyFaceGroups classifier(b_out, hooks);
    GroupPoly group_poly(poly_b, b_out);

    performClassifyFaceGroups(
        a_loops_grouped, b_loops_grouped,
        vclass,
        poly_a, poly_a_rtree,
        poly_b, poly_b_rtree,
        classifier, group_poly, hooks);
}
/**
 * Set the AssociationWidget when this ClassWidget acts as an
 * association class.
 */
void ClassifierWidget::setClassAssociationWidget(AssociationWidget *assocwidget)
{
    m_classAssociationWidget = assocwidget;
    UMLAssociation *umlassoc = 0;
    if (assocwidget) {
        umlassoc = assocwidget->association();
    }
    classifier()->setClassAssoc(umlassoc);
}
/*
 * Function     : process_spectral_msg
 * Description  : process the spectral SAMP message
 * Input params : pointer to ath_ssd_info_t, pointer to SAMP message
 * Return       : none
 */
void process_spectral_msg(ath_ssd_info_t *pinfo, SPECTRAL_SAMP_MSG *msg)
{
    SPECTRAL_SAMP_DATA *ss_data;
    int is_ht2040 = 0;

    ss_data = &msg->samp_data;
    is_ht2040 = (ss_data->spectral_data_len > 100) ? 1 : 0;

    if (pinfo->init_classifier) {
        info("initializing classifier");
        /* reset the interference information */
        clear_interference_info(pinfo);
        pinfo->init_classifier = FALSE;
        init_bandinfo(&pinfo->lwrband, &pinfo->uprband, ENABLE_CLASSIFIER_PRINT);
        ms_init_classifier(&pinfo->lwrband, &pinfo->uprband, &ss_data->classifier_params);
    }

    classifier(&pinfo->lwrband, ss_data->spectral_tstamp, ss_data->spectral_last_tstamp,
               ss_data->spectral_lower_rssi, ss_data->spectral_nb_lower,
               ss_data->spectral_lower_max_index);

    if (is_ht2040) {
        classifier(&pinfo->uprband, ss_data->spectral_tstamp, ss_data->spectral_last_tstamp,
                   ss_data->spectral_upper_rssi, ss_data->spectral_nb_upper,
                   ss_data->spectral_upper_max_index);
    }

    /* update the detected interference details */
    update_interf_info(pinfo, &pinfo->lwrband);
    update_interf_info(pinfo, &pinfo->uprband);

    /* update the detected interference in message */
    add_interference_report(pinfo, &ss_data->interf_list);
}
int ClassifierWidget::displayedMembers(Uml::Object_Type ot)
{
    int count = 0;
    UMLClassifierListItemList list = classifier()->getFilteredList(ot);
    foreach (UMLClassifierListItem *m, list) {
        if (!(m_bShowPublicOnly && m->visibility() != Uml::Visibility::Public))
            count++;
    }
    return count;
}
/**
 * Changes this classifier from a class to an interface. Attributes
 * are hidden and the stereotype is shown. This widget is also updated.
 */
void ClassifierWidget::changeToInterface()
{
    m_baseType = WidgetBase::wt_Interface;
    classifier()->setBaseType(UMLObject::ot_Interface);

    setVisualProperty(ShowAttributes, false);
    setVisualProperty(ShowStereotype, true);

    updateTextItemGroups();
}
void SoftCascadeLearner::doPosteriors(const nor_utils::Args& args)
{
    SoftCascadeClassifier classifier(args, _verbose);

    string testFileName = args.getValue<string>("posteriors", 0);
    string shypFileName = args.getValue<string>("posteriors", 1);
    string outFileName  = args.getValue<string>("posteriors", 2);
    int numStages       = args.getValue<int>("posteriors", 3);

    classifier.savePosteriors(testFileName, shypFileName, outFileName, numStages);
}
/**
 * Event handler for hover leave events.
 */
void ClassifierWidget::hoverLeaveEvent(UMLSceneHoverEvent * event)
{
    Q_UNUSED(event);
    if (!visualProperty(DrawAsCircle)) {
        UMLClassifier* umlC = classifier();
        if (umlC && !umlC->isInterface()) {
            m_attributeExpanderBox->setVisible(false);
        }
        m_operationExpanderBox->setVisible(false);
    }
}
void FilterBoostLearner::doROC(const nor_utils::Args& args)
{
    FilterBoostClassifier classifier(args, _verbose);

    // -roc <dataFile> <shypFile> <outFileName> <numIterations>
    string testFileName = args.getValue<string>("roc", 0);
    string shypFileName = args.getValue<string>("roc", 1);
    string outFileName  = args.getValue<string>("roc", 2);
    int numIterations   = args.getValue<int>("roc", 3);

    classifier.saveROC(testFileName, shypFileName, outFileName, numIterations);
}
void experiment(const std::vector<cv::Mat> &type1s, const std::vector<cv::Mat> &type2s,
                const std::vector<cv::Mat> &others, const std::vector<cv::Mat> tests,
                std::vector<std::string> listFileTest)
{
    ClassifyPca classifier(0);
    ClusteringPca cluster(12);

    std::vector<cv::Mat> images;
    std::vector<int> labels;
    for (unsigned int i = 0; i < type1s.size(); i++) {
        images.push_back(type1s[i]);
        labels.push_back(1);
    }
    for (unsigned int i = 0; i < type2s.size(); i++) {
        images.push_back(type2s[i]);
        labels.push_back(2);
    }

    struct timeval tim;
    gettimeofday(&tim, NULL);
    double t1 = tim.tv_sec + (tim.tv_usec / 1000000.0);

    classifier.train(images, labels);
    //debug(classifier.predict(images[0]));
    cluster.train(others, K);

    std::vector<std::vector<std::string>> results(K + 3);

    gettimeofday(&tim, NULL);
    double t2 = tim.tv_sec + (tim.tv_usec / 1000000.0);

    for (unsigned int i = 0; i < tests.size(); i++) {
        int predicted = classifier.predict(tests[i]);
        if (predicted < 1) {
            predicted = cluster.predict(tests[i]);
        }
        // std::cout << listFileTest[i] << " is classified as class " << predicted << std::endl;
        std::vector<std::string> v = results.at(predicted);
        v.push_back(listFileTest[i]);
        results.at(predicted) = v;
    }

    gettimeofday(&tim, NULL);
    double t3 = tim.tv_sec + (tim.tv_usec / 1000000.0);

    std::cout << "Predict time: " << t3 - t2 << std::endl;
    std::cout << "Training time: " << t2 - t1 << std::endl;

    for (unsigned int i = 0; i < results.size(); i++) {
        std::vector<std::string> v = results[i];
        for (unsigned int j = 0; j < v.size(); j++) {
            std::cout << v[j] << " is classified as class " << i << std::endl;
        }
    }
}
void SoftCascadeLearner::classify(const nor_utils::Args& args)
{
    SoftCascadeClassifier classifier(args, _verbose);

    string testFileName = args.getValue<string>("test", 0);
    string shypFileName = args.getValue<string>("test", 1);
    int numIterations   = args.getValue<int>("test", 2);

    string outResFileName = "";
    if (args.getNumValues("test") > 3)
        args.getValue("test", 3, outResFileName);

    classifier.run(testFileName, shypFileName, numIterations, outResFileName);
}
void FilterBoostLearner::classify(const nor_utils::Args& args)
{
    FilterBoostClassifier classifier(args, _verbose);

    // -test <dataFile> <shypFile>
    string testFileName = args.getValue<string>("test", 0);
    string shypFileName = args.getValue<string>("test", 1);
    int numIterations   = args.getValue<int>("test", 2);

    string outResFileName;
    if (args.getNumValues("test") > 3)
        args.getValue("test", 3, outResFileName);

    classifier.run(testFileName, shypFileName, numIterations, outResFileName);
}
/*
 * Create a new person from the provided stamped pose and point cloud, and insert them into the map.
 */
Person& PersonDetector::create_person(const geometry_msgs::Pose& pose, const sensor_msgs::PointCloud2& cloud)
{
    // Make a new person with the trivial person classifier, for now, and then give them an initial pose.
    Person lifeform(_current_uid++);
    lifeform.push_pose(pose);
    // boost::shared_ptr<PersonClassifier> classifier(new ShirtColorPersonClassifier(cloud, 30.0));
    boost::shared_ptr<PersonClassifier> classifier(new TrivialPersonClassifier);

    // Then insert them.
    tracked()[lifeform.uid()] = lifeform;
    classifiers()[lifeform.uid()] = classifier;

    // Return a reference to the person in the map.
    return tracked()[lifeform.uid()];
}
void AdaBoostMHLearner::doPosteriors(const nor_utils::Args& args)
{
    AdaBoostMHClassifier classifier(args, _verbose);
    int numofargs = args.getNumValues("posteriors");

    // -posteriors <dataFile> <shypFile> <outFile> <numIters>
    string testFileName = args.getValue<string>("posteriors", 0);
    string shypFileName = args.getValue<string>("posteriors", 1);
    string outFileName  = args.getValue<string>("posteriors", 2);
    int numIterations   = args.getValue<int>("posteriors", 3);
    int period = 0;

    if (numofargs == 5)
        period = args.getValue<int>("posteriors", 4);

    classifier.savePosteriors(testFileName, shypFileName, outFileName, numIterations, period);
}
/**
 * Changes this classifier from an interface to a class. Attribute
 * and stereotype visibility are taken from the view OptionState. This
 * widget is also updated.
 */
void ClassifierWidget::changeToClass()
{
    m_baseType = WidgetBase::wt_Class;
    classifier()->setBaseType(UMLObject::ot_Class);

    bool showAtts = true;
    bool showStereotype = false;

    if (umlScene()) {
        const Settings::OptionState& ops = umlScene()->optionState();
        showAtts = ops.classState.showAtts;
        showStereotype = ops.classState.showStereoType;
    }

    setVisualProperty(ShowAttributes, showAtts);
    setVisualProperty(ShowStereotype, showStereotype);

    updateTextItemGroups();
}
void FilterBoostLearner::doConfusionMatrix(const nor_utils::Args& args)
{
    FilterBoostClassifier classifier(args, _verbose);

    // -cmatrix <dataFile> <shypFile>
    if (args.hasArgument("cmatrix")) {
        string testFileName = args.getValue<string>("cmatrix", 0);
        string shypFileName = args.getValue<string>("cmatrix", 1);

        classifier.printConfusionMatrix(testFileName, shypFileName);
    }
    // -cmatrixfile <dataFile> <shypFile> <outFile>
    else if (args.hasArgument("cmatrixfile")) {
        string testFileName   = args.getValue<string>("cmatrixfile", 0);
        string shypFileName   = args.getValue<string>("cmatrixfile", 1);
        string outResFileName = args.getValue<string>("cmatrixfile", 2);

        classifier.saveConfusionMatrix(testFileName, shypFileName, outResFileName);
    }
}
template <class MatT>
void detectMultiscale(const std::string &model_file,
                      const std::string &trained_file,
                      const std::string &mean_file,
                      const std::string &label_file,
                      const cv::Mat &inputImg,
                      const cv::Size &minSize,
                      const cv::Size &maxSize,
                      std::vector<cv::Rect> &rectsOut)
{
    CaffeClassifier<MatT> classifier(model_file, trained_file, mean_file, label_file, 64);
    int wsize = classifier.getInputGeometry().width;

    std::vector<std::pair<MatT, float> > scaledimages;
    std::vector<cv::Rect> rects;
    std::vector<int> scales;
    std::vector<int> scalesOut;

    generateInitialWindows(inputImg, minSize, maxSize, wsize, scaledimages, rects, scales);
    runDetection(classifier, scaledimages, rects, scales, 0.9, "bin", rectsOut, scalesOut);

    // Map each detection from its scaled image back into input-image coordinates.
    for (size_t i = 0; i < rectsOut.size(); i++) {
        float scale = scaledimages[scalesOut[i]].second;
        rectsOut[i] = cv::Rect(rectsOut[i].x / scale, rectsOut[i].y / scale,
                               rectsOut[i].width / scale, rectsOut[i].height / scale);
    }
}
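// A minimal, hypothetical usage sketch for the detectMultiscale template above.
// It assumes the template is instantiated with cv::Mat, that OpenCV is available,
// and that every file name below is a placeholder for real Caffe artifacts; none
// of the names here come from the original project.
#include <opencv2/opencv.hpp>
#include <string>
#include <vector>

int run_detector_example()
{
    cv::Mat frame = cv::imread("frame.png");  // input image (placeholder path)
    if (frame.empty())
        return -1;

    std::vector<cv::Rect> detections;
    detectMultiscale<cv::Mat>("deploy.prototxt",     // model definition (placeholder)
                              "weights.caffemodel",  // trained weights (placeholder)
                              "mean.binaryproto",    // mean image (placeholder)
                              "labels.txt",          // class labels (placeholder)
                              frame,
                              cv::Size(20, 20),      // smallest window to search
                              cv::Size(200, 200),    // largest window to search
                              detections);

    // Draw the detections back onto the frame.
    for (size_t i = 0; i < detections.size(); ++i)
        cv::rectangle(frame, detections[i], cv::Scalar(0, 255, 0), 2);

    return static_cast<int>(detections.size());
}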
QSize ClassifierWidget::calculateTemplatesBoxSize()
{
    UMLTemplateList list = classifier()->getTemplateList();
    int count = list.count();
    if (count == 0) {
        return QSize(0, 0);
    }

    int width, height;
    height = width = 0;

    QFont font = UMLWidget::font();
    font.setItalic(false);
    font.setUnderline(false);
    font.setBold(false);
    const QFontMetrics fm(font);

    height = count * fm.lineSpacing() + (MARGIN * 2);

    foreach (UMLTemplate *t, list) {
        int textWidth = fm.size(0, t->toString()).width();
        if (textWidth > width)
            width = textWidth;
    }
    width += MARGIN * 2;

    return QSize(width, height);
}
void prob3b()
{
    // filenames
    std::string train1 = "Data_Prog2/Training_1.ppm";
    std::string ref1 = "Data_Prog2/ref1.ppm";

    // variable declarations
    int M, N, Q;
    bool type;

    // make image objects
    readImageHeader(train1.c_str(), N, M, Q, type);
    ImageType image1(N, M, Q);
    ImageType refimage1(N, M, Q);
    readImage(train1.c_str(), image1);
    readImage(ref1.c_str(), refimage1);

    // make skin colors
    Matrix skin_colors, non_skin_colors;
    makeColorMatrices(image1, refimage1, skin_colors, non_skin_colors, true);

    // estimate parameters for skin-color class
    std::vector<double> mean1 = getSampleMean(skin_colors);
    std::cout << "sample_mean1 = ";
    print_vec(mean1);
    Matrix cov1 = getSampleVar(skin_colors, mean1);
    std::cout << "sample_cov1 = ";
    print_matrix(cov1);

    // set up parameters for non-skin-color class
    std::vector<double> mean2 = getSampleMean(non_skin_colors);
    Matrix cov2 = getSampleVar(non_skin_colors, mean2);
    std::cout << "sample_mean2 = ";
    print_vec(mean2);
    std::cout << "sample_cov2 = ";
    print_matrix(cov2);

    // make classifier
    QuadraticDiscriminant classifier(mean1, mean2, cov1, cov2, 0.08, 0.92);
    std::string out1 = "Data_Prog2/out1b.ppm";
    testSkinRecognition(classifier, image1, refimage1, out1, true);

    std::string train3 = "Data_Prog2/Training_3.ppm";
    std::string ref3 = "Data_Prog2/ref3.ppm";
    readImageHeader(train3.c_str(), N, M, Q, type);
    ImageType image3(N, M, Q);
    ImageType refimage3(N, M, Q);
    readImage(train3.c_str(), image3);
    readImage(ref3.c_str(), refimage3);
    std::string out3 = "Data_Prog2/out3b.ppm";
    testSkinRecognition(classifier, image3, refimage3, out3, true);

    std::string train6 = "Data_Prog2/Training_6.ppm";
    std::string ref6 = "Data_Prog2/ref6.ppm";
    readImageHeader(train6.c_str(), N, M, Q, type);
    ImageType image6(N, M, Q);
    ImageType refimage6(N, M, Q);
    readImage(train6.c_str(), image6);
    readImage(ref6.c_str(), refimage6);
    std::string out6 = "Data_Prog2/out6b.ppm";
    testSkinRecognition(classifier, image6, refimage6, out6, true);
}
//=======================================================================
//function : GetMinDistance
//purpose  :
//=======================================================================
Standard_Real GEOMUtils::GetMinDistance (const TopoDS_Shape& theShape1,
                                         const TopoDS_Shape& theShape2,
                                         gp_Pnt& thePnt1, gp_Pnt& thePnt2)
{
    Standard_Real aResult = 1.e9;

    // Issue 0020231: A min distance bug with torus and vertex.
    // Make GetMinDistance() return zero if a sole VERTEX is inside any of SOLIDs

    // which of shapes consists of only one vertex?
    TopExp_Explorer exp1(theShape1, TopAbs_VERTEX), exp2(theShape2, TopAbs_VERTEX);
    TopoDS_Shape V1 = exp1.More() ? exp1.Current() : TopoDS_Shape();
    TopoDS_Shape V2 = exp2.More() ? exp2.Current() : TopoDS_Shape();
    exp1.Next();
    exp2.Next();
    if (exp1.More()) V1.Nullify();
    if (exp2.More()) V2.Nullify();

    // vertex and container of solids
    TopoDS_Shape V = V1.IsNull() ? V2 : V1;
    TopoDS_Shape S = V1.IsNull() ? theShape1 : theShape2;
    if (!V.IsNull()) {
        // classify vertex against solids
        gp_Pnt p = BRep_Tool::Pnt(TopoDS::Vertex(V));
        for (exp1.Init(S, TopAbs_SOLID); exp1.More(); exp1.Next()) {
            BRepClass3d_SolidClassifier classifier(exp1.Current(), p, 1e-6);
            if (classifier.State() == TopAbs_IN) {
                thePnt1 = p;
                thePnt2 = p;
                return 0.0;
            }
        }
    }
    // End Issue 0020231

    // skl 30.06.2008
    // additional workaround for bugs 19899, 19908 and 19910 from Mantis
    double dist = GEOMUtils::GetMinDistanceSingular(theShape1, theShape2, thePnt1, thePnt2);
    if (dist > -1.0) {
        return dist;
    }

    BRepExtrema_DistShapeShape dst (theShape1, theShape2);
    if (dst.IsDone()) {
        gp_Pnt P1, P2;
        for (int i = 1; i <= dst.NbSolution(); i++) {
            P1 = dst.PointOnShape1(i);
            P2 = dst.PointOnShape2(i);
            Standard_Real Dist = P1.Distance(P2);
            if (aResult > Dist) {
                aResult = Dist;
                thePnt1 = P1;
                thePnt2 = P2;
            }
        }
    }

    return aResult;
}
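// A minimal, hypothetical usage sketch for GEOMUtils::GetMinDistance above.
// It assumes an OpenCASCADE build with BRepPrimAPI available and that the header
// declaring GEOMUtils is on the include path; the two boxes are placeholder shapes.
#include <BRepPrimAPI_MakeBox.hxx>
#include <TopoDS_Shape.hxx>
#include <gp_Pnt.hxx>

Standard_Real min_distance_example()
{
    // Two axis-aligned boxes, 10 units apart along X.
    TopoDS_Shape box1 = BRepPrimAPI_MakeBox(10.0, 10.0, 10.0).Shape();
    TopoDS_Shape box2 = BRepPrimAPI_MakeBox(gp_Pnt(20.0, 0.0, 0.0), 5.0, 5.0, 5.0).Shape();

    gp_Pnt p1, p2;  // filled with the closest points on each shape
    Standard_Real dist = GEOMUtils::GetMinDistance(box1, box2, p1, p2);

    // For these placeholder boxes the expected distance is 10 (from x = 10 to x = 20).
    return dist;
}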
void ClassifierWidget::slotMenuSelection(QAction* action)
{
    ListPopupMenu::Menu_Type sel = m_pMenu->getMenuType(action);
    switch (sel) {
    case ListPopupMenu::mt_Attribute:
    case ListPopupMenu::mt_Operation:
    case ListPopupMenu::mt_Template:
    {
        Uml::Object_Type ot = ListPopupMenu::convert_MT_OT(sel);
        if (Object_Factory::createChildObject(classifier(), ot)) {
            updateComponentSize();
            update();
            UMLApp::app()->document()->setModified();
        }
        break;
    }
    case ListPopupMenu::mt_Show_Operations:
    case ListPopupMenu::mt_Show_Operations_Selection:
        toggleShowOps();
        break;

    case ListPopupMenu::mt_Show_Attributes:
    case ListPopupMenu::mt_Show_Attributes_Selection:
        toggleShowAtts();
        break;

    case ListPopupMenu::mt_Show_Public_Only:
    case ListPopupMenu::mt_Show_Public_Only_Selection:
        toggleShowPublicOnly();
        break;

    case ListPopupMenu::mt_Show_Operation_Signature:
    case ListPopupMenu::mt_Show_Operation_Signature_Selection:
        toggleShowOpSigs();
        break;

    case ListPopupMenu::mt_Show_Attribute_Signature:
    case ListPopupMenu::mt_Show_Attribute_Signature_Selection:
        toggleShowAttSigs();
        break;

    case ListPopupMenu::mt_Visibility:
    case ListPopupMenu::mt_Visibility_Selection:
        toggleShowVisibility();
        break;

    case ListPopupMenu::mt_Show_Packages:
    case ListPopupMenu::mt_Show_Packages_Selection:
        toggleShowPackage();
        break;

    case ListPopupMenu::mt_Show_Stereotypes:
    case ListPopupMenu::mt_Show_Stereotypes_Selection:
        toggleShowStereotype();
        break;

    case ListPopupMenu::mt_DrawAsCircle:
    case ListPopupMenu::mt_DrawAsCircle_Selection:
        toggleDrawAsCircle();
        break;

    case ListPopupMenu::mt_ChangeToClass:
    case ListPopupMenu::mt_ChangeToClass_Selection:
        changeToClass();
        break;

    case ListPopupMenu::mt_ChangeToInterface:
    case ListPopupMenu::mt_ChangeToInterface_Selection:
        changeToInterface();
        break;

    default:
        UMLWidget::slotMenuSelection(action);
        break;
    }
}
QSize ClassifierWidget::calculateSize()
{
    if (!m_pObject) {
        return UMLWidget::calculateSize();
    }
    if (classifier()->isInterface() && m_bDrawAsCircle) {
        return calculateAsCircleSize();
    }

    const QFontMetrics &fm = getFontMetrics(UMLWidget::FT_NORMAL);
    const int fontHeight = fm.lineSpacing();

    // width is the width of the longest 'word'
    int width = 0, height = 0;

    // consider stereotype
    if (m_bShowStereotype && !m_pObject->stereotype().isEmpty()) {
        height += fontHeight;
        // ... width
        const QFontMetrics &bfm = UMLWidget::getFontMetrics(UMLWidget::FT_BOLD);
        const int stereoWidth = bfm.size(0, m_pObject->stereotype(true)).width();
        if (stereoWidth > width)
            width = stereoWidth;
    }

    // consider name
    height += fontHeight;
    // ... width
    QString displayedName;
    if (m_bShowPackage)
        displayedName = m_pObject->fullyQualifiedName();
    else
        displayedName = m_pObject->name();
    const UMLWidget::FontType nft = (m_pObject->isAbstract() ? FT_BOLD_ITALIC : FT_BOLD);
    //const int nameWidth = getFontMetrics(nft).boundingRect(displayName).width();
    const int nameWidth = UMLWidget::getFontMetrics(nft).size(0, displayedName).width();
    if (nameWidth > width)
        width = nameWidth;

    // consider attributes
    const int numAtts = displayedAttributes();
    if (numAtts == 0) {
        height += fontHeight / 2;  // no atts, so just add a bit of space
    } else {
        height += fontHeight * numAtts;
        // calculate width of the attributes
        UMLClassifierListItemList list = classifier()->getFilteredList(Uml::ot_Attribute);
        foreach (UMLClassifierListItem *a, list) {
            if (m_bShowPublicOnly && a->visibility() != Uml::Visibility::Public)
                continue;
            const int attWidth = fm.size(0, a->toString(m_ShowAttSigs)).width();
            if (attWidth > width)
                width = attWidth;
        }
    }

    // consider operations
    const int numOps = displayedOperations();
    if (numOps == 0) {
        height += fontHeight / 2;  // no ops, so just add a bit of space
    } else {
        height += numOps * fontHeight;
        // ... width
        UMLOperationList list(classifier()->getOpList());
        foreach (UMLOperation* op, list) {
            if (m_bShowPublicOnly && op->visibility() != Uml::Visibility::Public)
                continue;
            const QString displayedOp = op->toString(m_ShowOpSigs);
            UMLWidget::FontType oft;
            oft = (op->isAbstract() ? UMLWidget::FT_ITALIC : UMLWidget::FT_NORMAL);
            const int w = UMLWidget::getFontMetrics(oft).size(0, displayedOp).width();
            if (w > width)
                width = w;
        }
    }

    // consider template box _as last_ !
    QSize templatesBoxSize = calculateTemplatesBoxSize();
    if (templatesBoxSize.width() != 0) {
        // add width to largest 'word'
        width += templatesBoxSize.width() / 2;
    }
    if (templatesBoxSize.height() != 0) {
        height += templatesBoxSize.height() - MARGIN;
    }

    // allow for height margin
    if (!m_bShowOperations && !m_bShowAttributes && !m_bShowStereotype) {
        height += MARGIN * 2;
    }

    // allow for width margin
    width += MARGIN * 2;

    return QSize(width, height);
}
/**
 * Reimplemented from UMLWidget::updateTextItemGroups to
 * calculate the Text strings, their properties and also hide/show
 * them based on the current state.
 */
void ClassifierWidget::updateTextItemGroups()
{
    // Invalidate stuff and recalculate them.
    invalidateDummies();

    TextItemGroup *headerGroup = textItemGroupAt(HeaderGroupIndex);
    TextItemGroup *attribOpGroup = textItemGroupAt(AttribOpGroupIndex);
    TextItemGroup *templateGroup = textItemGroupAt(TemplateGroupIndex);

    attribOpGroup->setAlignment(Qt::AlignVCenter | Qt::AlignLeft);
    templateGroup->setAlignment(Qt::AlignVCenter | Qt::AlignLeft);

    UMLClassifier *umlC = classifier();
    UMLClassifierListItemList attribList = umlC->getFilteredList(UMLObject::ot_Attribute);
    UMLClassifierListItemList opList = umlC->getFilteredList(UMLObject::ot_Operation);

    // Set up template group and template text items.
    UMLTemplateList tlist = umlC->getTemplateList();
    templateGroup->setTextItemCount(tlist.size());
    bool templateHide = shouldDrawAsCircle();  // Hide if draw as circle.
    for (int i = 0; i < tlist.size(); ++i) {
        UMLTemplate *t = tlist[i];
        templateGroup->textItemAt(i)->setText(t->toString());
        templateGroup->textItemAt(i)->setExplicitVisibility(!templateHide);
    }

    // Stereo type and name.
    const int headerItemCount = 2;
    headerGroup->setTextItemCount(headerItemCount);

    const int cnt = attribList.count() + opList.count();
    attribOpGroup->setTextItemCount(cnt);

    // Setup Stereo text item.
    TextItem *stereoItem = headerGroup->textItemAt(StereotypeItemIndex);
    stereoItem->setBold(true);
    stereoItem->setText(umlC->stereotype(true));

    bool v = !shouldDrawAsCircle()
             && visualProperty(ShowStereotype)
             && !(umlC->stereotype(false).isEmpty());
    stereoItem->setExplicitVisibility(v);

    // name item is always visible.
    TextItem *nameItem = headerGroup->textItemAt(NameItemIndex);
    nameItem->setBold(true);
    nameItem->setItalic(umlC->isAbstract());
    nameItem->setUnderline(shouldDrawAsCircle());
    QString nameText = name();
    if (visualProperty(ShowPackage) == true) {
        nameText = umlC->fullyQualifiedName();
    }

    bool showNameOnly = (!visualProperty(ShowAttributes)
                         && !visualProperty(ShowOperations)
                         && !visualProperty(ShowStereotype)
                         && !shouldDrawAsCircle());
    nameItem->setText(nameText);

    int attribStartIndex = 0;
    int opStartIndex = attribStartIndex + attribList.size();

    // Now setup attribute texts.
    int visibleAttributes = 0;
    for (int i = 0; i < attribList.size(); ++i) {
        UMLClassifierListItem *obj = attribList[i];

        TextItem *item = attribOpGroup->textItemAt(attribStartIndex + i);
        item->setItalic(obj->isAbstract());
        item->setUnderline(obj->isStatic());
        item->setText(obj->toString(m_attributeSignature));

        bool v = !shouldDrawAsCircle()
                 && (!visualProperty(ShowPublicOnly)
                     || obj->visibility() == Uml::Visibility::Public)
                 && visualProperty(ShowAttributes) == true;
        item->setExplicitVisibility(v);
        if (v) {
            ++visibleAttributes;
        }
    }

    // Update expander box to reflect current state and also visibility
    m_attributeExpanderBox->setExpanded(visualProperty(ShowAttributes));

    const QString dummyText;
    // Setup line and dummies.
    if (!showNameOnly) {
        // Stuff in a dummy item as spacer if there are no attributes,
        if (!shouldDrawAsCircle()
            && (visibleAttributes == 0 || !visualProperty(ShowAttributes))) {
            m_dummyAttributeItem = new TextItem(dummyText);
            int index = attribStartIndex;
            if (visibleAttributes == 0 && !attribList.isEmpty()) {
                index = opStartIndex;
            }
            attribOpGroup->insertTextItemAt(index, m_dummyAttributeItem);
            m_lineItem2Index = index;
            ++opStartIndex;
        } else {
            // Now set the second index.
            m_lineItem2Index = opStartIndex - 1;
        }
    }

    int visibleOperations = 0;
    for (int i = 0; i < opList.size(); ++i) {
        UMLClassifierListItem *obj = opList[i];

        TextItem *item = attribOpGroup->textItemAt(opStartIndex + i);
        item->setItalic(obj->isAbstract());
        item->setUnderline(obj->isStatic());
        item->setText(obj->toString(m_operationSignature));

        bool v = !shouldDrawAsCircle()
                 && (!visualProperty(ShowPublicOnly)
                     || obj->visibility() == Uml::Visibility::Public)
                 && visualProperty(ShowOperations);
        item->setExplicitVisibility(v);
        if (v) {
            ++visibleOperations;
        }
    }
    m_operationExpanderBox->setExpanded(visualProperty(ShowOperations));

    if (!showNameOnly) {
        if (!shouldDrawAsCircle()
            && (visibleOperations == 0 || !visualProperty(ShowOperations))) {
            m_dummyOperationItem = new TextItem(dummyText);
            attribOpGroup->insertTextItemAt(opStartIndex + opList.size(), m_dummyOperationItem);
        }
    }

    UMLWidget::updateTextItemGroups();
}
/**
 * Will be called when a menu selection has been made from the
 * popup menu.
 *
 * @param action The action that has been selected.
 */
void ClassifierWidget::slotMenuSelection(QAction* action)
{
    ListPopupMenu *menu = ListPopupMenu::menuFromAction(action);
    if (!menu) {
        uError() << "Action's data field does not contain ListPopupMenu pointer";
        return;
    }
    ListPopupMenu::MenuType sel = menu->getMenuType(action);
    switch (sel) {
    case ListPopupMenu::mt_Attribute:
    case ListPopupMenu::mt_Operation:
    case ListPopupMenu::mt_Template:
    {
        UMLObject::ObjectType ot = ListPopupMenu::convert_MT_OT(sel);
        if (Object_Factory::createChildObject(classifier(), ot)) {
            UMLApp::app()->document()->setModified();
        }
        break;
    }

    case ListPopupMenu::mt_Show_Operations:
    case ListPopupMenu::mt_Show_Operations_Selection:
        toggleVisualProperty(ShowOperations);
        break;

    case ListPopupMenu::mt_Show_Attributes:
    case ListPopupMenu::mt_Show_Attributes_Selection:
        toggleVisualProperty(ShowAttributes);
        break;

    case ListPopupMenu::mt_Show_Public_Only:
    case ListPopupMenu::mt_Show_Public_Only_Selection:
        toggleVisualProperty(ShowPublicOnly);
        break;

    case ListPopupMenu::mt_Show_Operation_Signature:
    case ListPopupMenu::mt_Show_Operation_Signature_Selection:
        toggleVisualProperty(ShowOperationSignature);
        break;

    case ListPopupMenu::mt_Show_Attribute_Signature:
    case ListPopupMenu::mt_Show_Attribute_Signature_Selection:
        toggleVisualProperty(ShowAttributeSignature);
        break;

    case ListPopupMenu::mt_Visibility:
    case ListPopupMenu::mt_Visibility_Selection:
        toggleVisualProperty(ShowVisibility);
        break;

    case ListPopupMenu::mt_Show_Packages:
    case ListPopupMenu::mt_Show_Packages_Selection:
        toggleVisualProperty(ShowPackage);
        break;

    case ListPopupMenu::mt_Show_Stereotypes:
    case ListPopupMenu::mt_Show_Stereotypes_Selection:
        toggleVisualProperty(ShowStereotype);
        break;

    case ListPopupMenu::mt_DrawAsCircle:
    case ListPopupMenu::mt_DrawAsCircle_Selection:
        toggleVisualProperty(DrawAsCircle);
        break;

    case ListPopupMenu::mt_ChangeToClass:
    case ListPopupMenu::mt_ChangeToClass_Selection:
        changeToClass();
        break;

    case ListPopupMenu::mt_ChangeToInterface:
    case ListPopupMenu::mt_ChangeToInterface_Selection:
        changeToInterface();
        break;

    default:
        UMLWidget::slotMenuSelection(action);
        break;
    }
}