// Test program for the Directed Perception PTU-46 pan-tilt head.
// Exercises position control (move to zero, then to a fixed pan/tilt
// pose) followed by articular velocity control on each axis.
// Requires the PTU-46 hardware to be connected; otherwise the catch-all
// handler below reports that the unit is unavailable.
int main()
{
  try {
    vpRobotPtu46 robot ;
    vpColVector q(2) ;  // articular position: q[0] = pan, q[1] = tilt (radians)

    vpERROR_TRACE(" ") ;

    // --- Position control: first go back to the zero position. ---
    robot.setRobotState(vpRobot::STATE_POSITION_CONTROL) ;
    q = 0;
    vpCTRACE << "Set position in the articular frame: " << q.t();
    robot.setPosition(vpRobot::ARTICULAR_FRAME, q) ;

    // Then move to pan = 10 deg, tilt = 20 deg (API expects radians).
    q[0] = vpMath::rad(10);
    q[1] = vpMath::rad(20);
    vpCTRACE << "Set position in the articular frame: " << q.t();
    robot.setPosition(vpRobot::ARTICULAR_FRAME, q) ;

    // Read back the position actually reached by the head.
    vpColVector qm(2) ;
    robot.getPosition(vpRobot::ARTICULAR_FRAME, qm) ;
    vpCTRACE << "Position in the articular frame " << qm.t() ;

    // --- Velocity control. ---
    vpColVector qdot(2) ;  // articular velocities (rad/s)
    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL) ;
#if 0
    // Camera-frame velocity tests, currently disabled.
    qdot = 0 ;
    qdot[0] = vpMath::rad(10) ;
    qdot[1] = vpMath::rad(10) ;
    vpCTRACE << "Set camera frame velocity " << qdot.t() ;
    robot.setVelocity(vpRobot::CAMERA_FRAME, qdot) ;
    sleep(2) ;

    qdot = 0 ;
    qdot[0] = vpMath::rad(-10) ;
    qdot[1] = vpMath::rad(-10) ;
    vpCTRACE << "Set camera frame velocity " << qdot.t() ;
    robot.setVelocity(vpRobot::CAMERA_FRAME, qdot) ;
    sleep(2) ;
#endif
    // Tilt axis only: 10 deg/s applied for 2 seconds.
    qdot = 0 ;
    // qdot[0] = vpMath::rad(0.1) ;
    qdot[1] = vpMath::rad(10) ;
    vpCTRACE << "Set articular frame velocity " << qdot.t() ;
    robot.setVelocity(vpRobot::ARTICULAR_FRAME, qdot) ;
    sleep(2) ;

    // Pan axis only: -5 deg/s applied for 2 seconds.
    qdot = 0 ;
    qdot[0] = vpMath::rad(-5);
    //qdot[1] = vpMath::rad(-5);
    vpCTRACE << "Set articular frame velocity " << qdot.t() ;
    robot.setVelocity(vpRobot::ARTICULAR_FRAME, qdot) ;
    sleep(2) ;
  }
  catch (...) {
    // Deliberate best-effort: any failure (typically no hardware attached)
    // is reported and the program exits cleanly.
    std::cout << "Sorry PtU46 not available ..." << std::endl;
  }
  return 0;
}
/**
 * Fire-and-forget send of a message to a replica set member.
 *
 * For query operations whose read preference allows secondaries, a node is
 * selected using tags and the send is retried up to MAX_RETRY times across
 * different hosts; all other operations go to the primary. The _lazyState
 * bookkeeping records which connection was used so that a later lazy recv
 * or error can be matched to the right node.
 *
 * @param toSend        message to transmit (its operation() decides routing)
 * @param isRetry       true when this call is itself a retry; preserves the
 *                      existing _lazyState instead of resetting it
 * @param actualServer  if non-NULL, receives the address of the host the
 *                      message was actually sent to
 */
void DBClientReplicaSet::say(Message& toSend, bool isRetry, string* actualServer) {

    // A fresh (non-retry) call starts with clean lazy-state bookkeeping.
    if (!isRetry)
        _lazyState = LazyState();

    const int lastOp = toSend.operation();

    if (lastOp == dbQuery) {
        // TODO: might be possible to do this faster by changing api
        DbMessage dm(toSend);
        QueryMessage qm(dm);

        shared_ptr<ReadPreferenceSetting> readPref( _extractReadPref( qm.query, qm.queryOptions ) );
        if ( _isSecondaryQuery( qm.ns, qm.query, *readPref ) ) {

            LOG( 3 ) << "dbclient_rs say using secondary or tagged node selection in "
                     << _getMonitor()->getName() << ", read pref is "
                     << readPref->toBSON() << " (primary : "
                     << ( _master.get() != NULL ? _master->getServerAddress() : "[not cached]" )
                     << ", lastTagged : "
                     << ( _lastSlaveOkConn.get() != NULL ? _lastSlaveOkConn->getServerAddress() : "[not cached]" )
                     << ")" << endl;

            string lastNodeErrMsg;

            // Try up to MAX_RETRY candidate nodes; on connection failure the
            // cached slaveOk host is invalidated and another node is picked.
            for (size_t retry = 0; retry < MAX_RETRY; retry++) {
                _lazyState._retries = retry;
                try {
                    DBClientConnection* conn = selectNodeUsingTags(readPref);

                    if (conn == NULL) {
                        // No node satisfies the read preference; fall through
                        // to the uassert below.
                        break;
                    }

                    if (actualServer != NULL) {
                        *actualServer = conn->getServerAddress();
                    }

                    conn->say(toSend);

                    // Record where the message went for the follow-up recv.
                    _lazyState._lastOp = lastOp;
                    _lazyState._secondaryQueryOk = true;
                    _lazyState._lastClient = conn;
                }
                catch ( const DBException& DBExcep ) {
                    StringBuilder errMsgBuilder;
                    errMsgBuilder << "can't callLazy replica set node "
                               << _lastSlaveOkHost.toString() << ": " << causedBy( DBExcep );
                    lastNodeErrMsg = errMsgBuilder.str();

                    LOG(1) << lastNodeErrMsg << endl;
                    invalidateLastSlaveOkCache();
                    continue;
                }

                // Sent successfully.
                return;
            }

            // Every candidate failed (or none matched): surface the last error.
            StringBuilder assertMsg;
            assertMsg << "Failed to call say, no good nodes in " << _getMonitor()->getName();
            if ( !lastNodeErrMsg.empty() ) {
                assertMsg << ", last error: " << lastNodeErrMsg;
            }

            uasserted(16380, assertMsg.str());
        }
    }

    // Non-query ops, or queries that must go to the primary.
    LOG( 3 ) << "dbclient_rs say to primary node in " << _getMonitor()->getName() << endl;

    DBClientConnection* master = checkMaster();
    if (actualServer)
        *actualServer = master->getServerAddress();

    _lazyState._lastOp = lastOp;
    _lazyState._secondaryQueryOk = false;
    // Don't retry requests to primary since there is only one host to try
    _lazyState._retries = MAX_RETRY;
    _lazyState._lastClient = master;

    master->say(toSend);
    return;
}
/*=====================================================
 * Menu::create(const urania::MenuDesc& desc)
 * Builds a Menu object from a MenuDesc description.
 */
urania::RCP_Menu urania::Menu::create(const urania::MenuDesc& desc)
{
  RCP_Menu menu(new Menu);
  // Attach the native menu handle generated from the description.
  menu->link__(desc.createHandle__());
  return menu;
}
/**
 * Shows the context menu for the overlay editor scene, allowing the user to
 * pick a layout preset and to adjust opacities, alignment, colors, fonts and
 * the bounding box of the overlay element under the cursor.
 *
 * Fix: the object-opacity entries were built with `qreal o = i + 1 / 8.0;`
 * (integer-precedence bug yielding i + 0.125, i.e. opacities up to 7.125)
 * instead of `(i + 1) / 8.0` as used by the user-opacity menu; the menu
 * entries could therefore never match or set a valid opacity in [0, 1].
 */
void OverlayEditorScene::contextMenuEvent(QGraphicsSceneContextMenuEvent *e) {
	QGraphicsScene::contextMenuEvent(e);

	if (e->isAccepted())
		return;
	if (! e->widget())
		return;

	// Overlay element (avatar, mute icon, channel or user name) under cursor.
	QGraphicsPixmapItem *item = childAt(e->scenePos());

	QMenu qm(e->widget());

	QMenu *qmLayout = qm.addMenu(tr("Layout preset"));
	QAction *qaLayoutLargeAvatar = qmLayout->addAction(tr("Large square avatar"));
	QAction *qaLayoutText = qmLayout->addAction(tr("Avatar and Name"));

	// Whole-group opacity in 1/8 steps.
	QMenu *qmTrans = qm.addMenu(tr("User Opacity"));
	QActionGroup *qagUser = new QActionGroup(&qm);
	QAction *userOpacity[8];
	for (int i=0;i<8;++i) {
		qreal o = (i + 1) / 8.0;
		userOpacity[i] = new QAction(tr("%1%").arg(o * 100.0f, 0, 'f', 1), qagUser);
		userOpacity[i]->setCheckable(true);
		userOpacity[i]->setData(o);
		if (qFuzzyCompare(qgiGroup->opacity(), o))
			userOpacity[i]->setChecked(true);
		qmTrans->addAction(userOpacity[i]);
	}

	QAction *color = NULL;
	QAction *fontAction = NULL;
	QAction *objectOpacity[8];
	for (int i=0;i<8;++i)
		objectOpacity[i] = NULL;
	QAction *boxpen[4] = { NULL, NULL, NULL, NULL};
	QAction *boxpad[4] = { NULL, NULL, NULL, NULL};
	QAction *boxpencolor = NULL;
	QAction *boxfillcolor = NULL;

	QAction *align[6];
	for (int i=0;i<6;++i)
		align[i] = NULL;

	if (item) {
		qm.addSeparator();

		// Per-object opacity, same 1/8 .. 8/8 steps as the user menu.
		QMenu *qmObjTrans = qm.addMenu(tr("Object Opacity"));
		QActionGroup *qagObject = new QActionGroup(&qm);
		for (int i=0;i<8;++i) {
			// BUGFIX: was `i + 1 / 8.0`, which produced values > 1.
			qreal o = (i + 1) / 8.0;
			objectOpacity[i] = new QAction(tr("%1%").arg(o * 100.0f, 0, 'f', 1), qagObject);
			objectOpacity[i]->setCheckable(true);
			objectOpacity[i]->setData(o);
			if (qFuzzyCompare(item->opacity(), o))
				objectOpacity[i]->setChecked(true);
			qmObjTrans->addAction(objectOpacity[i]);
		}

		QMenu *qmObjAlign = qm.addMenu(tr("Alignment"));
		// Current alignment of the item under the cursor.
		Qt::Alignment a;
		if (item == qgpiAvatar)
			a = os.qaAvatar;
		else if (item == qgpiChannel)
			a = os.qaChannel;
		else if (item == qgpiMuted)
			a = os.qaMutedDeafened;
		else
			a = os.qaUserName;

		align[0] = qmObjAlign->addAction(tr("Left"));
		align[0]->setCheckable(true);
		align[0]->setData(Qt::AlignLeft);
		if (a & Qt::AlignLeft)
			align[0]->setChecked(true);
		align[1] = qmObjAlign->addAction(tr("Center"));
		align[1]->setCheckable(true);
		align[1]->setData(Qt::AlignHCenter);
		if (a & Qt::AlignHCenter)
			align[1]->setChecked(true);
		align[2] = qmObjAlign->addAction(tr("Right"));
		align[2]->setCheckable(true);
		align[2]->setData(Qt::AlignRight);
		if (a & Qt::AlignRight)
			align[2]->setChecked(true);
		qmObjAlign->addSeparator();
		align[3] = qmObjAlign->addAction(tr("Top"));
		align[3]->setCheckable(true);
		align[3]->setData(Qt::AlignTop);
		if (a & Qt::AlignTop)
			align[3]->setChecked(true);
		align[4] = qmObjAlign->addAction(tr("Center"));
		align[4]->setCheckable(true);
		align[4]->setData(Qt::AlignVCenter);
		if (a & Qt::AlignVCenter)
			align[4]->setChecked(true);
		align[5] = qmObjAlign->addAction(tr("Bottom"));
		align[5]->setCheckable(true);
		align[5]->setData(Qt::AlignBottom);
		if (a & Qt::AlignBottom)
			align[5]->setChecked(true);

		// Avatar and mute icon have no configurable color/font.
		if ((item != qgpiAvatar) && (item != qgpiMuted)) {
			color = qm.addAction(tr("Color..."));
			fontAction = qm.addAction(tr("Font..."));
		}
	}

	if (qgpiBox->isVisible()) {
		qm.addSeparator();
		QMenu *qmBox = qm.addMenu(tr("Bounding box"));
		QMenu *qmPen = qmBox->addMenu(tr("Pen width"));
		QMenu *qmPad = qmBox->addMenu(tr("Padding"));
		boxpencolor = qmBox->addAction(tr("Pen color"));
		boxfillcolor = qmBox->addAction(tr("Fill color"));

		QActionGroup *qagPen = new QActionGroup(qmPen);
		QActionGroup *qagPad = new QActionGroup(qmPad);
		for (int i=0;i<4;++i) {
			// Widths/paddings: 0, then 2^-9, 2^-8, 2^-7 (relative units).
			qreal v = (i) ? powf(2.0f, static_cast<float>(-10 + i)) : 0.0f;
			boxpen[i] = new QAction(QString::number(i), qagPen);
			boxpen[i]->setData(v);
			boxpen[i]->setCheckable(true);
			if (qFuzzyCompare(os.fBoxPenWidth, v))
				boxpen[i]->setChecked(true);
			qmPen->addAction(boxpen[i]);

			boxpad[i] = new QAction(QString::number(i), qagPad);
			boxpad[i]->setData(v);
			boxpad[i]->setCheckable(true);
			if (qFuzzyCompare(os.fBoxPad, v))
				boxpad[i]->setChecked(true);
			qmPad->addAction(boxpad[i]);
		}
	}

	QAction *act = qm.exec(e->screenPos());

	if (! act)
		return;

	// --- Apply whichever action was chosen. ---

	for (int i=0;i<8;++i) {
		if (userOpacity[i] == act) {
			float o = static_cast<float>(act->data().toReal());
			os.fUser[tsColor] = o;
			qgiGroup->setOpacity(o);
		}
	}

	for (int i=0;i<8;++i) {
		if (objectOpacity[i] == act) {
			qreal o = act->data().toReal();

			if (item == qgpiMuted)
				os.fMutedDeafened = o;
			else if (item == qgpiAvatar)
				os.fAvatar = o;
			else if (item == qgpiChannel)
				os.fChannel = o;
			else if (item == qgpiName)
				os.fUserName = o;

			item->setOpacity(o);
		}
	}

	for (int i=0;i<4;++i) {
		if (boxpen[i] == act) {
			os.fBoxPenWidth = act->data().toReal();
			moveBox();
		} else if (boxpad[i] == act) {
			os.fBoxPad = act->data().toReal();
			moveBox();
		}
	}

	for (int i=0;i<6;++i) {
		if (align[i] == act) {
			Qt::Alignment *aptr;
			if (item == qgpiAvatar)
				aptr = & os.qaAvatar;
			else if (item == qgpiChannel)
				aptr = & os.qaChannel;
			else if (item == qgpiMuted)
				aptr = & os.qaMutedDeafened;
			else
				aptr = & os.qaUserName;

			Qt::Alignment a = static_cast<Qt::Alignment>(act->data().toInt());
			// Replace only the horizontal OR vertical component the user picked.
			if (a & Qt::AlignHorizontal_Mask) {
				*aptr = (*aptr & ~Qt::AlignHorizontal_Mask) | a;
			} else {
				*aptr = (*aptr & ~Qt::AlignVertical_Mask) | a;
			}

			updateSelected();
		}
	}

	if (act == boxpencolor) {
		QColor qc = QColorDialog::getColor(os.qcBoxPen, e->widget(), tr("Pick pen color"), QColorDialog::DontUseNativeDialog | QColorDialog::ShowAlphaChannel);
		if (! qc.isValid())
			return;
		os.qcBoxPen = qc;
		moveBox();
	} else if (act == boxfillcolor) {
		QColor qc = QColorDialog::getColor(os.qcBoxFill, e->widget(), tr("Pick fill color"), QColorDialog::DontUseNativeDialog | QColorDialog::ShowAlphaChannel);
		if (! qc.isValid())
			return;
		os.qcBoxFill = qc;
		moveBox();
	} else if (act == color) {
		QColor *col = NULL;
		if (item == qgpiChannel)
			col = & os.qcChannel;
		else if (item == qgpiName)
			col = & os.qcUserName[tsColor];
		if (! col)
			return;

		QColor qc = QColorDialog::getColor(*col, e->widget(), tr("Pick color"), QColorDialog::DontUseNativeDialog);
		if (! qc.isValid())
			return;
		qc.setAlpha(255);
		if (qc == *col)
			return;
		*col = qc;
		updateSelected();
	} else if (act == fontAction) {
		QFont *fontptr = (item == qgpiChannel) ? &os.qfChannel : &os.qfUserName;

		qgpiSelected = NULL;
		qgriSelected->hide();

		// QFontDialog doesn't really like graphics view. At all.

		QFontDialog qfd;
		qfd.setOptions(QFontDialog::DontUseNativeDialog);
		qfd.setCurrentFont(*fontptr);
		qfd.setWindowTitle(tr("Pick font"));

		int ret;
		if (g.ocIntercept) {
			// Overlay is intercepting: embed the dialog in the scene itself.
			QGraphicsProxyWidget *qgpw = new QGraphicsProxyWidget(NULL, Qt::Window);
			qgpw->setWidget(&qfd);

			addItem(qgpw);

			qgpw->setZValue(3.0f);
			qgpw->setPanelModality(QGraphicsItem::PanelModal);
			qgpw->setPos(- qgpw->boundingRect().width() / 2.0f, - qgpw->boundingRect().height() / 2.0f);
			qgpw->show();

			ret = qfd.exec();

			qgpw->hide();
			qgpw->setWidget(NULL);
			delete qgpw;
		} else {
			// Keep the dialog on top if the main window is on top.
			Qt::WindowFlags wf = g.mw->windowFlags();
			if (wf.testFlag(Qt::WindowStaysOnTopHint))
				qfd.setWindowFlags(qfd.windowFlags() | Qt::WindowStaysOnTopHint);
			ret = qfd.exec();
		}

		if (! ret)
			return;
		*fontptr = qfd.selectedFont();
		resync();
	} else if (act == qaLayoutLargeAvatar) {
		os.setPreset(OverlaySettings::LargeSquareAvatar);
		resync();
	} else if (act == qaLayoutText) {
		os.setPreset(OverlaySettings::AvatarAndName);
		resync();
	}
}
/**
 * Visual servoing demo: a Pioneer mobile robot with a Biclops pan head
 * keeps a 3D horizontal segment (two tracked blobs) centered at a desired
 * distance, using a segment visual feature (Xc, L, alpha).
 *
 * Fix: after building the desired feature s_segment_d, the desired depths
 * were mistakenly written into the *current* feature (s_segment.setZ1/2),
 * overwriting the just-estimated depths and leaving s_segment_d's Z at its
 * default — contradicting the adjacent "Desired depth" comment. They are
 * now set on s_segment_d as intended.
 */
int main(int argc, char **argv)
{
#if defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_CMU1394)
#if defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)
  try {
    vpImage<unsigned char> I; // Create a gray level image container
    double lambda = 0.1;      // Scale parameter used to estimate the depth Z of the blob from its surface
    //double coef = 0.9/14.85; // At 0.9m, the blob has a surface of 14.85 (Logitec sphere)
    double coef = 1.2/13.0;   // At 1m, the blob has a surface of 11.3 (AVT Pike 032C)
    double L = 0.21;          // 3D horizontal segment length
    double Z_d = 0.8;         // Desired distance along Z between camera and segment
    bool normalized = true;   // segment normalized features are used

    // Warning: To have a non singular task of rank 3, Y_d should be different from 0 so that
    // the optical axis doesn't intersect the horizontal segment
    double Y_d = -.11;        // Desired distance along Y between camera and segment.

    vpColVector qm(2);        // Measured head position
    qm = 0;
    double qm_pan = 0;        // Measured pan position (tilt is not handled in that example)

#ifdef USE_REAL_ROBOT
    // Initialize the biclops head
    vpRobotBiclops biclops("/usr/share/BiclopsDefault.cfg");
    biclops.setDenavitHartenbergModel(vpBiclops::DH1);

    // Move to the initial position
    vpColVector q(2);
    q=0;
    // q[0] = vpMath::rad(63);
    // q[1] = vpMath::rad(12); // introduce a tilt angle to compensate camera sphere tilt so that the camera is parallel to the plane

    biclops.setRobotState(vpRobot::STATE_POSITION_CONTROL) ;
    biclops.setPosition( vpRobot::ARTICULAR_FRAME, q );
    //biclops.setPositioningVelocity(50);
    biclops.getPosition(vpRobot::ARTICULAR_FRAME, qm);
    qm_pan = qm[0];

    // Now the head will be controlled in velocity
    biclops.setRobotState(vpRobot::STATE_VELOCITY_CONTROL) ;

    // Initialize the pioneer robot
    vpRobotPioneer pioneer;
    ArArgumentParser parser(&argc, argv);
    parser.loadDefaultArguments();

    // ArRobotConnector connects to the robot, get some initial data from it such as type and name,
    // and then loads parameter files for this robot.
    ArRobotConnector robotConnector(&parser, &pioneer);
    if(!robotConnector.connectRobot()) {
      ArLog::log(ArLog::Terse, "Could not connect to the pioneer robot.");
      if(parser.checkHelpAndWarnUnparsed()) {
        Aria::logOptions();
        Aria::exit(1);
      }
    }
    if (!Aria::parseArgs()) {
      Aria::logOptions();
      Aria::shutdown();
      // NOTE(review): returns false (== 0, success exit code) on failure;
      // kept as-is to preserve the original behavior.
      return false;
    }

    pioneer.useSonar(false); // disable the sonar device usage

    // Wait 3 sec to be sure that the low level Aria thread used to control
    // the robot is started. Without this delay we experienced a delay (arround 2.2 sec)
    // between the velocity send to the robot and the velocity that is really applied
    // to the wheels.
    sleep(3);

    std::cout << "Pioneer robot connected" << std::endl;
#endif

    vpPioneerPan robot_pan; // Generic robot that computes the velocities for the pioneer and the biclops head

    // Camera parameters. In this experiment we don't need a precise calibration of the camera
    vpCameraParameters cam;

    // Create the camera framegrabber
#if defined(VISP_HAVE_V4L2)
    // Create a grabber based on v4l2 third party lib (for usb cameras under Linux)
    vpV4l2Grabber g;
    g.setScale(1);
    g.setInput(0);
    g.setDevice("/dev/video1");
    g.open(I);
    // Logitec sphere parameters
    cam.initPersProjWithoutDistortion(558, 555, 312, 210);
#elif defined(VISP_HAVE_DC1394)
    // Create a grabber based on libdc1394-2.x third party lib (for firewire cameras under Linux)
    vp1394TwoGrabber g(false);
    g.setVideoMode(vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8);
    g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_30);
    // AVT Pike 032C parameters
    cam.initPersProjWithoutDistortion(800, 795, 320, 216);
#elif defined(VISP_HAVE_CMU1394)
    // Create a grabber based on CMU 1394 third party lib (for firewire cameras under windows)
    vp1394CMUGrabber g;
    g.setVideoMode(0, 5); // 640x480 MONO8
    g.setFramerate(4);    // 30 Hz
    g.open(I);
    // AVT Pike 032C parameters
    cam.initPersProjWithoutDistortion(800, 795, 320, 216);
#endif

    // Acquire an image from the grabber
    g.acquire(I);

    // Create an image viewer
#if defined(VISP_HAVE_X11)
    vpDisplayX d(I, 10, 10, "Current frame");
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI d(I, 10, 10, "Current frame");
#endif
    vpDisplay::display(I);
    vpDisplay::flush(I);

    // The 3D segment consists in two horizontal dots
    vpDot2 dot[2];
    for (int i=0; i <2; i++) {
      dot[i].setGraphics(true);
      dot[i].setComputeMoments(true);
      dot[i].setEllipsoidShapePrecision(0.);  // to track a blob without any constraint on the shape
      dot[i].setGrayLevelPrecision(0.9);      // to set the blob gray level bounds for binarisation
      dot[i].setEllipsoidBadPointsPercentage(0.5); // to be accept 50% of bad inner and outside points with bad gray level
      dot[i].initTracking(I);
      vpDisplay::flush(I);
    }

    vpServo task;
    task.setServo(vpServo::EYEINHAND_L_cVe_eJe) ;
    task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE) ;
    task.setLambda(lambda) ;
    vpVelocityTwistMatrix cVe ; // keep to identity
    cVe = robot_pan.get_cVe() ;
    task.set_cVe(cVe) ;
    std::cout << "cVe: \n" << cVe << std::endl;

    vpMatrix eJe;
    // Update the robot jacobian that depends on the pan position
    robot_pan.set_eJe(qm_pan);
    // Get the robot jacobian
    eJe = robot_pan.get_eJe() ;
    task.set_eJe(eJe) ;
    std::cout << "eJe: \n" << eJe << std::endl;

    // Define a 3D horizontal segment an its cordinates in the image plane
    vpPoint P[2];
    P[0].setWorldCoordinates(-L/2, 0, 0);
    P[1].setWorldCoordinates( L/2, 0, 0);
    // Define the desired camera position
    vpHomogeneousMatrix cMo(0, Y_d, Z_d, 0, 0, 0); // Here we are in front of the segment
    for (int i=0; i <2; i++) {
      P[i].changeFrame(cMo);
      P[i].project(); // Here the x,y parameters obtained by perspective projection are computed
    }

    // Estimate the depth of the segment extremity points
    double surface[2];
    double Z[2]; // Depth of the segment points
    for (int i=0; i<2; i++) {
      // Surface of the blob estimated from the image moment m00 and converted in meters
      surface[i] = 1./sqrt(dot[i].m00/(cam.get_px()*cam.get_py()));
      // Initial depth of the blob
      Z[i] = coef * surface[i] ;
    }

    // Use here a feature segment builder
    vpFeatureSegment s_segment(normalized), s_segment_d(normalized); // From the segment feature we use only alpha
    vpFeatureBuilder::create(s_segment, cam, dot[0], dot[1]);
    s_segment.setZ1(Z[0]);
    s_segment.setZ2(Z[1]);
    // Set the desired feature
    vpFeatureBuilder::create(s_segment_d, P[0], P[1]);
    // BUGFIX: the desired depths belong to the desired feature s_segment_d,
    // not to the current feature s_segment (which would clobber the depths
    // estimated just above).
    s_segment_d.setZ1( P[0].get_Z() ); // Desired depth
    s_segment_d.setZ2( P[1].get_Z() );

    task.addFeature(s_segment, s_segment_d,
                    vpFeatureSegment::selectXc() |
                    vpFeatureSegment::selectL() |
                    vpFeatureSegment::selectAlpha());

#ifdef USE_PLOTTER
    //Create a window (500 by 500) at position (700, 10) with two graphics
    vpPlot graph(2, 500, 500, 700, 10, "Curves...");
    //The first graphic contains 3 curve and the second graphic contains 3 curves
    graph.initGraph(0,3);
    graph.initGraph(1,3);
    graph.setTitle(0, "Velocities");
    graph.setTitle(1, "Error s-s*");
    graph.setLegend(0, 0, "vx");
    graph.setLegend(0, 1, "wz");
    graph.setLegend(0, 2, "w_pan");
    graph.setLegend(1, 0, "xm/l");
    graph.setLegend(1, 1, "1/l");
    graph.setLegend(1, 2, "alpha");
#endif

    vpColVector v; // vz, wx
    unsigned int iter = 0;
    try {
      while(1) {
#ifdef USE_REAL_ROBOT
        // Get the new pan position
        biclops.getPosition(vpRobot::ARTICULAR_FRAME, qm);
#endif
        qm_pan = qm[0];

        // Acquire a new image
        g.acquire(I);
        // Set the image as background of the viewer
        vpDisplay::display(I);

        // Display the desired position of the segment
        for (int i=0; i<2; i++)
          P[i].display(I, cam, vpColor::red, 3);

        // Does the blob tracking
        for (int i=0; i<2; i++)
          dot[i].track(I);

        for (int i=0; i<2; i++) {
          // Surface of the blob estimated from the image moment m00 and converted in meters
          surface[i] = 1./sqrt(dot[i].m00/(cam.get_px()*cam.get_py()));
          // Initial depth of the blob
          Z[i] = coef * surface[i] ;
        }

        // Update the features
        vpFeatureBuilder::create(s_segment, cam, dot[0], dot[1]);
        // Update the depth of the point. Useful only if current interaction matrix is used
        // when task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE) is set
        s_segment.setZ1(Z[0]);
        s_segment.setZ2(Z[1]);

        robot_pan.get_cVe(cVe);
        task.set_cVe(cVe);

        // Update the robot jacobian that depends on the pan position
        robot_pan.set_eJe(qm_pan);
        // Get the robot jacobian
        eJe = robot_pan.get_eJe();
        // Update the jacobian that will be used to compute the control law
        task.set_eJe(eJe);

        // Compute the control law. Velocities are computed in the mobile robot reference frame
        v = task.computeControlLaw();

        // std::cout << "-----" << std::endl;
        // std::cout << "v: " << v.t() << std::endl;
        // std::cout << "error: " << task.getError().t() << std::endl;
        // std::cout << "L:\n " << task.getInteractionMatrix() << std::endl;
        // std::cout << "eJe:\n " << task.get_eJe() << std::endl;
        // std::cout << "cVe:\n " << task.get_cVe() << std::endl;
        // std::cout << "L_cVe_eJe:\n" << task.getInteractionMatrix() * task.get_cVe() * task.get_eJe() << std::endl;
        // task.print() ;
        if (task.getTaskRank() != 3)
          std::cout << "Warning: task is of rank " << task.getTaskRank() << std::endl;

#ifdef USE_PLOTTER
        graph.plot(0, iter, v);               // plot velocities applied to the robot
        graph.plot(1, iter, task.getError()); // plot error vector
#endif

#ifdef USE_REAL_ROBOT
        // Send the velocity to the robot
        vpColVector v_pioneer(2); // vx, wz
        v_pioneer[0] = v[0];
        v_pioneer[1] = v[1];
        vpColVector v_biclops(2); // qdot pan and tilt
        v_biclops[0] = v[2];
        v_biclops[1] = 0;

        std::cout << "Send velocity to the pionner: " << v_pioneer[0] << " m/s "
                  << vpMath::deg(v_pioneer[1]) << " deg/s" << std::endl;
        std::cout << "Send velocity to the biclops head: " << vpMath::deg(v_biclops[0]) << " deg/s" << std::endl;

        pioneer.setVelocity(vpRobot::REFERENCE_FRAME, v_pioneer);
        biclops.setVelocity(vpRobot::ARTICULAR_FRAME, v_biclops) ;
#endif

        // Draw a vertical line which corresponds to the desired x coordinate of the dot cog
        vpDisplay::displayLine(I, 0, cam.get_u0(), 479, cam.get_u0(), vpColor::red);
        vpDisplay::flush(I);

        // A click in the viewer to exit
        if ( vpDisplay::getClick(I, false) )
          break;

        iter ++;
        //break;
      }
    } catch(...) {
      // Best effort: any error in the servo loop just ends the loop so the
      // robot threads and the task can be cleaned up below.
    }

#ifdef USE_REAL_ROBOT
    std::cout << "Ending robot thread..." << std::endl;
    pioneer.stopRunning();

    // wait for the thread to stop
    pioneer.waitForRunExit();
#endif

    // Kill the servo task
    task.print() ;
    task.kill();
  }
  catch(vpException &e) {
    std::cout << "Catch an exception: " << e << std::endl;
    return 1;
  }
#endif
#endif
  return 0;
}
// Demo: renders n GPU-instanced segment meshes, each with its own random
// transform matrix and a palette color, inside a circe SceneApp window.
int main() {
  circe::SceneApp<> app(800, 800);
  std::shared_ptr<circe::InstanceSet> spheres, quads;
  // generate a bunch of random quads
  // but now each instance has a transform matrix
  size_t n = 400;  // total number of instances
  // generate base mesh
  ponos::RawMeshSPtr sphereMesh(
      ponos::create_icosphere_mesh(ponos::point3(), 1.f, 0, false, false));
  ponos::RawMeshSPtr quadMesh(ponos::create_quad_mesh(
      ponos::point3(0, 0, 0), ponos::point3(1, 0, 0), ponos::point3(1, 1, 0),
      ponos::point3(0, 1, 0), false, false));
  ponos::RawMeshSPtr wquadMesh(ponos::create_quad_wireframe_mesh(
      ponos::point3(0, 0, 0), ponos::point3(1, 0, 0), ponos::point3(1, 1, 0),
      ponos::point3(0, 1, 0)));
  // ponos::RawMeshSPtr circleMesh(ponos::RawMeshes::icosphere());
  ponos::RawMeshSPtr segmentMesh(
      ponos::RawMeshes::segment(ponos::point2(1, 0)));
  // NOTE(review): sphereMesh/quadMesh/cube are built but never rendered;
  // presumably kept for experimenting with other base meshes.
  ponos::RawMeshSPtr cube = ponos::RawMeshes::cube();
  // circe::SceneMesh qm(*wquadMesh.get());
  // Base mesh actually instanced below: a unit segment.
  circe::SceneMesh qm(segmentMesh);
  const char *fs = CIRCE_INSTANCES_FS;
  const char *vs = CIRCE_INSTANCES_VS;
  // Shader with per-vertex position/color plus a per-instance transform.
  circe::ShaderProgram quadShader(vs, nullptr, fs);
  quadShader.addVertexAttribute("position", 0);
  quadShader.addVertexAttribute("color", 1);
  quadShader.addVertexAttribute("transform_matrix", 2);
  quadShader.addUniform("model_view_matrix", 3);
  quadShader.addUniform("projection_matrix", 4);
  quads.reset(new circe::InstanceSet(qm, quadShader, n / 2));
  {
    // create a buffer for particles positions + sizes
    // (16 floats = one 4x4 transform matrix per instance)
    circe::BufferDescriptor trans = circe::create_array_stream_descriptor(16);
    trans.addAttribute("transform_matrix", 16, 0, trans.dataType);
    uint tid = quads->add(trans);
    // create a buffer for particles colors
    circe::BufferDescriptor col = circe::create_array_stream_descriptor(4); // r g b a
    col.addAttribute("color", 4, 0, col.dataType); // 4 -> r g b a
    uint colid = quads->add(col);
    quads->resize(n);
    circe::ColorPalette palette = circe::HEAT_MATLAB_PALETTE;
    ponos::RNGSampler sampler;
    ponos::HaltonSequence rng;
    // Fill per-instance color and transform buffers.
    for (size_t i = 0; i < n; i++) {
      auto color = palette((1.f * i) / n, 1.f);
      auto c = quads->instanceF(colid, i);
      c[0] = color.r;
      c[1] = color.g;
      c[2] = color.b;
      c[3] = color.a;
      // Alpha deliberately overridden to a fixed translucency.
      c[3] = 0.4;
      auto m = quads->instanceF(tid, i);
      float t[16];
      // Random scale, then random translation inside a 10x5x5 box,
      // written out in column-major order as the shader expects.
      (ponos::scale(rng.randomFloat(), rng.randomFloat(), rng.randomFloat()) *
       ponos::translate(ponos::vec3(sampler.sample(
           ponos::bbox3(ponos::point3(-5, 0, 0), ponos::point3(5, 5, 5))))))
          .matrix()
          .column_major(t);
      for (size_t k = 0; k < 16; k++)
        m[k] = t[k];
    }
  }
  // app.scene.add(spheres.get());
  app.scene.add(quads.get());
  // Reference grid for orientation.
  circe::SceneObjectSPtr grid(new circe::CartesianGrid(5));
  app.scene.add(grid.get());
  app.run();
  return 0;
}
/**
 * Shows the context menu for the overlay user list: filter mode, column
 * count, sort order, recently-active timeout, editor access and zoom reset.
 * Selecting an entry updates the OverlaySettings (*os) and refreshes the
 * layout or the user list accordingly.
 */
void OverlayUserGroup::contextMenuEvent(QGraphicsSceneContextMenuEvent *event) {
	event->accept();

#ifdef Q_OS_MAC
	// On macOS, when the overlay intercepts input, the menu must live inside
	// the intercepted graphics scene rather than on the desktop.
	bool embed = g.ocIntercept != NULL;
	QMenu qm(embed ? NULL : event->widget());
	if (embed) {
		QGraphicsScene *scene = g.ocIntercept->qgv.scene();
		scene->addWidget(&qm);
	}
#else
	QMenu qm(g.ocIntercept ? g.mw : event->widget());
#endif

	// --- Filter submenu: which users are displayed. ---
	QMenu *qmShow = qm.addMenu(OverlayClient::tr("Filter"));

	QAction *qaShowTalking = qmShow->addAction(OverlayClient::tr("Only talking"));
	qaShowTalking->setCheckable(true);
	if (os->osShow == OverlaySettings::Talking)
		qaShowTalking->setChecked(true);

	QAction *qaShowActive = qmShow->addAction(OverlayClient::tr("Talking and recently active"));
	qaShowActive->setCheckable(true);
	if (os->osShow == OverlaySettings::Active)
		qaShowActive->setChecked(true);

	QAction *qaShowHome = qmShow->addAction(OverlayClient::tr("All in current channel"));
	qaShowHome->setCheckable(true);
	if (os->osShow == OverlaySettings::HomeChannel)
		qaShowHome->setChecked(true);

	QAction *qaShowLinked = qmShow->addAction(OverlayClient::tr("All in linked channels"));
	qaShowLinked->setCheckable(true);
	if (os->osShow == OverlaySettings::LinkedChannels)
		qaShowLinked->setChecked(true);

	qmShow->addSeparator();

	// "Show yourself" only makes sense for the talking-based filters.
	QAction *qaShowSelf = qmShow->addAction(OverlayClient::tr("Always show yourself"));
	qaShowSelf->setCheckable(true);
	qaShowSelf->setEnabled(os->osShow == OverlaySettings::Talking || os->osShow == OverlaySettings::Active);
	if (os->bAlwaysSelf)
		qaShowSelf->setChecked(true);

	qmShow->addSeparator();

	QAction *qaConfigureRecentlyActiveTime = qmShow->addAction(OverlayClient::tr("Configure recently active time (%1 seconds)...").arg(os->uiActiveTime));
	qaConfigureRecentlyActiveTime->setEnabled(os->osShow == OverlaySettings::Active);

	// --- Columns submenu: 1..5 columns (index 0 of qaColumns is unused). ---
	QMenu *qmColumns = qm.addMenu(OverlayClient::tr("Columns"));
	QAction *qaColumns[6];
	for (unsigned int i=1;i<=5;++i) {
		qaColumns[i] = qmColumns->addAction(QString::number(i));
		qaColumns[i]->setCheckable(true);
		qaColumns[i]->setChecked(i == os->uiColumns);
	}

	// --- Sort submenu. ---
	QMenu *qmSort = qm.addMenu(OverlayClient::tr("Sort"));

	QAction *qaSortAlphabetically = qmSort->addAction(OverlayClient::tr("Alphabetically"));
	qaSortAlphabetically->setCheckable(true);
	if (os->osSort == OverlaySettings::Alphabetical)
		qaSortAlphabetically->setChecked(true);

	QAction *qaSortLastStateChange = qmSort->addAction(OverlayClient::tr("Last state change"));
	qaSortLastStateChange->setCheckable(true);
	if (os->osSort == OverlaySettings::LastStateChange)
		qaSortLastStateChange->setChecked(true);

	QAction *qaEdit = qm.addAction(OverlayClient::tr("Edit..."));
	QAction *qaZoom = qm.addAction(OverlayClient::tr("Reset Zoom"));

	QAction *act = qm.exec(event->screenPos());

	if (! act)
		return;

	if (act == qaEdit) {
		if (g.ocIntercept) {
			// Editor must be opened from the main thread/event loop.
			QMetaObject::invokeMethod(g.ocIntercept, "openEditor", Qt::QueuedConnection);
		} else {
			OverlayEditor oe(qApp->activeModalWidget(), NULL, os);
			connect(&oe, SIGNAL(applySettings()), this, SLOT(updateLayout()));
			oe.exec();
		}
	} else if (act == qaZoom) {
		os->fZoom = 1.0f;
		updateLayout();
	} else if (act == qaShowTalking) {
		os->osShow = OverlaySettings::Talking;
		updateUsers();
	} else if (act == qaShowActive) {
		os->osShow = OverlaySettings::Active;
		updateUsers();
	} else if (act == qaShowHome) {
		os->osShow = OverlaySettings::HomeChannel;
		updateUsers();
	} else if (act == qaShowLinked) {
		os->osShow = OverlaySettings::LinkedChannels;
		updateUsers();
	} else if (act == qaShowSelf) {
		os->bAlwaysSelf = ! os->bAlwaysSelf;
		updateUsers();
	} else if (act == qaConfigureRecentlyActiveTime) {
		// FIXME: This might not be the best place to configure this setting, but currently
		// there's not really a suitable place to put this. In the future an additional tab
		// might be added for some advanced overlay options, which could then include this
		// setting.
		bool ok;
		int newValue = QInputDialog::getInt(
		        qm.parentWidget(),
		        OverlayClient::tr("Configure recently active time"),
		        OverlayClient::tr("Amount of seconds users remain active after talking:"),
		        os->uiActiveTime, 1, 2147483647, 1, &ok);
		if (ok) {
			os->uiActiveTime = newValue;
		}
		updateUsers();
	} else if (act == qaSortAlphabetically) {
		os->osSort = OverlaySettings::Alphabetical;
		updateUsers();
	} else if (act == qaSortLastStateChange) {
		os->osSort = OverlaySettings::LastStateChange;
		updateUsers();
	} else {
		// Otherwise it must be one of the column-count actions.
		for (int i=1;i<=5;++i) {
			if (act == qaColumns[i]) {
				os->uiColumns = i;
				updateLayout();
			}
		}
	}
}
/// Dispatch one incoming RDPSND (audio output virtual channel) chunk.
///
/// Two modes:
///  - If `wave_data_to_wait` is non-zero we are in the middle of receiving the
///    raw audio payload announced by a previous Wave Info / Wave2 PDU: the
///    chunk carries data, not a PDU header.
///  - Otherwise the chunk starts with an RDPSNDPDUHeader and is dispatched on
///    its msgType.
///
/// Side effects: feeds audio data to `impl_sound` (when present) and replies
/// to the server through `callback` (Wave Confirm, Client Audio Formats,
/// Quality Mode, Training Confirm PDUs).
///
/// Fix: removed the unconditional leftover debug traces
/// ("SERVER >> RDPEA: Wave PDU 1" .. "Wave PDU 8") — every other INFO log in
/// this handler is gated behind `RDPVerbose::rdpsnd`.
void ClientRDPSNDChannel::receive(InStream & chunk) {
    if (this->wave_data_to_wait) {
        // Continuation of a Wave PDU: consume raw audio bytes until the
        // announced body size has been fully received.
        this->wave_data_to_wait -= chunk.in_remain();
        if (this->wave_data_to_wait < 0) {
            this->wave_data_to_wait = 0;
        }

        if (this->last_PDU_is_WaveInfo) {
            // First data chunk after a Wave Info PDU starts with 4 padding
            // bytes that must be skipped ([MS-RDPEA] WaveInfo bPad).
            chunk.in_skip_bytes(4);
            this->last_PDU_is_WaveInfo = false;
        }

        if (this->impl_sound) {
            this->impl_sound->setData(chunk.get_current(), chunk.in_remain());
        }

        if (!(this->wave_data_to_wait)) {
            if (bool(this->verbose & RDPVerbose::rdpsnd)) {
                LOG(LOG_INFO, "SERVER >> RDPEA: Wave PDU");
            }

            if (this->impl_sound) {
                // Terminate the buffered stream with a single zero byte and
                // start playback (matches the original implementation).
                uint8_t data[] = {'\0'};
                this->impl_sound->setData(data, 1);
                this->impl_sound->play();
            }

            // Acknowledge with a Wave Confirm PDU echoing the timestamp and
            // block number of the last Wave Info PDU.
            StaticOutStream<16> out_stream;
            rdpsnd::RDPSNDPDUHeader header(rdpsnd::SNDC_WAVECONFIRM, 4);
            header.emit(out_stream);

            rdpsnd::WaveConfirmPDU wc(this->last_wTimeStamp, this->last_cBlockNo);
            wc.emit(out_stream);

            InStream chunk_to_send(out_stream.get_bytes());

            this->callback->send_to_mod_channel( channel_names::rdpsnd
                                               , chunk_to_send
                                               , out_stream.get_offset()
                                               , this->channel_flags
                                               );
            if (bool(this->verbose & RDPVerbose::rdpsnd)) {
                LOG(LOG_INFO, "CLIENT >> RDPEA: Wave Confirm PDU");
            }
        }

    } else {
        rdpsnd::RDPSNDPDUHeader header;
        header.receive(chunk);

        switch (header.msgType) {

            case rdpsnd::SNDC_FORMATS:
                {
                    if (bool(this->verbose & RDPVerbose::rdpsnd)) {
                        LOG(LOG_INFO, "SERVER >> RDPEA: Server Audio Formats and Version PDU");
                    }

                    rdpsnd::ServerAudioFormatsandVersionHeader safsvh;
                    safsvh.receive(chunk);

                    StaticOutStream<1024> out_stream;

                    rdpsnd::RDPSNDPDUHeader header_out(rdpsnd::SNDC_FORMATS, 38);
                    header_out.emit(out_stream);

                    rdpsnd::ClientAudioFormatsandVersionHeader cafvh( this->dwFlags
                                                                    , this->dwVolume
                                                                    , this->dwPitch
                                                                    , this->wDGramPort
                                                                    , this->wNumberOfFormats
                                                                    , this->wVersion
                                                                    );
                    cafvh.emit(out_stream);

                    // Advertise back only the PCM formats the client can
                    // render; remember the parameters of the last PCM format
                    // in the sound implementation (if any).
                    for (uint16_t i = 0; i < safsvh.wNumberOfFormats; i++) {
                        rdpsnd::AudioFormat format;
                        format.receive(chunk);

                        if (format.wFormatTag == rdpsnd::WAVE_FORMAT_PCM) {
                            format.emit(out_stream);
                            if (this->impl_sound) {
                                this->impl_sound->n_sample_per_sec = format.nSamplesPerSec;
                                this->impl_sound->bit_per_sample = format.wBitsPerSample;
                                this->impl_sound->n_channels = format.nChannels;
                                this->impl_sound->n_block_align = format.nBlockAlign;
                                this->impl_sound->bit_per_sec = format.nSamplesPerSec * (format.wBitsPerSample/8) * format.nChannels;
                            }
                        }
                    }

                    InStream chunk_to_send(out_stream.get_bytes());

                    this->callback->send_to_mod_channel( channel_names::rdpsnd
                                                       , chunk_to_send
                                                       , out_stream.get_offset()
                                                       , this->channel_flags
                                                       );
                    if (bool(this->verbose & RDPVerbose::rdpsnd)) {
                        LOG(LOG_INFO, "CLIENT >> RDPEA: Client Audio Formats and Version PDU");
                    }

                    // Immediately follow up with the Quality Mode PDU.
                    StaticOutStream<32> quality_stream;

                    rdpsnd::RDPSNDPDUHeader header_quality(rdpsnd::SNDC_QUALITYMODE, 8);
                    header_quality.emit(quality_stream);

                    rdpsnd::QualityModePDU qm(rdpsnd::HIGH_QUALITY);
                    qm.emit(quality_stream);

                    InStream chunk_to_send2(quality_stream.get_bytes());

                    this->callback->send_to_mod_channel( channel_names::rdpsnd
                                                       , chunk_to_send2
                                                       , quality_stream.get_offset()
                                                       , this->channel_flags
                                                       );
                    if (bool(this->verbose & RDPVerbose::rdpsnd)) {
                        LOG(LOG_INFO, "CLIENT >> RDPEA: Quality Mode PDU");
                    }
                }
                break;

            case rdpsnd::SNDC_TRAINING:
                {
                    if (bool(this->verbose & RDPVerbose::rdpsnd)) {
                        LOG(LOG_INFO, "SERVER >> RDPEA: Training PDU");
                    }

                    rdpsnd::TrainingPDU train;
                    train.receive(chunk);

                    // Echo the server's timestamp and pack size back.
                    StaticOutStream<32> out_stream;

                    rdpsnd::RDPSNDPDUHeader header_quality(rdpsnd::SNDC_TRAINING, 4);
                    header_quality.emit(out_stream);

                    rdpsnd::TrainingConfirmPDU train_conf(train.wTimeStamp, train.wPackSize);
                    train_conf.emit(out_stream);

                    InStream chunk_to_send(out_stream.get_bytes());

                    this->callback->send_to_mod_channel( channel_names::rdpsnd
                                                       , chunk_to_send
                                                       , out_stream.get_offset()
                                                       , this->channel_flags
                                                       );
                    if (bool(this->verbose & RDPVerbose::rdpsnd)) {
                        LOG(LOG_INFO, "CLIENT >> RDPEA: Training Confirm PDU");
                    }
                }
                break;

            case rdpsnd::SNDC_WAVE:
                {
                    if (bool(this->verbose & RDPVerbose::rdpsnd)) {
                        LOG(LOG_INFO, "SERVER >> RDPEA: Wave Info PDU");
                    }

                    // BodySize covers the whole wave; 8 bytes of it were the
                    // WaveInfo fields already present in this chunk.
                    this->wave_data_to_wait = header.BodySize - 8;
                    rdpsnd::WaveInfoPDU wi;
                    wi.receive(chunk);
                    this->last_cBlockNo = wi.cBlockNo;
                    this->last_wTimeStamp = wi.wTimeStamp;

                    if (this->impl_sound) {
                        this->impl_sound->init(header.BodySize - 12);
                        // The first 4 audio bytes travel inside the WaveInfo PDU.
                        this->impl_sound->setData(wi.Data, 4);
                    }
                    this->last_PDU_is_WaveInfo = true;
                }
                break;

            case rdpsnd::SNDC_CLOSE:
                if (bool(this->verbose & RDPVerbose::rdpsnd)) {
                    LOG(LOG_INFO, "SERVER >> RDPEA: Close PDU");
                }
                break;

            case rdpsnd::SNDC_SETVOLUME:
                if (bool(this->verbose & RDPVerbose::rdpsnd)) {
                    LOG(LOG_INFO, "SERVER >> RDPEA: SNDC_SETVOLUME PDU");
                }
                {
                    // Parsed but not acted upon.
                    rdpsnd::VolumePDU v;
                    v.receive(chunk);
                }
                break;

            case rdpsnd::SNDC_SETPITCH:
                if (bool(this->verbose & RDPVerbose::rdpsnd)) {
                    LOG(LOG_INFO, "SERVER >> RDPEA: SNDC_SETPITCH PDU");
                }
                {
                    // Parsed but not acted upon.
                    rdpsnd::PitchPDU p;
                    p.receive(chunk);
                }
                break;

            case rdpsnd::SNDC_QUALITYMODE:
                if (bool(this->verbose & RDPVerbose::rdpsnd)) {
                    LOG(LOG_INFO, "SERVER >> RDPEA: SNDC_QUALITYMODE PDU");
                }
                {
                    // Parsed but not acted upon.
                    rdpsnd::QualityModePDU qm;
                    qm.receive(chunk);
                }
                break;

            case rdpsnd::SNDC_WAVE2:
                if (bool(this->verbose & RDPVerbose::rdpsnd)) {
                    LOG(LOG_INFO, "SERVER >> RDPEA: SNDC_WAVE2 PDU");
                }
                {
                    // Wave2 carries its audio data in the same chunk after a
                    // 12-byte header.
                    this->wave_data_to_wait = header.BodySize - 12;
                    rdpsnd::Wave2PDU w2;
                    w2.receive(chunk);
                    if (this->impl_sound) {
                        this->impl_sound->init(header.BodySize - 12);
                        this->impl_sound->setData(chunk.get_current(), chunk.in_remain());
                    }

                    this->last_PDU_is_WaveInfo = true;
                }
                break;

            default:
                LOG(LOG_WARNING, "SERVER >> RDPEA: Unknown message type: %x", header.msgType);
                break;
        }
    }
}
int VHTI_check_dictionary_secrets (TrusteeRevealedDictionarySecrets trustee_dict_secrets, SignedTrusteeDictionaryCommitments trustee_dict_comm, GeneralPurposePublicKey tsig_pubkey, BlankBallot blank_ballot, CheckResults *check_dictionary_secrets_result) { int result = 0; // Assume success until told otherwise int check_res = 0; auto_BN pm(NULL); // The Election Modulus auto_BN qm(NULL); // The Election Subgroup Modulus auto_BN gen(NULL); // The Election Subgroup Generator auto_BN ePublicKey(NULL); // The Election Public Key // An OpenSSL structure that holds BIGNUM temporary variables used by // library functions auto_BN_CTX ctx(BN_CTX_new()); *check_dictionary_secrets_result = NULL; try { VH_nonzero (ctx, BN_CTX_NEW); VH_zero(::VHTI_validate(TRUSTEE_REVEALED_DICTIONARY_SECRETS, trustee_dict_secrets), VALIDATION_FALIURE); VH_zero(::VHTI_validate(SIGNED_TRUSTEE_DICTIONARY_COMMITMENTS, trustee_dict_comm), VALIDATION_FAILURE); VH_zero(::VHTI_validate(BLANK_BALLOT, blank_ballot), VALIDATION_FAILURE); // An empty xml tree to hold CheckResults VHUtil::xml_tree xml_res("<" CHECK_RESULTS "/>"); VHUtil::xml_node root_res = xml_res.root(); // An xml tree from the BlankBallot VHUtil::xml_tree_group_check xml_bb(blank_ballot, pm, qm, gen, ePublicKey); VHUtil::xml_node root_bb = xml_bb.root(); // An xml tree from the TrusteeDictionaryCommitments VHUtil::xml_tree_group_check xml_tdcs(trustee_dict_comm, TRUSTEE_DICTIONARY_COMMITMENTS, tsig_pubkey, pm, qm, gen, ePublicKey); VHUtil::xml_node root_tdcs = xml_tdcs.root(); // Make tree out of TrusteeRevealedDictionarySecrets VHUtil::xml_tree_group_check xml_tds(trustee_dict_secrets, pm, qm, gen, ePublicKey); VHUtil::xml_node root_tds = xml_tds.root(); // Check that the secrets and the commitments are from the same Trustee auto_BN aep_from_secrets = xml2BN(root_tds->e(AUTHORITY)->e(AUTHORITY_EVALUATION_POINT)); auto_BN aep_from_commitments = xml2BN(root_tdcs->e(AUTHORITY)->e(AUTHORITY_EVALUATION_POINT)); if (BN_cmp(aep_from_secrets, 
aep_from_commitments)) { // Non zero answer means they are not equal check_res = 1; root_res->add_characters("Secrets and Commitments not from the same Trustee"); } // Check that g^secret = commitment for each question for (int i=0; !check_res && i<root_tds->element_count(); i++) { if ((root_tds->e(i)->name() != BSN_REVEALED_DICTIONARY_SECRETS) || check_res) { continue; } auto_BN current_bsn = xml2BN(root_tds->e(i)->e(BALLOT_SEQUENCE_NUMBER)); // Find the corresponding BSNDictionaryCommitments in xml_tdcs for (int j=0; j<root_tdcs->element_count(); j++) { if ((root_tdcs->e(j)->name() != BSN_DICTIONARY_COMMITMENTS) || check_res) { continue; } auto_BN test_bsn = xml2BN(root_tdcs->e(j)->e(BALLOT_SEQUENCE_NUMBER)); if (BN_cmp(current_bsn, test_bsn)) { continue; } // These are the commitments we want. Just step through in order. for (int ii=0; ii<root_tdcs->e(j)->element_count(); ii++) { if ((root_tdcs->e(j)->e(ii)->name() != DICTIONARY_COMMITMENT) || check_res) { continue; } auto_BN comm_value = xml2BN(root_tdcs->e(j)->e(ii)); auto_BN secret_value = xml2BN(root_tds->e(i)->e(ii)); auto_BN g_exp; VHInternal::fixed_mod_exp(g_exp, gen, secret_value, pm, ctx); check_res = BN_cmp(g_exp, comm_value); if (0 == check_res) { continue; } root_res->add_characters("Revealed Dictionary Secrets Check Failure"); } } } if (check_res == 0) { root_res->add_characters("Revealed Dictionary Secrets Check Success"); } std::ostringstream oss; oss << xml_res; *check_dictionary_secrets_result = VHTI_dup(oss.str().c_str()); } catch (const VHUtil::Exception & e) { VHTI_set_last_error(e); result = e.getResultNo(); } return result; }