Example #1
void OccView::mouseMoveEvent(QMouseEvent*)
{
    if (m_view->IfWindow()) {
        QPoint curPos = mapFromGlobal(QCursor::pos());
        if (curPos == m_prevPos) return;

        switch (m_mode) {
        case Selection:
            m_document->context()->MoveTo(curPos.x(), curPos.y(), m_view);
            if (m_document->context()->HasDetected()) {
                Handle(AIS_InteractiveObject) ais = m_document->context()->DetectedInteractive();
                Handle(TPrsStd_AISPresentation) prs = Handle(TPrsStd_AISPresentation)::DownCast(ais->GetOwner());
                if (m_detectedLabel != prs->Label())
                    unhilight(m_detectedLabel);
                m_detectedLabel = prs->Label();
            } else {
                unhilight(m_detectedLabel);
                m_detectedLabel.Nullify();
            }
            emit detected(m_detectedLabel);

            break;
        case Rotation:
            m_view->Rotation(curPos.x(), curPos.y());
            break;
        case Panning:
            m_view->Pan(curPos.x() - m_prevPos.x(), m_prevPos.y() - curPos.y());
            break;
        }

        m_prevPos = curPos;
    }
}
Example #2
void check_last() {
  if (login == DET_IGNORE)
    return;

  if (conf.username) {
    char *out = NULL, buf[50] = "";

    simple_snprintf(buf, sizeof(buf), STR("last -10 %s"), conf.username);
    if (shell_exec(buf, NULL, &out, NULL)) {
      if (out) {
        char *p = NULL;

        p = strchr(out, '\n');
        if (p)
          *p = 0;
        if (strlen(out) > 10) {
          if (last_buf[0]) {
            if (strncmp(last_buf, out, sizeof(last_buf))) {
              char *work = NULL;
              size_t siz = strlen(out) + 7 + 2 + 1;

              work = (char *) my_calloc(1, siz);

              simple_snprintf(work, siz, STR("Login: %s"), out);
              detected(DETECT_LOGIN, work);
              free(work);
            }
          }
          strlcpy(last_buf, out, sizeof(last_buf));
        }
        free(out);
      }
    }
  }
}
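last_buf is not declared in this snippet; a file-scope buffer along these lines is assumed so the strlcpy/strncmp calls have something to compare against (the size is illustrative):

static char last_buf[128] = "";   /* previous "last" output line */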
/**
 * Process input data stream
 */
void DoubleClickFilter::process()
{
    // Store latest input (shift the buffer up by one sample)
    for( int k=0; k<mInputStream.rows-1; k++ ) {
        mInputStream.at< float >(k, 0) = mInputStream.at< float >(k+1, 0);
    }
    mInputStream.at< float >( mInputStream.rows-1, 0) = Input;

    // Find maximum cross correlation with a pattern
    double max = 0.0;
    for( int i=0; i<mPatterns.size(); i++ )
    {
        double x = xcorr( mPatterns.at(i), mInputStream );

        max = qMax( max, x );
    }
    XCorr = max;

    // If a pattern matches the input stream, the cross correlation exceeds
    // Threshold; in that case emit the detected() signal.
    if( max >= Threshold )
    {
        emit detected();
        Detected = true;
    }
    else
    {
        Detected = false;
    }
}
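xcorr() is not part of the snippet; one plausible shape for it is a normalized cross-correlation over two single-column CV_32F matrices. A minimal hypothetical helper, assuming both arguments have the same number of rows:

#include <cmath>
#include <opencv2/core/core.hpp>

// Hypothetical helper: normalized cross-correlation of two CV_32F column vectors.
// Returns a value in [-1, 1]; 1 means a perfect (scaled) match.
static double xcorr(const cv::Mat &pattern, const cv::Mat &input)
{
    CV_Assert(pattern.cols == 1 && input.cols == 1 && pattern.rows == input.rows);
    double num = 0.0, pSq = 0.0, iSq = 0.0;
    for (int k = 0; k < pattern.rows; k++) {
        const double p = pattern.at<float>(k, 0);
        const double s = input.at<float>(k, 0);
        num += p * s;
        pSq += p * p;
        iSq += s * s;
    }
    const double denom = std::sqrt(pSq * iSq);
    return denom > 0.0 ? num / denom : 0.0;
}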
int main(int argc, char *argv[])
{
    QApplication a(argc, argv);

    MyWindow window;
    window.showMaximized();

    QThread thread;
    Detector *detector = new Detector();
    detector->moveToThread(&thread);
    detector->connect(&thread,SIGNAL(started()),SLOT(work()));


    QThread thread2;
    MyBluetooth *bt = new MyBluetooth();
    bt->moveToThread(&thread2);
    bt->connect(&thread2,SIGNAL(started()),SLOT(scanList()));
    bt->connect(&window,SIGNAL(updateScan()),SLOT(scanList()));
    bt->connect(detector,SIGNAL(detected()),SLOT(checkAlert()));

    //typedef std::list<Phone> listPhone;
    qRegisterMetaType<std::list<Phone*> >("std::list<Phone*>");
    window.connect(bt,SIGNAL(finishedScan(std::list<Phone*>)),SLOT(setList(std::list<Phone*>)));
    window.connect(bt,SIGNAL(finishedCheck(std::list<Phone*>)),SLOT(checkAlert(std::list<Phone*>)));

    thread.start();
    thread2.start();

    return a.exec();
}
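Detector and MyBluetooth are not shown; the only thing main() relies on is a worker QObject with a work() slot and a detected() signal. A hypothetical sketch of that shape:

// Hypothetical worker; the real Detector presumably polls some hardware here.
class Detector : public QObject
{
    Q_OBJECT
public slots:
    void work()
    {
        // long-running polling loop, or a QTimer owned by this thread;
        // emit detected() whenever something is found
        emit detected();
    }
signals:
    void detected();
};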
      void TestDetector::detectOneObject()
      {
        InternalMessage("Model","Model::TestDetector::detectOneObject entering") ;
        /*!
          We create a ship with a detector and a second object to detect.
        */
        std::auto_ptr<Kernel::Model> model(new Kernel::Model("TestDetector::detectOneObject")) ;
        model->init() ;

        Kernel::Object* system = model->createObject() ;

        Kernel::Object* ship = system->createObject() ;
        ship->addTrait(new Positioned()) ;
        ship->addTrait(new Oriented()) ;
        ship->addTrait(new Mobile()) ;
        ship->addTrait(new Solid(Mesh("test_ship.mesh"))) ;
        ship->addTrait(new Massive(Mass::Kilogram(1000))) ;
        ship->addTrait(new Computer()) ;
        ship->addTrait(new Detector()) ;
        Detector::connect(ship,ship) ;

        Kernel::Object* ship2 = system->createObject() ;
        ship2->addTrait(new Positioned(Position::Meter(0,0,500))) ;
        ship2->addTrait(new Massive(Mass::Kilogram(1000))) ;
        ship2->addTrait(new Oriented()) ;
        ship2->addTrait(new Mobile()) ;
        ship2->addTrait(new Solid(Mesh("test_ship.mesh"))) ;

        std::set<Kernel::Object*> detected(ship->getTrait<Computer>()->getDetectedObjects()) ;

        CPPUNIT_ASSERT(!detected.empty()) ;
        CPPUNIT_ASSERT(detected.find(ship2) != detected.end()) ;

        InternalMessage("Model","Model::TestDetector::detectOneObject leaving") ;
      }
void UdpDetectorDialog::startDetection() {
    list->clear();
    detect_button->setEnabled(false);

    detector = new UdpDetector;
    connect(detector, SIGNAL(detected(QString, QString)), this, SLOT(addServerAddress(QString, QString)));
    QTimer::singleShot(2000, this, SLOT(stopDetection()));

    detector->detect();
}
void UdpDetector::onReadReady() {
    while (socket->hasPendingDatagrams()) {
        QHostAddress from;
        QByteArray data;
        data.resize(socket->pendingDatagramSize());
        socket->readDatagram(data.data(), data.size(), &from);

        QString server_name = QString::fromUtf8(data);
        emit detected(server_name, from.toString());
    }
}
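detect() itself is not shown; a plausible counterpart to onReadReady() broadcasts a probe datagram and lets servers answer with their name (the port number and probe payload here are assumptions, not from the source):

// Hypothetical probe broadcast; servers reply with a datagram containing their name.
void UdpDetector::detect() {
    QByteArray probe("DISCOVER");
    socket->writeDatagram(probe, QHostAddress::Broadcast, 45454);
}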
bool QTest2Recognizer::start()
{
    Q_EMIT test2();

    Q_EMIT detected("test2");

    Q_EMIT test3(true);
    active = true;

    return true;
}
Example #9
void Objectness::evaluatePerImgRecall(const vector<vector<Vec4i> > &boxesTests, CStr &saveName, const int NUM_WIN)
{
    vecD recalls(NUM_WIN);
    vecD avgScore(NUM_WIN);
    const int TEST_NUM = _voc.testSet.size();
    for (int i = 0; i < TEST_NUM; i++) {
        const vector<Vec4i> &boxesGT = _voc.gtTestBoxes[i];
        const vector<Vec4i> &boxes = boxesTests[i];
        const int gtNumCrnt = boxesGT.size();
        vecI detected(gtNumCrnt);
        vecD score(gtNumCrnt);
        double sumDetected = 0, abo = 0;
        for (int j = 0; j < NUM_WIN; j++) {
            if (j >= (int)boxes.size()) {
                recalls[j] += sumDetected/gtNumCrnt;
                avgScore[j] += abo/gtNumCrnt;
                continue;
            }

            for (int k = 0; k < gtNumCrnt; k++)	{
                double s = DataSetVOC::interUnio(boxes[j], boxesGT[k]);
                score[k] = max(score[k], s);
                detected[k] = score[k] >= 0.5 ? 1 : 0;
            }
            sumDetected = 0, abo = 0;
            for (int k = 0; k < gtNumCrnt; k++)
                sumDetected += detected[k], abo += score[k];
            recalls[j] += sumDetected/gtNumCrnt;
            avgScore[j] += abo/gtNumCrnt;
        }
    }

    for (int i = 0; i < NUM_WIN; i++) {
        recalls[i] /=  TEST_NUM;
        avgScore[i] /= TEST_NUM;
    }

    int idx[8] = {1, 10, 100, 1000, 2000, 3000, 4000, 5000};
    for (int i = 0; i < 8; i++) {
        if (idx[i] > NUM_WIN)
            continue;
        printf("%d:%.3g,%.3g\t", idx[i], recalls[idx[i] - 1], avgScore[idx[i] - 1]);
    }
    printf("\n");

    FILE* f = fopen(_S(_voc.resDir + saveName), "w");
    CV_Assert(f != NULL);
    fprintf(f, "figure(1);\n\n");
    PrintVector(f, recalls, "DR");
    PrintVector(f, avgScore, "MABO");
    fprintf(f, "semilogx(1:%d, DR(1:%d));\nhold on;\nsemilogx(1:%d, DR(1:%d));\naxis([1, 5000, 0, 1]);\nhold off;\n", NUM_WIN, NUM_WIN, NUM_WIN, NUM_WIN);
    fclose(f);
}
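DataSetVOC::interUnio() is used as the overlap measure above but not shown; a sketch of intersection-over-union for boxes stored as Vec4i(x1, y1, x2, y2), which is what the calling code assumes, might look like this:

#include <algorithm>
#include <opencv2/core/core.hpp>

// Hedged sketch: IoU of two boxes given as inclusive pixel coordinates (x1, y1, x2, y2).
static double interUnio(const cv::Vec4i &a, const cv::Vec4i &b)
{
    const int w = std::min(a[2], b[2]) - std::max(a[0], b[0]) + 1;
    const int h = std::min(a[3], b[3]) - std::max(a[1], b[1]) + 1;
    if (w <= 0 || h <= 0)
        return 0.0;
    const double inter = double(w) * h;
    const double areaA = double(a[2] - a[0] + 1) * (a[3] - a[1] + 1);
    const double areaB = double(b[2] - b[0] + 1) * (b[3] - b[1] + 1);
    return inter / (areaA + areaB - inter);
}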
double Illustrate::evaluatePerImgRecall(const vector<vector<Vec4i>> &boxesTests, const int NUM_WIN)
{
	cout << __FUNCTION__ << " called:(" << __LINE__ << ")" << endl;

	vecD recalls(NUM_WIN), avgScore(NUM_WIN);

	const int TEST_NUM = _voc.testSet.size();

	for (int i = 0; i < TEST_NUM; i++) {
		const vector<Vec4i> &boxesGT = _voc.gtTestBoxes[i];
		const vector<Vec4i> &boxes = boxesTests[i];
		const int gtNumCrnt = boxesGT.size();
		vecI detected(gtNumCrnt);
		vecD score(gtNumCrnt);
		double sumDetected = 0, abo = 0;
		for (int j = 0; j < NUM_WIN; j++) {
			if (j < (int)boxes.size()) {
				for (int k = 0; k < gtNumCrnt; k++) {
					double s = Common::interUnio(boxes[j], boxesGT[k]);
					score[k] = max(score[k], s);
					detected[k] = score[k] >= 0.5 ? 1 : 0;
				}
				sumDetected = 0, abo = 0;
				for (int k = 0; k < gtNumCrnt; k++) {
					sumDetected += detected[k];
					abo += score[k];
				}
			}
			recalls[j] += sumDetected / gtNumCrnt / TEST_NUM;
			avgScore[j] += abo / gtNumCrnt / TEST_NUM;
		}
	}

	__PrintVector(recalls, _resDir + "RECALLS.csv");
	__PrintVector(avgScore, _resDir + "aveScore.csv");

	cout << "---RESULT----------------------------------------------------------------------" << endl;
	int idx[18] = { 1, 10, 100, 1000, 2000, 3000, 4000, 5000, 6000, 7000, 8000, 9000, 10000, 15000, 20000, 30000, 40000, NUM_WIN };
	for (int i = 0; i < 18; i++) {
		if (idx[i] > NUM_WIN)
			continue;
		printf("PROPOSAL:%5d   ", idx[i]);
		printf("RECALLS :%6.5f   ", recalls[idx[i] - 1]);
		printf("AVESCORE:%6.5f\n", avgScore[idx[i] - 1]);
	}
	cout << "-------------------------------------------------------------------------------" << endl;
	return recalls[NUM_WIN - 1];
}
Example #11
void MyObject::create()
{
//! [Receiving sensor gesture signals]

// Create a QSensorGestureManager
    QSensorGestureManager gestureManager;

// Get a list of known recognizers
    QStringList recognizersList = gestureManager.gestureIds();

// Create a QSensorGesture object for each of those gesture recognizers
    QSensorGesture *gesture = new QSensorGesture( gestureManager.gestureIds(), this);
// Connect the known signals up.
    connect(gesture, SIGNAL(detected(QString)), this, SLOT(gestureDetected(QString)));
//! [Receiving sensor gesture signals]
}
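The gestureDetected(QString) slot on MyObject is not part of the snippet; a minimal hypothetical sketch that only logs the gesture id would be:

#include <QDebug>

void MyObject::gestureDetected(const QString &gestureId)
{
    qDebug() << "sensor gesture detected:" << gestureId;
}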
Example #12
void check_promisc()
{
#ifdef SIOCGIFCONF
  if (promisc == DET_IGNORE)
    return;

  int sock = socket(AF_INET, SOCK_DGRAM, 0);

  if (sock < 0)
    return;

  struct ifconf ifcnf;
  char buf[1024] = "";

  ifcnf.ifc_len = sizeof(buf);
  ifcnf.ifc_buf = buf;
  if (ioctl(sock, SIOCGIFCONF, &ifcnf) < 0) {
    close(sock);
    return;
  }

  char *reqp = NULL, *end_req = NULL;

  reqp = buf;				/* pointer to start of array */
  end_req = buf + ifcnf.ifc_len;	/* pointer to end of array */
  while (reqp < end_req) { 
    struct ifreq ifreq, *ifr = NULL;

    ifr = (struct ifreq *) reqp;	/* start examining interface */
    ifreq = *ifr;
    if (!ioctl(sock, SIOCGIFFLAGS, &ifreq)) {	/* we can read this interface! */
      /* sdprintf("Examining interface: %s", ifr->ifr_name); */
      if (ifreq.ifr_flags & IFF_PROMISC) {
        char which[101] = "";

        simple_snprintf(which, sizeof(which), STR("Detected promiscuous mode on interface: %s"), ifr->ifr_name);
        ioctl(sock, SIOCSIFFLAGS, &ifreq);	/* set flags */
        detected(DETECT_PROMISC, which);
        break;
      }
    }
    /* move pointer to next array element (next interface) */
    reqp += sizeof(ifr->ifr_name) + sizeof(ifr->ifr_addr);
  }
  close(sock);
#endif /* SIOCGIFCONF */
}
Example #13
int main( void )
{
    WDTCTL = WDTPW + WDTHOLD; // Disable watchdog timer
      
    P1DIR = 0b01000111; // IN:UltraEcho(P1.6) Colour1(P1.1),Colour2(P1.2),Colour3(P1.3) OUT:UltraTrig(P1.7)
    P2DIR = 0b00110110; // OUT:MotorOppCap(P2.1 and P2.2), MotorCap(P2.3 and P2.4)

    // Configure the Basic Clock Module
    DCOCTL  = CALDCO_1MHZ;
    BCSCTL1 = CALBC1_1MHZ;


    // Main loop repeats forever
    while(1)
    {
		if (detected() == 1)  // If ultrasonic detects within 77cm 
        {
            led(1); // Turn on the LED
            forward();  // Drive both motors forward
        }

        else    // If the ultrasonic sensor doesn't detect anything in range
        {
            led(0); // Turn off the LED
            turn_left(); // Begin Spinning to the left. 
        }

        if (colourFor() == 1)
        {
            backward(); // Drive both motors backward.
        }

        if (colourBkL() == 1)
        {
            turn_right(); // Spin the robot to the right.
        }

        if (colourBkR() == 1)
        {
            turn_left(); // Spin the robot to the left.
        }

        __delay_cycles(50000); // Delay for 50ms to allow echo pulse to die down
    }
} 
void FaceRecognitionFilterOpenCVImpl::process (cv::Mat &mat)
{
  if (this->running && this->activeAlgorithms.size() > 0) {
    std::time_t frameTime = std::time(nullptr);
    int satisfiedThreshold = 0;

    // empty previous results
    results.clear();

    this->p_face_training->get_face_recognition().predict(
            this->activeAlgorithms, mat, this->labels, this->confidences,
            this->targetWidth, this->targetHeight,
            this->minimumWidthFace, this->minimumHeightFace);

    for (size_t i = 0; i < this->activeAlgorithms.size(); i++) {
      OpenCVFaceRecognizer recognizer = this->activeAlgorithms[i];
      const string& algorithm = FaceRecognition::OpenCVFaceRecognizerToString.at(recognizer);
      int label = this->labels[i];
      double confidence = this->confidences[i];
      double threshold = this->confidenceThresholdsMap[recognizer];

      if (confidence >= 0.0 && confidence < threshold) {
        satisfiedThreshold++;
      }
      std::string labelString;
      std::ostringstream convert;
      convert << label;
      labelString = convert.str();

      auto confidencePair = make_shared<AlgorithmConfidencePair>(AlgorithmConfidencePair(algorithm, confidence));
      auto predictionResult = make_shared<AlgorithmPredictionResult>(AlgorithmPredictionResult(labelString, confidencePair));
      results.push_back(predictionResult);
    }

    if (satisfiedThreshold > 0) {
      FaceDetected detected(getSharedPtr(), FaceDetected::getName(), results, (int) frameTime);
      // stop detecting once recognized
      this->running = false;
      signalFaceDetected(detected);
    }
  }
}
Example #15
void QShakeSensorGestureRecognizer::accelChanged()
{
    qreal x = accel->reading()->x();
    qreal y = accel->reading()->y();
    qreal z = accel->reading()->z();

    currentData.x = x;
    currentData.y = y;
    currentData.z = z;

    if (qAbs(prevData.x - currentData.x)  < 1
            && qAbs(prevData.y - currentData.y)  < 1
            && qAbs(prevData.z - currentData.z)  < 1) {
        prevData.x = currentData.x;
        prevData.y = currentData.y;
        prevData.z = currentData.z;
        return;
    }

    bool wasShake = checkForShake(prevData, currentData, THRESHOLD);
    if (!shaking && wasShake &&
        shakeCount >= NUMBER_SHAKES) {
        shaking = true;
        shakeCount = 0;
        Q_EMIT shake();
        Q_EMIT detected("shake");

    } else if (wasShake) {

        shakeCount++;
        if (shakeCount > NUMBER_SHAKES) {
            timer->start();
        }
    }

    prevData.x = currentData.x;
    prevData.y = currentData.y;
    prevData.z = currentData.z;
}
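checkForShake() and the small x/y/z struct it takes are not shown; given how prevData and currentData are used above, a plausible sketch treats any single-axis jump above the threshold as a shake (the AccelData type name is an assumption):

// Hypothetical helper: a shake is any axis changing by more than the threshold
// between two consecutive accelerometer readings.
bool QShakeSensorGestureRecognizer::checkForShake(AccelData prev, AccelData cur, qreal threshold)
{
    return qAbs(prev.x - cur.x) > threshold
        || qAbs(prev.y - cur.y) > threshold
        || qAbs(prev.z - cur.z) > threshold;
}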
Example #16
void QrScanner::process(const QString& data) {
	if(m_bClosing)
		return;
	QUrl url(data);
	logToConsole(QString("Processing: %1").arg(data));
	logToConsole(QString("Scheme: %1").arg(url.scheme()));
	if(!QString("otpauth").compare(url.scheme(), Qt::CaseInsensitive)){
		bool hotp = false;
		int digits = 6;
		QString path = QUrl::fromPercentEncoding(url.path().mid(1).toAscii());
		QString secret = url.queryItemValue("secret");
		if(QString("totp").compare(url.host(), Qt::CaseInsensitive)){
			hotp = true;
		}
		if(url.hasQueryItem("digits")){
			digits = url.queryItemValue("digits").toInt();
		}
		Q_EMIT detected(path, secret, digits, hotp);
		logToConsole(data);
		logToConsole(QString("Path: %1, Secret: %2, Digit: %3, HOTP: %4").arg(path).arg(secret).arg(digits).arg(hotp));
	}
}
Example #17
int main()
{
	// read the image
	cv::Mat image= cv::imread(IMAGE_FOLDER "/boldt.jpg");
	if (!image.data)
		return 0; 

	// show original image
	cv::namedWindow("Original image");
	cv::imshow("Original image",image);

	// convert into HSV space
	cv::Mat hsv;
	cv::cvtColor(image, hsv, CV_BGR2HSV);

	// split the 3 channels into 3 images
	std::vector<cv::Mat> channels;
	cv::split(hsv,channels);
	// channels[0] is the Hue
	// channels[1] is the Saturation
	// channels[2] is the Value

	// display value
	cv::namedWindow("Value");
	cv::imshow("Value",channels[2]);

	// display saturation
	cv::namedWindow("Saturation");
	cv::imshow("Saturation",channels[1]);

	// display hue
	cv::namedWindow("Hue");
	cv::imshow("Hue",channels[0]);

	// image with fixed value
	cv::Mat newImage;
	cv::Mat tmp(channels[2].clone());
	// Value channel will be 255 for all pixels
	channels[2]= 255;  
	// merge back the channels
	cv::merge(channels,hsv);
	// re-convert to BGR
	cv::cvtColor(hsv,newImage,CV_HSV2BGR);

	cv::namedWindow("Fixed Value Image");
	cv::imshow("Fixed Value Image",newImage);

	// image with fixed saturation
	channels[1]= 255;
	channels[2]= tmp;
	cv::merge(channels,hsv);
	cv::cvtColor(hsv,newImage,CV_HSV2BGR);

	cv::namedWindow("Fixed saturation");
	cv::imshow("Fixed saturation",newImage);

	// image with fixed value and fixed saturation
	channels[1]= 255;
	channels[2]= 255;
	cv::merge(channels,hsv);
	cv::cvtColor(hsv,newImage,CV_HSV2BGR);

	cv::namedWindow("Fixed saturation/value");
	cv::imshow("Fixed saturation/value",newImage);

	// Testing skin detection

	// read the image
	image= cv::imread(IMAGE_FOLDER "/girl.jpg");
	if (!image.data)
		return 0; 

	// show original image
	cv::namedWindow("Original image");
	cv::imshow("Original image",image);

	// detect skin tone
	cv::Mat mask;
	detectHScolor(image, 
		160, 10, // hue from 320 degrees to 20 degrees 
		25, 166, // saturation from ~0.1 to 0.65
		mask);

	// show masked image
	cv::Mat detected(image.size(), CV_8UC3, cv::Scalar(0, 0, 0));
	image.copyTo(detected, mask);
	cv::imshow("Detection result",detected);

	// A test comparing luminance and brightness

	// create linear intensity image
	cv::Mat linear(100,256,CV_8U);
	for (int i=0; i<256; i++) {

		linear.col(i)= i;
	}

	// create a Lab image
	linear.copyTo(channels[0]);
	cv::Mat constante(100,256,CV_8U,cv::Scalar(128));
	constante.copyTo(channels[1]);
	constante.copyTo(channels[2]);
	cv::merge(channels,image);

	// convert back to BGR
	cv::Mat brightness;
	cv::cvtColor(image,brightness, CV_Lab2BGR);
	cv::split(brightness, channels);

	// create combined image
	cv::Mat combined(200,256, CV_8U);
	cv::Mat half1(combined,cv::Rect(0,0,256,100));
	linear.copyTo(half1);
	cv::Mat half2(combined,cv::Rect(0,100,256,100));
	channels[0].copyTo(half2);

	cv::namedWindow("Luminance vs Brightness");
	cv::imshow("Luminance vs Brightness",combined);

	cv::waitKey();
}
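detectHScolor() is called above but not shown. A hedged sketch consistent with how it is called (hue range that may wrap around the 0/180 boundary, saturation as a simple interval), assuming the same OpenCV 2.x headers as the rest of the example; the real implementation may differ:

#include <vector>
#include <opencv2/imgproc/imgproc.hpp>

// Hedged sketch: build a mask of pixels whose hue and saturation fall in the given ranges.
void detectHScolor(const cv::Mat &image, double minHue, double maxHue,
                   double minSat, double maxSat, cv::Mat &mask)
{
	// convert into HSV space
	cv::Mat hsv;
	cv::cvtColor(image, hsv, CV_BGR2HSV);

	// split the channels (hue, saturation, value)
	std::vector<cv::Mat> channels;
	cv::split(hsv, channels);

	// hue mask, handling the wrap-around case (minHue > maxHue)
	cv::Mat mask1, mask2, hueMask;
	cv::threshold(channels[0], mask1, maxHue, 255, cv::THRESH_BINARY_INV); // hue <= maxHue
	cv::threshold(channels[0], mask2, minHue, 255, cv::THRESH_BINARY);     // hue >  minHue
	if (minHue < maxHue)
		hueMask = mask1 & mask2;
	else
		hueMask = mask1 | mask2;

	// saturation mask
	cv::Mat satMask;
	cv::inRange(channels[1], cv::Scalar(minSat), cv::Scalar(maxSat), satMask);

	mask = hueMask & satMask;
}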
Example #18
static void got_cont(int z)
{
  detected(DETECT_HIJACK, "POSSIBLE HIJACK DETECTED (!! MAY BE BOX REBOOT !!)");
}
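got_cont() is presumably installed as the SIGCONT handler somewhere during startup; the wiring is not shown in the source, but it would amount to:

signal(SIGCONT, got_cont);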
void tst_Sensors2QMLAPI::testGesture()
{
    QTemplateGesturePlugin* plugin = new QTemplateGesturePlugin();
    QList <QSensorGestureRecognizer *> recognizers = plugin->createRecognizers();
    QSensorGestureManager manager;

    QmlSensorGesture* gs = new QmlSensorGesture(this);
    gs->componentComplete();
    QSignalSpy spy_availableGesturesChanged(gs, SIGNAL(availableGesturesChanged()));
    QSignalSpy spy_detected(gs, SIGNAL(detected(QString)));
    QSignalSpy spy_gesturesChanged(gs, SIGNAL(gesturesChanged()));
    QSignalSpy spy_validGesturesChanged(gs, SIGNAL(validGesturesChanged()));
    QSignalSpy spy_invalidGesturesChanged(gs, SIGNAL(invalidGesturesChanged()));
    QSignalSpy spy_enabledChanged(gs, SIGNAL(enabledChanged()));

    //This flag is needed if you run this unit test with an already installed template plugin
    bool registered = false;
    for (int i = 0; i < recognizers.count(); i++){
        registered = manager.registerSensorGestureRecognizer(recognizers[i]);
    }
    if (registered) {
        QCOMPARE(spy_availableGesturesChanged.count(), 2);
    }

    //check creation of an unknown plugin
    QCOMPARE(spy_invalidGesturesChanged.count(), 0);
    QCOMPARE(spy_gesturesChanged.count(), 0);
    gs->setGestures(QStringList() << "lollipop");
    QCOMPARE(spy_gesturesChanged.count(), 1);
    QCOMPARE(spy_invalidGesturesChanged.count(), 1);

    //check creation of a known plugin
    QCOMPARE(spy_validGesturesChanged.count(), 0);
    QCOMPARE(spy_gesturesChanged.count(), 1);
    spy_invalidGesturesChanged.clear();
    spy_validGesturesChanged.clear();
    gs->setGestures(QStringList() << "QtSensors.template");
    QCOMPARE(spy_gesturesChanged.count(), 2);
    QCOMPARE(spy_invalidGesturesChanged.count(), 1);
    QCOMPARE(spy_validGesturesChanged.count(), 1);

    //enable "QtSensors.template"
    QCOMPARE(spy_enabledChanged.count(), 0);
    QCOMPARE(spy_detected.count(), 0);
    gs->setEnabled(true);
    QCOMPARE(spy_enabledChanged.count(), 1);
    QCOMPARE(spy_detected.count(), 1);

    //setting gestures while the sensor is running should not emit gesturesChanged
    spy_gesturesChanged.clear();
    gs->setGestures(QStringList() << "QtSensors.template2");
    QCOMPARE(spy_gesturesChanged.count(), 0);

    gs->setEnabled(false);

    QmlSensorGesture* gs1 = new QmlSensorGesture(this);
    QSignalSpy spy1_detected(gs1, SIGNAL(detected(QString)));
    QSignalSpy spy1_gesturesChanged(gs1, SIGNAL(gesturesChanged()));
    QSignalSpy spy1_validGesturesChanged(gs1, SIGNAL(validGesturesChanged()));
    QSignalSpy spy1_invalidGesturesChanged(gs1, SIGNAL(invalidGesturesChanged()));
    QSignalSpy spy1_enabledChanged(gs1, SIGNAL(enabledChanged()));
    gs1->componentComplete();

    //setting enabled = true without gestures should still emit enabledChanged
    gs1->setEnabled(true);
    QCOMPARE(spy1_enabledChanged.count(), 1);
    gs1->setEnabled(false);
    spy1_enabledChanged.clear();

    //reading gestures: check that we get back empty string lists
    QStringList gestures = gs1->gestures();
    QCOMPARE(gestures.count(), 0);
    QStringList validgestures = gs1->validGestures();
    QCOMPARE(validgestures.count(), 0);
    QStringList invalidgestures = gs1->invalidGestures();
    QCOMPARE(invalidgestures.count(), 0);

    //check types "QtSensors.template" "QtSensors.template1" "lollipop"
    //expect 2 valid, 1 not available
    gestures << "QtSensors.template" << "QtSensors.template1" << "lollipop";
    gs1->setGestures(gestures);
    gestures = gs1->gestures();
    QCOMPARE(gestures.count(), 3);
    QCOMPARE(spy1_validGesturesChanged.count(), 1);
    QCOMPARE(spy1_invalidGesturesChanged.count(), 1);
    QCOMPARE(spy1_gesturesChanged.count(), 1);
    //set same gesture again should not emit gesture changed
    gs1->setGestures(gestures);
    QCOMPARE(spy1_gesturesChanged.count(), 1);

    spy1_gesturesChanged.clear();
    gestures.clear();
    gs1->setGestures(gestures);
    QCOMPARE(spy1_gesturesChanged.count(), 1);

    //enable "QtSensors.template" and "QtSensors.template1"
    gestures << "QtSensors.template" << "QtSensors.template1";
    gs1->setEnabled(false);
    gs1->setGestures(gestures);
    spy1_enabledChanged.clear();
    spy1_detected.clear();
    gs1->setEnabled(true);
    QCOMPARE(spy1_enabledChanged.count(), 1);
    QCOMPARE(spy1_detected.count(), 2);
    gs1->setEnabled(false);

    //check that the sensor doesn't run until componentComplete gets called
    QmlSensorGesture* gs2 = new QmlSensorGesture(this);
    QSignalSpy spy2_detected(gs2, SIGNAL(detected(QString)));
    gs2->setGestures(QStringList() << "QtSensors.template");
    gs2->setEnabled(true);
    QCOMPARE(spy2_detected.count(), 0);
    gs2->componentComplete();
    QCOMPARE(spy2_detected.count(), 1);
}
Example #20
static PyObject* PyBobIpFlandmark_locate(PyBobIpFlandmarkObject* self,  PyObject *args, PyObject* kwds) {
BOB_TRY
  char** kwlist = s_locate.kwlist();

  PyBlitzArrayObject* image;
  int bbx[4];

  if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&iiii", kwlist,  &PyBlitzArray_Converter, &image, &bbx[0], &bbx[1], &bbx[2], &bbx[3])) return 0;

  // create bounding box in format (top, left, bottom, right)
  bbx[2] += bbx[0] - 1;
  bbx[3] += bbx[1] - 1;

  auto image_ = make_safe(image);

  // check
  if (image->type_num != NPY_UINT8 || image->ndim != 2) {
    PyErr_Format(PyExc_TypeError, "`%s' input `image' data must be a 2D array with dtype `uint8' (i.e. a gray-scaled image), but you passed a %" PY_FORMAT_SIZE_T "d array with data type `%s'", Py_TYPE(self)->tp_name, image->ndim, PyBlitzArray_TypenumAsString(image->type_num));
    return 0;
  }

  // detect
  std::vector<double> detected(2*self->flandmark->data.options.M);
  bob::ip::flandmark::flandmark_detect(*PyBlitzArrayCxx_AsBlitz<uint8_t, 2>(image), bbx, self->flandmark, &detected[0]);

  // extract landmarks
  blitz::Array<double, 2> landmarks(self->flandmark->data.options.M, 2);
  for (int k = 0; k < self->flandmark->data.options.M; ++k){
    landmarks(k,0) = detected[2*k];
    landmarks(k,1) = detected[2*k+1];
  }

  return PyBlitzArrayCxx_AsNumpy(landmarks);
BOB_CATCH_MEMBER("locate", 0)
};

static PyMethodDef PyBobIpFlandmark_methods[] = {
  {
    s_locate.name(),
    (PyCFunction)PyBobIpFlandmark_locate,
    METH_VARARGS|METH_KEYWORDS,
    s_locate.doc()
  },
  {0} /* Sentinel */
};

PyObject* PyBobIpFlandmark_Repr(PyBobIpFlandmarkObject* self) {

  /**
   * Expected output:
   *
   * <bob.ip.flandmark(model='...')>
   */

  PyObject* retval = PyUnicode_FromFormat("<%s(model='%s')>",  Py_TYPE(self)->tp_name, self->filename);

#if PYTHON_VERSION_HEX < 0x03000000
  if (!retval) return 0;
  PyObject* tmp = PyObject_Str(retval);
  Py_DECREF(retval);
  retval = tmp;
#endif

  return retval;

}
Example #21
void QTemplateGestureRecognizer::timeout()
{
    Q_EMIT detected(id());
}
Example #22
bool QTemplateGestureRecognizer::start()
{
    Q_EMIT detected(id());
    _timer.start();
    return _timer.isActive();
}
Example #23
float BoardDetector::detect(const vector<Marker> &detectedMarkers,const  BoardConfiguration &BConf, Board &Bdetected, Mat camMatrix,Mat distCoeff,float markerSizeMeters)throw (cv::Exception)
{
// cout<<"markerSizeMeters="<<markerSizeMeters<<endl;
    ///find among detected markers these that belong to the board configuration
    Mat detected(BConf._markersId.size(),CV_32SC1); //stores the indices of the markers
    detected.setTo(Scalar(-1));//-1 mean not detected
    int nMarkInBoard=0;//total number of markers detected
    for (unsigned int i=0;i<detectedMarkers.size();i++) {
        bool found=false;
        int id=detectedMarkers[i].id;
        //find it
        for (  int j=0;j<detected.size().height && ! found;j++)
            for (  int k=0;k<detected.size().width && ! found;k++)
                if ( BConf._markersId.at<int>(j,k)==id) {
                    detected.at<int>(j,k)=i;
                    nMarkInBoard++;
                    found=true;
                    Bdetected.push_back(detectedMarkers[i]);
                    if (markerSizeMeters>0)
                        Bdetected.back().ssize=markerSizeMeters;
                }
    }
    Bdetected.conf=BConf;
    if (markerSizeMeters!=-1)
        Bdetected.markerSizeMeters=markerSizeMeters;
//calculate extrinsic if there is information for that
    if (camMatrix.rows!=0 && markerSizeMeters>0 && detectedMarkers.size()>1) {
        // now, create the matrices for finding the extrinsics
        Mat objPoints(4*nMarkInBoard,3,CV_32FC1);
        Mat imagePoints(4*nMarkInBoard,2,CV_32FC1);
        //size in meters of inter-marker distance
        double markerDistanceMeters= double(BConf._markerDistancePix) * markerSizeMeters / double(BConf._markerSizePix);



        int currIndex=0;
        for (  int y=0;y<detected.size().height;y++)
            for (  int x=0;x<detected.size().width;x++) {
                if (  detected.at<int>(y,x)!=-1 ) {

                    vector<Point2f> points =detectedMarkers[ detected.at<int>(y,x) ];
                    //set first image points
                    for (int p=0;p<4;p++) {
                        imagePoints.at<float>(currIndex+p,0)=points[p].x;
                        imagePoints.at<float>(currIndex+p,1)=points[p].y;
                    }

                    //translation to put the reference system at the center
                    float TX=-(  ((detected.size().height-1)*(markerDistanceMeters+markerSizeMeters) +markerSizeMeters) /2) ;
                    float TY=-(  ((detected.size().width-1)*(markerDistanceMeters+markerSizeMeters) +markerSizeMeters)/2);
                    //points in the real reference system. We set the center in the bottom-left corner
                    float AY=x*(markerDistanceMeters+markerSizeMeters ) +TY;
                    float AX=y*(markerDistanceMeters+markerSizeMeters )+TX;
                    objPoints.at<float>( currIndex,0)= AX;
                    objPoints.at<float>( currIndex,1)= AY;
                    objPoints.at<float>( currIndex,2)= 0;
                    objPoints.at<float>( currIndex+1,0)= AX;
                    objPoints.at<float>( currIndex+1,1)= AY+markerSizeMeters;
                    objPoints.at<float>( currIndex+1,2)= 0;
                    objPoints.at<float>( currIndex+2,0)= AX+markerSizeMeters;
                    objPoints.at<float>( currIndex+2,1)= AY+markerSizeMeters;
                    objPoints.at<float>( currIndex+2,2)= 0;
                    objPoints.at<float>( currIndex+3,0)= AX+markerSizeMeters;
                    objPoints.at<float>( currIndex+3,1)= AY;
                    objPoints.at<float>( currIndex+3,2)= 0;
                    currIndex+=4;
                }
            }

        CvMat cvCamMatrix=camMatrix;
        CvMat cvDistCoeffs;
        Mat zeros=Mat::zeros(4,1,CV_32FC1);
        if (distCoeff.rows>=4)  cvDistCoeffs=distCoeff;
        else  cvDistCoeffs=zeros;
        CvMat cvImgPoints=imagePoints;
        CvMat cvObjPoints=objPoints;

        CvMat cvRvec=Bdetected.Rvec;
        CvMat cvTvec=Bdetected.Tvec;
        cvFindExtrinsicCameraParams2(&cvObjPoints, &cvImgPoints, &cvCamMatrix, &cvDistCoeffs,&cvRvec,&cvTvec);
        //now, rotate 90 deg in X so that Y axis points up
        rotateXAxis(Bdetected.Rvec);
        //cout<<Bdetected.Rvec.at<float>(0,0)<<" "<<Bdetected.Rvec.at<float>(1,0)<<Bdetected.Rvec.at<float>(2,0)<<endl;
        //cout<<Bdetected.Tvec.at<float>(0,0)<<" "<<Bdetected.Tvec.at<float>(1,0)<<Bdetected.Tvec.at<float>(2,0)<<endl;
    }
    return double(nMarkInBoard)/double( BConf._markersId.size().width*BConf._markersId.size().height);
}
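A hedged usage sketch for the function above, assuming the markers come from an aruco-style MarkerDetector and the camera parameters are already loaded; the 0.05f marker size and 0.2 acceptance threshold are illustrative values, not from the source:

BoardDetector boardDetector;
Board board;
float likelihood = boardDetector.detect(markers, boardConfig, board,
                                        cameraMatrix, distCoeffs, 0.05f);
if (likelihood > 0.2f) {
    // board.Rvec / board.Tvec now hold the estimated board pose
}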
Example #24
void check_trace(int start)
{
  if (trace == DET_IGNORE || trace == DET_WARN)
    trace = DET_DIE;
//    return;

#ifdef DEBUG
  trace = DET_IGNORE;
#endif /* DEBUG */

  if (trace == DET_IGNORE)
    return;

  pid_t parent = getpid();

  /* we send ourselves a SIGTRAP; if we receive it, we're not being traced, otherwise we are. */
  signal(SIGTRAP, got_sigtrap);
  traced = 1;
  raise(SIGTRAP);
  /* no longer needed: __asm__("INT3");  // SIGTRAP */
  signal(SIGTRAP, SIG_DFL);

  if (!traced) {
    signal(SIGINT, got_sigtrap);
    traced = 1;
    raise(SIGINT);
    signal(SIGINT, SIG_DFL);
  }

  if (traced) {
    if (start) {
      kill(parent, SIGKILL);
      exit(1);
    } else
      detected(DETECT_TRACE, STR("I'm being traced!"));
  } else {
    if (!start)
      return;

#ifndef __sun__
    int x, i;

  /* now, let's attempt to ptrace ourself */
    switch ((x = fork())) {
      case -1:
        return;
      case 0:		//child
        i = ptrace(PT_ATTACH, parent, 0, 0);
        /* EPERM is given on fbsd when security.bsd.unprivileged_proc_debug=0 */
        if (i == -1 && errno != EPERM) {
          if (start) {
            kill(parent, SIGKILL);
            exit(1);
          } else
            detected(DETECT_TRACE, STR("I'm being traced!"));
        } else {
          waitpid(parent, NULL, 0);
          ptrace(PT_DETACH, parent, (char *) 1, 0);
          kill(parent, SIGCHLD);
        }
        exit(0);
      default:		//parent
        waitpid(x, NULL, 0);
    }
#endif
  }
}
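got_sigtrap() is referenced but not shown; given that check_trace() sets traced = 1 before raising the signal, a handler consistent with that logic would simply record that the signal arrived (hypothetical sketch):

/* If the signal reaches us, nothing intercepted it, so we are not being traced. */
static void got_sigtrap(int z)
{
  traced = 0;
}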
Example #25
void check_processes()
{
  if (badprocess == DET_IGNORE)
    return;

  char *proclist = NULL, *out = NULL, *p = NULL, *np = NULL, *curp = NULL, buf[1024] = "", bin[128] = "";

  proclist = process_list[0] ? process_list : NULL;

  if (!proclist)
    return;

  if (!shell_exec("ps x", NULL, &out, NULL))
    return;

  /* Get this binary's filename */
  strlcpy(buf, shell_escape(binname), sizeof(buf));
  p = strrchr(buf, '/');
  if (p) {
    p++;
    strlcpy(bin, p, sizeof(bin));
  } else {
    bin[0] = 0;
  }
  /* Fix up the "permitted processes" list */
  p = (char *) my_calloc(1, strlen(proclist) + strlen(bin) + 6);
  strcpy(p, proclist);
  strcat(p, " ");
  strcat(p, bin);
  strcat(p, " ");
  proclist = p;
  curp = out;
  while (curp) {
    np = strchr(curp, '\n');
    if (np)
      *np++ = 0;
    if (atoi(curp) > 0) {
      char *pid = NULL, *tty = NULL, *mystat = NULL, *mytime = NULL, cmd[512] = "", line[2048] = "";

      strlcpy(line, curp, sizeof(line));
      /* it's a process line */
      /* Assuming format: pid tty stat time cmd */
      pid = newsplit(&curp);
      tty = newsplit(&curp);
      mystat = newsplit(&curp);
      mytime = newsplit(&curp);
      strlcpy(cmd, curp, sizeof(cmd));
      /* skip any <defunct> procs, "/bin/sh -c" crontab stuff and binname crontab stuff */
      if (!strstr(cmd, "<defunct>") && strncmp(cmd, "/bin/sh -c", 10) && 
          strncmp(cmd, shell_escape(binname), strlen(shell_escape(binname)))) {
        /* get rid of any args */
        if ((p = strchr(cmd, ' ')))
          *p = 0;
        /* remove [] or () */
        if (strlen(cmd)) {
          p = cmd + strlen(cmd) - 1;
          if (((cmd[0] == '(') && (*p == ')')) || ((cmd[0] == '[') && (*p == ']'))) {
            *p = 0;
            strcpy(buf, cmd + 1);
            strcpy(cmd, buf);
          }
        }

        /* remove path */
        if ((p = strrchr(cmd, '/'))) {
          p++;
          strcpy(buf, p);
          strcpy(cmd, buf);
        }

        /* skip "ps" */
        if (strcmp(cmd, "ps")) {
          /* see if proc's in permitted list */
          strcat(cmd, " ");
          if ((p = strstr(proclist, cmd))) {
            /* Remove from permitted list */
            while (*p != ' ')
              *p++ = 1;
          } else {
            char *work = NULL;
            size_t size = 0;

            size = strlen(line) + 22;
            work = (char *) my_calloc(1, size);
            simple_snprintf(work, size, "Unexpected process: %s", line);
            detected(DETECT_PROCESS, work);
            free(work);
          }
        }
      }
    }
    curp = np;
  }
  free(proclist);
  if (out)
    free(out);
}