void Tst_qsensorgesturePluginsTest::tst_sensor_plugins_qtsensors_all() { QStringList list; list << "QtSensors.cover"; list << "QtSensors.doubletap"; list << "QtSensors.hover"; list << "QtSensors.pickup"; list << "QtSensors.shake2"; list << "QtSensors.slam"; list << "QtSensors.turnover"; list << "QtSensors.twist"; list << "QtSensors.whip"; QSensorGestureManager manager; QScopedPointer<QSensorGesture> gesture(new QSensorGesture(list)); QVERIFY(gesture->invalidIds().count() == 0); QVERIFY(gesture->validIds().count() == 9); QVERIFY(gesture->gestureSignals().count() == 14); gesture->startDetection(); QVERIFY(gesture->isActive()); QSensorGestureRecognizer *recognizer = manager.sensorGestureRecognizer("QtSensors.turnover"); recognizer->stopBackend(); QVERIFY(!recognizer->isActive()); QVERIFY(gesture->isActive()); }
void Tst_qsensorgesturePluginsTest::tst_sensor_plugins_shake() { QSensorGestureManager manager; QVERIFY(manager.gestureIds().contains("QtSensors.shake")); QSensorGestureRecognizer *recognizer = manager.sensorGestureRecognizer("QtSensors.shake"); QCOMPARE(recognizer->isActive(), false); QTest::ignoreMessage(QtWarningMsg, "Not starting. Gesture Recognizer not initialized "); recognizer->startBackend(); QCOMPARE(recognizer->isActive(), false); QTest::ignoreMessage(QtWarningMsg, "Not stopping. Gesture Recognizer not initialized "); recognizer->stopBackend(); QCOMPARE(recognizer->isActive(), false); QScopedPointer<QSensorGesture> gesture(new QSensorGesture(QStringList() << "QtSensors.shake")); QCOMPARE(gesture->isActive(),false); QCOMPARE(gesture->validIds(), QStringList() << "QtSensors.shake"); QCOMPARE(QStringList() << recognizer->id(), gesture->validIds()); gesture->startDetection(); QCOMPARE(gesture->isActive(),true); QCOMPARE(recognizer->isActive(), true); gesture->stopDetection(); QCOMPARE(recognizer->isActive(), false); }
// Verifies that HTMLVideoElement forwards buffering-strategy hints to the
// WebMediaPlayer: play() requests the Normal strategy, a pause issued under
// a user gesture requests Aggressive, and a later play() restores Normal.
// Expectations are verified and cleared between phases so each call is
// checked in isolation.
TEST_F(HTMLVideoElementTest, setBufferingStrategy_UserPause) {
  setSrc("http://foo.bar/");
  MockWebMediaPlayer* player = webMediaPlayer();
  ASSERT_TRUE(player);

  // On play, the strategy is set to normal.
  EXPECT_CALL(*player,
              setBufferingStrategy(WebMediaPlayer::BufferingStrategy::Normal));
  m_video->play();
  ::testing::Mock::VerifyAndClearExpectations(player);

  // On a user pause, the strategy is set to aggressive.
  EXPECT_CALL(*player, setBufferingStrategy(
                           WebMediaPlayer::BufferingStrategy::Aggressive));
  {
    // Scope the gesture indicator so only pause() executes under it.
    UserGestureIndicator gesture(
        DocumentUserGestureToken::create(&m_video->document()));
    m_video->pause();
  }
  ::testing::Mock::VerifyAndClearExpectations(player);

  // On play, the strategy is set to normal.
  EXPECT_CALL(*player,
              setBufferingStrategy(WebMediaPlayer::BufferingStrategy::Normal));
  m_video->play();
  ::testing::Mock::VerifyAndClearExpectations(player);
}
// Bridges an OpenNI gesture-progress callback into Python: the cookie points
// at an array of boost::python objects, and slot [1] holds the Python
// callable registered for progress updates.
void XN_CALLBACK_TYPE GestureProgress_callback(xn::GestureGenerator &generator,
                                               const XnChar *strGesture,
                                               const XnPoint3D *pPosition,
                                               XnFloat fProgress,
                                               void *pCookie)
{
    const std::string gestureName(strGesture);
    BP::object &pyCallback = static_cast<BP::object *>(pCookie)[1];

    // Invoke the stored Python callable with converted arguments.
    pyCallback(generator, gestureName, convertVec3D(*pPosition), fProgress);
}
/** Internal callback implementations **/
// Bridges an OpenNI gesture-recognized callback into Python: the cookie
// points at an array of boost::python objects, and slot [0] holds the
// Python callable registered for recognition events.
void XN_CALLBACK_TYPE GestureRecognized_callback(xn::GestureGenerator &generator,
                                                 const XnChar *strGesture,
                                                 const XnPoint3D *pIDPosition,
                                                 const XnPoint3D *pEndPosition,
                                                 void *pCookie)
{
    const std::string gestureName(strGesture);
    BP::object &pyCallback = static_cast<BP::object *>(pCookie)[0];

    // Invoke the stored Python callable with converted positions.
    pyCallback(generator, gestureName,
               convertVec3D(*pIDPosition), convertVec3D(*pEndPosition));
}
// Meta-object call dispatcher for QDeclarativeGestureArea.
// NOTE(review): this body follows the moc-generated qt_metacall pattern;
// if it is generated, change the class declaration and regenerate rather
// than editing by hand.
int QDeclarativeGestureArea::qt_metacall(QMetaObject::Call _c, int _id, void **_a)
{
    // Let the base class consume its own meta ids first; a negative id
    // means the call was fully handled up the chain.
    _id = QDeclarativeItem::qt_metacall(_c, _id, _a);
    if (_id < 0)
        return _id;
#ifndef QT_NO_PROPERTIES
    if (_c == QMetaObject::ReadProperty) {
        void *_v = _a[0];
        switch (_id) {
        // Local property 0: returns the QGesture* from gesture().
        case 0: *reinterpret_cast< QGesture**>(_v) = gesture(); break;
        }
        _id -= 1;
    } else if (_c == QMetaObject::WriteProperty) {
        _id -= 1;
    } else if (_c == QMetaObject::ResetProperty) {
        _id -= 1;
    } else if (_c == QMetaObject::QueryPropertyDesignable) {
        _id -= 1;
    } else if (_c == QMetaObject::QueryPropertyScriptable) {
        _id -= 1;
    } else if (_c == QMetaObject::QueryPropertyStored) {
        _id -= 1;
    } else if (_c == QMetaObject::QueryPropertyEditable) {
        _id -= 1;
    } else if (_c == QMetaObject::QueryPropertyUser) {
        _id -= 1;
    }
#endif // QT_NO_PROPERTIES
    // One local property handled: rebase the id for any further subclass.
    return _id;
}
// Converts a single-tap touch point into a GestureSingleTap web event
// (mapping item coordinates through the page transform) and forwards it to
// the page proxy. Text-input state changes are postponed while the tap is
// being processed.
void QtWebPageEventHandler::handleSingleTapEvent(const QTouchEvent::TouchPoint& point)
{
    m_postponeTextInputStateChanged = true;

    const QTransform fromItemTransform = m_webPage->transformFromItem();
    WebGestureEvent tapEvent(WebEvent::GestureSingleTap,
                             fromItemTransform.map(point.pos()).toPoint(),
                             point.screenPos().toPoint(),
                             WebEvent::Modifiers(0),
                             0,
                             IntSize(point.rect().size().toSize()),
                             FloatPoint(0, 0));
    m_webPageProxy->handleGestureEvent(tapEvent);
}
// Builds a native single-tap gesture event from the touch point and hands it
// to the page proxy; any active tap highlight is dismissed first, and
// text-input state changes are postponed while the tap is processed.
void QtWebPageEventHandler::handleSingleTapEvent(const QTouchEvent::TouchPoint& point)
{
    deactivateTapHighlight();
    m_postponeTextInputStateChanged = true;

    const QTransform fromItemTransform = m_webPage->transformFromItem();
    NativeWebGestureEvent tapEvent(point, WebEvent::GestureSingleTap, fromItemTransform);
    m_webPageProxy->handleGestureEvent(tapEvent);
}
/* Mouse-moved callback: while the gesture button is held, quantize the
 * motion since the last anchor point into a direction stroke and append it
 * to the accumulated gesture pattern (4 bits per stroke). */
static int MovedEvent( vlc_object_t *p_this, char const *psz_var,
                       vlc_value_t oldval, vlc_value_t newval, void *p_data )
{
    intf_thread_t *p_intf = (intf_thread_t *)p_data;
    intf_sys_t *p_sys = p_intf->p_sys;

    (void) p_this; (void) psz_var; (void) oldval;

    vlc_mutex_lock( &p_sys->lock );

    if( p_sys->b_button_pressed )
    {
        /* Quantize the delta from the last anchor by the threshold. */
        int i_dx = ( newval.coords.x - p_sys->i_last_x ) / p_sys->i_threshold;
        int i_dy = ( newval.coords.y - p_sys->i_last_y ) / p_sys->i_threshold;
        int pattern = 0;

        if( i_dx < 0 )
        {
            msg_Dbg( p_intf, "left gesture (%d)", i_dx );
            pattern = LEFT;
        }
        else if( i_dx > 0 )
        {
            msg_Dbg( p_intf, "right gesture (%d)", i_dx );
            pattern = RIGHT;
        }

        if( i_dy < 0 )
        {
            msg_Dbg( p_intf, "up gesture (%d)", i_dy );
            pattern = UP;
        }
        else if( i_dy > 0 )
        {
            msg_Dbg( p_intf, "down gesture (%d)", i_dy );
            pattern = DOWN;
        }

        if( pattern )
        {
            /* Re-anchor so the next stroke is measured from here. */
            p_sys->i_last_x = newval.coords.x;
            p_sys->i_last_y = newval.coords.y;

            /* Append only when the direction actually changed. */
            if( gesture( p_sys->i_pattern, p_sys->i_num_gestures - 1 ) != pattern )
            {
                p_sys->i_pattern |= pattern << ( p_sys->i_num_gestures * 4 );
                p_sys->i_num_gestures++;
            }
        }
    }

    vlc_mutex_unlock( &p_sys->lock );
    return VLC_SUCCESS;
}
void Tst_qsensorgesturePluginsTest::tst_sensor_plugins_qtsensors() { QFETCH(QString, gestureId); QSensorGestureManager manager; QVERIFY(manager.gestureIds().contains(gestureId)); QScopedPointer<QSensorGesture> gesture(new QSensorGesture(QStringList() << gestureId)); QScopedPointer<QSensorGesture> gesture2(new QSensorGesture(QStringList() << gestureId)); QCOMPARE(gesture->isActive(),false); QCOMPARE(gesture->validIds(), QStringList() << gestureId); QSensorGestureRecognizer *recognizer = manager.sensorGestureRecognizer(gestureId); QCOMPARE(QStringList() << recognizer->id(), gesture->validIds()); QCOMPARE(QStringList() << QStringList(), gesture->invalidIds()); QVERIFY(recognizer->isActive() == false); QVERIFY(gesture->isActive() == false); QVERIFY(gesture2->isActive() == false); gesture->startDetection(); QVERIFY(gesture->isActive() == true); QVERIFY(gesture2->isActive() == false); QVERIFY(recognizer->isActive() == true); gesture2->startDetection(); QVERIFY(gesture->isActive() == true); QVERIFY(gesture2->isActive() == true); QVERIFY(recognizer->isActive() == true); gesture2->stopDetection(); QVERIFY(gesture->isActive() == true); QVERIFY(gesture2->isActive() == false); QVERIFY(recognizer->isActive() == true); gesture->stopDetection(); QVERIFY(gesture->isActive() == false); QVERIFY(gesture2->isActive() == false); QVERIFY(recognizer->isActive() == false); QVERIFY(recognizer->gestureSignals().count() > 1); QVERIFY(recognizer->gestureSignals().contains("detected(QString)")); }
// Intercepts raw X11 events to record a mouse gesture drawn with the
// configured gesture button. Returns true when the event was consumed.
// State machine: ButtonPress starts recording, MotionNotify extends the
// stroke, ButtonRelease translates it and either emits handle_gesture()
// or replays the click if no gesture was drawn.
bool Gesture::x11Event( XEvent* ev_P )
    {
/*    kdDebug(1217) << k_funcinfo << " ( type = " << ev_P->type << " )" << KeyRelease << " " << KeyPress <<endl;
    if( ev_P->type == XKeyPress || ev_P->type == XKeyRelease )
        {
        return voice_handler->x11Event( ev_P );
        }*/
    if( ev_P->type == ButtonPress && ev_P->xbutton.button == button )
        {
        kdDebug( 1217 ) << "GESTURE: mouse press" << endl;
        // Start a fresh stroke; the timer fires if no stroke follows in time.
        stroke.reset();
        stroke.record( ev_P->xbutton.x, ev_P->xbutton.y );
        nostroke_timer.start( timeout, true );
        recording = true;
        // Remember the root-window press position for the target-window lookup.
        start_x = ev_P->xbutton.x_root;
        start_y = ev_P->xbutton.y_root;
        return true;
        }
    else if( ev_P->type == ButtonRelease && ev_P->xbutton.button == button
        && recording )
        {
        recording = false;
        nostroke_timer.stop();
        stroke.record( ev_P->xbutton.x, ev_P->xbutton.y );
        QString gesture( stroke.translate());
        if( gesture.isEmpty())
            {
            // No recognizable gesture: release the pointer grab and replay
            // the original click so the press is not swallowed.
            kdDebug( 1217 ) << "GESTURE: replay" << endl;
            XAllowEvents( qt_xdisplay(), AsyncPointer, CurrentTime );
            XUngrabPointer( qt_xdisplay(), CurrentTime );
            mouse_replay( true );
            return true;
            }
        kdDebug( 1217 ) << "GESTURE: got: " << gesture << endl;
        // Deliver the gesture along with the window under the press point.
        emit handle_gesture( gesture,
            windows_handler->window_at_position( start_x, start_y ));
        return true;
        }
    else if( ev_P->type == MotionNotify && recording )
        { // ignore small initial movement
        if( nostroke_timer.isActive()
            && abs( start_x - ev_P->xmotion.x_root ) < 10
            && abs( start_y - ev_P->xmotion.y_root ) < 10 )
            return true;
        nostroke_timer.stop();
        stroke.record( ev_P->xmotion.x, ev_P->xmotion.y );
        }
    // Not a gesture-related event: let others handle it.
    return false;
    }
// Dispatches a single SDL event to the matching handler callback.
// Returns false only when a window-close event is received (signals the
// caller to stop the loop); all other events return true.
// Touch-synthesized mouse events (SDL_TOUCH_MOUSEID) are ignored so
// fingers are handled exclusively through the SDL_FINGER* cases.
bool EventHandler::handleEvent(SDL_Event &event)
{
    switch (event.type) {
    case SDL_TEXTINPUT:
        textInput(std::string(event.text.text));
        break;
    case SDL_KEYUP:
        keyRelease((int32_t) event.key.keysym.sym);
        break;
    case SDL_KEYDOWN:
        // we are handling this on our own
        if (!event.key.repeat)
            keyPress((int32_t) event.key.keysym.sym, (int16_t) event.key.keysym.mod);
        break;
    case SDL_MOUSEMOTION: {
        if (event.motion.which == SDL_TOUCH_MOUSEID)
            break;
        // Only forward motion for the focused window.
        SDL_Window *window = SDL_GetWindowFromID(event.motion.windowID);
        if (!(SDL_GetWindowFlags(window) & SDL_WINDOW_INPUT_FOCUS))
            break;
        mouseMotion(event.motion.x, event.motion.y, event.motion.xrel, event.motion.yrel);
        break;
    }
    case SDL_MOUSEBUTTONDOWN:
        if (event.button.which == SDL_TOUCH_MOUSEID)
            break;
        mouseButtonPress(event.button.x, event.button.y, event.button.button);
        break;
    case SDL_MOUSEBUTTONUP:
        if (event.button.which == SDL_TOUCH_MOUSEID)
            break;
        mouseButtonRelease(event.button.x, event.button.y, event.button.button);
        break;
    case SDL_MOUSEWHEEL:
        if (event.wheel.which == SDL_TOUCH_MOUSEID)
            break;
        mouseWheel(event.wheel.x, event.wheel.y);
        break;
    case SDL_CONTROLLERAXISMOTION: {
        // Only the two analog sticks are forwarded; triggers etc. ignored.
        const uint8_t axis = event.caxis.axis;
        if (axis != SDL_CONTROLLER_AXIS_LEFTX && axis != SDL_CONTROLLER_AXIS_LEFTY &&
            axis != SDL_CONTROLLER_AXIS_RIGHTX && axis != SDL_CONTROLLER_AXIS_RIGHTY)
            break;
        const bool horizontal = (axis == SDL_CONTROLLER_AXIS_LEFTX ||
                                 axis == SDL_CONTROLLER_AXIS_RIGHTX);
        joystickMotion(horizontal, event.caxis.value);
        break;
    }
    case SDL_CONTROLLERBUTTONDOWN:
        controllerButtonPress(getControllerButtonName(event.cbutton.button));
        break;
    case SDL_CONTROLLERBUTTONUP:
        controllerButtonRelease(getControllerButtonName(event.cbutton.button));
        break;
    case SDL_CONTROLLERDEVICEADDED:
        joystickDeviceAdded(event.cdevice.which);
        break;
    case SDL_CONTROLLERDEVICEREMOVED:
        joystickDeviceRemoved(event.cdevice.which);
        break;
    case SDL_JOYDEVICEADDED:
        joystickDeviceAdded(event.jdevice.which);
        break;
    case SDL_JOYDEVICEREMOVED:
        joystickDeviceRemoved(event.jdevice.which);
        break;
    case SDL_DOLLARRECORD:
        gestureRecord(event.dgesture.gestureId);
        break;
    case SDL_DOLLARGESTURE:
        gesture(event.dgesture.gestureId, event.dgesture.error, event.dgesture.numFingers);
        break;
    case SDL_MULTIGESTURE:
        multiGesture(event.mgesture.dTheta, event.mgesture.dDist, event.mgesture.numFingers);
        break;
    case SDL_JOYHATMOTION:
        // Hat motion is deliberately not handled.
        break;
    case SDL_JOYBUTTONDOWN:
        joystickButtonPress(event.jbutton.button);
        break;
    case SDL_JOYBUTTONUP:
        joystickButtonRelease(event.jbutton.button);
        break;
    case SDL_JOYAXISMOTION:
        // Axis 0 is treated as the horizontal axis.
        joystickMotion(event.jaxis.axis == 0, event.jaxis.value);
        break;
    case SDL_FINGERDOWN:
        fingerPress(event.tfinger.fingerId, event.tfinger.x, event.tfinger.y);
        break;
    case SDL_FINGERUP:
        fingerRelease(event.tfinger.fingerId, event.tfinger.x, event.tfinger.y);
        break;
    case SDL_FINGERMOTION:
        fingerMotion(event.tfinger.fingerId, event.tfinger.x, event.tfinger.y,
                     event.tfinger.dx, event.tfinger.dy);
        break;
    case SDL_WINDOWEVENT:
        switch (event.window.event) {
        case SDL_WINDOWEVENT_RESIZED:
        case SDL_WINDOWEVENT_SIZE_CHANGED:
            // Notify all registered observers about the new window size.
            for (EventObservers::iterator i = _observers.begin(); i != _observers.end(); ++i) {
                (*i)->onWindowResize();
            }
            break;
        case SDL_WINDOWEVENT_CLOSE:
            // Window closed: tell the caller to terminate the event loop.
            return false;
        }
        break;
    }
    return true;
}
// Intercepts raw X11 events to record a mouse gesture drawn with the
// configured gesture button. Returns true when the event was consumed.
// On release the processed stroke is broadcast via handle_gesture(); slots
// connected with direct connections fill in maxScore/bestFit synchronously,
// after which the best-matching action (if any) is executed.
bool Gesture::x11Event( XEvent* ev_P )
    {
/*  kDebug() << " ( type = " << ev_P->type << " )" << KeyRelease << " " << KeyPress ;
    if( ev_P->type == XKeyPress || ev_P->type == XKeyRelease )
        {
        return voice_handler->x11Event( ev_P );
        }*/
    if( ev_P->type == ButtonPress && ev_P->xbutton.button == button )
        {
        kDebug() << "GESTURE: mouse press";
        // Start a fresh stroke; the timer fires if no stroke follows in time.
        stroke.reset();
        stroke.record( ev_P->xbutton.x, ev_P->xbutton.y );
        nostroke_timer.start( timeout );
        recording = true;
        // Remember the root-window press position for the target-window lookup.
        start_x = ev_P->xbutton.x_root;
        start_y = ev_P->xbutton.y_root;
        return true;
        }
    // if stroke is finished... postprocess the data and send a signal.
    // then wait for incoming matching scores and execute the best fit.
    else if( ev_P->type == ButtonRelease && ev_P->xbutton.button == button
        && recording )
        {
        recording = false;
        nostroke_timer.stop();
        stroke.record( ev_P->xbutton.x, ev_P->xbutton.y );
        StrokePoints gesture( stroke.processData() );
        if( gesture.isEmpty() )
            {
            // No usable stroke: release the pointer grab and replay the
            // original click so the press is not swallowed.
            kDebug() << "GESTURE: replay";
            XAllowEvents( QX11Info::display(), AsyncPointer, CurrentTime );
            XUngrabPointer( QX11Info::display(), CurrentTime );
            mouse_replay( true );
            return true;
            }
        // prepare for the incoming scores from different triggers
        maxScore = 0.0;
        bestFit = NULL;
        emit handle_gesture( gesture );
        // the signal is emitted directly, so we get all trigger scores before
        // the next lines are executed. bestFit should now contain
        // a pointer to the ActionData with the best-matching gesture.
        if( bestFit != NULL )
            {
            // set up the windows_handler
            WId window = windows_handler->window_at_position( start_x, start_y );
            windows_handler->set_action_window( window );
            // then execute the action associated with the best match.
            bestFit->execute();
            }
        return true;
        }
    else if( ev_P->type == MotionNotify && recording )
        { // ignore small initial movement
        if( nostroke_timer.isActive()
            && abs( start_x - ev_P->xmotion.x_root ) < 10
            && abs( start_y - ev_P->xmotion.y_root ) < 10 )
            return true;
        nostroke_timer.stop();
        stroke.record( ev_P->xmotion.x, ev_P->xmotion.y );
        }
    // Not a gesture-related event: let others handle it.
    return false;
    }
/*****************************************************************************
 * MouseEvent: callback for mouse events
 *****************************************************************************
 * Handles both "mouse-moved" and "mouse-button-down" variable callbacks.
 * While the gesture button is held, mouse motion is quantized by the
 * configured threshold into direction strokes (LEFT/RIGHT/UP/DOWN) that are
 * packed 4 bits apiece into p_sys->i_pattern. Releasing the button sets
 * b_got_gesture so the consumer thread can act on the pattern; further
 * events are ignored until it has been processed. All shared state is
 * protected by p_sys->lock.
 *****************************************************************************/
static int MouseEvent( vlc_object_t *p_this, char const *psz_var,
                       vlc_value_t oldval, vlc_value_t newval, void *p_data )
{
    VLC_UNUSED(p_this); VLC_UNUSED(oldval);
    int pattern = 0;

    signed int i_horizontal, i_vertical;
    intf_thread_t *p_intf = (intf_thread_t *)p_data;
    intf_sys_t    *p_sys = p_intf->p_sys;

    vlc_mutex_lock( &p_sys->lock );

    /* don't process new gestures before the last events are processed */
    if( p_sys->b_got_gesture )
    {
        vlc_mutex_unlock( &p_sys->lock );
        return VLC_SUCCESS;
    }

    if( !strcmp( psz_var, "mouse-moved" ) && p_sys->b_button_pressed )
    {
        p_sys->i_mouse_x = newval.coords.x;
        p_sys->i_mouse_y = newval.coords.y;
        /* Quantize the delta from the last anchor by the threshold. */
        i_horizontal = p_sys->i_mouse_x - p_sys->i_last_x;
        i_horizontal = i_horizontal / p_sys->i_threshold;
        i_vertical = p_sys->i_mouse_y - p_sys->i_last_y;
        i_vertical = i_vertical / p_sys->i_threshold;

        if( i_horizontal < 0 )
        {
            msg_Dbg( p_intf, "left gesture (%d)", i_horizontal );
            pattern = LEFT;
        }
        else if( i_horizontal > 0 )
        {
            msg_Dbg( p_intf, "right gesture (%d)", i_horizontal );
            pattern = RIGHT;
        }
        if( i_vertical < 0 )
        {
            msg_Dbg( p_intf, "up gesture (%d)", i_vertical );
            pattern = UP;
        }
        else if( i_vertical > 0 )
        {
            msg_Dbg( p_intf, "down gesture (%d)", i_vertical );
            pattern = DOWN;
        }
        if( pattern )
        {
            /* Re-anchor so the next stroke is measured from here. */
            p_sys->i_last_y = p_sys->i_mouse_y;
            p_sys->i_last_x = p_sys->i_mouse_x;
            /* Append only when the direction actually changed. */
            if( gesture( p_sys->i_pattern, p_sys->i_num_gestures - 1 )
                    != pattern )
            {
                p_sys->i_pattern |= pattern << ( p_sys->i_num_gestures * 4 );
                p_sys->i_num_gestures++;
            }
        }
    }
    else if( !strcmp( psz_var, "mouse-button-down" ) )
    {
        if( (newval.i_int & p_sys->i_button_mask) && !p_sys->b_button_pressed )
        {
            /* Gesture button went down: start a new gesture anchored at the
             * current pointer position. */
            p_sys->b_button_pressed = true;
            var_GetCoords( p_sys->p_vout, "mouse-moved",
                           &p_sys->i_last_x, &p_sys->i_last_y );
        }
        else if( !( newval.i_int & p_sys->i_button_mask )
                 && p_sys->b_button_pressed )
        {
            /* Gesture button released: hand the pattern to the consumer. */
            p_sys->b_button_pressed = false;
            p_sys->b_got_gesture = true;
        }
    }

    vlc_mutex_unlock( &p_sys->lock );
    return VLC_SUCCESS;
}