// Dispatches raw WebSocket events from the underlying client.
// For text frames, the leading digits of the payload select the action:
//   "42..." -> fire the named user event, "2..." -> reply "3",
//   "40..." -> fire "connect",            "41..." -> fire "disconnect".
// NOTE(review): these prefixes look like engine.io/socket.io packet codes —
// confirm against the protocol version this client targets.
void SocketIoClient::webSocketEvent(WStype_t type, uint8_t * payload, size_t length) {
	String msg;
	switch(type) {
		case WStype_DISCONNECTED:
			SOCKETIOCLIENT_DEBUG("[SIoC] Disconnected!\n");
			break;
		case WStype_CONNECTED:
			// payload carries the connected URL (per the debug format string).
			SOCKETIOCLIENT_DEBUG("[SIoC] Connected to url: %s\n",  payload);
			break;
		case WStype_TEXT:
			msg = String((char*)payload);
			// Order matters: "42" must be tested before the bare "2" prefix,
			// since startsWith("2") also matches "42...".
			if(msg.startsWith("42")) {
				// Named event with data; helpers parse the name and payload out
				// of msg.  NOTE(review): `length` here is the full frame length,
				// not the length of the parsed event payload — confirm intended.
				trigger(getEventName(msg).c_str(), getEventPayload(msg).c_str(), length);
			} else if(msg.startsWith("2")) {
				// Reply "3" — presumably a ping/pong keep-alive exchange.
				_webSocket.sendTXT("3");
			} else if(msg.startsWith("40")) {
				trigger("connect", NULL, 0);
			} else if(msg.startsWith("41")) {
				trigger("disconnect", NULL, 0);
			}
			break;
		case WStype_BIN:
			// Binary frames are only hex-dumped for debugging; no user event fires.
			SOCKETIOCLIENT_DEBUG("[SIoC] get binary length: %u\n", length);
			hexdump((uint32_t*) payload, length);
		break;
	}
}
示例#2
0
 /// Logs one allocator trace event with a relative timestamp and records it
 /// in the in-memory event log for later inspection.
 virtual void onEvent(uavcan::dynamic_node_id_server::TraceCode code, uavcan::int64_t argument)
 {
     // Time elapsed since this object was constructed.
     const uavcan::MonotonicDuration elapsed = SystemClockDriver().getMonotonic() - startup_ts_;
     std::cout << "EVENT [" << id_ << "]\t"
               << elapsed.toString() << "\t"
               << code << "\t"
               << getEventName(code) << "\t"
               << argument << std::endl;
     // Keep a copy of every event so it can be examined after the fact.
     event_log_.push_back(EventLogEntry(code, argument));
 }
NS_CC_BEGIN

// Maps an event to the listener ID used to look up its listeners.
// Touch events are intentionally unsupported here (they involve two distinct
// listener kinds), and unknown types are a programming error.
static EventListener::ListenerID __getListenerID(Event* event)
{
    switch (event->getType())
    {
        case Event::Type::ACCELERATION:
            return EventListenerAcceleration::LISTENER_ID;
        case Event::Type::CUSTOM:
            // Custom events are keyed by their user-supplied name.
            return static_cast<EventCustom*>(event)->getEventName();
        case Event::Type::KEYBOARD:
            return EventListenerKeyboard::LISTENER_ID;
        case Event::Type::MOUSE:
            return EventListenerMouse::LISTENER_ID;
        case Event::Type::TOUCH:
            // Touch listener is very special, it contains two kinds of listeners, EventListenerTouchOneByOne and EventListenerTouchAllAtOnce.
            // return UNKNOWN instead.
            CCASSERT(false, "Don't call this method if the event is for touch.");
            break;
        default:
            CCASSERT(false, "Invalid type!");
            break;
    }

    // Only reached on the assert paths above; yields a default-constructed ID,
    // matching the original's uninitialized-assignment-free fallthrough.
    return EventListener::ListenerID();
}
示例#4
0
// Queues either a "<name>_down" or "<name>_up" event for the given button,
// where <name> is the event name configured for that button number.
void
VRPNButtonDevice::sendEvent(int buttonNumber, bool down)
{
  const std::string base   = getEventName(buttonNumber);
  const std::string suffix = down ? "_down" : "_up";
  _pendingEvents.append(new Event(base + suffix));
}
// Queues a "<name>_down" / "<name>_up" event for the given button, stamped
// with the VRPN message time and carrying the button number.
void InputDeviceVRPNButton::sendEvent(int buttonNumber, bool down, const TimeStamp &msg_time)
{
	const std::string suffix = down ? "_down" : "_up";
	const std::string ename  = getEventName(buttonNumber) + suffix;
	_pendingEvents.push_back(EventRef(new Event(ename, nullptr, buttonNumber, msg_time)));
}
/**
The tracker reports the transformation that would transform stuff
in tracker space to device space (that is, to the space relative to
the origin of the tracking device).  First we change the units on
the translation part of this since we usually work in feet and most
tracking systems report measurements in some metric system.  Then we
have a transformation in feet relative to the device.  We multiply
by a transformation that transforms stuff in device space to IS3D
room space.  You can think of this as what rotation, then
translation would move the origin of RoomSpace to the origin of
tracking device.  This is the deviceToRoom coordinate frame.
*/
void VRVRPNTrackerDevice::processEvent(const VRMatrix4 &vrpnEvent, int sensorNum)
{

    // NOTE(review): an all-zero translation (column 3) is treated as a bogus
    // report and dropped when _ignoreZeroes is set.
    if ((_ignoreZeroes) && (vrpnEvent.getColumn(3) == VRVector3(0,0,0))) {
    	return;
	}
	_newReportFlag = true;

	// first, adjust units of trackerToDevice.  after this, everything
	// is in RoomSpace units (typically feet for VRG3D).
	VRMatrix4 trackerToDevice = vrpnEvent;

	// convert a left handed coordinate system to a right handed one
	// not sure if this actually works..
	if (_convertLHtoRH) {
		/*** This is code from syzygy..
		CoordinateFrame switchMat(Matrix3(1,0,0, 0,0,1, 0,-1,0), Vector3::zero());
		CoordinateFrame transMat(switchMat.inverse() * vrpnEvent.translation);
		CoordinateFrame rotMat(vrpnEvent.rotation, Vector3::zero());
		trackerToDevice = transMat * switchMat.inverse() * rotMat * switchMat;
		***/

		// This code is based on the article "Conversion of Left-Handed
		// Coordinates to Right-Handed Coordinates" by David Eberly,
		// available online:
		// http://www.geometrictools.com/Documentation/LeftHandedToRightHanded.pdf
		// Negate the z component of the translation (column 3) ...
		trackerToDevice[3][2] = -trackerToDevice[3][2];

		// ... and the z row/column entries of the rotation part.
		trackerToDevice[2][0] = -trackerToDevice[2][0];
		trackerToDevice[2][1] = -trackerToDevice[2][1];
		trackerToDevice[0][2] = -trackerToDevice[0][2];
		trackerToDevice[1][2] = -trackerToDevice[1][2];
	}

	// Scale the translation (column 3) from tracker units into room units.
	trackerToDevice[3][0] *= _trackerUnitsToRoomUnitsScale;
	trackerToDevice[3][1] *= _trackerUnitsToRoomUnitsScale;
	trackerToDevice[3][2] *= _trackerUnitsToRoomUnitsScale;

	// Compose prop -> tracker -> device -> room, plus the per-sensor final offset.
	VRMatrix4 eventRoom = _finalOffset[sensorNum] * _deviceToRoom * trackerToDevice * _propToTracker[sensorNum];

	// Optional debugging aid: echo sensor 0's room-space position.
	if ((_printSensor0) && (sensorNum == 0)) {
        VRVector3 translation = eventRoom.getColumn(3);
		std::cout << translation << std::endl;
	}

	// Package the transform under "<name>/Transform" and queue the serialized
	// "<event name>_Move" event.
	VRDataIndex di;
    std::string name = getEventName(sensorNum) + "_Move";
	di.addData(name + "/Transform", eventRoom);
    _pendingEvents.push(di.serialize(name));
}
示例#7
0
// Queues a serialized "<name>_Down" or "<name>_Up" event for the given
// button; the event's "/id" field records the button number.
// The original's two branches duplicated the addData/push packaging and
// differed only in the name suffix, so the name is built once and the
// packaging code is shared.
void VRVRPNButtonDevice::sendEvent(int buttonNumber, bool down)
{
	VRDataIndex di;
	std::string name = getEventName(buttonNumber) + (down ? "_Down" : "_Up");
	di.addData(name + "/id", buttonNumber);
	_pendingEvents.push(di.serialize(name));
}
示例#8
0
NS_CC_BEGIN

// Maps an event to the listener ID used to look up its listeners.
// Touch events are intentionally unsupported here (they involve two distinct
// listener kinds), and unknown types are a programming error.
static EventListener::ListenerID __getListenerID(Event* event)
{
    switch (event->getType())
    {
        case Event::Type::ACCELERATION:
            return EventListenerAcceleration::LISTENER_ID;
        case Event::Type::CUSTOM:
            // Custom events are keyed by their user-supplied name.
            return static_cast<EventCustom*>(event)->getEventName();
        case Event::Type::KEYBOARD:
            return EventListenerKeyboard::LISTENER_ID;
        case Event::Type::MOUSE:
            return EventListenerMouse::LISTENER_ID;
        case Event::Type::FOCUS:
            return EventListenerFocus::LISTENER_ID;
        case Event::Type::JOYSTICK:
            return EventListenerJoystick::LISTENER_ID;
#if (CC_TARGET_PLATFORM == CC_PLATFORM_ANDROID || CC_TARGET_PLATFORM == CC_PLATFORM_IOS)
        case Event::Type::GAME_CONTROLLER:
            return EventListenerController::LISTENER_ID;
#endif
        case Event::Type::TOUCH:
            // Touch listener is very special, it contains two kinds of listeners, EventListenerTouchOneByOne and EventListenerTouchAllAtOnce.
            // return UNKNOWN instead.
            CCASSERT(false, "Don't call this method if the event is for touch.");
            break;
        default:
            CCASSERT(false, "Invalid type!");
            break;
    }

    // Only reached on the assert paths above; yields a default-constructed ID,
    // matching the original's behavior on those paths.
    return EventListener::ListenerID();
}
示例#9
0
        void EventManager::handleEvents()
        {
            EventData eventData;

            while(!eventQueue.empty())
            {
                if(eventQueue.try_pop(eventData))
                {
#ifdef DEBUG
                    if(signals[eventData.first]->empty())
                    {
                        LOG(WARNING) << "Unhandled event " << getEventName(eventData.first);
                    }
#endif
                    (*signals[eventData.first])(eventData.first, eventData.second);
                }
            }
        }
示例#10
0
/**
   The tracker reports the transformation that would transform stuff
   in tracker space to device space (that is, to the space relative to
   the origin of the tracking device).  First we change the units on
   the translation part of this since we usually work in feet and most
   tracking systems report measurements in some metric system.  Then we
   have a transformation in feet relative to the device.  We multiply
   by a transformation that transforms stuff in device space to IS3D
   room space.  You can think of this as what rotation, then
   translation would move the origin of RoomSpace to the origin of
   tracking device.  This is the deviceToRoom coordinate frame.
 */
void
VRPNTrackerDevice::processEvent(const CoordinateFrame &vrpnEvent, int sensorNum)
{
  // Flag that at least one fresh report arrived during this update.
  _newReportFlag = true;

  // first, adjust units of trackerToDevice.  after this, everything
  // is in RoomSpace units (typically feet for VRG3D).
  CoordinateFrame trackerToDevice = vrpnEvent;

  // convert a left handed coordinate system to a right handed one
  // not sure if this actually works..
  if (_convertLHtoRH) {
    /*** This is code from syzygy..        
    CoordinateFrame switchMat(Matrix3(1,0,0, 0,0,1, 0,-1,0), Vector3::zero());
    CoordinateFrame transMat(switchMat.inverse() * vrpnEvent.translation);
    CoordinateFrame rotMat(vrpnEvent.rotation, Vector3::zero());
    trackerToDevice = transMat * switchMat.inverse() * rotMat * switchMat;
    ***/
    
    // This code is based on the article "Conversion of Left-Handed
    // Coordinates to Right-Handed Coordinates" by David Eberly,
    // available online:
    // http://www.geometrictools.com/Documentation/LeftHandedToRightHanded.pdf
    // Negate the z translation component and the z row/column of the rotation.
    trackerToDevice.translation[2] = -trackerToDevice.translation[2];

    trackerToDevice.rotation[0][2] = -trackerToDevice.rotation[0][2];
    trackerToDevice.rotation[1][2] = -trackerToDevice.rotation[1][2];
    trackerToDevice.rotation[2][0] = -trackerToDevice.rotation[2][0];
    trackerToDevice.rotation[2][1] = -trackerToDevice.rotation[2][1];
  }

  // Scale the translation from tracker units into room units.
  trackerToDevice.translation *= _trackerUnitsToRoomUnitsScale;

  // Compose prop -> tracker -> device -> room, plus the per-sensor final offset.
  CoordinateFrame eventRoom = _finalOffset[sensorNum] * _deviceToRoom * trackerToDevice * _propToTracker[sensorNum];

  // Optional debugging aid: echo sensor 0's room-space position.
  if ((_printSensor0) && (sensorNum == 0)) {
    std::cout << eventRoom.translation << std::endl;
  }
  // Queue the event named for this sensor, carrying the room-space frame.
  _pendingEvents.append(new Event(getEventName(sensorNum), eventRoom));  
}
示例#11
0
// Builds the main window: populates the event combo box, configures the
// argument and mesh-bounds tables, constructs the default ensemble and mesh,
// and preloads the standard event chain.
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    ui->setupUi(this);  

    // One combo-box entry per known event type.
    for (int i = 0; i < nEvents; ++i) {
        ui->eventComboBox->addItem(getEventName(i));
    }

    // Two-column table listing an event's arguments and their values.
    ui->tableWidget->setColumnCount(2);

    QStringList list;
    list << "Argument" << "Value";
    ui->tableWidget->setHorizontalHeaderLabels(list);

    // Mesh-bounds table: one row per spatial dimension.
    QStringList list2;
    list2 << "X" << "Y";

#if ENS_DIM == 3
    list2 << "Z";
#else
    ui->tableWidget_mesh->setRowCount(2);
#endif
    // NOTE(review): setRowCount is only invoked in the 2D branch — confirm the
    // 3D build gets its row count from elsewhere (e.g. the .ui file).
    ui->tableWidget_mesh->setVerticalHeaderLabels(list2);


    QStringList list3;
    list3 << "Start" << "End";
    ui->tableWidget_mesh->setHorizontalHeaderLabels(list3);

    // Default ensemble: a single particle of unit mass.
    vec masses = ones(1);

    ensemble = new Ensemble(masses);

    // Simulation domain: [0, ENS_NX] x [0, ENS_NY] (plus [0, ENS_NZ] in 3D builds).
    mat topology(ENS_DIM, 2);
    topology << 0 << ENS_NX << endr << 0 << ENS_NY;

#if ENS_DIM == 3
    topology << 0 << ENS_NZ << endr;
#endif

    mainMesh = new MainMesh(topology, *ensemble);

    // Give every cell of the mesh-bounds table an item so it can be edited later.
    for (int i = 0; i < 2; ++i) {
        for (int j = 0; j < ENS_DIM; ++j) {
            ui->tableWidget_mesh->setItem(j, i, new QTableWidgetItem);
        }
    }

    addMesh(mainMesh);

    // All-ones 2x2 tables — presumably default Lennard-Jones eps/sigma
    // parameters (an LJ force event is registered below); confirm units.
    epsTable << 1 << 1 << endr << 1 << 1;
    sigmaTable << 1 << 1 << endr << 1 << 1;

    // Preload the standard event chain.  dt is set to 1/60 s just before STALL
    // is added — presumably so STALL captures the frame time — then restored
    // to the solver default of 0.005.
    forceAddEvent(MDSOLVER);
    forceAddEvent(VELOCITYVERLET1);
    forceAddEvent(PERIODIC);
    forceAddEvent(LENNARDJONESFORCE);
    forceAddEvent(VELOCITYVERLET2);
    params.dt = 1./60;
    forceAddEvent(STALL);
    params.dt = 0.005;
    ui->eventComboBox->setCurrentIndex(0);

    running = false;

}
示例#12
0
/**
 * \brief	Saves the input settings according to how the user mapped the buttons,
 * 			axes, or keys to the commands.
 */
// Writes each player's command-to-input bindings into the config file,
// one "inputN" section per player, then saves the file.
void CInput::saveControlconfig()
{
	CConfiguration Configuration(CONFIGFILENAME);
	Configuration.Parse();

	// Config-key / command pairs written for every player's section.
	// Table-driven to avoid fourteen near-identical WriteString lines;
	// the entry order matches the original write order.
	static const struct {
		const char*       key;   // entry name in the config file
		decltype(IC_LEFT) cmd;   // command whose binding is saved
	} entries[] = {
		{ "Left",        IC_LEFT },
		{ "Up",          IC_UP },
		{ "Right",       IC_RIGHT },
		{ "Down",        IC_DOWN },
		{ "Upper-Left",  IC_UPPERLEFT },
		{ "Upper-Right", IC_UPPERRIGHT },
		{ "Lower-Left",  IC_LOWERLEFT },
		{ "Lower-Right", IC_LOWERRIGHT },
		{ "Jump",        IC_JUMP },
		{ "Pogo",        IC_POGO },
		{ "Fire",        IC_FIRE },
		{ "Status",      IC_STATUS },
		{ "Help",        IC_HELP },
		{ "Quit",        IC_QUIT },
	};
	const size_t numEntries = sizeof(entries)/sizeof(entries[0]);

	std::string section;
	for(size_t i=0 ; i<NUM_INPUTS ; i++)
	{
		// One section per player: "input0", "input1", ...
		section = "input" + itoa(i);

		for(size_t e=0 ; e<numEntries ; e++)
			Configuration.WriteString(section, entries[e].key, getEventName(entries[e].cmd, i));

		Configuration.SetKeyword(section, "TwoButtonFiring", TwoButtonFiring[i]);
	}
	Configuration.saveCfgFile();
}