Example #1
bool MoviePlayer::prepareFrame() {
	if (!_videoLooping && _videoDecoder->endOfVideo()) {
		_videoFinished = true;
	}

	if (_videoPause) {
		return false;
	}

	if (_videoFinished) {
		if (g_grim->getMode() == GrimEngine::SmushMode) {
			g_grim->setMode(GrimEngine::NormalMode);
		}
		_videoPause = true;
		return false;
	}

	if (_videoDecoder->getTimeToNextFrame() > 0)
		return false;

	handleFrame();
	_internalSurface = _videoDecoder->decodeNextFrame();
	if (_frame != _videoDecoder->getCurFrame()) {
		_updateNeeded = true;
	}

	_movieTime = _videoDecoder->getTime();
	_frame = _videoDecoder->getCurFrame();

	return true;
}
Example #2
const Graphics::Surface *CelDecoder::CelVideoTrack::decodeNextFrame() {
	// Read chunk
	/*uint32 frameSize = */ _fileStream->readUint32LE();
	uint16 frameType = _fileStream->readUint16LE();

	switch (frameType) {
	case FRAME_TYPE:
		handleFrame();
		break;
	default:
		error("FlicDecoder::decodeFrame(): unknown main chunk type (type = 0x%02X)", frameType);
		break;
	}

	_curFrame++;
	_nextFrameStartTime += _frameDelay;

	if (_atRingFrame) {
		// If we decoded the ring frame, seek to the second frame
		_atRingFrame = false;
		if (_frameCount == 1) {
			_fileStream->seek(_offsetFrame1);
		} else {
			_fileStream->seek(_offsetFrame2);
		}
	}

	if (_curFrame == 0)
		_transparentColourIndex = *(byte *)_surface->getBasePtr(0, 0);

	return _surface;
}
Example #3
const Graphics::Surface *FlicDecoder::FlicVideoTrack::decodeNextFrame() {
	// Read chunk
	/*uint32 frameSize = */ _fileStream->readUint32LE();
	uint16 frameType = _fileStream->readUint16LE();

	switch (frameType) {
	case FRAME_TYPE:
		handleFrame();
		break;
	default:
		error("FlicDecoder::decodeFrame(): unknown main chunk type (type = 0x%02X)", frameType);
		break;
	}

	_curFrame++;
	_nextFrameStartTime += _frameDelay;

	if (_atRingFrame) {
		// If we decoded the ring frame, seek to the second frame
		_atRingFrame = false;
		_fileStream->seek(_offsetFrame2);
	}

	return _surface;
}
Example #4
void QualisysDriver::run() {

  prt_packet = port_protocol.GetRTPacket();
  CRTPacket::EPacketType e_type;
  port_protocol.GetCurrentFrame(CRTProtocol::Component6dEuler);

  if(port_protocol.ReceiveRTPacket(e_type, true)) {

    switch(e_type) {
      // Case 1 - sHeader.nType 0 indicates an error
      case CRTPacket::PacketError:
        ROS_ERROR_STREAM_THROTTLE(
            1, "Error when streaming frames: "
            << port_protocol.GetRTPacket()->GetErrorString());
        break;

      // Case 2 - No more data
      case CRTPacket::PacketNoMoreData:
        ROS_WARN_STREAM_THROTTLE(1, "No more data");
        break;

      // Case 3 - Data received
      case CRTPacket::PacketData:
        handleFrame();
        break;

      default:
        ROS_ERROR_THROTTLE(1, "Unknown CRTPacket case");
        break;
    }
  }

  return;
}
Example #5
int Dispatcher::spin(MonotonicTime deadline)
{
    int num_frames_processed = 0;
    do
    {
        CanIOFlags flags = 0;
        CanRxFrame frame;
        const int res = canio_.receive(frame, deadline, flags);
        if (res < 0)
        {
            return res;
        }
        if (res > 0)
        {
            if (flags & CanIOFlagLoopback)
            {
                handleLoopbackFrame(frame);
            }
            else
            {
                num_frames_processed++;
                handleFrame(frame);
            }
            notifyRxFrameListener(frame, flags);
        }
    }
    while (sysclock_.getMonotonic() < deadline);

    return num_frames_processed;
}
Example #6
int Dispatcher::spinOnce()
{
    int num_frames_processed = 0;

    while (true)
    {
        CanIOFlags flags = 0;
        CanRxFrame frame;
        const int res = canio_.receive(frame, MonotonicTime(), flags);
        if (res < 0)
        {
            return res;
        }
        else if (res > 0)
        {
            if (flags & CanIOFlagLoopback)
            {
                handleLoopbackFrame(frame);
            }
            else
            {
                num_frames_processed++;
                handleFrame(frame);
            }
            notifyRxFrameListener(frame, flags);
        }
        else
        {
            break;      // No frames left
        }
    }

    return num_frames_processed;
}
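Both Dispatcher variants above follow the same receive loop: poll a non-blocking receive, route loopback traffic to a separate handler, and count only the frames that handleFrame actually processed. Below is a minimal, self-contained sketch of that drain-the-queue shape; Frame, FrameSource, and drainOnce are illustrative stand-ins for this sketch, not the libuavcan types used in the examples.

#include <cstdio>
#include <deque>
#include <functional>
#include <optional>

// Illustrative stand-ins; the real libuavcan frame and driver types differ.
struct Frame { unsigned id = 0; bool loopback = false; };

struct FrameSource {
    std::deque<Frame> pending;
    // Non-blocking receive: returns a frame if one is queued, otherwise nothing.
    std::optional<Frame> receive() {
        if (pending.empty())
            return std::nullopt;
        Frame f = pending.front();
        pending.pop_front();
        return f;
    }
};

// Drain every queued frame once, mirroring the shape of Dispatcher::spinOnce().
int drainOnce(FrameSource &src, const std::function<void(const Frame &)> &handleFrame) {
    int num_frames_processed = 0;
    while (auto frame = src.receive()) {
        if (frame->loopback)
            continue;                // loopback frames would be routed elsewhere
        handleFrame(*frame);         // process one received frame
        ++num_frames_processed;      // count only frames that were handled
    }
    return num_frames_processed;
}

int main() {
    FrameSource src;
    src.pending = {{1, false}, {2, true}, {3, false}};
    int n = drainOnce(src, [](const Frame &f) { std::printf("handled frame %u\n", f.id); });
    std::printf("%d frames processed\n", n);    // prints 2: the loopback frame is skipped
    return 0;
}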
Example #7
void CGEEngine::mainLoop() {
	_vga->show();
	_commandHandlerTurbo->runCommand();
	_commandHandler->runCommand();

	// Handle a delay between game frames
	handleFrame();

	// Handle any pending events
	_eventManager->poll();
}
Example #8
bool CameraManipulator::handle(const osgGA::GUIEventAdapter& ea, osgGA::GUIActionAdapter& us)
{
    switch(ea.getEventType())
    {
        case(osgGA::GUIEventAdapter::FRAME):
        {
            return handleFrame(ea, us);
        }
        default:
            break;
    }

    if (ea.getHandled()) return false;

    switch(ea.getEventType())
    {
        case(osgGA::GUIEventAdapter::PUSH):
        {
            return handlePush(ea, us);
        }
        case(osgGA::GUIEventAdapter::RELEASE):
        {
            return handleRelease(ea, us);
        }
        case(osgGA::GUIEventAdapter::DRAG):
        case(osgGA::GUIEventAdapter::SCROLL):
        {
            return handleScroll(ea, us);
        }
        case(osgGA::GUIEventAdapter::MOVE):
        {
            return false;
        }
        case(osgGA::GUIEventAdapter::KEYDOWN):
        {
            return handleKeyDown(ea, us);
        }
        case(osgGA::GUIEventAdapter::KEYUP):
        {
            return handleKeyUp(ea, us);
        }
        case(osgGA::GUIEventAdapter::FRAME):
        {
            // Note: unreachable, FRAME events already return from the first switch above.
            if (_thrown)
            {
                if (calcMovement()) us.requestRedraw();
            }

            return false;
        }
        default:
            return false;
    }
}
Example #9
void PXCAPI GesturePipeline::runThread(void)
{
    if (!_enabled)
        return;
    _isRunning = true;
    log("Camera started\n");
    while (_isRunning && _enabled)
    {
        if (_pxcSenseManager->IsConnected())
            handleFrame();
        Sleep(_interval);
    }
    _isRunning = false;
    _endthreadex(0);
}
Example #10
status_t PanoramaThread::waitForAndExecuteMessage()
{
    LOG2("@%s", __FUNCTION__);
    status_t status = NO_ERROR;
    Message msg;
    mMessageQueue.receive(&msg);

    switch (msg.id)
    {
        case MESSAGE_ID_STITCH:
            status = handleStitch(msg.data.stitch);
            break;
        case MESSAGE_ID_EXIT:
            status = handleExit();
            break;
        case MESSAGE_ID_FRAME:
            status = handleFrame(msg.data.frame);
            break;
        case MESSAGE_ID_FINALIZE:
            status = handleMessageFinalize();
            break;
        case MESSAGE_ID_START_PANORAMA:
            status = handleMessageStartPanorama();
            break;
        case MESSAGE_ID_STOP_PANORAMA:
            status = handleMessageStopPanorama(msg.data.stop);
            break;
        case MESSAGE_ID_START_PANORAMA_CAPTURE:
            status = handleMessageStartPanoramaCapture();
            break;
        case MESSAGE_ID_STOP_PANORAMA_CAPTURE:
            status = handleMessageStopPanoramaCapture();
            break;
        case MESSAGE_ID_THUMBNAILSIZE:
            status = handleMessageThumbnailSize(msg.data.thumbnail);
            break;
        case MESSAGE_ID_FLUSH:
            status = handleMessageFlush();
            break;
        default:
            status = INVALID_OPERATION;
            break;
    }
    if (status != NO_ERROR) {
        LOGE("operation failed, ID = %d, status = %d", msg.id, status);
    }
    return status;
}
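PanoramaThread here, and PostProcThread further down, both run a blocking message queue and dispatch on the message id, with each case delegating to a dedicated handler such as handleFrame. The following is a stripped-down sketch of that worker-loop pattern, assuming a plain standard-library queue and a two-message protocol instead of the camera HAL's MessageQueue and status codes.

#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <queue>

// Minimal message type; the real Message carries a payload per message id.
enum class MessageId { Frame, Exit };
struct Message { MessageId id; int frameData = 0; };

class Worker {
public:
    // Producer side: enqueue a message and wake the worker.
    void post(Message msg) {
        std::lock_guard<std::mutex> lock(mMutex);
        mQueue.push(msg);
        mCond.notify_one();
    }

    // Blocks for one message and dispatches it, mirroring waitForAndExecuteMessage().
    // Returns false when the worker should stop.
    bool waitForAndExecuteMessage() {
        std::unique_lock<std::mutex> lock(mMutex);
        mCond.wait(lock, [this] { return !mQueue.empty(); });
        Message msg = mQueue.front();
        mQueue.pop();
        lock.unlock();

        switch (msg.id) {
        case MessageId::Frame:
            handleFrame(msg.frameData);
            return true;
        case MessageId::Exit:
            return false;
        }
        return true;
    }

private:
    void handleFrame(int frame) { std::printf("frame %d\n", frame); }

    std::mutex mMutex;
    std::condition_variable mCond;
    std::queue<Message> mQueue;
};

int main() {
    Worker worker;
    worker.post({MessageId::Frame, 1});
    worker.post({MessageId::Exit});
    while (worker.waitForAndExecuteMessage()) {
        // keep dispatching until an Exit message arrives
    }
    return 0;
}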
Example #11
void FtpServer::accept_new_clients(){
	for(int i=0; i < MAX_CLIENTS; i++)	{
		// if open slot
		if( !(client[i].conn)) {
			// if accept client works
			if(accept_client(&client[i])) {
				client[i].runner = true;
				updateStatus(CLIENT_CONN);

				while(client[i].runner){
					// Get this client's request frame
					getFrame(&client[i]);
					// Handle the request frame
					client[i].runner = handleFrame(&client[i]);
				}
			}
		}
	}
}
Example #12
QString Converter::encodingSettingsToX264(QString line)
{
  if (line.isEmpty()) {
    return QObject::tr("empty input nothing to do,...");
  }
  ignored.clear();
  QStringList x264;
  QStringList lines = line.split("/", QString::SkipEmptyParts);
  bool modded = false;
  foreach(line, lines) {
    line = line.trimmed();
    if (handleBFrames(line, x264)) {
      continue;
    }
    if (handleGop(line, x264)) {
      continue;
    }
    if (handleFrame(line, x264, modded)) {
      continue;
    }
    if (handleMotion(line, x264)) {
      continue;
    }
    if (handleRate(line, x264)) {
      continue;
    }
    if (handleRest(line, x264)) {
      continue;
    }
    if (handleGlobal(line, x264)) {
      continue;
    }
    if (line.startsWith("rc=")) {
      line += " " + QObject::tr("since rc does not hold any useful info");
    }
    ignored << line;
  }
Example #13
status_t PostProcThread::waitForAndExecuteMessage()
{
    LOG2("@%s", __FUNCTION__);
    status_t status = NO_ERROR;
    Message msg;
    mMessageQueue.receive(&msg);

    switch (msg.id)
    {
    case MESSAGE_ID_FRAME:
        status = handleFrame(msg.data.frame);
        break;
    case MESSAGE_ID_EXIT:
        status = handleExit();
        break;
    case MESSAGE_ID_START_FACE_DETECTION:
        status = handleMessageStartFaceDetection();
        break;
    case MESSAGE_ID_STOP_FACE_DETECTION:
        status = handleMessageStopFaceDetection();
        break;
    case MESSAGE_ID_CAPTURE_ON_TRIGGER:
        status = handleMessageCaptureOnTrigger();
        break;
    case MESSAGE_ID_STOP_CAPTURE_ON_TRIGGER:
        status = handleMessageStopCaptureOnTrigger();
        break;
    case MESSAGE_ID_START_SMART_SHUTTER:
        status = handleMessageStartSmartShutter(msg.data.smartShutterParam);
        break;
    case MESSAGE_ID_STOP_SMART_SHUTTER:
        status = handleMessageStopSmartShutter(msg.data.smartShutterParam);
        break;
    case MESSAGE_ID_IS_SMILE_RUNNING:
        status = handleMessageIsSmileRunning();
        break;
    case MESSAGE_ID_GET_SMILE_THRESHOLD:
        status = handleMessageGetSmileThreshold();
        break;
    case MESSAGE_ID_IS_BLINK_RUNNING:
        status = handleMessageIsBlinkRunning();
        break;
    case MESSAGE_ID_GET_BLINK_THRESHOLD:
        status = handleMessageGetBlinkThreshold();
        break;
    case MESSAGE_ID_IS_SMART_CAPTURE_TRIGGERED:
        status = handleMessageIsSmartCaptureTriggered();
        break;
    case MESSAGE_ID_RESET_SMART_CAPTURE_TRIGGER:
        status = handleMessageResetSmartCaptureTrigger();
        break;
    case MESSAGE_ID_FORCE_SMART_CAPTURE_TRIGGER:
        status = handleMessageForceSmartCaptureTrigger();
        break;
    case MESSAGE_ID_START_FACE_RECOGNITION:
        status = handleMessageStartFaceRecognition();
        break;
    case MESSAGE_ID_STOP_FACE_RECOGNITION:
        status = handleMessageStopFaceRecognition();
        break;
    case MESSAGE_ID_IS_FACE_RECOGNITION_RUNNING:
        status = handleMessageIsFaceRecognitionRunning();
        break;
    case MESSAGE_ID_LOAD_ISP_EXTENSIONS:
        status = handleMessageLoadIspExtensions(msg.data.loadIspExtensions);
        break;
    case MESSAGE_ID_UNLOAD_ISP_EXTENSIONS:
        status = handleMessageUnloadIspExtensions();
        break;
    case MESSAGE_ID_SET_ZOOM:
        status = handleMessageSetZoom(msg.data.config);
        break;
    case MESSAGE_ID_SET_ROTATION:
        status = handleMessageSetRotation(msg.data.config);
        break;
    case MESSAGE_ID_SET_AUTO_LOW_LIGHT:
        status = handleMessageSetAutoLowLight(msg.data.config);
        break;
    default:
        status = INVALID_OPERATION;
        break;
    }
    if (status != NO_ERROR) {
        LOGE("operation failed, ID = %d, status = %d", msg.id, status);
    }
    return status;
}
Example #14
/** Handles events. Returns true if handled, false otherwise.*/
bool OrbitCameraManipulator::handle( const osgGA::GUIEventAdapter& ea, osgGA::GUIActionAdapter& aa )
{
	switch( ea.getEventType() )
	{

	case osgGA::GUIEventAdapter::FRAME:
		return handleFrame( ea, aa );

	case osgGA::GUIEventAdapter::RESIZE:
		return handleResize( ea, aa );

	default:
		break;
	}

	if( ea.getHandled() )
	{
		return false;
	}

	computeRayPointer( ea, aa );

	bool handled = false;
	switch( ea.getEventType() )
	{
	case osgGA::GUIEventAdapter::MOVE:
		handled = handleMouseMove( ea, aa );
		break;

	case osgGA::GUIEventAdapter::DRAG:
		handled = handleMouseDrag( ea, aa );
		break;

	case osgGA::GUIEventAdapter::PUSH:
		handled = handleMousePush( ea, aa );
		break;

	case osgGA::GUIEventAdapter::RELEASE:
		handled = handleMouseRelease( ea, aa );
		break;

	case osgGA::GUIEventAdapter::KEYDOWN:
		handled = handleKeyDown( ea, aa );
		break;

	case osgGA::GUIEventAdapter::KEYUP:
		m_control_key_down = false;
		handled = handleKeyUp( ea, aa );
		break;

	case osgGA::GUIEventAdapter::SCROLL:
		if( _flags & PROCESS_MOUSE_WHEEL )
			handled = handleMouseWheel( ea, aa );
		else
			handled = false;
		break;

	default:
		handled = false;
	}

	return handled;
}
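Both osgGA manipulators above share the same dispatch discipline: FRAME events (and, in the second manipulator, RESIZE) are forwarded to handleFrame before the getHandled() check, so per-frame updates keep running even when another handler has already consumed the event, and everything else goes through a second switch. A compact sketch of that ordering follows; the Event type and the handler set are hypothetical stand-ins for osgGA::GUIEventAdapter and the manipulator interfaces.

#include <cstdio>

// Hypothetical event type standing in for osgGA::GUIEventAdapter.
struct Event {
    enum Type { FRAME, PUSH, RELEASE, DRAG } type;
    bool handled = false;
};

class Manipulator {
public:
    bool handle(const Event &ea) {
        // Per-frame work runs first, before the "already handled" check.
        if (ea.type == Event::FRAME)
            return handleFrame(ea);

        if (ea.handled)
            return false;

        // Everything else is dispatched to a dedicated handler.
        switch (ea.type) {
        case Event::PUSH:    return handlePush(ea);
        case Event::RELEASE: return handleRelease(ea);
        case Event::DRAG:    return handleDrag(ea);
        default:             return false;
        }
    }

private:
    bool handleFrame(const Event &)   { std::puts("frame");   return false; }
    bool handlePush(const Event &)    { std::puts("push");    return true;  }
    bool handleRelease(const Event &) { std::puts("release"); return true;  }
    bool handleDrag(const Event &)    { std::puts("drag");    return true;  }
};

int main() {
    Manipulator m;
    m.handle({Event::FRAME});    // per-frame update always runs
    Event push{Event::PUSH};
    push.handled = true;         // already consumed by another handler
    m.handle(push);              // skipped: getHandled()-style early return
    m.handle({Event::DRAG});     // dispatched to handleDrag
    return 0;
}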