Example #1
//----------------------------------------
void ofxOpenNITracker::startTracking(XnUserID nID) {
	ofLogVerbose(LOG_NAME) << "Start tracking user" << nID;
	openNI->getUserGenerator().GetSkeletonCap().StartTracking(nID);
	setUserState(nID, ofxOpenNIUser::StartTracking);
}
Example #2
//--------------------------------------------------------------
void ofApp::keyReleased(int key){
    ofLogVerbose("ofApp") << "keyReleased() key = " << key;
}
Example #3
//--------------------------------------------------------------
void ofApp::mouseReleased(int x, int y, int button){
    ofLogVerbose("ofApp") << "mouseReleased() x = " << x << "/" << y << " button=" << button;
}
Example #4
COMXVideo::~COMXVideo()
{
	ofRemoveListener(ofEvents().update, this, &COMXVideo::onUpdate);
	ofLogVerbose(__func__) << "removed update listener";
}
Example #5
// -----------------------------------------------------------------------------
void ofxRtMidiIn::ignoreTypes(bool midiSysex, bool midiTiming, bool midiSense) {
	midiIn.ignoreTypes(midiSysex, midiTiming, midiSense);
	ofLogVerbose("ofxMidiIn") <<"ignore types on " << portName << ": sysex: " << midiSysex
	    << " timing: " << midiTiming << " sense: " << midiSense;
}
Example #6
//------------------------------------------------------------------------------
ofxIpVideoServerRouteHandler::~ofxIpVideoServerRouteHandler() {
    
    ofLogVerbose("ofxIpVideoServerRouteHandler::~ofxIpVideoServerRouteHandler") << "Destroyed.";
}
Example #7
//--------------------------------------------------------------
bool ofxCLEye::initGrabber(int w, int h, int deviceID, int frameRate, bool useTexture,
						   bool useGrayscale, bool useThread){
	setDeviceID(deviceID);
	setDesiredFrameRate(frameRate);
	setUseThread(useThread);
	setUseGrayscale(useGrayscale);
	setUseTexture(useTexture);

	if(w == 640 && h == 480){
		resolution = CLEYE_VGA;
	}
	else if(w == 320 && h == 240){
		resolution = CLEYE_QVGA;
	}
	else{
		ofLogWarning(OFX_CLEYE_MODULE_NAME) << "initGrabber(): selected resolution " + ofToString(w) + "x"
			+ ofToString(h) + " is not available with ofxCLEye";
		ofLogWarning(OFX_CLEYE_MODULE_NAME) << "initGrabber(): using 640x480 instead";
		resolution = CLEYE_VGA;
	}

	if(desiredFrameRate < 0){
		ofLogWarning(OFX_CLEYE_MODULE_NAME) << "initGrabber(): selected framerate" + ofToString(desiredFrameRate) + "is not available with ofxCLeye";
		ofLogWarning(OFX_CLEYE_MODULE_NAME) << "initGrabber(): using 60fps instead";
		desiredFrameRate = 60;
	}

	GUID guid = getDeviceGUID(requestedDeviceID);
	cam = CLEyeCreateCamera(guid, colorMode, resolution, desiredFrameRate);

	if(cam == NULL){
		ofLogError(OFX_CLEYE_MODULE_NAME) << "initGrabber(): error when creating instance of CLEyeCamera.";
		return false;
	}

	initialized = CLEyeCameraStart(cam);

	if(!initialized){
		ofLogError(OFX_CLEYE_MODULE_NAME) << "initGrabber(): can't start the CLEye camera.";
		return false;
	}

	CLEyeCameraGetFrameDimensions(cam, width, height);

	// oF code style says to not use ternary operators, but sometimes they are really convenient.
	// Native color image from camera is RGBA (4 channels)
	viPixels = new unsigned char[width * height * ((colorMode == CLEYE_MONO_PROCESSED) ? 1 : 4)];

	pixels.allocate(width, height, (colorMode == CLEYE_MONO_PROCESSED) ? 1 : 3);
	pixels.set(0);

	if(usingTexture){
		int glFormat = (colorMode == CLEYE_MONO_PROCESSED) ? GL_LUMINANCE : GL_RGB;
		texture.allocate(width, height, glFormat);
		texture.loadData((unsigned char *)pixels.getPixels(), width, height, glFormat);
	}

	if(usingThread){
		startThread(true);
		ofLogVerbose(OFX_CLEYE_MODULE_NAME) << "initGrabber(): thread started.";
	}

	return true;
}
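A hypothetical call to the grabber above, using the parameter order from the signature (the device ID, frame rate, and flag values are illustrative assumptions):

//--------------------------------------------------------------
void setupCamera(){
	ofxCLEye grabber; // hypothetical local instance for illustration
	// 640x480 color capture at 60 fps from device 0, with a texture,
	// no grayscale conversion, and a background capture thread
	if(!grabber.initGrabber(640, 480, 0, 60, true, false, true)){
		ofLogError() << "could not start the CL Eye camera";
	}
}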
Example #8
OMX_ERRORTYPE NonTextureEngine::onCameraEventParamOrConfigChanged()
{

	ofLogVerbose(__func__) << "START";
	
	OMX_ERRORTYPE error = OMX_SendCommand(camera, OMX_CommandStateSet, OMX_StateIdle, NULL);
	if (error != OMX_ErrorNone) 
	{
		ofLog(OF_LOG_ERROR, "camera OMX_SendCommand OMX_StateIdle FAIL error: 0x%08x", error);
	}
	
	//Enable Camera Output Port
	OMX_CONFIG_PORTBOOLEANTYPE cameraport;
	OMX_INIT_STRUCTURE(cameraport);
	cameraport.nPortIndex = CAMERA_OUTPUT_PORT;
	cameraport.bEnabled = OMX_TRUE;
	
	error = OMX_SetParameter(camera, OMX_IndexConfigPortCapturing, &cameraport);
	if (error != OMX_ErrorNone) 
	{
		ofLog(OF_LOG_ERROR, "camera enable Output Port FAIL error: 0x%08x", error);
	}
	
	
	
	if (omxCameraSettings.doRecording) 
	{		
		if (omxCameraSettings.doRecordingPreview) 
		{
			//Set up renderer
			setupRenderer();
		} 
		
		
		//set up encoder
		OMX_CALLBACKTYPE encoderCallbacks;
		encoderCallbacks.EventHandler		= &BaseEngine::encoderEventHandlerCallback;
		encoderCallbacks.EmptyBufferDone	= &BaseEngine::encoderEmptyBufferDone;
		encoderCallbacks.FillBufferDone		= &NonTextureEngine::encoderFillBufferDone;
		
		
		string encoderComponentName = "OMX.broadcom.video_encode";
		
		error = OMX_GetHandle(&encoder, (OMX_STRING)encoderComponentName.c_str(), this, &encoderCallbacks);
		if (error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "encoder OMX_GetHandle FAIL error: 0x%08x", error);
		}
		
		configureEncoder();
		
		if (omxCameraSettings.doRecordingPreview) 
		{
			//Create camera->video_render Tunnel
			error = OMX_SetupTunnel(camera, CAMERA_PREVIEW_PORT, render, VIDEO_RENDER_INPUT_PORT);
			if (error != OMX_ErrorNone) 
			{
				ofLog(OF_LOG_ERROR, "camera->video_render OMX_SetupTunnel FAIL error: 0x%08x", error);
			}
		}

		// Tunnel camera video output port and encoder input port
		error = OMX_SetupTunnel(camera, CAMERA_OUTPUT_PORT, encoder, VIDEO_ENCODE_INPUT_PORT);
		if(error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "CAMERA_OUTPUT_PORT->VIDEO_ENCODE_INPUT_PORT OMX_SetupTunnel FAIL error: 0x%08x", error);
		}

		
		//Set encoder to Idle
		error = OMX_SendCommand(encoder, OMX_CommandStateSet, OMX_StateIdle, NULL);
		if (error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "encoder OMX_SendCommand OMX_StateIdle FAIL error: 0x%08x", error);
		}
		
		//Set camera to Idle
		error = OMX_SendCommand(camera, OMX_CommandStateSet, OMX_StateIdle, NULL);
		if (error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "camera OMX_SendCommand OMX_StateIdle FAIL error: 0x%08x", error);
		}
		
		if (omxCameraSettings.doRecordingPreview)
		{
			//Enable camera preview port
			error = OMX_SendCommand(camera, OMX_CommandPortEnable, CAMERA_PREVIEW_PORT, NULL);
			if (error != OMX_ErrorNone) 
			{
				ofLog(OF_LOG_ERROR, "camera OMX_CommandPortEnable CAMERA_PREVIEW_PORT FAIL error: 0x%08x", error);
			}
		}
	
		//Enable camera output port
		error = OMX_SendCommand(camera, OMX_CommandPortEnable, CAMERA_OUTPUT_PORT, NULL);
		if (error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "camera OMX_CommandPortEnable CAMERA_OUTPUT_PORT FAIL error: 0x%08x", error);
		}
		
		//Enable encoder input port
		error = OMX_SendCommand(encoder, OMX_CommandPortEnable, VIDEO_ENCODE_INPUT_PORT, NULL);
		if (error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "encoder OMX_CommandPortEnable VIDEO_ENCODE_INPUT_PORT FAIL error: 0x%08x", error);
		}
		
		//Enable encoder output port
		error = OMX_SendCommand(encoder, OMX_CommandPortEnable, VIDEO_ENCODE_OUTPUT_PORT, NULL);
		if (error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "encoder OMX_CommandPortEnable VIDEO_ENCODE_OUTPUT_PORT FAIL error: 0x%08x", error);
		}
		
		if (omxCameraSettings.doRecordingPreview) 
		{
			//Enable render input port
			error = OMX_SendCommand(render, OMX_CommandPortEnable, VIDEO_RENDER_INPUT_PORT, NULL);
			if (error != OMX_ErrorNone) 
			{
				ofLog(OF_LOG_ERROR, "render enable output port FAIL error: 0x%08x", error);
			}
		}

		OMX_PARAM_PORTDEFINITIONTYPE encoderOutputPortDefinition;
		OMX_INIT_STRUCTURE(encoderOutputPortDefinition);
		encoderOutputPortDefinition.nPortIndex = VIDEO_ENCODE_OUTPUT_PORT;
		error = OMX_GetParameter(encoder, OMX_IndexParamPortDefinition, &encoderOutputPortDefinition);
		if (error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "encoder OMX_GetParameter OMX_IndexParamPortDefinition FAIL error: 0x%08x", error);
		}else 
		{
			ofLogVerbose(__func__) << "VIDEO_ENCODE_OUTPUT_PORT eColorFormat: " << OMX_Maps::getInstance().colorFormatTypes[encoderOutputPortDefinition.format.video.eColorFormat];
		}

		error = OMX_AllocateBuffer(encoder, &encoderOutputBuffer, VIDEO_ENCODE_OUTPUT_PORT, NULL, encoderOutputPortDefinition.nBufferSize);
		if (error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "encoder OMX_AllocateBuffer VIDEO_ENCODE_OUTPUT_PORT FAIL error: 0x%08x", error);
			
		}
		

		//Start camera
		error = OMX_SendCommand(camera, OMX_CommandStateSet, OMX_StateExecuting, NULL);
		if (error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "camera OMX_StateExecuting FAIL error: 0x%08x", error);
		}
		
		//Start encoder
		error = OMX_SendCommand(encoder, OMX_CommandStateSet, OMX_StateExecuting, NULL);
		if (error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "encoder OMX_StateExecuting FAIL error: 0x%08x", error);		
		}
		
		if (omxCameraSettings.doRecordingPreview) 
		{
			
			//Start renderer
			error = OMX_SendCommand(render, OMX_CommandStateSet, OMX_StateExecuting, NULL);
			if (error != OMX_ErrorNone) 
			{
				ofLog(OF_LOG_ERROR, "render OMX_StateExecuting FAIL error: 0x%08x", error);		
			}
			
			setupDisplay();
			
		}
		
		
		error = OMX_FillThisBuffer(encoder, encoderOutputBuffer);
		
		if (error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "encoder OMX_FillThisBuffer FAIL error: 0x%08x", error);		
		}
		
		bool doThreadBlocking	= true;
		startThread(doThreadBlocking);
		
	}else 
	{
		setupRenderer();
		
		
		//Create camera->video_render Tunnel
		error = OMX_SetupTunnel(camera, CAMERA_OUTPUT_PORT, render, VIDEO_RENDER_INPUT_PORT);
		if (error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "camera->video_render OMX_SetupTunnel FAIL error: 0x%08x", error);
		}
		
		//Enable camera output port
		error = OMX_SendCommand(camera, OMX_CommandPortEnable, CAMERA_OUTPUT_PORT, NULL);
		if (error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "camera enable output port FAIL error: 0x%08x", error);
		}
		
		//Enable render input port
		error = OMX_SendCommand(render, OMX_CommandPortEnable, VIDEO_RENDER_INPUT_PORT, NULL);
		if (error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "render enable output port FAIL error: 0x%08x", error);
		}
		
		
		//Start renderer
		error = OMX_SendCommand(render, OMX_CommandStateSet, OMX_StateExecuting, NULL);
		if (error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "render OMX_StateExecuting FAIL error: 0x%08x", error);		
		}
		
		//Start camera
		error = OMX_SendCommand(camera, OMX_CommandStateSet, OMX_StateExecuting, NULL);
		if (error != OMX_ErrorNone) 
		{
			ofLog(OF_LOG_ERROR, "camera OMX_StateExecuting FAIL error: 0x%08x", error);
		}
		
		setupDisplay();
				
	}

	isOpen = true;
	return error;
}
Example #9
/**
 * Convert a MMAL status return value to a simple boolean of success
 * Also displays a fault if code is not success
 *
 * @param status The error code to convert
 * @return 0 if status is success, 1 otherwise
 */
int mmal_status_to_int(MMAL_STATUS_T status)
{
	if (status == MMAL_SUCCESS)
		return 0;
	else
	{
		switch (status)
		{
			case MMAL_ENOMEM :   ofLogVerbose() << "Out of memory"; break;
			case MMAL_ENOSPC :   ofLogVerbose() << "Out of resources (other than memory)"; break;
			case MMAL_EINVAL:    ofLogVerbose() << "Argument is invalid"; break;
			case MMAL_ENOSYS :   ofLogVerbose() << "Function not implemented"; break;
			case MMAL_ENOENT :   ofLogVerbose() << "No such file or directory"; break;
			case MMAL_ENXIO :    ofLogVerbose() << "No such device or address"; break;
			case MMAL_EIO :      ofLogVerbose() << "I/O error"; break;
			case MMAL_ESPIPE :   ofLogVerbose() << "Illegal seek"; break;
			case MMAL_ECORRUPT : ofLogVerbose() << "Data is corrupt (FIXME: not POSIX)"; break;
			case MMAL_ENOTREADY : ofLogVerbose() << "Component is not ready (FIXME: not POSIX)"; break;
			case MMAL_ECONFIG :  ofLogVerbose() << "Component is not configured (FIXME: not POSIX)"; break;
			case MMAL_EISCONN :  ofLogVerbose() << "Port is already connected"; break;
			case MMAL_ENOTCONN : ofLogVerbose() << "Port is disconnected"; break;
			case MMAL_EAGAIN :   ofLogVerbose() << "Resource temporarily unavailable. Try again later"; break;
			case MMAL_EFAULT :   ofLogVerbose() << "Bad address"; break;
			default :            ofLogVerbose() << "Unknown status error"; break;
		}
		
		return 1;
	}
}
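A minimal usage sketch for the helper above, assuming the standard MMAL component API (mmal_component_create and MMAL_COMPONENT_DEFAULT_CAMERA come from the userland MMAL headers; the function name is hypothetical):

#include "interface/mmal/mmal.h"
#include "interface/mmal/util/mmal_default_components.h"

// Hypothetical caller: create the camera component and use the helper above
// to collapse the status into a pass/fail check (the helper logs the reason).
bool createCamera(MMAL_COMPONENT_T** camera)
{
	MMAL_STATUS_T status = mmal_component_create(MMAL_COMPONENT_DEFAULT_CAMERA, camera);
	return mmal_status_to_int(status) == 0;
}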
Example #10
//--------------------------------------------------------------
void ofApp::update(){

	// OSC receiver queues up new messages, so you need to iterate
	// through waiting messages to get each incoming message

	// check for waiting messages
	while(serverReceiver.hasWaitingMessages()){
		// get the next message
		ofxOscMessage m;
		serverReceiver.getNextMessage(m);
		//Log received message for easier debugging of participants' messages:
		ofLogVerbose("Server recvd msg " + getOscMsgAsString(m) + " from " + m.getRemoteHost());

		// check the address of the incoming message
		if(m.getAddress() == "/typing"){
			//Identify host of incoming msg
			string incomingHost = m.getRemoteHost();
			//See if incoming host is a new one:
			if(std::find(knownClients.begin(), knownClients.end(), incomingHost)
			   == knownClients.end()){
				knownClients.push_back(incomingHost); //add new host to list
			}
			// get the first argument (we're only sending one) as a string
			if(m.getNumArgs() > 0){
				if(m.getArgType(0) == OFXOSC_TYPE_STRING){
					//reimplemented message display:
					//If vector has reached max size, delete the first/oldest element
					if(serverMessages.size() == maxServerMessages){
						serverMessages.erase(serverMessages.begin());
					}
					//Add message text at the end of the vector
					serverMessages.push_back(m.getArgAsString(0));

					//Broadcast message to other chat participants
					broadcastReceivedMessage(m.getArgAsString(0));
				}
			}
		}
		// handle getting random OSC messages here
		else{
			ofLogWarning("Server got weird message: " + m.getAddress());
		}
	}

	// Client side:

	// OSC receiver queues up new messages, so you need to iterate
	// through waiting messages to get each incoming message

	// check for waiting messages
	while(clientReceiver.hasWaitingMessages()){
		// get the next message
		ofxOscMessage m;
		clientReceiver.getNextMessage(m);
		ofLogNotice() << "Client just received a message";
		// check the address of the incoming message
		if(m.getAddress() == "/chatlog"){
			// get the first argument (we're only sending one) as a string
			if(m.getNumArgs() > 0){
				if(m.getArgType(0) == OFXOSC_TYPE_STRING){
					string oldMessages = clientMessages;
					clientMessages = m.getArgAsString(0) + "\n" + oldMessages;
				}
			}
		}
	}

	// this is purely a workaround for a mysterious OSCpack bug on 64bit linux:
	// after startup, reinit the receivers.
	// it must be a timing problem, though - in debug, stepping through, it works.
	if(ofGetFrameNum() == 60){
		clientReceiver.setup(clientRecvPort);
		serverReceiver.setup(serverRecvPort);
	}
}
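For context, a minimal sketch of the sending side that the "/typing" handler above expects; the helper name and the assumption that the sender was already set up with sender.setup(serverHost, serverRecvPort) are illustrative:

// Hypothetical client-side helper matching the "/typing" handler above.
void sendTypingMessage(ofxOscSender& sender, const string& text){
	ofxOscMessage m;
	m.setAddress("/typing"); // the address the server switches on
	m.addStringArg(text);    // the handler reads exactly one string argument
	sender.sendMessage(m);
}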
Example #11
bool ofGstVideoPlayer::allocate(){
	if(bIsAllocated){
		return true;
	}

	guint64 durationNanos = videoUtils.getDurationNanos();

	nFrames		  = 0;
	if(GstPad* pad = gst_element_get_static_pad(videoUtils.getSink(), "sink")){
#if GST_VERSION_MAJOR==0
		int width,height;
		if(gst_video_get_size(GST_PAD(pad), &width, &height)){
			if(!videoUtils.allocate(width,height,internalPixelFormat)) return false;
		}else{
			ofLogError("ofGstVideoPlayer") << "allocate(): couldn't query width and height";
			return false;
		}

		const GValue *framerate = gst_video_frame_rate(pad);
		fps_n=0;
		fps_d=0;
		if(framerate && GST_VALUE_HOLDS_FRACTION (framerate)){
			fps_n = gst_value_get_fraction_numerator (framerate);
			fps_d = gst_value_get_fraction_denominator (framerate);
			nFrames = (float)(durationNanos / (float)GST_SECOND) * (float)fps_n/(float)fps_d;
			ofLogVerbose("ofGstVideoPlayer") << "allocate(): framerate: " << fps_n << "/" << fps_d;
		}else{
			ofLogWarning("ofGstVideoPlayer") << "allocate(): cannot get framerate, frame seek won't work";
		}
		bIsAllocated = true;
#else
		if(GstCaps *caps = gst_pad_get_current_caps (GST_PAD (pad))){
			GstVideoInfo info;
			gst_video_info_init (&info);
			if (gst_video_info_from_caps (&info, caps)){
				ofPixelFormat format = ofGstVideoUtils::getOFFormat(GST_VIDEO_INFO_FORMAT(&info));
				if(format!=internalPixelFormat){
					ofLogVerbose("ofGstVideoPlayer") << "allocating as " << info.width << "x" << info.height << " " << info.finfo->description << " " << info.finfo->name;
					internalPixelFormat = format;
				}
				if(!videoUtils.allocate(info.width,info.height,format)) return false;
			}else{
				ofLogError("ofGstVideoPlayer") << "allocate(): couldn't query width and height";
				return false;
			}

			fps_n = info.fps_n;
			fps_d = info.fps_d;
			nFrames = (float)(durationNanos / (float)GST_SECOND) * (float)fps_n/(float)fps_d;
			gst_caps_unref(caps);
			bIsAllocated = true;
		}else{
			ofLogError("ofGstVideoPlayer") << "allocate(): cannot get pipeline caps";
			bIsAllocated = false;
		}
#endif
		gst_object_unref(GST_OBJECT(pad));
	}else{
		ofLogError("ofGstVideoPlayer") << "allocate(): cannot get sink pad";
		bIsAllocated = false;
	}
	return bIsAllocated;
}
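As a quick check of the frame-count estimate above: nFrames = (durationNanos / GST_SECOND) * fps_n / fps_d, so a 10 second clip at 30000/1001 fps (NTSC 29.97) gives 10 * 30000 / 1001 ≈ 299.7, which truncates to 299 frames.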
Example #12
void OpenCVMinimalParticle::setup(ofFbo* fbo_)
{
    setFBO(fbo_);
    doUpdateBackground = true;
    
    OpenCVEngine::getInstance().setup();


    singleWallWidth = OpenCVEngine::getInstance().videoSourceWidth;
    wholeWallWidth = OpenCVEngine::getInstance().canvasWidth;
    int wallHeight = OpenCVEngine::getInstance().canvasHeight;
    
    particleLayer = NULL;
    
    particleLayer = new ParticleLayer();
    
    backgroundFBO.allocate(wholeWallWidth, wallHeight);
    backgroundFBO.begin();
    ofClear(0, 0, 0, 0);
    backgroundFBO.end();
    
    contentFBO.allocate(wholeWallWidth, wallHeight);
    contentFBO.begin();
    ofClear(0, 0, 0, 0);
    contentFBO.end();
    
    particleSize = 400;
    particleLayer->setup(particleSize);
    
    leftWall.id = 0;

    leftWall.wall = OpenCVEngine::getInstance().getWall(0);
    
    rightWall.id = 1;
    rightWall.wall = OpenCVEngine::getInstance().getWall(1);
    
    for (int x = 0; x < backgroundFBO.getWidth(); x += particleSize)
    {
        for (int y = 0; y < backgroundFBO.getHeight(); y += particleSize)
        {
            ofVec3f particleOrigin(x, y, 0);
            ofxParticle* particle = particleLayer->addParticle(particleOrigin);
            if (particleOrigin.x<=singleWallWidth)
            {
                leftWall.particles.push_back(particle);
            }
            else if (particleOrigin.x >= singleWallWidth)
            {
                rightWall.particles.push_back(particle);
            }
            else
            {
                ofLogVerbose() << "particleOrigin: " << particleOrigin << " NOT REGISTERING";
            }
            
        }
    }
    wallGroups.push_back(&leftWall);
    wallGroups.push_back(&rightWall);
    
    ofLoadImage(particleTexture, "dot.png");
    
    ColorPair c1; //1.jpg
    c1.color1 = ofColor(255, 22, 93); //red
    c1.color2 = ofColor(171, 255, 99); //green
    
    ColorPair c2; //3.jpg
    c2.color1 = ofColor(229, 255, 86); //lime
    c2.color2 = ofColor(0, 255, 234); //teal
    
    ColorPair c3; //4.jpg
    c3.color1 = ofColor(255, 137, 87); //orange
    c3.color2 = ofColor(223, 87, 255); //purple
    
    colorPairs.push_back(c1);
    colorPairs.push_back(c2);
    colorPairs.push_back(c3);
    
    currentColorPairIndex = ofRandom(colorPairs.size());
}
Example #13
//----------------------------------------
void XN_CALLBACK_TYPE ofxOpenNITracker::UserCalibration_CalibrationStart(xn::SkeletonCapability& capability, XnUserID nID, void* pCookie){
	ofLogVerbose(LOG_NAME) << "Calibration started for user" << nID;
}
Example #14
//----------------------------------------
void XN_CALLBACK_TYPE ofxOpenNITracker::UserPose_PoseDetected(xn::PoseDetectionCapability& rCapability, const XnChar* strPose, XnUserID nID, void* pCookie){
	ofxOpenNITracker* tracker = static_cast<ofxOpenNITracker*>(pCookie);
	ofLogVerbose(LOG_NAME) << "Pose" << strPose << "detected for user" << nID;
	tracker->requestCalibration(nID);
	tracker->stopPoseDetection(nID);
}
Example #15
//-------------------------------------------
void ofxAssimpModelLoader::loadGLResources(){

	ofLogVerbose("ofxAssimpModelLoader") << "loadGLResources(): starting";

    // create new mesh helpers for each mesh, will populate their data later.
    modelMeshes.resize(scene->mNumMeshes,ofxAssimpMeshHelper());

    // create OpenGL buffers and populate them based on each mesh's pertinent info.
    for (unsigned int i = 0; i < scene->mNumMeshes; ++i){
        ofLogVerbose("ofxAssimpModelLoader") << "loadGLResources(): loading mesh " << i;
        // current mesh we are introspecting
        aiMesh* mesh = scene->mMeshes[i];

        // the current meshHelper we will be populating data into.
        ofxAssimpMeshHelper & meshHelper = modelMeshes[i];
        //ofxAssimpMeshHelper meshHelper;
		
        //meshHelper.texture = NULL;

        // Handle material info
        aiMaterial* mtl = scene->mMaterials[mesh->mMaterialIndex];
        aiColor4D dcolor, scolor, acolor, ecolor;

        if(AI_SUCCESS == aiGetMaterialColor(mtl, AI_MATKEY_COLOR_DIFFUSE, &dcolor)){
            meshHelper.material.setDiffuseColor(aiColorToOfColor(dcolor));
        }

        if(AI_SUCCESS == aiGetMaterialColor(mtl, AI_MATKEY_COLOR_SPECULAR, &scolor)){
        	meshHelper.material.setSpecularColor(aiColorToOfColor(scolor));
        }

        if(AI_SUCCESS == aiGetMaterialColor(mtl, AI_MATKEY_COLOR_AMBIENT, &acolor)){
        	meshHelper.material.setAmbientColor(aiColorToOfColor(acolor));
        }

        if(AI_SUCCESS == aiGetMaterialColor(mtl, AI_MATKEY_COLOR_EMISSIVE, &ecolor)){
        	meshHelper.material.setEmissiveColor(aiColorToOfColor(ecolor));
        }

        float shininess;
        if(AI_SUCCESS == aiGetMaterialFloat(mtl, AI_MATKEY_SHININESS, &shininess)){
			meshHelper.material.setShininess(shininess);
		}

        int blendMode;
		if(AI_SUCCESS == aiGetMaterialInteger(mtl, AI_MATKEY_BLEND_FUNC, &blendMode)){
			if(blendMode==aiBlendMode_Default){
				meshHelper.blendMode=OF_BLENDMODE_ALPHA;
			}else{
				meshHelper.blendMode=OF_BLENDMODE_ADD;
			}
		}

        // Culling
        unsigned int max = 1;
        int two_sided;
        if((AI_SUCCESS == aiGetMaterialIntegerArray(mtl, AI_MATKEY_TWOSIDED, &two_sided, &max)) && two_sided)
            meshHelper.twoSided = true;
        else
            meshHelper.twoSided = false;

        // Load Textures
        int texIndex = 0;
        aiString texPath;

        // TODO: handle other aiTextureTypes
        if(AI_SUCCESS == mtl->GetTexture(aiTextureType_DIFFUSE, texIndex, &texPath)){
            ofLogVerbose("ofxAssimpModelLoader") << "loadGLResource(): loading image from \"" << texPath.data << "\"";
            string modelFolder = file.getEnclosingDirectory();
            string relTexPath = ofFilePath::getEnclosingDirectory(texPath.data,false);
            string texFile = ofFilePath::getFileName(texPath.data);
            string realPath = ofFilePath::join(ofFilePath::join(modelFolder, relTexPath), texFile);
            
            if(ofFile::doesFileExist(realPath) == false) {
                ofLogError("ofxAssimpModelLoader") << "loadGLResource(): texture doesn't exist: \""
					<< file.getFileName() + "\" in \"" << realPath << "\"";
            }
            
            ofxAssimpTexture assimpTexture;
            bool bTextureAlreadyExists = false;
            for(int j=0; j<textures.size(); j++) {
                assimpTexture = textures[j];
                if(assimpTexture.getTexturePath() == realPath) {
                    bTextureAlreadyExists = true;
                    break;
                }
            }
            if(bTextureAlreadyExists) {
                meshHelper.assimpTexture = assimpTexture;
                ofLogVerbose("ofxAssimpModelLoader") << "loadGLResource(): texture already loaded: \""
					<< file.getFileName() + "\" from \"" << realPath << "\"";
            } else {
                ofTexture texture;
                bool bTextureLoadedOk = ofLoadImage(texture, realPath);
                if(bTextureLoadedOk) {
                    textures.push_back(ofxAssimpTexture(texture, realPath));
                    assimpTexture = textures.back();
                    meshHelper.assimpTexture = assimpTexture;
                    ofLogVerbose("ofxAssimpModelLoader") << "loadGLResource(): texture loaded, dimensions: "
						<< texture.getWidth() << "x" << texture.getHeight();
                } else {
                    ofLogError("ofxAssimpModelLoader") << "loadGLResource(): couldn't load texture: \""
						<< file.getFileName() + "\" from \"" << realPath << "\"";
                }
            }
        }

        meshHelper.mesh = mesh;
        aiMeshToOfMesh(mesh, meshHelper.cachedMesh, &meshHelper);
        meshHelper.cachedMesh.setMode(OF_PRIMITIVE_TRIANGLES);
        meshHelper.validCache = true;
        meshHelper.hasChanged = false;

		// note: this runs once per mesh, so scenes with several meshes will
		// push the same animations more than once
		int numOfAnimations = scene->mNumAnimations;
		for (int a = 0; a < numOfAnimations; a++) {
			aiAnimation * animation = scene->mAnimations[a];
			animations.push_back(ofxAssimpAnimation(scene, animation));
		}

        if(hasAnimations()){
			meshHelper.animatedPos.resize(mesh->mNumVertices);
			if(mesh->HasNormals()){
				meshHelper.animatedNorm.resize(mesh->mNumVertices);
			}
        }


        int usage;
        if(getAnimationCount()){
#ifndef TARGET_OPENGLES
        	if(!ofIsGLProgrammableRenderer()){
        		usage = GL_STATIC_DRAW;
        	}else{
        		usage = GL_STREAM_DRAW;
        	}
#else
        	usage = GL_DYNAMIC_DRAW;
#endif
        }else{
        	usage = GL_STATIC_DRAW;

        }

        meshHelper.vbo.setVertexData(&mesh->mVertices[0].x,3,mesh->mNumVertices,usage,sizeof(aiVector3D));
        if(mesh->HasVertexColors(0)){
        	meshHelper.vbo.setColorData(&mesh->mColors[0][0].r,mesh->mNumVertices,GL_STATIC_DRAW,sizeof(aiColor4D));
        }
        if(mesh->HasNormals()){
        	meshHelper.vbo.setNormalData(&mesh->mNormals[0].x,mesh->mNumVertices,usage,sizeof(aiVector3D));
        }
        if (meshHelper.cachedMesh.hasTexCoords()){			
        	meshHelper.vbo.setTexCoordData(meshHelper.cachedMesh.getTexCoordsPointer()[0].getPtr(),mesh->mNumVertices,GL_STATIC_DRAW,sizeof(ofVec2f));
        }

        meshHelper.indices.resize(mesh->mNumFaces * 3);
        int j=0;
        for (unsigned int x = 0; x < mesh->mNumFaces; ++x){
			for (unsigned int a = 0; a < mesh->mFaces[x].mNumIndices; ++a){
				meshHelper.indices[j++]=mesh->mFaces[x].mIndices[a];
			}
		}

        meshHelper.vbo.setIndexData(&meshHelper.indices[0],meshHelper.indices.size(),GL_STATIC_DRAW);

        //modelMeshes.push_back(meshHelper);
    }
    


    ofLogVerbose("ofxAssimpModelLoader") << "loadGLResource(): finished";
}
Example #16
//--------------------------------------------------------------
void ofApp::onConnect( ofxLibwebsockets::Event& args ){
    ofLogVerbose()<<"on connected";
}
Example #17
bool OMXEGLImage::Open(COMXStreamInfo &hints, OMXClock *clock, EGLImageKHR eglImage_)
{
	eglImage = eglImage_;
	OMX_ERRORTYPE omx_err   = OMX_ErrorNone;
	std::string decoder_name;

	m_video_codec_name      = "";
	m_codingType            = OMX_VIDEO_CodingUnused;

	m_decoded_width  = hints.width;
	m_decoded_height = hints.height;


	if(!m_decoded_width || !m_decoded_height)
		return false;

	if(hints.extrasize > 0 && hints.extradata != NULL)
	{
		m_extrasize = hints.extrasize;
		m_extradata = (uint8_t *)malloc(m_extrasize);
		memcpy(m_extradata, hints.extradata, hints.extrasize);
	}

	switch (hints.codec)
	{
	case CODEC_ID_H264:
	{
	  switch(hints.profile)
	  {
		case FF_PROFILE_H264_BASELINE:
		  // (role name) video_decoder.avc
		  // H.264 Baseline profile
		  decoder_name = OMX_H264BASE_DECODER;
		  m_codingType = OMX_VIDEO_CodingAVC;
		  m_video_codec_name = "omx-h264";
		  break;
		case FF_PROFILE_H264_MAIN:
		  // (role name) video_decoder.avc
		  // H.264 Main profile
		  decoder_name = OMX_H264MAIN_DECODER;
		  m_codingType = OMX_VIDEO_CodingAVC;
		  m_video_codec_name = "omx-h264";
		  break;
		case FF_PROFILE_H264_HIGH:
		  // (role name) video_decoder.avc
		  // H.264 Main profile
		  decoder_name = OMX_H264HIGH_DECODER;
		  m_codingType = OMX_VIDEO_CodingAVC;
		  m_video_codec_name = "omx-h264";
		  break;
		case FF_PROFILE_UNKNOWN:
		  decoder_name = OMX_H264HIGH_DECODER;
		  m_codingType = OMX_VIDEO_CodingAVC;
		  m_video_codec_name = "omx-h264";
		  break;
		default:
		  decoder_name = OMX_H264HIGH_DECODER;
		  m_codingType = OMX_VIDEO_CodingAVC;
		  m_video_codec_name = "omx-h264";
		  break;
	  }
	}
	break;
	case CODEC_ID_MPEG4:
	  // (role name) video_decoder.mpeg4
	  // MPEG-4, DivX 4/5 and Xvid compatible
	  decoder_name = OMX_MPEG4_DECODER;
	  m_codingType = OMX_VIDEO_CodingMPEG4;
	  m_video_codec_name = "omx-mpeg4";
	  break;
	case CODEC_ID_MPEG1VIDEO:
	case CODEC_ID_MPEG2VIDEO:
	  // (role name) video_decoder.mpeg2
	  // MPEG-2
	  decoder_name = OMX_MPEG2V_DECODER;
	  m_codingType = OMX_VIDEO_CodingMPEG2;
	  m_video_codec_name = "omx-mpeg2";
	  break;
	case CODEC_ID_H263:
	  // (role name) video_decoder.mpeg4
	  // MPEG-4, DivX 4/5 and Xvid compatible
	  decoder_name = OMX_MPEG4_DECODER;
	  m_codingType = OMX_VIDEO_CodingMPEG4;
	  m_video_codec_name = "omx-h263";
	  break;
	case CODEC_ID_VP6:
	case CODEC_ID_VP6F:
	case CODEC_ID_VP6A:
	  // (role name) video_decoder.vp6
	  // VP6
	  decoder_name = OMX_VP6_DECODER;
	  m_codingType = OMX_VIDEO_CodingVP6;
	  m_video_codec_name = "omx-vp6";
	break;
	case CODEC_ID_VP8:
	  // (role name) video_decoder.vp8
	  // VP8
	  decoder_name = OMX_VP8_DECODER;
	  m_codingType = OMX_VIDEO_CodingVP8;
	  m_video_codec_name = "omx-vp8";
	break;
	case CODEC_ID_THEORA:
	  // (role name) video_decoder.theora
	  // theora
	  decoder_name = OMX_THEORA_DECODER;
	  m_codingType = OMX_VIDEO_CodingTheora;
	  m_video_codec_name = "omx-theora";
	break;
	case CODEC_ID_MJPEG:
	case CODEC_ID_MJPEGB:
	  // (role name) video_decoder.mjpg
	  // mjpg
	  decoder_name = OMX_MJPEG_DECODER;
	  m_codingType = OMX_VIDEO_CodingMJPEG;
	  m_video_codec_name = "omx-mjpeg";
	break;
	case CODEC_ID_VC1:
	case CODEC_ID_WMV3:
	  // (role name) video_decoder.vc1
	  // VC-1, WMV9
	  decoder_name = OMX_VC1_DECODER;
	  m_codingType = OMX_VIDEO_CodingWMV;
	  m_video_codec_name = "omx-vc1";
	  break;    
	default:
	  ofLog(OF_LOG_VERBOSE, "Video codec id unknown: %x\n", hints.codec);
	  return false;
	break;
	}


	std::string componentName = "";

	componentName = decoder_name;
	if(!m_omx_decoder.Initialize(componentName, OMX_IndexParamVideoInit))
		return false;

	componentName = "OMX.broadcom.egl_render";
	if(!m_omx_render.Initialize(componentName, OMX_IndexParamVideoInit))
		return false;

	componentName = "OMX.broadcom.video_scheduler";
	if(!m_omx_sched.Initialize(componentName, OMX_IndexParamVideoInit))
		return false;

	if(clock == NULL)
		return false;

	m_av_clock = clock;
	m_omx_clock = m_av_clock->GetOMXClock();

	if(m_omx_clock->GetComponent() == NULL)
	{
		m_av_clock = NULL; 
		m_omx_clock = NULL;
		return false; 
	}

	m_omx_tunnel_decoder.Initialize(&m_omx_decoder, m_omx_decoder.GetOutputPort(), &m_omx_sched, m_omx_sched.GetInputPort());
	m_omx_tunnel_sched.Initialize(&m_omx_sched, m_omx_sched.GetOutputPort(), &m_omx_render, m_omx_render.GetInputPort());
	m_omx_tunnel_clock.Initialize(m_omx_clock, m_omx_clock->GetInputPort() + 1, &m_omx_sched, m_omx_sched.GetOutputPort() + 1);

	omx_err = m_omx_tunnel_clock.Establish(false);
	if(omx_err != OMX_ErrorNone)
	{
		ofLog(OF_LOG_VERBOSE, "\nOMXEGLImage::Open m_omx_tunnel_clock.Establish\n");
		return false;
	}

	omx_err = m_omx_decoder.SetStateForComponent(OMX_StateIdle);
	if (omx_err != OMX_ErrorNone)
	{
		ofLog(OF_LOG_VERBOSE, "\nOMXEGLImage::Open m_omx_decoder.SetStateForComponent\n");
		return false;
	}

	OMX_VIDEO_PARAM_PORTFORMATTYPE formatType;
	OMX_INIT_STRUCTURE(formatType);
	formatType.nPortIndex = m_omx_decoder.GetInputPort();
	formatType.eCompressionFormat = m_codingType;

	if (hints.fpsscale > 0 && hints.fpsrate > 0)
	{
		formatType.xFramerate = (long long)(1<<16)*hints.fpsrate / hints.fpsscale;
	}
	else
	{
		formatType.xFramerate = 25 * (1<<16);
	}

	omx_err = m_omx_decoder.SetParameter(OMX_IndexParamVideoPortFormat, &formatType);
	if(omx_err == OMX_ErrorNone)
	{
		ofLogVerbose() << "m_omx_decoder SET OMX_IndexParamVideoPortFormat PASS";
	}else 
	{
		ofLogError() << "m_omx_decoder SET OMX_IndexParamVideoPortFormat FAIL";
		return false;
	}

	OMX_PARAM_PORTDEFINITIONTYPE portParam;
	OMX_INIT_STRUCTURE(portParam);
	portParam.nPortIndex = m_omx_decoder.GetInputPort();

	omx_err = m_omx_decoder.GetParameter(OMX_IndexParamPortDefinition, &portParam);
	if(omx_err == OMX_ErrorNone)
	{
		ofLogVerbose() << "m_omx_decoder GET OMX_IndexParamPortDefinition PASS";
	}else 
	{
		ofLog(OF_LOG_ERROR, "m_omx_decoder GET OMX_IndexParamPortDefinition FAIL omx_err(0x%08x)\n", omx_err);
		return false;
	}
	portParam.nPortIndex = m_omx_decoder.GetInputPort();
	// JVC: I think numVideoBuffers can be probed for an optimal amount
	// omxplayer uses 60 but maybe that takes away GPU memory for other operations?
	int numVideoBuffers = 60;
	portParam.nBufferCountActual = numVideoBuffers; 

	portParam.format.video.nFrameWidth  = m_decoded_width;
	portParam.format.video.nFrameHeight = m_decoded_height;

	omx_err = m_omx_decoder.SetParameter(OMX_IndexParamPortDefinition, &portParam);
	if(omx_err == OMX_ErrorNone)
	{
	  ofLogVerbose() << "m_omx_decoder SET OMX_IndexParamPortDefinition PASS";
	}else 
	{
		ofLog(OF_LOG_ERROR, "m_omx_decoder SET OMX_IndexParamPortDefinition FAIL omx_err(0x%08x)\n", omx_err);
		return false;
	}
	
	// broadcom omx extension:
	// When enabled, the timestamp fifo mode will change the way incoming timestamps are associated with output images.
	// In this mode the incoming timestamps get used without re-ordering on output images
	/*OMX_PARAM_BRCMVIDEODECODEERRORCONCEALMENTTYPE concanParam;
	OMX_INIT_STRUCTURE(concanParam);
	concanParam.bStartWithValidFrame = OMX_FALSE;
	
	omx_err = m_omx_decoder.SetParameter(OMX_IndexParamBrcmVideoDecodeErrorConcealment, &concanParam);
	if(omx_err == OMX_ErrorNone)
	{
		ofLogVerbose()	<< "m_omx_decoder OMX_IndexParamBrcmVideoDecodeErrorConcealment PASS";
	}else 
	{
		ofLog(OF_LOG_ERROR, "m_omx_decoder OMX_IndexParamBrcmVideoDecodeErrorConcealment FAIL omx_err(0x%08x)\n", omx_err);
		return false;
	}
	
	if(NaluFormatStartCodes(hints.codec, m_extradata, m_extrasize))
	{
		OMX_NALSTREAMFORMATTYPE nalStreamFormat;
		OMX_INIT_STRUCTURE(nalStreamFormat);
		nalStreamFormat.nPortIndex = m_omx_decoder.GetInputPort();
		nalStreamFormat.eNaluFormat = OMX_NaluFormatOneNaluPerBuffer;
		
		omx_err = m_omx_decoder.SetParameter((OMX_INDEXTYPE)OMX_IndexParamNalStreamFormatSelect, &nalStreamFormat);
		if (omx_err == OMX_ErrorNone)
		{
			ofLogVerbose()	<< "Open OMX_IndexParamNalStreamFormatSelect PASS";
		}else 
		{
			ofLog(OF_LOG_ERROR, "Open OMX_IndexParamNalStreamFormatSelect FAIL (0%08x)\n", omx_err);
			return false;
		}

	}*/
	

	// Alloc buffers for the omx input port.
	omx_err = m_omx_decoder.AllocInputBuffers();
	if(omx_err == OMX_ErrorNone)
	{
		ofLogVerbose() << "m_omx_decoder AllocInputBuffers PASS";
	}else 
	{
		ofLog(OF_LOG_ERROR, "m_omx_decoder AllocInputBuffers FAIL omx_err(0x%08x)\n", omx_err);
		return false;
	}


	omx_err = m_omx_tunnel_decoder.Establish(false);
	if(omx_err == OMX_ErrorNone)
	{
		ofLogVerbose() << "m_omx_tunnel_decoder Establish PASS";
	}else 
	{
		ofLog(OF_LOG_ERROR, "m_omx_tunnel_decoder Establish FAIL omx_err(0x%08x)\n", omx_err);
		return false;
	}
	
	m_av_clock->SetSpeed(DVD_PLAYSPEED_NORMAL);
	m_av_clock->OMXStateExecute();
	m_av_clock->OMXStart();
	
	omx_err = m_omx_decoder.SetStateForComponent(OMX_StateExecuting);
	if(omx_err == OMX_ErrorNone)
	{
		ofLogVerbose() << "m_omx_decoder OMX_StateExecuting PASS";
	}else 
	{
		ofLog(OF_LOG_ERROR, "m_omx_decoder OMX_StateExecuting FAIL omx_err(0x%08x)", omx_err);
		return false;
	}
	

	omx_err = m_omx_tunnel_sched.Establish(false);
	if(omx_err == OMX_ErrorNone)
	{
		ofLogVerbose() << "m_omx_tunnel_sched Establish PASS";
	}else 
	{
		ofLog(OF_LOG_ERROR, "m_omx_tunnel_sched Establish FAIL omx_err(0x%08x)", omx_err);
		return false;
	}

	omx_err = m_omx_sched.SetStateForComponent(OMX_StateExecuting);
	if(omx_err == OMX_ErrorNone)
	{
		ofLogVerbose() << "m_omx_sched OMX_StateExecuting PASS";
	}else 
	{
		ofLog(OF_LOG_ERROR, "m_omx_sched OMX_StateExecuting FAIL omx_err(0x%08x)", omx_err);
		return false;
	}
	
	omx_err = m_omx_render.SetStateForComponent(OMX_StateIdle);
	if(omx_err == OMX_ErrorNone)
	{
		ofLogVerbose() << "m_omx_render OMX_StateIdle PASS";
	}else 
	{
		ofLog(OF_LOG_ERROR, "m_omx_render OMX_StateIdle FAIL omx_err(0x%08x)", omx_err);
		return false;
	}
	
	ofLogVerbose() << "m_omx_render.GetOutputPort(): " << m_omx_render.GetOutputPort();
	omx_err = m_omx_render.EnablePort(m_omx_render.GetOutputPort(), true);
	if(omx_err == OMX_ErrorNone)
	{
		ofLogVerbose() << "m_omx_render Enable OUTPUT Port PASS";
	}else 
	{
		ofLog(OF_LOG_ERROR, "m_omx_render Enable OUTPUT Port  FAIL omx_err(0x%08x)", omx_err);
		return false;
	}
	
	omx_err = m_omx_render.UseEGLImage(&eglBuffer, m_omx_render.GetOutputPort(), NULL, eglImage);
	if(omx_err == OMX_ErrorNone)
	{
		ofLogVerbose() << "m_omx_render UseEGLImage PASS";
	}else 
	{
		ofLog(OF_LOG_ERROR, "m_omx_render UseEGLImage  FAIL omx_err(0x%08x)", omx_err);
		return false;
	}

	
	if(SendDecoderConfig())
	{
		ofLogVerbose() << "SendDecoderConfig PASS";
	}else 
	{
		ofLog(OF_LOG_ERROR, "SendDecoderConfig FAIL");
		return false;
	}
	
	m_omx_render.SetCustomDecoderFillBufferDoneHandler(onFillBufferDone);
	omx_err = m_omx_render.SetStateForComponent(OMX_StateExecuting);
	if(omx_err == OMX_ErrorNone)
	{
		ofLogVerbose() << "m_omx_render OMX_StateExecuting PASS";
	}else 
	{
		ofLog(OF_LOG_ERROR, "m_omx_render OMX_StateExecuting FAIL omx_err(0x%08x)", omx_err);
		return false;
	}
	omx_err = m_omx_render.FillThisBuffer(eglBuffer);
	if(omx_err == OMX_ErrorNone)
	{
		ofLogVerbose() << "m_omx_render FillThisBuffer PASS";
	}else 
	{
		ofLog(OF_LOG_ERROR, "m_omx_render FillThisBuffer FAIL omx_err(0x%08x)", omx_err);
		return false;
	}
	
	m_is_open           = true;
	m_drop_state        = false;
	m_setStartTime      = true;
	m_setStartTimeText  = true;


	ofLog(OF_LOG_VERBOSE,
	      "%s::%s - decoder_component(0x%p), input_port(0x%x), output_port(0x%x)\n",
	      CLASSNAME, __func__, m_omx_decoder.GetComponent(), m_omx_decoder.GetInputPort(), m_omx_decoder.GetOutputPort());

	m_first_frame   = true;
	return true;
}
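As a reading aid (a summary, not part of the original source), the tunnels established by Open() wire the components like this:

// Pipeline built by OMXEGLImage::Open(), summarized:
//
//   video_decode --> video_scheduler --> egl_render --> eglBuffer (EGLImage)
//   clock -------------^  (clock tunnel into the scheduler)
//
// The decoder, scheduler and renderer are then moved to OMX_StateExecuting,
// the renderer's output port is bound to the EGLImage via UseEGLImage(),
// and FillThisBuffer(eglBuffer) starts the decode/render loop.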
Example #18
//--------------------------------------------------------------
void ofApp::onOpen( ofxLibwebsockets::Event& args ){
    ofLogVerbose()<<"on open";
}
Example #19
void PostRouteFileHandler::handlePart(const Poco::Net::MessageHeader& header,
                                      std::istream& stream)
{
    if (header.has("Content-Type"))
    {
        std::string contentType = header["Content-Type"];

        if (!_route.settings().getValidContentTypes().empty() && !isContentTypeValid(contentType))
        {
            ofLogError("PostRouteFileHandler::handlePart") << "Invalid content type: " << contentType;
            return; // reject
        }
    }
    else
    {
        ofLogError("PostRouteFileHandler::handlePart") << "No Content-Type header.";
        return;
    }


    // Is this an uploaded file and are we allowing files to be uploaded?
    if (header.has("Content-Disposition") &&
        _route.settings().getMaximumFileUploadSize() > 0)
    {
        std::string contentDisposition = header["Content-Disposition"];

        Poco::Net::NameValueCollection parameters;

        Poco::Net::MessageHeader::splitParameters(contentDisposition.begin(),
                                                  contentDisposition.end(),
                                                  parameters);

        std::string formFileName = parameters.get("filename", "");
        std::string formFieldName = parameters.get("name", "");

        if (!formFileName.empty())
        {
            try
            {
//                std::stringstream ss;

                std::filesystem::path uploadFolder = _route.settings().getUploadFolder();
                std::filesystem::path uniqueFilename = Poco::UUIDGenerator::defaultGenerator().createOne().toString();
                std::filesystem::path originalFilename = formFileName;
                //std::filesystem::extension(originalFilename)
                
                std::filesystem::path p = uploadFolder;
                p /= uniqueFilename;
                p += std::filesystem::extension(originalFilename);
                
//                ss << _route.settings().getUploadFolder();
//                ss << "/";
//                ss << Poco::UUIDGenerator::defaultGenerator().createOne().toString();
//                ss << ".";
//                ss << Poco::Path(formFileName).getExtension();

                std::string newFilename = ofToDataPath(p, true);

                ofFile file(newFilename, ofFile::WriteOnly, true);

                Poco::Net::MediaType contentType(header["Content-Type"]);

                PostUploadEventArgs args(_evt,
                                         _postId,
                                         formFieldName,
                                         formFileName,
                                         newFilename,
                                         contentType,
                                         0,
                                         PostUploadEventArgs::UPLOAD_STARTING);

                ofNotifyEvent(_route.events.onHTTPUploadEvent, args, &_route);

                ofLogVerbose("PostRouteFileHandler::handlePart") << "Writing file to absolute path : " << file.getAbsolutePath();

                // The section below is from StreamCopier::copyStream,
                // and might be used for upload progress feedback

                Poco::Buffer<char> buffer(_route.settings().getWriteBufferSize());

                stream.read(buffer.begin(), _route.settings().getWriteBufferSize());

                unsigned long long sz = 0;
                unsigned long long n = stream.gcount();

                while (n > 0)
                {
                    if (sz > _route.settings().getMaximumFileUploadSize())
                    {
                        ofLogError("PostRouteFileHandler::handlePart") << "File upload size exceeded.  Removing file.";
                        file.close();
                        ofFile::removeFile(newFilename, false);

                        return;
                    }

                    sz += n;

                    file.write(buffer.begin(), n);

                    if (stream && file)
                    {
                        stream.read(buffer.begin(),
                                    _route.settings().getWriteBufferSize());

                        n = stream.gcount();
                    }
                    else
                    {
                        n = 0;
                    }

                    PostUploadEventArgs uploadArgs(_evt,
                                                   _postId,
                                                   formFieldName,
                                                   formFileName,
                                                   newFilename,
                                                   contentType,
                                                   sz,
                                                   PostUploadEventArgs::UPLOAD_PROGRESS);

                    ofNotifyEvent(_route.events.onHTTPUploadEvent,
                                  uploadArgs,
                                  &_route);
                }

                file.close();

                PostUploadEventArgs finishedArgs(_evt,
                                                 _postId,
                                                 formFieldName,
                                                 formFileName,
                                                 newFilename,
                                                 contentType,
                                                 sz,
                                                 PostUploadEventArgs::UPLOAD_FINISHED);

                ofNotifyEvent(_route.events.onHTTPUploadEvent,
                              finishedArgs,
                              &_route);

            }
            catch (const Poco::Exception& exc)
            {
                ofLogError("PostRouteFileHandler::handlePart") << exc.displayText();
            }
            catch (const std::exception& exc)
            {
                ofLogError("PostRouteFileHandler::handlePart") << exc.what();
            }
            catch ( ... )
            {
                ofLogError("PostRouteFileHandler::handlePart") << "Uncaught thread exception: Unknown exception.";
            }
        }
        else
        {
            ofLogError("PostRouteFileHandler::handlePart") << "No filename in header.";
        }
    }
}
Example #20
//--------------------------------------------------------------
void ofApp::onClose( ofxLibwebsockets::Event& args ){
    ofLogVerbose()<<"on close";
}
Example #21
bool ofxDepthImageSequence::loadSequence(string newSequenceDirectory){
    

	ofDirectory sequenceList(newSequenceDirectory);
	if(!sequenceList.exists()){
		ofLogError("ofxDepthImageSequence -- sequence directory " + newSequenceDirectory + " does not exist!");
		return false;
	}
    
	if(sequenceLoaded){
		images.clear();
		sequenceLoaded = false;
	}


	sequenceList.allowExt("png");
	int numFiles = sequenceList.listDir();
	if(numFiles == 0){
		ofLogError("ofxTLDepthImageSequence -- sequence directory " + newSequenceDirectory + " is empty!");
		return false;
	}
	

	bool checkedForTimestamp = false;
	unsigned long firstFrameTimeOffset = 0;
	for(int i = 0; i < numFiles; i++){
        //backwards compat...
		if(sequenceList.getName(i).find("poster") != string::npos){
			ofLogWarning("discarding poster frame " + sequenceList.getPath(i) );
			continue;
		}
		
		if(!checkedForTimestamp){
			framesHaveTimestamps = sequenceList.getName(i).find("millis") != string::npos;
			checkedForTimestamp = true;
			ofLogVerbose("Frames have timestamps? " + string((framesHaveTimestamps ? "yes!" : "no :(")) );
		}
		
		
		images.push_back( DepthImage() );
		DepthImage& img = images[images.size()-1];
		img.path = sequenceList.getPath(i);
		
		if(framesHaveTimestamps){
			vector<string> split = ofSplitString(sequenceList.getName(i), "_", true, true);
			for(int l = 0; l < split.size(); l++){
				if(split[l] == "millis"){
					img.timestamp = ofToInt(split[l+1]);
					if(i == 0){
						firstFrameTimeOffset = img.timestamp;
					}
					img.timestamp -= firstFrameTimeOffset;
				}
			}
		}

	}
	
	//currentFrame = -1;
    if(framesHaveTimestamps){
	    durationInMillis = images[images.size()-1].timestamp;
    }

	ofLogVerbose("sequence is loaded " + ofToString( images.size() ));
    sequenceDirectory = newSequenceDirectory;
    sequenceLoaded = true;
	setFrame(0);
    updatePixels();
//	startThread();
	return true;
}
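For reference, the timestamp parsing above expects underscore-separated filenames in which a "millis" token is immediately followed by the capture time in milliseconds; a hypothetical name like depthframe_00012_millis_40321.png yields a timestamp of 40321 (ofToInt stops reading at the ".png").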
Example #22
//--------------------------------------------------------------
void ofApp::onIdle( ofxLibwebsockets::Event& args ){
    ofLogVerbose()<<"on idle";
}
Example #23
//------------------------------------------------------------
bool ofOpenALSoundPlayer_TimelineAdditions::loadSound(string fileName, bool is_stream){

    string ext = ofToLower(ofFilePath::getFileExt(fileName));
    if(ext != "wav" && ext != "aif" && ext != "aiff"){
        ofLogError("Sound player can only load .wav or .aiff files");
        return false;
    }
       
    fileName = ofToDataPath(fileName);

	bLoadedOk = false;
	bMultiPlay = false;
	isStreaming = is_stream;
	
	// [1] init sound systems, if necessary
	initialize();

	// [2] try to unload any previously loaded sounds
	// & prevent user-created memory leaks
	// if they call "loadSound" repeatedly, for example

	unloadSound();

	ALenum format=AL_FORMAT_MONO16;

	if(!isStreaming){
		readFile(fileName, buffer);
	}else{
		stream(fileName, buffer);
	}

    if(channels == 0){
        ofLogError("ofOpenALSoundPlayer_TimelineAdditions -- File not found");
        return false;
    }
    
	int numFrames = buffer.size()/channels;
	if(isStreaming){
		buffers.resize(channels*2);
	}else{
		buffers.resize(channels);
	}
	alGenBuffers(buffers.size(), &buffers[0]);
	if(channels==1){
		sources.resize(1);
		alGenSources(1, &sources[0]);
		if (alGetError() != AL_NO_ERROR){
			ofLog(OF_LOG_WARNING,"ofOpenALSoundPlayer_TimelineAdditions: openAL error reported generating sources for " + fileName);
			return false;
		}

		for(int i=0; i<(int)buffers.size(); i++){
			alBufferData(buffers[i],format,&buffer[0],buffer.size()*2,samplerate);
			if (alGetError() != AL_NO_ERROR){
				ofLog(OF_LOG_ERROR,"ofOpenALSoundPlayer_TimelineAdditions: error creating buffer");
				return false;
			}
			if(isStreaming){
				stream(fileName,buffer);
			}
		}
		if(isStreaming){
			alSourceQueueBuffers(sources[0],buffers.size(),&buffers[0]);
		}else{
			alSourcei (sources[0], AL_BUFFER,   buffers[0]);
		}

		alSourcef (sources[0], AL_PITCH,    1.0f);
		alSourcef (sources[0], AL_GAIN,     1.0f);
	    alSourcef (sources[0], AL_ROLLOFF_FACTOR,  0.0);
	    alSourcei (sources[0], AL_SOURCE_RELATIVE, AL_TRUE);
	}else{
		vector<vector<short> > multibuffer;
		multibuffer.resize(channels);
		sources.resize(channels);
		alGenSources(channels, &sources[0]);
		if(isStreaming){
			for(int s=0; s<2;s++){
				for(int i=0;i<channels;i++){
					multibuffer[i].resize(buffer.size()/channels);
					for(int j=0;j<numFrames;j++){
						multibuffer[i][j] = buffer[j*channels+i];
					}
					alBufferData(buffers[s*2+i],format,&multibuffer[i][0],buffer.size()/channels*2,samplerate);
					if (alGetError() != AL_NO_ERROR){
						ofLog(OF_LOG_ERROR,"ofOpenALSoundPlayer_TimelineAdditions: error creating stereo buffers for " + fileName);
						return false;
					}
					alSourceQueueBuffers(sources[i],1,&buffers[s*2+i]);
					stream(fileName,buffer);
				}
			}
		}else{
			for(int i=0;i<channels;i++){
				multibuffer[i].resize(buffer.size()/channels);
				for(int j=0;j<numFrames;j++){
					multibuffer[i][j] = buffer[j*channels+i];
				}
				alBufferData(buffers[i],format,&multibuffer[i][0],buffer.size()/channels*2,samplerate);
				if (alGetError() != AL_NO_ERROR){
					ofLog(OF_LOG_ERROR,"ofOpenALSoundPlayer_TimelineAdditions: error creating stereo buffers for " + fileName);
					return false;
				}
				alSourcei (sources[i], AL_BUFFER,   buffers[i]   );
			}
		}

		for(int i=0;i<channels;i++){
			if (alGetError() != AL_NO_ERROR){
				ofLog(OF_LOG_ERROR,"ofOpenALSoundPlayer_TimelineAdditions: error creating stereo sources for " + fileName);
				return false;
			}

			// only stereo panning
			if(i==0){
				float pos[3] = {-1,0,0};
				alSourcefv(sources[i],AL_POSITION,pos);
			}else{
				float pos[3] = {1,0,0};
				alSourcefv(sources[i],AL_POSITION,pos);
			}
			alSourcef (sources[i], AL_ROLLOFF_FACTOR,  0.0);
			alSourcei (sources[i], AL_SOURCE_RELATIVE, AL_TRUE);
		}
	}
	ofLogVerbose("ofOpenALSoundPlayer_TimelineAdditions: successfully loaded " + fileName);
	bLoadedOk = true;
	return true;
}
Example #24
//--------------------------------------------------------------
void testApp::guiEvent(ofxUIEventArgs &e)
{
	string name = e.widget->getName();
	int kind = e.widget->getKind();
    if(kind == OFX_UI_WIDGET_TOGGLE)
    {
        ofxUIToggle *toggle = (ofxUIToggle *) e.widget;
        if (name == "ENEMIES") {
            creatingEnemies = true;
        }
        if (name == "COLLECTABLES") {
            creatingEnemies = false;
        }
    }
    if (name == "Included Patterns") {
        if (patternList->isOpen() == true) {
            cout << "Show Remove Button" << endl;
            removeButton->setVisible(true);
            removeButton->setLabelText("REMOVE");
        }
        else {
            cout << "Hide Remove Button" << endl;
            removeButton->setVisible(false);
        }
    }

    if (name == "Starting") {
        ofxUISlider *speedSlider = (ofxUISlider *) e.getSlider();
        speed = speedSlider->getScaledValue();
        currentSpeed = speed;
    }
    
    if (name == "Multiplier") {
        ofxUISlider *multiSlider = (ofxUISlider *) e.getSlider();
        multiplier = multiSlider->getScaledValue();

    }

    
    if(name == "SIZE")
	{
		ofxUISlider *slider = (ofxUISlider *) e.getSlider();
        pointSize = slider->getScaledValue();
	}
    
    if(name == "clear pattern") {
        buttonCount++;
        if(buttonCount ==2) {
            enemies.clear();
            collectables.clear();
            XML.clear();
        }
    }

    if (name == "save pattern") {
        buttonCount++;
        if (buttonCount == 2) {
            savePattern();
            buttonCount = 0;
        }
    }
    if (name == "add to level") {
        buttonCount++;
        if (buttonCount == 2) {
            patterns.push_back(patternInput->getTextString() + ".xml");
            patternList->addToggle(patternInput->getTextString());
            savePattern();
            buttonCount = 0;
        }
    }
    if (name == "REMOVE PATTERN") {
        buttonCount++;
        if (buttonCount ==2) {
            vector<ofxUIWidget *> &selected = patternList->getSelected();
            for(int i = 0; i < selected.size(); i++)
            {
                cout << "SELECTED: " << selected[i]->getName() << endl;
                patterns.erase(patterns.begin() + i);
                patternList->removeToggle(selected[i]->getName());
            }
            buttonCount = 0;
        }
    }
        
    if (name == "SAVE LEVEL") {
        buttonCount++;
        if (buttonCount == 2) {
            ofxXmlSettings levelXML;

            levelXML.setValue("name", levelInput->getTextString());
            levelXML.setValue("author", authorInput->getTextString());
            levelXML.setValue("complete", completeInput->getTextString());
            levelXML.setValue("speed", speedSlider->getValue());
            levelXML.setValue("multiplier", multiSlider->getValue());
            
            for (int i = 0; i < patterns.size(); i++) {
                levelXML.setValue("file", patterns[i], i);

            }
            
            
            levelXML.saveFile("new.level");
            levelXML.clear();
            buttonCount = 0;
        }
    }
    if(name == "OPEN EXISTING PATTERN") {
        buttonCount++;
        if (buttonCount == 2) {
		ofFileDialogResult openFileResult= ofSystemLoadDialog("Select a pattern to open");
        
		//Check if the user opened a file
		if (openFileResult.bSuccess){

            cout << openFileResult.getName() << endl;
			ofLogVerbose("User selected a file");
			
			//We have a file, check it and process it
            ofFile file (openFileResult.getPath());
            
            if (file.exists()){
                
                string fileExtension = ofToUpper(file.getExtension());
                
                if (fileExtension == "XML") {
                    cout << "it's XML" << " " << file.getAbsolutePath() << endl;
                    patternInput->setTextString(file.getBaseName());
                    canvasLocation.set(0, 500);
                    if( XML.loadFile(file.getAbsolutePath()) ){
                        enemies.clear();
                        collectables.clear();
                        for (int i = 0; i < XML.getNumTags("enemy"); i++) {
                            ofPoint tmpPoint;
                            tmpPoint.set(XML.getValue("enemy:x", 0, i), XML.getValue("enemy:y", 0, i), XML.getValue("enemy:size", 0, i));
                            enemies.push_back(tmpPoint);
                        }
                        for (int i = 0; i < XML.getNumTags("collectables"); i++) {
                            ofPoint tmpPoint;
                            tmpPoint.set(XML.getValue("collectable:x", 0, i), XML.getValue("collectable:y", 0, i), XML.getValue("collectable:size", 0, i));
                            collectables.push_back(tmpPoint);
                        }
                        
                    }
                    else {
                        cout << "can't load file" << endl;
                    }
                }
                else {
                    cout << "it's not XML" << endl;

                }
                
                
            }
		
			
		}
            buttonCount = 0;
        }
        
    }
    
}
Example #25
//--------------------------------------------------------------
void ofApp::exit(){
    ofLogVerbose("ofApp") << "exit()";
}
Example #26
bool DepthSource::setup(DeviceController& deviceController)
{
	doRawDepth = deviceController.settings.doRawDepth;
	
	Status status = STATUS_OK;
	status = videoStream.create(deviceController.device, SENSOR_DEPTH);
	if (status == STATUS_OK)
	{
		ofLogVerbose() << "Find DepthSource stream PASS";
		status = videoStream.start();
		if (status == STATUS_OK)
		{
			ofLogVerbose() << "Start DepthSource stream PASS";
		}else 
		{
			
			ofLogError() << "Start DepthSource stream FAIL: " << OpenNI::getExtendedError();
			videoStream.destroy();
		}
	}else
	{
		ofLogError() << "Find DepthSource stream FAIL: " <<  OpenNI::getExtendedError();
	}
	if (videoStream.isValid())
	{
		if(!deviceController.settings.useOniFile && !deviceController.isKinect)
		{
			const VideoMode* requestedMode = deviceController.findMode(SENSOR_DEPTH); 
			if (requestedMode != NULL) 
			{
				videoStream.setVideoMode(*requestedMode);
			}
		}
		allocateBuffers();
		
		
		if (!deviceController.isKinect)
		{
			deviceMaxDepth	= videoStream.getMaxPixelValue();
		}else 
		{
			deviceMaxDepth = 10000;
		}
		ofLogVerbose() << "deviceMaxDepth: " << deviceMaxDepth;
		status = videoStream.addNewFrameListener(this);
		if (status == STATUS_OK)
		{
			ofLogVerbose() << "DepthSource videoStream addNewFrameListener PASS";
		}else
		{
			ofLogError() << "DepthSource videoStream addNewFrameListener FAIL: " <<  OpenNI::getExtendedError();
		}

		
		
		isOn = true;
	}else 
	{
		ofLogError() << "DepthSource is INVALID";
	}
	
	
	return isOn;
}
Example #27
//--------------------------------------------------------------
void ofApp::mouseMoved(int x, int y ){
    ofLogVerbose("ofApp") << "mouseMoved() x = " << x << "/" << y;
}
Example #28
// Here we handle application state changes only
void Application::onKeyPressed(ofKeyEventArgs & args){

	// For now we set the state of the new system and also the old
	// before it is completely ported to the state system.

	switch(args.key){
	 case OF_KEY_SHIFT:
		 _shiftKeyDown = true;
		 break;
		 
	 case '/':
		 _shiftKeyDown = !_shiftKeyDown;
		 break;
		 
	 case OF_KEY_HOME:
		 _cmdManager.exec(
			 new ofx::piMapper::SetApplicationStateCmd(
				 this, PresentationState::instance(),
				 &_gui, GuiMode::NONE));
		 break;

	 case '2':
		 _cmdManager.exec(
			 new ofx::piMapper::SetApplicationStateCmd(
				 this, TextureMappingState::instance(),
				 &_gui, GuiMode::TEXTURE_MAPPING));
		 break;

	 case '3':
		 _cmdManager.exec(
			 new ofx::piMapper::SetApplicationStateCmd(
				 this, ProjectionMappingState::instance(),
				 &_gui, GuiMode::PROJECTION_MAPPING));
		 break;

	 case '4':
		 _cmdManager.exec(
			 new ofx::piMapper::SetApplicationStateCmd(
				 this, SourceSelectionState::instance(),
				 &_gui, GuiMode::SOURCE_SELECTION));
		 break;
	 case OF_OSMC_KEY_CONTEXT_MENU:
		 ofLogVerbose(__func__) << args.key << " was pressed";
		 _cmdManager.exec(
			 new ofx::piMapper::SetApplicationStateCmd(
				 this, PlaylistSelectionState::instance(),
				 &_gui, GuiMode::PLAYLIST_SELECTION_MODE));
		 break;

	 case 'f':
		 ofToggleFullscreen();
		 break;

	 case 'M':
		 ofShowCursor();
		 break;

	 case 'm':
		 ofHideCursor();
		 break;

	 case 'i':
		 _info.toggle();
		 break;

	 case 's':
		 _surfaceManager.saveXmlSettings(SettingsLoader::instance()->getLastLoadedFilename());
		 break;

	 case 'z':
		 _cmdManager.undo();
		 break;

	 default:
		 // All the other keypresses are handled by the application state onKeyPressed
		 _state->onKeyPressed(this, args);
		 break;
	}
}
Example #29
//--------------------------------------------------------------
void ofApp::windowResized(int w, int h){
    ofLogVerbose("ofApp") << "windowResized() w = " << w  << " h = " << h;
}
Example #30
//----------------------------------------
void ofxOpenNITracker::requestCalibration(XnUserID nID) {
	ofLogVerbose(LOG_NAME) << "Calibration requested for user" << nID;
	openNI->getUserGenerator().GetSkeletonCap().RequestCalibration(nID, TRUE);
	setUserState(nID, ofxOpenNIUser::Calibrating);
}