Example #1
//--------------------------------------------------------------
void PlayerAsset::processOscCommand(const string& command, const ofxOscMessage& m) {
    
    if(isMatch(command,"load")) {
        if(validateOscSignature("s", m)) {
            string assetAlias = getArgAsStringUnchecked(m, 0);
            
            if(hasAssetManager()) {
                BaseMediaAsset* asset = getAssetManager()->getAsset(assetAlias);
                
                if(asset != NULL) {
                    if(asset->isPlayable()) {
                        PlayableAsset* playableAsset = dynamic_cast<PlayableAsset*>(asset);
                        if(playableAsset != NULL) {
                            load(playableAsset);
                        } else {
                            ofLogError("PlayerAsset") << assetAlias << " could not be cast to a playable asset.";
                        }
                        
                    } else {
                        ofLogError("PlayerAsset") << assetAlias << " is not a playable asset.";
                    }
                } else {
                    ofLogError("PlayerAsset") << "no asset called " << assetAlias << " exists.";
                }
            }
        }
    } else if(isMatch(command,"start")) {
        player->start();
    } else if(isMatch(command,"stop")) {
        player->stop();
    } else if(isMatch(command,"pause")) {
        if(validateOscSignature("[sfi]", m)) {
            player->setPaused(getArgAsBoolUnchecked(m,0));
        }
    } else if(isMatch(command,"loopmode")) {
        if(validateOscSignature("s", m)) {
            string loopMode = getArgAsStringUnchecked(m,0);
            if(isMatch(loopMode,"NONE")) {
                player->setLoopType(OF_LOOP_NONE);
            } else if(isMatch(loopMode,"LOOP")) {
                player->setLoopType(OF_LOOP_NORMAL);
            } else if(isMatch(loopMode,"PALINDROME")) {
                player->setLoopType(OF_LOOP_PALINDROME);
            } else {
                ofLog(OF_LOG_WARNING, "PlayerAsset: unknown loop mode: " + loopMode);
            }
        }
    } else if(isMatch(command,"looppoints")) {
        if(validateOscSignature("[fi][fi]", m)) {
            player->setLoopPoints(getArgAsFloatUnchecked(m,0),getArgAsFloatUnchecked(m,1));
        }
    } else if(isMatch(command,"looppointstart")) {
        if(validateOscSignature("[fi]", m)) {
            player->setLoopPointStart(getArgAsFloatUnchecked(m,0));
        }
    } else if(isMatch(command,"looppointend")) {
        if(validateOscSignature("[fi]", m)) {
            player->setLoopPointEnd(getArgAsFloatUnchecked(m,0));
        }
    } else if(isMatch(command,"looppointsn")) {
        if(validateOscSignature("[fi][fi]", m)) {
            player->setLoopPointsNorm(getArgAsFloatUnchecked(m,0),getArgAsFloatUnchecked(m,1));
        }
    } else if(isMatch(command,"looppointstartn")) {
        if(validateOscSignature("[fi]", m)) {
            player->setLoopPointStartNorm(getArgAsFloatUnchecked(m,0));
        }
    } else if(isMatch(command,"looppointendn")) {
        if(validateOscSignature("[fi]", m)) {
            player->setLoopPointEndNorm(getArgAsFloatUnchecked(m,0));
        }
    } else if(isMatch(command,"frame")) {
        if(validateOscSignature("[fi]", m)) {
            player->setFrame(getArgAsFloatUnchecked(m,0));
        }
    } else if(isMatch(command,"framen")) {
        if(validateOscSignature("[fi]", m)) {
            player->setFrameNorm(getArgAsFloatUnchecked(m,0));
        }
    } else if(isMatch(command,"speed")) {
        if(validateOscSignature("[fi]", m)) {
            player->setSpeed(getArgAsFloatUnchecked(m,0));
        }
    } else if(isMatch(command,"dump")) {
        dump();
    } else {
        // unknown command
    }
}
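A sender-side sketch for the handler above; the host, port, and the routing of the OSC address to the `command` string are assumptions handled elsewhere in the engine:

// Hypothetical OSC sender matching the "load" branch's "s" signature.
ofxOscSender sender;
sender.setup("localhost", 12345);  // assumed host/port
ofxOscMessage m;
m.setAddress("/player/load");      // assumed to be routed to command "load" upstream
m.addStringArg("myClipAlias");     // the asset alias the handler looks up
sender.sendMessage(m);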
Example #2
void ofGstUtils::gstHandleMessage(){
	GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(gstPipeline));
	while(gst_bus_have_pending(bus)) {
		GstMessage* msg = gst_bus_pop(bus);

		ofLog(OF_LOG_VERBOSE,"GStreamer: Got %s message", GST_MESSAGE_TYPE_NAME(msg));

		switch (GST_MESSAGE_TYPE (msg)) {

			case GST_MESSAGE_BUFFERING:
				gint pctBuffered;
				gst_message_parse_buffering(msg,&pctBuffered);
				ofLog(OF_LOG_VERBOSE,"GStreamer: buffering %i%%", pctBuffered);
				if(bIsStream && !bLoaded){
					ofGstDataLock(&gstData);
					allocate();
					ofGstDataUnlock(&gstData);
				}
				if(pctBuffered<100){
					gst_element_set_state (gstPipeline, GST_STATE_PAUSED);
				}else if(!bPaused){
					gst_element_set_state (gstPipeline, GST_STATE_PLAYING);
				}
			break;

			case GST_MESSAGE_DURATION:{
				GstFormat format=GST_FORMAT_TIME;
				if(!gst_element_query_duration(gstPipeline,&format,&durationNanos))
					ofLog(OF_LOG_WARNING,"GStreamer: cannot query duration");
			}break;

			case GST_MESSAGE_STATE_CHANGED:{
				GstState oldstate, newstate, pendstate;
				gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate);
				if(!bLoaded){
					ofGstDataLock(&gstData);
					allocate();
					ofGstDataUnlock(&gstData);
				}
				gstData.pipelineState=newstate;
				/*seek_lock();
				if(posChangingPaused && newstate==GST_STATE_PLAYING){
					gst_element_set_state (gstPipeline, GST_STATE_PAUSED);
					posChangingPaused=false;
				}
				seek_unlock();*/

				ofLog(OF_LOG_VERBOSE,"GStreamer: state changed from " + getName(oldstate) + " to " + getName(newstate) + " (" + getName(pendstate) + ")");
			}break;

			case GST_MESSAGE_ASYNC_DONE:
				gstData.speed=speed;
				ofLog(OF_LOG_VERBOSE,"GStreamer: async done");
			break;

			case GST_MESSAGE_ERROR: {
				GError *err;
				gchar *debug;
				gst_message_parse_error(msg, &err, &debug);

				ofLog(OF_LOG_ERROR, "GStreamer Plugin: Embedded video playback halted; module %s reported: %s",
					  gst_element_get_name(GST_MESSAGE_SRC (msg)), err->message);

				g_error_free(err);
				g_free(debug);

				gst_element_set_state(GST_ELEMENT(gstPipeline), GST_STATE_NULL);

			}break;

			case GST_MESSAGE_EOS:
				ofLog(OF_LOG_VERBOSE,"GStreamer: end of the stream.");
				bIsMovieDone = true;

				switch(loopMode){

					case OF_LOOP_NORMAL:{
						GstFormat format = GST_FORMAT_TIME;
						GstSeekFlags flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH |GST_SEEK_FLAG_KEY_UNIT);
						if(!gst_element_seek(GST_ELEMENT(gstPipeline),
											speed,
											format,
											flags,
											GST_SEEK_TYPE_SET,
											0,
											GST_SEEK_TYPE_SET,
											durationNanos)) {
							ofLog(OF_LOG_WARNING,"GStreamer: unable to seek");
						}
					}break;

					case OF_LOOP_PALINDROME:{
						GstFormat format = GST_FORMAT_TIME;
						GstSeekFlags flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH |GST_SEEK_FLAG_KEY_UNIT);
						gint64 pos;
						gst_element_query_position(GST_ELEMENT(gstPipeline),&format,&pos);
						float loopSpeed;
						if(pos>0)
							loopSpeed=-speed;
						else
							loopSpeed=speed;
						if(!gst_element_seek(GST_ELEMENT(gstPipeline),
											loopSpeed,
											GST_FORMAT_UNDEFINED,
											flags,
											GST_SEEK_TYPE_NONE,
											0,
											GST_SEEK_TYPE_NONE,
											0)) {
							ofLog(OF_LOG_WARNING,"GStreamer: unable to seek");
						}
					}break;
				}

			break;

			default:
				ofLog(OF_LOG_VERBOSE,"GStreamer: unhandled message");
			break;
		}
		gst_message_unref(msg);
	}

	gst_object_unref(GST_OBJECT(bus));
}
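The pump above is meant to be drained regularly; a minimal sketch, assuming an ofGstUtils member named `video` inside an app's update loop:

// Drain pending GStreamer bus messages once per frame (illustrative wrapper).
void testApp::update(){
    video.gstHandleMessage();
}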
Example #3
bool ofGstUtils::initGrabber(int w, int h, int framerate){
	bpp = 24;
	if(!camData.bInited) get_video_devices(camData);

	if(camData.webcam_devices.size()==0){
		ofLog(OF_LOG_ERROR,"ofGstUtils: no devices found, exiting without initializing");
		return false;
	}

	ofGstVideoFormat & format = selectFormat(w, h, framerate);

	ofLog(OF_LOG_NOTICE,"ofGstUtils: selected format: " + ofToString(format.width) + "x" + ofToString(format.height) + " " + format.mimetype + " framerate: " + ofToString(format.choosen_framerate.numerator) + "/" + ofToString(format.choosen_framerate.denominator));

	bIsCamera = true;
	bHavePixelsChanged 	= false;

	width = w;
	height = h;

	gstData.loop		= g_main_loop_new (NULL, FALSE);


	const char * decodebin = "";
	if(format.mimetype == "video/x-raw-bayer")
		decodebin = "bayer2rgb !";
	else if(format.mimetype != "video/x-raw-yuv" && format.mimetype != "video/x-raw-rgb")
		decodebin = "decodebin !";

	const char * scale = "ffmpegcolorspace !";
	if( w!=format.width || h!=format.height )	scale = "ffvideoscale method=2 !";


	string format_str_pipeline = string("%s name=video_source device=%s ! ") +
								 "%s,width=%d,height=%d,framerate=%d/%d ! " +
								 "%s %s " +
								 "appsink name=sink  caps=\"video/x-raw-rgb, width=%d, height=%d, bpp=24\"";

	gchar* pipeline_string =g_strdup_printf (
				      format_str_pipeline.c_str(),
				      camData.webcam_devices[deviceID].gstreamer_src.c_str(),
				      camData.webcam_devices[deviceID].video_device.c_str(),
				      format.mimetype.c_str(),
				      format.width,
				      format.height,
				      format.choosen_framerate.numerator,
				      format.choosen_framerate.denominator,
				      decodebin, scale,
				      w,h);

	ofLog(OF_LOG_NOTICE, "gstreamer pipeline: %s", pipeline_string);

	GError * error = NULL;
	gstPipeline = gst_parse_launch (pipeline_string, &error);
	g_free(pipeline_string);

	if(gstPipeline == NULL){
		ofLog(OF_LOG_ERROR, "ofGstUtils: failed to create pipeline: %s", error ? error->message : "unknown error");
		if(error) g_error_free(error);
		return false;
	}

	gstSink = gst_bin_get_by_name(GST_BIN(gstPipeline),"sink");

	gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true);


	if(startPipeline()){
		play();
		return true;
	}else{
		return false;
	}
}
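A caller sketch for the grabber init above; resolution and framerate are example values, and deviceID is assumed to be set beforehand:

ofGstUtils grabber;
if(grabber.initGrabber(640, 480, 30)){
    ofLog(OF_LOG_NOTICE, "grabber pipeline running");
}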
Example #4
//--------------------------------------------
void ofCairoRenderer::draw(ofImage & img, float x, float y, float z, float w, float h, float sx, float sy, float sw, float sh){
	ofPixelsRef raw = img.getPixelsRef();
	bool shouldCrop = sx != 0 || sy != 0 || sw != w || sh != h;
	ofPixels cropped;
	if(shouldCrop) {
		cropped.allocate(sw, sh, raw.getImageType());
		raw.cropTo(cropped, sx, sy, sw, sh);
	}
	ofPixelsRef pix = shouldCrop ? cropped : raw;
	
	pushMatrix();
	translate(x,y,z);
	scale(w/pix.getWidth(),h/pix.getHeight());
	cairo_surface_t *image;
	int stride=0;
	int picsize = pix.getWidth()* pix.getHeight();
	unsigned char *imgPix = pix.getPixels();

	static vector<unsigned char> swapPixels;

	switch(pix.getImageType()){
	case OF_IMAGE_COLOR:
#ifdef TARGET_LITTLE_ENDIAN
		swapPixels.resize(picsize * 4);

		for(int p= 0; p<picsize; p++) {
			swapPixels[p*4] = imgPix[p*3 +2];
			swapPixels[p*4 +1] = imgPix[p*3 +1];
			swapPixels[p*4 +2] = imgPix[p*3];
		}
#else
		swapPixels.resize(picsize * 4);

		for(int p= 0; p<picsize; p++) {
			swapPixels[p*4] = imgPix[p*3];
			swapPixels[p*4 +1] = imgPix[p*3 +1];
			swapPixels[p*4 +2] = imgPix[p*3 +2];
		}
#endif
		stride = cairo_format_stride_for_width (CAIRO_FORMAT_RGB24, pix.getWidth());
		image = cairo_image_surface_create_for_data(&swapPixels[0], CAIRO_FORMAT_RGB24, pix.getWidth(), pix.getHeight(), stride);
		break;
	case OF_IMAGE_COLOR_ALPHA:
#ifdef TARGET_LITTLE_ENDIAN
		swapPixels.resize(picsize * 4);

		for(int p= 0; p<picsize; p++) {
			swapPixels[p*4] = imgPix[p*4+2];
			swapPixels[p*4 +1] = imgPix[p*4+1];
			swapPixels[p*4 +2] = imgPix[p*4];
			swapPixels[p*4 +3] = imgPix[p*4+3];
		}
		stride = cairo_format_stride_for_width (CAIRO_FORMAT_ARGB32, pix.getWidth());
		image = cairo_image_surface_create_for_data(&swapPixels[0], CAIRO_FORMAT_ARGB32, pix.getWidth(), pix.getHeight(), stride);
#else
		stride = cairo_format_stride_for_width (CAIRO_FORMAT_ARGB32, pix.getWidth());
		image = cairo_image_surface_create_for_data(pix.getPixels(), CAIRO_FORMAT_ARGB32, pix.getWidth(), pix.getHeight(), stride);
#endif
		break;
	case OF_IMAGE_GRAYSCALE:
		swapPixels.resize(picsize * 4);

		for(int p= 0; p<picsize; p++) {
			swapPixels[p*4] = imgPix[p];
			swapPixels[p*4 +1] = imgPix[p];
			swapPixels[p*4 +2] = imgPix[p];
		}
		stride = cairo_format_stride_for_width (CAIRO_FORMAT_RGB24, pix.getWidth());
		image = cairo_image_surface_create_for_data(&swapPixels[0], CAIRO_FORMAT_RGB24, pix.getWidth(), pix.getHeight(), stride);
		break;
	case OF_IMAGE_UNDEFINED:
	default:
		ofLog(OF_LOG_ERROR,"ofCairoRenderer: trying to render undefined type image");
		popMatrix();
		return;
		break;
	}
	cairo_set_source_surface (cr, image, 0,0);
	cairo_paint (cr);
	cairo_surface_flush(image);
	cairo_surface_destroy (image);
	popMatrix();
}
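The channel swaps above follow from cairo's packing: CAIRO_FORMAT_RGB24 and CAIRO_FORMAT_ARGB32 store each pixel as one native-endian 32-bit word, so on a little-endian target the bytes sit in memory as B,G,R,(A). A one-pixel sketch of the same packing:

// Pack one RGB pixel as cairo expects on a little-endian machine (RGB24: top byte unused).
inline void packRGB24(unsigned char* dst, unsigned char r, unsigned char g, unsigned char b){
    dst[0] = b;
    dst[1] = g;
    dst[2] = r;
    dst[3] = 0;
}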
Example #5
int COMXAudioCodecOMX::Decode(BYTE* pData, int iSize)
{
	int iBytesUsed, got_frame;
	if (!m_pCodecContext)
	{
		return -1;
	}

	m_iBufferSize1 = AVCODEC_MAX_AUDIO_FRAME_SIZE;
	m_iBufferSize2 = 0;

	AVPacket avpkt;
	av_init_packet(&avpkt);
	avpkt.data = pData;
	avpkt.size = iSize;
	iBytesUsed = avcodec_decode_audio4( m_pCodecContext
	             , m_pFrame1
	             , &got_frame
	             , &avpkt);
	if (iBytesUsed < 0 || !got_frame)
	{
		m_iBufferSize1 = 0;
		m_iBufferSize2 = 0;
		return iBytesUsed;
	}
	m_iBufferSize1 = av_samples_get_buffer_size(NULL, m_pCodecContext->channels, m_pFrame1->nb_samples, m_pCodecContext->sample_fmt, 1);

	/* some codecs will attempt to consume more data than what we gave */
	if (iBytesUsed > iSize)
	{
		ofLog(OF_LOG_ERROR, "COMXAudioCodecOMX::Decode - decoder attempted to consume more data than given");
		iBytesUsed = iSize;
	}

	if(m_iBufferSize1 == 0 && iBytesUsed >= 0)
	{
		m_iBuffered += iBytesUsed;
	}
	else
	{
		m_iBuffered = 0;
	}

	if(m_pCodecContext->sample_fmt != AV_SAMPLE_FMT_S16 && m_iBufferSize1 > 0)
	{
		if(m_pConvert && m_pCodecContext->sample_fmt != m_iSampleFormat)
		{
			swr_free(&m_pConvert);
		}

		if(!m_pConvert)
		{
			m_iSampleFormat = m_pCodecContext->sample_fmt;
			m_pConvert = swr_alloc_set_opts(NULL,
			             av_get_default_channel_layout(m_pCodecContext->channels),
			             AV_SAMPLE_FMT_S16, m_pCodecContext->sample_rate,
			             av_get_default_channel_layout(m_pCodecContext->channels),
			             m_pCodecContext->sample_fmt, m_pCodecContext->sample_rate,
			             0, NULL);
		}

		if(!m_pConvert || swr_init(m_pConvert) < 0)
		{
			ofLog(OF_LOG_ERROR, "COMXAudioCodecOMX::Decode - Unable to convert %d to AV_SAMPLE_FMT_S16", m_pCodecContext->sample_fmt);
			m_iBufferSize1 = 0;
			m_iBufferSize2 = 0;
			return iBytesUsed;
		}

		int len = m_iBufferSize1 / av_get_bytes_per_sample(m_pCodecContext->sample_fmt);
		if(swr_convert(m_pConvert, &m_pBuffer2, len, (const uint8_t**)m_pFrame1->data, m_pFrame1->nb_samples) < 0)
		{
			ofLog(OF_LOG_ERROR, "COMXAudioCodecOMX::Decode - Unable to convert %d to AV_SAMPLE_FMT_S16", (int)m_pCodecContext->sample_fmt);
			m_iBufferSize1 = 0;
			m_iBufferSize2 = 0;
			return iBytesUsed;
		}

		m_iBufferSize1 = 0;
		m_iBufferSize2 = len * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16);
	}

	return iBytesUsed;
}
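A sketch of a caller's demux loop; GetData() is assumed here as the accessor for the converted PCM (m_pBuffer1/m_pBuffer2) and is not shown in this listing:

// Hypothetical usage: push one demuxed packet, then fetch whatever PCM was produced.
int used = codec.Decode(packet.data, packet.size);
if(used >= 0){
    BYTE* pcm = NULL;
    int bytes = codec.GetData(&pcm); // assumed accessor; returns byte count
    // hand pcm/bytes to the audio renderer here
}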
Example #6
soundMap::soundMap() {


    midiOut.listPorts();
    midiOut.openPort(0);

    //svgPaths.load("flrPlnDistortededitted5.svg");
    svgPaths.load("flrPlnDistortededitted5copy.svg");

    room =9;

    //    ofLog()<<room;
    if(room==2) {
        // 37 tracks per channel
        // 592
        // we could have up to 74 audio files.
        //midiOut.sendNoteOn(9, 1, 30);
        //midiOut.sendNoteOn(9, 1, 127);
        //midiOut.se
        //midiOut.sendControlChange(9,85,0);
        //midiOut.sendControlChange(8,85,127);
    }
    timeline.setup(); //registers events
    timeline.setDurationInSeconds(500); //sets time
    timeline.setLoopType(OF_LOOP_NORMAL); //turns the timeline to loop

    ofVec2f point;
    point.set( 0, 580 );
    timeline.setOffset(point);
    timeline.addFlags("temporalAudioMarkers");
    ofAddListener(timeline.events().bangFired, this, &soundMap::receivedBang);

    // turn everything off - so nothing is weird
    //midiOut.sendNoteOn(2, 50,50);


    ofLog()<< "nn";
    midiOut.sendNoteOn(1, 50,50);
    midiOut.sendNoteOn(1, 48,50);
    midiOut.sendNoteOn(1, 49,50);
    midiOut.sendNoteOn(1, 51,50);
    midiOut.sendNoteOn(1, 44,50);
    midiOut.sendNoteOn(1, 45,50);
    midiOut.sendNoteOn(1, 46,50);
    midiOut.sendNoteOn(1, 47,50);
    midiOut.sendNoteOn(1, 40,50);
    midiOut.sendNoteOn(1, 41,50);
    midiOut.sendNoteOn(1, 43,50);

    //midiOut.sendNoteOn(1,42,50);
    //midiOut.sendControlChange(1,22,2.5);

    // play sound depending on which room

    convesation.posX= 150;
    convesation.posY= 150;
    convesation.spreadFactor =  30;
    convesation.width = 10;
    convesation.HighestVolume = 100;
    convesation.toAnimate = "PathTest.xml";

    clock.posX = 481;
    clock.posY = 244;
    clock.spreadFactor = 20;
    clock.width = 40;
    clock.HighestVolume = 20;
    clock.clipToRoom = 1;
    clock.muffleFactor = .5;
    clock.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));

    showerDrip.posX = 525;
    showerDrip.posY = 105;
    showerDrip.spreadFactor = 18;
    showerDrip.width = 30;
    showerDrip.clipToRoom =2;
    showerDrip.muffleFactor =.4;
    showerDrip.HighestVolume = 30;
    showerDrip.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));

    kitchenClock.posX = 588;
    kitchenClock.posY = 212;
    kitchenClock.spreadFactor = 30;
    kitchenClock.width =40;
    kitchenClock.clipToRoom =3;
    kitchenClock.muffleFactor =.6;
    kitchenClock.HighestVolume = 50;
    kitchenClock.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));

    fridge.posX = 468;
    fridge.posY = 182;
    fridge.spreadFactor = 4;
    fridge.width =80;
    fridge.clipToRoom =3;
    fridge.muffleFactor =.8;
    fridge.HighestVolume = 40;
    fridge.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));

    windowBack.posX = 658;
    windowBack.posY = 553;
    windowBack.spreadFactor = 15;
    windowBack.width =5;
    windowBack.clipToRoom =3;
    windowBack.muffleFactor =.8;
    windowBack.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));

    windowFront.posX = 0;
    windowFront.posY = 389;
    windowFront.spreadFactor = 20;
    windowFront.width =40;
    windowFront.clipToRoom = 1;
    windowFront.muffleFactor = .5;
    windowFront.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));

    theRattle.posX = 136;
    theRattle.posY = 108;
    theRattle.spreadFactor= 3;
    theRattle.width = 20;
    theRattle.clipToRoom = 1;
    theRattle.muffleFactor =.4;
    theRattle.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    theRattle.duration = 22;
    theRattle.timePast = -1;
    theRattle.goTime = false;

    catPurring.posX = 163;
    catPurring.posY = 131;
    catPurring.spreadFactor= 7;
    catPurring.width = 30;
    catPurring.clipToRoom = 1;
    catPurring.muffleFactor =.4;
    catPurring.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    catPurring.duration = 13;
    catPurring.timePast = -1;
    catPurring.goTime = false;

    skinScratch.posX = 277;
    skinScratch.posY = 116;
    skinScratch.spreadFactor= 17;
    skinScratch.width = 40;
    skinScratch.clipToRoom = 1;
    skinScratch.muffleFactor =.4;
    skinScratch.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    skinScratch.duration = 24;
    skinScratch.HighestVolume =80;
    skinScratch.timePast = -1;
    skinScratch.goTime = false;

    writing.posX = 306;
    writing.posY = 497;
    writing.spreadFactor= 30;
    writing.width = 20;
    writing.clipToRoom = 1;
    writing.muffleFactor =.4;
    writing.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    writing.duration = 243;
    writing.timePast = -1;
    writing.goTime = false;

    crying.posX = 277;
    crying.posY = 116;
    crying.spreadFactor= 40;
    crying.width = 20;
    crying.clipToRoom = 1;
    crying.muffleFactor =.5;
    crying.HighestVolume = 60;
    crying.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    crying.duration = 22;
    crying.timePast = -1;
    crying.goTime = false;

    showerTime.posX = 712;
    showerTime.posY = 153;
    showerTime.spreadFactor= 70;
    showerTime.width = 20;
    showerTime.clipToRoom = 2;
    showerTime.muffleFactor =.4;
    showerTime.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    showerTime.duration = 733;
    showerTime.timePast = -1;
    showerTime.HighestVolume = 50;
    showerTime.goTime = false;

    tubDrip.posX = 712;
    tubDrip.posY = 153;
    tubDrip.spreadFactor= 13;
    tubDrip.width = 20;
    tubDrip.clipToRoom = 2;
    tubDrip.muffleFactor =.4;
    tubDrip.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    tubDrip.duration = 25;
    tubDrip.timePast = -1;
    tubDrip.goTime = false;

    mixing.posX = 630;
    mixing.posY = 502;
    mixing.spreadFactor= 35;
    mixing.width = 20;
    mixing.clipToRoom = 3;
    mixing.muffleFactor =.4;
    mixing.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    mixing.duration = 36;
    mixing.timePast = -1;
    mixing.goTime = false;

    stove.posX = 532;
    stove.posY = 285;
    stove.spreadFactor= 15;
    stove.width = 30;
    stove.clipToRoom = 3;
    stove.muffleFactor =.4;
    stove.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    stove.duration = 69;
    stove.timePast = -1;
    stove.goTime = false;

    eggCrack.posX = 620;
    eggCrack.posY = 489;
    eggCrack.spreadFactor= 30 ;
    eggCrack.width = 40;
    eggCrack.clipToRoom = 3;
    eggCrack.muffleFactor =.4;
    eggCrack.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    eggCrack.duration = 24;
    eggCrack.timePast = -1;
    eggCrack.goTime = false;

    appleChop.posX = 607;
    appleChop.posY = 493;
    appleChop.spreadFactor= 40;
    appleChop.width = 20;
    appleChop.clipToRoom = 3;
    appleChop.muffleFactor =.4;
    appleChop.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    appleChop.duration = 79;
    appleChop.timePast = -1;
    appleChop.goTime = false;

    //livingRoom mug
    mug1.posX = 247;
    mug1.posY = 492;
    mug1.spreadFactor= 10;
    mug1.width = 40;
    mug1.clipToRoom = 1;
    mug1.muffleFactor =.4;
    mug1.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    mug1.duration = 1;
    mug1.timePast = -1;
    mug1.goTime = false;

    //kitchen mug
    mug2.posX = 623;
    mug2.posY = 497;
    mug2.spreadFactor= 7;
    mug2.width = 20;
    mug2.clipToRoom = 3;
    mug2.muffleFactor =.4;
    mug2.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    mug2.duration = 7;
    mug2.timePast = -1;
    mug2.goTime = false;

    waterPour.posX =253;
    waterPour.posY = 510;
    waterPour.spreadFactor= 80;
    waterPour.width = 20;
    waterPour.clipToRoom = 1;
    waterPour.muffleFactor =.4;
    waterPour.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    waterPour.duration = 8;
    waterPour.timePast = -1;
    waterPour.goTime = false;


    kettle.posX = 553;
    kettle.posY = 301;
    kettle.spreadFactor= 30;
    kettle.width = 20;
    kettle.clipToRoom = 2;
    kettle.muffleFactor =.4;
    kettle.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    kettle.duration = 22;
    kettle.timePast = -1;
    kettle.goTime = false;

    brushTeeth.posX = 583;
    brushTeeth.posY = 105;
    brushTeeth.spreadFactor= 50;
    brushTeeth.width = 20;
    brushTeeth.clipToRoom = 2;
    brushTeeth.muffleFactor =.4;
    brushTeeth.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    brushTeeth.duration = 22;
    brushTeeth.timePast = -1;
    brushTeeth.goTime = false;

    longCooking.posX = 472;
    longCooking.posY = 275;
    longCooking.spreadFactor= 20;
    longCooking.width = 30;
    longCooking.clipToRoom = 3;
    longCooking.HighestVolume = 30;
    longCooking.muffleFactor =.65;
    longCooking.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    longCooking.duration = 537;
    longCooking.timePast = -1;
    longCooking.goTime = false;

    humming.posX = 270;
    humming.posY = 430;
    humming.spreadFactor= 70;
    humming.width = 50;
    humming.clipToRoom = 1;
    humming.muffleFactor =.7;
    humming.HighestVolume = 50;
    humming.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    humming.duration = 53;
    humming.timePast = -1;
    humming.goTime = false;

    gwenRun.posX = 235;
    gwenRun.posY = 285;
    gwenRun.spreadFactor= 50;
    gwenRun.width = 40;
    gwenRun.clipToRoom = 1;
    gwenRun.muffleFactor =.5;
    gwenRun.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    gwenRun.duration = 4;
    gwenRun.timePast = -1;
    gwenRun.goTime = false;

    overheardUpstairs.posX = 686;
    overheardUpstairs.posY = 431;
    overheardUpstairs.spreadFactor= 15;
    overheardUpstairs.width = 20;
    overheardUpstairs.clipToRoom = 3;
    overheardUpstairs.muffleFactor =.9;
    overheardUpstairs.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    overheardUpstairs.duration = 76;
    overheardUpstairs.timePast = -1;
    overheardUpstairs.goTime = false;

    dishes.posX = 530;
    dishes.posY = 376;
    dishes.spreadFactor= 30;
    dishes.width = 20;
    dishes.clipToRoom = 3;
    dishes.muffleFactor =.4;
    dishes.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    dishes.duration = 1352;
    dishes.timePast = -1;
    dishes.HighestVolume = 60;
    dishes.goTime = false;

    sleep.posX = 298;
    sleep.posY = 205;
    sleep.spreadFactor= 15;
    sleep.width = 20;
    sleep.clipToRoom = 1;
    sleep.muffleFactor =.4;
    sleep.color = ofColor((int)ofRandom(219,249),(int)ofRandom(187,217),(int)ofRandom(187,217));
    sleep.duration = 86;
    sleep.timePast = -1;
    sleep.goTime = false;

    int controlNums[28] = {22,23,24,25,26,27,28,29,105,106,107,108,90,102,103,104,86, 87,88,89,31,85,109,110,111,113,114,112};
    for(int i=0; i< 28; i++ ) {
        midiOut.sendControlChange(1,controlNums[i],100);
        midiOut.sendControlChange(1,controlNums[i],127);
        midiOut.sendControlChange(1,controlNums[i],0);
        ofLog()<<"hry";
    }


}
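The long run of per-sound assignments above repeats the same fields; a sketch of a helper that would collapse it, assuming the sound members share one type (soundSource is an invented name):

// Hypothetical initializer; field names match the assignments above.
void soundMap::initSound(soundSource& s, int x, int y, int spread, int w,
                         int room, float muffle, float duration){
    s.posX = x;
    s.posY = y;
    s.spreadFactor = spread;
    s.width = w;
    s.clipToRoom = room;
    s.muffleFactor = muffle;
    s.duration = duration;
    s.timePast = -1;
    s.goTime = false;
    s.color = ofColor((int)ofRandom(219,249), (int)ofRandom(187,217), (int)ofRandom(187,217));
}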
Example #7
// --------------------------------------------------
void ofxProjectorBlend::setup(int resolutionWidth,
							  int resolutionHeight,
							  int _numProjectors,
							  int _pixelOverlap,
							  ofxProjectorBlendLayout _layout,
							  ofxProjectorBlendRotation _rotation)
{

	string l = "horizontal";
	if(_layout==ofxProjectorBlend_Vertical) l = "vertical";

	string r = "normal";
	if(_rotation==ofxProjectorBlend_RotatedLeft) r = "rotated left";
	else if(_rotation==ofxProjectorBlend_RotatedRight) r = "rotated right";

	ofLog(OF_LOG_NOTICE, "ofxProjectorBlend: res: %d x %d * %d, overlap: %d pixels, layout: %s, rotation: %s\n", resolutionWidth, resolutionHeight, _numProjectors, _pixelOverlap, l.c_str(), r.c_str());
	numProjectors = _numProjectors;
	layout = _layout;
	rotation = _rotation;

	if(numProjectors <= 0){
		ofLog(OF_LOG_ERROR, "Cannot initialize with " + ofToString(this->numProjectors) + " projectors.");
		return;
	}

	//allow editing projector heights
	for(int i = 0; i < numProjectors; i++){
		projectorHeightOffset.push_back( 0 );
	}

	pixelOverlap = _pixelOverlap;

	if(rotation == ofxProjectorBlend_NoRotation) {
		singleChannelWidth = resolutionWidth;
		singleChannelHeight = resolutionHeight;
	}
	else {
		singleChannelWidth = resolutionHeight;
		singleChannelHeight = resolutionWidth;
	}

	if(layout == ofxProjectorBlend_Vertical) {
		fullTextureWidth = singleChannelWidth;
		fullTextureHeight = singleChannelHeight*numProjectors - pixelOverlap*(numProjectors-1);
	}
	else if(layout == ofxProjectorBlend_Horizontal) {
		fullTextureWidth = singleChannelWidth*numProjectors - pixelOverlap*(numProjectors-1);
		fullTextureHeight = singleChannelHeight;
	} else {
		ofLog(OF_LOG_ERROR, "ofxProjectorBlend: You have used an invalid ofxProjectorBlendLayout in ofxProjectorBlend::setup()");
		return;
	}


	displayWidth = resolutionWidth*numProjectors;
	displayHeight = resolutionHeight;

	fullTexture.allocate(fullTextureWidth, fullTextureHeight, GL_RGB, 4);

	blendShader.unload();
	blendShader.setupShaderFromSource(GL_FRAGMENT_SHADER, ofxProjectorBlendFragShader(numProjectors-1));
	blendShader.setupShaderFromSource(GL_VERTEX_SHADER, ofxProjectorBlendVertShader);
	blendShader.linkProgram();

	gamma.resize(numProjectors-1, 0.5);
	blendPower.resize(numProjectors-1, 1);
	luminance.resize(numProjectors-1, 0);
}
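A usage sketch for the setup above; the numbers are example values:

// Two 1024x768 projectors side by side with a 64-pixel blend zone.
ofxProjectorBlend blender;
blender.setup(1024, 768, 2, 64, ofxProjectorBlend_Horizontal, ofxProjectorBlend_NoRotation);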
Example #8
//----------------------------------------------------------------
bool ofSerial::setup(int deviceNumber, int baud){

	int deviceCount = 0;

	string str			= "";
	string device		= "";
	bool deviceFound	= false;

	//---------------------------------------------
	#if defined( TARGET_OSX ) || defined( TARGET_LINUX )
	//---------------------------------------------

		//----------------------------------------------------
		//We will find serial devices by listing the directory

		DIR *dir;
		struct dirent *entry;
		dir = opendir("/dev");

		if (dir == NULL){
			ofLog(OF_LOG_ERROR,"ofSerial: error listing devices in /dev");
			return false;
		}

		while ((entry = readdir(dir)) != NULL){
			str = (char *)entry->d_name;
			#ifdef TARGET_OSX
			//if( str.substr(0,3) == "cu." || str.substr(0,4) == "tty." ){
			if( str.find("cu.usbserial") == 0 || str.find("tty.usbserial") == 0 ){
			#else
			if( str.substr(0,4) == "ttyS" || str.substr(0,6) == "ttyUSB" || str.substr(0,3) == "rfc" ){
			#endif
				if(deviceCount == deviceNumber){
					device = "/dev/"+str;
					deviceFound = true;
					ofLog(OF_LOG_NOTICE,"ofSerial device %i - /dev/%s  <--selected", deviceCount, str.c_str());
				}else ofLog(OF_LOG_NOTICE,"ofSerial device %i - /dev/%s", deviceCount, str.c_str());
				deviceCount++;
			}
		}

        if(deviceFound){
            return setup(device, baud);
        }else{
            ofLog(OF_LOG_ERROR,"ofSerial: could not find device %i - only %i devices found", deviceNumber, deviceCount);
            return false;
        }

	//---------------------------------------------
    #endif
    //---------------------------------------------

	//---------------------------------------------
	#ifdef TARGET_WIN32
	//---------------------------------------------

		enumerateWin32Ports();
		if (deviceNumber < nPorts){
			device = portNamesShort[deviceNumber];
			deviceFound = true;
		}

        if(deviceFound){
            return setup(device, baud);
        }else{
            ofLog(OF_LOG_ERROR,"ofSerial: could not find device %i - only %i devices found", deviceNumber, nPorts);
            return false;
        }

	//---------------------------------------------
    #endif
    //---------------------------------------------


}

//----------------------------------------------------------------
bool ofSerial::setup(string portName, int baud){

	bInited = false;

	//---------------------------------------------
	#if defined( TARGET_OSX ) || defined( TARGET_LINUX )
	//---------------------------------------------

	    ofLog(OF_LOG_NOTICE,"ofSerialInit: opening port %s @ %d bps", portName.c_str(), baud);
		fd = open(portName.c_str(), O_RDWR | O_NOCTTY | O_NONBLOCK);
		if(fd == -1){
			ofLog(OF_LOG_ERROR,"ofSerial: unable to open port");
			return false;
		}

		struct termios options;
		tcgetattr(fd,&oldoptions);
		options = oldoptions;
		switch(baud){
		   case 300: 	cfsetispeed(&options,B300);
						cfsetospeed(&options,B300);
						break;
		   case 1200: 	cfsetispeed(&options,B1200);
						cfsetospeed(&options,B1200);
						break;
		   case 2400: 	cfsetispeed(&options,B2400);
						cfsetospeed(&options,B2400);
						break;
		   case 4800: 	cfsetispeed(&options,B4800);
						cfsetospeed(&options,B4800);
						break;
		   case 9600: 	cfsetispeed(&options,B9600);
						cfsetospeed(&options,B9600);
						break;
		   case 14400: 	cfsetispeed(&options,B14400);
						cfsetospeed(&options,B14400);
						break;
		   case 19200: 	cfsetispeed(&options,B19200);
						cfsetospeed(&options,B19200);
						break;
		   case 28800: 	cfsetispeed(&options,B28800);
						cfsetospeed(&options,B28800);
						break;
		   case 38400: 	cfsetispeed(&options,B38400);
						cfsetospeed(&options,B38400);
						break;
		   case 57600:  cfsetispeed(&options,B57600);
						cfsetospeed(&options,B57600);
						break;
		   case 115200: cfsetispeed(&options,B115200);
						cfsetospeed(&options,B115200);
						break;

			default:	cfsetispeed(&options,B9600);
						cfsetospeed(&options,B9600);
						ofLog(OF_LOG_ERROR,"ofSerialInit: cannot set %i baud setting baud to 9600\n", baud);
						break;
		}

		options.c_cflag |= (CLOCAL | CREAD);
		options.c_cflag &= ~PARENB;
		options.c_cflag &= ~CSTOPB;
		options.c_cflag &= ~CSIZE;
		options.c_cflag |= CS8;
		tcsetattr(fd,TCSANOW,&options);

		bInited = true;
		ofLog(OF_LOG_NOTICE,"success in opening serial connection");

	    return true;
	//---------------------------------------------
    #endif
    //---------------------------------------------


    //---------------------------------------------
	#ifdef TARGET_WIN32
	//---------------------------------------------

	// open the serial port:
	// "COM4", etc...

	hComm=CreateFileA(portName.c_str(),GENERIC_READ|GENERIC_WRITE,0,0,
					OPEN_EXISTING,0,0);

	if(hComm==INVALID_HANDLE_VALUE){
		ofLog(OF_LOG_ERROR,"ofSerial: unable to open port");
		return false;
	}


	// now try the settings:
	COMMCONFIG cfg;
	DWORD cfgSize;
	char  buf[80];

	cfgSize=sizeof(cfg);
	GetCommConfig(hComm,&cfg,&cfgSize);
	int bps = baud;
	sprintf(buf,"baud=%d parity=N data=8 stop=1",bps);

	#if (_MSC_VER)       // microsoft visual studio
		// msvc doesn't like BuildCommDCB,
		//so we need to use this version: BuildCommDCBA
		if(!BuildCommDCBA(buf,&cfg.dcb)){
			ofLog(OF_LOG_ERROR,"ofSerial: unable to build comm dcb; (%s)",buf);
		}
	#else
		if(!BuildCommDCB(buf,&cfg.dcb)){
			ofLog(OF_LOG_ERROR,"ofSerial: Can't build comm dcb; %s",buf);
		}
	#endif


	// Set baudrate and bits etc.
	// Note that BuildCommDCB() clears XON/XOFF and hardware control by default

	if(!SetCommState(hComm,&cfg.dcb)){
		ofLog(OF_LOG_ERROR,"ofSerial: Can't set comm state");
	}
	//ofLog(OF_LOG_NOTICE,buf,"bps=%d, xio=%d/%d",cfg.dcb.BaudRate,cfg.dcb.fOutX,cfg.dcb.fInX);

	// Set communication timeouts (NT)
	COMMTIMEOUTS tOut;
	GetCommTimeouts(hComm,&oldTimeout);
	tOut = oldTimeout;
	// Make timeout so that:
	// - return immediately with buffered characters
	tOut.ReadIntervalTimeout=MAXDWORD;
	tOut.ReadTotalTimeoutMultiplier=0;
	tOut.ReadTotalTimeoutConstant=0;
	SetCommTimeouts(hComm,&tOut);

	bInited = true;
	return true;
	//---------------------------------------------
	#endif
	//---------------------------------------------
}
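Typical use of the two overloads above; index and baud are example values:

ofSerial serial;
if(!serial.setup(0, 115200)){ // open by enumeration index
    ofLog(OF_LOG_ERROR, "could not open serial device 0");
}
// or open by explicit name, e.g. serial.setup("/dev/ttyUSB0", 115200);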
Example #9
bool OMXEGLImage::Open(COMXStreamInfo& hints, OMXClock *clock, EGLImageKHR eglImage)
{


	OMX_ERRORTYPE error   = OMX_ErrorNone;


	m_video_codec_name      = "";
	m_codingType            = OMX_VIDEO_CodingUnused;

	m_decoded_width  = hints.width;
	m_decoded_height = hints.height;



	if(!m_decoded_width || !m_decoded_height)
	{
		return false;
	}

	if(hints.extrasize > 0 && hints.extradata != NULL)
	{
		m_extrasize = hints.extrasize;
		m_extradata = (uint8_t *)malloc(m_extrasize);
		memcpy(m_extradata, hints.extradata, hints.extrasize);
	}

	ProcessCodec(hints);


	std::string componentName = decoder_name;
	if(!m_omx_decoder.Initialize(componentName, OMX_IndexParamVideoInit))
	{
		return false;
	}

	componentName = "OMX.broadcom.egl_render";
	if(!m_omx_render.Initialize(componentName, OMX_IndexParamVideoInit))
	{
		return false;
	}

	componentName = "OMX.broadcom.video_scheduler";
	if(!m_omx_sched.Initialize(componentName, OMX_IndexParamVideoInit))
	{
		return false;
	}

	if(clock == NULL)
	{
		return false;
	}

	m_av_clock = clock;
	m_omx_clock = m_av_clock->GetOMXClock();

	if(m_omx_clock->GetComponent() == NULL)
	{
		m_av_clock = NULL;
		m_omx_clock = NULL;
		return false;
	}

	m_omx_tunnel_decoder.Initialize(&m_omx_decoder,		m_omx_decoder.GetOutputPort(),		&m_omx_sched,	m_omx_sched.GetInputPort());
	m_omx_tunnel_sched.Initialize(	&m_omx_sched,		m_omx_sched.GetOutputPort(),		&m_omx_render,	m_omx_render.GetInputPort());
	m_omx_tunnel_clock.Initialize(	m_omx_clock,		m_omx_clock->GetInputPort() + 1,	&m_omx_sched,	m_omx_sched.GetOutputPort() + 1);


	error = m_omx_decoder.SetStateForComponent(OMX_StateIdle);
	if (error != OMX_ErrorNone)
	{
		ofLogError(__func__) << "m_omx_decoder OMX_StateIdle FAIL";
		return false;
	}

	OMX_VIDEO_PARAM_PORTFORMATTYPE formatType;
	OMX_INIT_STRUCTURE(formatType);
	formatType.nPortIndex = m_omx_decoder.GetInputPort();
	formatType.eCompressionFormat = m_codingType;

	if (hints.fpsscale > 0 && hints.fpsrate > 0)
	{
		formatType.xFramerate = (long long)(1<<16)*hints.fpsrate / hints.fpsscale;
	}
	else
	{
		formatType.xFramerate = 25 * (1<<16);
	}

	error = m_omx_decoder.SetParameter(OMX_IndexParamVideoPortFormat, &formatType);
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__) << "m_omx_decoder SET OMX_IndexParamVideoPortFormat PASS";
	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_decoder GET OMX_IndexParamVideoPortFormat FAIL error: %s", COMXCore::getOMXError(error).c_str());
		return false;
	}

	OMX_PARAM_PORTDEFINITIONTYPE portParam;
	OMX_INIT_STRUCTURE(portParam);
	portParam.nPortIndex = m_omx_decoder.GetInputPort();

	error = m_omx_decoder.GetParameter(OMX_IndexParamPortDefinition, &portParam);
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__) << "m_omx_decoder GET OMX_IndexParamPortDefinition PASS";
	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_decoder GET OMX_IndexParamPortDefinition FAIL error: %s", COMXCore::getOMXError(error).c_str());
		return false;
	}

	int numVideoBuffers = 32; //20 is minimum - can get up to 80
	portParam.nBufferCountActual = numVideoBuffers;

	portParam.format.video.nFrameWidth  = m_decoded_width;
	portParam.format.video.nFrameHeight = m_decoded_height;


	error = m_omx_decoder.SetParameter(OMX_IndexParamPortDefinition, &portParam);
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__) << "m_omx_decoder SET OMX_IndexParamPortDefinition PASS";
	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_decoder SET OMX_IndexParamPortDefinition FAIL error: %s", COMXCore::getOMXError(error).c_str());
		return false;
	}


	error = m_omx_tunnel_clock.Establish(false);
	if(error != OMX_ErrorNone)
	{
		ofLogError(__func__) << "m_omx_tunnel_clock.Establish FAIL";
		return false;
	}


	OMX_PARAM_BRCMVIDEODECODEERRORCONCEALMENTTYPE concanParam;
	OMX_INIT_STRUCTURE(concanParam);
	concanParam.bStartWithValidFrame = OMX_FALSE;

	error = m_omx_decoder.SetParameter(OMX_IndexParamBrcmVideoDecodeErrorConcealment, &concanParam);
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__)	<< "m_omx_decoder OMX_IndexParamBrcmVideoDecodeErrorConcealment PASS";
	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_decoder OMX_IndexParamBrcmVideoDecodeErrorConcealment FAIL error: %s", COMXCore::getOMXError(error).c_str());
		return false;
	}

	if(NaluFormatStartCodes(hints.codec, m_extradata, m_extrasize))
	{
		OMX_NALSTREAMFORMATTYPE nalStreamFormat;
		OMX_INIT_STRUCTURE(nalStreamFormat);
		nalStreamFormat.nPortIndex = m_omx_decoder.GetInputPort();
		nalStreamFormat.eNaluFormat = OMX_NaluFormatStartCodes;

		error = m_omx_decoder.SetParameter((OMX_INDEXTYPE)OMX_IndexParamNalStreamFormatSelect, &nalStreamFormat);
		if (error == OMX_ErrorNone)
		{
			ofLogVerbose(__func__)	<< "Open OMX_IndexParamNalStreamFormatSelect PASS";
		}
		else
		{
			ofLog(OF_LOG_ERROR, "Open OMX_IndexParamNalStreamFormatSelect FAIL (0%08x)\n", error);
			return false;
		}

	}

	// broadcom omx entension:
	// When enabled, the timestamp fifo mode will change the way incoming timestamps are associated with output images.
	// In this mode the incoming timestamps get used without re-ordering on output images.
	if(hints.ptsinvalid)
	{
		OMX_CONFIG_BOOLEANTYPE timeStampMode;
		OMX_INIT_STRUCTURE(timeStampMode);
		timeStampMode.bEnabled = OMX_TRUE;
		error = m_omx_decoder.SetParameter((OMX_INDEXTYPE)OMX_IndexParamBrcmVideoTimestampFifo, &timeStampMode);

		if (error == OMX_ErrorNone)
		{
			ofLogVerbose(__func__)	<< "Open OMX_IndexParamBrcmVideoTimestampFifo PASS";
		}
		else
		{
			ofLog(OF_LOG_ERROR, "Open OMX_IndexParamBrcmVideoTimestampFifo error (0%08x)\n", error);
			return false;
		}


	}


	// Alloc buffers for the omx intput port.
	error = m_omx_decoder.AllocInputBuffers();
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__) << "m_omx_decoder AllocInputBuffers PASS";
	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_decoder AllocInputBuffers FAIL error: %s", COMXCore::getOMXError(error).c_str());
		return false;
	}



	error = m_omx_tunnel_decoder.Establish(false);
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__) << "m_omx_tunnel_decoder Establish PASS";
	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_tunnel_decoder Establish FAIL error: %s", COMXCore::getOMXError(error).c_str());
		return false;
	}

	error = m_omx_decoder.SetStateForComponent(OMX_StateExecuting);
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__) << "m_omx_decoder OMX_StateExecuting PASS";
	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_decoder OMX_StateExecuting FAIL error: %s", COMXCore::getOMXError(error).c_str());
		return false;
	}


	error = m_omx_tunnel_sched.Establish(false);
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__) << "m_omx_tunnel_sched Establish PASS";
	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_tunnel_sched Establish FAIL error: %s", COMXCore::getOMXError(error).c_str());
		return false;
	}

	error = m_omx_sched.SetStateForComponent(OMX_StateExecuting);
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__) << "m_omx_sched OMX_StateExecuting PASS";
	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_sched OMX_StateExecuting FAIL error: %s", COMXCore::getOMXError(error).c_str());
		return false;
	}
#if 0
	OMX_CONFIG_PORTBOOLEANTYPE discardMode;
	OMX_INIT_STRUCTURE(discardMode);
	discardMode.nPortIndex = m_omx_render.GetInputPort();
	discardMode.bEnabled = OMX_FALSE;
	error = m_omx_render.SetParameter(OMX_IndexParamBrcmVideoEGLRenderDiscardMode, &discardMode);	
	if (error != OMX_ErrorNone) 
	{
		ofLog(OF_LOG_ERROR, "m_omx_render OMX_SetParameter OMX_IndexParamBrcmVideoEGLRenderDiscardMode FAIL error: %s", COMXCore::getOMXError(error).c_str());
	}
#endif	

	OMX_PARAM_PORTDEFINITIONTYPE portParamRenderInput;
	OMX_INIT_STRUCTURE(portParamRenderInput);
	portParamRenderInput.nPortIndex = m_omx_render.GetInputPort();

	error = m_omx_render.GetParameter(OMX_IndexParamPortDefinition, &portParamRenderInput);
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__) << "m_omx_render GET OMX_IndexParamPortDefinition PASS";

	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_render GET OMX_IndexParamPortDefinition FAIL error: %s", COMXCore::getOMXError(error).c_str());

	}

	OMX_PARAM_PORTDEFINITIONTYPE portParamRenderOutput;
	OMX_INIT_STRUCTURE(portParamRenderOutput);
	portParamRenderOutput.nPortIndex = m_omx_render.GetOutputPort();

	error = m_omx_render.GetParameter(OMX_IndexParamPortDefinition, &portParamRenderOutput);
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__) << "m_omx_render GET OMX_IndexParamPortDefinition PASS";

	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_render GET OMX_IndexParamPortDefinition FAIL error: %s", COMXCore::getOMXError(error).c_str());

	}
	
	// Alloc buffers for the m_omx_render input port.
	error = m_omx_render.AllocInputBuffers();
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__) << "m_omx_render AllocInputBuffers PASS";
	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_render AllocInputBuffers FAIL error: %s", COMXCore::getOMXError(error).c_str());
		return false;
	}
	
	
	error = m_omx_render.SetStateForComponent(OMX_StateIdle);
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__) << "m_omx_render OMX_StateIdle PASS";
	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_render OMX_StateIdle FAIL error: %s", COMXCore::getOMXError(error).c_str());
		return false;
	}
	

	ofLogVerbose(__func__) << "m_omx_render.GetOutputPort(): " << m_omx_render.GetOutputPort();
	error = m_omx_render.EnablePort(m_omx_render.GetOutputPort(), false);
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__) << "m_omx_render Enable OUTPUT Port PASS";
	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_render Enable OUTPUT Port  FAIL error: %s", COMXCore::getOMXError(error).c_str());
		return false;
	}


	OMX_BUFFERHEADERTYPE* eglBuffer = NULL;
	error = m_omx_render.UseEGLImage(&eglBuffer, m_omx_render.GetOutputPort(), NULL, eglImage);
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__) << "m_omx_render UseEGLImage PASS";
	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_render UseEGLImage  FAIL error: %s", COMXCore::getOMXError(error).c_str());
		return false;
	}


	if(SendDecoderConfig())
	{
		ofLogVerbose(__func__) << "SendDecoderConfig PASS";
	}
	else
	{
		ofLog(OF_LOG_ERROR, "SendDecoderConfig FAIL");
		return false;
	}


	m_omx_render.SetCustomDecoderFillBufferDoneHandler(&OMXEGLImage::onFillBufferDone);
	error = m_omx_render.SetStateForComponent(OMX_StateExecuting);
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__) << "m_omx_render OMX_StateExecuting PASS";
	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_render OMX_StateExecuting FAIL error: %s", COMXCore::getOMXError(error).c_str());
		return false;
	}
	error = m_omx_render.FillThisBuffer(eglBuffer);
	if(error == OMX_ErrorNone)
	{
		ofLogVerbose(__func__) << "m_omx_render FillThisBuffer PASS";
	}
	else
	{
		ofLog(OF_LOG_ERROR, "m_omx_render FillThisBuffer FAIL error: %s", COMXCore::getOMXError(error).c_str());
		if (error == OMX_ErrorIncorrectStateOperation)
		{
			ofLogError(__func__) << "NEED EGL HACK";
		}
		return false;
	}

	m_is_open           = true;
	m_drop_state        = false;
	m_setStartTime      = true;


	ofLog(OF_LOG_VERBOSE,
	      "%s::%s - decoder_component: 0x%p, input_port: 0x%x, output_port: 0x%x \n",
	      "OMXEGLImage", __func__, m_omx_decoder.GetComponent(), m_omx_decoder.GetInputPort(), m_omx_decoder.GetOutputPort());

	m_first_frame   = true;
	// start from assuming all recent frames had valid pts
	m_history_valid_pts = ~0;
	return true;
}
Example #10
//---------------------------------------------------------------------------
bool ofVideoPlayer::loadMovie(string name){


	//--------------------------------------
	#ifdef OF_VIDEO_PLAYER_QUICKTIME
	//--------------------------------------

		initializeQuicktime();			// init quicktime
		closeMovie();					// if we have a movie open, close it
		bLoaded 				= false;	// try to load now

		if( name.substr(0, 7) == "http://" || name.substr(0,7) == "rtsp://" ){
			if(! createMovieFromURL(name, moviePtr) ) return false;
		}else{
			name 					= ofToDataPath(name);
			if( !createMovieFromPath((char *)name.c_str(), moviePtr) ) return false;
		}

		bool bDoWeAlreadyHaveAGworld = false;
		if (width != 0 && height != 0){
			bDoWeAlreadyHaveAGworld = true;
		}
		Rect 				movieRect;
		GetMovieBox(moviePtr, &(movieRect));
		if (bDoWeAlreadyHaveAGworld){
			// is the gworld the same size, then lets *not* de-allocate and reallocate:
			if (width == movieRect.right &&
				height == movieRect.bottom){
				SetMovieGWorld (moviePtr, offscreenGWorld, nil);
			} else {
				width 	= movieRect.right;
				height 	= movieRect.bottom;
				delete(pixels);
				delete(offscreenGWorldPixels);
				if ((offscreenGWorld)) DisposeGWorld((offscreenGWorld));
				createImgMemAndGWorld();
			}
		} else {
			width	= movieRect.right;
			height 	= movieRect.bottom;
			createImgMemAndGWorld();
		}

		if (moviePtr == NULL){
			return false;
		}

		//----------------- callback method
	    myDrawCompleteProc = NewMovieDrawingCompleteUPP (DrawCompleteProc);
		SetMovieDrawingCompleteProc (moviePtr, movieDrawingCallWhenChanged,  myDrawCompleteProc, (long)this);

		// ------------- get the total # of frames:
		nFrames				= 0;
		TimeValue			curMovieTime;
		curMovieTime		= 0;
		TimeValue			duration;

		//OSType whichMediaType	= VIDEO_TYPE; // mingw chokes on this
		OSType whichMediaType	= FOUR_CHAR_CODE('vide');

		short flags				= nextTimeMediaSample + nextTimeEdgeOK;

		while( curMovieTime >= 0 ) {
			nFrames++;
			GetMovieNextInterestingTime(moviePtr,flags,1,&whichMediaType,curMovieTime,0,&curMovieTime,&duration);
			flags = nextTimeMediaSample;
		}
		nFrames--; // there's an extra time step at the end of the movie




		// ------------- get some pixels in there ------
		GoToBeginningOfMovie(moviePtr);
		SetMovieActiveSegment(moviePtr, -1,-1);
		MoviesTask(moviePtr,0);

		#if defined(TARGET_OSX) && defined(__BIG_ENDIAN__)
			convertPixels(offscreenGWorldPixels, pixels, width, height);
		#endif

		if (bUseTexture == true){
			tex.loadData(pixels, width, height, GL_RGB);
		}

		bStarted 				= false;
		bLoaded 				= true;
		bPlaying 				= false;
		bHavePixelsChanged 		= false;
		speed 					= 1;

		return true;

	//--------------------------------------
	#else
	//--------------------------------------


		if(gstUtils.loadMovie(name)){
			if(bUseTexture){
				tex.allocate(gstUtils.getWidth(),gstUtils.getHeight(),GL_RGB,false);
				tex.loadData(gstUtils.getPixels(), gstUtils.getWidth(), gstUtils.getHeight(), GL_RGB);
			}
			height=gstUtils.getHeight();
			width=gstUtils.getWidth();
			bLoaded = true;
			allocated = true;
			ofLog(OF_LOG_VERBOSE,"ofVideoPlayer: movie loaded");
			return true;
		}else{
			ofLog(OF_LOG_ERROR,"ofVideoPlayer couldn't load movie");
			return false;
		}


	//--------------------------------------
	#endif
	//--------------------------------------



}
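Standard caller flow for the loader above; the path is an example:

ofVideoPlayer player;
if(player.loadMovie("movies/fingers.mov")){
    player.play();
}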
Example #11
//----------------------------------------------------------------
void ofSerial::enumerateDevices(){

	//---------------------------------------------
	#if defined( TARGET_OSX )
	//---------------------------------------------

		//----------------------------------------------------
		//We will find serial devices by listing the directory

		DIR *dir;
		struct dirent *entry;
		dir = opendir("/dev");
		string str			= "";
		string device		= "";
		int deviceCount		= 0;

		if (dir == NULL){
			ofLog(OF_LOG_ERROR,"ofSerial: error listing devices in /dev");
		} else {
			printf("ofSerial: listing devices\n");
			while ((entry = readdir(dir)) != NULL){
				str = (char *)entry->d_name;
				//if( str.substr(0,3) == "cu." || str.substr(0,4) == "tty." ){
				if( str.find("cu.usbserial") == 0 || str.find("tty.usbserial") == 0 ){
					printf("device %i - %s\n", deviceCount, str.c_str());
					deviceCount++;
				}
			}
		}

	//---------------------------------------------
    #endif
    //---------------------------------------------

	//---------------------------------------------
	#if defined( TARGET_LINUX )
	//---------------------------------------------

		//----------------------------------------------------
		//We will find serial devices by listing the directory

		DIR *dir;
		struct dirent *entry;
		dir = opendir("/dev");
		string str			= "";
		string device		= "";
		int deviceCount		= 0;

		if (dir == NULL){
			ofLog(OF_LOG_ERROR,"ofSerial: error listing devices in /dev");
		} else {
			printf("ofSerial: listing devices\n");
			while ((entry = readdir(dir)) != NULL){
				str = (char *)entry->d_name;
				if( str.substr(0,4) == "ttyS" || str.substr(0,6) == "ttyUSB" || str.substr(0,3) == "rfc" ){
					printf("device %i - %s\n", deviceCount, str.c_str());
					deviceCount++;
				}
			}
		}

	//---------------------------------------------
	#endif
	//---------------------------------------------

	//---------------------------------------------
	#ifdef TARGET_WIN32
	//---------------------------------------------

		enumerateWin32Ports();
		printf("ofSerial: listing devices (%i total)\n", nPorts);
		for (int i = 0; i < nPorts; i++){
			printf("device %i -- %s\n", i, portNamesFriendly[i]);
		}

	//---------------------------------------------
    #endif
    //---------------------------------------------

}
Example #12
//------------------------------------
//for getting a reference to the texture
ofTexture & ofVideoPlayer::getTextureReference(){
	if(!tex.bAllocated() ){
		ofLog(OF_LOG_WARNING, "ofVideoPlayer - getTextureReference - texture is not allocated");
	}
	return tex;
}
Example #13
void ofxAssert(bool condition, string message) {
    if (!condition) {
        ofLog(OF_LOG_ERROR,"%s", message.c_str());
        std::exit(1);
    }
}
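Usage follows the conventional assert pattern; note that, as written, the helper exits the whole process rather than throwing:

// Abort with a logged message when a precondition fails.
ofxAssert(ofGetWidth() > 0, "window was never created");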
Example #14
void ofxNotice(string msg) {
    ofLog(OF_LOG_NOTICE, msg);
}
Example #15
void ofxFFTBase::updateAudioData(ofxFFTData & audioData, float * dataNew) {
    audioData.data.clear();
    audioData.data.resize(audioData.size, 0);
    
    for(int i=0; i<audioData.size; i++) {
        audioData.data[i] = dataNew[i];
    }
    
    audioData.peakValue = 0;
    audioData.peakAverage = 0;
    
    for(int i=0; i<audioData.size; i++) {
        if(audioData.peakValue < audioData.data[i]) {
            audioData.peakValue = audioData.data[i];
        }
    }
    
    for(int i=0; i<audioData.size; i++) {
        float p = i / (float)(audioData.size - 1);
        
        float dataVal;
        dataVal = audioData.data[i]; // use magnitude for fft data.
        dataVal *= audioData.linearEQIntercept + p * audioData.linearEQSlope; // scale value.
        
        if(isinf(dataVal)) {
            ofLog(OF_LOG_ERROR, "ofxFFTBase::updateAudioData - audio data value is infinity.");
            audioData.peakValue = 0;
            return;
        }
        
        float dataMaxVal;
        dataMaxVal = audioData.dataMax[i];
        
        if(dataMaxVal < dataVal) {
            dataMaxVal = dataVal;
        }
        
        float dataNormVal;
        dataNormVal = 0;
        if(dataMaxVal > 0) {
            dataNormVal = dataVal / dataMaxVal; // normalise data between 0 and 1.
        }
        
        if(dataVal < 0.1) {
            dataNormVal = 0;
        }
        
        dataMaxVal *= audioData.maxDecay; // decay the max value.
        
        audioData.dataNorm[i] = dataNormVal;
        
        audioData.dataMax[i] = dataMaxVal;
        
        float dataPeakVal;
        dataPeakVal = audioData.dataPeak[i];
        dataPeakVal *= audioData.peakDecay; // decay peak value.
        
        if(dataPeakVal < dataNormVal) { // check if new peak.
            dataPeakVal = dataNormVal;
        }
        
        audioData.dataPeak[i] = dataPeakVal;
        
        audioData.peakAverage += dataPeakVal; // sum of all peaks.
        
        int dataCutVal; // switch data (on/off).
        if(dataPeakVal < audioData.cutThreshold) {
            dataCutVal = 1;
        } else {
            dataCutVal = 0;
        }
        
        audioData.dataCut[i] = dataCutVal;
    }
    
    audioData.peakAverage /= audioData.size;
}
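A sketch of feeding this from an FFT magnitude array; the accessor name on the FFT object is an assumption, and dataNew must hold at least audioData.size values:

// Hypothetical caller: copy the latest magnitude spectrum into audioData.
float* bins = fft.getFftData();  // assumed accessor for the magnitude bins
updateAudioData(fftData, bins);  // fftData.size must match the bin count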
Example #16
bool OMXEGLImage::Decode(uint8_t *pData, int iSize, double pts)
{
	CSingleLock lock (m_critSection);
	OMX_ERRORTYPE error;

	if( m_drop_state || !m_is_open )
	{
		return true;
	}

	unsigned int demuxer_bytes = (unsigned int)iSize;
	uint8_t *demuxer_content = pData;

	if (demuxer_content && demuxer_bytes > 0)
	{
		while(demuxer_bytes)
		{
			// 500ms timeout
			OMX_BUFFERHEADERTYPE *omx_buffer = m_omx_decoder.GetInputBuffer(500);
			if(omx_buffer == NULL)
			{
				ofLog(OF_LOG_ERROR, "OMXEGLImage::Decode timeout\n");
				return false;
			}

			omx_buffer->nFlags = 0;
			omx_buffer->nOffset = 0;

			if(m_setStartTime)
			{
				omx_buffer->nFlags |= OMX_BUFFERFLAG_STARTTIME;
				ofLog(OF_LOG_VERBOSE, "OMXEGLImage::Decode VDec : setStartTime %f\n", (pts == DVD_NOPTS_VALUE ? 0.0 : pts) / DVD_TIME_BASE);
				m_setStartTime = false;
			}
			else if(pts == DVD_NOPTS_VALUE)
			{
				omx_buffer->nFlags |= OMX_BUFFERFLAG_TIME_UNKNOWN;
			}

			omx_buffer->nTimeStamp = ToOMXTime((uint64_t)(pts == DVD_NOPTS_VALUE ? 0 : pts));
			omx_buffer->nFilledLen = (demuxer_bytes > omx_buffer->nAllocLen) ? omx_buffer->nAllocLen : demuxer_bytes;
			memcpy(omx_buffer->pBuffer, demuxer_content, omx_buffer->nFilledLen);

			demuxer_bytes -= omx_buffer->nFilledLen;
			demuxer_content += omx_buffer->nFilledLen;

			if(demuxer_bytes == 0)
			{
				omx_buffer->nFlags |= OMX_BUFFERFLAG_ENDOFFRAME;
			}

			int nRetry = 0;
			while(true)
			{
				//ofLogVerbose(__func__) << "nRetry: " << nRetry;
				error = m_omx_decoder.EmptyThisBuffer(omx_buffer);
				if (error == OMX_ErrorNone)
				{
					//ofLog(OF_LOG_VERBOSE, "VideD:  pts:%.0f size:%d)\n", pts, iSize);
					break;
				}
				else
				{
					ofLogError(__func__) << "OMX_EmptyThisBuffer() FAIL: " << COMXCore::getOMXError(error);
					nRetry++;
				}
				if(nRetry == 5)
				{
					ofLogError(__func__) << "OMX_EmptyThisBuffer() FAILED 5 TIMES";
					return false;
				}
			}
		}

		return true;
	}
	return false;
}
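The inner loop above retries EmptyThisBuffer a fixed number of times before giving up. The same bounded-retry idiom, pulled out as a sketch (hypothetical helper, requires <functional>; the real code calls the OMX component directly):

#include <functional>

// Bounded-retry sketch: attempt a submit up to maxRetries times.
static bool submitWithRetry(const std::function<bool()> &trySubmit, int maxRetries = 5) {
    for (int attempt = 0; attempt < maxRetries; ++attempt) {
        if (trySubmit()) {
            return true;  // buffer accepted by the decoder
        }
    }
    return false;         // caller should treat this as a decode failure
}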
Exemplo n.º 17
0
void soundMap::receivedBang(ofxTLBangEventArgs& bang) {
    ofLogNotice("soundMap") << "Bang fired from track " << bang.flag;
    if(bang.flag ==  "rattle") {
        ofLog()<< "pimg";
        theRattle.goTime =true;
    }
    else if(bang.flag ==  "catPurring") {
        ofLog()<< "pimg";
        catPurring.goTime =true;
    }
    else if(bang.flag ==  "skinScratch") {
        ofLog()<< "pimg";
        skinScratch.goTime =true;
    }
    else if(bang.flag ==  "writing") {
        ofLog()<< "pimg";
        writing.goTime =true;
    }
    else if(bang.flag ==  "crying") {
        ofLog()<< "pimg";
        crying.goTime =true;
    }
    else if(bang.flag ==  "showerTime") {
        ofLog()<< "pimg";
        showerTime.goTime =true;
    }
    else if(bang.flag ==  "tubDrip") {
        ofLog()<< "pimg";
        tubDrip.goTime =true;
    }
    else if(bang.flag ==  "mixing") {
        ofLog()<< "pimg";
        mixing.goTime =true;
    }
    else if(bang.flag ==  "kettle") {
        ofLog()<< "pimg";
        kettle.goTime =true;
    }
    else if(bang.flag ==  "appleChop") {
        ofLog()<< "pimg";
        appleChop.goTime =true;
    }
    else if(bang.flag ==  "brushTeeth") {
        ofLog()<< "pimg";
        brushTeeth.goTime =true;
    }
    else if(bang.flag ==  "mug1") {
        ofLog()<< "pimg";
        mug1.goTime =true;
    }
    else if(bang.flag ==  "mug2") {
        ofLog()<< "pimg";
        mug2.goTime =true;
    }
    else if(bang.flag ==  "waterPour") {
        ofLog()<< "pimg";
        waterPour.goTime =true;
    }
    else if(bang.flag ==  "eggCrack") {
        ofLog()<< "pimg";
        eggCrack.goTime =true;
    }
    else if(bang.flag ==  "stove") {
        ofLog()<< "pimg";
        stove.goTime =true;
    }
    else if(bang.flag ==  "longCooking") {
        longCooking.goTime =true;
        ofLog()<<"setToTrue";
    }
    else if(bang.flag ==  "humming") {
        humming.goTime =true;
    }
    else if(bang.flag ==  "overheardUpstairs") {
        overheardUpstairs.goTime =true;
    }
    else if(bang.flag ==  "gwenRun") {
        gwenRun.goTime =true;
    }
    else if(bang.flag ==  "dishes") {
        dishes.goTime =true;
    }
    else if(bang.flag ==  "sleep") {
        sleep.goTime =true;
    }
}
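Every branch above does the same thing: match bang.flag and set one goTime. A table-driven alternative keeps the handler a single lookup; a sketch, assuming goTime is a plain bool member and that registerTriggers()/receivedBangLookup() are hypothetical additions to soundMap (requires <map> and <string>):

// Sketch: register each flag once, then handle bangs with one lookup.
std::map<std::string, bool*> triggers; // hypothetical soundMap member

void soundMap::registerTriggers() {
    triggers["rattle"]      = &theRattle.goTime;
    triggers["catPurring"]  = &catPurring.goTime;
    triggers["skinScratch"] = &skinScratch.goTime;
    // ... one entry per timeline flag ...
}

void soundMap::receivedBangLookup(ofxTLBangEventArgs& bang) {
    std::map<std::string, bool*>::iterator it = triggers.find(bang.flag);
    if (it != triggers.end()) {
        *(it->second) = true;
    } else {
        ofLogWarning("soundMap") << "unknown bang flag: " << bang.flag;
    }
}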
Exemplo n.º 18
0
//-----------------------------------------------------------
bool ofxSosoTrueTypeFont::loadFont(string filename, int fontsize, bool _bAntiAliased, bool _bFullCharacterSet, bool makeContours, bool makeMipMaps, float simplifyAmt, int dpi){	//soso - added makeMipMaps (see below)
    
    
	bMakeContours = makeContours;
    
	//------------------------------------------------
	if (bLoadedOk == true){
        
		// we've already been loaded, try to clean up :
		unloadTextures();
	}
	//------------------------------------------------
    
	if( dpi == 0 ){
		dpi = ttfGlobalDpi;
	}
    
	filename = ofToDataPath(filename);
    
	bLoadedOk 			= false;
	bAntiAliased 		= _bAntiAliased;
	bFullCharacterSet 	= _bFullCharacterSet;
	fontSize			= fontsize;
    
	//--------------- load the library and typeface
	
    FT_Error err;
    
    FT_Library library;
    if ((err = FT_Init_FreeType( &library ))){
		ofLog(OF_LOG_ERROR,"ofTrueTypeFont::loadFont - Error initializing freetype lib: FT_Error = %d", err);
		return false;
	}
    
	FT_Face face;
    
	if ((err = FT_New_Face( library, filename.c_str(), 0, &face ))) {
        // simple error table in lieu of full table (see fterrors.h)
        string errorString = "unknown freetype";
        if(err == 1) errorString = "INVALID FILENAME";
        ofLog(OF_LOG_ERROR,"ofTrueTypeFont::loadFont - %s: %s: FT_Error = %d", errorString.c_str(), filename.c_str(), err);
		return false;
	}
    
	//FT_Set_Char_Size( face, fontsize << 6, fontsize << 6, dpi, dpi); //of
	//FT_Set_Char_Size( face, 0, fontsize*dpi, 0, dpi); //soso
    FT_Set_Char_Size( face, 0, fontsize*64, 0, dpi); //soso
	
	lineHeight = fontsize * 1.43f;
    
	//------------------------------------------------------
	//kerning would be great to support:
	//ofLog(OF_LOG_NOTICE,"FT_HAS_KERNING ? %i", FT_HAS_KERNING(face));
	//------------------------------------------------------
    
	//nCharacters = bFullCharacterSet ? 256 : 128 - NUM_CHARACTER_TO_START;
	nCharacters = bFullCharacterSet ? 512 : 128 - NUM_CHARACTER_TO_START;
    
	//--------------- initialize character info and textures
	cps.resize(nCharacters);
    
	if(bMakeContours){
		charOutlines.clear();
		charOutlines.assign(nCharacters, ofTTFCharacter());
	}
    
	vector<ofPixels> expanded_data(nCharacters);
    
	long areaSum=0;
    
	//--------------------- load each char -----------------------
	for (int i = 0 ; i < nCharacters; i++){
        
		//------------------------------------------ anti aliased or not:
		//if(err = FT_Load_Glyph( face, FT_Get_Char_Index( face, (unsigned char)(i+NUM_CHARACTER_TO_START) ), FT_LOAD_DEFAULT )){
		if((err = FT_Load_Glyph( face, getFTCharIndex( face, (unsigned char)(i+NUM_CHARACTER_TO_START) ), FT_LOAD_DEFAULT ))){		//soso replaced FT_Get_Char_Index with our custom version
			ofLog(OF_LOG_ERROR,"ofTrueTypeFont::loadFont - Error with FT_Load_Glyph %i: FT_Error = %d", i, err);
            
		}
        
		if (bAntiAliased == true) FT_Render_Glyph(face->glyph, FT_RENDER_MODE_NORMAL);
		else FT_Render_Glyph(face->glyph, FT_RENDER_MODE_MONO);
        
		//------------------------------------------
		FT_Bitmap& bitmap= face->glyph->bitmap;
        
        
		// prepare the texture:
		//int width  = ofNextPow2( bitmap.width + border*2 );
        // int height = ofNextPow2( bitmap.rows  + border*2 );
         
         
         //// ------------------------- this is fixing a bug with small type
         //// ------------------------- appearantly, opengl has trouble with
         //// ------------------------- width or height textures of 1, so we
         //// ------------------------- we just set it to 2...
         //if (width == 1) width = 2;
         //if (height == 1) height = 2;
        
        
		if(bMakeContours){
			if( printVectorInfo )printf("\n\ncharacter %c: \n", char( i+NUM_CHARACTER_TO_START ) );
            
			//int character = i + NUM_CHARACTER_TO_START;
			charOutlines[i] = makeContoursForCharacter( face );
			if(simplifyAmt>0)
				charOutlines[i].simplify(simplifyAmt);
			charOutlines[i].getTessellation();
		}
        
        
		// -------------------------
		// info about the character:
		cps[i].character		= i;
		cps[i].height 			= face->glyph->bitmap_top;
		cps[i].width 			= face->glyph->bitmap.width;
		cps[i].setWidth 		= face->glyph->advance.x >> 6;
		cps[i].topExtent 		= face->glyph->bitmap.rows;
		cps[i].leftExtent		= face->glyph->bitmap_left;
        
		int width  = cps[i].width;
		int height = bitmap.rows;
        
        
		cps[i].tW				= width;
		cps[i].tH				= height;
        
        
        
		GLint fheight	= cps[i].height;
		GLint bwidth	= cps[i].width;
		GLint top		= cps[i].topExtent - cps[i].height;
		GLint lextent	= cps[i].leftExtent;
        
		GLfloat	corr, stretch;
        
		//this accounts for the fact that we are showing 2*visibleBorder extra pixels
		//so we make the size of each char that many pixels bigger
		stretch = 0;//(float)(visibleBorder * 2);
        
		corr	= (float)(( (fontSize - fheight) + top) - fontSize);
        
		cps[i].x1		= lextent + bwidth + stretch;
		cps[i].y1		= fheight + corr + stretch;
		cps[i].x2		= (float) lextent;
		cps[i].y2		= -top + corr;
        
        
		// Allocate Memory For The Texture Data.
		expanded_data[i].allocate(width, height, 2);
		//-------------------------------- clear data:
		expanded_data[i].set(0,255); // every luminance pixel = 255
		expanded_data[i].set(1,0);
        
        
		if (bAntiAliased == true){
			ofPixels bitmapPixels;
			bitmapPixels.setFromExternalPixels(bitmap.buffer,bitmap.width,bitmap.rows,1);
			expanded_data[i].setChannel(1,bitmapPixels);
		} else {
			//-----------------------------------
			// true type packs monochrome info in a
			// 1-bit format, hella funky
			// here we unpack it:
			unsigned char *src =  bitmap.buffer;
			for(int j=0; j <bitmap.rows;j++) {
				unsigned char b=0;
				unsigned char *bptr =  src;
				for(int k=0; k < bitmap.width ; k++){
					expanded_data[i][2*(k+j*width)] = 255;
                    
					if (k%8==0){
						b = (*bptr++);
					}
                    
					expanded_data[i][2*(k+j*width) + 1] = b&0x80 ? 255 : 0;
					b <<= 1;
				}
				src += bitmap.pitch;
			}
			//-----------------------------------
		}
        
		areaSum += (cps[i].width+border*2)*(cps[i].height+border*2);
	}
    
    
	vector<charProps> sortedCopy = cps;
	sort(sortedCopy.begin(),sortedCopy.end(),&compare_cps);
    
	// pack in a texture, algorithm to calculate min w/h from
	// http://upcommons.upc.edu/pfc/bitstream/2099.1/7720/1/TesiMasterJonas.pdf
	//cout << areaSum << endl;
    
	bool packed = false;
	float alpha = logf(areaSum)*1.44269; // log2(areaSum), via ln(x) * 1/ln(2)
    
	int w;
	int h;
	while(!packed){
		w = pow(2,floor((alpha/2.f) + 0.5)); // there doesn't seem to be a round in cmath for windows.
		//w = pow(2,round(alpha/2.f));
		h = w;//pow(2,round(alpha - round(alpha/2.f)));
		int x=0;
		int y=0;
		int maxRowHeight = sortedCopy[0].tH + border*2;
		for(int i=0;i<(int)cps.size();i++){
			if(x+sortedCopy[i].tW + border*2>w){
				x = 0;
				y += maxRowHeight;
				maxRowHeight = sortedCopy[i].tH + border*2;
				if(y + maxRowHeight > h){
					alpha++;
					break;
				}
			}
			x+= sortedCopy[i].tW + border*2;
			if(i==(int)cps.size()-1) packed = true;
		}
        
	}
    
    
    
	ofPixels atlasPixels;
	atlasPixels.allocate(w,h,2);
	atlasPixels.set(0,255);
	atlasPixels.set(1,0);
    
    
	int x=0;
	int y=0;
	int maxRowHeight = sortedCopy[0].tH + border*2;
	for(int i=0;i<(int)cps.size();i++){
		ofPixels & charPixels = expanded_data[sortedCopy[i].character];
        
		if(x+sortedCopy[i].tW + border*2>w){
			x = 0;
			y += maxRowHeight;
			maxRowHeight = sortedCopy[i].tH + border*2;
		}
        
		cps[sortedCopy[i].character].t2		= float(x + border)/float(w);
		cps[sortedCopy[i].character].v2		= float(y + border)/float(h);
		cps[sortedCopy[i].character].t1		= float(cps[sortedCopy[i].character].tW + x + border)/float(w);
		cps[sortedCopy[i].character].v1		= float(cps[sortedCopy[i].character].tH + y + border)/float(h);
		charPixels.pasteInto(atlasPixels,x+border,y+border);
		x+= sortedCopy[i].tW + border*2;
	}
    
    
	texAtlas.allocate(atlasPixels.getWidth(),atlasPixels.getHeight(),GL_LUMINANCE_ALPHA,false);
    
	if(bAntiAliased && fontsize>20){
		if (makeMipMaps) { //soso - filters for this case are set in the manual mipmap block after loadData() below
			//texAtlas.enableMipmaps();
			//texAtlas.setTextureMinMagFilter(GL_LINEAR_MIPMAP_LINEAR,GL_LINEAR_MIPMAP_LINEAR); //soso
            
		} else	//soso
			texAtlas.setTextureMinMagFilter(GL_LINEAR,GL_LINEAR);
	}else{
		texAtlas.setTextureMinMagFilter(GL_NEAREST,GL_NEAREST);
	}
    
	texAtlas.loadData(atlasPixels.getPixels(),atlasPixels.getWidth(),atlasPixels.getHeight(),GL_LUMINANCE_ALPHA);
    
    
    ///////////////////////////////////////////////////////////////////////sosoAddon
    //until ofTexture fully supports mipmaps, we gotta do it manually here - AFTER loadData is called on the texture
    //it's a redo of what happens inside tex.loadData(), but instead we build the mipmaps
    if(makeMipMaps){
        glEnable(texAtlas.getTextureData().textureTarget);
        glBindTexture(texAtlas.getTextureData().textureTarget, (GLuint) texAtlas.getTextureData().textureID);
        
        glTexParameteri(texAtlas.getTextureData().textureTarget, GL_GENERATE_MIPMAP_SGIS, true);	
        glTexParameteri( texAtlas.getTextureData().textureTarget, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri( texAtlas.getTextureData().textureTarget, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);			
        glTexParameteri( texAtlas.getTextureData().textureTarget, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
        glTexParameteri( texAtlas.getTextureData().textureTarget, GL_TEXTURE_MAG_FILTER, GL_LINEAR); // mag filter has no mipmap variants; GL_LINEAR_MIPMAP_LINEAR would be an invalid enum here
        gluBuild2DMipmaps(texAtlas.getTextureData().textureTarget, texAtlas.getTextureData().glType,
                          w, h, texAtlas.getTextureData().glType, texAtlas.getTextureData().pixelType, atlasPixels.getPixels());
        glDisable(texAtlas.getTextureData().textureTarget);
    }   
    //////////////////////////////////////////////////////////////////////
    

	//Sosolimited - load kerning pairs 
	
	//initialize all pairs to 0
	for (int i = 0; i < FONT_NUM_CHARS; i++) {
		for (int j = 0; j < FONT_NUM_CHARS; j++) {
			kerningPairs[i][j] = 0;
		}
	}	
	//find out if the face has kerning
	FT_Bool use_kerning = (FT_Bool)FT_HAS_KERNING(face);
	if(use_kerning) printf("ofxSosoTrueTypeFont::loadFont() - kerning is supported\n");
	else printf("ofxSosoTrueTypeFont::loadFont() - kerning is NOT supported\n");
	
	FT_UInt glyph_index_r, glyph_index_l;
	
	for (int i = 0; i < FONT_NUM_CHARS; i++) {
		// convert character code to glyph index  
		glyph_index_r = FT_Get_Char_Index(face, i + NUM_CHARACTER_TO_START);
		
		for (int j = 0; j < FONT_NUM_CHARS; j++) {
			// convert character code to glyph index    			
			glyph_index_l = FT_Get_Char_Index(face, j + NUM_CHARACTER_TO_START);

			// retrieve kerning distance 
			if (use_kerning  &&  glyph_index_l  &&  glyph_index_r) {
								
				FT_Vector  delta;
				FT_Get_Kerning( face, glyph_index_l, glyph_index_r, FT_KERNING_DEFAULT, &delta );					
						
				kerningPairs[i][j] = delta.x >> 6;

				//if(i<127)
				//if(fabs((float)kerningPairs[i][j]) > 0) printf("kerningPairs: %c%c = %d, delta = %d\n", i + NUM_CHARACTER_TO_START, j + NUM_CHARACTER_TO_START, kerningPairs[i][j], delta.x);
			}
		}		
	}

	bLoadedOk = true;
	return true;
}
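With kerningPairs filled, a layout routine consumes one entry per adjacent pair of characters. A sketch of that lookup (hypothetical helper inside ofxSosoTrueTypeFont; note the loops above index the table as [right][left], and this assumes FONT_NUM_CHARS matches the size of cps):

// Sketch: x-advance for drawing character `cur` after `prev`.
int advanceWithKerning(int prev, int cur) {
    int left  = prev - NUM_CHARACTER_TO_START;
    int right = cur  - NUM_CHARACTER_TO_START;
    int kern  = 0;
    if (left >= 0 && left < FONT_NUM_CHARS && right >= 0 && right < FONT_NUM_CHARS) {
        kern = kerningPairs[right][left]; // already in pixels (delta.x >> 6 above)
    }
    return cps[right].setWidth + kern;
}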
Exemplo n.º 19
0
void MenuAnimationEnter::draw() {
    
    if (frameNumberAtStartOfAnimation == 0) {
        frameNumberAtStartOfAnimation = ofGetFrameNum();
    }

    int framesIntoTheAnimation = ofGetFrameNum() - frameNumberAtStartOfAnimation;

	float t = ofClamp(framesIntoTheAnimation / 60., 0, 1);
	
	// all easing functions take input 0. ~ 1. and output 0. ~ 1.
	float horizSlide = 60 * ofxEasingFunc::Cubic::easeInOut(t);
	
    if (!backGroundImage.isAllocated()){
        if (getSharedData().isIpad) {
            if (getSharedData().isRetina) {
                backGroundImage.loadImage("Default-Portrait@2x~ipad.png");
                ofLog(OF_LOG_NOTICE, "loaded image " + ofToString(backGroundImage.isAllocated()));
            }
        }
    }
    
    ofBackground(0, 255, 0);
	//ofSetColor(255, 0, 0);
    backGroundImage.draw(0,0);
    
    //enable alpha blending (the additive GL_ONE/GL_ONE mode below is left disabled)
	ofEnableAlphaBlending();
	//glBlendFunc(GL_ONE, GL_ONE);
    
    
    // debug draw of tight bounding box around the title text
    //ofSetColor(125,0,0,179);
    //ofRect(15, 270 - ascender, appTitleStringBoundingBox.width, appTitleStringBoundingBox.height);
    
    ofSetColor(0,0,0,179);

    // Put the info from the global state into local variables.
    // We are not going to change these values so there WON'T be a need to
    // put these back into the global state later.
    int marginSize = getSharedData().marginSize;
    int ascender = getSharedData().ascender;
    int upperVerticalMarginSize = getSharedData().upperVerticalMarginSize;
    int totalVerticalMarginSize = getSharedData().totalVerticalMarginSize;
    ofRectangle appTitleStringBoundingBox =  getSharedData().appTitleStringBoundingBox;

    
    ofRect(15 - marginSize, 270 - ascender - upperVerticalMarginSize, appTitleStringBoundingBox.width + 2*marginSize, appTitleStringBoundingBox.height + totalVerticalMarginSize);
    
    
    ofDisableAlphaBlending();
    ofSetColor(255);
    // text origin is on the baseline. So for example
    // all the descenders (like in "g" and "j") will be BELOW it.
	getSharedData().testFont.drawString(getSharedData().appTitleString, 15 + horizSlide, 270);
    
    if (framesIntoTheAnimation > 60){
        changeState("firstScreenState");
    }

    
}
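Because the slide above is frame-counted, its real-world speed follows the frame rate. A time-based variant (a sketch using ofGetElapsedTimef(), with startTime as a hypothetical float member initialised to -1) pins the animation to one second regardless of fps:

// Sketch: drive the same easing from wall-clock time instead of frames.
if (startTime < 0) {
    startTime = ofGetElapsedTimef();          // first draw of this state
}
float t = ofClamp(ofGetElapsedTimef() - startTime, 0, 1); // one-second ramp
float horizSlide = 60 * ofxEasingFunc::Cubic::easeInOut(t);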
Exemplo n.º 20
0
//--------------------------------------------------------------
void ofxMtlMapping2D::mousePressed(int x, int y, int button)
{
    if (ofxMtlMapping2DControls::mapping2DControls()->isHit(x, y))
        return;
    
    if(!ofxMtlMapping2DControls::mapping2DControls()->editShapes())
        return;
    
    
    // ----
    // A vertex has been selected
    if (ofxMtlMapping2DVertex::activeVertex || button == 2) {
      return;
    }
    
    // ----
    // Select an existing shape
    list<ofxMtlMapping2DShape*>::iterator it;
    for (it=ofxMtlMapping2DShapes::pmShapes.begin(); it!=ofxMtlMapping2DShapes::pmShapes.end(); it++) {
        ofxMtlMapping2DShape* shape = *it;
        bool grabbedOne = false;
        if(ofxMtlMapping2DControls::mapping2DControls()->mappingMode() == MAPPING_MODE_OUTPUT) {
            if(shape->hitTest(x, y)) {
                grabbedOne = true;
                shape->enable();
            }
        } else if (ofxMtlMapping2DControls::mapping2DControls()->mappingMode() == MAPPING_MODE_INPUT) {
            if (shape->inputPolygon && shape->shapeType != MAPPING_2D_SHAPE_MASK) { // require the pointer before dereferencing; masks have no input polygon
                if(shape->inputPolygon->hitTest(x, y)) {
                    grabbedOne = true;
                    shape->inputPolygon->enable();
                }
            }
        }
        
        if(grabbedOne) {
            // Put active shape at the top of the list
            ofxMtlMapping2DShapes::pmShapes.push_front(shape);
            ofxMtlMapping2DShapes::pmShapes.erase(it);
            
            return;
        }
    }
    
    // ----
    if(ofxMtlMapping2DSettings::kIsManuallyAddingDeletingVertexEnabled && ofxMtlMapping2DControls::mapping2DControls()->mappingMode() == MAPPING_MODE_OUTPUT) {
        // Add vertex to the selected shape
        if(ofxMtlMapping2DShape::activeShape) {
            // Only if the shape is a Mask
            if (ofxMtlMapping2DShape::activeShape->shapeType == MAPPING_2D_SHAPE_MASK) {
                ofxMtlMapping2DShape* shape = ofxMtlMapping2DShape::activeShape;
                ofLog(OF_LOG_NOTICE, "Add vertex to shape %i", shape->shapeId);
                shape->addPoint(x, y);
            }
        }
    }
    
}
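The grab-and-reorder above re-inserts the pointer at the front and erases the old node; std::list can move the node in place instead. An equivalent sketch for that branch using splice:

// Equivalent reordering: splice moves the existing node to the front
// without copying; `it` is consumed, so return immediately as above.
if (grabbedOne) {
    ofxMtlMapping2DShapes::pmShapes.splice(
        ofxMtlMapping2DShapes::pmShapes.begin(),
        ofxMtlMapping2DShapes::pmShapes, it);
    return;
}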
Exemplo n.º 21
0
void ofCairoRenderer::draw(ofMesh & vertexData, ofPolyRenderMode mode, bool useColors, bool useTextures, bool useNormals){
    if(useColors || useTextures || useNormals){
        ofLog(OF_LOG_WARNING,"Cairo rendering for meshes doesn't support colors, textures, or normals. drawing wireframe...");
    }
	draw(vertexData,false,false,false);
}
Exemplo n.º 22
0
//--------------------------------------------------------------
void ofxMtlMapping2D::loadShapesList()
{
    // UI
    ofxMtlMapping2DControls::mapping2DControls()->clearShapesList();
    
    // Delete everything
    while(!ofxMtlMapping2DShapes::pmShapes.empty()) {
        delete ofxMtlMapping2DShapes::pmShapes.back();
        ofxMtlMapping2DShapes::pmShapes.pop_back();
    }
    ofxMtlMapping2DShape::resetActiveShapeVars();
    
    
    //LOAD XML
    // ----
	//the string is printed at the top of the app
	//to give the user some feedback
	string feedBackMessage = "loading " + _mappingXmlFilePath;
	ofLog(OF_LOG_NOTICE, "Status > " + feedBackMessage);
    
	//we load our settings file
	//if it doesn't exist we can still make one
	//by hitting the 's' key
	if( _shapesListXML.loadFile(_mappingXmlFilePath) ){
		feedBackMessage = _mappingXmlFilePath + " loaded!";
	}else{
		feedBackMessage = "unable to load " + _mappingXmlFilePath + " check data/ folder";
	}
    ofLog(OF_LOG_NOTICE, "Status > " + feedBackMessage);
    
    
    int shapeId = -1;
    
    // ----
	//this is a more advanced use of ofXMLSettings
	//we are going to be reading multiple tags with the same name
	
	//lets see how many <root> </root> tags there are in the xml file
	int numRootTags = _shapesListXML.getNumTags("root");
	int numShapeTags = 0;
	
	//if there is at least one <root> tag we can read the list of shapes
	//and rebuild each one from its stored settings
	if(numRootTags > 0){
		//we push into the last <root> tag
		//this temporarily treats the tag as
		//the document root.
		_shapesListXML.pushTag("root", 0);
		
		//we see how many shapes we have stored in <shape> tags
		numShapeTags = _shapesListXML.getNumTags("shape");
        ofLog(OF_LOG_NOTICE, "Status > numShapeTags :: " + ofToString(numShapeTags));
		
		if(numShapeTags > 0){			
			for(int i = 0; i < numShapeTags; i++){
				ofxMtlMapping2DShape* newShape;
				
				shapeId = _shapesListXML.getAttribute("shape", "id", 0, i);
				
				_shapesListXML.pushTag("shape", i);
				
                //SHAPES SETTINGS
                int numShapeSettingTags = _shapesListXML.getNumTags("setting");
                
                string shapeType = _shapesListXML.getValue("setting", "nan", 0);

                if (shapeType == "quad") {
                    newShape = new ofxMtlMapping2DQuad();
                    newShape->shapeType = MAPPING_2D_SHAPE_QUAD;
                } else if (shapeType == "triangle") {
                    newShape = new ofxMtlMapping2DTriangle();
                    newShape->shapeType = MAPPING_2D_SHAPE_TRIANGLE;
                } else if (shapeType == "mask") {
                    newShape = new ofxMtlMapping2DMask();
                    newShape->shapeType = MAPPING_2D_SHAPE_MASK;
                } else {
                    newShape = new ofxMtlMapping2DQuad();
                    newShape->shapeType = MAPPING_2D_SHAPE_QUAD;
                }
                
                if(numShapeSettingTags > 0) {
                    for(int j = 0; j < numShapeSettingTags; j++){
                        string key = _shapesListXML.getAttribute("setting", "key", "nc", j); 
                        string value = _shapesListXML.getValue("setting", "", j);
                        newShape->shapeSettings[key] = value;                        
                    }
                }	
				
                //OUTPUT VERTICES
                _shapesListXML.pushTag("outputVertices", 0);
                int numVertexItemTags = _shapesListXML.getNumTags("vertex");
                for (int j = 0; j < numVertexItemTags; j++) {
                    int x = _shapesListXML.getAttribute("vertex", "x", 0, j); 
                    int y = _shapesListXML.getAttribute("vertex", "y", 0, j);
                    
                    //Create a new vertex
                    ofxMtlMapping2DVertex* newVertex = new ofxMtlMapping2DVertex();
                    newVertex->init(x-newVertex->width/2, y-newVertex->height/2);
                    newShape->vertices.push_back(newVertex);
                }
                _shapesListXML.popTag();
                
                
                if(newShape->shapeType != MAPPING_2D_SHAPE_MASK) {
                    //INPUT QUADS
                    _shapesListXML.pushTag("inputPolygon", 0);

                    //Create a new vertex
                    newShape->inputPolygon = new ofxMtlMapping2DInput();
                
                    //INPUT VERTICES
                    numVertexItemTags = _shapesListXML.getNumTags("vertex");
                    for (int k = 0; k < numVertexItemTags; k++) {
                        int x = _shapesListXML.getAttribute("vertex", "x", 0, k); 
                        int y = _shapesListXML.getAttribute("vertex", "y", 0, k);
                        
                        //Create a new vertex
                        ofxMtlMapping2DVertex* newVertex = new ofxMtlMapping2DVertex();
                        newVertex->init(x-newVertex->width/2, y-newVertex->height/2);
                        newVertex->isDefiningTectureCoord = true;
                        newShape->inputPolygon->vertices.push_back(newVertex);
                    }
                    
                    newShape->inputPolygon->init(shapeId);
                    _shapesListXML.popTag();
                }
                
                newShape->init(shapeId);
                ofxMtlMapping2DShapes::pmShapes.push_front(newShape);
                
                ofxMtlMapping2DControls::mapping2DControls()->addShapeToList(shapeId, newShape->shapeType);
				
				_shapesListXML.popTag();
				
			}
		}
		
		//this pops us out of the <root> tag
		//sets the root back to the xml document
		_shapesListXML.popTag();
	}
    
    ofxMtlMapping2DShape::nextShapeId = shapeId;
}
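Read back from the parsing above, the file the loader expects has roughly this shape. An illustrative reconstruction (the value of the first <setting> selects the shape type; coordinates are arbitrary), not a file shipped with the addon:

<root>
    <shape id="0">
        <setting key="type">quad</setting>
        <outputVertices>
            <vertex x="100" y="100" />
            <vertex x="500" y="300" />
        </outputVertices>
        <inputPolygon>
            <vertex x="0" y="0" />
            <vertex x="640" y="480" />
        </inputPolygon>
    </shape>
</root>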
Exemplo n.º 23
0
//--------------------------------------------------------------
void ofApp::setup(){
	ofSetWindowTitle("Story Teller");
	//Software2552::SoundIn::setup();// move to timeline or scene
	//Software2552::SoundOut::setup();// move to timeline or scene

	ofSetLogLevel(OF_LOG_NOTICE);//OF_LOG_VERBOSE
	timeline.setup();
	timeline.readScript("json3.json");
	timeline.start();
	//paragraphs.build(ofGetWidth());
	
	//ofSetFullscreen(true);
	//ofSetFrameRate(60);
	//ofBackgroundHex(0x00000);
	//ofSetLogLevel(OF_LOG_NOTICE);//ofSetLogLevel(OF_LOG_VERBOSE);
	
	// we add this listener before setting up so the initial circle resolution is correct
#if 0
	circleResolution.addListener(this, &ofApp::circleResolutionChanged);
	ringButton.addListener(this, &ofApp::ringButtonPressed);

	gui.setup(); // most of the time you don't need a name
	gui.add(filled.setup("fill", true));
	gui.add(radius.setup("radius", 140, 10, 300));
	gui.add(center.setup("center", ofVec2f(ofGetWidth()*.5, ofGetHeight()*.5), ofVec2f(0, 0), ofVec2f(ofGetWidth(), ofGetHeight())));
	gui.add(color.setup("color", ofColor(100, 100, 140), ofColor(0, 0), ofColor(255, 255)));
	gui.add(circleResolution.setup("circle res", 5, 3, 90));
	gui.add(twoCircles.setup("two circles"));
	gui.add(ringButton.setup("ring"));
	gui.add(screenSize.setup("screen size", ofToString(ofGetWidth()) + "x" + ofToString(ofGetHeight())));

	bHide = false;

	ring.load("ring.wav");
#endif // 0

	years = 0;
	return;
	// show images, then wash them away

#if 0
	images.push_back(ofImage("C:\\Users\\mark\\Pictures\\maps\\Res37.jpe"));
	images.push_back(ofImage("C:\\Users\\mark\\Pictures\\maps\\Res37-2.jpe"));
	images.push_back(ofImage("C:\\Users\\mark\\Pictures\\maps\\Res37-2.jpe"));
	franklinBook14.load("frabk.ttf", 14, true, true, true);
	franklinBook14.setLineHeight(18.0f);
	franklinBook14.setLetterSpacing(1.037);
	ofDisableDepthTest();
	return;
	backgroundImage.allocate(ofGetWidth(), ofGetHeight(), OF_IMAGE_COLOR);
	backgroundImage.loadImage("C:\\Users\\mark\\Documents\\iclone\\images\\robot.jpg");
	// read the directory for the images
	// we know that they are named in seq
	ofDirectory dir;

	int nFiles = dir.listDir("C:\\Users\\mark\\Documents\\iclone\\images");
	if (nFiles) {

		for (int i = 0; i < dir.size(); i++) {

			// add the image to the vector
			string filePath = dir.getPath(i);
			images.push_back(ofImage());
			images.back().load(filePath);

		}

	}
	else ofLog(OF_LOG_WARNING) << "Could not find folder";

	// this toggle will tell the sequence
	// to be independent of the app fps
	bFrameIndependent = true;

	// this will set the speed to play
	// the animation back; we set the
	// default to 24fps
	sequenceFPS = 24;

	// set the app fps
	appFPS = 18;
	ofSetFrameRate(appFPS);

	myPlayer.loadMovie("C:\\Users\\mark\\Documents\\iclone\\background2.mp4");
	myPlayer.setLoopState(OF_LOOP_NORMAL);
	myPlayer.play();

	robot.loadMovie("C:\\Users\\mark\\Documents\\iclone\\images\\robot2.mp4");
	robot.setLoopState(OF_LOOP_NORMAL);
	robot.play();


	pathLines.setMode(OF_PRIMITIVE_LINE_STRIP);
	ofDisableArbTex(); // we need GL_TEXTURE_2D for our models coords.
	ofEnableDepthTest();
	light.enable();
	cam.setPosition(0, 0, 100);

	headTrackedCamera.setNearClip(0.01f);
	headTrackedCamera.setFarClip(1000.0f);

	//defining the real world coordinates of the window which is being headtracked is important for visual accuracy
	windowWidth = 0.3f;
	windowHeight = 0.2f;

	windowTopLeft = ofVec3f(-windowWidth / 2.0f,
		+windowHeight / 2.0f,
		0.0f);
	windowBottomLeft = ofVec3f(-windowWidth / 2.0f,
		-windowHeight / 2.0f,
		0.0f);
	windowBottomRight = ofVec3f(+windowWidth / 2.0f,
		-windowHeight / 2.0f,
		0.0f);

	//we use this constant since we're using a really hacky headtracking in this example
	//if you use something that can properly locate the head in 3d (like a kinect), you don't need this fudge factor
	viewerDistance = 0.4f;


	//From2552Software::Sound sound;
	//sound.test();

	myKinect.open();
	//audio.setup(&myKinect);
	//audio.setup(&myKinect);
	//audio.setup(&myKinect);

	//faces.setup(&myKinect);
	bodies.useFaces();
	bodies.setup(&myKinect);

	//ofSetWindowShape(640 * 2, 480 * 2);
	ofDisableDepthTest(); // draws in order vs z order

#endif // 0

}
Exemplo n.º 24
0
//--------------------------------------------------------------
void ofxMtlMapping2D::saveShapesList()
{
    
    list<ofxMtlMapping2DShape*> pmShapesCopy;
    pmShapesCopy.resize (ofxMtlMapping2DShapes::pmShapes.size());
    copy (ofxMtlMapping2DShapes::pmShapes.begin(), ofxMtlMapping2DShapes::pmShapes.end(), pmShapesCopy.begin());
    pmShapesCopy.sort(Comparator());

    ofxXmlSettings newShapesListXML;
	int shapeCounter = 0;
	
	newShapesListXML.addTag("root");
	newShapesListXML.pushTag("root", 0);
	
	//Create/Update XML
    list<ofxMtlMapping2DShape*>::reverse_iterator it;
    for (it=pmShapesCopy.rbegin(); it!=pmShapesCopy.rend(); it++) {
        ofxMtlMapping2DShape* shape = *it;
		
		int tagNum = newShapesListXML.addTag("shape");
		newShapesListXML.addAttribute("shape", "id", shape->shapeId, tagNum);
		newShapesListXML.pushTag("shape", tagNum);
		
        //Shape settings
        map<string,string>::iterator itShape;
        for ( itShape=shape->shapeSettings.begin() ; itShape != shape->shapeSettings.end(); itShape++ ) {
            int tagNum = newShapesListXML.addTag("setting");            
            newShapesListXML.addAttribute("setting", "key", (*itShape).first, tagNum);
            newShapesListXML.setValue("setting", (*itShape).second, tagNum);
        }
		
        //Output Vertex/Vertices
        tagNum = newShapesListXML.addTag("outputVertices");
        newShapesListXML.pushTag("outputVertices", tagNum);
        list<ofxMtlMapping2DVertex*>::iterator itVertex;
        for (itVertex=shape->vertices.begin(); itVertex!=shape->vertices.end(); itVertex++) {
            ofxMtlMapping2DVertex* vertex = *itVertex;
            
            int tagNum = newShapesListXML.addTag("vertex");
            newShapesListXML.addAttribute("vertex", "x", (int)vertex->center.x, tagNum);
            newShapesListXML.addAttribute("vertex", "y", (int)vertex->center.y, tagNum);
        }
        newShapesListXML.popTag();
        
        if(shape->shapeType != MAPPING_2D_SHAPE_MASK) {
            //Input Quads
            tagNum = newShapesListXML.addTag("inputPolygon");
            newShapesListXML.pushTag("inputPolygon", tagNum);
            //Vertices
            for (itVertex=shape->inputPolygon->vertices.begin(); itVertex!=shape->inputPolygon->vertices.end(); itVertex++) {
                ofxMtlMapping2DVertex* vertex = *itVertex;
                
                int tagNum = newShapesListXML.addTag("vertex");
                newShapesListXML.addAttribute("vertex", "x", (int)vertex->center.x, tagNum);
                newShapesListXML.addAttribute("vertex", "y", (int)vertex->center.y, tagNum);
            }
            newShapesListXML.popTag();
		}
		newShapesListXML.popTag();
		
		shapeCounter++;
	}
	
	//Save to file
	newShapesListXML.saveFile(_mappingXmlFilePath);
    ofLog(OF_LOG_NOTICE, "Status > settings saved to xml!");

}
Exemplo n.º 25
0
void COMXAudioCodecOMX::BuildChannelMap()
{
	if (m_channels == m_pCodecContext->channels && m_layout == m_pCodecContext->channel_layout)
	{
		return;    //nothing to do here
	}

	m_channels = m_pCodecContext->channels;
	m_layout   = m_pCodecContext->channel_layout;

	int index = 0;
	if(m_pCodecContext->codec_id == CODEC_ID_AAC && m_pCodecContext->channels == 3)
	{
		m_channelMap[index++] = PCM_FRONT_CENTER;
		m_channelMap[index++] = PCM_FRONT_LEFT;
		m_channelMap[index++] = PCM_FRONT_RIGHT;
	}
	else if(m_pCodecContext->codec_id == CODEC_ID_AAC && m_pCodecContext->channels == 4)
	{
		m_channelMap[index++] = PCM_FRONT_CENTER;
		m_channelMap[index++] = PCM_FRONT_LEFT;
		m_channelMap[index++] = PCM_FRONT_RIGHT;
		m_channelMap[index++] = PCM_BACK_CENTER;
	}
	else if(m_pCodecContext->codec_id == CODEC_ID_AAC && m_pCodecContext->channels == 5)
	{
		m_channelMap[index++] = PCM_FRONT_CENTER;
		m_channelMap[index++] = PCM_FRONT_LEFT;
		m_channelMap[index++] = PCM_FRONT_RIGHT;
		m_channelMap[index++] = PCM_BACK_LEFT;
		m_channelMap[index++] = PCM_BACK_RIGHT;
	}
	else if(m_pCodecContext->codec_id == CODEC_ID_AAC && m_pCodecContext->channels == 6)
	{
		m_channelMap[index++] = PCM_FRONT_CENTER;
		m_channelMap[index++] = PCM_FRONT_LEFT;
		m_channelMap[index++] = PCM_FRONT_RIGHT;
		m_channelMap[index++] = PCM_BACK_LEFT;
		m_channelMap[index++] = PCM_BACK_RIGHT;
		m_channelMap[index++] = PCM_LOW_FREQUENCY;
	}
	else if(m_pCodecContext->codec_id == CODEC_ID_AAC && m_pCodecContext->channels == 7)
	{
		m_channelMap[index++] = PCM_FRONT_CENTER;
		m_channelMap[index++] = PCM_FRONT_LEFT;
		m_channelMap[index++] = PCM_FRONT_RIGHT;
		m_channelMap[index++] = PCM_BACK_LEFT;
		m_channelMap[index++] = PCM_BACK_RIGHT;
		m_channelMap[index++] = PCM_BACK_CENTER;
		m_channelMap[index++] = PCM_LOW_FREQUENCY;
	}
	else if(m_pCodecContext->codec_id == CODEC_ID_AAC && m_pCodecContext->channels == 8)
	{
		m_channelMap[index++] = PCM_FRONT_CENTER;
		m_channelMap[index++] = PCM_SIDE_LEFT;
		m_channelMap[index++] = PCM_SIDE_RIGHT;
		m_channelMap[index++] = PCM_FRONT_LEFT;
		m_channelMap[index++] = PCM_FRONT_RIGHT;
		m_channelMap[index++] = PCM_BACK_LEFT;
		m_channelMap[index++] = PCM_BACK_RIGHT;
		m_channelMap[index++] = PCM_LOW_FREQUENCY;
	}
	else
	{

		int64_t layout;
		int bits = count_bits(m_pCodecContext->channel_layout);
		if (bits == m_pCodecContext->channels)
		{
			layout = m_pCodecContext->channel_layout;
		}
		else
		{
			ofLog(OF_LOG_ERROR, "COMXAudioCodecOMX::GetChannelMap - FFmpeg reported %d channels, but the layout contains %d ignoring", m_pCodecContext->channels, bits);
			layout = av_get_default_channel_layout(m_pCodecContext->channels);
		}

		if (layout & AV_CH_FRONT_LEFT           )
		{
			m_channelMap[index++] = PCM_FRONT_LEFT           ;
		}
		if (layout & AV_CH_FRONT_RIGHT          )
		{
			m_channelMap[index++] = PCM_FRONT_RIGHT          ;
		}
		if (layout & AV_CH_FRONT_CENTER         )
		{
			m_channelMap[index++] = PCM_FRONT_CENTER         ;
		}
		if (layout & AV_CH_LOW_FREQUENCY        )
		{
			m_channelMap[index++] = PCM_LOW_FREQUENCY        ;
		}
		if (layout & AV_CH_BACK_LEFT            )
		{
			m_channelMap[index++] = PCM_BACK_LEFT            ;
		}
		if (layout & AV_CH_BACK_RIGHT           )
		{
			m_channelMap[index++] = PCM_BACK_RIGHT           ;
		}
		if (layout & AV_CH_FRONT_LEFT_OF_CENTER )
		{
			m_channelMap[index++] = PCM_FRONT_LEFT_OF_CENTER ;
		}
		if (layout & AV_CH_FRONT_RIGHT_OF_CENTER)
		{
			m_channelMap[index++] = PCM_FRONT_RIGHT_OF_CENTER;
		}
		if (layout & AV_CH_BACK_CENTER          )
		{
			m_channelMap[index++] = PCM_BACK_CENTER          ;
		}
		if (layout & AV_CH_SIDE_LEFT            )
		{
			m_channelMap[index++] = PCM_SIDE_LEFT            ;
		}
		if (layout & AV_CH_SIDE_RIGHT           )
		{
			m_channelMap[index++] = PCM_SIDE_RIGHT           ;
		}
		if (layout & AV_CH_TOP_CENTER           )
		{
			m_channelMap[index++] = PCM_TOP_CENTER           ;
		}
		if (layout & AV_CH_TOP_FRONT_LEFT       )
		{
			m_channelMap[index++] = PCM_TOP_FRONT_LEFT       ;
		}
		if (layout & AV_CH_TOP_FRONT_CENTER     )
		{
			m_channelMap[index++] = PCM_TOP_FRONT_CENTER     ;
		}
		if (layout & AV_CH_TOP_FRONT_RIGHT      )
		{
			m_channelMap[index++] = PCM_TOP_FRONT_RIGHT      ;
		}
		if (layout & AV_CH_TOP_BACK_LEFT        )
		{
			m_channelMap[index++] = PCM_TOP_BACK_LEFT        ;
		}
		if (layout & AV_CH_TOP_BACK_CENTER      )
		{
			m_channelMap[index++] = PCM_TOP_BACK_CENTER      ;
		}
		if (layout & AV_CH_TOP_BACK_RIGHT       )
		{
			m_channelMap[index++] = PCM_TOP_BACK_RIGHT       ;
		}
	}

	//terminate the channel map
	m_channelMap[index] = PCM_INVALID;
	if(m_pCodecContext->channels == 6)
	{
		m_channelMap[6] = PCM_INVALID;
		m_channelMap[7] = PCM_INVALID;
		m_channelMap[8] = PCM_INVALID;
	}
}
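count_bits() above only needs the population count of the layout mask; a portable sketch of such a helper (hypothetical, since the original is defined elsewhere in the codebase):

// Portable population count for a channel-layout bitmask.
static int count_bits_sketch(int64_t value) {
    int count = 0;
    for (uint64_t v = (uint64_t)value; v != 0; v >>= 1) {
        count += (int)(v & 1);
    }
    return count;
}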
Exemplo n.º 26
0
// -----------------------------------------------------------------------------
void ofxRtMidiIn::ignoreTypes(bool midiSysex, bool midiTiming, bool midiSense) {
	midiIn.ignoreTypes(midiSysex, midiTiming, midiSense);
	ofLog(OF_LOG_VERBOSE, "ofxMidiIn: ignore types on %s: sysex: %d timing: %d sense: %d",
			portName.c_str(), midiSysex, midiTiming, midiSense);
}
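Callers use these flags to opt back in to message classes that RtMidi filters out by default (sysex, timing, and active sensing). A usage sketch, assuming an ofxMidiIn instance named midiIn:

// Receive sysex and timing messages; keep filtering active sensing.
midiIn.ignoreTypes(false /*sysex*/, false /*timing*/, true /*sense*/);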
Exemplo n.º 27
0
static void get_video_devices (ofGstCamData & cam_data)
{
#ifdef TARGET_LINUX
	int fd, ok;

	struct udev * my_udev;
	struct udev_enumerate * enumerate;
	struct udev_list_entry * list;
	struct udev_list_entry * entry;

	my_udev = udev_new();
	enumerate = udev_enumerate_new(my_udev);
	udev_enumerate_scan_devices(enumerate);
	list = udev_enumerate_get_list_entry(enumerate);

	/*udev_list_entry_foreach(entry,list){
		const char * name = udev_list_entry_get_name(entry);
		struct udev_device * device = udev_device_new_from_syspath(my_udev, name);
		const char * subsystem = udev_device_get_subsystem(device);
		if(strcmp(subsystem,"video4linux")==0){
			num_devices++;
		}
	}*/

	ofLog (OF_LOG_NOTICE, "Probing devices with udev...");

	/* Initialize webcam structures */
	udev_list_entry_foreach(entry,list){
		const char * name = udev_list_entry_get_name(entry);
		struct udev_device * device = udev_device_new_from_syspath(my_udev, name);
		string subsystem = udev_device_get_subsystem(device);

		if(subsystem != "video4linux") continue;
		const char  *gstreamer_src, *product_name;
		struct v4l2_capability  v2cap;
		struct video_capability v1cap;
		string vendor_id;
		string product_id;

		const char * dev_node = udev_device_get_devnode(device);
		struct udev_list_entry * properties = udev_device_get_properties_list_entry(device);
		struct udev_list_entry * property;
		udev_list_entry_foreach(property,properties){
			const char * name = udev_list_entry_get_name(property);

			if(strcmp(name,"ID_VENDOR_ID")==0){
				vendor_id = udev_list_entry_get_value(property);
			}

			if(strcmp(name,"ID_MODEL_ID")==0){
				product_id = udev_list_entry_get_value(property);
			}

		}


		ofLog (OF_LOG_NOTICE, "Found device " + vendor_id + ":" + product_id + ", getting capabilities...");

		/* vbi devices support capture capability too, but cannot be used,
		 * so detect them by device name */
		if (strstr (dev_node, "vbi"))
		{
			ofLog (OF_LOG_WARNING, "Skipping vbi device: %s", dev_node);
			continue;
		}


		if ((fd = open (dev_node, O_RDONLY | O_NONBLOCK)) < 0)
		{
			ofLog (OF_LOG_WARNING, "Failed to open %s: %s", dev_node, strerror (errno));
			continue;
		}

		ok = ioctl (fd, VIDIOC_QUERYCAP, &v2cap);
		if (ok < 0)
		{
			ok = ioctl (fd, VIDIOCGCAP, &v1cap);
			if (ok < 0)
			{
				ofLog (OF_LOG_WARNING, "Error while probing v4l capabilities for %s: %s",
						dev_node, strerror (errno));
				close (fd);
				continue;
			}
			ofLog (OF_LOG_NOTICE,"Detected v4l device: %s", v1cap.name);
			ofLog (OF_LOG_NOTICE,"Device type: %d", v1cap.type);
			gstreamer_src = "v4lsrc";
			product_name  = v1cap.name;
		}
		else
		{
			guint cap = v2cap.capabilities;
			ofLog (OF_LOG_NOTICE,"Detected v4l2 device: %s", v2cap.card);
			ofLog (OF_LOG_NOTICE,"Driver: %s, version: %d", v2cap.driver, v2cap.version);
			/* g_print ("Bus info: %s\n", v2cap.bus_info); */ /* Doesn't seem anything useful */
			ofLog (OF_LOG_NOTICE,"Capabilities: 0x%08X", v2cap.capabilities);
			if (!(cap & V4L2_CAP_VIDEO_CAPTURE))
			{
			  ofLog (OF_LOG_NOTICE,"Device %s seems to not have the capture capability, (radio tuner?)\n"
					 "Removing it from device list.", dev_node);
			close (fd);
			continue;
			}
			gstreamer_src = "v4l2src";
			product_name  = (char *) v2cap.card;
		}


		ofGstDevice gst_device;
		gst_device.video_device = dev_node;
		gst_device.gstreamer_src = gstreamer_src;
		gst_device.product_name = product_name;
		cam_data.webcam_devices.push_back(gst_device);
		/*cam_data.webcam_devices[cam_data.num_webcam_devices].video_device      = dev_node;
		cam_data.webcam_devices[cam_data.num_webcam_devices].gstreamer_src     = gstreamer_src;
		cam_data.webcam_devices[cam_data.num_webcam_devices].product_name      = product_name;
		cam_data.webcam_devices[cam_data.num_webcam_devices].num_video_formats = 0;
		cam_data.webcam_devices[cam_data.num_webcam_devices].supported_resolutions =
		  g_hash_table_new_full (g_str_hash, g_str_equal, g_free, NULL);
		cam_data.num_webcam_devices++;*/

		close (fd);
	}

	cam_data.bInited=true;
#endif
}
Exemplo n.º 28
0
//--------------------------------------------------------------
void testApp::setup(){
	ofEnableAlphaBlending();
    ofxXmlSettings xml  = stateMachine.getSharedData().xml;
    if(xml.loadFile(settingFileName))
	{
		if(xml.pushTag("DATA"))
		{
			stateMachine.getSharedData().counter = xml.getValue("COUNTER", 0);
			stateMachine.getSharedData().path_to_save = xml.getValue("CAPTURE_PATH", "./captures");
			stateMachine.getSharedData().numDigi = xml.getValue("DIGI", 5);
			ofDirectory dir;
			if(dir.listDir(stateMachine.getSharedData().path_to_save)<1)
			{
				dir.createDirectory(stateMachine.getSharedData().path_to_save);
			}
			
			xml.popTag();
		}
	}
	else
	{
		ofLog(OF_LOG_ERROR,"Faile to load "+ settingFileName);
	}
    
	// setup shared data
    ofxControlPanel::setBackgroundColor(simpleColor(30, 30, 60, 100));
	ofxControlPanel::setTextColor(simpleColor(240, 50, 50, 255));
    stateMachine.getSharedData().panel.setup(ofGetWidth(),ofGetHeight());
	stateMachine.getSharedData().panel.loadFont("MONACO.TTF", 8);		
	stateMachine.getSharedData().panel.addPanel("General", 4,false);
    
	ofxControlPanel::setBackgroundColor(simpleColor(60, 30, 30, 100));	
	stateMachine.getSharedData().panel.addPanel("FaceTracking", 5, false);
    ofxControlPanel::setBackgroundColor(simpleColor(60, 30, 30, 100));	
	stateMachine.getSharedData().panel.addPanel("FaceTracking0", 4, false);
    ofxControlPanel::setBackgroundColor(simpleColor(60, 30, 30, 100));	
	stateMachine.getSharedData().panel.addPanel("FaceTracking1", 4, false);
    
	ofxControlPanel::setBackgroundColor(simpleColor(70, 70, 30, 100));	
	stateMachine.getSharedData().panel.addPanel("FaceMapEdit", 4, false);
    
	ofxControlPanel::setBackgroundColor(simpleColor(30, 30, 30, 100));	
    //some dummy vars we will update to show the variable lister object
	appFrameRate	= ofGetFrameRate();
    stateMachine.getSharedData().panel.setWhichPanel("General");
    stateMachine.getSharedData().panel.setWhichColumn(0);
	stateMachine.getSharedData().panel.addChartPlotter("some chart", guiStatVarPointer("app fps", &appFrameRate, GUI_VAR_FLOAT, true, 2), 200, 50, 200, 5, 80);
    vector<string> loglevel;
    loglevel.push_back("OF_LOG_VERBOSE");
    loglevel.push_back("OF_LOG_NOTICE");
    loglevel.push_back("OF_LOG_WARNING");
    loglevel.push_back("OF_LOG_ERROR");
    loglevel.push_back("OF_LOG_FATAL_ERROR");
    loglevel.push_back("OF_LOG_SILENT");
    stateMachine.getSharedData().panel.addTextDropDown("LogLevel","LogLevel", 0, loglevel);
	
	// initialise state machine
	stateMachine.addState(new IndexState());
	stateMachine.addState(new SelectPlayerState());
	stateMachine.addState(new PlayState());
	stateMachine.addState(new EditState());
	stateMachine.changeState("PlayState");
    stateMachine.getSharedData().panel.loadSettings("settings.xml");
    stateMachine.getSharedData().panel.hide();
    stateMachine.getSharedData().numPlayer = 2;
    
}
Exemplo n.º 29
0
bool ofGstUtils::allocate(){
	// wait for paused state to query the duration
	if(!bIsStream){
		GstState state = GST_STATE_PAUSED;
		gst_element_get_state(gstPipeline,&state,NULL,2*GST_SECOND);
	}
	if(!bIsCamera){
		GstFormat format=GST_FORMAT_TIME;
		if(!gst_element_query_duration(gstPipeline,&format,&durationNanos))
			ofLog(OF_LOG_WARNING,"GStreamer: cannot query time duration");

		gstData.durationNanos = durationNanos;
		gstData.nFrames		  = 0;
	}

	// query width, height, fps and do data allocation
	if (bIsCamera || (width!=0 && height!=0)) {
		pixels=new unsigned char[width*height*bpp/8];
		gstData.pixels=new unsigned char[width*height*bpp/8];
		memset(pixels,0,width*height*bpp/8);
		memset(gstData.pixels,0,width*height*bpp/8);
		gstData.width = width;
		gstData.height = height;
		gstData.totalsize = 0;
		gstData.lastFrame = 0;
	}else if(gstSink!=NULL && !bIsCustomWithSink){
		if(GstPad* pad = gst_element_get_static_pad(gstSink, "sink")){
			if(gst_video_get_size(GST_PAD(pad), &width, &height)){
				pixels=new unsigned char[width*height*bpp/8];
				gstData.pixels=new unsigned char[width*height*bpp/8];
				memset(pixels,0,width*height*bpp/8);
				memset(gstData.pixels,0,width*height*bpp/8);
				gstData.width = width;
				gstData.height = height;
				gstData.totalsize = 0;
				gstData.lastFrame = 0;
			}else{
				ofLog(OF_LOG_ERROR,"GStreamer: cannot query width and height");
				return false;
			}

			const GValue *framerate;
			framerate = gst_video_frame_rate(pad);
			fps_n=0;
			fps_d=0;
			if(framerate && GST_VALUE_HOLDS_FRACTION (framerate)){
				fps_n = gst_value_get_fraction_numerator (framerate);
				fps_d = gst_value_get_fraction_denominator (framerate);
				gstData.nFrames = (float)(durationNanos / GST_SECOND) * (float)fps_n/(float)fps_d;
				ofLog(OF_LOG_VERBOSE,"ofGstUtils: framerate: %i/%i",fps_n,fps_d);
			}else{
				ofLog(OF_LOG_WARNING,"Gstreamer: cannot get framerate, frame seek won't work");
			}
			gst_object_unref(GST_OBJECT(pad));
		}else{
			ofLog(OF_LOG_ERROR,"GStreamer: cannot get sink pad");
			return false;
		}
	}


	bLoaded = true;
	bHavePixelsChanged = true;
	bStarted = true;
	return bLoaded;
}
bool ofxFlacEncoder::encode(string wavInput, string flacOutput) {
    
    //ofLog(OF_LOG_VERBOSE, "init encoding (device%d)",deviceId);
	FLAC__bool ok = true;
	FLAC__StreamEncoder *encoder = 0;
	FLAC__StreamEncoderInitStatus init_status;
	FILE *fin;
	unsigned sample_rate = 0;
	unsigned channels = 0;
	unsigned bps = 0;
    
	if((fin = fopen(ofToDataPath(wavInput).c_str(), "rb")) == NULL){
		ofLog(OF_LOG_ERROR, "ofxFlacEncoder: error opening %s for input", wavInput.c_str());
		return false;
	}
    
	// read and validate wav header
	if(fread(buffer, 1, 44, fin) != 44 || memcmp(buffer, "RIFF", 4)
       || memcmp(buffer + 8, "WAVEfmt \020\000\000\000\001\000\002\000", 16)
       || memcmp(buffer + 32, "\004\000\020\000data", 8)){
		ofLog(OF_LOG_ERROR,
              "invalid/unsupported WAVE file, only 16bps stereo WAVE in canonical form allowed");
		//fclose(fin);
		//return false;
	}
	// sample rate: header bytes 24..27, little-endian
	sample_rate = ((((((unsigned) buffer[27] << 8) | buffer[26]) << 8) | buffer[25]) << 8)
    | buffer[24];
	channels = 2;
	bps = 16;
	// total samples: data-chunk byte count (bytes 40..43, little-endian) / 4 bytes per stereo 16-bit frame
	total_samples = (((((((unsigned) buffer[43] << 8) | buffer[42]) << 8) | buffer[41]) << 8)
                     | buffer[40]) / 4;
    
	// allocate the encoder
	if((encoder = FLAC__stream_encoder_new()) == NULL){
		ofLog(OF_LOG_ERROR, " allocating encoder\n");
		fclose(fin);
		return false;
	}
    
	ok &= FLAC__stream_encoder_set_verify(encoder, true);
	ok &= FLAC__stream_encoder_set_compression_level(encoder, 5);
	ok &= FLAC__stream_encoder_set_channels(encoder, channels);
	ok &= FLAC__stream_encoder_set_bits_per_sample(encoder, bps);
	ok &= FLAC__stream_encoder_set_sample_rate(encoder, sample_rate);
	ok &= FLAC__stream_encoder_set_total_samples_estimate(encoder, total_samples);
    
	// initialize encoder
	if(ok){
		init_status = FLAC__stream_encoder_init_file(encoder, ofToDataPath(flacOutput).c_str(), NULL, NULL);
		if(init_status != FLAC__STREAM_ENCODER_INIT_STATUS_OK){
			ofLog(OF_LOG_ERROR, "initializing encoder: ");
			ofLog(OF_LOG_ERROR, FLAC__StreamEncoderInitStatusString[init_status]);
			ok = false;
		}
	}
    
	//ofLog(OF_LOG_VERBOSE, "start encoding (device%d)",deviceId);
	/* read blocks of samples from WAVE file and feed to encoder */
	if(ok){
		size_t left = (size_t) total_samples;
		while(ok && left){
			size_t need = (left > READSIZE ? (size_t) READSIZE : (size_t) left);
			if(fread(buffer, channels * (bps / 8), need, fin) != need){
				ofLog(OF_LOG_ERROR, "reading from WAVE file");
				ok = false;
			}else{
				/* convert the packed little-endian 16-bit PCM samples from WAVE into an interleaved FLAC__int32 buffer for libFLAC */
				size_t i;
				for(i = 0; i < need * channels; i++){
					/* inefficient but simple and works on big- or little-endian machines */
					pcm[i] = (FLAC__int32) (((FLAC__int16) (FLAC__int8) buffer[2 * i + 1] << 8)
                                            | (FLAC__int16) buffer[2 * i]);
				}
				/* feed samples to encoder */
				ok = FLAC__stream_encoder_process_interleaved(encoder, pcm, need);
			}
			left -= need;
		}
	}
    
	ok &= FLAC__stream_encoder_finish(encoder);
    
    //	fprintf(stderr, "encoding: %s\n", ok ? "succeeded" : "FAILED");
    //	fprintf(stderr,
    //			"   state: %s\n",
    //			FLAC__StreamEncoderStateString[FLAC__stream_encoder_get_state(encoder)]);
    
	FLAC__stream_encoder_delete(encoder);
	fclose(fin);
    
	return ok;
}
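A usage sketch for the encoder above (paths resolve through ofToDataPath(), so they are relative to the app's data/ folder; file names hypothetical):

ofxFlacEncoder encoder;
if (!encoder.encode("recording.wav", "recording.flac")) {
    ofLogError("ofxFlacEncoder") << "FLAC encode failed";
}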