Example #1
//--------------------------------------------------------------
void ofApp::openFileEvent(int &whichEditor) {
	// received an editor open via CTRL/Super + o
	
	ofLogNotice() << "received open event for editor " << whichEditor
		<< " with filename " << editor.getEditorFilename(whichEditor);
}
//----------------------------------------------------------------
void ofSerial::buildDeviceList(){

	deviceType = "serial";
	devices.clear();

	vector <string> prefixMatch;

	#ifdef TARGET_OSX
		prefixMatch.push_back("cu.");
		prefixMatch.push_back("tty.");
	#endif
	#ifdef TARGET_LINUX
		#ifdef TARGET_RASPBERRY_PI
			prefixMatch.push_back("ttyACM");
		#endif

		prefixMatch.push_back("ttyS");
		prefixMatch.push_back("ttyUSB");
		prefixMatch.push_back("rfc");
	#endif


	#if defined( TARGET_OSX ) || defined( TARGET_LINUX )

	DIR *dir;
	struct dirent *entry;
	dir = opendir("/dev");

	string deviceName	= "";
	int deviceCount		= 0;

	if (dir == NULL){
		ofLogError("ofSerial") << "buildDeviceList(): error listing devices in /dev";
	} else {
		//for each device
		while((entry = readdir(dir)) != NULL){
			deviceName = (char *)entry->d_name;

			//we go through the prefixes
			for(int k = 0; k < (int)prefixMatch.size(); k++){
				//if the device name is longer than the prefix
				if( deviceName.size() > prefixMatch[k].size() ){
					//do they match ?
					if( deviceName.substr(0, prefixMatch[k].size()) == prefixMatch[k].c_str() ){
						devices.push_back(ofSerialDeviceInfo("/dev/"+deviceName, deviceName, deviceCount));
						deviceCount++;
						break;
					}
				}
			}
		}
		closedir(dir);
	}

	#endif

	//---------------------------------------------
	#ifdef TARGET_WIN32
	//---------------------------------------------
	enumerateWin32Ports();
	ofLogNotice("ofSerial") << "found " << nPorts << " devices";
	for (int i = 0; i < nPorts; i++){
		//NOTE: we give the short port name for both as that is what the user should pass and the short name is more friendly
		devices.push_back(ofSerialDeviceInfo(string(portNamesShort[i]), string(portNamesShort[i]), i));
	}
	//---------------------------------------------
	#endif
    //---------------------------------------------

	//here we sort the devices to have the arduino ones first.
	partition(devices.begin(), devices.end(), isDeviceArduino);
	//we are reordering the device ids too!
	for(int k = 0; k < (int)devices.size(); k++){
		devices[k].deviceID = k;
	}

	bHaveEnumeratedDevices = true;
}
//----------------------------------------------------------------
bool ofSerial::setup(string portName, int baud){

	bInited = false;

	//---------------------------------------------
	#if defined( TARGET_OSX ) || defined( TARGET_LINUX )
	//---------------------------------------------

		//lets account for the name being passed in instead of the device path
		if( portName.size() > 5 && portName.substr(0, 5) != "/dev/" ){
			portName = "/dev/" + portName;
		}

	    ofLogNotice("ofSerial") << "opening " << portName << " @ " << baud << " bps";
		fd = open(portName.c_str(), O_RDWR | O_NOCTTY | O_NONBLOCK);
		if(fd == -1){
			ofLogError("ofSerial") << "unable to open " << portName;
			return false;
		}

		struct termios options;
		tcgetattr(fd,&oldoptions);
		options = oldoptions;
		switch(baud){
		   case 300: 	cfsetispeed(&options,B300);
						cfsetospeed(&options,B300);
						break;
		   case 1200: 	cfsetispeed(&options,B1200);
						cfsetospeed(&options,B1200);
						break;
		   case 2400: 	cfsetispeed(&options,B2400);
						cfsetospeed(&options,B2400);
						break;
		   case 4800: 	cfsetispeed(&options,B4800);
						cfsetospeed(&options,B4800);
						break;
		   case 9600: 	cfsetispeed(&options,B9600);
						cfsetospeed(&options,B9600);
						break;
		   case 14400: 	cfsetispeed(&options,B14400);
						cfsetospeed(&options,B14400);
						break;
		   case 19200: 	cfsetispeed(&options,B19200);
						cfsetospeed(&options,B19200);
						break;
		   case 28800: 	cfsetispeed(&options,B28800);
						cfsetospeed(&options,B28800);
						break;
		   case 38400: 	cfsetispeed(&options,B38400);
						cfsetospeed(&options,B38400);
						break;
		   case 57600:  cfsetispeed(&options,B57600);
						cfsetospeed(&options,B57600);
						break;
		   case 115200: cfsetispeed(&options,B115200);
						cfsetospeed(&options,B115200);
						break;
		   case 230400: cfsetispeed(&options,B230400);
						cfsetospeed(&options,B230400);
						break;

			default:	cfsetispeed(&options,B9600);
						cfsetospeed(&options,B9600);
						ofLogError("ofSerial") << "setup(): cannot set " << baud << " bps, setting to 9600";
						break;
		}

		options.c_cflag |= (CLOCAL | CREAD);
		options.c_cflag &= ~PARENB;
		options.c_cflag &= ~CSTOPB;
		options.c_cflag &= ~CSIZE;
		options.c_cflag |= CS8;
		tcsetattr(fd,TCSANOW,&options);

		bInited = true;
		ofLogNotice("ofSerial") << "opened " << portName << " sucessfully @ " << baud << " bps";

	    return true;
	//---------------------------------------------
    #endif
    //---------------------------------------------


    //---------------------------------------------
	#ifdef TARGET_WIN32
	//---------------------------------------------

	char pn[256] = {0};
	int num;
	if (sscanf(portName.c_str(), "COM%d", &num) == 1) {
		// Microsoft KB115831, a.k.a. if COM > COM9 you have to use a different
		// syntax
		sprintf(pn, "\\\\.\\COM%d", num);
	} else {
		strncpy(pn, portName.c_str(), sizeof(pn) - 1);
	}

	// open the serial port:
	// "COM4", etc...

	hComm=CreateFileA(pn,GENERIC_READ|GENERIC_WRITE,0,0,
					OPEN_EXISTING,0,0);

	if(hComm==INVALID_HANDLE_VALUE){
		ofLogError("ofSerial") << "setup(): unable to open " << portName;
		return false;
	}


	// now try the settings:
	COMMCONFIG cfg;
	DWORD cfgSize;
	char  buf[80];

	cfgSize=sizeof(cfg);
	GetCommConfig(hComm,&cfg,&cfgSize);
	int bps = baud;
	sprintf(buf,"baud=%d parity=N data=8 stop=1",bps);

	#if (_MSC_VER)       // microsoft visual studio
		// msvc doesn't like BuildCommDCB,
		//so we need to use this version: BuildCommDCBA
		if(!BuildCommDCBA(buf,&cfg.dcb)){
			ofLogError("ofSerial") << "setup(): unable to build comm dcb, (" << buf << ")";
		}
	#else
		if(!BuildCommDCB(buf,&cfg.dcb)){
			ofLogError("ofSerial") << "setup(): unable to build comm dcb, (" << buf << ")";
		}
	#endif


	// Set baudrate and bits etc.
	// Note that BuildCommDCB() clears XON/XOFF and hardware control by default

	if(!SetCommState(hComm,&cfg.dcb)){
		ofLogError("ofSerial") << "setup(): couldn't set comm state: " << cfg.dcb.BaudRate << " bps, xio " << cfg.dcb.fInX << "/" << cfg.dcb.fOutX;;
	}
	//ofLogNotice("ofSerial") << "bps=" << cfg.dcb.BaudRate << ", xio=" << cfg.dcb.fInX << "/" << cfg.dcb.fOutX;

	// Set communication timeouts (NT)
	COMMTIMEOUTS tOut;
	GetCommTimeouts(hComm,&oldTimeout);
	tOut = oldTimeout;
	// Make timeout so that:
	// - return immediately with buffered characters
	tOut.ReadIntervalTimeout=MAXDWORD;
	tOut.ReadTotalTimeoutMultiplier=0;
	tOut.ReadTotalTimeoutConstant=0;
	SetCommTimeouts(hComm,&tOut);

	bInited = true;
	return true;
	//---------------------------------------------
	#endif
	//---------------------------------------------
}
//--------------------------------------------------------------
//	Load Settings from the config.xml file
//--------------------------------------------------------------
void CalibrationUtils::loadXMLSettings()
{
	bGoToNextStep = false;

	// Can this load via http?
	if( calibrationXML.loadFile("calibration.xml") ){
		//WOOT!
		ofLogNotice() << "Calibration Loaded!";
	}else{
		//FAIL!
		ofLogNotice() << "No calibration Found...";
		// GENERATE DEFAULT XML DATA WHICH WILL BE SAVED INTO THE CONFIG
	}

	bool bboxRoot = true;
	bool screenRoot = true;

	bCalibrating = false;
	calibrationStep = 0;

	//Set grid and init everything that relates to the grid
	GRID_X		= calibrationXML.getValue("SCREEN:GRIDMESH:GRIDX", 50);
	GRID_Y		= calibrationXML.getValue("SCREEN:GRIDMESH:GRIDY", 50);

	setGrid(GRID_X, GRID_Y);

	//Bounding Box Points
	if(bboxRoot)
	{
	    vector2df ul(calibrationXML.getValue("SCREEN:BOUNDINGBOX:ulx", 0.000000),calibrationXML.getValue("SCREEN:BOUNDINGBOX:uly", 0.000000));
	    vector2df lr(calibrationXML.getValue("SCREEN:BOUNDINGBOX:lrx", 1.000000),calibrationXML.getValue("SCREEN:BOUNDINGBOX:lry", 1.000000));
		rect2df boundingbox(ul, lr);
		setScreenBBox(boundingbox);
	}
	else
		setScreenScale(1.0f);

	//Calibration Points
	if(screenRoot)
	{
		//let's see how many <POINT> tags there are inside <SCREEN> in the xml file
		int numDragTags = calibrationXML.getNumTags("SCREEN:POINT");

		//if there is at least one <POINT> tag we can read the list of points
		if(numDragTags > 0)
		{
			//we push into the last POINT tag; this temporarily treats the tag as the document root.
			calibrationXML.pushTag("SCREEN:POINT", numDragTags-1);

			//we see how many points we have stored in <POINT> tags
			int numPtTags = calibrationXML.getNumTags("POINT");

			if(numPtTags > 0)
			{
				//We then read those x y values into our array
				for(int i = 0; i < numPtTags; i++)
				{
					//the last argument of getValue can be used to specify
					//which tag out of multiple tags you are referring to.
					float x = calibrationXML.getValue("POINT:X", 0.000000, i);
					float y = calibrationXML.getValue("POINT:Y", 0.000000, i);

					cameraPoints[i] = vector2df(x,y);
					printf("Calibration: %f, %f\n", cameraPoints[i].X, cameraPoints[i].Y);

					bscreenPoints = true;
					bcameraPoints = true;
				}
			}
			calibrationXML.popTag(); //Set XML root back to highest level
		}
	}
	//End calibrationXML Calibration Settings

	//Set the camera calibrated box.
	calculateBox();
//	computeCameraToScreenMap();
}
Example #5
//--------------------------------------------------------------
void ofApp::update(){

	// OSC receiver queues up new messages, so you need to iterate
	// through waiting messages to get each incoming message

	// check for waiting messages
	while(serverReceiver.hasWaitingMessages()){
		// get the next message
		ofxOscMessage m;
		serverReceiver.getNextMessage(m);
		//Log received message for easier debugging of participants' messages:
		ofLogVerbose("Server recvd msg " + getOscMsgAsString(m) + " from " + m.getRemoteHost());

		// check the address of the incoming message
		if(m.getAddress() == "/typing"){
			//Identify host of incoming msg
			string incomingHost = m.getRemoteHost();
			//See if incoming host is a new one:
			if(std::find(knownClients.begin(), knownClients.end(), incomingHost)
			   == knownClients.end()){
				knownClients.push_back(incomingHost); //add new host to list
			}
			// get the first argument (we're only sending one) as a string
			if(m.getNumArgs() > 0){
				if(m.getArgType(0) == OFXOSC_TYPE_STRING){
					//reimplemented message display:
					//If vector has reached max size, delete the first/oldest element
					if(serverMessages.size() == maxServerMessages){
						serverMessages.erase(serverMessages.begin());
					}
					//Add message text at the end of the vector
					serverMessages.push_back(m.getArgAsString(0));

					//Broadcast message to other chat participants
					broadcastReceivedMessage(m.getArgAsString(0));
				}
			}
		}
		// handle getting random OSC messages here
		else{
			ofLogWarning("Server got weird message: " + m.getAddress());
		}
	}

	// Client side:

	// OSC receiver queues up new messages, so you need to iterate
	// through waiting messages to get each incoming message

	// check for waiting messages
	while(clientReceiver.hasWaitingMessages()){
		// get the next message
		ofxOscMessage m;
		clientReceiver.getNextMessage(m);
		ofLogNotice("Client just received a message");
		// check the address of the incoming message
		if(m.getAddress() == "/chatlog"){
			// get the first argument (we're only sending one) as a string
			if(m.getNumArgs() > 0){
				if(m.getArgType(0) == OFXOSC_TYPE_STRING){
					string oldMessages = clientMessages;
					clientMessages = m.getArgAsString(0) + "\n" + oldMessages;
				}
			}
		}
	}

	//this is purely a workaround for a mysterious OSCpack bug on 64bit linux
	// after startup, reinit the receivers
	// must be a timing problem, though - in debug, stepping through, it works.
	if(ofGetFrameNum() == 60){
		clientReceiver.setup(clientRecvPort);
		serverReceiver.setup(serverRecvPort);
	}
}
void ofxVideoRecorder::retirePipeNumber(int num) {
	if (!openPipes.erase(num)) {
		ofLogNotice() << "ofxVideoRecorder::retirePipeNumber(): trying to retire a pipe number that is not being tracked: " << num << endl;
	}
}
void kinectGuiApp::pauseVideo() {
    getCurVideo().setPaused(true);
    ofLogNotice() << "Pause video: " << getCurVideo().getMoviePath();
}
void ofxMosquitto::on_subscribe(uint16_t mid, int qos_count, const uint8_t *granted_qos){
	ofLogNotice("ofxMosquitto -- Subscribed successfully");
}
void ofxMosquitto::on_connect(int rc){
	ofLogNotice("ofxMosquitto -- connected successfully");	
}
Example #10
//--------------------------------------------------------
static ofTTFCharacter makeContoursForCharacter(FT_Face &face){

		//int num			= face->glyph->outline.n_points;
		int nContours	= face->glyph->outline.n_contours;
		int startPos	= 0;

		char * tags		= face->glyph->outline.tags;
		FT_Vector * vec = face->glyph->outline.points;

		ofTTFCharacter charOutlines;
		charOutlines.setUseShapeColor(false);

		for(int k = 0; k < nContours; k++){
			if( k > 0 ){
				startPos = face->glyph->outline.contours[k-1]+1;
			}
			int endPos = face->glyph->outline.contours[k]+1;

			if(printVectorInfo){
				ofLogNotice("ofTrueTypeFont") << "--NEW CONTOUR";
			}

			//vector <ofPoint> testOutline;
			ofPoint lastPoint;

			for(int j = startPos; j < endPos; j++){

				if( FT_CURVE_TAG(tags[j]) == FT_CURVE_TAG_ON ){
					lastPoint.set((float)vec[j].x, (float)-vec[j].y, 0);
					if(printVectorInfo){
						ofLogNotice("ofTrueTypeFont") << "flag[" << j << "] is set to 1 - regular point - " << lastPoint.x <<  lastPoint.y;
					}
					charOutlines.lineTo(lastPoint/64);

				}else{
					if(printVectorInfo){
						ofLogNotice("ofTrueTypeFont") << "flag[" << j << "] is set to 0 - control point";
					}

					if( FT_CURVE_TAG(tags[j]) == FT_CURVE_TAG_CUBIC ){
						if(printVectorInfo){
							ofLogNotice("ofTrueTypeFont") << "- bit 2 is set to 2 - CUBIC";
						}

						int prevPoint = j-1;
						if( j == 0){
							prevPoint = endPos-1;
						}

						int nextIndex = j+1;
						if( nextIndex >= endPos){
							nextIndex = startPos;
						}

						ofPoint nextPoint( (float)vec[nextIndex].x,  -(float)vec[nextIndex].y );

						//we need two control points to draw a cubic bezier
						bool lastPointCubic =  ( FT_CURVE_TAG(tags[prevPoint]) != FT_CURVE_TAG_ON ) && ( FT_CURVE_TAG(tags[prevPoint]) == FT_CURVE_TAG_CUBIC);

						if( lastPointCubic ){
							ofPoint controlPoint1((float)vec[prevPoint].x,	(float)-vec[prevPoint].y);
							ofPoint controlPoint2((float)vec[j].x, (float)-vec[j].y);
							ofPoint nextPoint((float) vec[nextIndex].x,	-(float) vec[nextIndex].y);

							//cubic_bezier(testOutline, lastPoint.x, lastPoint.y, controlPoint1.x, controlPoint1.y, controlPoint2.x, controlPoint2.y, nextPoint.x, nextPoint.y, 8);
							charOutlines.bezierTo(controlPoint1.x/64, controlPoint1.y/64, controlPoint2.x/64, controlPoint2.y/64, nextPoint.x/64, nextPoint.y/64);
						}

					}else{

						ofPoint conicPoint( (float)vec[j].x,  -(float)vec[j].y );

						if(printVectorInfo){
							ofLogNotice("ofTrueTypeFont") << "- bit 2 is set to 0 - conic- ";
							ofLogNotice("ofTrueTypeFont") << "--- conicPoint point is " << conicPoint.x << conicPoint.y;
						}

						//If the first point is conic and the last point is conic then we need to create a virtual point which acts as a wrap-around
						if( j == startPos ){
							bool prevIsConnic = (  FT_CURVE_TAG( tags[endPos-1] ) != FT_CURVE_TAG_ON ) && ( FT_CURVE_TAG( tags[endPos-1]) != FT_CURVE_TAG_CUBIC );

							if( prevIsConnic ){
								ofPoint lastConnic((float)vec[endPos - 1].x, (float)-vec[endPos - 1].y);
								lastPoint = (conicPoint + lastConnic) / 2;

								if(printVectorInfo){
									ofLogNotice("ofTrueTypeFont") << "NEED TO MIX WITH LAST";
									ofLogNotice("ofTrueTypeFont") << "last is " << lastPoint.x << " " << lastPoint.y;
								}
							}
						}

						//bool doubleConic = false;

						int nextIndex = j+1;
						if( nextIndex >= endPos){
							nextIndex = startPos;
						}

						ofPoint nextPoint( (float)vec[nextIndex].x,  -(float)vec[nextIndex].y );

						if(printVectorInfo){
							ofLogNotice("ofTrueTypeFont") << "--- last point is " << lastPoint.x << " " <<  lastPoint.y;
						}

						bool nextIsConnic = (  FT_CURVE_TAG( tags[nextIndex] ) != FT_CURVE_TAG_ON ) && ( FT_CURVE_TAG( tags[nextIndex]) != FT_CURVE_TAG_CUBIC );

						//create a 'virtual on point' if we have two conic points
						if( nextIsConnic ){
							nextPoint = (conicPoint + nextPoint) / 2;
							if(printVectorInfo){
								ofLogNotice("ofTrueTypeFont") << "|_______ double connic!";
							}
						}
						if(printVectorInfo){
							ofLogNotice("ofTrueTypeFont") << "--- next point is " << nextPoint.x << " " << nextPoint.y;
						}

						//quad_bezier(testOutline, lastPoint.x, lastPoint.y, conicPoint.x, conicPoint.y, nextPoint.x, nextPoint.y, 8);
						charOutlines.quadBezierTo(lastPoint.x/64, lastPoint.y/64, conicPoint.x/64, conicPoint.y/64, nextPoint.x/64, nextPoint.y/64);

						if( nextIsConnic ){
							lastPoint = nextPoint;
						}
					}
				}

			//end for
			}
			charOutlines.close();
		}

	return charOutlines;
}
Example #11
//-----------------------------------------------------------
bool ofTrueTypeFont::load(const std::string& _filename, int _fontSize, bool _bAntiAliased, bool _bFullCharacterSet, bool _makeContours, float _simplifyAmt, int _dpi) {
	#if defined(TARGET_ANDROID)
	ofAddListener(ofxAndroidEvents().unloadGL,this,&ofTrueTypeFont::unloadTextures);
	ofAddListener(ofxAndroidEvents().reloadGL,this,&ofTrueTypeFont::reloadTextures);
	#endif
	int border = 1;
	initLibraries();

	// if we've already been loaded, try to clean up :
	unloadTextures();

	if( _dpi == 0 ){
		_dpi = ttfGlobalDpi;
	}



	bLoadedOk 			= false;
	bAntiAliased 		= _bAntiAliased;
	bFullCharacterSet 	= _bFullCharacterSet;
	fontSize			= _fontSize;
	bMakeContours 		= _makeContours;
	simplifyAmt			= _simplifyAmt;
	dpi 				= _dpi;

	//--------------- load the library and typeface


	if(!loadFontFace(_filename,_fontSize,face,filename)){
        return false;
	}


	FT_Set_Char_Size( face, fontSize << 6, fontSize << 6, dpi, dpi);
	float fontUnitScale = ((float)fontSize * dpi) / (72 * face->units_per_EM);
	lineHeight = face->height * fontUnitScale;
	ascenderHeight = face->ascender * fontUnitScale;
	descenderHeight = face->descender * fontUnitScale;
	glyphBBox.set(face->bbox.xMin * fontUnitScale,
				  face->bbox.yMin * fontUnitScale,
				  (face->bbox.xMax - face->bbox.xMin) * fontUnitScale,
				  (face->bbox.yMax - face->bbox.yMin) * fontUnitScale);
	useKerning = FT_HAS_KERNING( face );

	//------------------------------------------------------
	//kerning would be great to support:
	//ofLogNotice("ofTrueTypeFont") << "FT_HAS_KERNING ? " <<  FT_HAS_KERNING(face);
	//------------------------------------------------------

	nCharacters = (bFullCharacterSet ? 256 : 128) - NUM_CHARACTER_TO_START;

	//--------------- initialize character info and textures
	cps.resize(nCharacters);

	if(bMakeContours){
		charOutlines.assign(nCharacters, ofTTFCharacter());
		charOutlinesNonVFlipped.assign(nCharacters, ofTTFCharacter());
		charOutlinesContour.assign(nCharacters, ofTTFCharacter());
		charOutlinesNonVFlippedContour.assign(nCharacters, ofTTFCharacter());
	}else{
		charOutlines.resize(1);
	}

	vector<ofPixels> expanded_data(nCharacters);

	long areaSum=0;
	FT_Error err;


	//--------------------- load each char -----------------------
	for (int i = 0 ; i < nCharacters; i++){

		//------------------------------------------ anti aliased or not:
		int glyph = (unsigned char)(i+NUM_CHARACTER_TO_START);
		if (glyph == 0xA4) glyph = 0x20AC; // hack to load the euro sign, all codes in 8859-15 match with utf-32 except for this one
		err = FT_Load_Glyph( face, FT_Get_Char_Index( face, glyph ), bAntiAliased ?  FT_LOAD_FORCE_AUTOHINT : FT_LOAD_DEFAULT );
        if(err){
			ofLogError("ofTrueTypeFont") << "loadFont(): FT_Load_Glyph failed for char " << i << ": FT_Error " << err;

		}

		if (bAntiAliased == true) FT_Render_Glyph(face->glyph, FT_RENDER_MODE_NORMAL);
		else FT_Render_Glyph(face->glyph, FT_RENDER_MODE_MONO);

		//------------------------------------------


		if(bMakeContours){
			if(printVectorInfo){
				ofLogNotice("ofTrueTypeFont") <<  "character " << char(i+NUM_CHARACTER_TO_START);
			}

			//int character = i + NUM_CHARACTER_TO_START;
			charOutlines[i] = makeContoursForCharacter( face );
			charOutlinesContour[i] = charOutlines[i];
			charOutlinesContour[i].setFilled(false);
			charOutlinesContour[i].setStrokeWidth(1);

			charOutlinesNonVFlipped[i] = charOutlines[i];
			charOutlinesNonVFlipped[i].translate(ofVec3f(0,cps[i].height));
			charOutlinesNonVFlipped[i].scale(1,-1);
			charOutlinesNonVFlippedContour[i] = charOutlines[i];
			charOutlinesNonVFlippedContour[i].setFilled(false);
			charOutlinesNonVFlippedContour[i].setStrokeWidth(1);


			if(simplifyAmt>0){
				charOutlines[i].simplify(simplifyAmt);
				charOutlinesNonVFlipped[i].simplify(simplifyAmt);
				charOutlinesContour[i].simplify(simplifyAmt);
				charOutlinesNonVFlippedContour[i].simplify(simplifyAmt);
			}
		}


		// -------------------------
		// info about the character:
		FT_Bitmap& bitmap= face->glyph->bitmap;

		// Note: Using decltype here to avoid warnings across
		// platforms using differing versions of freetype 2.
		decltype(bitmap.width) width  = bitmap.width;
		decltype(bitmap.rows) height = bitmap.rows;

		cps[i].characterIndex	= i;
		cps[i].glyph			= glyph;
		cps[i].height 			= face->glyph->metrics.height>>6;
		cps[i].width 			= face->glyph->metrics.width>>6;
		cps[i].bearingX			= face->glyph->metrics.horiBearingX>>6;
		cps[i].bearingY			= face->glyph->metrics.horiBearingY>>6;
		cps[i].xmin				= face->glyph->bitmap_left;
		cps[i].xmax				= cps[i].xmin + cps[i].width;
		cps[i].ymin				= -face->glyph->bitmap_top;
		cps[i].ymax				= cps[i].ymin + cps[i].height;
		cps[i].advance			= face->glyph->metrics.horiAdvance>>6;


		cps[i].tW				= cps[i].width;
		cps[i].tH				= cps[i].height;


		areaSum += (cps[i].tW+border*2)*(cps[i].tH+border*2);

		if(width==0 || height==0) continue;

		// Allocate Memory For The Texture Data.
		expanded_data[i].allocate(width, height, OF_PIXELS_GRAY_ALPHA);
		//-------------------------------- clear data:
		expanded_data[i].set(0,255); // every luminance pixel = 255
		expanded_data[i].set(1,0);


		if (bAntiAliased == true){
			ofPixels bitmapPixels;
			bitmapPixels.setFromExternalPixels(bitmap.buffer,width,height,OF_PIXELS_GRAY);
			expanded_data[i].setChannel(1,bitmapPixels);
		} else {
			//-----------------------------------
			// true type packs monochrome info in a
			// 1-bit format, hella funky
			// here we unpack it:
			unsigned char *src =  bitmap.buffer;
			for(decltype(height) j=0; j < height; j++) {
				unsigned char b=0;
				unsigned char *bptr =  src;
				for(decltype(width) k=0; k < width; k++){
					expanded_data[i][2*(k+j*width)] = 255;

					if (k%8==0){
						b = (*bptr++);
					}

					expanded_data[i][2*(k+j*width) + 1] = b&0x80 ? 255 : 0;
					b <<= 1;
				}
				src += bitmap.pitch;
			}
			//-----------------------------------
		}
	}

	vector<charProps> sortedCopy = cps;
	sort(sortedCopy.begin(),sortedCopy.end(),&compare_cps);

	// pack in a texture, algorithm to calculate min w/h from
	// http://upcommons.upc.edu/pfc/bitstream/2099.1/7720/1/TesiMasterJonas.pdf
	//ofLogNotice("ofTrueTypeFont") << "loadFont(): areaSum: " << areaSum

	bool packed = false;
	float alpha = logf(areaSum)*1.44269;

	int w;
	int h;
	while(!packed){
		w = pow(2,floor((alpha/2.f) + 0.5)); // there doesn't seem to be a round in cmath for windows.
		//w = pow(2,round(alpha/2.f));
		h = w;//pow(2,round(alpha - round(alpha/2.f)));
		int x=0;
		int y=0;
		int maxRowHeight = sortedCopy[0].tH + border*2;
		for(int i=0;i<(int)cps.size();i++){
			if(x+sortedCopy[i].tW + border*2>w){
				x = 0;
				y += maxRowHeight;
				maxRowHeight = sortedCopy[i].tH + border*2;
				if(y + maxRowHeight > h){
					alpha++;
					break;
				}
			}
			x+= sortedCopy[i].tW + border*2;
			if(i==(int)cps.size()-1) packed = true;
		}

	}



	ofPixels atlasPixelsLuminanceAlpha;
	atlasPixelsLuminanceAlpha.allocate(w,h,OF_PIXELS_GRAY_ALPHA);
	atlasPixelsLuminanceAlpha.set(0,255);
	atlasPixelsLuminanceAlpha.set(1,0);


	int x=0;
	int y=0;
	int maxRowHeight = sortedCopy[0].tH + border*2;
	for(int i=0;i<(int)cps.size();i++){
		ofPixels & charPixels = expanded_data[sortedCopy[i].characterIndex];

		if(x+sortedCopy[i].tW + border*2>w){
			x = 0;
			y += maxRowHeight;
			maxRowHeight = sortedCopy[i].tH + border*2;
		}

		cps[sortedCopy[i].characterIndex].t1		= float(x + border)/float(w);
		cps[sortedCopy[i].characterIndex].v1		= float(y + border)/float(h);
		cps[sortedCopy[i].characterIndex].t2		= float(cps[sortedCopy[i].characterIndex].tW + x + border)/float(w);
		cps[sortedCopy[i].characterIndex].v2		= float(cps[sortedCopy[i].characterIndex].tH + y + border)/float(h);
		charPixels.pasteInto(atlasPixelsLuminanceAlpha,x+border,y+border);
		x+= sortedCopy[i].tW + border*2;
	}
	texAtlas.allocate(atlasPixelsLuminanceAlpha,false);

	if(bAntiAliased && fontSize>20){
		texAtlas.setTextureMinMagFilter(GL_LINEAR,GL_LINEAR);
	}else{
		texAtlas.setTextureMinMagFilter(GL_NEAREST,GL_NEAREST);
	}
	texAtlas.loadData(atlasPixelsLuminanceAlpha);

	// ------------- close the library and typeface
  	bLoadedOk = true;
	return true;
}
Example #12
void ofApp::setup() {

    ofSetLogLevel(OF_LOG_NOTICE);

    ofLogNotice() << "setup()";

    if (ofApp::isSemibreve()) ofLogNotice() << "Going to run Semibreve version";

    if (ofApp::isOsx())     ofLogNotice() << "OSX detected";
    if (ofApp::isIos())     ofLogNotice() << "iOS detected";
    if (ofApp::isAndroid()) ofLogNotice() << "Android detected";

    if (ofApp::isPhone())   ofLogNotice() << "Phone detected";
    if (ofApp::isTablet())  ofLogNotice() << "Tablet detected";

    // if (ofApp::isIphone())  ofLogNotice() << "iPhone detected";
    // if (ofApp::isIpad())    ofLogNotice() << "iPad detected";

    // if (ofApp::isAndroidPhone())   ofLogNotice() << "Android phone detected";
    // if (ofApp::isAndroidTablet())  ofLogNotice() << "Android tablet detected";

    #if defined TARGET_OSX
    ofLogNotice() << "Running OSX version";
    ofSetDataPathRoot("../Resources/data/");
    #endif

    #if defined TARGET_SEMIBREVE
    ofLogNotice() << "Running SEMIBREVE version";
    oscReceiver.setup(RECEIVE_PORT);
    oscSender.setup(HOST, SEND_PORT);
    #endif

    #if defined TARGET_OF_IOS
     if (ofApp::isTablet()) {
        ofSetOrientation(OF_ORIENTATION_90_LEFT);
        swiper.setup();
        ofAddListener(swiper.swipeRecognized, this, &ofApp::onSwipe);
        swiping = false;
    } else {
        swiper.setup();
        ofAddListener(swiper.swipeRecognized, this, &ofApp::onSwipe);
        swiping = false;
    }
    #endif

#ifndef TARGET_OSX
    if (isAndroid() || isIos()) {
        ofxAccelerometer.setup();
        accelCount = 0;
        crop = 0;
    }
#endif
    
    if (!ofApp::isIos()) {
        ofLogNotice() << "Registering for touch events if not ios";
        ofRegisterTouchEvents(this);
    }

    ofSetFrameRate(FRAME_RATE);
    ofSetCircleResolution(CIRCLE_RESOLUTION);

    if (multitouch) ofHideCursor();

    ofApp::language = ofApp::getSystemLanguage();
    ofLogNotice() << "Language is " << ofApp::language;

    initTranslations();
    initModules();
    setupModules();
    loadModuleSounds();

    initImages();

    appState = ABOUT;

    inactivityState = ACTIVE;

    // init global vars
    aboutY = 0;
    splashAlpha = 255;
    arrowDownY = ofGetHeight()/3*2;
    arrowDownYBase = arrowDownY;
    arrowDownDir = 1;
    showSwipeInfo = true;
    ofApp::maxParticleY = round(ofGetHeight() * (1-LIMIT_PARTICLE));

    uint swipeFontSize;
    if (isTablet()) swipeFontSize = 26;
    else swipeFontSize = 20;
    swipeFont.load(UI_FONT_FACE, swipeFontSize);

}
Example #13
//--------------------------------------------------------------
void testApp::userEvent(ofxOpenNIUserEvent & event){
    ofLogNotice() << getUserStatusAsString(event.userStatus) << " for user " << event.id << " from device " << event.deviceID;
}
Example #14
//--------------------------------------------------------------
void ofApp::saveFileEvent(int &whichEditor) {
	// received an editor save via CTRL/Super + s or CTRL/Super + d
	
	ofLogNotice() << "received save event for editor " << whichEditor
		<< " with filename " << editor.getEditorFilename(whichEditor);
}
void ofxVideoDataWriterThread::threadedFunction() {
#if defined( TARGET_OSX ) || defined( TARGET_LINUX )
	if (fd == -1) {
		fd = ::open(filePath.c_str(), O_WRONLY);
	}
#endif
	//maybe create file here? these threads act as the client and the main thread as the server?
	while (isThreadRunning())
	{
		ofPixels * frame = NULL;
		if (queue->Consume(frame) && frame) {
			bIsWriting = true;
			int b_offset = 0;
			int b_remaining = frame->getWidth()*frame->getHeight()*frame->getBytesPerPixel();

			while (b_remaining > 0 && isThreadRunning())
			{
#if defined( TARGET_OSX ) || defined( TARGET_LINUX )
				errno = 0;

				int b_written = ::write(fd, ((char *)frame->getPixels()) + b_offset, b_remaining);
#endif
#ifdef TARGET_WIN32
				DWORD b_written;
				if (!WriteFile(videoHandle, ((char *)frame->getPixels()) + b_offset, b_remaining, &b_written, 0)) {
					LPTSTR errorText = NULL;

					FormatMessageW(
						// use system message tables to retrieve error text
						FORMAT_MESSAGE_FROM_SYSTEM
						// allocate buffer on local heap for error text
						| FORMAT_MESSAGE_ALLOCATE_BUFFER
						// Important! will fail otherwise, since we're not 
						// (and CANNOT) pass insertion parameters
						| FORMAT_MESSAGE_IGNORE_INSERTS,
						NULL,    // unused with FORMAT_MESSAGE_FROM_SYSTEM
						GetLastError(),
						MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
						(LPTSTR)&errorText,  // output 
						0, // minimum size for output buffer
						NULL);   // arguments - see note 
					wstring ws = errorText;
					string error(ws.begin(), ws.end());
					ofLogNotice("Video Thread") << "WriteFile to pipe failed: " << error;
					break;
				}
#endif
				if (b_written > 0) {
					b_remaining -= b_written;
					b_offset += b_written;
					if (b_remaining != 0) {
						ofLogWarning("ofxVideoDataWriterThread") << ofGetTimestampString("%H:%M:%S:%i") << " - b_remaining is not 0 -> " << b_written << " - " << b_remaining << " - " << b_offset << ".";
						//break;
					}
				}
				else if (b_written < 0) {
					ofLogError("ofxVideoDataWriterThread") << ofGetTimestampString("%H:%M:%S:%i") << " - write to PIPE failed with error -> " << errno << " - " << strerror(errno) << ".";
					break;
				}
				else {
					if (bClose) {
						ofLogVerbose("ofxVideoDataWriterThread") << ofGetTimestampString("%H:%M:%S:%i") << " - Nothing was written and bClose is TRUE.";
						break; // quit writing so we can close the file
					}
					ofLogWarning("ofxVideoDataWriterThread") << ofGetTimestampString("%H:%M:%S:%i") << " - Nothing was written. Is this normal?";
				}

				if (!isThreadRunning()) {
					ofLogWarning("ofxVideoDataWriterThread") << ofGetTimestampString("%H:%M:%S:%i") << " - The thread is not running anymore let's get out of here!";
				}
			}
			bIsWriting = false;
			frame->clear();
			delete frame;
		}
		else {
			conditionMutex.lock();
			condition.wait(conditionMutex);
			conditionMutex.unlock();
		}
	}

#if defined( TARGET_OSX ) || defined( TARGET_LINUX )
	::close(fd);
#endif
#ifdef TARGET_WIN32
	FlushFileBuffers(videoHandle);
	DisconnectNamedPipe(videoHandle);
	CloseHandle(videoHandle);
#endif
}
void DeselectSurfaceCmd::exec() {
    ofLogNotice("DeselectSurfaceCmd", "exec");
    _surface = _surfaceManager->getSelectedSurface();
    _selectedVertexIndex = _surfaceManager->getSelectedVertexIndex();
    _surfaceManager->deselectSurface();
}
void ofxVideoRecorder::close()
{
	if (!bIsInitialized) return;

	bIsRecording = false;

#if defined( TARGET_OSX ) || defined( TARGET_LINUX )

	if (bRecordVideo && bRecordAudio) {
		//set pipes to non_blocking so we don't get stuck at the final writes
		audioThread.setPipeNonBlocking();
		videoThread.setPipeNonBlocking();

		while (frames.size() > 0 && audioFrames.size() > 0) {
			// if there are frames in the queue or the thread is writing, signal them until the work is done.
			videoThread.signal();
			audioThread.signal();
		}
	}
	else if (bRecordVideo) {
		//set pipes to non_blocking so we don't get stuck at the final writes
		videoThread.setPipeNonBlocking();

		while (frames.size() > 0) {
			// if there are frames in the queue or the thread is writing, signal them until the work is done.
			videoThread.signal();
		}
	}
	else if (bRecordAudio) {
		//set pipes to non_blocking so we don't get stuck at the final writes
		audioThread.setPipeNonBlocking();

		while (audioFrames.size() > 0) {
			// if there are frames in the queue or the thread is writing, signal them until the work is done.
			audioThread.signal();
		}
	}

	//at this point all data that ffmpeg wants should have been consumed
	// one of the threads may still be trying to write a frame,
	// but once close() gets called they will exit the non_blocking write loop
	// and hopefully close successfully

	bIsInitialized = false;

	if (bRecordVideo) {
		videoThread.close();
	}
	if (bRecordAudio) {
		audioThread.close();
	}

	retirePipeNumber(pipeNumber);

	ffmpegThread.waitForThread();
#endif
#ifdef TARGET_WIN32 
	if (bRecordVideo) {
		videoThread.close();
	}
	if (bRecordAudio) {
		audioThread.close();
	}

	//at this point all data that ffmpeg wants should have been consumed
	// one of the threads may still be trying to write a frame,
	// but once close() gets called they will exit the non_blocking write loop
	// and hopefully close successfully

	if (bRecordAudio && bRecordVideo) {
		ffmpegAudioThread.waitForThread();
		ffmpegVideoThread.waitForThread();

		//need to do one last script here to join the audio and video recordings

		stringstream finalCmd;

		/*finalCmd << ffmpegLocation << " -y " << " -i " << filePath << "_vtemp" << movFileExt << " -i " << filePath << "_atemp" << movFileExt << " \\ ";
		finalCmd << "-filter_complex \"[0:0] [1:0] concat=n=2:v=1:a=1 [v] [a]\" \\";
		finalCmd << "-map \"[v]\" -map \"[a]\" ";
		finalCmd << " -vcodec " << videoCodec << " -b:v " << videoBitrate << " -b:a " << audioBitrate << " ";
		finalCmd << filePath << movFileExt;*/

		finalCmd << ffmpegLocation << " -y " << " -i " << filePath << "_vtemp" << movFileExt << " -i " << filePath << "_atemp" << audioFileExt << " ";
		finalCmd << "-c:v copy -c:a copy -strict experimental ";
		finalCmd << filePath << movFileExt;

		ofLogNotice("FFMpeg Merge") << "\n==============================================\n Merge Command \n==============================================\n";
		ofLogNotice("FFMpeg Merge") << finalCmd.str();
		//ffmpegThread.setup(finalCmd.str());
		system(finalCmd.str().c_str());

		//delete the unmerged files
		stringstream removeCmd;
		ofStringReplace(filePath, "/", "\\");
		removeCmd << "DEL " << filePath << "_vtemp" << movFileExt << " " << filePath << "_atemp" << audioFileExt;
		system(removeCmd.str().c_str());

	}

	ffmpegThread.waitForThread();

#endif
	// TODO: kill ffmpeg process if its taking too long to close for whatever reason.
	ofLogNotice("ofxVideoRecorder") << "\n==============================================\n Closed ffmpeg \n==============================================\n";
	bIsInitialized = false;
}
void DeselectSurfaceCmd::undo() {
    ofLogNotice("DeselectSurfaceCmd", "undo");
    _surfaceManager->selectSurface(_surface);
    _surfaceManager->selectVertex(_selectedVertexIndex);
}
void kinectGuiApp::playVideo() {
    getCurVideo().play();
    ofLogNotice() << "Playing video: " << getCurVideo().getMoviePath();
}
Example #20
ofxAubioBeat::~ofxAubioBeat()
{
    if (tempo) del_aubio_tempo(tempo);
    cleanup();
    ofLogNotice() << "deleted ofxAubioBeat";
}
CostumeManager::~CostumeManager()
{
    ofLogNotice() <<"CostumeManager::Destructor" ;
}
Example #22
//--------------------------------------------------------------
void menu::update(float d1) {
	
//	ofLogNotice("Update");
	if(bAddCircle) {
		ofLogNotice("Update-AddCircle");
		addCircle(ofPoint(ofGetMouseX(),ofGetMouseY()));
	}
	if(bAddBox) {
		addBox(ofPoint(ofGetMouseX(),ofGetMouseY()));
	}
	
	box2d.update();	
	
	if(!bAddCircle && !bAddBox) {
		ofVec2f mouse(ofGetMouseX(), ofGetMouseY());

		int ncircles = circles.size();
		int nboxes = boxes.size();
		int ntot = ncircles+nboxes;
		
		// = Forces between particles
		// For now this can be done statistically
		// It can also be done by densities:
		// - compute the mid point of each particle type and the total charge
		// - compute the force exerted on each particle by all the particle groups
		ptoMed_circles = ptoMedio(circles);
		ptoMed_boxes = ptoMedio(boxes);
		if(swFuerzaDensidad) {
			for(int i=0; i<circles.size(); i++) {
				float dis1 = ptoMed_circles.distance(circles[i].get()->getPosition());
				float dis2 = ptoMed_boxes.distance(circles[i].get()->getPosition());
				if(dis1 > minDisInt) circles[i].get()->addRepulsionForce(ptoMed_circles, 2*(ncircles/ntot)/dis1);
				if(dis2 > minDisInt) circles[i].get()->addAttractionPoint(ptoMed_boxes, 2*(nboxes/ntot)/dis2);
			}
			for(int i=0; i<boxes.size(); i++) {
				float dis1 = ptoMed_boxes.distance(boxes[i].get()->getPosition());
				float dis2 = ptoMed_circles.distance(boxes[i].get()->getPosition());
				if(dis1 > minDisInt) boxes[i].get()->addRepulsionForce(ptoMed_boxes, 2*(nboxes/ntot)/dis1);
				if(dis2 > minDisInt) boxes[i].get()->addAttractionPoint(ptoMed_circles, 2*(ncircles/ntot)/dis2);
			}
		}
		
		
		// = Mouse/TUIO interaction:
		// Only happens if the mouse is inside the confinement zone
		// and depends on whether the mouse is pressed or not:
		
		// pressed:
		//  - REPULSION circle and ATTRACTION box: if dist-mouse < distMinima
		//  - ATTRACTION circle and REPULSION box: if dist-mouse > distMaxima
		//
		
		// or make the odd TUIOs attract and the even ones repel (<<== let's try this one)
		interaccion_point(mouse, !isMousePressed);
		
		// TUIO interaction
		
		
		
		// = Confinement force
		for(int i=0; i<circles.size(); i++) {
			float dis = centro.distance(circles[i].get()->getPosition());
			if(dis > distConf) circles[i].get()->addAttractionPoint(centro, 0.5);//3, 9);
		}
		for(int i=0; i<boxes.size(); i++) {
			float dis = centro.distance(boxes[i].get()->getPosition());
			if(dis > distConf) boxes[i].get()->addAttractionPoint(centro, 0.5);//3, 9);
		}
	
	}
	
	bAddCircle=false;
	bAddBox=false;

	// - - - - TUIOS - - - - 
    //tuioClient.getMessage();

	// buttons (tangibleHandler's ==> touchElements)
//    buttonCollide.update_prev(anillo.getParticlePosition());
//    buttonSpeed1.update_prev(anillo.getParticlePosition());
//    buttonSpeed2.update_prev(anillo.getParticlePosition());
    
//	touchElements.update();
	
	// received cursors
    hands.update();
	
	// loop over the cursor vector held in hands (tangiblesHandler) and interact if the cursor is inside the circle
//	int ttAct = ofGetElapsedTimeMillis();
//	float sqLim = (radioInt*radioInt)*0.9;
//	int limTime = floor(1000/ofGetFrameRate())+20;
	for(int i=0; i<hands.objectsCol.size(); i++) {
		//		ofLogNotice("handShadow num: " + ofToString(i));
		handShadow * h = (handShadow *) hands.objectsCol[i];
		//ofLogNotice("Id: " + ofToString(h->cursor_id) + "  x,y: " + ofToString(h->x)+"/"+ofToString(h->y)+ "   age: " + ofToString(h->age));

		bool bPar = false;
		if(h->cursor_id%2 == 0)		   bPar = true;
		   
		interaccion_point(ofPoint(h->x,h->y), bPar);
		
	}
    interactionUpdate();
	
	
}
Example #23
void ofApp::setupScrollViews(){

	float padding = 4;
	float paddingH = 20;

	//scrollview config
	TexturedObjectScrollView::ScrollViewConfig config;
	config.numRows = 3;
	config.drawLoadingRects = true;
	config.customTileDraw = true; //calls onDrawTile() for each on-screen tile
	config.imgFadeInDuration = 0.1;
	config.padding = padding;
	config.sizeToLoad = TEXTURE_ORIGINAL;
	config.mipmaps = true;
	config.maxScrollHandleW = 200;
	config.scrollFriction = 0.99;
	config.snapToImage = false;
	config.marginPadding = padding;
	config.bgColor = ofColor(22);
	config.scrollHandleColor = ofColor(128);
	config.scrollHandleVertPostion = 4;
	config.scrollviewEdgeGradientWidth = 30;
	config.numFboSamples = 4;
	config.canvasLookAheadDist = ofGetWidth(); // load offscreen images up to one screen-width away from the visible area
	config.unloadDelay = 5.0; //only unload images once they have been N seconds offscreen

	scrollView = new TexturedObjectScrollView();
	scrollView->setName("scrollView");
	ofRectangle viewArea = ofRectangle(paddingH, paddingH + 25, ofGetWidth() - 2 * paddingH, 610);

	scrollView->setup(viewArea, config);
	scene->addChild(scrollView);

	//register for click notification
	ofAddListener(scrollView->eventTextureClicked, this, &ofApp::onSrollImageClicked);

	//register for draw custom notifications
	ofAddListener(scrollView->eventTileDraw, this, &ofApp::onDrawTile);

	//prepare the list of objects you want to show on the scrollview
	vector<TexturedObjectScrollView::TexturedObjectTexture> imagesToShow;

	//add CH content - every image inside every object
	for(auto chObj : chObjects){
		TexturedObjectScrollView::TexturedObjectTexture tex;
		tex.texObj = chObj;
		int c = 0;
		for(auto img : chObj->images){
			tex.texIndex = c;
			imagesToShow.push_back(tex);
			c++;
		}
	}

	//add CWRU content
	for(auto cwruObj : cwruObjects){
		TexturedObjectScrollView::TexturedObjectTexture tex;
		tex.texObj = cwruObj;
		tex.texIndex = 0;
		imagesToShow.push_back(tex);
	}

	//shuffle images
	//std::random_shuffle(imagesToShow.begin(), imagesToShow.end());

	//finally load the contents
	scrollView->loadContent(imagesToShow);
	ofLogNotice("ofApp") << "showing " << imagesToShow.size() << " images.";
}
Example #24
//--------------------------------------------------------------
void menu::setup() {
	ofLogNotice("menu - SETUP - inicio");
	
	ofSetVerticalSync(true);
	ofBackgroundHex(0x000000);
	ofSetLogLevel(OF_LOG_NOTICE);
	ofEnableAntiAliasing();
	ofEnableSmoothing();

	box2d.init();
	box2d.setGravity(0, 0);
	box2d.setFPS(30.0);

	bola.loadImage("images/dot.png");
	ladoPart1 = 5;
	ladoPart2 = 10;
	
	// Load colors
	coloresBN.push_back(ofColor(0));
	coloresBN.push_back(ofColor(150));
	coloresBN.push_back(ofColor(255));

	// Drawing modes
	bDraw4Forces = true;
	fRed = true;

	// center
	centroScreen = ofVec2f(ofGetWidth()/2, W_HEIGHT/2); // ofGetHeight()/2);
	
	// Circular black border
	borde.clear();
	ofColor ctmp = ofColor::black;
	borde.setFillColor(ctmp);
	//http://www.openframeworks.cc/documentation/graphics/ofPath.html#show_setPolyWindingMode
	borde.setPolyWindingMode(OF_POLY_WINDING_ODD);
	// rectangle
	borde.rectangle(0,0,ofGetWidth(),ofGetHeight());
	borde.setCircleResolution(60);
	// Move to the screen position, centered horizontally and aligned to the top edge
//	borde.circle(ofGetWidth()/2,ofGetHeight()/2,ofGetHeight()/2*0.95);
//	centro = ofVec2f(ofGetWidth()/2.0, ofGetHeight()/2.0);
//	distConf = ofGetHeight()/2.0*0.9;
	borde.circle(ofGetWidth()/2, W_HEIGHT/2,W_HEIGHT/2*0.95);	
	centro = ofVec2f(ofGetWidth()/2.0, W_HEIGHT/2.0);
	distConf = W_HEIGHT/2.0*0.9;
	
	minDisInt = 25;
	
	// fbos
	fbo1.allocate(ofGetHeight()/2,W_HEIGHT/2);// ofGetHeight()/2);
	fbo2.allocate(ofGetHeight()/2,W_HEIGHT/2);// ofGetHeight()/2);
	fbo3.allocate(ofGetHeight()/2,W_HEIGHT/2);// ofGetHeight()/2);
	fbo4.allocate(ofGetHeight()/2,W_HEIGHT/2);// ofGetHeight()/2);	
	
	
	//
	// create TUIO buttons
	//

	//buttonSpeed1.set(centro.x-rMed-(dRad*0.9/2),centro.y-(dRad*0.9/2), dRad*0.9,dRad*0.9);
    menuButton button1;
	button1.set(0, 0, centro.x, centro.y);
//	button1.setup("E·M", ofColor::fromHex(0xF70D1A) );	// Ferrari red
	button1.setup(EM, ofColor::fromHex(0xF70D1A) , &mdisplay);	// Ferrari red
    buttons.push_back(button1);
	
    menuButton button2;
	button2.set(centro.x, 0, centro.x, centro.y);
	button2.setup(FUERTE, ofColor::chartreuse, &mdisplay);
    buttons.push_back(button2);
    menuButton button3;
	button3.set(centro.x,centro.y, centro.x, centro.y);
	button3.setup(GRAVEDAD, ofColor::skyBlue, &mdisplay);//	fromHex(0x64E986) );	// Algae Green	// Emerald
    buttons.push_back(button3);
    menuButton button4;
	button4.set(0, centro.y, centro.x, centro.y);
	button4.setup(DEBIL, ofColor::pink , &mdisplay);
     buttons.push_back(button4);
	touchElements.addObject(buttons[0]);
	touchElements.addObject(buttons[1]);
	touchElements.addObject(buttons[2]);
	touchElements.addObject(buttons[3]);

	
//	init_Escena();
	ofLogNotice("menu - SETUP - fin");
	
}
Example #25
//--------------------------------------------------------------
void testApp::userEvent(ofxOpenNIUserEvent & event){
    // show user event messages in the console
    ofLogNotice() << getUserStatusAsString(event.userStatus) << " for user " << event.id << " from device " << event.deviceID;
}
void ofxAudioDataWriterThread::threadedFunction() {
#if defined( TARGET_OSX ) || defined( TARGET_LINUX )

	if (fd == -1) {
		// write only; fd is the POSIX file descriptor (the Windows equivalent is the named pipe handle)
		fd = ::open(filePath.c_str(), O_WRONLY);
	}
#endif
	while (isThreadRunning())
	{
		audioFrameShort * frame = NULL;
		if (queue->Consume(frame) && frame) {
			bIsWriting = true;
			int b_offset = 0;
			int b_remaining = frame->size*sizeof(short);
			while (b_remaining > 0) {
#if defined( TARGET_OSX ) || defined( TARGET_LINUX )
				int b_written = ::write(fd, ((char *)frame->data) + b_offset, b_remaining);
#endif
#ifdef TARGET_WIN32
				DWORD b_written;
				if (!WriteFile(audioHandle, ((char *)frame->data) + b_offset, b_remaining, &b_written, 0)) {
					LPTSTR errorText = NULL;

					FormatMessageW(
						// use system message tables to retrieve error text
						FORMAT_MESSAGE_FROM_SYSTEM
						// allocate buffer on local heap for error text
						| FORMAT_MESSAGE_ALLOCATE_BUFFER
						// Important! will fail otherwise, since we're not 
						// (and CANNOT) pass insertion parameters
						| FORMAT_MESSAGE_IGNORE_INSERTS,
						NULL,    // unused with FORMAT_MESSAGE_FROM_SYSTEM
						GetLastError(),
						MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
						(LPTSTR)&errorText,  // output 
						0, // minimum size for output buffer
						NULL);   // arguments - see note 
					wstring ws = errorText;
					string error(ws.begin(), ws.end());
					ofLogNotice("Audio Thread") << "WriteFile to pipe failed: " << error;
				}
#endif
				if (b_written > 0) {
					b_remaining -= b_written;
					b_offset += b_written;
				}
				else if (b_written < 0) {
					ofLogError("ofxAudioDataWriterThread") << ofGetTimestampString("%H:%M:%S:%i") << " - write to PIPE failed with error -> " << errno << " - " << strerror(errno) << ".";
					break;
				}
				else {
					if (bClose) {
						break; // quit writing so we can close the file
					}
				}

				if (!isThreadRunning()) {
					ofLogWarning("ofxAudioDataWriterThread") << ofGetTimestampString("%H:%M:%S:%i") << " - The thread is not running anymore let's get out of here!";
				}
			}
			bIsWriting = false;
			delete[] frame->data;
			delete frame;
		}
		else {
			conditionMutex.lock();
			condition.wait(conditionMutex);
			conditionMutex.unlock();
		}
	}

#if defined( TARGET_OSX ) || defined( TARGET_LINUX )
	::close(fd);
#endif
#ifdef TARGET_WIN32
	FlushFileBuffers(audioHandle);
	DisconnectNamedPipe(audioHandle);
	CloseHandle(audioHandle);
#endif
}
//----------------------------------------------------------------
void ofSerial::listDevices(){
	buildDeviceList();
	for(int k = 0; k < (int)devices.size(); k++){
		ofLogNotice("ofSerial") << "[" << devices[k].getDeviceID() << "] = "<< devices[k].getDeviceName().c_str();
	}
}
bool ofxVideoRecorder::setupCustomOutput(int w, int h, float fps, int sampleRate, int channels, string outputString, bool sysClockSync, bool silent)
{
	if (bIsInitialized)
	{
		close();
	}

	bIsSilent = silent;
	bSysClockSync = sysClockSync;

	bRecordAudio = (sampleRate > 0 && channels > 0);
	bRecordVideo = (w > 0 && h > 0 && fps > 0);
	bFinishing = false;

	videoFramesRecorded = 0;
	audioSamplesRecorded = 0;

	if (!bRecordVideo && !bRecordAudio) {
		ofLogWarning() << "ofxVideoRecorder::setupCustomOutput(): invalid parameters, could not setup video or audio stream.\n"
			<< "video: " << w << "x" << h << "@" << fps << "fps\n"
			<< "audio: " << "channels: " << channels << " @ " << sampleRate << "Hz\n";
		return false;
	}
	videoPipePath = "";
	audioPipePath = "";
	pipeNumber = requestPipeNumber();
	if (bRecordVideo) {
		width = w;
		height = h;
		frameRate = fps;

#if defined( TARGET_OSX ) || defined( TARGET_LINUX )
		// recording video, create a FIFO pipe
		videoPipePath = ofFilePath::getAbsolutePath("ofxvrpipe" + ofToString(pipeNumber));

		ofStringReplace(videoPipePath, " ", "\\ ");

		if (!ofFile::doesFileExist(videoPipePath)) {
			string cmd = "bash --login -c 'mkfifo " + videoPipePath + "'";
			system(cmd.c_str());
		}
#endif
#ifdef TARGET_WIN32

		char vpip[128];
		int num = ofRandom(1024);
		sprintf(vpip, "\\\\.\\pipe\\videoPipe%d", num);
		vPipename = convertCharArrayToLPCWSTR(vpip);

		hVPipe = CreateNamedPipe(
			vPipename, // name of the pipe
			PIPE_ACCESS_OUTBOUND, // 1-way pipe -- send only
			PIPE_TYPE_BYTE, // send data as a byte stream
			1, // only allow 1 instance of this pipe
			0, // outbound buffer defaults to system default
			0, // no inbound buffer
			0, // use default wait time
			NULL // use default security attributes
			);

		if (!(hVPipe != INVALID_HANDLE_VALUE)) {
			if (GetLastError() != ERROR_PIPE_BUSY)
			{
				ofLogError("Video Pipe") << "Could not open video pipe.";
			}
			// All pipe instances are busy, so wait for 5 seconds. 
			if (!WaitNamedPipe(vPipename, 5000))
			{
				ofLogError("Video Pipe") << "Could not open video pipe: 5 second wait timed out.";
			}
		}

#endif
	}

	if (bRecordAudio) {
		this->sampleRate = sampleRate;
		audioChannels = channels;

#if defined( TARGET_OSX ) || defined( TARGET_LINUX )
		// recording audio, create a FIFO pipe
		audioPipePath = ofFilePath::getAbsolutePath("ofxarpipe" + ofToString(pipeNumber));

		ofStringReplace(audioPipePath, " ", "\\ ");

		if (!ofFile::doesFileExist(audioPipePath)) {

			string cmd = "bash --login -c 'mkfifo " + audioPipePath + "'";
			system(cmd.c_str());
		}
#endif
#ifdef TARGET_WIN32


		char apip[128];
		int num = ofRandom(1024);
		sprintf(apip, "\\\\.\\pipe\\audioPipe%d", num);
		aPipename = convertCharArrayToLPCWSTR(apip);

		hAPipe = CreateNamedPipe(
			aPipename,
			PIPE_ACCESS_OUTBOUND, // 1-way pipe -- send only
			PIPE_TYPE_BYTE, // send data as a byte stream
			1, // only allow 1 instance of this pipe
			0, // outbound buffer defaults to system default
			0, // no inbound buffer
			0, // use default wait time
			NULL // use default security attributes
			);

		if (!(hAPipe != INVALID_HANDLE_VALUE)) {
			if (GetLastError() != ERROR_PIPE_BUSY)
			{
				ofLogError("Audio Pipe") << "Could not open audio pipe.";
			}
			// All pipe instances are busy, so wait for 5 seconds. 
			if (!WaitNamedPipe(aPipename, 5000))
			{
				ofLogError("Audio Pipe") << "Could not open pipe: 5 second wait timed out.";
			}
		}

#endif
	}

	stringstream cmd;
	// basic ffmpeg invocation, -y option overwrites output file
#if defined( TARGET_OSX ) || defined( TARGET_LINUX )
	cmd << "bash --login -c '" << ffmpegLocation << (bIsSilent ? " -loglevel quiet " : " ") << "-y";
	if (bRecordAudio) {
		cmd << " -acodec pcm_s16le -f s16le -ar " << sampleRate << " -ac " << audioChannels << " -i " << audioPipePath;
	}
	else { // no audio stream
		cmd << " -an";
	}
	if (bRecordVideo) { // video input options and file
		cmd << " -r " << fps << " -s " << w << "x" << h << " -f rawvideo -pix_fmt " << pixelFormat << " -i " << videoPipePath << " -r " << fps;
	}
	else { // no video stream
		cmd << " -vn";
	}
	cmd << " " + outputString + "' &";

	//cerr << cmd.str();

	ffmpegThread.setup(cmd.str()); // start ffmpeg thread, will wait for input pipes to be opened

	if (bRecordAudio) {
		audioThread.setup(audioPipePath, &audioFrames);
	}
	if (bRecordVideo) {
		videoThread.setup(videoPipePath, &frames);
	}
#endif
#ifdef TARGET_WIN32
	//evidently there are issues with multiple named pipes http://trac.ffmpeg.org/ticket/1663

	if (bRecordAudio && bRecordVideo) {
		bool fSuccess;

		// Audio Thread

		stringstream aCmd;
		aCmd << ffmpegLocation << " -y " << " -f s16le -acodec " << audioCodec << " -ar " << sampleRate << " -ac " << audioChannels;
		aCmd << " -i " << convertWideToNarrow(aPipename) << " -b:a " << audioBitrate << " " << outputString << "_atemp" << audioFileExt;

		ffmpegAudioThread.setup(aCmd.str());
		ofLogNotice("FFMpeg Command") << aCmd.str() << endl;

		fSuccess = ConnectNamedPipe(hAPipe, NULL);
		if (!fSuccess)
		{
			LPTSTR errorText = NULL;

			FormatMessageW(
				// use system message tables to retrieve error text
				FORMAT_MESSAGE_FROM_SYSTEM
				// allocate buffer on local heap for error text
				| FORMAT_MESSAGE_ALLOCATE_BUFFER
				// Important! will fail otherwise, since we're not 
				// (and CANNOT) pass insertion parameters
				| FORMAT_MESSAGE_IGNORE_INSERTS,
				NULL,    // unused with FORMAT_MESSAGE_FROM_SYSTEM
				GetLastError(),
				MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
				(LPTSTR)&errorText,  // output 
				0, // minimum size for output buffer
				NULL);   // arguments - see note 
			wstring ws = errorText;
			string error(ws.begin(), ws.end());
			ofLogError("Audio Pipe") << "SetNamedPipeHandleState failed: " << error;
		}
		else {
			ofLogNotice("Audio Pipe") << "\n==========================\nAudio Pipe Connected Successfully\n==========================\n" << endl;
			audioThread.setup(hAPipe, &audioFrames);
		}

		// Video Thread

		stringstream vCmd;
		vCmd << ffmpegLocation << " -y " << " -r " << fps << " -s " << w << "x" << h << " -f rawvideo -pix_fmt " << pixelFormat;
		vCmd << " -i " << convertWideToNarrow(vPipename) << " -vcodec " << videoCodec << " -b:v " << videoBitrate << " " << outputString << "_vtemp" << movFileExt;

		ffmpegVideoThread.setup(vCmd.str());
		ofLogNotice("FFMpeg Command") << vCmd.str() << endl;

		fSuccess = ConnectNamedPipe(hVPipe, NULL);
		if (!fSuccess)
		{
			LPTSTR errorText = NULL;

			FormatMessageW(
				// use system message tables to retrieve error text
				FORMAT_MESSAGE_FROM_SYSTEM
				// allocate buffer on local heap for error text
				| FORMAT_MESSAGE_ALLOCATE_BUFFER
				// Important! will fail otherwise, since we're not 
				// (and CANNOT) pass insertion parameters
				| FORMAT_MESSAGE_IGNORE_INSERTS,
				NULL,    // unused with FORMAT_MESSAGE_FROM_SYSTEM
				GetLastError(),
				MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
				(LPTSTR)&errorText,  // output 
				0, // minimum size for output buffer
				NULL);   // arguments - see note 
			wstring ws = errorText;
			string error(ws.begin(), ws.end());
			ofLogError("Video Pipe") << "SetNamedPipeHandleState failed: " << error;
		}
		else {
			ofLogNotice("Video Pipe") << "\n==========================\nVideo Pipe Connected Successfully\n==========================\n" << endl;
			videoThread.setup(hVPipe, &frames);
		}
	}
	else {
		cmd << ffmpegLocation << " -y ";
		if (bRecordAudio) {
			cmd << " -f s16le -acodec " << audioCodec << " -ar " << sampleRate << " -ac " << audioChannels << " -i " << convertWideToNarrow(aPipename);
		}
		else { // no audio stream
			cmd << " -an";
		}
		if (bRecordVideo) { // video input options and file
			cmd << " -r " << fps << " -s " << w << "x" << h << " -f rawvideo -pix_fmt " << pixelFormat << " -i " << convertWideToNarrow(vPipename);
		}
		else { // no video stream
			cmd << " -vn";
		}
		if (bRecordAudio)
			cmd << " -b:a " << audioBitrate;
		if (bRecordVideo)
			cmd << " -vcodec " << videoCodec << " -b:v " << videoBitrate;
		cmd << " " << outputString << movFileExt;

		ofLogNotice("FFMpeg Command") << cmd.str() << endl;

		ffmpegThread.setup(cmd.str()); // start ffmpeg thread, will wait for input pipes to be opened

		if (bRecordAudio) {
			//this blocks, so we have to call it after ffmpeg is listening for a pipe
			bool fSuccess = ConnectNamedPipe(hAPipe, NULL);
			if (!fSuccess)
			{
				LPTSTR errorText = NULL;

				FormatMessageW(
					// use system message tables to retrieve error text
					FORMAT_MESSAGE_FROM_SYSTEM
					// allocate buffer on local heap for error text
					| FORMAT_MESSAGE_ALLOCATE_BUFFER
					// Important! will fail otherwise, since we're not 
					// (and CANNOT) pass insertion parameters
					| FORMAT_MESSAGE_IGNORE_INSERTS,
					NULL,    // unused with FORMAT_MESSAGE_FROM_SYSTEM
					GetLastError(),
					MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
					(LPTSTR)&errorText,  // output 
					0, // minimum size for output buffer
					NULL);   // arguments - see note 
				wstring ws = errorText;
				string error(ws.begin(), ws.end());
				ofLogError("Audio Pipe") << "SetNamedPipeHandleState failed: " << error;
			}
			else {
				ofLogNotice("Audio Pipe") << "\n==========================\nAudio Pipe Connected Successfully\n==========================\n" << endl;
				audioThread.setup(hAPipe, &audioFrames);
			}
		}
		if (bRecordVideo) {
			//this blocks, so we have to call it after ffmpeg is listening for a pipe
			bool fSuccess = ConnectNamedPipe(hVPipe, NULL);
			if (!fSuccess)
			{
				LPTSTR errorText = NULL;

				FormatMessageW(
					// use system message tables to retrieve error text
					FORMAT_MESSAGE_FROM_SYSTEM
					// allocate buffer on local heap for error text
					| FORMAT_MESSAGE_ALLOCATE_BUFFER
					// Important! will fail otherwise, since we're not 
					// (and CANNOT) pass insertion parameters
					| FORMAT_MESSAGE_IGNORE_INSERTS,
					NULL,    // unused with FORMAT_MESSAGE_FROM_SYSTEM
					GetLastError(),
					MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
					(LPTSTR)&errorText,  // output 
					0, // minimum size for output buffer
					NULL);   // arguments - see note 
				wstring ws = errorText;
				string error(ws.begin(), ws.end());
				ofLogError("Video Pipe") << "SetNamedPipeHandleState failed: " << error;
			}
			else {
				ofLogNotice("Video Pipe") << "\n==========================\nVideo Pipe Connected Successfully\n==========================\n" << endl;
				videoThread.setup(hVPipe, &frames);
			}
		}

	}
#endif

	bIsInitialized = true;
	bIsRecording = false;
	bIsPaused = false;

	startTime = 0;
	recordingDuration = 0;
	totalRecordingDuration = 0;

	return bIsInitialized;
}
Example #29
Winner::Winner(App *a){
    app = a;
    ofLogNotice() << "State: " << toString();
    timer = ofGetElapsedTimeMillis();
    back = 0;
};
//--------------------------------------------------------------
void testApp::setup()
{
	ofSetWindowShape(640*2,800);
	ofSetBackgroundColor(0);
	ofSetLogLevel(OF_LOG_NOTICE);
	ofSetVerticalSync(true);
	glEnable(GL_DEPTH_TEST);
	ofEnableLighting();
	light.enable();
	pathLines.setMode(OF_PRIMITIVE_LINE_STRIP);

	camFixed = false;
	//PXC
	mSession = PXCUPipeline_Create();

	
	if(!PXCUPipeline_Init(mSession, (PXCUPipeline)(PXCU_PIPELINE_COLOR_VGA|PXCU_PIPELINE_DEPTH_QVGA|PXCU_PIPELINE_GESTURE)))
	{
		ofLogNotice() << "Unable to initialize session" << endl;
		return;
	}

	if(PXCUPipeline_QueryRGBSize(mSession, &mCW, &mCH))
	{

		mRGBMap = new unsigned char[mCW*mCH*4];
		mRGBTexture.allocate(mCW,mCH,GL_RGBA);
	}
	if(PXCUPipeline_QueryLabelMapSize(mSession, &mLW, &mLH))
	{
		mLabelMap = new unsigned char[mLW*mLH];
		mLabelTexture.allocate(mLW,mLH,GL_LUMINANCE);
	}
	if(PXCUPipeline_QueryDepthMapSize(mSession, &mLW, &mLH))
	{
		mDepthBuffer = new short[mLW*mLH];
		mDepthMap = new unsigned char[mLW*mLH];
		mDepthTexture.allocate(mLW,mLH,GL_LUMINANCE);
	}
	if(PXCUPipeline_QueryIRMapSize(mSession, &mLW, &mLH))
	{
		mIRBuffer = new short[mLW*mLH];
		mIRMap = new unsigned char[mLW*mLH];
		mIRTexture.allocate(mLW,mLH,GL_LUMINANCE);
	}

	//cam
	cam.resetTransform();
	cam.setFov(60);
	cam.clearParent();
	cam.setPosition(0, 10, 120);
	cam.setParent(hero);
	camSwith = false;
	frc.set(0,0,0);
	//node 
	float freqMult = 1;
	float amp = 30;	
	float scale = 1;
	hero_Pos.set(0,0,0);
	hero.setPosition(ofVec3f(sin(ofGetElapsedTimef() * freqMult) * amp, cos(ofGetElapsedTimef() * freqMult) * amp, sin(ofGetElapsedTimef() * freqMult * 0.7) * amp));
	hero.setOrientation(ofPoint(0,0,0));
	hero.setScale(scale);
	foc.set(0,0,0);

	for (int i = 0; i < 600; i ++)
	{
		freqMult = ofRandom(0.5,2);
		amp = ofRandom(10,30);	
		scale = ofRandom(1,15);
		ofNode temNode;
		temNode.setPosition(ofRandom(-1000,1000),ofRandom(-1000,1000),ofRandom(-50000,0));
		temNode.setOrientation(ofVec3f(sin(ofGetElapsedTimef() * freqMult * 0.2) * amp * 5, cos(ofGetElapsedTimef() * freqMult * 0.2) * amp * 5, sin(ofGetElapsedTimef() * freqMult * 0.2 * 0.7) * amp * 5));
		temNode.setScale(scale);
		nodes.push_back(temNode);
	}
}