//--------------------------------------------------------------
// Grab a camera frame, threshold it against two target hues and run a
// contour finder over each resulting binary mask.
void ofApp::update() {
	grabber.update();
	if (!grabber.isFrameNew()) return;

	// Mirror the RGB frame horizontally, then derive an HSV copy split
	// into its three grayscale planes.
	rgbImage.setFromPixels(grabber.getPixels().getData(), GRABBER_WIDTH, GRABBER_HEIGHT);
	rgbImage.mirror(false, true);
	hsbImage = rgbImage;
	hsbImage.convertRgbToHsv();
	hsbImage.convertToGrayscalePlanarImages(hue, saturation, brightness);

	const int numPixels = GRABBER_WIDTH * GRABBER_HEIGHT;
	for (int px = 0; px < numPixels; px++) {
		const int pixelHue = hue.getPixels()[px];
		// White where the pixel hue is close to the target, black elsewhere.
		filtered.getPixels()[px]  = ofInRange(pixelHue, findHue - 5, findHue + 5) ? 255 : 0;
		filtered2.getPixels()[px] = ofInRange(pixelHue, findHue_2 - 2, findHue_2 + 2) ? 255 : 0;
	}
	filtered.flagImageChanged();   // tell the images their pixels changed
	filtered2.flagImageChanged();

	// Blob search: at most 1 blob on the first mask, 3 on the second.
	contours.findContours(filtered, 50, numPixels / 2, 1, false);
	contours2.findContours(filtered2, 10, numPixels / 4, 3, false);
}
Пример #2
0
void Interface::mousePressed( int x, int y ){
    for (int i=0; i<slider.size(); i++) {
        Slider &s = slider[i];
        ofRectangle r = s.rect;
        if ( ofInRange( x, r.x, r.x + r.width ) && ofInRange( y, r.y, r.y + r.height ) ) {
            selected = i;
            *s.value = ofMap( x, r.x, r.x + r.width, s.minV, s.maxV, true );
        }
    }
}
Пример #3
0
//---------------------------------------------------------------------
// Mouse event handling for the navigator's time-segment widget.
// type: 0 - move, 1 - press, 2 - drag, 3 - release
// mouseState - shared capture pointer: non-null means some widget owns
// the mouse; owning widget stores `this` there on press and clears it
// on release, so other widgets ignore events in between.
void kuNavigator::mouse( int x, int y, int button, void *&mouseState, int type ) {
    bool mouseCaptured = ( mouseState != 0 );
    bool captured = mouseCaptured && (mouseState == this);
    // Another widget owns the mouse - ignore the event entirely.
    if ( mouseCaptured && !captured ) { return; }
    
    if ( type == 0 ) {  //move
        return;
    }
    
    // Pixel positions of the segment's left/right edges.
    float time0 = time0Pix();
    float time1 = time1Pix();
    if ( type == 1 ) { //press
        // Accept presses within 5 px outside the segment horizontally
        // and within the widget's vertical band (plus an 8 px margin).
        if ( ofInRange( x, time0 - 5, time1 + 5 ) && ofInRange( y, y_, y_+h_+8 ) ) {
            // Near the left edge -> drag the left end (the >= branch
            // handles degenerate segments narrower than the 5 px grip).
            dragLeft = ( (time0 + 5 < time1 && x < time0 + 5 )
                        || (time0 + 5 >= time1 && x < time0 ) );
            dragRight = ( (time1 - 5 > time0 && x > time1 - 5 )
                                     || (time1 - 5 <= time0 && x > time1 ) );
            if ( !dragLeft && !dragRight ) {    // very narrow segment - drag both ends
                dragLeft = dragRight = true;
            }
            // Remember the offsets between the grab point and each edge
            // so the segment doesn't jump on the first drag event.
            dragLeftDelta = ( time0 - x );
            dragRightDelta = ( time1 - x );
            state = 1;
            mouseState = this;
        }
        return;
    }
    if ( type == 2 ) { //drag
        if ( captured ) {
            // New edge times from the current mouse x plus the grab offsets;
            // edges not being dragged keep their stored values.
            float newtime0 = ( dragLeft ) ? ( pixToTime(dragLeftDelta + x)) : time0_;
            float newtime1 = ( dragRight ) ? ( pixToTime(dragRightDelta + x)) : time1_;
            
            // Never let the edges cross.
            if ( dragLeft && newtime0 > newtime1 ) newtime0 = newtime1;
            newtime1 = max( newtime0, newtime1 );
            float len = newtime1 - newtime0;
            // Clamp to [0, duration]; when both ends are dragged, shift the
            // whole segment by preserving its length.
            if ( !ofInRange( newtime0, 0, duration_ ) ) {
                newtime0 = ofClamp( newtime0, 0, duration_ );
                if ( dragRight ) newtime1 = newtime0 + len;
            }
            if ( !ofInRange( newtime1, 0, duration_ ) ) {
                newtime1 = ofClamp( newtime1, 0, duration_ );
                if ( dragLeft ) newtime0 = newtime1 - len;
            }
            setSegment( newtime0, newtime1 );
        }
        return;
    }
    if ( type == 3 ) { //release
        if ( captured ) {
            mouseState = 0;   // release the mouse capture
        }
        state = 0;
        return;
    }
}
Пример #4
0
OFX_OBJLOADER_BEGIN_NAMESPACE

/// Load a Wavefront OBJ file into an ofMesh.
/// @param path            OBJ file path, resolved through ofToDataPath().
/// @param mesh            output mesh; cleared before loading.
/// @param generateNormals when true, compute facet normals plus smoothed
///                        vertex normals (90 degree crease angle).
/// @param flipFace        when true, reverse triangle winding order.
void load(string path, ofMesh& mesh, bool generateNormals, bool flipFace)
{
	path = ofToDataPath(path);

	mesh.clear();

	GLMmodel* m = glmReadOBJ((char*)path.c_str());

	// glmReadOBJ returns NULL when the file is missing or unreadable;
	// without this check every m-> access below dereferences null.
	if (m == NULL)
	{
		return;
	}

	if (generateNormals)
	{
		glmFacetNormals(m);
		glmVertexNormals(m, 90);
	}

	if (flipFace)
	{
		glmReverseWinding(m);
	}

	// GLM stores indexed triangles; expand them into a flat mesh.
	// numtriangles is unsigned, so use an unsigned counter.
	for (unsigned int j = 0; j < m->numtriangles; j++)
	{
		const GLMtriangle &tri = m->triangles[j];

		for (int k = 0; k < 3; k++)
		{
			GLfloat *v = m->vertices + (tri.vindices[k] * 3);
			mesh.addVertex(ofVec3f(v[0], v[1], v[2]));

			if (m->colors)
			{
				GLfloat *c = m->colors + (tri.vindices[k] * 3);
				mesh.addColor(ofFloatColor(c[0], c[1], c[2]));
			}

			// Some OBJ files omit normals/texcoords on part of the faces;
			// only emit attributes whose indices are in range.
			if (m->normals && ofInRange(tri.nindices[k], 0, m->numnormals))
			{
				GLfloat *n = m->normals + (tri.nindices[k] * 3);
				mesh.addNormal(ofVec3f(n[0], n[1], n[2]));
			}

			if (m->texcoords && ofInRange(tri.tindices[k], 0, m->numtexcoords))
			{
				GLfloat *c = m->texcoords + (tri.tindices[k] * 2);
				mesh.addTexCoord(ofVec2f(c[0], c[1]));
			}
		}
	}

	glmDelete(m);
}
Пример #5
0
//--------------------------------------------------------------------------------
// Returns the overlapping region of r1 and r2, or a zero rectangle when they
// do not intersect.
//
// Typical usage for operating on the overlap of two image ROIs:
//   ofRectangle iRoi = img1.getIntersectionROI( img1.getROI(), img2.getROI() );
//   if( iRoi.width > 0 && iRoi.height > 0 ) {
//       img1.pushROI(); img1.setROI( iRoi );
//       img2.pushROI(); img2.setROI( iRoi );
//       // do image operation here
//       img1.popROI(); img2.popROI();
//   }
ofRectangle ofxCvImage::getIntersectionROI( const ofRectangle& r1, const ofRectangle& r2 ) {
    // Integer edges of both rectangles.
    const int ax1 = (int)r1.x;
    const int ay1 = (int)r1.y;
    const int ax2 = (int)(r1.x + r1.width);
    const int ay2 = (int)(r1.y + r1.height);

    const int bx1 = (int)r2.x;
    const int by1 = (int)r2.y;
    const int bx2 = (int)(r2.x + r2.width);
    const int by2 = (int)(r2.y + r2.height);

    // The rectangles intersect when they overlap on both axes:
    // one rectangle's start edge lies within the other's span.
    const bool overlapX = ofInRange(bx1, ax1, ax2) || ofInRange(ax1, bx1, bx2);
    const bool overlapY = ofInRange(by1, ay1, ay2) || ofInRange(ay1, by1, by2);

    if ( !(overlapX && overlapY) ) {
        return ofRectangle( 0,0, 0,0 );
    }

    // Intersection = max of the starts, min of the ends.
    const int ix1 = MAX( ax1, bx1 );
    const int iy1 = MAX( ay1, by1 );
    const int ix2 = MIN( ax2, bx2 );
    const int iy2 = MIN( ay2, by2 );
    return ofRectangle( ix1, iy1, ix2 - ix1, iy2 - iy1 );
}
Пример #6
0
//--------------------------------------------------------------
// Advance the particle by dt seconds: apply the emitter's force field,
// friction and Euler integration, and retire it when its lifetime ends.
void Particle::update( float dt ){
	if ( !live ) {
		return;
	}

	// Rotate the velocity vector around the z axis.
	vel.rotate( 0, 0, param.rotate * dt );

	ofPoint acc;                               // accumulated acceleration
	ofPoint delta = pos - param.eCenter;       // offset from the emitter center
	float dist = delta.length();
	if ( ofInRange( dist, 0, param.eRad ) ) {  // inside the force radius?
		delta.normalize();

		// Radial attraction/repulsion force.
		acc += delta * param.force;

		// Tangential (spinning) force, perpendicular to delta.
		acc.x += -delta.y * param.spinning;
		acc.y +=  delta.x * param.spinning;
	}
	vel += acc * dt;                 // Euler step for velocity
	vel *= ( 1 - param.friction );   // friction damping

	pos += vel * dt;                 // Euler step for position

	// Age the particle; past its lifetime it is considered dead.
	time += dt;
	if ( time >= lifeTime ) {
		live = false;
	}
}
Пример #7
0
void Particle::update( float dt ){

	if ( live ) 
	{
		//Rotate vel
		vel.rotate( 0, 0, param.rotate * dt );

		ofPoint acc;
		ofPoint delta = pos - param.eCenter;
		float len = delta.length();
		if (ofInRange(len,0,param.eRad))
		{
			delta.normalize();

			//Силы притяжения/отталкивания
			acc += delta * param.force;

			//Закручивающая сила
			acc.x += -delta.y * param.spinning;
			acc.y += delta.x * param.spinning;
		}
		vel += acc *dt;
		vel *= (1 - param.friction);

		//Update pos
		pos += vel * dt; //Euler method
		//Update time and check if particle should die
		time += dt;
		if ( time >= lifeTime )
		{
			live = false; //Particle is now considered as died
		}
	}
}
Пример #8
0
//--------------------------------------------------------------
// Build a depth-difference image: for every output pixel (X, Y) in the
// W x H output, bilinearly interpolate between the four calibration
// corners to find the matching depth-image coordinate, compare current
// depth against the stored background depth, and map the difference
// (minV..maxV mm) to 0..255 in outPixels.
// NOTE(review): W, H, corners, backgroundDepth, outPixels and outImage
// are members declared elsewhere; W/H appear to be the output resolution
// while w/h are the depth-image resolution -- confirm in the header.
void ofApp::update() {

    openNIDevice.update();		//Update depth camera
    // Only run the analysis once calibration has produced all 4 corners.
    if ( !calibrating && corners.size() == 4 ) {
        //Analyze depth

        //Get current depth image
        ofShortPixels &input = openNIDevice.getDepthRawPixels();

        //Process pixels
        int w = input.getWidth();
        int h = input.getHeight();
        int minV = 30;		//Minimal distance in mm
        int maxV = 150; 		//Maximal distance in mm
        for (int Y=0; Y<H; Y++) {
            for (int X=0; X<W; X++) {
                //Process pixel (X, Y)

                //Transform screen coordinates (X, Y)
                //to depth image coordinates (x, y)
                float a = float(X) / W;
                float b = float(Y) / H;
                // Bilinear interpolation of the four calibration corners.
                ofPoint p =
                    (1-a) * (1-b) * corners[0]
                    + a * (1-b) * corners[1]
                    + a * b * corners[2]
                    + (1-a) * b * corners[3];

                int x = int( p.x );
                int y = int( p.y );

                // Skip points that map outside the depth image.
                if ( x >= 0 && x < w && y >= 0 && y < h ) {
                    //Getting depth values
                    int inputZ = input.getPixels()[ x + w * y ];
                    int backgroundZ = backgroundDepth.getPixels()[ x + w * y ];

                    int delta;
                    //Check for zero values - it means that depth camera
                    //does not compute distance in the pixel
                    if ( inputZ != 0 && backgroundZ != 0 ) {
                        delta = backgroundZ - inputZ;
                    }
                    else {
                        delta = -1;   // sentinel: no reliable depth here
                    }
                    //Output value
                    // Pixels outside [minV, maxV] are left untouched in
                    // outPixels (no else branch clears them).
                    if ( ofInRange( delta, minV, maxV ) ) {
                        int value = ofMap( delta, minV, maxV, 0, 255, true );
                        outPixels.getPixels()[ X + W * Y ] = value;
                    }
                }


            }
        }
        outImage.setFromPixels( outPixels );
    }

}
Пример #9
0
//--------------------------------------------------------------------------------
// Sanity-check a Kinect skeleton frame. Returns false for frames that are
// untracked, implausibly sized, bent over too far, or anatomically
// impossible (feet above head, knees above shoulders).
// NOTE(review): screen coordinates here have y increasing downward, so
// "foot above head" is foot.y < head.y.
bool ofSkeletonFrame::isValid() 
{ 
	// z == -1 on the first joint marks an untracked/empty frame.
	if(joints[0].z == -1)
		return false;

    // Compute the 2D bounding box of all joints.
    ofPoint min(FLT_MAX, FLT_MAX);
    ofPoint max(0, 0);
    for(int i=0; i<NUI_SKELETON_POSITION_COUNT; i++)
    {        
        if(joints[i].x > max.x) max.x = joints[i].x;
        if(joints[i].y > max.y) max.y = joints[i].y;
        if(joints[i].x < min.x) min.x = joints[i].x;
        if(joints[i].y < min.y) min.y = joints[i].y;
    }
           
    ofRectangle boundingRect(min.x, min.y, max.x-min.x, max.y-min.y);
    float area = boundingRect.width * boundingRect.height;

    // Reject skeletons whose on-screen area is implausibly small or large.
    if(!ofInRange(area, minValidArea, maxValidArea)) {
        if(area!=0) ofLog(OF_LOG_VERBOSE, "Area ("+ofToString(area)+") not in range");
        return false;
    }
    
    // If the foot is higher than the head, we've got a f****d up skeleton
    if(joints[NUI_SKELETON_POSITION_FOOT_LEFT].y < joints[NUI_SKELETON_POSITION_HEAD].y) {
        ofLog(OF_LOG_VERBOSE, "not valid bcz foot is above head");
        return false;
    }
    
    // Torso bent more than 10 units at the spine -- likely a bad fit.
    // NOTE(review): getAngleABC's unit (degrees?) is defined elsewhere.
    if(getAngleABC(NUI_SKELETON_POSITION_HIP_CENTER, NUI_SKELETON_POSITION_SPINE, NUI_SKELETON_POSITION_SHOULDER_CENTER) > 10) {
        ofLog(OF_LOG_VERBOSE, "not valid bcz of hip->spine->shoulder angle");
        return false;
    }
    
    // Skeleton must be at least minValidHeight tall on screen.
    if(boundingRect.height < minValidHeight) { 
        ofLog(OF_LOG_VERBOSE, "Min height not met. Height="+ofToString(boundingRect.height));
        return false;
    }
    
    if(joints[NUI_SKELETON_POSITION_FOOT_RIGHT].y < joints[NUI_SKELETON_POSITION_HEAD].y) {
		ofLog(OF_LOG_VERBOSE, "Right foot is above head");
        return false;
    }
    
    if((joints[NUI_SKELETON_POSITION_KNEE_LEFT].y < joints[NUI_SKELETON_POSITION_SHOULDER_CENTER].y)) {
		ofLog(OF_LOG_VERBOSE, "Left knee is above shoulders");
        return false;
	}
    
    if((joints[NUI_SKELETON_POSITION_KNEE_RIGHT].y < joints[NUI_SKELETON_POSITION_SHOULDER_CENTER].y)) {
		ofLog(OF_LOG_VERBOSE, "Right knee is above shoulders");
        return false;
	}
    
	return true;
}
Пример #10
0
// Jump playback to the given frame index (no-op for out-of-range
// indices or when already on that frame).
//
// Fix: ofInRange() is inclusive at both ends, so the previous test
// ofInRange(index, 0, frames.size()) accepted index == frames.size()
// and read one element past the end of `frames`. Use a half-open
// bounds check instead.
void ofxBvh::setFrame(int index)
{
	if (index >= 0 && index < (int)frames.size() && getFrame() != index)
	{
		currentFrame = frames[index];
		play_head = (float)index * frame_time;
		
		need_update = true;   // defer the skeleton refresh to update()
	}
}
Пример #11
0
//--------------------------------------------------------------
// Build a binary mask of pixels whose hue lies within +/-7 of
// colorDetected, then return the blobs found in that mask.
ofxCvContourFinder Vision::findColor(int colorDetected){	
    ofxCvContourFinder result;

    // Threshold the hue plane into a white/black mask.
    const int total = capW * capH;
    for (int px = 0; px < total; px++) {
        bool match = ofInRange(hue.getPixels()[px], colorDetected - 7, colorDetected + 7);
        filtered.getPixels()[px] = match ? 255 : 0;
    }

    filtered.flagImageChanged();

    // Search for a single blob between 5% of the frame and the full frame.
    result.findContours(filtered,  (capW*capH)/20, (capW*capH), 1, false);

    return result;
}
Пример #12
0
//---------------------------------------------------------------------
// Mouse event handling for a (possibly toggling) button.
//type 0 - move, 1 - press, 2 - drag, 3 - release
// mouseState is a shared capture pointer: non-null means some widget
// owns the mouse; this button stores `this` on press inside its bounds
// and clears it on release.
void kuButton::mouse( int x, int y, int button, void *&mouseState, int type ) {
    bool mouseCaptured = ( mouseState != 0 );
    bool captured = mouseCaptured && (mouseState == this);
    // Another widget owns the mouse - ignore the event.
    if ( mouseCaptured && !captured ) { return; }

    // Hidden/disabled buttons release any capture they hold and bail out.
    if ( !visible_ || !enabled_ ) {
        if ( captured ) {
            mouseState = 0;
            state_ = 0;
        }
        return;
    }
    
    if ( type == 0 ) {  //move
        return;
    }
    if ( type == 1 ) { //press
        // Press inside the button rectangle: capture the mouse, mark
        // pressed, and flip the down state for toggle buttons.
        if ( ofInRange( x, X_, X_ + W_ ) && ofInRange( y, Y_, Y_ + H_ ) ) {
            state_ = 1;
            mouseState = this;
            pressed_ = true;
            if ( toggle_ ) {
                down_ = !down_;
            }
        }
        return;
    }
    // Drag/release are only meaningful while we own the mouse.
    if ( !captured ) { return; }

    if ( type == 2 ) { //drag
        return;
    }
    if ( type == 3 ) { //release
        state_ = 0;
        mouseState = 0;   // give up the capture
        return;
    }
    
}
Пример #13
0
//--------------------------------------------------------------
void ofApp::update(){
    
    movie.update();
    
    if (movie.isFrameNew()) {
        
        //copy webcam pixels to rgb image
        rgb.setFromPixels(movie.getPixels(), w, h);
        
        //mirror horizontal
        rgb.mirror(false, true);
        
        //duplicate rgb
        hsb = rgb;
        
        //convert to hsb
        hsb.convertRgbToHsv();
        
        //store the three channels as grayscale images
        hsb.convertToGrayscalePlanarImages(hue, sat, bri);
        
        //filter image based on the hue value were looking for
        for (int i=0; i<w*h; i++) {
            filtered.getPixels()[i] = ofInRange(hue.getPixels()[i],findHue-5,findHue+5) ? 255 : 0;
        }
        
        filtered.flagImageChanged();
        //run the contour finder on the filtered image to find blobs with a certain hue
        contours.findContours(filtered, 50, w*h/2, 1, false);
    }
    
    for (int j=0; j<5; j++) {
        for (int i=0; i<contours.nBlobs; i++) {
            if (contours.blobs[i].centroid.x > rectangles[j][0] && contours.blobs[i].centroid.x < rectangles[j][2] + rectangles[j][0] && contours.blobs[i].centroid.y > rectangles[j][1] && contours.blobs[i].centroid.y < rectangles[j][3] + rectangles[j][1])
            {
                cout << j << endl;
                ofxOscMessage m;
                m.setAddress("/boxNumber");
                m.addIntArg(j);
                sender->sendMessage(m);
            }
            else {
                cout << "No contact" << endl;
                
            };
        };
    };
    
}
Пример #14
0
// Darken a random subset of pixels to simulate noise. The buffer is
// walked in steps of 3 (one step per RGB triplet); NOISE_AMOUNT is the
// per-pixel hit probability and NOISE_STRENGTH how strongly a hit pixel
// is dimmed.
void MSGlitchNoise::update(ofPixels &sourcePixels)
{
    MSGlitch::update(sourcePixels);
    if (!hasStarted) return;

    const float dimFactor = 1.0f - NOISE_STRENGTH;  // constant per frame
    for (int px = 0; px < sourcePixels.size(); px += 3)
    {
        // ofRandom(0,1) < NOISE_AMOUNT, expressed as a range test.
        if (ofInRange(ofRandom(0, 1), 0.0f, NOISE_AMOUNT))
        {
            sourcePixels[px + 0] *= dimFactor;
            sourcePixels[px + 1] *= dimFactor;
            sourcePixels[px + 2] *= dimFactor;
        }
    }
}
// Advance the particle by elapsedTime seconds inside the emitter's
// force field, with friction damping and Euler integration.
//
// Fix: the spinning components previously *assigned* to
// acceleration.x/.y ("=") which silently discarded the radial force
// accumulated just above; the sibling Particle implementations in this
// codebase all accumulate with "+=".
void Particle::update(float elapsedTime) {
	if (!isAlive) return;
	speed.rotate(0, 0, param->rotate* elapsedTime);

	ofVec3f acceleration;
	ofVec3f distance = position - param->startingPoint;
	// Forces only apply while the particle is inside the emitter radius.
	if (ofInRange(distance.length(), 0, param->radius)) {
		distance.normalize(); // unit length, otherwise particles shoot very far

		// Radial attraction/repulsion.
		acceleration += distance * param->force;
		// Tangential (spinning) force, perpendicular to the radial direction.
		acceleration.x += -distance.y * param->spinning;
		acceleration.y +=  distance.x * param->spinning;
	}

	speed += acceleration * elapsedTime;
	speed *= (1 - param->friction); // larger friction slows the particle faster

	position += speed* elapsedTime;
	age += elapsedTime;
	if (age >= maxAge) isAlive = false;
}
Пример #16
0
// Advance the particle by dt seconds. Inside the emitter radius the
// particle receives an outward force plus a counter-clockwise swirl;
// friction damps the velocity and the particle dies after lifeTime.
void Particle::update(float dt){
    // Dead particles need no work.
    if (!live) return;

    // Steer the particle over its lifetime.
    vel.rotate(0, 0, param->rotate * dt);

    ofVec3f acc;
    ofVec3f delta = pos - param->eCenter;
    const float distFromCenter = delta.length();
    if (ofInRange(distFromCenter, 0, param->eRad)) {
        // Within the force radius: outward push plus a swirl that is
        // perpendicular to the radial direction.
        delta.normalize();
        acc += delta * param->force;
        acc.x += -delta.y * param->spinning;
        acc.y += delta.x * param->spinning;
    }

    vel += acc * dt;              // integrate acceleration into velocity
    vel *= (1 - param->friction); // frictional damping

    pos += vel * dt;              // integrate velocity into position
    time += dt;                   // age the particle
    if (time >= lifeTime) {
        live = false;             // too old -> kill it
    }
}
Пример #17
0
// Advance the particle by dt seconds inside the emitter force field.
//
// Fixes over the previous version:
//  * the position was integrated twice per update (`pos += vel*dt`
//    appeared under both the "friction" and "update position" comments),
//    doubling the effective speed -- it is now applied once;
//  * removed the per-frame `cout` of delta (debug spam in a hot loop).
// NOTE(review): unlike the sibling Particle classes there is no friction
// damping here, and the "spinning" force uses (delta.x, delta.y) rather
// than the perpendicular (-delta.y, delta.x) -- left untouched; confirm
// whether that is intended.
void Particle::update(float dt) {
    if(live){
        //rotate velocity
        vel.rotate(0,0,param.rotate*dt);
        
        ofPoint acc;
        ofPoint delta = pos -param.eCenter; //distance between particle and emitter
        float len = delta.length(); //gets length of above distance
        
        if (ofInRange (len, 0, param.eRad)){
            delta.normalize();
            
            //attraction & repulsion forces
            acc+= delta*param.force;
            
            //spinning forces
            acc.x += delta.x*param.spin;
            acc.y += delta.y*param.spin;
        }
        
        vel+= acc*dt;
        
        //update position (single Euler step per update)
        pos += vel*dt;
        
        //update time and check if it should die
        time+= dt;
        if (time>= lifeTime) {
            live= false;
        }
    }
    
}
// Set the position of vertex iVertNum; out-of-range indices are ignored.
//
// Fix: the previous test ofInRange(iVertNum, 0, vertices.size()-1)
// underflows when the vertex list is empty (size()-1 wraps to a huge
// unsigned value), letting any index through to an out-of-bounds write.
// An explicit signed half-open bounds check avoids both the underflow
// and the unsigned wrap.
void ofxLineSegmentObject::setVertexPos(int iVertNum, float iX, float iY, float iZ)
{
	if (iVertNum >= 0 && iVertNum < (int)vertices.size()) {
		vertices[iVertNum].set(iX, iY, iZ);
	}
}
// True when point p lies within the window bounds (edges inclusive).
bool colonyCell::isInsideBoard(ofPoint p){
    const bool insideX = ofInRange(p.x, 0, ofGetWidth());
    const bool insideY = ofInRange(p.y, 0, ofGetHeight());
    return insideX && insideY;
}
Пример #20
0
//--------------------------------------------------------------
// Track a colored marker in both kinect RGB feeds, estimate each
// marker's 3D position and tilt from depth samples taken around the blob
// centroid, and broadcast the results over OSC.
//
// Fixes over the previous revision:
//  * removed a stray hsb.convertToGrayscalePlanarImage(hue, 0) that was
//    left inside the kinect2 branch and re-extracted kinect1's hue plane
//    from a stale buffer;
//  * the kinect2 "not found" branch now sends message2 (it previously
//    sent the unrelated kinect1 message);
//  * the `!centroidM.x == 0` precedence puzzles are rewritten as explicit
//    `!= 0` comparisons (same behavior, readable);
//  * a position message is only sent when it was actually filled in --
//    the old code could send an ofxOscMessage with no address set;
//  * dropped the no-op `if(centroidM.y==0){ centroidM.y=0; }` and the
//    large commented-out experiments.
void testApp::update(){
    
	ofSetWindowTitle(ofToString(ofGetFrameRate(), 0));


	//////////// KINECT 1
    kinect1.update();
    if (kinect1.isFrameNew()) {
        
		// Copy the RGB frame, downscale it, and extract its hue plane.
		hsb.resize(w, h);
        hsb.setFromPixels(kinect1.getPixels(), w, h);
		hsb.resize(w/resize, h / resize);

        //convert to hsb
        hsb.convertRgbToHsv();

        //store the hue channel as a grayscale image, denoised
        hsb.convertToGrayscalePlanarImage(hue, 0);
		hue.erode();
		hue.dilate();

        //filter image based on the hue value we're looking for
		for (int i=0; i<hue.width*hue.height; i++) {
			filtered.getPixels()[i] = ofInRange(hue.getPixels()[i], findHue - 15,  findHue + 15) ? 255 : 0;
        }
        filtered.flagImageChanged();

        //run the contour finder on the filtered image to find blobs with a certain hue
        contours.findContours(filtered, 50, (w/resize)*(h/resize)/2, 1, false);

		if (contours.nBlobs > 0) {

			kinect1CentroidX = contours.blobs[0].centroid.x;
			kinect1CentroidY = contours.blobs[0].centroid.y;

			width = contours.blobs[0].boundingRect.width / 2;
			height = contours.blobs[0].boundingRect.height / 2;

			// Sample world coordinates above/below/left/right of the
			// centroid, clamping the offset at the image edges.
			if (kinect1CentroidY - offset < 0) {
				topM = kinect1.getWorldCoordinateAt( kinect1CentroidX * (int)(resize), (kinect1CentroidY) * (int)(resize) );
			}
			else{
				topM = kinect1.getWorldCoordinateAt( kinect1CentroidX * (int)(resize), (kinect1CentroidY - offset) * (int)(resize) );
			}

			if (kinect1CentroidY + offset > h / resize) {
				bottomM = kinect1.getWorldCoordinateAt( kinect1CentroidX * (int)(resize), (kinect1CentroidY) * (int)(resize) );
			} else {
				bottomM = kinect1.getWorldCoordinateAt( kinect1CentroidX * (int)(resize), (kinect1CentroidY + offset) * (int)(resize) );
			}

			if (kinect1CentroidX - offset < 0) {
				leftM = kinect1.getWorldCoordinateAt( (kinect1CentroidX) * (int)(resize) , kinect1CentroidY * (int)(resize) );
			} else {
				leftM = kinect1.getWorldCoordinateAt( (kinect1CentroidX - offset) * (int)(resize) , kinect1CentroidY * (int)(resize) );
			}

			if (kinect1CentroidX + offset > w / 4) {
				rightM = kinect1.getWorldCoordinateAt( ( kinect1CentroidX) * (int)(resize), kinect1CentroidY * (int)(resize) );
			} else {
				rightM = kinect1.getWorldCoordinateAt( ( kinect1CentroidX + offset) * (int)(resize), kinect1CentroidY * (int)(resize) );
			}

			centroidM = kinect1.getWorldCoordinateAt( kinect1CentroidX * (int)(resize), kinect1CentroidY * (int)(resize) );

			// Tilt estimate: depth difference between opposite sample
			// points, ignored when implausibly large (> off mm).
			int off = 25;

			if ( leftM.z - rightM.z > -off && leftM.z - rightM.z < off) {
				racket1AngleVerti = leftM.z - rightM.z;
			}

			if ( topM.z - bottomM.z > -off && topM.z - bottomM.z < off) {
				racket1AngleHori = topM.z - bottomM.z;
			}

			// Never report a depth closer than 400 mm.
			if (centroidM.z < 400) {
				centroidM.z = 400;
			}

			// Recenter vertically (kinect 1).
			centroidM.y -= 240;
		}
    }

	//////////// KINECT 2 -- same pipeline on the second device.
    kinect2.update();
    if (kinect2.isFrameNew()) {
        
		hsb2.resize(w, h);
        hsb2.setFromPixels(kinect2.getPixels(), w, h);
		hsb2.resize(w/resize, h / resize);

        //convert to hsb
        hsb2.convertRgbToHsv();

        //store the hue channel as a grayscale image, denoised
        hsb2.convertToGrayscalePlanarImage(hue2, 0);
		hue2.erode();
		hue2.dilate();

        //filter image based on the hue value we're looking for
		for (int i=0; i<hue2.width*hue2.height; i++) {
			filtered2.getPixels()[i] = ofInRange(hue2.getPixels()[i], findHue - 15,  findHue + 15) ? 255 : 0;
        }
        filtered2.flagImageChanged();

        //run the contour finder on the filtered image to find blobs with a certain hue
        contours2.findContours(filtered2, 50, (w/resize)*(h/resize)/2, 1, false);

		if (contours2.nBlobs > 0) {

			kinect2CentroidX = contours2.blobs[0].centroid.x;
			kinect2CentroidY = contours2.blobs[0].centroid.y;

			width = contours2.blobs[0].boundingRect.width / 2;
			height = contours2.blobs[0].boundingRect.height / 2;

			if (kinect2CentroidY - offset < 0) {
				topM = kinect2.getWorldCoordinateAt( kinect2CentroidX * (int)(resize), (kinect2CentroidY) * (int)(resize) );
			}
			else{
				topM = kinect2.getWorldCoordinateAt( kinect2CentroidX * (int)(resize), (kinect2CentroidY - offset) * (int)(resize) );
			}

			if (kinect2CentroidY + offset > h / resize) {
				bottomM = kinect2.getWorldCoordinateAt( kinect2CentroidX * (int)(resize), (kinect2CentroidY) * (int)(resize) );
			} else {
				bottomM = kinect2.getWorldCoordinateAt( kinect2CentroidX * (int)(resize), (kinect2CentroidY + offset) * (int)(resize) );
			}

			if (kinect2CentroidX - offset < 0) {
				leftM = kinect2.getWorldCoordinateAt( (kinect2CentroidX) * (int)(resize) , kinect2CentroidY * (int)(resize) );
			} else {
				leftM = kinect2.getWorldCoordinateAt( (kinect2CentroidX - offset) * (int)(resize) , kinect2CentroidY * (int)(resize) );
			}

			if (kinect2CentroidX + offset > w / 4) {
				rightM = kinect2.getWorldCoordinateAt( ( kinect2CentroidX) * (int)(resize), kinect2CentroidY * (int)(resize) );
			} else {
				rightM = kinect2.getWorldCoordinateAt( ( kinect2CentroidX + offset) * (int)(resize), kinect2CentroidY * (int)(resize) );
			}

			centroid2M = kinect2.getWorldCoordinateAt( kinect2CentroidX * (int)(resize), kinect2CentroidY * (int)(resize) );

			int off = 25;

			if ( leftM.z - rightM.z > -off && leftM.z - rightM.z < off) {
				racket2AngleVerti = leftM.z - rightM.z;
			}

			if ( topM.z - bottomM.z > -off && topM.z - bottomM.z < off) {
				racket2AngleHori = topM.z - bottomM.z;
			}

			if (centroid2M.z < 400) {
				centroid2M.z = 400;
			}

			// Recenter vertically (kinect 2).
			centroid2M.y -= 240;
		}
    }


	// OSC: report each kinect's marker position, or a "not found" notice.
	ofxOscMessage message;

	if (kinect1.isConnected()) {
		// An all-zero centroid means "no detection" -- send nothing then.
		if (centroidM.x != 0 && centroidM.y != 0 && centroidM.z != 0) {
			message.setAddress("/kinect1/position");
			message.addFloatArg(centroidM.x);
			message.addFloatArg(centroidM.y);
			message.addFloatArg(centroidM.z);
			message.addFloatArg(racket1AngleHori);
			message.addFloatArg(racket1AngleVerti);
			sender.sendMessage(message);
		}
	} else {
		message.setAddress("/kinect1/connected");
		message.addStringArg("Kinect 1 not found ");
		message.addIntArg(0);
		sender.sendMessage(message);
	}

	ofxOscMessage message2;

	if (kinect2.isConnected()) {
		if (centroid2M.x != 0 && centroid2M.y != 0 && centroid2M.z != 0) {
			message2.setAddress("/kinect2/position");
			message2.addFloatArg(centroid2M.x);
			message2.addFloatArg(centroid2M.y);
			message2.addFloatArg(centroid2M.z);
			message2.addFloatArg(racket2AngleHori);
			message2.addFloatArg(racket2AngleVerti);
			sender.sendMessage(message2);
		}
	} else {
		message2.setAddress("/kinect2/connected");
		message2.addStringArg("Kinect 2 not found ");
		message2.addIntArg(0);
		sender.sendMessage(message2);   // was sendMessage(message): wrong message
	}


}
Пример #21
0
//--------------------------------------------------------------
// Main update loop: drains the incoming OSC queue (/camera, /text,
// /filter, /info, /video), re-renders the two-line text overlay FBO when
// the text changed, runs the live camera through the shader into camFbo,
// and force-ends video playback once its scheduled end time has passed.
void shaderApp::update()
{
	float now = ofGetElapsedTimef();
	bool textChanged = false;

    // Heartbeat log roughly every 10 seconds.
    if(now > timeMarkerTime) {
        ofLogNotice("timestamp") << ofGetElapsedTimef();
        timeMarkerTime = now + 10;
    }
    
	// Handle every queued OSC message this frame.
	while(receiver.hasWaitingMessages()){

		ofxOscMessage m;
		receiver.getNextMessage(&m);
        
        // /camera -- switch back to the live camera view.
        if(m.getAddress() == "/camera"){
            displayMode = MODE_CAMERA;
        }
        
		// /text <line1> <line2> -- update the overlay text.
		if(m.getAddress() == "/text"){
			lineOne = m.getArgAsString(0);
			lineTwo = m.getArgAsString(1);
            ofLogNotice() << lineOne << " - " << lineTwo;
			textChanged = true;
		}

		// /filter <name> -- apply the named OMX image filter to the
		// camera (name match is case-insensitive).
		if(m.getAddress() == "/filter") {
			string filterToUse = m.getArgAsString(0);
			map<string, OMX_IMAGEFILTERTYPE>::iterator it;
			for(it = OMX_Maps::getInstance().imageFilters.begin();
				it!=OMX_Maps::getInstance().imageFilters.end(); ++it) {
				string name = (*it).first;
				OMX_IMAGEFILTERTYPE filter = (*it).second;

				if (ofToLower(name) == ofToLower(filterToUse))  {
					currentFilter = filterToUse;
					videoGrabber.applyImageFilter(filter);
				}
			}
		}
		
		// /info <0|1> -- toggle the on-screen debug info.
		if(m.getAddress() == "/info") {
			int i = m.getArgAsInt32(0);
			doDrawInfo = i;
		}

		// /video [index] -- start playing a video from VIDEO_DIRECTORY
		// (ignored if one is already playing).
		if(m.getAddress() == "/video") {
            if(displayMode!=MODE_VIDEO) {
   
                ofDirectory dir;
                dir.listDir(VIDEO_DIRECTORY);
                int n = 0;
                
                // If we pass in an integer and it's within range of the number of videos we have,
                // play that video.  Otherwise play a random one.
                if(m.getNumArgs() > 0 && ofInRange(m.getArgAsInt32(0), 0, dir.size()-1)) {
                    n = m.getArgAsInt32(0);
                } else {
                    n = ofRandom(0, (int)dir.size());
                }
                
                
                string videoPath = dir.getPath(n);
                string videoName = dir.getName(n);
                ofLogNotice("shaderApp") << "!!! PLAYING " << n << " - " << videoName;

                playVideo(videoName, videoPath);
            }
		}

	}

	// Re-render the overlay: both lines centered, drawn twice for a
	// simple black drop shadow under white text.
	if(textChanged)
	{
		overlayFbo.begin();
            int dropshadow = 8;   // shadow offset in pixels
        
			ofClear(0, 0, 0, 0);
			ofRectangle box;
			int x;
			int y = font[0].getLineHeight()+50;
		
			box = font[0].getStringBoundingBox(lineOne, 0, 0);
			x = (overlayFbo.getWidth()/2.0) - (box.width/2.0);
			ofSetColor(ofColor::black);
			font[0].drawString(lineOne, x+dropshadow, y+dropshadow);
			ofSetColor(ofColor::white);
			font[0].drawString(lineOne, x, y);
			

			y += font[1].getLineHeight()+50;


			box = font[1].getStringBoundingBox(lineTwo, 0, 0);
			x = (overlayFbo.getWidth()/2.0) - (box.width/2.0);
			ofSetColor(ofColor::black);
			font[1].drawString(lineTwo, x+dropshadow, y+dropshadow);
			ofSetColor(ofColor::white);
			font[1].drawString(lineTwo, x, y);

		overlayFbo.end();
	}


	// Run the fresh camera frame through the shader into camFbo.
	if (displayMode == MODE_CAMERA && videoGrabber.isFrameNew())
	{
		camFbo.begin();
		ofClear(0, 0, 0, 0);
		shader.begin();
			shader.setUniformTexture("tex0", videoGrabber.getTextureReference(), videoGrabber.getTextureID());
			shader.setUniform1f("time", ofGetElapsedTimef());
			shader.setUniform2f("resolution", videoGrabber.getWidth(), videoGrabber.getHeight());
			videoGrabber.draw();
		shader.end();
		camFbo.end();
	}
    
    // Safety net: drop back to the camera when the video has overrun
    // its expected end time.
    if(displayMode == MODE_VIDEO && now > videoEndTime)
    {
        ofLogVerbose(__func__) << "Forcefully ending video";
        displayMode = MODE_CAMERA;
    }
    
    
//    if(stopVideo)
//    {
//        ofLogVerbose(__func__) << "Stopping Video";
//        if(omxPlayer.isTextureEnabled) {
//            //clear the texture if you want
//            omxPlayer.getTextureReference().clear();
//        }
//        displayMode = MODE_CAMERA;
//        stopVideo=false;
//    }
}
Пример #22
0
//--------------------------------------------------------------
// Per-frame update: fetches a new movie frame, isolates four tracked
// hue ranges (Hue1..Hue4) as binary masks, finds at most one blob per
// mask, and appends each blob's scaled centroid to that marker's
// linked-list trail (the lists are rooted at start[]).
void testApp::update(){
    //static long int elementCount=0;
	// Per-marker list cursors: pos[i] = node currently being written,
	// last[i] = end of marker i's trail after the previous frame.
	static Position *pos[NUM_MARKERS],*last[NUM_MARKERS];
	static bool t=0;   // one-shot init flag: true after the first call

	// First call only: point every cursor at its trail's start node.
	if(t==0)
	{
		for(int i=0;i<NUM_MARKERS;i++)
			pos[i]=last[i]=start[i];
		t=1;
	}
	
    movie.update();
    
    if (movie.isFrameNew()) {
        
        //copy webcam pixels to rgb image
        rgb.setFromPixels(movie.getPixels(), w, h);
        
        //mirror horizontal
        rgb.mirror(false, true);
        
        //duplicate rgb
        hsb = rgb;
        
        //convert to hsb
        hsb.convertRgbToHsv();
        
        //store the three channels as grayscale images
        hsb.convertToGrayscalePlanarImages(hue, sat, bri);
        
        //filter image based on the hue value were looking for:
        //255 where the pixel hue is within +/-5 of the marker hue, else 0
        for (int i=0; i<w*h; i++) {
            //filtered.getPixels()[i] = ofInRange(hue.getPixels()[i],findHue-5,findHue+5) ? 255 : 0;
			filtered[0].getPixels()[i] = ofInRange(hue.getPixels()[i],Hue1-5,Hue1+5) ? 255 : 0;
			filtered[1].getPixels()[i] = ofInRange(hue.getPixels()[i],Hue2-5,Hue2+5) ? 255 : 0;
			filtered[2].getPixels()[i] = ofInRange(hue.getPixels()[i],Hue3-5,Hue3+5) ? 255 : 0;
			filtered[3].getPixels()[i] = ofInRange(hue.getPixels()[i],Hue4-5,Hue4+5) ? 255 : 0;
        }
        // Tell ofxCv the pixel buffers changed so their textures refresh.
        filtered[0].flagImageChanged();
		filtered[1].flagImageChanged();
		filtered[2].flagImageChanged();
		filtered[3].flagImageChanged();


        //run the contour finder on the filtered image to find blobs with a certain hue
        //(min area 50 px, max area half the frame, at most 1 blob, no holes)
        contours[0].findContours(filtered[0], 50, w*h/2, 1, false);
		contours[1].findContours(filtered[1], 50, w*h/2, 1, false);
		contours[2].findContours(filtered[2], 50, w*h/2, 1, false);
		contours[3].findContours(filtered[3], 50, w*h/2, 1, false);

		// Resume writing each trail where the previous frame left off.
		for(int i=0;i<NUM_MARKERS;i++)
			pos[i]=last[i];

		// For every detected blob: store its scaled centroid in the current
		// node, then grow the list by one fresh node for the next sample.
		// NOTE(review): nodes allocated here are never freed in this
		// function, so trails grow for the app's lifetime — confirm
		// cleanup happens elsewhere.
		for(int j=0;j<NUM_MARKERS;j++)
		for (int i=0; i<contours[j].nBlobs; i++) 
		{
			/*float tx=contours[j].blobs[i].centroid.x*4.7;
			float ty=contours[j].blobs[i].centroid.y*3.2;
			distanceLimiter(last[j]->x,last[j]->y,tx,ty);

			pos[j]->x=tx+WIDTH/4;
			pos[j]->y=ty;*/
			// Scale camera-space centroid into output space (2.4/3.2 are
			// presumably screen/camera resolution ratios — TODO confirm).
			pos[j]->x=contours[j].blobs[i].centroid.x*2.4;
			pos[j]->y=contours[j].blobs[i].centroid.y*3.2;
			pos[j]->next=new Position();
			pos[j]=pos[j]->next;
			elementCount[j]++;
		}
		//elementCount--;
		// Remember the new trail ends for the next frame.
		for(int i=0;i<NUM_MARKERS;i++)
			last[i]=pos[i];
    }
}
Пример #23
0
// Locates the two corneal glints when exactly two are expected (nGlints == 2).
// Scans every row of the binary eye image for dark horizontal runs bounded
// by white pixels (a white->black edge starts a segment, black->white ends
// it), optionally discards segments by range and by overlap with bright-eye
// glints, rasterizes the survivors into myStripesImage, and finds their
// single bounding blob. Glint blobs from contourFinder whose bounding box
// contains the left/right edge of that stripe blob become
// leftGlintID / rightGlintID (both reset to -1 each call; -1 = unmatched).
void glintLineChecker::update(ofxCvGrayscaleImage & eyeImg, int nGlints, ofxCvContourFinder & contourFinder, bool bUseGlintInBrightEye, ofxCvContourFinder & contourFinderBright){
	
	if (nGlints == 2) {
		
		lineSegments.clear();
		unsigned char * pixels = eyeImg.getPixels();
		
		// Pass 1: per-row scan for dark segments between white pixels.
		for (int j = 0; j < eyeImg.height; j++){
			lineSegment temp;
			bool bStarted = false;
			
			for (int i = 0; i < eyeImg.width - 1; i++) {
				
				int pixela = pixels [ j * eyeImg.width + i];
				int pixelb = pixels [ j * eyeImg.width + i + 1];
				
				if ((pixela == 255) && (pixelb == 0)) {
					// yeah!! we are starting !! 
					temp.startx = i;
					temp.starty = j;
					bStarted = true;
				}
				
				if ((pixela == 0) && (pixelb == 255)) {
					
					if (bStarted == true) {
						// cool we are ending :)
						temp.endx = i;
						temp.endy = j;
						temp.distance = temp.endx - temp.startx;  // segment width in px
						
						lineSegments.push_back(temp);
						//printf("adding line segment %i %i %i %i -- %i \n", temp.startx, temp.starty, temp.endx, temp.endy, temp.distance );
						
						bStarted = false;
					}
				}
			}
		}
		
		// Pass 2 (optional): prune segments.  Erase-inside-loop is kept
		// correct by decrementing i after each erase.
		if (bDeleteLine) {
			// remove_if doesn't work now, so for now..
//			lineSegments.erase(remove_if(lineSegments.begin(), lineSegments.end(), glintChecker::lineInRange), lineSegments.end());
			// Drop segments whose geometry fails lineInRange().
			for (int i = 0; i < lineSegments.size(); i++) {
				if (lineInRange(lineSegments[i])) {
					lineSegments.erase(lineSegments.begin() + i);
					i--;
				}
			}
			
			// Drop segments crossing a bright-eye glint, when enabled.
			for (int i = 0; i < lineSegments.size(); i++) {
				if (bUseGlintInBrightEye) {
					if (lineCrossGlintInBrightEye(lineSegments[i], contourFinderBright)) {
						lineSegments.erase(lineSegments.begin() + i);
						i--;
					}
				}
			}
		}
		
		// Pass 3: rasterize surviving segments into a blank stripe image.
		cvSetZero(myStripesImage.getCvImage());
		unsigned char * stripepixels = myStripesImage.getPixels();
		
		for (int i = 0; i < lineSegments.size(); i++) {
			int startx = lineSegments[i].startx;
			int endx = lineSegments[i].endx;
			int y = lineSegments[i].starty;
			for (int j = startx; j < endx; j++){
				stripepixels[y * myStripesImage.width + j] = 255;
			}
		}
		myStripesImage.flagImageChanged();
		// One blob expected: the union of all stripes between the glints.
		int nBlobs = linesFinder.findContours(myStripesImage, 100, 10000, 1, false, true);
		
		
		leftGlintID = -1;
		rightGlintID = -1;

		// Pass 4: a glint blob whose bounding box contains the stripe
		// blob's left edge (and overlaps it vertically) is the left glint;
		// one containing the right edge is the right glint.
		if (nBlobs > 0) {
			ofRectangle foundLinesRect = linesFinder.blobs[0].boundingRect;
			
			for (int i = 0; i < contourFinder.blobs.size(); i++){
				ofRectangle blobRect = contourFinder.blobs[i].boundingRect;
				if (ofInRange(foundLinesRect.x, blobRect.x, blobRect.x + blobRect.width) &&
					(ofInRange(foundLinesRect.y, blobRect.y, blobRect.y + blobRect.height) ||
					 ofInRange(foundLinesRect.y + foundLinesRect.height, blobRect.y, blobRect.y + blobRect.height))){
						leftGlintID = i;
					} else if (ofInRange(foundLinesRect.x + foundLinesRect.width, blobRect.x, blobRect.x + blobRect.width) &&
							   (ofInRange(foundLinesRect.y, blobRect.y, blobRect.y + blobRect.height) ||
								ofInRange(foundLinesRect.y + foundLinesRect.height, blobRect.y, blobRect.y + blobRect.height))) {
						rightGlintID = i;
					}
			}
		}
	}
}
Пример #24
0
//--------------------------------------------------------------
void testApp::update(){
    
    //next frame of movie
    movie.update();
    
    //if there is a new frame,
    if (movie.isFrameNew()) {
        
        //copy webcam pixels to rgb image
        rgb.setFromPixels(movie.getPixels(), w, h);
        
        //mirror horizontal so it makes sense
        rgb.mirror(false, true);
        
        //clone the rgb image into the hsb image
        hsb = rgb;
        
        //convert the hsb image to Hsv values, so we can get hue, saturation, and brightness
        hsb.convertRgbToHsv();
        
        //store the three channels of the Hsv into their own contrast masks (grayscale images)
        hsb.convertToGrayscalePlanarImages(hue, sat, bri);
        
        
///////////////////////////////
//////////color tracking begins
///////////////////////////////
        
        //PINK COLOR TRACKING
        //filter image based on the hue value we're looking for
        //looping through every pixel in the contour mask of pink hue, and pulling out pink values.
        //storing them in the filteredPink array
        for (int i=0; i<w*h; i++) {
            filteredPink.getPixels()[i] = ofInRange(hue.getPixels()[i],pink-5,pink+5);
        }
        
        //run the contour finder on the filtered image to create blobs from points that are in proximity of each other
        contoursPink.findContours(filteredPink, 50, w*h, 1, false);
                
        //calculate velocity of color, smooth velocity, and send to OSC with color name
        pinkNote.colorVel(contoursPink, "pink");        
        
        
        //BLUE COLOR TRACKING
        //filter image based on the hue value were looking for
        for (int i=0; i<w*h; i++) {
            filteredBlue.getPixels()[i] = ofInRange(hue.getPixels()[i],blue-5,blue+5);
        }
        
        //run the contour finder on the filtered image to find blobs with a certain hue
        contoursBlue.findContours(filteredBlue, 50, w*h/2, 1, false);
        
        //calculate velocity of color, smooth velocity, and send to OSC with color name
        blueNote.colorVel(contoursBlue, "blue");
       
        
        
        //GREEN COLOR TRACKING
        //filter image based on the hue value were looking for
        for (int i=0; i<w*h; i++) {
            filteredGreen.getPixels()[i] = ofInRange(hue.getPixels()[i],green-5,green+5);
        }
        
        //run the contour finder on the filtered image to find blobs with a certain hue
        contoursGreen.findContours(filteredGreen, 50, w*h, 1, false);
        
        //calculate velocity of color, smooth velocity, and send to OSC with color name
        greenNote.colorVel(contoursGreen, "green");        
        
        
        
        //RED COLOR TRACKING
        //filter image based on the hue value were looking for
        for (int i=0; i<w*h; i++) {
            filteredRed.getPixels()[i] = ofInRange(hue.getPixels()[i],red-5,red+5);
        }
        
        //run the contour finder on the filtered image to find blobs with a certain hue
        contoursRed.findContours(filteredRed, 50, w*h/2, 1, false);
        
        //calculate velocity of color, smooth velocity, and send to OSC with color name
        redNote.colorVel(contoursRed, "red");

    }

    
////////////////
//SEND MESSAGE
///////////////
    
    //NOTE: VelocitySmooth was netting dropping to zero within seconds, so we are NOT sending velocitySmoothed. We are sending velocity as a placeholder until we figure out velocitySmoothed. 
    
////////SEND PINK
    
    //if the movement on the x axis of the colored blob is greater than 1 unit (in either direction), then:
    if(pinkNote.velocity > 5){       
        if (slowYourRoll % 20 == 0){
            
            //build message p with the address /playtone, the arguement pinkNote.velocity and the string "pink".
            ofxOscMessage p;
            p.setAddress("/playtone");
            p.addIntArg(pinkNote.velocity);
            p.addStringArg("pink");
            sender.sendMessage(p);
            //cout << "pink velocity of " << pinkNote.velocity << " sent." << endl;
        }
    slowYourRoll++;
   }
    

////////SEND BLUE
    
    //if the movement on the x axis of the colored blob is greater than 1 unit (in either direction), then:
    if(blueNote.velocity > 5){
        if (slowYourRoll % 20 == 0){
            
            //build message p with the address /playtone, the arguement pinkNote.velocity and the string "blue".
            ofxOscMessage b;
            b.setAddress("/playtone");
            b.addIntArg(blueNote.velocity);
            b.addStringArg("blue");
            sender.sendMessage(b);
            //cout << "blue velocity of " << blueNote.velocity << " sent." << endl;
        }
        slowYourRoll++;
    }
    
    
////////SEND GREEN
    
    //if the movement on the x axis of the colored blob is greater than 1 unit (in either direction), then:
    if(greenNote.velocity > 5){
        if (slowYourRoll % 20 == 0){
            
            //build message p with the address /playtone, the arguement pinkNote.velocity and the string "green".
            ofxOscMessage g;
            g.setAddress("/playtone");
            g.addIntArg(greenNote.velocity);
            g.addStringArg("green");
            sender.sendMessage(g);
            //cout << "green velocity of " << greenNote.velocity << " sent." << endl;
            cout << "green smoothed velocity of " << greenNote.velocitySmoothed << " sent." << endl;

        }
        slowYourRoll++;
    }
    
    
////////SEND RED
    
    //if the movement on the x axis of the colored blob is greater than 1 unit (in either direction), then:
    if(redNote.velocity > 5){
        if (slowYourRoll % 20 == 0){
            
            //build message p with the address /playtone, the arguement pinkNote.velocity and the string "red".
            ofxOscMessage r;
            r.setAddress("/playtone");
            r.addIntArg(redNote.velocity);
            r.addStringArg("red");
            sender.sendMessage(r);
            //cout << "red velocity of " << redNote.velocity << " sent." << endl;

        }
        slowYourRoll++;
    }

    
} // end of void testApp::update()
Пример #25
0
//--------------------------------------------------------------
//Inverting the mapping (mapX, mapY), with antialiasing.
//TODO: probably there is a simpler way to do this.
//
// The forward map sends pixel (x,y) to (mapX[x,y], mapY[x,y]).  This
// routine overwrites mapX/mapY in place with the inverse map, in three
// passes:
//   1) scatter: splat each source coordinate (x,y) onto the four integer
//      neighbours of its mapped position, with bilinear weights
//      (accumulated in fx, fy, weight);
//   2) normalise: divide the accumulated coordinates by the accumulated
//      weight; unhit pixels keep the identity map and are counted;
//   3) fill: repeatedly diffuse values into zero-weight pixels from
//      weighted neighbours through a pyramid-shaped kernel, until every
//      pixel is filled or an iteration makes no progress.
void testApp::inverseMapping( ofxCvFloatImage &mapX, ofxCvFloatImage &mapY ){
	// Lazily allocate the w x h accumulation buffers on first use.
	if ( !fx.bAllocated ) {
		fx.allocate( w, h );
		fy.allocate( w, h );
		weight.allocate( w, h );
	}
	fx.set( 0 );
	fy.set( 0 );
	weight.set( 0 );

	float *mapXPixels = mapX.getPixelsAsFloats();
	float *mapYPixels = mapY.getPixelsAsFloats();
	float *fxPixels = fx.getPixelsAsFloats();
	float *fyPixels = fy.getPixelsAsFloats();
	float *weightPixels = weight.getPixelsAsFloats();

	// Pass 1: scatter with bilinear weights.
	for (int y=0; y<h; y++) {
		for (int x=0; x<w; x++) {
			float MX = mapXPixels[ x + w * y ];
			float MY = mapYPixels[ x + w * y ];

			// Integer cell containing (MX, MY) and fractional position in it.
			int mx0 = int( MX );	
			int my0 = int( MY );
			int mx1 = mx0 + 1;	
			int my1 = my0 + 1;
			float weightX = MX - mx0;
			float weightY = MY - my0;

			mx0 = ofClamp( mx0, 0, w-1 );	//Bound
			my0 = ofClamp( my0, 0, h-1 );
			mx1 = ofClamp( mx1, 0, w-1 );	
			my1 = ofClamp( my1, 0, h-1 );
			// Distribute (x,y) over the 4 corner pixels of the cell.
			for (int b=0; b<2; b++) {
				for (int a=0; a<2; a++) {
					int x1 = ( a == 0 ) ? mx0 : mx1;
					int y1 = ( b == 0 ) ? my0 : my1;
					int i1 = x1 + w * y1;
					float wgh = ( ( a == 0 ) ? ( 1 - weightX ) : weightX ) 
						* ( ( b == 0 ) ? ( 1 - weightY ) : weightY );
					fxPixels[ i1 ] += x * wgh; 
					fyPixels[ i1 ] += y * wgh; 
					weightPixels[ i1 ] += wgh;
				}
			}
		}
	}

	//Compute map for non-zero weighted pixels
	int zeros = 0;		//Count of zeros pixels
	for (int y=0; y<h; y++) {
		for (int x=0; x<w; x++) {
			int i = x + w * y;
			float X = fxPixels[ i ];
			float Y = fyPixels[ i ];
			float Weight = weightPixels[ i ];
			if ( Weight > 0 ) { 
				// Weighted average of all source pixels that landed here.
				X /= Weight;
				Y /= Weight;
			}
			else {
				// No source mapped here: fall back to identity, fill later.
				X = x;
				Y = y;
				zeros++;
			}
			mapXPixels[ i ] = X;
			mapYPixels[ i ] = Y;
			weightPixels[ i ] = Weight;
		}
	}

	//Fill zero-weighted pixels by weighting of near non-zero weighted pixels
	// Build a normalised pyramid kernel of radius 'rad' (peak at centre).
	const int rad = 2;
	const int diam = 2 * rad + 1;
	float filter[ diam * diam ];
	float sum = 0;
	for (int b=-rad; b<=rad; b++) {
		for (int a=-rad; a<=rad; a++) {
			float wgh = rad + 1 - max( abs( a ), abs( b ) );
			filter[ a+rad + diam * (b+rad) ] = wgh;
			sum += wgh;
		}
	}
	for (int i=0; i<diam*diam; i++) {
		filter[ i ] /= sum;
	}

	// Iterate until all holes are filled, or an iteration fills nothing
	// (zeros stops decreasing), to avoid looping forever on isolated gaps.
	int zeros0 = -1;
	while ( zeros > 0 && (zeros0 == -1 || zeros0 > zeros) ) {
		zeros0 = zeros;
		zeros = 0;
		for (int y=0; y<h; y++) {
			for (int x=0; x<w; x++) {
				int i = x + w * y;
				if (weightPixels[ i ] < 0.0001 ) {
					// Gather from weighted neighbours inside the kernel.
					float mX = 0;
					float mY = 0;
					float mWeight = 0;
					int x1, y1, i1;
					float wgh;
					for (int b = -rad; b<=rad; b++) {
						for (int a = -rad; a<=rad; a++) {
							x1 = x + a;
							y1 = y + b;
							if ( ofInRange( x1, 0, w-1 ) && ofInRange( y1, 0, h-1 ) ) {
								i1 = x1 + w * y1;
								if ( weightPixels[ i1 ] >= 0.0001 ) {
									wgh = filter[ a+rad + diam * (b+rad) ] * weightPixels[ i1 ];
									mX += mapXPixels[i1] * wgh;
									mY += mapYPixels[i1] * wgh;
									mWeight += wgh;
								}
							}
						}
					}
					if ( mWeight > 0 ) {
						mapXPixels[ i ] = mX / mWeight;
						mapYPixels[ i ] = mY / mWeight;
						weightPixels[ i ] = mWeight;
					}
					else {
						zeros++;	// still unfilled this round
					}
				}
			}
		}
	}

	mapX.flagImageChanged();
	mapY.flagImageChanged();
}
Пример #26
0
// Loads a Wavefront OBJ file (via the GLM loader) and converts every
// named group in it into an ofMesh, keyed by group name in 'groups'.
// Vertex colors, normals and texture coordinates are copied when the
// model provides them; 'generateNormals' asks GLM to compute per-facet
// normals before conversion.  Any previous contents of 'groups' are
// discarded.
void loadGroup(string path, map<string, ofMesh>& groups, bool generateNormals)
{
	// Resolve relative to the app's data folder, as OF expects.
	path = ofToDataPath(path);

	groups.clear();

	GLMmodel* model = glmReadOBJ((char*)path.c_str());

	if (generateNormals)
	{
		glmFacetNormals(model);
	}

	// OBJ winding is opposite to what we render; flip it.
	glmReverseWinding(model);

	GLMtriangle* tris = model->triangles;

	// Walk the linked list of groups, building one mesh per group.
	for (GLMgroup* grp = model->groups; grp != NULL; grp = grp->next)
	{
		ofMesh mesh;

		for (int t = 0; t < grp->numtriangles; t++)
		{
			const GLMtriangle& face = tris[grp->triangles[t]];

			for (int corner = 0; corner < 3; corner++)
			{
				// GLM stores attributes as flat float arrays indexed
				// by (index * components).
				const GLfloat* vp = model->vertices + (face.vindices[corner] * 3);
				mesh.addVertex(ofVec3f(vp[0], vp[1], vp[2]));

				if (model->colors)
				{
					const GLfloat* cp = model->colors + (face.vindices[corner] * 3);
					mesh.addColor(ofFloatColor(cp[0], cp[1], cp[2]));
				}

				// Guard against out-of-range indices before dereferencing.
				if (model->normals && ofInRange(face.nindices[corner], 0, model->numnormals))
				{
					const GLfloat* np = model->normals + (face.nindices[corner] * 3);
					mesh.addNormal(ofVec3f(np[0], np[1], np[2]));
				}

				if (model->texcoords && ofInRange(face.tindices[corner], 0, model->numtexcoords))
				{
					const GLfloat* tp = model->texcoords + (face.tindices[corner] * 2);
					mesh.addTexCoord(ofVec2f(tp[0], tp[1]));
				}
			}
		}

		groups[grp->name] = mesh;
	}

	glmDelete(model);
}
Пример #27
0
// Reports whether f falls inside this boundary's [min, max] interval
// (inclusive at both ends, per ofInRange's semantics).
bool Boundary1f::contains(float f){
	const bool inside = ofInRange(f, min, max);
	return inside;
}