Пример #1
0
void ofxSlitScan::setDelayMap(ofPixels& map){
	// Reject delay maps whose dimensions don't match the configured slit-scan
	// size; a mismatched map would be sampled out of bounds later.
	if(map.getWidth() != width || map.getHeight() != height){
		// BUGFIX: the message used %f for the given dimensions while width/height
		// (compared against them above) are printed with %d — a specifier/argument
		// mismatch is undefined behavior. Cast explicitly and use %d throughout.
		ofLog(OF_LOG_ERROR,"ofxSlitScan Error -- Map dimensions do not match image dimensions. given %dx%d, need %dx%d\n", (int)map.getWidth(), (int)map.getHeight(), width, height);
		return;
	}
	// Dimensions match: forward the raw buffer and image type to the low-level setter.
	setDelayMap(map.getPixels(), map.getImageType());
}
Пример #2
0
// Spawn one particle per sampled pixel of an RGB image, colored from the image
// and positioned so the particle field is centered horizontally and offset
// vertically below the drawn video.
// NOTE(review): pix is taken by value, copying the whole pixel buffer each
// call — presumably a const reference would suffice; confirm before changing.
void testApp::createParticlesFromPixels ( ofPixels pix )
{
    int w = pix.getWidth() ; 
    int h = pix.getHeight() ;
    
    unsigned char * pixels = pix.getPixels() ; 
    //offsets to center the particles on screen
    int xOffset = (ofGetWidth() - w ) /2 ;
    //We're drawing the actual video too so we'll bump down where the video pixels are drawn too
    int yOffset = (ofGetHeight() - h ) * 0.825f ;
    
    //Loop through the columns (x), stepping by the sampling rate
    for ( int x = 0 ; x < w ; x+=sampling )
    {
        //Loop through the rows (y) within each column
        for ( int y = 0 ; y < h ; y+=sampling )
        {
            //Pixels are stored as unsigned char ( 0 <-> 255 ) as RGB
            //If our image had transparency it would be 4 for RGBA
            //NOTE(review): assumes 3 channels — a grayscale/RGBA input would be misread
            int index = ( y * w + x ) * 3 ;
            ofColor color ;
            color.r = pixels[index] ;       //red pixel
            color.g = pixels[index+1] ;     //green pixel
            color.b = pixels[index+2] ;     //blue pixel
            
            particles.push_back( Particle ( ofPoint ( x + xOffset , y + yOffset ) , color ) ) ;
        }
    }

}
Пример #3
0
// Upload pixel data to the GPU through a ring of pixel buffer objects so the
// CPU->GPU copy can overlap with the GPU still consuming another PBO.
// NOTE(review): copies dataSize bytes from pixels — assumes pixels holds at
// least dataSize bytes; confirm against allocate().
void ofxPBO::loadData(const ofPixels & pixels){
	if(pboIds.empty()){
		ofLogError() << "pbo not allocated";
		return;
	}
    // Advance the ring: `index` is the current PBO, `nextIndex` receives the
    // fresh data so it is ready on a later call.
    index = (index + 1) % pboIds.size();
    int nextIndex = (index + 1) % pboIds.size();
    
	// bind PBO to update pixel values
	glBindBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, pboIds[nextIndex]);
    
	// map the buffer object into client's memory
	// Note that glMapBufferARB() causes sync issue.
	// If GPU is working with this buffer, glMapBufferARB() will wait(stall)
	// for GPU to finish its job. To avoid waiting (stall), you can call
	// first glBufferDataARB() with NULL pointer before glMapBufferARB().
	// If you do that, the previous data in PBO will be discarded and
	// glMapBufferARB() returns a new allocated pointer immediately
	// even if GPU is still working with the previous data.
	glBufferDataARB(GL_PIXEL_UNPACK_BUFFER_ARB, dataSize, 0, GL_STREAM_DRAW_ARB);
	GLubyte* ptr = (GLubyte*)glMapBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, GL_WRITE_ONLY_ARB);
	if(ptr)
	{
		// update data directly on the mapped buffer
		memcpy(ptr,pixels.getPixels(),dataSize);
		glUnmapBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB); // release pointer to mapping buffer
	}
    
    // it is good idea to release PBOs with ID 0 after use.
    // Once bound with 0, all pixel operations behave normal ways.
    glBindBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, 0);
}
Пример #4
0
void ofxSlitScan::addImage(ofPixels& image){
	// Only frames matching the configured image type may be appended.
	if(image.getImageType() == type){
		addImage( image.getPixels() );
		return;
	}
	ofLog(OF_LOG_ERROR, "ofxSlitScan -- adding image of the wrong type");
}
Пример #5
0
//--------------------------------------------------------------
// Draw the input/output views, the lattice, and an optional info overlay.
void testApp::draw(){
    ofSetHexColor(0xFFFFFF);
    ofBackground(0);

    if(bShowInput) grayImage.drawROI(roi.x, roi.y);
    if(bShowOutput) fbo.draw(0, 0);
    
    L.draw(pix);
    
    if(bInfo){
        ofSetHexColor(0xFF0000);
        char reportStr[1024];
    
        sprintf(reportStr, "[P] process on/off [F] snapshot [7 8 9 0] roi mask");
        ofDrawBitmapString(reportStr, 20, 10);
        sprintf(reportStr, "fps:%3.0f opencv:%3.2f madMapper:%3.2f", ofGetFrameRate(), t1, t2);
        ofDrawBitmapString(reportStr, 20, 25);
        sprintf(reportStr, "[1] show input [2] show output [i] info ");
        ofDrawBitmapString(reportStr, 20, 40);
        sprintf(reportStr, "[c] Contrast %.2f [b] Brightness %.2f ", contrast, brightness);
        ofDrawBitmapString(reportStr, 20, 55);
        sprintf(reportStr, "gray image [%4d, %4d] fbo [%4.f, %4.f] ",
                roiW, roiH, fbo.getWidth(), fbo.getHeight());
        // BUGFIX: this line was formatted but never drawn — it was overwritten
        // by the next sprintf below before reaching the screen.
        ofDrawBitmapString(reportStr, 20, 70);
        
        // Sample the pixel value under the mouse cursor.
        // NOTE(review): idx is unchecked — a cursor outside pix reads out of
        // bounds; confirm mouse coords are clamped elsewhere.
        int idx = (mouseY * pix.getWidth()+ mouseX) * pix.getBytesPerPixel();
        
        sprintf(reportStr, "pixels %d", pix.getPixels()[idx]);
        ofDrawBitmapString(reportStr, 20, 85);
    } 
}
Пример #6
0
//----------------------------------------------------------
// Read this texture's contents back into a CPU-side ofPixels buffer.
// No-op on OpenGL ES targets, where glGetTexImage is unavailable.
void ofTexture::readToPixels(ofPixels & pixels){
#ifndef TARGET_OPENGLES
	// Size the destination to match the texture's dimensions and internal type.
	pixels.allocate(texData.width,texData.height,ofGetImageTypeFromGLType(texData.glTypeInternal));
	bind();
	glGetTexImage(texData.textureTarget,0,ofGetGlFormat(pixels),GL_UNSIGNED_BYTE, pixels.getPixels());
	unbind();
#endif
}
Пример #7
0
// Copy constructor: deep-copies the source's pixel buffer when it has one,
// otherwise leaves this object empty and unallocated.
ofPixels::ofPixels(const ofPixels & mom){
	// Begin from a clean, unallocated state.
	bAllocated = false;
	pixels = NULL;
	if(!mom.isAllocated()) return;
	// Allocate matching storage, then copy the raw bytes across.
	allocate(mom.getWidth(), mom.getHeight(), mom.getImageType());
	int numBytes = mom.getWidth() * mom.getHeight() * mom.getBytesPerPixel();
	memcpy(pixels, mom.getPixels(), numBytes);
}
// Process one incoming camera frame: optionally downscale it, feed it to the
// fern tracker, recompute the homography from the tracked quad, and update
// the rolling FPS counter.
void Detector::newFrame(ofPixels & pixels){
	Poco::ScopedLock<ofMutex> lock(setupMutex);
	// Ignore frames while the detector isn't running or the buffer is empty.
	if(state!=Running || !pixels.getPixels()) return;
	if(pixels.getImageType()==OF_IMAGE_COLOR){
		if ( overrideWidth > 0 )
		{
			// Resize a copy so the caller's frame stays untouched.
			ofPixels tmpPixels = pixels;
			tmpPixels.resize( overrideWidth, overrideHeight );
			colorImg = tmpPixels.getPixels();
		}
		else
		{
			colorImg = pixels.getPixels();
		}
		// NOTE(review): presumably this assignment converts the color image to
		// the tracker's working format — confirm against the members' types.
		img = colorImg;
		fern.update(img);
		//img640.scaleIntoMe(img);
	}else{
		// Non-color frames are fed to the tracker directly (resized if requested).
		if ( overrideWidth > 0 )
		{
			ofPixels tmpPixels = pixels;
			tmpPixels.resize( overrideWidth, overrideHeight );
			fern.update(tmpPixels);
		}
		else
		{
			fern.update( pixels );
		}
		//img640.scaleIntoMe(img,CV_INTER_LINEAR);
	}
	isNewFrame = true;

	// Recompute the homography from the last tracked quad under the result mutex.
	mutex.lock();
	findOpenCvHomography(&srcQuad[0],&fern.getLastQuad()[0],homography.getPtr());
	mutex.unlock();

	// Update the frames-per-second estimate once per second.
	int curr_time = ofGetElapsedTimeMillis();
	frames++;
	if(curr_time - fps_time >=1000){
		fps=float(frames*1000)/(curr_time - fps_time);
		fps_time=curr_time;
		frames=0;
	}
}
Пример #9
0
// Read the FBO's contents back into a CPU-side pixel buffer.
void ofFbo::readToPixels(ofPixels & pixels, int attachmentPoint){
#ifndef TARGET_OPENGLES
	// Desktop GL: delegate to the attached texture's readback.
	getTextureReference(attachmentPoint).readToPixels(pixels);
#else
	// GLES: read straight from the bound framebuffer.
	// NOTE(review): pixels is not allocated before glReadPixels here —
	// presumably callers must pre-allocate it; confirm. attachmentPoint is
	// also unused on this path.
	bind();
	int format,type;
	ofGetGlFormatAndType(settings.internalformat,format,type);
	glReadPixels(0,0,settings.width, settings.height, format, GL_UNSIGNED_BYTE, pixels.getPixels());
	unbind();
#endif
}
Пример #10
0
// Read the FBO's contents back into a CPU-side pixel buffer.
// Does nothing if the FBO was never allocated.
void ofFbo::readToPixels(ofPixels & pixels, int attachmentPoint){
	if(!bIsAllocated) return;
#ifndef TARGET_OPENGLES
	// Desktop GL: delegate to the attached texture's readback.
	getTextureReference(attachmentPoint).readToPixels(pixels);
#else
	// GLES: size the destination to the FBO's dimensions/format, then read
	// directly from the framebuffer.
	// NOTE(review): attachmentPoint is unused on this path — confirm intended.
	pixels.allocate(settings.width,settings.height,ofGetImageTypeFromGLType(settings.internalformat));
	bind();
	int format = ofGetGLFormatFromInternal(settings.internalformat);
	glReadPixels(0,0,settings.width, settings.height, format, GL_UNSIGNED_BYTE, pixels.getPixels());
	unbind();
#endif
}
Пример #11
0
// Clear isolated non-zero pixels (no non-zero 8-neighbour) from a
// single-channel image, in place. Border pixels are never examined.
// Note: clearing happens during the scan, so earlier clears can affect
// later neighbourhood checks — same behaviour as the original pass order.
void removeIslands(ofPixels& img) {
	int w = img.getWidth(), h = img.getHeight();
	unsigned char* p = img.getPixels();
	// Index offsets of the eight neighbours in row-major order.
	const int neighborOffset[8] = { -w - 1, -w, -w + 1, -1, +1, +w - 1, +w, +w + 1 };
	for (int row = 1; row + 1 < h; row++) {
		for (int col = 1; col + 1 < w; col++) {
			int idx = row * w + col;
			if (!p[idx]) continue;
			// A pixel is an island when every neighbour is zero.
			bool isolated = true;
			for (int k = 0; k < 8; k++) {
				if (p[idx + neighborOffset[k]]) {
					isolated = false;
					break;
				}
			}
			if (isolated) p[idx] = 0;
		}
	}
}
Пример #12
0
// Return the color at (x, y) in the pixel buffer, or a default-constructed
// ofColor when the coordinates are out of bounds.
ofColor testApp::getColorAtPos(ofPixels & pixels, int x, int y){
	
	ofColor pickedColor;
	
	if( x >= 0 && x < pixels.getWidth() && y >= 0 && y < pixels.getHeight() ){
	
		unsigned char * pix = pixels.getPixels();
		int channels = pixels.getNumChannels();
		
		int posInMem = ( y * pixels.getWidth() + x) * channels;
		
		if( channels >= 3 ){
			// RGB / RGBA layout: read the three color components.
			unsigned char r = pix[posInMem]; 
			unsigned char g = pix[posInMem+1]; 
			unsigned char b = pix[posInMem+2]; 
			pickedColor.set(r, g, b);
		}else{
			// BUGFIX: the original read posInMem+1 / posInMem+2 unconditionally,
			// which is out of bounds for single-channel (grayscale) images.
			unsigned char v = pix[posInMem];
			pickedColor.set(v, v, v);
		}
	}
	
	return pickedColor;
}
Пример #13
0
//----------------------------------------------------
// Copy a FreeImage bitmap into an ofPixels buffer, first normalizing
// palettized and unusual bit depths to 24-bit RGB.
// @param bmp                  source bitmap (not modified; a temp converted
//                             copy is used when conversion is needed)
// @param pix                  destination buffer (reallocated here)
// @param swapForLittleEndian  swap R/B channels on little-endian targets
void putBmpIntoPixels(FIBITMAP * bmp, ofPixels &pix, bool swapForLittleEndian = true){
	int width			= FreeImage_GetWidth(bmp);
	int height			= FreeImage_GetHeight(bmp);
	int bpp				= FreeImage_GetBPP(bmp);

	// Holds the converted copy (if any); unloaded at the end.
	FIBITMAP * bmpTemp = NULL;

	switch (bpp){
		case 8:
			// 8-bit palettized images are expanded to 24-bit RGB;
			// plain 8-bit grayscale passes through untouched.
			if (FreeImage_GetColorType(bmp) == FIC_PALETTE) {
				bmpTemp = FreeImage_ConvertTo24Bits(bmp);
				bmp = bmpTemp;
				bpp = FreeImage_GetBPP(bmp);
			} else {
			// do nothing we are grayscale
			}
		break;
		case 24:
			// do nothing we are color
		break;
		case 32:
			// do nothing we are colorAlpha
		break;
		default:
			// Any other depth (1/4/16-bit, ...) is normalized to 24-bit RGB.
			bmpTemp = FreeImage_ConvertTo24Bits(bmp);
			bmp = bmpTemp;
			bpp = FreeImage_GetBPP(bmp);
		break;
	}

	int bytesPerPixel	= bpp / 8;
	// NOTE(review): allocate() is passed bpp (bits per pixel) — presumably
	// this OF version's allocate accepts bits per pixel rather than a channel
	// count or image type; confirm against ofPixels::allocate.
	pix.allocate(width, height, bpp);
	FreeImage_ConvertToRawBits(pix.getPixels(), bmp, width*bytesPerPixel, bpp, FI_RGBA_RED_MASK, FI_RGBA_GREEN_MASK, FI_RGBA_BLUE_MASK, true);  // get bits

	if (bmpTemp != NULL) FreeImage_Unload(bmpTemp);

	#ifdef TARGET_LITTLE_ENDIAN
		if(swapForLittleEndian)
			pix.swapRgb();
	#endif
}
Пример #14
0
//------------------------------------------------------------------------------
// Send a pixel buffer over the connection as one binary WebSocket message.
bool BaseWebSocketSessionManager::sendBinary(AbstractWebSocketConnection* connection,
                                             ofPixels& pixels)
{
    // Hand the raw byte buffer and its length to the pointer/size overload.
    unsigned char* data = pixels.getPixels();
    unsigned int numBytes = static_cast<unsigned int>(pixels.size());
    return sendBinary(connection, data, numBytes);
}
Пример #15
0
/**
 Runs contour tracing on a binary image and returns the contour points,
 in traversal order, of the first blob found.
 @param src input pixel buffer (must be a single-channel binary image)
 @return ordered contour points; empty when src is multi-channel or
         contains no foreground pixel
 */
vector<ofPoint> getContourPoints(const ofPixels & src)
{
    // IMPROVED: src was taken by value, deep-copying the whole image on every
    // call; a const reference is call-compatible and avoids the copy.
    //
    // Reference: http://homepage2.nifty.com/tsugu/sotuken/binedge/#detailed
    // Probe start directions around pixel p (vec):
    //  _____
    // |0 7 6|
    // |1 p 5|
    // |2 3 4|
    //  -----
    // The switch below deliberately falls through: when no contour pixel is
    // found in the current direction, probing continues with the next case.
    
    vector<ofPoint> dstPts;
    int w = src.getWidth();
    int h = src.getHeight();
    
    // Only single-channel (binary) images are supported.
    if (src.getNumChannels() != 1) return dstPts;
    
    // Scan the image for a foreground (non-zero) pixel.
    for(int i=0; i < w * h; i++)
    {
        
        // Border pixels are skipped.
        if (isSide(i, w, h)) continue;
        
        // Found a foreground pixel: start the tracing loop.
        if( src.getPixels()[i] != 0 ) {
            int p = i;      // index of the current contour pixel
            int tp = 0;     // index of the probed candidate pixel
            int vec = 2;    // first probe direction: lower-left
            bool isFirst = true;
            
            dstPts.push_back(convertIndex2Points(p, w));
            
            // Trace until we arrive back at the starting pixel.
            while( p != i || isFirst ) {
                switch(vec) {
                    case 0:    // probe upper-left
                        tp = p - w - 1;
                        // Probe only when the candidate is not on the border;
                        // otherwise fall through to the next direction.
                        if (!isSide(tp, w, h)) {
                            if( src.getPixels()[tp] != 0 ) {
                                p = tp;
                                dstPts.push_back(convertIndex2Points(p, w));
                                vec = 6;
                                break;
                            }
                        }
                    case 1:    // probe left
                        tp = p - 1;
                        if (!isSide(tp, w, h)) {
                            if( src.getPixels()[tp] != 0 ) {
                                p = tp;
                                dstPts.push_back(convertIndex2Points(p, w));
                                vec = 0;
                                break;
                            }
                        }
                    case 2:    // probe lower-left
                        tp = p + w - 1;
                        if (!isSide(tp, w, h)) {
                            if( src.getPixels()[tp] != 0 ) {
                                p = tp;
                                dstPts.push_back(convertIndex2Points(p, w));
                                isFirst = false;
                                vec = 0;
                                break;
                            }
                        }
                    case 3:    // probe below
                        tp = p + w;
                        if (!isSide(tp, w, h)) {
                            if( src.getPixels()[tp] != 0 ) {
                                p = tp;
                                dstPts.push_back(convertIndex2Points(p, w));
                                isFirst = false;
                                vec = 2;
                                break;
                            }
                        }
                    case 4:    // probe lower-right
                        tp = p + w + 1;
                        if (!isSide(tp, w, h)) {
                            if( src.getPixels()[tp] != 0 ) {
                                p = tp;
                                dstPts.push_back(convertIndex2Points(p, w));
                                isFirst = false;
                                vec = 2;
                                break;
                            }
                        }
                    case 5:    // probe right
                        tp = p + 1;
                        if (!isSide(tp, w, h)) {
                            if( src.getPixels()[tp] != 0 ) {
                                p = tp;
                                dstPts.push_back(convertIndex2Points(p, w));
                                isFirst = false;
                                vec = 4;
                                break;
                            }
                        }
                        else {
                            // Isolated single pixel: end tracing after the first pass.
                            if( isFirst ) {
                                isFirst = false;
                                break;
                            }
                        }
                    case 6:    // probe upper-right
                        tp = p - w + 1;
                        if (!isSide(tp, w, h)) {
                            if( src.getPixels()[tp] != 0 ) {
                                p = tp;
                                dstPts.push_back(convertIndex2Points(p, w));
                                vec = 4;
                                break;
                            }
                        }
                    case 7:    // probe above
                        tp = p - w;
                        if (!isSide(tp, w, h)) {
                            if( src.getPixels()[tp] != 0 ) {
                                p = tp;
                                dstPts.push_back(convertIndex2Points(p, w));
                                vec = 6;
                                break;
                            }
                        }
                        // Wrap around: restart probing from the upper-left.
                        vec = 0;
                }
            }
            return dstPts; // tracing finished
        }
    }
    cout << "[ERROR] 有効画素が見つかりません" << endl;
    return dstPts;
}
Пример #16
0
//--------------------------------------------------------------------------------
// Set the region of interest from an ofPixels object by delegating to the
// raw-pointer overload with explicit dimensions.
void ofxCvImage::setRoiFromPixels( const ofPixels & pixels ){
	int w = pixels.getWidth();
	int h = pixels.getHeight();
	setRoiFromPixels( pixels.getPixels(), w, h );
}
Пример #17
0
//----------------------------------------------------------
// Upload pixel data using an explicitly supplied GL pixel format.
void ofTexture::loadData(const ofPixels & pix, int glFormat){
	// Configure unpack alignment/stride for the source data before uploading.
	ofSetPixelStorei(pix.getWidth(),pix.getBytesPerChannel(),ofGetNumChannelsFromGLFormat(glFormat));
	loadData(pix.getPixels(), pix.getWidth(), pix.getHeight(), glFormat, ofGetGlType(pix));
}
Пример #18
0
//----------------------------------------------------------
// Upload pixel data, deriving GL format and data type from the pixel object.
void ofTexture::loadData(const ofPixels & pix){
	// Configure unpack alignment from the buffer's byte stride before uploading.
	ofSetPixelStorei(pix.getBytesStride());
	loadData(pix.getPixels(), pix.getWidth(), pix.getHeight(), ofGetGlFormat(pix), ofGetGlType(pix));
}
Пример #19
0
//----------------------------------------------------------
// Upload pixel data, using the pixel object's own GL data type.
void ofTexture::loadData(ofPixels & pix){
	loadData(pix.getPixels(), pix.getWidth(), pix.getHeight(), pix.getGlDataType());
}
Пример #20
0
//----------------------------------------------------------
// Upload pixel data, deriving the GL format from the pixel object.
void ofTexture::loadData(const ofPixels & pix){
	loadData(pix.getPixels(), pix.getWidth(), pix.getHeight(), ofGetGlFormat(pix));
}
Пример #21
0
// Copy assignment: deep-copies the source's pixel buffer when it has one.
void ofPixels::operator=(const ofPixels & mom){
	// BUGFIX: guard against self-assignment — allocate() could otherwise
	// disturb the very buffer we are about to memcpy from.
	if(this == &mom) return;
	if(mom.isAllocated()){
		allocate(mom.getWidth(),mom.getHeight(),mom.getImageType());
		memcpy(pixels,mom.getPixels(),mom.getWidth()*mom.getHeight()*mom.getBytesPerPixel());
	}
	// NOTE(review): when mom is unallocated the destination keeps its previous
	// contents — presumably intentional; confirm against callers.
}