//--------------------------------- int ofGetGlInternalFormat(const ofPixels& pix) { return ofGetGLInternalFormatFromPixelFormat(pix.getPixelFormat()); }
//---------------------------------------------------------- void ofTexture::loadData(const ofPixels & pix){ ofSetPixelStorei(pix.getBytesStride()); loadData(pix.getPixels(), pix.getWidth(), pix.getHeight(), ofGetGlFormat(pix), ofGetGlType(pix)); }
//---------------------------------------------------------- void ofTexture::loadData(const ofPixels & pix, int glFormat){ ofSetPixelStorei(pix.getWidth(),pix.getBytesPerChannel(),ofGetNumChannelsFromGLFormat(glFormat)); loadData(pix.getPixels(), pix.getWidth(), pix.getHeight(), glFormat, ofGetGlType(pix)); }
// Upload a 2D pixel slice into this 3D texture at depth `d` with the given
// x/y/z offsets; the GL format is derived from the pixel buffer itself.
void ofxTexture3d::loadData(ofPixels & pix, int d, int xOffset, int yOffset, int zOffset)
{
	const int glFormat = ofGetGlFormat(pix);
	loadData(pix.getData(), pix.getWidth(), pix.getHeight(), d,
	         xOffset, yOffset, zOffset, glFormat);
}
//---------------------------------------------------------- void ofTexture::loadData(const ofPixels & pix, int glFormat){ ofSetPixelStoreiAlignment(GL_UNPACK_ALIGNMENT,pix.getWidth(),pix.getBytesPerChannel(),ofGetNumChannelsFromGLFormat(glFormat)); loadData(pix.getData(), pix.getWidth(), pix.getHeight(), glFormat, ofGetGlType(pix)); }
void ofxAruco::getMarkerImage(int markerID, int size, ofPixels & pixels){ cv::Mat m = aruco::Marker::createMarkerImage(markerID,size); pixels.setFromPixels(m.data,size,size,OF_IMAGE_GRAYSCALE); }
// Upload a pixel buffer as a 2D texture through the ImGui engine and return
// the resulting GL texture id.
GLuint ofxImGui::loadPixels(ofPixels& pixels)
{
	const int width = pixels.getWidth();
	const int height = pixels.getHeight();
	return engine->loadTextureImage2D(pixels.getData(), width, height);
}
// by now we're copying everything (no pointers) void ofxGifFile::addFrame(ofPixels _px, int _left, int _top, bool useTexture, GifFrameDisposal disposal, float _duration){ ofxGifFrame f; if(getNumFrames() == 0){ accumPx = _px; // we assume 1st frame is fully drawn if ( !useTexture ){ f.setUseTexture(false); } f.setFromPixels(_px , _left, _top, _duration); gifDuration = _duration; } else { // add new pixels to accumPx int cropOriginX = _left; int cropOriginY = _top; // [todo] make this loop only travel through _px, not accumPx for (int i = 0; i < accumPx.getWidth() * accumPx.getHeight(); i++) { int x = i % accumPx.getWidth(); int y = i / accumPx.getWidth(); if (x >= _left && x < _left + _px.getWidth() && y >= _top && y < _top + _px.getHeight()){ int cropX = x - cropOriginX; // (i - _left) % _px.getWidth(); int cropY = y - cropOriginY; //int cropI = cropX + cropY * _px.getWidth(); if ( _px.getColor(cropX, cropY).a == 0 ){ switch ( disposal ) { case GIF_DISPOSAL_BACKGROUND: _px.setColor(x,y,bgColor); break; case GIF_DISPOSAL_LEAVE: case GIF_DISPOSAL_UNSPECIFIED: _px.setColor(x,y,accumPx.getColor(cropX, cropY)); // accumPx.setColor(x,y,_px.getColor(cropX, cropY)); break; case GIF_DISPOSAL_PREVIOUS: _px.setColor(x,y,accumPx.getColor(cropX, cropY)); break; } } else { accumPx.setColor(x, y, _px.getColor(cropX, cropY) ); } } else { if ( _px.getColor(x, y) == bgColor ){ switch ( disposal ) { case GIF_DISPOSAL_BACKGROUND: accumPx.setColor(x,y,bgColor); break; case GIF_DISPOSAL_UNSPECIFIED: case GIF_DISPOSAL_LEAVE: accumPx.setColor(x,y,_px.getColor(x, y)); break; case GIF_DISPOSAL_PREVIOUS: _px.setColor(x,y,accumPx.getColor(x, y)); break; } } else { accumPx.setColor(x, y, _px.getColor(x, y) ); } } } if ( !useTexture ){ f.setUseTexture(false); } f.setFromPixels(_px,_left, _top, _duration); } accumPx = _px; // gifFrames.push_back(f); }
// Convert an incoming frame to the requested image type and append it to the
// frame buffer. The by-value parameter gives us a private, mutable copy.
void ofxVideoBuffers::getNewImage(ofPixels pix, ofImageType type)
{
	pix.setImageType(type);
	buffer.push_back(pix);
}
//----------------------------------------------------------
// Copy assignment: clones mom's pixel data into this object.
// If mom has no allocated pixels this is a no-op (existing data is kept).
void ofPixels::operator=(const ofPixels & mom){
	// Guard against self-assignment: allocate() could otherwise reallocate
	// the very buffer we are about to memcpy from, and memcpy with
	// overlapping (identical) src/dst is undefined behavior.
	if(this == &mom){ return; }
	if(mom.isAllocated()){
		allocate(mom.getWidth(), mom.getHeight(), mom.getImageType());
		// Raw byte copy; both buffers have identical dimensions/layout here.
		memcpy(pixels, mom.getPixels(), mom.getWidth()*mom.getHeight()*mom.getBytesPerPixel());
	}
}
// Draws a pixelated version of the image: averages each boxWidth x boxHeight
// cell of the R/G/B channels and draws one filled rectangle per cell.
// pixelRatio must be in [0, 4]; larger values produce smaller (finer) cells.
void ofxImageTS::pixelate(ofPixels pixels, int pixelRatio) {
    if(pixelRatio > 4 || pixelRatio < 0) {
        // Fixed message: the accepted range is 0..4 (the check rejects 5).
        ofLogNotice("Pixel Ratio must be between 0 and 4");
    } else {
        ofPixels R,G,B, copy;
        // Normalize to a known size so the box dimensions divide evenly.
        if(pixels.getWidth() < pixels.getHeight()) pixels.resize(640,480);
        if(pixels.getWidth() > pixels.getHeight()) pixels.resize(480,640);
        copy.allocate(pixels.getWidth(), pixels.getHeight(), OF_PIXELS_RGB);
        copy = pixels;
        R = copy.getChannel(0);
        G = copy.getChannel(1);
        B = copy.getChannel(2);
        int camWidth = pixels.getWidth();
        int camHeight = pixels.getHeight();
        int boxWidth = pixels.getWidth()/(pow(2,pixelRatio)*10);
        int boxHeight = pixels.getHeight()/(pow(2,pixelRatio)*10);
        // Robustness fix: a square input skips both resizes above, and a small
        // image can then yield a zero box size, which would make the loops
        // below never advance (x += 0). Bail out instead of hanging.
        if(boxWidth <= 0 || boxHeight <= 0) return;
        float tot = boxWidth*boxHeight;
        for (int x = 0; x < camWidth; x += boxWidth) {
            for (int y = 0; y < camHeight; y += boxHeight) {
                // Accumulate the per-channel sums over the whole cell.
                float Red = 0, Green = 0, Blue = 0;
                for (int k = 0; k < boxWidth; k++) {
                    for (int l = 0; l < boxHeight; l++) {
                        int index = (x + k) + (y + l) * camWidth;
                        Red += R[index];
                        Green += G[index];
                        Blue += B[index];
                    }
                }
                // Bug fix: the draw used to sit inside the k-loop, repainting
                // each cell boxWidth times with partial averages. The final
                // repaint used the full sums, so drawing once here with the
                // complete average produces the same image, boxWidth x faster.
                ofSetColor(Red/tot,Green/tot,Blue/tot);
                ofFill();
                ofDrawRectangle(x, y, boxWidth, boxHeight);
            }
        }
    }
}
//---------------------------------------------------------- void ofTexture::loadData(ofPixels & pix){ loadData(pix.getPixels(), pix.getWidth(), pix.getHeight(), pix.getGlDataType()); }
/**
 * Runs contour tracing on a binary image and returns each contour point in
 * traversal order. Returns an empty vector unless the image is single-channel.
 * Reference: http://homepage2.nifty.com/tsugu/sotuken/binedge/#detailed
 *
 * Neighbor direction indices (vec) around the current pixel p:
 *  _____
 * |0 7 6|
 * |1 p 5|
 * |2 3 4|
 *  -----
 *
 * @param src input image pixel array (binary, 1 channel)
 *            NOTE(review): passed by value — copies the whole buffer per call;
 *            a const reference would avoid the copy. Confirm before changing.
 */
vector<ofPoint> getContourPoints(const ofPixels src)
{
	vector<ofPoint> dstPts;
	int w = src.getWidth();
	int h = src.getHeight();
	if (src.getNumChannels() != 1) return dstPts;
	// Scan the image for the first non-zero (foreground) pixel.
	for(int i=0; i < w * h; i++) {
		// Skip border pixels.
		if (isSide(i, w, h)) continue;
		// Found a foreground pixel: start tracing from here.
		if( src.getPixels()[i] != 0 ) {
			int p = i;          // index of the current contour pixel
			int tp = 0;         // index of the candidate (probe) pixel
			int vec = 2;        // first probe direction: lower-left
			bool isFirst = true;
			dstPts.push_back(convertIndex2Points(p, w));
			// Trace until we return to the starting pixel.
			// The switch below relies on DELIBERATE fallthrough: when a
			// direction yields no foreground neighbor, control falls through
			// to probe the next direction clockwise.
			while( p != i || isFirst ) {
				switch(vec) {
					case 0: // probe upper-left
						tp = p - w - 1;
						// Only probe if the candidate is not on the border;
						// otherwise fall through to the next direction.
						if (!isSide(tp, w, h)) {
							if( src.getPixels()[tp] != 0 ) {
								p = tp;
								dstPts.push_back(convertIndex2Points(p, w));
								vec = 6;
								break;
							}
						}
					case 1: // probe left
						tp = p - 1;
						if (!isSide(tp, w, h)) {
							if( src.getPixels()[tp] != 0 ) {
								p = tp;
								dstPts.push_back(convertIndex2Points(p, w));
								vec = 0;
								break;
							}
						}
					case 2: // probe lower-left
						tp = p + w - 1;
						if (!isSide(tp, w, h)) {
							if( src.getPixels()[tp] != 0 ) {
								p = tp;
								dstPts.push_back(convertIndex2Points(p, w));
								isFirst = false;
								vec = 0;
								break;
							}
						}
					case 3: // probe below
						tp = p + w;
						if (!isSide(tp, w, h)) {
							if( src.getPixels()[tp] != 0 ) {
								p = tp;
								dstPts.push_back(convertIndex2Points(p, w));
								isFirst = false;
								vec = 2;
								break;
							}
						}
					case 4: // probe lower-right
						tp = p + w + 1;
						if (!isSide(tp, w, h)) {
							if( src.getPixels()[tp] != 0 ) {
								p = tp;
								dstPts.push_back(convertIndex2Points(p, w));
								isFirst = false;
								vec = 2;
								break;
							}
						}
					case 5: // probe right
						tp = p + 1;
						if (!isSide(tp, w, h)) {
							if( src.getPixels()[tp] != 0 ) {
								p = tp;
								dstPts.push_back(convertIndex2Points(p, w));
								isFirst = false;
								vec = 4;
								break;
							}
						} else {
							// Candidate is on the border: if we are still on the
							// very first step, treat the start as an isolated point.
							if( isFirst ) {
								isFirst = false;
								break;
							}
						}
					case 6: // probe upper-right
						tp = p - w + 1;
						if (!isSide(tp, w, h)) {
							if( src.getPixels()[tp] != 0 ) {
								p = tp;
								dstPts.push_back(convertIndex2Points(p, w));
								vec = 4;
								break;
							}
						}
					case 7: // probe above
						tp = p - w;
						if (!isSide(tp, w, h)) {
							if( src.getPixels()[tp] != 0 ) {
								p = tp;
								dstPts.push_back(convertIndex2Points(p, w));
								vec = 6;
								break;
							}
						}
						// All eight directions exhausted: wrap around and
						// restart probing from direction 0 on the next pass.
						vec = 0;
				}
			}
			return dstPts; // tracing finished
		}
	}
	// No foreground pixel found anywhere in the image.
	cout << "[ERROR] 有効画素が見つかりません" << endl;
	return dstPts;
}
//-------------------------------------------------------------- void FaceAugmented::setImage(const ofPixels & pixels) { pixels.cropTo(image.getPixels(), roi.getX(), roi.getY(), roi.getWidth(), roi.getHeight()); image.update(); }
// Runs YOLO region detection on a pixel buffer. Returns detections whose
// probability passes `threshold`, suppressing any box that overlaps an
// already-accepted (higher-probability) detection by more than `maxOverlap`.
// Each detection also carries a feature vector sampled from the layer just
// before the output layer.
std::vector< detected_object > ofxDarknet::yolo( ofPixels & pix, float threshold /*= 0.24f */, float maxOverlap /*= 0.5f */ )
{
	// Remember original size so boxes can be mapped back at the end.
	int originalWidth = pix.getWidth();
	int originalHeight = pix.getHeight();
	ofPixels pix2( pix ); // work on a copy; caller's pixels stay untouched
	if (pix2.getImageType() != OF_IMAGE_COLOR) {
		pix2.setImageType(OF_IMAGE_COLOR);
	}
	// NOTE(review): `&&` means an image matching the net in ONE dimension is
	// not resized at all — `||` looks intended; confirm against convert().
	if( pix2.getWidth() != net.w && pix2.getHeight() != net.h ) {
		pix2.resize( net.w, net.h );
	}
	image im = convert( pix2 );
	layer l = net.layers[ net.n - 1 ]; // output (region) layer
	// One box + one per-class probability array per grid cell/anchor.
	box *boxes = ( box* ) calloc( l.w*l.h*l.n, sizeof( box ) );
	float **probs = ( float** ) calloc( l.w*l.h*l.n, sizeof( float * ) );
	// NOTE(review): sizeof(float *) here over-allocates (element is float);
	// harmless but presumably a typo — confirm.
	for( int j = 0; j < l.w*l.h*l.n; ++j ) probs[ j ] = ( float* ) calloc( l.classes, sizeof( float * ) );
	network_predict( net, im.data1 );
	get_region_boxes( l, 1, 1, threshold, probs, boxes, 0, 0 );
	do_nms_sort( boxes, probs, l.w*l.h*l.n, l.classes, 0.4 );
	free_image( im );
	std::vector< detected_object > detections;
	int num = l.w*l.h*l.n;
	// Feature vectors come from the second-to-last layer.
	int feature_layer = net.n - 2;
	layer l1 = net.layers[ feature_layer ];
	float * features = get_network_output_layer_gpu(feature_layer);
	// Visit candidates in descending best-class probability so the overlap
	// suppression below keeps the most confident box in each cluster.
	vector<size_t> sorted(num);
	iota(sorted.begin(), sorted.end(), 0);
	sort(sorted.begin(), sorted.end(), [&probs, &l](int i1, int i2) { return probs[i1][max_index(probs[i1], l.classes)] > probs[i2][max_index(probs[i2], l.classes)]; });
	for( int i = 0; i < num; ++i ) {
		int idx = sorted[i];
		int class1 = max_index( probs[ idx ], l.classes ); // best class for this box
		float prob = probs[ idx ][ class1 ];
		if( prob < threshold ) {
			continue;
		}
		// Deterministic per-class display color.
		int offset = class1 * 123457 % l.classes;
		float red = get_color( 2, offset, l.classes );
		float green = get_color( 1, offset, l.classes );
		float blue = get_color( 0, offset, l.classes );
		// Box center/size are normalized; convert to pixel corners, clamp to
		// the network image, then map back to the original image size.
		box b = boxes[ idx ];
		int left = ( b.x - b.w / 2. )*im.w;
		int right = ( b.x + b.w / 2. )*im.w;
		int top = ( b.y - b.h / 2. )*im.h;
		int bot = ( b.y + b.h / 2. )*im.h;
		if( left < 0 ) left = 0;
		if( right > im.w - 1 ) right = im.w - 1;
		if( top < 0 ) top = 0;
		if( bot > im.h - 1 ) bot = im.h - 1;
		left = ofMap( left, 0, net.w, 0, originalWidth );
		top = ofMap( top, 0, net.h, 0, originalHeight );
		right = ofMap( right, 0, net.w, 0, originalWidth );
		bot = ofMap( bot, 0, net.h, 0, originalHeight );
		ofRectangle rect = ofRectangle( left, top, right - left, bot - top );
		int rect_idx = floor(idx / l.n); // grid-cell index used to sample features
		// Extra overlap suppression against detections already accepted.
		float overlap = 0.0;
		for (auto d : detections) {
			float left = max(rect.x, d.rect.x);
			float right = min(rect.x+rect.width, d.rect.x+d.rect.width);
			float bottom = min(rect.y+rect.height, d.rect.y+d.rect.height);
			float top = max(rect.y, d.rect.y);
			float area_intersection = max(0.0f, right-left) * max(0.0f, bottom-top);
			overlap = max(overlap, area_intersection / (rect.getWidth() * rect.getHeight()));
		}
		if (overlap > maxOverlap) {
			continue;
		}
		detected_object detection;
		detection.label = names[ class1 ];
		detection.probability = prob;
		detection.rect = rect;
		detection.color = ofColor( red * 255, green * 255, blue * 255);
		// One feature value per channel of the feature layer, all taken at
		// this detection's grid cell.
		for (int f=0; f<l1.c; f++) {
			detection.features.push_back(features[rect_idx + l1.w * l1.h * f]);
		}
		detections.push_back( detection );
	}
	// Release the C-allocated probability arrays and boxes.
	free_ptrs((void**) probs, num);
	free(boxes);
	return detections;
}
//---------------------------------------------------------- void ofTexture::loadData(const ofPixels & pix){ loadData(pix.getPixels(), pix.getWidth(), pix.getHeight(), ofGetGlFormat(pix)); }
// Render the configured marker board into a grayscale pixel buffer.
void ofxAruco::getBoardImage(ofPixels & pixels){
	cv::Mat boardMat = aruco::Board::createBoardImage(boardConfig.size,
	                                                  boardConfig._markerSizePix,
	                                                  boardConfig._markerDistancePix,
	                                                  0, boardConfig);
	pixels.setFromPixels(boardMat.data, boardMat.cols, boardMat.rows, OF_IMAGE_GRAYSCALE);
}
// Set up our sketch. void setup() { ofSetWindowShape(1280, 720); // Set the window size. grabber.initGrabber(1280, 720); // Set the grabber size. pixels.allocate(1280, 720, OF_PIXELS_RGB); // Allocate memory for our pixels. }
// Copy the detector's thresholded (binary) image into a grayscale pixel buffer.
void ofxAruco::getThresholdImage(ofPixels & pixels){
	cv::Mat thresholded = detector.getThresholdedImage();
	pixels.setFromPixels(thresholded.data, thresholded.cols, thresholded.rows, OF_IMAGE_GRAYSCALE);
}
//---------------------------------------------------------- void ofTexture::allocate(const ofPixels& pix){ allocate(pix.getWidth(), pix.getHeight(), ofGetGlInternalFormat(pix), ofGetUsingArbTex(), ofGetGlFormat(pix), ofGetGlType(pix)); }
//--------------------------------------------
// Draws a pixel buffer through Cairo. Optionally crops (sx, sy, sw, sh), then
// converts the pixels into a Cairo image surface — swizzling RGB(A) bytes to
// Cairo's native order where needed — and paints it scaled to w x h at (x,y,z).
void ofCairoRenderer::draw(const ofPixels & raw, float x, float y, float z, float w, float h, float sx, float sy, float sw, float sh) const{
	// Crop only when the sub-rectangle differs from the full draw rectangle.
	bool shouldCrop = sx != 0 || sy != 0 || sw != w || sh != h;
	ofPixels cropped;
	if(shouldCrop) {
		cropped.allocate(sw, sh, raw.getPixelFormat());
		raw.cropTo(cropped, sx, sy, sw, sh);
	}
	const ofPixels & pix = shouldCrop ? cropped : raw;
	// This const method mutates the renderer's matrix stack; cast away const
	// for the push/translate/scale/pop calls.
	ofCairoRenderer * mut_this = const_cast<ofCairoRenderer*>(this);
	mut_this->pushMatrix();
	mut_this->translate(x,y,z);
	mut_this->scale(w/pix.getWidth(),h/pix.getHeight());
	cairo_surface_t *image;
	int stride=0;
	int picsize = pix.getWidth()* pix.getHeight();
	const unsigned char *imgPix = pix.getData();
	// Staging buffer for pixel formats that need repacking to 4 bytes/pixel.
	// It must stay alive until cairo_paint() below has consumed the surface.
	vector<unsigned char> swapPixels;
	switch(pix.getImageType()){
	case OF_IMAGE_COLOR:
		// RGB24: Cairo stores each pixel in 4 bytes; on little-endian the
		// in-memory order is B,G,R,X, so swap R and B while expanding.
#ifdef TARGET_LITTLE_ENDIAN
		swapPixels.resize(picsize * 4);
		for(int p= 0; p<picsize; p++) {
			swapPixels[p*4] = imgPix[p*3 +2];
			swapPixels[p*4 +1] = imgPix[p*3 +1];
			swapPixels[p*4 +2] = imgPix[p*3];
		}
#else
		swapPixels.resize(picsize * 4);
		for(int p= 0; p<picsize; p++) {
			swapPixels[p*4] = imgPix[p*3];
			swapPixels[p*4 +1] = imgPix[p*3 +1];
			swapPixels[p*4 +2] = imgPix[p*3 +2];
		}
#endif
		stride = cairo_format_stride_for_width (CAIRO_FORMAT_RGB24, pix.getWidth());
		image = cairo_image_surface_create_for_data(&swapPixels[0], CAIRO_FORMAT_RGB24, pix.getWidth(), pix.getHeight(), stride);
		break;
	case OF_IMAGE_COLOR_ALPHA:
		// ARGB32: swap R and B on little-endian; big-endian can use the
		// buffer directly without repacking.
#ifdef TARGET_LITTLE_ENDIAN
		swapPixels.resize(picsize * 4);
		for(int p= 0; p<picsize; p++) {
			swapPixels[p*4] = imgPix[p*4+2];
			swapPixels[p*4 +1] = imgPix[p*4+1];
			swapPixels[p*4 +2] = imgPix[p*4];
			swapPixels[p*4 +3] = imgPix[p*4+3];
		}
		stride = cairo_format_stride_for_width (CAIRO_FORMAT_ARGB32, pix.getWidth());
		image = cairo_image_surface_create_for_data(&swapPixels[0], CAIRO_FORMAT_ARGB32, pix.getWidth(), pix.getHeight(), stride);
#else
		stride = cairo_format_stride_for_width (CAIRO_FORMAT_ARGB32, pix.getWidth());
		image = cairo_image_surface_create_for_data(pix.getData(), CAIRO_FORMAT_ARGB32, pix.getWidth(), pix.getHeight(), stride);
#endif
		break;
	case OF_IMAGE_GRAYSCALE:
		// Replicate the single channel into R, G and B of an RGB24 surface.
		swapPixels.resize(picsize * 4);
		for(int p= 0; p<picsize; p++) {
			swapPixels[p*4] = imgPix[p];
			swapPixels[p*4 +1] = imgPix[p];
			swapPixels[p*4 +2] = imgPix[p];
		}
		stride = cairo_format_stride_for_width (CAIRO_FORMAT_RGB24, pix.getWidth());
		image = cairo_image_surface_create_for_data(&swapPixels[0], CAIRO_FORMAT_RGB24, pix.getWidth(), pix.getHeight(), stride);
		break;
	case OF_IMAGE_UNDEFINED:
	default:
		// Unknown pixel layout: log, restore the matrix stack, and bail.
		ofLogError("ofCairoRenderer") << "draw(): trying to draw undefined image type " << pix.getImageType();
		mut_this->popMatrix();
		return;
		break;
	}
	// Paint the surface, then flush and destroy it; Cairo does not own the
	// backing memory created via create_for_data.
	cairo_set_source_surface (cr, image, 0,0);
	cairo_paint (cr);
	cairo_surface_flush(image);
	cairo_surface_destroy (image);
	mut_this->popMatrix();
}
//---------------------------------------------------------- void ofTexture::allocate(const ofPixels& pix, bool bUseARBExtention){ allocate(pix.getWidth(), pix.getHeight(), ofGetGlInternalFormat(pix), bUseARBExtention, ofGetGlFormat(pix), ofGetGlType(pix)); }
//-------------------------------------------------------------------------------- void ofxCvImage::setRoiFromPixels( const ofPixels & pixels ){ setRoiFromPixels(pixels.getPixels(),pixels.getWidth(),pixels.getHeight()); }
//---------------------------------------------------------- void ofTexture::loadData(const ofPixels & pix){ ofSetPixelStoreiAlignment(GL_UNPACK_ALIGNMENT,pix.getBytesStride()); loadData(pix.getData(), pix.getWidth(), pix.getHeight(), ofGetGlFormat(pix), ofGetGlType(pix)); }