Пример #1
0
void ofxTurboJpeg::save(ofBuffer &buf, const ofPixels& pix, int jpegQuality)
{
	// Compress `pix` to JPEG into `buf`. RGBA input has its alpha stripped,
	// grayscale is expanded to RGB; both then re-enter through the RGB path.
	int pitch = 0, flags = 0, jpegsubsamp = 0;
	unsigned long size = 0;

	if (pix.getImageType() == OF_IMAGE_COLOR)
	{
		const int bpp = 3;

		// Worst-case compressed size. The old code sized the buffer as
		// w*h*bpp, which can be smaller than the fixed JPEG header/marker
		// overhead for very small images.
		vector<unsigned char> buffer(tjBufSize(pix.getWidth(), pix.getHeight(), jpegsubsamp));
		unsigned char * output = buffer.data();

		int ok = tjCompress(handleCompress, (unsigned char*)(pix.getData()), pix.getWidth(), pitch, pix.getHeight(), bpp, output, &size, jpegsubsamp, jpegQuality, flags);
		if (ok != 0)
		{
			// Previously ignored: a failed compress would set() garbage.
			printf("Error in tjCompress():\n%s\n", tjGetErrorStr());
			return;
		}

		buf.set((const char*)output, size);
	}
	else if (pix.getImageType() == OF_IMAGE_COLOR_ALPHA)
	{
		// Drop the alpha channel, then recurse with the RGB copy.
		ofPixels p;
		p.allocate(pix.getWidth(), pix.getHeight(), 3);

		const unsigned char *src = pix.getData();
		unsigned char *dst = p.getData();

		int num = pix.getWidth() * pix.getHeight();
		for (int i = 0; i < num; i++)
		{
			dst[0] = src[0];
			dst[1] = src[1];
			dst[2] = src[2];
			src += 4;
			dst += 3;
		}

		save(buf, p, jpegQuality);
	}
	else if (pix.getImageType() == OF_IMAGE_GRAYSCALE)
	{
		// Replicate the single channel into R, G and B, then recurse.
		ofPixels p;
		p.allocate(pix.getWidth(), pix.getHeight(), 3);

		const unsigned char *src = pix.getData();
		unsigned char *dst = p.getData();

		int num = pix.getWidth() * pix.getHeight();
		for (int i = 0; i < num; i++)
		{
			dst[0] = src[0];
			dst[1] = src[0];
			dst[2] = src[0];
			src += 1;
			dst += 3;
		}

		save(buf, p, jpegQuality);
	}
}
Пример #2
0
//----------------------------------------------------------
void ofTexture::loadData(const ofPixels & pix, int glFormat){
	// Lazily allocate the texture from the pixels' internal format on first use.
	if(!isAllocated()){
		allocate(pix.getWidth(), pix.getHeight(), ofGetGlInternalFormat(pix), ofGetUsingArbTex(), glFormat, ofGetGlType(pix));
	}
	const int channels = ofGetNumChannelsFromGLFormat(glFormat);
	const int glType = ofGetGlType(pix);
	// Row alignment must match the source pixel layout before uploading.
	ofSetPixelStoreiAlignment(GL_UNPACK_ALIGNMENT, pix.getWidth(), pix.getBytesPerChannel(), channels);
	loadData(pix.getData(), pix.getWidth(), pix.getHeight(), glFormat, glType);
}
Пример #3
0
	//----------
	bool Message::getData(ofPixels & data) const {
		// Fill `data` from a Pixels-typed message; returns false when the
		// header type or body size doesn't match.
		// The original fetched the header once *before* checking hasHeader()
		// (and immediately shadowed it) — that unguarded, unused fetch is
		// removed here.
		if (this->hasHeader<Header::Pixels>()) {
			const auto & header = this->getHeader<Header::Pixels>();
			auto bodySize = this->getBodySize();
			ofPixelFormat pixelFormat = (ofPixelFormat)header.pixelFormat;

			// Reallocate only when dimensions or format changed.
			if (data.getWidth() != header.width || data.getHeight() != header.height || data.getPixelFormat() != pixelFormat) {
				data.allocate(header.width, header.height, pixelFormat);
			}
			if (data.size() != bodySize) {
				OFXSQUASHBUDDIES_ERROR << "Message body is of wrong size to fill pixels. Maybe a bug in sender?";
				return false;
			}
			else {
				memcpy(data.getData(), this->getBodyData(), bodySize);
				return true;
			}
		}
		else {
			OFXSQUASHBUDDIES_WARNING << "Message Header doesn't match Pixels type";
			return false;
		}
	}
Пример #4
0
ccv_dense_matrix_t toCcv(const ofPixels& pix) {
    // Wrap the pixel buffer (no copy) as an 8-bit, 3-channel ccv matrix.
    const int rows = pix.getHeight();
    const int cols = pix.getWidth();
    void* buffer = (void*) pix.getData();
    return ccv_dense_matrix(rows, cols, CCV_8U | CCV_C3, buffer, 0);
}
Пример #5
0
	void Output::publishPixels(ofPixels &pix)
	{
		// Copy an incoming frame into the back buffer for DeckLink output.
		// Only frames exactly matching the configured output size are accepted.
		assert(mutex);

		if (pix.getWidth() == uiFrameWidth
			&& pix.getHeight() == uiFrameHeight)
		{
			mutex->lock();
			// (Re)allocate the back buffer to match the incoming frame.
			if (!back_buffer->isAllocated() ||
				back_buffer->getWidth() != pix.getWidth() ||
				back_buffer->getHeight() != pix.getHeight()) {
				back_buffer->allocate(pix.getWidth(), pix.getHeight(), pix.getNumChannels());
			}
			// NOTE(review): copies into the buffer at offset 1 and drops the
			// final source byte — presumably a deliberate one-byte channel
			// shift (see the commented-out plain assignment below), but it
			// looks like an off-by-one; confirm intent against the output
			// pixel format.
			memcpy(&back_buffer->getData()[1], pix.getData(), pix.size() - 1);
			//*back_buffer = pix;

			// Force the buffer to 4 channels after the copy.
			if (back_buffer->getNumChannels() != 4)
				back_buffer->setNumChannels(4);

			has_new_frame = true;

			mutex->unlock();
		}
		else
			ofLogError("ofxDeckLinkAPI::Output") << "invalid pixel size";
	}
Пример #6
0
//----------------------------------------------------------
void ofTexture::loadData(const ofPixels & pix){
	if(isAllocated()){
		// Texture exists: match the unpack alignment to the source row
		// stride, then forward to the raw-pointer overload.
		ofSetPixelStoreiAlignment(GL_UNPACK_ALIGNMENT, pix.getBytesStride());
		loadData(pix.getData(), pix.getWidth(), pix.getHeight(), ofGetGlFormat(pix), ofGetGlType(pix));
	}else{
		// Not yet allocated: delegate to allocate(), which sizes the
		// texture from the pixels.
		allocate(pix);
	}
}
Пример #7
0
//--------------------------------------------------------------
void ofApp::glitchUpdate(ofPixels _p) {
    // Glitch-art step: save the frame as a JPEG, flip bits inside the
    // compressed file, then reload it so the decoder produces artifacts.
    
    string compressedFilename = "compressed.jpg";
    
    unsigned char * _c = _p.getData();
    
    // ~5% of frames: offset the source pointer by up to 100 bytes to
    // mis-align the channel data.
    // NOTE(review): setFromPixels() below then reads up to 100 bytes past
    // the end of _p's buffer — an out-of-bounds read; consider clamping.
    float coin = ofRandom(100);
    if (coin > 95) {
        _c = _p.getData() + (int)ofRandom(100);
    }
    
    imgDirectGlitch.setImageType(OF_IMAGE_COLOR);

    // Facade extent from the architecture corner points.
    // NOTE(review): _w/_h are computed but unused in this function — confirm.
    float _w = baseArch.fassadeCorner[1].x - baseArch.fassadeCorner[0].x;
    float _h = baseArch.fassadeCorner[2].y - baseArch.fassadeCorner[0].y;
    imgDirectGlitch.setFromPixels(_c, webCam.getWidth(), webCam.getHeight(), OF_IMAGE_COLOR);
    
    // Write a lossy JPEG to disk at the configured quality.
    imgDirectGlitch.save(compressedFilename, quality);
    
    // Read the compressed bytes back for corruption.
    ofBuffer file = ofBufferFromFile(compressedFilename);
    int fileSize = file.size();
    char * buffer = file.getData();
    
    // Pick a random byte and bit position to corrupt.
    int whichByte = (int) ofRandom(fileSize);
    
    int whichBit = ofRandom(8);
    

    // High bit positions get a single-bit mask; low positions a three-bit
    // (0b111) mask, i.e. heavier corruption in the low bits.
    char bitMask;
    if ( whichBit >4 ) {
        bitMask = 1 << whichBit;
    } else {
        bitMask = 7 << whichBit;
    }
    
    buffer[whichByte] |= bitMask;
    
    // Persist the corrupted JPEG and reload it to bake in the artifacts.
    ofBufferToFile(compressedFilename, file);
    imgDirectGlitch.load(compressedFilename);
    
    //    float coin = ofRandom(100);
    //    if (coin > 95) {
    //        reset();
    //    }
    
}
Пример #8
0
//----------------------------------------------------------
void ofTexture::readToPixels(ofPixels & pixels) const {
	// Download the texture contents from the GPU into `pixels`.
	// No-op on OpenGL ES: glGetTexImage is not available there.
#ifndef TARGET_OPENGLES
	pixels.allocate(texData.width,texData.height,ofGetImageTypeFromGLType(texData.glInternalFormat));
	// Match the pack alignment to the destination row layout before readback.
	ofSetPixelStoreiAlignment(GL_PACK_ALIGNMENT,pixels.getWidth(),pixels.getBytesPerChannel(),pixels.getNumChannels());
	glBindTexture(texData.textureTarget,texData.textureID);
	glGetTexImage(texData.textureTarget,0,ofGetGlFormat(pixels),GL_UNSIGNED_BYTE, pixels.getData());
	glBindTexture(texData.textureTarget,0);
#endif
}
Пример #9
0
//----------------------------------------------------------
void ofFbo::readToPixels(ofPixels & pixels, int attachmentPoint) const{
	// Read an FBO color attachment back into `pixels`; silently does
	// nothing when the FBO was never allocated.
	if(!bIsAllocated) return;
#ifndef TARGET_OPENGLES
	// Desktop GL: delegate to the attachment texture's own readback.
	getTexture(attachmentPoint).readToPixels(pixels);
#else
	// GLES has no glGetTexImage: bind the FBO and use glReadPixels instead.
	// NOTE(review): attachmentPoint is not used on this path — confirm the
	// intended attachment is the one read by glReadPixels.
	pixels.allocate(settings.width,settings.height,ofGetImageTypeFromGLType(settings.internalformat));
	bind();
	int format = ofGetGLFormatFromInternal(settings.internalformat);
	glReadPixels(0,0,settings.width, settings.height, format, GL_UNSIGNED_BYTE, pixels.getData());
	unbind();
#endif
}
Пример #10
0
	//----------
	void Message::setData(const ofPixels & data) {
		// Serialize pixels as [Header::Pixels][raw pixel bytes].
		const auto pixelBytes = data.size(); // inner payload
		this->headerAndData.resize(sizeof(Header::Pixels) + pixelBytes);

		// Fill the header in place (true — presumably initializes the
		// header type; confirm against Message::getHeader).
		auto & header = this->getHeader<Header::Pixels>(true);
		header.width = data.getWidth();
		header.height = data.getHeight();
		header.pixelFormat = data.getPixelFormat();

		// Copy the pixel payload straight after the header.
		memcpy(this->getBodyData(), data.getData(), pixelBytes);
	}
Пример #11
0
	//----------
	void Decoder::operator<<(const ofPixels& pixels) {
		// Feed one captured camera frame into the graycode decoder.
		if (frame == 0) {
			data.allocate(pixels.getWidth(), pixels.getHeight(), payload->getWidth(), payload->getHeight());
		}

		if (frame > payload->getFrameCount() - 1) {
#pragma omp critical(ofLog)
			ofLogWarning("ofxGraycode") << "Can't add more frames, we've already captured a full set. please clear()";
			return;
		}

		if (!pixels.isAllocated()) {
			ofLogError("ofxGraycode") << "Cannot add this capture as the pixels object is empty";
			return;
		}

		// Average multi-channel input down to mono. The scratch buffer is
		// stack-owned now (RAII) — the original used a raw owning pointer
		// with a manual new/delete pair.
		ofPixels downsample;
		const ofPixels* greyPixels;
		if (pixels.getNumChannels() > 1) {
			downsample.allocate(pixels.getWidth(), pixels.getHeight(), OF_PIXELS_MONO);
			downsample.set(0, 0);
			const uint8_t* in = pixels.getData();
			uint8_t* out = downsample.getData();
			// `out` advances by one mono pixel each time `i` crosses a
			// channel boundary, so each output accumulates the per-channel
			// averages of one source pixel.
			for (int i = 0; i < pixels.size(); i++, out += (i % pixels.getNumChannels() == 0)) {
				*out += *in++ / pixels.getNumChannels();
			}
			greyPixels = &downsample;
		}
		else
			greyPixels = &pixels;

		if (this->payload->isOffline())
			captures.push_back(*greyPixels);
		else
			payload->readPixels(frame, *greyPixels);

		frame++;

		if (frame >= payload->getFrameCount()) {
			calc();
			frame = payload->getFrameCount();
		}
		// (no delete needed: `downsample` is destroyed on scope exit)
	}
Пример #12
0
bool toDLib(const ofPixels& inPix, array2d<rgb_pixel>& outPix){
    // Convert an ofPixels buffer into a dlib array2d<rgb_pixel>.
    // Mono input is replicated into grey; multi-channel input is read as
    // interleaved RGB(+ignored extras).
    // NOTE(review): assumes chans is 1, 3 or 4 — a 2-channel image would
    // still over-read the last pixel of each row.
    const int width  = inPix.getWidth();
    const int height = inPix.getHeight();
    const int chans  = inPix.getNumChannels();
    outPix.set_size( height, width );
    const unsigned char* data = inPix.getData();
    for ( int n = 0; n < height; n++ )
    {
        const unsigned char* v = &data[n * width * chans];
        for ( int m = 0; m < width; m++ )
        {
            if ( chans == 1 )
            {
                unsigned char p = v[m];
                assign_pixel( outPix[n][m], p );
            }
            else
            {
                // Index by the actual channel count: the old code hard-coded
                // a stride of 3, which read the wrong bytes (and past the
                // row) for RGBA input.
                rgb_pixel p;
                p.red   = v[m*chans];
                p.green = v[m*chans+1];
                p.blue  = v[m*chans+2];
                assign_pixel( outPix[n][m], p );
            }
        }
    }
    return true;
}
Пример #13
0
bool ofxTurboJpeg::load(const ofBuffer& buf, ofPixels &pix)
{
	// Decode a JPEG held in `buf` into 3-channel pixels.
	// Returns false on any TurboJPEG error.
	int w, h;
	int subsamp;
	int ok = tjDecompressHeader2(handleDecompress, (unsigned char*)buf.getData(), buf.size(), &w, &h, &subsamp);
	
	if (ok != 0)
	{
		printf("Error in tjDecompressHeader2():\n%s\n", tjGetErrorStr());
		return false;
	}
	
	pix.allocate(w, h, 3);
	
	// The original ignored this return value and reported success even for
	// a truncated/corrupt stream, handing back garbage pixels.
	ok = tjDecompress(handleDecompress, (unsigned char*)buf.getData(), buf.size(), pix.getData(), w, 0, h, 3, 0);
	if (ok != 0)
	{
		printf("Error in tjDecompress():\n%s\n", tjGetErrorStr());
		return false;
	}
	
	return true;
}
Пример #14
0
bool Frame::scale(ofPixels& pixels)
{
   // Rescale/convert `pixels` into the encoder frame `frm` via libswscale.
   // Returns false when the scaler context was never initialized.
   if (sws_ctx)
   {
       uint8_t * inData[1] = { pixels.getData() }; // RGBA32 have one plane
       //
       // NOTE: In a more general setting, the rows of your input image may
       //       be padded; that is, the bytes per row may not be 4 * width.
       //       In such cases, inLineSize should be set to that padded width.
       //
       int inLinesize[1] = { pixels.getBytesStride() };
       sws_scale(sws_ctx, inData, inLinesize, 0, pixels.getHeight(), frm->data, frm->linesize);
       // Advance the presentation timestamp by one frame per call.
       frm->pts = frm->pts + 1;
       return true;
   }
    // sws_scale(sws_ctx, inData, inLinesize, 0, ctx->height, frame->data, frame->linesize);
    return false;
}
void ofxAndroidMobileVision::process(ofPixels &pixels){
    // Push a frame to the Java MobileVision detector over JNI and collect
    // the detected faces (probabilities + 12 landmark points each).
    if(!javaMobileVision){
        ofLogError("ofxAndroidMobileVision") << "update(): java not loaded";
        return;
    }

    JNIEnv *env = ofGetJNIEnv();
    jmethodID javaMethod = env->GetMethodID(javaClass,"update","([BII)I");
    if(!javaMethod ){
        ofLogError("ofxAndroidMobileVision") << "update(): couldn't get java update for MobileVision";
        return;
    }

    // Copy the pixel bytes into a Java byte[] and run detection.
    jbyteArray arr = env->NewByteArray(pixels.size());
    env->SetByteArrayRegion( arr, 0, pixels.size(), (const signed char*) pixels.getData());
    int numFaces = env->CallIntMethod(javaMobileVision, javaMethod, arr, pixels.getWidth(), pixels.getHeight());
    env->DeleteLocalRef(arr);

    vector<ofxAndroidMobileVisionFace> analyzedfaces;
    for(int i=0;i<numFaces;i++) {
        // Get data
        auto method = env->GetMethodID(javaClass, "getData", "(I)[F");
        jfloatArray data = (jfloatArray) env->CallObjectMethod(javaMobileVision, method, 0);

        jboolean isCopy;
        jfloat *body =  env->GetFloatArrayElements(data, &isCopy);

        // Layout: [0..2] probabilities, then 12 (x, y) landmark pairs.
        ofxAndroidMobileVisionFace face;
        face.smileProbability = body[0];
        face.leftEyeOpenProbability = body[1];
        face.rightEyeOpenProbability = body[2];
        for(int j=0;j<12;j++){
            ofVec2f p;
            p.x = body[j*2+3];
            p.y = body[j*2+4];
            face.landmarks.push_back(p);
        }
        analyzedfaces.push_back(face);

        // Release the float buffer (JNI_ABORT: read-only, no copy-back).
        // The original never released it, leaking the pinned/copied array
        // for every face on every frame.
        env->ReleaseFloatArrayElements(data, body, JNI_ABORT);
        env->DeleteLocalRef(data);
    }

    fromAnalyze.send(analyzedfaces);
}
Пример #16
0
void drawHistogram(const ofPixels& pix, float height = 128, int skip = 16) {
    // Draw additive R/G/B histograms of an RGB pixel buffer, scaled so the
    // tallest bin fills `height` pixels; samples only a subset of pixels.
    // NOTE(review): the three i++'s inside the body make the effective
    // stride 3*(skip+1), i.e. one sample every skip+1 pixels — preserved
    // here, but confirm that was intended.
    vector<float> r(256), g(256), b(256);
    const unsigned char* data = pix.getData();
    int n = pix.size();
    // `i + 2 < n` guards the three reads in the body (the old `i < n`
    // could read past the end of a non-multiple-of-3 buffer).
    for(int i = 0; i + 2 < n; i += 3*skip) {
        r[data[i++]]++;
        g[data[i++]]++;
        b[data[i++]]++;
    }
    ofMesh rmesh, gmesh, bmesh;
    rmesh.setMode(OF_PRIMITIVE_TRIANGLE_STRIP);
    gmesh.setMode(OF_PRIMITIVE_TRIANGLE_STRIP);
    bmesh.setMode(OF_PRIMITIVE_TRIANGLE_STRIP);
    int peak = 0;
    // Was `i < 255`, which silently dropped bin 255 from both the meshes
    // and the peak computation.
    for(int i = 0; i < 256; i++) {
        rmesh.addVertex(ofVec3f(i, 0));
        rmesh.addVertex(ofVec3f(i, r[i]));
        gmesh.addVertex(ofVec3f(i, 0));
        gmesh.addVertex(ofVec3f(i, g[i]));
        bmesh.addVertex(ofVec3f(i, 0));
        bmesh.addVertex(ofVec3f(i, b[i]));
        peak = MAX(peak, r[i]);
        peak = MAX(peak, g[i]);
        peak = MAX(peak, b[i]);
    }
    // Guard the division below when every sampled bin is empty.
    if (peak == 0) peak = 1;
    ofPushMatrix();
    ofPushStyle();
    ofEnableBlendMode(OF_BLENDMODE_ADD);
    ofScale(2, height / peak);
    ofSetColor(255);
    ofDrawLine(256, 0, 256, peak);
    ofTranslate(.5, 0);
    ofSetColor(255, 0, 0);
    rmesh.draw();
    ofSetColor(0, 255, 0);
    gmesh.draw();
    ofSetColor(0, 0, 255);
    bmesh.draw();
    ofPopStyle();
    ofPopMatrix();
}
Пример #17
0
image ofxDarknet::convert( ofPixels & pix )
{
	// Convert interleaved (HWC) uint8 pixels into darknet's planar (CHW)
	// float image, scaling each byte into [0, 1].
	const int h = pix.getHeight();
	const int w = pix.getWidth();
	const int c = pix.getNumChannels();
	const int step = w * c;
	unsigned char * data = ( unsigned char * ) pix.getData();

	image im = make_image( w, h, c );
	int count = 0;
	for( int k = 0; k < c; ++k ) {
		for( int i = 0; i < h; ++i ) {
			for( int j = 0; j < w; ++j ) {
				im.data1[ count++ ] = data[ i * step + j * c + k ] / 255.;
			}
		}
	}

	return im;
}
Пример #18
0
		// get pixels from a fbo or texture // untested
	void ftUtil::toPixels(ofTexture& _tex, ofPixels& _pixels) {
		// Read an 8-bit texture back from the GPU into _pixels, choosing
		// the GL read format (and channel count) from the texture's
		// internal format. Non-8-bit formats are rejected with a warning.
		ofTextureData& texData = _tex.getTextureData();
		int format = texData.glInternalFormat;
		int readFormat, numChannels;
		
		switch(format){
			case GL_R8: 	readFormat = GL_RED, 	numChannels = 1; break; // or is it GL_R
			case GL_RG8: 	readFormat = GL_RG, 	numChannels = 2; break;
			case GL_RGB8: 	readFormat = GL_RGB, 	numChannels = 3; break;
			case GL_RGBA8:	readFormat = GL_RGBA,	numChannels = 4; break;
			default:
				ofLogWarning("ftUtil") << "toPixels: " << "can only read char texturs to ofPixels";
				return;
		}
		// Reallocate the destination only when the size/format changed.
		if (_pixels.getWidth() != texData.width || _pixels.getHeight() != texData.height || _pixels.getNumChannels() != numChannels) {
			_pixels.allocate(texData.width, texData.height, numChannels);
		}
		// Match pack alignment to the destination rows before readback.
		ofSetPixelStoreiAlignment(GL_PACK_ALIGNMENT, texData.width, 1, numChannels);
		glBindTexture(texData.textureTarget, texData.textureID);
		glGetTexImage(texData.textureTarget, 0, readFormat, GL_UNSIGNED_BYTE, _pixels.getData());
		glBindTexture(texData.textureTarget, 0);
	}
Пример #19
0
//--------------------------------------------------------------
void ofApp::draw(){
    // Render the active visual mode into the FBO, read it back, and map
    // per-speaker pixel brightness to solenoid triggers sent over OSC.
    ofBackground(0, 0, 0);
    fbo.begin();
    ofClear(0.,0.,0., 0.);
    if(playMode==0){drawWaves();}
    else if (playMode==1){drawBlobs();}
    
    fbo.end();
    fbo.readToPixels(pix);
    // (removed a stray no-op `pix.getData();` whose result was discarded)
    
    for(int i=0; i<totalSpeakers;i++){
        // A non-zero red channel under an inactive speaker activates it
        // and timestamps the activation.
        if(pix.getColor(solenoidArray[i].pos.x, solenoidArray[i].pos.y).r>0 && solenoidArray[i].isActive==false){
            solenoidArray[i].isActive=true;
            solenoidArray[i].timeStamp=ofGetElapsedTimeMillis();
        }
        
        // Send OSC only on a state change against the last-sent snapshot.
        if(solenoidArray[i].isActive != activeSolenoid[i]){
            ofxOscMessage n;
            n.setAddress("/OF");
            n.addFloatArg(i);
            n.addIntArg(playMode); // playMode
            n.addIntArg(solenoidArray[i].isActive); //is active?
            n.addIntArg(nodeID); //index
            sender.sendMessage(n);
            activeSolenoid[i] = solenoidArray[i].isActive;// update our reference node
            cout << "OSC: Noid "<< i << " is " << solenoidArray[i].isActive << " -- nodeID: " << nodeID << endl;
        }
    }
    
    //    fbo.draw(0,0);
    
    //    ofDrawBitmapString(msg, ofGetWidth()/2, ofGetHeight()/2);
    for (int i=0; i<totalSpeakers; i++) {
        drawCircle(i);
    }
    
}
Пример #20
0
GLuint ofxImGui::loadPixels(ofPixels& pixels)
{
    // Hand the raw pixel buffer to the backend engine, which creates a 2D
    // GL texture and returns its id.
    const int w = pixels.getWidth();
    const int h = pixels.getHeight();
    return engine->loadTextureImage2D(pixels.getData(), w, h);
}
Пример #21
0
//--------------------------------------------------------------------------------
void ofxCvImage::setRoiFromPixels( const ofPixels & pixels ){
	// Forward to the raw-pointer overload using the pixels' own dimensions.
	const int w = pixels.getWidth();
	const int h = pixels.getHeight();
	setRoiFromPixels( pixels.getData(), w, h );
}
Пример #22
0
void ofxTexture3d::loadData(ofPixels & pix, int d, int xOffset, int yOffset, int zOffset)
{
    // Upload one 2D pixel slice into the 3D texture at the given offsets.
    const int w = pix.getWidth();
    const int h = pix.getHeight();
    const int glFormat = ofGetGlFormat(pix);
    loadData(pix.getData(), w, h, d, xOffset, yOffset, zOffset, glFormat);
}
Пример #23
0
//----------------------------------------------------------
void ofTexture::loadData(const ofPixels & pix, int glFormat){
	// Set the unpack alignment to match the source rows, then forward to
	// the raw-pointer overload.
	const int channels = ofGetNumChannelsFromGLFormat(glFormat);
	ofSetPixelStoreiAlignment(GL_UNPACK_ALIGNMENT, pix.getWidth(), pix.getBytesPerChannel(), channels);
	loadData(pix.getData(), pix.getWidth(), pix.getHeight(), glFormat, ofGetGlType(pix));
}