// Copies a frame into the shared back buffer for the DeckLink output thread.
// Only accepts frames that exactly match the configured UI frame size; the
// buffer access is protected by `mutex` and `has_new_frame` signals the consumer.
void Output::publishPixels(ofPixels &pix) {
	assert(mutex);
	if (pix.getWidth() == uiFrameWidth && pix.getHeight() == uiFrameHeight) {
		mutex->lock();
		// (Re)allocate lazily whenever the destination geometry disagrees.
		if (!back_buffer->isAllocated() || back_buffer->getWidth() != pix.getWidth() || back_buffer->getHeight() != pix.getHeight()) {
			back_buffer->allocate(pix.getWidth(), pix.getHeight(), pix.getNumChannels());
		}
		// NOTE(review): copies size-1 bytes starting at destination byte 1, i.e.
		// the whole image shifted one byte — presumably nudging RGBA-ordered data
		// toward the ARGB layout DeckLink expects. Confirm; this looks fragile if
		// source and destination channel counts ever differ.
		memcpy(&back_buffer->getData()[1], pix.getData(), pix.size() - 1);
		//*back_buffer = pix;
		// Force a 4-channel buffer after the raw copy (memcpy ignores format).
		if (back_buffer->getNumChannels() != 4)
			back_buffer->setNumChannels(4);
		has_new_frame = true;
		mutex->unlock();
	}
	else
		ofLogError("ofxDeckLinkAPI::Output") << "invalid pixel size";
}
//---------- bool Message::getData(ofPixels & data) const { auto & header = this->getHeader<Header::Pixels>(); if (this->hasHeader<Header::Pixels>()) { const auto & header = this->getHeader<Header::Pixels>(); auto bodySize = this->getBodySize(); ofPixelFormat pixelFormat = (ofPixelFormat)header.pixelFormat; //reallocate if we need to if (data.getWidth() != header.width || data.getHeight() != header.height || data.getPixelFormat() != pixelFormat) { data.allocate(header.width, header.height, pixelFormat); } if (data.size() != bodySize) { OFXSQUASHBUDDIES_ERROR << "Message body is of wrong size to fill pixels. Maybe a bug in sender?"; return false; } else { memcpy(data.getData(), this->getBodyData(), bodySize); return true; } } else { OFXSQUASHBUDDIES_WARNING << "Message Header doesn't match Pixels type"; return false; } }
// Pushes a pixel buffer to the Java MobileVision detector and collects the
// analyzed faces, sending them out through the `fromAnalyze` channel.
void ofxAndroidMobileVision::process(ofPixels &pixels){
	if(!javaMobileVision){
		ofLogError("ofxAndroidMobileVision") << "update(): java not loaded";
		return;
	}
	JNIEnv *env = ofGetJNIEnv();
	jmethodID javaMethod = env->GetMethodID(javaClass,"update","([BII)I");
	if(!javaMethod ){
		ofLogError("ofxAndroidMobileVision") << "update(): couldn't get java update for MobileVision";
		return;
	}

	// Hand the raw pixels to Java as a byte[] and run detection.
	jbyteArray arr = env->NewByteArray(pixels.size());
	env->SetByteArrayRegion( arr, 0, pixels.size(), (const signed char*) pixels.getData());
	int numFaces = env->CallIntMethod(javaMobileVision, javaMethod, arr, pixels.getWidth(), pixels.getHeight());
	env->DeleteLocalRef(arr);

	// Hoisted out of the loop: the method id never changes per face.
	auto getDataMethod = env->GetMethodID(javaClass, "getData", "(I)[F");

	vector<ofxAndroidMobileVisionFace> analyzedfaces;
	for(int i=0;i<numFaces;i++) {
		// FIX: request face i — the original always passed 0, so every slot
		// repeated the data of the first detected face.
		jfloatArray data = (jfloatArray) env->CallObjectMethod(javaMobileVision, getDataMethod, i);
		jboolean isCopy;
		jfloat *body = env->GetFloatArrayElements(data, &isCopy);

		// Layout per face: [smile, leftEyeOpen, rightEyeOpen, 12 x (x,y)].
		ofxAndroidMobileVisionFace face;
		face.smileProbability = body[0];
		face.leftEyeOpenProbability = body[1];
		face.rightEyeOpenProbability = body[2];
		for(int j=0;j<12;j++){
			ofVec2f p;
			p.x = body[j*2+3];
			p.y = body[j*2+4];
			face.landmarks.push_back(p);
		}
		analyzedfaces.push_back(face);

		// FIX: release the pinned/copied float array (the original leaked it);
		// JNI_ABORT because we only read from the buffer.
		env->ReleaseFloatArrayElements(data, body, JNI_ABORT);
		env->DeleteLocalRef(data);
	}
	fromAnalyze.send(analyzedfaces);
}
// Refreshes the working pixel copy from the incoming frame, runs every
// registered glitch over it in order, then uploads the result to the texture.
void MSGlitcher::update(ofPixels &sourcePixels) {
	// Mirror the source frame into our scratch buffer, byte for byte.
	const int total = sourcePixels.size();
	for (int idx = 0; idx < total; ++idx) {
		glitchedPixels[idx] = sourcePixels[idx];
	}
	// Let each glitch mangle the copy in registration order.
	for (auto & glitch : glitches) {
		glitch->update(glitchedPixels);
	}
	// Push the glitched frame to the GPU.
	glitchedTexture.loadData(glitchedPixels);
}
// Returns a copy of a pixel buffer with each RGB channel scaled by the given
// factor. Assumes a 3-channel (RGB) layout — TODO confirm with callers.
//
// FIX: the original re-allocate()d the by-value argument as OF_PIXELS_RGB,
// which wipes the pixel data whenever the source format differs (and is a
// no-op when it doesn't), so it is dropped. The bound `i < size()-3` also
// skipped the final pixel and underflowed (unsigned) on an empty buffer.
ofPixels ofxImageTS::alterColorRGB(ofPixels pixels,float R, float G, float B){
	ofPixels copy;
	copy = pixels;
	const int total = static_cast<int>(pixels.size());
	for(int i = 0; i + 2 < total; i += 3){
		// Clamp: converting an out-of-range float to unsigned char is undefined.
		copy[i]     = ofClamp(R * pixels[i], 0, 255);
		copy[i + 1] = ofClamp(G * pixels[i + 1], 0, 255);
		copy[i + 2] = ofClamp(B * pixels[i + 2], 0, 255);
	}
	return copy;
}
// Returns a copy of a pixel buffer with the red and blue channels swapped.
// Assumes a 4-channel (RGBA) layout — TODO confirm with callers.
//
// FIX: the original re-allocate()d the by-value argument as OF_PIXELS_RGBA,
// which wipes the pixel data whenever the source format differs (and is a
// no-op when it doesn't), so it is dropped. The bound `i < size()-3` also
// underflowed (unsigned) for buffers smaller than 3 bytes.
ofPixels ofxImageTS::invertRB(ofPixels pixels){
	ofPixels copy;
	copy = pixels;
	const int total = static_cast<int>(pixels.size());
	for(int i = 0; i + 3 < total; i += 4){
		copy[i]     = pixels[i + 2]; // R <- B
		copy[i + 1] = pixels[i + 1]; // G unchanged
		copy[i + 2] = pixels[i];     // B <- R
		copy[i + 3] = pixels[i + 3]; // A unchanged
	}
	return copy;
}
//-------------------------------------------------------------- void sampleCell::setPointsFirst(const ofPixels &_pix, ofPoint _startPoint){ pix = _pix; startX = _startPoint.x; startY = _startPoint.y; bSettingPoints = true; if(!bRegisteredEvents) { ofRegisterMouseEvents(this); // this will enable our circle class to listen to the mouse events. bRegisteredEvents = true; } ofLog() << "*************************************************"; ofLogVerbose() << "setPoints " << ID << " started empty"; ofLogVerbose() << "cell[" << ID << "] has pix: " << _pix.size(); }
//---------- void Message::setData(const ofPixels & data) { const auto headerSize = sizeof(Header::Pixels); const auto bodySize = data.size(); // inner payload this->headerAndData.resize(headerSize + bodySize); auto & header = this->getHeader<Header::Pixels>(true); header.width = data.getWidth(); header.height = data.getHeight(); header.pixelFormat = data.getPixelFormat(); auto body = this->getBodyData(); memcpy(body, data.getData(), bodySize); }
// Randomly darkens pixels in place to fake analog noise. Assumes a 3-channel
// (RGB) buffer — TODO confirm; a 4-channel frame would also darken alpha bytes.
void MSGlitchNoise::update(ofPixels &sourcePixels) {
	MSGlitch::update(sourcePixels);
	if (!hasStarted) return;
	const float noiseStrength = 1.0f - NOISE_STRENGTH; // loop-invariant, hoisted
	const int total = static_cast<int>(sourcePixels.size());
	// FIX: bound on i+2 so a size that isn't a multiple of 3 can't be indexed
	// past the end (the original checked only i < size()).
	for (int i = 0; i + 2 < total; i += 3) {
		// NOISE_AMOUNT is the per-pixel probability of being hit.
		if (ofInRange(ofRandom(0, 1), 0.0f, NOISE_AMOUNT)) {
			sourcePixels[i + 0] *= noiseStrength;
			sourcePixels[i + 1] *= noiseStrength;
			sourcePixels[i + 2] *= noiseStrength;
		}
	}
}
// Converts the frame to grayscale in place using a plain channel average.
// Assumes a 3-channel (RGB) buffer — TODO confirm with the glitcher pipeline.
void MSGlitchGrayscale::update(ofPixels &sourcePixels) {
	MSGlitch::update(sourcePixels);
	if (!hasStarted) return;
	const int total = static_cast<int>(sourcePixels.size());
	// FIX: bound on i+2 so a size that isn't a multiple of 3 can't read/write
	// past the end (the original checked only i < size()).
	for (int i = 0; i + 2 < total; i += 3) {
		int r = sourcePixels[i + 0];
		int g = sourcePixels[i + 1];
		int b = sourcePixels[i + 2];
		int average = (r + g + b) / 3;
		sourcePixels[i + 0] = (unsigned char)average;
		sourcePixels[i + 1] = (unsigned char)average;
		sourcePixels[i + 2] = (unsigned char)average;
	}
}
//---------- void Decoder::operator<<(const ofPixels& pixels) { if (frame == 0) { data.allocate(pixels.getWidth(), pixels.getHeight(), payload->getWidth(), payload->getHeight()); } if (frame > payload->getFrameCount() - 1) { #pragma omp critical(ofLog) ofLogWarning("ofxGraycode") << "Can't add more frames, we've already captured a full set. please clear()"; return; } if (!pixels.isAllocated()) { ofLogError("ofxGraycode") << "Cannot add this capture as the pixels object is empty"; return; } const ofPixels* greyPixels; if (pixels.getNumChannels() > 1) { ofPixels* downsample = new ofPixels(); downsample->allocate(pixels.getWidth(), pixels.getHeight(), OF_PIXELS_MONO); downsample->set(0, 0); const uint8_t* in = pixels.getData(); uint8_t* out = downsample->getData(); for (int i = 0; i < pixels.size(); i++, out += (i % pixels.getNumChannels() == 0)) { *out += *in++ / pixels.getNumChannels(); } greyPixels = downsample; } else greyPixels = &pixels; if (this->payload->isOffline()) captures.push_back(*greyPixels); else payload->readPixels(frame, *greyPixels); frame++; if (frame >= payload->getFrameCount()) { calc(); frame = payload->getFrameCount(); } if (greyPixels != &pixels) delete greyPixels; }
void drawHistogram(const ofPixels& pix, float height = 128, int skip = 16) { vector<float> r(256), g(256), b(256); const unsigned char* data = pix.getData(); int n = pix.size(); int samples = 0; for(int i = 0; i < n; i += 3*skip) { r[data[i++]]++; g[data[i++]]++; b[data[i++]]++; samples++; } ofMesh rmesh, gmesh, bmesh; rmesh.setMode(OF_PRIMITIVE_TRIANGLE_STRIP); gmesh.setMode(OF_PRIMITIVE_TRIANGLE_STRIP); bmesh.setMode(OF_PRIMITIVE_TRIANGLE_STRIP); int peak = 0; for(int i = 0; i < 255; i++) { rmesh.addVertex(ofVec3f(i, 0)); rmesh.addVertex(ofVec3f(i, r[i])); gmesh.addVertex(ofVec3f(i, 0)); gmesh.addVertex(ofVec3f(i, g[i])); bmesh.addVertex(ofVec3f(i, 0)); bmesh.addVertex(ofVec3f(i, b[i])); peak = MAX(peak, r[i]); peak = MAX(peak, g[i]); peak = MAX(peak, b[i]); } ofPushMatrix(); ofPushStyle(); ofEnableBlendMode(OF_BLENDMODE_ADD); ofScale(2, height / peak); ofSetColor(255); ofDrawLine(256, 0, 256, peak); ofTranslate(.5, 0); ofSetColor(255, 0, 0); rmesh.draw(); ofSetColor(0, 255, 0); gmesh.draw(); ofSetColor(0, 0, 255); bmesh.draw(); ofPopStyle(); ofPopMatrix(); }
//------------------------------------------------------------------------------ bool BaseWebSocketSessionManager::sendBinary(AbstractWebSocketConnection* connection, ofPixels& pixels) { return sendBinary(connection,pixels.getPixels(), static_cast<unsigned int>(pixels.size())); }