//-------------------------------------------------------------- bool ofxEditorSyntax::loadFile(const string& xmlFile) { string path = ofToDataPath(xmlFile); ofXml xml; if(!xml.load(path)) { ofLogError("ofxEditorSyntax") << "couldn't load \"" << ofFilePath::getFileName(xmlFile) << "\""; return false; } xml.setToParent(); if(!xml.exists("syntax")) { ofLogWarning("ofxEditorSyntax") << "root xml tag not \"syntax\", ignoring"; return false; } xml.setTo("syntax"); int numTags = xml.getNumChildren(); clear(); for(int i = 0; i < numTags; ++i) { xml.setToChild(i); if(xml.getName() == "lang") {setLang(xml.getValue());} else if(xml.getName() == "files") { int numExts = xml.getNumChildren(); for(int e = 0; e < numExts; ++e) { xml.setToChild(e); if(xml.getName() == "ext") {addFileExt(xml.getValue());} else { ofLogWarning("ofxEditorSyntax") << "ignoring unknown files xml tag \"" << xml.getName() << "\""; } xml.setToParent(); } } else if(xml.getName() == "singlecomment") {singleLineComment = string_to_wstring(xml.getValue());} else if(xml.getName() == "multicomment") { if(xml.exists("begin")) {multiLineCommentBegin = string_to_wstring(xml.getValue("begin"));} if(xml.exists("end")) {multiLineCommentBegin = string_to_wstring(xml.getValue("end"));} } else if(xml.getName() == "preprocessor") {preprocessor = string_to_wstring(xml.getValue());} else if(xml.getName() == "hexliteral") { string b = xml.getValue(); if(b == "true") {setHexLiteral(true);} else if(b == "false") {setHexLiteral(false);} else { ofLogWarning("ofxEditorSyntax") << "ignoring unknown xml bool string \"" << b << "\""; } } else if(xml.getName() == "operator") {operatorChars = string_to_wstring(xml.getValue());} else if(xml.getName() == "punctuation") {punctuationChars = string_to_wstring(xml.getValue());} else if(xml.getName() == "words") { int numWords = xml.getNumChildren(); for(int w = 0; w < numWords; ++w) { xml.setToChild(w); if(xml.getName() == "keyword") {setWord(xml.getValue(), KEYWORD);} else 
if(xml.getName() == "typename") {setWord(xml.getValue(), TYPENAME);} else if(xml.getName() == "function") {setWord(xml.getValue(), FUNCTION);} else { ofLogWarning("ofxEditorSyntax") << "ignoring unknown words xml tag \"" << xml.getName() << "\""; } xml.setToParent(); } } else { ofLogWarning("ofxEditorSyntax") << "ignoring unknown xml tag \"" << xml.getName() << "\""; } xml.setToParent(); } xml.clear(); return true; }
// Encoder-thread worker: pops one take off the shared encode queue and
// converts all of its raw depth frames to compressed PNGs, deleting each
// raw file after conversion. Pauses while recording is active and aborts
// if the encoder thread is stopped.
void ofxDepthImageRecorder::encoderThreadCallback(){
	ofxRGBDScene* take = NULL;
	bool foundDir = false;
	// queue access is guarded by the encoder thread's lock
	encoderThread.lock();
	if(encodeDirectories.size() != 0){
		foundDir = true;
		take = encodeDirectories.front();
		encodeDirectories.pop();
	}
	encoderThread.unlock();

	// nothing queued: bail early
	if(!foundDir) {
		return;
	}

	//start to convert
	if(take->depthFolder == ""){
		ofLogError("ofxDepthImageCompressor -- Take has empty path string");
		return;
	}

	ofDirectory rawDir(take->depthFolder);
	if(!rawDir.exists() || !rawDir.isDirectory()){
		ofLogError("ofxDepthImageRecorder::encoderThreadCallback() -- Does not exist or is not directory " + take->depthFolder);
		return;
	}

	// lazily allocate the reusable scratch buffer (640x480 grayscale)
	if(!encodingBuffer.isAllocated()){
		encodingBuffer.allocate(640,480, OF_IMAGE_GRAYSCALE);
	}

	ofLogVerbose("ofxDepthImageCompressor -- Starting to convert " + ofToString(take->uncompressedDepthFrameCount) + " in " + take->depthFolder);
	//cout << "ofxDepthImageCompressor -- Starting to convert " << ofToString(take->uncompressedDepthFrameCount) << " in " << take->depthFolder << endl;

	framesToCompress = take->uncompressedDepthFrameCount;

	rawDir.allowExt("raw");
	rawDir.listDir();
	for(int i = 0; i < rawDir.numFiles(); i++){
		//don't do this while recording
		while(recording){
			// ofLogWarning("ofxDepthImageRecorder -- paused converting while recording...");
			ofSleepMillis(25);
		}

		// abort conversion if the encoder thread has been asked to stop
		if(!encoderThread.isThreadRunning()){
			ofLogWarning( "ofxDepthImageRecorder -- Breaking conversion because recorder isn't running");
			break;
		}

		string path = rawDir.getPath(i);

		//READ IN THE RAW FILE
		compressor.readDepthFrame(path, encodingBuffer.getPixels());

		//COMPRESS TO PNG
		compressor.saveToCompressedPng(ofFilePath::removeExt(path)+".png", encodingBuffer.getPixels());

		//DELETE the file
		ofFile::removeFile(rawDir.getPath(i));

		//UPDATE COUNTS
		framesToCompress = take->uncompressedDepthFrameCount;
		take->uncompressedDepthFrameCount--;
		take->compressedDepthFrameCount++;
	}
}
// Fills every pixel with the given color, writing channels in the order
// dictated by the current pixel format. Grayscale formats collapse the
// color to its brightness. Planar/packed video formats are not supported
// and only log a warning.
void ofPixels_<PixelType>::setColor(const ofColor_<PixelType>& color) {
	switch(pixelFormat){
		case OF_PIXELS_RGB:
			for(auto px: getPixelsIter()){
				px[0] = color.r;
				px[1] = color.g;
				px[2] = color.b;
			}
			break;
		case OF_PIXELS_BGR:
			for(auto px: getPixelsIter()){
				px[0] = color.b;
				px[1] = color.g;
				px[2] = color.r;
			}
			break;
		case OF_PIXELS_RGBA:
			for(auto px: getPixelsIter()){
				px[0] = color.r;
				px[1] = color.g;
				px[2] = color.b;
				px[3] = color.a;
			}
			break;
		case OF_PIXELS_BGRA:
			for(auto px: getPixelsIter()){
				px[0] = color.b;
				px[1] = color.g;
				px[2] = color.r;
				px[3] = color.a;
			}
			break;
		case OF_PIXELS_GRAY:{
			// single channel: every component becomes the color's brightness
			PixelType brightness = color.getBrightness();
			for(iterator it=begin();it!=end();++it){
				*it = brightness;
			}
		}
			break;
		case OF_PIXELS_GRAY_ALPHA:{
			PixelType brightness = color.getBrightness();
			for(auto px: getPixelsIter()){
				px[0] = brightness;
				px[1] = color.a;
			}
		}
			break;
		case OF_PIXELS_RGB565:
		case OF_PIXELS_NV12:
		case OF_PIXELS_NV21:
		case OF_PIXELS_YV12:
		case OF_PIXELS_I420:
		case OF_PIXELS_YUY2:
		case OF_PIXELS_UYVY:
		case OF_PIXELS_Y:
		case OF_PIXELS_U:
		case OF_PIXELS_V:
		case OF_PIXELS_UV:
		case OF_PIXELS_VU:
		case OF_PIXELS_UNKNOWN:
		default:
			ofLogWarning("ofPixels") << "setting color not supported yet for " << ofToString(pixelFormat) << " format";
			break;
	}
}
//-------------------------------------------------------------- vector<ofIndexType> ofConePrimitive::getCapIndices() const { if(getMesh().getMode() != OF_PRIMITIVE_TRIANGLE_STRIP) { ofLogWarning("ofConePrimitive") << "getCapIndices(): must be in triangle strip mode"; } return of3dPrimitive::getIndices( strides[1][0], strides[1][0] + strides[1][1] ); }
// GLFW close callback stub: window-close handling is not implemented yet,
// so this only logs a warning.
void ofxFensterManager::exit_cb(GLFWwindow* windowP_) {
	//ofxFensterPtr fenster = get()->getFensterByGlfwHandle(windowP_);
	// FIX: corrected typo in the warning message ("PROPERY" -> "PROPERLY")
	ofLogWarning("ofxFenster", "WINDOW CLOSING NOT YET HANDLED PROPERLY");
}
// GStreamer bus watch: dispatches pipeline messages (buffering, duration,
// state changes, errors, EOS/looping, latency, context). Returns true to
// keep the watch installed, false to remove it (only on EOS while closing).
bool ofGstUtils::gstHandleMessage(GstBus * bus, GstMessage * msg){
	// give the appsink first crack at the message; if it handles it, stop here
	if(appsink && appsink->on_message(msg)) return true;

	/*ofLogVerbose("ofGstUtils") << "gstHandleMessage(): got " << GST_MESSAGE_TYPE_NAME(msg) << " message from " << GST_MESSAGE_SRC_NAME(msg);*/

	switch (GST_MESSAGE_TYPE (msg)) {

		case GST_MESSAGE_BUFFERING:
			// pause while buffering, resume once we hit 100% (unless user paused)
			gint pctBuffered;
			gst_message_parse_buffering(msg,&pctBuffered);
			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): buffering " << pctBuffered;
			if(pctBuffered<100){
				gst_element_set_state (gstPipeline, GST_STATE_PAUSED);
			}else if(!bPaused){
				gst_element_set_state (gstPipeline, GST_STATE_PLAYING);
			}
			break;

#if GST_VERSION_MAJOR==0
		case GST_MESSAGE_DURATION:{
			// gst 0.x passes the format by pointer
			GstFormat format=GST_FORMAT_TIME;
			gst_element_query_duration(gstPipeline,&format,&durationNanos);
		}break;
#else
		case GST_MESSAGE_DURATION_CHANGED:
			gst_element_query_duration(gstPipeline,GST_FORMAT_TIME,&durationNanos);
			break;
#endif

		case GST_MESSAGE_STATE_CHANGED:{
			GstState oldstate, newstate, pendstate;
			gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate);
			// streams report PAUSED when preroll completes: mark loaded and start playback
			if(isStream && newstate==GST_STATE_PAUSED && !bPlaying ){
				bLoaded = true;
				bPlaying = true;
				if(!bPaused){
					//ofLogVerbose("ofGstUtils") << "gstHandleMessage(): setting stream pipeline to play";
					play();
				}
			}

			/*ofLogVerbose("ofGstUtils") << "gstHandleMessage(): " << GST_MESSAGE_SRC_NAME(msg) << " state changed from " << getName(oldstate) << " to " << getName(newstate) << " (" + getName(pendstate) << ")";*/
		}break;

		case GST_MESSAGE_ASYNC_DONE:
			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): async done";
			break;

		case GST_MESSAGE_ERROR: {
			// log the error, free the parsed strings, and stop the pipeline
			GError *err;
			gchar *debug;
			gst_message_parse_error(msg, &err, &debug);
			gchar * name = gst_element_get_name(GST_MESSAGE_SRC (msg));

			ofLogError("ofGstUtils") << "gstHandleMessage(): embedded video playback halted for plugin, module " << name << "  reported: " << err->message;

			g_free(name);
			g_error_free(err);
			g_free(debug);

			gst_element_set_state(GST_ELEMENT(gstPipeline), GST_STATE_NULL);

		}break;

		case GST_MESSAGE_EOS:{
			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): end of the stream";
			// snapshot the closing flag before eos_cb, which may change state
			bool isClosing = closing;
			eos_cb();

			if(isClosing){
				// tearing down: drop the bus watch by returning false
				busWatchID = 0;
				return false;
			}

			switch(loopMode){

				case OF_LOOP_NORMAL:{
					GstFormat format = GST_FORMAT_TIME;
					GstSeekFlags flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT);
					// seek back to the start (forward playback) or to the end (reverse)
					if(speed>0){
						if(!gst_element_seek(GST_ELEMENT(gstPipeline), speed, format, flags, GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, -1)) {
							ofLogWarning("ofGstUtils") << "gstHandleMessage(): unable to seek";
						}
					}else if(speed<0){
						if(!gst_element_seek(GST_ELEMENT(gstPipeline),speed, format, flags, GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, durationNanos-1000000)) {
							ofLogWarning("ofGstUtils") << "gstHandleMessage(): unable to seek";
						}
					}
				}break;

				case OF_LOOP_PALINDROME:{
					// reverse the playback direction at each end of the stream
					GstFormat format = GST_FORMAT_TIME;
					GstSeekFlags flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH |GST_SEEK_FLAG_KEY_UNIT);
					gint64 pos;
#if GST_VERSION_MAJOR==0
					gst_element_query_position(GST_ELEMENT(gstPipeline),&format,&pos);
#else
					gst_element_query_position(GST_ELEMENT(gstPipeline),format,&pos);
#endif
					float loopSpeed;
					if(pos>0)
						loopSpeed=-speed;
					else
						loopSpeed=speed;
					if(!gst_element_seek(GST_ELEMENT(gstPipeline), loopSpeed, GST_FORMAT_UNDEFINED, flags, GST_SEEK_TYPE_NONE, 0, GST_SEEK_TYPE_NONE, 0)) {
						ofLogWarning("ofGstUtils") << "gstHandleMessage(): unable to seek";
					}
				}break;

				default:
					break;
			}

		}break;

		case GST_MESSAGE_LATENCY:
			gst_bin_recalculate_latency (GST_BIN (getPipeline()));
			break;

		case GST_MESSAGE_REQUEST_STATE:	{
			// honor state requests from elements in the pipeline
			GstState state;
			gchar *name = gst_object_get_path_string (GST_MESSAGE_SRC (msg));

			gst_message_parse_request_state (msg, &state);
			gst_element_set_state (getPipeline(), state);

			g_free (name);
			break;
		}

#if GST_VERSION_MAJOR==1
		case GST_MESSAGE_HAVE_CONTEXT:{
			GstContext *context;
			const gchar *context_type;
			gchar *context_str;

			gst_message_parse_have_context (msg, &context);

			context_type = gst_context_get_context_type (context);
			context_str = gst_structure_to_string (gst_context_get_structure (context));
			ofLogNotice("ofGstUtils","Got context from element '%s': %s=%s\n",
				GST_ELEMENT_NAME (GST_MESSAGE_SRC (msg)), context_type, context_str);
			g_free (context_str);
			gst_context_unref (context);
			break;
		}
#endif

		default:
			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): unhandled message from " << GST_MESSAGE_SRC_NAME(msg);
			break;
	}

	return true;
}
//-------------------------------------------- // transformations //our openGL wrappers ofMatrix4x4 ofCairoRenderer::getCurrentMatrix(ofMatrixMode matrixMode_) const{ ofLogWarning() << "getCurrentMatrix not yet implemented for Cairo Renderer."; return ofMatrix4x4(); }
//--------------------------------------------------------------
// Configures an ffmpeg recording session with a caller-supplied output
// argument string. Creates FIFO pipes for video and/or audio input (a
// stream is enabled when its parameters are > 0), builds the ffmpeg
// command line, and starts the writer threads. Returns true on success,
// false if neither a video nor an audio stream was requested.
// NOTE(review): the mkfifo/ffmpeg commands are built by string
// concatenation and run through system(); paths here are derived from
// pipeNumber and outputString — confirm outputString is trusted input.
bool ofxVideoRecorder::setupCustomOutput(int w, int h, float fps, int sampleRate, int channels, string outputString, bool sysClockSync, bool silent){
	// tear down any previous session before reconfiguring
	if(bIsInitialized)
	{
		close();
	}

	bIsSilent = silent;
	bSysClockSync = sysClockSync;

	// a stream is enabled only when all of its parameters are meaningful
	bRecordAudio = (sampleRate > 0 && channels > 0);
	bRecordVideo = (w > 0 && h > 0 && fps > 0);
	bFinishing = false;

	videoFramesRecorded = 0;
	audioSamplesRecorded = 0;

	if(!bRecordVideo && !bRecordAudio) {
		ofLogWarning() << "ofxVideoRecorder::setupCustomOutput(): invalid parameters, could not setup video or audio stream.\n"
		<< "video: " << w << "x" << h << "@" << fps << "fps\n"
		<< "audio: " << "channels: " << channels << " @ " << sampleRate << "Hz\n";
		return false;
	}
	videoPipePath = "";
	audioPipePath = "";
	pipeNumber = requestPipeNumber();
	if(bRecordVideo) {
		width = w;
		height = h;
		frameRate = fps;

		// recording video, create a FIFO pipe
		videoPipePath = ofFilePath::getAbsolutePath("ofxvrpipe" + ofToString(pipeNumber));
		if(!ofFile::doesFileExist(videoPipePath)){
			string cmd = "bash --login -c 'mkfifo " + videoPipePath + "'";
			system(cmd.c_str());
			// TODO: add windows compatable pipe creation (does ffmpeg work with windows pipes?)
		}
	}

	if(bRecordAudio) {
		this->sampleRate = sampleRate;
		audioChannels = channels;

		// recording video, create a FIFO pipe
		audioPipePath = ofFilePath::getAbsolutePath("ofxarpipe" + ofToString(pipeNumber));
		if(!ofFile::doesFileExist(audioPipePath)){
			string cmd = "bash --login -c 'mkfifo " + audioPipePath + "'";
			system(cmd.c_str());
			// TODO: add windows compatable pipe creation (does ffmpeg work with windows pipes?)
		}
	}

	stringstream cmd;
	// basic ffmpeg invocation, -y option overwrites output file
	cmd << "bash --login -c '" << ffmpegLocation << (bIsSilent?" -loglevel quiet ":" ") << "-y";
	if(bRecordAudio){
		// raw signed 16-bit little-endian PCM read from the audio FIFO
		cmd << " -acodec pcm_s16le -f s16le -ar " << sampleRate << " -ac " << audioChannels << " -i " << audioPipePath;
	}
	else { // no audio stream
		cmd << " -an";
	}
	if(bRecordVideo){ // video input options and file
		cmd << " -r "<< fps << " -s " << w << "x" << h << " -f rawvideo -pix_fmt " << pixelFormat <<" -i " << videoPipePath << " -r " << fps;
	}
	else { // no video stream
		cmd << " -vn";
	}
	// trailing & backgrounds the ffmpeg process
	cmd << " "+ outputString +"' &";

	//cerr << cmd.str();

	ffmpegThread.setup(cmd.str()); // start ffmpeg thread, will wait for input pipes to be opened

	if(bRecordAudio){
		audioThread.setup(audioPipePath, &audioFrames);
	}
	if(bRecordVideo){
		videoThread.setup(videoPipePath, &frames);
	}

	bIsInitialized = true;
	bIsRecording = false;
	bIsPaused = false;

	startTime = 0;
	recordingDuration = 0;
	totalRecordingDuration = 0;

	return bIsInitialized;
}
//--------------------------------------------------------------------
// Enumerates QuickTime SequenceGrabber devices (and each device's physical
// inputs) and tries to open device `deviceNumber`. When didWeChooseADevice
// is false, every device/input is tried in order until one opens.
// On success, deviceName is set to "device-input" and true is returned.
// Returns false when no devices exist, a chosen device fails, or nothing opens.
bool ofQuickTimeGrabber::qtSelectDevice(int deviceNumber, bool didWeChooseADevice){

	//note - check for memory freeing possibly needed for the all SGGetChannelDeviceList mac stuff
	// also see notes in listDevices() regarding new enunemeration method.

	//Generate a device list and enumerate
	//all devices availble to the channel
	SGDeviceList deviceList;
	SGGetChannelDeviceList(gVideoChannel, sgDeviceListIncludeInputs, &deviceList);
	// Pascal-style strings (length-prefixed), as used by the QuickTime C API
	unsigned char pascalName[64];
	unsigned char pascalNameInput[64];

	int numDevices = (*deviceList)->count;
	if(numDevices == 0){
		ofLogError("ofQuickTimeGrabber") << "no capture devices found";
		return false;
	}

	// counts each (device, input) pair as one selectable device
	int deviceCount = 0;
	for(int i = 0 ; i < numDevices; ++i)
	{
		SGDeviceName nameRec;
		nameRec = (*deviceList)->entry[i];
		SGDeviceInputList deviceInputList = nameRec.inputs;

		int numInputs = 0;
		if( deviceInputList ) numInputs = ((*deviceInputList)->count);

		memcpy(pascalName, (*deviceList)->entry[i].name, sizeof(char) * 64);
		memset(pascalNameInput, 0, sizeof(char)*64);

		//this means we can use the capture method
		if(nameRec.flags != sgDeviceNameFlagDeviceUnavailable){
			//if we have a capture 'device' (qt's word not mine - I would prefer 'system' ) that is ready to be used
			//we go through its inputs to list all physical devices - as there could be more than one!
			for(int j = 0; j < numInputs; j++){

				//if our 'device' has inputs we get their names here
				if( deviceInputList ){
					SGDeviceInputName inputNameRec = (*deviceInputList)->entry[j];
					memcpy(pascalNameInput, inputNameRec.name, sizeof(char) * 64);
				}

				//if the device number matches we try and setup the device
				//if we didn't specifiy a device then we will try all devices till one works!
				if( deviceCount == deviceNumber || !didWeChooseADevice ){
					ofLogNotice("ofQuickTimeGrabber") << "attempting to open device [" << deviceCount << "] " << p2cstr(pascalName) << " - " << p2cstr(pascalNameInput);

					OSErr err1 = SGSetChannelDevice(gVideoChannel, pascalName);
					OSErr err2 = SGSetChannelDeviceInput(gVideoChannel, j);

					int successLevel = 0;

					//if there were no errors then we have opened the device without issue
					if ( err1 == noErr && err2 == noErr){
						successLevel = 2;
					}
					//parameter errors are not fatal so we will try and open but will caution the user
					else if ( (err1 == paramErr || err1 == noErr) && (err2 == noErr || err2 == paramErr) ){
						successLevel = 1;
					}

					//the device is opened!
					if ( successLevel > 0 ){

						deviceName = (char *)p2cstr(pascalName);
						deviceName += "-";
						deviceName += (char *)p2cstr(pascalNameInput);

						if(successLevel == 2){
							ofLogNotice("ofQuickTimeGrabber") << "device " << deviceName << " opened successfully";
						}
						else{
							ofLogWarning("ofQuickTimeGrabber") << "device " << deviceName << " opened with some paramater errors, should be fine though!";
						}

						//no need to keep searching - return that we have opened a device!
						return true;

					}else{

						//if we selected a device in particular but failed we want to go through the whole list again - starting from 0 and try any device.
						//so we return false - and try one more time without a preference
						if( didWeChooseADevice ){
							ofLogWarning("ofQuickTimeGrabber") << "problems setting device [" << deviceNumber << "] " << p2cstr(pascalName) << " - " << p2cstr(pascalNameInput) << " *****";
							return false;
						}else{
							ofLogWarning("ofQuickTimeGrabber") << "unable to open device, trying next device";
						}
					}
				}

				//we count this way as we need to be able to distinguish multiple inputs as devices
				deviceCount++;
			}

		}else{
			//ofLogError("ofQuickTimeGrabber") << "(unavailable) device [" << deviceCount << "] " << p2cstr(pascalName);
			deviceCount++;
		}
	}

	return false;
}
////////////////////////////////////////////////////////////////////////////////// // public ////////////////////////////////////////////////////////////////////////////////// ofPixelsRef VideoInput::getPixelsRef() { if (!getIsReady()) ofLogWarning() << "VideoInput::getPixelsRef grabber is not ready"; return video.getPixelsRef(); }
// Allocates the framebuffer object from a Settings struct: validates and
// clamps the requested parameters, creates the main FBO, attaches
// depth/stencil storage (renderbuffer or texture), optionally creates a
// second FBO for resolved textures when MSAA is requested, then creates
// the color attachments and verifies FBO completeness into bIsAllocated.
void ofFbo::allocate(Settings _settings) {
	if(!checkGLSupport()) return;

	destroy();

	// check that passed values are correct
	if(_settings.width == 0) _settings.width = ofGetWidth();
	if(_settings.height == 0) _settings.height = ofGetHeight();
	if(_settings.numSamples > maxSamples() && maxSamples() > -1) {
		ofLogWarning("ofFbo") << "allocate(): clamping numSamples " << _settings.numSamples << " to maxSamples " << maxSamples() << " for frame buffer object" << fbo;
		_settings.numSamples = maxSamples();
	}

	if(_settings.depthStencilAsTexture && _settings.numSamples){
		ofLogWarning("ofFbo") << "allocate(): multisampling not supported with depthStencilAsTexture, setting 0 samples for frame buffer object " << fbo;
		_settings.numSamples = 0;
	}

	//currently depth only works if stencil is enabled.
	// http://forum.openframeworks.cc/index.php/topic,6837.0.html
#ifdef TARGET_OPENGLES
	if(_settings.useDepth){
		_settings.useStencil = true;
	}
	if( _settings.depthStencilAsTexture ){
		_settings.depthStencilAsTexture = false;
		ofLogWarning("ofFbo") << "allocate(): depthStencilAsTexture is not available for iOS";
	}
#endif

	// pick the attachment point matching the depth/stencil combination
	GLenum depthAttachment = GL_DEPTH_ATTACHMENT;

	if( _settings.useDepth && _settings.useStencil ){
		_settings.depthStencilInternalFormat = GL_DEPTH_STENCIL;
#ifdef TARGET_OPENGLES
		depthAttachment = GL_DEPTH_ATTACHMENT;
#else
		depthAttachment = GL_DEPTH_STENCIL_ATTACHMENT;
#endif
	}else if(_settings.useDepth){
		depthAttachment = GL_DEPTH_ATTACHMENT;
	}else if(_settings.useStencil){
		depthAttachment = GL_STENCIL_ATTACHMENT;
		_settings.depthStencilInternalFormat = GL_STENCIL_INDEX;
	}

	// set needed values for allocation on instance settings
	// the rest will be set by the corresponding methods during allocation
	settings.width = _settings.width;
	settings.height = _settings.height;
	settings.numSamples = _settings.numSamples;

	// create main fbo
	// this is the main one we bind for drawing into
	// all the renderbuffers are attached to this (whether MSAA is enabled or not)
	glGenFramebuffers(1, &fbo);
	retainFB(fbo);
	bind();

	//- USE REGULAR RENDER BUFFER
	if(!_settings.depthStencilAsTexture){
		if(_settings.useDepth && _settings.useStencil){
			// combined depth+stencil: one renderbuffer shared by both handles
			stencilBuffer = depthBuffer = createAndAttachRenderbuffer(_settings.depthStencilInternalFormat, depthAttachment);
			retainRB(stencilBuffer);
			retainRB(depthBuffer);
		}else if(_settings.useDepth){
			depthBuffer = createAndAttachRenderbuffer(_settings.depthStencilInternalFormat, depthAttachment);
			retainRB(depthBuffer);
		}else if(_settings.useStencil){
			stencilBuffer = createAndAttachRenderbuffer(_settings.depthStencilInternalFormat, depthAttachment);
			retainRB(stencilBuffer);
		}
	//- INSTEAD USE TEXTURE
	}else{
		if(_settings.useDepth || _settings.useStencil){
			createAndAttachDepthStencilTexture(_settings.textureTarget,_settings.depthStencilInternalFormat,depthAttachment);
#ifdef TARGET_OPENGLES
			// if there's depth and stencil the texture should be attached as
			// depth and stencil attachments
			// http://www.khronos.org/registry/gles/extensions/OES/OES_packed_depth_stencil.txt
			if(_settings.useDepth && _settings.useStencil){
				glFramebufferTexture2D(GL_FRAMEBUFFER, GL_STENCIL_ATTACHMENT, GL_TEXTURE_2D, depthBufferTex.texData.textureID, 0);
			}
#endif
		}
	}

	// copy the (possibly adjusted) request into the instance settings
	settings.useDepth = _settings.useDepth;
	settings.useStencil = _settings.useStencil;
	settings.depthStencilInternalFormat = _settings.depthStencilInternalFormat;
	settings.depthStencilAsTexture = _settings.depthStencilAsTexture;
	settings.textureTarget = _settings.textureTarget;
	settings.wrapModeHorizontal = _settings.wrapModeHorizontal;
	settings.wrapModeVertical = _settings.wrapModeVertical;
	settings.maxFilter = _settings.maxFilter;
	settings.minFilter = _settings.minFilter;

	// if we want MSAA, create a new fbo for textures
#ifndef TARGET_OPENGLES
	if(_settings.numSamples){
		glGenFramebuffers(1, &fboTextures);
		retainFB(fboTextures);
	}else{
		fboTextures = fbo;
	}
#else
	fboTextures = fbo;
	if(_settings.numSamples){
		ofLogWarning("ofFbo") << "allocate(): multisampling not supported in OpenGL ES";
	}
#endif

	// now create all textures and color buffers
	// explicit per-attachment formats win over numColorbuffers+internalformat
	if(_settings.colorFormats.size() > 0) {
		for(int i=0; i<(int)_settings.colorFormats.size(); i++) createAndAttachTexture(_settings.colorFormats[i], i);
	} else if(_settings.numColorbuffers > 0) {
		for(int i=0; i<_settings.numColorbuffers; i++) createAndAttachTexture(_settings.internalformat, i);
		_settings.colorFormats = settings.colorFormats;
	} else {
		ofLogWarning("ofFbo") << "allocate(): no color buffers specified for frame buffer object " << fbo;
	}
	settings.internalformat = _settings.internalformat;

	// if textures are attached to a different fbo (e.g. if using MSAA) check it's status
	if(fbo != fboTextures) {
		glBindFramebuffer(GL_FRAMEBUFFER, fboTextures);
	}

	// check everything is ok with this fbo
	bIsAllocated = checkStatus();

	// unbind it
	unbind();

	/* UNCOMMENT OUTSIDE OF DOING RELEASES
	// this should never happen
	if(settings != _settings) ofLogWarning("ofFbo") << "allocation not complete, passed settings not equal to created ones, this is an internal OF bug";
	*/
}
// Deserialization stub: parameter groups can't be restored from a string
// yet, so the input is ignored and a warning is logged.
void ofParameterGroup::fromString(const string & name){
	(void)name; // intentionally unused until deserialization is implemented
	ofLogWarning() << "ofParameterGroup doesn't implement fromString yet";
}
// Allocates the framebuffer object: validates/clamps the requested
// settings, creates the main FBO, chooses depth/stencil format,
// attachment point and pixel type, attaches the depth/stencil storage
// (renderbuffer or texture), creates an extra FBO for resolved textures
// when MSAA is requested, attaches the color textures and checks status.
void ofFbo::allocate(Settings _settings) {
	if(!checkGLSupport()) return;

	destroy();

	if(_settings.width == 0) _settings.width = ofGetWidth();
	if(_settings.height == 0) _settings.height = ofGetHeight();
	if(_settings.numSamples > maxSamples() && maxSamples() > -1) {
		ofLogWarning("ofFbo") << "clamping numSamples (" << _settings.numSamples << ") to maxSamples (" << maxSamples() << ")";
		_settings.numSamples = maxSamples();
	}
	settings = _settings;

	// create main fbo
	// this is the main one we bind for drawing into
	// all the renderbuffers are attached to this (whether MSAA is enabled or not)
	glGenFramebuffers(1, &fbo);
	retainFB(fbo);
	bind();

	// MSAA and depth-as-texture can't be combined: drop the samples
	if(settings.depthStencilAsTexture && settings.numSamples){
		ofLogWarning("ofFbo") << "multisampling not supported with depth as texture, setting 0 samples";
		settings.numSamples = 0;
	}

	//currently depth only works if stencil is enabled.
	// http://forum.openframeworks.cc/index.php/topic,6837.0.html
#ifdef TARGET_OPENGLES
	if(settings.useDepth){
		settings.useStencil = true;
	}
	if( settings.depthStencilAsTexture ){
		settings.depthStencilAsTexture = false;
		ofLogWarning("ofFbo") << "ofFbo::Settings depthStencilAsTexture is not available for iOS" << endl;
	}
#endif

	// pick attachment point, pixel type and format for the depth/stencil combo
	GLenum depthAttachment = GL_DEPTH_ATTACHMENT;
	GLint depthPixelType = GL_UNSIGNED_SHORT;
	GLint depthFormat = GL_DEPTH_COMPONENT;

	if( settings.useDepth && settings.useStencil ){
		depthFormat = GL_DEPTH_STENCIL;
		settings.depthStencilInternalFormat = GL_DEPTH_STENCIL;
		depthPixelType = GL_UNSIGNED_INT_24_8;
#ifdef TARGET_OPENGLES
		depthAttachment = GL_DEPTH_ATTACHMENT;
#else
		depthAttachment = GL_DEPTH_STENCIL_ATTACHMENT;
#endif
	}else if(settings.useDepth){
		// pixel type tracks the requested depth precision
		depthPixelType = GL_UNSIGNED_SHORT;
		if(settings.depthStencilInternalFormat==GL_DEPTH_COMPONENT16){
			depthPixelType = GL_UNSIGNED_SHORT;
		}else if(settings.depthStencilInternalFormat==GL_DEPTH_COMPONENT24){
			depthPixelType = GL_UNSIGNED_INT;
		}
#ifdef GL_DEPTH_COMPONENT32
		else if(settings.depthStencilInternalFormat==GL_DEPTH_COMPONENT32){
			depthPixelType = GL_UNSIGNED_INT;
		}
#endif
		depthAttachment = GL_DEPTH_ATTACHMENT;
		depthFormat = GL_DEPTH_COMPONENT;
	}else if(settings.useStencil){
		depthAttachment = GL_STENCIL_ATTACHMENT;
		settings.depthStencilInternalFormat = GL_STENCIL_INDEX;
		depthFormat = GL_STENCIL_INDEX;
		depthPixelType = GL_UNSIGNED_BYTE;
	}

	//- USE REGULAR RENDER BUFFER
	if(!settings.depthStencilAsTexture){
		if(settings.useDepth && settings.useStencil){
			// combined depth+stencil: one renderbuffer shared by both handles
			stencilBuffer = depthBuffer = createAndAttachRenderbuffer(settings.depthStencilInternalFormat, depthAttachment);
			retainRB(stencilBuffer);
			retainRB(depthBuffer);
		}else if(settings.useDepth){
			depthBuffer = createAndAttachRenderbuffer(settings.depthStencilInternalFormat, depthAttachment);
			retainRB(depthBuffer);
		}else if(settings.useStencil){
			stencilBuffer = createAndAttachRenderbuffer(settings.depthStencilInternalFormat, depthAttachment);
			retainRB(stencilBuffer);
		}
	//- INSTEAD USE TEXTURE
	}else{
		if(settings.useDepth || settings.useStencil){
			createAndAttachDepthStencilTexture(settings.textureTarget,settings.depthStencilInternalFormat,depthFormat,depthPixelType,depthAttachment);
#ifdef TARGET_OPENGLES
			// if there's depth and stencil the texture should be attached as
			// depth and stencil attachments
			// http://www.khronos.org/registry/gles/extensions/OES/OES_packed_depth_stencil.txt
			if(settings.useDepth && settings.useStencil){
				glFramebufferTexture2D(GL_FRAMEBUFFER, GL_STENCIL_ATTACHMENT, GL_TEXTURE_2D, depthBufferTex.texData.textureID, 0);
			}
#endif
		}
	}

	// if we want MSAA, create a new fbo for textures
#ifndef TARGET_OPENGLES
	if(settings.numSamples){
		glGenFramebuffers(1, &fboTextures);
		retainFB(fboTextures);
	}else{
		fboTextures = fbo;
	}
#else
	fboTextures = fbo;
	if(settings.numSamples){
		ofLogWarning("ofFbo") << "multisampling not supported in opengles";
	}
#endif

	// now create all textures and color buffers
	for(int i=0; i<settings.numColorbuffers; i++) createAndAttachTexture(i);

	// if textures are attached to a different fbo (e.g. if using MSAA) check it's status
	if(fbo != fboTextures) {
		glBindFramebuffer(GL_FRAMEBUFFER, fboTextures);
	}

	// check everything is ok with this fbo
	checkStatus();

	// unbind it
	unbind();

	bIsAllocated = true;
}
static bool saveImage(const ofPixels_<PixelType> & _pix, const std::filesystem::path& _fileName, ofImageQualityType qualityLevel) { ofInitFreeImage(); if (_pix.isAllocated() == false){ ofLogError("ofImage") << "saveImage(): couldn't save \"" << _fileName << "\", pixels are not allocated"; return false; } ofFilePath::createEnclosingDirectory(_fileName); std::string fileName = ofToDataPath(_fileName); FREE_IMAGE_FORMAT fif = FIF_UNKNOWN; fif = FreeImage_GetFileType(fileName.c_str(), 0); if(fif == FIF_UNKNOWN) { // or guess via filename fif = FreeImage_GetFIFFromFilename(fileName.c_str()); } if(fif==FIF_JPEG && (_pix.getNumChannels()==4 || _pix.getBitsPerChannel() > 8)){ ofPixels pix3 = _pix; pix3.setNumChannels(3); return saveImage(pix3,_fileName,qualityLevel); } FIBITMAP * bmp = nullptr; #ifdef TARGET_LITTLE_ENDIAN if(sizeof(PixelType) == 1 && (_pix.getPixelFormat()==OF_PIXELS_RGB || _pix.getPixelFormat()==OF_PIXELS_RGBA)) { // Make a local copy. ofPixels_<PixelType> pix = _pix; pix.swapRgb(); bmp = getBmpFromPixels(pix); }else{ #endif bmp = getBmpFromPixels(_pix); #ifdef TARGET_LITTLE_ENDIAN } #endif bool retValue = false; if((fif != FIF_UNKNOWN) && FreeImage_FIFSupportsReading(fif)) { if(fif == FIF_JPEG) { int quality = JPEG_QUALITYSUPERB; switch(qualityLevel) { case OF_IMAGE_QUALITY_WORST: quality = JPEG_QUALITYBAD; break; case OF_IMAGE_QUALITY_LOW: quality = JPEG_QUALITYAVERAGE; break; case OF_IMAGE_QUALITY_MEDIUM: quality = JPEG_QUALITYNORMAL; break; case OF_IMAGE_QUALITY_HIGH: quality = JPEG_QUALITYGOOD; break; case OF_IMAGE_QUALITY_BEST: quality = JPEG_QUALITYSUPERB; break; } retValue = FreeImage_Save(fif, bmp, fileName.c_str(), quality); } else { if(qualityLevel != OF_IMAGE_QUALITY_BEST) { ofLogWarning("ofImage") << "saveImage(): ofImageCompressionType only applies to JPEGs," << " ignoring value for \" "<< fileName << "\""; } if (fif == FIF_GIF) { FIBITMAP* convertedBmp; if(_pix.getImageType() == OF_IMAGE_COLOR_ALPHA) { // this just converts the image to 
grayscale so it can save something convertedBmp = FreeImage_ConvertTo8Bits(bmp); } else { // this will create a 256-color palette from the image convertedBmp = FreeImage_ColorQuantize(bmp, FIQ_NNQUANT); } retValue = FreeImage_Save(fif, convertedBmp, fileName.c_str()); if (convertedBmp != nullptr){ FreeImage_Unload(convertedBmp); } } else { retValue = FreeImage_Save(fif, bmp, fileName.c_str()); } } } if (bmp != nullptr){ FreeImage_Unload(bmp); } return retValue; }
//--------------------------------------------------------------
// Allocates the framebuffer object: resolves the renderer, validates and
// clamps the requested settings, creates the main FBO (restoring the
// previously bound FBO afterwards), attaches depth/stencil storage
// (renderbuffer or texture), creates a second FBO for resolved textures
// when MSAA is requested, attaches the color buffers, marks all color
// attachments dirty and records completeness in bIsAllocated.
void ofFbo::allocate(Settings _settings) {
	if(!checkGLSupport()) return;

	clear();
	// resolve the renderer to use: the one passed in settings, or the current GL renderer
	auto renderer = _settings.renderer.lock();
	if(renderer){
		settings.renderer = renderer;
	}else{
		settings.renderer = ofGetGLRenderer();
	}

	// check that passed values are correct
	if(_settings.width <= 0 || _settings.height <= 0){
		ofLogError("ofFbo") << "width and height have to be more than 0";
	}
	if(_settings.numSamples > maxSamples() && maxSamples() > -1) {
		ofLogWarning("ofFbo") << "allocate(): clamping numSamples " << _settings.numSamples << " to maxSamples " << maxSamples() << " for frame buffer object" << fbo;
		_settings.numSamples = maxSamples();
	}

	if(_settings.depthStencilAsTexture && _settings.numSamples){
		ofLogWarning("ofFbo") << "allocate(): multisampling not supported with depthStencilAsTexture, setting 0 samples for frame buffer object " << fbo;
		_settings.numSamples = 0;
	}

	//currently depth only works if stencil is enabled.
	// http://forum.openframeworks.cc/index.php/topic,6837.0.html
#ifdef TARGET_OPENGLES
	if(_settings.useDepth){
		_settings.useStencil = true;
	}
	if( _settings.depthStencilAsTexture ){
		_settings.depthStencilAsTexture = false;
		ofLogWarning("ofFbo") << "allocate(): depthStencilAsTexture is not available for iOS";
	}
#endif

	// pick the attachment point matching the depth/stencil combination
	GLenum depthAttachment = GL_DEPTH_ATTACHMENT;

	if( _settings.useDepth && _settings.useStencil ){
		_settings.depthStencilInternalFormat = GL_DEPTH_STENCIL;
#ifdef TARGET_OPENGLES
		depthAttachment = GL_DEPTH_ATTACHMENT;
#else
		depthAttachment = GL_DEPTH_STENCIL_ATTACHMENT;
#endif
	}else if(_settings.useDepth){
		depthAttachment = GL_DEPTH_ATTACHMENT;
	}else if(_settings.useStencil){
		depthAttachment = GL_STENCIL_ATTACHMENT;
		_settings.depthStencilInternalFormat = GL_STENCIL_INDEX;
	}

	// set needed values for allocation on instance settings
	// the rest will be set by the corresponding methods during allocation
	settings.width = _settings.width;
	settings.height = _settings.height;
	settings.numSamples = _settings.numSamples;

	// create main fbo
	// this is the main one we bind for drawing into
	// all the renderbuffers are attached to this (whether MSAA is enabled or not)
	glGenFramebuffers(1, &fbo);
	retainFB(fbo);

	GLint previousFboId = 0;

	// note that we are using a glGetInteger method here, which may stall the pipeline.
	// in the allocate() method, this is not that tragic since this will not be called
	// within the draw() loop. Here, we need not optimise for performance, but for
	// simplicity and readability .

	glGetIntegerv(GL_FRAMEBUFFER_BINDING, &previousFboId);
	glBindFramebuffer(GL_FRAMEBUFFER, fbo);

	//- USE REGULAR RENDER BUFFER
	if(!_settings.depthStencilAsTexture){
		if(_settings.useDepth && _settings.useStencil){
			// combined depth+stencil: one renderbuffer shared by both handles
			stencilBuffer = depthBuffer = createAndAttachRenderbuffer(_settings.depthStencilInternalFormat, depthAttachment);
			retainRB(stencilBuffer);
			retainRB(depthBuffer);
		}else if(_settings.useDepth){
			depthBuffer = createAndAttachRenderbuffer(_settings.depthStencilInternalFormat, depthAttachment);
			retainRB(depthBuffer);
		}else if(_settings.useStencil){
			stencilBuffer = createAndAttachRenderbuffer(_settings.depthStencilInternalFormat, depthAttachment);
			retainRB(stencilBuffer);
		}
	//- INSTEAD USE TEXTURE
	}else{
		if(_settings.useDepth || _settings.useStencil){
			createAndAttachDepthStencilTexture(_settings.textureTarget,_settings.depthStencilInternalFormat,depthAttachment);
#ifdef TARGET_OPENGLES
			// if there's depth and stencil the texture should be attached as
			// depth and stencil attachments
			// http://www.khronos.org/registry/gles/extensions/OES/OES_packed_depth_stencil.txt
			if(_settings.useDepth && _settings.useStencil){
				glFramebufferTexture2D(GL_FRAMEBUFFER, GL_STENCIL_ATTACHMENT, GL_TEXTURE_2D, depthBufferTex.texData.textureID, 0);
			}
#endif
		}
	}

	// copy the (possibly adjusted) request into the instance settings
	settings.useDepth = _settings.useDepth;
	settings.useStencil = _settings.useStencil;
	settings.depthStencilInternalFormat = _settings.depthStencilInternalFormat;
	settings.depthStencilAsTexture = _settings.depthStencilAsTexture;
	settings.textureTarget = _settings.textureTarget;
	settings.wrapModeHorizontal = _settings.wrapModeHorizontal;
	settings.wrapModeVertical = _settings.wrapModeVertical;
	settings.maxFilter = _settings.maxFilter;
	settings.minFilter = _settings.minFilter;

	// if we want MSAA, create a new fbo for textures
#ifndef TARGET_OPENGLES
	if(_settings.numSamples){
		glGenFramebuffers(1, &fboTextures);
		retainFB(fboTextures);
	}else{
		fboTextures = fbo;
	}
#else
	fboTextures = fbo;
	if(_settings.numSamples){
		ofLogWarning("ofFbo") << "allocate(): multisampling not supported in OpenGL ES";
	}
#endif

	// now create all textures and color buffers
	// explicit per-attachment formats win over numColorbuffers+internalformat
	if(_settings.colorFormats.size() > 0) {
		for(int i=0; i<(int)_settings.colorFormats.size(); i++) createAndAttachTexture(_settings.colorFormats[i], i);
	} else if(_settings.numColorbuffers > 0) {
		for(int i=0; i<_settings.numColorbuffers; i++) createAndAttachTexture(_settings.internalformat, i);
		_settings.colorFormats = settings.colorFormats;
	} else {
		ofLogWarning("ofFbo") << "allocate(): no color buffers specified for frame buffer object " << fbo;
	}
	settings.internalformat = _settings.internalformat;

	dirty.resize(_settings.colorFormats.size(), true); // we start with all color buffers dirty.

	// if textures are attached to a different fbo (e.g. if using MSAA) check it's status
	if(fbo != fboTextures) {
		glBindFramebuffer(GL_FRAMEBUFFER, fboTextures);
	}

	// check everything is ok with this fbo
	bIsAllocated = checkStatus();

	// restore previous framebuffer id
	glBindFramebuffer(GL_FRAMEBUFFER, previousFboId);

	/* UNCOMMENT OUTSIDE OF DOING RELEASES
	// this should never happen
	if(settings != _settings) ofLogWarning("ofFbo") << "allocation not complete, passed settings not equal to created ones, this is an internal OF bug";
	*/

#ifdef TARGET_ANDROID
	// re-allocate GL resources when the android context is recreated
	ofAddListener(ofxAndroidEvents().reloadGL,this,&ofFbo::reloadFbo);
#endif
}
// Worker-thread entry point: collects annotation files from annotationfolderPath
// (prompting the user for a folder if it is missing), then creates and queues
// one BaseFileLoader task per file found.
void FileImporter::threadedFunction(){
	ofDirectory ad = ofDirectory(annotationfolderPath);
	// folder missing: let the user pick one; exit the app if that also fails
	if(!ad.exists()){
		ofFileDialogResult f = ofSystemLoadDialog("analysisFiles",true);
		ad = ofDirectory(f.filePath);
		if(!ad.exists() || !f.bSuccess){
			ofExit();
		}
	}
	curAnnotationPath = ad.path();
	// restrict the directory listing to extensions that have a registered loader
	for(BaseFileLoader::loaders_map_type::iterator it = BaseFileLoader::getMap()->begin() ; it!=BaseFileLoader::getMap()->end() ; ++it){
		ad.allowExt(it->first);
	}
	ad.listDir();
	vector<ofFile> segL = ad.getFiles();
	if(!segL.size()){
		ofLogError("FileImporter","No valid file in " + ad.path()+" ,allowed extentions :");
		return;
	}
	int globalCount=0; // files that actually contributed at least one element
	getSubset(annotationfolderPath+"Viza/best.json");
	preCache(segL);
	totalNumFile= segL.size();
	int numContainers = 0;
	numSong = 0;
	dbgTime = ofGetElapsedTimef();
	queue.cancelAll(); // drop any import tasks left over from a previous run
	BaseFileLoader::audioFolderPath = audiofolderPath;
	numDone = 0;
	{
		// guard shared counters while tasks are created and queued
		ofScopedLock sl(mutex);
		for(std::vector<ofFile>::iterator p=segL.begin();p!= segL.end();++p){
			// remember the container count before this file so we can tell
			// afterwards whether the file added anything
			int contwatch = numContainers;
			// look up the loader factory registered for this extension and
			// instantiate a loader task for the file
			BaseFileLoader * curLoader = BaseFileLoader::getMap()->at(p->getExtension())(ofToString(contwatch));
			// indicate context for task
			curLoader->containerBlock.parsedFile = p->path();
			curLoader->fillContainerBlock(p->path());
			curLoader->containerBlock.containerIdx = numContainers;
			curLoader->containerBlock.songIdx = numSong;
			queue.start(curLoader);
			numSong+=1;
			numContainers+= curLoader->containerBlock.numElements;
			if( contwatch != numContainers){
				globalCount++;
			}
			else{
				ofLogWarning("FileImporter") << "nothing to add for file " << p->path();
			}
		}
	}
	ofLogNotice("FileImporter","importing "+ofToString(globalCount)+" annotation files");
}
//------------------------------------------------- void ofThread::startThread(bool mutexBlocks, bool verbose){ ofLogWarning("ofThread") << "- name: " << getThreadName() << " - Calling startThread with verbose is deprecated."; startThread(mutexBlocks); }
// Loads a .blend file: transparently decompresses gzipped blends to a temp
// file, parses the file header (pointer size, endianness marker, version),
// walks block headers until the DNA1/SDNA catalog block, then decodes the
// SDNA catalog: names, types, type sizes, and structures with per-field
// byte offsets. Always returns true.
// NOTE(review): failure paths only log a warning and still return true —
// verify callers don't rely on the return value for error detection.
bool File::load(string path) {
	//load the blend file into the stream
	if(file.is_open())
		file.close();
	file.open(ofToDataPath(path, true).c_str(), ios::binary);

	//info should contain blender now, if not it is compressed
	string info = readString(7);

	//check if the file is gzipped
	if(info != "BLENDER") {
		seek(0);
		//unzip the blend file to a temp file and reload
		Poco::InflatingInputStream inflater(file, Poco::InflatingStreamBuf::STREAM_GZIP);
		Poco::TemporaryFile tempFile;
		tempFile.keepUntilExit();
		std::ofstream out(tempFile.path().c_str(), ios::binary);
		Poco::StreamCopier::copyStream( inflater, out);
		out.close();
		file.close();
		file.open(tempFile.path().c_str(), ios::binary);
		info = readString(7);
		if(info != "BLENDER") {
			ofLogWarning(OFX_BLENDER) << "Could not read blend file " << path;
		} else {
			ofLogVerbose(OFX_BLENDER) << "Blend file is gzipped, temporarily decompressed contents to " << tempFile.path();
		}
	}

	//now extract the rest of the header data
	// pointer-size marker: '-' means 8-byte pointers, '_' means 4-byte
	string tempString = readString(1);
	if(tempString == "-")
		pointerSize = 8;
	else if(tempString == "_")
		pointerSize = 4;
	//ofLogVerbose(OFX_BLENDER) << "Pointer Size is " << pointerSize;

	// bind the pointer-reading routine matching the detected pointer size
	if(pointerSize == 4) {
		readPointer = std::bind(&File::read<unsigned int>, this);
	} else {
		readPointer = std::bind(&File::read<unsigned long>, this);
	}

	// endianness marker: 'v' = little endian, 'V' = big endian
	bool littleEndianness;
	char structPre = ' ';
	tempString = readString(1);
	if(tempString == "v") {
		littleEndianness = true;
		structPre = '<';
	} else if(tempString == "V") {
		littleEndianness = false;
		structPre = '>';
	}
	//ofLogVerbose(OFX_BLENDER) << "Struct pre is " << structPre;
	//ofLogVerbose(OFX_BLENDER) << "Little Endianness is " << littleEndianness;

	//version
	version = readString(3);

	//now go through all them blocks
	blocks.push_back(Block(this));

	//read the first block
	readHeader(blocks.back());

	// skip over data blocks until the DNA catalog block is reached
	while(blocks.back().code != "DNA1" && blocks.back().code != "SDNA") {
		//skip the block data
		file.seekg(file.tellg() + streamoff(blocks.back().size));
		//read a new block
		blocks.push_back(Block(this));
		readHeader(blocks.back());
	}

	//advance
	readString(4);
	readString(4);

	//NAMES
	unsigned int numNames = read<unsigned int>();
	for(unsigned int i=0; i<numNames; i++) {
		catalog.names.push_back(DNAName(readString(0)));
	}
	align(file);

	//TYPES
	readString(4);
	unsigned int numTypes = read<unsigned int>();
	//cout << "FOUND TYPES " << numTypes << endl;
	for(unsigned int i=0; i<numTypes; i++) {
		catalog.types.push_back(DNAType(readString(0), i));
	}
	align(file);

	//TYPE LENGTHS
	readString(4);;
	for(unsigned int i=0; i<numTypes; i++) {
		catalog.types[i].size = read<unsigned short>();
		if(catalog.types[i].size == 0) //assume it is a pointer
			catalog.types[i].size = pointerSize;
	}
	align(file);

	//STRUCTURES
	readString(4);
	unsigned int numStructs = read<unsigned int>();
	//cout << "FOUND STRUCTURES " << numStructs << endl;
	for(unsigned int i=0; i<numStructs; i++) {
		//get the type
		unsigned int index = read<unsigned short>();
		DNAType* type = &catalog.types[index];
		catalog.structures.push_back(DNAStructure(type));
		DNAStructure& structure = catalog.structures.back();

		//get the fields for the structure
		unsigned short numFields = read<unsigned short>();
		unsigned int curOffset = 0; // running byte offset of the next field
		for(unsigned int j=0; j<numFields; j++) {
			unsigned short typeIndex = read<unsigned short>();
			unsigned short nameIndex = read<unsigned short>();
			DNAType* type = &catalog.types[typeIndex];
			DNAName* name = &catalog.names[nameIndex];
			structure.fields.push_back(DNAField(type, name, curOffset));

			//if the field is a pointer, then only add the pointer size to offset
			bool offsetSet = false;
			if(structure.fields.back().isPointer) {
				int amount = 0;
				if(structure.fields.back().isArray) {
					amount = structure.fields.back().arraySizes[0];
				}
				if(amount == 0)
					amount = 1;
				curOffset += (pointerSize * amount);
				offsetSet = true;
			} else if(structure.fields.back().isArray) {
				//arrays add n times the size to offset
				// multi accumulates the product of all declared dimensions
				// (-1 entries are unused dimension slots and are skipped)
				float multi = 0;
				for(int s: structure.fields.back().arraySizes) {
					if(s!=-1) {
						if(multi == 0)
							multi += s;
						else
							multi *= s;
					}
				}
				if(multi != 0)
					offsetSet = true;
				curOffset += type->size * multi;
			}
			if(!offsetSet) {
				curOffset += type->size;
			}
		}
	}
	align(file);

	//now link all structures with the File Blocks
	vector<Block>::iterator it = blocks.begin();
	while(it != blocks.end()) {
		(*it).structure = &catalog.structures[(*it).SDNAIndex];
		it++;
	}

	ofLogVerbose(OFX_BLENDER) << "Loaded \"" << path << "\" - Blender version is " << version;
	return true;
}
// Cairo can only draw the mesh as plain wireframe geometry: warn when the
// caller requested per-vertex attributes we cannot honor, then forward to
// the attribute-less overload.
void ofCairoRenderer::draw(const ofMesh & vertexData, ofPolyRenderMode mode, bool useColors, bool useTextures, bool useNormals) const{
	const bool wantsUnsupportedAttribs = useColors || useTextures || useNormals;
	if(wantsUnsupportedAttribs){
		ofLogWarning("ofCairoRenderer") << "draw(): cairo mesh rendering doesn't support colors, textures, or normals. drawing wireframe ...";
	}
	draw(vertexData,false,false,false);
}
//-------------------------------------------------------------- bool ofxFileDialog::saveFile(std::string filename) { ofLogWarning("ofxFileDialog") << "ignoring saveFile"; return false; }
//-------------------------------------------------------------- void ofConePrimitive::setCapColor( ofColor color ) { if(getMesh().getMode() != OF_PRIMITIVE_TRIANGLE_STRIP) { ofLogWarning("ofConePrimitive") << "setCapColor(): must be in triangle strip mode"; } getMesh().setColorForIndices( strides[1][0], strides[1][0]+strides[1][1], color ); }
//-------------------------------------------------------------- void ofxFileDialog::redo() { ofLogWarning("ofxFileDialog") << "ignoring redo"; }
//------------------------------------------------- void ofThread::threadedFunction(){ ofLogWarning("ofThread") << "- name: " << getThreadName() << " - Override ofThread::threadedFunction() in your ofThread subclass."; }
//-------------------------------------------------------------- void ofxFileDialog::keyPressedText(int key) { switch(key) { case OF_KEY_UP: case OF_KEY_DOWN: return; case OF_KEY_RIGHT: if(!m_text.empty()) { m_position++; } if(m_position > m_text.size()) { m_position = m_text.size(); } break; case OF_KEY_LEFT: if(m_position > 0) { m_position--; } break; case OF_KEY_DEL: m_text.erase(m_position, 1); break; case OF_KEY_BACKSPACE: if(!m_text.empty() && m_position != 0) { if(m_selection != NONE) { m_text.erase(m_highlightStart, m_highlightEnd - m_highlightStart); m_position -= m_highlightEnd - m_highlightStart; m_selection = NONE; } else { m_text.erase(m_position-1, 1); m_position--; } } break; default: // build multibyte UTF-8 character if(key > 0x80) { if(m_UTF8Bytes == 0) { m_UTF8Bytes = wchar_width(key); } m_UTF8Char.push_back(key); if(m_UTF8Char.length() < m_UTF8Bytes) { return; } } else if(m_UTF8Bytes > 0) { ofLogWarning("ofxFileDialog") << "dropping bad UTF8 bytes"; m_UTF8Bytes = 0; m_UTF8Char = ""; } // ignore control chars if(key != '\n' && key < ' ') { return; } // multibyte UTF8 if(m_UTF8Bytes > 0) { m_UTF8Bytes = 0; } else { // single byte UTF8 & ASCII m_UTF8Char.push_back(key); } m_text.insert(m_position, string_to_wstring(m_UTF8Char)); m_UTF8Char = ""; m_position++; break; } }
// Saves `pix` to `fileName` via FreeImage, applying `qualityLevel` when the
// target format is JPEG (other formats ignore it, with a warning).
// NOTE(review): the format check below calls FreeImage_FIFSupportsReading()
// even though this is a save path — FreeImage_FIFSupportsWriting() looks
// like the intended call; confirm before changing.
static void saveImage(ofPixels_<PixelType> & pix, string fileName, ofImageQualityType qualityLevel) {
	ofInitFreeImage();
	if (pix.isAllocated() == false){
		ofLogError("ofImage") << "saveImage(): couldn't save \"" << fileName << "\", pixels are not allocated";
		return;
	}

	#ifdef TARGET_LITTLE_ENDIAN
	// swap 8-bit RGB(A) to the channel order FreeImage expects on little-endian
	if(sizeof(PixelType) == 1 && (pix.getPixelFormat()==OF_PIXELS_RGB || pix.getPixelFormat()==OF_PIXELS_RGBA)) {
		pix.swapRgb();
	}
	#endif

	FIBITMAP * bmp = getBmpFromPixels(pix);

	#ifdef TARGET_LITTLE_ENDIAN
	// swap back so the caller's pixel buffer is left unmodified
	if(sizeof(PixelType) == 1 && (pix.getPixelFormat()==OF_PIXELS_BGR || pix.getPixelFormat()==OF_PIXELS_BGRA)) {
		pix.swapRgb();
	}
	#endif

	ofFilePath::createEnclosingDirectory(fileName);
	fileName = ofToDataPath(fileName);
	// determine the output format from the file contents, if any
	FREE_IMAGE_FORMAT fif = FIF_UNKNOWN;
	fif = FreeImage_GetFileType(fileName.c_str(), 0);
	if(fif == FIF_UNKNOWN) {
		// or guess via filename
		fif = FreeImage_GetFIFFromFilename(fileName.c_str());
	}
	if((fif != FIF_UNKNOWN) && FreeImage_FIFSupportsReading(fif)) {
		if(fif == FIF_JPEG) {
			// map OF quality levels onto FreeImage JPEG quality flags
			int quality = JPEG_QUALITYSUPERB;
			switch(qualityLevel) {
				case OF_IMAGE_QUALITY_WORST: quality = JPEG_QUALITYBAD; break;
				case OF_IMAGE_QUALITY_LOW: quality = JPEG_QUALITYAVERAGE; break;
				case OF_IMAGE_QUALITY_MEDIUM: quality = JPEG_QUALITYNORMAL; break;
				case OF_IMAGE_QUALITY_HIGH: quality = JPEG_QUALITYGOOD; break;
				case OF_IMAGE_QUALITY_BEST: quality = JPEG_QUALITYSUPERB; break;
			}
			FreeImage_Save(fif, bmp, fileName.c_str(), quality);
		} else {
			if(qualityLevel != OF_IMAGE_QUALITY_BEST) {
				ofLogWarning("ofImage") << "saveImage(): ofImageCompressionType only applies to JPEGs," << " ignoring value for \" "<< fileName << "\"";
			}

			if (fif == FIF_GIF) {
				// GIF requires an 8-bit palettized bitmap
				FIBITMAP* convertedBmp;
				if(pix.getImageType() == OF_IMAGE_COLOR_ALPHA) {
					// this just converts the image to grayscale so it can save something
					convertedBmp = FreeImage_ConvertTo8Bits(bmp);
				} else {
					// this will create a 256-color palette from the image
					convertedBmp = FreeImage_ColorQuantize(bmp, FIQ_NNQUANT);
				}
				FreeImage_Save(fif, convertedBmp, fileName.c_str());
				if (convertedBmp != NULL){
					FreeImage_Unload(convertedBmp);
				}
			} else {
				FreeImage_Save(fif, bmp, fileName.c_str());
			}
		}
	}

	if (bmp != NULL){
		FreeImage_Unload(bmp);
	}
}
//-------------------------------------------------------------- void ofxBulletTriMeshShape::create( btDiscreteDynamicsWorld* a_world, ofMesh& aMesh, btTransform &a_bt_tr, float a_mass, glm::vec3 aAAbbMin, glm::vec3 aAAbbMax ) { if( aMesh.getMode() != OF_PRIMITIVE_TRIANGLES ) { ofLogWarning() << " ofxBulletTriMeshShape :: create : mesh must be using triangles, not creating!!" << endl; return; } if( aMesh.getNumIndices() < 3 ) { ofLogWarning() << " ofxBulletTriMeshShape :: create : mesh must have indices, not creating!" << endl; return; } if( !_bInited || _shape == NULL ) { int vertStride = sizeof(btVector3); int indexStride = 3*sizeof(int); totalVerts = (int)aMesh.getNumVertices(); totalIndices = (int)aMesh.getNumIndices(); const int totalTriangles = totalIndices / 3; if( bullet_indices != NULL ) { removeShape(); } if( bullet_vertices != NULL ) { removeShape(); } if( bullet_indexVertexArrays != NULL ) { removeShape(); } if( _shape != NULL ) { removeShape(); } bullet_vertices = new btVector3[ totalVerts ]; bullet_indices = new int[ totalIndices ]; auto& tverts = aMesh.getVertices(); auto& tindices = aMesh.getIndices(); for( int i = 0; i < totalVerts; i++ ) { bullet_vertices[i].setValue( tverts[i].x, tverts[i].y, tverts[i].z ); } for( int i = 0; i < totalIndices; i++ ) { bullet_indices[i] = (int)tindices[i]; } bullet_indexVertexArrays = new btTriangleIndexVertexArray(totalTriangles, bullet_indices, indexStride, totalVerts, (btScalar*) &bullet_vertices[0].x(), vertStride); // if you are having trouble with objects falling through, try passing in smaller or larger aabbMin and aabbMax // to something closer to the size of your object // // btVector3 aabbMin(-10000,-10000,-10000),aabbMax(10000,10000,10000); if( aAAbbMin.length() > 0 && aAAbbMax.length() > 0 ) { btVector3 aabbMin( aAAbbMin.x, aAAbbMin.y, aAAbbMin.z ); btVector3 aabbMax( aAAbbMax.x, aAAbbMax.y, aAAbbMax.z ); _shape = new btBvhTriangleMeshShape(bullet_indexVertexArrays, true, aabbMin, aabbMax ); } else { 
_shape = new btBvhTriangleMeshShape(bullet_indexVertexArrays, true, true ); } } ofxBulletRigidBody::create( a_world, _shape, a_bt_tr, a_mass ); createInternalUserData(); updateMesh( a_world, aMesh ); }
// Dispatches one GStreamer bus message: buffering progress, duration updates,
// pipeline state changes, async completion, errors, and end-of-stream
// (including loop/palindrome replay). Returns true (message handled).
bool ofGstUtils::gstHandleMessage(GstBus * bus, GstMessage * msg){
	// give the appsink first crack at the message
	if(appsink && appsink->on_message(msg)) return true;

	ofLogVerbose("ofGstUtils") << "gstHandleMessage(): got " << GST_MESSAGE_TYPE_NAME(msg) << " message from " << GST_MESSAGE_SRC_NAME(msg);

	switch (GST_MESSAGE_TYPE (msg)) {

		case GST_MESSAGE_BUFFERING:
			gint pctBuffered;
			gst_message_parse_buffering(msg,&pctBuffered);
			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): buffering " << pctBuffered;
			/*if(pctBuffered<100){
				gst_element_set_state (gstPipeline, GST_STATE_PAUSED);
			}else if(!bPaused){
				gst_element_set_state (gstPipeline, GST_STATE_PLAYING);
			}*/
			break;

#if GST_VERSION_MAJOR==0
		// gst 0.x duration query takes the format by pointer
		case GST_MESSAGE_DURATION:{
			GstFormat format=GST_FORMAT_TIME;
			gst_element_query_duration(gstPipeline,&format,&durationNanos);
		}break;
#else
		case GST_MESSAGE_DURATION_CHANGED:
			gst_element_query_duration(gstPipeline,GST_FORMAT_TIME,&durationNanos);
			break;
#endif

		case GST_MESSAGE_STATE_CHANGED:{
			GstState oldstate, newstate, pendstate;
			gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate);
			// a stream source reaching PAUSED for the first time means it is
			// loaded; kick it into playback unless the user paused it
			if(isStream && newstate==GST_STATE_PAUSED && !bPlaying ){
				bLoaded = true;
				bPlaying = true;
				if(!bPaused){
					ofLogVerbose("ofGstUtils") << "gstHandleMessage(): setting stream pipeline to play";
					play();
				}
			}

			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): " << GST_MESSAGE_SRC_NAME(msg) << " state changed from "
					<< getName(oldstate) << " to " << getName(newstate) << " (" + getName(pendstate) << ")";
		}break;

		case GST_MESSAGE_ASYNC_DONE:
			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): async done";
			break;

		case GST_MESSAGE_ERROR: {
			GError *err;
			gchar *debug;
			gst_message_parse_error(msg, &err, &debug);

			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): embedded video playback halted for plugin, module "
				<< gst_element_get_name(GST_MESSAGE_SRC (msg)) << " reported: " << err->message;

			g_error_free(err);
			g_free(debug);

			// tear the pipeline down after an unrecoverable error
			gst_element_set_state(GST_ELEMENT(gstPipeline), GST_STATE_NULL);

		}break;

		case GST_MESSAGE_EOS:
			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): end of the stream";
			bIsMovieDone = true;

			if(appsink && !isAppSink) appsink->on_eos();

			switch(loopMode){

				case OF_LOOP_NORMAL:{
					// rewind to the start, keeping the current playback speed
					GstFormat format = GST_FORMAT_TIME;
					GstSeekFlags flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH |GST_SEEK_FLAG_KEY_UNIT);
					gint64 pos;
#if GST_VERSION_MAJOR==0
					gst_element_query_position(GST_ELEMENT(gstPipeline),&format,&pos);
#else
					gst_element_query_position(GST_ELEMENT(gstPipeline),format,&pos);
#endif
					if(!gst_element_seek(GST_ELEMENT(gstPipeline),
										speed,
										format,
										flags,
										GST_SEEK_TYPE_SET,
										0,
										GST_SEEK_TYPE_SET,
										durationNanos)) {
						ofLogWarning("ofGstUtils") << "gstHandleMessage(): unable to seek";
					}
				}break;

				case OF_LOOP_PALINDROME:{
					// flip the playback direction each time an end is reached
					GstFormat format = GST_FORMAT_TIME;
					GstSeekFlags flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH |GST_SEEK_FLAG_KEY_UNIT);
					gint64 pos;
#if GST_VERSION_MAJOR==0
					gst_element_query_position(GST_ELEMENT(gstPipeline),&format,&pos);
#else
					gst_element_query_position(GST_ELEMENT(gstPipeline),format,&pos);
#endif
					float loopSpeed;
					if(pos>0)
						loopSpeed=-speed;
					else
						loopSpeed=speed;
					if(!gst_element_seek(GST_ELEMENT(gstPipeline),
										loopSpeed,
										GST_FORMAT_UNDEFINED,
										flags,
										GST_SEEK_TYPE_NONE,
										0,
										GST_SEEK_TYPE_NONE,
										0)) {
						ofLogWarning("ofGstUtils") << "gstHandleMessage(): unable to seek";
					}
				}break;

				default:
					break;
			}

		break;

		default:
			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): unhandled message from " << GST_MESSAGE_SRC_NAME(msg);
			break;
	}

	return true;
}
// Routes a message to the ofxBoxModel warning log channel.
void warning(std::string msg) {
	ofLogWarning("ofxBoxModel") << msg;
}
// Stops the measurement previously started with startMeasuring(ID) on the
// calling thread, updates its (optionally averaged) duration, and returns
// the elapsed time of this sample in milliseconds (0 when disabled or when
// the calling thread / ID is unknown).
float ofxTimeMeasurements::stopMeasuring(const string & ID, bool accumulate){

	if (!enabled) return 0.0f;

	float ret = 0.0f;
	string localID = ID;
	uint64_t timeNow = TM_GET_MICROS(); //get the time before the lock() to avoid affecting

	ThreadId thread = getThreadID();
	bool bIsMainThread = isMainThread(thread);

	mutex.lock();

	unordered_map<ThreadId, ThreadInfo>::iterator threadIt = threadInfo.find(thread);
	if(threadIt == threadInfo.end()){ //thread not found!
		mutex.unlock();
		return 0.0f;
	}

	ThreadInfo & tinfo = threadIt->second;
	// measurements from non-main threads get their IDs prefixed with a
	// per-thread ordinal ("T<n>:") to keep them distinct
	if(tinfo.order > 0){
		localID = "T" + ofToString(tinfo.order) + ":" + localID;
	}

	core::tree<string> & tr = tinfo.tree; //easier to read, tr is our tree from now on
	core::tree<string>::iterator & tit = tinfo.tit;

	// climb one level up the measurement tree, undoing startMeasuring()'s descent
	if (tit.out() != tr.end()){
		tit = tit.out();
	}else{
		ofLogError("ofxTimeMeasurements") << "tree climbing too high up! (" << localID << ")";
	}

	unordered_map<string,TimeMeasurement*>::iterator it;
	it = times.find(localID);

	if ( it == times.end() ){ //not found!
		ofLogWarning("ofxTimeMeasurements") << "ID ("<< localID << ")not found at stopMeasuring(). Make sure you called startMeasuring with that ID first.";
	}else{

		TimeMeasurement* t = it->second;

		if ( t->measuring ){

			t->measuring = false;
			t->thread = thread;
			t->error = false;
			t->acrossFrames = (bIsMainThread && t->frame != currentFrameNum); //we only care about across-frames in main thread
			t->microsecondsStop = timeNow;
			ret = t->duration = timeNow - t->microsecondsStart;
			if (!freeze) {
				if (!averaging) {
					t->avgDuration = t->duration;
				} else {
					// exponential moving average weighted by timeAveragePercent
					t->avgDuration = (1.0f - timeAveragePercent) * t->avgDuration + t->duration * timeAveragePercent;
				}
			}
			if (accumulate && !freeze){
				t->microsecondsAccum += t->avgDuration;
			}
		}else{ //wrong use, start first, then stop
			t->error = true;
			ofLogWarning("ofxTimeMeasurements") << "Can't stopMeasuring(" << localID << "). Make sure you called startMeasuring() with that ID first.";
		}
	}

	mutex.unlock();

	// track the profiler's own overhead when internal benchmarking is on
	if(internalBenchmark){
		wastedTimeThisFrame += TM_GET_MICROS() - timeNow;
	}

	return ret / 1000.0f; //convert to ms
}
//---------------------------------------- float ofLight::getSpotConcentration() const{ if(!getIsSpotlight()) { ofLogWarning("ofLight") << "getSpotConcentration(): light " << data->glIndex << " is not a spot light"; } return data->exponent; }