void buildingShapeManager::parseShapesFromFile(string fileName, int _type){
    // Reads a binary shape dump and appends one buildingShape per blob to `shapes`.
    // File layout (every field is a raw int):
    //   nBlobs, then per blob: nPts, centroid.x, centroid.y,
    //   boundingRect x / y / w / h, followed by nPts (x, y) point pairs.
    fileName = ofToDataPath(fileName);

    ifstream checkfile (fileName.c_str(), ios::in|ios::binary|ios::ate);

    int nBlobs    = 0;
    int centroidx = 0;
    int centroidy = 0;
    int x = 0, y = 0, w = 0, h = 0;
    int nPts = 0;
    int ptx  = 0, pty = 0;

    if (checkfile.is_open()){
        checkfile.seekg (0);
        checkfile.read ((char*) &nBlobs, sizeof (int));

        for (int i = 0; i < nBlobs; i++){
            shapes.push_back( new buildingShape());
            // hoist the repeated shapes[shapes.size()-1] lookups
            buildingShape * shape = shapes.back();
            shape->type = _type;
            shape->spot = &spot;

            checkfile.read ((char*) &nPts, sizeof (int));
            shape->nPts = nPts;

            checkfile.read ((char*) &centroidx, sizeof (int));
            shape->centroid.x = centroidx;
            checkfile.read ((char*) &centroidy, sizeof (int));
            shape->centroid.y = centroidy;

            checkfile.read ((char*) &x, sizeof (int));
            shape->boundingRect.x = x;
            checkfile.read ((char*) &y, sizeof (int));
            shape->boundingRect.y = y;
            checkfile.read ((char*) &w, sizeof (int));
            shape->boundingRect.width = w;
            checkfile.read ((char*) &h, sizeof (int));
            shape->boundingRect.height = h;

            shape->pts = new ofPoint[ shape->nPts ];

            // accumulate the polyline length while reading the points
            float length = 0;
            for (int j = 0; j < nPts; j++){
                checkfile.read ((char*) &ptx, sizeof (int));
                checkfile.read ((char*) &pty, sizeof (int));
                shape->pts[j].set(ptx, pty, 0);
                if (j > 0){
                    float dx = ptx - shape->pts[j-1].x;
                    float dy = pty - shape->pts[j-1].y;
                    length += sqrt (dx*dx + dy*dy);
                }
            }
            shape->length = length;
        }
    }
    // fix: shapes.size() is size_t — passing it to %i is undefined behavior
    // on 64-bit builds, so cast explicitly. (Also dropped the unused
    // `size`, `memblock` and `temp` locals of the original.)
    printf("shapes size = %i \n", (int) shapes.size());
}
//------------------------------------------------------------------ void ofTrueTypeFont::loadFont(string filename, int fontsize, bool _bAntiAliased, bool _bFullCharacterSet, bool makeContours){ bMakeContours = makeContours; //------------------------------------------------ if (bLoadedOk == true){ // we've already been loaded, try to clean up : if (cps != NULL){ delete[] cps; } if (texNames != NULL){ for (int i = 0; i < nCharacters; i++){ glDeleteTextures(1, &texNames[i]); } delete[] texNames; } bLoadedOk = false; } //------------------------------------------------ filename = ofToDataPath(filename); bLoadedOk = false; bAntiAlised = _bAntiAliased; bFullCharacterSet = _bFullCharacterSet; fontSize = fontsize; //--------------- load the library and typeface FT_Library library; if (FT_Init_FreeType( &library )){ ofLog(OF_LOG_ERROR," PROBLEM WITH FT lib"); return; } FT_Face face; if (FT_New_Face( library, filename.c_str(), 0, &face )) { return; } FT_Set_Char_Size( face, fontsize << 6, fontsize << 6, 96, 96); lineHeight = fontsize * 1.43f; //------------------------------------------------------ //kerning would be great to support: //ofLog(OF_LOG_NOTICE,"FT_HAS_KERNING ? %i", FT_HAS_KERNING(face)); //------------------------------------------------------ nCharacters = bFullCharacterSet ? 
256 : 128 - NUM_CHARACTER_TO_START; //--------------- initialize character info and textures cps = new charProps[nCharacters]; texNames = new GLuint[nCharacters]; glGenTextures(nCharacters, texNames); if(bMakeContours){ charOutlines.clear(); charOutlines.assign(nCharacters, ofTTFCharacter()); } //--------------------- load each char ----------------------- for (int i = 0 ; i < nCharacters; i++){ //------------------------------------------ anti aliased or not: if(FT_Load_Glyph( face, FT_Get_Char_Index( face, (unsigned char)(i+NUM_CHARACTER_TO_START) ), FT_LOAD_DEFAULT )){ ofLog(OF_LOG_ERROR,"error with FT_Load_Glyph %i", i); } if (bAntiAlised == true) FT_Render_Glyph(face->glyph, FT_RENDER_MODE_NORMAL); else FT_Render_Glyph(face->glyph, FT_RENDER_MODE_MONO); //------------------------------------------ FT_Bitmap& bitmap= face->glyph->bitmap; // 3 pixel border around the glyph // We show 2 pixels of this, so that blending looks good. // 1 pixels is hidden because we don't want to see the real edge of the texture border = 3; visibleBorder = 2; if(bMakeContours){ if( printVectorInfo )printf("\n\ncharacter %c: \n", char( i+NUM_CHARACTER_TO_START ) ); //int character = i + NUM_CHARACTER_TO_START; charOutlines[i] = makeContoursForCharacter( face ); } // prepare the texture: int width = ofNextPow2( bitmap.width + border*2 ); int height = ofNextPow2( bitmap.rows + border*2 ); // ------------------------- this is fixing a bug with small type // ------------------------- appearantly, opengl has trouble with // ------------------------- width or height textures of 1, so we // ------------------------- we just set it to 2... 
if (width == 1) width = 2; if (height == 1) height = 2; // ------------------------- // info about the character: cps[i].value = i; cps[i].height = face->glyph->bitmap_top; cps[i].width = face->glyph->bitmap.width; cps[i].setWidth = face->glyph->advance.x >> 6; cps[i].topExtent = face->glyph->bitmap.rows; cps[i].leftExtent = face->glyph->bitmap_left; // texture internals cps[i].tTex = (float)(bitmap.width + visibleBorder*2) / (float)width; cps[i].vTex = (float)(bitmap.rows + visibleBorder*2) / (float)height; cps[i].xOff = (float)(border - visibleBorder) / (float)width; cps[i].yOff = (float)(border - visibleBorder) / (float)height; /* sanity check: ofLog(OF_LOG_NOTICE,"%i %i %i %i %i %i", cps[i].value , cps[i].height , cps[i].width , cps[i].setWidth , cps[i].topExtent , cps[i].leftExtent ); */ // Allocate Memory For The Texture Data. unsigned char* expanded_data = new unsigned char[ 2 * width * height]; //-------------------------------- clear data: for(int j=0; j <height;j++) { for(int k=0; k < width; k++){ expanded_data[2*(k+j*width) ] = 255; // every luminance pixel = 255 expanded_data[2*(k+j*width)+1] = 0; } } if (bAntiAlised == true){ //----------------------------------- for(int j=0; j <height; j++) { for(int k=0; k < width; k++){ if ((k<bitmap.width) && (j<bitmap.rows)){ expanded_data[2*((k+border)+(j+border)*width)+1] = bitmap.buffer[k + bitmap.width*(j)]; } } } //----------------------------------- } else { //----------------------------------- // true type packs monochrome info in a // 1-bit format, hella funky // here we unpack it: unsigned char *src = bitmap.buffer; for(int j=0; j <bitmap.rows;j++) { unsigned char b=0; unsigned char *bptr = src; for(int k=0; k < bitmap.width ; k++){ expanded_data[2*((k+1)+(j+1)*width)] = 255; if (k%8==0){ b = (*bptr++);} expanded_data[2*((k+1)+(j+1)*width) + 1] = b&0x80 ? 255 : 0; b <<= 1; } src += bitmap.pitch; } //----------------------------------- } //Now we just setup some texture paramaters. 
glBindTexture( GL_TEXTURE_2D, texNames[i]); #ifndef TARGET_OF_IPHONE glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP); #endif if (bAntiAlised == true){ glTexParameterf(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); glTexParameterf(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); } else { glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_NEAREST); glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_NEAREST); } glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE); //Here we actually create the texture itself, notice //that we are using GL_LUMINANCE_ALPHA to indicate that //we are using 2 channel data. #ifndef TARGET_OF_IPHONE // gluBuild2DMipmaps doesn't seem to exist in anything i had in the iphone build... so i commented it out bool b_use_mipmaps = false; // FOR now this is fixed to false, could be an option, left in for legacy... if (b_use_mipmaps){ gluBuild2DMipmaps( GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, width, height, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, expanded_data); } else #endif { glTexImage2D( GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, width, height, 0, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, expanded_data ); } //With the texture created, we don't need to expanded data anymore delete [] expanded_data; } // ------------- close the library and typeface FT_Done_Face(face); FT_Done_FreeType(library); bLoadedOk = true; }
static bool saveImage(const ofPixels_<PixelType> & _pix, const std::filesystem::path& _fileName, ofImageQualityType qualityLevel) { ofInitFreeImage(); if (_pix.isAllocated() == false){ ofLogError("ofImage") << "saveImage(): couldn't save \"" << _fileName << "\", pixels are not allocated"; return false; } ofFilePath::createEnclosingDirectory(_fileName); std::string fileName = ofToDataPath(_fileName); FREE_IMAGE_FORMAT fif = FIF_UNKNOWN; fif = FreeImage_GetFileType(fileName.c_str(), 0); if(fif == FIF_UNKNOWN) { // or guess via filename fif = FreeImage_GetFIFFromFilename(fileName.c_str()); } if(fif==FIF_JPEG && (_pix.getNumChannels()==4 || _pix.getBitsPerChannel() > 8)){ ofPixels pix3 = _pix; pix3.setNumChannels(3); return saveImage(pix3,_fileName,qualityLevel); } FIBITMAP * bmp = nullptr; #ifdef TARGET_LITTLE_ENDIAN if(sizeof(PixelType) == 1 && (_pix.getPixelFormat()==OF_PIXELS_RGB || _pix.getPixelFormat()==OF_PIXELS_RGBA)) { // Make a local copy. ofPixels_<PixelType> pix = _pix; pix.swapRgb(); bmp = getBmpFromPixels(pix); }else{ #endif bmp = getBmpFromPixels(_pix); #ifdef TARGET_LITTLE_ENDIAN } #endif bool retValue = false; if((fif != FIF_UNKNOWN) && FreeImage_FIFSupportsReading(fif)) { if(fif == FIF_JPEG) { int quality = JPEG_QUALITYSUPERB; switch(qualityLevel) { case OF_IMAGE_QUALITY_WORST: quality = JPEG_QUALITYBAD; break; case OF_IMAGE_QUALITY_LOW: quality = JPEG_QUALITYAVERAGE; break; case OF_IMAGE_QUALITY_MEDIUM: quality = JPEG_QUALITYNORMAL; break; case OF_IMAGE_QUALITY_HIGH: quality = JPEG_QUALITYGOOD; break; case OF_IMAGE_QUALITY_BEST: quality = JPEG_QUALITYSUPERB; break; } retValue = FreeImage_Save(fif, bmp, fileName.c_str(), quality); } else { if(qualityLevel != OF_IMAGE_QUALITY_BEST) { ofLogWarning("ofImage") << "saveImage(): ofImageCompressionType only applies to JPEGs," << " ignoring value for \" "<< fileName << "\""; } if (fif == FIF_GIF) { FIBITMAP* convertedBmp; if(_pix.getImageType() == OF_IMAGE_COLOR_ALPHA) { // this just converts the image to 
grayscale so it can save something convertedBmp = FreeImage_ConvertTo8Bits(bmp); } else { // this will create a 256-color palette from the image convertedBmp = FreeImage_ColorQuantize(bmp, FIQ_NNQUANT); } retValue = FreeImage_Save(fif, convertedBmp, fileName.c_str()); if (convertedBmp != nullptr){ FreeImage_Unload(convertedBmp); } } else { retValue = FreeImage_Save(fif, bmp, fileName.c_str()); } } } if (bmp != nullptr){ FreeImage_Unload(bmp); } return retValue; }
// Queue a patch file to be opened, resolving it against the data folder first.
void ofxPd::addOpenFile( string file_path ) {
	string resolved_path = ofToDataPath( file_path );
	open_files.push_back( resolved_path );
}
// return the file path of the requested file in the data directory string urgDisplay::getFilePath(string folder_name, int file_number) { // path to folder containing info string folderPath = ofToDataPath(folder_name); // open path to directory ofDirectory folderDir(folderPath); cout << "In folder " << folder_name; // populate directory with files folderDir.listDir(); // get the specified file in the directory fileName = folderDir.getName(file_number); cout << " returning file " << fileName << endl; // return the path to the file filePath = folderDir.getPath(file_number); return filePath; /* Soutside Alleys, 11/7/2015 setScanParams(100) {x0, y0, z0, x1, y1, z1, ...} 0 not much 1 2 3 4 5 6 7 YES: garage, tree, storefront, etc. 8 bridge Carson St Sidewalk, 11/8/2015 setScanParams(100, 100, 682, 70000., 100000.) {x0, y0, z0, x1, y1, z1, ...} 0 nothing 1 nothing 2 nothing 3 nothing 4 nothing 5 nothing 6 nothing 7 people, tree, open, some buildings 8 people, cars 9 crowd of people, zScale = 20 10 two people sitting on sidewalk 11 small segment of buildings and people 12 some buildings 13 some buildings 14 some buildings, group of people 15 storefronts, some people, hydrants 16 buildings, people outlines 17 long stretch of sidewalk, buildings, outlines of people 18 lots of people, gets really close 19 LOTS OF PEOPLE, storefronts, really rich 20 buildings, neat storefronts 21 Carson St Sidewalk, 11/15/2015 {yaw, pitch, roll, x0, y0, x1, y1, ...} 0 magnometer wasn't working; looks like winding snake CFA Entrance, 11/17/2015 {time, x0, y0 x1, y1, ...} 21 some people 23 Entrances, 11/17/2015 0 Studio for Creative Inquiry, Golan 4 long hallway, lots of people, sparse 5 */ }
static void saveImage(ofPixels_<PixelType> & pix, string fileName, ofImageQualityType qualityLevel) { ofInitFreeImage(); if (pix.isAllocated() == false){ ofLogError("ofImage") << "saveImage(): couldn't save \"" << fileName << "\", pixels are not allocated"; return; } #ifdef TARGET_LITTLE_ENDIAN if(sizeof(PixelType) == 1 && (pix.getPixelFormat()==OF_PIXELS_RGB || pix.getPixelFormat()==OF_PIXELS_RGBA)) { pix.swapRgb(); } #endif FIBITMAP * bmp = getBmpFromPixels(pix); #ifdef TARGET_LITTLE_ENDIAN if(sizeof(PixelType) == 1 && (pix.getPixelFormat()==OF_PIXELS_BGR || pix.getPixelFormat()==OF_PIXELS_BGRA)) { pix.swapRgb(); } #endif ofFilePath::createEnclosingDirectory(fileName); fileName = ofToDataPath(fileName); FREE_IMAGE_FORMAT fif = FIF_UNKNOWN; fif = FreeImage_GetFileType(fileName.c_str(), 0); if(fif == FIF_UNKNOWN) { // or guess via filename fif = FreeImage_GetFIFFromFilename(fileName.c_str()); } if((fif != FIF_UNKNOWN) && FreeImage_FIFSupportsReading(fif)) { if(fif == FIF_JPEG) { int quality = JPEG_QUALITYSUPERB; switch(qualityLevel) { case OF_IMAGE_QUALITY_WORST: quality = JPEG_QUALITYBAD; break; case OF_IMAGE_QUALITY_LOW: quality = JPEG_QUALITYAVERAGE; break; case OF_IMAGE_QUALITY_MEDIUM: quality = JPEG_QUALITYNORMAL; break; case OF_IMAGE_QUALITY_HIGH: quality = JPEG_QUALITYGOOD; break; case OF_IMAGE_QUALITY_BEST: quality = JPEG_QUALITYSUPERB; break; } FreeImage_Save(fif, bmp, fileName.c_str(), quality); } else { if(qualityLevel != OF_IMAGE_QUALITY_BEST) { ofLogWarning("ofImage") << "saveImage(): ofImageCompressionType only applies to JPEGs," << " ignoring value for \" "<< fileName << "\""; } if (fif == FIF_GIF) { FIBITMAP* convertedBmp; if(pix.getImageType() == OF_IMAGE_COLOR_ALPHA) { // this just converts the image to grayscale so it can save something convertedBmp = FreeImage_ConvertTo8Bits(bmp); } else { // this will create a 256-color palette from the image convertedBmp = FreeImage_ColorQuantize(bmp, FIQ_NNQUANT); } FreeImage_Save(fif, convertedBmp, 
fileName.c_str()); if (convertedBmp != NULL){ FreeImage_Unload(convertedBmp); } } else { FreeImage_Save(fif, bmp, fileName.c_str()); } } } if (bmp != NULL){ FreeImage_Unload(bmp); } }
//-------------------------------------------------------------- GraphDef_ptr load_graph_def(const string path, tensorflow::Env* env) { string of_path(ofToDataPath(path)); GraphDef_ptr graph_def(new tensorflow::GraphDef()); log_error( tensorflow::ReadBinaryProto(env, of_path, graph_def.get()), "Error loading graph " + of_path ); return graph_def; }
// Start recording (once per recorder)
//
// Two modes (config.record_type):
//   ONI_STREAMING — attach the live generators to the recorder and stream
//                   frames to disk from now on.
//   ONI_CYCLIC    — first call only arms the in-memory ring buffer; the
//                   SECOND call (made while is_recording is true, see
//                   stopRecord()) creates mock nodes and dumps the buffered
//                   frames to file in chronological order.
// Returns false only when already stream-recording; true otherwise.
//----------------------------------------
bool ofxOpenNIRecorder::startRecord(string sName) {

	// make sure we don't re-instantiate if we're already recording in stream mode
	if(is_recording && config.record_type == ONI_STREAMING) {
		return false;
	}

	// mock generators used only for the cyclic dump; they replay buffered
	// frame data through the recorder as if it came from live devices
	xn::MockDepthGenerator m_depth;
	xn::MockImageGenerator m_image;
	xn::MockIRGenerator m_ir;

	// reset dropped frame counting variables
	nLastDepthTime = 0;
	nLastImageTime = 0;
	nMissedDepthFrames = 0;
	nMissedImageFrames = 0;
	nDepthFrames = 0;
	nImageFrames = 0;

	XnStatus result;

	// set the record file name (false: don't force-create the file yet)
	config.record_name = ofToDataPath(sName, false);

	bool do_init = false;

	// by using this do_init method the interface
	// is transparent to users whichever way we are recording
	// the second call to startRecording when ONI_CYCLING
	// dumps the buffer to file (see below stopRecord()
	if (config.record_type == ONI_CYCLIC && !is_recording) {

		printf("Start cyclic recording: %s\n", config.record_name.c_str());

		// reset cyclic recording variables
		m_nNextWrite = 0;
		m_nBufferCount = 0;

		is_recording = true;

	} else do_init = true;

	if (do_init) {

		// recorder init (CHECK_RC logs/handles any non-OK XnStatus)
		result = recorder.Create(context->getXnContext());
		CHECK_RC(result, "Recorder create");

		result = recorder.SetDestination(XN_RECORD_MEDIUM_FILE, config.record_name.c_str());
		CHECK_RC(result, "Recorder set destination");

		// create depth node
		if (config.record_depth) {

			if (config.record_type == ONI_STREAMING) {

				// just use the depth generator as the node to record
				result = recorder.AddNodeToRecording(depth_generator, XN_CODEC_16Z); // XN_CODEC_16Z_EMB_TABLES is smaller, but seems XN_CODEC_16Z is smoother
				CHECK_RC(result, "Recorder add depth node");

			} else if (config.record_type == ONI_CYCLIC) {

				// create a mock node based on the depth generator to record
				result = context->getXnContext().CreateMockNodeBasedOn(depth_generator, NULL, m_depth);
				CHECK_RC(result, "Create depth node");

				result = recorder.AddNodeToRecording(m_depth, XN_CODEC_16Z); // XN_CODEC_16Z_EMB_TABLES is smaller, but seems XN_CODEC_16Z is smoother
				CHECK_RC(result, "Recorder add depth node");
			}
		}

		// create image node
		if (config.record_image) {

			if (config.record_type == ONI_STREAMING) {

				// just use the image generator as the node to record
				result = recorder.AddNodeToRecording(image_generator, XN_CODEC_NULL); // XN_CODEC_NULL appears to give least frame drops and size not much > JPEG
				CHECK_RC(result, "Recorder add image node");

			} else if (config.record_type == ONI_CYCLIC) {

				// create a mock node based on the image generator to record
				result = context->getXnContext().CreateMockNodeBasedOn(image_generator, NULL, m_image);
				CHECK_RC(result, "Create image node");

				result = recorder.AddNodeToRecording(m_image, XN_CODEC_NULL); // XN_CODEC_NULL appears to give least frame drops and size not much > JPEG
				CHECK_RC(result, "Recorder add image node");
			}
		}

		// create ir node
		if (config.record_ir) {

			if (config.record_type == ONI_STREAMING) {

				// just use the image generator as the node to record
				result = recorder.AddNodeToRecording(ir_generator, XN_CODEC_NULL); // XN_CODEC_NULL appears to give least frame drops and size not much > JPEG
				CHECK_RC(result, "Recorder add ir node");

			} else if (config.record_type == ONI_CYCLIC) {

				// create a mock node based on the image generator to record
				result = context->getXnContext().CreateMockNodeBasedOn(ir_generator, NULL, m_ir);
				CHECK_RC(result, "Create ir node");

				result = recorder.AddNodeToRecording(m_ir, XN_CODEC_NULL); // XN_CODEC_NULL appears to give least frame drops and size not much > JPEG
				CHECK_RC(result, "Recorder add ir node");
			}
		}

		// Frame sync is currently not possible with Kinect cameras!!
		// if we try to frame sync then recording fails
#ifndef USINGKINECT
		// Frame Sync
		if(xn_depth.IsCapabilitySupported(XN_CAPABILITY_FRAME_SYNC)) {
			if(depth_generator.GetFrameSyncCap().CanFrameSyncWith(image_generator)) {
				result = depth_generator.GetFrameSyncCap().FrameSyncWith(image_generator);
				CHECK_RC(result, "Enable frame sync");
			}
		}
#endif

		if (config.record_type == ONI_STREAMING) {

			printf("Start streaming recording: %s\n", config.record_name.c_str());
			is_recording = true;

		} else if (config.record_type == ONI_CYCLIC && is_recording) {

			// second cyclic call: replay the ring buffer into the recorder.
			// Record frames from current position in cyclic buffer loop through to the end
			if (m_nNextWrite < m_nBufferCount) {
				// Not first loop, right till end (oldest frames first)
				for (XnUInt32 i = m_nNextWrite; i < m_nBufferSize; ++i) {
					if (config.record_depth) m_depth.SetData(frames[i].depth_frame);
					if (config.record_image) m_image.SetData(frames[i].image_frame);
					if (config.record_ir) m_ir.SetData(frames[i].ir_frame);
					recorder.Record();
				}
			}

			// Write frames from the beginning of the buffer to the last one written
			for (XnUInt32 i = 0; i < m_nNextWrite; ++i) {
				if (config.record_depth) m_depth.SetData(frames[i].depth_frame);
				if (config.record_image) m_image.SetData(frames[i].image_frame);
				if (config.record_ir) m_ir.SetData(frames[i].ir_frame);
				recorder.Record();
			}

			// cleanup: release recorder + mock nodes and free the ring buffer
			recorder.Release();
			m_ir.Release();
			m_image.Release();
			m_depth.Release();
			XN_DELETE_ARR(frames);
		}
	}

	return true;
}
/******************************************************************************
 * The setup function is run once to perform initializations in the application
 *****************************************************************************/
void ofxNCoreVision::_setup(ofEventArgs &e)
{
	//set the title
	ofSetWindowTitle(" Community Core Vision v 1.4");

	//create filter
	if(filter == NULL)	filter = new ProcessFilters();
	if ( filter_fiducial == NULL ){filter_fiducial = new ProcessFilters();}

	//Load Settings from config.xml file
	loadXMLSettings();

	if(debugMode)
	{
		printf("DEBUG MODE : Printing to File\n");
		/*****************************************************************************************************
		* LOGGING: redirect stdout to a timestamped log file
		******************************************************************************************************/
		/* alright first we need to get time and date so our logs can be ordered */
		time ( &rawtime );
		timeinfo = localtime ( &rawtime );
		strftime (fileName,80,"../logs/log_%B_%d_%y_%H_%M_%S.txt",timeinfo);
		FILE *stream ;
		// fix: the original did sprintf(fileName, ofToDataPath(fileName).c_str()),
		// which used the path itself as the printf FORMAT string — any '%' in the
		// path would read garbage varargs and could overflow the buffer. Copy it
		// with an explicit "%s" format and a bounded write instead.
		snprintf(fileName, 80, "%s", ofToDataPath(fileName).c_str());
		if((stream = freopen(fileName, "w", stdout)) == NULL){} // best effort; keep running unlogged on failure
		/******************************************************************************************************/
	}

	cameraInited = false;

	//Setup Window Properties
	ofSetWindowShape(winWidth,winHeight);
	ofSetVerticalSync(false); //Set vertical sync to false for better performance?

	//load camera/video
	initDevice();
	printf("Camera(s)/Video Initialised...\n");

	//set framerate
	ofSetFrameRate(camRate * 1.3); //This will be based on camera fps in the future

	/*****************************************************************************************************
	* Allocate images (needed for drawing/processing images)
	******************************************************************************************************/
	processedImg.allocate(camWidth, camHeight); //main Image that'll be processed.
	processedImg.setUseTexture(false); //We don't need to draw this so don't create a texture
	sourceImg.allocate(camWidth, camHeight); //Source Image
	sourceImg.setUseTexture(false); //We don't need to draw this so don't create a texture

	//Fiducial Images
	processedImg_fiducial.allocate(camWidth, camHeight); //main Image that'll be processed.
	processedImg_fiducial.setUseTexture(false); //We don't need to draw this so don't create a texture
	/******************************************************************************************************/

	//Fonts - Is there a way to dynamically change font size?
	verdana.loadFont("verdana.ttf", 8, true, true); //Font used for small images

	//Static Images
	background.loadImage("images/background.jpg"); //Main (Temp?) Background

	//GUI Controls
	controls = ofxGui::Instance(this);
	setupControls();

	tracker.setCameraSize(camWidth,camHeight);

	//Setup Calibration
	// NOTE(review): calib.setup(camWidth, camHeight, &tracker) was disabled by a
	// previous author; only the tracker is wired in here — confirm intended.
	//calib.setup(camWidth, camHeight, &tracker);
	calib.SetTracker(&tracker);

	//Allocate Filters
	filter->allocate( camWidth, camHeight );
	filter_fiducial->allocate( camWidth, camHeight );

	//Fiducial Initialisation
	// factor for Fiducial Drawing. The ImageSize is hardcoded 320x240 Pixel!(Look at ProcessFilters.h at the draw() Method
	fiducialDrawFactor_Width = 320 / static_cast<float>(filter->camWidth);//camWidth;
	fiducialDrawFactor_Height = 240 / static_cast<float>(filter->camHeight);//camHeight;

	/*****************************************************************************************************
	* Startup Modes
	******************************************************************************************************/
	//If Standalone Mode (not an addon)
	if (bStandaloneMode)
	{
		printf("Starting in standalone mode...\n\n");
		showConfiguration = true;
	}
	if (bMiniMode)
	{
		showConfiguration = true;
		bShowInterface = false;
		printf("Starting in Mini Mode...\n\n");
		ofSetWindowShape(190, 200); //minimized size
		filter->bMiniMode = bMiniMode;
	}
	else{
		bShowInterface = true;
		printf("Starting in full mode...\n\n");
	}

	//If Object tracking activated
	if(contourFinder.bTrackObjects)
	{
		templates.loadTemplateXml();
	}

	contourFinder.setTemplateUtils(&templates);
	tracker.passInFiducialInfo(&fidfinder);

	#ifdef TARGET_WIN32
		//get rid of the console window
		FreeConsole();
	#endif

	printf("Community Core Vision is setup!\n\n");
}
//--------------------------------------------------------------------------- bool ofQuickTimePlayer::load(string name){ //-------------------------------------- #ifdef OF_VIDEO_PLAYER_QUICKTIME //-------------------------------------- initializeQuicktime(); // init quicktime closeMovie(); // if we have a movie open, close it bLoaded = false; // try to load now // from : https://github.com/openframeworks/openFrameworks/issues/244 // http://developer.apple.com/library/mac/#documentation/QuickTime/RM/QTforWindows/QTforWindows/C-Chapter/3BuildingQuickTimeCa.html // Apple's documentation *seems* to state that a Gworld should have been set prior to calling NewMovieFromFile // So I set a dummy Gworld (1x1 pixel) before calling createMovieFromPath // it avoids crash at the creation of objet ofVideoPlayer after a previous ofVideoPlayer have been deleted #ifdef TARGET_WIN32 if (width != 0 && height != 0){ pixels.clear(); delete [] offscreenGWorldPixels; } width = 1; height = 1; createImgMemAndGWorld(); #endif if( name.substr(0, 7) == "http://" || name.substr(0,7) == "rtsp://" ){ if(! 
createMovieFromURL(name, moviePtr) ) return false; }else{ name = ofToDataPath(name); if( !createMovieFromPath((char *)name.c_str(), moviePtr) ) return false; } bool bDoWeAlreadyHaveAGworld = false; if (width != 0 && height != 0){ bDoWeAlreadyHaveAGworld = true; } Rect movieRect; GetMovieBox(moviePtr, &(movieRect)); if (bDoWeAlreadyHaveAGworld){ // is the gworld the same size, then lets *not* de-allocate and reallocate: if (width == movieRect.right && height == movieRect.bottom){ SetMovieGWorld (moviePtr, offscreenGWorld, nil); } else { width = movieRect.right; height = movieRect.bottom; pixels.clear(); delete [] offscreenGWorldPixels; if ((offscreenGWorld)) DisposeGWorld((offscreenGWorld)); createImgMemAndGWorld(); } } else { width = movieRect.right; height = movieRect.bottom; createImgMemAndGWorld(); } if (moviePtr == NULL){ return false; } //----------------- callback method myDrawCompleteProc = NewMovieDrawingCompleteUPP (DrawCompleteProc); SetMovieDrawingCompleteProc (moviePtr, movieDrawingCallWhenChanged, myDrawCompleteProc, (long)this); // ------------- get the total # of frames: nFrames = 0; TimeValue curMovieTime; curMovieTime = 0; TimeValue duration; //OSType whichMediaType = VIDEO_TYPE; // mingw chokes on this OSType whichMediaType = FOUR_CHAR_CODE('vide'); short flags = nextTimeMediaSample + nextTimeEdgeOK; while( curMovieTime >= 0 ) { nFrames++; GetMovieNextInterestingTime(moviePtr,flags,1,&whichMediaType,curMovieTime,0,&curMovieTime,&duration); flags = nextTimeMediaSample; } nFrames--; // there's an extra time step at the end of themovie // ------------- get some pixels in there ------ GoToBeginningOfMovie(moviePtr); SetMovieActiveSegment(moviePtr, -1,-1); MoviesTask(moviePtr,0); #if defined(TARGET_OSX) && defined(__BIG_ENDIAN__) convertPixels(offscreenGWorldPixels, pixels.getPixels(), width, height); #endif bStarted = false; bLoaded = true; bPlaying = false; bHavePixelsChanged = false; speed = 1; return true; //-------------------------------------- 
#endif //-------------------------------------- }
void ofApp::keyPressed(int key){ if (key == 'u') { do_update = true; } // is it this way on most keyboards??? if (key == '=') { zoom_in = true; } if (key == '-') { zoom_out = true; } if (key == OF_KEY_BACKSPACE || key == OF_KEY_DEL) { c.deleteSelection(); } bool cmd = ofGetKeyPressed(OF_KEY_COMMAND); bool ctrl = ofGetKeyPressed(OF_KEY_CONTROL); bool shift = ofGetKeyPressed(OF_KEY_SHIFT); if (key == 's' && (cmd || ctrl)) { c.save(ofToDataPath("lines.txt")); } if (key == 'l' && (cmd || ctrl)) { c.load(ofToDataPath("lines.txt")); } if (key == 'c' && (cmd || ctrl)) { c.copy(); } if (key == 'v' && (cmd || ctrl)) { c.paste(); } if (key == 'z' && (cmd || ctrl) && !shift) { if (c.curr_action_i > c.actions.size() - 1) { c.curr_action_i = c.actions.size() - 1; } if (c.curr_action_i < 0) { c.curr_action_i = 0; } c.actions[c.curr_action_i]->undoAction(&c); c.curr_action_i--; if (c.curr_action_i < 0) { c.curr_action_i = 0; } } if ((key == 'y' && (cmd || ctrl)) || (key == 'z' && (cmd || ctrl) && shift)) { if (c.curr_action_i > c.actions.size() - 1) { c.curr_action_i = c.actions.size() - 1; } if (c.curr_action_i < 0) { c.curr_action_i = 0; } if (c.actions[c.curr_action_i]->undo) { c.actions[c.curr_action_i]->doAction(&c); c.curr_action_i++; } else { c.curr_action_i++; if (c.curr_action_i > c.actions.size() - 1) { c.curr_action_i = c.actions.size() - 1; } c.actions[c.curr_action_i]->doAction(&c); } if (c.curr_action_i > c.actions.size() - 1) { c.curr_action_i = c.actions.size() - 1; } } }
//------------------------------------------------------------ void ofQuicktimeSoundPlayer::loadSound(string fileName, bool stream){ fileName = ofToDataPath(fileName); // TODO: hmm? bMultiPlay = false; // [1] init fmod, if necessary initializeQuicktime(); // [2] try to unload any previously loaded sounds // & prevent user-created memory leaks // if they call "loadSound" repeatedly, for example if (bLoadedOk == true){ unloadSound(); } // [3] load sound OSErr error; //CFBundleRef gameBundle = CFBundleGetMainBundle(); // Find the file in the application bundle. CFURLRef movieFileLocation; movieFileLocation = CFURLCreateFromFileSystemRepresentation(NULL, (UInt8*)fileName.c_str(), strlen(fileName.c_str()), false); //CFBundleCopyResourceURL(gameBundle, filename, fileExtension, subdirectory); if (movieFileLocation == NULL) return; Handle dataRef; OSType dataRefType; dataRef = NewHandle(sizeof(AliasHandle)); // Get the movie file set up so we can load it in memory. // The second parameter to QTNewDataReferenceFromCFURL is flags. // It should be set to 0. error = QTNewDataReferenceFromCFURL(movieFileLocation, 0, &dataRef, &dataRefType); if(error != noErr) { DisposeHandle(dataRef); CFRelease(movieFileLocation); return; } // Get the movie into memory short fileID = movieInDataForkResID; short flags = 0; error = NewMovieFromDataRef(&soundToPlay, flags, &fileID, dataRef, dataRefType); // Dispose of the memory we allocated. DisposeHandle(dataRef); CFRelease(movieFileLocation); // TODO: check here! bLoadedOk = true; }
void ofApp::setup() { ofSetFrameRate(30); ofEnableAlphaBlending(); /// Example Database std::string transactionDb = ofToDataPath("transaction.sqlite", true); try { // Open a database file in create/write mode SQLite::Database db(transactionDb, SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE); ofLogNotice() << "SQLite database file '" << db.getFilename() << "' opened successfully."; db.exec("DROP TABLE IF EXISTS test"); // Example of a successful transaction : try { // Begin transaction SQLite::Transaction transaction(db); db.exec("CREATE TABLE test (id INTEGER PRIMARY KEY, value TEXT)"); int nb = db.exec("INSERT INTO test VALUES (NULL, \"test\")"); ofLogNotice() << "INSERT INTO test VALUES (NULL, \"test\")\", returned " << nb; // Commit transaction transaction.commit(); } catch (std::exception& e) { ofLogError() << "SQLite exception: " << e.what(); } // Example of a rollbacked transaction : try { // Begin transaction SQLite::Transaction transaction(db); int nb = db.exec("INSERT INTO test VALUES (NULL, \"second\")"); ofLogNotice() << "INSERT INTO test VALUES (NULL, \"second\")\", returned " << nb; nb = db.exec("INSERT INTO test ObviousError"); ofLogNotice() << "INSERT INTO test \"error\", returned " << nb; ofLogNotice() << "SQLite SHOULD have exited ... "; // Commit transaction transaction.commit(); } catch (std::exception& e) { ofLogError() << "EXPECTED: SQLite exception: " << e.what(); // expected error, see above } // Check the results (expect only one row of result, as the second one has been rollbacked by the error) SQLite::Statement query(db, "SELECT * FROM test"); ofLogNotice() << "SELECT * FROM test :"; while (query.executeStep()) { ofLogNotice() << "row (" << query.getColumn(0) << ", \"" << query.getColumn(1) << "\")"; } } catch (std::exception& e) { ofLogError() << "SQLite exception: " << e.what(); } // Clean up. ofFile::removeFile(transactionDb); }
// "Thanks" screen setup: renders a printable card (song, user, date) into an
// FBO, saves it as a PNG, optionally sends it to a printer via `lp`, and arms
// the screen countdown.
void PMSc10Thanks::setup()
{
    // The song name must be fetched first; otherwise the FBO cannot be generated.
    songName = PMSongSelector::getInstance().getSongname();
    userName = "******" + PMSharedSettings::getInstance().getUserName();
    // NOTE(review): "%H%:%M" contains a stray '%' — presumably "%H:%M" was
    // intended; confirm what ofGetTimestampString renders for it.
    dateName = ofGetTimestampString("%d/%m/%Y, %H%:%M");

    // Load the image, allocate the FBO and render into it.
    ofClear(0, 0, 0);
    printImage.load("TempRender.png");
    printFbo.allocate(1181, 1772, GL_RGB);  // 4x6in print canvas
    drawIntoFbo();

    // Export the FBO pixels and save them to disk.
    ofPixels pix;
    printFbo.readToPixels(pix);
//    ofSaveImage(pix, "toPrint.png", OF_IMAGE_QUALITY_BEST);

    // Sanitize name components (spaces/slashes) — currently only used by the
    // commented-out filename below; the saved name is timestamp-based.
    ofStringReplace(songName, " ", "_");
    ofStringReplace(userName, " ", "_");
    ofStringReplace(dateName, " ", "_");
    ofStringReplace(dateName, "/", "_");

//    string saveFilename= "exports/toprint/"+songName+"-"+userName+"_"+dateName+".png";
//    cout<<songName<<" "<<userName<<" "<<dateName<<" "<<saveFilename<<endl;
    string saveFilename = "exports/toprint/toPrint_" + ofGetTimestampString() + ".png";
    ofSaveImage(pix, saveFilename, OF_IMAGE_QUALITY_BEST);

    // Print the FBO via CUPS `lp`, borderless on 4x6in media.
#if ENABLE_PRINTING
    string c = "lp -o media=Custom.4x6in -o page-left=0 -o page-right=0 -o page-top=0 -o page-bottom=0 " + ofToDataPath(saveFilename);
    system(c.c_str());
#endif

    // Arm the auto-dismiss countdown for this screen.
    countdown.set();
    countdown.setAlarm(COUNTDOWN_TIME * 1000);
    count=0;
}
//--------------------------------------------------------------
// Key handler:
//   'c' — toggle the cut mode (square image vs. beat rows)
//   's' — encode the loaded audio's raw samples into a 16-bit PNG
//   'l' — decode test.png back into samples and write them to an AIFF file
void ofApp::keyReleased(int key){
    if(key=='c') {
        cuttype = 1-cuttype;
        if(cuttype==0) themess = "cuttype is square";
        if(cuttype==1) themess = "cuttype is beats";
    }
    if(key=='s') {
        // draw picture
        themess = "sound to picture... saving";
        cout << "sound to picture... saving" << endl;
        const vector<float>& rawSamples = audio.getRawSamples();
        int channels = audio.getChannels();
        int n = rawSamples.size();
        if(cuttype==0) {
            // square picture: side length is sqrt of the per-channel frame count
            int s = int(sqrt(n/channels));
            cout << "img size: " << s << " by " << s << endl;
            img.allocate(s, s, OF_IMAGE_COLOR);
            int iptr = 0; // which audio sample are we on?
            for(int i = 0;i<s;i++) {
                for(int j = 0;j<s;j++) {
                    // Map [-1,1] floats into the full 16-bit range of ofShortColor.
                    // NOTE(review): iptr+1 assumes at least 2 channels (stereo) —
                    // confirm mono input cannot reach here.
                    float r = ofMap(rawSamples[iptr], -1., 1., 0., 65535.); // left side
                    float g = ofMap(rawSamples[iptr+1], -1., 1., 0., 65535.); // right side
                    cout << r << " " << g << endl;
                    img.setColor(j, i, ofShortColor(r, g, 0));
                    iptr+=channels;
                }
            }
        } else if(cuttype==1) {
            // not square picture: fixed 64 rows, blue channel marks subdivision beats
            int nrows = 64;
            int subdivs = 4;
            int s = int((n/channels)/nrows);
            cout << "img size: " << s << " by " << nrows << endl;
            img.allocate(s, nrows, OF_IMAGE_COLOR);
            int iptr = 0; // which audio sample are we on?
            for(int i = 0;i<nrows;i++) {
                for(int j = 0;j<s;j++) {
                    float r = ofMap(rawSamples[iptr], -1., 1., 0., 65535.); // left side
                    float g = ofMap(rawSamples[iptr+1], -1., 1., 0., 65535.); // right side
                    // blue = full-scale tick at every subdivision boundary
                    float b = (j%(s/subdivs)==0)*65535.;
                    cout << r << " " << g << endl;
                    img.setColor(j, i, ofShortColor(r, g, b));
                    iptr+=channels;
                }
            }
        }
        img.reloadTexture();
        img.saveImage("test.png");
    }
    if(key=='l') {
        themess = "picture to sound";
        // generate sound from picture
        cout << "picture to sound" << endl;
        ofShortImage foo;
        foo.loadImage("test.png");
        vector<int32_t> theSamps;
        cout << "size: " << foo.getWidth() << " by " << foo.getHeight() << endl;
        float s = foo.getWidth();
        float t = foo.getHeight();
        for(int i = 0;i<t;i++) {
            for(int j = 0;j<s;j++) {
                ofShortColor c = foo.getColor(j, i);
                // Undo the 16-bit mapping, then scale into 32-bit sample range.
                float rs = ofMap(c.r, 0., 65535., -1., 1.);
                float gs = ofMap(c.g, 0., 65535., -1., 1.);
                int32_t r =int(rs*536870912.); // 2^29 (wtf?)
                int32_t g =int(gs*536870912.); // 2^29 (wtf?)
                cout << "sample: " << c.r << " " << c.g << endl;
                // Interleave left/right into the output buffer.
                theSamps.push_back(r);
                theSamps.push_back(g);
            }
        }
        // try dumping the original sound
        /*
        const vector<float>& rawSamples = audio.getRawSamples();
        int channels = audio.getChannels();
        int n = rawSamples.size();
        for(int i = 0;i<n;i++) {
            int32_t s =int(rawSamples[i]*500000000.);
            cout << rawSamples[i] << " " <<s << endl;
            theSamps.push_back(s);
        }*/
        // insert file writing code to dump theSamps to disk here:
        // Write theSamps as a 2-channel, 44.1kHz, 16-bit AIFF via libaiff.
        AIFF_Ref ref ;
        string p = ofToDataPath("foo.aiff", true);
        cout << p << endl;
        ref = AIFF_OpenFile(p.c_str(), F_WRONLY) ;
        if( ref ) {
            puts("File opened successfully.");
            AIFF_SetAudioFormat(ref, 2, 44100., 16);
            AIFF_StartWritingSamples(ref);
            int32_t* v = theSamps.data();
            AIFF_WriteSamples32Bit(ref, v, theSamps.size());
            AIFF_EndWritingSamples(ref);
            AIFF_CloseFile(ref);
            puts("Closed file.");
        }
    }
}
// Resolve `path` against the data folder and derive the cached path pieces:
// filePath (absolute path), baseName (filename stem without extension) and
// rootDir (containing directory, with trailing slash).
void Object::setFilePath(string path) {
    filePath = ofToDataPath(path);

    const size_t slashPos = filePath.find_last_of('/');
    const size_t dotPos = filePath.find_last_of('.');
    const size_t stemStart = slashPos + 1;

    baseName = filePath.substr(stemStart, dotPos - stemStart);
    rootDir = filePath.substr(0, slashPos) + "/";
}
//--------------------------------------------------------------
// Startup routine: reads FTP credentials, lists local media files, lists the
// remote uploads directory, downloads any remote file not present locally,
// then moves the downloads into the media directory.
void testApp::setup() {

    /* INTRO */

    // Preamble
    cout << "Starting Program " << endl;
    cout << " ----- \n" << endl;

    // Set Log Level (To Ensure Serial Debug Info Prints)
    ofSetLogLevel(OF_LOG_NOTICE);

    // Read Credentials File
    // Expected line order: host, username, password, port.
    credentials = ofBufferFromFile("settings/credentials.txt");
    host = credentials.getFirstLine();
    username = credentials.getNextLine();
    password = credentials.getNextLine();
    port = credentials.getNextLine();

    // Set Paths
    pathToDataDirectory = "../../../MEDIA";
    pathToLogsDirectory = "../../../LOGS";
    pathToUploadsDirectory = "/filefrontend/data/files/uploads/";

    /* READ EXISTING FILES */

    // Set Path To Root Data Folder
    string mediaPath = pathToDataDirectory;
    mediaDirectory = ofDirectory(mediaPath);

    //Allow Media Types
    mediaDirectory.allowExt("jpg");
    mediaDirectory.allowExt("png");
    mediaDirectory.allowExt("mp4");
    mediaDirectory.allowExt("zip");

    // List
    mediaDirectory.sort();
    mediaDirectory.listDir();
    cout << "Media Directory Has " << mediaDirectory.size() << " Valid Files \n" << endl;

    // Remember every local file name so remote names can be compared later.
    existingFileNames.resize(mediaDirectory.size());
    for (int i = 0; i < mediaDirectory.size(); i++) {
        existingFileNames[i] = mediaDirectory.getName(i);
        cout << "Existing File Name #" << i << " is " << existingFileNames[i] << endl;
    }
    cout << "Existing Parse Success! \n" << endl;

    // Connect
    try {
        cout << "Attempting To Connect To FTP:" << endl;
        // Log In To FTP
        // NOTE(review): `port` is read from the credentials file above but the
        // connection hardcodes port 21 — confirm which is intended.
        client.setup(host, username, password, 21);
        client.setVerbose(true);
        cout << "Connection Success! \n" << endl;
    }
    catch(int e) {
        cout << "The Exception #" << e << " Occured." << endl;
    }

    /* LIST REMOTE FILES */

    // List Files
    try {
        cout << "Attempting To List All Files:" << endl;

        // Assign List Of Directory To Base String Vector
        fileNames = client.list(pathToUploadsDirectory);

        // Resize Trimmed Vector To Match
        trimmedFileNames.resize(fileNames.size());
        // NOTE(review): resizing existingFileNames to the remote count can
        // truncate or pad the local list built above — verify this is intended.
        existingFileNames.resize(trimmedFileNames.size());

        // Loop Through File Names, Trim At Path, And Assign To New Vector
        for(int i = 0; i < fileNames.size(); i++) {
            cout << "Original Item #" << i << " is " << fileNames[i] << endl;
            // Keep only the portion after "uploads/" (the bare file name).
            vector<string> trimmedName = ofSplitString(fileNames[i], "uploads/");
            trimmedFileNames[i] = trimmedName[1];
        }
        cout << "" << endl;

        // Loop Through New Vector To Make Sure
        for(int i = 0; i < trimmedFileNames.size(); i++) {
            cout << "Trimmed Item #" << i << " is " << trimmedFileNames[i] << endl;
        }
        cout << "Listing Success!\n" << endl;
    }
    catch(int e) {
        cout << "The Exception #" << e << " Occured." << endl;
    }

    /* COMPARE EXISTING AND REMOTE FILES */

    // Compare Files
    cout << "Attempting To Compare All Files:" << endl;
    for(int i = 0; i < trimmedFileNames.size(); i++) {
        Boolean match = false; // Set Boolean To Test Matches
        for(int j = 0; j < existingFileNames.size(); j++) {
            // If The Current File Matches Any File In The Existing List
            if(trimmedFileNames[i].compare(existingFileNames[j]) == 0 ) {
                match = true;
                break;
            }
            // Otherwise Mark It As New
            else {
                match = false;
            }
        }
        // Test
        if(match) {
            cout << "The File " << trimmedFileNames[i] << " Has Previously Been Downloaded" << endl;
        }
        else {
            cout << "The File " << trimmedFileNames[i] << " Seems To Be New" << endl;
            newFileNames.push_back(trimmedFileNames[i]); // Add To New List
        }
    }
    cout << "Comparing Success!\n" << endl;

    /* DOWNLOAD NEW FILES */

    // Get Files
    try {
        cout << "Attempting To Download " << newFileNames.size() << " Files:" << endl;
        // Download All Files From Uploads Directory To Data Directory
        for(int i = 0; i < newFileNames.size(); i++) {
            cout << "Downloading The File " << newFileNames[i] << endl;
            // Downloads land in the app's data folder (ofToDataPath("")).
            client.get(newFileNames[i], ofToDataPath(""), pathToUploadsDirectory);
        }
        cout << "Downloading Success!\n" << endl;
    }
    catch(int e) {
        cout << "The Exception #" << e << " Occured." << endl;
    }

    /* MOVE FILES */

    cout << "Listing Files In Data Directory" << endl;

    // Set Path To Root Data Folder
    string dataPath = "";
    dataDirectory = ofDirectory(dataPath);

    //Allow Media Types
    dataDirectory.allowExt("jpg");
    dataDirectory.allowExt("png");
    dataDirectory.allowExt("mp4");
    dataDirectory.allowExt("zip");

    // List
    dataDirectory.sort();
    dataDirectory.listDir();
    cout << "Data Directory Has " << dataDirectory.size() << " Valid Files" << endl;

    // Move To Data Directory
    cout << "Moving To Data Directory" << endl;
    for (int i = 0; i < dataDirectory.size(); i++) {
        ofFile file = dataDirectory.getFile(i);
        cout << "Moving File #" << i << endl;
        // moveTo(dest, bRelativeToData=true, bOverwrite=false)
        file.moveTo(pathToDataDirectory, true, false);
    }
    cout << "Moving Success!\n" << endl;
}
//---------------------------------------------------------------------------
// Load a movie (file path or http/rtsp URL) for alpha-enabled playback.
// QuickTime build: creates/reuses an offscreen GWorld sized to the movie,
// installs a drawing-complete callback, counts frames, and primes the first
// pixels. Non-QuickTime build: delegates to GStreamer (gstUtils).
// Returns true on success.
bool ofxAlphaVideoPlayer::loadMovie(string name){

	//--------------------------------------
	#ifdef OF_VIDEO_PLAYER_QUICKTIME
	//--------------------------------------

	initializeQuicktime();			// init quicktime
	closeMovie();					// if we have a movie open, close it
	bLoaded = false;				// try to load now

	// URLs are passed straight through; plain names are resolved to data/.
	if( name.substr(0, 7) == "http://" || name.substr(0,7) == "rtsp://" ){
		if(! ofxCreateMovieFromURL(name, moviePtr) ) return false;
	}else{
		name = ofToDataPath(name);
		if( !ofxCreateMovieFromPath((char *)name.c_str(), moviePtr) ) return false;
	}

	// Non-zero width/height means a GWorld from a previous load still exists.
	bool bDoWeAlreadyHaveAGworld = false;
	if (width != 0 && height != 0){
		bDoWeAlreadyHaveAGworld = true;
	}

	Rect movieRect;
	GetMovieBox(moviePtr, &(movieRect));
	if (bDoWeAlreadyHaveAGworld){
		// is the gworld the same size, then lets *not* de-allocate and reallocate:
		if (width == movieRect.right && height == movieRect.bottom){
			SetMovieGWorld (moviePtr, offscreenGWorld, nil);
		} else {
			width 	= movieRect.right;
			height 	= movieRect.bottom;
			// NOTE(review): if these buffers were allocated with new[] in
			// createImgMemAndGWorld, these should be delete[] — confirm there.
			delete(pixels);
			delete(offscreenGWorldPixels);
			if ((offscreenGWorld)) DisposeGWorld((offscreenGWorld));
			createImgMemAndGWorld();
		}
	} else {
		width	= movieRect.right;
		height 	= movieRect.bottom;
		createImgMemAndGWorld();
	}

	if (moviePtr == NULL){
		return false;
	}

	//----------------- callback method
	// Invoked by QuickTime whenever a new frame has been drawn to the GWorld.
	myDrawCompleteProc = NewMovieDrawingCompleteUPP (ofxDrawCompleteProc);
	SetMovieDrawingCompleteProc (moviePtr, movieDrawingCallWhenChanged,  myDrawCompleteProc, (long)this);

	// ------------- get the total # of frames:
	// Walk the video track's "interesting times" until QuickTime returns -1.
	nFrames				= 0;
	TimeValue			curMovieTime;
	curMovieTime		= 0;
	TimeValue			duration;

	//OSType whichMediaType	= VIDEO_TYPE; // mingw chokes on this
	OSType whichMediaType	= FOUR_CHAR_CODE('vide');

	short flags				= nextTimeMediaSample + nextTimeEdgeOK;

	while( curMovieTime >= 0 ) {
		nFrames++;
		GetMovieNextInterestingTime(moviePtr,flags,1,&whichMediaType,curMovieTime,0,&curMovieTime,&duration);
		flags = nextTimeMediaSample;
	}
	nFrames--; // there's an extra time step at the end of the movie

	// ------------- get some pixels in there ------
	GoToBeginningOfMovie(moviePtr);
	SetMovieActiveSegment(moviePtr, -1,-1);
	MoviesTask(moviePtr,0);

	// Big-endian Macs need a byte-order conversion of the GWorld pixels.
	#if defined(TARGET_OSX) && defined(__BIG_ENDIAN__)
	cout << "convertPixels" <<endl;
	convertPixels(offscreenGWorldPixels, pixels, width, height);
	cout << "converPixels end"<<endl;
	#endif

	if (bUseTexture == true){
		//cout << "LOADMOVIE: using texture" <<endl;
		tex.loadData(pixels, width, height, GL_RGBA);
	}

	bStarted 	= false;
	bLoaded 	= true;
	bPlaying 	= false;
	bHavePixelsChanged = false;
	speed 		= 1;
	return true;

	//--------------------------------------
	#else
	//--------------------------------------

	// GStreamer path: load, then (optionally) allocate and fill the texture.
	if(gstUtils.loadMovie(name)){
		if(bUseTexture){
			tex.allocate(gstUtils.getWidth(),gstUtils.getHeight(),GL_RGBA,false);
			tex.loadData(gstUtils.getPixels(), gstUtils.getWidth(), gstUtils.getHeight(), GL_RGBA);
		}
		bLoaded = true;
		allocated = true;
		ofLog(OF_LOG_VERBOSE,"ofxAlphaVideoPlayer: movie loaded");
		return true;
	}else{
		ofLog(OF_LOG_ERROR,"ofxAlphaVideoPlayer couldn't load movie");
		return false;
	}

	//--------------------------------------
	#endif
	//--------------------------------------
}
//-------------------------------------------------------------- void testApp::keyReleased(int key){ //NUEVA AREA TRIANGULAR if(key=='t') { numeros = !numeros; } if(key=='c') mostrar_cam = (mostrar_cam==1)?0:1; if(key=='n'&&nAreas<MAX_AREAS&&!modificar) { nAreas++; creando = true; modificar = false; modificar_area = false; cout<<"Seleccione 3 vertices, Area# "<<nAreas<<endl; } //NUEVO POLIGONO if(key=='p'&&!modificar) { if(!creando_poli&&nAreas<MAX_AREAS) { nAreas++; creando_poli = true; cout<<"Seleccione el numero de vertices que desee y presione la tecla p al terminar."<<endl; areas[nAreas-1].es_poligono = true; areas[nAreas-1].vertices = new ofPoint[MAX_VERTEX]; areas[nAreas-1].v_triang.assign(MAX_VERTEX,ofPoint()); areas[nAreas-1].triangulo.reserve(MAX_VERTEX); } else { creando_poli = false; cout<<"Poligono terminado"<<endl; areas[nAreas-1].calcular_textura(); } } //MOVER if(key=='m') { modificar=(modificar==1)?0:1; if(modificar) modificar_area = false; cout<<"Modificar = "<<modificar<<endl; } //MOVER AREA ORIGINAL if(key=='M') { modificar_area=(modificar_area==1)?0:1; if(modificar_area) modificar = false; cout<<"Modificar Area = "<<modificar_area<<endl; } //BORRAR if(key=='b') { borrar = true; modificar = false; modificar_area = false; cout<<"Borrar = 1"<<endl; } //CARGAR VIDEO if(key=='v') { cout<<"Cargando video"<<endl; ofFileDialogResult cargar_video = ofSystemLoadDialog("Seleccione un video o imagen para mascarear"); if(cargar_video.bSuccess) { videos[nVideos] = video(nVideos,ofPoint(0,(nVideos+1)*HEIGHT),ofPoint(WIDTH,HEIGHT),cargar_video.getPath()); nVideos++; cout<<"Video cargado: "<<cargar_video.getName()<<endl; } } //CARGAR ARCHIVO DE PUNTOS if(key=='l') { cout<<"Cargando Archivo de texto."<<endl; ofFileDialogResult cargar_areas = ofSystemLoadDialog("Seleccione un archivo de texto"); if(cargar_areas.bSuccess) { texto.open(ofToDataPath(cargar_areas.getPath()),ofFile::ReadWrite); char* temp = new char[8]; texto.getline(temp,3); string temp1 = temp; int 
tempAreas = ofToInt(temp1); if(nAreas+tempAreas<MAX_AREAS) { cout<<"Areas por agregar: "<<tempAreas-nAreas<<endl; for(int i=nAreas;i<nAreas+tempAreas;i++) { cout<<"i ="<<i<<endl; char * vert = new char[3]; texto.getline(vert,3,' '); string temp2 = vert; int vertices = ofToInt(temp2); cout<<"#Vertices = "<<vertices<<" Final del archivo = "<<texto.eof()<<endl; areas[i].es_poligono = true; areas[i].vertices = new ofPoint[vertices]; areas[i].v_triang.assign(vertices,ofPoint()); areas[i].triangulo.reserve(vertices); for(int j=0;j<vertices;j++) { texto.getline(temp,5,' '); string valor = temp; int x = ofToInt(valor); texto.getline(temp,5,' '); valor = temp; int y = ofToInt(valor); areas[i].caprutar_poli(x,y); } areas[i].calcular_textura(); delete vert; } nAreas+=tempAreas; } delete temp; } } if(key=='s') { ofFileDialogResult guardar = ofSystemSaveDialog("Guardar...","Guardar..."); texto.open(ofToDataPath(guardar.getPath()),ofFile::Mode::WriteOnly); if(texto.exists()) cout<<"Archivo creado = "<<texto.create()<<endl; texto<<nAreas<<endl; for(int i=0;i<nAreas;i++) { texto<<areas[i].vCapturados<<" "; for(int j=0;j<areas[i].vCapturados;j++) texto<<areas[i].vertices[j].x<<" "<<areas[i].vertices[j].y<<" "; } texto.close(); } }
// Persist an OpenCV matrix under the key "Mat" to a YAML/XML FileStorage
// file located in the app's data folder.
void saveMat(Mat mat, string filename) {
	const string absolutePath = ofToDataPath(filename);
	FileStorage storage(absolutePath, FileStorage::WRITE);
	storage << "Mat" << mat;
}
//--------------------------------------------------------------
// Worker thread for the VHX OAuth flow. Behavior depends on `mode`:
//   REQUEST_TOKEN / REFRESH_TOKEN — POST to /oauth/token and persist tokens.
//   REQUEST_CODE  — POST to /oauth/codes to obtain an activation code.
//   LINK_CODE     — open the activation page in a browser, then poll the code
//                   endpoint until it yields tokens (or parsing fails).
//   VERIFY_PACKAGE — GET /me and inspect the embedded packages to decide
//                   PURCHASE / RENTAL / EXPIRED / INACTIVE state.
// Results are reported through completeArgs and bNotifyComplete.
void CloudsVHXAuth::threadedFunction()
{
    completeArgs.success = false;
    completeArgs.result = "";

    if (mode == REQUEST_TOKEN || mode == REFRESH_TOKEN) {
        _ssl.setup();
        // Pin the CA bundle shipped with the app data.
        _ssl.setOpt(CURLOPT_CAINFO, ofToDataPath(GetCloudsDataPath(true) + "vhx/cacert.pem"));
        _ssl.setURL("https://api.vhx.tv/oauth/token");
        if (mode == REQUEST_TOKEN) {
            _ssl.addFormField("client_id", _clientId);
            _ssl.addFormField("client_secret", _clientSecret);
            _ssl.addFormField("grant_type", "client_credentials");
        }
        else {
            _ssl.addFormField("refresh_token", _refreshToken);
            _ssl.addFormField("grant_type", "refresh_token");
        }
        _ssl.perform();

        string response = _ssl.getResponseBody();
        ofLogVerbose("CloudsVHXAuth::threadedFunction") << "Response:" << endl << response;

        completeArgs.success = false;

        ofxJSONElement json;
        if (json.parse(response)) {
            if (json.isMember("access_token")) {
                _accessToken = json["access_token"].asString();
                _refreshToken = json["refresh_token"].asString();
                // expires_in is relative seconds; store absolute expiry time.
                _tokenExpiry = (ofGetSystemTime() / 1000.f) + json["expires_in"].asFloat();

                // Save the tokens to disk.
                CloudsCryptoSaveTokens(_accessToken, _refreshToken, _tokenExpiry, _tokensPath);

                completeArgs.success = true;
                completeArgs.result = _accessToken;
            }
            else {
                ofLogError("CloudsVHXAuth::threadedFunction") << "Unexpected JSON format:" << endl << response;
            }
        }
        else {
            ofLogError("CloudsVHXAuth::threadedFunction") << "Unable to parse JSON:" << endl << response;
        }

        _ssl.clear();
        bNotifyComplete = true;
    }
    else if (mode == REQUEST_CODE) {
        _ssl.setup();
        _ssl.setOpt(CURLOPT_CAINFO, ofToDataPath(GetCloudsDataPath(true) + "vhx/cacert.pem"));
        _ssl.setURL("https://api.vhx.tv/oauth/codes");
        _ssl.addFormField("client_id", _clientId);
        _ssl.addFormField("client_secret", _clientSecret);
        _ssl.perform();

        string response = _ssl.getResponseBody();
        ofLogVerbose("CloudsVHXAuth::threadedFunction") << "Response:" << endl << response;

        completeArgs.success = false;

        ofxJSONElement json;
        if (json.parse(response)) {
            if (json.isMember("code")) {
                _code = json["code"].asString();
                _codeExpiry = (ofGetSystemTime() / 1000.f) + json["expires_in"].asFloat();

                completeArgs.success = true;
                completeArgs.result = _code;
            }
            else {
                ofLogError("CloudsVHXAuth::threadedFunction") << "Unexpected JSON format:" << endl << response;
            }
        }
        else {
            ofLogError("CloudsVHXAuth::threadedFunction") << "Unable to parse JSON:" << endl << response;
        }

        _ssl.clear();
        bNotifyComplete = true;
    }
    else if (mode == LINK_CODE) {
        // Send the user to the activation page with our code...
        stringstream ss;
        ss << "http://www.vhx.tv/activate/clouds";
        ss << "?client_id=" << _clientId;
        ss << "&code=" << _code;
        ofLaunchBrowser(ss.str());

        // ...then poll the code endpoint until it is linked to an account.
        ss.str("");
        ss << "https://api.vhx.tv/oauth/codes/" << _code;
        ss << "?client_id=" << _clientId;
        ss << "&client_secret=" << _clientSecret;

        completeArgs.success = false;

        bool bWaitForLink = true;
        while (bWaitForLink && isThreadRunning()) {
            _ssl.setup();
            _ssl.setOpt(CURLOPT_CAINFO, ofToDataPath(GetCloudsDataPath(true) + "vhx/cacert.pem"));
            _ssl.setURL(ss.str());
            _ssl.perform();

            string response = _ssl.getResponseBody();
            ofLogVerbose("CloudsVHXAuth::threadedFunction") << "Response:" << endl << response;

            ofxJSONElement json;
            if (json.parse(response)) {
                if (json.isMember("access_token")) {
                    _accessToken = json["access_token"].asString();
                    _refreshToken = json["refresh_token"].asString();
                    _tokenExpiry = (ofGetSystemTime() / 1000.f) + json["expires_in"].asFloat();

                    // Save the tokens to disk.
                    CloudsCryptoSaveTokens(_accessToken, _refreshToken, _tokenExpiry, _tokensPath);

                    completeArgs.success = true;
                    completeArgs.result = _accessToken;

                    bWaitForLink = false;
                }
                else {
                    // Not linked yet — keep polling.
                    ofLogVerbose("CloudsVHXAuth::threadedFunction") << "Unexpected JSON result:" << endl << response;
                }
            }
            else {
                ofLogError("CloudsVHXAuth::threadedFunction") << "Unable to parse JSON:" << endl << response;
                bWaitForLink = false;
            }

            _ssl.clear();
            ofSleepMillis(500);
        }

        bNotifyComplete = true;
    }
    else if (mode == VERIFY_PACKAGE) {
        stringstream ss;
        ss << "Authorization: Bearer " << _accessToken;

        _ssl.setup();
        _ssl.setOpt(CURLOPT_CAINFO, ofToDataPath(GetCloudsDataPath(true) + "vhx/cacert.pem"));
        _ssl.setURL("https://api.vhx.tv/me");
        _ssl.addHeader(ss.str());
        _ssl.perform();

        string response = _ssl.getResponseBody();
        ofLogVerbose("CloudsVHXAuth::threadedFunction") << "Response:" << endl << response;

        cout << "RESPONSE " << response << endl;

        completeArgs.success = false;

        ofxJSONElement json;
        if (json.parse(response)) {
            if (json.isMember("_embedded")) {
                const ofxJSONElement& embedded = json["_embedded"];
                if (embedded.isMember("packages")) {
                    const ofxJSONElement& packages = embedded["packages"];
                    if(packages.size() > 0){
                        for (int i = 0; i < packages.size(); ++i) {
                            const ofxJSONElement& element = packages[i];
                            if (element.isMember("id")) {
                                string packageId = element["id"].asString();
                                if (packageId == _packageId || packageId == "8102") { //hack for rental
                                    // Found matching package, check that purchase is valid.
                                    if (element.isMember("purchase_type")) {
                                        string purchaseType = element["purchase_type"].asString();
                                        if (purchaseType == "purchase") {
                                            state = PURCHASE;
                                            completeArgs.success = true;
                                            completeArgs.result = "purchase";
                                            break;
                                        }
                                        else if (purchaseType == "rental") {
                                            if (element.isMember("expires_at")) {
                                                if (element["expires_at"].isNull()) {
                                                    // No expiry, assume we're good.
                                                    state = RENTAL;
                                                    completeArgs.success = true;
                                                    completeArgs.result = "rental";
                                                    break;
                                                }
                                                else {
                                                    // Parse the expiry date.
                                                    Poco::DateTime dt;
                                                    int tzd;
                                                    if (Poco::DateTimeParser::tryParse(element["expires_at"].asString(), dt, tzd)) {
                                                        Poco::LocalDateTime ldt(tzd, dt);
                                                        Poco::Timestamp expiryTime = ldt.timestamp();
                                                        Poco::Timestamp nowTime;
                                                        // Make sure the rental is valid.
                                                        if (nowTime < expiryTime) {
                                                            // Expires in the future, we're good.
                                                            _packageExpiry = expiryTime.epochTime();
                                                            state = RENTAL;
                                                            completeArgs.success = true;
                                                            completeArgs.result = "rental";
                                                            break;
                                                        }
                                                        else {
                                                            // Expired, no good.
                                                            state = EXPIRED;
                                                            completeArgs.success = true;
                                                            completeArgs.result = "expired";
                                                            continue; //check other packages
                                                        }
                                                    }
                                                    else {
                                                        // Could not parse expiry, assume no good.
                                                        state = EXPIRED;
                                                        completeArgs.success = true;
                                                        completeArgs.result = "expired";
                                                        continue; //check the other packages
                                                    }
                                                }
                                            }
                                            else {
                                                // No expiry, assume we're good.
                                                state = RENTAL;
                                                completeArgs.success = true;
                                                completeArgs.result = "rental";
                                                break;
                                            }
                                        }
                                        else {
                                            // Not a rental or purchase, assume no good.
                                            state = INACTIVE;
                                            completeArgs.success = true;
                                            completeArgs.result = "inactive";
                                            continue; //check the other packages
                                        }
                                    }
                                    else {
                                        ofLogError("CloudsVHXAuth::threadedFunction") << "Unexpected JSON result, 'purchase_type' not found:" << endl << response;
                                    }
                                }
                                else {
                                    ofLogNotice("CloudsVHXAuth::threadedFunction") << "Skipping package " << packageId;
                                }
                            }
                            else {
                                ofLogError("CloudsVHXAuth::threadedFunction") << "Unexpected JSON result, 'id' not found:" << endl << response;
                            }
                        }
                    }
                    else {
                        // No packages at all — account has nothing active.
                        state = INACTIVE;
                        completeArgs.success = true;
                        completeArgs.result = "inactive";
                    }
                }
                else {
                    ofLogError("CloudsVHXAuth::threadedFunction") << "Unexpected JSON result, 'packages' not found:" << endl << response;
                }
            }
            else {
                ofLogError("CloudsVHXAuth::threadedFunction") << "Unexpected JSON result, '_embedded' not found:" << endl << response;
            }
        }
        else {
            ofLogError("CloudsVHXAuth::threadedFunction") << "Unable to parse JSON:" << endl << response;
        }

        _ssl.clear();
        bNotifyComplete = true;
    }
    else {
        ofLogError("CloudsVHXAuth::threadedFunction") << "Mode " << mode << " is unrecongized!";
    }
}
// Scan a corpus directory: read the existing .fdb index (if exactly one is
// present), compare the indexed images against the .jpg files on disk,
// re-import previously analyzed images (regions, histograms) from their
// sidecar .txt files, analyze any new images, and write an updated out.fdb.
void imageCorpusAnalysisApp::analyzeDirectory(string dirName) {
	ofDirectory DIR;
	string fdpPath, fp;
	totalNumImages = 0;
	totalNumBlobs = 0;
	totalNumImagesToBeAnalyzed = 0;
	currentImageNum = 0;

	// scan for fdb file
	DIR.allowExt("fdb");
	int numFDBFiles = DIR.listDir(dirName);
	cout << "# of .fdb files found (should be 1): " << numFDBFiles << endl;

	if (numFDBFiles == 1) {
		fdpPath = DIR.getPath(0);
		cout << ".fdb file path: " << fdpPath << "\n";

		ifstream ifile;
		ifile.open(ofToDataPath(fdpPath).c_str(), ios::in); // ofToDataPath ???
		string readin;
		if (ifile.is_open()) {
			// Header: basename, image count, blob count, threshold — one per line.
			getline(ifile, readin);
			string basename = readin;	// don't use yet
			getline(ifile, readin);
			int numImages = (int)atoi(readin.c_str());	// used only for scanning array
			getline(ifile, readin);
			int numBlobs = (int)atoi(readin.c_str());	// don't use
			getline(ifile, readin);
			int threshold = (int)atoi(readin.c_str());	// this had better be right!
			cout << "Initial conditions (from .fdb file): " << numImages << "|" << numBlobs << "|" << threshold << "\n";

			// Per-image records: index, blob count, then the file base name.
			for (int i=0; i<numImages; i++) {
				getline(ifile, readin, ' ');
				int bindex = (int)atoi(readin.c_str());	// blob index DUMMY!!!!!
				// getline(ifile, readin, ' ');
				// int bthresh = (int)atoi(readin.c_str()); // threshold (*for now, not used)
				getline(ifile, readin, ' ');
				int blobs = (int)atoi(readin.c_str());	// # just for this file
				getline(ifile, readin);
				stringstream ss;
				string blobPng, blobTxt;
				string bstring = readin.c_str();
				// cout << "|" << bstring << "|\n";
				// vector<int> blobvec;
				// Build (but currently only log) the sidecar names per blob.
				for (int b=0; b<blobs; b++) {
					ss << bstring << "_" << b << ".png";
					// cout << "SS: " << ss.str() << "\n";
					blobPng = ss.str();
					ss.clear();
					ss.str(std::string());
					ss << bstring << "_" << b << ".txt";
					blobTxt = ss.str();
					ss.clear();
					ss.str(std::string());
					// cout << "blobPng/blobTxt: " << blobPng << " | " << blobTxt << "\n";
				}
				// cout << "inserting: " << totalNumImages << "|" << bstring << "\n";
				mappedFileNames.insert(pair<int, string>(totalNumImages, bstring));
				mappedThresholds.insert(pair<int, int>(totalNumImages, threshold));
				// mappedBlobIDs.insert(pair<int, vector<int> >(totalNumImages, blobvec));
				totalNumImages++;
				totalNumBlobs = numBlobs;
			}
			cout << "After scanning .fdb file...\n";
			cout << "  *total # of images: " << totalNumImages << "\n";
			cout << "  *total # of blobs: " << totalNumBlobs << "\n";
			cout << "  *total # of images TO BE ANALYZED: " << totalNumImagesToBeAnalyzed << "\n";
		}
		ifile.close();
	}

	// Compare the .jpg files on disk against the indexed file names; any jpg
	// not already indexed is queued for analysis.
	string jpegFileName;
	ofDirectory DIR2;
	DIR2.allowExt("jpg");
	int numJPGFiles = DIR2.listDir(dirName);
	cout << "# of JPEG files to import: " << numJPGFiles << "\n";
	for (int j=0; j<numJPGFiles; j++) {
		jpegFileName = ofFilePath::removeExt(DIR2.getName(j));
		// cout << "::: " << jpegFileName << endl;
		bool matchFlag = false;
		map<int, string>::iterator itz;
		for (itz = mappedFileNames.begin(); itz != mappedFileNames.end(); itz++) {
			// cout << "compare: " << (*itz).second << "|" << jpegFileName << "\n";
			if ((*itz).second == jpegFileName) {
				// we have a match : do not add to be analyzed
				matchFlag = true;
				break;
			}
		}
		cout << "broke! ... " << (int)matchFlag << "\n";
		if (matchFlag == 0) {
			cout << "***total num images: " << totalNumImages << " + j: " << j << "\n";
			cout << "add to analyze: " << (totalNumImages + j) << ", " << jpegFileName << "\n";
			toBeAnalyzedFileNames.insert(pair<int, string>((totalNumImages + totalNumImagesToBeAnalyzed), jpegFileName));
			totalNumImagesToBeAnalyzed++;
		}
	}
	cout << "After scanning directory for .jpg files...\n";
	cout << "  * total # of images: " << totalNumImages << "\n";	// TO BE MAPPED (NOT ANALYZED)
	cout << "  * total # of blobs: " << totalNumBlobs << "\n";
	cout << "  * # of images YET TO BE ANALYZED: " << totalNumImagesToBeAnalyzed << "\n";
	cout << "  ** mapped file names map contains:\n";
	map<int, string>::iterator it;
	for (it = mappedFileNames.begin(); it != mappedFileNames.end(); it++) {
		// cout << (*it).first << "|" << (*it).second << "\n";
		currentImageNum++;
	}
	// cout << "current image #: " << currentImageNum << "\n";

	// setup out.fdb
	ofFile ofile;
	ofile.open("random/1/out.fdb", ofFile::WriteOnly);

	cout << "\n\n\nREPORT: \n\n";

	// iterate over mapped and pull in metadata/pngs
	cout << "mapped:\n";
	if (mappedFileNames.begin() == mappedFileNames.end()) {
		cout << "\n no mapped files to import.\n";
	} else {
		// Re-read the .fdb body and rebuild an ImageMap per indexed image.
		// for (it = mappedFileNames.begin(); it != mappedFileNames.end(); it++) {
		// cout << "map w/o analyzing: " << (*it).first << "|" << (*it).second << "\n";
		ifstream ifile;
		string fullPath, readin;
		// stringstream ss;
		// ss << "random/" << mappedFileNames[i] << ".jpg";
		// cout << "**************\n\n" << ss.str() << "\n\n";
		ifile.open(ofToDataPath(fdpPath).c_str(), ios::in);
		if ((ifile.is_open()) && (ofile.is_open())) {
			// NOTE(review): this second pass reads per-image records from the
			// top of the file without skipping the 4 header lines — confirm
			// the offsets line up with the first pass.
			for (int i=0; i<totalNumImages; i++) {
				ImageMap im;
				im.setup(gStandardThreshold);
				im.prefab = true;	// imported from disk, not freshly analyzed
				stringstream oss;
				getline(ifile, readin, ' ');
				int imnum = atoi(readin.c_str());
				cout << " i|imnum: " << i << "|" << imnum << " ... ";
				im.imageID = imnum;
				getline(ifile, readin, ' ');
				im.numberOfRegions = atoi(readin.c_str());
				getline(ifile, readin);
				im.imageFileBase = readin;
				// cout << imnum << "|" << im.numberOfRegions << "||" << basename << "\n";
				for (int reg=0; reg<im.numberOfRegions; reg++) {
					// get the .txt files' + the .png files' names
					stringstream sst, ssp, ssmd;
					sst << dirName << im.imageFileBase << "_" << reg << ".txt";
					ssp << dirName << im.imageFileBase << "_" << reg << ".png";
					ofFile mdfile;
					mdfile.open(sst.str());
					if ((im.numberOfRegions > 0) && (mdfile.is_open())) {
						// Region metadata layout: name, index, "w h", "x y",
						// area, ratio, "cx cy", rgbmax, then 64 histogram bins.
						string name;
						int regionIndex;
						CvPoint anchor;
						CvSize size;
						float area, ratio;
						CvPoint centroid;
						int rgbmax;
						getline(mdfile, readin);
						name = readin;	// DUMMY!
						getline(mdfile, readin);
						regionIndex = atoi(readin.c_str());
						cout << "ri: " << regionIndex << "\n";
						getline(mdfile, readin, ' ');
						size.width = atoi(readin.c_str());
						getline(mdfile, readin);
						size.height = atoi(readin.c_str());
						getline(mdfile, readin, ' ');
						anchor.x = atoi(readin.c_str());
						getline(mdfile, readin);
						anchor.y = atoi(readin.c_str());
						getline(mdfile, readin);
						area = atof(readin.c_str());
						getline(mdfile, readin);
						ratio = atof(readin.c_str());
						getline(mdfile, readin, ' ');
						centroid.x = atof(readin.c_str());
						getline(mdfile, readin);
						centroid.y = atof(readin.c_str());
						getline(mdfile, readin);
						rgbmax = atoi(readin.c_str());
						im.constructPrefabRegion(name, regionIndex, anchor, size, area, ratio, centroid, rgbmax);
						im.mappedRegions[regionIndex].regionHistogram.setup();
						int vals[64];
						int* vptr;
						vptr = vals;
						for (int h=0; h<64; h++) {	// N_REGIONS!
							// getline(mdfile, readin, ' ');
							// int rd = atoi(readin.c_str());
							// getline(mdfile, readin, ' ');
							// int gn = atoi(readin.c_str());
							// getline(mdfile, readin);
							// int bl = atoi(readin.c_str());
							getline(mdfile, readin, ' ');
							vals[h] = atoi(readin.c_str());
							// im.mappedRegions[regionIndex].regionHistogram.injectHistogram(h, rd, gn, bl);
						}
						im.mappedRegions[regionIndex].regionHistogram.injectHistogram(vptr);
						// int w = im.mappedRegions[regionIndex].size.width*2;
						// int h = im.mappedRegions[regionIndex].size.height*2;
						// cout << w << "|" << h << "\n";
						// cout << ssp.str() << "\n";
						// cout << ssp.str() << "\n";
						// ofImage image;
						// image.loadImage(ssp.str());
						//
						// // cout << image.width << "\n\n";
						//
						// im.mappedRegions[regionIndex].regionImage.allocate(image.width, image.height);
						// im.mappedRegions[regionIndex].regionImage.setFromPixels(image.getPixels(), image.width, image.height);
						// cout << "and now the histogram:\n" << im.mappedRegions[regionIndex].regionHistogram.writeOutHistogram();
					} else {
						cout << "no regions for this file!!!\nor " << sst.str() << " is missing...\n";
					}
				}
				// cout << "inserting imagemap at: " << i << "\n";
				mappedImages.insert(pair<int, ImageMap>(i, im));
				// map one line of out.fdb
				oss << im.imageID << " " << im.numberOfRegions << " " << im.imageFileBase << "\n";
				ofile << oss.str();
				oss.flush();
			}
		} else {
			cout << "ERROR: COULD NOT LOAD FILE!\n";
		}
	}
	currentImageNum = totalNumImages;

	// iterate over toBeAnalyzed and perform analysis
	// - write txt and png files!
	// cout << "\nto be Analyzed:\n";
	// cout << "curr. image: " << currentImageNum << "\n";
	for (it = toBeAnalyzedFileNames.begin(); it != toBeAnalyzedFileNames.end(); it++) {
		stringstream ss, oss;
		ss << "random/1/" << (*it).second << ".jpg";
		// cout << "analyzing: " << (*it).first << " | " << ss.str() << "\n";
		ImageMap newIM;
		// mappedImages.insert(pair<int, ImageMap>(currentImageNum, newIM));
		newIM.setup(gStandardThreshold);
		newIM.setupDownsampleThresholdAnalyze( (int)((*it).first), (string)(ss.str()) );
		// cout << "#: " << newIM.numberOfRegions << "\n";
		for (int r=0; r<newIM.numberOfRegions; r++) {
			// cout << "r: " << r << endl;
			newIM.findContourMask(r, true);
			newIM.writeAnalysis(r);
			totalNumBlobs++;
		}
		// map one line of out.fdb
		oss.flush();
		oss << newIM.imageID << " " << newIM.numberOfRegions << " " << newIM.imageFileBase << "\n";
		mappedImages.insert(pair<int, ImageMap>(currentImageNum, newIM));
		totalNumImages++;
		currentImageNum++;
		ofile << oss.str();
		oss.flush();
	}

	// Trailer: path line plus updated totals.
	stringstream oss;
	oss << "random/1/" << "\n";
	cout << "TOTALS:\n" << totalNumImages << "\n" << totalNumBlobs << "\n" << gStandardThreshold << "\n";
	oss << totalNumImages << "\n" << totalNumBlobs << "\n" << gStandardThreshold << "\n";
	ofile << oss.str();
	ofile.close();
}
//--------------------------------------------------------------
// Register an additional directory for Pd to search when resolving
// patches/abstractions; the path is resolved against the data folder.
void ofxPd::addSearchPath( string path ) {
	string resolved = ofToDataPath( path );
	search_path.push_back( resolved );
}
// Encode a sequence of image files into a GPU video (.gv) file:
// each frame is loaded via WIC, BC7-compressed with DirectXTex,
// LZ4-HC packed, and appended to the output; a block index (offset
// + size per frame) is written at the end of the file.
//
// output_path - destination .gv file
// imagePaths  - source frames; all frames are assumed to have the
//               first image's dimensions (TODO confirm — only frame 0
//               is measured, mismatches abort())
// fps         - playback rate stored in the header
// done_frames - progress counter, incremented once per encoded frame
// hasAlpha    - selects the BC7 alpha weight (1.0 with alpha, 0.0 without)
// dx          - D3D11 device wrapper
//               NOTE(review): the compression call below uses a `dx11`
//               name from enclosing scope, not this parameter — confirm
//               which device is intended.
// yield       - cooperative-coroutine yield, invoked after every frame
inline void images_to_gv(std::string output_path, std::vector<std::string> imagePaths, float fps, int *done_frames, bool hasAlpha, std::shared_ptr<Dx11> dx, ofxCoroutine::Yield &yield) {
	if (imagePaths.empty()) {
		return;
	}

	// memory
	uint32_t _width = 0;
	uint32_t _height = 0;
	float _fps = fps;
	uint32_t _bufferSize = 0;
	std::vector<uint8_t> _lz4CompressBuffer;
	std::vector<Lz4Block> _lz4blocks;
	std::unique_ptr<GpuVideoIO> _io;
	int _index = 0; // NOTE(review): unused in the active path; only the commented-out batch version below uses it

	// Measure the first frame; its dimensions define the whole video.
	int width;
	int height;
	ofPixels img;
	ofLoadImage(img, imagePaths[0]);
	width = img.getWidth();
	height = img.getHeight();
	_width = width;
	_height = height;

	// BC7 stores 16 bytes per 4x4 texel block; round dimensions up.
	int blockcount = ((_width + 3) / 4) * ((_height + 3) / 4);
	int blocksize = 16;
	_bufferSize = blockcount * blocksize;

	// start writing
	_io = std::unique_ptr<GpuVideoIO>(new GpuVideoIO(output_path.c_str(), "wb"));

	// write header info
#define W(v) if(_io->write(&v, sizeof(v)) != sizeof(v)) { assert(0); }
	W(_width);
	W(_height);
	uint32_t frameCount = (uint32_t)imagePaths.size();
	W(frameCount);
	W(_fps);
	uint32_t videoFmt = GPU_COMPRESS_BC7;
	W(videoFmt);
	W(_bufferSize);
#undef W

	// Worst-case LZ4 output size for one frame's BC7 payload.
	int compressBound = LZ4_compressBound(_bufferSize);
	_lz4CompressBuffer.resize(compressBound);
	for (int i = 0; i < imagePaths.size(); ++i) {
		// Load the frame through WIC.
		DirectX::TexMetadata metadata;
		DirectX::ScratchImage image;
		auto imgPath = ofToDataPath(imagePaths[i]);
		HRESULT hr = DirectX::LoadFromWICFile(to_wstring(imgPath).c_str(), 0, &metadata, image);
		if (FAILED(hr)) {
			abort();
		}

		// GPU BC7 compression, sRGB in/out, parallel.
		DWORD flags = DirectX::TEX_COMPRESS_DEFAULT;
		flags |= DirectX::TEX_COMPRESS_PARALLEL;
		flags |= DirectX::TEX_COMPRESS_BC7_USE_3SUBSETS;
		flags |= DirectX::TEX_COMPRESS_UNIFORM;
		flags |= DirectX::TEX_COMPRESS_SRGB_IN;
		flags |= DirectX::TEX_COMPRESS_SRGB_OUT;
		float alphaWeight = hasAlpha ? 1.0f : 0.0f;
		DirectX::ScratchImage cImage;
		hr = DirectX::Compress(dx11->device(), *image.GetImage(0, 0, 0), DXGI_FORMAT_BC7_UNORM_SRGB, flags, alphaWeight, cImage);
		// NOTE(review): hr is not checked here — a failed Compress is only
		// caught indirectly by the size check below.
		int src = cImage.GetPixelsSize(); // NOTE(review): unused
		if (_bufferSize != cImage.GetPixelsSize()) {
			// Frame size differs from frame 0 — unsupported.
			abort();
		}

		// LZ4-HC pack the BC7 payload.
		int compressed = LZ4_compress_HC((char *)cImage.GetPixels(), (char *)_lz4CompressBuffer.data(), _bufferSize, compressBound, LZ4HC_CLEVEL_MAX);

		// record the address (file offset) while we go
		uint64_t head = _lz4blocks.empty() ? kRawMemoryAt : (_lz4blocks[_lz4blocks.size() - 1].address + _lz4blocks[_lz4blocks.size() - 1].size);
		Lz4Block lz4block;
		lz4block.address = head;
		lz4block.size = compressed;
		_lz4blocks.push_back(lz4block);

		// write the compressed frame
		if (_io->write(_lz4CompressBuffer.data(), compressed) != compressed) {
			assert(0);
		}

		(*done_frames)++;
		yield();
	}

	// finally record the address index
	uint64_t size = _lz4blocks.size() * sizeof(Lz4Block);
	if (_io->write(_lz4blocks.data(), size) != size) {
		abort();
	}

	// close the file
	_io.reset();

	// Previous batched (TBB + squish) implementation, kept for reference:
	//for (;;) {
	//	if (_index < imagePaths.size()) {
	//		auto compress = [imagePaths, _width, _height, _squishFlag](int index, uint8_t *dst) {
	//			std::string src = imagePaths[index];
	//			ofPixels img;
	//			ofLoadImage(img, src);
	//			img.setImageType(OF_IMAGE_COLOR_ALPHA);
	//			squish::CompressImage(img.getData(), _width, _height, dst, _squishFlag);
	//		};
	//		const int kBatchCount = 32;
	//		int workCount = std::min((int)imagePaths.size() - _index, kBatchCount);
	//		uint32_t lz4sizes[kBatchCount];
	//		int compressBound = LZ4_compressBound(_bufferSize);
	//		_gpuCompressBuffer.resize(workCount * _bufferSize);
	//		_lz4CompressBuffer.resize(workCount * compressBound);
	//		tbb::parallel_for(tbb::blocked_range<int>(0, workCount, 1), [compress, _index, _bufferSize, compressBound, &lz4sizes, &_gpuCompressBuffer, &_lz4CompressBuffer, &done_frames](const tbb::blocked_range< int >& range) {
	//			for (int i = range.begin(); i != range.end(); i++) {
	//				compress(_index + i, _gpuCompressBuffer.data() + i * _bufferSize);
	//				lz4sizes[i] = LZ4_compress_HC((char *)_gpuCompressBuffer.data() + i * _bufferSize,
	//					(char *)_lz4CompressBuffer.data() + i * compressBound,
	//					_bufferSize, compressBound, 16);
	//				done_frames++;
	//			}
	//		});
	//		uint64_t head = _lz4blocks.empty() ? kRawMemoryAt : (_lz4blocks[_lz4blocks.size() - 1].address + _lz4blocks[_lz4blocks.size() - 1].size);
	//		for (int i = 0; i < workCount; i++) {
	//			// record the address
	//			Lz4Block lz4block;
	//			lz4block.address = head;
	//			lz4block.size = lz4sizes[i];
	//			head += lz4block.size;
	//			_lz4blocks.push_back(lz4block);
	//			// write
	//			if (_io->write(_lz4CompressBuffer.data() + i * compressBound, lz4sizes[i]) != lz4sizes[i]) {
	//				assert(0);
	//			}
	//		}
	//		_index += workCount;
	//		// forced bail-out
	//		if (interrupt) {
	//			_io.reset();
	//			::remove(output_path.c_str());
	//			break;
	//		}
	//	}
	//	else {
	//		// finally record the address index
	//		uint64_t size = _lz4blocks.size() * sizeof(Lz4Block);
	//		if (_io->write(_lz4blocks.data(), size) != size) {
	//			assert(0);
	//		}
	//		// close the file
	//		_io.reset();
	//		// done
	//		break;
	//	}
	//}
}
//-------------------------------------------------------------- bool ofxEditorSyntax::loadFile(const string& xmlFile) { string path = ofToDataPath(xmlFile); ofXml xml; if(!xml.load(path)) { ofLogError("ofxEditorSyntax") << "couldn't load \"" << ofFilePath::getFileName(xmlFile) << "\""; return false; } xml.setToParent(); if(!xml.exists("syntax")) { ofLogWarning("ofxEditorSyntax") << "root xml tag not \"syntax\", ignoring"; return false; } xml.setTo("syntax"); int numTags = xml.getNumChildren(); clear(); for(int i = 0; i < numTags; ++i) { xml.setToChild(i); if(xml.getName() == "lang") {setLang(xml.getValue());} else if(xml.getName() == "files") { int numExts = xml.getNumChildren(); for(int e = 0; e < numExts; ++e) { xml.setToChild(e); if(xml.getName() == "ext") {addFileExt(xml.getValue());} else { ofLogWarning("ofxEditorSyntax") << "ignoring unknown files xml tag \"" << xml.getName() << "\""; } xml.setToParent(); } } else if(xml.getName() == "singlecomment") {singleLineComment = string_to_wstring(xml.getValue());} else if(xml.getName() == "multicomment") { if(xml.exists("begin")) {multiLineCommentBegin = string_to_wstring(xml.getValue("begin"));} if(xml.exists("end")) {multiLineCommentBegin = string_to_wstring(xml.getValue("end"));} } else if(xml.getName() == "preprocessor") {preprocessor = string_to_wstring(xml.getValue());} else if(xml.getName() == "hexliteral") { string b = xml.getValue(); if(b == "true") {setHexLiteral(true);} else if(b == "false") {setHexLiteral(false);} else { ofLogWarning("ofxEditorSyntax") << "ignoring unknown xml bool string \"" << b << "\""; } } else if(xml.getName() == "operator") {operatorChars = string_to_wstring(xml.getValue());} else if(xml.getName() == "punctuation") {punctuationChars = string_to_wstring(xml.getValue());} else if(xml.getName() == "words") { int numWords = xml.getNumChildren(); for(int w = 0; w < numWords; ++w) { xml.setToChild(w); if(xml.getName() == "keyword") {setWord(xml.getValue(), KEYWORD);} else 
if(xml.getName() == "typename") {setWord(xml.getValue(), TYPENAME);} else if(xml.getName() == "function") {setWord(xml.getValue(), FUNCTION);} else { ofLogWarning("ofxEditorSyntax") << "ignoring unknown words xml tag \"" << xml.getName() << "\""; } xml.setToParent(); } } else { ofLogWarning("ofxEditorSyntax") << "ignoring unknown xml tag \"" << xml.getName() << "\""; } xml.setToParent(); } xml.clear(); return true; }
//---------------------------------------------------- bool ofImage::loadImageIntoPixels(string fileName, ofPixels &pix){ int width, height, bpp; fileName = ofToDataPath(fileName); bool bLoaded = false; FIBITMAP * bmp = NULL; FREE_IMAGE_FORMAT fif = FIF_UNKNOWN; fif = FreeImage_GetFileType(fileName.c_str(), 0); if(fif == FIF_UNKNOWN) { // or guess via filename fif = FreeImage_GetFIFFromFilename(fileName.c_str()); } if((fif != FIF_UNKNOWN) && FreeImage_FIFSupportsReading(fif)) { bmp = FreeImage_Load(fif, fileName.c_str(), 0); bLoaded = true; if (bmp == NULL){ bLoaded = false; } } //----------------------------- if (bLoaded ){ width = FreeImage_GetWidth(bmp); height = FreeImage_GetHeight(bmp); bpp = FreeImage_GetBPP(bmp); bool bPallette = (FreeImage_GetColorType(bmp) == FIC_PALETTE); switch (bpp){ case 8: if (bPallette) { FIBITMAP * bmpTemp = FreeImage_ConvertTo24Bits(bmp); if (bmp != NULL) FreeImage_Unload(bmp); bmp = bmpTemp; bpp = FreeImage_GetBPP(bmp); } else { // do nothing we are grayscale } break; case 24: // do nothing we are color break; case 32: // do nothing we are colorAlpha break; default: FIBITMAP * bmpTemp = FreeImage_ConvertTo24Bits(bmp); if (bmp != NULL) FreeImage_Unload(bmp); bmp = bmpTemp; bpp = FreeImage_GetBPP(bmp); } int byteCount = bpp / 8; //------------------------------------------ // call the allocation routine (which checks if really need to allocate) here: allocatePixels(pix, width, height, bpp); FreeImage_ConvertToRawBits(pix.pixels, bmp, width*byteCount, bpp, FI_RGBA_RED_MASK, FI_RGBA_GREEN_MASK, FI_RGBA_BLUE_MASK, true); // get bits //------------------------------------------ // RGB or RGBA swap // this can be done with some ill pointer math. // anyone game? // #ifdef TARGET_LITTLE_ENDIAN if (byteCount != 1) swapRgb(pix); #endif //------------------------------------------ } else { width = height = bpp = 0; } if (bmp != NULL){ FreeImage_Unload(bmp); } return bLoaded; }
// Load an image into pix. The name may be a plain file path or a URI:
// http(s) URIs are fetched with ofLoadURL, anything else is resolved
// against the data path and decoded with FreeImage. Returns true on
// success. (The enclosing template header declaring PixelType precedes
// this definition.)
static bool loadImage(ofPixels_<PixelType> & pix, const std::filesystem::path& _fileName, const ofImageLoadSettings& settings){
	ofInitFreeImage();

	// First try to parse the name as a URI; if that fails, convert the
	// platform filename to a file URI and parse again.
	auto uriStr = _fileName.string();
	UriUriA uri;
	UriParserStateA state;
	state.uri = &uri;
	if(uriParseUriA(&state, uriStr.c_str())!=URI_SUCCESS){
		// worst-case output size per the uriparser filename-to-URI docs
		const int bytesNeeded = 8 + 3 * strlen(uriStr.c_str()) + 1;
		std::vector<char> absUri(bytesNeeded);
#ifdef TARGET_WIN32
		uriWindowsFilenameToUriStringA(uriStr.c_str(), absUri.data());
#else
		uriUnixFilenameToUriStringA(uriStr.c_str(), absUri.data());
#endif
		if(uriParseUriA(&state, absUri.data())!=URI_SUCCESS){
			ofLogError("ofImage") << "loadImage(): malformed uri when loading image from uri " << _fileName;
			uriFreeUriMembersA(&uri);
			return false;
		}
	}
	std::string scheme(uri.scheme.first, uri.scheme.afterLast);
	uriFreeUriMembersA(&uri);

	// Remote images are downloaded and decoded from the response body.
	if(scheme == "http" || scheme == "https"){
		return ofLoadImage(pix, ofLoadURL(_fileName.string()).data);
	}

	std::string fileName = ofToDataPath(_fileName, true);
	bool bLoaded = false;
	FIBITMAP * bmp = nullptr;

	FREE_IMAGE_FORMAT fif = FIF_UNKNOWN;
	fif = FreeImage_GetFileType(fileName.c_str(), 0);
	if(fif == FIF_UNKNOWN) {
		// or guess via filename
		fif = FreeImage_GetFIFFromFilename(fileName.c_str());
	}
	if((fif != FIF_UNKNOWN) && FreeImage_FIFSupportsReading(fif)) {
		if(fif == FIF_JPEG) {
			// JPEG decoding honors the load settings (converted to a
			// FreeImage option flag by the helper)
			int option = getJpegOptionFromImageLoadSetting(settings);
			bmp = FreeImage_Load(fif, fileName.c_str(), option);
		} else {
			bmp = FreeImage_Load(fif, fileName.c_str(), 0);
		}

		if (bmp != nullptr){
			bLoaded = true;
		}
	}

	//-----------------------------
	if ( bLoaded ){
		putBmpIntoPixels(bmp,pix);
	}

	if (bmp != nullptr){
		FreeImage_Unload(bmp);
	}

	return bLoaded;
}
//--------------------------------------------------------- bool ofxXmlSettings::saveFile(const string& xmlFile){ string fullXmlFile = ofToDataPath(xmlFile); return doc.SaveFile(fullXmlFile); }
//-----------------------------------------------
// Per-frame update: scheduled reboot, serial trigger input, panel-driven
// light/OSC toggles, and the capture state machine
// (waiting -> fade-in -> capture -> decode).
void captureApp::update(){
	panel.update();

	// Scheduled restart: when enabled, run the reboot script at the
	// configured hour/minute.
	if( panel.getValueB("bRestart") ){
		if( panel.getValueI("restartHour") == ofGetHours() ){
			if( panel.getValueI("restartMinute") == ofGetMinutes() ){
				printf("shutting down now!\n");
				system(ofToDataPath("reboot.sh").c_str());
			}
		}
	}

	// Any serial input while waiting starts the fade-in.
	char data[10];
	memset(data, 0, 10);
	if (serial.available() > 0){
		serial.readBytes((unsigned char*) data, 10);
		if(state == CAP_STATE_WAITING) startFadeIn();
	}

	// Light override toggle from the panel.
	if( panel.hasValueChanged("bOverideLight") ){
		if( panel.getValueB("bOverideLight") ){
			light.lightOn();
		}else{
			light.lightOff();
		}
	}

	// Lazily set up OSC the first time it is enabled.
	bEnableOsc = panel.getValueB("use_osc");
	if( panel.hasValueChanged("use_osc") ){
		if( bEnableOsc && !bOscSetup ){
			setupOsc();
			printf("------------- setting up osc\n");
		}
	}

	// While fading in or capturing, force continuous playback and the
	// spotlight on.
	if( state == CAP_STATE_FADEIN || state == CAP_STATE_CAPTURE ){
		panel.setValueB("frameByFrame", false);
		panel.setValueB("bSpotLight", true);
	}

	//the capture part happens in the camera callbacks at the top.
	//this just checks to make sure that the capture doesn't need to keep running.
	if( state == CAP_STATE_CAPTURE ){
		panel.hidden = true;
		if( ofGetFrameNum() % 4 == 0 ){
			printf("fps is %s\n", ofToString(camFps, 2).c_str());
		}
		if( ofGetElapsedTimef() >= timeToEndCapture ){
			printf("time is %f - time to end is %f\n", ofGetElapsedTimef(), timeToEndCapture);
			endCapture();
		}
	}else if( debugState == CAP_DEBUG ){
		panel.hidden = false;
	}

	// Cursor only visible in debug mode.
	if( debugState == CAP_NORMAL ){
		ofHideCursor();
	}else {
		ofShowCursor();
	}

	// NOTE(review): this repeats the end-of-capture check above; it only
	// fires if endCapture() did not change `state`, but unlike the first
	// check it also updates bNeedsToLeaveFrame — confirm which one is the
	// intended path.
	if( state == CAP_STATE_CAPTURE && ofGetElapsedTimef() >= timeToEndCapture ){
		printf("time is %f - time to end is %f\n", ofGetElapsedTimef(), timeToEndCapture);
		endCapture();
		if( panel.getValueB("B_FACE_TRIGGER") ){
			bNeedsToLeaveFrame = true;
		}else{
			bNeedsToLeaveFrame = false;
		}
	}

	// Fade-in complete -> begin capturing.
	if( state == CAP_STATE_FADEIN && ofGetElapsedTimef() > fadeInStartTime + panel.getValueF("fadeInTime") ){
		startCapture();
	}

	// Either decode, or run the normal projection/camera/face pipeline.
	if( state == CAP_STATE_DECODING ){
		handleDecode();
	}else{
		handleProjection();
		handleCamera();
		handleFaceTrigger();
	}

	panel.clearAllChanged();
}
//------------------------------------------------------------------------ void CvProcessor::setHaarXMLFile( string xmlFile ){ haarFinder.load(ofToDataPath(xmlFile)); }