SDL_Surface* KrEncoder::Load32Surface(	const char* filename,
										Transparent* trans,
										int nTrans,
										gedString* error )
{
	// Load 'filename' via the user-supplied ImageLoader and return it as a
	// 32-bit RGBA surface. Any color named in 'trans' — given directly as
	// RGBA, or sampled from a corner of the image — is forced fully
	// transparent in the result. Returns 0 on failure; when 'error' is
	// non-null it receives a human-readable message.
	//
	// Make sure SetImageLoader has been called!
	GLASSERT( ImageLoader );
	if ( !ImageLoader )
	{
		return 0;
	}

	if ( !filename )
	{
		if ( error )
			*error = "No filename for a surface specified";
		#ifdef DEBUG
		GLOUTPUT( "No filename for a surface specified\n" );
		#endif
		return 0;
	}

	// Try to load the file.
	SDL_Surface* surface = ImageLoader( filename );
	if ( !surface )
	{
		char buf[256];
		// 'filename' is caller-supplied and can be arbitrarily long:
		// use snprintf so it cannot overflow the buffer.
		snprintf( buf, sizeof( buf ), "Failed to load surface '%s'.", filename );
		if ( error )
			*error = buf;
		#ifdef DEBUG
		// BUG FIX: this branch used to print the unrelated
		// "No filename for a surface specified" message.
		GLOUTPUT( "Failed to load surface '%s'.\n", filename );
		#endif
		return 0;
	}

	// The image can be 32 bits or less. A NON-32 bit image has
	// color(s) that are marked transparent. A 32 bit image
	// simply uses the alpha channel. To simplify things,
	// images are converted to 32 bit before they
	// are returned.
	//
	// Oddly, SDL_Image will sometimes return a 32 bit image,
	// when it wasn't. This will *really* screw up kyra.
	if ( surface->format->BytesPerPixel < 4 )
	{
		SDL_Surface* s32 = SDL_CreateRGBSurface(	SDL_SWSURFACE,
													surface->w,
													surface->h,
													32,
													0xff000000,
													0x00ff0000,
													0x0000ff00,
													0x000000ff );
		GLASSERT( s32 );
		if ( !s32 )
		{
			// ROBUSTNESS: previously a failed allocation fell through and
			// dereferenced a null surface (and leaked 'surface').
			SDL_FreeSurface( surface );
			if ( error )
				*error = "Failed to create 32 bit conversion surface.";
			return 0;
		}
		#ifdef DEBUG
		GLOUTPUT( "Creating 32 bit SDL surface.\n" );
		#endif

		// Now copy one surface to the other,
		// set transparency as needed afterwards.
		SDL_BlitSurface( surface, 0, s32, 0 );

		int i;
		KrPainter painter( s32 );

		// Convert corner-style color keys into concrete RGB values by
		// sampling the pixel at the indicated corner of the image.
		for ( i=0; i<nTrans; i++ )
		{
			if ( trans[i].type != RGBA )
			{
				switch( trans[i].type )
				{
					case LowerLeft:
						painter.BreakPixel( 0, surface->h - 1, &trans[i].rgba );
						break;

					case UpperLeft:
						painter.BreakPixel( 0, 0, &trans[i].rgba );
						break;

					case LowerRight:
						painter.BreakPixel( surface->w - 1, surface->h - 1, &trans[i].rgba );
						break;

					case UpperRight:
						painter.BreakPixel( surface->w - 1, 0, &trans[i].rgba );
						break;

					default:
						GLASSERT( 0 );
				}
			}
		}

		// Now set the transparency: any pixel whose RGB matches a key
		// (alpha intentionally ignored) becomes fully transparent.
		int x, y;
		int nTransPixels = 0;

		for( x=0; x<surface->w; x++ )
		{
			for( y=0; y<surface->h; y++ )
			{
				KrRGBA rgba;
				painter.BreakPixel( x, y, &rgba );

				for ( i=0; i<nTrans; i++ )
				{
					if (	rgba.c.red   == trans[i].rgba.c.red
						 && rgba.c.green == trans[i].rgba.c.green
						 && rgba.c.blue  == trans[i].rgba.c.blue )
					{
						// Set the surface alpha to transparent.
						rgba.c.alpha = KrRGBA::KR_TRANSPARENT;
						painter.SetPixel( x, y, rgba );
						nTransPixels++;
						break;
					}
				}
			}
		}
		#ifdef DEBUG
		GLOUTPUT( "Transparency converted=%d\n", nTransPixels );
		#endif

		SDL_FreeSurface( surface );
		return s32;
	}
	return surface;
}
KrCanvasResource* KrEncoder::Load32Canvas(	const char* filename,
											const KrRGBA* transparent,
											int nTrans,
											gedString* error )
{
	// Load 'filename' into a newly allocated 32-bit, alpha-enabled canvas
	// resource. Pixels whose RGB matches an entry of 'transparent' get
	// their alpha set to KR_TRANSPARENT. Returns 0 on failure; when
	// 'error' is non-null it receives a human-readable message.
	// The caller owns the returned canvas.

	// CONSISTENCY: Load32Surface guards against a missing image loader;
	// this entry point must as well, or ImageLoader(filename) below
	// calls through a null pointer.
	GLASSERT( ImageLoader );
	if ( !ImageLoader )
	{
		return 0;
	}

	if ( !filename )
	{
		if ( error )
			*error = "No filename for a surface specified";
		return 0;
	}

	// Try to load the file.
	SDL_Surface* surface = ImageLoader( filename );
	if ( !surface )
	{
		char buf[256];
		// snprintf: 'filename' is caller-supplied, guard the buffer.
		snprintf( buf, sizeof( buf ), "Failed to load surface '%s'.", filename );
		if ( error )
			*error = buf;
		return 0;
	}

	// The image can be 32 bits or less. A NON-32 bit image has
	// color(s) that are marked transparent. A 32 bit image
	// simply uses the alpha channel. Canvas's are always 32 bit,
	// and we always use a canvas that supports alpha.
	KrCanvasResource* canvas = new KrCanvasResource(	"encoder",
														surface->w,
														surface->h,
														true );
	if ( !canvas )
	{
		// BUG FIX: the loaded surface was leaked on this path.
		SDL_FreeSurface( surface );
		if ( error )
			*error = "Failed to create canvas.";
		return 0;
	}

	// Copy from the surface to the canvas, applying transparency
	// as each pixel is transferred.
	int x, y;
	int i;

	KrPaintInfo canvasPaintInfo( canvas->Pixels(), canvas->Width(), canvas->Height() );
	KrPainter canvasPainter( &canvasPaintInfo );
	KrPainter surfacePainter( surface );

	for( x=0; x<surface->w; x++ )
	{
		for( y=0; y<surface->h; y++ )
		{
			KrRGBA rgba;
			surfacePainter.BreakPixel( x, y, &rgba );

			for ( i=0; i<nTrans; i++ )
			{
				if (	rgba.c.red   == transparent[i].c.red
					 && rgba.c.green == transparent[i].c.green
					 && rgba.c.blue  == transparent[i].c.blue )
				{
					// Set the surface alpha to transparent.
					rgba.c.alpha = KrRGBA::KR_TRANSPARENT;
					break;
				}
			}
			canvasPainter.SetPixel( x, y, rgba );
		}
	}

	// BUG FIX: the source surface was never freed once its pixels had
	// been copied into the canvas — a leak on every successful call.
	SDL_FreeSurface( surface );
	return canvas;
}
int _tmain(int argc, _TCHAR* argv[]) { //std::string path("D:/GitHub/Image-Understanding-Classification/Image-Understanding02/101_ObjectCategories"); std::string path("../101_ObjectCategories"); ImageLoader LoadImages = ImageLoader(path); FeatureExtractor GetFeatures; DecisionMaker GetClassification; std::vector<cv::Mat> trainingImages; std::vector<int> trainingLabels; std::vector<cv::Mat> testImages; std::vector<int> testLabels; std::vector<std::vector< cv::Mat >> FeatureVectorsTraining; cv::Mat ReshapedFeatureVectorsTraining; cv::Mat ReducedFeatureVectorsTraining; std::vector<std::vector< cv::Mat >> FeatureVectorsTest; cv::Mat ReshapedFeatureVectorsTest; cv::Mat ReducedFeatureVectorsTest; std::vector<int> ResultsTest; std::vector<int> ResultsTraining; std::vector<std::string> classNames; int NumberOfSamples; int NumberOfClasses; std::vector<std::string> folders; folders.push_back("accordion"); //folders.push_back("airplanes"); folders.push_back("anchor"); folders.push_back("ant"); folders.push_back("barrel"); folders.push_back("bass"); folders.push_back("beaver"); folders.push_back("binocular"); folders.push_back("bonsai"); for (int i = 0; i < 10; ++i) { //LoadImages.LoadImagesFromSubfolders(folders); LoadImages.LoadImages(); LoadImages.getTrainingData(trainingImages, trainingLabels); LoadImages.getTestData(testImages, testLabels); FeatureVectorsTraining.clear(); FeatureVectorsTraining.resize(trainingImages.size()); GetFeatures.computeHOGFeatures(trainingImages, FeatureVectorsTraining); //GetFeatures.computeColorFeatures(trainingImages, FeatureVectorsTraining); GetClassification.ReshapeFeatures(FeatureVectorsTraining, ReshapedFeatureVectorsTraining); //GetClassification.constructPCA(ReshapedFeatureVectorsTraining); //GetClassification.reduceFeaturesPCA(ReshapedFeatureVectorsTraining, ReducedFeatureVectorsTraining); //GetClassification.TrainRandomTrees(ReshapedFeatureVectorsTraining, trainingLabels); GetClassification.TrainSVM(ReshapedFeatureVectorsTraining, 
trainingLabels); std::cout << "Training done." << std::endl; //GetClassification.PredictRandomTrees(ReshapedFeatureVectorsTraining, ResultsTraining); GetClassification.PredictSVM(ReshapedFeatureVectorsTraining, ResultsTraining); FeatureVectorsTest.clear(); FeatureVectorsTest.resize(testImages.size()); GetFeatures.computeHOGFeatures(testImages, FeatureVectorsTest); //GetFeatures.computeColorFeatures(testImages, FeatureVectorsTest); GetClassification.ReshapeFeatures(FeatureVectorsTest, ReshapedFeatureVectorsTest); //GetClassification.reduceFeaturesPCA(ReshapedFeatureVectorsTest, ReducedFeatureVectorsTest); //GetClassification.PredictRandomTrees(ReshapedFeatureVectorsTest, ResultsTest); GetClassification.PredictSVM(ReshapedFeatureVectorsTest, ResultsTest); LoadImages.getClassNames(classNames); NumberOfClasses = classNames.size(); LoadImages.getSampleSize(NumberOfSamples); EvaluationUnit GetTrainingEvaluation(trainingLabels, NumberOfClasses, NumberOfSamples); double TrainingPercent = GetTrainingEvaluation.EvaluateResultSimple(ResultsTraining); std::cout << " Training Data Simple Percentage: " + std::to_string(TrainingPercent) << std::endl; std::vector<double> classPercentageTraining; std::vector<std::vector<int>> TrainingStatistics; GetTrainingEvaluation.EvaluateResultComplex(ResultsTraining, classPercentageTraining, TrainingStatistics); //for (int i = 0; i < classPercentageTraining.size(); i++) //{ // std::cout << " " + classNames[i] + ": " + std::to_string(classPercentageTraining[i]) << std::endl; //} EvaluationUnit GetTestEvaluation(testLabels, NumberOfClasses, NumberOfSamples); double TestPercent = GetTestEvaluation.EvaluateResultSimple(ResultsTest); std::cout << " Test Data Simple Percentage: " + std::to_string(TestPercent) << std::endl; /*std::vector<double> classPercentageTest; std::vector<std::vector<int>> TestStatistics; GetTestEvaluation.EvaluateResultComplex(ResultsTest, classPercentageTest, TestStatistics); for (int i = 0; i < classPercentageTest.size(); 
i++) { std::cout << " " + classNames[i] + ": " + std::to_string(classPercentageTest[i]) << std::endl; } */ } return 0; }