void _testReduceChessboard1 ( void ) { RGB24Buffer *buffer = new RGB24Buffer(512, 512); for(int i = 0; i < buffer->h ; i++) { for(int j = 0; j < buffer->w ; j++) { int last1 = lastBit(i); int last2 = lastBit(j); /*int max = std::max(last1, last2); int selector1 = max - 2; int selector2 = max - 2; if (last1 == last2) { selector1 = last1 - 1; selector2 = last2 - 1; }*/ int selector1 = last1 - 1; int selector2 = last2 - 1; bool isWhite = (!!(i & (1 << selector1))) ^ (!!(j & (1 << selector2))); buffer->element(buffer->h - 1 - i, buffer->w - 1 - j) = isWhite ? RGBColor(0xFFFFFF) : RGBColor(0x000000); } } (BMPLoader()).save("chess.bmp", buffer); delete buffer; }
void plotRosenberg ( void ) { const int STEPS = 40; FILE *Out = fopen("rosenberg.txt", "wt"); RosenbrockFunction func; for (int i = 0; i < STEPS; i++) { for (int j = 0; j < STEPS; j++) { double x = ((double) i / (STEPS - 1)); double y = ((double) j / (STEPS - 1)); double in[2]; double out[1]; in[0] = x * 4.0 - 2.0; in[1] = y * 4.0 - 1.0; func.operator ()(in, out); fprintf(Out, "%lf %lf %lf\n", in[0], in[1], out[0]); } } ScalerFunction scaled(&func, 0.01, 0.0); RGB24Buffer *image = new RGB24Buffer(500,500); image->drawFunction(-1.0, -2.0, 4.0, 4.0, scaled); image->drawIsolines(-1.0, -2.0, 4.0, 4.0, 10, scaled); BMPLoader().save("rosenplot.bmp", image); delete image; fclose(Out); }
void testBoolean (void) { int h = 45; int w = 45; G8Buffer *buffer = new G8Buffer(h, w); AbstractPainter<G8Buffer> painter(buffer); painter.drawCircle( w / 4, h / 4, w / 4, 255 ); painter.drawCircle( w / 4, 3 * h / 4, w / 4, 255 ); painter.drawCircle(3 * w / 4, h / 4, w / 4, 255 ); painter.drawCircle(3 * w / 4, 3 * h / 4, w / 4, 255 ); RGB24Buffer *sourceImage = new RGB24Buffer(h, w); sourceImage->drawG8Buffer(buffer); (BMPLoader()).save("source.bmp", sourceImage); BooleanBuffer packedBuffer(buffer); G8Buffer *unpackBuffer = packedBuffer.unpack(0, 255); RGB24Buffer *image = new RGB24Buffer(unpackBuffer->h, unpackBuffer->w); image->drawG8Buffer(unpackBuffer); (BMPLoader()).save("unpacked.bmp", image); packedBuffer.printBuffer(); delete_safe(image); delete_safe(sourceImage); delete_safe(unpackBuffer); delete_safe(buffer); }
/**
 * Loads a stereo pair from the given URLs, computes a disparity map
 * (via constructDisparityMap) and saves a scaled grayscale visualization
 * to "result.bmp".
 *
 * @param leftImageUrl   local-file URL of the left image (BMP)
 * @param rightImageUrl  local-file URL of the right image (BMP)
 */
void ADCensus::disparityMapFromRGB(QUrl leftImageUrl, QUrl rightImageUrl)
{
    std::string left  = leftImageUrl .toLocalFile().toStdString();
    std::string right = rightImageUrl.toLocalFile().toStdString();
    cout << "Opening [" << left << " " << right << "]" << endl;

    RGB24Buffer *leftImage  = BMPLoader().loadRGB(left);
    RGB24Buffer *rightImage = BMPLoader().loadRGB(right);

    /* Bug fix: loadRGB() returns NULL on a missing/corrupt file;
       the old code dereferenced it unconditionally */
    if (leftImage == NULL || rightImage == NULL)
    {
        cout << "Failed to load input images [" << left << " " << right << "]" << endl;
        delete leftImage;
        delete rightImage;
        return;
    }

    G8Buffer *leftGray  = leftImage ->getChannel(ImageChannel::GRAY);
    G8Buffer *rightGray = rightImage->getChannel(ImageChannel::GRAY);

    AbstractBuffer<int32_t> disparities = constructDisparityMap(leftImage, rightImage, leftGray, rightGray);

    G8Buffer *result = new G8Buffer(disparities.getSize());
    /* Row-major traversal (y outer) — matches the buffers' memory layout */
    for (int y = 0; y < result->h; ++y)
    {
        for (int x = 0; x < result->w; ++x)
        {
            /* Scale disparity into the 8-bit range for visualization */
            result->element(y,x) = (disparities.element(y, x) / (double)result->w * 255 * 3);
        }
    }

    BMPLoader().save("result.bmp", result);
    cout << "Resulting disparity map saved to result.bmp\n";

    delete leftImage;
    delete rightImage;
    delete leftGray;
    delete rightGray;
    delete result;
}
/**
 * Per-pixel alpha blend of two RGB images.
 *
 * Each output pixel is (in1 * a + in2 * (255 - a)) / 255 where a is taken
 * from the alpha buffer. All three buffers are assumed to have the size
 * of in1; the caller owns the returned buffer.
 */
RGB24Buffer * alphaBlend(RGB24Buffer *in1, RGB24Buffer *in2, G8Buffer *alpha)
{
    RGB24Buffer *out = new RGB24Buffer(in1->getSize());

    for (int y = 0; y < out->h; y++)
    {
        for (int x = 0; x < out->w; x++)
        {
            RGBColor top    = in1->element(y,x);
            RGBColor bottom = in2->element(y,x);

            int wTop    = alpha->element(y,x);
            int wBottom = 255 - wTop;

            out->element(y,x) = RGBColor(
                (top.r() * wTop + bottom.r() * wBottom) / 255,
                (top.g() * wTop + bottom.g() * wBottom) / 255,
                (top.b() * wTop + bottom.b() * wBottom) / 255);
        }
    }
    return out;
}
/**
 * Loads a 24-bit BMP file into an RGB24Buffer.
 *
 * @param name  path of the BMP file
 * @return newly allocated buffer (caller owns it), or NULL on parse failure
 */
RGB24Buffer * BMPLoader::loadRGB(string name)
{
    uint8_t  *data = NULL;
    BMPHeader header;

    if (parseBMP(name, &header, &data) != 0)
    {
        if (data) delete[] data;
        return NULL;
    }

    RGB24Buffer *result = new RGB24Buffer(header.h, header.w);

    for (unsigned i = 0; i < header.h; i++)
    {
        /* BMP stores rows bottom-up, so read them in reverse */
        uint8_t  *src  = &(data[header.lineLength * (header.h - i - 1)]);
        RGBColor *dest = &result->element(i, 0);

        for (unsigned j = 0; j < header.w; j++)
        {
            /* Pixels are stored as B,G,R triplets */
            *dest++ = RGBColor(src[2], src[1], src[0]);
            src += 3;
        }
    }

    if (data) delete[] data;
    return result;
}
/**
 * Saves a grayscale buffer as a BMP file by expanding it into a
 * temporary RGB24 image and delegating to the RGB save overload.
 */
bool BMPLoader::save(string name, G8Buffer *buffer)
{
    RGB24Buffer rgb(buffer->getSize());
    rgb.drawG8Buffer(buffer);
    return save(name, &rgb);
}
/**
 * Converts a QImage into a newly allocated RGB24Buffer (caller owns it).
 *
 * For 32-bit formats (ARGB32/RGB32) the scanlines are walked directly;
 * the bytes appear as c0,c1,c2,alpha per pixel and are fed into RGBColor
 * in reverse order (presumably B,G,R,A on a little-endian host — the
 * swap reproduces the original channel mapping). Any other format falls
 * back to the slow per-pixel QImage::pixel() path.
 */
RGB24Buffer *QTFileLoader::RGB24BufferFromQImage(QImage *image)
{
    if (image == NULL)
        return NULL;

    RGB24Buffer *result = new RGB24Buffer(image->height(), image->width(), false);

    bool fastPath = (image->format() == QImage::Format_ARGB32) ||
                    (image->format() == QImage::Format_RGB32);

    if (fastPath)
    {
        for (int i = 0; i < image->height(); i++)
        {
            uint8_t  *src = (uint8_t *)image->scanLine(i);
            RGBColor *dst = &result->element(i, 0);
            for (int j = 0; j < image->width(); j++)
            {
                uint8_t c0 = *src++;
                uint8_t c1 = *src++;
                uint8_t c2 = *src++;
                src++;                       /* skip the alpha byte */
                *dst++ = RGBColor(c2, c1, c0);
            }
        }
    }
    else
    {
        /**
         * TODO: Make this faster using .bits() method.
         * So far don't want to mess with possible image formats
         **/
        qDebug("QTFileLoader::RGB24BufferFromQImage():Slow conversion.");
        for (int i = 0; i < image->height(); i++)
        {
            for (int j = 0; j < image->width(); j++)
            {
                QRgb pixel = image->pixel(j,i);
                result->element(i,j) = RGBColor(qRed(pixel), qGreen(pixel), qBlue(pixel));
            }
        }
    }
    return result;
}
/**
 * Rasterizes the selected geometry features into the given image:
 *  - every path is drawn as red segments between consecutive vertexes,
 *  - every standalone point is drawn as a single green pixel.
 */
void SelectableGeometryFeatures::draw(RGB24Buffer &buffer) {
    for (auto& p_ptr:mPaths) {
        SelectableGeometryFeatures::Vertex* prev = nullptr;
        for (auto& v_ptr: p_ptr->vertexes) {
            if (prev) {
                /* Draw the segment between the previous and current vertex */
                corecvs::Vector2dd from = prev->position;
                corecvs::Vector2dd to = v_ptr->position;
                auto diff = from - to;
                /* Sample twice per pixel along the longer axis so the line has no gaps */
                int sx = (int)(std::abs(diff[0]) * 2 + 1);
                int sy = (int)(std::abs(diff[1]) * 2 + 1);
                int steps = std::max(sx, sy);
                for (double alpha = 0.0; alpha < 1.0; alpha += 1.0 / steps) {
                    /* NOTE(review): alpha weighs "from", so the walk goes from `to` towards `from` */
                    auto p = from * alpha + (1.0 - alpha) * to;
                    int px = (int)(p[0] + 0.5), py = (int)(p[1] + 0.5);
                    /* Paint every pixel of the 3x3 neighborhood whose center lies
                       within half a pixel of the sample and inside the buffer */
                    for (int y = py - 1; y < py + 2; ++y)
                        for (int x = px - 1; x < px + 2; ++x)
                            if (std::abs(x - p[0]) < 0.5 && std::abs(y - p[1]) < 0.5 && x >= 0 && x < buffer.w && y >= 0 && y < buffer.h)
                                buffer.element(y, x) = corecvs::RGBColor(0xff0000); /* red */
                }
            }
            prev = v_ptr;
        }
    }
    /* Standalone points: one green pixel each (position truncated, not rounded) */
    for (auto& p: mPoints) {
        int px = (int)p->position[0], py = (int)p->position[1];
        if (px >= 0 && px < buffer.w && py >= 0 && py < buffer.h)
            buffer.element(py, px) = corecvs::RGBColor(0x00ff00);
    }
}
/**
 * Converts a QImage into a newly allocated RGB24Buffer (caller owns it),
 * copying every pixel through QImage::pixel(). Returns NULL for a NULL input.
 */
RGB24Buffer *QTFileLoader::RGB24BufferFromQImage(QImage *image)
{
    if (image == NULL)
        return NULL;

    int rows = image->height();
    int cols = image->width();
    RGB24Buffer *result = new RGB24Buffer(rows, cols, false);

    /**
     * TODO: Make this faster using .bits() method.
     * So far don't want to mess with possible image formats
     **/
    for (int i = 0; i < rows; i++)
    {
        for (int j = 0; j < cols; j++)
        {
            QRgb px = image->pixel(j, i);
            result->element(i, j) = RGBColor(qRed(px), qGreen(px), qBlue(px));
        }
    }
    return result;
}
/**
 * Refreshes the image widget with the buffer selected in the UI combo box.
 *
 * For the IMAGE case the original image is copied and, depending on the UI
 * checkboxes, overlaid with the mask (hard or blended), mask edges, and
 * level-debug coloring. All other cases simply wrap the corresponding
 * component buffer into a QImage and display it.
 */
void TestbedMainWindow::updateViewImage(void)
{
    /* Must match the order of entries in bufferSelectBox */
    enum {IMAGE, MASK, HUE, SATURATION, VALUE, EDGES, CANNY, PRINCIPAL, SECONDARY, THIRD};

    switch (mUi->bufferSelectBox->currentIndex())
    {
        case IMAGE:
        {
            if (mImage == NULL) {
                return;
            }
            if (mUi->blendEdgeCheckBox->isChecked()) {
                prepareBlendedMask();
            }
            /* Work on a copy so the source image stays untouched */
            RGB24Buffer *toDraw = new RGB24Buffer(mImage);
            RGBColor maskColor = mUi->maskColorWidget->getColor();
            /* Spin box holds a percentage; convert to [0..1] */
            double alpha = (mUi->maskAlphaSpinBox->value()) / 100.0;
            if (mUi->actionShowMask->isChecked())
            {
                for (int i = 0; i < toDraw->h; i++)
                {
                    for (int j = 0; j < toDraw->w; j++)
                    {
                        /* Edge detection: a pixel is on the mask border when its 3x3
                           neighborhood contains both masked and unmasked pixels */
                        bool hasmask = false;
                        bool hasnomask = false;
                        /* so far no optimization here */
                        if (mUi->showEdgeCheckBox->isChecked())
                        {
                            for (int dx = -1; dx <= 1; dx++) {
                                for (int dy = -1; dy <= 1; dy++) {
                                    if (!mMask->isValidCoord(i + dy, j + dx))
                                        continue;
                                    if (mMask->element(i + dy, j + dx)) {
                                        hasmask = true;
                                    } else {
                                        hasnomask = true;
                                    }
                                }
                            }
                        }
                        if (mUi->blendEdgeCheckBox->isChecked())
                        {
                            /* Soft overlay: per-pixel weight from the blended mask */
                            double scaler = alpha * mMaskBlended->element(i,j) / 255;
                            toDraw->element(i,j).r() += (maskColor.r() - toDraw->element(i,j).r()) * scaler;
                            toDraw->element(i,j).g() += (maskColor.g() - toDraw->element(i,j).g()) * scaler;
                            toDraw->element(i,j).b() += (maskColor.b() - toDraw->element(i,j).b()) * scaler;
                        } else {
                            /* Hard overlay: uniform alpha on masked pixels only */
                            if (mMask->element(i,j))
                            {
                                toDraw->element(i,j).r() += (maskColor.r() - toDraw->element(i,j).r()) * alpha;
                                toDraw->element(i,j).g() += (maskColor.g() - toDraw->element(i,j).g()) * alpha;
                                toDraw->element(i,j).b() += (maskColor.b() - toDraw->element(i,j).b()) * alpha;
                            }
                        }
                        /* Paint mask border pixels in the configured edge color */
                        if (mMask->element(i,j))
                        {
                            if (hasmask && hasnomask) {
                                toDraw->element(i,j) = mUi->edgeColorWidget->getColor();
                            }
                        }
                        /* Debug view: red above the level threshold, blue below */
                        if (mUi->levelDebugCheckBox->isChecked() && mMaskStore != NULL && mMaskStore->element(i,j) != 0)
                        {
                            if (mMaskStore->element(i,j) > mUi->levelSpinBox->value()) {
                                toDraw->element(i,j) = RGBColor::Red();
                            } else {
                                toDraw->element(i,j) = RGBColor::Blue();
                            }
                        }
                    }
                }
            }
            QImage *qImage = new RGB24Image(toDraw);
            mImageWidget->setImage(QSharedPointer<QImage>(qImage));
            delete_safe(toDraw);
        }
        break;
        case MASK:
            mImageWidget->setImage(QSharedPointer<QImage>(new G8Image(mMask)));
            break;
        case HUE:
            mImageWidget->setImage(QSharedPointer<QImage>(new G8Image(mHComp)));
            break;
        case SATURATION:
            mImageWidget->setImage(QSharedPointer<QImage>(new G8Image(mSComp)));
            break;
        case VALUE:
            mImageWidget->setImage(QSharedPointer<QImage>(new G8Image(mVComp)));
            break;
        case EDGES:
            mImageWidget->setImage(QSharedPointer<QImage>(new G12Image(mEdges)));
            break;
        case CANNY:
            mImageWidget->setImage(QSharedPointer<QImage>(new G12Image(mCannyEdges)));
            break;
        case PRINCIPAL:
            mImageWidget->setImage(QSharedPointer<QImage>(new G8Image(mPrincipal)));
            break;
        case SECONDARY:
            mImageWidget->setImage(QSharedPointer<QImage>(new G8Image(mPrincipal2)));
            break;
        case THIRD:
        default:
            mImageWidget->setImage(QSharedPointer<QImage>(new G8Image(mPrincipal3)));
            break;
    }
}
void testRadialInversion(int scale) { RGB24Buffer *image = new RGB24Buffer(250 * scale, 400 * scale); auto operation = [](int i, int j, RGBColor *pixel) { i = i / 100; j = j / 200; if ( (i % 2) && (j % 2)) *pixel = RGBColor::Green(); if (!(i % 2) && (j % 2)) *pixel = RGBColor::Yellow(); if ( (i % 2) && !(j % 2)) *pixel = RGBColor::Red(); if (!(i % 2) && !(j % 2)) *pixel = RGBColor::Blue(); }; touchOperationElementwize(image, operation); #if 0 LensDistortionModelParameters deformator; deformator.setPrincipalX(image->w / 2); deformator.setPrincipalY(image->h / 2); deformator.setNormalizingFocal(deformator.principalPoint().l2Metric()); deformator.setTangentialX(0.001); deformator.setTangentialY(0.001); deformator.setAspect(1.0); deformator.setScale(1.0); deformator.mKoeff.push_back( 0.1); deformator.mKoeff.push_back(-0.2); deformator.mKoeff.push_back( 0.3); #else LensDistortionModelParameters deformator; deformator.setMapForward(false); deformator.setPrincipalX(480); deformator.setPrincipalY(360); deformator.setNormalizingFocal(734.29999999999995); deformator.setTangentialX(0.00); deformator.setTangentialY(0.00); deformator.setShiftX(0.00); deformator.setShiftY(0.00); deformator.setAspect(1.0); deformator.setScale (1.0); deformator.mKoeff.clear(); deformator.mKoeff.push_back( 0); deformator.mKoeff.push_back( -0.65545); deformator.mKoeff.push_back( 0); deformator.mKoeff.push_back( 8.2439); // deformator.mKoeff.push_back( 0); // deformator.mKoeff.push_back( 8.01); #endif RadialCorrection T(deformator); PreciseTimer timer; cout << "Initial deformation... " << endl; cout << T.mParams << flush;; cout << "Starting deformation... " << flush; timer = PreciseTimer::currentTime(); RGB24Buffer *deformed = image->doReverseDeformationBlTyped<RadialCorrection>(&T); cout << "done in: " << timer.usecsToNow() << "us" << endl; /* */ int inversionGridStep = 30; cout << "Starting invertion... 
" << flush; RadialCorrection invert = T.invertCorrection(image->h, image->w, inversionGridStep); cout << "done" << endl; cout << "Starting backprojection... " << flush; timer = PreciseTimer::currentTime(); RGB24Buffer *backproject = deformed->doReverseDeformationBlTyped<RadialCorrection>(&invert); cout << "done in: " << timer.usecsToNow() << "us" << endl; cout << "done" << endl; RGB24Buffer *debug = new RGB24Buffer(image->getSize()); /* Show visual */ double dh = (double)image->h / (inversionGridStep - 1); double dw = (double)image->w / (inversionGridStep - 1); for (int i = 0; i < inversionGridStep; i++) { for (int j = 0; j < inversionGridStep; j++) { Vector2dd point(dw * j, dh * i); debug->drawCrosshare1(point, RGBColor::Yellow()); Vector2dd deformed = T.mapToUndistorted(point); /* this could be cached */ Vector2dd backproject = invert.mapToUndistorted(deformed); debug->drawCrosshare1(backproject, RGBColor::Green()); } } BMPLoader().save("input.bmp" , image); BMPLoader().save("debug.bmp" , debug); BMPLoader().save("forward.bmp" , deformed); BMPLoader().save("backproject.bmp", backproject); delete_safe(image); delete_safe(debug); delete_safe(deformed); delete_safe(backproject); }
//int main (int /*argC*/, char *argV[]) TEST(FaceRecognition1, main) { const double UP_FACTOR = 5; const double UP_STEP = 1.2; // const double DOWN_FACTOR = 0.25; // const double DOWN_STEP = 1.0 / 1.2; fstream patternFile; patternFile.open("boost-result.txt", fstream::in); VJAdaBoostedClassifier *classifier = new VJAdaBoostedClassifier(); patternFile >> (*classifier); for (unsigned i = 0; i < classifier->children.size(); i++) { char name[100]; snprintf2buf(name, "pattern%d.bmp", i); VJSimpleClassifier* partClass = classifier->children.at(i); RGB24Buffer *buffer = partClass->drawPattern(); (BMPLoader()).save(string(name), buffer); delete buffer; } // classifier->print(); patternFile.close(); G12Buffer *inputPicture = BufferFactory::getInstance()->loadG12Bitmap(argV[1]); if (inputPicture == NULL) { printf("Problem loading input %s\n", argV[1]); } RGB24Buffer *output = new RGB24Buffer(inputPicture); G12IntegralBuffer *integral = new G12IntegralBuffer(inputPicture); int total = 0; int positive = 0; double factor; for (factor = 1.0; factor < UP_FACTOR; factor *= UP_STEP) { VJAdaBoostedClassifier *scaled = classifier->scale(factor); scaled->initLimits(); double x = scaled->leftMargin; double y = scaled->topMargin; double patw = scaled->rightMargin + scaled->leftMargin; double path = scaled->topMargin + scaled->bottomMargin; int hlimit = inputPicture->h - scaled->bottomMargin; int wlimit = inputPicture->w - scaled->rightMargin; for (; y < hlimit ; y += factor) { for (; x < wlimit ; x += factor) { bool hasMatch = scaled->applyToPoint(integral, y, x); if (hasMatch) { //printf("Match Found! 
at %d %d level %d\n",j ,i, l); int h = path; int w = patw; output->drawCrosshare1(x, y , RGBColor(0xFF0000)); output->drawCrosshare1(x + w, y , RGBColor(0x7F0000)); output->drawCrosshare1(x, y + h , RGBColor(0x00FF00)); output->drawCrosshare1(x + w, y + h , RGBColor(0x007F00)); positive++; } total++; } } delete scaled; } delete integral; printf("Found %d faces among %d objects %2.2lf%%\n", positive, total, (double) 100.0 * positive / total ); (BMPLoader()).save("output.bmp", output); // return 0; }
/**
 * Decodes a JPEG file into a newly allocated RGB24Buffer (caller owns it).
 *
 * @param name  path of the JPEG file
 * @return decoded image, or NULL when the file can't be opened or is not
 *         a 3-component (RGB) JPEG
 */
RGB24Buffer *LibjpegFileReader::load(string name)
{
    struct jpeg_decompress_struct cinfo;
    struct jpeg_error_mgr jerr;
    JSAMPARRAY buffer;
    int row_stride;

    FILE * infile;  /* source file */
    if ((infile = fopen(name.c_str(), "rb")) == NULL) {
        SYNC_PRINT(("can't open %s\n", name.c_str()));
        return NULL;
    }

    /* Step 1: allocate and initialize JPEG decompression object */
    cinfo.err = jpeg_std_error(&jerr);
    /* NOTE(review): the default libjpeg error handler calls exit() on a fatal
       decode error; the custom setjmp-based handler below is still disabled.
    jerr.pub.error_exit = my_error_exit;
    if (setjmp(jerr.setjmp_buffer)) {
        jpeg_destroy_decompress(&cinfo);
        fclose(infile);
        return NULL;
    }*/
    jpeg_create_decompress(&cinfo);
    jpeg_stdio_src(&cinfo, infile);
    (void) jpeg_read_header(&cinfo, TRUE);
    (void) jpeg_start_decompress(&cinfo);

    SYNC_PRINT(("Parsed header [%dx%d] out = [%dx%d]\n",
        cinfo.image_width, cinfo.image_height,
        cinfo.output_width, cinfo.output_height));

    /* Bug fix: the pixel loop below reads 3 bytes per pixel, which walks off
       the end of the scanline for grayscale (1-component) JPEGs */
    if (cinfo.output_components != 3) {
        SYNC_PRINT(("unsupported number of components %d in %s\n",
            cinfo.output_components, name.c_str()));
        jpeg_destroy_decompress(&cinfo);
        fclose(infile);
        return NULL;
    }

    RGB24Buffer *result = new RGB24Buffer(cinfo.output_height, cinfo.output_width);

    row_stride = cinfo.output_width * cinfo.output_components;
    /* libjpeg-managed scanline buffer, released with the decompress object */
    buffer = (*cinfo.mem->alloc_sarray)
        ((j_common_ptr) &cinfo, JPOOL_IMAGE, row_stride, 1);

    while (cinfo.output_scanline < cinfo.output_height) {
        /* output_scanline is the row about to be produced by the next read */
        unsigned i = cinfo.output_scanline;
        (void) jpeg_read_scanlines(&cinfo, buffer, 1);

        for (unsigned j = 0; j < cinfo.output_width; j++) {
            if (!result->isValidCoord(i, j)) {
                SYNC_PRINT(("-(%dx%d)\n", i, j ));
            }
            result->element(i, j) = RGBColor(
                buffer[0][j*cinfo.output_components + 0],
                buffer[0][j*cinfo.output_components + 1],
                buffer[0][j*cinfo.output_components + 2]
            );
        }
    }

    (void) jpeg_finish_decompress(&cinfo);
    jpeg_destroy_decompress(&cinfo);
    fclose(infile);
    return result;
}
int main (int argc, char **argv) { QCoreApplication app(argc, argv); printf("Loading mask...\n"); QTRGB24Loader ::registerMyself(); QTG12Loader ::registerMyself(); QTRuntimeLoader::registerMyself(); QImage imageMask ("data/adopt/orig.png"); QImage imageAlpha ("data/adopt/alpha.bmp"); QImage imageFace ("data/adopt/face.png"); RGB24Buffer *alpha24 = QTFileLoader::RGB24BufferFromQImage(&imageAlpha); RGB24Buffer *mask = QTFileLoader::RGB24BufferFromQImage(&imageMask); RGB24Buffer *face = QTFileLoader::RGB24BufferFromQImage(&imageFace); G8Buffer *alpha = alpha24->getChannel(ImageChannel::GRAY); Vector3dd meanMask(0.0); Vector3dd meanFace(0.0); double count = 0; /* Get hole statistics */ for (int i = 0; i < mask->h; i++) { for (int j = 0; j < mask->w; j++) { if (alpha->element(i,j) > 10) continue; count++; meanFace += face->element(i,j).toDouble(); meanMask += mask->element(i,j).toDouble(); } } meanFace /= count; meanMask /= count; cout << "Mean face value is" << meanFace << endl; cout << "Mean face value is" << meanMask << endl; EllipticalApproximationUnified<Vector3dd> facePrincipal; EllipticalApproximationUnified<Vector3dd> maskPrincipal; for (int i = 0; i < mask->h; i++) { for (int j = 0; j < mask->w; j++) { facePrincipal.addPoint(face->element(i,j).toDouble() - meanFace); maskPrincipal.addPoint(mask->element(i,j).toDouble() - meanMask); } } facePrincipal.getEllipseParameters(); maskPrincipal.getEllipseParameters(); cout << "Face Principals" << endl; cout << facePrincipal.mAxes[0] << "->" << facePrincipal.mValues[0] << endl; cout << facePrincipal.mAxes[1] << "->" << facePrincipal.mValues[1] << endl; cout << facePrincipal.mAxes[2] << "->" << facePrincipal.mValues[2] << endl; cout << "Mask Principals" << endl; cout << maskPrincipal.mAxes[0] << "->" << maskPrincipal.mValues[0] << endl; cout << maskPrincipal.mAxes[1] << "->" << maskPrincipal.mValues[1] << endl; cout << maskPrincipal.mAxes[2] << "->" << maskPrincipal.mValues[2] << endl; Vector3dd scalers; scalers.x() = 
sqrt(maskPrincipal.mValues[0]) / sqrt(facePrincipal.mValues[0]); scalers.y() = sqrt(maskPrincipal.mValues[1]) / sqrt(facePrincipal.mValues[1]); scalers.z() = sqrt(maskPrincipal.mValues[2]) / sqrt(facePrincipal.mValues[2]); /* Making correction for face */ RGB24Buffer *faceCorr = new RGB24Buffer(face->getSize(), false); for (int i = 0; i < faceCorr->h; i++) { for (int j = 0; j < faceCorr->w; j++) { Vector3dd color = face->element(i,j).toDouble() - meanFace; Vector3dd projected(color & facePrincipal.mAxes[0], color & facePrincipal.mAxes[1], color & facePrincipal.mAxes[2]); projected = projected * scalers; Vector3dd newColor = maskPrincipal.mAxes[0] * projected.x() + maskPrincipal.mAxes[1] * projected.y() + maskPrincipal.mAxes[2] * projected.z() + meanMask; RGBColor newrgb; double c; c = newColor.x(); if (c < 0) c = 0; if (c > 255) c = 255; newrgb.r() = c; c = newColor.y(); if (c < 0) c = 0; if (c > 255) c = 255; newrgb.g() = c; c = newColor.z(); if (c < 0) c = 0; if (c > 255) c = 255; newrgb.b() = c; faceCorr->element(i,j) = newrgb; } } /* With Matrix*/ Matrix33 scalerM = Matrix33::Scale3(scalers); Matrix33 toUnityM = Matrix33::FromRows(facePrincipal.mAxes[0], facePrincipal.mAxes[1], facePrincipal.mAxes[2]); Matrix33 fromUnityM = Matrix33::FromColumns(maskPrincipal.mAxes[0], maskPrincipal.mAxes[1], maskPrincipal.mAxes[2]); Matrix33 transform = fromUnityM * scalerM * toUnityM; RGB24Buffer *faceCorr2 = new RGB24Buffer(face->getSize(), false); for (int i = 0; i < faceCorr2->h; i++) { for (int j = 0; j < faceCorr2->w; j++) { Vector3dd newColor = transform * (face->element(i,j).toDouble() - meanFace) + meanMask; RGBColor newrgb; double c; c = newColor.x(); if (c < 0) c = 0; if (c > 255) c = 255; newrgb.r() = c; c = newColor.y(); if (c < 0) c = 0; if (c > 255) c = 255; newrgb.g() = c; c = newColor.z(); if (c < 0) c = 0; if (c > 255) c = 255; newrgb.b() = c; faceCorr2->element(i,j) = newrgb; } } /* Without roots */ scalers.x() = maskPrincipal.mValues[0] / 
facePrincipal.mValues[0]; scalers.y() = maskPrincipal.mValues[1] / facePrincipal.mValues[1]; scalers.z() = maskPrincipal.mValues[2] / facePrincipal.mValues[2]; /* Making correction for face */ RGB24Buffer *faceCorr1 = new RGB24Buffer(face->getSize(), false); for (int i = 0; i < faceCorr1->h; i++) { for (int j = 0; j < faceCorr1->w; j++) { Vector3dd color = face->element(i,j).toDouble() - meanFace; Vector3dd projected(color & facePrincipal.mAxes[0], color & facePrincipal.mAxes[1], color & facePrincipal.mAxes[2]); projected = projected * scalers; Vector3dd newColor = maskPrincipal.mAxes[0] * projected.x() + maskPrincipal.mAxes[1] * projected.y() + maskPrincipal.mAxes[2] * projected.z() + meanMask; RGBColor newrgb; double c; c = newColor.x(); if (c < 0) c = 0; if (c > 255) c = 255; newrgb.r() = c; c = newColor.y(); if (c < 0) c = 0; if (c > 255) c = 255; newrgb.g() = c; c = newColor.z(); if (c < 0) c = 0; if (c > 255) c = 255; newrgb.b() = c; faceCorr1->element(i,j) = newrgb; } } /* Make a final blending */ BMPLoader().save("output0.bmp", mask); RGB24Buffer *result = alphaBlend(mask, face, alpha); BMPLoader().save("output1.bmp", result); RGB24Buffer *result1 = alphaBlend(mask, faceCorr, alpha); BMPLoader().save("output2.bmp", result1); RGB24Buffer *result2 = alphaBlend(mask, faceCorr1, alpha); BMPLoader().save("output3.bmp", result2); RGB24Buffer *result3 = alphaBlend(mask, faceCorr2, alpha); BMPLoader().save("matrix-out.bmp", result3); delete_safe(alpha); delete_safe(mask); delete_safe(face); delete_safe(faceCorr); delete_safe(faceCorr1); delete_safe(faceCorr2); delete_safe(result); delete_safe(result1); delete_safe(result2); delete_safe(result3); return 0; }