QgsGlobeTileImage::QgsGlobeTileImage( QgsGlobeTileSource* tileSource, const QgsRectangle& tileExtent, int tileSize, int tileLod )
    : osg::Image()
    , mTileSource( tileSource )
    , mTileExtent( tileExtent )
    , mTileSize( tileSize )
    , mLod( tileLod )
{
  mTileSource->addTile( this );
#ifdef GLOBE_SHOW_TILE_STATS
  QgsGlobeTileStatistics::instance()->updateTileCount( + 1 );
#endif
  mTileData = new unsigned char[mTileSize * mTileSize * 4];
  std::memset( mTileData, 0, mTileSize * mTileSize * 4 );
#if 0
  setImage( mTileSize, mTileSize, 1, 4, // width, height, depth, internal_format
            GL_BGRA, GL_UNSIGNED_BYTE,
            mTileData, osg::Image::NO_DELETE );

  mTileSource->mTileUpdateManager.addTile( const_cast<QgsGlobeTileImage*>( this ) );
  mDpi = 72;
#else
  QImage qImage( mTileData, mTileSize, mTileSize, QImage::Format_ARGB32_Premultiplied );
  QPainter painter( &qImage );
  QgsMapRendererCustomPainterJob job( createSettings( qImage.logicalDpiX(), mTileSource->mLayerSet ), &painter );
  job.renderSynchronously();

  setImage( mTileSize, mTileSize, 1, 4, // width, height, depth, internal_format
            GL_BGRA, GL_UNSIGNED_BYTE,
            mTileData, osg::Image::NO_DELETE );
  flipVertical();
  mDpi = qImage.logicalDpiX();
#endif
}
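
The #else branch above relies on a useful pattern: a QImage can wrap a caller-owned buffer without copying it, so every QPainter operation (here the QgsMapRendererCustomPainterJob render) writes straight into the memory that is later handed to osg::Image::setImage as GL_BGRA. Below is a minimal standalone sketch of that pattern, independent of QGIS/osgEarth; the function name is made up for illustration, and it assumes a little-endian host, where Format_ARGB32_Premultiplied bytes are laid out B,G,R,A as GL_BGRA expects.

#include <QImage>
#include <QPainter>
#include <cstring>

// Hypothetical sketch: paint into a raw, caller-owned size x size BGRA buffer.
// The QImage does not take ownership, so the buffer must outlive the painting.
void paintIntoBgraBuffer( unsigned char *buffer, int size )
{
  std::memset( buffer, 0, size * size * 4 );
  QImage wrapper( buffer, size, size, QImage::Format_ARGB32_Premultiplied );
  QPainter painter( &wrapper );
  painter.fillRect( 0, 0, size, size, Qt::darkGreen ); // lands directly in `buffer`
}
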
Example #2
// Slots
void CamWidget::setImage(const cv::Mat &updatedImage)
{
	// Perform conversion from cv::Mat to QImage
	cv::Mat tmpImage;
	// Assume the image has 3 channels and is in BGR format
	cv::cvtColor(updatedImage, tmpImage, cv::COLOR_BGR2RGB);
	// Pass the row stride explicitly; without it QImage assumes 32-bit aligned scanlines
	QImage qImage(tmpImage.data, tmpImage.cols, tmpImage.rows, static_cast<int>(tmpImage.step), QImage::Format_RGB888);
	pixmap.convertFromImage(qImage);

	QSize newSize = calcZoom(pixmap.size()) * pixmap.size();
	pixmap = pixmap.scaled(newSize);

	resize(newSize);

	update();
}
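
The conversion above, and the similar ones in the later examples, wraps the cv::Mat's buffer directly, which is only safe while the Mat stays alive. A minimal helper sketch follows; it is not taken from any of the projects listed here, assumes OpenCV 3+ headers and a CV_8UC3 BGR input, passes the row stride explicitly, and deep-copies so the returned QImage owns its pixels.

#include <QImage>
#include <opencv2/imgproc.hpp>

// Hypothetical helper: BGR cv::Mat -> QImage that owns its data.
QImage matToQImage(const cv::Mat &bgr)
{
	cv::Mat rgb;
	cv::cvtColor(bgr, rgb, cv::COLOR_BGR2RGB);
	// copy() detaches from rgb's buffer, so the result stays valid after rgb is released
	return QImage(rgb.data, rgb.cols, rgb.rows,
	              static_cast<int>(rgb.step), QImage::Format_RGB888).copy();
}
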
Example #3
void cThumbnailWidget::slotFullyRendered()
{
	if(!disableThumbnailCache)
	{
		QImage qImage((const uchar*) image->ConvertTo8bit(),
									image->GetWidth(),
									image->GetHeight(),
									image->GetWidth() * sizeof(sRGB8),
									QImage::Format_RGB888);
		QPixmap pixmap;
		pixmap.convertFromImage(qImage);

		QString thumbnailFileName = systemData.thumbnailDir + hash + QString(".png");
		pixmap.save(thumbnailFileName, "PNG");
	}
	lastRenderTime = renderingTimeTimer.nsecsElapsed() / 1e9;
	emit thumbnailRendered();
}
Example #4
bool QtPixmapDumper::dumpImage(const Image *image) const
{
    if (!image)
    {
        return false;
    }

    if (!m_pixmap)
    {
        return false;
    }

    QImage qImage((const uchar *)(image->getRawPoints()),
                  image->getWidth(),
                  image->getHeight(),
                  QImage::Format_ARGB32);

    m_pixmap->convertFromImage(qImage);

    return true;
}
Example #5
QImage ImageCapture::imageConvert(cv::Mat &matImage) {

    // Convert the incoming OpenCV frame to Qt's image format so it can be displayed.
    QImage qImage(
        (uchar*)matImage.data,
        matImage.cols,
        matImage.rows,
        matImage.step,
        QImage::Format_RGB888
        );
    clock_t t;
    t=clock();
    char buf[40];
    sprintf(buf,"image_%d.JPG",(int)t);
    //string out=string(buf);

    qImage.save(buf,"JPEG");


    return qImage.rgbSwapped().mirrored(true, false);
}
void TileBoundsCalculator::_exportResult(const vector<PixelBox>& boxes, QString output)
{
  QImage qImage(_r1.cols, _r1.rows, QImage::Format_RGB16);
  if (qImage.isNull())
  {
    throw HootException(QString("Unable to allocate image of size %1x%2").arg(_r1.cols).
      arg(_r1.rows));
  }
  QPainter pt(&qImage);
  pt.setRenderHint(QPainter::Antialiasing, false);
  pt.fillRect(pt.viewport(), Qt::black);
  QPen pen;
  pen.setWidth(0);
  pen.setColor(qRgb(1, 0, 0));
  pt.setPen(pen);

  LOG_INFO("max value: " << _maxValue);

  for (int y = 0; y < _r1.rows; y++)
  {
    int32_t* row1 = _r1.ptr<int32_t>(y);
    int32_t* row2 = _r2.ptr<int32_t>(y);
    for (int x = 0; x < _r1.cols; x++)
    {
      double l1 = row1[x] <= 0 ? 0.0 : log(row1[x]) / log(_maxValue);
      double l2 = row2[x] <= 0 ? 0.0 : log(row2[x]) / log(_maxValue);

      qImage.setPixel(x, _r1.rows - y - 1, qRgb(l1 * 255, l2 * 255, 0));
    }
  }

  pt.setPen(QPen(QColor(0, 0, 255, 100)));
  for (size_t i = 0; i < boxes.size(); i++)
  {
    const PixelBox& b = boxes[i];
    pt.drawRect(b.minX, _r1.rows - b.maxY - 1, b.maxX - b.minX, b.maxY - b.minY);
  }

  qImage.save(output);
}
void TileBoundsCalculator::_exportImage(cv::Mat &r, QString output)
{
  QImage qImage(r.cols, r.rows, QImage::Format_RGB16);
  if (qImage.isNull())
  {
    throw HootException(QString("Unable to allocate image of size %1x%2").arg(r.cols).
      arg(r.rows));
  }
  QPainter pt(&qImage);
  pt.setRenderHint(QPainter::Antialiasing, false);
  pt.fillRect(pt.viewport(), Qt::black);
  QPen pen;
  pen.setWidth(0);
  pen.setColor(qRgb(1, 0, 0));
  pt.setPen(pen);

  LOG_INFO("max value: " << _maxValue);

  for (int y = 0; y < r.rows; y++)
  {
    int32_t* row = r.ptr<int32_t>(y);
    for (int x = 0; x < r.cols; x++)
    {
      double l;
      if (row[x] == 0)
      {
        l = 0.0;
      }
      else
      {
        l = log(row[x]) / log(_maxValue);
      }
      int v = l * 255;
      qImage.setPixel(x, r.rows - y - 1, qRgb(v, v, 50));
    }
  }

  qImage.save(output);
}
Example #8
void BaseComparator::_saveImage(cv::Mat& image, QString path, double max, bool gradient)
{
  if (max <= 0.0)
  {
    for (int y = 0; y < _height; y++)
    {
      float* row = image.ptr<float>(y);
      for (int x = 0; x < _width; x++)
      {
        max = std::max((double)row[x], max);
      }
    }
  }

  QImage qImage(_width, _height, QImage::Format_ARGB32);

  QRgb rgb;
  if (max > 0.0)
  {
    for (int y = 0; y < _height; y++)
    {
      float* row = image.ptr<float>(y);
      for (int x = 0; x < _width; x++)
      {
        if (gradient)
        {
          _calculateColor(row[x], max, rgb);
        }
        else
        {
          _calculateRingColor(row[x], max, rgb);
        }
        qImage.setPixel(x, y, rgb);
      }
    }
  }

  qImage.save(path);
}
Example #9
File: mhi.cpp Project: Olti/mythtv
// Draw a rectangle.  This is complicated if we want to get transparency right.
void MHIContext::DrawRect(int xPos, int yPos, int width, int height,
                          MHRgba colour)
{
    if (colour.alpha() == 0 || height == 0 || width == 0)
        return; // Fully transparent

    QRgb qColour = qRgba(colour.red(), colour.green(),
                         colour.blue(), colour.alpha());

    int scaledWidth  = SCALED_X(width);
    int scaledHeight = SCALED_Y(height);
    QImage qImage(scaledWidth, scaledHeight, QImage::Format_ARGB32);

    for (int i = 0; i < scaledHeight; i++)
    {
        for (int j = 0; j < scaledWidth; j++)
        {
            qImage.setPixel(j, i, qColour);
        }
    }

    AddToDisplay(qImage, SCALED_X(xPos), SCALED_Y(yPos));
}
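
A side note on the loop above: since every pixel receives the same 32-bit ARGB value, QImage::fill() does the same job in one call; a nested setPixel() loop is only needed when per-pixel values differ. A hedged equivalent, assuming the same ARGB32 qImage and qColour as above:

    // Fill every pixel of the ARGB32 image with the 32-bit qColour value.
    qImage.fill(qColour);
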
Example #10
void SubtitleScreen::DisplayAVSubtitles(void)
{
    if (!m_player || !m_subreader)
        return;

    AVSubtitles* subs = m_subreader->GetAVSubtitles();
    QMutexLocker lock(&(subs->lock));
    if (subs->buffers.empty() && (kDisplayAVSubtitle != m_subtitleType))
        return;

    VideoOutput    *videoOut = m_player->GetVideoOutput();
    VideoFrame *currentFrame = videoOut ? videoOut->GetLastShownFrame() : NULL;

    if (!currentFrame || !videoOut)
        return;

    float tmp = 0.0;
    QRect dummy;
    videoOut->GetOSDBounds(dummy, m_safeArea, tmp, tmp, tmp);

    while (!subs->buffers.empty())
    {
        const AVSubtitle subtitle = subs->buffers.front();
        if (subtitle.start_display_time > currentFrame->timecode)
            break;

        ClearDisplayedSubtitles();
        subs->buffers.pop_front();
        for (std::size_t i = 0; i < subtitle.num_rects; ++i)
        {
            AVSubtitleRect* rect = subtitle.rects[i];

            bool displaysub = true;
            if (subs->buffers.size() > 0 &&
                subs->buffers.front().end_display_time <
                currentFrame->timecode)
            {
                displaysub = false;
            }

            if (displaysub && rect->type == SUBTITLE_BITMAP)
            {
                // AVSubtitleRect's image data is not guaranteed to be 4-byte
                // aligned, hence the per-pixel copy below (an indexed-colour
                // alternative is sketched after this function).

                QSize img_size(rect->w, rect->h);
                QRect img_rect(rect->x, rect->y, rect->w, rect->h);
                QRect display(rect->display_x, rect->display_y,
                              rect->display_w, rect->display_h);

                // XSUB and some DVD/DVB subs are based on the original video
                // size before the video was converted. We need to guess the
                // original size and allow for the difference

                int right  = rect->x + rect->w;
                int bottom = rect->y + rect->h;
                if (subs->fixPosition || (currentFrame->height < bottom) ||
                   (currentFrame->width  < right))
                {
                    int sd_height = 576;
                    if ((m_player->GetFrameRate() > 26.0f) && bottom <= 480)
                        sd_height = 480;
                    int height = ((currentFrame->height <= sd_height) &&
                                  (bottom <= sd_height)) ? sd_height :
                                 ((currentFrame->height <= 720) && bottom <= 720)
                                   ? 720 : 1080;
                    int width  = ((currentFrame->width  <= 720) &&
                                  (right <= 720)) ? 720 :
                                 ((currentFrame->width  <= 1280) &&
                                  (right <= 1280)) ? 1280 : 1920;
                    display = QRect(0, 0, width, height);
                }

                QRect scaled = videoOut->GetImageRect(img_rect, &display);
                QImage qImage(img_size, QImage::Format_ARGB32);
                for (int y = 0; y < rect->h; ++y)
                {
                    for (int x = 0; x < rect->w; ++x)
                    {
                        const uint8_t color = rect->pict.data[0][y*rect->pict.linesize[0] + x];
                        const uint32_t pixel = *((uint32_t*)rect->pict.data[1]+color);
                        qImage.setPixel(x, y, pixel);
                    }
                }

                if (scaled.size() != img_size)
                {
                    qImage = qImage.scaled(scaled.width(), scaled.height(),
                             Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
                }

                MythPainter *osd_painter = videoOut->GetOSDPainter();
                MythImage* image = NULL;
                if (osd_painter)
                   image = osd_painter->GetFormatImage();

                long long displayfor = subtitle.end_display_time -
                                       subtitle.start_display_time;
                if (displayfor == 0)
                    displayfor = 60000;
                displayfor = (displayfor < 50) ? 50 : displayfor;
                long long late = currentFrame->timecode -
                                 subtitle.start_display_time;
                MythUIImage *uiimage = NULL;
                if (image)
                {
                    image->Assign(qImage);
                    QString name = QString("avsub%1").arg(i);
                    uiimage = new MythUIImage(this, name);
                    if (uiimage)
                    {
                        m_refreshArea = true;
                        uiimage->SetImage(image);
                        uiimage->SetArea(MythRect(scaled));
                        m_expireTimes.insert(uiimage,
                                     currentFrame->timecode + displayfor);
                    }
                }
                if (uiimage)
                {
                    LOG(VB_PLAYBACK, LOG_INFO, LOC +
                        QString("Display %1AV subtitle for %2ms")
                        .arg(subtitle.forced ? "FORCED " : "")
                        .arg(displayfor));
                    if (late > 50)
                        LOG(VB_PLAYBACK, LOG_INFO, LOC +
                            QString("AV Sub was %1ms late").arg(late));
                }
            }
#ifdef USING_LIBASS
            else if (displaysub && rect->type == SUBTITLE_ASS)
            {
                InitialiseAssTrack(m_player->GetDecoder()->GetTrack(kTrackTypeSubtitle));
                AddAssEvent(rect->ass);
            }
#endif
        }
        m_subreader->FreeAVSubtitle(subtitle);
    }
#ifdef USING_LIBASS
    RenderAssTrack(currentFrame->timecode);
#endif
}
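
For the SUBTITLE_BITMAP branch above, the per-pixel palette lookup can also be expressed through Qt's indexed-colour support. Below is a hedged helper sketch; palettedToArgb is a made-up name, and it assumes, exactly as the loop above does, that the bitmap consists of 8-bit palette indices with a table of QRgb-compatible 32-bit entries, i.e. what rect->pict.data[0] and rect->pict.data[1] provide.

#include <QImage>
#include <QVector>
#include <cstring>

// Hypothetical alternative to the palette-expansion loop above.
QImage palettedToArgb(const uchar *indices, int w, int h, int linesize,
                      const uchar *palette)
{
    // Wrap the 8-bit indexed bitmap, attach the colour table,
    // then let Qt expand it to ARGB32.
    QImage indexed(indices, w, h, linesize, QImage::Format_Indexed8);
    QVector<QRgb> colours(256);
    std::memcpy(colours.data(), palette, 256 * sizeof(QRgb));
    indexed.setColorTable(colours);
    return indexed.convertToFormat(QImage::Format_ARGB32);
}
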
Example #11
QImage iplImageToQImage(IplImage * iplImage) {
	if(!iplImage)
		return QImage();


	int depth = iplImage->nChannels;

	bool rgb24_to_bgr32 = false;
	if(depth == 3  ) {// RGB24 is obsolete on Qt => use 32bit instead
            depth = 4;
            rgb24_to_bgr32 = true;
	}

	u32 * grayToBGR32palette = grayToBGR32;
	bool gray_to_bgr32 = false;

	if(depth == 1) {// GRAY is obsolete on Qt => use 32bit instead
		depth = 4;
		gray_to_bgr32 = true;

		init_grayToBGR32();
		grayToBGR32palette = grayToBGR32;
	}

	int orig_width = iplImage->width;
//	if((orig_width % 2) == 1)
//		orig_width--;


	QImage qImage(orig_width, iplImage->height, depth == 1 ? QImage::Format_Mono : QImage::Format_ARGB32);
	memset(qImage.bits(), 255, orig_width*iplImage->height*depth); // to fill the alpha channel


	switch(iplImage->depth) {
	default:
		fprintf(stderr, "[TamanoirApp]::%s:%d : Unsupported depth = %d\n", __func__, __LINE__, iplImage->depth);
		break;

	case IPL_DEPTH_8U: {
		if(!rgb24_to_bgr32 && !gray_to_bgr32) {
			if(iplImage->nChannels != 4) {
				for(int r=0; r<iplImage->height; r++) {
					// NO need to swap R<->B
					memcpy(qImage.bits() + r*orig_width*depth,
						iplImage->imageData + r*iplImage->widthStep,
						orig_width*depth);
				}
			} else {
				for(int r=0; r<iplImage->height; r++) {
					// need to swap R<->B
					u8 * buf_out = (u8 *)(qImage.bits()) + r*orig_width*depth;
					u8 * buf_in = (u8 *)(iplImage->imageData) + r*iplImage->widthStep;
					memcpy(qImage.bits() + r*orig_width*depth,
						iplImage->imageData + r*iplImage->widthStep,
						orig_width*depth);
/*
					for(int pos4 = 0 ; pos4<orig_width*depth; pos4+=depth,
						buf_out+=4, buf_in+=depth
						 ) {
						buf_out[2] = buf_in[0];
						buf_out[0] = buf_in[2];
                                        }
*/

				}


			}
		}
		else if(rgb24_to_bgr32) {
			// RGB24 to BGR32
			u8 * buffer3 = (u8 *)iplImage->imageData;
			u8 * buffer4 = (u8 *)qImage.bits();
			int orig_width4 = 4 * orig_width;

			for(int r=0; r<iplImage->height; r++)
			{
				int pos3 = r * iplImage->widthStep;
				int pos4 = r * orig_width4;
				for(int c=0; c<orig_width; c++, pos3+=3, pos4+=4)
				{
					//buffer4[pos4 + 2] = buffer3[pos3];
					//buffer4[pos4 + 1] = buffer3[pos3+1];
					//buffer4[pos4    ] = buffer3[pos3+2];
					buffer4[pos4   ] = buffer3[pos3];
					buffer4[pos4 + 1] = buffer3[pos3+1];
					buffer4[pos4 + 2] = buffer3[pos3+2];
				}
			}
		} else if(gray_to_bgr32) {
			for(int r=0; r<iplImage->height; r++)
			{
				u32 * buffer4 = (u32 *)qImage.bits() + r*qImage.width();
				u8 * bufferY = (u8 *)(iplImage->imageData + r*iplImage->widthStep);
				for(int c=0; c<orig_width; c++) {
					buffer4[c] = grayToBGR32palette[ (int)bufferY[c] ];
				}
			}
		}
		}break;
	case IPL_DEPTH_16S: {
		if(!rgb24_to_bgr32) {

			u8 * buffer4 = (u8 *)qImage.bits();
			short valmax = 0;

			for(int r=0; r<iplImage->height; r++)
			{
				short * buffershort = (short *)(iplImage->imageData + r*iplImage->widthStep);
				for(int c=0; c<iplImage->width; c++)
					if(buffershort[c]>valmax)
						valmax = buffershort[c];
			}

			if(valmax>0)
				for(int r=0; r<iplImage->height; r++)
				{
					short * buffer3 = (short *)(iplImage->imageData
									+ r * iplImage->widthStep);
					int pos3 = 0;
					int pos4 = r * orig_width;
					for(int c=0; c<orig_width; c++, pos3++, pos4++)
					{
						int val = abs((int)buffer3[pos3]) * 255 / valmax;
						if(val > 255) val = 255;
						buffer4[pos4] = (u8)val;
					}
				}
		}
		else {
			u8 * buffer4 = (u8 *)qImage.bits();
			if(depth == 3) {

				for(int r=0; r<iplImage->height; r++)
				{
					short * buffer3 = (short *)(iplImage->imageData + r * iplImage->widthStep);
					int pos3 = 0;
					int pos4 = r * orig_width*4;
					for(int c=0; c<orig_width; c++, pos3+=3, pos4+=4)
					{
						buffer4[pos4   ] = buffer3[pos3];
						buffer4[pos4 + 1] = buffer3[pos3+1];
						buffer4[pos4 + 2] = buffer3[pos3+2];
					}
				}
			} else if(depth == 1) {
				short valmax = 0;
				short * buffershort = (short *)(iplImage->imageData);
				for(int pos=0; pos< iplImage->widthStep*iplImage->height; pos++)
					if(buffershort[pos]>valmax)
						valmax = buffershort[pos];

				if(valmax>0) {
					for(int r=0; r<iplImage->height; r++)
					{
						short * buffer3 = (short *)(iplImage->imageData
											+ r * iplImage->widthStep);
						int pos3 = 0;
						int pos4 = r * orig_width;
						for(int c=0; c<orig_width; c++, pos3++, pos4++)
						{
							int val = abs((int)buffer3[pos3]) * 255 / valmax;
							if(val > 255) val = 255;
							buffer4[pos4] = (u8)val;
						}
					}
				}
			}
		}
		}break;
	case IPL_DEPTH_16U: {
		if(!rgb24_to_bgr32) {

			unsigned short valmax = 0;

			for(int r=0; r<iplImage->height; r++)
			{
				unsigned short * buffershort = (unsigned short *)(iplImage->imageData + r*iplImage->widthStep);
				for(int c=0; c<iplImage->width; c++)
					if(buffershort[c]>valmax)
						valmax = buffershort[c];
			}

			if(valmax>0) {
				if(!gray_to_bgr32) {
					u8 * buffer4 = (u8 *)qImage.bits();
					for(int r=0; r<iplImage->height; r++)
					{
						unsigned short * buffer3 = (unsigned short *)(iplImage->imageData
										+ r * iplImage->widthStep);
						int pos3 = 0;
						int pos4 = r * orig_width;
						for(int c=0; c<orig_width; c++, pos3++, pos4++)
						{
							int val = abs((int)buffer3[pos3]) * 255 / valmax;
							if(val > 255) val = 255;
							buffer4[pos4] = (u8)val;
						}
					}
				} else {
					u32 * buffer4 = (u32 *)qImage.bits();
					for(int r=0; r<iplImage->height; r++)
					{
						unsigned short * buffer3 = (unsigned short *)(iplImage->imageData
										+ r * iplImage->widthStep);
						int pos3 = 0;
						int pos4 = r * orig_width;
						for(int c=0; c<orig_width; c++, pos3++, pos4++)
						{
							int val = abs((int)buffer3[pos3]) * 255 / valmax;
							if(val > 255) val = 255;
							buffer4[pos4] = grayToBGR32palette[ val ];
						}
					}
				}
			}
		}
		else {
			fprintf(stderr, "[TamanoirApp]::%s:%d : U16  depth = %d -> BGR32\n", __func__, __LINE__, iplImage->depth);
			u8 * buffer4 = (u8 *)qImage.bits();
			if(depth == 3) {

				for(int r=0; r<iplImage->height; r++)
				{
					short * buffer3 = (short *)(iplImage->imageData + r * iplImage->widthStep);
					int pos3 = 0;
					int pos4 = r * orig_width*4;
					for(int c=0; c<orig_width; c++, pos3+=3, pos4+=4)
					{
						buffer4[pos4   ] = buffer3[pos3]/256;
						buffer4[pos4 + 1] = buffer3[pos3+1]/256;
						buffer4[pos4 + 2] = buffer3[pos3+2]/256;
					}
				}
			} else if(depth == 1) {
				short valmax = 0;
				short * buffershort = (short *)(iplImage->imageData);
				for(int pos=0; pos< iplImage->widthStep*iplImage->height; pos++)
					if(buffershort[pos]>valmax)
						valmax = buffershort[pos];

				if(valmax>0)
					for(int r=0; r<iplImage->height; r++)
					{
						short * buffer3 = (short *)(iplImage->imageData
											+ r * iplImage->widthStep);
						int pos3 = 0;
						int pos4 = r * orig_width;
						for(int c=0; c<orig_width; c++, pos3++, pos4++)
						{
							int val = abs((int)buffer3[pos3]) * 255 / valmax;
							if(val > 255) val = 255;
							buffer4[pos4] = (u8)val;
						}
					}
			}
		}
		}break;
	}

#ifdef WIN32
	qImage.save("C:\\temp\\qImage.png"); /// \todo fixme : remove
	// Force alpha channel to be 255
	for(int r=0; r<iplImage->height; r++) {
		u8 * buf_out = (u8 *)(qImage.bits()) + r*qImage.depth()/8;
		for(int c4 = 0; c4 <= iplImage->widthStep; c4+=4)
		{
			buf_out[c4] = 255;
		}
	}
	qImage.save("C:\\temp\\qImage2.png"); /// \todo fixme : remove
#endif

	if(qImage.depth() == 8) {
#ifndef _QT5
		qImage.setNumColors(256);
#endif
		for(int c=0; c<256; c++) {
			/* False colors
			int R=c, G=c, B=c;
			if(c<128) B = (255-2*c); else B = 0;
			if(c>128) R = 2*(c-128); else R = 0;
			G = abs(128-c)*2;

			qImage.setColor(c, qRgb(R,G,B));
			*/
			qImage.setColor(c, qRgb(c,c,c));
		}
	}
	return qImage;
}
Example #12
//----------------------------------------------------------------------------
int ExtractRidges::Main (int, char**)
{
    std::string imageName = Environment::GetPathR("Head.im");
    ImageDouble2D image(imageName.c_str());

    // Normalize the image values to be in [0,1].
    int quantity = image.GetQuantity();
    double minValue = image[0], maxValue = minValue;
    int i;
    for (i = 1; i < quantity; ++i)
    {
        if (image[i] < minValue)
        {
            minValue = image[i];
        }
        else if (image[i] > maxValue)
        {
            maxValue = image[i];
        }
    }
    double invRange = 1.0/(maxValue - minValue);
    for (i = 0; i < quantity; ++i)
    {
        image[i] = (image[i] - minValue)*invRange;
    }

    // Use first-order centered finite differences to estimate the image
    // derivatives.  The gradient is DF = (df/dx, df/dy) and the Hessian
    // is D^2F = {{d^2f/dx^2, d^2f/dxdy}, {d^2f/dydx, d^2f/dy^2}}.
    int xBound = image.GetBound(0);
    int yBound = image.GetBound(1);
    int xBoundM1 = xBound - 1;
    int yBoundM1 = yBound - 1;
    ImageDouble2D dx(xBound, yBound);
    ImageDouble2D dy(xBound, yBound);
    ImageDouble2D dxx(xBound, yBound);
    ImageDouble2D dxy(xBound, yBound);
    ImageDouble2D dyy(xBound, yBound);
    int x, y;
    for (y = 1; y < yBoundM1; ++y)
    {
        for (x = 1; x < xBoundM1; ++x)
        {
            dx(x, y) = 0.5*(image(x+1, y) - image(x-1, y));
            dy(x, y) = 0.5*(image(x, y+1) - image(x, y-1));
            dxx(x, y) = image(x+1, y) - 2.0*image(x, y) + image(x-1, y);
            dxy(x, y) = 0.25*(image(x+1, y+1) + image(x-1, y-1)
                - image(x+1, y-1) - image(x-1, y+1));
            dyy(x, y) = image(x, y+1) - 2.0*image(x, y) + image(x, y-1);
        }
    }
    dx.Save("dx.im");
    dy.Save("dy.im");
    dxx.Save("dxx.im");
    dxy.Save("dxy.im");
    dyy.Save("dyy.im");

    // The eigensolver produces eigenvalues a and b and corresponding
    // eigenvectors U and V:  D^2F*U = a*U, D^2F*V = b*V.  Define
    // P = Dot(U,DF) and Q = Dot(V,DF).  The classification is as follows.
    //   ridge:   P = 0 with a < 0
    //   valley:  Q = 0 with b > 0
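    // Why P = 0 identifies a ridge: U and V are eigenvectors of the Hessian,
    // i.e. the principal-curvature directions, and P = Dot(U,DF) is the
    // directional derivative of F along U.  P = 0 means F is stationary across
    // that direction, and a < 0 means F is concave there, which is a ridge
    // crest.  Symmetrically, Q = 0 with b > 0 (convex) marks a valley bottom.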
    ImageDouble2D aImage(xBound, yBound);
    ImageDouble2D bImage(xBound, yBound);
    ImageDouble2D pImage(xBound, yBound);
    ImageDouble2D qImage(xBound, yBound);
    for (y = 1; y < yBoundM1; ++y)
    {
        for (x = 1; x < xBoundM1; ++x)
        {
            Vector2d gradient(dx(x, y), dy(x, y));
            Matrix2d hessian(dxx(x, y), dxy(x, y), dxy(x, y), dyy(x, y));
            EigenDecompositiond decomposer(hessian);
            decomposer.Solve(true);
            aImage(x,y) = decomposer.GetEigenvalue(0);
            bImage(x,y) = decomposer.GetEigenvalue(1);
            Vector2d u = decomposer.GetEigenvector2(0);
            Vector2d v = decomposer.GetEigenvector2(1);
            pImage(x,y) = u.Dot(gradient);
            qImage(x,y) = v.Dot(gradient);
        }
    }
    aImage.Save("a.im");
    bImage.Save("b.im");
    pImage.Save("p.im");
    qImage.Save("q.im");

    // Use a cheap classification of the pixels by testing for sign changes
    // between neighboring pixels.
    ImageRGB82D result(xBound, yBound);
    for (y = 1; y < yBoundM1; ++y)
    {
        for (x = 1; x < xBoundM1; ++x)
        {
            unsigned char gray = (unsigned char)(255.0f*image(x, y));

            double pValue = pImage(x, y);
            bool isRidge = false;
            if (pValue*pImage(x-1 ,y) < 0.0
            ||  pValue*pImage(x+1, y) < 0.0
            ||  pValue*pImage(x, y-1) < 0.0
            ||  pValue*pImage(x, y+1) < 0.0)
            {
                if (aImage(x, y) < 0.0)
                {
                    isRidge = true;
                }
            }

            double qValue = qImage(x,y);
            bool isValley = false;
            if (qValue*qImage(x-1, y) < 0.0
            ||  qValue*qImage(x+1, y) < 0.0
            ||  qValue*qImage(x, y-1) < 0.0
            ||  qValue*qImage(x, y+1) < 0.0)
            {
                if (bImage(x,y) > 0.0)
                {
                    isValley = true;
                }
            }

            if (isRidge)
            {
                if (isValley)
                {
                    result(x, y) = GetColor24(gray, 0, gray);
                }
                else
                {
                    result(x, y) = GetColor24(gray, 0, 0);
                }
            }
            else if (isValley)
            {
                result(x, y) = GetColor24(0, 0, gray);
            }
            else
            {
                result(x, y) = GetColor24(gray, gray, gray);
            }
        }
    }
    result.Save("result.im");

    return 0;
}