Code Example #1
File: ShaderFactory.cpp  Project: arucgab/osgearth-1
osg::Shader*
ShaderFactory::createColorFilterChainFragmentShader( const std::string& function, const ColorFilterChain& chain ) const
{
    std::stringstream buf;
    buf << "#version " << GLSL_VERSION_STR << "\n"
        << PRECISION_MEDIUMP_FLOAT << "\n";

    // write out the shader function prototypes:
    for( ColorFilterChain::const_iterator i = chain.begin(); i != chain.end(); ++i )
    {
        ColorFilter* filter = i->get();
        buf << "void " << filter->getEntryPointFunctionName() << "(in int slot, inout vec4 color);\n";
    }

    // write out the main function:
    buf << "void " << function << "(in int slot, inout vec4 color) \n"
        << "{ \n";

    // write out the function calls. if there are none, it's a NOP.
    for( ColorFilterChain::const_iterator i = chain.begin(); i != chain.end(); ++i )
    {
        ColorFilter* filter = i->get();
        buf << "    " << filter->getEntryPointFunctionName() << "(slot, color);\n";
    }
        
    buf << "} \n";

    std::string bufstr;
    bufstr = buf.str();
    return new osg::Shader(osg::Shader::FRAGMENT, bufstr);
}
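
For reference, with a hypothetical chain of two filters whose entry point functions are named filterA and filterB, and with function set to "oe_color_filters" (all three names are illustrative, not taken from the project), the code above emits fragment shader source along these lines, where GLSL_VERSION_STR and PRECISION_MEDIUMP_FLOAT are constants defined elsewhere in osgEarth:

#version <GLSL_VERSION_STR>
<PRECISION_MEDIUMP_FLOAT>
void filterA(in int slot, inout vec4 color);
void filterB(in int slot, inout vec4 color);
void oe_color_filters(in int slot, inout vec4 color) 
{ 
    filterA(slot, color);
    filterB(slot, color);
} 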
Code Example #2
void GridClassifier::populateHistogramBins (const QImage &image,
                                            const Transformation &transformation,
                                            double xMin,
                                            double xMax,
                                            double yMin,
                                            double yMax)
{
  LOG4CPP_INFO_S ((*mainCat)) << "GridClassifier::populateHistogramBins";

  ColorFilter filter;
  QRgb rgbBackground = filter.marginColor (&image);

  for (int x = 0; x < image.width(); x++) {
    for (int y = 0; y < image.height(); y++) {

      QColor pixel = image.pixel (x, y);

      // Skip pixels with background color
      if (!filter.colorCompare (rgbBackground,
                                pixel.rgb ())) {

        // Add this pixel to histograms
        QPointF posGraph;
        transformation.transformScreenToRawGraph (QPointF (x, y), posGraph);

        if (transformation.modelCoords().coordsType() == COORDS_TYPE_POLAR) {

          // If outside the 0 to period range, the theta value must be shifted by the period to get into that range
          while (posGraph.x() < xMin) {
            posGraph.setX (posGraph.x() + transformation.modelCoords().thetaPeriod());
          }
          while (posGraph.x() > xMax) {
            posGraph.setX (posGraph.x() - transformation.modelCoords().thetaPeriod());
          }
        }

        int binX = binFromCoordinate (posGraph.x(), xMin, xMax);
        int binY = binFromCoordinate (posGraph.y(), yMin, yMax);

        ENGAUGE_ASSERT (0 <= binX);
        ENGAUGE_ASSERT (0 <= binY);
        ENGAUGE_ASSERT (binX < m_numHistogramBins);
        ENGAUGE_ASSERT (binY < m_numHistogramBins);

        // Roundoff error in log scaling may let bin go just outside legal range
        binX = qMin (binX, m_numHistogramBins - 1);
        binY = qMin (binY, m_numHistogramBins - 1);

        ++m_binsX [binX];
        ++m_binsY [binY];
      }
    }
  }
}
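
binFromCoordinate is not part of this example. For a linear axis the mapping presumably reduces to something like the following sketch (a hypothetical helper, not the project's implementation; the real function also handles log scaling, which is where the roundoff mentioned in the comment above comes from):

// Hedged sketch only: map a coordinate in [coordinateMin, coordinateMax] to a
// histogram bin index in [0, numBins - 1]. The actual GridClassifier code may
// differ, particularly for log-scaled axes.
int binFromCoordinateLinearSketch (double coordinate,
                                   double coordinateMin,
                                   double coordinateMax,
                                   int numBins)
{
  double fraction = (coordinate - coordinateMin) / (coordinateMax - coordinateMin);
  int bin = (int) (fraction * numBins);
  return qBound (0, bin, numBins - 1); // clamp, mirroring the qMin guard above
}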
Code Example #3
QRgb DlgSettingsColorFilter::createThread ()
{
  LOG4CPP_INFO_S ((*mainCat)) << "DlgSettingsColorFilter::createThread";

  // Get background color
  QImage image = cmdMediator().document().pixmap().toImage();
  ColorFilter filter;
  QRgb rgbBackground = filter.marginColor(&image);

  // Only create thread once
  if (m_filterThread == 0) {

    m_filterThread = new DlgFilterThread (cmdMediator().document().pixmap(),
                                          rgbBackground,
                                          *this);
    m_filterThread->start(); // Now that thread is started, we can use signalApplyFilter
  }

  return rgbBackground;
}
Code Example #4
File: SegmentFactory.cpp  Project: keszybz/engauge6
void SegmentFactory::loadBool (const ColorFilter &filter,
                               bool *columnBool,
                               const QImage &image,
                               int x)
{
  for (int y = 0; y < image.height(); y++) {
    if (x < 0) {
      columnBool [y] = false;
    } else {
      columnBool [y] = filter.pixelFilteredIsOn (image, x, y);
    }
  }
}
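
The caller is expected to supply one flag per image row. A minimal hedged usage sketch (the segmentFactory, filter, image and x variables are assumed to be in scope; none of this is taken from the project):

// Hedged usage sketch: one bool per row, filled for column x. A negative x
// yields an all-false column, matching the guard in loadBool above.
QVector<bool> columnBool (image.height());
segmentFactory.loadBool (filter, columnBool.data(), image, x);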
Code Example #5
bool DigitizeStatePointMatch::pixelIsOnInImage (const QImage &img,
                                                int x,
                                                int y,
                                                int radiusLimit) const
{
  ColorFilter filter;

  // Examine all nearby pixels
  bool pixelShouldBeOn = false;
  for (int xOffset = -radiusLimit; xOffset <= radiusLimit; xOffset++) {
    for (int yOffset = -radiusLimit; yOffset <= radiusLimit; yOffset++) {

      int radius = qFloor (qSqrt (xOffset * xOffset + yOffset * yOffset));

      if (radius <= radiusLimit) {

        int xNearby = x + xOffset;
        int yNearby = y + yOffset;

        if ((0 <= xNearby) &&
            (0 <= yNearby) &&
            (xNearby < img.width()) &&
            (yNearby < img.height())) {

          if (filter.pixelFilteredIsOn (img,
                                        xNearby,
                                        yNearby)) {

            pixelShouldBeOn = true;
            break;
          }
        }
      }
    }
  }

  return pixelShouldBeOn;
}
Code Example #6
QList<PointMatchPixel> DigitizeStatePointMatch::extractSamplePointPixels (const QImage &img,
                                                                          const DocumentModelPointMatch &modelPointMatch,
                                                                          const QPointF &posScreen) const
{
  LOG4CPP_INFO_S ((*mainCat)) << "DigitizeStatePointMatch::extractSamplePointPixels";

  // All points inside modelPointMatch.maxPointSize() are collected, whether they are on
  // or off. Originally only the on points were collected, but obvious mismatches were
  // happening (for example, a 3x3 point would appear to be found in several places
  // inside an 8x32 rectangle)
  QList<PointMatchPixel> samplePointPixels;

  int radiusMax = qFloor (modelPointMatch.maxPointSize() / 2);

  ColorFilter colorFilter;
  for (int xOffset = -radiusMax; xOffset <= radiusMax; xOffset++) {
    for (int yOffset = -radiusMax; yOffset <= radiusMax; yOffset++) {

      int x = qFloor (posScreen.x() + xOffset);
      int y = qFloor (posScreen.y() + yOffset);
      int radius = qFloor (qSqrt (xOffset * xOffset + yOffset * yOffset));

      if (radius <= radiusMax) {

        bool pixelIsOn = colorFilter.pixelFilteredIsOn (img,
                                                        x,
                                                        y);

        PointMatchPixel point (xOffset,
                               yOffset,
                               pixelIsOn);

        samplePointPixels.push_back (point);
      }
    }
  }

  return samplePointPixels;
}
Code Example #7
bool HedgeDetector::processColor(Image* input, Image* output,
                                 ColorFilter& filter,
                                 BlobDetector::Blob& leftBlob,
                                 BlobDetector::Blob& rightBlob,
                                 BlobDetector::Blob& outBlob)
{
    output->copyFrom(input);
    output->setPixelFormat(Image::PF_RGB_8);
    output->setPixelFormat(Image::PF_LCHUV_8);

    filter.filterImage(output);

    // Erode and dilate the image (only if necessary)
    if (m_erodeIterations > 0) {
        IplImage* img = output->asIplImage();
        cvErode(img, img, NULL, m_erodeIterations);
    }

    if (m_dilateIterations > 0) {
        IplImage* img = output->asIplImage();
        cvDilate(img, img, NULL, m_dilateIterations);
    }

    OpenCVImage debug(output->getWidth(), output->getHeight(),
                              Image::PF_BGR_8);
    m_blobDetector.processImage(output, &debug);
    //Image::showImage(&debug);
    BlobDetector::BlobList blobs = m_blobDetector.getBlobs();

    BOOST_FOREACH(BlobDetector::Blob blob, blobs)
    {
        // Sanity check blob
        double percent = (double) blob.getSize() /
            (blob.getHeight() * blob.getWidth());
        if (1.0/blob.getTrueAspectRatio() <= m_maxAspectRatio &&
            1.0/blob.getTrueAspectRatio() >= m_minAspectRatio &&
            m_minWidth < blob.getWidth() &&
            m_minHeight < blob.getHeight() &&
            percent > m_minPixelPercentage &&
            percent < m_maxPixelPercentage)
        {
            processSides(output, blob, leftBlob, rightBlob);
            outBlob = blob;
            return true;
        }
    }

    // (The example is truncated here in the source listing; presumably the
    // function returns false when no blob passes the sanity checks.)
    return false;
}
Code Example #8
void DlgFilterWorker::slotRestartTimeout ()
{
  if (m_inputCommandQueue.count() > 0) {

    DlgFilterCommand command = m_inputCommandQueue.last();
    m_inputCommandQueue.clear ();

    // Start over from the left side
    m_colorFilterMode = command.colorFilterMode();
    m_low = command.low0To1();
    m_high = command.high0To1();

    m_xLeft = 0;

    // Start timer to process first piece
    m_restartTimer.start (NO_DELAY);

  } else if (m_xLeft < m_imageOriginal.width ()) {

    // Try to process a new piece, starting at m_xLeft
    int xStop = m_xLeft + COLUMNS_PER_PIECE;
    if (xStop >= m_imageOriginal.width()) {
      xStop = m_imageOriginal.width();
    }

    // From here on, if a new command gets pushed onto the queue then we immediately stop processing
    // and do nothing except start the timer so we can start over after the next timeout. The goal is
    // to not tie up the gui by emitting signalTransferPiece unnecessarily.
    //
    // This code is basically a heavily customized version of ColorFilter::filterImage
    ColorFilter filter;
    int processedWidth = xStop - m_xLeft;
    QImage imageProcessed (processedWidth,
                           m_imageOriginal.height(),
                           QImage::Format_RGB32);
    for (int xFrom = m_xLeft, xTo = 0; (xFrom < xStop) && (m_inputCommandQueue.count() == 0); xFrom++, xTo++) {
      for (int y = 0; (y < m_imageOriginal.height ()) && (m_inputCommandQueue.count() == 0); y++) {
        QColor pixel = m_imageOriginal.pixel (xFrom, y);
        bool isOn = false;
        if (pixel.rgb() != m_rgbBackground) {

          isOn = filter.pixelUnfilteredIsOn (m_colorFilterMode,
                                             pixel,
                                             m_rgbBackground,
                                             m_low,
                                             m_high);
        }

        imageProcessed.setPixel (xTo, y, (isOn ?
                                          QColor (Qt::black).rgb () :
                                          QColor (Qt::white).rgb ()));
      }
    }

    if (m_inputCommandQueue.count() == 0) {
      emit signalTransferPiece (m_xLeft,
                                imageProcessed);
      m_xLeft += processedWidth;
    }

    if ((xStop < m_imageOriginal.width()) ||
        (m_inputCommandQueue.count () > 0)) {

      // Restart timer to process next piece
      m_restartTimer.start (NO_DELAY);
    }
  }
}
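
Stripped of the command queue and timer machinery, the strip boundaries in the branch above advance as in this minimal sketch (the COLUMNS_PER_PIECE value shown is illustrative; the real constant is defined elsewhere in the worker):

// Minimal sketch of how slotRestartTimeout walks the image in vertical strips.
// The real code processes one strip per timer tick so the GUI stays responsive.
const int COLUMNS_PER_PIECE = 4; // illustrative value only
for (int xLeft = 0; xLeft < imageWidth; ) {
  int xStop = qMin (xLeft + COLUMNS_PER_PIECE, imageWidth);
  // ... filter columns xLeft through xStop - 1, then emit the piece ...
  xLeft = xStop;
}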
Code Example #9
bool WindowDetector::processColor(Image* input, Image* output,
                                  ColorFilter& filter,
                                  BlobDetector::Blob& outerBlob,
                                  BlobDetector::Blob& innerBlob)
{
    tempFrame->copyFrom(input);
    tempFrame->setPixelFormat(Image::PF_RGB_8);
    tempFrame->setPixelFormat(Image::PF_LCHUV_8);

    filter.filterImage(tempFrame, output);

    // Erode the image (only if necessary)
    IplImage* img = output->asIplImage();
    if (m_erodeIterations > 0) {
        cvErode(img, img, NULL, m_erodeIterations);
    }

    // Dilate the image (only if necessary)
    if (m_dilateIterations > 0) {
        cvDilate(img, img, NULL, m_dilateIterations);
    }

    m_blobDetector.processImage(output);
    BlobDetector::BlobList blobs = m_blobDetector.getBlobs();

    BOOST_FOREACH(BlobDetector::Blob blob, blobs)
    {
        // Sanity check blob
        double pixelPercentage = blob.getSize() /
            (double) (blob.getHeight() * blob.getWidth());


        double aspect = blob.getTrueAspectRatio();
        if (aspect <= m_maxAspectRatio &&
            aspect >= m_minAspectRatio &&
            m_minHeight <= blob.getHeight() &&
            m_minWidth <= blob.getWidth() &&
            m_minPixelPercentage <= pixelPercentage &&
            m_maxPixelPercentage >= pixelPercentage &&
            processBackground(tempFrame, filter, blob, innerBlob))
        {
            int outerCenterX = (blob.getMaxX() - blob.getMinX()) / 2 + blob.getMinX();
            int innerCenterX = (innerBlob.getMaxX() - innerBlob.getMinX()) /
                2 + innerBlob.getMinX();

            int outerCenterY = (blob.getMaxY() - blob.getMinY()) / 2 + blob.getMinY();
            int innerCenterY = (innerBlob.getMaxY() - innerBlob.getMinY()) /
                2 + innerBlob.getMinY();

            int centerXdiff = abs(outerCenterX - innerCenterX);
            int centerYdiff = abs(outerCenterY - innerCenterY);

            double relHeight = (double) innerBlob.getHeight() / blob.getHeight();
            double relWidth = (double) innerBlob.getWidth() / blob.getWidth();

            if(centerXdiff < m_centerXDisagreement &&
               centerYdiff < m_centerYDisagreement &&
               relHeight > m_minRelInnerHeight &&
               relWidth > m_minRelInnerWidth) {

                outerBlob = blob;
                return true;

            }
        }
    }