Exemple #1
0
// Compose the output image from all registered sources, then convert it
// through a pixel filter and store it in this image object.
// texId/ts are part of the common calc interface and are unused here.
void ImageMix::calcImage(unsigned int texId, double ts)
{
	// all sources must have matching sizes, otherwise mixing is impossible
	if (!checkSourceSizes()) THRWEXCP(ImageSizesNotMatch, S_OK);
	// set offsets to image buffers
	for (ImageSourceList::iterator it = m_sources.begin(); it != m_sources.end(); ++it)
		// if image buffer is available
		if ((*it)->getImageBuf() != NULL)
			// set its offset relative to the first source's buffer
			getImageSourceMix(*it)->setOffset(m_sources[0]->getImageBuf());
		// otherwise don't calculate image; a source isn't ready yet
		else 
			return;
	// if there is only single source
	if (m_sources.size() == 1)
	{
		// use plain pass-through filter, no mixing needed
		FilterBase mixFilt;
		// filter and convert image
		filterImage(mixFilt, m_sources[0]->getImageBuf(), m_sources[0]->getSize());
	}
	// otherwise use mix filter to merge source images
	else
	{
		FilterImageMix mixFilt (m_sources);
		// filter and convert image
		filterImage(mixFilt, m_sources[0]->getImageBuf(), m_sources[0]->getSize());
	}
}
Exemple #2
0
// Apply filterImage() to every regular file in directory `path` and write
// each result under PATH with a generated "Task5-<i>" name.
// Returns true only when the directory could be read and every imwrite
// succeeded; false otherwise.
bool task5(const char* path) {
    struct dirent *entry;
    DIR *dp;
    bool res = true;
    int i = 1;

    dp = opendir(path);
    if (dp == NULL) {
        std::cerr << "opendir: Path does not exist or could not be read." << std::endl;
        // BUG FIX: this function returns bool; `return -1` converted to
        // `true`, silently reporting success on failure.
        return false;
    }

    while ((entry = readdir(dp))) {
        struct stat s;
        std::string filename(path);
        // Ensure a path separator between the directory and the entry name,
        // so callers need not pass a trailing '/'.
        if (!filename.empty() && filename[filename.size() - 1] != '/')
            filename += '/';
        filename += entry->d_name;
        if (stat(filename.c_str(), &s) == 0) {
            // BUG FIX: S_IFREG is not a single-bit flag, so `st_mode & S_IFREG`
            // also matched block devices and sockets. Use the POSIX macro.
            if (S_ISREG(s.st_mode)) {
                cv::Mat image, filtered;
                image = cv::imread(filename.c_str(), 1);
                filterImage(image, filtered);
                res &= cv::imwrite(PATH + generateName("Task5-", i++), filtered);
            }
        }
    }

    closedir(dp);
    // BUG FIX: propagate the accumulated imwrite results instead of
    // unconditionally returning true.
    return res;
}
// moc-style meta-call dispatcher: routes InvokeMetaMethod calls carrying a
// class-local method index to the corresponding MainWindow slot.
// Returns the remaining (unconsumed) id, or a negative value when the call
// was handled entirely by a base class.
int MainWindow::qt_metacall(QMetaObject::Call _c, int _id, void **_a)
{
    // give the base class first chance; it rebases _id to this class's range
    _id = QMainWindow::qt_metacall(_c, _id, _a);
    if (_id < 0)
        return _id;
    if (_c == QMetaObject::InvokeMetaMethod) {
        switch (_id) {
        case 0: updateAspectRatio(); break;
        case 1: settingsChanged(); break;
        case 2: fileCopy3Dto2D(); break;
        case 3: fileNew(); break;
        case 4: fileOpen(); break;
        case 5: fileSave(); break;
        case 6: checkAllRayFeatures(); break;
        case 7: uncheckAllRayFeatures(); break;
        case 8: filterImage(); break;
        case 9: renderImage(); break;
        case 10: activateCanvas2D(); break;
        case 11: activateCanvas3D(); break;
        default: ;
        }
        // 12 methods handled locally; rebase the id for any derived class
        _id -= 12;
    }
    return _id;
}
// Build the image-tool UI: register the custom mirror filter, populate the
// filter combo box with every registered image filter, wire up the buttons,
// and load the default image.
ImageToolWidget::ImageToolWidget(QWidget *parent) : QWidget(parent)
{
    ui.setupUi(this);

    // Register our mirror filter.
    qtRegisterImageFilter<MirrorFilter>(QLatin1String("MirrorFilter"));

    // Iterate through all registered filters and append them to the vector
    QStringList filters = QtImageFilterFactory::imageFilterList();
    for (int i = 0; i < filters.count(); ++i) {
        m_imageFilters += QtImageFilterFactory::createImageFilter(filters[i]);
    }

    for (int i = 0; i < m_imageFilters.count(); ++i) {
        ui.FiltersCombo->addItem(m_imageFilters[i]->name());
    }

    ui.FiltersCombo->insertItem(0, QLatin1String("--Choose filter--"));

    // Null placeholder keeps combo index i aligned with m_imageFilters[i];
    // index 0 ("--Choose filter--") deliberately has no filter behind it.
    m_imageFilters.prepend((QtImageFilter*)0);

    QObject::connect(ui.LoadButton, SIGNAL(clicked()), this, SLOT(loadImage()));
    QObject::connect(ui.ReloadButton, SIGNAL(clicked()), this, SLOT(reloadImage()));
    QObject::connect(ui.FilterButton, SIGNAL(clicked()), this, SLOT(filterImage()));
    QObject::connect(ui.FiltersCombo, SIGNAL(currentIndexChanged(int)), this, SLOT(filterIndexChanged(int)));

    ui.FiltersCombo->setCurrentIndex(0);
    m_currentFilename = QLatin1String("images/qtlogo.png");
    reloadImage();
}
Exemple #5
0
// Construct a white-balance filter and run it synchronously: the threaded
// filter base is initialized first, then filterImage() does the actual work.
WBFilter::WBFilter(const WBContainer& settings, DImgThreadedFilter* master,
                   const DImg& orgImage, const DImg& destImage, int progressBegin, int progressEnd)
    : DImgThreadedFilter(master, orgImage, destImage, progressBegin, progressEnd, "WBFilter"),
      d(new WBFilterPriv)
{
    // keep a copy of the settings, then compute immediately
    m_settings = settings;
    filterImage();
}
Exemple #6
0
// Capture the current viewport into this image/texture.
// Fast path: when a texture id is given, no python filter is installed,
// the capture size is already a valid texture size and no flip is needed,
// the viewport is copied straight into the texture on the GPU.
// Slow path: pixels are read back with glReadPixels and run through a
// format-conversion filter (RGBA or RGB) into the image buffer.
void ImageViewport::calcImage (unsigned int texId)
{
	// if scale was changed
	if (m_scaleChange)
		// reset image
		init(m_capSize[0], m_capSize[1]);
	// if texture wasn't initialized
	if (!m_texInit)
	{
		// initialize it
		loadTexture(texId, m_image, m_size);
		m_texInit = true;
	}
	// if texture can be directly created
	if (texId != 0 && m_pyfilter == NULL && m_capSize[0] == calcSize(m_capSize[0])
		&& m_capSize[1] == calcSize(m_capSize[1]) && !m_flip)
	{
		// just copy current viewport to texture
	    glBindTexture(GL_TEXTURE_2D, texId);
	    glCopyTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, m_upLeft[0], m_upLeft[1], (GLsizei)m_capSize[0], (GLsizei)m_capSize[1]);
	    // image is not available: it was never copied into the CPU buffer
	    m_avail = false;
	}
	// otherwise copy viewport to buffer, if image is not available
	else if (!m_avail)
	{
		// get frame buffer data
        if (m_alpha)
        {
    		glReadPixels(m_upLeft[0], m_upLeft[1], (GLsizei)m_capSize[0], (GLsizei)m_capSize[1], GL_RGBA,
			    GL_UNSIGNED_BYTE, m_viewportImage);
		    // filter loaded data
		    FilterRGBA32 filt;
		    filterImage(filt, m_viewportImage, m_capSize);
        } 
        else
        {
    		glReadPixels(m_upLeft[0], m_upLeft[1], (GLsizei)m_capSize[0], (GLsizei)m_capSize[1], GL_RGB,
	    		GL_UNSIGNED_BYTE, m_viewportImage);
    		// filter loaded data
		    FilterRGB24 filt;
		    filterImage(filt, m_viewportImage, m_capSize);
        }
	}
}
Exemple #7
0
// Construct a brightness/contrast/gamma filter and run it synchronously.
BCGFilter::BCGFilter(const BCGContainer& settings, DImgThreadedFilter* const master,
                     const DImg& orgImage, const DImg& destImage, int progressBegin, int progressEnd)
    // BUG FIX: the filter was registered under the copy-pasted name
    // "WBFilter"; use this class's own name so progress reporting and
    // filter identification refer to the right filter.
    : DImgThreadedFilter(master, orgImage, destImage, progressBegin, progressEnd, "BCGFilter"),
      d(new Private)
{
    // store the settings, reset internal state, then compute immediately
    d->settings = settings;
    reset();
    filterImage();
}
// Construct a raw-decoding post-processing filter and run it synchronously.
// `name` is forwarded to the threaded-filter base for identification.
RawProcessingFilter::RawProcessingFilter(const DRawDecoding& settings,
                                         DImgThreadedFilter* master, const DImg& orgImage, const DImg& destImage,
                                         int progressBegin, int progressEnd, const QString& name)
    : DImgThreadedFilter(master, orgImage, destImage, progressBegin, progressEnd, name),
      m_observer(0)
{
    // apply the settings, then compute immediately
    setSettings(settings);
    filterImage();
}
void
CSVGFeDiffuseLighting::
draw()
{
  CImagePtr src_image = svg_.getBufferImage(filter_in_);

  CImagePtr dst_image = filterImage(src_image);

  svg_.setBufferImage(filter_out_, dst_image);
}
// Convert one raw video frame into the image buffer.
// `sample` holds the frame data in the layout described by m_format;
// the matching pixel filter performs the conversion inside filterImage().
void VideoBase::process (BYTE *sample)
{
	// if scale was changed
	if (m_scaleChange)
		// reset image
		init(m_orgSize[0], m_orgSize[1]);
	// if image is allocated and is able to store new image
	if (m_image != NULL && !m_avail)
	{
		// filters used
		// convert video format to image
		switch (m_format)
		{
		case RGBA32:
			{
				FilterRGBA32 filtRGBA;
				// use filter object for format to convert image
				filterImage(filtRGBA, sample, m_orgSize);
				// finish
				break;
			}
		case RGB24:
			{
				FilterRGB24 filtRGB;
				// use filter object for format to convert image
				filterImage(filtRGB, sample, m_orgSize);
				// finish
				break;
			}
		case YV12:
			{
				// use filter object for format to convert image
				FilterYV12 filtYUV;
				// planar YUV needs its plane pointers set before filtering
				filtYUV.setBuffs(sample, m_orgSize);
				filterImage(filtYUV, sample, m_orgSize);
				// finish
				break;
			}
		case None:
			break; /* assert? */
		}
	}
}
void
CSVGFeComponentTransfer::
draw()
{
  CImagePtr src_image = svg_.getBufferImage(filter_in_);

  CImagePtr dst_image = filterImage(src_image);

  svg_.setBufferImage(filter_out_, dst_image);
}
Exemple #12
0
void
CSVGFeTile::
draw()
{
  CImagePtr src_image = svg_.getBufferImage(filter_in_);

  CImagePtr dst_image = filterImage(src_image);

  svg_.setBufferImage(filter_out_, dst_image);
}
Exemple #13
0
// Build a film-grain filter as a child of parentFilter and run it at once.
// NOTE(review): parentFilter is dereferenced unconditionally to build the
// progress name, so a null parent would crash here — confirm callers
// always pass a valid parent filter.
FilmGrainFilter::FilmGrainFilter(DImgThreadedFilter* const parentFilter,
                                 const DImg& orgImage, const DImg& destImage,
                                 int progressBegin, int progressEnd,
                                 const FilmGrainContainer& settings)
    : DImgThreadedFilter(parentFilter, orgImage, destImage, progressBegin, progressEnd,
                         parentFilter->filterName() + QLatin1String(": FilmGrain")),
    d(new Private)
{
    // keep a copy of the settings, then compute immediately
    d->settings = settings;
    filterImage();
}
Exemple #14
0
// Compose a template image over a selected region of the original image.
// Inputs are copied into members and the composition runs immediately in
// the constructor via filterImage().
SuperImpose::SuperImpose(DImg* orgImage, DImg* templ,
                         QRect orgImageSelection,
                         DColorComposer::CompositingOperation compositeRule)
{
    // copy inputs into members before computing
    m_orgImage      = *orgImage;
    m_template      = *templ;
    m_selection     = orgImageSelection;
    m_compositeRule = compositeRule;

    filterImage();
}
Exemple #15
0
// Return a newly allocated, filtered copy of `image`, or NULL when `image`
// is NULL. The caller owns (and must delete) the returned cv::Mat.
cv::Mat *filterImage(cv::Mat *image){

  if (!image) return NULL;

  // Deep copy of the original; the raw-buffer overload below writes the
  // filtered pixels into this copy in place.
  cv::Mat *filtered = new cv::Mat(image->clone());

  // cv::Mat::data is already `uchar*`, so the C-style casts were redundant;
  // the step narrowing is now explicit instead of implicit.
  unsigned char *fil = filtered->data;
  unsigned char *im  = image->data;
  int im_step = static_cast<int>(image->step);
  int im_cols = image->cols;
  int im_rows = image->rows;

  filterImage(fil, im, im_step, im_cols, im_rows);
  return filtered;
}
// moc-style meta-call dispatcher: routes InvokeMetaMethod calls carrying a
// class-local method index to the corresponding Canvas2D slot.
// Returns the remaining (unconsumed) id, or a negative value when the call
// was handled entirely by a base class.
int Canvas2D::qt_metacall(QMetaObject::Call _c, int _id, void **_a)
{
    // give the base class first chance; it rebases _id to this class's range
    _id = SupportCanvas2D::qt_metacall(_c, _id, _a);
    if (_id < 0)
        return _id;
    if (_c == QMetaObject::InvokeMetaMethod) {
        switch (_id) {
        case 0: cancelRender(); break;
        case 1: filterImage(); break;
        default: ;
        }
        // 2 methods handled locally; rebase the id for any derived class
        _id -= 2;
    }
    return _id;
}
Exemple #17
0
void
CSVGFeImage::
draw()
{
  CImagePtr src_image;

  if      (xlink_.isImage())
    src_image = xlink_.getImage();
  else if (xlink_.isObject())
    src_image = xlink_.getObject()->toImage();

  CImagePtr dst_image = filterImage(src_image);

  svg_.setBufferImage(filter_out_, dst_image);
}
Exemple #18
0
// Locate a QR code in the current bitmap and extract its module matrix.
// Pipeline: binarize (filterImage) -> finder patterns -> alignment
// patterns -> sampling grid -> matrix read-out (getQRCodeMatrix).
// Returns 0 on success, -1 when filtering or finder detection fails.
int QRcodeImage::GetImageData()
{
	// fixed-point precision constant used by the pattern finders
	// (presumably the number of fractional bits — TODO confirm)
	DECIMAL_POINT=21;
	int rt = filterImage();
	if(rt < 0)
	{
		printf("filter image error.\n");
		return -1;
	}
	
	rt = finderPattern->findFinderPattern(bitmap,nWidth,nHeight,DECIMAL_POINT);
	if(rt < 0)
	{
		printf("find pattern error.\n");
		return -1;
	}

	// alignment pattern search; its return value is not checked here
	alignmentPattern->findAlignmentPattern(bitmap,nWidth,nHeight,finderPattern,DECIMAL_POINT);


	//Creating sampling grid
	//samplingGrid = getSamplingGrid2_6(finderPattern, alignmentPattern);
	samplingGrid = getSamplingGrid(finderPattern, alignmentPattern);
	
	//Reading grid
	getQRCodeMatrix(samplingGrid);

	return 0;
/*	boolean[][] qRCodeMatrix = null;
	try {
		qRCodeMatrix = getQRCodeMatrix(bitmap, samplingGrid);
	} catch (ArrayIndexOutOfBoundsException e) {
		throw new SymbolNotFoundException("Sampling grid exceeded image boundary");
	}
	//canvas.drawMatrix(qRCodeMatrix);
	return new QRCodeSymbol(qRCodeMatrix);
*/
}
Exemple #19
0
// Construct a sharpen filter and run it synchronously.
// radius/sigma parameterize the sharpening kernel.
SharpenFilter::SharpenFilter(DImgThreadedFilter* const parentFilter,
                             const DImg& orgImage, const DImg& destImage,
                             int progressBegin, int progressEnd, double radius, double sigma)
    : DImgThreadedFilter(parentFilter, orgImage, destImage, progressBegin, progressEnd,
                         parentFilter->filterName() + QLatin1String(": Sharpen"))
{
    m_radius = radius;
    m_sigma  = sigma;

    // We need to provide support for orgImage == destImage.
    // The algorithm does not support this out of the box, so use a temporary.
    if (orgImage.bits() == destImage.bits())
    {
        m_destImage = DImg(destImage.width(), destImage.height(), destImage.sixteenBit());
    }

    filterImage();

    // copy the temporary result back into the caller's destination buffer
    if (orgImage.bits() == destImage.bits())
    {
        memcpy(destImage.bits(), m_destImage.bits(), m_destImage.numBytes());
    }
}
// Capture the current viewport into this image/texture.
// Fast path: when the capture size matches the image size and no filter,
// flip or depth capture is requested, the viewport is copied straight into
// the texture on the GPU. Otherwise pixels are read back with glReadPixels
// (color or depth) and converted through the appropriate filter.
// `format` is the GL pixel format requested by the caller (e.g. GL_BGRA).
// `ts` is part of the common capture interface and is unused here.
void ImageViewport::calcViewport (unsigned int texId, double ts, unsigned int format)
{
	// if scale was changed
	if (m_scaleChange)
		// reset image
		init(m_capSize[0], m_capSize[1]);
	// if texture wasn't initialized
	if (!m_texInit && texId != 0) {
		// initialize it
		loadTexture(texId, m_image, m_size);
		m_texInit = true;
	}
	// if texture can be directly created
	if (texId != 0 && m_pyfilter == NULL && m_size[0] == m_capSize[0] &&
	    m_size[1] == m_capSize[1] && !m_flip && !m_zbuff && !m_depth)
	{
		// just copy current viewport to texture
		glBindTexture(GL_TEXTURE_2D, texId);
		glCopyTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, m_upLeft[0], m_upLeft[1], (GLsizei)m_capSize[0], (GLsizei)m_capSize[1]);
		glBindTexture(GL_TEXTURE_2D, 0);
		// image is not available: it was never copied into the CPU buffer
		m_avail = false;
	}
	// otherwise copy viewport to buffer, if image is not available
	else if (!m_avail) {
		if (m_zbuff) {
			// Use read pixels with the depth buffer
			// *** misusing m_viewportImage here, but since it has the correct size
			//     (4 bytes per pixel = size of float) and we just need it to apply
			//     the filter, it's ok
			glReadPixels(m_upLeft[0], m_upLeft[1], (GLsizei)m_capSize[0], (GLsizei)m_capSize[1],
			        GL_DEPTH_COMPONENT, GL_FLOAT, m_viewportImage);
			// filter loaded data
			FilterZZZA filt;
			filterImage(filt, (float *)m_viewportImage, m_capSize);
		}
		else {

			if (m_depth) {
				// Use read pixels with the depth buffer
				// See warning above about m_viewportImage.
				glReadPixels(m_upLeft[0], m_upLeft[1], (GLsizei)m_capSize[0], (GLsizei)m_capSize[1],
				        GL_DEPTH_COMPONENT, GL_FLOAT, m_viewportImage);
				// filter loaded data
				FilterDEPTH filt;
				filterImage(filt, (float *)m_viewportImage, m_capSize);
			}
			else {

				// get frame buffer data
				if (m_alpha) {
					// as we are reading the pixel in the native format, we can read directly in the image buffer
					// if we are sure that no processing is needed on the image
					if (m_size[0] == m_capSize[0] &&
					    m_size[1] == m_capSize[1] &&
					    !m_flip &&
					    !m_pyfilter)
					{
						glReadPixels(m_upLeft[0], m_upLeft[1], (GLsizei)m_capSize[0], (GLsizei)m_capSize[1], format,
						             GL_UNSIGNED_BYTE, m_image);
						m_avail = true;
					}
					else if (!m_pyfilter) {
						glReadPixels(m_upLeft[0], m_upLeft[1], (GLsizei)m_capSize[0], (GLsizei)m_capSize[1], format,
						             GL_UNSIGNED_BYTE, m_viewportImage);
						FilterRGBA32 filt;
						filterImage(filt, m_viewportImage, m_capSize);
					}
					else {
						glReadPixels(m_upLeft[0], m_upLeft[1], (GLsizei)m_capSize[0], (GLsizei)m_capSize[1], GL_RGBA,
						             GL_UNSIGNED_BYTE, m_viewportImage);
						FilterRGBA32 filt;
						filterImage(filt, m_viewportImage, m_capSize);
						if (format == GL_BGRA) {
							// in place byte swapping
							swapImageBR();
						}
					}
				}
				else {
					glReadPixels(m_upLeft[0], m_upLeft[1], (GLsizei)m_capSize[0], (GLsizei)m_capSize[1], GL_RGB,
					        GL_UNSIGNED_BYTE, m_viewportImage);
					// filter loaded data
					FilterRGB24 filt;
					filterImage(filt, m_viewportImage, m_capSize);
					if (format == GL_BGRA) {
						// in place byte swapping
						swapImageBR();
					}
				}
			}
		}
	}
}
Exemple #21
0
void filterSamples(){
  for (int i=0; i< nbSamples; i++){
    sample_gray[i] = *filterImage(&sample_gray[i]);
  }
}
Exemple #22
0
void  filterImages(){
  for (int i=0; i< nbImages; i++){
    image_gray[i] = *filterImage(&image_gray[i]);
  }
}