Example 1
static sk_sp<SkImageFilter> make_image_filter(bool canBeNull = true) {  // default argument so the recursive make_image_filter() calls below compile
    sk_sp<SkImageFilter> filter;

    // Add a 1 in 3 chance to get a nullptr input
    uint8_t i;
    fuzz->nextRange(&i, 0, 2);
    if (fuzz->exhausted() || (canBeNull && i == 1)) {
        return filter;
    }

    enum { ALPHA_THRESHOLD, MERGE, COLOR, BLUR, MAGNIFIER,
           BLENDMODE, OFFSET, MATRIX, MATRIX_CONVOLUTION, COMPOSE,
           DISTANT_LIGHT, POINT_LIGHT, SPOT_LIGHT, NOISE, DROP_SHADOW,
           MORPHOLOGY, BITMAP, DISPLACE, TILE, PICTURE, PAINT, NUM_FILTERS };

    uint8_t s;
    fuzz->nextRange(&s, 0, NUM_FILTERS - 1);
    switch (s) {
    case ALPHA_THRESHOLD: {
        SkRegion reg = make_region();
        SkScalar innerMin, outerMax;
        fuzz->next(&innerMin, &outerMax);
        sk_sp<SkImageFilter> fil = make_image_filter();
        filter = SkAlphaThresholdFilter::Make(reg, innerMin, outerMax, fil);
        break;
    }
    case MERGE: {
        sk_sp<SkImageFilter> filA = make_image_filter();
        sk_sp<SkImageFilter> filB = make_image_filter();
        filter = SkMergeImageFilter::Make(filA, filB);
        break;
    }
    case COLOR: {
        sk_sp<SkColorFilter> cf(make_color_filter());
        filter = cf ? SkColorFilterImageFilter::Make(std::move(cf), make_image_filter())
                    : nullptr;
        break;
    }
    case BLUR: {
        SkScalar sX = make_number(true);
        SkScalar sY = make_number(true);
        sk_sp<SkImageFilter> fil = make_image_filter();

        filter = SkBlurImageFilter::Make(sX, sY, fil);
        break;
    }
    case MAGNIFIER: {
        SkRect rect = make_rect();
        SkScalar inset = make_number(true);
        sk_sp<SkImageFilter> fil = make_image_filter();
        filter = SkMagnifierImageFilter::Make(rect, inset, fil);
        break;
    }
    case BLENDMODE: {
        SkBlendMode mode = make_blendmode();
        sk_sp<SkImageFilter> filA = make_image_filter();
        sk_sp<SkImageFilter> filB = make_image_filter();
        filter = SkXfermodeImageFilter::Make(mode, filA, filB, nullptr);
        break;
    }
    case OFFSET: {
        SkScalar dx, dy;
        fuzz->next(&dx, &dy);
        sk_sp<SkImageFilter> fil = make_image_filter();
        filter = SkOffsetImageFilter::Make(dx, dy, fil);
        break;
    }
    case MATRIX: {
        SkMatrix m;
        init_matrix(&m);
        int qual;
        fuzz->nextRange(&qual, 0, SkFilterQuality::kLast_SkFilterQuality - 1);
        sk_sp<SkImageFilter> fil = make_image_filter();
        filter = SkImageFilter::MakeMatrixFilter(m, (SkFilterQuality)qual, fil);
        break;
    }
    case MATRIX_CONVOLUTION: {
        SkImageFilter::CropRect cropR(SkRect::MakeWH(SkIntToScalar(kBitmapSize),
                                                     SkIntToScalar(kBitmapSize)));
        int w, h;
        fuzz->nextRange(&w, 1, 10);
        fuzz->nextRange(&h, 1, 10);
        SkISize size = SkISize::Make(w, h);
        int arraySize = size.width() * size.height();
        SkTArray<SkScalar> kernel(arraySize);
        for (int i = 0; i < arraySize; ++i) {
            kernel.push_back() = make_number(false);
        }
        fuzz->nextRange(&w, 0, size.width()  - 1);
        fuzz->nextRange(&h, 0, size.height() - 1);
        SkIPoint kernelOffset = SkIPoint::Make(w, h);
        int mode;
        fuzz->nextRange(&mode, 0, SkMatrixConvolutionImageFilter::kMax_TileMode - 1);
        bool convolveAlpha = make_bool();
        SkScalar gain, bias;
        fuzz->next(&gain, &bias);
        sk_sp<SkImageFilter> fil = make_image_filter();
        filter = SkMatrixConvolutionImageFilter::Make(size,
                                                      kernel.begin(),
                                                      gain,
                                                      bias,
                                                      kernelOffset,
                                                      (SkMatrixConvolutionImageFilter::TileMode)mode,
                                                      convolveAlpha,
                                                      fil,
                                                      &cropR);
        break;
    }
    case COMPOSE: {
        sk_sp<SkImageFilter> filA = make_image_filter();
        sk_sp<SkImageFilter> filB = make_image_filter();
        filter = SkComposeImageFilter::Make(filA, filB);
        break;
    }
    case DISTANT_LIGHT: {
        SkPoint3 p = make_point();
        SkColor c = make_color();
        SkScalar ss, kd;
        fuzz->next(&ss, &kd);
        int shininess;
        fuzz->nextRange(&shininess, 0, 9);
        sk_sp<SkImageFilter> fil = make_image_filter();
        filter = make_bool()
                 ? SkLightingImageFilter::MakeDistantLitDiffuse(p, c, ss, kd, fil)
                 : SkLightingImageFilter::MakeDistantLitSpecular(p, c, ss, kd, shininess, fil);
        break;
    }
    case POINT_LIGHT: {
        SkPoint3 p = make_point();
        SkColor c = make_color();
        SkScalar ss, kd;
        fuzz->next(&ss, &kd);
        int shininess;
        fuzz->nextRange(&shininess, 0, 9);
        sk_sp<SkImageFilter> fil = make_image_filter();
        filter = make_bool()
                 ? SkLightingImageFilter::MakePointLitDiffuse(p, c, ss, kd, fil)
                 : SkLightingImageFilter::MakePointLitSpecular(p, c, ss, kd, shininess, fil);
        break;
    }
    case SPOT_LIGHT: {
        SkPoint3 p = make_point();
        SkColor c = make_color();
        SkScalar se, ca, ss, kd;
        fuzz->next(&se, &ca, &ss, &kd);
        int shininess;
        fuzz->nextRange(&shininess, 0, 9);
        sk_sp<SkImageFilter> fil = make_image_filter();
        filter = make_bool()
                 ? SkLightingImageFilter::MakeSpotLitDiffuse(SkPoint3::Make(0, 0, 0),
                                                             p, se, ca, c, ss, kd, fil)
                 : SkLightingImageFilter::MakeSpotLitSpecular(SkPoint3::Make(0, 0, 0),
                                                              p, se, ca, c, ss, kd,
                                                              shininess, fil);
        break;
    }
    case NOISE: {
        SkScalar bfx = make_number(true);
        SkScalar bfy = make_number(true);
        SkScalar seed = make_number(false);
        int octaves;
        fuzz->nextRange(&octaves, 0, 9);
        sk_sp<SkShader> shader(make_bool()
                ? SkPerlinNoiseShader::MakeFractalNoise(bfx, bfy, octaves, seed)
                : SkPerlinNoiseShader::MakeTurbulence(bfx, bfy, octaves, seed));
        SkPaint paint;
        paint.setShader(shader);
        SkImageFilter::CropRect cropR(SkRect::MakeWH(SkIntToScalar(kBitmapSize),
                                                     SkIntToScalar(kBitmapSize)));
        filter = SkPaintImageFilter::Make(paint, &cropR);
        break;
    }
    case DROP_SHADOW: {
        SkScalar dx, dy, sx, sy;
        fuzz->next(&dx, &dy);
        sx = make_number(true);
        sy = make_number(true);
        SkColor c = make_color();
        SkDropShadowImageFilter::ShadowMode mode = make_shadow_mode();
        sk_sp<SkImageFilter> fil = make_image_filter();
        filter = SkDropShadowImageFilter::Make(dx, dy, sx, sy, c, mode, fil, nullptr);
        break;
    }
    case MORPHOLOGY: {
        int rx, ry;
        fuzz->nextRange(&rx, 0, kBitmapSize);
        fuzz->nextRange(&ry, 0, kBitmapSize);
        sk_sp<SkImageFilter> fil = make_image_filter();
        if (make_bool()) {
            filter = SkDilateImageFilter::Make(rx, ry, fil);
        } else {
            filter = SkErodeImageFilter::Make(rx, ry, fil);
        }
        break;
    }
    case BITMAP: {
        sk_sp<SkImage> image(SkImage::MakeFromBitmap(make_bitmap()));
        if (make_bool()) {
            filter = SkImageSource::Make(std::move(image),
                                         make_rect(),
                                         make_rect(),
                                         kHigh_SkFilterQuality);
        } else {
            filter = SkImageSource::Make(std::move(image));
        }
        break;
    }
    case DISPLACE: {
        SkDisplacementMapEffect::ChannelSelectorType x = make_channel_selector_type();
        SkDisplacementMapEffect::ChannelSelectorType y = make_channel_selector_type();
        SkScalar scale = make_number(false);
        sk_sp<SkImageFilter> filA = make_image_filter(false);
        sk_sp<SkImageFilter> filB = make_image_filter();

        filter = SkDisplacementMapEffect::Make(x, y, scale, filA, filB);
        break;
    }
    case TILE: {
        SkRect src = make_rect();
        SkRect dest = make_rect();
        sk_sp<SkImageFilter> fil = make_image_filter(false);
        filter = SkTileImageFilter::Make(src, dest, fil);
        break;
    }
    case PICTURE: {
        SkRTreeFactory factory;
        SkPictureRecorder recorder;
        SkCanvas* recordingCanvas = recorder.beginRecording(SkIntToScalar(kBitmapSize),
                                                            SkIntToScalar(kBitmapSize),
                                                            &factory, 0);
        drawSomething(recordingCanvas);
        sk_sp<SkPicture> pict(recorder.finishRecordingAsPicture());
        filter = SkPictureImageFilter::Make(pict, make_rect());
        break;
    }
    case PAINT: {
        SkImageFilter::CropRect cropR(make_rect());
        filter = SkPaintImageFilter::Make(make_paint(), &cropR);
        break;
    }
    default:
        break;
    }
    return filter;
}
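Note that the recursion above terminates because every nested make_image_filter() call first consumes fuzz input and returns nullptr once fuzz->exhausted() reports true. A minimal, self-contained sketch of that termination pattern, with a hypothetical ByteStream standing in for Skia's Fuzz object (ByteStream and makeNode are illustrative names, not Skia API):

#include <cstddef>
#include <cstdint>

struct ByteStream {
    const std::uint8_t* data;
    std::size_t len;
    std::size_t pos = 0;
    bool exhausted() const { return pos >= len; }
    std::uint8_t next() { return exhausted() ? 0 : data[pos++]; }
};

// Counts the nodes of a randomly shaped tree instead of building a real filter graph.
int makeNode(ByteStream& fz) {
    if (fz.exhausted()) return 0;     // base case: no more input, return a "null" node
    int children = fz.next() % 3;     // consume one byte to choose 0..2 children
    int total = 1;
    for (int i = 0; i < children; ++i)
        total += makeNode(fz);        // every child consumes further input, so the tree stays finite
    return total;
}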
Example 2
//see http://opencv.willowgarage.com/documentation/cpp/camera_calibration_and_3d_reconstruction.html
bool ccCalibratedImage::undistort()
{
	if (m_image.isNull())
		return false;

	//nothing to do
	if (m_k1==0 && m_k2==0)
		return true;

	float f2 = m_focal_pix*m_focal_pix;
	float cx = 0.5f*(float)m_width;
	float cy = 0.5f*(float)m_height;

	//look for optimal enlargement coeff
	float enlargeCoef = 1.0f;
	//{
	//	float lastrp = 0;
	//	const float c_stepSize = 0.01f;
	//	unsigned step=0;
	//	while (step<50)
	//	{
	//		enlargeCoef = 1.0f+c_stepSize*(float)step;
	//		float maxcx = 0.5f*enlargeCoef*(float)m_width;
	//		float maxcy = 0.5f*enlargeCoef*(float)m_height;
	//		float maxp2 = (maxcx*maxcx+maxcy*maxcy)/f2;
	//		float maxrp = 1.0f+maxp2*(m_k1+m_k2*maxp2);
	//		float eqx = maxrp * maxcx;
	//		float eqy = maxrp * maxcy;
	//		if ((int)eqx>=cx && (int)eqy>=cy)
	//			break;
	//		//something wrong!
	//		if (maxrp<lastrp)
	//		{
	//			if (step<2)
	//				return false;
	//			else
	//			{
	//				//step back
	//				enlargeCoef = 1.0f+c_stepSize*(float)(step-1);
	//				break;
	//			}
	//		}
	//		lastrp=maxrp;
	//		++step;
	//	}
	//}

	unsigned newWidth = (unsigned)(enlargeCoef*(float)m_width);
	unsigned newHeight = (unsigned)(enlargeCoef*(float)m_height);
	float newCx = 0.5f*(float)newWidth;
	float newCy = 0.5f*(float)newHeight;
	QImage image(QSize(newWidth,newHeight),m_image.format());
	image.fill(0);

	//image undistortion
	{
		for (unsigned i=0;i<newWidth;++i)
		{
			float x = (float)(i-newCx);
			float x2 = x*x;
			for (unsigned j=0;j<newHeight;++j)
			{
				float y = (float)(j-newCy);
				float y2 = y*y;

				float p2 = (x2+y2)/f2; //p = pix/f
				float rp = 1.0f+p2*(m_k1+p2*m_k2); //r(p) = 1.0 + k1 * ||p||^2 + k2 * ||p||^4
				float eqx = rp * x + cx;
				float eqy = rp * y + cy;

				int pixx=(int)eqx;
				int pixy=(int)eqy;
				if (pixx>=0 && pixx<(int)m_width && pixy>=0 && pixy<(int)m_height)
					image.setPixel(i,j,m_image.pixel(pixx,pixy));
			}
		}
	}

	//update image parameters
	m_image = image;
	m_width = newWidth;
	m_height = newHeight;
	//m_aspectRatio = (aspect ratio is not changed)
	m_k1 = m_k2 = 0;

	setName(getName()+QString("_undistort"));

	return true;
}
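The nested loop above performs a reverse mapping: for each pixel of the enlarged output it evaluates the radial model r(p) = 1 + k1*||p||^2 + k2*||p||^4 from the inline comment and samples the original (distorted) image. A minimal standalone sketch of that per-pixel mapping, with illustrative names that are not part of ccCalibratedImage:

#include <utility>

// x, y are pixel coordinates relative to the principal point; focal is the focal length in pixels.
std::pair<float, float> applyRadialDistortion(float x, float y, float focal, float k1, float k2)
{
    float p2 = (x * x + y * y) / (focal * focal);  // ||p||^2 with p = pix / f
    float rp = 1.0f + p2 * (k1 + p2 * k2);         // r(p) = 1 + k1*||p||^2 + k2*||p||^4
    return { rp * x, rp * y };                     // distorted coordinates, still relative to the center
}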
Example 3
/**
 * @brief Returns html rendered markdown of the note text
 * @param notesPath for transforming relative local urls to absolute ones
 * @return
 */
QString Note::toMarkdownHtml(QString notesPath) {
    hoedown_renderer *renderer =
            hoedown_html_renderer_new(HOEDOWN_HTML_USE_XHTML, 16);
    hoedown_extensions extensions =
            (hoedown_extensions) (HOEDOWN_EXT_BLOCK | HOEDOWN_EXT_SPAN);
    hoedown_document *document = hoedown_document_new(renderer, extensions, 16);

    // get the decrypted note text (or the normal note text if there isn't any)
    QString str = getDecryptedNoteText();

    // parse for relative file urls and make them absolute
    // (for example to show images under the note path)
    str.replace(
            QRegularExpression("\\(file:\\/\\/([^\\/].+)\\)"),
            "(file://" + notesPath + "/\\1)");

    unsigned char *sequence = (unsigned char *) qstrdup(str.toUtf8().constData());
    int length = strlen((char *) sequence);

    // return an empty string if the note is empty
    if (length == 0) {
        // free the duplicated string and the hoedown objects so the early return doesn't leak them
        delete[] reinterpret_cast<char *>(sequence);
        hoedown_document_free(document);
        hoedown_html_renderer_free(renderer);
        return "";
    }

    hoedown_buffer *html = hoedown_buffer_new(length);

    // render markdown html
    hoedown_document_render(document, html, sequence, length);

    // get markdown html
    QString result = QString::fromUtf8((char *) html->data, html->size);

    /* Cleanup */
    delete[] reinterpret_cast<char *>(sequence);  // qstrdup() allocates with new[], not malloc()
    hoedown_buffer_free(html);

    hoedown_document_free(document);
    hoedown_html_renderer_free(renderer);

    result =
            "<html><head><style>h1, h2, h3 { margin: 5pt 0 10pt 0; }"
            "a { color: #FF9137; text-decoration: none; }</style></head><body>" +
            result + "</body></html>";

    // check if width of embedded local images is too high
    QRegularExpression re("<img src=\"file:\\/\\/([^\"]+)\"");
    QRegularExpressionMatchIterator i = re.globalMatch(result);
    while (i.hasNext()) {
        QRegularExpressionMatch match = i.next();
        QString fileName = match.captured(1);
        QImage image(fileName);

        // cap the image width at 980px
        if (image.width() > 980) {
            result.replace(
                    QRegularExpression("<img src=\"file:\\/\\/" + QRegularExpression::escape(fileName) + "\""),
                           "<img width=\"980\" src=\"file://" + fileName + "\"");
        }
    }

    return result;
}
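The first QRegularExpression above rewrites relative file:// links (no leading slash after the scheme) into absolute ones rooted at notesPath, so images stored next to the note resolve. A minimal, Qt-only sketch of just that rewrite; the notesPath and input values are illustrative:

#include <QDebug>
#include <QRegularExpression>
#include <QString>

int main()
{
    QString notesPath = "/home/user/notes";
    QString str = "![img](file://media/pic.png)";   // relative: no '/' right after file://
    str.replace(QRegularExpression("\\(file:\\/\\/([^\\/].+)\\)"),
                "(file://" + notesPath + "/\\1)");
    qDebug() << str;   // prints "![img](file:///home/user/notes/media/pic.png)"
    return 0;
}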
Example 4
wxImage wxDIB::ConvertToImage() const
{
    wxCHECK_MSG( IsOk(), wxNullImage,
                    wxT("can't convert invalid DIB to wxImage") );

    // create the wxImage object
    const int w = GetWidth();
    const int h = GetHeight();
    wxImage image(w, h, false /* don't bother clearing memory */);
    if ( !image.IsOk() )
    {
        wxFAIL_MSG( wxT("could not allocate data for image") );
        return wxNullImage;
    }

    const int bpp = GetDepth();

    // Remember if we have any "real" transparency, i.e. either any partially
    // transparent pixels or not all pixels are fully opaque or fully
    // transparent.
    bool hasAlpha = false;
    bool hasOpaque = false;
    bool hasTransparent = false;

    if ( bpp == 32 )
    {
        // 32 bit bitmaps may be either 0RGB or ARGB and we don't know in
        // advance which one we have, so assume there is alpha and
        // get rid of it later if it turns out there wasn't any.
        image.SetAlpha();
    }

    // this is the same loop as in Create() just above but with copy direction
    // reversed
    const int dstBytesPerLine = w * 3;
    const int srcBytesPerLine = GetLineSize(w, bpp);
    unsigned char *dst = image.GetData() + ((h - 1) * dstBytesPerLine);
    unsigned char *alpha = image.HasAlpha() ? image.GetAlpha() + (h - 1)*w
                                            : NULL;
    const unsigned char *srcLineStart = (unsigned char *)GetData();
    for ( int y = 0; y < h; y++ )
    {
        // copy one DIB line
        const unsigned char *src = srcLineStart;
        for ( int x = 0; x < w; x++ )
        {
            dst[2] = *src++;
            dst[1] = *src++;
            dst[0] = *src++;

            if ( bpp == 32 )
            {
                // wxImage uses non premultiplied alpha so undo
                // premultiplication done in Create() above
                const unsigned char a = *src;
                *alpha++ = a;

                // Check what kind of alpha do we have.
                switch ( a )
                {
                    case 0:
                        hasTransparent = true;
                        break;

                    default:
                        // Anything in between means we have real transparency
                        // and must use alpha channel.
                        hasAlpha = true;
                        break;

                    case 255:
                        hasOpaque = true;
                        break;
                }

                if ( a > 0 )
                {
                    dst[0] = (dst[0] * 255) / a;
                    dst[1] = (dst[1] * 255) / a;
                    dst[2] = (dst[2] * 255) / a;
                }

                src++;
            }

            dst += 3;
        }

        // pass to the previous line in the image
        dst -= 2*dstBytesPerLine;
        if ( alpha )
            alpha -= 2*w;

        // and to the next one in the DIB
        srcLineStart += srcBytesPerLine;
    }

    if ( hasOpaque && hasTransparent )
        hasAlpha = true;

    if ( !hasAlpha && image.HasAlpha() )
        image.ClearAlpha();

    return image;
}
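A 32-bit DIB stores premultiplied alpha while wxImage expects straight (non-premultiplied) channels, which is why each color is rescaled by 255/a in the loop above. A minimal single-pixel sketch of that step; the helper name is illustrative and, like the loop above, it does not clamp invalid data where a channel exceeds the alpha value:

unsigned char unpremultiply(unsigned char channel, unsigned char a)
{
    // a == 0 means fully transparent: the color carries no information, so leave it unchanged
    return a ? static_cast<unsigned char>((channel * 255) / a) : channel;
}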
Example 5
void RenderSVGImage::adjustRectsForAspectRatio(FloatRect& destRect, FloatRect& srcRect, SVGPreserveAspectRatio* aspectRatio)
{
    float origDestWidth = destRect.width();
    float origDestHeight = destRect.height();
    if (aspectRatio->meetOrSlice() == SVGPreserveAspectRatio::SVG_MEETORSLICE_MEET) {
        float widthToHeightMultiplier = srcRect.height() / srcRect.width();
        if (origDestHeight > (origDestWidth * widthToHeightMultiplier)) {
            destRect.setHeight(origDestWidth * widthToHeightMultiplier);
            switch(aspectRatio->align()) {
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMINYMID:
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMIDYMID:
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMAXYMID:
                    destRect.setY(destRect.y() + origDestHeight / 2.0f - destRect.height() / 2.0f);
                    break;
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMINYMAX:
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMIDYMAX:
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMAXYMAX:
                    destRect.setY(destRect.y() + origDestHeight - destRect.height());
                    break;
            }
        }
        if (origDestWidth > (origDestHeight / widthToHeightMultiplier)) {
            destRect.setWidth(origDestHeight / widthToHeightMultiplier);
            switch(aspectRatio->align()) {
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMIDYMIN:
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMIDYMID:
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMIDYMAX:
                    destRect.setX(destRect.x() + origDestWidth / 2.0f - destRect.width() / 2.0f);
                    break;
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMAXYMIN:
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMAXYMID:
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMAXYMAX:
                    destRect.setX(destRect.x() + origDestWidth - destRect.width());
                    break;
            }
        }
    } else if (aspectRatio->meetOrSlice() == SVGPreserveAspectRatio::SVG_MEETORSLICE_SLICE) {
        float widthToHeightMultiplier = srcRect.height() / srcRect.width();
        // if the destination height is less than the height of the image we'll be drawing
        if (origDestHeight < (origDestWidth * widthToHeightMultiplier)) {
            float destToSrcMultiplier = srcRect.width() / destRect.width();
            srcRect.setHeight(destRect.height() * destToSrcMultiplier);
            switch(aspectRatio->align()) {
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMINYMID:
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMIDYMID:
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMAXYMID:
                    srcRect.setY(destRect.y() + image()->height() / 2.0f - srcRect.height() / 2.0f);
                    break;
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMINYMAX:
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMIDYMAX:
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMAXYMAX:
                    srcRect.setY(destRect.y() + image()->height() - srcRect.height());
                    break;
            }
        }
        // if the destination width is less than the width of the image we'll be drawing
        if (origDestWidth < (origDestHeight / widthToHeightMultiplier)) {
            float destToSrcMultiplier = srcRect.height() / destRect.height();
            srcRect.setWidth(destRect.width() * destToSrcMultiplier);
            switch(aspectRatio->align()) {
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMIDYMIN:
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMIDYMID:
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMIDYMAX:
                    srcRect.setX(destRect.x() + image()->width() / 2.0f - srcRect.width() / 2.0f);
                    break;
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMAXYMIN:
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMAXYMID:
                case SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_XMAXYMAX:
                    srcRect.setX(destRect.x() + image()->width() - srcRect.width());
                    break;
            }
        }
    }
}
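As a quick worked example of the meet branch: drawing a 100x50 image (widthToHeightMultiplier = 50/100 = 0.5) into an 80x80 destination with xMidYMid, the first test fires because 80 > 80 * 0.5 = 40, so the destination height becomes 40 and y is shifted by 80/2 - 40/2 = 20; the second test does not fire because 80 is not greater than 80 / 0.5 = 160, leaving an 80x40 destination rectangle centered vertically.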
Example 6
void RenderSVGImage::paint(PaintInfo& paintInfo, int parentX, int parentY)
{
    if (paintInfo.context->paintingDisabled() || (paintInfo.phase != PaintPhaseForeground) || style()->visibility() == HIDDEN)
        return;
    
    paintInfo.context->save();
    paintInfo.context->concatCTM(AffineTransform().translate(parentX, parentY));
    paintInfo.context->concatCTM(localTransform());
    paintInfo.context->concatCTM(translationForAttributes());

    FloatRect boundingBox = FloatRect(0, 0, width(), height());

    SVGElement* svgElement = static_cast<SVGElement*>(element());
    ASSERT(svgElement && svgElement->document() && svgElement->isStyled());

    SVGStyledElement* styledElement = static_cast<SVGStyledElement*>(svgElement);
    const SVGRenderStyle* svgStyle = style()->svgStyle();

    AtomicString filterId(SVGURIReference::getTarget(svgStyle->filter()));
    AtomicString clipperId(SVGURIReference::getTarget(svgStyle->clipPath()));
    AtomicString maskerId(SVGURIReference::getTarget(svgStyle->maskElement()));

#if ENABLE(SVG_EXPERIMENTAL_FEATURES)
    SVGResourceFilter* filter = getFilterById(document(), filterId);
#endif
    SVGResourceClipper* clipper = getClipperById(document(), clipperId);
    SVGResourceMasker* masker = getMaskerById(document(), maskerId);

#if ENABLE(SVG_EXPERIMENTAL_FEATURES)
    if (filter)
        filter->prepareFilter(paintInfo.context, boundingBox);
    else if (!filterId.isEmpty())
        svgElement->document()->accessSVGExtensions()->addPendingResource(filterId, styledElement);
#endif

    if (clipper) {
        clipper->addClient(styledElement);
        clipper->applyClip(paintInfo.context, boundingBox);
    } else if (!clipperId.isEmpty())
        svgElement->document()->accessSVGExtensions()->addPendingResource(clipperId, styledElement);

    if (masker) {
        masker->addClient(styledElement);
        masker->applyMask(paintInfo.context, boundingBox);
    } else if (!maskerId.isEmpty())
        svgElement->document()->accessSVGExtensions()->addPendingResource(maskerId, styledElement);

    float opacity = style()->opacity();
    if (opacity < 1.0f) {
        paintInfo.context->clip(enclosingIntRect(boundingBox));
        paintInfo.context->beginTransparencyLayer(opacity);
    }

    PaintInfo pi(paintInfo);
    pi.rect = absoluteTransform().inverse().mapRect(pi.rect);

    SVGImageElement* imageElt = static_cast<SVGImageElement*>(node());

    FloatRect destRect(m_x, m_y, contentWidth(), contentHeight());
    FloatRect srcRect(0, 0, image()->width(), image()->height());

    if (imageElt->preserveAspectRatio()->align() != SVGPreserveAspectRatio::SVG_PRESERVEASPECTRATIO_NONE)
        adjustRectsForAspectRatio(destRect, srcRect, imageElt->preserveAspectRatio());

    paintInfo.context->drawImage(image(), destRect, srcRect);

#if ENABLE(SVG_EXPERIMENTAL_FEATURES)
    if (filter)
        filter->applyFilter(paintInfo.context, boundingBox);
#endif

    if (opacity < 1.0f)
        paintInfo.context->endTransparencyLayer();

    paintInfo.context->restore();
}
Example 7
///////////////////////////////////////////////////////////////////////////////
//! Load meta file and all needed files from meta file definitions
///////////////////////////////////////////////////////////////////////////////
void MainWindow::load_metafile()
{
    QString filename = QFileDialog::getOpenFileName(this, tr("Select meta file"));

    if (filename.isEmpty())
        return;

    QFile file(filename);
    file.open(QIODevice::ReadOnly);
    if (!file.isOpen())
        return;

    QTextStream in(&file);

    size_t image_number;
    in >> image_number;

    // loading matrix of calibration
    // TODO: CAUTION: type-specific code
    // TODO: code duplication with "from line 159"
    for (size_t r=0; r < matrix_of_camera_calibration.RowNo(); ++r)
        for (size_t c=0; c < matrix_of_camera_calibration.ColNo(); ++c)
        {
            float x;
            in >> x;
            matrix_of_camera_calibration(r,c) = x;
        }

    image_preview_model->clear();

    images.clear();
    images.reserve(image_number);

    // load image names with bounding squares from meta file
    for (size_t i = 0; i < image_number; ++i)
    {
        // loading image file name
        QString image_file_name;
        in >> image_file_name;

        QRectF bounding_rectangle;
        float temp[4];
        in >> temp[0];
        in >> temp[1];
        in >> temp[2];
        in >> temp[3];
        bounding_rectangle.setLeft(temp[0]);
        bounding_rectangle.setTop(temp[1]);
        bounding_rectangle.setRight(temp[2]);
        bounding_rectangle.setBottom(temp[3]);

        // loading matrix of calibration
        matrix<float> matrix_of_calibration(4, 4);
        // TODO: CAUTION: type-specific code
        for (size_t r=0; r < matrix_of_calibration.RowNo(); ++r)
            for (size_t c=0; c < matrix_of_calibration.ColNo(); ++c)
            {
                float x;
                in >> x;
                matrix_of_calibration(r,c) = x;
            }

        // create image
        ImageInfo image(image_file_name, matrix_of_calibration, bounding_rectangle);
        if (image.is_valid())
        {
            images.push_back(image);
            // add image to the preview widget
            image_preview_model->add_image(image.get_image());
        }
    }

    //close input file
    file.close();
}
Example 8
void KoColorSlider::drawContents( QPainter *painter )
{
    QPixmap checker(8, 8);
    QPainter p(&checker);
    p.fillRect(0, 0, 4, 4, Qt::lightGray);
    p.fillRect(4, 0, 4, 4, Qt::darkGray);
    p.fillRect(0, 4, 4, 4, Qt::darkGray);
    p.fillRect(4, 4, 4, 4, Qt::lightGray);
    p.end();
    QRect contentsRect_(contentsRect());
    painter->fillRect(contentsRect_, QBrush(checker));

    if( !d->upToDate || d->pixmap.isNull() || d->pixmap.width() != contentsRect_.width()
        || d->pixmap.height() != contentsRect_.height() )
    {
        KoColor c = d->minColor; // smart way to fetch colorspace
        QColor color;

        const quint8 *colors[2];
        colors[0] = d->minColor.data();
        colors[1] = d->maxColor.data();

        KoMixColorsOp * mixOp = c.colorSpace()->mixColorsOp();

        QImage image(contentsRect_.width(), contentsRect_.height(), QImage::Format_ARGB32 );

        if( orientation() == Qt::Horizontal ) {
            for (int x = 0; x < contentsRect_.width(); x++) {

                qreal t = static_cast<qreal>(x) / (contentsRect_.width() - 1);

                qint16 colorWeights[2];
                colorWeights[0] = static_cast<quint8>((1.0 - t) * 255 + 0.5);
                colorWeights[1] = 255 - colorWeights[0];

                mixOp->mixColors(colors, colorWeights, 2, c.data());

                c.toQColor(&color);

                for (int y = 0; y < contentsRect_.height(); y++)
                    image.setPixel(x, y, color.rgba());
            }
        }
        else {
            for (int y = 0; y < contentsRect_.height(); y++) {

                qreal t = static_cast<qreal>(y) / (contentsRect_.height() - 1);

                qint16 colorWeights[2];
                colorWeights[0] = static_cast<quint8>((t) * 255 + 0.5);
                colorWeights[1] = 255 - colorWeights[0];

                mixOp->mixColors(colors, colorWeights, 2, c.data());

                c.toQColor(&color);

                for (int x = 0; x < contentsRect_.width(); x++)
                    image.setPixel(x, y, color.rgba());
            }
        }
        d->pixmap = QPixmap::fromImage(image);
        d->upToDate = true;
    }
    painter->drawPixmap( contentsRect_, d->pixmap, QRect( 0, 0, d->pixmap.width(), d->pixmap.height()) );
}
Example 9
int main(int argc, char** argv) {

    TGAImage image(width, height, TGAImage::RGB);
    TGAImage image_shdr(width, height, TGAImage::RGB);

    std::vector<string> names;
    std::vector<string> diffs;
    std::vector<string> nm;
    std::vector<string> gls;
    std::vector<coeffs> coeff;

    names.push_back("african_head.obj");
    //names.push_back("floor.obj");
    names.push_back("african_head_eye_inner.obj");
    //names.push_back("african_head_eye_outer.obj");
    //names.push_back("diablo3_pose.obj");


    diffs.push_back("african_head_diffuse.tga");
    //diffs.push_back("floor_diffuse.tga");
    diffs.push_back("african_head_eye_inner_diffuse.tga");
    //diffs.push_back("african_head_eye_outer_diffuse.tga");
    //diffs.push_back("diablo3_pose_diffuse.tga");


    nm.push_back("african_head_nm.tga");
    //nm.push_back("floor_nm_tangent.tga");
    nm.push_back("african_head_eye_inner_nm.tga");
    //nm.push_back("african_head_eye_outer_nm.tga");
    //nm.push_back("diablo3_pose_nm.tga");



    gls.push_back("african_head_spec.tga");
    //gls.push_back("floor_nm_tangent.tga");
    gls.push_back("african_head_eye_inner_spec.tga");
    //gls.push_back("african_head_eye_outer_spec.tga");
    //gls.push_back("diablo3_pose_spec.tga");


    coeff.push_back(coeffs(20,1.2f,0.6f));
    //coeff.push_back(coeffs(10,1.f,0.6f));
    coeff.push_back(coeffs(20,1.2f,0.6f));
    //coeff.push_back(coeffs(10,1.f,4.6f));
    //coeff.push_back(coeffs(20,1.2f,0.6f));

    char name_file[40];
    char name_file_diff[40];
    char name_file_norm[40];
    char name_file_spec[40];

    ViewPortMtx=viewport(width/8,height/8 , width*3/4, height*3/4, zeight);
    //width/8, height/8, width*3/4, height*3/4
    //PerspMtx=perspective((eye-center).norm());
    PerspMtx=perspective((eye-center).norm());
    view = lookat(eye, center,up);



    shdw=ViewPortMtx*lookat(light_vec,center,up);
    shdw_adj=shdw.Adjacent();

    vect<4,float> gl_light_shdw;
    gl_light_shdw[0]=light_vec[0];
    gl_light_shdw[1]=light_vec[1];
    gl_light_shdw[2]=light_vec[2];

    gl_light_shdw=shdw*gl_light_shdw;

    printf("gl_light_shdw %f %f %f\n",gl_light_shdw[0],gl_light_shdw[1],gl_light_shdw[2]);


    light_vec_shdw=vect<3,float>(gl_light_shdw[0],gl_light_shdw[1],gl_light_shdw[2]);

    light_vec_shdw=light_vec_shdw.normalize();

    fin_mtrx=ViewPortMtx*PerspMtx*view;
    fin_mtrx_adj=(ViewPortMtx*view).invert_transpose();


    uniform_Mshadow=shdw*fin_mtrx.invert();


    //light_dir = proj<3>((Projection*ModelView*embed<4>(light_dir, 0.f))).normalize();
    vect<4,float> gl_light;
    gl_light[0]=light_vec[0];
    gl_light[1]=light_vec[1];
    gl_light[2]=light_vec[2];

    gl_light=ViewPortMtx*PerspMtx*view*gl_light;
    light_vec=vect<3,float>(gl_light[0],gl_light[1],gl_light[2]);

    light_vec=light_vec.normalize();


    for (int i =0; i<names.size(); i++) {


        coeff_cur=coeff[i];

        strcpy(name_file,names[i].c_str());
        strcpy(name_file_diff,diffs[i].c_str());
        strcpy(name_file_norm,nm[i].c_str());
        strcpy(name_file_spec,gls[i].c_str());
        printf("file %s %s %s %s\n",name_file,name_file_diff,name_file_norm,name_file_spec);



        Model mdl;

        parser(name_file_diff,name_file,name_file_norm,name_file_spec,mdl);
        Shaderer shader;
        ShaderGuro2 shaderer;


        //printf("OKI DOKI!\n");

        for(std::vector<int>::size_type i = 0; i != mdl.coords_tri.size(); i++) {

            for (int j=0; j<3; j++) {
                shader.vertex(i, j,&mdl);
            }

            color_triangle(shader, image_shdr, z_shdr);

        }





        for(std::vector<int>::size_type i = 0; i != mdl.coords_tri.size(); i++) {

            for (int j=0; j<3; j++) {
                shaderer.vertex(i, j,&mdl);
            }

            color_triangle(shaderer, image, z_buffer);

        }

    }


    image_shdr.flip_vertically(); // i want to have the origin at the left bottom corner of the image
    image_shdr.write_tga_file("output_shr.tga");

    image.flip_vertically(); // i want to have the origin at the left bottom corner of the image
    image.write_tga_file("output.tga");

    return 0;
}
Example 10
//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////
Tensor::Tensor(const IplImage *cv_image, BOOL isComputeGradient)
{
	// keep a copy of the original image
	m_img=cvCreateImage(cvSize(cv_image->width,cv_image->height),cv_image->depth,3);
	cvCopyImage(cv_image,m_img);

	// get the parameter values of the nonlinear multi-scale structure tensor
	m_levels = 2;

	ASSERT(m_levels > 0 );

	m_dim = m_levels * SiNGLE_TENSOR_DIM;    // SiNGLE_TENSOR_DIM is the dimension of a single tensor

	// SiNGLE_TENSOR_DIM = n(n+1)/2; solving for n gives m_axes_cnt, the number of coordinate axes
	m_axes_cnt = (unsigned int)(sqrt(2 * SiNGLE_TENSOR_DIM + 0.25) - 0.5);   // 2

	m_grad_dim = m_levels * m_axes_cnt;      //m_grad_dim


	////////////////////////////////////////////////////////////////////////////
	// split the multi-channel image into single channels (3 channels by default)
	unsigned int x,y,i,n;
	m_w = cv_image->width;
	m_h = cv_image->height;
	IplImage *cv_channels[3];
	for (n = 0;n < 3;n++)
	{
		cv_channels[n] = cvCreateImage( cvGetSize(cv_image), cv_image->depth, 1 );
	}
	cvSplit(cv_image, cv_channels[0], cv_channels[1], cv_channels[2], NULL);


	////////////////////////////////////////////////////////////////////////////
	// initialize m_tensor; CMatrix(m_h,m_w) creates a matrix with all elements set to 0
	m_tensor = new CMatrix *[m_dim];
	for (i=0;i<m_dim;i++)
	{   
		m_tensor[i] = new CMatrix(m_h,m_w);
	}

	////////////////////////////////////////////////////////////////////////////
	// allocate space to store the tensor at each scale as a color image
	m_pImageTensorRGB=new Image<Color_RGB> *[m_levels];
	for (i=0;i<m_levels;i++)
	{   
		m_pImageTensorRGB[i] = new Image<Color_RGB> (m_w,m_h);
	}

	// initialize m_gradient
	if (isComputeGradient)
	{
		m_gradient = new CMatrix *[m_grad_dim];
		for (i=0;i<m_grad_dim;i++)
		{
			m_gradient[i] = new CMatrix(m_h,m_w);
		}
	}
	else
	{
		m_gradient = NULL;
	}


	// helper matrices
	CMatrix image(m_h, m_w);
	CMatrix dx(m_h,m_w);
	CMatrix dy(m_h,m_w);
	CMatrix dx2(m_h,m_w);
	CMatrix dy2(m_h,m_w);
	CMatrix dxdy(m_h,m_w);

	// create CvMat headers over the existing data buffers
	CvMat cv_dx2 = cvMat(m_h, m_w, CV_64FC1, dx2.GetData());
	CvMat cv_dy2 = cvMat(m_h, m_w, CV_64FC1, dy2.GetData());
	CvMat cv_dxdy =cvMat(m_h, m_w, CV_64FC1, dxdy.GetData());


	// convert from IplImage to CMatrix, processing each color channel separately
	for (n = 0;n <3;n++)	// n is the channel index (3 channels by default)
	{  
		// copy the elements of this channel into image
		for (y = 0; y < m_h; y++)
		{
			for (x = 0; x < m_w; x++)
			{
				uchar* dst = &CV_IMAGE_ELEM( cv_channels[n], uchar, y, x );
				image.SetElement(y, x, (double)(dst[0]));
			}
		}
		// compute the gradient of this color channel (x and y directions) and store it in dx and dy
		image.centdiffX(dx);
		image.centdiffY(dy);

		// wrap dx and dy as cv_dx and cv_dy
		CvMat cv_dx = cvMat(m_h, m_w, CV_64FC1, dx.GetData());
		CvMat cv_dy = cvMat(m_h, m_w, CV_64FC1, dy.GetData());

		// initialize cv_tensor0, cv_tensor1, cv_tensor2; at this point m_tensor[0], m_tensor[1], m_tensor[2] are all zero
		CvMat cv_tensor0 = cvMat(m_h, m_w, CV_64FC1, (m_tensor[0])->GetData());
		CvMat cv_tensor1 = cvMat(m_h, m_w, CV_64FC1, (m_tensor[1])->GetData());
		CvMat cv_tensor2 = cvMat(m_h, m_w, CV_64FC1, (m_tensor[2])->GetData());

		// compute the image gradient into cv_gradX and cv_gradY, which back m_gradient[0] and m_gradient[1]
		if (isComputeGradient)
		{   
			// initialize and accumulate cv_gradX and cv_gradY
			CvMat cv_gradX = cvMat(m_h, m_w, CV_64FC1, (m_gradient[0])->GetData());
			CvMat cv_gradY = cvMat(m_h, m_w, CV_64FC1, (m_gradient[1])->GetData());
			cvAdd(&cv_gradX, &cv_dx, &cv_gradX); // accumulate over the three channels
			cvAdd(&cv_gradY, &cv_dy, &cv_gradY);
		}

		// compute the structure tensor: cv_tensor0 += dx*dx, cv_tensor1 += dy*dy, cv_tensor2 += dx*dy
		cvMul(&cv_dx, &cv_dx, &cv_dx2);
		cvAdd(&cv_tensor0, &cv_dx2, &cv_tensor0);
		cvMul(&cv_dy, &cv_dy, &cv_dy2);
		cvAdd(&cv_tensor1, &cv_dy2, &cv_tensor1);
		cvMul(&cv_dx, &cv_dy, &cv_dxdy);
		cvAdd(&cv_tensor2, &cv_dxdy, &cv_tensor2);


		// single-scale computation done; the multi-scale nonlinear structure tensor is computed below
		if (m_levels > 1)
		{   
			unsigned int wavelet_levels = m_levels - 1;	// -1 because there is no earlier if (m_levels==1) check
			double dMaxValue,dMinValue;
			cvMinMaxLoc(cv_channels[n], &dMinValue, &dMaxValue);//Finds global minimum, maximum 

			// normalize the image pixel values to [0,1]
			Wavelet *wave = new Wavelet(&image, dMinValue, dMaxValue, wavelet_levels); // construct the wavelet transform

			// allocate an array of WaveletDetailImages structs
			WaveletDetailImages *D_images = new WaveletDetailImages[wavelet_levels];

			for (i = 0; i < wavelet_levels; i++)
			{
				D_images[i].Detail_1 = new CMatrix(m_h, m_w);
				D_images[i].Detail_2 = new CMatrix(m_h, m_w);
			}

			wave->execute(D_images); // obtain D(s,x), D(s,y)

			for (i = 0; i < wavelet_levels; i++)
			{   
				// the default scale factor of the multi-scale structure tensor is a=2
				double scale = pow((float)0.25, (int)(i + 1));              // see Eq. (2-15)
				CvMat cv_dx = cvMat(m_h, m_w, CV_64FC1, D_images[i].Detail_1->GetData());
				CvMat cv_dy = cvMat(m_h, m_w, CV_64FC1, D_images[i].Detail_2->GetData());
				CvMat cv_tensor0 = cvMat(m_h, m_w, CV_64FC1, (m_tensor[(i+1) * SiNGLE_TENSOR_DIM])->GetData());
				CvMat cv_tensor1 = cvMat(m_h, m_w, CV_64FC1, (m_tensor[(i+1) * SiNGLE_TENSOR_DIM + 1])->GetData());
				CvMat cv_tensor2 = cvMat(m_h, m_w, CV_64FC1, (m_tensor[(i+1) * SiNGLE_TENSOR_DIM + 2])->GetData());
				// compute the gradient
				if (isComputeGradient)
				{
					CvMat cv_gradX = cvMat(m_h, m_w, CV_64FC1, (m_gradient[(i+1) * m_axes_cnt])->GetData());
					CvMat cv_gradY = cvMat(m_h, m_w, CV_64FC1, (m_gradient[(i+1) * m_axes_cnt + 1])->GetData());
					cvAdd(&cv_gradX, &cv_dx, &cv_gradX);
					cvAdd(&cv_gradY, &cv_dy, &cv_gradY);
				}
				// compute the tensor
				cvMul(&cv_dx, &cv_dx, &cv_dx2, scale);
				cvAdd(&cv_tensor0, &cv_dx2, &cv_tensor0);
				cvMul(&cv_dy, &cv_dy, &cv_dy2, scale);
				cvAdd(&cv_tensor1, &cv_dy2, &cv_tensor1);
				cvMul(&cv_dx, &cv_dy, &cv_dxdy, scale);
				cvAdd(&cv_tensor2, &cv_dxdy, &cv_tensor2);
			}
			for (i = 0; i < wavelet_levels; i++)
			{
				delete D_images[i].Detail_1;
				delete D_images[i].Detail_2;
			}
			delete [] D_images;
			delete wave;
		}
		cvReleaseImage(&cv_channels[n]);
	}

	// store the structure tensor at each scale as a color image
	for (i=0;i<m_levels;i++)
	{
		for (y=0;y<m_h;y++)
		{
			for (x=0;x<m_w;x++)
			{
				(*m_pImageTensorRGB[i])(x,y).r=(m_tensor[i*SiNGLE_TENSOR_DIM])->GetElement(y,x);
				(*m_pImageTensorRGB[i])(x,y).g=(m_tensor[i*SiNGLE_TENSOR_DIM+1])->GetElement(y,x);
				(*m_pImageTensorRGB[i])(x,y).b=(m_tensor[i*SiNGLE_TENSOR_DIM+2])->GetElement(y,x);
			}
		}
	}
	m_tensors = NULL;	
}
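Per pixel, the constructor above accumulates the symmetric 2x2 structure tensor from the channel gradients, summed over the color channels and (with a scale factor) over the wavelet levels, storing the three independent entries in m_tensor[0..2]. A minimal per-pixel sketch of that accumulation; the struct and function names are illustrative:

struct Tensor2x2 { double xx = 0, yy = 0, xy = 0; };   // symmetric, so three entries suffice

void accumulateTensor(Tensor2x2 &T, double ix, double iy, double scale = 1.0)
{
    T.xx += scale * ix * ix;   // corresponds to cv_tensor0
    T.yy += scale * iy * iy;   // corresponds to cv_tensor1
    T.xy += scale * ix * iy;   // corresponds to cv_tensor2
}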
Example 11
PyObject* Effect::wrapSetImage(PyObject *self, PyObject *args)
{
	// get the effect
	Effect * effect = getEffect();

	// check if we have aborted already
	if (effect->_abortRequested)
	{
		return Py_BuildValue("");
	}

	// determine the timeout
	int timeout = effect->_timeout;
	if (timeout > 0)
	{
		timeout = effect->_endTime - QDateTime::currentMSecsSinceEpoch();

		// we are done if the time has passed
		if (timeout <= 0)
		{
			return Py_BuildValue("");
		}
	}

	// bytearray of values
	int width, height;
	PyObject * bytearray = nullptr;
	if (PyArg_ParseTuple(args, "iiO", &width, &height, &bytearray))
	{
		if (PyByteArray_Check(bytearray))
		{
			int length = PyByteArray_Size(bytearray);
			if (length == 3 * width * height)
			{
				Image<ColorRgb> image(width, height);
				char * data = PyByteArray_AS_STRING(bytearray);
				memcpy(image.memptr(), data, length);

				effect->_imageProcessor->process(image, effect->_colors);
				effect->setColors(effect->_priority, effect->_colors, timeout, false);
				return Py_BuildValue("");
			}
			else
			{
				PyErr_SetString(PyExc_RuntimeError, "Length of bytearray argument should be 3*width*height");
				return nullptr;
			}
		}
		else
		{
			PyErr_SetString(PyExc_RuntimeError, "Argument 3 is not a bytearray");
			return nullptr;
		}
	}
	else
	{
		return nullptr;
	}

	// error
	PyErr_SetString(PyExc_RuntimeError, "Unknown error");
	return nullptr;
}
Example 12
void ImageReaderPNM::readNative(ImageReader::ImagePlane imagePlanes[])
	{
	switch(imageType)
		{
		case '1': // ASCII bitmap
			{
			/* Create a value source to parse ASCII pixel values: */
			IO::ValueSource image(file);
			image.skipWs();
			
			/* Read the image one row at a time: */
			Misc::UInt8* rowPtr=static_cast<Misc::UInt8*>(imagePlanes[0].basePtr);
			for(unsigned int y=0;y<imageSpec.size[1];++y,rowPtr+=imagePlanes[0].rowStride)
				{
				/* Read the row one pixel at a time: */
				Misc::UInt8* pPtr=rowPtr;
				for(unsigned int x=0;x<imageSpec.size[0];++x,pPtr+=imagePlanes[0].pixelStride)
					;
				}
			
			break;
			}
		
		case '2': // ASCII grayscale image
		case '3': // ASCII RGB image
			{
			/* Create a value source to parse ASCII pixel values: */
			IO::ValueSource image(file);
			image.skipWs();
			
			/* Determine the native pixel size: */
			if(maxValue<256U)
				{
				/* Read 8-bit pixels: */
				readASCII<Misc::UInt8>(image,imageSpec,imagePlanes);
				}
			else
				{
				/* Read 16-bit pixels: */
				readASCII<Misc::UInt16>(image,imageSpec,imagePlanes);
				}
			break;
			}
		
		case '4': // Binary bitmap
			{
			/* Read the image one row at a time: */
			Misc::UInt8* rowPtr=static_cast<Misc::UInt8*>(imagePlanes[0].basePtr);
			for(unsigned int y=0;y<imageSpec.size[1];++y,rowPtr+=imagePlanes[0].rowStride)
				{
				/* Read the row one pixel at a time: */
				Misc::UInt8* pPtr=rowPtr;
				for(unsigned int x=0;x<imageSpec.size[0];++x,pPtr+=imagePlanes[0].pixelStride)
					;
				}
			break;
			}
		
		case '5': // Binary grayscale image
		case '6': // Binary RGB image
			{
			/* Determine the native pixel size: */
			if(maxValue<256U)
				{
				/* Read 8-bit pixels: */
				readBinary<Misc::UInt8>(file,imageSpec,imagePlanes);
				}
			else
				{
				/* Read 16-bit pixels: */
				readBinary<Misc::UInt16>(file,imageSpec,imagePlanes);
				}
			break;
			}
		
		}
	
	/* There can be only one image in a PNM file: */
	done=true;
	}
Example 13
void MicrodiaCamera::backgroundLoop()
{
    check_heap();
    int len;
    int imgSize = width() * height();
    int buffer_size  = imgSize * 3;
    unsigned char buffer[buffer_size];
    Image image(height(), width());

    int consecutive_readerrs=0;

    bool external_camera_in_use;
    int external_camera_file;

    while (1)
    {
        check_heap();

        external_camera_in_use = access( EXTERNAL_CAMERA_BUFFER, F_OK ) != -1;

        if( external_camera_in_use ) 
        {
            // external camera buffer is in use

            // Wait for frame to be ready
            while(! (access(EXTERNAL_CAMERA_READY, F_OK) != -1) )
            {
                // Check if the external camera has been disabled
                external_camera_in_use = access( EXTERNAL_CAMERA_BUFFER, R_OK ) != -1;
                if(! external_camera_in_use) break;
            }
            
            // If external camera is now disabled, go back to the beginning
            if(! external_camera_in_use) continue;

            // a new frame is ready for reading

            // open file
            external_camera_file = open(EXTERNAL_CAMERA_BUFFER, O_RDONLY);

            len = read(external_camera_file, buffer, buffer_size);  // read in the image from the file

            // close file
            close(external_camera_file);

            // don't use this frame again
            remove(EXTERNAL_CAMERA_READY);
            
        } else 
        {
            // use Microdia camera
            len = read(m_camDevice, buffer, buffer_size);  // read in the image from the camera
        }

        if (len == -1) {                                // check for errors
            if (consecutive_readerrs >= 10) {
                //printf("%d consecutive read errors:  try to reopen camera\n",consecutive_readerrs);
                // Break from loop and try to reopen camera
                closeCamera();
                openCamera();
                consecutive_readerrs=0;
                //break;
            }
            consecutive_readerrs++;
            QThread::yieldCurrentThread();
            continue;
        }

        consecutive_readerrs=0;

        if (len != buffer_size){
            printf("Error reading from camera:  expected %d bytes, got %d bytes\n", buffer_size, len);
            continue;
        }

        check_heap();

        Pixel565 *out = image.scanLine(0);  // Copy to image
        unsigned char *in = buffer;

        for (int i = imgSize; i > 0; i--) {
            *(out++) = Pixel565::fromRGB8(in[2], in[1], in[0]);
            in += 3;
        }
        check_heap();

        callFrameHandlers(image);

        if(m_processOneFrame || !m_processContinuousFrames){
            m_processOneFrame = false;
            break;
        }
    }
}
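The copy loop at the end converts each BGR byte triple from the capture buffer into a Pixel565 value. A minimal sketch of a typical RGB888-to-RGB565 packing, which is presumably what Pixel565::fromRGB8 does (assumption: 5 bits red in the high bits, 6 bits green, 5 bits blue):

#include <cstdint>

std::uint16_t packRGB565(std::uint8_t r, std::uint8_t g, std::uint8_t b)
{
    return static_cast<std::uint16_t>(((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3));
}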
Example 14
wxImage ThemeBase::MakeImageWithAlpha( wxBitmap & Bmp )
{
   // BUG in wxWidgets.  Conversion from BMP to image does not preserve alpha.
   wxImage image( Bmp.ConvertToImage() );
   return image;
}
Example 15
void REIXSXESImageInterpolationAB::computeCachedValues() const
{
	int iSize = inputSource_->size(0); //1024
	int jSize = inputSource_->size(1); //64

	// grab the full image
	QVector<double> image(iSize*jSize);
	if(!inputSource_->values(AMnDIndex(0,0), AMnDIndex(iSize-1, jSize-1), image.data())) {
		AMErrorMon::report(this, AMErrorReport::Alert, -333, "Could not retrieve values from detector image. Please report this problem to the REIXS Acquaman developers.");
		cacheUpdateRequired_ = false;	// avoid repeating this error message for every single data point...
		return;
	}

	//If there is only one shift value, don't interpolate.
	if(shiftValues2().isEmpty() || curve2Disabled_)
	{
		//The center point of the sum region
		double originX = (double)sumRangeMinX_ + ((double)sumRangeMaxX_ - (double)sumRangeMinX_)/2.0;
		double originY = (double)sumRangeMinY_ + ((double)sumRangeMaxY_ - (double)sumRangeMinY_)/2.0;
		//Width and height of the sum region, in pixels
		double sumRangeWidth = (double)(sumRangeMaxX_ - sumRangeMinX_);
		double sumRangeHeight = (double)(sumRangeMaxY_ - sumRangeMinY_);


		for(int bin = 0; bin < iSize / binningLevel_; bin++) {
			cachedData_[bin] = 0;
			for(int temp = 0; temp < binningLevel_; temp++ ) {
				int i = bin * binningLevel_ + temp;
				double newVal = 0.0;
				int contributingRows = 0;
				if(i > sumRangeMinX_ && i < sumRangeMaxX_) {
					double xVal = (double)i - originX;
					for(int j=sumRangeMinY_; j<=sumRangeMaxY_; j++) { // loop through rows
						if(rangeRound_ == 0.0) { //not ellipse
							int sourceI = i + shiftValues1_.at(j);
							if(sourceI < iSize && sourceI >= 0) {
								newVal += image.at(sourceI*jSize + j);
								contributingRows++;
							}
						}
						else {

							double yVal = (double)j - originY;
							if(isWithinMaskEllipse(xVal, yVal, sumRangeWidth, sumRangeHeight)) {
								int sourceI = i + shiftValues1_.at(j);
								if(sourceI < iSize && sourceI >= 0) {
									newVal += image.at(sourceI*jSize + j);
									contributingRows++;
								}
							}
						}
					}
				}
				// normalize by dividing by the number of rows that contributed. Since we want to keep the output in units similar to raw counts, multiply by the nominal (usual) number of contributing rows.
				// Essentially, this normalization prevents columns near the edge that miss out on some rows due to shifting from being artificially suppressed.  For inner columns, contributingRows will be (sumRangeMaxY_ - sumRangeMinY_ + 1).
				if(contributingRows == 0)
					newVal = 0;
				else
					newVal = newVal * double(sumRangeMaxY_ - sumRangeMinY_ + 1) / double(contributingRows);

				cachedData_[bin] = cachedData_[bin] + newVal;
			}
		}
	}
	else
	{
		int interpolatedISize = (iSize-1)*interpolationLevel_;
		QVector<double> shiftValueMap = QVector<double>(interpolatedISize*jSize, 0);
		computeShiftMap(interpolatedISize, jSize, shiftValueMap.data());
		double *shiftValueMapPointer = shiftValueMap.data();

		//interpolate image in x, probably rotating image by 90 degree in memory because of a switch in conventions...
		QVector<double> interpolatedImage = QVector<double>(interpolatedISize*jSize, 0);
		double *interpolatedImagePointer = interpolatedImage.data();

		for(int i = 0, modifiedISize = iSize-1; i < modifiedISize; i++){

			for(int j = 0; j < jSize; j++){

				int index = j + i*jSize;
				int interpolatedIndex = j + i*jSize*interpolationLevel_;
				double imageValue = image.at(index);
				double nextImageValue = image.at(j + (i+1)*jSize);

				for(int k = 0; k < interpolationLevel_; k++)
					interpolatedImagePointer[interpolatedIndex + k*jSize] = imageValue + ((double)k / (double)interpolationLevel_) * (nextImageValue - imageValue);
			}
		}

		//OKAY!  Now I think we have a shiftMap_ and an interpolatedImage that mirror those used in Robert's code.
		//***************************************************************************
		//***************************************************************************

		QVector<double> finalLargeImage = QVector<double>(interpolatedISize*jSize, 0);
		double *finalLargeImagePointer = finalLargeImage.data();

		//iterate through shiftMap, adding the corresponding element from interpolated to the shifted location in this new array
		for(int i = 0; i < interpolatedISize; i++){

			for(int j = 0; j < jSize; j++){

				int shiftOffset = qRound(shiftValueMapPointer[i*jSize+j] * interpolationLevel_);

				if (((i + shiftOffset) < interpolatedISize) && ((i + shiftOffset) > 0))
				{
					//add only one pixel from the interpolated image to each pixel in the shifted image:
					finalLargeImagePointer[j + i*jSize] = interpolatedImagePointer[j + (i + shiftOffset)*jSize];
				}
			}
		}

		QVector<double> tempFinalVector = QVector<double>(iSize*jSize, 0);
		double *tempFinalVectorPointer = tempFinalVector.data();

		for(int i = 0, modifiedISize = iSize-1; i < modifiedISize; i++){

			for(int j = 0; j < jSize; j++){

				int offset = j + i*jSize;
				int largeOffset = j + i*jSize*interpolationLevel_;

				for (int k = 0; k < interpolationLevel_; k++)
					tempFinalVectorPointer[offset] += finalLargeImagePointer[largeOffset + k*jSize];

				tempFinalVectorPointer[offset] /= double(interpolationLevel_);
			}
		}

//		***************************************************************************
//		***************************************************************************

		//The center point of the sum region
		double originX = (double)sumRangeMinX_ + ((double)sumRangeMaxX_ - (double)sumRangeMinX_)/2.0;
		double originY = (double)sumRangeMinY_ + ((double)sumRangeMaxY_ - (double)sumRangeMinY_)/2.0;
		//Width and height of the sum region, in pixels
		double sumRangeWidth = (double)(sumRangeMaxX_ - sumRangeMinX_);
		double sumRangeHeight = (double)(sumRangeMaxY_ - sumRangeMinY_);


		for(int bin = 0; bin < iSize / binningLevel_; bin++) {
			cachedData_[bin] = 0;
			for(int temp = 0; temp < binningLevel_; temp++ ) {
				int i = bin * binningLevel_ + temp;
				double newVal = 0.0;
				int contributingRows = 0;

				if(i > sumRangeMinX_ && i < sumRangeMaxX_) {

					double xVal = (double)i - originX;

					for (int j = sumRangeMinY_; j <= sumRangeMaxY_; j++) { // loop through rows

						if (rangeRound_ == 0.0) { //not ellipse

							newVal += tempFinalVectorPointer[j+i*jSize];
							contributingRows++;
						}

						else {

							double yVal = (double)j - originY;

							if(isWithinMaskEllipse(xVal, yVal, sumRangeWidth, sumRangeHeight)) {

								newVal += tempFinalVectorPointer[j+i*jSize];
								contributingRows++;
							}
						}
					}
				}
				// normalize by dividing by the number of rows that contributed. Since we want to keep the output in units similar to raw counts, multiply by the nominal (usual) number of contributing rows.
				// Essentially, this normalization prevents columns near the edge that miss out on some rows due to shifting from being artificially suppressed.  For inner columns, contributingRows will be (sumRangeMaxY_ - sumRangeMinY_ + 1).
				if(contributingRows == 0)
					newVal = 0;

				else
					newVal = newVal * double(sumRangeMaxY_ - sumRangeMinY_ + 1) / double(contributingRows);

				cachedData_[bin] += newVal;
			}
		}
	}

	cachedDataRange_ = AMUtility::rangeFinder(cachedData_);
	cacheUpdateRequired_ = false;
}
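The interpolation stage above expands each gap between neighbouring detector columns into interpolationLevel_ evenly spaced values (plain linear interpolation) before shifting and re-binning. A minimal one-dimensional sketch of that upsampling; the function name is illustrative:

#include <cstddef>
#include <vector>

std::vector<double> upsampleLinear(const std::vector<double> &v, int level)
{
    std::vector<double> out;
    if (v.size() < 2 || level < 1)
        return out;
    out.reserve((v.size() - 1) * level);
    for (std::size_t i = 0; i + 1 < v.size(); ++i)
        for (int k = 0; k < level; ++k)
            out.push_back(v[i] + (double(k) / level) * (v[i + 1] - v[i]));   // same formula as the inner loop above
    return out;
}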
Example 16
void JpegServer::generateNextFrame()
{
	if(!m_scene || !MainWindow::mw())
		return;
		
	if(m_onlyRenderOnSlideChange &&
	   !m_slideChanged &&
	   !m_cachedImage.isNull())
	{
		//qDebug() << "JpegServer::generateNextFrame(): Hit Cache";
		emit frameReady(m_cachedImage);
		if(m_sender)
			m_sender->transmitImage(m_cachedImage);
		return;
	}
	
	if(m_onlyRenderOnSlideChange)
	{
		m_slideChanged = false;
		//qDebug() << "JpegServer::generateNextFrame(): Cache fallthru ...";
	}
	
	//qDebug() << "JpegServer::generateNextFrame(): Rendering scene "<<m_scene<<", slide:"<<m_scene->slide();
	
	m_time.start();
	
	QImage image(FRAME_WIDTH,
	             FRAME_HEIGHT,
		     FRAME_FORMAT);
	memset(image.scanLine(0), 0, image.byteCount());
	
	QPainter painter(&image);
	painter.fillRect(image.rect(),Qt::transparent);
	painter.setRenderHint(QPainter::SmoothPixmapTransform, false);
	painter.setRenderHint(QPainter::Antialiasing, false);
	painter.setRenderHint(QPainter::TextAntialiasing, false);
	
	if(!m_sourceRect.isValid())
		m_sourceRect = MainWindow::mw()->standardSceneRect();
		
	if(m_sourceRect != m_targetRect)
		updateRects();
	
	m_scene->render(&painter,
		m_targetRect,
		m_sourceRect);
	
	painter.end();
	
 	emit frameReady(image);
	
	if(m_sender)
	{
		//qDebug() << "JpegServer::generateNextFrame(): Sending image via VideoSender";
		m_sender->transmitImage(image);
	}
	else
	{
		//qDebug() << "JpegServer::generateNextFrame(): No VideoSender created";
	}
	
	if(m_onlyRenderOnSlideChange)
		m_cachedImage = image;
	
// 	QImageWriter writer("frame.png", "png");
// 	writer.write(image);

	m_frameCount ++;
	m_timeAccum  += m_time.elapsed();
	
// 	if(m_frameCount % (m_fps?m_fps:10) == 0)
// 	{
// 		QString msPerFrame;
// 		msPerFrame.setNum(((double)m_timeAccum) / ((double)m_frameCount), 'f', 2);
// 	
// 		qDebug() << "JpegServer::generateNextFrame(): Avg MS per Frame:"<<msPerFrame<<", threadId:"<<QThread::currentThreadId();
// 	}
// 			
// 	if(m_frameCount % ((m_fps?m_fps:10) * 10) == 0)
// 	{
// 		m_timeAccum  = 0;
// 		m_frameCount = 0;
// 	}
	
	//qDebug() << "JpegServer::generateNextFrame(): Done rendering "<<m_scene;
}
Example 17
wxImage wxDIB::ConvertToImage() const
{
    wxCHECK_MSG( IsOk(), wxNullImage,
                    wxT("can't convert invalid DIB to wxImage") );

    // create the wxImage object
    const int w = GetWidth();
    const int h = GetHeight();
    wxImage image(w, h, false /* don't bother clearing memory */);
    if ( !image.Ok() )
    {
        wxFAIL_MSG( wxT("could not allocate data for image") );
        return wxNullImage;
    }

    if ( m_hasAlpha )
    {
        image.SetAlpha();
    }

    // this is the same loop as in Create() just above but with copy direction
    // reversed
    const int bpp = GetDepth();
    const int dstBytesPerLine = w * 3;
    const int srcBytesPerLine = GetLineSize(w, bpp);
    unsigned char *dst = image.GetData() + ((h - 1) * dstBytesPerLine);
    unsigned char *alpha = image.HasAlpha() ? image.GetAlpha() + (h - 1)*w
                                            : NULL;
    const bool is32bit = bpp == 32;
    const unsigned char *srcLineStart = (unsigned char *)GetData();
    for ( int y = 0; y < h; y++ )
    {
        // copy one DIB line
        const unsigned char *src = srcLineStart;
        for ( int x = 0; x < w; x++ )
        {
            dst[2] = *src++;
            dst[1] = *src++;
            dst[0] = *src++;

            if ( is32bit )
            {
                if ( alpha )
                {
                    // wxImage uses non premultiplied alpha so undo
                    // premultiplication done in Create() above
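                    // Illustrative numbers (not from the original source): a premultiplied
                    // channel value of 64 with a = 128 maps back to 64 * 255 / 128 = 127
                    // in straight (non-premultiplied) form.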
                    const unsigned char a = *src;
                    *alpha++ = a;
                    if ( a > 0 )
                    {
                        dst[0] = (dst[0] * 255) / a;
                        dst[1] = (dst[1] * 255) / a;
                        dst[2] = (dst[2] * 255) / a;
                    }
                }

                src++;
            }

            dst += 3;
        }

        // pass to the previous line in the image
        dst -= 2*dstBytesPerLine;
        if ( alpha )
            alpha -= 2*w;

        // and to the next one in the DIB
        srcLineStart += srcBytesPerLine;
    }

    return image;
}
Ejemplo n.º 18
0
/* Preload images for entities that could be found on this map and gamemode.
   This was basically cut & paste from bits of ../server/sv_map.c */
void preload_entities(char *filename, int gamemode)
{
    char buf[MAXLINE], name[32], *line;
    ent_type_t *et;
    FILE *file;
    int entity_section, i, skipline;

    /* Open the map file */
    if( !(file = open_data_file("maps", filename) ) ) {
	printf("%s: Couldn't open %s\n", __FILE__, filename);
	return;
    }

    if( !ent_img_loaded[client.entity_type] ) { /* Load client's entity image */
	if( (et = entity_type(client.entity_type)) == NULL )
	    return;
	image(entity_type_animation(et, ALIVE)->pixmap, MASKED);
	ent_img_loaded[client.entity_type] = 1;
    }

    /* Scan map for entities that will be loaded */
    entity_section = 0; /* Found section yet? */
    while( !feof(file) ) {
	if( !fgets(buf, MAXLINE, file) )
	    break;
	CHOMP(buf);
	if( buf[0] == '#' || buf[0] == '\0' )
	    continue; /* skip line */
	if( entity_section ) {
	    line = buf;
	    skipline = 0;
	    /* check for gamemode specific line prefix characters */
	    for(i=0 ; i<NUM_GAME_MODES ; i++ ) {
		if( buf[0] == gamemodechar[i] ) {
		    if( gamemode == i )
			line = buf + 1;
		    else
			skipline = 1;
		}
	    }

	    if( skipline )
		continue; /* Not in our mode */

	    if( sscanf(line, "%s", name) && strcasecmp(name, "SPAWN") != 0 ) {
		for( i=0 ; i < num_entity_types ; i++ ) {
		    if( (et = entity_type(i)) == NULL )
			continue;
		    if( !ent_img_loaded[i] && !strcasecmp( et->name, name ) ) {
			ent_img_loaded[i] = 1;
			image(entity_type_animation(et,ALIVE)->pixmap, MASKED);
		    }
		}
	    }
	} else if( !strcasecmp("ENTITY", buf) )
	    entity_section = 1;
    }

    fclose(file);

}
Ejemplo n.º 19
0
PassRefPtr<BitmapImageSingleFrameSkia> BitmapImageSingleFrameSkia::create(const SkBitmap& bitmap)
{
    RefPtr<BitmapImageSingleFrameSkia> image(adoptRef(new BitmapImageSingleFrameSkia()));
    bitmap.copyTo(&image->m_nativeImage, bitmap.config());
    return image.release();
}
Ejemplo n.º 20
0
	string Resources::GetLanguageXml ()
	{
#ifdef TC_WINDOWS
		ConstBufferPtr res = GetWindowsResource (L"XML", L"IDR_LANGUAGE");
		Buffer strBuf (res.Size() + 1);
		strBuf.Zero();
		strBuf.CopyFrom (res);
		return string (reinterpret_cast <char *> (strBuf.Ptr()));
#else
		static const char LanguageXml[] =
		{
//			Note: "../Common/Language.xml.h" does not exist here, and the include on the next line is known to be wrong as well.
#			include "Common/Language.xml.h"
			, 0
		};

		return string (LanguageXml);
#endif
	}

	string Resources::GetLegalNotices ()
	{
#ifdef TC_WINDOWS
		ConstBufferPtr res = GetWindowsResource (L"TEXT", L"IDR_LICENSE");
		Buffer strBuf (res.Size() + 1);
		strBuf.Zero();
		strBuf.CopyFrom (res);
		return string (reinterpret_cast <char *> (strBuf.Ptr()));
#else
		static const char License[] =
		{
#			include "License.txt.h"
			, 0
		};

		return string (License);
#endif
	}


#ifndef TC_NO_GUI

	wxBitmap Resources::GetDriveIconBitmap ()
	{
#ifdef TC_WINDOWS
		return wxBitmap (L"IDB_DRIVE_ICON", wxBITMAP_TYPE_BMP_RESOURCE).ConvertToImage().Resize (wxSize (16, 12), wxPoint (0, 0));
#else
		static const byte DriveIcon[] =
		{
#			include "Mount/Drive_icon_96dpi.bmp.h"
		};

		wxMemoryInputStream stream (DriveIcon, sizeof (DriveIcon));
		return wxBitmap (wxImage (stream).Resize (wxSize (16, 12), wxPoint (0, 0)));
#endif
	}

	wxBitmap Resources::GetDriveIconMaskBitmap ()
	{
#ifdef TC_WINDOWS
		wxImage image = wxBitmap (L"IDB_DRIVE_ICON_MASK", wxBITMAP_TYPE_BMP_RESOURCE).ConvertToImage().Resize (wxSize (16, 12), wxPoint (0, 0));
		return wxBitmap (image.ConvertToMono (0, 0, 0), 1);
#else
		static const byte DriveIconMask[] =
		{
#			include "Mount/Drive_icon_mask_96dpi.bmp.h"
		};

		wxMemoryInputStream stream (DriveIconMask, sizeof (DriveIconMask));
		wxImage image (stream);
		image.Resize (wxSize (16, 12), wxPoint (0, 0));

#	ifdef __WXGTK__
		return wxBitmap (image.ConvertToMono (0, 0, 0), 1);
#	else
		return wxBitmap (image);
#	endif
#endif
	}


	wxBitmap Resources::GetLogoBitmap ()
	{
#ifdef TC_WINDOWS
		return wxBitmap (L"IDB_LOGO", wxBITMAP_TYPE_BMP_RESOURCE);
#else
		static const byte Logo[] =
		{
#			include "Mount/Logo_96dpi.bmp.h"
		};

		wxMemoryInputStream stream (Logo, sizeof (Logo));
		return wxBitmap (wxImage (stream));
#endif
	}

	wxBitmap Resources::GetTextualLogoBitmap ()
	{
#ifdef TC_WINDOWS
		return wxBitmap (L"IDB_TEXTUAL_LOGO", wxBITMAP_TYPE_BMP_RESOURCE);
#else
		static const byte Logo[] =
		{
#			include "Common/Textual_logo_96dpi.bmp.h"
		};

		wxMemoryInputStream stream (Logo, sizeof (Logo));
		return wxBitmap (wxImage (stream));
#endif
	}

	wxIcon Resources::GetCipherShedIcon ()
	{
#ifdef TC_WINDOWS
		return wxIcon (L"IDI_CIPHERSHED_ICON", wxBITMAP_TYPE_ICO_RESOURCE, 16, 16);
#else
#		include "Resources/Icons/CipherShed-16x16.xpm"
		return wxIcon (CipherShedIcon16x16);
#endif
	}

	wxBitmap Resources::GetVolumeCreationWizardBitmap (int height)
	{
#ifdef TC_WINDOWS
		return wxBitmap (L"IDB_VOLUME_WIZARD_BITMAP", wxBITMAP_TYPE_BMP_RESOURCE);
#else
		static const byte VolumeWizardIcon[] =
		{
#			include "Format/CipherShed_Wizard.bmp.h"
		};

		wxMemoryInputStream stream (VolumeWizardIcon, sizeof (VolumeWizardIcon));

		wxImage image (stream);
		if (height != -1)
		{
			double scaleFactor = double (height) / double (image.GetHeight());
			image.Rescale (int (image.GetWidth() * scaleFactor), int (image.GetHeight() * scaleFactor), wxIMAGE_QUALITY_HIGH);
		}

		return wxBitmap (image);
#endif
	}

#endif // !TC_NO_GUI

}
Ejemplo n.º 21
0
int main(int argc, char **argv)
{
#ifdef CONSOLE_APPLICATION
    QApplication app(argc, argv, QApplication::Tty);
#else
    QApplication app(argc, argv);
#endif
#ifdef DO_QWS_DEBUGGING
    qt_show_painter_debug_output = false;
#endif

    DeviceType type = WidgetType;
    bool checkers_background = true;

    QImage::Format imageFormat = QImage::Format_ARGB32_Premultiplied;

    QLocale::setDefault(QLocale::c());

    QStringList files;

    bool interactive = false;
    bool printdlg = false;
    bool highres = false;
    bool show_cmp = false;
    int width = 800, height = 800;
    bool verboseMode = false;

#ifndef QT_NO_OPENGL
    QGLFormat f = QGLFormat::defaultFormat();
    f.setSampleBuffers(true);
    f.setStencil(true);
    f.setAlpha(true);
    f.setAlphaBufferSize(8);
    QGLFormat::setDefaultFormat(f);
#endif

    char *arg;
    for (int i=1; i<argc; ++i) {
        arg = argv[i];
        if (*arg == '-') {
            QString option = QString(arg + 1).toLower();
            if (option == "widget")
                type = WidgetType;
            else if (option == "bitmap")
                type = BitmapType;
            else if (option == "pixmap")
                type = PixmapType;
            else if (option == "image")
                type = ImageType;
            else if (option == "imageformat") {
                Q_ASSERT_X(i + 1 < argc, "main", "-imageformat must be followed by a value");
                QString format = QString(argv[++i]).toLower();

                imageFormat = QImage::Format_Invalid;
                static const int formatCount =
                    sizeof(imageFormats) / sizeof(imageFormats[0]);
                for (int ff = 0; ff < formatCount; ++ff) {
                    if (QLatin1String(imageFormats[ff].name) == format) {
                        imageFormat = imageFormats[ff].format;
                        break;
                    }
                }

                if (imageFormat == QImage::Format_Invalid) {
                    printf("Invalid image format.  Available formats are:\n");
                    for (int ff = 0; ff < formatCount; ++ff)
                        printf("\t%s\n", imageFormats[ff].name);
                    return -1;
                }
            } else if (option == "imagemono")
                type = ImageMonoType;
            else if (option == "imagewidget")
                type = ImageWidgetType;
#ifndef QT_NO_OPENGL
            else if (option == "opengl")
                type = OpenGLType;
            else if (option == "pbuffer")
                type = OpenGLPBufferType;
#endif
#ifdef USE_CUSTOM_DEVICE
            else if (option == "customdevice")
                type = CustomDeviceType;
            else if (option == "customwidget")
                type = CustomWidgetType;
#endif
            else if (option == "pdf")
                type = PdfType;
            else if (option == "ps")
                type = PsType;
            else if (option == "picture")
                type = PictureType;
            else if (option == "printer")
                type = PrinterType;
            else if (option == "highres") {
                type = PrinterType;
                highres = true;
            } else if (option == "printdialog") {
                type = PrinterType;
                printdlg = true;
            }
            else if (option == "grab")
                type = GrabType;
            else if (option == "i")
                interactive = true;
            else if (option == "v")
                verboseMode = true;
            else if (option == "commands") {
                displayCommands();
                return 0;
            } else if (option == "w") {
                Q_ASSERT_X(i + 1 < argc, "main", "-w must be followed by a value");
                width = atoi(argv[++i]);
            } else if (option == "h") {
                Q_ASSERT_X(i + 1 < argc, "main", "-h must be followed by a value");
                height = atoi(argv[++i]);
            } else if (option == "cmp") {
                show_cmp = true;
            } else if (option == "bg-white") {
                checkers_background = false;
            }
        } else {
#if defined (Q_WS_WIN)
            QString input = QString::fromLocal8Bit(argv[i]);
            if (input.indexOf('*') >= 0) {
                QFileInfo info(input);
                QDir dir = info.dir();
                QFileInfoList infos = dir.entryInfoList(QStringList(info.fileName()));
                for (int ii=0; ii<infos.size(); ++ii)
                    files.append(infos.at(ii).absoluteFilePath());
            } else {
                files.append(input);
            }
#else
            files.append(QString(argv[i]));
#endif
        }
    }

    PaintCommands pcmd(QStringList(), 800, 800);
    pcmd.setVerboseMode(verboseMode);
    pcmd.setType(type);
    pcmd.setCheckersBackground(checkers_background);

    QWidget *activeWidget = 0;

    if (interactive) {
        runInteractive();
        if (!files.isEmpty())
            interactive_widget->load(files.at(0));
    } else if (files.isEmpty()) {
        printHelp();
        return 0;
    } else {
        for (int j=0; j<files.size(); ++j) {
            const QString &fileName = files.at(j);
            QStringList content;

            QFile file(fileName);
            QFileInfo fileinfo(file);
            if (file.open(QIODevice::ReadOnly)) {
                QTextStream textFile(&file);
                QString script = textFile.readAll();
                content = script.split("\n", QString::SkipEmptyParts);
            } else {
                printf("failed to read file: '%s'\n", qPrintable(fileinfo.absoluteFilePath()));
                continue;
            }
            pcmd.setContents(content);

            if (show_cmp) {
                QString pmFile = QString(files.at(j)).replace(".qps", "_qps") + ".png";
                qDebug() << pmFile << QFileInfo(pmFile).exists();
                QPixmap pixmap(pmFile);
                if (!pixmap.isNull()) {
                    QLabel *label = createLabel();
                    label->setWindowTitle("VERIFY: " + pmFile);
                    label->setPixmap(pixmap);
                    label->show();
                }
            }

            switch (type) {

            case WidgetType:
            {
                OnScreenWidget<QWidget> *qWidget =
                    new OnScreenWidget<QWidget>(files.at(j));
                qWidget->setVerboseMode(verboseMode);
                qWidget->setType(type);
                qWidget->setCheckersBackground(checkers_background);
                qWidget->m_commands = content;
                qWidget->resize(width, height);
                qWidget->show();
                activeWidget = qWidget;
                break;
            }

            case ImageWidgetType:
            {
                OnScreenWidget<QWidget> *qWidget = new OnScreenWidget<QWidget>(files.at(j));
                qWidget->setVerboseMode(verboseMode);
                qWidget->setType(type);
                qWidget->setCheckersBackground(checkers_background);
                qWidget->m_commands = content;
                qWidget->resize(width, height);
                qWidget->show();
                activeWidget = qWidget;
                break;

            }
#ifndef QT_NO_OPENGL
            case OpenGLPBufferType:
            {
                QGLPixelBuffer pbuffer(QSize(width, height));
                QPainter pt(&pbuffer);
                pcmd.setPainter(&pt);
                pcmd.setFilePath(fileinfo.absolutePath());
                pcmd.runCommands();
                pt.end();

                QImage image = pbuffer.toImage();

                QLabel *label = createLabel();
                label->setPixmap(QPixmap::fromImage(image));
                label->resize(label->sizeHint());
                label->show();
                activeWidget = label;
                break;
            }
            case OpenGLType:
            {
                OnScreenWidget<QGLWidget> *qGLWidget = new OnScreenWidget<QGLWidget>(files.at(j));
                qGLWidget->setVerboseMode(verboseMode);
                qGLWidget->setType(type);
                qGLWidget->setCheckersBackground(checkers_background);
                qGLWidget->m_commands = content;
                qGLWidget->resize(width, height);
                qGLWidget->show();
                activeWidget = qGLWidget;
                break;
            }
#else
            case OpenGLType:
                printf("OpenGL type not supported in this Qt build\n");
                break;
#endif
#ifdef USE_CUSTOM_DEVICE
            case CustomDeviceType:
            {
                CustomPaintDevice custom(width, height);
                QPainter pt;
                pt.begin(&custom);
                pcmd.setPainter(&pt);
                pcmd.setFilePath(fileinfo.absolutePath());
                pcmd.runCommands();
                pt.end();
                QImage *img = custom.image();
                if (img) {
                    QLabel *label = createLabel();
                    label->setPixmap(QPixmap::fromImage(*img));
                    label->resize(label->sizeHint());
                    label->show();
                    activeWidget = label;
                    img->save("custom_output_pixmap.png", "PNG");
                } else {
                    custom.save("custom_output_pixmap.png", "PNG");
                }
                break;
            }
            case CustomWidgetType:
            {
                OnScreenWidget<CustomWidget> *cWidget = new OnScreenWidget<CustomWidget>;
                cWidget->setVerboseMode(verboseMode);
                cWidget->setType(type);
                cWidget->setCheckersBackground(checkers_background);
                cWidget->m_filename = files.at(j);
                cWidget->setWindowTitle(fileinfo.filePath());
                cWidget->m_commands = content;
                cWidget->resize(width, height);
                cWidget->show();
                activeWidget = cWidget;
                break;
            }
#endif
            case PixmapType:
            {
                QPixmap pixmap(width, height);
                pixmap.fill(Qt::white);
                QPainter pt(&pixmap);
                pcmd.setPainter(&pt);
                pcmd.setFilePath(fileinfo.absolutePath());
                pcmd.runCommands();
                pt.end();
                pixmap.save("output_pixmap.png", "PNG");
                break;
            }

            case BitmapType:
            {
                QBitmap bitmap(width, height);
                QPainter pt(&bitmap);
                pcmd.setPainter(&pt);
                pcmd.setFilePath(fileinfo.absolutePath());
                pcmd.runCommands();
                pt.end();
                bitmap.save("output_bitmap.png", "PNG");

                QLabel *label = createLabel();
                label->setPixmap(bitmap);
                label->resize(label->sizeHint());
                label->show();
                activeWidget = label;
                break;
            }

            case ImageMonoType:
            case ImageType:
            {
                qDebug() << "Creating image";
                QImage image(width, height, type == ImageMonoType
                             ? QImage::Format_MonoLSB
                             : imageFormat);
                image.fill(0);
                QPainter pt(&image);
                pcmd.setPainter(&pt);
                pcmd.setFilePath(fileinfo.absolutePath());
                pcmd.runCommands();
                pt.end();
                image.convertToFormat(QImage::Format_ARGB32).save("output_image.png", "PNG");
#ifndef CONSOLE_APPLICATION
                QLabel *label = createLabel();
                label->setPixmap(QPixmap::fromImage(image));
                label->resize(label->sizeHint());
                label->show();
                activeWidget = label;
#endif
                break;
            }

            case PictureType:
            {
                QPicture pic;
                QPainter pt(&pic);
                pcmd.setPainter(&pt);
                pcmd.setFilePath(fileinfo.absolutePath());
                pcmd.runCommands();
                pt.end();

                QImage image(width, height, QImage::Format_ARGB32_Premultiplied);
                image.fill(0);
                pt.begin(&image);
                pt.drawPicture(0, 0, pic);
                pt.end();
                QLabel *label = createLabel();
                label->setWindowTitle(fileinfo.absolutePath());
                label->setPixmap(QPixmap::fromImage(image));
                label->resize(label->sizeHint());
                label->show();
                activeWidget = label;
                break;
            }

            case PrinterType:
            {
#ifndef QT_NO_PRINTER
                PaintCommands pcmd(QStringList(), 800, 800);
                pcmd.setVerboseMode(verboseMode);
                pcmd.setType(type);
                pcmd.setCheckersBackground(checkers_background);
                pcmd.setContents(content);
                QString file = QString(files.at(j)).replace(".", "_") + ".ps";

                QPrinter p(highres ? QPrinter::HighResolution : QPrinter::ScreenResolution);
                if (printdlg) {
                    QPrintDialog printDialog(&p, 0);
                    if (printDialog.exec() != QDialog::Accepted)
                        break;
                } else {
                    p.setOutputFileName(file);
                }

                QPainter pt(&p);
                pcmd.setPainter(&pt);
                pcmd.setFilePath(fileinfo.absolutePath());
                pcmd.runCommands();
                pt.end();

                if (!printdlg) {
                    printf("wrote file: %s\n", qPrintable(file));
                }

                Q_ASSERT(!p.paintingActive());
#endif
                break;
            }
            case PsType:
            case PdfType:
            {
#ifndef QT_NO_PRINTER
                PaintCommands pcmd(QStringList(), 800, 800);
                pcmd.setVerboseMode(verboseMode);
                pcmd.setType(type);
                pcmd.setCheckersBackground(checkers_background);
                pcmd.setContents(content);
                bool ps = type == PsType;
                QPrinter p(highres ? QPrinter::HighResolution : QPrinter::ScreenResolution);
                QFileInfo input(files.at(j));
                QString file = QString("%1_%2.%3")
                               .arg(input.baseName())
                               .arg(input.suffix())
                               .arg(ps ? "ps" : "pdf");
                p.setOutputFormat(ps ? QPrinter::PostScriptFormat : QPrinter::PdfFormat);
                p.setOutputFileName(file);
                p.setPageSize(QPrinter::A4);
                QPainter pt(&p);
                pcmd.setPainter(&pt);
                pcmd.setFilePath(fileinfo.absolutePath());
                pcmd.runCommands();
                pt.end();

                printf("write file: %s\n", qPrintable(file));
#endif
                break;
            }
            case GrabType:
            {
                QImage image(width, height, QImage::Format_ARGB32_Premultiplied);
                image.fill(QColor(Qt::white).rgb());
                QPainter pt(&image);
                pcmd.setPainter(&pt);
                pcmd.setFilePath(fileinfo.absolutePath());
                pcmd.runCommands();
                pt.end();
                QImage image1(width, height, QImage::Format_RGB32);
                image1.fill(QColor(Qt::white).rgb());
                QPainter pt1(&image1);
                pt1.drawImage(QPointF(0, 0), image);
                pt1.end();

                QString filename = QString(files.at(j)).replace(".qps", "_qps") + ".png";
                image1.save(filename, "PNG");
                printf("%s grabbed to %s\n", qPrintable(files.at(j)), qPrintable(filename));
                break;
            }
            default:
                break;
            }
        }
    }
#ifndef CONSOLE_APPLICATION
    if (activeWidget || interactive) {
        QObject::connect(&app, SIGNAL(lastWindowClosed()), &app, SLOT(quit()));
        app.exec();
    }
    delete activeWidget;
#endif
    delete interactive_widget;
    return 0;
}
Ejemplo n.º 22
0
void VDTestPixmaps() {
	CPUEnableExtensions(CPUCheckForExtensions());
	VDFastMemcpyAutodetect();

	VDRegisterVideoDisplayControl();

	HWND hwndDisp = CreateWindow(VIDEODISPLAYCONTROLCLASS, "Kasumi onee-sama", WS_VISIBLE|WS_POPUP, 0, 0, 1024, 768, NULL, NULL, GetModuleHandle(NULL), NULL);

	IVDVideoDisplay *pDisp = VDGetIVideoDisplay(hwndDisp);

	const int srcw = 80;
	const int srch = 60;

	VDPixmapBuffer image(srcw, srch, nsVDPixmap::kPixFormat_XRGB8888);

	for(int y=0; y<srch; ++y) {
		for(int x=0; x<srcw; ++x) {
			int x2 = x - (srcw>>1);
			int y2 = y - (srch>>1);

			uint32 v = (int)((1.0 + sin((x2*x2 + y2*y2) / 50.0)) * 255.0 / 2.0 + 0.5);

			uint32 r = (255-v)<<16;

			if ((x^y)&1)
				v = r = 0;

			((uint32 *)((char *)image.data + image.pitch * y))[x] = (v*x/srcw) + (((v*y)/srch)<<8) + r;
		}
	}
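	// The loops above fill the test image with concentric sine rings (brightness
	// follows sin(r^2 / 50)), zeroed on a checkerboard pattern, with the ring value
	// ramped into blue horizontally and green vertically and its inverse in red.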

	VDPixmapBuffer sprite(srcw, srch, nsVDPixmap::kPixFormat_XRGB8888);
	VDPixmapBuffer buffer(1024, 768, nsVDPixmap::kPixFormat_XRGB8888);

	VDPixmapBlt(sprite, image);

	pDisp->SetSourcePersistent(true, buffer);

	bouncer p1(-64, -48, 1024+64, 768+48, 1.0);
	bouncer p2(-64, -48, 1024+64, 768+48, 0.5);

	sint64 freq;
	QueryPerformanceFrequency((LARGE_INTEGER *)&freq);

	sint64 start;
	QueryPerformanceCounter((LARGE_INTEGER *)&start);
	int blits = 0;
	double th = 0;

	VDPixmapTextureMipmapChain mipchain(sprite);

	vdautoptr<IVDPixmapResampler> pResampler(VDCreatePixmapResampler());

	while(pump()) {
		int x1 = p1.xposf();
		int y1 = p1.yposf();
		int x2 = p2.xposf();
		int y2 = p2.yposf();

//		VDPixmapBlt(buffer, xp, yp, image, 0, 0, 320, 240);
//		VDPixmapStretchBltNearest(buffer, x1, y1, x2, y2, sprite, -32<<16, -32<<16, (srcw+32)<<16, (srch+32)<<16);
//		VDPixmapStretchBltBilinear(buffer, x1, y1, x2, y2, sprite, 0, 0, srcw<<16, srch<<16);

		double fx1 = x1 / 65536.0;
		double fy1 = y1 / 65536.0;
		double fx2 = x2 / 65536.0;
		double fy2 = y2 / 65536.0;

		if (fx2 < fx1)
			std::swap(fx1, fx2);
		if (fy2 < fy1)
			std::swap(fy1, fy2);

		pResampler->Init(fx2-fx1, fy2-fy1, buffer.format, sprite.w, sprite.h, sprite.format, IVDPixmapResampler::kFilterLanczos3, IVDPixmapResampler::kFilterLanczos3, false);
		pResampler->Process(&buffer, fx1, fy1, fx2, fy2, &sprite, 0, 0);

#if 0
		float mx[16]={1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1};

		mx[0] = cos(th) / 512.0f;
		mx[1] = sin(th) / 512.0f;
		mx[5] = cos(th) / 384.0f;
		mx[4] = -sin(th) / 384.0f;

		mx[13] = -6.0f / 384.0f;
		mx[15] = 1.0f;

		VDTriBltVertex vx[4]={
			{ -100, -100, 0, 0, 0 },
			{ +100, -100, 0, 0, 60 },
			{ +100, +100, 0, 80, 60 },
			{ -100, +100, 0, 80, 0 },
		};

		const int idx[6]={0,1,2,0,2,3};

		VDPixmap buffer_cropped(VDPixmapOffset(buffer, 160, 120));

		buffer_cropped.w -= 320;
		buffer_cropped.h -= 240;

		VDPixmapTriBlt(buffer_cropped, mipchain.Mips(), mipchain.Levels(), vx, 4, idx, 6, kTriBltFilterTrilinear, 0.0f, mx);

		th += 0.01;
#endif

		pDisp->Update();
		++blits;

		p1.advance();
		p2.advance();

		sint64 last;
		QueryPerformanceCounter((LARGE_INTEGER *)&last);

		if (last-start >= freq) {
			start += freq;
			VDDEBUG2("%d blits/sec\n", blits);
			blits = 0;
		}
	}
}
Ejemplo n.º 23
0
void EDA_3D_CANVAS::TakeScreenshot( wxCommandEvent& event )
{
    static wxFileName fn;                 // Remember path between saves during this session only.
    wxString          FullFileName;
    wxString          file_ext, mask;
    bool              fmt_is_jpeg = false;

    // First time path is set to the project path.
    if( !fn.IsOk() )
        fn = Parent()->Prj().GetProjectFullName();

    if( event.GetId() == ID_MENU_SCREENCOPY_JPEG )
        fmt_is_jpeg = true;

    if( event.GetId() != ID_TOOL_SCREENCOPY_TOCLIBBOARD )
    {
        file_ext     = fmt_is_jpeg ? wxT( "jpg" ) : wxT( "png" );
        mask         = wxT( "*." ) + file_ext;
        fn.SetExt( file_ext );

        FullFileName = EDA_FILE_SELECTOR( _( "3D Image File Name:" ), fn.GetPath(),
                                          fn.GetFullName(), file_ext, mask, this,
                                          wxFD_SAVE | wxFD_OVERWRITE_PROMPT, true );

        if( FullFileName.IsEmpty() )
            return;

        fn = FullFileName;

        // Be sure the screen area covered by the file dialog is redrawn before making
        // a screen copy.
        // Without this call, the screen refresh happens too late under Linux.
        wxYield();
    }

    struct viewport_params
    {
        GLint originx;
        GLint originy;
        GLint x;
        GLint y;
    } viewport;

    // Be sure we have the latest 3D view (remember 3D view is buffered)
    Refresh();
    wxYield();

    // Build image from the 3D buffer
    wxWindowUpdateLocker noUpdates( this );
    glGetIntegerv( GL_VIEWPORT, (GLint*) &viewport );

    unsigned char*       pixelbuffer = (unsigned char*) malloc( viewport.x * viewport.y * 3 );
    unsigned char*       alphabuffer = (unsigned char*) malloc( viewport.x * viewport.y );
    wxImage image( viewport.x, viewport.y );

    glPixelStorei( GL_PACK_ALIGNMENT, 1 );
    glReadBuffer( GL_BACK_LEFT );
    glReadPixels( viewport.originx, viewport.originy,
                  viewport.x, viewport.y,
                  GL_RGB, GL_UNSIGNED_BYTE, pixelbuffer );
    glReadPixels( viewport.originx, viewport.originy,
                  viewport.x, viewport.y,
                  GL_ALPHA, GL_UNSIGNED_BYTE, alphabuffer );

    image.SetData( pixelbuffer );
    image.SetAlpha( alphabuffer );
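    // Note: SetData()/SetAlpha() take ownership of the malloc'd buffers above.
    // glReadPixels fills rows bottom-up (OpenGL's origin is the lower-left corner),
    // so the image is mirrored vertically below to match wxImage's top-down layout.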
    image = image.Mirror( false );
    wxBitmap bitmap( image );

    if( event.GetId() == ID_TOOL_SCREENCOPY_TOCLIBBOARD )
    {
        if( wxTheClipboard->Open() )
        {
            wxBitmapDataObject* dobjBmp = new wxBitmapDataObject( bitmap );

            if( !wxTheClipboard->SetData( dobjBmp ) )
                wxMessageBox( _( "Failed to copy image to clipboard" ) );

            wxTheClipboard->Flush();    /* the data in clipboard will stay
                                         * available after the application exits */
            wxTheClipboard->Close();
        }
    }
    else
    {
        wxImage image = bitmap.ConvertToImage();

        if( !image.SaveFile( FullFileName,
                             fmt_is_jpeg ? wxBITMAP_TYPE_JPEG : wxBITMAP_TYPE_PNG ) )
            wxMessageBox( _( "Can't save file" ) );

        image.Destroy();
    }
}
Ejemplo n.º 24
0
void AlignmentView::paintEvent(QPaintEvent * event)
{
	DrawingArea::paintEvent(event);
	
	if ( alignment == 0 )
	{
		return;
	}
	
	if ( width() <= frameWidth() * 2 )
	{
		return;
	}
	
	QPainter painter(this);
//	painter.setRenderHints(0);
	
	QImage image(width() - frameWidth() * 2, height() - frameWidth() * 2, QImage::Format_RGB32);
	
//	setColors();
	
	image.setColorCount(62);
	image.setColorTable(colors);
	
	image.fill(qRgb(180, 180, 180));
	
	int * hues = new int[image.width()];
	
	for ( int i = 0; i < image.width(); i++ )
	{
		hues[i] = i * 128 / image.width();
	}
	
	if ( highlight && highlightGradient )
	{
		float radius = .06;
		float offset = wrap(trackViews[highlightTrack].getLcbOffset(highlightLcb, highlightOffset), 0, 1);
		float lcbOffset;
		int lcb = trackViews[highlightTrack].getLcb
		(
			(offset - radius) * image.width(),
			image.width(),
			lcbOffset
		);
		RegionVector * track = (*alignment->getTracks())[getIdByTrack(highlightTrack)];
		
		int i = 0;
		
		// seek to lcb. TODO: Track method?
		//
		while ( (*track)[i]->getLcb() != lcb )
		{
			i++;
		}
		
		int delta = trackViews[highlightTrack].getRc() ? -1 : 1;
		float highlightStart = wrap((*track)[i]->getStartScaled() * delta + trackViews[highlightTrack].getOffset(), 0, 1);
		
		while ( wrap((*track)[i]->getStartScaled() * delta + trackViews[highlightTrack].getOffset(), 0, 1) < offset + radius )
		{
			const gav::Region * region = (*alignment->getLcb((*track)[i]->getLcb()).regions)[0];
			
			int start = region->getStartScaled() * image.width();
			int end = region->getEndScaled() * image.width();
			
			for ( int j = start; j <= end; j++ )
			{
				hues[j] = 60 + (wrap((*track)[i]->getStartScaled() * delta - highlightStart + trackViews[highlightTrack].getOffset(), 0, 1)) * 59 / (radius * 2);
			}
			
			i += delta;
			
			if ( i < 0 )
			{
				i += track->size();break;
			}
			else if ( i == track->size() )
			{
				i = 0;break;
			}
		}
		/*
		for ( int i = wrap(offset - radius, 0, 1) * image.width(); i <= wrap(offset + radius, 0, 1) * image.width() && i < image.width(); i++ )
		{
			int lcb = trackViews[highlightTrack].getLcb
			(
			 i,
			 image.width(),
			 lcbOffset
			 );
			if ( i < 0 )
			{
				i += image.width();
			}
			else if ( i >= image.width() )
			{
				i -= image.width();
			}
			
			int position = (int)(getRefPos(lcb, lcbOffset) * image.width());
			hues[position] = 60 + (i - (offset - radius) * image.width()) * 59 / (2 * radius * image.width());
		}*/
	}
	
	for ( int i = trackViews.size() - 1; i >= 0; i-- )
	{
		trackViews[i].draw(&image, palette, hues, progress, getTrackHeight(i), getTrackHeight(i + 1) - getTrackHeight(i), highlight, highlightLcb, highlightOffset);
	}
	
	delete [] hues;
	
	QTime time = QTime::currentTime();
	
	frames++;
	
	if ( time.second() != secLast )
	{
		//printf("fps: %d\n", frames);
		frames = 0;
	}
	
	secLast = time.second();
	
	painter.drawImage(frameWidth(), frameWidth(), image);
	
	for ( int i = 1; i < trackViews.size(); i++ )
	{
		float childSize = getTrackHeight(i + 1) - getTrackHeight(i);
		int shade;
		
		if ( childSize >= 20 )
		{
			shade = 255;
		}
		else if ( childSize < 2 )
		{
			shade = 0;
		}
		else
		{
			shade = 256 * (childSize - 2) / 18;
		}
		
		if ( i == getTrackFocus() || i == getTrackFocus() + 1 )
		{
			painter.setPen(QColor::fromRgba(qRgba(255, 255, 255, 255)));
		}
		else if ( i == getTrackHover() || i == getTrackHover() + 1 )
		{
			painter.setPen(QColor::fromRgba(qRgba(180, 180, 180, 255)));
		}
		else
		{
			painter.setPen(QColor::fromRgba(qRgba(0, 0, 0, shade)));
		}
		
		if ( shade > 0 )
		{
			painter.drawLine(frameWidth(), getTrackHeight(i) + frameWidth(), width() - frameWidth() - 1, getTrackHeight(i) + frameWidth());
		}
	}
	
	if ( highlightGradient )
	{
		float radius = .06;
		float offset = wrap(trackViews[highlightTrack].getLcbOffset(highlightLcb, highlightOffset), 0, 1);
		
		int x1 = (offset - radius) * image.width();
		int x2 = (offset + radius) * image.width();
		int y1 = getTrackHeight(highlightTrack);
		int y2 = getTrackHeight(highlightTrack + 1);
		
		QPen pen;
		pen.setWidth(3);
		pen.setColor(Qt::white);
		painter.setPen(pen);
		painter.drawRect(x1 - 1, y1 - 1, x2 - x1 + 2, y2 - y1 + 2);
	}
	
	if ( highlight )
	{
		highlightTrack = getTrackHover();
		int gap = 5;//cursorSize * .3;
		int y = cursorY;//(highlightTrack + .5) * image.height() / trackViews.size() - cursorSize / 20;
		int y1 = getTrackHeight(highlightTrack) - cursorSize / 20 + frameWidth() + 1;
		int y2 = getTrackHeight(highlightTrack + 1) + cursorSize / 20 + frameWidth() + 1;
		
		QPen pen;
		pen.setWidth(2 + cursorSize / 10);
		pen.setColor(QColor::fromHsl(120, 255, 127).rgb());
		painter.setPen(pen);
		painter.drawRect(cursorX - gap + frameWidth(), y1, gap * 2, y2 - y1);
		pen.setColor(Qt::white);
		painter.setPen(pen);
		return;
		painter.drawLine(cursorX - cursorSize, y - cursorSize, cursorX - gap, y - gap);
		painter.drawLine(cursorX - cursorSize, y + cursorSize, cursorX - gap, y + gap);
		painter.drawLine(cursorX + cursorSize, y - cursorSize, cursorX + gap, y - gap);
		painter.drawLine(cursorX + cursorSize, y + cursorSize, cursorX + gap, y + gap);
		
	}
}
Ejemplo n.º 25
0
PassRefPtr<Image> Image::imageForDefaultFrame()
{
    RefPtr<Image> image(this);

    return image.release();
}
Ejemplo n.º 26
0
int main(int argc, char *argv[])
{
	printf("Podaj ilosc punktow startowych: ");
// 	kolej.clear();
	scanf("%d", &starting_points);
	printf("Podaj prawdopodobieństwo poszerzenia ladu (0 - 10000): ");
	scanf("%d", &spread_chance);
	printf("Podaj pole (def 3) i sile wygladzenia (def 2): ");
	scanf("%d %d", &smooth_area, &smooth_pow);
	QImage image(WID, HEI, QImage::Format_RGB32);
	image.fill(c_woda);
	srand (time ( NULL ));
	for (int i = 0; i < starting_points; i++)
	{
		a.randum();
// 		printf("%d %d\n", a.x, a.y);
		kolej.add(a);
		land[a.conv()] = true;
		// 		printf("huehuehue\n");
// 		kolej.kju[qhead++] = a;
	}
// 	printf("uno\n");
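	// The queue drives a randomized BFS flood fill: each popped land cell tries to
	// turn each of its (in-bounds) neighbours into land with probability
	// spread_chance / MAX, and every newly created land cell is queued in turn.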
	while (!kolej.empty())
	{
		a = kolej.pop();
// 		printf("1a\n");
 		//printf("lol %d %d\n", a.x, a.y);

// 		printf("2a\n");
		for (int i = -1; i <= 1; i++)
			for (int j = -1; j <= 1; j++)
			{
				b.make(a.x + i, a.y + j);
// 				printf("3a\n");
				if (b.x >= 0 && b.x < WID && b.y >= 0 && b.y < HEI && !land[b.conv()] && rand() % MAX < spread_chance)
				{
					land[b.conv()] = true;
// 					printf("habahaba\n");
					kolej.add(b);
				}
// 					kolej.kju[qhead++];
// 				printf("4a\n");
			}
	}
	smooth(land, smooth_area, smooth_pow, WID, HEI);
// 	printf("fcuk\n");
	for (int i = 0 ; i < WID; i++)
		for (int j = 0; j < HEI; j++)
		{
			a.make(i, j);
			image.setPixel(a.x, a.y, land[a.conv()] ? c_ziemia : c_woda);
		}
	//test
	image.save("test1.png");
	image = image.copy(0, 0, WID / 2, HEI / 2);
	image.save("test2.png");
	image = image.scaled(WID, HEI);
	//end of test
	image.save("world.png");
	return 0;
}
Ejemplo n.º 27
0
int main(int argc, char** argv) {
  std::cout << "make the image denoising alchemy problem"
            << std::endl;

  std::string model_filename = "image";
  std::string drawing = "sunset";
  std::string corruption = "gaussian";
  std::string smoothing = "square";
  double lambda = 3;
  double sigma = 1;
  size_t rows = 200;
  size_t rings = 7;
  


  

  // Command line parsing
  graphlab::command_line_options clopts("Make the alchemy image", true);
  clopts.attach_option("model", 
                       &model_filename, model_filename,
                       "Alchemy formatted model file");
  clopts.attach_option("drawing", 
                       &drawing, drawing,
                       "drawing type");
  clopts.attach_option("corruption", 
                       &corruption, corruption,
                       "corruption type");
  clopts.attach_option("smoothing", 
                       &smoothing, smoothing,
                       "smoothing type");
  clopts.attach_option("lambda", 
                       &lambda, lambda,
                       "edge parameter");
  clopts.attach_option("sigma", 
                       &sigma, sigma,
                       "noise parameter");
  clopts.attach_option("rows", 
                       &rows, rows,
                       "number of rows and cols");
  clopts.attach_option("rings", 
                       &rings, rings,
                       "number of rings");

  if( !clopts.parse(argc, argv) ) { 
    std::cout << "Error parsing command line arguments!"
              << std::endl;
    return EXIT_FAILURE;
  }


  
  std::cout << "Creating a synethic image." << std::endl;
  image original(rows, rows);
  if(drawing == "sunset") 
    original.paint_sunset(rings);
  else if(drawing == "checkerboard")
    original.paint_checkerboard(rings);
  else {
    std::cout << "Invalid drawing type!" << std::endl;
    exit(1);
  }
  std::cout << "Saving original image. " << std::endl;
  original.save("original.pgm");    

    
  std::cout << "Corrupting Image. " << std::endl;
  image noisy = original;
  if(corruption == "gaussian") 
    noisy.gaussian_corrupt(sigma);
  else if(corruption == "flip")
    noisy.flip_corrupt(rings, 0.75);
  else if(corruption == "ising") 
    noisy = image(rows, rows);
  else {
    std::cout << "Invalid corruption type!" << std::endl;
    exit(1);
  }
  std::cout << "Saving corrupted image. " << std::endl;
  noisy.save("corrupted.pgm");
  

  // dummy variables 0 and 1 and num_rings by num_rings
  std::cout << "Creating edge factor" << std::endl;
  factor_t edge_factor(domain_t(variable_t(0, rings), variable_t(1, rings)));
  // Set the smoothing type
  if(smoothing == "square") {
    edge_factor.set_as_agreement(lambda);
  } else if (smoothing == "laplace") {
    edge_factor.set_as_laplace(lambda);
  } else  {
    std::cout << "Invalid smoothing stype!" << std::endl;
    assert(false);
  }
  std::cout << edge_factor << std::endl;
  
  std::cout << "Constructing factor graph." << std::endl;
  factorized_model model;
  // Add all the node factors
  double sigmaSq = sigma*sigma;
  for(size_t i = 0; i < noisy.rows(); ++i) {
    for(size_t j = 0; j < noisy.cols(); ++j) {
      // initialize the potential and belief
      uint32_t pixel_id = noisy.vertid(i, j);
      variable_t var(pixel_id, rings);
      factor_t factor(var);
      // Set the node potential
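      // For the Gaussian case this is the log of an (unnormalized) Gaussian
      // likelihood N(obs; pred, sigma^2); factor.normalize() below rescales it
      // into a proper node potential.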
      double obs = noisy.pixel(i, j);
      if(corruption == "gaussian") {
        for(size_t pred = 0; pred < rings; ++pred) {
          factor.logP(pred) = 
            -(obs - pred)*(obs - pred) / (2.0 * sigmaSq);
        }
      } else if(corruption == "flip") {
        for(size_t pred = 0; pred < rings; ++pred) {
          factor.logP(pred) = obs == pred? 0 : -sigma;
        }
      } else if(corruption == "ising") {
        // Do nothing since we want a uniform node potential
        factor.uniform();
      } else {
        std::cout << "Invalid corruption!" << std::endl;
        exit(1);
      }
      factor.normalize();
      model.add_factor(factor);
    } // end of for j in cols
  } // end of for i in rows

  // Construct edge_factors  
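  // Linking each pixel only to its right and lower neighbour produces a standard
  // 4-connected grid MRF without ever adding the same edge twice.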
  for(size_t i = 0; i < noisy.rows(); ++i) {
    for(size_t j = 0; j < noisy.cols(); ++j) {
      size_t source = noisy.vertid(i,j);
      variable_t source_var(source, rings);
      if(i+1 < noisy.rows()) {
        vertex_id_t target = noisy.vertid(i+1, j);
        variable_t target_var(target, rings);
        domain_t dom(source_var, target_var);
        edge_factor.set_args(dom);
        model.add_factor(edge_factor);
      }
      if(j+1 < noisy.cols()) {
        vertex_id_t target = noisy.vertid(i, j+1);
        variable_t target_var(target, rings);
        domain_t dom(source_var, target_var);
        edge_factor.set_args(dom);
        model.add_factor(edge_factor);
      }
    } // end of for j in cols
  } // end of for i in rows

  std::cout << "Saving model in alchemy format" << std::endl;
  model.save_alchemy(model_filename + ".alchemy");


  return EXIT_SUCCESS;
} // end of main
Ejemplo n.º 28
0
bool MapnikRenderer::Preprocess( IImporter* importer, QString dir )
{
	QString filename = fileInDirectory( dir, "Mapnik Renderer" );

	try {
		IImporter::BoundingBox box;
		if ( !importer->GetBoundingBox( &box ) )
			return false;
		std::vector< IImporter::RoutingEdge > inputEdges;
		std::vector< IImporter::RoutingNode > inputNodes;
		std::vector< IImporter::RoutingNode > inputPaths;
		if ( m_settings.deleteTiles ) {
			if ( !importer->GetRoutingEdges( &inputEdges ) ) {
				qCritical() << "Mapnik Renderer: failed to read routing edges";
				return false;
			}
			if ( !importer->GetRoutingNodes( &inputNodes ) ) {
				qCritical() << "Mapnik Renderer: failed to read routing nodes";
				return false;
			}
			if ( !importer->GetRoutingEdgePaths( &inputPaths ) ) {
				qCritical() << "Mapnik Renderer: failed to read routing paths";
			}
		}

		Timer time;

		mapnik::datasource_cache::instance().register_datasources( m_settings.plugins.toLatin1().constData() );
		QDir fonts( m_settings.fonts );
		mapnik::projection projection( "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +no_defs +over" );
		mapnik::freetype_engine::register_font( fonts.filePath( "DejaVuSans.ttf" ).toLatin1().constData() );
		mapnik::freetype_engine::register_font( fonts.filePath( "DejaVuSans-Bold.ttf" ).toLatin1().constData() );
		mapnik::freetype_engine::register_font( fonts.filePath( "DejaVuSans-Oblique.ttf" ).toLatin1().constData() );
		mapnik::freetype_engine::register_font( fonts.filePath( "DejaVuSans-BoldOblique.ttf" ).toLatin1().constData() );

		qDebug() << "Mapnik Renderer: initialized mapnik connection:" << time.restart() << "ms";

		int numThreads = omp_get_max_threads();
		qDebug() << "Mapnik Renderer: using" << numThreads << "threads";

		qDebug() << "Mapnik Renderer: x: " << box.min.x << "-" << box.max.x;
		qDebug() << "Mapnik Renderer: y: " << box.min.y << "-" << box.max.y;

		FileStream configData( filename );
		if ( !configData.open( QIODevice::WriteOnly ) )
			return false;

		configData << quint32( m_settings.tileSize ) << quint32( m_settings.zoomLevels.size() );

		long long tilesSkipped = 0;
		long long tiles = 0;
		long long metaTilesRendered = 0;
		long long pngcrushSaved = 0;

		std::vector< ZoomInfo > zoomInfo( m_settings.zoomLevels.size() );
		std::vector< MetaTile > tasks;

		for ( int zoomLevel = 0; zoomLevel < ( int ) m_settings.zoomLevels.size(); zoomLevel++ ) {
			ZoomInfo& info = zoomInfo[zoomLevel];
			int zoom = m_settings.zoomLevels[zoomLevel];

			info.minX = box.min.GetTileX( zoom );
			info.maxX = box.max.GetTileX( zoom ) + 1;
			info.minY = box.min.GetTileY( zoom );
			info.maxY = box.max.GetTileY( zoom ) + 1;

			if ( zoom <= m_settings.fullZoom ) {
				info.minX = info.minY = 0;
				info.maxX = info.maxY = 1 << zoom;
			} else {
				info.minX = std::max( 0 , info.minX - m_settings.tileMargin );
				info.maxX = std::min ( 1 << zoom, info.maxX + m_settings.tileMargin );
				info.minY = std::max( 0, info.minY - m_settings.tileMargin );
				info.maxY = std::min ( 1 << zoom, info.maxY + m_settings.tileMargin );
			}

			tiles += ( info.maxX - info.minX ) * ( info.maxY - info.minY );
			qDebug() << "Mapnik Renderer: [" << zoom << "] x:" << info.minX << "-" << info.maxX << "; y:" << info.minY << "-" << info.maxY;
			configData << quint32( zoom ) << quint32( info.minX ) << quint32( info.maxX ) << quint32( info.minY ) << quint32( info.maxY );

			int numberOfTiles = ( info.maxX - info.minX ) * ( info.maxY - info.minY );
			IndexElement dummyIndex;
			dummyIndex.start = dummyIndex.size = 0;
			info.index.resize( numberOfTiles, dummyIndex );

			std::vector< UnsignedCoordinate > path;
			for ( std::vector< IImporter::RoutingEdge >::const_iterator i = inputEdges.begin(), e = inputEdges.end(); i != e; ++i ) {
				path.push_back( inputNodes[i->source].coordinate );
				for ( int pathID = 0; pathID < i->pathLength; pathID++ )
					path.push_back( inputPaths[pathID + i->pathID].coordinate );
				path.push_back( inputNodes[i->target].coordinate );

				for ( unsigned edge = 0; edge < path.size(); edge++ ) {
					int sourceX = path[edge].GetTileX( zoom );
					int sourceY = path[edge].GetTileY( zoom );
					int targetX = path[edge].GetTileX( zoom );
					int targetY = path[edge].GetTileY( zoom );
					if ( sourceX > targetX )
						std::swap( sourceX, targetX );
					if ( sourceY > targetY )
						std::swap( sourceY, targetY );
					sourceX = std::max( sourceX, info.minX );
					sourceX = std::min( sourceX, info.maxX - 1 );
					sourceY = std::max( sourceY, info.minY );
					sourceY = std::min( sourceY, info.maxY - 1 );
					targetX = std::max( targetX, info.minX );
					targetX = std::min( targetX, info.maxX - 1 );
					targetY = std::max( targetY, info.minY );
					targetY = std::min( targetY, info.maxY - 1 );
					for ( int x = sourceX; x <= targetX; ++x )
						for ( int y = sourceY; y <= targetY; ++y )
							info.index[( x - info.minX ) + ( y - info.minY ) * ( info.maxX - info.minX )].size = 1;
				}

				path.clear();
			}
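			// Index entries touched above mark tiles that are actually crossed by routing
			// geometry; when deleteTiles is enabled, only those tiles are rendered later
			// and all others are stored as empty entries.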

			info.tilesFile = new QFile( filename + QString( "_%1_tiles" ).arg( zoom ) );
			if ( !openQFile( info.tilesFile, QIODevice::WriteOnly ) )
				return false;

			for ( int x = info.minX; x < info.maxX; x+= m_settings.metaTileSize ) {
				int metaTileSizeX = std::min( m_settings.metaTileSize, info.maxX - x );
				for ( int y = info.minY; y < info.maxY; y+= m_settings.metaTileSize ) {
					int metaTileSizeY = std::min( m_settings.metaTileSize, info.maxY - y );
					MetaTile tile;
					tile.zoom = zoomLevel;
					tile.x = x;
					tile.y = y;
					tile.metaTileSizeX = metaTileSizeX;
					tile.metaTileSizeY = metaTileSizeY;
					tasks.push_back( tile );
				}
			}
		}


#pragma omp parallel
		{
			int threadID = omp_get_thread_num();
			const int metaTileSize = m_settings.metaTileSize * m_settings.tileSize + 2 * m_settings.margin;

			mapnik::Map map;
			mapnik::image_32 image( metaTileSize, metaTileSize );
			QTemporaryFile tempOut;
			QTemporaryFile tempIn;
			mapnik::load_map( map, m_settings.theme.toLocal8Bit().constData() );

#pragma omp for schedule( dynamic )
			for ( int i = 0; i < ( int ) tasks.size(); i++ ) {

				int metaTileSizeX = tasks[i].metaTileSizeX;
				int metaTileSizeY = tasks[i].metaTileSizeY;
				int x = tasks[i].x;
				int y = tasks[i].y;
				int zoomLevel = tasks[i].zoom;
				int zoom = m_settings.zoomLevels[zoomLevel];
				ZoomInfo& info = zoomInfo[zoomLevel];

				map.resize( metaTileSizeX * m_settings.tileSize + 2 * m_settings.margin, metaTileSizeY * m_settings.tileSize + 2 * m_settings.margin );

				ProjectedCoordinate drawTopLeft( x - 1.0 * m_settings.margin / m_settings.tileSize, y - 1.0 * m_settings.margin / m_settings.tileSize, zoom );
				ProjectedCoordinate drawBottomRight( x + metaTileSizeX + 1.0 * m_settings.margin / m_settings.tileSize, y + metaTileSizeY + 1.0 * m_settings.margin / m_settings.tileSize, zoom );
				GPSCoordinate drawTopLeftGPS = drawTopLeft.ToGPSCoordinate();
				GPSCoordinate drawBottomRightGPS = drawBottomRight.ToGPSCoordinate();
				projection.forward( drawTopLeftGPS.longitude, drawBottomRightGPS.latitude );
				projection.forward( drawBottomRightGPS.longitude, drawTopLeftGPS.latitude );
				mapnik::box2d<double> boundingBox( drawTopLeftGPS.longitude, drawTopLeftGPS.latitude, drawBottomRightGPS.longitude, drawBottomRightGPS.latitude );
				map.zoom_to_box( boundingBox );
				mapnik::agg_renderer<mapnik::image_32> renderer( map, image );
				renderer.apply();

				std::string data;
				int skipped = 0;
				int saved = 0;
				for ( int subX = 0; subX < metaTileSizeX; ++subX ) {
					for ( int subY = 0; subY < metaTileSizeY; ++subY ) {
						int indexNumber = ( y + subY - info.minY ) * ( info.maxX - info.minX ) + x + subX - info.minX;
						mapnik::image_view<mapnik::image_data_32> view = image.get_view( subX * m_settings.tileSize + m_settings.margin, subY * m_settings.tileSize + m_settings.margin, m_settings.tileSize, m_settings.tileSize );
						std::string result;
						if ( !m_settings.deleteTiles || info.index[( x + subX - info.minX ) + ( y + subY - info.minY ) * ( info.maxX - info.minX )].size == 1 ) {
							if ( m_settings.reduceColors )
								result = mapnik::save_to_string( view, "png256" );
							else
								result = mapnik::save_to_string( view, "png" );

							if ( m_settings.pngcrush ) {
								tempOut.open();
								tempOut.write( result.data(), result.size() );
								tempOut.flush();
								tempIn.open();
								pclose( popen( ( "pngcrush " + tempOut.fileName() + " " + tempIn.fileName() ).toUtf8().constData(), "r" ) );
								QByteArray buffer = tempIn.readAll();
								tempIn.close();
								tempOut.close();
								if ( buffer.size() != 0 && buffer.size() < ( int ) result.size() ) {
									saved += result.size() - buffer.size();
									result.assign( buffer.constData(), buffer.size() );
								}
							}
						}

						info.index[indexNumber].start = data.size();
						info.index[indexNumber].size = result.size();
						data += result;
					}
				}

				qint64 position;
#pragma omp critical
				{
					position = info.tilesFile->pos();
					info.tilesFile->write( data.data(), data.size() );

					metaTilesRendered++;
					tilesSkipped += skipped;
					pngcrushSaved += saved;
					qDebug() << "Mapnik Renderer: [" << zoom << "], thread" << threadID << ", metatiles:" << metaTilesRendered << "/" << tasks.size();
				}

				for ( int subX = 0; subX < metaTileSizeX; ++subX ) {
					for ( int subY = 0; subY < metaTileSizeY; ++subY ) {
						int indexNumber = ( y + subY - info.minY ) * ( info.maxX - info.minX ) + x + subX - info.minX;
						info.index[indexNumber].start += position;
					}
				}
			}
		}

		for ( int zoomLevel = 0; zoomLevel < ( int ) m_settings.zoomLevels.size(); zoomLevel++ ) {
			const ZoomInfo& info = zoomInfo[zoomLevel];
			int zoom = m_settings.zoomLevels[zoomLevel];
			QFile indexFile( filename + QString( "_%1_index" ).arg( zoom ) );
			if ( !openQFile( &indexFile, QIODevice::WriteOnly ) )
				return false;
			for ( int i = 0; i < ( int ) info.index.size(); i++ ) {
				indexFile.write( ( const char* ) &info.index[i].start, sizeof( info.index[i].start ) );
				indexFile.write( ( const char* ) &info.index[i].size, sizeof( info.index[i].size ) );
			}
			delete info.tilesFile;
		}

		if ( m_settings.deleteTiles )
			qDebug() << "Mapnik Renderer: removed" << tilesSkipped << "tiles";
		if ( m_settings.pngcrush )
			qDebug() << "Mapnik Renderer: PNGcrush saved" << pngcrushSaved / 1024 / 1024 << "MB";

		qDebug() << "Mapnik Renderer: finished:" << time.restart() << "ms";

	} catch ( const mapnik::config_error & ex ) {
		qCritical( "Mapnik Renderer: ### Configuration error: %s", ex.what() );
		return false;
	} catch ( const std::exception & ex ) {
		qCritical( "Mapnik Renderer: ### STD error: %s", ex.what() );
		return false;
	} catch ( ... ) {
		qCritical( "Mapnik Renderer: ### Unknown error" );
		return false;
	}
	return true;
}
Ejemplo n.º 29
0
int main(int, char *[])
{
  double  testrad = 5.5, testx = 127.25, testy = 127.75;  //< Actual location of spot
  double  seedrad = 6, seedx = 120, seedy = 118;	  //< Start location for tracking
  int	  avgcount; //< How many samples to average over
  double  pixacc;   //< Pixel accuracy desired
  double  err, minerr, maxerr, sumerr;
  double  raderr, minraderr, maxraderr, sumraderr;
  double  biasx, biasy;

  printf("Generating default test image with radius %lg disk at %lg, %lg\n", testrad, testx, testy);
  disc_image  image(0,255, 0,255, 127, 5, testx, testy, testrad, 250);

  printf("-----------------------------------------------------------------\n");
  printf("Generating default spot tracker\n");
  disk_spot_tracker tracker(seedrad);

  printf("Looking for best fit within the image\n");
  tracker.locate_good_fit_in_image(image, 0, seedx, seedy);

  printf("Optimization, starting at found location %lg, %lg,  rad %lg\n", seedx, seedy, seedrad);
  int i;
  double  x,y, rad, fit;
  tracker.take_single_optimization_step(image, 0, x,y, seedx, seedy);
  for (i = 0; i < 5; i++) {
    tracker.take_single_optimization_step(image, 0, x, y, true, true, true);
    rad = tracker.get_radius();
    fit = tracker.get_fitness();
    printf("Next step: X = %8.3lg,  Y = %8.3lg,  rad = %8.3lg, fit = %12.5lg\n", x,y,rad, fit);
  }

  printf("Chasing around a slightly noisy spot using full optimization\n");
  testrad = 5.5, testx = 127.25, testy = 127.75;  //< Actual location of spot
  seedrad = 6, seedx = 120, seedy = 118;	  //< Start location for tracking
  avgcount = 50;
  minerr = 1000; maxerr = 0; sumerr = 0;
  minraderr = 1000; maxraderr = 0; sumraderr = 0;
  biasx = 0; biasy = 0;
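  // Each iteration below perturbs the true spot position and radius by a uniform
  // random step, regenerates the test image, re-runs the tracker, and accumulates
  // min/max/mean position error, radius error, and x/y bias.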
  for (i = 0; i < avgcount; i++) {
    testrad += ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * 1;
    if (testrad < 3) { testrad = 3; }
    testx += ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2);
    testy += ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2);
    {
      disc_image image2(0,255, 0,255, 127, 5, testx, testy, testrad, 250);
      tracker.optimize(image2, 0, x, y);
      rad = tracker.get_radius();
      fit = tracker.get_fitness();
      err = sqrt( (x-testx)*(x-testx) + (y-testy)*(y-testy) );
      if (err < minerr) { minerr = err; }
      if (err > maxerr) { maxerr = err; }
      sumerr += err;
      raderr = fabs(rad-testrad);
      if (raderr < minraderr) { minraderr = raderr; }
      if (raderr > maxraderr) { maxraderr = raderr; }
      sumraderr += raderr;
      biasx += x - testx;
      biasy += y - testy;
      if (i == 0) {
	printf("First opt: real coords (%g,%g), found coords (%g,%g)\n", testx,testy, x,y);
      }
    }
  }
  printf("Pos err: min=%g, max=%g, mean=%g, xbias = %g, ybias = %g\n", minerr, maxerr, sumerr/avgcount, biasx/avgcount, biasy/avgcount);
  printf("Rad err: min=%g, max=%g, mean=%g\n", minraderr, maxraderr, sumraderr/avgcount);

  testrad = 5.5;
  pixacc = 0.05;
  avgcount = 50;
  x = 120.5; y = 120;
  printf("Chasing around a slightly noisy disk of known radius %g to %g pixel\n", testrad, pixacc);
  compute_disk_chase_statistics(tracker, testrad, pixacc, avgcount, minerr, maxerr, sumerr, biasx, biasy, x,y);
  printf("Pos err: min=%g, max=%g, mean=%g, xbias = %g, ybias = %g\n", minerr, maxerr, sumerr/avgcount, biasx/avgcount, biasy/avgcount);
  x = 120.5; y = 120;
  printf("Chasing around a slightly noisy cone of known radius %g to %g pixel\n", testrad, pixacc);
  compute_cone_chase_statistics(tracker, testrad, pixacc, avgcount, minerr, maxerr, sumerr, biasx, biasy, x,y);
  printf("Pos err: min=%g, max=%g, mean=%g, xbias = %g, ybias = %g\n", minerr, maxerr, sumerr/avgcount, biasx/avgcount, biasy/avgcount);

  pixacc = 0.05;
  testrad = 5.5;
  tracker.set_pixel_accuracy(pixacc);
  printf("Timing how long it takes to optimize pos to %g pixels from a nearby position on average\n", pixacc);
  avgcount = 1000;
  struct timeval start, end;
  tracker.optimize(image, 0, x,y);	      // Get back to the correct starting location
  gettimeofday(&start, NULL);
  for (i = 0; i < avgcount; i++) {
    tracker.optimize_xy(image, 0, x, y,
      x + ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2),
      y + ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2));
  }
  gettimeofday(&end, NULL);
  printf("  Time: %lg seconds per optimization\n", duration(end, start)/avgcount);

  printf("-----------------------------------------------------------------\n");
  printf("Generating interpolating spot tracker\n");
  disk_spot_tracker_interp interptracker(seedrad);
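  // The interpolating tracker is run through the same battery as the plain disk
  // tracker above (single steps, noisy-spot chase, disk/cone chase statistics, and a
  // timing run) so the printed figures can be compared directly.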

  printf("Looking for best fit within the image\n");
  interptracker.locate_good_fit_in_image(image, 0, seedx, seedy);

  printf("Optimization, starting at found location %lg, %lg,  rad %lg\n", seedx, seedy, seedrad);
  interptracker.take_single_optimization_step(image, 0, x,y, seedx, seedy);
  for (i = 0; i < 5; i++) {
    interptracker.take_single_optimization_step(image, 0, x, y, true, true, true);
    rad = interptracker.get_radius();
    fit = interptracker.get_fitness();
    printf("Next step: X = %8.3lg,  Y = %8.3lg,  rad = %8.3lg, fit = %12.5lg\n", x,y,rad, fit);
  }

  printf("Chasing around a slightly noisy spot using full optimization\n");
  testrad = 5.5, testx = 127.25, testy = 127.75;  //< Actual location of spot
  seedrad = 6, seedx = 120, seedy = 118;	  //< Start location for tracking
  avgcount = 50;
  minerr = 1000; maxerr = 0; sumerr = 0;
  minraderr = 1000; maxraderr = 0; sumraderr = 0;
  biasx = 0; biasy = 0;
  for (i = 0; i < avgcount; i++) {
    testrad += ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * 1;
    if (testrad < 3) { testrad = 3; }
    testx += ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2);
    testy += ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2);
    {
      disc_image image2(0,255, 0,255, 127, 5, testx, testy, testrad, 250);
      interptracker.optimize(image2, 0, x, y);
      rad = interptracker.get_radius();
      fit = interptracker.get_fitness();
      err = sqrt( (x-testx)*(x-testx) + (y-testy)*(y-testy) );
      if (err < minerr) { minerr = err; }
      if (err > maxerr) { maxerr = err; }
      sumerr += err;
      raderr = fabs(rad-testrad);
      if (raderr < minraderr) { minraderr = raderr; }
      if (raderr > maxraderr) { maxraderr = raderr; }
      sumraderr += raderr;
      biasx += x - testx;
      biasy += y - testy;
      if (i == 0) {
	printf("First opt: real coords (%g,%g), found coords (%g,%g)\n", testx,testy, x,y);
      }
    }
  }
  printf("Pos err: min=%g, max=%g, mean=%g, xbias = %g, ybias = %g\n", minerr, maxerr, sumerr/avgcount, biasx/avgcount, biasy/avgcount);
  printf("Rad err: min=%g, max=%g, mean=%g\n", minraderr, maxraderr, sumraderr/avgcount);

  testrad = 5.5;
  pixacc = 0.05;
  avgcount = 50;
  x = 120.5; y = 120;
  printf("Chasing around a slightly noisy disk of known radius %g to %g pixel\n", testrad, pixacc);
  compute_disk_chase_statistics(interptracker, testrad, pixacc, avgcount, minerr, maxerr, sumerr, biasx,biasy, x,y);
  printf("Pos err: min=%g, max=%g, mean=%g, xbias = %g, ybias = %g\n", minerr, maxerr, sumerr/avgcount, biasx/avgcount, biasy/avgcount);
  x = 120.5; y = 120;
  printf("Chasing around a slightly noisy cone of known radius %g to %g pixel\n", testrad, pixacc);
  compute_cone_chase_statistics(interptracker, testrad, pixacc, avgcount, minerr, maxerr, sumerr, biasx,biasy, x,y);
  printf("Pos err: min=%g, max=%g, mean=%g, xbias = %g, ybias = %g\n", minerr, maxerr, sumerr/avgcount, biasx/avgcount, biasy/avgcount);

  testrad = 5.5;
  pixacc = 0.05;
  x = 120.5; y = 120;
  disc_image image3(0,255,0,255,127,0,x,y,testrad, 250);
  printf("Optimizing a slightly noisy disk of known radius %g at %g,%g\n", testrad, x,y);
  interptracker.optimize_xy(image3, 0, x, y, floor(x), ceil(y));
  printf("  Found a spot of radius %g at %g,%g\n", interptracker.get_radius(), interptracker.get_x(), interptracker.get_y());

  pixacc = 0.05;
  testrad = 5.5;
  interptracker.set_pixel_accuracy(pixacc);
  printf("Timing how long it takes to optimize pos to %g pixels from a nearby position on average\n", pixacc);
  avgcount = 100;
  interptracker.optimize(image, 0, x,y);	      // Get back to the correct starting location
  gettimeofday(&start, NULL);
  for (i = 0; i < avgcount; i++) {
    interptracker.optimize_xy(image, 0, x, y,
      x + ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2),
      y + ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2));
  }
  gettimeofday(&end, NULL);
  printf("  Time: %lg seconds per optimization\n", duration(end, start)/avgcount);

  printf("-----------------------------------------------------------------\n");
  printf("Generating interpolating cone spot tracker\n");
  cone_spot_tracker_interp conetracker(seedrad);

  printf("Looking for best fit within the image\n");
  conetracker.locate_good_fit_in_image(image, 0, seedx, seedy);

  printf("Optimization, starting at found location %lg, %lg,  rad %lg\n", seedx, seedy, seedrad);
  conetracker.take_single_optimization_step(image, 0, x,y, seedx, seedy);
  for (i = 0; i < 5; i++) {
    conetracker.take_single_optimization_step(image, 0, x, y, true, true, true);
    rad = conetracker.get_radius();
    fit = conetracker.get_fitness();
    printf("Next step: X = %8.3lg,  Y = %8.3lg,  rad = %8.3lg, fit = %12.5lg\n", x,y,rad, fit);
  }

  printf("Chasing around a slightly noisy spot using full optimization\n");
  testrad = 5.5, testx = 127.25, testy = 127.75;  //< Actual location of spot
  seedrad = 6, seedx = 120, seedy = 118;	  //< Start location for tracking
  avgcount = 50;
  minerr = 1000; maxerr = 0; sumerr = 0;
  minraderr = 1000; maxraderr = 0; sumraderr = 0;
  biasx = 0; biasy = 0;
  for (i = 0; i < avgcount; i++) {
    testrad += ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * 1;
    if (testrad < 3) { testrad = 3; }
    testx += ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2);
    testy += ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2);
    {
      disc_image image2(0,255, 0,255, 127, 5, testx, testy, testrad, 250);
      conetracker.optimize(image2, 0, x, y);
      rad = conetracker.get_radius();
      fit = conetracker.get_fitness();
      err = sqrt( (x-testx)*(x-testx) + (y-testy)*(y-testy) );
      if (err < minerr) { minerr = err; }
      if (err > maxerr) { maxerr = err; }
      sumerr += err;
      raderr = fabs(rad-testrad);
      if (raderr < minraderr) { minraderr = raderr; }
      if (raderr > maxraderr) { maxraderr = raderr; }
      sumraderr += raderr;
      biasx += x - testx;
      biasy += y - testy;
      if (i == 0) {
	printf("First opt: real coords (%g,%g), found coords (%g,%g)\n", testx,testy, x,y);
      }
    }
  }
  printf("Pos err: min=%g, max=%g, mean=%g, xbias = %g, ybias = %g\n", minerr, maxerr, sumerr/avgcount, biasx/avgcount, biasy/avgcount);
  printf("Rad err: min=%g, max=%g, mean=%g\n", minraderr, maxraderr, sumraderr/avgcount);

  testrad = 5.5;
  pixacc = 0.05;
  avgcount = 50;
  x = 120.5; y = 120;
  printf("Chasing around a slightly noisy disk of known radius %g to %g pixel\n", testrad, pixacc);
  // Make the radius slightly larger than the radius of the spot.
  compute_disk_chase_statistics(conetracker, 1.3*testrad, pixacc, avgcount, minerr, maxerr, sumerr, biasx,biasy, x,y);
  printf("Pos err: min=%g, max=%g, mean=%g, xbias = %g, ybias = %g\n", minerr, maxerr, sumerr/avgcount, biasx/avgcount, biasy/avgcount);
  x = 120.5; y = 120;
  printf("Chasing around a slightly noisy cone of known radius %g to %g pixel\n", testrad, pixacc);
  // Make the radius slightly larger than the radius of the spot.
  compute_cone_chase_statistics(conetracker, 1.3*testrad, pixacc, avgcount, minerr, maxerr, sumerr, biasx,biasy, x,y);
  printf("Pos err: min=%g, max=%g, mean=%g, xbias = %g, ybias = %g\n", minerr, maxerr, sumerr/avgcount, biasx/avgcount, biasy/avgcount);

  testrad = 5.5;
  pixacc = 0.05;
  x = 120.5; y = 120;
  disc_image image4(0,255,0,255,127,0,x,y,testrad, 250);
  printf("Optimizing a slightly noisy disk of known radius %g at %g,%g\n", testrad, x,y);
  conetracker.optimize_xy(image4, 0, x, y, floor(x), ceil(y));
  printf("  Found a spot of radius %g at %g,%g\n", conetracker.get_radius(), conetracker.get_x(), conetracker.get_y());

  pixacc = 0.05;
  testrad = 5.5;
  conetracker.set_pixel_accuracy(pixacc);
  printf("Timing how long it takes to optimize pos to %g pixels from a nearby position on average\n", pixacc);
  avgcount = 100;
  conetracker.optimize(image, 0, x,y);	      // Get back to the correct starting location
  gettimeofday(&start, NULL);
  for (i = 0; i < avgcount; i++) {
    conetracker.optimize_xy(image, 0, x, y,
      x + ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2),
      y + ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2));
  }
  gettimeofday(&end, NULL);
  printf("  Time: %lg seconds per optimization\n", duration(end, start)/avgcount);

  printf("-----------------------------------------------------------------\n");
  printf("Generating interpolating symmetric spot tracker\n");
  symmetric_spot_tracker_interp symmetrictracker(seedrad);

  printf("Looking for best fit within the image\n");
  symmetrictracker.locate_good_fit_in_image(image, 0, seedx, seedy);

  printf("Optimization, starting at found location %lg, %lg,  rad %lg\n", seedx, seedy, seedrad);
  symmetrictracker.take_single_optimization_step(image, 0, x,y, seedx, seedy);
  for (i = 0; i < 5; i++) {
    symmetrictracker.take_single_optimization_step(image, 0, x, y, true, true, true);
    rad = symmetrictracker.get_radius();
    fit = symmetrictracker.get_fitness();
    printf("Next step: X = %8.3lg,  Y = %8.3lg,  rad = %8.3lg, fit = %12.5lg\n", x,y,rad, fit);
  }

  printf("Chasing around a slightly noisy spot using full optimization\n");
  testrad = 5.5, testx = 127.25, testy = 127.75;  //< Actual location of spot
  seedrad = 6, seedx = 120, seedy = 118;	  //< Start location for tracking
  avgcount = 50;
  minerr = 1000; maxerr = 0; sumerr = 0;
  minraderr = 1000; maxraderr = 0; sumraderr = 0;
  biasx = 0; biasy = 0;
  for (i = 0; i < avgcount; i++) {
    testrad += ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * 1;
    if (testrad < 3) { testrad = 3; }
    testx += ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2);
    testy += ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2);
    {
      disc_image image2(0,255, 0,255, 127, 5, testx, testy, testrad, 250);
      symmetrictracker.optimize(image2, 0, x, y);
      rad = symmetrictracker.get_radius();
      fit = symmetrictracker.get_fitness();
      err = sqrt( (x-testx)*(x-testx) + (y-testy)*(y-testy) );
      if (err < minerr) { minerr = err; }
      if (err > maxerr) { maxerr = err; }
      sumerr += err;
      raderr = fabs(rad-testrad);
      if (raderr < minraderr) { minraderr = raderr; }
      if (raderr > maxraderr) { maxraderr = raderr; }
      sumraderr += raderr;
      biasx += x - testx;
      biasy += y - testy;
      if (i == 0) {
	printf("First opt: real coords (%g,%g), found coords (%g,%g)\n", testx,testy, x,y);
      }
    }
  }
  printf("Pos err: min=%g, max=%g, mean=%g, xbias = %g, ybias = %g\n", minerr, maxerr, sumerr/avgcount, biasx/avgcount, biasy/avgcount);
  printf("Rad err: min=%g, max=%g, mean=%g\n", minraderr, maxraderr, sumraderr/avgcount);

  testrad = 5.5;
  pixacc = 0.01;
  avgcount = 50;
  x = 120.5; y = 120;
  printf("Chasing around a slightly noisy disk of known radius %g to %g pixel\n", testrad, pixacc);
  // Make the radius slightly larger than the radius of the spot.
  compute_disk_chase_statistics(symmetrictracker, 1.3*testrad, pixacc, avgcount, minerr, maxerr, sumerr, biasx,biasy, x,y);
  printf("Pos err: min=%g, max=%g, mean=%g, xbias = %g, ybias = %g\n", minerr, maxerr, sumerr/avgcount, biasx/avgcount, biasy/avgcount);

  x = 120.5; y = 120;
  printf("Chasing around a slightly noisy cone of known radius %g to %g pixel\n", testrad, pixacc);
  // Make the radius slightly larger than the radius of the spot.
  compute_cone_chase_statistics(symmetrictracker, 1.3*testrad, pixacc, avgcount, minerr, maxerr, sumerr, biasx,biasy, x,y);
  printf("Pos err: min=%g, max=%g, mean=%g, xbias = %g, ybias = %g\n", minerr, maxerr, sumerr/avgcount, biasx/avgcount, biasy/avgcount);

  testrad = 5.5;
  pixacc = 0.05;
  x = 120.5; y = 120;
  disc_image image5(0,255,0,255,127,0,x,y,testrad, 250);
  printf("Optimizing a slightly noisy disk of known radius %g at %g,%g\n", testrad, x,y);
  symmetrictracker.optimize_xy(image5, 0, x, y, floor(x), ceil(y));
  printf("  Found a spot of radius %g at %g,%g\n", symmetrictracker.get_radius(), symmetrictracker.get_x(), symmetrictracker.get_y());

  pixacc = 0.05;
  testrad = 5.5;
  symmetrictracker.set_pixel_accuracy(pixacc);
  printf("Timing how long it takes to optimize pos to %g pixels from a nearby position on average\n", pixacc);
  avgcount = 100;
  symmetrictracker.optimize(image, 0, x,y);	      // Get back to the correct starting location
  gettimeofday(&start, NULL);
  for (i = 0; i < avgcount; i++) {
    symmetrictracker.optimize_xy(image, 0, x, y,
      x + ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2),
      y + ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2));
  }
  gettimeofday(&end, NULL);
  printf("  Time: %lg seconds per optimization\n", duration(end, start)/avgcount);

  printf("-----------------------------------------------------------------\n");
  printf("Generating Gaussian spot tracker\n");

  testrad = 5.5, testx = 127.25, testy = 127.75;  //< Actual location of spot
  seedrad = 6, seedx = 120, seedy = 118;	  //< Start location for tracking
  Gaussian_spot_tracker Gaussiantracker(seedrad, false, 0.25, 0.25, 1.0, 127, 11689);
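  // Note on the extra constructor arguments: 127 matches the background intensity of
  // the test images, and 11689 is approximately pi * 5.5^2 * (250 - 127), i.e. the
  // integrated intensity of the test spot above background -- assuming (an inference,
  // not stated here) that the last two parameters are the expected background level
  // and the total summed volume of the Gaussian.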
  printf("Looking for best fit within the image\n");
  Gaussiantracker.locate_good_fit_in_image(image, 0, seedx, seedy);

  printf("Optimization, starting at found location %lg, %lg,  rad %lg\n", seedx, seedy, seedrad);
  Gaussiantracker.take_single_optimization_step(image, 0, x,y, seedx, seedy);
  for (i = 0; i < 5; i++) {
    Gaussiantracker.take_single_optimization_step(image, 0, x, y, true, true, true);
    rad = Gaussiantracker.get_radius();
    fit = Gaussiantracker.get_fitness();
    printf("Next step: X = %8.3lg,  Y = %8.3lg,  rad = %8.3lg, fit = %12.5lg\n", x,y,rad, fit);
  }

  printf("Chasing around a slightly noisy spot using full optimization\n");
  avgcount = 50;
  minerr = 1000; maxerr = 0; sumerr = 0;
  minraderr = 1000; maxraderr = 0; sumraderr = 0;
  biasx = 0; biasy = 0;
  for (i = 0; i < avgcount; i++) {
    testrad += ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * 1;
    if (testrad < 3) { testrad = 3; }
    testx += ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2);
    testy += ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2);
    {
      disc_image image2(0,255, 0,255, 127, 5, testx, testy, testrad, 250);
      Gaussiantracker.optimize(image2, 0, x, y);
      rad = Gaussiantracker.get_radius();
      fit = Gaussiantracker.get_fitness();
      err = sqrt( (x-testx)*(x-testx) + (y-testy)*(y-testy) );
      if (err < minerr) { minerr = err; }
      if (err > maxerr) { maxerr = err; }
      sumerr += err;
      raderr = fabs(rad-testrad);
      if (raderr < minraderr) { minraderr = raderr; }
      if (raderr > maxraderr) { maxraderr = raderr; }
      sumraderr += raderr;
      biasx += x - testx;
      biasy += y - testy;
      if (i == 0) {
	printf("First opt: real coords (%g,%g), found coords (%g,%g)\n", testx,testy, x,y);
      }
    }
  }
  printf("Pos err: min=%g, max=%g, mean=%g, xbias = %g, ybias = %g\n", minerr, maxerr, sumerr/avgcount, biasx/avgcount, biasy/avgcount);
  // XXX Radius error meaningless here because radius not optimized.
  //printf("Rad err: min=%g, max=%g, mean=%g\n", minraderr, maxraderr, sumraderr/avgcount);

  testrad = 5.5;
  pixacc = 0.01;
  avgcount = 50;
  x = 120.5; y = 120;
  printf("Chasing around a slightly noisy disk of known radius %g to %g pixel\n", testrad, pixacc);
  // Make the radius slightly larger than the radius of the spot.
  compute_disk_chase_statistics(Gaussiantracker, 1.3*testrad, pixacc, avgcount, minerr, maxerr, sumerr, biasx,biasy, x,y);
  printf("Pos err: min=%g, max=%g, mean=%g, xbias = %g, ybias = %g\n", minerr, maxerr, sumerr/avgcount, biasx/avgcount, biasy/avgcount);

  x = 120.5; y = 120;
  printf("Chasing around a slightly noisy cone of known radius %g to %g pixel\n", testrad, pixacc);
  // Make the radius slightly larger than the radius of the spot.
  compute_cone_chase_statistics(Gaussiantracker, 1.3*testrad, pixacc, avgcount, minerr, maxerr, sumerr, biasx,biasy, x,y);
  printf("Pos err: min=%g, max=%g, mean=%g, xbias = %g, ybias = %g\n", minerr, maxerr, sumerr/avgcount, biasx/avgcount, biasy/avgcount);

  testrad = 5.5;
  pixacc = 0.05;
  x = 120.5; y = 120;
  disc_image image6(0,255,0,255,127,0,x,y,testrad, 250);
  printf("Optimizing a slightly noisy disk of known radius %g at %g,%g\n", testrad, x,y);
  Gaussiantracker.optimize(image6, 0, x, y, floor(x), ceil(y));
  printf("  Found a spot of radius %g at %g,%g\n", Gaussiantracker.get_radius(), Gaussiantracker.get_x(), Gaussiantracker.get_y());

  pixacc = 0.05;
  testrad = 5.5;
  Gaussiantracker.set_pixel_accuracy(pixacc);
  printf("Timing how long it takes to optimize pos to %g pixels from a nearby position on average\n", pixacc);
  avgcount = 10;
  Gaussiantracker.optimize(image, 0, x,y);	      // Get back to the correct starting location
  gettimeofday(&start, NULL);
  for (i = 0; i < avgcount; i++) {
    Gaussiantracker.optimize_xy(image, 0, x, y,
      x + ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2),
      y + ( (rand()/(double)(RAND_MAX)) - 0.5) * 2 * (testrad/2));
  }
  gettimeofday(&end, NULL);
  printf("  Time: %lg seconds per optimization\n", duration(end, start)/avgcount);

  //-----------------------------------------------------------------------------------------------
  // Testing the Z-tracking classes.
  printf("-----------------------------------------------------------------\n");

  // Construct a PSF kernel by making a number of disc images and sticking them into it.
  disc_image  *discs[10];
  PSF_File    *psf = new PSF_File("deleteme.tif", 25, false);
  for (i = 0; i < 10; i++) {
    discs[i] = new disc_image(0,128, 0,128, 0, 0.0, 64,64, i+10, 255, 4);
    psf->append_line(*discs[i], 64, 64);
  }
  delete psf;
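  // Depth index i in the PSF file corresponds to a disc of radius i+10, so a disc of
  // radius 15 should fit best at depth 5, radius 17 at depth 7, and radius 15.5 at
  // the interpolated depth 5.5 -- the values the tests below expect.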

  // Test the best-fit-finding code
  radial_average_tracker_Z  Ztrack("deleteme.tif");
  double z = 0.0;
  Ztrack.locate_best_fit_in_depth(*discs[5], 0, 64, 64, z);
  printf("Z best fit should be 5, found at %lf\n", z);

  // Test the optimization code
  Ztrack.optimize(*discs[7], 0, 64, 64, z);
  printf("Z optimum should be 7, found at %lf\n", z);

  // Test on a novel image
  disc_image test_disc(0,128, 0,128, 0, 0.0, 64,64, 5.5+10, 255, 4);
  Ztrack.optimize(test_disc, 0, 64, 64, z);
  printf("Z optimum should be 5.5, found at %lf\n", z);

  // Delete the PSF file and free the disc images used to build it
  unlink("deleteme.tif");
  for (i = 0; i < 10; i++) { delete discs[i]; }
  
  return 0;
}
Example #30
0
bool KoReportODTRenderer::render(const KoReportRendererContext& context, ORODocument* document, int /*page*/)
{
    QTextTableFormat tableFormat;
    tableFormat.setCellPadding(5);
    tableFormat.setHeaderRowCount(1);
    tableFormat.setBorderStyle(QTextFrameFormat::BorderStyle_Solid);
    tableFormat.setWidth(QTextLength(QTextLength::PercentageLength, 100));
    QTextTable *table = m_cursor.insertTable(1, 1, tableFormat);

    long renderedSections = 0;
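    //Each rendered report section becomes one row of the table, with one column per
    //primitive; primitives are sorted by X so they fall into cells in left-to-right
    //order, and the table gains columns whenever a wider section is encountered.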

    for (long s = 0; s < document->sections(); s++) {
        OROSection *section = document->section(s);
        section->sortPrimatives(OROSection::SortX);

        if (section->type() == KRSectionData::GroupHeader || section->type() == KRSectionData::GroupFooter ||
            section->type() == KRSectionData::ReportHeader || section->type() == KRSectionData::ReportFooter ||
            section->type() == KRSectionData::Detail){
            //Add this section to the document

            //Resize the table to accommodate all the primitives in the section
            if (table->columns() < section->primitives()) {
                table->appendColumns(section->primitives() - table->columns());
            }

            if (renderedSections > 0) {
                //Move back a row and then forward a row to land on the first cell of the new row
                m_cursor.movePosition(QTextCursor::PreviousRow);
                m_cursor.movePosition(QTextCursor::NextRow);
            } else {
                //On the first row, ensure we are in the first cell after expanding the table
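                //movePosition(PreviousCell) returns false once the cursor is in the
                //first cell, so this loop rewinds to the top-left of the table.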
                while (m_cursor.movePosition(QTextCursor::PreviousCell)){}
            }
            //Render the objects in each section
            for (int i = 0; i < section->primitives(); i++) {
                //Colour the cell using the section background colour
                OROPrimitive * prim = section->primitive(i);
                QTextTableCell cell = table->cellAt(m_cursor);
                QTextCharFormat format = cell.format();
                format.setBackground(section->backgroundColor());
                cell.setFormat(format);

                if (prim->type() == OROTextBox::TextBox) {
                    OROTextBox * tb = (OROTextBox*) prim;
                    m_cursor.insertText(tb->text());
                } else if (prim->type() == OROImage::Image) {
                    OROImage * im = (OROImage*) prim;

                    m_cursor.insertImage(im->image().scaled(im->size().width(), im->size().height(), Qt::KeepAspectRatio));

                } else if (prim->type() == OROPicture::Picture) {
                    OROPicture * im = (OROPicture*) prim;

                    QImage image(im->size().toSize(), QImage::Format_RGB32);
                    image.fill(qRgb(255, 255, 255)); //QImage contents start uninitialized; clear to white first
                    QPainter painter(&image);
                    im->picture()->play(&painter);

                    m_cursor.insertImage(image);
                } else {
                    kDebug() << "unhandled primitive type";
                }
                m_cursor.movePosition(QTextCursor::NextCell);

            }
            if (s < document->sections() - 1) {
                table->appendRows(1);
            }

            renderedSections++;
        }
    }

    QTextDocumentWriter writer(context.destinationUrl.toLocalFile());
    return writer.write(m_document);
}