Example #1
void MainWindow::on_actionLoad_Stereo_Calibration_triggered()
{
    QString fileName = QFileDialog::getOpenFileName(this, "Select file with calibration data", workingDir);
    if (!fileName.isNull())
    {
        if (depthMapBuilder.loadCalibrationParams(fileName.toStdString()))
        {
            cv::Size imgSize(this->currentSize.width(), this->currentSize.height());
            cv::Mat mapx, mapy;
            cv::Rect roi;

            depthMapBuilder.getLeftMapping(imgSize, mapx, mapy, roi);
            frameProcessor[0].setUndistortMappings(mapx, mapy, roi);

            depthMapBuilder.getRightMapping(imgSize, mapx, mapy, roi);
            frameProcessor[1].setUndistortMappings(mapx, mapy, roi);

            QMessageBox::information(this, tr("Stereo Calibration"), tr("Loaded successfully!"), QMessageBox::Ok);
        }
        else
        {
            QMessageBox::warning(this, tr("Stereo Calibration"), tr("Load failed!"));
        }
    }
}
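The mapx/mapy pair produced by the calibration loader is the standard input to cv::remap. A minimal sketch of what a frame processor might do with the stored mappings (the frameProcessor internals are not shown in this example; the names below are illustrative):

// Hypothetical consumer of the undistort mappings set above.
// cv::remap resamples the frame through the per-pixel lookup tables,
// and the ROI crops away the black border left by rectification.
cv::Mat undistorted;
cv::remap(frame, undistorted, mapx, mapy, cv::INTER_LINEAR);
undistorted = undistorted(roi);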
Example #2
void KWTextImage::drawCustomItem( QPainter* p, int x, int y, int wpix, int hpix, int /*ascentpix*/, int cx, int cy, int cw, int ch, const QColorGroup& cg, bool selected, int /*offset*/,  bool drawingShadow)
{
    if ( drawingShadow )
        return;

    // (x,y) is the position of the inline item (in pixels)
    // (wpix,hpix) is the size of the inline item (in pixels)
    // (cx,cy,cw,ch) is the rectangle to be painted, in pixels too
    if ( m_image.isNull() ) {
        kdDebug() << "KWTextImage::draw null image!" << endl;
        p->fillRect( x, y, 50, 50, cg.dark() );
        return;
    }

    QSize imgSize( wpix, hpix );

    QRect rect( QPoint(x, y), imgSize );
    if ( !rect.intersects( QRect( cx, cy, cw, ch ) ) )
        return;

    QPixmap pixmap=m_image.generatePixmap( imgSize, true );
    //if ( placement() == PlaceInline )
        p->drawPixmap( x, y, pixmap );
    //else
    //    p->drawPixmap( cx, cy, pixmap, cx - x, cy - y, cw, ch );

    if ( selected && placement() == PlaceInline && p->device()->devType() != QInternal::Printer ) {
        p->fillRect( rect , QBrush( cg.highlight(), QBrush::Dense4Pattern) );
    }
}
Example #3
/*! Add to the current transformation matrix a \b delta translation.
*/
void ImageViewer::panQt(const QPoint &delta) {
  if (delta == QPoint()) return;

  // stop panning when the image is at the edge of the window
  QPoint delta_(delta.x(), delta.y());

  TToonzImageP timg  = (TToonzImageP)m_image;
  TRasterImageP rimg = (TRasterImageP)m_image;
  if (timg || rimg) {
    bool isXPlus = delta.x() > 0;
    bool isYPlus = delta.y() > 0;

    TDimension imgSize((timg) ? timg->getSize() : rimg->getRaster()->getSize());
    int subSampling = (timg) ? timg->getSubsampling() : rimg->getSubsampling();

    TPointD cornerPos = TPointD(imgSize.lx * ((isXPlus) ? -1 : 1),
                                imgSize.ly * ((isYPlus) ? 1 : -1)) *
                        (0.5 / (double)subSampling);
    cornerPos = m_viewAff * cornerPos;

    if ((cornerPos.x > 0) == isXPlus) delta_.setX(0);
    if ((cornerPos.y < 0) == isYPlus) delta_.setY(0);
  }

  setViewAff(TTranslation(delta_.x(), -delta_.y()) * m_viewAff);

  update();
}
Example #4
void ImageLens::doTrans(int& x, int& y) const
{
    size2d s = imgSize();
    int w = s.w;
    int h = s.h;

    const ImageTransform& trafo = gSession->params.imageTransform;
    if        (trafo.rotation==0 && !trafo.mirror) {
        ; // do nothing
    } else if (trafo.rotation==1 && !trafo.mirror) {
        qSwap(x, y);
        y = w - y - 1;
    } else if (trafo.rotation==2 && !trafo.mirror) {
        x = w - x - 1;
        y = h - y - 1;
    } else if (trafo.rotation==3 && !trafo.mirror) {
        qSwap(x, y);
        x = h - x - 1;
    } else if (trafo.rotation==0 &&  trafo.mirror) {
        x = w - x - 1;
    } else if (trafo.rotation==1 &&  trafo.mirror) {
        y = h - y - 1;
        qSwap(x, y);
        y = w - y - 1;
    } else if (trafo.rotation==2 &&  trafo.mirror) {
        y = h - y - 1;
    } else if (trafo.rotation==3 &&  trafo.mirror) {
        qSwap(x, y);
    }
}
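The eight branches enumerate the dihedral symmetries of a rectangle: four quarter-turn rotations, each with an optional mirror. The same mapping can also be written compositionally, which is handy for testing; a self-contained sketch (the helper name and signature are invented for illustration, not part of ImageLens):

#include <utility>

// Illustrative standalone equivalent of ImageLens::doTrans for a w x h image:
// rotate by 'rotation' quarter turns, then mirror about the vertical axis.
static void applyImageTransform(int& x, int& y, int w, int h, int rotation, bool mirror)
{
    for (int r = 0; r < rotation; ++r) {
        int tmp = x;
        x = y;
        y = w - tmp - 1;   // one 90-degree turn
        std::swap(w, h);   // the image dimensions swap with every quarter turn
    }
    if (mirror)
        x = w - x - 1;     // mirror in the rotated image
}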
Example #5
/////////////////////////////////////////////////////////
// processImage
//
/////////////////////////////////////////////////////////
void pix_tIIRf :: processImage(imageStruct &image)
{
  int j;
  size_t imagesize = imgSize(&image);
  unsigned char *dest;

  if(!imgCompare(image, m_image)) {
    // LATER only reallocate if really needed
    deallocate();
    allocate(image);
    m_set=CLEAR;
  }

  switch(m_set) {
  case SET: set(&image); break;
  case CLEAR: set(); break;
  default: break;
  }
  m_set=NONE;

  dest=m_image.data;

  // do the filtering

  // feed-back
  // w[n] = x[n]*fb0 + w[n-1]*fb1 + ... + w[n-N]*fbN

  // w[n] = x[n]*fb0
  img2buf(&image, m_buffer[m_counter], m_fb[0]);
  for(j=1; j<m_fbnum; j++) {
    // w[n] += w[n-J]*ffJ
    const unsigned int index=getIndex(m_counter, -j, m_bufnum-1);
    weightAdd(m_buffer[index],
	      m_buffer[m_counter],
	      m_fb[j],
	      imagesize);
  }

  // feed-forward
  //  y[n] = ff0*w[n] + ff1*w[n-1] + ... + ffM*w[n-M]
  weightSet(m_buffer[m_counter],
	    m_buffer[m_bufnum-1],
	    m_ff[0],
	    imagesize);

  for(j=1; j<m_ffnum; j++) {
    const unsigned int index=getIndex(m_counter, -j, m_bufnum-1);
    weightAdd(m_buffer[index],
	      m_buffer[m_bufnum-1],
	      m_ff[j],
	      imagesize);
  }

  buf2img(m_buffer[m_bufnum-1], &image);


  m_counter = getIndex(m_counter, 1, m_bufnum-1);
}
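Per pixel this is a direct-form II IIR filter: the ring buffer holds the internal states w[n-j]. A scalar sketch of the same recurrence (the names and the flat state array are illustrative, not pix_tIIRf members):

// Illustrative scalar version of one filter step.
// w[0] = w[n-1], w[1] = w[n-2], ... (history of max(nfb, nff) - 1 states)
static float tiirStep(float x, float* w, int nfb, const float* fb,
                      int nff, const float* ff)
{
  float wn = x * fb[0];                       // w[n]  = x[n]*fb0
  for (int j = 1; j < nfb; ++j)
    wn += w[j - 1] * fb[j];                   // w[n] += w[n-j]*fbj
  float y = wn * ff[0];                       // y[n]  = ff0*w[n]
  for (int j = 1; j < nff; ++j)
    y += w[j - 1] * ff[j];                    // y[n] += ffj*w[n-j]
  int hist = (nfb > nff ? nfb : nff) - 1;     // shift the state history
  for (int j = hist - 1; j > 0; --j)
    w[j] = w[j - 1];
  if (hist > 0)
    w[0] = wn;                                // w[n] becomes the newest state
  return y;
}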
Example #6
// retrieve the current buffer into the img (doing a conversion from float)
static void buf2img(t_float*fbuffer, imageStruct*img) {
  unsigned char*bbuffer=img->data;
  size_t size=imgSize(img);
  size_t i;

  for(i=0; i<size; i++) {
    *bbuffer++ = CLAMP(static_cast<int>((*fbuffer++)*255));
  }
}
Example #7
// store the given image in the buffer (doing a conversion to float)
static void img2buf(imageStruct*img, t_float*fbuffer, const t_float factor=1.) {
  const t_float f=factor/255.;
  unsigned char*bbuffer=img->data;
  size_t size=imgSize(img);
  size_t i;

  for(i=0; i<size; i++) {
    *fbuffer++ = f * (*bbuffer++);
  }
}
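Both conversion helpers rely on a CLAMP macro that is not shown in this listing. An assumed definition that saturates to the byte range (plausible, not necessarily the Gem original):

// Assumed behaviour of CLAMP as used in buf2img: saturate an int to [0, 255].
static inline unsigned char clampByte(int v) {
  return (unsigned char)(v < 0 ? 0 : (v > 255 ? 255 : v));
}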
Example #8
int main(int argc, char *argv[])
{
    // Read command line files 
    
    // Init cuda
    CUcontext cuContext;
    initCuda(cuContext); 

    // Image buffer allocation
    unsigned int depth  = 4;
    unsigned int width  = 960;
    unsigned int height = 1080;
    size_t bufferSize = sizeof(unsigned char)*width*height*depth;
    unsigned char *imgRight = (unsigned char*)malloc(bufferSize);
    unsigned char *imgLeft  = (unsigned char*)malloc(bufferSize);

    // Read the list of test files
    std::vector<std::string> testsFiles;
    readTestsFiles(testsFiles, "./tests.txt");
   
    // Launch tests 
    for(size_t i=0; i<testsFiles.size(); i++)
    {
        std::stringstream testImage1;
        testImage1 << "./" << testsFiles[i] << "_1.dat";
        std::stringstream testImage2;
        testImage2 << "./" << testsFiles[i] << "_2.dat";
        
        readTestImage( testImage1.str(), imgRight, bufferSize);
        readTestImage( testImage2.str(), imgLeft, bufferSize);
        
        VertexBufferObject rightPoints;
        VertexBufferObject leftPoints;
        DescriptorData  rightDescriptors;
        DescriptorData  leftDescriptors;
        computeDescriptorsLane(imgRight, depth, width, height, rightPoints, rightDescriptors);
        computeDescriptorsLane(imgLeft, depth, width, height, leftPoints, leftDescriptors);
        
        UInt2 imgSize(1920,1080);

        vector<CvPoint2D32f> leftMatchedPts;
        vector<CvPoint2D32f> rightMatchedPts;
        leftMatchedPts.reserve(10000);
        rightMatchedPts.reserve(10000);
        computeMatching( leftDescriptors, rightDescriptors, leftMatchedPts, rightMatchedPts, imgSize);
    }
    
    // free image buffers
    free(imgRight);
    free(imgLeft);

    // finalize cuda
    CUresult cerr = cuCtxDestroy(cuContext);
    checkError(cerr);
    return 0;    
}
Example #9
const Range& ImageLens::rgeInten(bool fixed) const
{
    if (fixed)
        return gSession->activeClusters.rgeFixedInten.yield();
           // TODO restore from pre d9714895: (trans_, cut_);
    if (!rgeInten_.isValid()) {
        size2d sz = imgSize();
        for (int j=0; j<sz.h; ++j)
            for (int i=0; i<sz.w; ++i)
                rgeInten_.extendBy(double(imageInten(i,j)));
    }
    return rgeInten_;
}
Example #10
void computeDescriptorsLane(void *pixels, int depth, int width, int height, VertexBufferObject &points, DescriptorData  &descriptors)
{
    unsigned char *srcBuffer;
    unsigned char *dstBuffer;
    cudaMalloc( (void**) &srcBuffer, sizeof(unsigned char)*depth*width*height);
    cudaMemcpy( srcBuffer, pixels, sizeof(unsigned char)*depth*width*height, cudaMemcpyHostToDevice );
    cudaMalloc((void**) &dstBuffer, sizeof(unsigned char)*depth*width*height*2);
    // convert to RGB
    cudaYCbYCrToY( (uchar4*)dstBuffer, (uchar4*)srcBuffer, width*2, height);
    std::cout << "Passed cudaYCbYCrToY" << std::endl;

    // convert to 1 plane float cuda
    //float *yBuffer;
    //cudaMalloc((void**)&yBuffer, sizeof(float)*width*2*height);
    
    UInt2 imgSize(1920,1080);
    CudaImageBuffer<float> m_satImage;
    allocBuffer(m_satImage, imgSize);
    
    cudaRGBAtoCuda((float*)m_satImage, (uchar4*)dstBuffer, width*2, height, width*2); 
    std::cout << "Passed cudaRGBAtoCuda" << std::endl;
    
    convertToIntegral(m_satImage);
    std::cout << "Passed convertToIntegral" << std::endl;
    
    CudaImageBuffer<float> m_hesImage;
    allocBuffer(m_hesImage, imgSize);
    HessianData     m_hessianData;
    m_hessianData.allocImages(imgSize);
    computeHessianDet( m_satImage, m_hesImage, m_hessianData );
    std::cout << "Passed computeHessianDet" << std::endl;

    computeNonMaxSuppression( m_hesImage, m_hessianData );
    std::cout << "Passed computeNonMaxSuppression" << std::endl;

    collectHessianPoints( m_hessianData, descriptors);
    std::cout << "Passed collectHessianPoint" << std::endl;
    
    computeDescriptors( m_satImage, descriptors);
    std::cout << "Passed computeDescriptors" << std::endl;

    //collectPoints( descriptors, points, imgSize );
    
    // free memory
    m_hessianData.freeImages();
    releaseBuffer(m_hesImage); 
    releaseBuffer(m_satImage);
    cudaFree(srcBuffer);
    cudaFree(dstBuffer);
}
Example #11
// allocate ff+fb buffers that can hold "img" like images
void pix_tIIRf ::allocate(imageStruct&img) {
  deallocate();

  m_image.xsize=img.xsize;
  m_image.ysize=img.ysize;
  m_image.setCsizeByFormat(img.format);
  m_image.reallocate();
  m_image.setBlack();

  size_t size=imgSize(&img);
  int i;
  for(i=0; i<m_bufnum; i++) {
    m_buffer[i]=new t_float[size]; // new[] counts elements; no sizeof(t_float) factor needed
  }
}
Example #12
void TextureBrowser::updateInfoConverted()
{
	if(NULL != curTexture && NULL != curDescriptor)
	{
		char tmp[1024];
		const char *formatStr = "Unknown";

		int datasize = 0;
		int filesize = 0;
		QSize imgSize(0, 0);

		if(curDescriptor->compression[curTextureView].format != DAVA::FORMAT_INVALID)
		{
			DAVA::FilePath compressedTexturePath = DAVA::GPUFamilyDescriptor::CreatePathnameForGPU(curDescriptor, curTextureView);
			filesize = QFileInfo(compressedTexturePath.GetAbsolutePathname().c_str()).size();
			formatStr = GlobalEnumMap<DAVA::PixelFormat>::Instance()->ToString(curDescriptor->compression[curTextureView].format);
			
			int w = curDescriptor->compression[curTextureView].compressToWidth;
			int h = curDescriptor->compression[curTextureView].compressToHeight;

			if(0 != w && 0 != h)
			{
				imgSize = QSize(w, h);
			}
			else
			{
				imgSize = QSize(curTexture->width, curTexture->height);
			}

			// get data size
			datasize = ImageTools::GetTexturePhysicalSize(curDescriptor, curTextureView);
		}

		sprintf(tmp, "Format\t: %s\nSize\t: %dx%d\nData size\t: %s\nFile size\t: %s", formatStr, imgSize.width(), imgSize.height(),
			SizeInBytesToString(datasize).c_str(),
			SizeInBytesToString(filesize).c_str());

		ui->labelConvertedFormat->setText(tmp);
	}
	else
	{
		ui->labelConvertedFormat->setText("");
	}
}
Example #13
void System3d::setWebcamImage(sensor_msgs::Image msg)
{
    cv_bridge::CvImagePtr cv_ptr;
    cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
    //webcamImageTmp = cv_ptr->image;
    Mat tmpImage, tmpImage2;
    transpose(cv_ptr->image, tmpImage2);
    flip(tmpImage2, tmpImage, 1);

    Size imgSize(8*IMG_HEIGHT, 8*IMG_WIDTH);

    //Rect area( imgSize.width, 0, imgSize.height);
    int rowStart = 50;
    int colStart = 0;
    Range row(rowStart, rowStart + imgSize.height);
    Range col(colStart, colStart + imgSize.width);

    webcamImageTmp = tmpImage(row, col);
}
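The transpose-then-flip(…, 1) pair above is the usual OpenCV idiom for a 90-degree clockwise rotation. With OpenCV 3.0 or later the same result is available in one call (assuming that version is available to this node):

// Equivalent one-call rotation (OpenCV >= 3.0):
cv::Mat rotated;
cv::rotate(cv_ptr->image, rotated, cv::ROTATE_90_CLOCKWISE);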
Example #14
void ImageViewer_ex2::adjustimageAffineSimilarity()
{
    Vector3f l(3,1);
    Vector3f m(3,1);
    Vector3f r1(3,1);
    Vector3f r2(3,1);
    MatrixXf A(2,3);

    l = pinmanager->getLine(0);
    m = pinmanager->getLine(1);

    r2 << l(0) * m(0), l(0) * m(1) + l(1) * m(0), l(1) * m(1);

    l = pinmanager->getLine(2);
    m = pinmanager->getLine(3);

    r1 << l(0) * m(0), l(0) * m(1) + l(1) * m(0), l(1) * m(1);

    A << r1.transpose(), r2.transpose();
    JacobiSVD<MatrixXf> SVD(A, ComputeFullV);
    VectorXf S = SVD.matrixV().col(SVD.matrixV().cols() - 1);

    //S /= S(2);
    S(2) = 1;

    MatrixXf kkt(2,2);
    kkt << S(0), S(1), S(1), S(2);

    LLT<MatrixXf> lltOfA(kkt);
    MatrixXf L = lltOfA.matrixU();

    H << L(0), L(1), 0, L(2), L(3), 0, 0, 0, 1;
    //std::cout << H << std::endl;
    H = H.inverse();

    QSize imgSize(this->width(), this->height());
    QVector<QPoint> areaRender;
    areaRender << QPoint(0,0) << QPoint(0, imgSize.height()) << QPoint(imgSize.width(), imgSize.height()) << QPoint(imgSize.width(), 0);
    showResult(imgSize, areaRender);
}
Example #15
void ImageViewer_ex2::adjustImage(float w, float h)
{
    Vector3f hl(3,1);
    hl = pinmanager->getHorizonLine().transpose();
    hl = hl/ hl(2);

    MatrixXf temp(3,3);
    temp << 1, 0, 0, 0, 1, 0, hl(0), hl(1), hl(2);
    Hp = temp;

    H = Hp;
    Hi = H.inverse();

    QSize imgSize(this->width(), this->height());
    QVector<QPoint> areaRender;
    areaRender << QPoint(0,0) << QPoint(0, imgSize.height()) << QPoint(imgSize.width(), imgSize.height()) << QPoint(imgSize.width(), 0);
    showResult(imgSize, areaRender);
    imageBase = imageResult;
    pinmanager->setType(pinmanager->TYPE_FOUR);
    if(isDebug){
        prepareDebug();
    }
}
Example #16
    void SliceRenderer2D::updateResult(DataContainer& data) {
        ImageRepresentationGL::ScopedRepresentation img(data, p_sourceImageID.getValue());

        if (img != 0) {
            if (img->getDimensionality() == 2) {
                cgt::vec3 imgSize(img->getSize());
             
                float renderTargetRatio = static_cast<float>(getEffectiveViewportSize().x) / static_cast<float>(getEffectiveViewportSize().y);

                cgt::vec2 topLeft_px(static_cast<float>(p_cropLeft.getValue()), static_cast<float>(p_cropTop.getValue()));
                cgt::vec2 bottomRight_px(static_cast<float>(imgSize.x - p_cropRight.getValue()), static_cast<float>(imgSize.y - p_cropBottom.getValue()));
                cgt::vec2 croppedSize = bottomRight_px - topLeft_px;

                float sliceRatio =
                    (static_cast<float>(croppedSize.x) * img.getImageData()->getMappingInformation().getVoxelSize().x)
                    / (static_cast<float>(croppedSize.y) * img.getImageData()->getMappingInformation().getVoxelSize().y);
       
                // configure model matrix so that slices are rendered with the correct aspect ratio
                float ratioRatio = sliceRatio / renderTargetRatio;
                cgt::mat4 viewMatrix = (ratioRatio > 1) ? cgt::mat4::createScale(cgt::vec3(1.f, 1.f / ratioRatio, 1.f)) : cgt::mat4::createScale(cgt::vec3(ratioRatio, 1.f, 1.f));
                viewMatrix.t11 *= -1;

                // prepare OpenGL
                _shader->activate();
                cgt::TextureUnit inputUnit, tfUnit;
                img->bind(_shader, inputUnit);
                p_transferFunction.getTF()->bind(_shader, tfUnit);

                if (p_invertXAxis.getValue())
                    viewMatrix *= cgt::mat4::createScale(cgt::vec3(-1, 1, 1));

                if (p_invertYAxis.getValue())
                    viewMatrix *= cgt::mat4::createScale(cgt::vec3(1, -1, 1));


                cgt::vec2 topLeft = topLeft_px / imgSize.xy();
                cgt::vec2 bottomRight = bottomRight_px / imgSize.xy();

                _shader->setUniform("_viewMatrix", viewMatrix);
                _shader->setUniform("_topLeft", topLeft);
                _shader->setUniform("_bottomRight", bottomRight);

                // render slice
                FramebufferActivationGuard guard(this);
                createAndAttachColorTexture();
                createAndAttachDepthTexture();
                glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
                QuadRdr.renderQuad();


                _shader->deactivate();
                cgt::TextureUnit::setZeroUnit();

                data.addData(p_targetImageID.getValue(), new RenderData(_fbo));
            }
            else {
                LERROR("Input image must have dimensionality of 2.");
            }
        }
        else {
            LDEBUG("No suitable input image found.");
        }
    }
Example #17
void TextLayer::recreateTexture(VidgfxContext *gfx)
{
	if(!m_isTexDirty)
		return; // Don't waste any time if it hasn't changed
	m_isTexDirty = false;

	// Delete existing texture if one exists
	if(m_texture != NULL)
		vidgfx_context_destroy_tex(gfx, m_texture);
	m_texture = NULL;

	// Determine texture size. We need to keep in mind that the text in the
	// document might extend outside of the layer's bounds.
	m_document.setTextWidth(m_rect.width());
	QSize size(
		(int)ceilf(m_document.size().width()),
		(int)ceilf(m_document.size().height()));

	if(m_document.isEmpty() || size.isEmpty()) {
		// Nothing to display
		return;
	}

	// Create temporary canvas. We need to be careful here as text is rendered
	// differently on premultiplied vs non-premultiplied pixel formats. On a
	// premultiplied format text is rendered with subpixel rendering enabled
	// while on a non-premultiplied format it is not. As we don't want subpixel
	// rendering we use the standard ARGB32 format.
	QSize imgSize(
		size.width() + m_strokeSize * 2, size.height() + m_strokeSize * 2);
	QImage img(imgSize, QImage::Format_ARGB32);
	img.fill(Qt::transparent);
	QPainter p(&img);
	p.setRenderHint(QPainter::Antialiasing, true);

	// Render text
	//m_document.drawContents(&p);

	// Render stroke
	if(m_strokeSize > 0) {
#define STROKE_TECHNIQUE 0
#if STROKE_TECHNIQUE == 0
		// Technique 0: Use QTextDocument's built-in text outliner
		//quint64 timeStart = App->getUsecSinceExec();

		QTextDocument *outlineDoc = m_document.clone(this);

		QTextCharFormat format;
		QPen pen(m_strokeColor, (double)(m_strokeSize * 2));
		pen.setJoinStyle(Qt::RoundJoin);
		format.setTextOutline(pen);
		QTextCursor cursor(outlineDoc);
		cursor.select(QTextCursor::Document);
		cursor.mergeCharFormat(format);

		// Take into account the stroke offset
		p.translate(m_strokeSize, m_strokeSize);

		//quint64 timePath = App->getUsecSinceExec();
		outlineDoc->drawContents(&p);
		delete outlineDoc;

		//quint64 timeEnd = App->getUsecSinceExec();
		//appLog() << "Path time = " << (timePath - timeStart) << " usec";
		//appLog() << "Render time = " << (timeEnd - timePath) << " usec";
		//appLog() << "Full time = " << (timeEnd - timeStart) << " usec";
#elif STROKE_TECHNIQUE == 1
		// Technique 1: Create a text QPainterPath and stroke it
		quint64 timeStart = App->getUsecSinceExec();

		// Create the path for the text's stroke
		QPainterPath path;
		QTextBlock &block = m_document.firstBlock();
		int numBlocks = m_document.blockCount();
		for(int i = 0; i < numBlocks; i++) {
			QTextLayout *layout = block.layout();
			for(int j = 0; j < layout->lineCount(); j++) {
				QTextLine &line = layout->lineAt(j);
				const QString text = block.text().mid(
					line.textStart(), line.textLength());
				QPointF pos = layout->position() + line.position();
				pos.ry() += line.ascent();
				//appLog() << pos << ": " << text;
				path.addText(pos, block.charFormat().font(), text);
			}
			block = block.next();
		}

		quint64 timePath = App->getUsecSinceExec();
		path = path.simplified(); // Fixes gaps with large stroke sizes
		quint64 timeSimplify = App->getUsecSinceExec();

		// Render the path
		//p.strokePath(path, QPen(m_strokeColor, m_strokeSize));

		// Convert it to a stroke
		QPainterPathStroker stroker;
		stroker.setWidth(m_strokeSize);
		//stroker.setCurveThreshold(2.0);
		stroker.setJoinStyle(Qt::RoundJoin);
		path = stroker.createStroke(path);

		// Render the path
		p.fillPath(path, m_strokeColor);

		quint64 timeEnd = App->getUsecSinceExec();
		appLog() << "Path time = " << (timePath - timeStart) << " usec";
		appLog() << "Simplify time = " << (timeSimplify - timePath) << " usec";
		appLog() << "Render time = " << (timeEnd - timeSimplify) << " usec";
		appLog() << "Full time = " << (timeEnd - timeStart) << " usec";
#elif STROKE_TECHNIQUE == 2
		// Technique 2: Similar to technique 1 but do each block separately
		quint64 timeStart = App->getUsecSinceExec();
		quint64 timeTotalSimplify = 0;
		quint64 timeTotalRender = 0;

		// Create the path for the text's stroke
		QTextBlock &block = m_document.firstBlock();
		int numBlocks = m_document.blockCount();
		for(int i = 0; i < numBlocks; i++) {
			// Convert this block to a painter path
			QPainterPath path;
			QTextLayout *layout = block.layout();
			for(int j = 0; j < layout->lineCount(); j++) {
				QTextLine &line = layout->lineAt(j);
				const QString text = block.text().mid(
					line.textStart(), line.textLength());
				QPointF pos = layout->position() + line.position() +
					QPointF(m_strokeSize, m_strokeSize);
				pos.ry() += line.ascent();
				//appLog() << pos << ": " << text;
				path.addText(pos, block.charFormat().font(), text);
			}

			// Prevent gaps appearing at larger stroke sizes
			quint64 timeA = App->getUsecSinceExec();
			path = path.simplified();
			quint64 timeB = App->getUsecSinceExec();
			timeTotalSimplify += timeB - timeA;

			// Render the path
			QPen pen(m_strokeColor, m_strokeSize * 2);
			pen.setJoinStyle(Qt::RoundJoin);
			p.strokePath(path, pen);
			timeA = App->getUsecSinceExec();
			timeTotalRender += timeA - timeB;

			// Iterate
			block = block.next();
		}

		// Make the final draw take into account the stroke offset
		p.translate(m_strokeSize, m_strokeSize);

		quint64 timeEnd = App->getUsecSinceExec();
		appLog() << "Simplify time = " << timeTotalSimplify << " usec";
		appLog() << "Render time = " << timeTotalRender << " usec";
		appLog() << "Full time = " << (timeEnd - timeStart) << " usec";
#elif STROKE_TECHNIQUE == 3
		// Technique 3: Raster brute-force where for each destination pixel
		// we measure the distance to the closest opaque source pixel
		quint64 timeStart = App->getUsecSinceExec();

		// Get bounding region based on text line bounding rects
		QRegion region;
		QTextBlock &block = m_document.firstBlock();
		int numBlocks = m_document.blockCount();
		for(int i = 0; i < numBlocks; i++) {
			QTextLayout *layout = block.layout();
			for(int j = 0; j < layout->lineCount(); j++) {
				QTextLine &line = layout->lineAt(j);
				const QString text = block.text().mid(
					line.textStart(), line.textLength());
				QRect rect = line.naturalTextRect()
					.translated(layout->position()).toAlignedRect();
				if(rect.isEmpty())
					continue; // Don't add empty rectangles
				rect.adjust(0, 0, 1, 0); // QTextLine is incorrect?
				rect.adjust(
					-m_strokeSize, -m_strokeSize,
					m_strokeSize, m_strokeSize);
				//appLog() << rect;
				region += rect;
			}

			// Iterate
			block = block.next();
		}
		quint64 timeRegion = App->getUsecSinceExec();

#if 0
		// Debug bounding region
		QPainterPath regionPath;
		regionPath.addRegion(region);
		regionPath.setFillRule(Qt::WindingFill);
		p.fillPath(regionPath, QColor(255, 0, 0, 128));
#endif // 0

		// We cannot read and write to the same image at the same time so
		// create a second one. Note that this is not premultiplied.
		QImage imgOut(size, QImage::Format_ARGB32);
		imgOut.fill(Qt::transparent);

		// Do distance calculation. We assume that non-fully transparent
		// pixels are always next to a fully opaque one so if the closest
		// "covered" pixel is not fully opaque then we can use that pixel's
		// opacity to determine the distance to the shape's edge.
		for(int y = 0; y < img.height(); y++) {
			for(int x = 0; x < img.width(); x++) {
				if(!region.contains(QPoint(x, y)))
					continue;
				float dist = getDistance(img, x, y, m_strokeSize);

				// We fake antialiasing by blurring the edge by 1px
				float outEdge = (float)m_strokeSize;
				if(dist >= outEdge)
					continue; // Outside stroke completely
				float opacity = qMin(1.0f, outEdge - dist);
				QColor col = m_strokeColor;
				col.setAlphaF(col.alphaF() * opacity);

				// Blend the stroke so that it appears under the existing
				// pixel data
				QRgb origRgb = img.pixel(x, y);
				QColor origCol(origRgb);
				origCol.setAlpha(qAlpha(origRgb));
				col = blendColors(col, origCol, 1.0f);
				imgOut.setPixel(x, y, col.rgba());
			}
		}
		quint64 timeRender = App->getUsecSinceExec();

		// Swap image data
		p.end();
		img = imgOut;
		p.begin(&img);

		quint64 timeEnd = App->getUsecSinceExec();
		appLog() << "Region time = " << (timeRegion - timeStart) << " usec";
		appLog() << "Render time = " << (timeRender - timeRegion) << " usec";
		appLog() << "Swap time = " << (timeEnd - timeRender) << " usec";
		appLog() << "Full time = " << (timeEnd - timeStart) << " usec";
#endif // STROKE_TECHNIQUE
	}

	// Render text
	m_document.drawContents(&p);

	// Convert the image to a GPU texture
	m_texture = vidgfx_context_new_tex(gfx, img);

	// Preview texture for debugging
	//img.save(App->getDataDirectory().filePath("Preview.png"));
}
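Technique 3 calls a getDistance() helper that is not included in this listing. A plausible brute-force version matching how it is used above (distance from a pixel to the nearest opaque source pixel within the stroke radius; this is an assumption about its contract, not the original implementation):

#include <cmath>

// Assumed contract: distance from (x, y) to the nearest non-transparent
// pixel of img, searched within radius r; returns > r when none is found.
static float getDistance(const QImage& img, int x, int y, int r)
{
	float best = (float)r + 1.0f;
	for (int dy = -r; dy <= r; ++dy) {
		for (int dx = -r; dx <= r; ++dx) {
			int px = x + dx, py = y + dy;
			if (px < 0 || py < 0 || px >= img.width() || py >= img.height())
				continue;
			int a = qAlpha(img.pixel(px, py));
			if (a == 0)
				continue;
			// a partially covered pixel lies within one pixel of the true edge
			float d = std::sqrt((float)(dx * dx + dy * dy)) + (1.0f - a / 255.0f);
			if (d < best)
				best = d;
		}
	}
	return best;
}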
Example #18
int main(int argc, char **argv) {

    /* command line arguments */
    BasicAppOptions appopt(argc, argv);

    if (!appopt.gotCalibStorageDir) {
        pcl::console::print_error(
            "No calibration storage directory provided. --calibstorage <path>\n");
        exit(1);
    }

    if (!appopt.gotRigConfigFile) {
        pcl::console::print_error(
            "No rig config provided. --rigconfig <file>\n");
        exit(1);
    }

    /* print usage info */
    printSimpleInfo("[Capture Pairs] \n", 
           "Focus windows 'capture' and hit [c] to capture a pair.\n[Esc] to quit.\n");

    /* the calibration storage */
    CalibStorageContract calibStorage(appopt.calibStorageDir);

    /* rig config */
    RigConfig rigConfig;
    rigConfig.loadFromFile(appopt.rigConfigFile);

    /* camera capturing interfaces */
    CameraInterface::Ptr camIf = createCameraInterface(rigConfig);
    if (!camIf->checkConnection()) {
        pcl::console::print_error("Camera not connected!\n");
        exit(1);
    }

    /* cloud interface */
    OpenNiInterfaceRGB::Ptr oniIf(
            new OpenNiInterfaceRGB(rigConfig.rangefinderDeviceID));
    bool cloudConnected = oniIf->init();
    if (!cloudConnected) {
        pcl::console::print_error("Can't connect to cloud interface!\n");
        exit(1);
    }
    oniIf->waitForFirstFrame();
    CloudProvider<pcl::PointXYZRGBA>::Ptr cloudIf;
    cloudIf = oniIf;

    /* setup visualizer */
    CalibVisualizer visualizer;

    /* capture window */
    cv::namedWindow("capture", CV_WINDOW_NORMAL|CV_GUI_EXPANDED);

    /* the captured pair */
    cv::Mat img;
    pcl::PointCloud<pcl::PointXYZRGBA>::Ptr cloud;

    /* resize captured images */
    cv::Size imgSize(rigConfig.cameraImageWidth,
                     rigConfig.cameraImageHeight);

    for (;;) {

        /* update and display cloud */
        cloud = cloudIf->getCloudCopy();
        visualizer.setMainCloud(cloud);

        visualizer.spinOnce();

        /* handle input */
        int key = cv::waitKey(2);
        if (key == KEY_ESC) {
            break;
        } else if (key == KEY_c) {
            /* capture photo and display */
            img = camIf->captureImage();
            cv::Mat displayImg;
            displayImg = img.clone();
            cv::imshow("capture", displayImg);

            /* scale image to camera resolution from rigconfig */
            cv::resize(img, img, imgSize);

            /* query again for storage */
            printSimpleInfo("[Capture]", 
                    " to store cloud/image hit [c] again, any other key to abort.\n");
            /* wait for another key */
            do { 
                key = cv::waitKey(1);
                visualizer.spinOnce();
            } while (key < 0);

            if (key == KEY_c) {
                calibStorage.addExtrinsicPairRGB(img, cloud);
            } else {
                printSimpleInfo("[Capture] ", "aborted.\n");
            }
        }

    }

}
Example #19
 static void init_planar_mats(Datastore *store, std::vector<cv::Mat> *channels)
 {
   for(uint i=0;i<channels->size();i++)
     (*channels)[i].create(imgSize(store), BaseType2CvDepth(store->type()));
 }
Example #20
void FilmstripFrames::paintEvent(QPaintEvent *evt) {
  QPainter p(this);

  // p.setRenderHint(QPainter::Antialiasing, true);

  QRect clipRect = evt->rect();

  p.fillRect(clipRect, Qt::black);
  // thumbnail rect, including offsets
  QRect iconImgRect = QRect(QPoint(fs_leftMargin + fs_iconMarginLR,
                                   fs_frameSpacing / 2 + fs_iconMarginTop),
                            m_iconSize);
  // frame size with margins
  QSize frameSize = m_iconSize + QSize(fs_iconMarginLR * 2,
                                       fs_iconMarginTop + fs_iconMarginBottom);
  //  .. and with offset
  QRect frameRect =
      QRect(QPoint(fs_leftMargin, fs_frameSpacing / 2), frameSize);

  int oneFrameHeight = frameSize.height() + fs_frameSpacing;

  // visible frame index range
  int i0 = y2index(clipRect.top());
  int i1 = y2index(clipRect.bottom());

  // fids, frameCount <- the level's frames
  std::vector<TFrameId> fids;
  TXshSimpleLevel *sl = getLevel();
  if (sl)
    sl->getFids(fids);
  else {
    for (int i = i0; i <= i1; i++) {
      // draw placeholder rectangles if the level could not be obtained
      QRect iconRect = frameRect.translated(QPoint(0, oneFrameHeight * i));
      p.setBrush(QColor(192, 192, 192));
      p.setPen(Qt::NoPen);
      p.drawRect(iconRect);
    }
    return;
  }

  //--- compute navigator rect ---

  QRect naviRect;
  ComboViewerPanel *inknPaintViewerPanel =
      TApp::instance()->getInknPaintViewerPanel();
  if (sl->getType() == TZP_XSHLEVEL && inknPaintViewerPanel) {
    // show navigator only if the inknpaint viewer is visible
    if (inknPaintViewerPanel->isVisible()) {
      SceneViewer *viewer = inknPaintViewerPanel->getSceneViewer();
      // imgSize: image's pixel size
      QSize imgSize(sl->getProperties()->getImageRes().lx,
                    sl->getProperties()->getImageRes().ly);
      // Viewer affine
      TAffine viewerAff =
          inknPaintViewerPanel->getSceneViewer()->getViewMatrix();
      // pixel size which will be displayed with 100% scale in Viewer Stage
      TFrameId currentId = TApp::instance()->getCurrentFrame()->getFid();
      double imgPixelWidth =
          (double)(imgSize.width()) / sl->getDpi(currentId).x * Stage::inch;
      double imgPixelHeight =
          (double)(imgSize.height()) / sl->getDpi(currentId).y * Stage::inch;

      // get the image's corner positions in viewer matrix (with current zoom
      // scale)
      TPointD imgTopRight =
          viewerAff * TPointD(imgPixelWidth / 2.0f, imgPixelHeight / 2.0f);
      TPointD imgBottomLeft =
          viewerAff * TPointD(-imgPixelWidth / 2.0f, -imgPixelHeight / 2.0f);

      // pixel size in viewer matrix ( with current zoom scale )
      QSizeF imgSizeInViewer(imgTopRight.x - imgBottomLeft.x,
                             imgTopRight.y - imgBottomLeft.y);

      // ratio of the Viewer frame's position and size
      QRectF naviRatio(
          (-(float)viewer->width() * 0.5f - (float)imgBottomLeft.x) /
              imgSizeInViewer.width(),
          1.0f -
              ((float)viewer->height() * 0.5f - (float)imgBottomLeft.y) /
                  imgSizeInViewer.height(),
          (float)viewer->width() / imgSizeInViewer.width(),
          (float)viewer->height() / imgSizeInViewer.height());

      naviRect = QRect(iconImgRect.left() +
                           (int)(naviRatio.left() * (float)iconImgRect.width()),
                       iconImgRect.top() +
                           (int)(naviRatio.top() * (float)iconImgRect.height()),
                       (int)((float)iconImgRect.width() * naviRatio.width()),
                       (int)((float)iconImgRect.height() * naviRatio.height()));
      // for drag move
      m_naviRectPos = naviRect.center();

      naviRect = naviRect.intersected(frameRect);

      m_icon2ViewerRatio.setX(imgSizeInViewer.width() /
                              (float)iconImgRect.width());
      m_icon2ViewerRatio.setY(imgSizeInViewer.height() /
                              (float)iconImgRect.height());
    }
  }

  //--- compute navigator rect end ---

  int frameCount = (int)fids.size();

  std::set<TFrameId> editableFrameRange;

  if (sl) editableFrameRange = sl->getEditableRange();

  bool isReadOnly    = false;
  if (sl) isReadOnly = sl->isReadOnly();

  int i;
  int iconWidth   = m_iconSize.width();
  int x0          = m_frameLabelWidth;
  int x1          = x0 + iconWidth;
  int frameHeight = m_iconSize.height();

  // horizontal lines separating the frames
  p.setPen(getLightLineColor());
  for (i = i0; i <= i1; i++) {
    int y = index2y(i) + frameHeight;
    p.drawLine(0, y, x1, y);
  }

  TFilmstripSelection::InbetweenRange range = m_selection->getInbetweenRange();

  // draw each frame
  for (i = i0; i <= i1; i++) {
    QRect tmp_iconImgRect =
        iconImgRect.translated(QPoint(0, oneFrameHeight * i));
    QRect tmp_frameRect = frameRect.translated(QPoint(0, oneFrameHeight * i));

    bool isCurrentFrame =
        (i == sl->fid2index(TApp::instance()->getCurrentFrame()->getFid()));
    bool isSelected =
        (0 <= i && i < frameCount && m_selection->isSelected(fids[i]));

    if (0 <= i && i < frameCount) {
      TFrameId fid = fids[i];

      // normal or inbetween (for vector levels)
      int flags = (sl->getType() == PLI_XSHLEVEL && range.first < fid &&
                   fid < range.second)
                      ? F_INBETWEEN_RANGE
                      : F_NORMAL;

      // draw icons
      drawFrameIcon(p, tmp_iconImgRect, i, fid, flags);

      p.setPen(Qt::NoPen);
      p.setBrush(Qt::NoBrush);
      p.drawRect(tmp_iconImgRect);

      // Frame number
      if (m_selection->isSelected(fids[i])) {
        if (TApp::instance()->getCurrentFrame()->isEditingLevel() &&
            isCurrentFrame)
          p.setPen(Qt::red);
        else
          p.setPen(Qt::white);
      } else
        p.setPen(QColor(192, 192, 192));

      p.setBrush(Qt::NoBrush);
      // for single frame
      QString text;
      if (fid.getNumber() == TFrameId::EMPTY_FRAME ||
          fid.getNumber() == TFrameId::NO_FRAME) {
        text = QString("Single Frame");
      }
      // for sequential frames (with letter)
      else if (Preferences::instance()->isShowFrameNumberWithLettersEnabled()) {
        text = fidToFrameNumberWithLetter(fid.getNumber());
      }
      // for sequential frames
      else {
        text = QString::number(fid.getNumber()).rightJustified(4, '0');
      }
      p.drawText(tmp_frameRect.adjusted(0, 0, -3, 2), text,
                 QTextOption(Qt::AlignRight | Qt::AlignBottom));
      p.setPen(Qt::NoPen);

      // Read-only frames (lock)
      if (0 <= i && i < frameCount) {
        if ((editableFrameRange.empty() && isReadOnly) ||
            (isReadOnly && (!editableFrameRange.empty() &&
                            editableFrameRange.count(fids[i]) == 0))) {
          static QPixmap lockPixmap(":Resources/forbidden.png");
          p.drawPixmap(tmp_frameRect.bottomLeft() + QPoint(3, -13), lockPixmap);
        }
      }
    }

    // navigator rect
    if (naviRect.isValid() && isCurrentFrame) {
      p.setPen(QPen(Qt::red, 1));
      p.drawRect(naviRect.translated(0, oneFrameHeight * i));
      p.setPen(Qt::NoPen);
    }

    // red frame for the current frame
    if (TApp::instance()->getCurrentFrame()->isEditingLevel() &&
        (isCurrentFrame || isSelected)) {
      QPen pen;
      pen.setColor(Qt::red);
      pen.setWidth(2);
      pen.setJoinStyle(Qt::RoundJoin);
      p.setPen(pen);

      p.drawRect(tmp_frameRect.adjusted(-1, -1, 2, 2));
      p.setPen(Qt::NoPen);
    }
  }

  // when in level edit mode, show the arrow indicating the current frame
  if (TApp::instance()->getCurrentFrame()->isEditingLevel())
    m_frameHeadGadget->draw(p, QColor(Qt::white), QColor(Qt::black));
}
Example #21
bool Utils::loadCvMat(const char* filename, cv::Mat& image)
{
    if (!filename || strlen(filename) == 0)
        return false;

    FILE* file = NULL;

#ifdef _WIN32
    errno_t err = fopen_s(&file, filename, "rb");
    if(!file || err) {
        std::cerr << "could not open file: " << filename << std::endl;
        return false;
    }
#else // any non-Windows platform; otherwise 'file' stays NULL and the reads below crash
    file = fopen(filename, "rb");
    if(!file || ferror(file)) {
        std::cerr << "could not open file: " << filename << std::endl;
        return false;
    }
#endif

    // read header
    int flags = 0;
    int headerSize = 0;
    cv::Size imgSize(0, 0);
    int dims = 0;
    cv::Size origSize(0, 0);
    cv::Point startPoint(0, 0);
    int size = 0;

    char buffer[4];
    fread(&buffer, sizeof(char), 3, file);
    buffer[3] = '\0';

    if (strcmp(buffer, "CVM") != 0) {
        std::cerr << "file does not have cvm format: " << filename << std::endl;
        fclose(file);
        return false;
    }

    // read header
    fread(&headerSize, sizeof(int), 1, file);
    fread(&flags, sizeof(int), 1, file);
    fread(&imgSize.width, sizeof(int), 1, file);
    fread(&imgSize.height, sizeof(int), 1, file);
    fread(&dims, sizeof(int), 1, file);
    fread(&origSize.width, sizeof(int), 1, file);
    fread(&origSize.height, sizeof(int), 1, file);
    fread(&startPoint.x, sizeof(int), 1, file);
    fread(&startPoint.y, sizeof(int), 1, file);
    fread(&size, sizeof(int), 1, file);

    image = cv::Mat(origSize.height, origSize.width, CV_MAT_TYPE(flags));

    // set file pointer
    fseek(file, headerSize, SEEK_SET);

    // read actual data
    fread(image.data, sizeof(unsigned char), size, file);

    // NOTE: Can be speeded up: don't create new image with the specified roi
    // like here, but only set image.dataStart and image.dataEnd accordingly
    // to point to the image region specified by the roi. [9/15/2011 Norman]
    image = image(cv::Rect(startPoint.x, startPoint.y, imgSize.width, imgSize.height));

    fclose(file);

    return true;
}
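The fread sequence above implies the following on-disk layout after the 3-byte "CVM" magic; this struct is a reconstruction from the reads, not an official format definition:

// Reconstructed CVM header fields, in file order (native-endian ints):
struct CvmHeader {
    int headerSize;  // offset of the pixel data from the start of the file
    int flags;       // OpenCV type flags, decoded with CV_MAT_TYPE
    int roiWidth;    // imgSize.width  - width of the stored ROI
    int roiHeight;   // imgSize.height - height of the stored ROI
    int dims;
    int origWidth;   // origSize.width  - full image width
    int origHeight;  // origSize.height - full image height
    int roiX;        // startPoint.x - ROI origin in the full image
    int roiY;        // startPoint.y
    int dataSize;    // number of pixel bytes that follow the header
};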
Example #22
void RenderThread::run()
{
	logInfo("RenderThread::run : starting thread");
	while (running)
	{
		running_mutex.lock();
		RenderRequest* job;
		if (preview_request != 0)
		{
			job = preview_request;
			preview_request = 0;
		}
		else if (image_request != 0)
		{
			job = image_request;
			image_request = 0;
		}
		else
		{
			rqueue_mutex.lock();
			if (request_queue.isEmpty())
			{
				// sleep only after checking for requests
				current_request = 0;
				rqueue_mutex.unlock();
				running_mutex.unlock();
				usleep(10000);
				continue;
			}
			else
			{
				job = request_queue.dequeue();
				logFine("RenderThread::run : dequeueing request %#x", (long)job);
				rqueue_mutex.unlock();
			}
		}
		render_loop_flag = true;
		current_request = job;

		// make sure there is something to calculate
		bool no_pos_xf = true;
		for (flam3_xform* xf = job->genome()->xform ;
			 xf < job->genome()->xform + job->genome()->num_xforms ; xf++)
			if (xf->density > 0.0)
			{
				no_pos_xf = false;
				break;
			}
		if (no_pos_xf)
		{
			logWarn(QString("RenderThread::run : no xform in request 0x%1").arg((long)job,0,16));
			running_mutex.unlock();
			continue;
		}

		logFiner(QString("RenderThread::run : rendering request 0x%1").arg((long)job,0,16));
		rtype = job->name();
		flame.time = job->time();
		flame.ngenomes = job->numGenomes();
		flam3_genome* genomes = new flam3_genome[flame.ngenomes]();
		flam3_genome* job_genome = job->genome();
		for (int n = 0 ; n < flame.ngenomes ; n++)
			flam3_copy(genomes + n, job_genome + n);
		flame.genomes = genomes;
		QSize imgSize(job->size());
		if (!imgSize.isEmpty())
		{
			for (int n = 0 ; n < flame.ngenomes ; n++)
			{
				flam3_genome* genome = genomes + n;
				// scale images, previews, etc. if necessary
				int width  = genome->width;
				genome->width  = imgSize.width();
				genome->height = imgSize.height();

				// "rescale" the image scale to maintain the camera
				// for smaller/larger image size
				genome->pixels_per_unit /= ((double)width) / genome->width;
			}
		}

		// Load image quality settings for Image, Preview, and File types
		switch (job->type())
		{
			case RenderRequest::File:
				rtype = QFileInfo(job->name()).fileName();
				// fall through: file renders use the image quality presets too

			case RenderRequest::Image:
			case RenderRequest::Preview:
			case RenderRequest::Queued:
			{
				const flam3_genome* g = job->imagePresets();
				if (g->nbatches > 0) // valid quality settings for nbatches > 0
					for (int n = 0 ; n < flame.ngenomes ; n++)
					{
						flam3_genome* genome = genomes + n;
						genome->sample_density =            g->sample_density;
						genome->spatial_filter_radius =     g->spatial_filter_radius;
						genome->spatial_oversample =        g->spatial_oversample;
						genome->nbatches =                  g->nbatches;
						genome->ntemporal_samples =         g->ntemporal_samples;
						genome->estimator =                 g->estimator;
						genome->estimator_curve =           g->estimator_curve;
						genome->estimator_minimum =         g->estimator_minimum;
					}
			}

			default:
				;
		}

		// add symmetry xforms before rendering
		for (int n = 0 ; n < flame.ngenomes ; n++)
		{
			flam3_genome* genome = genomes + n;
			if (genome->symmetry != 1)
				flam3_add_symmetry(genome, genome->symmetry);
		}

		int msize = channels * genomes->width * genomes->height;
		unsigned char* out = new unsigned char[msize];
		unsigned char* head = out;
		logFine("RenderThread::run : allocated %d bytes, rendering...", msize);
		init_status_cb();
		rendering = true;
		ptimer.start();
		int rv = flam3_render(&flame, out, 0, channels, alpha_trans, &_stats);
		millis = ptimer.elapsed();
		rendering = false;
		render_loop_flag = false;

		if (_stop_current_job) // if stopRendering() is called
		{
			logFine(QString("RenderThread::run : %1 rendering stopped").arg(rtype));
			delete[] head;
			for (int n = 0 ; n < flame.ngenomes ; n++)
				clear_cp(genomes + n, flam3_defaults_off);
			delete[] genomes;
			if (kill_all_jobs)
			{
				preview_request = 0;
				image_request = 0;
				rqueue_mutex.lock();
				request_queue.clear();
				rqueue_mutex.unlock();
				kill_all_jobs = false;
				emit flameRenderingKilled();
			}
			else
				if (job->type() == RenderRequest::Queued)
				{
					logFine("RenderThread::run : re-adding queued request");
					rqueue_mutex.lock();
					request_queue.prepend(job);
					rqueue_mutex.unlock();
				}

			_stop_current_job = false;
			running_mutex.unlock();
			continue;
		}

		QSize buf_size(genomes->width, genomes->height);
		if (img_format == RGB32)
		{
			if (buf_size != img_buf.size())
				img_buf = QImage(buf_size, QImage::Format_RGB32);
			if (rv == 0)
			{
				for (int h = 0 ; h < genomes->height ; h++)
					for (int w = 0 ; w < genomes->width ; w++, out += channels)
						img_buf.setPixel(QPoint(w, h), qRgb(out[0], out[1], out[2]));
			}
			else
				img_buf.fill(0);
		}
		else
		{
			if (buf_size != img_buf.size())
				img_buf = QImage(buf_size, QImage::Format_ARGB32);
			if (rv == 0)
			{
				for (int h = 0 ; h < genomes->height ; h++)
					for (int w = 0 ; w < genomes->width ; w++, out += channels)
						img_buf.setPixel(QPoint(w, h), qRgba(out[0], out[1], out[2], out[3]));
			}
			else
				img_buf.fill(0);
		}
		delete[] head;
		for (int n = 0 ; n < flame.ngenomes ; n++)
			clear_cp(genomes + n, flam3_defaults_off);
		delete[] genomes;

		if (job->type() == RenderRequest::File)
			img_buf.save(job->name(), "png", 100);

		job->setImage(img_buf);
		job->setFinished(true);

		// look for a free event
		RenderEvent* event = 0;
		foreach (RenderEvent* e, event_list)
			if (e->accepted())
			{
				e->accept(false);
				event = e;
				break;
			}

		if (!event)
		{
			logFinest(QString("RenderThread::run : adding event"));
			event = new RenderEvent();
			event->accept(false);
			event_list.append(event);
		}
		logFiner(QString("RenderThread::run : event list size %1")
				.arg(event_list.size()));

		event->setRequest(job);
		emit flameRendered(event);
		logFiner(QString("RenderThread::run : finished"));
		running_mutex.unlock();
	}

	logInfo("RenderThread::run : thread exiting");
}
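QImage::setPixel in the copy loops above is the slow per-pixel path; writing whole scanlines is the usual optimization. A sketch for the RGB32 branch (assuming the same buffer layout and 'out' advancing exactly as in the loop it would replace):

		// Faster copy of the flam3 output into img_buf (Format_RGB32 case).
		for (int h = 0 ; h < genomes->height ; h++)
		{
			QRgb* line = reinterpret_cast<QRgb*>(img_buf.scanLine(h));
			for (int w = 0 ; w < genomes->width ; w++, out += channels)
				line[w] = qRgb(out[0], out[1], out[2]);
		}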
Example #23
/************************ w is variable, and forward map ************************/
Mat corrector::latitudeCorrection5(Mat imgOrg, Point2i center, int radius, double w_longtitude, double w_latitude, distMapMode distMap, double theta_left, double phi_up, double camerFieldAngle, camMode camProjMode)
{
	if (!(camerFieldAngle > 0 && camerFieldAngle <= PI))
	{
		cout << "The parameter \"camerFieldAngle\" must be in the interval (0,PI]." << endl;
		return Mat();
	}
	double rateOfWindow = 0.9;

	//int width = imgOrg.size().width*rateOfWindow;
	//int height = width;

	//int width = max(imgOrg.cols, imgOrg.rows);
	int width = 512;
	int height = width;
	//int height = imgOrg.rows;


	Size imgSize(width, height);
	int center_x = imgSize.width / 2;
	int center_y = imgSize.height / 2;

	Mat retImg(imgSize, CV_8UC3, Scalar(0, 0, 0));

	double dx = camerFieldAngle / imgSize.width;
	double dy = camerFieldAngle / imgSize.height;

	//coordinates for the latitude map
	double latitude;
	double longitude;

	//unit sphere coordinates
	double x, y, z, r;

	//parametric coordinates of the sphere
	double Theta_sphere;
	double Phi_sphere;

	//polar coordinates for the fish-eye image
	double p;
	double theta;

	//cartesian coordinates
	double x_cart, y_cart;

	//image coordinates of imgOrg
	double u, v;
	Point pt, pt1, pt2, pt3, pt4;

	//image coordinates of imgRet
	int u_longtitude, v_latitude;
	Rect imgArea(0, 0, imgOrg.cols, imgOrg.rows);

	//offset of the imgRet origin
	double longitude_offset, latitude_offset;
	longitude_offset = (PI - camerFieldAngle) / 2;
	latitude_offset = (PI - camerFieldAngle) / 2;

	double foval = 0.0; // focal length


	cv::Mat_<Vec3b> _retImg = retImg;
	cv::Mat_<Vec3b> _imgOrg = imgOrg;

	int left, top;
	left = center.x - radius;
	top = center.y - radius;
	for (int j = top; j < top + 2 * radius; j++)
	{
		for (int i = left; i < left + 2 * radius; i++)
		{
			if (pow(i - center.x, 2) + pow(j - center.y, 2) > pow(radius, 2))
				continue;
			//origin image coordinates in pixels
			u = i;
			v = j;

			//convert to cartesian coordinates in the unit circle
			x_cart = (u - center.x);
			y_cart = -(v - center.y);

			//convert to polar axes
			theta = cvFastArctan(y_cart, x_cart)*PI / 180;
			p = sqrt(pow(x_cart, 2) + pow(y_cart, 2));

			//convert to sphere surface parametric coordinates
			Theta_sphere = p*(camerFieldAngle / 2) / radius;
			Phi_sphere = theta;

			//convert to sphere surface 3D coordinates
			x = sin(Theta_sphere)*cos(Phi_sphere);
			y = sin(Theta_sphere)*sin(Phi_sphere);
			z = cos(Theta_sphere);

			//convert to latitude coordinates
			latitude = acos(y);
			longitude = cvFastArctan(z, -x)*PI / 180;

			//transform the latitude to pixel coordinates
			double  limi_latitude = auxFunc(w_latitude, 0);
			double l = 0;
			if (latitude >= 0 && latitude < PI / 2)
			{
				l = limi_latitude - sin(w_latitude)*sqrt(cos(latitude)*cos(latitude) + (1 - sin(latitude))*(1 - sin(latitude))) / sin(PI - w_latitude - atan((1 - sin(latitude)) / abs(cos(latitude))));
			}
			else
			{
				l = limi_latitude + sin(w_latitude)*sqrt(cos(latitude)*cos(latitude) + (1 - sin(latitude))*(1 - sin(latitude))) / sin(PI - w_latitude - atan((1 - sin(latitude)) / abs(cos(latitude))));
			}
			u_longtitude = ((longitude - longitude_offset) / dx);
			// = (latitude - latitude_offset) / dy;
			v_latitude = l*imgSize.height / (2 * limi_latitude);

			// u is a column index (bounded by width), v a row index (bounded by height)
			if (u_longtitude < 0 || u_longtitude >= imgSize.width || v_latitude < 0 || v_latitude >= imgSize.height)
				continue;

			//perform the map from the origin image to the latitude map image
			_retImg.at<cv::Vec3b>(v_latitude, u_longtitude) = imgOrg.at<cv::Vec3b>(j, i);
		}
	}

	//imshow("org", _imgOrg);
	//imshow("ret", _retImg);
	//cv::waitKey();
#ifdef _DEBUG_
	cv::namedWindow("Corrected Image", CV_WINDOW_AUTOSIZE);
	imshow("Corrected Image", retImg);
	cv::waitKey();
#endif
	imwrite("ret.jpg", retImg);
	return retImg;
}
Example #24
    cv::Mat Deformation::DeformByMovingLeastSquares(const cv::Mat& inputImg, 
            const std::vector<int>& originIndex, const std::vector<int>& targetIndex)
    {
        int imgW = inputImg.cols;
        int imgH = inputImg.rows;
        cv::Size imgSize(imgW, imgH);
        cv::Mat resImg(imgSize, CV_8UC3);
        int markNum = originIndex.size() / 2;
        std::vector<double> wList(markNum);
        std::vector<MagicMath::Vector2> pHatList(markNum);
        std::vector<MagicMath::Vector2> qHatList(markNum);
        MagicMath::Vector2 pStar, qStar;
        std::vector<MagicMath::Vector2> pList(markNum);
        for (int mid = 0; mid < markNum; mid++)
        {
            pList.at(mid) = MagicMath::Vector2(originIndex.at(mid * 2), originIndex.at(mid * 2 + 1));
        }
        std::vector<MagicMath::Vector2> qList(markNum);
        for (int mid = 0; mid < markNum; mid++)
        {
            qList.at(mid) = MagicMath::Vector2(targetIndex.at(mid * 2), targetIndex.at(mid * 2 + 1));
        }
        std::vector<std::vector<double> > aMatList(markNum);
        std::vector<bool> visitFlag(imgW * imgH, 0);

        for (int hid = 0; hid < imgH; hid++)
        {
            for (int wid = 0; wid < imgW; wid++)
            {
                MagicMath::Vector2 pos(wid, hid);
                //calculate w
                bool isMarkVertex = false;
                int markedIndex = -1;
                double wSum = 0;
                for (int mid = 0; mid < markNum; mid++)
                {
                    //double dTemp = (pos - pList.at(mid)).LengthSquared(); //variable
                    double dTemp = (pos - pList.at(mid)).Length();
                    //dTemp = pow(dTemp, 1.25);
                    if (dTemp < 1.0e-15)
                    {
                        isMarkVertex = true;
                        markedIndex = mid;
                        break;
                    }
                    dTemp = pow(dTemp, 1.25);
                    wList.at(mid) = 1.0 / dTemp;
                    wSum += wList.at(mid);
                }
                //
                if (isMarkVertex)
                {
                    const unsigned char* pPixel = inputImg.ptr(hid, wid);
                    int targetH = targetIndex.at(2 * markedIndex + 1);
                    int targetW = targetIndex.at(2 * markedIndex);
                    unsigned char* pResPixel = resImg.ptr(targetH, targetW);
                    pResPixel[0] = pPixel[0];
                    pResPixel[1] = pPixel[1];
                    pResPixel[2] = pPixel[2];
                    visitFlag.at(targetH * imgW + targetW) = 1;
                }
                else
                {
                    //Calculate pStar qStar
                    pStar = MagicMath::Vector2(0.0, 0.0);
                    qStar = MagicMath::Vector2(0.0, 0.0);
                    for (int mid = 0; mid < markNum; mid++)
                    {
                        pStar += (pList.at(mid) * wList.at(mid));
                        qStar += (qList.at(mid) * wList.at(mid));
                    }
                    pStar /= wSum;
                    qStar /= wSum;

                    //Calculate pHat qHat
                    for (int mid = 0; mid < markNum; mid++)
                    {
                        pHatList.at(mid) = pList.at(mid) - pStar;
                        qHatList.at(mid) = qList.at(mid) - qStar;
                    }
                    
                    //Calculate A
                    MagicMath::Vector2 col0 = pos - pStar;
                    MagicMath::Vector2 col1(col0[1], -col0[0]);
                    for (int mid = 0; mid < markNum; mid++)
                    {
                        std::vector<double> aMat(4);
                        MagicMath::Vector2 row1(pHatList.at(mid)[1], -pHatList.at(mid)[0]);
                        aMat.at(0) = pHatList.at(mid) * col0 * wList.at(mid);
                        aMat.at(1) = pHatList.at(mid) * col1 * wList.at(mid);
                        aMat.at(2) = row1 * col0 * wList.at(mid);
                        aMat.at(3) = row1 * col1 * wList.at(mid);
                        aMatList.at(mid) = aMat;
                    }

                    //Calculate fr(v)
                    MagicMath::Vector2 fVec(0, 0);
                    for (int mid = 0; mid < markNum; mid++)
                    {
                        fVec[0] += (qHatList.at(mid)[0] * aMatList.at(mid).at(0) + qHatList.at(mid)[1] * aMatList.at(mid).at(2));
                        fVec[1] += (qHatList.at(mid)[0] * aMatList.at(mid).at(1) + qHatList.at(mid)[1] * aMatList.at(mid).at(3));
                    }

                    //Calculate target position
                    fVec.Normalise();
                    MagicMath::Vector2 targetPos = fVec * ((pos - pStar).Length()) + qStar;
                    int targetW = targetPos[0];
                    int targetH = targetPos[1];
                    if (targetH >= 0 && targetH < imgH && targetW >= 0 && targetW < imgW)
                    {
                        const unsigned char* pPixel = inputImg.ptr(hid, wid);
                        unsigned char* pResPixel = resImg.ptr(targetH, targetW);
                        pResPixel[0] = pPixel[0];
                        pResPixel[1] = pPixel[1];
                        pResPixel[2] = pPixel[2];
                        visitFlag.at(targetH * imgW + targetW) = 1;
                    }
                }
            }
        }

        std::vector<int> unVisitVecH;
        std::vector<int> unVisitVecW;
        for (int hid = 0; hid < imgH; hid++)
        {
            int baseIndex = hid * imgW;
            for (int wid = 0; wid < imgW; wid++)
            {
                if (!visitFlag.at(baseIndex + wid))
                {
                    unVisitVecH.push_back(hid);
                    unVisitVecW.push_back(wid);
                }
            }
        }
        int minAcceptSize = 4;
        int fillTime = 1;
        while (unVisitVecH.size() > 0)
        {
            DebugLog << "unVisit number: " << unVisitVecH.size() << std::endl;
            std::vector<int> unVisitVecHCopy = unVisitVecH;
            std::vector<int> unVisitVecWCopy = unVisitVecW;
            unVisitVecH.clear();
            unVisitVecW.clear();
            int unVisitSize = unVisitVecHCopy.size();
            for (int uid = 0; uid < unVisitSize; uid++)
            {
                MagicMath::Vector3 avgColor(0, 0, 0);
                int hid = unVisitVecHCopy.at(uid);
                int wid = unVisitVecWCopy.at(uid);
                int avgSize = 0;
                if ((hid - 1) >= 0 && visitFlag.at((hid - 1) * imgW + wid))
                {
                    unsigned char* pPixel = resImg.ptr(hid - 1, wid);
                    avgColor[0] += pPixel[0];
                    avgColor[1] += pPixel[1];
                    avgColor[2] += pPixel[2];
                    avgSize++;
                }
                if ((hid + 1) < imgH && visitFlag.at((hid + 1) * imgW + wid))
                {
                    unsigned char* pPixel = resImg.ptr(hid + 1, wid);
                    avgColor[0] += pPixel[0];
                    avgColor[1] += pPixel[1];
                    avgColor[2] += pPixel[2];
                    avgSize++;
                }
                if ((wid - 1) >= 0 && visitFlag.at(hid * imgW + wid - 1))
                {
                    unsigned char* pPixel = resImg.ptr(hid, wid - 1);
                    avgColor[0] += pPixel[0];
                    avgColor[1] += pPixel[1];
                    avgColor[2] += pPixel[2];
                    avgSize++;
                }
                if ((wid + 1) < imgW && visitFlag.at(hid * imgW + wid + 1))
                {
                    unsigned char* pPixel = resImg.ptr(hid, wid + 1);
                    avgColor[0] += pPixel[0];
                    avgColor[1] += pPixel[1];
                    avgColor[2] += pPixel[2];
                    avgSize++;
                }
                if (avgSize >= minAcceptSize)
                {
                    visitFlag.at(hid * imgW + wid) = 1;
                    avgColor /= avgSize;
                    unsigned char* pFillPixel = resImg.ptr(hid, wid);
                    pFillPixel[0] = avgColor[0];
                    pFillPixel[1] = avgColor[1];
                    pFillPixel[2] = avgColor[2];
                }
                else
                {
                    unVisitVecH.push_back(hid);
                    unVisitVecW.push_back(wid);
                }
            }
            //relax the acceptance threshold as passes accumulate, so isolated
            //holes eventually qualify for filling
            if (fillTime == 4 || fillTime == 6 || fillTime == 8)
            {
                minAcceptSize--;
            }
            fillTime++;
        }
        //alternative hole filling: inverse-distance weighted search along rows and columns (kept disabled)
        /*for (int hid = 0; hid < imgH; hid++)
        {
            int baseIndex = hid * imgW;
            for (int wid = 0; wid < imgW; wid++)
            {
                if (!visitFlag.at(baseIndex + wid))
                {
                    double wSum = 0;
                    MagicMath::Vector3 avgColor(0, 0, 0);
                    for (int wRight = wid + 1; wRight < imgW; wRight++)
                    {
                        if (visitFlag.at(baseIndex + wRight))
                        {
                            double wTemp = 1.0 / (wRight - wid);
                            wSum += wTemp;
                            unsigned char* pPixel = resImg.ptr(hid, wRight);
                            avgColor[0] += wTemp * pPixel[0];
                            avgColor[1] += wTemp * pPixel[1];
                            avgColor[2] += wTemp * pPixel[2];
                            break;
                        }
                    }
                    for (int wLeft = wid - 1; wLeft >= 0; wLeft--)
                    {
                        if (visitFlag.at(baseIndex + wLeft))
                        {
                            double wTemp = 1.0 / (wid - wLeft);
                            wSum += wTemp;
                            unsigned char* pPixel = resImg.ptr(hid, wLeft);
                            avgColor[0] += wTemp * pPixel[0];
                            avgColor[1] += wTemp * pPixel[1];
                            avgColor[2] += wTemp * pPixel[2];
                            break;
                        }
                    }
                    for (int hUp = hid - 1; hUp >= 0; hUp--)
                    {
                        if (visitFlag.at(hUp * imgW + wid))
                        {
                            double wTemp = 1.0 / (hid - hUp);
                            unsigned char* pPixel = resImg.ptr(hUp, wid);
                            wSum += wTemp;
                            avgColor[0] += wTemp * pPixel[0];
                            avgColor[1] += wTemp * pPixel[1];
                            avgColor[2] += wTemp * pPixel[2];
                            break;
                        }
                    }
                    for (int hDown = hid + 1; hDown < imgH; hDown++)
                    {
                        if (visitFlag.at(hDown * imgW + wid))
                        {
                            double wTemp = 1.0 / (hDown - hid);
                            unsigned char* pPixel = resImg.ptr(hDown, wid);
                            wSum += wTemp;
                            avgColor[0] += wTemp * pPixel[0];
                            avgColor[1] += wTemp * pPixel[1];
                            avgColor[2] += wTemp * pPixel[2];
                            break;
                        }
                    }
                    if (wSum > 1.0e-15)
                    {
                        avgColor /= wSum;
                    }
                    unsigned char* pFillPixel = resImg.ptr(hid, wid);
                    pFillPixel[0] = avgColor[0];
                    pFillPixel[1] = avgColor[1];
                    pFillPixel[2] = avgColor[2];
                }
            }
        }*/
        return resImg;
    }
//longitude-latitude correction using a forward or reverse mapping
Mat corrector::latitudeCorrection(Mat imgOrg, Point2i center, int radius, double camerFieldAngle, CorrectType type)
{
	if (!(camerFieldAngle > 0 && camerFieldAngle <= PI))
	{
		cout << "The parameter \"camerFieldAngle\" must be in the interval (0,PI]." << endl;
		return Mat();
	}
	double rateOfWindow = 0.9;
	int width = imgOrg.size().width*rateOfWindow;
	int height = width;
	Size imgSize(width, height);

	Mat retImg(imgSize, CV_8UC3, Scalar(0, 0, 0));

	double dx = camerFieldAngle / imgSize.width;
	double dy = dx;

	//latitude map coordinates
	double latitude;
	double longitude;

	//unit sphere coordinates
	double x, y, z;

	//parametric coordinates on the sphere
	double Theta_sphere;
	double Phi_sphere;

	//polar coordinates on the fish-eye image
	double p;
	double theta;

	//Cartesian coordinates
	double x_cart, y_cart;

	//image coordinates in imgOrg
	int u, v;

	//image coordinates in imgRet
	int u_latitude, v_latitude;

	//offset of the imgRet origin
	double longitude_offset, latitude_offset;
	longitude_offset = (PI - camerFieldAngle) / 2;
	latitude_offset = (PI - camerFieldAngle) / 2;

	cv::Mat_<Vec3b> _retImg = retImg;
	cv::Mat_<Vec3b> _imgOrg = imgOrg;

	//perform the correction according to the requested type
	switch (type)
	{
	case Forward:
		int left, top;
		left = center.x - radius;
		top = center.y - radius;
		for (int j = top; j < top + 2 * radius; j++)
		{
			for (int i = left; i < left + 2 * radius; i++)
			{
				if (pow(i - center.x, 2) + pow(j - center.y, 2) > pow(radius, 2))
					continue;
				//original image coordinates in pixels
				u = i;
				v = j;

				double R = radius / sin(camerFieldAngle / 2);

				//convert to Cartesian coordinates on the unit circle
				x_cart = (u - center.x) / R;
				y_cart = -(v - center.y) / R;

				//convert to polar coordinates
				theta = cvFastArctan(y_cart, x_cart)*PI / 180;
				p = sqrt(pow(x_cart, 2) + pow(y_cart, 2));

				//convert to parametric sphere coordinates
				Theta_sphere = asin(p);
				Phi_sphere = theta;

				//convert to 3D coordinates on the unit sphere
				x = sin(Theta_sphere)*cos(Phi_sphere);
				y = sin(Theta_sphere)*sin(Phi_sphere);
				z = cos(Theta_sphere);

				//convert to latitude-longitude coordinates
				latitude = acos(y);
				longitude = cvFastArctan(z, -x)*PI / 180;

				//map latitude-longitude to pixel coordinates
				u_latitude = ((longitude - longitude_offset) / dx);
				v_latitude = ((latitude - latitude_offset) / dy);

				//note: u_latitude indexes columns and v_latitude rows, so they
				//must be checked against width and height respectively
				if (u_latitude < 0 || u_latitude >= imgSize.width || v_latitude < 0 || v_latitude >= imgSize.height)
					continue;

				//copy the pixel from the original image into the latitude map
				_retImg(v_latitude, u_latitude)[0] = _imgOrg(j, i)[0];
				_retImg(v_latitude, u_latitude)[1] = _imgOrg(j, i)[1];
				_retImg(v_latitude, u_latitude)[2] = _imgOrg(j, i)[2];
			}
		}

		break;

	case Reverse:

		for (int j = 0; j < imgSize.height; j++)
		{

			latitude = latitude_offset + j*dy;
			for (int i = 0; i < imgSize.width; i++)
			{

				longitude = longitude_offset + i*dx;
				//convert from latitude-longitude coordinates to the unit sphere
				x = -sin(latitude)*cos(longitude);
				y = cos(latitude);
				z = sin(latitude)*sin(longitude);

				//convert from the unit sphere to parametric sphere coordinates
				Theta_sphere = acos(z);
				Phi_sphere = cvFastArctan(y, x);//returns degrees
				Phi_sphere = Phi_sphere*PI / 180;//convert degrees to radians

				//convert from parametric sphere coordinates to fish-eye polar coordinates
				p = sin(Theta_sphere);
				theta = Phi_sphere;

				//convert from fish-eye polar coordinates to Cartesian coordinates
				x_cart = p*cos(theta);
				y_cart = p*sin(theta);

				double R = radius;
				//convert from Cartesian coordinates to image coordinates
				u = x_cart*R + center.x;
				v = -y_cart*R + center.y;

				//guard against sampling outside the source image
				if (u < 0 || u >= imgOrg.cols || v < 0 || v >= imgOrg.rows)
					continue;

				_retImg.at<Vec3b>(j, i) = _imgOrg.at<Vec3b>(v, u);
			}
		}

		break;
	default:
		cout << "The CorrectType is Wrong! It should be \"Forward\" or \"Reverse\"." << endl;
		return Mat();
	}

	//imwrite("C:\\Users\\Joker\\Desktop\\ret4.jpg", retImg);
	//imshow("org", _imgOrg);
	//imshow("ret", _retImg);
	//cv::waitKey();
#ifdef _DEBUG_
	cv::namedWindow("Corrected Image", CV_WINDOW_AUTOSIZE);
	imshow("Corrected Image", retImg);
	cv::waitKey();
#endif
	return retImg;
}
/*********************w is variable********************************/
Mat corrector::latitudeCorrection4(Mat imgOrg, Point2i center, int radius, double w_longtitude, double w_latitude, distMapMode distMap, double theta_left, double phi_up, double camerFieldAngle, camMode camProjMode)
{
	if (!(camerFieldAngle > 0 && camerFieldAngle <= PI))
	{
		cout << "The parameter \"camerFieldAngle\" must be in the interval (0,PI]." << endl;
		return Mat();
	}
	//fixed output size
	int width = 512;
	int height = width;


	Size imgSize(width, height);
	int center_x = imgSize.width / 2;
	int center_y = imgSize.height / 2;

	Mat retImg(imgSize, CV_8UC3, Scalar(0, 0, 0));

	double dx = camerFieldAngle / imgSize.width;
	double dy = camerFieldAngle / imgSize.height;

	//latitude map coordinates
	double latitude;
	double longitude;

	//unit sphere coordinates
	double x, y, z;

	//parametric coordinates on the sphere
	double Theta_sphere;
	double Phi_sphere;

	//polar coordinates on the fish-eye image
	double p;
	double theta;

	//Cartesian coordinates
	double x_cart, y_cart;

	//image coordinates in imgOrg
	double u, v;
	Point pt;
	Rect imgArea(0, 0, imgOrg.cols, imgOrg.rows);

	//offset of the imgRet origin
	double longitude_offset, latitude_offset;
	longitude_offset = (PI - camerFieldAngle) / 2;
	latitude_offset = (PI - camerFieldAngle) / 2;

	double foval = 0.0;//focal length


	cv::Mat_<Vec3b> _retImg = retImg;
	cv::Mat_<Vec3b> _imgOrg = imgOrg;

	//perform the correction according to the camera projection model
	double limi_latitude = 2 * auxFunc(w_latitude, 0);
	double limi_longtitude = 2 * auxFunc(w_longtitude, 0);
	for (int j = 0; j < imgSize.height; j++)
	{

		for (int i = 0; i < imgSize.width; i++)
		{
			Point3f tmpPt(i - center_x, center_y - j, 600);//the last parameter sets the focal length of the virtual imaging plane
			double normPt = norm(tmpPt);

			switch (distMap)
			{
			case PERSPECTIVE:

				tmpPt.x /= normPt;
				tmpPt.y /= normPt;
				tmpPt.z /= normPt;

				x = tmpPt.x;
				y = tmpPt.y;
				z = tmpPt.z;

				break;
			case LATITUDE_LONGTITUDE:

				//nonlinear latitude spacing controlled by w_latitude
				latitude = getPhi1((double)j*limi_latitude / imgSize.height, w_latitude);
				//longitude could be remapped the same way:
				//longitude = getPhi1((double)i * limi_longtitude / imgSize.width, w_longtitude);
				longitude = longitude_offset + i*dx;

				//convert from latitude-longitude coordinates to the unit sphere
				x = -sin(latitude)*cos(longitude);
				y = cos(latitude);
				z = sin(latitude)*sin(longitude);

				break;
			default:
				break;
			}

			if (distMap == PERSPECTIVE)
			{
				cv::Mat curPt(cv::Point3f(x, y, z));
				std::vector<cv::Point3f> pts;

				//rows of the rotation matrix that turns the view left by
				//theta_left and up by phi_up (a combined east/south rotation)
				pts.push_back(cv::Point3f(cos(theta_left), 0, sin(theta_left)));
				pts.push_back(cv::Point3f(sin(phi_up)*sin(theta_left), cos(phi_up), -sin(phi_up)*cos(theta_left)));
				pts.push_back(cv::Point3f(-cos(phi_up)*sin(theta_left), sin(phi_up), cos(phi_up)*cos(theta_left)));

				cv::Mat revert = cv::Mat(pts).reshape(1).t();

				cv::Mat changed(revert*curPt);

				cv::Mat_<double> changed_double;
				changed.convertTo(changed_double, CV_64F);

				x = changed_double.at<double>(0, 0);
				y = changed_double.at<double>(1, 0);
				z = changed_double.at<double>(2, 0);
			}

			//convert from unit sphere coordinates to parametric sphere coordinates
			Theta_sphere = acos(z);
			Phi_sphere = cvFastArctan(y, x);//returns degrees
			Phi_sphere = Phi_sphere*PI / 180;//convert degrees to radians


			switch (camProjMode)
			{
			case STEREOGRAPHIC:
				foval = radius / (2 * tan(camerFieldAngle / 4));
				p = 2 * foval*tan(Theta_sphere / 2);
				break;
			case EQUIDISTANCE:
				foval = radius / (camerFieldAngle / 2);
				p = foval*Theta_sphere;
				break;
			case EQUISOLID:
				foval = radius / (2 * sin(camerFieldAngle / 4));
				p = 2 * foval*sin(Theta_sphere / 2);
				break;
			case ORTHOGONAL:
				foval = radius / sin(camerFieldAngle / 2);
				p = foval*sin(Theta_sphere);
				break;
			default:
				cout << "The camera mode hasn't been choose!" << endl;
			}
			//p (the radial distance) was set by the projection model above;
			//the azimuth carries over directly
			theta = Phi_sphere;

			//convert from fish-eye polar coordinates to Cartesian coordinates
			x_cart = p*cos(theta);
			y_cart = p*sin(theta);

			//convert from Cartesian coordinates to image coordinates
			u = x_cart + center.x;
			v = -y_cart + center.y;

			pt = Point(u, v);

			if (!pt.inside(imgArea))
			{
				continue;
			}
			else
			{
				_retImg.at<Vec3b>(j, i) = _imgOrg.at<Vec3b>(pt);
			}


		}
	}

	//imshow("org", _imgOrg);
	//imshow("ret", _retImg);
	//cv::waitKey();
#ifdef _DEBUG_
	cv::namedWindow("Corrected Image", CV_WINDOW_AUTOSIZE);
	imshow("Corrected Image", retImg);
	cv::waitKey();
#endif
	imwrite("ret.jpg", retImg);
	return retImg;
}