void BleWindowsCaptureSource::run()
{
    // TODO: allow choosing which screen to capture
    // QGuiApplication::screens();
    while (!m_stop) {
        QElapsedTimer elapsedTimer;
        elapsedTimer.start();

        QScreen *screen = QGuiApplication::primaryScreen();

        if (screen) {
            QPixmap pixmap = screen->grabWindow(m_wid, m_x, m_y, m_width, m_height);
#if 1
            // Draw the cursor onto the image when it lies inside the captured screen
            QRect desktopRect = QRect(QPoint(0, 0), screen->size());
            if (desktopRect.contains(QCursor::pos())) {
                drawCursor(&pixmap);
            }
#endif
            QImage image = pixmap.toImage();

            m_modifyMutex.lock();           // guard m_image while it is replaced

            BleImage be;
            be.width = image.width();
            be.height = image.height();

            int imageSize = be.width * be.height * 3;
            be.data = new char[imageSize];

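            // Wrap the QImage pixels in a 4-channel IplImage header (no copy),
            // then convert BGRA -> BGR directly into the newly allocated be.data.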
            IplImage *oriImage = cvCreateImageHeader(cvSize(image.width(), image.height()), IPL_DEPTH_8U, 4);
            cvSetData(oriImage, image.bits(), image.bytesPerLine());

            IplImage *dstImage = cvCreateImageHeader(cvSize(image.width(), image.height()), IPL_DEPTH_8U, 3);
            cvSetData(dstImage, be.data, be.width * 3);

            cvCvtColor(oriImage, dstImage, CV_BGRA2BGR);

            be.dataSize = imageSize;
            be.format = BleImage_Format_BGR24;

            m_image = be;
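            // NOTE: ownership of be.data passes to m_image here; unless BleImage's
            // assignment releases the previous buffer, each frame leaks its predecessor.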

            cvReleaseImageHeader(&oriImage);
            cvReleaseImageHeader(&dstImage);

            m_modifyMutex.unlock();        // m_image is consistent again
        }

        int elapsedMs = elapsedTimer.elapsed();
        int needSleepMs = m_interval - elapsedMs;
        if (needSleepMs < 0) {
            needSleepMs = 0;
        }
        msleep(needSleepMs);
    }

    log_trace("BleWindowsCaptureSource exit normally.");
}
Example #2
CLIFIntegralResult
clifGrayscaleIntegral(const IplImage* source,
                      CLIFEnvironmentData* data,
                      const cl_bool use_opencl)
{
    CLIFIntegralResult ret;
    
    if(!use_opencl) {
        IplImage* grayscale = cvCreateImage(cvSize(source->width, source->height), IPL_DEPTH_8U, 1);
        cvCvtColor(source, grayscale, CV_BGR2GRAY);
        ret.image = cvCreateMat(source->height + 1, source->width + 1, CV_32SC1);
        ret.square_image = cvCreateMat(source->height + 1, source->width + 1, CV_64FC1);
        cvIntegral(grayscale, ret.image, ret.square_image);
        cvReleaseImage(&grayscale);
        
        return ret;
    }
    
    cl_int error = CL_SUCCESS;
    
    // Init buffer
    error = clEnqueueWriteBuffer(data->environment.queue, data->bgr_to_gray_data.buffers[0], CL_FALSE, 0, source->widthStep * source->height, source->imageData, 0, NULL, NULL);
    clCheckOrExit(error);
    
    // Run kernel
    error = clEnqueueNDRangeKernel(data->environment.queue, data->environment.kernels[0], 2, NULL, data->bgr_to_gray_data.global_size, data->bgr_to_gray_data.local_size, 0, NULL, NULL);
    clCheckOrExit(error);
    
    // Pass the grayscale output buffer as input to the row-sum kernel
    error = clSetKernelArg(data->environment.kernels[1], 0, sizeof(cl_mem), &(data->bgr_to_gray_data.buffers[1]));
    clCheckOrExit(error);
    
    // Run sum rows kernel
    error = clEnqueueNDRangeKernel(data->environment.queue, data->environment.kernels[1], 1, NULL, &(data->integral_image_data.global_size[0]), &(data->integral_image_data.local_size[0]), 0, NULL, NULL);
    clCheckOrExit(error);
    
    // Run sum cols kernel
    error = clEnqueueNDRangeKernel(data->environment.queue, data->environment.kernels[2], 1, NULL, &(data->integral_image_data.global_size[1]), &(data->integral_image_data.local_size[1]), 0, NULL, NULL);
    clCheckOrExit(error);
    
    // Read result
    cl_uint* result = (cl_uint*)clEnqueueMapBuffer(data->environment.queue, data->integral_image_data.buffers[3], CL_TRUE, CL_MAP_READ, 0, (source->width + 1) * (source->height + 1) * sizeof(cl_uint), 0, NULL, NULL, &error);
    clCheckOrExit(error);
    
    cl_ulong* square_result = (cl_ulong*)clEnqueueMapBuffer(data->environment.queue, data->integral_image_data.buffers[4], CL_TRUE, CL_MAP_READ, 0, (source->width + 1) * (source->height + 1) * sizeof(cl_ulong), 0, NULL, NULL, &error);
    clCheckOrExit(error);
    
    data->integral_image_data.ptr = result;
    
    // Wrap the mapped OpenCL buffers in CvMat headers (no copy)
    ret.image = cvCreateMatHeader(source->height + 1, source->width + 1, CV_32SC1);
    cvSetData(ret.image, result, (source->width + 1) * sizeof(cl_uint));
    ret.square_image = cvCreateMatHeader(source->height + 1, source->width + 1, CV_64FC1);
    cvSetData(ret.square_image, square_result, (source->width + 1) * sizeof(cl_ulong));
    return ret;
}
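Example #3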
void BleImageProcess::paintEvent(QPaintEvent *event)
{
    Q_UNUSED(event);

    QPainter p(this);

    p.setRenderHint(QPainter::SmoothPixmapTransform);

    // background
    p.fillRect(rect(), QBrush(QColor(48, 48, 48)));

    // draw each source element
    for (int i = 0; i < m_sources.size(); ++i) {
        const SourcePair & pair = m_sources.at(i);
        BleSourceAbstract *s = pair.source;

        // TODO: the image data may be written concurrently by the capture thread
        BleImage image = s->getImage();

        if (image.dataSize <= 0) continue;

        QImage qimage;
        if (image.format == BleImage_Format_BGR24) {
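            // Both headers alias image.data, so cvCvtColor swaps the channels in
            // place (BGR -> RGB) to match the QImage::Format_RGB888 wrapper below.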
            IplImage *oriImage = cvCreateImageHeader(cvSize(image.width, image.height), IPL_DEPTH_8U, 3);
            cvSetData(oriImage, image.data, image.width*3);

            IplImage *dstImage = cvCreateImageHeader(cvSize(image.width, image.height), IPL_DEPTH_8U, 3);
            cvSetData(dstImage, image.data, image.width*3);

            cvCvtColor(oriImage, dstImage, CV_BGR2RGB);

            cvReleaseImageHeader(&oriImage);
            cvReleaseImageHeader(&dstImage);
        }
        qimage = QImage((uchar*)image.data, image.width, image.height, QImage::Format_RGB888);

        p.drawPixmap(pair.rect, QPixmap::fromImage(qimage));
        // p.drawImage(pair.rect, qimage);
    }

    if (m_activePair && m_activePair->rect.isValid()) {
        QPen pen(Qt::SolidLine);

        pen.setColor(Qt::white);
        pen.setWidth(2);
        pen.setStyle(Qt::DotLine);
        p.setPen(pen);
        p.drawRect(m_activePair->rect);

        QRect topLeftRect(m_activePair->rect.x(), m_activePair->rect.y(), 8, 8);
        p.fillRect(topLeftRect, QBrush(Qt::red));

        QRect bottomRightRect(m_activePair->rect.bottomRight().x(), m_activePair->rect.bottomRight().y(), -8, -8);
        p.fillRect(bottomRightRect, QBrush(Qt::red));
    }
}
Example #4
void ofxImageROI::projectToTarget(ofPoint *dstPoints)
{
	CvPoint2D32f cvsrc[4];
	CvPoint2D32f cvdst[4];

	cvsrc[0].x = cornerPoints[0].x;
	cvsrc[0].y = cornerPoints[0].y;
	cvsrc[1].x = cornerPoints[1].x;
	cvsrc[1].y = cornerPoints[1].y;
	cvsrc[2].x = cornerPoints[2].x;
	cvsrc[2].y = cornerPoints[2].y;
	cvsrc[3].x = cornerPoints[3].x;
	cvsrc[3].y = cornerPoints[3].y;

	cvdst[0].x = dstPoints[0].x;
	cvdst[0].y = dstPoints[0].y;
	cvdst[1].x = dstPoints[1].x;
	cvdst[1].y = dstPoints[1].y;
	cvdst[2].x = dstPoints[2].x;
	cvdst[2].y = dstPoints[2].y;
	cvdst[3].x = dstPoints[3].x;
	cvdst[3].y = dstPoints[3].y;

	CvMat* src_mat = cvCreateMatHeader(4, 2, CV_32FC1);
	CvMat* dst_mat = cvCreateMatHeader(4, 2, CV_32FC1);

	cvSetData(src_mat, cvsrc, sizeof(CvPoint2D32f));
	cvSetData(dst_mat, cvdst, sizeof(CvPoint2D32f));
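	// src_mat and dst_mat are plain headers over the stack arrays above;
	// each CvPoint2D32f row holds one (x, y) correspondence.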

    CvMat* translate = cvCreateMat(3, 3, CV_32FC1); // allocate the homography matrix
    cvFindHomography(src_mat, dst_mat, translate);  // compute the homography

    // When the image size has changed (buffers not yet allocated)
    if (this->bAllocated() == true && inputImg.bAllocated == false) {
        projectedImage.allocate(this->width, this->height, OF_IMAGE_COLOR);
        inputImg.allocate(this->width, this->height);
        outputImg.allocate(this->width, this->height);
    }

    inputImg.setFromPixels(this->getPixelsRef()); // load the source pixels
    cvWarpPerspective(inputImg.getCvImage(), outputImg.getCvImage(), translate, CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS, cvScalarAll(255)); // perspective warp

    // Convert the warped result back into an ofImage
    projectedImage.setFromPixels(outputImg.getPixels(), outputImg.getWidth(), outputImg.getHeight(), OF_IMAGE_COLOR);

    cvReleaseMat(&translate);
    cvReleaseMat(&src_mat);
    cvReleaseMat(&dst_mat);
}
Example #5
static CvSpillTree*
icvCreateSpillTree( const CvMat* raw_data,
		    const int naive,
		    const double rho,
		    const double tau )
{
  int n = raw_data->rows;
  int d = raw_data->cols;

  CvSpillTree* tr = (CvSpillTree*)cvAlloc( sizeof(CvSpillTree) );
  tr->root = (CvSpillTreeNode*)cvAlloc( sizeof(CvSpillTreeNode) );
  memset(tr->root, 0, sizeof(CvSpillTreeNode));
  tr->refmat = (CvMat**)cvAlloc( sizeof(CvMat*)*n );
  tr->total = n;
  tr->naive = naive;
  tr->rho = rho;
  tr->tau = tau;
  tr->type = raw_data->type;

  // thread a linked list of all input points off the root node
  tr->root->lc = (CvSpillTreeNode*)cvAlloc( sizeof(CvSpillTreeNode) );
  memset(tr->root->lc, 0, sizeof(CvSpillTreeNode));
  tr->root->lc->center = cvCreateMatHeader( 1, d, tr->type );
  cvSetData( tr->root->lc->center, _dispatch_mat_ptr(raw_data, 0), raw_data->step );
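  // center is a 1 x d header into the first row of raw_data; no point data is copied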
  tr->refmat[0] = tr->root->lc->center;
  tr->root->lc->lc = NULL;
  tr->root->lc->leaf = true;
  tr->root->lc->i = 0;
  CvSpillTreeNode* node = tr->root->lc;
  for ( int i = 1; i < n; i++ )
    {
      CvSpillTreeNode* newnode = (CvSpillTreeNode*)cvAlloc( sizeof(CvSpillTreeNode) );
      memset(newnode, 0, sizeof(CvSpillTreeNode));
      newnode->center = cvCreateMatHeader( 1, d, tr->type );
      cvSetData( newnode->center, _dispatch_mat_ptr(raw_data, i*d), raw_data->step );
      tr->refmat[i] = newnode->center;
      newnode->lc = node;
      newnode->i = i;
      newnode->leaf = true;
      newnode->rc = NULL;
      node->rc = newnode;
      node = newnode;
    }
  tr->root->rc = node;
  tr->root->cc = n;
  icvDFSInitSpillTreeNode( tr, d, tr->root );
  return tr;
}
Example #6
void showInputLayer2DImage(LayerStack* stack){
	//printf("show image\n");
	int scale = 10;
	CvMat* image;
	int width = (int)sqrt(stack->layerSizes[0]); // the input layer is width x width pixels
	image = cvCreateMat(width*scale, width*scale, CV_8UC1);
	//upscaled =cvCreateMat(width*scale,width*scale,CV_8UC1);
	unsigned char* data = malloc((width*width*scale*scale)*sizeof(unsigned char));
	assert(data!=NULL);
	for(int i=0;i<width*scale;i++){
		for(int j=0;j<width*scale;j++){
			//printf("to %d from %d\n",i*width*scale+j,(i/scale)*width+j/scale);
			data[i*width*scale+j] = 255-(stack->layers[0][(i/scale)*width+j/scale]*255);
			//printf("neuron: %.3f, pixel: %d \n",stack->stack[0]->neurons[i],data[i]);
		}
	}
	cvSetData(image,data,width*scale);
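	// image borrows `data`, so the buffer must stay alive until the window closes
	// (hence free(data) only after cvReleaseMat below)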

	cvNamedWindow("NeuroOutput",CV_WINDOW_AUTOSIZE);
	cvShowImage("NeuroOutput",image);
	cvWaitKey(0);
	
	cvReleaseMat(&image);
	free(data);
	//cvReleaseMat(&upscaled);
}
Example #7
IplImage* CvCaptureCAM_Aravis::retrieveFrame(int)
{
    if(framebuffer) {
        int depth = 0, channels = 0;
        switch(pixelFormat) {
            case ARV_PIXEL_FORMAT_MONO_8:
                depth = IPL_DEPTH_8U;
                channels = 1;
                break;
            case ARV_PIXEL_FORMAT_MONO_12:
                depth = IPL_DEPTH_16U;
                channels = 1;
                break;
        }
        if(depth && channels) {
            IplImage src;
            cvInitImageHeader( &src, cvSize( width, height ), depth, channels, IPL_ORIGIN_TL, 4 );

            cvSetData( &src, framebuffer, src.widthStep );
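            // src is a stack-allocated header over the driver's framebuffer;
            // cvCopy below deep-copies the pixels into the persistent frame.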
            if( !frame ||
                 frame->width != src.width ||
                 frame->height != src.height ||
                 frame->depth != src.depth ||
                 frame->nChannels != src.nChannels) {

                cvReleaseImage( &frame );
                frame = cvCreateImage( cvGetSize(&src), src.depth, channels );
            }
            cvCopy(&src, frame);

            return frame;
        }
    }
    return NULL;
}
Example #8
void ICVConsumer::consume (IConsumer::info consume_info,
	ssi_size_t stream_in_num,
	ssi_stream_t stream_in[]) {

	cvSetData (_image_in, stream_in[0].ptr, _stride_in);
	consume (stream_in[0].sr, _image_in);
}
Example #9
// initialize CvMat header, allocated by the user
CV_IMPL CvMat*
cvInitMatHeader( CvMat* arr, int height, int width,
                 int type, void* data, int step )
{
    CV_FUNCNAME( "cvInitMatHeader" );
    
    __BEGIN__;

    if( !arr )
        CV_ERROR_FROM_CODE( CV_StsNullPtr );

    if( (unsigned)(CV_ARR_CN(type) - 1) >= CV_CN_MAX )
        CV_ERROR_FROM_CODE( CV_BadDepth );

    if( (unsigned)CV_ARR_DEPTH(type) > CV_DEPTH_MAX )
        CV_ERROR_FROM_CODE( CV_BadNumChannels );

    if( height <= 0 || width <= 0 )
        CV_ERROR_FROM_CODE( CV_StsBadSize );
 
    arr->type = (type & CV_ARR_TYPE_MASK) | CV_ARR_MAGIC_VAL;
    arr->height = height;
    arr->width = width;

    CV_CALL( cvSetData( arr, data, step ));

    __END__;

    return arr;
}
Example #10
IplImage* CvCaptureAVI_VFW::retrieveFrame(int)
{
    if( avistream && bmih )
    {
        bool isColor = bmih->biBitCount == 24;
        int nChannels = (isColor) ? 3 : 1;
        IplImage src;
        cvInitImageHeader( &src, cvSize( bmih->biWidth, bmih->biHeight ),
                           IPL_DEPTH_8U, nChannels, IPL_ORIGIN_BL, 4 );

        char* dataPtr = (char*)(bmih + 1);

        // Only account for the color map size if we are an 8-bit image and the color map is used
        if (!isColor)
        {
            static int RGBQUAD_SIZE_PER_BYTE = sizeof(RGBQUAD)/sizeof(BYTE);
            int offsetFromColormapToData = (int)bmih->biClrUsed*RGBQUAD_SIZE_PER_BYTE;
            dataPtr += offsetFromColormapToData;
        }

        cvSetData( &src, dataPtr, src.widthStep );
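        // src wraps the DIB pixels in place; cvFlip then copies them into frame,
        // converting the bottom-up bitmap to top-down row order.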

        if( !frame || frame->width != src.width || frame->height != src.height )
        {
            cvReleaseImage( &frame );
            frame = cvCreateImage( cvGetSize(&src), 8, nChannels );
        }

        cvFlip( &src, frame, 0 );
        return frame;
    }

    return 0;
}
Example #11
/* Shrink the image to 640x512 for display */
void CMainFrame::Resize()
{
	CvSize cvSize;
	cvSize.width = Width;
	cvSize.height = Height;

	// Create an OpenCV IplImage header and point it at the camera's capture buffer
	IplImage *iplImage = cvCreateImageHeader(cvSize, IPL_DEPTH_8U, 3);
	cvSetData(iplImage, m_pImageBuffer, Width*3);

	CvSize my_cvSize;           // size of the scaled image
	my_cvSize.width = Width/2;
	my_cvSize.height = Height/2;

	IplImage *iplgraytemp = cvCreateImage(my_cvSize, IPL_DEPTH_8U, 3); // scaled image

	cvResize(iplImage, iplgraytemp, CV_INTER_NN);

	// Copy the scaled pixels out of the IplImage
	memcpy(DisplayBuffer, (BYTE*)iplgraytemp->imageData, Height/2*Width/2*3);

	// Free the temporary image and release the borrowed header
	cvReleaseImage(&iplgraytemp);
	cvReleaseImageHeader(&iplImage);
}
Example #12
static IplImage*
icvRetrieveImage( void* obj )
{
    IplImage* img = 0;

    if( CV_IS_IMAGE(obj) )
        img = (IplImage*)obj;
    else if( CV_IS_MAT(obj) )
    {
        CvMat* m = (CvMat*)obj;
        img = cvCreateImageHeader( cvSize(m->cols,m->rows),
                        CV_MAT_DEPTH(m->type), CV_MAT_CN(m->type) );
        cvSetData( img, m->data.ptr, m->step );
        img->imageDataOrigin = (char*)m->refcount;
        m->data.ptr = 0; m->step = 0;
        cvReleaseMat( &m );
    }
    else if( obj )
    {
        cvRelease( &obj );
        CV_Error( CV_StsUnsupportedFormat, "The object is neither an image, nor a matrix" );
    }

    return img;
}
Example #13
PyObject *wrapped_People_detectAllFaces(PyObject *self, PyObject *args)
{
  wrapped_People_t *pp = (wrapped_People_t*)self;

  // IplImage *image, const char* haar_classifier_filename, double threshold, IplImage *disparity_image, CvStereoCamModel *cam_model, bool do_draw)
  IplImage *input;
  char *haar_filename;
  double threshold;
  PyObject *disparity_image, *cam_model;
  int do_draw;

  {
    char *imgdata;
    int imgdata_size, x, y;
    if (!PyArg_ParseTuple(args, "s#iisdOOi", &imgdata, &imgdata_size, &x, &y, &haar_filename, &threshold, &disparity_image, &cam_model, &do_draw))
      return NULL;
    input = cvCreateImageHeader(cvSize(x, y), IPL_DEPTH_8U, 1);
    cvSetData(input, imgdata, x);
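    // input borrows the Python string's bytes; the step equals x for this
    // 8-bit single-channel image.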
  }

  vector<CvRect> faces_vector = pp->c.detectAllFaces(input, haar_filename, threshold, NULL, NULL, do_draw);
  cvReleaseImageHeader(&input); // the header borrows Python-owned pixels, so only the header is freed
  PyObject *r = PyList_New(faces_vector.size());
  for (size_t i = 0; i < faces_vector.size(); i++)
    PyList_SetItem(r, i, Py_BuildValue("iiii",
                    faces_vector[i].x,
                    faces_vector[i].y,
                    faces_vector[i].width,
                    faces_vector[i].height));
  return r;
}
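Example #14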
void ofxCvBrightnessContrast::setBrightnessAndContrast(ofxCvImage& img, float brightnessAmount, float contrastAmount){
	brightnessVal = MAX(-127, MIN(127, brightnessAmount));
	contrastVal = MAX(-127, MIN(127, contrastAmount));
	
	unsigned char data[ 256 ];
	CvMat * matrix;
	double delta, a, b;
	
	matrix = cvCreateMatHeader( 1, 256, CV_8UC1 );
    cvSetData( matrix, data, 0 );
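    // A step of 0 lets OpenCV derive the stride of the single packed 256-entry row.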
	
	if ( contrastVal>0 ) {
        delta = (127.0f*contrastVal) / 128.0f;
        a = 255.0f / ( 255.0f-(delta*2.0f) );
        b = a * (brightnessVal-delta);
    }
    else {
		delta = (-128.0f*contrastVal) / 128.0f;
		a = ( 256.0f-(delta*2.0f) ) / 255.0f;
		b = ( a*brightnessVal )+delta;
    }
	
	for( int i=0; i<256; i++ ) {
		int value = cvRound( (a*i)+b );
		data[i]	= (unsigned char) min( max(0,value), 255 );
	}
	
    cvLUT( img.getCvImage(), img.getCvImage(), matrix );
	cvReleaseMat( &matrix );
	
}
Example #15
void Filters::lowPass(VRFrame* frame, int size)
{
    IplImage* imgDst = 0;
    IplImage* imgAux = 0;
    IplImage* imgNew = 0;

    VRFrame* frameAux;

    Log::writeLog("%s :: param: frame[%x] size[%d]", __FUNCTION__, frame, size);

    // Clamp the kernel size.
    if (size > 9)
        size = 9;

    int cols_i = size;
    int rows_i = size;
    int total_size = size * size;
    CvMat *filter = 0;

    // Mask used for the convolution (box-filter weights).
    double * convMask = new double[total_size];

    // Create a frame with the same parameters as the original.
    frameAux = new VRFrame(frame);

    imgDst = VRFrame::imgAlloc(frameAux);
    imgAux = VRFrame::imgAlloc(frameAux);
    imgNew = VRFrame::imgAlloc(frameAux);

    // Build the mask at the size passed as a parameter.
    for (int i=0; i<total_size; i++)	
        convMask[i] = (double)1/(double)total_size;

    imgAux->imageData = frameAux->data->imageData;
    imgAux->widthStep = frameAux->data->width;

    imgDst->imageData = imgAux->imageData;
    imgDst->widthStep = imgAux->width;
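    // NOTE: widthStep is set to the pixel width here, which assumes tightly
    // packed single-channel data; a 3-channel frame would need width * 3.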

    filter = cvCreateMatHeader(rows_i, cols_i, CV_64FC1);

    cvSetData(filter, convMask, cols_i * sizeof(double));

    cvFilter2D(imgAux, imgDst, filter, cvPoint(-1,-1));

    VRFrame::imgCopy(imgDst, imgNew);

    frame->setImage(imgNew);

    // Free the temporaries
    VRFrame::imgDealloc(imgAux);
    VRFrame::imgDealloc(imgDst);

    delete[] convMask;
    delete frameAux;

}
Example #16
void VideoFrame::ImageView(IplImage* view) const {
  ASSURE_LOG(view) << "View not set.";
  ASSERT_LOG(width_step_ % 4 == 0) << "IplImages need to have width_step_ "
                                   << "divisible by 4.";
  cvInitImageHeader(view, cvSize(width_, height_), IPL_DEPTH_8U, channels_);
  cvSetData(view, data_, width_step_);
}
Example #17
//--------------------------------------------------------------
ofImage ofxContrast::setBrightness(ofImage& _img, float brightnessAmount){
    ofxCvColorImage cvimg;
    cvimg.allocate(_img.width, _img.height);
    cvimg.setFromPixels(_img.getPixels(), _img.width, _img.height);
    
	float brightnessVal = MAX(-127, MIN(127, brightnessAmount));
	
	unsigned char data[ 256 ];
	CvMat * matrix;
	
	matrix = cvCreateMatHeader( 1, 256, CV_8UC1 );
    cvSetData( matrix, data, 0 );
	
	for( int i=0; i<256; i++ ) {
		int value = cvRound( i+brightnessVal );
		data[i]	= (unsigned char) min( max(0,value), 255 );
	}
	
    cvLUT( cvimg.getCvImage(), cvimg.getCvImage(), matrix );
	cvReleaseMat( &matrix );
    
    ofImage ofimg;
    ofimg.allocate(_img.width, _img.height, OF_IMAGE_COLOR);
    ofimg.setFromPixels(cvimg.getPixels(), _img.width, _img.height, OF_IMAGE_COLOR);
	return ofimg;
}
Example #18
// OpenCLIF computations
CLIFGrayscaleResult
clifGrayscale(const IplImage* source,
              CLIFEnvironmentData* data,
              const cl_bool use_opencl)
{
    CLIFGrayscaleResult ret;
    if(!use_opencl) {
        ret.image = cvCreateImage(cvSize(source->width, source->height), IPL_DEPTH_8U, 1);
        cvCvtColor(source, ret.image, CV_BGR2GRAY);
        return ret;
    }
    
    cl_int error = CL_SUCCESS;
    
    // Init buffer
    error = clEnqueueWriteBuffer(data->environment.queue, data->bgr_to_gray_data.buffers[0], CL_FALSE, 0, source->widthStep * source->height, source->imageData, 0, NULL, NULL);
    clCheckOrExit(error);
    
    // Run kernel
    error = clEnqueueNDRangeKernel(data->environment.queue, data->environment.kernels[0], 2, NULL, data->bgr_to_gray_data.global_size, data->bgr_to_gray_data.local_size, 0, NULL, NULL);
    clCheckOrExit(error);
    
    // Read result
    cl_uchar* result = (cl_uchar*)clEnqueueMapBuffer(data->environment.queue, data->bgr_to_gray_data.buffers[1], CL_TRUE, CL_MAP_READ, 0, source->width * source->height, 0, NULL, NULL, &error);
    clCheckOrExit(error);
    
    // Return
    ret.image = cvCreateImageHeader(cvSize(source->width, source->height), IPL_DEPTH_8U, 1);
    cvSetData(ret.image, result, source->width);
    return ret;
}
Example #19
_IplImage cvIplImage(const cv::Mat& m)
{
    _IplImage self;
    CV_Assert( m.dims <= 2 );
    cvInitImageHeader(&self, cvSize(m.size()), cvIplDepth(m.flags), m.channels());
    cvSetData(&self, m.data, (int)m.step[0]);
    return self;
}
Example #20
void rgb_cb(freenect_device *dev, freenect_pixel *rgb, uint32_t timestamp) 
{
	pthread_mutex_lock(&backbuf_mutex);
	got_frames++;
	cvSetData(rgbBack, rgb, 3*FREENECT_FRAME_W);
	pthread_cond_signal(&framesReady_cond);
	pthread_mutex_unlock(&backbuf_mutex);
}
Example #21
C_RESULT output_gtk_stage_transform( void *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
  uint64_t timeCode;
  struct timeval lastFrameReceptionTime;

  gettimeofday(&lastFrameReceptionTime, NULL);

  if (!videoServer_isStarted()) return SUCCESS;

  videoServer_lockFrameBuffer();
 
  if(out->status == VP_API_STATUS_INIT)
  {
    out->numBuffers = 1;
    out->size = QVGA_WIDTH * QVGA_HEIGHT * 3;
    out->buffers = (int8_t **) vp_os_malloc (sizeof(int8_t *)+out->size*sizeof(int8_t));
    out->buffers[0] = (int8_t *)(out->buffers+1);
    outputImageBuffer = out->buffers[0];
    cvSetData(debugImage, outputImageBuffer, QVGA_WIDTH * 3);
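    // debugImage now draws directly from outputImageBuffer, so every processed
    // frame becomes visible without an extra copy.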

    out->indexBuffer = 0;
    // out->lineSize not used
    out->status = VP_API_STATUS_PROCESSING;
  }

  if(out->status == VP_API_STATUS_PROCESSING && outputImageBuffer != NULL) {
	  /* Get a reference to the last decoded picture */
	  pixbuf_data      = (uint8_t*)in->buffers[0];

	  {
		  int i;
		  int8_t *outBuffer = outputImageBuffer;
		  for (i = 0; i < QVGA_WIDTH * QVGA_HEIGHT * 3; i += 3) {
			  outBuffer[i] = pixbuf_data[i + 2];
			  outBuffer[i + 1] = pixbuf_data[i + 1];
			  outBuffer[i + 2] = pixbuf_data[i];
		  }
	  }

	  timeCode = (uint64_t)lastFrameReceptionTime.tv_sec * 1000000ULL + (uint64_t)lastFrameReceptionTime.tv_usec;
	  videoServer_feedFrame_BGR24(timeCode, outputImageBuffer);

/*	  struct timeval currentTime;
	  gettimeofday(&currentTime, NULL);
	  double elapsed = (currentTime.tv_sec - lastFrameTime.tv_sec) + (currentTime.tv_usec - lastFrameTime.tv_usec) / 1e6;
	  lastFrameTime = currentTime;
	  PRINT("fps: %f\n", 1.0 / elapsed);
*/
#ifdef IMAGE_DEBUG
	  cvShowImage("DroneCamera", debugImage);
#endif

  }

  videoServer_unlockFrameBuffer();

  return (SUCCESS);
}
Example #22
void showImage_1ch(UINT8T* p_1ch, const char* name)
{
	// Display the single-channel data

	cvSetData(image_1ch, p_1ch, C);
	cvNamedWindow(name, 0);
	cvShowImage(name, image_1ch);
	cvWaitKey(0);
}
Example #23
CLIFIntegralResult
clifIntegral(const IplImage* source,
             CLIFEnvironmentData* data,
             const cl_bool use_opencl)
{
    CLIFIntegralResult ret;
    
    if(!use_opencl) {        
        ret.image = cvCreateMat(source->height + 1, source->width + 1, CV_32SC1);
        ret.square_image = cvCreateMat(source->height + 1, source->width + 1, CV_64FC1);
        cvIntegral(source, ret.image, ret.square_image);
        return ret;
    }
    
    cl_int error = CL_SUCCESS;
    
    // Upload the source pixels (the image is assumed single-channel here)
    error = clEnqueueWriteBuffer(data->environment.queue, data->integral_image_data.buffers[0], CL_FALSE, 0, source->width * source->height, source->imageData, 0, NULL, NULL);
    clCheckOrExit(error);
    
    // Run sum rows kernel
    error = clEnqueueNDRangeKernel(data->environment.queue, data->environment.kernels[1], 1, NULL, &(data->integral_image_data.global_size[0]), &(data->integral_image_data.local_size[0]), 0, NULL, NULL);
    clCheckOrExit(error);
    
    // Run sum cols kernel
    error = clEnqueueNDRangeKernel(data->environment.queue, data->environment.kernels[2], 1, NULL, &(data->integral_image_data.global_size[1]), &(data->integral_image_data.local_size[1]), 0, NULL, NULL);
    clCheckOrExit(error);
    
    // Read result
    cl_uint* result = (cl_uint*)clEnqueueMapBuffer(data->environment.queue, data->integral_image_data.buffers[3], CL_TRUE, CL_MAP_READ, 0, (source->width + 1) * (source->height + 1) * sizeof(cl_uint), 0, NULL, NULL, &error);
    clCheckOrExit(error);
    
    cl_ulong* square_result = (cl_ulong*)clEnqueueMapBuffer(data->environment.queue, data->integral_image_data.buffers[4], CL_TRUE, CL_MAP_READ, 0, (source->width + 1) * (source->height + 1) * sizeof(cl_ulong), 0, NULL, NULL, &error);
    clCheckOrExit(error);
    
    data->integral_image_data.ptr = result;
    
    // Wrap the mapped buffers in CvMat headers (no copy)
    ret.image = cvCreateMatHeader(source->height + 1, source->width + 1, CV_32SC1);
    cvSetData(ret.image, result, (source->width + 1) * sizeof(cl_uint));
    ret.square_image = cvCreateMatHeader(source->height + 1, source->width + 1, CV_64FC1);
    cvSetData(ret.square_image, square_result, (source->width + 1) * sizeof(cl_ulong));
    return ret;
}
Example #24
IplImage *freenect_sync_get_rgb_cv(int index)
{
	static IplImage *image = 0;
	static char *data = 0;
	if (!image) image = cvCreateImageHeader(cvSize(640,480), 8, 3);
	unsigned int timestamp;
	if (freenect_sync_get_video(&data, &timestamp, index, FREENECT_VIDEO_RGB))
	    return NULL;
	cvSetData(image, data, 640*3);
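	// image is a static header over libfreenect's internal buffer; callers must
	// not free it and should copy the pixels if they need to keep them.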
	return image;
}
Example #25
  IplImage* host_image2d<V>::getIplImage() const
  {
    assert(begin_);
    //allocate only the IplImage header (no pixel buffer)
    IplImage* frameIPL = cvCreateImageHeader(cvSize(ncols(),nrows()),
					     sizeof(typename V::vtype)*8,
					     V::size);
    //point the header at the image's own memory
    cvSetData(frameIPL, (void*)begin(), pitch());
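    // The returned header borrows the image's own memory; release it with
    // cvReleaseImageHeader so the pixels themselves are not freed.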
    return frameIPL;
  }
Example #26
IplImage *freenect_sync_get_depth_cv(int index)
{
	static IplImage *image = 0;
	static char *data = 0;
	if (!image) image = cvCreateImageHeader(cvSize(640,480), 16, 1);
	unsigned int timestamp;
	if (freenect_sync_get_depth(&data, &timestamp, index, FREENECT_DEPTH_11BIT))
	    return NULL;
	cvSetData(image, data, 640*2);
	return image;
}
Example #27
/**
 * Create IplImage from Comp Buf
 * @param cbuf
 * @return IplImage pointer
 */
IplImage* BOCV_IplImage_attach(CompBuf* cbuf)
{
    if(cbuf->x > 0 && cbuf->y > 0){
        IplImage *img = cvCreateImageHeader(cvSize(cbuf->x, cbuf->y), IPL_DEPTH_32F, cbuf->type);
        cvSetData(img, cbuf->rect, cbuf->x * cbuf->type * sizeof(float)); // float rows are always 4-byte aligned
        return img;
    }else{
        return NULL;
    }
}
Example #28
unsigned int __stdcall RightCaptureThread(LPVOID lpParameter)
{
	BYTE *pRGB24Buff = new BYTE[IMGWIDTH * IMGHEIGHT * 3]; // buffer that receives the image data
	DeviceFrameInfo dfInfo;
	while (true)
	{
		RightDevice->SoftTriggerOnce();
		RightDevice->DeviceGetImageBufferEx2(pRGB24Buff, -1, &dfInfo);
		cvSetData(RightImg, pRGB24Buff, dfInfo.uiWidth * 3);
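		// RightImg keeps pointing at pRGB24Buff, which is refilled on every
		// loop iteration before the semaphore is signalled.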
		ReleaseSemaphore(RightImgOver, 1, NULL);
	}
}
Example #29
void adjustBrightnessContrast(IplImage *&src, int Brightness, int Contrast)
{
	unsigned char LookupTableData[256];
	CvMat *LookupTableMatrix;
	double Delta;
	double a, b;
	int y;

	IplImage *filterB = cvCreateImage(cvGetSize(src), (src)->depth, 1);
	IplImage *filterG = cvCreateImage(cvGetSize(src), (src)->depth, 1);
	IplImage *filterR = cvCreateImage(cvGetSize(src), (src)->depth, 1);
	cvSplit(src, filterB, filterG, filterR, 0);

	// Brightness/contrast formula (floating point to avoid integer truncation)
	if(Contrast > 0)
	{
		Delta = 127.0 * Contrast / 100.0;
		a = 255.0 / (255.0 - Delta * 2.0);
		b = a * (Brightness - Delta);
	}
	else
	{
		Delta = -128.0 * Contrast / 100.0;
		a = (256.0 - Delta * 2.0) / 255.0;
		b = a * Brightness + Delta;
	}

	for(int x = 0 ; x < 256 ; x++)
	{
		y=(int)(a * x + b);
		if(y < 0) y = 0; else if(y > 255) y = 255;

		LookupTableData[x]=(uchar)y;
	}

	LookupTableMatrix = cvCreateMatHeader(1, 256, CV_8UC1);
	cvSetData(LookupTableMatrix, LookupTableData, 0);
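	// LookupTableMatrix borrows the stack array LookupTableData; step 0 lets
	// OpenCV derive the stride of the single row.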

	cvLUT(filterB, filterB, LookupTableMatrix);
	cvLUT(filterG, filterG, LookupTableMatrix);
	cvLUT(filterR, filterR, LookupTableMatrix);

	IplImage *dst = cvCreateImage(cvGetSize(src), src->depth, src->nChannels);
	cvMerge(filterB, filterG, filterR, 0, dst);

	cvReleaseImage(&src);
	src = cvCloneImage(dst);
	cvReleaseImage(&dst);
	cvReleaseImage(&filterB);
	cvReleaseImage(&filterG);
	cvReleaseImage(&filterR);
	cvReleaseMat(&LookupTableMatrix);
}//end Brightness/Contrast
Example #30
    virtual IplImage* retrieveFrame(int)
    {
        unsigned char* data = 0;
        int step=0, width=0, height=0, cn=0;

        if(!ffmpegCapture ||
           !icvRetrieveFrame_FFMPEG_p(ffmpegCapture,&data,&step,&width,&height,&cn))
           return 0;
        cvInitImageHeader(&frame, cvSize(width, height), 8, cn);
        cvSetData(&frame, data, step);
        return &frame;
    }