//-------------------------------------------------------------------------------------
void ofCvColorImage::setFromPixels(unsigned char * _pixels, int w, int h){
    // Point the IplImage header at the caller's buffer (packed RGB, step = w*3),
    // then mirror the data row by row into the cached pixel buffer.
    cvSetImageData(cvImage, _pixels, w*3);
    int stride = w*3;
    for (int i = 0; i < h; i++){
        memcpy(pixels + (i * stride), cvImage->imageData + (i * cvImage->widthStep), stride);
    }
}
unsigned char* OpenCVImage::setData(unsigned char* data, bool ownership) {
    m_own = ownership;
    // Keep the previous buffer pointer so the caller can free or reuse it
    unsigned char* tmp = getData();
    int channels = getNumChannels();
    // Re-point the wrapped IplImage at the new buffer without copying
    cvSetImageData(m_img, data, cvGetSize(m_img).width * channels);
    return tmp;
}
void MyKinectListener::ColorReceived(Kinect::Kinect *K) {
    lock();
    K->ParseColorBuffer();
    // Wrap the 640x480 RGB Kinect color buffer without copying it
    cvSetImageData( _colorImg, K->mColorBuffer, 640*3 );
    //memcpy( _colorImg->imageData, K->mColorBuffer, 640*480*3 );
    //cvCvtColor( _colorImg, _colorImg, CV_RGB2BGR );
    _colorImgFrame++;
    unlock();
}
//
// Transform
//
// Transform the sample 'in place'
//
HRESULT CKalmTrack::Transform(IMediaSample *pSample)
{
    BYTE* pData;
    CvImage image;

    pSample->GetPointer(&pData);
    AM_MEDIA_TYPE* pType = &m_pInput->CurrentMediaType();
    VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *) pType->pbFormat;

    // Get the image properties from the BITMAPINFOHEADER
    CvSize size = cvSize( pvi->bmiHeader.biWidth, pvi->bmiHeader.biHeight );
    int stride = (size.width * 3 + 3) & -4;   // DIB rows are DWORD-aligned

    // Wrap the DirectShow sample buffer in an image header without copying
    cvInitImageHeader( &image, size, IPL_DEPTH_8U, 3, IPL_ORIGIN_TL, 4, 1 );
    cvSetImageData( &image, pData, stride );

    if( IsTracking == false )
    {
        if( IsInit == false )
        {
            CvPoint p1, p2;
            // Draw the selection box while waiting for initialization
            p1.x = cvRound( size.width * m_params.x );
            p1.y = cvRound( size.height * m_params.y );
            p2.x = cvRound( size.width * (m_params.x + m_params.width) );
            p2.y = cvRound( size.height * (m_params.y + m_params.height) );
            CheckBackProject( &image );
            cvRectangle( &image, p1, p2, -1, 1 );
        }
        else
        {
            // Seed the tracker from the normalized selection rectangle
            m_object.x = cvRound( size.width * m_params.x );
            m_object.y = cvRound( size.height * m_params.y );
            m_object.width = cvRound( size.width * m_params.width );
            m_object.height = cvRound( size.height * m_params.height );
            ApplyCamShift( &image, true );
            CheckBackProject( &image );
            IsTracking = true;
        }
    }
    else
    {
        // Predict the object position with the Kalman filter, then refine with CamShift
        cvKalmanUpdateByTime( Kalman );
        m_object.x = cvRound( Kalman->PriorState[0] - m_object.width * 0.5 );
        m_object.y = cvRound( Kalman->PriorState[2] - m_object.height * 0.5 );

        ApplyCamShift( &image, false );
        CheckBackProject( &image );

        cvRectangle( &image,
                     cvPoint( m_object.x, m_object.y ),
                     cvPoint( m_object.x + m_object.width, m_object.y + m_object.height ),
                     -1, 1 );
        Rectang( &image, m_Indicat1, -1 );

        // Position indicators: horizontal (blue) and vertical (red) bars
        m_X.x = 10; m_X.y = 10; m_X.width = 50 * m_Old.x / size.width; m_X.height = 10;
        Rectang( &image, m_X, CV_RGB(0,0,255) );
        m_Y.x = 10; m_Y.y = 10; m_Y.width = 10; m_Y.height = 50 * m_Old.y / size.height;
        Rectang( &image, m_Y, CV_RGB(255,0,0) );

        // Velocity indicator in the bottom-left corner
        m_Indicat2.x = 0;
        m_Indicat2.y = size.height - 50;
        m_Indicat2.width = 50;
        m_Indicat2.height = 50;
        Rectang( &image, m_Indicat2, -1 );

        float Norm = cvSqrt( Measurement[1]*Measurement[1] + Measurement[3]*Measurement[3] );
        int VXNorm = (fabs(Measurement[1]) > 5) ? (int)(12*Measurement[1]/Norm) : 0;
        int VYNorm = (fabs(Measurement[3]) > 5) ? (int)(12*Measurement[3]/Norm) : 0;
        CvPoint pp1 = { 25, size.height - 25 };
        CvPoint pp2 = { 25 + VXNorm, size.height - 25 + VYNorm };
        cvLine( &image, pp1, pp2, CV_RGB(0,0,0), 3 );

        /*CvPoint pp1 = {25,size.height-25};
        double angle = atan2( Measurement[3], Measurement[1] );
        CvPoint pp2 = {cvRound(25+12*cos(angle)),cvRound(size.height-25-12*sin(angle))};
        cvLine(&image,pp1,pp2,0,3);*/
    }

    // Detach the borrowed sample buffer before the header goes out of scope
    cvSetImageData( &image, 0, 0 );

    return NOERROR;
} // Transform
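// The snippets above all follow the same pattern: cvSetImageData() points an
// existing IplImage header at an externally owned buffer instead of copying it,
// and passing a NULL pointer (as the last example does) detaches the buffer again.
// The sketch below is a minimal, hypothetical illustration of that lifecycle,
// assuming an OpenCV 1.x-style build where cvSetImageData (the legacy alias of
// cvSetData) is exposed by <opencv/cv.h>; the buffer size and variable names are
// illustrative only and not taken from any of the projects above.
#include <opencv/cv.h>
#include <stdlib.h>

int main(void)
{
    const int width = 640, height = 480, channels = 3;
    unsigned char* buffer = (unsigned char*)malloc(width * height * channels);

    // Allocate only the header; OpenCV does not allocate pixel memory here.
    IplImage* header = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, channels);

    // Borrow the external buffer; the third argument is the row step in bytes.
    cvSetImageData(header, buffer, width * channels);

    // ... pass `header` to cv* calls that accept an IplImage* ...

    // Detach the borrowed buffer, then release only the header.
    cvSetImageData(header, 0, 0);
    cvReleaseImageHeader(&header);
    free(buffer);
    return 0;
}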