void adjustHSV(IplImage *&src, int HuePosition, int SaturationPosition, int ValuePosition)
{
    int Hue = HuePosition;
    double Saturation = SaturationPosition * 2.55;
    double Value = ValuePosition / 100.;

    //create float image
    IplImage *temp = cvCreateImage(cvGetSize(src), IPL_DEPTH_32F, src->nChannels);
    cvConvertScale(src, temp, 1.0/255.0, 0);

    //split
    IplImage* floatingH = cvCreateImage( cvGetSize(src), IPL_DEPTH_32F, 1 );
    IplImage* floatingS = cvCreateImage( cvGetSize(src), IPL_DEPTH_32F, 1 );
    IplImage* floatingV = cvCreateImage( cvGetSize(src), IPL_DEPTH_32F, 1 );
    cvCvtColor(temp, temp, CV_BGR2HSV);  //color convert
    cvSplit( temp, floatingH, floatingS, floatingV, NULL);

    //adjust
    cvAddS(floatingH, cvScalarAll(Hue), floatingH);
    cvAddS(floatingV, cvScalarAll(Value), floatingV);

    //merge
    cvZero(temp);
    cvMerge(floatingH, floatingS, floatingV, NULL, temp);
    cvCvtColor(temp, temp, CV_HSV2BGR);

    //save
    cvConvertScale( temp, src, 255, 0 );

    IplImage *HSV = convertImageRGBtoHSV(src);
    IplImage *H = cvCreateImage(cvGetSize(src), src->depth, 1);
    IplImage *S = cvCreateImage(cvGetSize(src), src->depth, 1);
    IplImage *V = cvCreateImage(cvGetSize(src), src->depth, 1);
    cvSplit(HSV, H, S, V, 0);
    cvAddS(S, cvScalarAll(Saturation), S);
    cvMerge(H, S, V, 0, HSV);

    cvReleaseImage(&src);
    src = convertImageHSVtoRGB(HSV);

    cvReleaseImage(&HSV);
    cvReleaseImage(&H);
    cvReleaseImage(&S);
    cvReleaseImage(&V);
    cvReleaseImage(&temp);
    cvReleaseImage(&floatingH);
    cvReleaseImage(&floatingS);
    cvReleaseImage(&floatingV);
}//end HSV
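In slider terms, and assuming OpenCV's floating-point HSV convention (H in [0,360), S and V in [0,1] for the 32F pass, 8-bit S for the second pass), the function applies per pixel:

\[ H \leftarrow H + \Delta H, \qquad V \leftarrow V + \frac{\Delta V}{100}, \qquad S \leftarrow S + 2.55\,\Delta S \]

Note that the code does not wrap H back into range after the shift.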
// define a trackbar callback
void on_trackbar( int h )
{
    int j;
    int distStep = dist->widthStep / 4;   // widthStep is in bytes; 4 bytes per float
    float* currPointer;

    cvThreshold( gray, edge, (float)(edge_thresh), (float)(edge_thresh), CV_THRESH_BINARY );

    // Distance transform
    cvDistTransform( edge, dist, CV_DIST_L2, CV_DIST_MASK_5, NULL );
    cvConvertScale( dist, dist, 5000.0, 0 );

    for( j = 0, currPointer = (float*)dist->imageData; j < dist->height; j++, currPointer += distStep ) {
        cvbSqrt( (float*)(currPointer), (float*)(currPointer), dist->width );
    }

    cvConvertScale( dist, dist32s, 1.0, 0.5 );
    cvAndS( dist32s, cvScalarAll(255), dist32s, 0 );
    cvConvertScale( dist32s, dist8u1, 1, 0 );
    cvConvertScale( dist32s, dist32s, -1, 0 );
    cvAddS( dist32s, cvScalarAll(255), dist32s, 0 );
    cvConvertScale( dist32s, dist8u2, 1, 0 );
    cvCvtPlaneToPix( dist8u1, dist8u2, dist8u2, 0, dist8u );
    show_iplimage( wndname, dist8u );
}
int main(int argc, char** argv)
{
    IplImage *src;
    if (argc == 7 && (src = cvLoadImage(argv[1])) != 0) {
        int x = atoi(argv[2]);
        int y = atoi(argv[3]);
        int width = atoi(argv[4]);
        int height = atoi(argv[5]);
        int add = atoi(argv[6]);
        printf("X: %d, Y: %d, width: %d, height: %d, add: %d \n", x, y, width, height, add);
        printf("Image width: %d, Image height: %d\n", src->width, src->height);

        // set the ROI (Region Of Interest) with the arguments passed via command line
        cvSetImageROI(src, cvRect(x, y, width, height));

        // apply the Add operation only on the ROI
        cvAddS(src, cvScalar(add), src);

        // reset the ROI to display the entire image in the window
        cvResetImageROI(src);

        cvNamedWindow("Roi_Add", 1);
        cvShowImage("Roi_Add", src);
        cvWaitKey();
        cvReleaseImage(&src);
        cvDestroyWindow("Roi_Add");
    }
    return 0;
}
IplImage *createLegend(int nbLines)
{
    IplImage *legend = cvCreateImage(cvSize(640, 21*nbLines), IPL_DEPTH_8U, 3);  // create the image
    cvZero(legend);                                    // fill the image with black pixels
    cvAddS(legend, cvScalar(255, 255, 255), legend);   // change black pixels to white pixels
    return legend;
}
void CreateModelsfromStats()
{
    cvConvertScale(IavgF, IavgF, (double)(1.0 / Icount));
    cvConvertScale(IdiffF, IdiffF, (double)(1.0 / Icount));
    cvAddS(IdiffF, cvScalar(1, 1, 1), IdiffF);
    SetHighThreshold(7.0);
    SetLowThreshold(6.0);
}
//Once you've learned the background long enough, turn it into a background model
void ofxBackground::createModelsfromStats()
{
    cvConvertScale(IavgF, IavgF, (double)(1.0/Icount));
    cvConvertScale(IdiffF, IdiffF, (double)(1.0/Icount));
    cvAddS(IdiffF, cvScalar(1.0, 1.0, 1.0), IdiffF);   //Make sure diff is always something
    scaleHigh(HIGH_SCALE_NUM);
    scaleLow(LOW_SCALE_NUM);
}
void OpenCV::popupcv()
{
    cvSetImageROI(KinectColorImg, cvRect(0, 0, 640, 480));
    cvAddS(KinectColorImg, cvScalar(20), KinectColorImg);
    cvResetImageROI(KinectColorImg);

    cvNamedWindow("RoiAdd", 1);
    cvShowImage("OpenCV_Kinect!", KinectColorImg);
}
//--------------------------------------------------------------------------------
void ofxCvImage::operator += ( float value ) {
    if( !bAllocated ){
        ofLogError("ofxCvImage") << "operator+=: image not allocated";
        return;
    }
    cvAddS( cvImage, cvScalar(value), cvImageTemp );
    swapTemp();
    flagImageChanged();
}
void createModelsfromStats() {
    cvConvertScale( IavgF, IavgF, (double)(1.0/Icount) );
    cvConvertScale( IdiffF, IdiffF, (double)(1.0/Icount) );

    //Make sure diff is always something
    cvAddS( IdiffF, cvScalar(1.0, 1.0, 1.0), IdiffF );

    setHighThreshold( 7.0 );
    setLowThreshold( 6.0 );
}
//--------------------------------------------------------------------------------
void ofxCvImage::operator += ( float value ) {
    if( !bAllocated ){
        ofLog(OF_LOG_ERROR, "in +=, need to allocate image first");
        return;
    }
    cvAddS( cvImage, cvScalar(value), cvImageTemp );
    swapTemp();
    flagImageChanged();
}
void THISCLASS::OnStep() {
    // Get and check input image
    IplImage *inputimage = mCore->mDataStructureImageColor.mImage;
    if (! inputimage) {
        AddError(wxT("No input image."));
        return;
    }
    if (inputimage->nChannels != 3) {
        AddError(wxT("The input image is not a color image."));
        return;
    }

    // Check and update the background
    if (! mOutputImage) {
        mOutputImage = cvCloneImage(inputimage);
    } else {
        cvCopyImage(inputimage, mOutputImage);
    }
    if (! mBackgroundImage) {
        mBackgroundImage = cvCloneImage(mOutputImage);
    } else if (mUpdateProportion > 0) {
        if ((cvGetSize(mOutputImage).height != cvGetSize(mBackgroundImage).height) || (cvGetSize(mOutputImage).width != cvGetSize(mBackgroundImage).width)) {
            AddError(wxT("Input and background images do not have the same size."));
            return;
        }
        cvAddWeighted(mOutputImage, mUpdateProportion, mBackgroundImage, 1.0 - mUpdateProportion, 0, mBackgroundImage);
    }

    try {
        // Correct the image with the difference in image mean
        if (mCorrectMean) {
            mBackgroundImageMean = cvAvg(mBackgroundImage);
            CvScalar tmpScalar = cvAvg(mOutputImage);
            cvAddS(mOutputImage, cvScalar(mBackgroundImageMean.val[0] - tmpScalar.val[0], mBackgroundImageMean.val[1] - tmpScalar.val[1], mBackgroundImageMean.val[2] - tmpScalar.val[2]), mOutputImage);
        }

        // Background subtraction
        if (mMode == sMode_SubImageBackground) {
            cvSub(mOutputImage, mBackgroundImage, mOutputImage);
        } else if (mMode == sMode_SubBackgroundImage) {
            cvSub(mBackgroundImage, mOutputImage, mOutputImage);
        } else {
            cvAbsDiff(mOutputImage, mBackgroundImage, mOutputImage);
        }
    } catch (...) {
        AddError(wxT("Background subtraction failed."));
    }

    mCore->mDataStructureImageColor.mImage = mOutputImage;

    // Set the display
    DisplayEditor de(&mDisplayOutput);
    if (de.IsActive()) {
        de.SetMainImage(mOutputImage);
    }
}
//Once you've learned the background long enough, turn it into a background model
void createModelsfromStats() {
    for (int i = 0; i < NUM_CAMERAS; i++) {
        cvConvertScale(IavgF[i], IavgF[i], (double)(1.0/Icount[i]));
        cvConvertScale(IdiffF[i], IdiffF[i], (double)(1.0/Icount[i]));
        cvAddS(IdiffF[i], cvScalar(1.0, 1.0, 1.0), IdiffF[i]);   //Make sure diff is always something
        scaleHigh(HIGH_SCALE_NUM, i);
        scaleLow(LOW_SCALE_NUM, i);
    }
}
ReturnType RGBValueControl::onExecute()
{
    // Fetch the image from the in-port
    opros_any *pData = ImageIn.pop();
    RawImage result;

    if (pData != NULL) {
        // Get the image from the port data
        RawImage Image = ImageIn.getContent(*pData);
        RawImageData *RawImage = Image.getImage();

        // Get the size of the current frame
        m_in_width = RawImage->getWidth();
        m_in_height = RawImage->getHeight();

        // Allocate the buffer for the original image
        if (m_orig_img == NULL) {
            m_orig_img = cvCreateImage(cvSize(m_in_width, m_in_height), IPL_DEPTH_8U, 3);
        }

        // Copy the incoming image data
        memcpy(m_orig_img->imageData, RawImage->getData(), RawImage->getSize());

        // Add the RGB constants to the image
        cvAddS(m_orig_img, CV_RGB(R_Value, G_Value, B_Value), m_orig_img, NULL);

        // Image pointer of the outgoing RawImage
        RawImageData *pimage = result.getImage();

        // Resize it to the input image size and channel count
        pimage->resize(m_orig_img->width, m_orig_img->height, m_orig_img->nChannels);

        // Total size of the image data (width * height * channels)
        int size = m_orig_img->width * m_orig_img->height * m_orig_img->nChannels;

        // Destination pointer for the pixel data
        unsigned char *ptrdata = pimage->getData();

        // Copy the current frame into the outgoing image
        memcpy(ptrdata, m_orig_img->imageData, size);

        // Push to the out-port
        opros_any mdata = result;
        ImageOut.push(result);

        delete pData;
    }
    return OPROS_SUCCESS;
}
// Morphological H-minima transform
void lhMorpHMin(const IplImage* src, IplImage* dst, unsigned char h, IplConvKernel* se = NULL)
{
    assert(src != NULL && dst != NULL && src != dst);
    //p150
    IplImage* temp = cvCreateImage(cvGetSize(src), 8, 1);
    cvAddS(src, cvScalar(h), temp);
    lhMorpRErode(temp, src, dst, se);
    cvReleaseImage(&temp);
}
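Assuming lhMorpRErode(marker, mask, dst, se) computes morphological reconstruction by erosion of the marker over the mask, this is the standard H-minima transform referenced by the p150 note, which suppresses all minima of depth at most h:

\[ \mathrm{HMIN}_{h}(f) = R^{\varepsilon}_{f}(f + h) \]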
static void work(int)
{
    int r, b, g, subR, subB, subG;

    // Trackbar labels are in Chinese: "增加紅/藍/綠" = increase red/blue/green,
    // "減少紅/藍/綠" = decrease red/blue/green.
    r = cvGetTrackbarPos("增加紅", ctrlPanel1);
    b = cvGetTrackbarPos("增加藍", ctrlPanel1);
    g = cvGetTrackbarPos("增加綠", ctrlPanel1);
    subR = cvGetTrackbarPos("減少紅", ctrlPanel2);
    subB = cvGetTrackbarPos("減少藍", ctrlPanel2);
    subG = cvGetTrackbarPos("減少綠", ctrlPanel2);

    des = cvCloneImage(img);
    cvAddS(img, CV_RGB(r, g, b), des, 0);
    cvSubS(des, CV_RGB(subR, subG, subB), des, 0);
    cvShowImage(windowName, des);
}
// Morphological regional minima
void lhMorpRMin(const IplImage* src, IplImage* dst, IplConvKernel* se = NULL)
{
    assert(src != NULL && dst != NULL && src != dst);
    //p149 (6.14)
    IplImage* temp = cvCreateImage(cvGetSize(src), 8, 1);
    cvAddS(src, cvScalar(1), temp);
    lhMorpRErode(temp, src, dst, se);
    cvSub(dst, src, dst);
    cvReleaseImage(&temp);
}
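Under the same assumption about lhMorpRErode, this matches the textbook regional-minima definition cited as (6.14):

\[ \mathrm{RMIN}(f) = R^{\varepsilon}_{f}(f + 1) - f \]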
void cvShowInvDFT1(IplImage* im, CvMat* dft_A, int dft_M, int dft_N, char* src)
{
    IplImage* realInput;
    IplImage* imaginaryInput;
    IplImage* complexInput;
    IplImage* image_Re;
    IplImage* image_Im;
    double m, M;
    char str[80];

    realInput = cvCreateImage( cvGetSize(im), IPL_DEPTH_64F, 1);
    imaginaryInput = cvCreateImage( cvGetSize(im), IPL_DEPTH_64F, 1);
    complexInput = cvCreateImage( cvGetSize(im), IPL_DEPTH_64F, 2);

    image_Re = cvCreateImage( cvSize(dft_N, dft_M), IPL_DEPTH_64F, 1);
    image_Im = cvCreateImage( cvSize(dft_N, dft_M), IPL_DEPTH_64F, 1);

    //cvDFT( dft_A, dft_A, CV_DXT_INV_SCALE, complexInput->height );
    cvDFT( dft_A, dft_A, CV_DXT_INV_SCALE, dft_M );

    strcpy(str, "DFT INVERSE - ");
    strcat(str, src);
    cvNamedWindow(str, 0);

    // Split Fourier in real and imaginary parts
    cvSplit( dft_A, image_Re, image_Im, 0, 0 );

    // Compute the magnitude of the spectrum Mag = sqrt(Re^2 + Im^2)
    cvPow( image_Re, image_Re, 2.0 );
    cvPow( image_Im, image_Im, 2.0 );
    cvAdd( image_Re, image_Im, image_Re, NULL );
    cvPow( image_Re, image_Re, 0.5 );

    // Compute log(1 + Mag)
    cvAddS( image_Re, cvScalarAll(1.0), image_Re, NULL );  // 1 + Mag
    cvLog( image_Re, image_Re );                           // log(1 + Mag)

    cvMinMaxLoc(image_Re, &m, &M, NULL, NULL, NULL);
    cvScale(image_Re, image_Re, 1.0/(M-m), 1.0*(-m)/(M-m));

    //cvCvtColor(image_Re, image_Re, CV_GRAY2RGBA);
    cvShowImage(str, image_Re);
}
void THISCLASS::OnStep() {
    // Get and check input image
    IplImage *inputimage = mCore->mDataStructureImageGray.mImage;
    if (! inputimage) {
        AddError(wxT("No input image."));
        return;
    }
    if (inputimage->nChannels != 1) {
        AddError(wxT("The input image is not a grayscale image."));
        return;
    }

    // Check the background image
    if (! mBackgroundImage) {
        AddError(wxT("No background image loaded."));
        return;
    }
    if ((cvGetSize(inputimage).height != cvGetSize(mBackgroundImage).height) || (cvGetSize(inputimage).width != cvGetSize(mBackgroundImage).width)) {
        AddError(wxT("Input and background images don't have the same size."));
        return;
    }

    try {
        // Correct the inputimage with the difference in image mean
        if (mCorrectMean) {
            cvAddS(inputimage, cvScalar(mBackgroundImageMean.val[0] - cvAvg(inputimage).val[0]), inputimage);
        }

        // Background subtraction
        if (mMode == sMode_SubImageBackground) {
            cvSub(inputimage, mBackgroundImage, inputimage);
        } else if (mMode == sMode_SubBackgroundImage) {
            cvSub(mBackgroundImage, inputimage, inputimage);
        } else {
            cvAbsDiff(inputimage, mBackgroundImage, inputimage);
        }
    } catch (...) {
        AddError(wxT("Background subtraction failed."));
    }

    // Set the display
    DisplayEditor de(&mDisplayOutput);
    if (de.IsActive()) {
        de.SetMainImage(inputimage);
    }
}
double pkmGaussianMixtureModel::multinormalDistribution(const CvMat *pts, const CvMat *mean, const CvMat *covar)
{
    int dimensions = 2;

    // add a tiny bit because of small samples
    CvMat *covarShifted = cvCreateMat(2, 2, CV_64FC1);
    cvAddS( covar, cvScalarAll(0.001), covarShifted );

    // calculate the determinant
    double det = cvDet(covarShifted);

    // invert covariance
    CvMat *covarInverted = cvCreateMat(2, 2, CV_64FC1);
    cvInvert(covarShifted, covarInverted);

    double ff = (1.0/(2.0*(double)PI)) * (pow(det, -0.5));

    CvMat *centered = cvCreateMat(2, 1, CV_64FC1);
    cvSub(pts, mean, centered);

    CvMat *invxmean = cvCreateMat(2, 1, CV_64FC1);
    //cvGEMM(covarInverted, centered, 1., NULL, 1., invxmean);
    cvMatMul(covarInverted, centered, invxmean);

    cvMul(centered, invxmean, invxmean);
    CvScalar sum = cvSum(invxmean);

    /*
    printf("covar: %f %f %f %f\n", cvmGet(covar, 0, 0), cvmGet(covar, 0, 1), cvmGet(covar, 1, 0), cvmGet(covar, 1, 1));
    printf("covarShifted: %f %f %f %f\n", cvmGet(covarShifted, 0, 0), cvmGet(covarShifted, 0, 1), cvmGet(covarShifted, 1, 0), cvmGet(covarShifted, 1, 1));
    printf("det: %f\n", det);
    printf("covarInverted: %f %f %f %f\n", cvmGet(covarInverted, 0, 0), cvmGet(covarInverted, 0, 1), cvmGet(covarInverted, 1, 0), cvmGet(covarShifted, 1, 1));
    printf("ff: %f\n", ff);
    printf("pts: %f %f)\n", cvmGet(pts, 0, 0), cvmGet(pts, 1, 0));
    printf("mean: %f %f)\n", cvmGet(mean, 0, 0), cvmGet(mean, 1, 0));
    printf("centered: %f %f)\n", cvmGet(centered, 0, 0), cvmGet(centered, 1, 0));
    printf("invxmean: %f %f)\n", cvmGet(invxmean, 0, 0), cvmGet(invxmean, 1, 0));
    printf("scalar: %f %f %f %f\n", sum.val[0], sum.val[1], sum.val[2], sum.val[3]);
    */

    cvReleaseMat(&covarShifted);
    cvReleaseMat(&covarInverted);
    cvReleaseMat(&centered);
    cvReleaseMat(&invxmean);

    return ff * exp(-0.5*sum.val[0]);
}
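For reference, the returned value is the bivariate normal density at pts, with 0.001 added to every covariance entry as the "tiny bit" regularization:

\[ f(\mathbf{x}) = \frac{1}{2\pi\,\lvert\Sigma'\rvert^{1/2}} \exp\!\left(-\tfrac{1}{2}(\mathbf{x}-\boldsymbol{\mu})^{\top}\Sigma'^{-1}(\mathbf{x}-\boldsymbol{\mu})\right), \qquad \Sigma'_{ij} = \Sigma_{ij} + 0.001 \]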
int main(int argc, char** argv)
{
    IplImage* interest_img;
    CvRect interest_rect;

    if( argc == 7 && ((interest_img = cvLoadImage(argv[1], 1)) != 0) ) {
        interest_rect.x = atoi(argv[2]);
        interest_rect.y = atoi(argv[3]);
        interest_rect.width = atoi(argv[4]);
        interest_rect.height = atoi(argv[5]);
        int add = atoi(argv[6]);

        // Assuming IplImage *interest_img; and
        //          CvRect interest_rect;
        // Use widthStep to get a region of interest
        // (Alternate method)
        IplImage *sub_img = cvCreateImageHeader(
            cvSize( interest_rect.width, interest_rect.height ),
            interest_img->depth,
            interest_img->nChannels );
        sub_img->origin = interest_img->origin;
        sub_img->widthStep = interest_img->widthStep;
        sub_img->imageData = interest_img->imageData +
            interest_rect.y * interest_img->widthStep +
            interest_rect.x * interest_img->nChannels;

        cvAddS( sub_img, cvScalar(add), sub_img );

        cvReleaseImageHeader(&sub_img);

        cvNamedWindow( "Roi_Add", CV_WINDOW_AUTOSIZE );
        cvShowImage( "Roi_Add", interest_img );
        cvWaitKey();
    }
    return 0;
}
int main(int argc, char** argv)
{
    IplImage* src;
    if (argc == 7 && ((src = cvLoadImage(argv[1], 1)) != 0)) {
        int x = atoi(argv[2]);
        int y = atoi(argv[3]);
        int width = atoi(argv[4]);
        int height = atoi(argv[5]);
        int add = atoi(argv[6]);

        cvSetImageROI(src, cvRect(x, y, width, height));
        cvAddS(src, cvScalar(add), src);
        cvResetImageROI(src);

        cvNamedWindow("Roi_add", 1);
        cvShowImage("Roi_add", src);
        cvWaitKey();
    }
    return 0;
}
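For comparison, the same ROI brightening in the C++ API; a minimal sketch, assuming OpenCV 2.x or later, with a placeholder file name and rectangle:

#include <opencv2/opencv.hpp>

int main()
{
    cv::Mat img = cv::imread("image.jpg");           // placeholder path
    if (img.empty()) return 1;

    cv::Mat roi = img(cv::Rect(10, 10, 120, 120));   // view into img, no copy
    roi += cv::Scalar(50, 50, 50);                   // saturating per-channel add, like cvAddS

    cv::imshow("Roi_add", img);
    cv::waitKey();
    return 0;
}

Because img(cv::Rect(...)) returns a view, adding to roi modifies the original image in place, just as cvSetImageROI plus cvAddS does above.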
void BModel::wiener_filter_chanel(IplImage *channel, IplImage *kernel, const double sigma)
{
    IplImage *fKernel  = cvCreateImage(cvGetSize(kernel),  IPL_DEPTH_64F, 2);
    IplImage *fChannel = cvCreateImage(cvGetSize(channel), IPL_DEPTH_64F, 2);
    IplImage *answ     = cvCreateImage(cvGetSize(channel), IPL_DEPTH_64F, 2);

    IplImage *reFKernel  = cvCreateImage(cvGetSize(kernel), IPL_DEPTH_64F, 1);
    IplImage *imFKernel  = cvCreateImage(cvGetSize(kernel), IPL_DEPTH_64F, 1);
    IplImage *reFChannel = cvCreateImage(cvGetSize(kernel), IPL_DEPTH_64F, 1);
    IplImage *imFChannel = cvCreateImage(cvGetSize(kernel), IPL_DEPTH_64F, 1);
    IplImage *reAnsw = cvCreateImage(cvGetSize(channel), IPL_DEPTH_64F, 1);
    IplImage *imAnsw = cvCreateImage(cvGetSize(channel), IPL_DEPTH_64F, 1);

    cvDFT(kernel, fKernel, CV_DXT_FORWARD, channel->height);
    cvDFT(channel, fChannel, CV_DXT_FORWARD, channel->height);

    cvMulSpectrums(fChannel, fKernel, answ, CV_DXT_MUL_CONJ);

    cvSplit(answ, reAnsw, imAnsw, 0, 0);
    cvSplit(fKernel, reFKernel, imFKernel, 0, 0);

    cvPow(reFKernel, reFKernel, 2);
    cvPow(imFKernel, imFKernel, 2);
    cvAdd(reFKernel, imFKernel, reFKernel, 0);
    cvAddS(reFKernel, cvScalarAll(sigma), reFKernel, 0);

    cvDiv(reAnsw, reFKernel, reAnsw, 1);
    cvDiv(imAnsw, reFKernel, imAnsw, 1);

    cvMerge(reAnsw, imAnsw, NULL, NULL, answ);
    cvDFT(answ, answ, CV_DXT_INV_SCALE, channel->height);
    cvCopy(answ, channel);

    cvReleaseImage(&fKernel);
    cvReleaseImage(&fChannel);
    cvReleaseImage(&answ);
    cvReleaseImage(&reFKernel);
    cvReleaseImage(&imFKernel);
    cvReleaseImage(&reFChannel);
    cvReleaseImage(&imFChannel);
    cvReleaseImage(&reAnsw);
    cvReleaseImage(&imAnsw);
}
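In the frequency domain this is the regularized (Wiener-style) deconvolution, with G the spectrum of channel, H the spectrum of kernel, and sigma acting as a constant noise term:

\[ \hat{F}(u,v) = \frac{G(u,v)\,\overline{H(u,v)}}{\lvert H(u,v)\rvert^{2} + \sigma} \]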
int main()
{
    //initialize
    IplImage* src_image = 0;
    IplImage* bright_image = 0;
    IplImage* dark_image = 0;

    //load image (flag 0 = grayscale)
    src_image = cvLoadImage("/Users/ihong-gyu/MyProject/OpenCVTest/Lena.jpeg", 0);

    //create windows
    cvNamedWindow("Original Image", CV_WINDOW_AUTOSIZE);
    cvNamedWindow("Bright Image", CV_WINDOW_AUTOSIZE);
    cvNamedWindow("Dark Image", CV_WINDOW_AUTOSIZE);

    //create images
    bright_image = cvCreateImage(cvGetSize(src_image), IPL_DEPTH_8U, 1);
    dark_image = cvCreateImage(cvGetSize(src_image), IPL_DEPTH_8U, 1);

    //add and subtract a constant
    cvAddS(src_image, CV_RGB(60, 60, 60), bright_image, NULL);
    cvSubS(src_image, CV_RGB(60, 60, 60), dark_image, NULL);

    //show the images
    cvShowImage("Original Image", src_image);
    cvShowImage("Bright Image", bright_image);
    cvShowImage("Dark Image", dark_image);

    //wait for a key
    cvWaitKey(0);

    //release the images
    cvReleaseImage(&src_image);
    cvReleaseImage(&bright_image);
    cvReleaseImage(&dark_image);

    return 0;
}
void TBackgroundVuMeter::Reset(void)
{
    float fVal = 0.0;

    TBackground::Reset();

    if (m_pHist != NULL) {
        // fVal = (m_nBinCount != 0) ? (float)(1.0 / (double)m_nBinCount) : (float)0.0;
        fVal = 0.0;

        for (int i = 0; i < m_nBinCount; ++i) {
            if (m_pHist[i] != NULL) {
                cvSetZero(m_pHist[i]);
                cvAddS(m_pHist[i], cvScalar(fVal), m_pHist[i]);
            }
        }
    }
    m_nCount = 0;
}
// ch3_ex3_12 image_name x y width height add#
int main(int argc, char** argv)
{
    IplImage* src = 0;
    cvNamedWindow("Example3_12_pre", CV_WINDOW_AUTOSIZE);
    cvNamedWindow("Example3_12_post", CV_WINDOW_AUTOSIZE);

    if (argc == 7 && ((src = cvLoadImage(argv[1], 1)) != 0)) {
        int x = atoi(argv[2]);
        int y = atoi(argv[3]);
        int width = atoi(argv[4]);
        int height = atoi(argv[5]);
        int add = atoi(argv[6]);

        cvShowImage("Example3_12_pre", src);
        cvSetImageROI(src, cvRect(x, y, width, height));
        cvAddS(src, cvScalar(add), src);
        cvResetImageROI(src);
        cvShowImage("Example3_12_post", src);
        cvWaitKey();
    }
    cvReleaseImage(&src);
    cvDestroyWindow("Example3_12_pre");
    cvDestroyWindow("Example3_12_post");
    return 0;
}
int main(int argc, char** argv)
{
    IplImage *interest_img = cvLoadImage(argv[1]);
    CvRect interest_rect = cvRect(10, 10, 120, 120);

    // Build an image header that points directly into interest_img's pixel data
    IplImage *sub_img = cvCreateImageHeader(
        cvSize(interest_rect.width, interest_rect.height),
        interest_img->depth,
        interest_img->nChannels );
    sub_img->origin = interest_img->origin;
    sub_img->widthStep = interest_img->widthStep;
    sub_img->imageData = interest_img->imageData +
        interest_rect.y * interest_img->widthStep +
        interest_rect.x * interest_img->nChannels;

    cvAddS(sub_img, cvScalar(1), sub_img);

    cvNamedWindow("Widthstep_add", 1);
    cvShowImage("Widthstep_add", sub_img);
    cvWaitKey();

    cvReleaseImageHeader(&sub_img);
    return 0;
}
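The pointer arithmetic above is the standard row-major addressing for an 8-bit IplImage, where widthStep is the row stride in bytes (for deeper pixel types the x term is additionally multiplied by the per-channel byte size):

\[ \mathrm{addr}(x, y) = \mathrm{imageData} + y \cdot \mathrm{widthStep} + x \cdot \mathrm{nChannels} \]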
int main(int argc, char* argv[])
{
    IplImage* src = cvLoadImage(argv[1]);
    if (argc == 7 && src != NULL) {
        int x = atoi(argv[2]);
        int y = atoi(argv[3]);
        int width = atoi(argv[4]);
        int height = atoi(argv[5]);
        int add_t = atoi(argv[6]);

        cvSetImageROI(src, cvRect(x, y, width, height));
        // Image data is stored in BGR order: add_t is added to the blue channel, 250 to the green channel
        cvAddS(src, cvScalar(add_t, 250), src);
        cvResetImageROI(src);

        cvNamedWindow("output", 1);
        cvShowImage("output", src);
        cvWaitKey();
    }
    return 0;
}
void StdDCT()
{
    allocateImages();

    if (!cvIm32F) {
        cvIm32Fin = cvCreateImage(cvGetSize(cvImGray), IPL_DEPTH_32F, 1);
        cvIm32F   = cvCreateImage(cvGetSize(cvImGray), IPL_DEPTH_32F, 1);
        cvImDCT   = cvCreateImage(cvGetSize(cvImGray), IPL_DEPTH_32F, 1);
    }

    cvConvertScale(cvImGray, cvIm32Fin);
    cvCopy(cvIm32Fin, cvIm32F);
    cvDCT(cvIm32F, cvImDCT, CV_DXT_FORWARD);

    /* OUTPUTS
     * char * LogDCT_outputnames_list[] = { "LogDCT", "Log DCT Cropped", "Log DCT Inv", "DCT Inv", "Final"};
     */
    if (StdDCT_output.curitem == 0) {  // "StdDCT"
        cvConvertScale(cvImDCT, cvImGray, 1.);
    }

    // Reduce low DCT
    float rad = (float)StdDCT_radius / 100.f;
    for (int r = 0; r < cvImDCT->height; r++) {
        float fr = (float)r / (float)cvImDCT->height;
        if (fr > rad) {
            float *line = (float*)(cvImDCT->imageData + r*cvImDCT->widthStep);
            for (int c = 0; c < cvImDCT->width; c++) {
                float fc = (float)c / (float)cvImDCT->width;
                // float dc = fc*fc;
                // float dr = fr*fr;
                if (fc > rad) {
                    line[c] = 0;
                }
            }
        }
    }

    if (StdDCT_output.curitem == 1) {  // "Log DCT Cropped"
        cvConvertScale(cvImDCT, cvImGray, 1.);
    }

    cvDCT(cvImDCT, cvIm32F, CV_DXT_INVERSE);
    if (StdDCT_output.curitem == 2) {  // "StdDCT Inv"
        cvConvertScale(cvIm32F, cvImGray, 1.);
    }

    cvConvertScale(cvIm32F, cvImDCT, LogDCT_coef);
    if (StdDCT_output.curitem == 3) {  // "DCT Inv"
        cvConvertScale(cvImDCT, cvImGray, 1.);
    }

    // Subtract the low-pass image from the input image
    cvSub(cvIm32Fin, cvImDCT, cvIm32F);
    if (StdDCT_output.curitem == 4) {  // "DCT Inv - scal"
        cvConvertScale(cvIm32F, cvImGray, 1.);
    }

    cvAddS(cvIm32F, cvScalarAll(LogDCT_add), cvImDCT);
    if (StdDCT_output.curitem == 5) {  // "Out"
        cvConvertScale(cvImDCT, cvImGray, 1.);
    }

    finishImages();
}
//--------------------------------------------------------------------------------
//--------------------------------------------------------------------------------
CvMat *tgso(CvMat &tmap, int ntex, double sigma, double theta, CvMat &tsim, int useChi2)
{
    CvMat *roundTmap = cvCreateMat(tmap.rows, tmap.cols, CV_32FC1);
    CvMat *comp = cvCreateMat(tmap.rows, tmap.cols, CV_32FC1);

    for (int i = 0; i < tmap.rows; i++)
        for (int j = 0; j < tmap.cols; j++)
            cvSetReal2D(roundTmap, i, j, cvRound(cvGetReal2D(&tmap, i, j)));

    cvSub(&tmap, roundTmap, comp);
    if (cvCountNonZero(comp)) {
        printf("texton labels not integral\n");
        cvReleaseMat(&roundTmap);
        cvReleaseMat(&comp);
        exit(1);
    }

    double min, max;
    cvMinMaxLoc(&tmap, &min, &max);
    if (min < 1 && max > ntex) {
        char *msg = new char[50];
        sprintf(msg, "texton labels out of range [1,%d]", ntex);
        printf("%s\n", msg);
        cvReleaseMat(&roundTmap);
        cvReleaseMat(&comp);
        exit(1);
    }
    cvReleaseMat(&roundTmap);
    cvReleaseMat(&comp);

    double wr = floor(sigma);  //sigma=radius (Leo)
    CvMat *x = cvCreateMat(1, wr-(-wr)+1, CV_64FC1);
    CvMat *y = cvCreateMat(wr-(-wr)+1, 1, CV_64FC1);
    CvMat *u = cvCreateMat(wr-(-wr)+1, wr-(-wr)+1, CV_64FC1);
    CvMat *v = cvCreateMat(wr-(-wr)+1, wr-(-wr)+1, CV_64FC1);
    CvMat *gamma = cvCreateMat(u->rows, v->rows, CV_64FC1);

    // Set x,y directions
    for (int j = -wr; j <= wr; j++) {
        cvSetReal2D(x, 0, (j+wr), j);
        cvSetReal2D(y, (j+wr), 0, j);
    }

    // Set u,v meshgrids
    for (int i = 0; i < u->rows; i++) {
        cvRepeat(x, u);
        cvRepeat(y, v);
    }

    // Compute the gamma matrix from the grid
    for (int i = 0; i < u->rows; i++)
        for (int j = 0; j < u->cols; j++)
            cvSetReal2D(gamma, i, j, atan2(cvGetReal2D(v, i, j), cvGetReal2D(u, i, j)));

    cvReleaseMat(&x);
    cvReleaseMat(&y);

    CvMat *sum = cvCreateMat(u->rows, u->cols, CV_64FC1);
    cvMul(u, u, u);
    cvMul(v, v, v);
    cvAdd(u, v, sum);

    CvMat *mask = cvCreateMat(u->rows, u->cols, CV_8UC1);
    cvCmpS(sum, sigma*sigma, mask, CV_CMP_LE);
    cvConvertScale(mask, mask, 1.0/255);
    cvSetReal2D(mask, wr, wr, 0);
    int count = cvCountNonZero(mask);

    cvReleaseMat(&u);
    cvReleaseMat(&v);
    cvReleaseMat(&sum);

    CvMat *sub = cvCreateMat(mask->rows, mask->cols, CV_64FC1);
    CvMat *side = cvCreateMat(mask->rows, mask->cols, CV_8UC1);

    cvSubS(gamma, cvScalar(theta), sub);
    cvReleaseMat(&gamma);

    for (int i = 0; i < mask->rows; i++) {
        for (int j = 0; j < mask->cols; j++) {
            double n = cvmGet(sub, i, j);
            double n_mod = n - floor(n/(2*M_PI))*2*M_PI;
            cvSetReal2D(side, i, j, 1 + int(n_mod < M_PI));
        }
    }

    cvMul(side, mask, side);
    cvReleaseMat(&sub);
    cvReleaseMat(&mask);

    CvMat *lmask = cvCreateMat(side->rows, side->cols, CV_8UC1);
    CvMat *rmask = cvCreateMat(side->rows, side->cols, CV_8UC1);
    cvCmpS(side, 1, lmask, CV_CMP_EQ);
    cvCmpS(side, 2, rmask, CV_CMP_EQ);
    int count1 = cvCountNonZero(lmask), count2 = cvCountNonZero(rmask);
    if (count1 != count2) {
        printf("Bug: imbalance\n");
    }

    CvMat *rlmask = cvCreateMat(side->rows, side->cols, CV_32FC1);
    CvMat *rrmask = cvCreateMat(side->rows, side->cols, CV_32FC1);
    cvConvertScale(lmask, rlmask, 1.0/(255*count)*2);
    cvConvertScale(rmask, rrmask, 1.0/(255*count)*2);

    cvReleaseMat(&lmask);
    cvReleaseMat(&rmask);
    cvReleaseMat(&side);

    int h = tmap.rows;
    int w = tmap.cols;

    CvMat *d = cvCreateMat(h*w, ntex, CV_32FC1);
    CvMat *coltemp = cvCreateMat(h*w, 1, CV_32FC1);
    CvMat *tgL = cvCreateMat(h, w, CV_32FC1);
    CvMat *tgR = cvCreateMat(h, w, CV_32FC1);
    CvMat *temp = cvCreateMat(h, w, CV_8UC1);
    CvMat *im = cvCreateMat(h, w, CV_32FC1);
    CvMat *sub2 = cvCreateMat(h, w, CV_32FC1);
    CvMat *sub2t = cvCreateMat(w, h, CV_32FC1);
    CvMat *prod = cvCreateMat(h*w, ntex, CV_32FC1);
    CvMat reshapehdr, *reshape;

    CvMat *tgL_pad = cvCreateMat(h+rlmask->rows-1, w+rlmask->cols-1, CV_32FC1);
    CvMat *tgR_pad = cvCreateMat(h+rlmask->rows-1, w+rlmask->cols-1, CV_32FC1);
    CvMat *im_pad  = cvCreateMat(h+rlmask->rows-1, w+rlmask->cols-1, CV_32FC1);

    CvMat *tg = cvCreateMat(h, w, CV_32FC1);
    cvZero(tg);

    if (useChi2 == 1) {
        CvMat *temp_add1 = cvCreateMat(h, w, CV_32FC1);
        for (int i = 0; i < ntex; i++) {
            cvCmpS(&tmap, i+1, temp, CV_CMP_EQ);
            cvConvertScale(temp, im, 1.0/255);

            cvCopyMakeBorder(tgL, tgL_pad, cvPoint((rlmask->cols-1)/2, (rlmask->rows-1)/2), IPL_BORDER_CONSTANT);
            cvCopyMakeBorder(tgR, tgR_pad, cvPoint((rlmask->cols-1)/2, (rlmask->rows-1)/2), IPL_BORDER_CONSTANT);
            cvCopyMakeBorder(im, im_pad, cvPoint((rlmask->cols-1)/2, (rlmask->rows-1)/2), IPL_BORDER_CONSTANT);

            cvFilter2D(im_pad, tgL_pad, rlmask, cvPoint((rlmask->cols-1)/2, (rlmask->rows-1)/2));
            cvFilter2D(im_pad, tgR_pad, rrmask, cvPoint((rlmask->cols-1)/2, (rlmask->rows-1)/2));

            cvGetSubRect(tgL_pad, tgL, cvRect((rlmask->cols-1)/2, (rlmask->rows-1)/2, tgL->cols, tgL->rows));
            cvGetSubRect(tgR_pad, tgR, cvRect((rlmask->cols-1)/2, (rlmask->rows-1)/2, tgR->cols, tgR->rows));

            cvSub(tgL, tgR, sub2);
            cvPow(sub2, sub2, 2.0);
            cvAdd(tgL, tgR, temp_add1);
            cvAddS(temp_add1, cvScalar(0.0000000001), temp_add1);
            cvDiv(sub2, temp_add1, sub2);
            cvAdd(tg, sub2, tg);
        }
        cvScale(tg, tg, 0.5);
        cvReleaseMat(&temp_add1);
    }
    else {  // if not chi^2
        for (int i = 0; i < ntex; i++) {
            cvCmpS(&tmap, i+1, temp, CV_CMP_EQ);
            cvConvertScale(temp, im, 1.0/255);

            cvCopyMakeBorder(tgL, tgL_pad, cvPoint((rlmask->cols-1)/2, (rlmask->rows-1)/2), IPL_BORDER_CONSTANT);
            cvCopyMakeBorder(tgR, tgR_pad, cvPoint((rlmask->cols-1)/2, (rlmask->rows-1)/2), IPL_BORDER_CONSTANT);
            cvCopyMakeBorder(im, im_pad, cvPoint((rlmask->cols-1)/2, (rlmask->rows-1)/2), IPL_BORDER_CONSTANT);

            cvFilter2D(im_pad, tgL_pad, rlmask, cvPoint((rlmask->cols-1)/2, (rlmask->rows-1)/2));
            cvFilter2D(im_pad, tgR_pad, rrmask, cvPoint((rlmask->cols-1)/2, (rlmask->rows-1)/2));

            cvGetSubRect(tgL_pad, tgL, cvRect((rlmask->cols-1)/2, (rlmask->rows-1)/2, tgL->cols, tgL->rows));
            cvGetSubRect(tgR_pad, tgR, cvRect((rlmask->cols-1)/2, (rlmask->rows-1)/2, tgR->cols, tgR->rows));

            cvSub(tgL, tgR, sub2);
            cvAbs(sub2, sub2);
            cvTranspose(sub2, sub2t);
            reshape = cvReshape(sub2t, &reshapehdr, 0, h*w);
            cvGetCol(d, coltemp, i);
            cvCopy(reshape, coltemp);
        }

        cvMatMul(d, &tsim, prod);
        cvMul(prod, d, prod);

        CvMat *sumcols = cvCreateMat(h*w, 1, CV_32FC1);
        cvSetZero(sumcols);
        for (int i = 0; i < prod->cols; i++) {
            cvGetCol(prod, coltemp, i);
            cvAdd(sumcols, coltemp, sumcols);
        }
        reshape = cvReshape(sumcols, &reshapehdr, 0, w);
        cvTranspose(reshape, tg);
        cvReleaseMat(&sumcols);
    }

    //Smooth the gradient now!!
    tg = fitparab(*tg, sigma, sigma/4, theta);
    cvMaxS(tg, 0, tg);

    cvReleaseMat(&im_pad);
    cvReleaseMat(&tgL_pad);
    cvReleaseMat(&tgR_pad);
    cvReleaseMat(&rlmask);
    cvReleaseMat(&rrmask);
    cvReleaseMat(&im);
    cvReleaseMat(&tgL);
    cvReleaseMat(&tgR);
    cvReleaseMat(&temp);
    cvReleaseMat(&coltemp);
    cvReleaseMat(&sub2);
    cvReleaseMat(&sub2t);
    cvReleaseMat(&d);
    cvReleaseMat(&prod);

    return tg;
}
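When useChi2 is 1, the loop above accumulates the chi-squared distance between the left and right half-disc texton responses tgL and tgR, with the small constant guarding against division by zero:

\[ tg = \frac{1}{2}\sum_{i=1}^{\text{ntex}} \frac{(tgL_i - tgR_i)^2}{tgL_i + tgR_i + 10^{-10}} \]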
CvMat* cvShowDFT1(IplImage* im, int dft_M, int dft_N, char* src)
{
    IplImage* realInput;
    IplImage* imaginaryInput;
    IplImage* complexInput;
    CvMat* dft_A, tmp;
    IplImage* image_Re;
    IplImage* image_Im;
    char str[80];
    double m, M;

    realInput = cvCreateImage( cvGetSize(im), IPL_DEPTH_64F, 1);
    imaginaryInput = cvCreateImage( cvGetSize(im), IPL_DEPTH_64F, 1);
    complexInput = cvCreateImage( cvGetSize(im), IPL_DEPTH_64F, 2);

    cvScale(im, realInput, 1.0, 0.0);
    cvZero(imaginaryInput);
    cvMerge(realInput, imaginaryInput, NULL, NULL, complexInput);

    dft_A = cvCreateMat( dft_M, dft_N, CV_64FC2 );

    image_Re = cvCreateImage( cvSize(dft_N, dft_M), IPL_DEPTH_64F, 1);
    image_Im = cvCreateImage( cvSize(dft_N, dft_M), IPL_DEPTH_64F, 1);

    // copy A to dft_A and pad dft_A with zeros
    cvGetSubRect( dft_A, &tmp, cvRect(0, 0, im->width, im->height) );
    cvCopy( complexInput, &tmp, NULL );
    if( dft_A->cols > im->width ) {
        cvGetSubRect( dft_A, &tmp, cvRect(im->width, 0, dft_A->cols - im->width, im->height) );
        cvZero( &tmp );
    }

    // no need to pad the bottom part of dft_A with zeros because of
    // the nonzero_rows parameter in the cvDFT() call below
    cvDFT( dft_A, dft_A, CV_DXT_FORWARD, complexInput->height );

    strcpy(str, "DFT -");
    strcat(str, src);
    cvNamedWindow(str, 0);

    // Split Fourier in real and imaginary parts
    cvSplit( dft_A, image_Re, image_Im, 0, 0 );

    // Compute the magnitude of the spectrum Mag = sqrt(Re^2 + Im^2)
    cvPow( image_Re, image_Re, 2.0 );
    cvPow( image_Im, image_Im, 2.0 );
    cvAdd( image_Re, image_Im, image_Re, NULL );
    cvPow( image_Re, image_Re, 0.5 );

    // Compute log(1 + Mag)
    cvAddS( image_Re, cvScalarAll(1.0), image_Re, NULL );  // 1 + Mag
    cvLog( image_Re, image_Re );                           // log(1 + Mag)

    cvMinMaxLoc(image_Re, &m, &M, NULL, NULL, NULL);
    cvScale(image_Re, image_Re, 1.0/(M-m), 1.0*(-m)/(M-m));
    cvShowImage(str, image_Re);

    return dft_A;
}