Пример #1
0
void OpcodeDeltaPerRow::apply( RawImage &in, RawImage &out, int startY, int endY )
{
  // Adds a per-row delta (mDelta[y]) to every sampled pixel inside the area
  // of interest, for rows [startY, endY) stepped by mRowPitch and columns
  // stepped by mColPitch.
  // NOTE(review): the data type is queried on 'in' but the pixels are
  // read/written through 'out' -- presumably both refer to the same
  // underlying image; confirm with callers.
  if (in->getDataType() == TYPE_USHORT16) {
    int cpp = out->getCpp();
    for (int y = startY; y < endY; y += mRowPitch) {
      ushort16 *src = (ushort16*)out->getData(mAoi.getLeft(), y);
      // Add offset, so this is always first plane
      src+=mFirstPlane;
      // Scale the normalized per-row delta up to the 16-bit integer range.
      int delta = (int)(65535.0f * mDelta[y]);
      for (int x = 0; x < mAoi.getWidth(); x += mColPitch) {
        for (int p = 0; p < mPlanes; p++)
        {
          // Clamp to 16 bits so the addition cannot wrap around.
          src[x*cpp+p] = clampbits(16,delta + src[x*cpp+p]);
        }
      }
    }
  } else {
    // Floating-point image: apply the per-row delta directly, no clamping.
    int cpp = out->getCpp();
    for (int y = startY; y < endY; y += mRowPitch) {
      float *src = (float*)out->getData(mAoi.getLeft(), y);
      // Add offset, so this is always first plane
      src+=mFirstPlane;
      float delta = mDelta[y];
      for (int x = 0; x < mAoi.getWidth(); x += mColPitch) {
        for (int p = 0; p < mPlanes; p++)
        {
          src[x*cpp+p] = delta + src[x*cpp+p];
        }
      }
    }
  }
}
Пример #2
0
void evaluate(string dir, int l, int m, int n)
{
	// Computes a per-voxel difference volume (outer - inner surface) for the
	// dataset in output/<dir> and writes it out as an 8-bit raw file.
	//   dir     - dataset sub-directory appended to the 'output' base path
	//   l, m, n - volume dimensions (x, y, z); may be updated by readStreamfloat
	RawImage test;
	char dst[100];
	strcpy(dst, dir.c_str());
	char dir2[200] = output;
	strcat(dir2, dst);

	// Load the outer-surface volume.
	char dir3[300];
	strcpy(dir3, dir2);
	strcat(dir3, "outer5-8_2_20140405.raw");
	float * indata1 = test.readStreamfloat(dir3, &l, &m, &n);

	// Load the inner-surface volume.
	char dir4[300];
	strcpy(dir4, dir2);
	strcat(dir4, "inner5-8_2.raw");
	float * indata2 = test.readStreamfloat(dir4, &l, &m, &n);

	// Subtract inner from outer voxel-by-voxel (flat layout: slice, row, col).
	for (int k = 0; k < n; k++)
	{
		for (int j = 0; j < m; j++)
		{
			for (int i = 0; i < l; i++)
			{
				indata1[k*m*l + j*l + i] -= indata2[k*m*l + j*l + i];
			}
		}
	}
	delete[] indata2;

	// Convert the difference volume to unsigned char (truncating cast).
	unsigned char * indata1char = new unsigned char[l*m*n];
	for (int i = 0; i < l*m*n; i++)
	{
		indata1char[i] = (unsigned char)indata1[i];
	}
	delete[] indata1;

	// Write the thickness volume. Fixed: check fopen so a bad path cannot
	// pass a NULL FILE* to fwrite/fclose (the original dereferenced blindly).
	char dir5[300];
	strcpy(dir5, dir2);
	strcat(dir5, "thickness5-8_2_20140405.raw");
	FILE *p = fopen(dir5, "wb");
	if (p != NULL)
	{
		fwrite(indata1char, sizeof(unsigned char), l*m*n, p);
		fclose(p);
	}
	else
	{
		fprintf(stderr, "evaluate: cannot open %s for writing\n", dir5);
	}
	delete[] indata1char;
}
Пример #3
0
ReturnType ZoomInOut::onExecute()
{
	// Pops one frame from the input port, rescales it by m_zoom with OpenCV,
	// and pushes the zoomed frame to the output port.
	// Acquire the image from the in-port
	opros_any *pData = ImageIn.pop();
	RawImage result;

	if(pData != NULL){
		
		// Fetch the image from the port
		RawImage Image = ImageIn.getContent(*pData);
		RawImageData *RawImage = Image.getImage();

		// Get the size of the current frame
		m_in_width = RawImage->getWidth();
		m_in_height = RawImage->getHeight();

		// Allocate the buffer for the original frame (first call only)
		if(m_orig_img == NULL){
			m_orig_img = cvCreateImage(cvSize(m_in_width, m_in_height), IPL_DEPTH_8U, 3);
		}
		
		// Allocate the result buffer once, sized by the zoom factor.
		// NOTE(review): m_result_img is never re-created if m_zoom changes
		// after the first frame (m_pre_zoom is stored but never compared) --
		// confirm whether zoom is fixed for the component's lifetime.
		if(m_result_img == NULL){

			m_pre_zoom = m_zoom;

			m_result_img = cvCreateImage(cvSize((int)(m_in_width * m_zoom), 
					(int)(m_in_height * m_zoom)), IPL_DEPTH_8U, 3);

		}

		// Copy the raw frame bytes into the OpenCV buffer
		memcpy(m_orig_img->imageData, RawImage->getData(), RawImage->getSize());

		cvResize(m_orig_img, m_result_img, m_interpolation);		
		
		// Get the RawImage data pointer of the outgoing frame
		RawImageData *pimage = result.getImage();
		
		// Resize it to the zoomed width/height/channel count
		pimage->resize(m_result_img->width, m_result_img->height, m_result_img->nChannels);
		
		// Total byte count of the zoomed frame
		int size = m_result_img->width * m_result_img->height * m_result_img->nChannels;
		
		// Destination pointer for the outgoing pixel data
		unsigned char *ptrdata = pimage->getData();
		
		// Copy the zoomed frame into the outgoing RawImage
		memcpy(ptrdata, m_result_img->imageData, size);

		// Push to the out-port
		opros_any mdata = result;
		ImageOut.push(result);// send

		delete pData;
	}

	return OPROS_SUCCESS;
}
Пример #4
0
void testcolonway2(int argc, string dir)
{

	char *pt = "single_well";
	int l = 0, m = 0, n = 0, l1 = 0, l2 = 0, iter_outer = 10;
	RawImage test;
	char dirhead[200] = input2;  //K:\\sdf\\volume\\clean\\clean\\ep\\

	char dirbody[100];
	strcpy(dirbody, dir.c_str());
	cout << "dirbody" << dirbody << endl;
	strcat(dirhead, dirbody);
	cout << "dirhead" << dirhead << endl;
	short * indata = test.readStream(dirhead, &l, &m, &n);
	Raw *initial = new Raw(l, m, n);
	float *inputo = new float[l*m*n];
	for (int i = 0; i < l*m*n; i++)
	{
		inputo[i] = (float)indata[i];
	}

	Raw *input = new Raw(l, m, n, inputo);
	Filter *f = new Filter();
	//input=f->guass3DFilter(input,3);
	RawImage *write = new RawImage();
	ThreeDim_LevelSet *ls = new ThreeDim_LevelSet();
	ls->initialg(*input);
	for (int i = 0; i<input->getXsize(); i++)
	{
		for (int j = 0; j<input->getYsize(); j++)
		{
			for (int k = 0; k<input->getZsize(); k++)
			{
				if (input->get(i, j, k) >= 1)
				{
					initial->put(i, j, k, -2);
				}
				else initial->put(i, j, k, 2);

			}
		}

	}
	*initial = ls->minimal_surface(*initial, *input, 5.0, 0.1, -3, 1.5, 1, iter_outer, pt);
	char *outname1 = "inner.raw";
	char outdir[200] = output;
	strcat(outdir, dirbody);
	strcat(outdir, outname1);
	test.writeImageName(*initial, outdir);
	//Raw temp(*initial);
	ls->outerwall(*initial, *input, 5.0, 0.1, -3, 1.5, 1, 10, pt);
	//*initial -=temp;
	char *outname2 = "outer5-8_2.raw";
	char outdir2[200] = output;
	strcat(outdir2, dirbody);
	strcat(outdir2, outname2);
	test.writeImageName(*initial, outdir2);
	evaluate(dir, l, m, n);
}
Пример #5
0
void testlobster(int argc,char **argv)
{
	char *pt="single_well";
	int l=301,m=324,n=56,l1=0,l2=0,iter_outer=50;
	RawImage test;
	unsigned char * indata=new unsigned char [l*m*n];
	//Raw *inputeasy=new Raw(l,m,n);
	Raw *initial=new Raw(l,m,n);

	test.readImage(indata,"E:\\geo\\lobster.raw",301*324*56);

	float *inputo=new float[l*m*n];
	for (int i = 0; i < l*m*n; i++)
	{
		inputo[i]=(float) indata[i];
	}

	Raw *input=new Raw(l,m,n,inputo);
	int width=10;

	int x1=138,x2=144,y1=137,y2=155,z1=30,z2=35; //test for little data

	for (int i=0;i<l;i++)
	{
		for (int j=0;j<m;j++)
		{
			for (int k=0;k<n;k++)
			{

				if (i>x1 && j>y1 && k>z1 && i<x2 && j<y2 && k<z2)
				{
					initial->put(i,j,k,-2.0);
				} 
				else if (i<x1|| j<y1 || k < z1 || i>x2 || j>y2 || k> z2)
				{
					initial->put(i,j,k,2.0);
				}
				else {
					initial->put(i,j,k,-2.0);
				}

			}
		}
	}
	RawImage *write=new RawImage();

	ThreeDim_LevelSet *ls=new ThreeDim_LevelSet();
	//IShowraw(*input,argc,argv);
	ls->initialg(*input);	
	ls->minimal_surface(*initial,*input,5.0,0.1,-3,1.5,1,iter_outer,pt);
	IShowraw(*initial,argc,argv);
	test.writeImage(*initial);


}
Пример #6
0
RawImage* DisparityMap::ProcessInput(CommandLineArgModel* arg, RawImage* image)
{
	// Computes a disparity map from a side-by-side stereo image: the ROI is
	// split into left/right halves, matched with StereoSGBM, normalized to
	// 0..255, and returned as a new RawImage (caller owns it).
	DisparityMapModel* model = (DisparityMapModel*)arg->ParsedModel;
	Rectangle left, roi = model->Roi, right;

	// Each eye gets half of the ROI width.
	left.Width = roi.Width / 2;
	right.Width = roi.Width / 2;

	left.X = roi.X;
	right.X = roi.X + left.Width;

	left.Right = roi.Right - right.Width;
	right.Right = roi.Right;

	// Both halves share the full ROI height.
	left.Height = right.Height = roi.Height;
	left.Bottom = right.Bottom = roi.Bottom;
	left.Y = right.Y = roi.Y;

	Mat leftImg = image->CloneToMat(left);
	Mat rightImg = image->CloneToMat(right);

	// SGBM operates on single-channel input.
	cvtColor(leftImg, leftImg, CV_BGR2GRAY);
	cvtColor(rightImg, rightImg, CV_BGR2GRAY);

	Ptr<StereoSGBM> stereo = StereoSGBM::create(
		0,		//minDisparity
		144,	//numDisparities
		3,		//blockSize
		3 * 3 * 4, 	// P1 smoothness penalty
		3 * 3 * 32, 	// P2 smoothness penalty
		1,		//disp12MaxDiff
		10,		//preFilterCap
		10,		//uniquenessRatio
		100,	//speckleWindowSize
		32,		//speckleRange
		true);	//mode

	Mat disparity;
	// NOTE(review): the right image is passed as the first ("left") argument
	// and vice versa -- confirm this camera ordering is intentional.
	stereo->compute(rightImg, leftImg, disparity);

	// Normalize the disparity to the 0..255 range for export.
	double max, min;
	minMaxIdx(disparity, &min, &max);
	convertScaleAbs(disparity, disparity, 255 / max);

	cvtColor(disparity, disparity, CV_GRAY2RGB);

	// NOTE(review): hard-coded debug dump path -- consider removing or
	// making configurable.
	imwrite("F:\\1\\raw-stereo.disparity.opencv.png", disparity);
	
	RawImage* newImage = new RawImage(left.Width, left.Height);


	newImage->Import(disparity, 0, 0);

	return newImage;
}
Пример #7
0
RawImage CaptureFromFile::getFrame()
{
#ifndef VDATA_NO_QT
   mutex.lock();
#endif

  // Returns the next preloaded image as an RGB8 frame, cycling through the
  // image list. When no image is available, capture is stopped and an empty
  // 640x480 frame (null data) is returned instead.
  RawImage result;
  result.setColorFormat(COLOR_RGB8); 
  result.setTime(0.0);
  rgba* rgba_img = 0;
  int width;
  int height;
  if(images.size())
  {
    // Pick the current image and advance the wrapping cursor.
    rgba_img = images[currentImageIndex];
    width = widths[currentImageIndex];
    height = heights[currentImageIndex];
    currentImageIndex = (currentImageIndex + 1) % images.size();
  }
  if (rgba_img == 0)
  {
    fprintf (stderr, "CaptureFromFile Error, no images available");
    is_capturing=false;
    result.setData(0);
    result.setWidth(640);
    result.setHeight(480);
    frame = 0;
  }
  else
  {
    // Repack RGBA source pixels into a tightly packed RGB buffer.
    // NOTE(review): 'frame' is reassigned on every call without freeing the
    // previous buffer here -- confirm ownership/cleanup happens elsewhere.
    frame = new unsigned char[width*height*3];
    unsigned char* p = &frame[0];
    for (int i=0; i < width * height; i++)
    {
      *p = rgba_img[i].r;
      p++;
      *p = rgba_img[i].g;
      p++;
      *p = rgba_img[i].b;
      p++;
    }
    result.setWidth(width);
    result.setHeight(height);
    result.setData(frame);
    // Stamp the frame with the current wall-clock time in seconds.
    tval tv;    
    gettimeofday(&tv,0);
    result.setTime((double)tv.tv_sec + tv.tv_usec*(1.0E-6));
  }
#ifndef VDATA_NO_QT
  mutex.unlock();
#endif 
  return result;
}
Пример #8
0
ReturnType HVR2300ImageCapture::onExecute()
{
    // Grabs one Bayer frame from the HVR camera, converts it to BGR with
    // OpenCV, and pushes the converted frame to the output port.
    RawImage result;					// data-port type is RawImage

    BYTE *buff;
    buff = new BYTE[m_img_width * m_img_height];

    if(1 != HVR_camGetImageData(m_img_width * m_img_height, buff, CAMERA_NO)) {

        std::cout<<"Buffer fail"<<std::endl;
        Sleep(30);
        getFlag = false;

    } else {

        getFlag = true;

    }

    if(getFlag == true) {

        // Wrap the raw Bayer bytes and demosaic to BGR.
        // NOTE(review): m_image->imageData keeps pointing at 'buff' after the
        // delete below; make sure nothing reads m_image afterwards.
        m_image->imageData = (char*)buff;
        cvCvtColor(m_image, m_image_tp, CV_BayerGB2BGR);

        // Get the RawImage data pointer of the outgoing frame
        RawImageData *pimage = result.getImage();

        // Resize it to the converted width/height/channel count
        pimage->resize(m_image_tp->width, m_image_tp->height, m_image_tp->nChannels);

        // Total byte count of the converted frame
        int size = m_image_tp->width * m_image_tp->height * m_image_tp->nChannels;

        // Copy the converted frame into the outgoing RawImage
        unsigned char *ptrdata = pimage->getData();
        memcpy(ptrdata, m_image_tp->imageData, size);

        // Push to the out-port
        opros_any mdata = result;
        ImageOut.push(result);// send

    }

    // Fixed: the buffer was allocated with new[] but released with scalar
    // delete, which is undefined behavior -- must be delete[].
    delete[] buff;

    return OPROS_SUCCESS;
}
Пример #9
0
void OpcodeMapPolynomial::apply( RawImage &in, RawImage &out, int startY, int endY )
{
  // Remap every sampled pixel inside the area of interest through the
  // precomputed polynomial lookup table, for rows [startY, endY).
  const int cpp = out->getCpp();
  for (int row = startY; row < endY; row += mRowPitch) {
    ushort16 *pixel = (ushort16*)out->getData(mAoi.getLeft(), row);
    // Skip ahead so indexing always starts at the first targeted plane.
    pixel += mFirstPlane;
    for (int col = 0; col < mAoi.getWidth(); col += mColPitch) {
      for (int plane = 0; plane < mPlanes; plane++) {
        pixel[col*cpp+plane] = mLookup[pixel[col*cpp+plane]];
      }
    }
  }
}
Пример #10
0
void rate(string dir)
{
	// Loads the volume under input2/<dir> (printing the resolved path parts)
	// and immediately discards the data; effectively validates that the
	// dataset can be read and reports its dimensions via readStream.
	// Fixed: removed four unused locals (pt, l1, l2, iter_outer) left over
	// from a copy of the segmentation drivers.
	int l=0,m=0,n=0;
	RawImage test;
	char dirhead[200]=input2;  //K:\\sdf\\volume\\clean\\clean\\ep\\

	char dirbody[100];
	strcpy(dirbody,dir.c_str());
	cout <<"dirbody"<<dirbody<<endl;
	strcat(dirhead,dirbody);
	cout << "dirhead" << dirhead<< endl;
	short * indata=test.readStream(dirhead,&l,&m,&n);
	delete [] indata;
	
}
Пример #11
0
void OpcodeFixBadPixelsList::apply( RawImage &in, RawImage &out, int startY, int endY )
{
  // Translate every recorded bad-pixel position by the input image's crop
  // offset and append it to the output image's bad-pixel list.
  iPoint2D crop = in->getCropOffset();
  // Positions are packed as x | (y << 16); pack the crop offset the same way.
  uint32 shift = crop.x | (crop.y << 16);
  for (vector<uint32>::iterator it = bad_pos.begin(); it != bad_pos.end(); ++it) {
    out->mBadPixelPositions.push_back(shift + (*it));
  }
}
Пример #12
0
void FrameAverager::addFrame(const RawImage& frame)
{
    // Accumulates 'frame' into the running per-pixel sum used for averaging
    // and bumps the frame counter.
    // Lazily (re)create the accumulator on first use, sized to the frame
    // and zero-initialized.
    if (view(accumulator).size() == 0)
    {
        accumulator.recreate(frame.dimensions());
        fill_pixels(view(accumulator), Color::black());
    }
    // accumulator = accumulator + frame, pixel-wise (boost::gil transform).
    boost::gil::transform_pixels(const_view(frame), const_view(accumulator), view(accumulator), boost::gil::pixel_plus_t<RawPixel, AccumPixel, AccumPixel>());
    ++frameCount;
}
Пример #13
0
void CubemapStore::Load(const std::string& name, const RawImage& img, GLuint level /*= 0*/)
{
    // Creates a cubemap texture from a horizontal-cross layout image (4 faces
    // wide, 3 faces tall), uploads all six faces at the given mip level,
    // generates mipmaps, and registers the cubemap under 'name'.
    CubemapDescription cubemap;
    glGenTextures(1, &cubemap.id);
    glBindTexture(GL_TEXTURE_CUBE_MAP, cubemap.id);

    glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);

    // Calc image params: the cross is 4 faces wide and 3 faces tall.
    int stride = img.Width();
    int width = img.Width() / 4;
    int height = img.Height() / 3;
    int channels = img.Channels();

    // Set row read stride so glTexImage2D can read a face out of the middle
    // of the cross without repacking the pixels.
    glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);

    // Upload image data, one face per cubemap target.
    for (int i = 0; i < 6; ++i)
    {
        int xoff, yoff;
        int target = GL_TEXTURE_CUBE_MAP_POSITIVE_X + i;
        // crossFaceOffset presumably yields the face's pixel origin within
        // the cross for this target -- confirm against its definition.
        crossFaceOffset(target, &xoff, &yoff, stride);
        glTexImage2D(
            target, level, channels == 3 ? GL_RGB : GL_RGBA,
            width, height,
            0, channels == 3 ? GL_RGB : GL_RGBA, GL_UNSIGNED_BYTE,
            img.Data() + (yoff * stride + xoff) * channels);
    }

    // Reset row stride to the default
    glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);

    glGenerateMipmap(GL_TEXTURE_CUBE_MAP);
    glBindTexture(GL_TEXTURE_CUBE_MAP, 0);

    mCubemaps.insert({name, cubemap});
}
Пример #14
0
void OpcodeFixBadPixelsConstant::apply( RawImage &in, RawImage &out, int startY, int endY )
{
  // Scans rows [startY, endY) for pixels equal to the sentinel mValue and
  // records their positions (packed as x | y<<16, shifted by the crop
  // offset) in the output image's shared bad-pixel list.
  iPoint2D crop = in->getCropOffset();
  uint32 offset = crop.x | (crop.y << 16);
  // Collect positions locally first so the mutex is held only for the append.
  vector<uint32> bad_pos;
  for (int y = startY; y < endY; y ++) {
    ushort16* src = (ushort16*)out->getData(0, y);
    for (int x = 0; x < in->dim.x; x++) {
      if (src[x]== mValue) {
        bad_pos.push_back(offset + ((uint32)x | (uint32)y<<16));
      }
    }
  }
  // Append under the image's mutex: apply() may run concurrently on
  // different row slices of the same image.
  if (!bad_pos.empty()) {
	//mutex::lock(&out->mBadPixelMutex);
	out->mBadPixelMutex.lock();
    out->mBadPixelPositions.insert(out->mBadPixelPositions.end(), bad_pos.begin(), bad_pos.end());
   // pthread_mutex_unlock(&out->mBadPixelMutex);
	out->mBadPixelMutex.unlock();
  }

}
Пример #15
0
bool SaveRawImageToFile(const RawImage& img, const char* filename)
{
	// Dumps the raw pixel bits of 'img' to 'filename'.
	// Returns false if the file cannot be opened, the image has no pixel
	// data, or the write fails. Fixed: the original ignored the fwrite
	// result and reported success even on a failed/short write.
	const int w = img.Width();
	const int h = img.Height();
	const int bpp = img.BytesPerPixel();

	const ScopedFilePointer fp(filename, "wb");
	if (!fp)
	{
		return false;
	}

	const void* p = img.Bits();
	if (!p)
	{
		return false;
	}

	// Nothing to write for a zero-sized image; treat as success.
	const size_t size = (size_t)w * h * bpp;
	if (size == 0)
	{
		return true;
	}

	// fwrite returns the number of complete items written; 1 means the
	// whole buffer made it out.
	return fwrite(p, size, 1, fp) == 1;

}
Пример #16
0
void threshold()
{
	// Thresholds a fixed 512x512x700 CT volume: voxels with intensity in the
	// open interval (-938, -124) become 100, all others 0, and the result is
	// written as a 16-bit raw file.
	int l = 512;
	int m = 512;
	int	n = 700;
	RawImage test;

	char dir2[200] = "K:\\sdf\\volume\\clean\\clean\\ep\\test3\\WI_3035_P_iso_clean.raw";

	char dir1[200] = "L:\\sdfdata2\\testthreshold\\WI_3035_P_iso_clean_threshold.raw";

	short * indata1 = test.readStream(dir2, &l, &m, &n);

	// Binarize in place (flat layout: slice, then row, then column).
	for (int i = 0; i < n; i++)
	{
		for (int j = 0; j < l; j++)
		{
			for (int k = 0; k < m; k++)
			{
				short *val = &indata1[i*l*m + j*m + k];
				if (*val >-938 && *val < -124)
				{
					*val = 100;
				}
				else *val = 0;
			}
		}
	}

	// Fixed: verify fopen succeeded before writing (the original passed a
	// possibly-NULL FILE* straight to fwrite/fclose).
	FILE *p = fopen(dir1, "wb");
	if (p != NULL)
	{
		fwrite(indata1, sizeof(short), l*m*n, p);
		fclose(p);
	}
	else
	{
		fprintf(stderr, "threshold: cannot open %s for writing\n", dir1);
	}
	delete[] indata1;
}
Пример #17
0
void ddcircle(string dir)
{
	// Masks everything outside a radius-200 circle centred at (256, 256) in
	// every slice of the volume D:\sdfdata\<dir>, writing the masked volume
	// to J:\swfdata\<dir>.
	int l = 512;
	int m = 512;
	int	n = 700;
	RawImage test;
	char dst[100];
	strcpy(dst, dir.c_str());
	char dir2[200] = "D:\\sdfdata\\";
	strcat(dir2, dst);
	char dir1[200] = "J:\\swfdata\\";
	strcat(dir1, dst);
	float * indata1 = test.readStreamfloat(dir2, &l, &m, &n);
	for (int i = 0; i < n; i++)
	{
		for (int j = 0; j < l; j++)
		{
			for (int k = 0; k < m; k++)
			{
				// Zero voxels at squared distance >= 200^2 from the centre.
				PIXTYPE *val = &indata1[i*l*m + j*m + k];
				if (((j - 256) * (j - 256) + (k - 256) * (k - 256)) >= 40000)
				{
					*val = 0;
				}
			}
		}
	}

	// Fixed: verify fopen succeeded before writing (the original passed a
	// possibly-NULL FILE* straight to fwrite/fclose).
	FILE *p = fopen(dir1, "wb");
	if (p != NULL)
	{
		fwrite(indata1, sizeof(PIXTYPE), l*m*n, p);
		fclose(p);
	}
	else
	{
		fprintf(stderr, "ddcircle: cannot open %s for writing\n", dir1);
	}
	delete[] indata1;
}
Пример #18
0
RawImage* ScaleFilter::scaleDown(double scale, RawImage* image)
{
	// Produces a scaled-down copy of 'image': each destination pixel is the
	// average of an odd-sized source window centred on the mapped source
	// coordinate. The caller owns the returned image.
	ImageSize srcSize = image->GetSize();

	const int dstHeight = (int)round((double)srcSize.Height * scale);
	const int dstWidth = (int)round((double)srcSize.Width * scale);
	RawImage* scaled = new RawImage(dstWidth, dstHeight);

	Rectangle bounds = fromSize(srcSize);
	const double invScale = 1.0 / scale;

	// Averaging window: roughly one source pixel per destination pixel,
	// bumped to the next odd size so it has a well-defined centre.
	int window = max((int)floor(invScale), 1);
	if (window % 2 == 0) window++;

	for (int row = 0; row < dstHeight; row++)
	{
		for (int col = 0; col < dstWidth; col++)
		{
			const int srcCol = (int)round((double)col * invScale);
			const int srcRow = (int)round((double)row * invScale);
			scaled->SetPixel(col, row, image->GetAverage(srcCol, srcRow, window, window, bounds));
		}
	}

	return scaled;
}
Пример #19
0
void float2uchar(int l, int m, int n, string dir)
{
	// Converts the float volume L:\sdfdata2\outer\<dir> to unsigned char
	// (truncating cast, no scaling) and writes it to
	// L:\sdfdata2\outeruint8\<dir>.
	//   l, m, n - volume dimensions; may be updated by readStreamfloat
	RawImage test;
	char dst[100];
	strcpy(dst, dir.c_str());
	char dir2[200] = "L:\\sdfdata2\\outer\\";		//"D:\\sdfdata\\distance\\";
	strcat(dir2, dst);
	char dir1[200] = "L:\\sdfdata2\\outeruint8\\";
	strcat(dir1, dst);
	float * indata1 = test.readStreamfloat(dir2, &l, &m, &n);
	unsigned char * outdata = new unsigned char[l*m*n];
	for (int i = 0; i < n; i++)
	{
		for (int j = 0; j < l; j++)
		{
			for (int k = 0; k < m; k++)
			{
				outdata[i*l*m + j*m + k] = (unsigned char)indata1[i*l*m + j*m + k];
			}
		}
	}

	// Fixed: verify fopen succeeded before writing (the original passed a
	// possibly-NULL FILE* straight to fwrite/fclose).
	FILE *p = fopen(dir1, "wb");
	if (p != NULL)
	{
		fwrite(outdata, sizeof(unsigned char), l*m*n, p);
		fclose(p);
	}
	else
	{
		fprintf(stderr, "float2uchar: cannot open %s for writing\n", dir1);
	}
	delete[] indata1;
	delete[] outdata;
}
Пример #20
0
void RawImage::downsampleInPlace(unsigned int downsampleX, unsigned int downsampleY)
{
  // Replaces this image's contents with a downsampled copy produced by the
  // free function downsample(), then adopts the temporary's metadata and
  // pixel buffer.
  RawImage* tmp = downsample(this, downsampleX, downsampleY);

  // Free the current pixel buffer before adopting the new one.
  deletePixels();
  _type = tmp->getType();
  _bytesPerPixel = tmp->getBytesPerPixel();
  _width = tmp->getWidth();
  _height = tmp->getHeight();
  _pixels = tmp->takePixels();  // takePixels() hands over buffer ownership

  delete tmp;
}
Пример #21
0
RawImage* downsample(RawImage* src, unsigned int downsampleX, unsigned int downsampleY)
{
  // Builds a new image keeping every downsampleX-th column and every
  // downsampleY-th row of 'src' (nearest-pixel decimation, no averaging).
  // The caller owns the returned image.
  RawImage* dst = new RawImage(src->getType(), src->getBytesPerPixel(),
      src->getWidth() / downsampleX, src->getHeight() / downsampleY);

  const size_t bpp = dst->getBytesPerPixel();
  // Bytes to skip between two kept pixels in a source row.
  const size_t skip = (downsampleX - 1) * bpp;
  for (size_t row = 0; row < dst->getHeight(); ++row) {
    size_t srcPos = row * downsampleY * src->getWidth() * bpp;
    size_t dstPos = row * dst->getWidth() * bpp;
    for (size_t col = 0; col < dst->getWidth(); ++col) {
      // Copy one pixel byte-by-byte, then jump over the dropped columns.
      for (size_t b = 0; b < bpp; ++b) {
        dst->getPixels()[dstPos++] = src->getPixels()[srcPos++];
      }
      srcPos += skip;
    }
  }

  return dst;
}
Пример #22
0
void GLLUTWidget::sampleImage(const RawImage & img) {
  // Folds every pixel of 'img' into the LUT sampler: each pixel is taken to
  // YUV, mapped to a LUT slice by its Y component, and drawn as a sample at
  // its (U, V) cell. Supports RGB8, YUV444 and YUV422 (UYVY) inputs; other
  // formats produce an error message.
  //compute slice it sits on:
  ColorFormat source_format=img.getColorFormat();
  
  int n=img.getNumPixels();
  
  yuv color;
  int i=0;
  
  // Ignore degenerate (<= 1 pixel wide/tall) frames.
  if (img.getWidth() > 1 && img.getHeight() > 1) {
    if (source_format==COLOR_RGB8) {
      // RGB8: convert each pixel to YUV before sampling.
      rgbImage rgb_img(img);
      rgb * color_rgb=rgb_img.getPixelData();
      for (int j=0;j<n;j++) {
        color=Conversions::rgb2yuv(*color_rgb);
        i=_lut->norm2lutX(color.y);  // slice index from luma
        if (i >= 0 && i < (int)slices.size()) {
          drawSample(i,_lut->norm2lutY(color.u),_lut->norm2lutZ(color.v));
          //slices[i]->sampler->surface.setPixel(_lut->norm2lutY(color.u),_lut->norm2lutZ(color.v),rgba(255,255,255,255));
          slices[i]->sampler_update_pending=true;
        }
        color_rgb++;
      }
    } else if (source_format==COLOR_YUV444) {    
      // YUV444: pixels are already YUV, sample directly.
      yuvImage yuv_img(img);
      yuv * color_yuv=yuv_img.getPixelData();
      for (int j=0;j<n;j++) {
        color=(*color_yuv);
        i=_lut->norm2lutX(color.y);
        if (i >= 0 && i < (int)slices.size()) {
          //slices[i]->sampler->surface.setPixel(_lut->norm2lutY(color.u),_lut->norm2lutZ(color.v),rgba(255,255,255,255));
          drawSample(i,_lut->norm2lutY(color.u),_lut->norm2lutZ(color.v));
          slices[i]->sampler_update_pending=true;
        }
        color_yuv++;
      }
    } else if (source_format==COLOR_YUV422_UYVY) {
        // UYVY: each 4-byte macropixel carries one U/V pair shared by two
        // luma samples (y1, y2); sample both.
        uyvy * color_uyvy = (uyvy*)img.getData();
        uyvy color_uyvy_tmp;
        for (int j=0;j<n;j+=2) {
          color_uyvy_tmp=(*color_uyvy);
          color.u=color_uyvy_tmp.u;
          color.v=color_uyvy_tmp.v;
  
          // First luma sample of the macropixel.
          color.y=color_uyvy_tmp.y1;
          i=_lut->norm2lutX(color.y);
          if (i >= 0 && i < (int)slices.size()) {
            //slices[i]->sampler->surface.setPixel(_lut->norm2lutY(color.u),_lut->norm2lutZ(color.v),rgba(255,255,255,255));
            drawSample(i,_lut->norm2lutY(color.u),_lut->norm2lutZ(color.v));
            slices[i]->sampler_update_pending=true;
          }
  
          // Second luma sample of the macropixel.
          color.y=color_uyvy_tmp.y2;
          i=_lut->norm2lutX(color.y);
          if (i >= 0 && i < (int)slices.size()) {
            //slices[i]->sampler->surface.setPixel(_lut->norm2lutY(color.u),_lut->norm2lutZ(color.v),rgba(255,255,255,255));
            drawSample(i,_lut->norm2lutY(color.u),_lut->norm2lutZ(color.v));
            slices[i]->sampler_update_pending=true;
          }
          color_uyvy++;
        }
    } else {
      fprintf(stderr,"Unable to sample colors from frame of format: %s\n",Colors::colorFormatToString(source_format).c_str());
      fprintf(stderr,"Currently supported are rgb8, yuv444, and yuv422 (UYVY).\n");
      fprintf(stderr,"(Feel free to add more conversions to glLUTwidget.cpp).\n");
    }
   }
  redraw();

}
Пример #23
0
bool RawImageFromBmpFile(const char* filename, RawImage& result)
{
	// Loads an uncompressed 24-bit bottom-up BMP into 'result' (converted to
	// RGB byte order, tightly packed). Returns false on any I/O or format
	// error; 'result' is torn down first in every case.
	result.Teardown();

	if (!filename || filename[0] == '\0')
	{
		// Fixed: the original passed 'filename' as an argument to a format
		// string with no conversion specifier.
		fprintf(stderr, "filename required!\n");
		return false;
	}
	const ScopedFilePointer fp(filename, "rb");

	if (!fp)
	{
		fprintf(stderr, "Could not open file \"%s\"!\n", filename);
		return false;
	}

	// Fixed: check fread results so truncated files are rejected instead of
	// being parsed from uninitialized header bytes.
	BitmapFileHeader fileHeader = {};
	if (fread(&fileHeader, sizeof(fileHeader), 1, fp) != 1 ||
		fileHeader.bfType != ('B' | ((int)'M' << 8)))
	{
		fprintf(stderr, "\"%s\" is not a bitmap file!\n", filename);
		return false;
	}

	BitmapInfoHeader infoHeader = {};
	if (fread(&infoHeader, sizeof(infoHeader), 1, fp) != 1)
	{
		fprintf(stderr, "\"%s\" is not a bitmap file!\n", filename);
		return false;
	}

	const int width = infoHeader.biWidth;
	const int height = infoHeader.biHeight;
	const int bitsPerPixel = infoHeader.biBitCount;
	const int bytesPerPixel = bitsPerPixel / 8;
	// BMP rows are padded up to a 4-byte boundary.
	const int pitch = (width * bytesPerPixel + 3) & ~3;
	const int dataSize = pitch * height;

	if (width > 8192 || width <= 0 ||
		height > 8192 || height <= 0 ||
		infoHeader.biPlanes != 1 ||
		bitsPerPixel != 24 ||
		infoHeader.biCompression != 0 ||
		static_cast<int>(infoHeader.biSizeImage) != dataSize
		)
	{
		fprintf(stderr, "\"%s\" is unsupported format!\n", filename);
		fprintf(stderr,
			"  Width:%d, Height:%d, Bpp:%d, Plane:%d, "
			"Compression:%d, Size:%d\n",
			width, height, infoHeader.biBitCount, infoHeader.biPlanes,
			infoHeader.biCompression, infoHeader.biSizeImage);
		return false;
	}

	// The pixel data must start right after the two headers.
	const int dataBeginPos = ftell(fp);
	if (dataBeginPos != static_cast<int>(fileHeader.bfOffBits))
	{
		fprintf(stderr, "\"%s\" is unsupported format!\n", filename);
		fprintf(stderr, "  OffsetBits:%d\n", fileHeader.bfOffBits);
		return false;
	}

	result.Allocate(width, height, bytesPerPixel);
	char* const destBegin = static_cast<char*>(result.MutableBits());

	// BMP stores rows bottom-up; fill the destination from the last row back.
	const int destPitch = width * bytesPerPixel;
	const int gap = pitch - destPitch;
	char* curDest = destBegin + height * destPitch;
	for (int y = height; y > 0; --y)
	{
		curDest -= destPitch;
		if (fread(curDest, destPitch, 1, fp) != 1)
		{
			fprintf(stderr, "\"%s\" is truncated!\n", filename);
			return false;
		}
		if (gap > 0)
		{
			fseek(fp, gap, SEEK_CUR);
		}
		// Swap BGR -> RGB in place.
		for (int x = 0; x < width; ++x)
		{
			char temp = curDest[x * 3];
			curDest[x * 3] = curDest[x * 3 + 2];
			curDest[x * 3 + 2] = temp;
		}
	}

	// Sanity check: we should have consumed exactly the declared payload.
	const int expectedEnd =
		fileHeader.bfOffBits + infoHeader.biSizeImage;
	const int dataEndPos = ftell(fp);
	return dataEndPos == expectedEnd;
}
Пример #24
0
ReturnType ErodeDilate::onExecute()
{
	// Pops one frame from the input port, optionally converts it to
	// grayscale, applies erosion or dilation per the configured mode, and
	// pushes the processed frame to the output port.
	// Acquire the image from the in-port
	opros_any *pData = ImageIn.pop();
	RawImage result;

	if(pData != NULL){
		
		// Fetch the image from the port
		RawImage Image = ImageIn.getContent(*pData);
		RawImageData *RawImage = Image.getImage();

		// Get the size of the current frame
		m_in_width = RawImage->getWidth();
		m_in_height = RawImage->getHeight();

		// Lazily allocate the working buffers (first frame only)
		if(m_orig_img == NULL){
			m_orig_img = cvCreateImage(cvSize(m_in_width, m_in_height), IPL_DEPTH_8U, 3);
		}
		if(m_gray_img == NULL){
			m_gray_img = cvCreateImage(cvSize(m_in_width, m_in_height), IPL_DEPTH_8U, 1);
		}
		if(m_result_img == NULL){
			m_result_img = cvCreateImage(cvSize(m_in_width, m_in_height), IPL_DEPTH_8U, 3);
		}

		// Copy the raw frame bytes into the OpenCV buffer
		memcpy(m_orig_img->imageData, RawImage->getData(), RawImage->getSize());

		
		if(m_RGB_mode == "Gray"){	// grayscale mode requested
			// Convert the color frame to grayscale
			cvCvtColor( m_orig_img, m_gray_img, CV_RGB2GRAY );

			// Merge the single channel back into the 3-channel buffer so the
			// erode/dilate below still operates on m_orig_img
			cvMerge(m_gray_img, m_gray_img, m_gray_img, NULL, m_orig_img);
		}

		if(m_Change_mode == "Erode"){	// erosion mode
			cvErode(m_orig_img, m_result_img, NULL, m_Repeat_count);
		}else if(m_Change_mode == "Dilate"){				// dilation mode
			cvDilate(m_orig_img, m_result_img, NULL, m_Repeat_count);
		}else{
			// Unknown mode: pass the frame through unchanged
			cvCopy(m_orig_img, m_result_img);
		}


		// Get the RawImage data pointer of the outgoing frame
		RawImageData *pimage = result.getImage();
		
		// Resize it to the processed width/height/channel count
		pimage->resize(m_result_img->width, m_result_img->height, m_result_img->nChannels);
		
		// Total byte count of the processed frame
		int size = m_result_img->width * m_result_img->height * m_result_img->nChannels;
		
		// Destination pointer for the outgoing pixel data
		unsigned char *ptrdata = pimage->getData();
		
		// Copy the processed frame into the outgoing RawImage
		memcpy(ptrdata, m_result_img->imageData, size);

		// Push to the out-port
		opros_any mdata = result;
		ImageOut.push(result);// send

		delete pData;
	}

	return OPROS_SUCCESS;
}
Пример #25
0
void pvalue(string dir1, string dir2, string dir22)
{
	// Compares a level-set segmentation against reference ("robot") data and
	// accumulates confusion-style counts (TT/FT/TF/FF), appending one line of
	// results to roc.txt.
	//   dir1  - level-set result file name under input1
	//   dir2  - reference data file name under input2
	//   dir22 - clean volume file name, read only to obtain l, m, n
	int l = 0, m = 0, n = 0;
	char dst1[200], dst2[200], dst3[200];
	strcpy(dst1, dir1.c_str()); //string-->char
	strcpy(dst2, dir2.c_str());
	strcpy(dst3, dir22.c_str());
	RawImage test;
	char dir3[200] = input1;  //"D:\\sdfdata\\distance\\rocdata\\" ;
	char dir4[200] = input2;  //"K:\\sdf\\volume\\clean\\clean\\ep\\rocdatao\\";
	char dir5[200] = "K:\\sdf\\volume\\clean\\clean\\ep\\clean\\";
	strcat(dir3, dst1);
	strcat(dir4, dst2);
	strcat(dir5, dst3);
	// Read and immediately discard the clean volume just to fill l, m, n.
	short * indata2 = test.readStream(dir5, &l, &m, &n);
	delete[] indata2;
	cout << m << endl;
	float * indata1 = test.readStreamfloat(dir3, &l, &m, &n);	//level-set data
	float * indata3 = test.readStreamfloat(dir4, &l, &m, &n);	//robot data

	// Confusion counters; the first/last 100 slices are skipped.
	int TT = 0, TF = 0, FT = 0, FF = 0;
	double *res = new double[4];
	for (int i = 100; i < n - 100; i++)
	{
		for (int j = 0; j < l; j++)
		{
			for (int k = 0; k < m; k++)
			{

				PIXTYPE val1 = indata1[i*l*m + j*m + k];
				//cout<<val1<<endl;
				// NOTE(review): val2 is declared short but loaded from a
				// float array (truncating conversion) -- confirm intended.
				short val2 = indata3[i*l*m + j*m + k];
				//if (val2 !=0)
				//{
				//	cout << val2 <<endl;
				//} 
				//else
				//{
				//}

				// Level-set says "inside" (value 100)...
				if (val1 == 100)
				{
					//cout <<val1<<endl;

					if (val2 == 100) //or val [-924,-124]
					{
						TT++;
					}
					else
					{
						FT++;
					}
				}
				// Reference says "inside" within the radius-200 circle...
				// NOTE(review): this branch and its else are asymmetric with
				// the block above (a voxel can be counted twice, and FF
				// counts any val1 == 0 outside the circle test) -- confirm
				// the intended confusion-matrix semantics.
				if (val2 == 100 && ((j - 256) * (j - 256) + (k - 256) * (k - 256)) <= 200 * 200) //> -938 && val2 <-124
				{

					if (val1 == 0)
					{
						TF++;

					}

				}
				else if (val1 == 0)
				{
					FF++;
				}



			}
		}
	}
	// Pack the four counters and append them to the ROC log file.
	res[0] = TT;
	res[1] = FT;
	res[2] = TF;
	res[3] = FF;
	cout << "TT:" << TT << "TF:" << TF << "FF:" << FF << "FT" << FT << endl;
	//FILE *p=fopen("K:\\sdf\\volume\\clean\\clean\\ep\\roc.txt","at+");
	//fwrite(res,sizeof(double),4,p);
	//fclose(p);
	//fflush(stdout);
	ofstream os("K:\\sdf\\volume\\clean\\clean\\ep\\roc.txt", ios::app);
	if (os)
	{
		for (int i = 0; i <4; i++)
		{
			os << res[i] << " ";

		}
		os << endl;
	}
	else cerr << "no file" << endl;
	delete[] res;
	delete[] indata1;
	delete[] indata3;

}
Пример #26
0
bool CaptureFromFile::copyAndConvertFrame(const RawImage & src, RawImage & target)
{
  // Copies 'src' into 'target', converting to the user-selected output color
  // format. Supported: same-format byte copy, RGB8 -> YUV422 (UYVY), and
  // YUV422 (UYVY) -> RGB8. Returns false on an unsupported combination.
#ifndef VDATA_NO_QT
  mutex.lock();
#endif
  ColorFormat output_fmt = Colors::stringToColorFormat(v_colorout->getSelection().c_str());
  ColorFormat src_fmt=src.getColorFormat();
    
  // Allocate the target on first use, otherwise just ensure the size/format.
  if (target.getData()==0)
    target.allocate(output_fmt, src.getWidth(), src.getHeight());
  else
    target.ensure_allocation(output_fmt, src.getWidth(), src.getHeight());
     
  target.setTime(src.getTime());
     
  if (output_fmt == src_fmt)
  {
    // Same format: raw byte copy.
    if (src.getData() != 0)
      memcpy(target.getData(),src.getData(),src.getNumBytes());
  }
  else if (src_fmt == COLOR_RGB8 && output_fmt == COLOR_YUV422_UYVY)
  {
    // RGB8 -> UYVY via libdc1394.
    if (src.getData() != 0)
      dc1394_convert_to_YUV422(src.getData(), target.getData(), src.getWidth(), src.getHeight(), 
                               DC1394_BYTE_ORDER_UYVY, DC1394_COLOR_CODING_RGB8, 8);
  }
  else if (src_fmt == COLOR_YUV422_UYVY && output_fmt == COLOR_RGB8)
  {
    // UYVY -> RGB8 via libdc1394.
    if (src.getData() != 0)
      dc1394_convert_to_RGB8(src.getData(),target.getData(), src.getWidth(), src.getHeight(), 
                             DC1394_BYTE_ORDER_UYVY, DC1394_COLOR_CODING_YUV422, 8);
  } 
  else 
  {
    fprintf(stderr,"Cannot copy and convert frame...unknown conversion selected from: %s to %s\n",
            Colors::colorFormatToString(src_fmt).c_str(),
            Colors::colorFormatToString(output_fmt).c_str());
#ifndef VDATA_NO_QT
    mutex.unlock();
#endif
    return false;
  } 
#ifndef VDATA_NO_QT
  mutex.unlock();
#endif
  return true;
}
Пример #27
0
ReturnType HandsMotionTracking::onExecute()
{
	// 영상을 Inport로부터 취득
	opros_any *pData = ImageIn.pop();
	RawImage result;

	//아웃 데이터
	std::vector<PositionDataType> data;


	if(pData != NULL){
		
		// 포트로 부터 이미지 취득
		RawImage Image = ImageIn.getContent(*pData);
		RawImageData *RawImage = Image.getImage();


		// 현재영상의 크기를 취득
		m_in_width = RawImage->getWidth();
		m_in_height = RawImage->getHeight();

		// 메모리 한번 해제해주고
		if(m_image_buff			!= NULL)
			cvReleaseImage(&m_image_buff);
		if(m_image_dest         != NULL)
			cvReleaseImage(&m_image_dest);
		if(m_image_dest2        != NULL)
			cvReleaseImage(&m_image_dest2);

		if(m_image_th			!= NULL)
			cvReleaseImage(&m_image_th);
		if(m_image_th2			!= NULL)
			cvReleaseImage(&m_image_th2);

		// 이미지용 메모리 할당
        m_image_buff   = cvCreateImage(cvSize(m_in_width, m_in_height), IPL_DEPTH_8U, 3);//원본 이미지
		m_image_dest   = cvCreateImage(cvSize(m_in_width, m_in_height), IPL_DEPTH_8U, 3);
		m_image_dest2  = cvCreateImage(cvSize(m_in_width, m_in_height), IPL_DEPTH_8U, 3);

		m_image_th     = cvCreateImage(cvSize(m_in_width, m_in_height), IPL_DEPTH_8U, 1);//영역 추출 이미지
		m_image_th2    = cvCreateImage(cvSize(m_in_width, m_in_height), IPL_DEPTH_8U, 1);//영역 추출 이미지
		
		
		if(!video_flag)
		{
			std::string cpath = getProperty("opros.component.dir");
			std::string file = getProperty("VideoFile");
			if (file == "") file = "sample.avi";

			std::string path = cpath + file;

			m_video	= NULL;
			m_video = cvCreateFileCapture(path.c_str()); //비디오
			video_flag = true;// 비디오가 계속 새로 재생됨을 방지
			
		}

		// 영상에 대한 정보를 확보!memcpy
		memcpy(m_image_buff->imageData, RawImage->getData(), RawImage->getSize());

		// 출력용
		cvCopy(m_image_buff, m_image_dest, 0);

		// 색상 분리용 이미지
		IplImage* m_image_YCrCb = cvCreateImage(cvGetSize(m_image_buff), IPL_DEPTH_8U, 3);
		IplImage* m_Y  = cvCreateImage(cvGetSize(m_image_buff), IPL_DEPTH_8U, 1);
		IplImage* m_Cr = cvCreateImage(cvGetSize(m_image_buff), IPL_DEPTH_8U, 1);
		IplImage* m_Cb = cvCreateImage(cvGetSize(m_image_buff), IPL_DEPTH_8U, 1);

		cvCvtColor(m_image_buff, m_image_YCrCb, CV_RGB2YCrCb);   //RGB - > YCrCV 변환

		cvSplit(m_image_YCrCb, m_Y, m_Cr, m_Cb, NULL);   //채널 분리

		//추출이 필요한 영역 픽셀 데이터 저장 변수
		unsigned char m_Cr_val = 0;			
		unsigned char m_Cb_val = 0;

		// 살색추출
		for(int i=0;i<m_image_buff->height;i++)            
		{
			for(int j=0;j<m_image_buff->width;j++)
			{  
				//Cr 영역과 Cb 영역 추출
				m_Cr_val = (unsigned char)m_Cr->imageData[i*m_Cr->widthStep+j];
				m_Cb_val = (unsigned char)m_Cb->imageData[i*m_Cb->widthStep+j];

				//살색에 해당하는 영역인지 검사
				if( (77 <= m_Cr_val) && (m_Cr_val <= 127) && (133 <= m_Cb_val) && (m_Cb_val <= 173) )
				{
					// 살색부분은 하얀색
					m_image_buff->imageData[i*m_image_buff->widthStep+j*3+0] = (unsigned char)255; 
					m_image_buff->imageData[i*m_image_buff->widthStep+j*3+1] = (unsigned char)255;
					m_image_buff->imageData[i*m_image_buff->widthStep+j*3+2] = (unsigned char)255; 
				}
				else
				{ 
					// 나머지는 검정색
					m_image_buff->imageData[i*m_image_buff->widthStep+j*3+0]= 0;
					m_image_buff->imageData[i*m_image_buff->widthStep+j*3+1]= 0;
					m_image_buff->imageData[i*m_image_buff->widthStep+j*3+2]= 0;
				}
			}
		}
		
		//살색 추출한 영상을 이진화
		cvCvtColor(m_image_buff, m_image_th, CV_RGB2GRAY); 

		//잡영 제거를 위한 연산
		cvDilate (m_image_th, m_image_th, NULL, 2);//팽창
		cvErode  (m_image_th, m_image_th, NULL, 2);//침식	

		//변수 및 이미지 메모리 초기화
		int temp_num = 0;
		int StartX , StartY, EndX , EndY;
		int nNumber = 0;
		m_nThreshold	= 100;

		if( m_rec_out != NULL )
		{
			delete m_rec_out;

			m_rec_out	= NULL;
			m_nBlobs_out	= _DEF_MAX_BLOBS;
		}
		else
		{
			m_rec_out	= NULL;
			m_nBlobs_out	= _DEF_MAX_BLOBS;
		}
	
		if( m_image_th2 != NULL )	
			cvReleaseImage( &m_image_th2 );

		//레이블링 할 영상 따로 생성
		m_image_th2			= cvCloneImage( m_image_th );

		//레이블링 할 이미지의 크기 저장
		int nWidth	= m_image_th2->width;
		int nHeight = m_image_th2->height;

		//해당 영상 크기만큼 버프 설정
		unsigned char* tmpBuf = new unsigned char [nWidth * nHeight];

		for(int j=0; j<nHeight ;j++)	
			for(int i=0; i<nWidth ;i++)	
				//전 픽셀 순회
				tmpBuf[j*nWidth+i] = (unsigned char)m_image_th2->imageData[j*m_image_th2->widthStep+i];
		
////// 레이블링을 위한 포인트 초기화

		m_vPoint_out = new Visited [nWidth * nHeight];
		
		for(int nY = 0; nY < nHeight; nY++)
		{
			for(int nX = 0; nX < nWidth; nX++)
			{
				m_vPoint_out[nY * nWidth + nX].bVisitedFlag		= FALSE;
				m_vPoint_out[nY * nWidth + nX].ptReturnPoint.x	= nX;
				m_vPoint_out[nY * nWidth + nX].ptReturnPoint.y	= nY;
			}
		}

////// 레이블링 수행
		for(int nY = 0; nY < nHeight; nY++)
		{
			for(int nX = 0; nX < nWidth; nX++)
			{
				if(tmpBuf[nY * nWidth + nX] == 255)		// Is this a new component?, 255 == Object
				{
					temp_num++;

					tmpBuf[nY * nWidth + nX] = temp_num;
					
					StartX = nX, StartY = nY, EndX = nX, EndY= nY;

					__NRFIndNeighbor(tmpBuf, nWidth, nHeight, nX, nY, &StartX, &StartY, &EndX, &EndY, m_vPoint_out);

					if(__Area(tmpBuf, StartX, StartY, EndX, EndY, nWidth, temp_num) < m_nThreshold)
					{
		 				for(int k = StartY; k <= EndY; k++)
						{
							for(int l = StartX; l <= EndX; l++)
							{
								if(tmpBuf[k * nWidth + l] == temp_num)
									tmpBuf[k * nWidth + l] = 0;
							}
						}
						--temp_num;

						if(temp_num > 250)
							temp_num = 0;
					}
				}
			}
		}
		// 포인트 메모리 해제
		delete m_vPoint_out;

		//결과 보존
		nNumber = temp_num;
		
		//레이블링 수만큼 렉트 생성
		if( nNumber != _DEF_MAX_BLOBS )		
			m_rec_out = new CvRect [nNumber];
	
		//렉트 만들기
			if( nNumber != 0 )	
				DetectLabelingRegion(nNumber, tmpBuf, nWidth, nHeight,m_rec_out);

				for(int j=0; j<nHeight; j++)
					for(int i=0; i<nWidth ; i++)
						m_image_th2->imageData[j*m_image_th2->widthStep+i] = tmpBuf[j*nWidth+i];
		
				delete tmpBuf;
	
		//레이블링 수 보존		
		m_nBlobs_out = nNumber;
	
		//레이블링 영역 거르기
		int nMaxWidth	= m_in_height * 9 / 10;	   // 영상 가로 전체 크기의 90% 이상인 레이블은 제거
		int nMaxHeight	= m_in_width  * 9 / 10;	   // 영상 세로 전체 크기의 90% 이상인 레이블은 제거

		//최소영역과 최대영역 지정- 화면 크기에 영향 받음..
		_BlobSmallSizeConstraint( 5, 150, m_rec_out, &m_nBlobs_out);
		_BlobBigSizeConstraint(nMaxWidth, nMaxHeight,m_rec_out, &m_nBlobs_out);

		//앞으로 쓸 메모리 등록
		storage1 = cvCreateMemStorage(0);
		storage2 = cvCreateMemStorage(0);

		//변수 초기화
		CvPoint point;
		CvSeq* seq[10];
		CvSeq* hull;
		CvPoint end_pt;
		CvPoint center;

		//내보낼 데이터 초기화
		outData[0].x = 0, outData[0].y  = 0;
		outData[1].x = 0, outData[1].y  = 0;
		outData[2].x = 0, outData[2].y  = 0;

		int num = 0;
		int temp_x = 0;
		int temp_y = 0;
		int rect = 0;
		
		//만일을 대비하여 준비한 시퀸스 배열의 크기를 초과하지 않도록 조절
		//일단 한곳에서만 영상이 나오도록 조절..
		if(m_nBlobs_out > 1)
		{
			m_nBlobs_out = 1;
		}

		//레이블링 영역 내의 처리 시작 
		for( int i=0; i <  m_nBlobs_out; i++ )
		{
			//사각형 그리기에 필요한 두점 저장
			CvPoint	pt1 = cvPoint(	m_rec_out[i].x, m_rec_out[i].y );
			CvPoint pt2 = cvPoint(	pt1.x + m_rec_out[i].width,pt1.y + m_rec_out[i].height );

			// 컬러값 설정
			CvScalar color	= cvScalar( 0, 0, 255 );

			//레이블 사각형 그리기 - 확인용
			//cvDrawRect( m_image_dest, pt1, pt2, color);
			
			//레이블을 관심영역으로 지정할 이미지 생성
			temp_mask = cvCreateImage(cvSize(m_rec_out[i].width, m_rec_out[i].height),8,1);
			temp_mask2 = cvCreateImage(cvSize(m_rec_out[i].width, m_rec_out[i].height),8,1);
			
			//관심영역 지정
			cvSetImageROI(m_image_th, m_rec_out[i]);
				
			//관심영역 추출
			cvCopy(m_image_th, temp_mask, 0);

			//관심영역 해제
			cvResetImageROI(m_image_th);

			
			//관심영역 내의 오브젝트 처리를 위한 시퀸스 생성
			seq[i] = cvCreateSeq(CV_SEQ_KIND_GENERIC | CV_32SC2,sizeof(CvContour),sizeof(CvPoint), storage1);
			
			//관심영역에서 추출한이미지의 흰색 픽셀값으로 시퀸스 생성
			for(int j =0; j < temp_mask ->height ; j++)
			{
				for(int k = 0; k < temp_mask ->width; k++)
				{				
					if((unsigned char)temp_mask->imageData[j*temp_mask->widthStep+k] == 255)
					{
						point.x = k;		//흰색 픽셀 x좌표 저장
						point.y = j;		//흰색 픽셀 y좌표 저장
						cvSeqPush(seq[i], &point);	//시퀸스 구조체에 해당 좌표 삽입
						temp_x += point.x; //좌표 누적
						temp_y += point.y; //좌표 누적
						num++;             //픽셀 수 카운트

					}	
				}
			} 
			
			//좌표 초기화
			point.x				= 0;
			point.y				= 0;
			end_pt.x			= 0;
			end_pt.y			= 0;
			center.x			= 0;
			center.y			= 0;
			
			CvPoint dist_pt;			//중심점과의 최대거리를 찾을 컨백스헐 저장
			double fMaxDist		= 0;    //중심점과의 최대거리 저장
			double fDist		= 0;	//거리계산에 사용
		
	
			//중심점 찾기 - 픽셀의  평균값 찾기
			if(num != 0)
			{
				center.x			= (int)temp_x/num; //평균 좌표값 구하기
				center.y			= (int)temp_y/num; //평균 좌표값 구하기
			}

			//관심영역 설정
			cvSetImageROI(m_image_dest, m_rec_out[i]);

/////////컨백스헐 그리기////////
			if(seq[i]->total !=0)
			{	
				//컨백스헐 구하기
				hull = cvConvexHull2(seq[i], 0, CV_COUNTER_CLOCKWISE, 0);	
				point = **CV_GET_SEQ_ELEM(CvPoint*, hull,hull->total-1);

				//구한 컨백스헐 라인으로 그리기
				for(int x = 0; x < hull->total; x++)
				{
					CvPoint hull_pt = **CV_GET_SEQ_ELEM(CvPoint*, hull,x);

					//컨백스헐 라인 그리기
					//cvLine(m_image_dest, point, hull_pt, CV_RGB(255, 255, 0 ),2, 8);
					point = hull_pt;

					//최대 거리 구하기
					dist_pt =  **CV_GET_SEQ_ELEM(CvPoint*, hull,x);

					fDist = sqrt((double)((center.x - dist_pt.x) * (center.x - dist_pt.x) 
						+ (center.y - dist_pt.y) * (center.y - dist_pt.y)));

					if(fDist > fMaxDist)
					{
						max_pt = dist_pt;
						fMaxDist = fDist;
					}
				}
			}
			

			//중심점그리기
			cvCircle(m_image_dest,center,5, CV_RGB(0,0,255), 5);

			//내보낼 중심점 데이터 저장
			outData[0].x = center.x;
			outData[0].y = center.y;
	
			
////////마스크 만들기///////

			//중심점을 기준으로 그릴 마스크 이미지 생성
			circle_mask = cvCreateImage(cvGetSize(temp_mask), 8, 1);
			
			//바탕은 검은색으로
			cvSetZero(circle_mask);
			
			//흰색 원 - 손 영상과의 연산을 위해 바이너리 이미지에 그리기
			int radi = (int)m_rec_out[i].height/2.9; // 원 크기 수동조절..

			//흰색 원과 흰색 네모로 구성된 마스크 영상 생성을 위한 그리기
			cvCircle(circle_mask, center, radi, CV_RGB(255,255,255),CV_FILLED);
			cvDrawRect(circle_mask, cvPoint(center.x - radi, center.y),cvPoint(center.x + radi, pt2.y),
				 CV_RGB(255,255,255),CV_FILLED);

			//마스크 추출
			cvSub(temp_mask, circle_mask, temp_mask, 0);

	
///////관심영역 레이블링 - 손가락 끝 추출//////

			//변수 및 이미지 메모리 초기화
			int temp_num_in = 0;
			int StartX_in , StartY_in, EndX_in , EndY_in;
			int nNumber_in = 0;
			m_nThreshold_in	= 10;

			if( m_rec_in != NULL )
			{
				delete m_rec_in;

				m_rec_in	= NULL;
				m_nBlobs_in	= _DEF_MAX_BLOBS;
			}
			else
			{
				m_rec_in	= NULL;
				m_nBlobs_in	= _DEF_MAX_BLOBS;
			}

			if( temp_mask2 != NULL )	
				cvReleaseImage( &temp_mask2 );

			temp_mask2			= cvCloneImage( temp_mask );	

			//들어온 이미지의 크기 저장
			int nWidth	= temp_mask2->width;
			int nHeight = temp_mask2->height;
		
			//영상 크기만큼 버프 설정
			unsigned char* tmpBuf_in = new unsigned char [nWidth * nHeight];

			for(int j=0; j<nHeight ;j++)	
				for(int i=0; i<nWidth ;i++)
					//전 픽셀 순회
					tmpBuf_in[j*nWidth+i] = (unsigned char)temp_mask2->imageData[j*temp_mask2->widthStep+i];
				
	
		/////// 레이블링을 위한 포인트 초기화 ////////
			
			m_vPoint_in = new Visited [nWidth * nHeight];

			for(int nY = 0; nY < nHeight; nY++)
			{
				for(int nX = 0; nX < nWidth; nX++)
				{
					m_vPoint_in[nY * nWidth + nX].bVisitedFlag		= FALSE;
					m_vPoint_in[nY * nWidth + nX].ptReturnPoint.x	= nX;
					m_vPoint_in[nY * nWidth + nX].ptReturnPoint.y	= nY;
				}
			}

			////레이블링 수행
			for(int nY = 0; nY < nHeight; nY++)
			{
				for(int nX = 0; nX < nWidth; nX++)
				{
					if(tmpBuf_in[nY * nWidth + nX] == 255)		// Is this a new component?, 255 == Object
					{
						temp_num_in++;

						tmpBuf_in[nY * nWidth + nX] = temp_num_in;
						
						StartX_in = nX, StartY_in = nY, EndX_in = nX, EndY_in= nY;

						__NRFIndNeighbor(tmpBuf_in, nWidth, nHeight, nX, nY, 
							&StartX_in, &StartY_in, &EndX_in, &EndY_in,m_vPoint_in);

						if(__Area(tmpBuf_in, StartX_in, StartY_in, EndX_in, EndY_in, nWidth, temp_num_in) < m_nThreshold_in)
						{
		 					for(int k = StartY_in; k <= EndY_in; k++)
							{
								for(int l = StartX_in; l <= EndX_in; l++)
								{
									if(tmpBuf_in[k * nWidth + l] == temp_num_in)
										tmpBuf_in[k * nWidth + l] = 0;
								}
							}
							--temp_num_in;

							if(temp_num_in > 250)
								temp_num_in = 0;
						}
					}
				}
			}
			// 포인트 메모리 해제
			delete m_vPoint_in;

			//레이블링 수 보존
			nNumber_in = temp_num_in;

			if( nNumber_in != _DEF_MAX_BLOBS )		
				m_rec_in = new CvRect [nNumber_in];
		
				if( nNumber_in != 0 )	
					DetectLabelingRegion(nNumber_in, tmpBuf_in, nWidth, nHeight,m_rec_in);
				
				for(int j=0; j<nHeight; j++)
					for(int i=0; i<nWidth ; i++)
						temp_mask2->imageData[j*temp_mask2->widthStep+i] = tmpBuf_in[j*nWidth+i];
			
					delete tmpBuf_in;
				
			m_nBlobs_in = nNumber_in;

			//최소영역과 최대영역 설정
			_BlobSmallSizeConstraint( 5, 5, m_rec_in, &m_nBlobs_in);
			_BlobBigSizeConstraint( temp_mask2->width, temp_mask2->height,m_rec_in, &m_nBlobs_in);

			//선언 및 초기화
			CvPoint center_in;
			CvPoint point_in;
			
			point_in.x = 0;
			point_in.y = 0;
			center_in.x = 0;
			center_in.x = 0;
			CvSeq* seq_in[20];

			//준비한 시퀸스 배열크기를 초과하지 않도록 조절
			if(m_nBlobs_in > 20)
			{
				m_nBlobs_in =20;
			}

			for( int ni =0; ni <  m_nBlobs_in; ni++ )
			{		
				//사각형 그리기에 필요한 두 점 저장
				CvPoint	pt1 = cvPoint(	m_rec_in[ni].x, m_rec_in[ni].y );
				CvPoint pt2 = cvPoint(	pt1.x + m_rec_in[ni].width,pt1.y + m_rec_in[ni].height );

				//색상값 설정
				CvScalar color	= cvScalar( 255,0 , 255 );
				
				//레이블 사각형 그리기
				//cvDrawRect( m_image_dest, pt1, pt2, color);
				
				//처리할 손끝 마스크 생성할 메모리 할당
				in_mask = cvCreateImage(cvSize(m_rec_in[ni].width, m_rec_in[ni].height),8,1);

				//관심영역 설정
				cvSetImageROI(temp_mask, m_rec_in[ni]);
				
				//필요한 영역 복사
				cvCopy(temp_mask, in_mask, 0);

				//관심영역 해제
				cvResetImageROI(temp_mask);

				//관심영역 내의 오브젝트 처리를 위한 시퀸스 생성
				seq_in[ni] = cvCreateSeq(CV_SEQ_KIND_GENERIC | CV_32SC2,sizeof(CvContour),sizeof(CvPoint), storage2);

				//초기화
				int temp_x_in = 0;
				int temp_y_in = 0;	
				int num_in = 0;
				
				//관심영역에서 추출한이미지의 흰색 픽셀값으로 시퀸스 생성
				for(int j =0; j < in_mask ->height ; j++)
				{
					for(int k = 0; k < in_mask ->width; k++)
					{				
						if((unsigned char)in_mask->imageData[j*in_mask->widthStep+k] == 255)
						{
							point_in.x = k;		//흰색 픽셀 x좌표 저장
							point_in.y = j;		//흰색 픽셀 y좌표 저장
							cvSeqPush(seq_in[ni], &point_in);	//시퀸스 구조체에 해당 좌표 삽입
							temp_x_in += point_in.x; //좌표 누적
							temp_y_in += point_in.y; //좌표 누적
							num_in++;             //픽셀 수 카운트

						}

					}
				}
				
				//초기화
				max_pt_in.x = 0;
				max_pt_in.y = 0;					
				double fMaxDist_in	= 0;
				double fDist_in	= 0;
			
				//중심점 찾기 - 픽셀의  평균값 찾기
				if(num_in != 0)
				{
					center_in.x			= (int)temp_x_in/num_in + pt1.x; //평균 좌표값 구하기
					center_in.y			= (int)temp_y_in/num_in + pt1.y; //평균 좌표값 구하기
					
				}

				//우선 끝점이 2개일때만..
				if(m_nBlobs_in == 2)  
				{	
					//초기화
					finger_pt[ni].x = NULL;
					finger_pt[ni].y = NULL;
					finger_pt[ni].x = NULL;
					finger_pt[ni].y = NULL;

					if(seq_in[ni]->total !=0)
					{	
						//컨백스헐 구하기 - 윤곽선의 좌표 정보 겟
						CvSeq* hull_in = cvConvexHull2(seq_in[ni], 0, CV_COUNTER_CLOCKWISE, 0);	
						//point_in = **CV_GET_SEQ_ELEM(CvPoint*, hull_in,hull_in->total-1);


						//구한 컨백스헐 라인으로 그리기
						for(int nx = 0; nx < hull_in->total; nx++)
						{
							CvPoint hull_pt_in = **CV_GET_SEQ_ELEM(CvPoint*, hull_in,nx);
							hull_pt_in.x = hull_pt_in.x  + pt1.x;
							hull_pt_in.y = hull_pt_in.y + pt1.y;

							//중심점과 해당영역의 컨백스 헐 지점간의 거리 계산
							fDist_in = sqrt((double)((center.x - hull_pt_in.x) * (center.x - hull_pt_in.x) 
								+ (center.y - hull_pt_in.y) * (center.y - hull_pt_in.y)));

							//거리가 먼 점 찾기
							if(fDist_in > fMaxDist_in)
							{
								max_pt_in = hull_pt_in;
								fMaxDist_in = fDist_in;
								
							}
						}
					}				
				
					//최대점 보존
					finger_pt[ni].x = max_pt_in.x ;
					finger_pt[ni].y = max_pt_in.y ;
						
					//관심영역 해제할 경우의 값으로 보정
					finger_pt[ni].x = finger_pt[ni].x + m_rec_out[i].x;
					finger_pt[ni].y = finger_pt[ni].y + m_rec_out[i].y;		
				}
Пример #28
0
void HUandThickness()
{
	//thickness data
	string dir1(input1);//K:\sdf\volume\clean\clean\ep// D:\sdfdata\distance\rocdata
	//origion data
	string dir2(input2);//K:\sdf\volume\clean\clean\ep\rocdatao
	//skeleton data
	string dir3(input3);//K:\sdf\volume\clean\clean\ep\rocdatao
	//cout << dir1 <<endl;
	vector<string> files1;
	vector<string> files2;
	vector<string> files3;
	GetFileNameFromDir(dir1, files1);
	GetFileNameFromDir(dir2, files2);
	GetFileNameFromDir(dir3, files3);
	vector<string>::iterator iterFile1, iterFile2 = files2.begin(), iterFile3 = files3.begin();
	for (iterFile1 = files1.begin(); iterFile1 != files1.end(); iterFile1++)
	{


		iterFile1->assign(iterFile1->substr(dir1.size() + 1));
		iterFile2->assign(iterFile2->substr(dir2.size() + 1));
		iterFile3->assign(iterFile3->substr(dir3.size() + 1));
		cout << *iterFile1 << endl;
		cout << *iterFile2 << endl;
		//iterFile2++;
		int l = 0, m = 0, n = 0;
		char dst1[200], dst2[200], dst3[200];
		strcpy(dst1, (*iterFile1).c_str()); //string-->char
		strcpy(dst2, (*iterFile2).c_str());
		strcpy(dst3, (*iterFile3).c_str());
		RawImage test;
		char dir3[200] = input1;
		char dir4[200] = input2;
		char dir5[200] = input1;
		char dir6[200] = input3;
		strcat(dir3, dst1);
		strcat(dir4, dst2);
		strcat(dir5, dst1);
		strcat(dir6, dst3);
		short*orgiondata = test.readStream(dir4, &l, &m, &n);
		PIXTYPE * innerdata = new PIXTYPE[l*m*n];
		test.readImage2(innerdata, dir3, l*m*n);
		unsigned char * skeletondata = new unsigned char[l*m*n];
		test.readImage(skeletondata, dir6, l*m*n);

		float *inputo = new float[l*m*n];
		PIXTYPE *skeletondataF = new PIXTYPE[l*m*n];
		PIXTYPE *innerdataF = new PIXTYPE[l*m*n];
		PIXTYPE max = 0, min = 1000;
		for (int i = 0; i < l*m*n; i++)
		{

			PIXTYPE a = inputo[i] = (float)orgiondata[i] + 1020;
			max > a ? max = max : max = a;
			min < a ? min = min : min = a;
			PIXTYPE b = skeletondataF[i] = biglittleedian(float(skeletondata[i]));
			//if (PIXTYPE c=innerdataF[i]=biglittleedian((float)innerdata[i]))
			//{
			//	cout <<"aaa"<<endl;
			//}
			PIXTYPE c = innerdataF[i] = biglittleedian((float)innerdata[i]);
		}
		cout << max << endl;
		cout << min << endl;
		delete[]skeletondata;
		delete[]innerdata;
		delete[] orgiondata;
		Raw *thickness = new Raw(l, m, n, innerdataF, true);
		Raw *orgion = new Raw(l, m, n, inputo);
		Raw *skeleton = new Raw(l, m, n, skeletondataF);
		Raw *hu = thicknessequalHU(orgion, thickness);
		queue<Point>q;
		vector<Point> c;

		DivideRegion *dr = new DivideRegion(q, skeleton, c);
		dr->putskletoninorder();
		dr->DivideRegionv2(skeleton, hu);
		dr->DivideRegionthicknessv2(skeleton, thickness);

	}

}
Пример #29
0
void rocwayinner2people(string dir1, string dir2)
{
	// Grows the "inner" segmentation (voxels marked 100) outward: for four
	// passes, every in-plane 8-neighbour of a marked voxel whose original
	// intensity satisfies sign() is also marked 100. The grown volume is then
	// written to the output directory.
	//
	// dir1: file name of the inner volume (also used for the output name)
	// dir2: file name of the original ("clean") volume
	int l = 0, m = 0, n = 0;
	char nameBuf1[200], nameBuf2[200];
	strcpy(nameBuf1, dir1.c_str()); // string --> char
	strcpy(nameBuf2, dir2.c_str());
	RawImage reader;
	char innerPath[200] = "L:\\sdfdata2\\inner\\";
	char cleanPath[200] = "K:\\sdf\\volume\\clean\\clean\\ep\\clean\\";
	char outPath[200] = "D:\\segdata\\people\\";
	strcat(innerPath, nameBuf1);
	strcat(cleanPath, nameBuf2);
	strcat(outPath, nameBuf1);

	short *orgiondata = reader.readStream(cleanPath, &l, &m, &n);
	PIXTYPE *innerdata = reader.readStreamfloat(innerPath, &l, &m, &n);

	Raw src(l, m, n, innerdata);

	// Promote the original shorts to floats so they can be wrapped in a Raw.
	float *inputo = new float[l*m*n];
	for (int idx = 0; idx < l*m*n; idx++)
	{
		inputo[idx] = (float)orgiondata[idx];
	}
	Raw *orgion = new Raw(l, m, n, inputo);

	// In-plane 8-neighbourhood offsets (no offsets along k), in the same
	// order the original hand-written checks used.
	static const int OFF[8][2] = {
		{ -1,  0 }, { -1, -1 }, { -1,  1 },
		{  0, -1 }, {  0,  1 },
		{  1,  0 }, {  1, -1 }, {  1,  1 }
	};

	int count = 0;
	int passesLeft = 4;
	do
	{
		for (int k = 1; k < n - 1; k++)
		{
			for (int j = 1; j < m - 1; j++)
			{
				for (int i = 1; i < l - 1; i++)
				{
					// Only already-marked voxels seed the growth.
					if (src.get(i, j, k) != 100)
						continue;

					for (int t = 0; t < 8; t++)
					{
						int ni = i + OFF[t][0];
						int nj = j + OFF[t][1];
						if (sign(orgion->get(ni, nj, k)))
						{
							src.put(ni, nj, k, 100);
							count++;
						}
					}
				}
			}
		}

		passesLeft--;
		cout << count << endl; // cumulative number of puts so far

	} while (passesLeft);

	reader.writeImageName(src, outPath);
}
Пример #30
0
void thincknessstdv2(string dir)
{
	// Computes the mean and standard deviation of the thickness values that
	// lie on the boundary of the segmented region in one EDT volume, prints
	// them and appends them to a fixed result file.
	//
	// dir: file name (relative to the fixed read directory) of the volume.
	int l = 512;
	int m = 512;
	int	n = 700;
	RawImage test;
	char dst[100];
	strcpy(dst, dir.c_str());
	char readdir[200] = "L:\\sdfdata2\\edt\\";
	strcat(readdir, dst);
	PIXTYPE * indata1 = test.readStreamfloat(readdir, &l, &m, &n);
	Raw *src = new Raw(l, m, n, indata1);
	// Local renamed from "std" to "stdev": a variable named std would shadow
	// the namespace and make the qualified std::swap calls below ill-formed.
	PIXTYPE sum = 0, stdev = 0, mean = 0, s = 0;
	int countall = 0;
	int countboundary = 0;
	for (int k = 0; k < src->getZsize(); k++)
	{
		for (int j = 0; j < src->getYsize(); j++)
		{
			for (int i = 0; i < src->getXsize(); i++)
			{
				PIXTYPE  val = src->get(i, j, k);
				// The stream is stored with opposite endianness: swap the four
				// bytes of the sample in place.
				// BUG FIX: "std:swap(...)" declared a label named 'std' and
				// made an unqualified call; now explicitly std::swap.
				float * p = &val;
				unsigned char * bp = (unsigned char *)p;
				std::swap(bp[0], bp[3]);
				std::swap(bp[1], bp[2]);
				if (val > 0 && val <100)
				{
					countall++;
					// Boundary voxel: some in-plane 8-neighbour is <= val.
					// NOTE(review): neighbours are read at i/j +/- 1 while the
					// loops start at 0 and run to size-1 — this relies on
					// Raw::get tolerating out-of-range indices; confirm.
					if (src->get(i - 1, j, k) <= val || src->get(i - 1, j - 1, k) <= val || src->get(i - 1, j + 1, k) <= val \
						|| src->get(i, j - 1, k) <= val || src->get(i, j + 1, k) <= val || src->get(i + 1, j, k) <= val || src->get(i + 1, j - 1, k) <= val || src->get(i + 1, j + 1, k) <= val)
					{
						sum += val;

						s += val*val;		// sum of squares; simple std formula, OK for small values
						countboundary++;
					}
				}
			}
		}

	}
	// BUG FIX: guard against an empty boundary (division by zero).
	if (countboundary > 0)
	{
		mean = sum / countboundary;
		stdev = sqrt((s - countboundary*mean*mean) / countboundary);
	}
	// BUG FIX: results were stored at res[1]/res[2] of a new PIXTYPE[2] —
	// an out-of-bounds write plus an uninitialized res[0] in the output.
	// A stack array with indices 0 and 1 replaces the heap allocation.
	PIXTYPE res[2] = { mean, stdev };
	cout << "mean" << mean << "std" << stdev << endl;
	ofstream os("K:\\sdf\\volume\\clean\\clean\\ep\\meanstd.txt", ios::app);
	if (os)
	{
		for (int i = 0; i <2; i++)
		{
			os << res[i] << " ";

		}
		os << endl;
	}
	delete[] indata1;

}