// -------------------------------------------------------------------------- void BlobModel::UpdateHeatMap(IplImage* motionmap) { // for each head candidate draw a circle on the floor; add it to the heatmap image // NOTE: due to scaling, circle becomes an ellipse double BLOB_RADIUS = 50; // blob projection on the floor average radius in centimeters bool oneclose = false; for (int i=0;i<blob.GetCount();i++) { CvSeq* heads = doc->blobmodel.blob[i]->heads; for (int j=0;j<heads->total;j++) { BlobRay* br = (BlobRay*)cvGetSeqElem(heads, j); CvPoint3D32f head, foot; doc->cameramodel.coordsImage2RealSameXY_Feet2Floor(cvPointTo32f(br->p1), cvPointTo32f(br->p2), &head, &foot); // ignore short candidates if (head.z < doc->bodymodel.m_minHeight) continue; // ignore repeating artifact closeby candidates if (oneclose) continue; if (d(foot) < BLOB_RADIUS*2) oneclose = true; CvPoint c = doc->floormodel.coordsReal2Floor(foot); CvSize axes = doc->floormodel.sizeReal2Floor(cvSize2D32f(BLOB_RADIUS, BLOB_RADIUS)); cvZero(motionmaptemp); cvEllipse(motionmaptemp, c, axes, 0, 0, 360, cvScalar(1), CV_FILLED); cvAcc(motionmaptemp, motionmap); } } }
// Locate the fingertip below the palm center and return it as a hand point.
// Scans rows downward from the palm center, tracking the finger while each
// row still contains a lit (255) pixel; the last lit row is the tip.
// Returns a CHandPoint with a bottom-left-origin y, plus a click flag when
// the tip lies far to the left of the palm center.
CHandPoint CTransformImage::findFinger()
{
    // Guard BEFORE findCenter(): findCenter() dereferences m_transImage.
    // (Sibling findFingerInfo() already checks in this order.)
    if(!m_transImage)
        return CHandPoint();

    findCenter();

    int width = m_transImage->width;
    int height = 180;               // scan is limited to the top 180 rows
    int moveX = 0, moveY = height;
    BOOL bClick = FALSE, bWheel = FALSE;
    unsigned char ch;

    for(int y = m_center.y; y < height; ++y)
    {
        for(int x = m_center.x-100; x < m_center.x+50; ++x)
        {
            if(x < 0 || x >= width || y < 0 || y >= height)
                continue;

            ch = m_transImage->imageData[y*width+x];
            if(ch == 255)
            {
                moveX = x, moveY = y;
                // a hit well to the left of the center counts as a click
                if(x < m_center.x-50)
                    bClick = TRUE;
                break;
            }
        }
        // stop at the first row with no lit pixel (moveY was not updated):
        // the previously recorded point is the fingertip
        if(moveY != y)
            break;
    }

    // Anti-jitter: keep the previous point only when BOTH deltas are small.
    // BUGFIX: was '||', which snapped the point back whenever EITHER axis
    // barely moved, so purely horizontal or vertical motion never registered.
    if(abs(m_pastPt.x-moveX) < 2 && abs(m_pastPt.y-moveY) < 2)
        moveX = m_pastPt.x, moveY = m_pastPt.y;
    m_pastPt.x = moveX, m_pastPt.y = moveY;

    // mark the detected fingertip on the display image
    CvBox2D box;
    box.center = cvPoint2D32f(moveX, moveY);
    box.size = cvSize2D32f(2, 2);
    box.angle = 90;
    cvEllipseBox(m_image, box, CV_RGB(0,255,0), 1);

    // y is flipped so the returned point uses a bottom-left origin
    return CHandPoint(moveX, height-moveY, bClick, bWheel);
}
// Estimate the palm center as the brightest pixel of a rescaled distance
// transform of the binary hand image, store it in m_center, draw a marker
// on the display image, and return the center point.
CvPoint CTransformImage::findCenter()
{
    IplImage* view8u  = cvCloneImage(m_transImage);
    IplImage* dist32f = cvCreateImage(cvGetSize(m_transImage), IPL_DEPTH_32F, 1);
    IplImage* dist32s = cvCreateImage(cvGetSize(m_transImage), IPL_DEPTH_32S, 1);

    // user-defined 3x3 distance mask
    float mask[3] = {1.f, 1.5f, 0};
    cvDistTransform(m_transImage, dist32f, CV_DIST_USER, 3, mask, NULL);

    // rescale for visibility: x -> sqrt(1000*x), round, keep the low 8 bits
    cvConvertScale(dist32f, dist32f, 1000, 0);
    cvPow(dist32f, dist32f, 0.5);
    cvConvertScale(dist32f, dist32s, 1.0, 0.5);
    cvAndS(dist32s, cvScalarAll(255), dist32s, 0);
    cvConvertScale(dist32s, view8u, 1, 0);

    // locate the brightest pixel; its coordinates become the palm center
    int best = 0;
    for(int row = 0; row < view8u->height; ++row)
    {
        const unsigned char* line =
            (const unsigned char*)(view8u->imageData + row * view8u->widthStep);
        for(int col = 0; col < view8u->width; ++col)
        {
            if(line[col] > best)
            {
                best = line[col];
                m_center.x = col, m_center.y = row;
            }
        }
    }

    cvReleaseImage(&view8u);
    cvReleaseImage(&dist32f);
    cvReleaseImage(&dist32s);

    // never report a negative center
    if(m_center.x < 0 || m_center.y < 0)
        m_center.x = 0, m_center.y = 0;

    // mark the detected center on the display image
    CvBox2D box;
    box.center = cvPoint2D32f(m_center.x, m_center.y);
    box.size = cvSize2D32f(3, 3);
    box.angle = 90;
    cvEllipseBox(m_image, box, CV_RGB(255,242,0), 3);

    return m_center;
}
// Tracks an object inside the incoming Jitter matrix with mean-shift or
// CamShift (selected by x->mode), then writes the resulting rectangle,
// oriented-box corner points and normalized mass back into the external.
t_jit_err cv_jit_shift_matrix_calc(t_cv_jit_shift *x, void *inputs, void *outputs)
{
    t_jit_err err=JIT_ERR_NONE;
    long in_savelock = 0;
    t_jit_matrix_info in_minfo;
    void *in_matrix;
    CvMat source;
    CvRect rectangle;
    CvBox2D box;
    CvConnectedComp component;
    CvPoint2D32f vertices[4];
    float w,h,c,s;

    //Get pointer to matrix
    in_matrix = jit_object_method(inputs,_jit_sym_getindex,0);

    if (x&&in_matrix)
    {
        //Lock the matrix
        in_savelock = (long) jit_object_method(in_matrix,_jit_sym_lock,1);

        //Make sure input is of proper format: 2-D, single-plane, char
        jit_object_method(in_matrix,_jit_sym_getinfo,&in_minfo);
        if(in_minfo.dimcount != 2)
        {
            err = JIT_ERR_MISMATCH_DIM;
            goto out;
        }
        if(in_minfo.planecount != 1)
        {
            err = JIT_ERR_MISMATCH_PLANE;
            goto out;
        }
        if(in_minfo.type != _jit_sym_char)
        {
            err = JIT_ERR_MISMATCH_TYPE;
            goto out;
        }

        //Don't process if image is too small
        if((in_minfo.dim[0] < 2)||(in_minfo.dim[1] < 2))
            goto out;

        //Calculate start rectangle: x->rect holds (left, top, right, bottom);
        //clip it so it lies fully inside the matrix
        rectangle = cvRect(x->rect[0],x->rect[1],x->rect[2]-x->rect[0],x->rect[3]-x->rect[1]);
        CLIP_ASSIGN(rectangle.x,0,in_minfo.dim[0]-1);
        CLIP_ASSIGN(rectangle.y,0,in_minfo.dim[1]-1);
        CLIP_ASSIGN(rectangle.width,1,in_minfo.dim[0]-rectangle.x);
        CLIP_ASSIGN(rectangle.height,1,in_minfo.dim[1]-rectangle.y);

        //Convert Jitter matrix to OpenCV matrix
        cvJitter2CvMat(in_matrix, &source);

        //Calculate camshift
        if(x->mode == 1) //Use camshift
            cvCamShift(&source, rectangle, cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,(int)x->maxiters,x->epsilon), &component, &box );
        else
        {
            //Mean-shift only yields an axis-aligned rect; synthesize the box
            //from the component so the frame output below works in both modes
            cvMeanShift(&source, rectangle, cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,(int)x->maxiters,x->epsilon), &component);
            box.angle = 90.f;
            box.size = cvSize2D32f(component.rect.width, component.rect.height);
            box.center = cvPoint2D32f((float)component.rect.x + (float)component.rect.width * 0.5f,(float)component.rect.y + (float)component.rect.height * 0.5f);
        }

        //Prepare output: axis-aligned bounds as atoms, and feed the result
        //rectangle back into x->rect as the seed for the next frame
        jit_atom_setlong(&x->box[0],component.rect.x);
        jit_atom_setlong(&x->box[1],component.rect.y);
        jit_atom_setlong(&x->box[2],component.rect.x + component.rect.width);
        jit_atom_setlong(&x->box[3],component.rect.y + component.rect.height);
        x->rect[0]=component.rect.x;
        x->rect[1]=component.rect.y;
        x->rect[2]=component.rect.x + component.rect.width;
        x->rect[3]=component.rect.y + component.rect.height;

        //Compute the four corners of the oriented box by hand
        //(hand-rolled equivalent of cvBoxPoints(box,vertices);)
        w = box.size.width * 0.5;
        h = box.size.height * 0.5;
        c = cos((box.angle - 90.f) * -0.01745329252);  // degrees -> radians
        s = sin((box.angle - 90.f) * -0.01745329252);
        vertices[0].x = box.center.x - s*h - c*w;
        vertices[0].y = box.center.y - c*h + s*w;
        vertices[1].x = box.center.x - s*h + c*w;
        vertices[1].y = box.center.y - c*h - s*w;
        vertices[2].x = box.center.x + s*h + c*w;
        vertices[2].y = box.center.y + c*h - s*w;
        vertices[3].x = box.center.x + s*h - c*w;
        vertices[3].y = box.center.y + c*h + s*w;
        jit_atom_setlong(&x->frame[0],(long)vertices[0].x);
        jit_atom_setlong(&x->frame[1],(long)vertices[0].y);
        jit_atom_setlong(&x->frame[2],(long)vertices[1].x);
        jit_atom_setlong(&x->frame[3],(long)vertices[1].y);
        jit_atom_setlong(&x->frame[4],(long)vertices[2].x);
        jit_atom_setlong(&x->frame[5],(long)vertices[2].y);
        jit_atom_setlong(&x->frame[6],(long)vertices[3].x);
        jit_atom_setlong(&x->frame[7],(long)vertices[3].y);

        //Normalized mass of the tracked region (char matrix: area / 256)
        x->mass = (float)(component.area / 256.);
    }

out:
    //Restore the previous lock state.
    //NOTE(review): this is reached even when in_matrix is NULL —
    //presumably jit_object_method on NULL is a no-op; verify with the SDK.
    jit_object_method(in_matrix,gensym("lock"),in_savelock);
    return err;
}
// Classify the hand pose by counting fingers that cross a circle of radius
// 80 around the palm center (each 0->255 transition along the arc is one
// finger), then map the count — and for 2/3 fingers their spacing — to a
// CHandPoint mode. Returns the populated CHandPoint.
CHandPoint CTransformImage::findFingerInfo()
{
    if(!m_transImage)
        return CHandPoint();

    findCenter();

    CHandPoint handPt;
    std::vector<CvPoint> ptList;
    double pi = 3.1415;
    int width = m_transImage->width;
    int height = m_transImage->height;
    int fingerCnt = 0;
    int x, y, radius = 80;
    unsigned char ch, pastCh = 0;

    // walk the arc; a rising edge (0 -> 255) marks a new finger
    for(double theta = 180; theta <= 360; ++theta)
    {
        x = (int)(m_center.x + radius*cos(theta*pi/180));
        y = (int)(m_center.y - radius*sin(theta*pi/180));

        // BUGFIX: samples were read without bounds checking; the circle can
        // leave the frame when the palm center is near an image border
        if(x < 0 || x >= width || y < 0 || y >= height)
        {
            pastCh = 0;
            continue;
        }

        ch = m_transImage->imageData[y*width+x];
        if(ch == 255 && pastCh == 0) // Counting Finger
            ptList.push_back(cvPoint(x,y)), ++fingerCnt;
        pastCh = ch;

        // Draw OutLine (sampling circle, for debugging)
        CvBox2D box;
        box.center = cvPoint2D32f(x, y);
        box.size = cvSize2D32f(1, 1);
        box.angle = 90;
        cvEllipseBox(m_image, box, CV_RGB(255,242,0), 1);
    }

    // handPt Setting: map finger count to a gesture mode
    float dist = 0, dist2 = 0;
    switch(fingerCnt)
    {
    case 0:
        handPt.m_mode = CHandPoint::CLEAR;
        break;
    case 1:
        handPt.m_mode = CHandPoint::MOVE;
        findEndPoint(&handPt.m_nX, &handPt.m_nY);
        break;
    case 2:
        {
            CvPoint a = ptList[0], b = ptList[1];
            // BUGFIX: was 'float dist = ...', shadowing the outer 'dist' so
            // the debug trace below always printed 0 for the 2-finger case
            dist = sqrt((float)(abs(a.x-b.x)*abs(a.x-b.x) + abs(a.y-b.y)*abs(a.y-b.y)));
            if(dist < 70) // fingers close together -> DRAW mode family
            {
                handPt.m_mode = CHandPoint::CIRCLE;
                handPt.m_nX = m_center.x, handPt.m_nY = m_center.y;
            }
            else
            {
                handPt.m_mode = CHandPoint::DRAW;
                findEndPoint(&handPt.m_nX, &handPt.m_nY);
            }
        }
        break;
    case 3:
        {
            CvPoint a = ptList[0], b = ptList[1], c = ptList[2];
            dist = sqrt((float)(abs(a.x-b.x)*abs(a.x-b.x) + abs(a.y-b.y)*abs(a.y-b.y)));
            dist2 = sqrt((float)(abs(c.x-b.x)*abs(c.x-b.x) + abs(c.y-b.y)*abs(c.y-b.y)));
            if(abs(dist-dist2) < 10) // roughly evenly spaced fingers
            {
                handPt.m_mode = CHandPoint::TRIANGE;
                handPt.m_nX = m_center.x, handPt.m_nY = m_center.y;
            }
            else
            {
                handPt.m_mode = CHandPoint::SETTING;
            }
        }
        break;
    case 4:
        handPt.m_mode = CHandPoint::RECT;
        handPt.m_nX = m_center.x, handPt.m_nY = m_center.y;
        break;
    case 5:
        handPt.m_mode = CHandPoint::STAR;
        handPt.m_nX = m_center.x, handPt.m_nY = m_center.y;
        break;
    default:
        handPt.m_mode = CHandPoint::NOTHING;
        break;
    }

    // debug trace: finger count and last measured finger distance
    TCHAR buf[256] = {0,};
    // BUGFIX: swprintf's size argument is a CHARACTER count, not bytes;
    // sizeof(buf) overstated it by 2x on wide-character builds
    swprintf(buf, sizeof(buf)/sizeof(buf[0]), _T("%d\t%f\n"), fingerCnt, dist);
    ::OutputDebugString(buf);

    return handPt;
}
// Constructor: registers this particle-simulation component with the
// SwisTrack core, declaring which data structures it writes and which
// display it provides, then loads its configuration.
THISCLASS::ComponentSimulationParticles(SwisTrackCore *stc):
        Component(stc, wxT("SimulationParticles")),
        // default camera: origin (0,0), no rotation, unit pixel size, 640x480
        mCameraOrigin(cvPoint2D32f(0, 0)), mCameraRotation(0), mCameraPixelSize(1),
        mCameraSize(cvSize2D32f(640, 480)),
        mSimulationParticles(0), mParticles(),
        mDisplayOutput(wxT("Output"), wxT("Particle Simulation: Output"))
{
    // Data structure relations
    mCategory = &(mCore->mCategoryParticleDetection);
    AddDataStructureWrite(&(mCore->mDataStructureInput));
    AddDataStructureWrite(&(mCore->mDataStructureParticles));
    AddDisplay(&mDisplayOutput);

    // Read the XML configuration file
    Initialize();
}