/*
 * Draw one facet of the subdivision onto img: walk the left-face ring
 * starting at `edge`, fill the polygon with a random color, outline it in
 * black, and mark the facet's dual (Voronoi) point.
 */
void draw_subdiv_facet( IplImage* img, CvSubdiv2DEdge edge )
{
    CvSubdiv2DEdge t = edge;
    int i, count = 0;
    CvPoint* buf = 0;

    /* count number of edges in facet */
    do
    {
        count++;
        t = cvSubdiv2DGetEdge( t, CV_NEXT_AROUND_LEFT );
    }
    while( t != edge );

    buf = (CvPoint*)malloc( count * sizeof(buf[0]));
    if( !buf )          /* fix: original dereferenced a failed malloc */
        return;

    /* gather points */
    t = edge;
    for( i = 0; i < count; i++ )
    {
        CvSubdiv2DPoint* pt = cvSubdiv2DEdgeOrg( t );
        if( !pt ) break;
        buf[i] = cvPoint( cvRound(pt->pt.x), cvRound(pt->pt.y));
        t = cvSubdiv2DGetEdge( t, CV_NEXT_AROUND_LEFT );
    }

    if( i == count )    /* all vertices were valid -> facet is complete */
    {
        CvSubdiv2DPoint* pt = cvSubdiv2DEdgeDst( cvSubdiv2DRotateEdge( edge, 1 ));
        cvFillConvexPoly( img, buf, count,
                          CV_RGB(rand()&255,rand()&255,rand()&255), CV_AA, 0 );
        cvPolyLine( img, &buf, &count, 1, 1, CV_RGB(0,0,0), 1, CV_AA, 0);
        if( pt )        /* fix: original dereferenced pt without a null check */
            draw_subdiv_point( img, pt->pt, CV_RGB(0,0,0));
    }
    free( buf );
}
/* Remove a quad-edge from the subdivision: unlink both directed halves
   from their origin rings, then return the record to the edge set. */
static void
cvSubdiv2DDeleteEdge( CvSubdiv2D * subdiv, CvSubdiv2DEdge edge )
{
    /* clear the 2-bit rotation index to recover the quad-edge record */
    CvQuadEdge2D *qedge = (CvQuadEdge2D *) (edge & ~3);
    CvSubdiv2DEdge reversed;

    CV_FUNCNAME( "cvSubdiv2DDeleteEdge" );

    __BEGIN__;

    if( !subdiv || !qedge )
        CV_ERROR( CV_StsNullPtr, "" );

    /* detach the forward half from its origin ring ... */
    cvSubdiv2DSplice( edge, cvSubdiv2DGetEdge( edge, CV_PREV_AROUND_ORG ));

    /* ... then the symmetric half from its own origin ring */
    reversed = cvSubdiv2DSymEdge( edge );
    cvSubdiv2DSplice( reversed, cvSubdiv2DGetEdge( reversed, CV_PREV_AROUND_ORG ));

    /* recycle the storage and keep the bookkeeping in sync */
    cvSetRemoveByPtr( (CvSet*)(subdiv->edges), qedge );
    subdiv->quad_edges--;

    __END__;
}
/*
 * Paint the facet to the left of `edge` into dst, colored by sampling the
 * source image `src` at the facet's dual (Voronoi) point. Uses a small
 * stack buffer and falls back to the heap for large facets.
 */
static void
draw_subdiv_facet( CvSubdiv2D * subdiv, IplImage * dst, IplImage * src,
                   CvSubdiv2DEdge edge )
{
    CvSubdiv2DEdge t = edge;
    int i, count = 0;
    CvPoint local_buf[100];
    CvPoint *buf = local_buf;

    /* count number of edges in facet; bounded by the total edge count so a
       corrupt topology cannot loop forever */
    do
    {
        count++;
        t = cvSubdiv2DGetEdge( t, CV_NEXT_AROUND_LEFT );
    }
    while( t != edge && count < subdiv->quad_edges * 4 );

    if( count * sizeof( buf[0] ) > sizeof( local_buf ))
    {
        buf = (CvPoint *) malloc( count * sizeof( buf[0] ));
        if( !buf )      /* fix: original dereferenced a failed malloc */
            return;
    }

    /* gather points */
    t = edge;
    for( i = 0; i < count; i++ )
    {
        CvSubdiv2DPoint *pt = cvSubdiv2DEdgeOrg( t );
        if( !pt ) break;
        assert( fabs( pt->pt.x ) < 10000 && fabs( pt->pt.y ) < 10000 );
        buf[i] = cvPoint( cvRound( pt->pt.x ), cvRound( pt->pt.y ));
        t = cvSubdiv2DGetEdge( t, CV_NEXT_AROUND_LEFT );
    }

    if( i == count )    /* all vertices were valid -> facet is complete */
    {
        CvSubdiv2DPoint *pt = cvSubdiv2DEdgeDst( cvSubdiv2DRotateEdge( edge, 1 ));
        CvScalar color = {{0,0,0,0}};

        if( pt )        /* fix: original dereferenced pt without a null check */
        {
            CvPoint ip = cvPoint( cvRound( pt->pt.x ), cvRound( pt->pt.y ));
            if( 0 <= ip.x && ip.x < src->width && 0 <= ip.y && ip.y < src->height )
            {
                /* sample the BGR pixel; CV_RGB takes (r,g,b) order */
                uchar *ptr = (uchar*)(src->imageData + ip.y * src->widthStep + ip.x * 3);
                color = CV_RGB( ptr[2], ptr[1], ptr[0] );
            }
        }
        cvFillConvexPoly( dst, buf, count, color );
        //draw_subdiv_point( dst, pt->pt, CV_RGB(0,0,0));
    }

    if( buf != local_buf )
        free( buf );
}
/* Validate the quad-edge topology invariants of `subdiv`: ring consistency
   around origins/destinations and 3-cycle faces for primal edges.
   Returns 1 when everything is consistent, 0 on the first violation. */
CV_IMPL int
icvSubdiv2DCheck( CvSubdiv2D* subdiv )
{
    int i, j, total;
    int check_result = 0;

    CV_FUNCNAME("icvSubdiv2DCheck");

    __BEGIN__;

    if( !subdiv )
        CV_ERROR_FROM_STATUS( CV_NULLPTR_ERR );

    /* fix: read the edge count only after validating the pointer; the
       original dereferenced subdiv->edges before the null check */
    total = subdiv->edges->total;

    for( i = 0; i < total; i++ )
    {
        CvQuadEdge2D* edge = (CvQuadEdge2D*)cvGetSetElem(subdiv->edges,i);

        if( edge && CV_IS_SET_ELEM( edge ))
        {
            for( j = 0; j < 4; j++ )
            {
                CvSubdiv2DEdge e = (CvSubdiv2DEdge)edge + j;
                CvSubdiv2DEdge o_next = cvSubdiv2DNextEdge(e);
                CvSubdiv2DEdge o_prev = cvSubdiv2DGetEdge(e, CV_PREV_AROUND_ORG );
                CvSubdiv2DEdge d_prev = cvSubdiv2DGetEdge(e, CV_PREV_AROUND_DST );
                CvSubdiv2DEdge d_next = cvSubdiv2DGetEdge(e, CV_NEXT_AROUND_DST );

                /* check points: every edge in an origin (dest) ring must
                   share that origin (dest) */
                if( cvSubdiv2DEdgeOrg(e) != cvSubdiv2DEdgeOrg(o_next)) EXIT;
                if( cvSubdiv2DEdgeOrg(e) != cvSubdiv2DEdgeOrg(o_prev)) EXIT;
                if( cvSubdiv2DEdgeDst(e) != cvSubdiv2DEdgeDst(d_next)) EXIT;
                if( cvSubdiv2DEdgeDst(e) != cvSubdiv2DEdgeDst(d_prev)) EXIT;

                if( j % 2 == 0 )    /* primal edges only */
                {
                    if( cvSubdiv2DEdgeDst(o_next) != cvSubdiv2DEdgeOrg(d_prev)) EXIT;
                    if( cvSubdiv2DEdgeDst(o_prev) != cvSubdiv2DEdgeOrg(d_next)) EXIT;

                    /* faces of a triangulation must close in three steps */
                    if( cvSubdiv2DGetEdge(cvSubdiv2DGetEdge(cvSubdiv2DGetEdge(
                        e,CV_NEXT_AROUND_LEFT),CV_NEXT_AROUND_LEFT),CV_NEXT_AROUND_LEFT) != e )
                        EXIT;
                    if( cvSubdiv2DGetEdge(cvSubdiv2DGetEdge(cvSubdiv2DGetEdge(
                        e,CV_NEXT_AROUND_RIGHT),CV_NEXT_AROUND_RIGHT),CV_NEXT_AROUND_RIGHT) != e)
                        EXIT;
                }
            }
        }
    }

    check_result = 1;

    __END__;

    return check_result;
}
/* Create a new edge connecting the destination of edgeA to the origin of
   edgeB, so that the three edges share the same left face.
   Returns the new edge (0 on error). */
static CvSubdiv2DEdge
cvSubdiv2DConnectEdges( CvSubdiv2D * subdiv, CvSubdiv2DEdge edgeA, CvSubdiv2DEdge edgeB )
{
    CvSubdiv2DEdge new_edge = 0;

    /* fix: the error-reporting name said "cvSubdiv2DConnectPoints", which
       does not match this function's actual name */
    CV_FUNCNAME( "cvSubdiv2DConnectEdges" );

    __BEGIN__;

    CvSubdiv2DPoint *orgB, *dstA;

    if( !subdiv )
        CV_ERROR( CV_StsNullPtr, "" );

    new_edge = cvSubdiv2DMakeEdge( subdiv );

    /* stitch the new edge between the left-face ring of edgeA and edgeB */
    cvSubdiv2DSplice( new_edge, cvSubdiv2DGetEdge( edgeA, CV_NEXT_AROUND_LEFT ));
    cvSubdiv2DSplice( cvSubdiv2DSymEdge( new_edge ), edgeB );

    /* endpoints: dst(edgeA) -> org(edgeB) */
    dstA = cvSubdiv2DEdgeDst( edgeA );
    orgB = cvSubdiv2DEdgeOrg( edgeB );
    cvSubdiv2DSetEdgePoints( new_edge, dstA, orgB );

    __END__;

    return new_edge;
}
/* Detach `edge` (and its symmetric half) from the subdivision's edge rings
   and return the underlying quad-edge record to the free set. */
static void
cvSubdiv2DDeleteEdge( CvSubdiv2D * subdiv, CvSubdiv2DEdge edge )
{
    /* clear the 2-bit rotation index to recover the quad-edge record */
    CvQuadEdge2D *qedge = (CvQuadEdge2D *)(edge & ~3);

    if( !subdiv || !qedge )
        CV_Error( CV_StsNullPtr, "" );

    /* unlink the forward half from its origin ring ... */
    cvSubdiv2DSplice( edge, cvSubdiv2DGetEdge( edge, CV_PREV_AROUND_ORG ));

    /* ... and the reversed half from its own origin ring */
    CvSubdiv2DEdge reversed = cvSubdiv2DSymEdge( edge );
    cvSubdiv2DSplice( reversed, cvSubdiv2DGetEdge( reversed, CV_PREV_AROUND_ORG ));

    /* recycle the storage and keep the edge count in sync */
    cvSetRemoveByPtr( (CvSet*)(subdiv->edges), qedge );
    subdiv->quad_edges--;
}
/* Flip the diagonal `edge` inside the quadrilateral formed by the two
   triangles adjacent to it (the classic Delaunay edge swap).
   NOTE(review): statement order is significant here -- the edge is first
   detached from its current rings, its endpoints are then reassigned, and
   only then is it spliced back in. Do not reorder. */
static void
cvSubdiv2DSwapEdges( CvSubdiv2DEdge edge )
{
    CvSubdiv2DEdge sym_edge = cvSubdiv2DSymEdge( edge );
    /* a, b: the edges preceding edge / sym_edge around their origins */
    CvSubdiv2DEdge a = cvSubdiv2DGetEdge( edge, CV_PREV_AROUND_ORG );
    CvSubdiv2DEdge b = cvSubdiv2DGetEdge( sym_edge, CV_PREV_AROUND_ORG );
    CvSubdiv2DPoint *dstB, *dstA;

    /* detach both directed halves from their current origin rings */
    cvSubdiv2DSplice( edge, a );
    cvSubdiv2DSplice( sym_edge, b );

    /* the flipped edge runs between the destinations of a and b */
    dstA = cvSubdiv2DEdgeDst( a );
    dstB = cvSubdiv2DEdgeDst( b );
    cvSubdiv2DSetEdgePoints( edge, dstA, dstB );

    /* reattach the flipped edge into the rings of its new endpoints */
    cvSubdiv2DSplice( edge, cvSubdiv2DGetEdge( a, CV_NEXT_AROUND_LEFT ));
    cvSubdiv2DSplice( sym_edge, cvSubdiv2DGetEdge( b, CV_NEXT_AROUND_LEFT ));
}
/* Validate the quad-edge topology invariants of `subdiv`: ring consistency
   around origins/destinations and 3-cycle faces for primal edges.
   Returns 1 when everything is consistent, 0 on the first violation. */
CV_IMPL int
icvSubdiv2DCheck( CvSubdiv2D* subdiv )
{
    int i, j, total;

    /* fix: assert the pointer before dereferencing it; the original read
       subdiv->edges->total ahead of the CV_Assert */
    CV_Assert( subdiv != 0 );
    total = subdiv->edges->total;

    for( i = 0; i < total; i++ )
    {
        CvQuadEdge2D* edge = (CvQuadEdge2D*)cvGetSetElem(subdiv->edges,i);

        if( edge && CV_IS_SET_ELEM( edge ))
        {
            for( j = 0; j < 4; j++ )
            {
                CvSubdiv2DEdge e = (CvSubdiv2DEdge)edge + j;
                CvSubdiv2DEdge o_next = cvSubdiv2DNextEdge(e);
                CvSubdiv2DEdge o_prev = cvSubdiv2DGetEdge(e, CV_PREV_AROUND_ORG );
                CvSubdiv2DEdge d_prev = cvSubdiv2DGetEdge(e, CV_PREV_AROUND_DST );
                CvSubdiv2DEdge d_next = cvSubdiv2DGetEdge(e, CV_NEXT_AROUND_DST );

                /* check points: every edge in an origin (dest) ring must
                   share that origin (dest) */
                if( cvSubdiv2DEdgeOrg(e) != cvSubdiv2DEdgeOrg(o_next)) return 0;
                if( cvSubdiv2DEdgeOrg(e) != cvSubdiv2DEdgeOrg(o_prev)) return 0;
                if( cvSubdiv2DEdgeDst(e) != cvSubdiv2DEdgeDst(d_next)) return 0;
                if( cvSubdiv2DEdgeDst(e) != cvSubdiv2DEdgeDst(d_prev)) return 0;

                if( j % 2 == 0 )    /* primal edges only */
                {
                    if( cvSubdiv2DEdgeDst(o_next) != cvSubdiv2DEdgeOrg(d_prev)) return 0;
                    if( cvSubdiv2DEdgeDst(o_prev) != cvSubdiv2DEdgeOrg(d_next)) return 0;

                    /* faces of a triangulation must close in three steps */
                    if( cvSubdiv2DGetEdge(cvSubdiv2DGetEdge(cvSubdiv2DGetEdge(
                        e,CV_NEXT_AROUND_LEFT),CV_NEXT_AROUND_LEFT),CV_NEXT_AROUND_LEFT) != e )
                        return 0;
                    if( cvSubdiv2DGetEdge(cvSubdiv2DGetEdge(cvSubdiv2DGetEdge(
                        e,CV_NEXT_AROUND_RIGHT),CV_NEXT_AROUND_RIGHT),CV_NEXT_AROUND_RIGHT) != e)
                        return 0;
                }
            }
        }
    }

    return 1;
}
/* Create a new edge connecting the destination of edgeA to the origin of
   edgeB, so that the three edges share the same left face.
   Returns the new edge. */
static CvSubdiv2DEdge
cvSubdiv2DConnectEdges( CvSubdiv2D * subdiv, CvSubdiv2DEdge edgeA, CvSubdiv2DEdge edgeB )
{
    if( !subdiv )
        CV_Error( CV_StsNullPtr, "" );

    CvSubdiv2DEdge joining_edge = cvSubdiv2DMakeEdge( subdiv );

    /* stitch the new edge between the left-face ring of edgeA and edgeB */
    cvSubdiv2DSplice( joining_edge, cvSubdiv2DGetEdge( edgeA, CV_NEXT_AROUND_LEFT ));
    cvSubdiv2DSplice( cvSubdiv2DSymEdge( joining_edge ), edgeB );

    /* endpoints: dst(edgeA) -> org(edgeB) */
    cvSubdiv2DSetEdgePoints( joining_edge,
                             cvSubdiv2DEdgeDst( edgeA ),
                             cvSubdiv2DEdgeOrg( edgeB ));

    return joining_edge;
}
/* Collect the vertices of the facet to the left of `edge` into `buffer`.
   The sequence is cleared first and is left empty when the walk meets a
   null or flagged (flags < 0) vertex, or when the closed polygon has
   fewer than 3 points. */
void PlanarSubdivisionEdgeToPoly(CvSubdiv2DEdge edge, CvSeq* buffer)
{
   cvClearSeq(buffer);
   if (!edge)
      return;

   CvSubdiv2DPoint* origin = cvSubdiv2DEdgeOrg(edge);
   if (!origin || origin->flags < 0)
      return;

   CvSubdiv2DEdge current = edge;
   for (;;)
   {
      CvSubdiv2DPoint* vertex = cvSubdiv2DEdgeDst(current);
      if (!vertex || vertex->flags < 0)
      {
         /* invalid vertex encountered: discard the partial polygon */
         cvClearSeq(buffer);
         return;
      }

      cvSeqPush(buffer, &vertex->pt);

      /* the facet is closed once we return to the starting vertex */
      if (memcmp(&vertex->pt, &origin->pt, sizeof(CvPoint2D32f)) == 0)
         break;

      current = cvSubdiv2DGetEdge(current, CV_NEXT_AROUND_LEFT);
   }

   /* a polygon needs at least 3 vertices */
   if (buffer->total <= 2)
      cvClearSeq(buffer);
}
/* Collect the vertices of the facet to the left of `edge` into `buffer`,
   writing the number of points to *count. *count is set to 0 when the walk
   meets a null or virtual (flags == -1) vertex, or when the closed polygon
   has fewer than 3 points. The caller must size `buffer` for the facet. */
void PlanarSubdivisionEdgeToPoly(CvSubdiv2DEdge edge, CvPoint2D32f* buffer, int* count)
{
   CvSubdiv2DPoint* v0 = cvSubdiv2DEdgeOrg(edge);
   /* fix: guard against a null origin -- the CvSeq variant of this routine
      checks it, but this one previously dereferenced v0 unconditionally */
   if (!v0 || v0->flags == -1)
   {
      *count = 0;
      return;
   }

   CvPoint2D32f* currentBuffer = buffer;
   for (; ; edge = cvSubdiv2DGetEdge(edge, CV_NEXT_AROUND_LEFT))
   {
      CvSubdiv2DPoint* v = cvSubdiv2DEdgeDst(edge);
      /* fix: null check before dereferencing the destination vertex */
      if (!v || v->flags == -1)
      {
         *count = 0;
         return;
      }

      *currentBuffer++ = v->pt;

      /* the facet is closed once we return to the starting vertex */
      if (v->pt.x == v0->pt.x && v->pt.y == v0->pt.y)
         break;
   }

   *count = (int)(currentBuffer - buffer);

   /* a polygon needs at least 3 vertices */
   if (*count <= 2)
      *count = 0;
}
/* Highlight the facet containing fp: locate fp in the subdivision, draw
   every edge of the enclosing facet, then draw the query point itself. */
void locate_point( CvSubdiv2D* subdiv, CvPoint2D32f fp, IplImage* img,
                   CvScalar active_color )
{
    CvSubdiv2DEdge first = 0;
    CvSubdiv2DPoint* found = 0;

    cvSubdiv2DLocate( subdiv, fp, &first, &found );

    if( first )
    {
        /* walk the left-face ring, drawing each edge until we wrap around */
        CvSubdiv2DEdge walker = first;
        do
        {
            draw_subdiv_edge( img, walker, active_color );
            walker = cvSubdiv2DGetEdge( walker, CV_NEXT_AROUND_LEFT );
        }
        while( walker != first );
    }

    draw_subdiv_point( img, fp, active_color );
}
/* Find the subdivision vertex nearest to pt by walking the Voronoi diagram:
   starting from the located edge, step between Voronoi faces until the face
   whose site is nearest to pt is found. Returns 0 when the subdivision is
   (nearly) empty or pt falls on a vertex/outside handled by the switch. */
CV_IMPL CvSubdiv2DPoint*
cvFindNearestPoint2D( CvSubdiv2D* subdiv, CvPoint2D32f pt )
{
    CvSubdiv2DPoint* point = 0;
    CvPoint2D32f start;
    CvPoint2D32f diff;
    CvSubdiv2DPointLocation loc;
    CvSubdiv2DEdge edge;
    int i;

    if( !subdiv )
        CV_Error( CV_StsNullPtr, "" );
    if( !CV_IS_SUBDIV2D( subdiv ))
        CV_Error( CV_StsNullPtr, "" );

    /* fewer than 4 active edges: nothing meaningful to search */
    if( subdiv->edges->active_count <= 3 )
        return 0;

    /* the Voronoi structure must be up to date before walking it */
    if( !subdiv->is_geometry_valid )
        cvCalcSubdivVoronoi2D( subdiv );

    loc = cvSubdiv2DLocate( subdiv, pt, &edge, &point );
    switch( loc )
    {
    case CV_PTLOC_ON_EDGE:
    case CV_PTLOC_INSIDE:
        break;
    default:
        /* vertex hit (point already set) or error (point is 0) */
        return point;
    }

    point = 0;

    /* direction from the located edge's origin towards pt */
    start = cvSubdiv2DEdgeOrg( edge )->pt;
    diff.x = pt.x - start.x;
    diff.y = pt.y - start.y;

    /* rotate into the dual (Voronoi) edge */
    edge = cvSubdiv2DRotateEdge( edge, 1 );

    for( i = 0; i < subdiv->total; i++ )
    {
        CvPoint2D32f t;

        /* rotate around the current Voronoi face until the ray
           start+diff exits through the current edge */
        for(;;)
        {
            assert( cvSubdiv2DEdgeDst( edge ));
            t = cvSubdiv2DEdgeDst( edge )->pt;
            if( icvIsRightOf2( t, start, diff ) >= 0 )
                break;
            edge = cvSubdiv2DGetEdge( edge, CV_NEXT_AROUND_LEFT );
        }

        for(;;)
        {
            assert( cvSubdiv2DEdgeOrg( edge ));
            t = cvSubdiv2DEdgeOrg( edge )->pt;
            if( icvIsRightOf2( t, start, diff ) < 0 )
                break;
            edge = cvSubdiv2DGetEdge( edge, CV_PREV_AROUND_LEFT );
        }

        {
            /* if pt lies on the near side of the crossing Voronoi edge,
               the face's site (origin of the rotated primal edge) is the
               nearest vertex */
            CvPoint2D32f tempDiff = cvSubdiv2DEdgeDst( edge )->pt;
            t = cvSubdiv2DEdgeOrg( edge )->pt;
            tempDiff.x -= t.x;
            tempDiff.y -= t.y;

            if( icvIsRightOf2( pt, t, tempDiff ) >= 0 )
            {
                point = cvSubdiv2DEdgeOrg( cvSubdiv2DRotateEdge( edge, 3 ));
                break;
            }
        }

        /* otherwise step into the neighboring Voronoi face */
        edge = cvSubdiv2DSymEdge( edge );
    }

    return point;
}
/* Compute the Voronoi diagram dual to the Delaunay subdivision: for every
   triangle lacking a dual vertex, intersect two perpendicular bisectors of
   its edges to get the circumcenter, add it as a virtual point, and store
   it in the dual slots (pt[1]/pt[3]) of all three quad-edges.
   NOTE(review): the (edgeN & 2) arithmetic maps each edge's rotation index
   to the correct dual slot of its quad-edge record -- do not simplify. */
CV_IMPL void
cvCalcSubdivVoronoi2D( CvSubdiv2D * subdiv )
{
    CvSeqReader reader;
    int i, total, elem_size;

    if( !subdiv )
        CV_Error( CV_StsNullPtr, "" );

    /* check if it is already calculated */
    if( subdiv->is_geometry_valid )
        return;

    total = subdiv->edges->total;
    elem_size = subdiv->edges->elem_size;

    /* drop any stale virtual points/dual links before recomputing */
    cvClearSubdivVoronoi2D( subdiv );

    cvStartReadSeq( (CvSeq *) (subdiv->edges), &reader, 0 );

    if( total <= 3 )
        return;

    /* skip first three edges (bounding triangle) */
    for( i = 0; i < 3; i++ )
        CV_NEXT_SEQ_ELEM( elem_size, reader );

    /* loop through all quad-edges */
    for( ; i < total; i++ )
    {
        CvQuadEdge2D *quadedge = (CvQuadEdge2D *) (reader.ptr);

        if( CV_IS_SET_ELEM( quadedge ))
        {
            CvSubdiv2DEdge edge0 = (CvSubdiv2DEdge) quadedge, edge1, edge2;
            double a0, b0, c0, a1, b1, c1;
            CvPoint2D32f virt_point;
            CvSubdiv2DPoint *voronoi_point;

            /* left-face circumcenter not yet computed? */
            if( !quadedge->pt[3] )
            {
                edge1 = cvSubdiv2DGetEdge( edge0, CV_NEXT_AROUND_LEFT );
                edge2 = cvSubdiv2DGetEdge( edge1, CV_NEXT_AROUND_LEFT );

                /* perpendicular bisectors of two triangle edges */
                icvCreateCenterNormalLine( edge0, &a0, &b0, &c0 );
                icvCreateCenterNormalLine( edge1, &a1, &b1, &c1 );

                icvIntersectLines3( &a0, &b0, &c0, &a1, &b1, &c1, &virt_point );
                if( fabs( virt_point.x ) < FLT_MAX * 0.5 &&
                    fabs( virt_point.y ) < FLT_MAX * 0.5 )
                {
                    voronoi_point = cvSubdiv2DAddPoint( subdiv, virt_point, 1 );

                    /* share the circumcenter among all three edges' dual slots */
                    quadedge->pt[3] =
                        ((CvQuadEdge2D *) (edge1 & ~3))->pt[3 - (edge1 & 2)] =
                        ((CvQuadEdge2D *) (edge2 & ~3))->pt[3 - (edge2 & 2)] = voronoi_point;
                }
            }

            /* right-face circumcenter not yet computed? */
            if( !quadedge->pt[1] )
            {
                edge1 = cvSubdiv2DGetEdge( edge0, CV_NEXT_AROUND_RIGHT );
                edge2 = cvSubdiv2DGetEdge( edge1, CV_NEXT_AROUND_RIGHT );

                icvCreateCenterNormalLine( edge0, &a0, &b0, &c0 );
                icvCreateCenterNormalLine( edge1, &a1, &b1, &c1 );

                icvIntersectLines3( &a0, &b0, &c0, &a1, &b1, &c1, &virt_point );
                if( fabs( virt_point.x ) < FLT_MAX * 0.5 &&
                    fabs( virt_point.y ) < FLT_MAX * 0.5 )
                {
                    voronoi_point = cvSubdiv2DAddPoint( subdiv, virt_point, 1 );

                    quadedge->pt[1] =
                        ((CvQuadEdge2D *) (edge1 & ~3))->pt[1 + (edge1 & 2)] =
                        ((CvQuadEdge2D *) (edge2 & ~3))->pt[1 + (edge2 & 2)] = voronoi_point;
                }
            }
        }
        CV_NEXT_SEQ_ELEM( elem_size, reader );
    }

    subdiv->is_geometry_valid = 1;
}
/* Insert point pt into the Delaunay subdivision (incremental algorithm):
   locate pt, connect it to the vertices of the containing facet, then
   restore the Delaunay property by flipping edges that fail the
   in-circle test. Returns the (new or existing) vertex for pt.
   NOTE(review): the splice/connect sequence is order-dependent. */
CV_IMPL CvSubdiv2DPoint *
cvSubdivDelaunay2DInsert( CvSubdiv2D * subdiv, CvPoint2D32f pt )
{
    CvSubdiv2DPoint *point = 0;
    CvSubdiv2DPointLocation location = CV_PTLOC_ERROR;

    CvSubdiv2DPoint *curr_point = 0, *first_point = 0;
    CvSubdiv2DEdge curr_edge = 0, deleted_edge = 0, base_edge = 0;
    int i, max_edges;

    if( !subdiv )
        CV_Error( CV_StsNullPtr, "" );

    if( !CV_IS_SUBDIV2D(subdiv) )
        CV_Error( CV_StsBadFlag, "" );

    location = cvSubdiv2DLocate( subdiv, pt, &curr_edge, &curr_point );

    switch (location)
    {
    case CV_PTLOC_ERROR:
        CV_Error( CV_StsBadSize, "" );

    case CV_PTLOC_OUTSIDE_RECT:
        CV_Error( CV_StsOutOfRange, "" );

    case CV_PTLOC_VERTEX:
        /* the point already exists; return the existing vertex */
        point = curr_point;
        break;

    case CV_PTLOC_ON_EDGE:
        /* remove the edge the point falls on, then treat as INSIDE */
        deleted_edge = curr_edge;
        subdiv->recent_edge = curr_edge = cvSubdiv2DGetEdge( curr_edge, CV_PREV_AROUND_ORG );
        cvSubdiv2DDeleteEdge( subdiv, deleted_edge );
        /* no break */

    case CV_PTLOC_INSIDE:

        assert( curr_edge != 0 );
        subdiv->is_geometry_valid = 0;

        /* connect the new vertex to every vertex of the containing facet */
        curr_point = cvSubdiv2DAddPoint( subdiv, pt, 0 );
        base_edge = cvSubdiv2DMakeEdge( subdiv );
        first_point = cvSubdiv2DEdgeOrg( curr_edge );
        cvSubdiv2DSetEdgePoints( base_edge, first_point, curr_point );
        cvSubdiv2DSplice( base_edge, curr_edge );

        do
        {
            base_edge = cvSubdiv2DConnectEdges( subdiv, curr_edge,
                                                cvSubdiv2DSymEdge( base_edge ));
            curr_edge = cvSubdiv2DGetEdge( base_edge, CV_PREV_AROUND_ORG );
        }
        while( cvSubdiv2DEdgeDst( curr_edge ) != first_point );

        curr_edge = cvSubdiv2DGetEdge( base_edge, CV_PREV_AROUND_ORG );

        /* restore the Delaunay condition: flip edges whose opposite
           vertex lies inside the circumcircle of the new triangle */
        max_edges = subdiv->quad_edges * 4;

        for( i = 0; i < max_edges; i++ )
        {
            CvSubdiv2DPoint *temp_dst = 0, *curr_org = 0, *curr_dst = 0;
            CvSubdiv2DEdge temp_edge = cvSubdiv2DGetEdge( curr_edge, CV_PREV_AROUND_ORG );

            temp_dst = cvSubdiv2DEdgeDst( temp_edge );
            curr_org = cvSubdiv2DEdgeOrg( curr_edge );
            curr_dst = cvSubdiv2DEdgeDst( curr_edge );

            if( icvIsRightOf( temp_dst->pt, curr_edge ) > 0 &&
                icvIsPtInCircle3( curr_org->pt, temp_dst->pt,
                                  curr_dst->pt, curr_point->pt ) < 0 )
            {
                cvSubdiv2DSwapEdges( curr_edge );
                curr_edge = cvSubdiv2DGetEdge( curr_edge, CV_PREV_AROUND_ORG );
            }
            else if( curr_org == first_point )
            {
                /* full circle around the inserted point: done */
                break;
            }
            else
            {
                curr_edge = cvSubdiv2DGetEdge( cvSubdiv2DNextEdge( curr_edge ),
                                               CV_PREV_AROUND_LEFT );
            }
        }
        break;
    default:
        CV_Error_(CV_StsError, ("cvSubdiv2DLocate returned invalid location = %d", location) );
    }

    return curr_point;
}
/* Locate pt in the subdivision by walking edges from the most recently
   used edge. Reports whether pt is inside a facet, on an edge, on a
   vertex, or is in error; the found edge/vertex are returned through
   _edge/_point. The walk is bounded by max_edges iterations. */
CV_IMPL CvSubdiv2DPointLocation
cvSubdiv2DLocate( CvSubdiv2D * subdiv, CvPoint2D32f pt,
                  CvSubdiv2DEdge * _edge, CvSubdiv2DPoint ** _point )
{
    CvSubdiv2DPoint *point = 0;
    int right_of_curr = 0;

    if( !subdiv )
        CV_Error( CV_StsNullPtr, "" );

    if( !CV_IS_SUBDIV2D(subdiv) )
        CV_Error( CV_StsBadFlag, "" );

    int i, max_edges = subdiv->quad_edges * 4;
    CvSubdiv2DEdge edge = subdiv->recent_edge;

    if( max_edges == 0 )
        CV_Error( CV_StsBadSize, "" );
    CV_Assert(edge != 0);

    /* pt must be inside the bounding rectangle of the subdivision */
    if( pt.x < subdiv->topleft.x || pt.y < subdiv->topleft.y ||
        pt.x >= subdiv->bottomright.x || pt.y >= subdiv->bottomright.y )
        CV_Error( CV_StsOutOfRange, "" );

    CvSubdiv2DPointLocation location = CV_PTLOC_ERROR;

    /* orient the starting edge so pt is not to its right */
    right_of_curr = icvIsRightOf( pt, edge );
    if( right_of_curr > 0 )
    {
        edge = cvSubdiv2DSymEdge( edge );
        right_of_curr = -right_of_curr;
    }

    for( i = 0; i < max_edges; i++ )
    {
        CvSubdiv2DEdge onext_edge = cvSubdiv2DNextEdge( edge );
        CvSubdiv2DEdge dprev_edge = cvSubdiv2DGetEdge( edge, CV_PREV_AROUND_DST );

        int right_of_onext = icvIsRightOf( pt, onext_edge );
        int right_of_dprev = icvIsRightOf( pt, dprev_edge );

        if( right_of_dprev > 0 )
        {
            if( right_of_onext > 0 || (right_of_onext == 0 && right_of_curr == 0) )
            {
                /* pt is left of all three edges of the current face */
                location = CV_PTLOC_INSIDE;
                goto exit;
            }
            else
            {
                right_of_curr = right_of_onext;
                edge = onext_edge;
            }
        }
        else
        {
            if( right_of_onext > 0 )
            {
                if( right_of_dprev == 0 && right_of_curr == 0 )
                {
                    location = CV_PTLOC_INSIDE;
                    goto exit;
                }
                else
                {
                    right_of_curr = right_of_dprev;
                    edge = dprev_edge;
                }
            }
            else if( right_of_curr == 0 &&
                     icvIsRightOf( cvSubdiv2DEdgeDst( onext_edge )->pt, edge ) >= 0 )
            {
                edge = cvSubdiv2DSymEdge( edge );
            }
            else
            {
                right_of_curr = right_of_onext;
                edge = onext_edge;
            }
        }
    }
exit:

    /* cache the landing edge to speed up the next query */
    subdiv->recent_edge = edge;

    if( location == CV_PTLOC_INSIDE )
    {
        /* refine: pt may actually coincide with a vertex or lie on
           the landing edge (Manhattan-distance tests below) */
        double t1, t2, t3;
        CvPoint2D32f org_pt = cvSubdiv2DEdgeOrg( edge )->pt;
        CvPoint2D32f dst_pt = cvSubdiv2DEdgeDst( edge )->pt;

        t1 = fabs( pt.x - org_pt.x );
        t1 += fabs( pt.y - org_pt.y );
        t2 = fabs( pt.x - dst_pt.x );
        t2 += fabs( pt.y - dst_pt.y );
        t3 = fabs( org_pt.x - dst_pt.x );
        t3 += fabs( org_pt.y - dst_pt.y );

        if( t1 < FLT_EPSILON )
        {
            location = CV_PTLOC_VERTEX;
            point = cvSubdiv2DEdgeOrg( edge );
            edge = 0;
        }
        else if( t2 < FLT_EPSILON )
        {
            location = CV_PTLOC_VERTEX;
            point = cvSubdiv2DEdgeDst( edge );
            edge = 0;
        }
        else if( (t1 < t3 || t2 < t3) &&
                 fabs( cvTriangleArea( pt, org_pt, dst_pt )) < FLT_EPSILON )
        {
            location = CV_PTLOC_ON_EDGE;
            point = 0;
        }
    }

    if( location == CV_PTLOC_ERROR )
    {
        edge = 0;
        point = 0;
    }

    if( _edge )
        *_edge = edge;
    if( _point )
        *_point = point;

    return location;
}
/* Insert point pt into the Delaunay subdivision (incremental algorithm,
   legacy error-macro style): locate pt, connect it to the vertices of
   the containing facet, then restore the Delaunay property by flipping
   edges that fail the in-circle test. Returns the vertex for pt.
   NOTE(review): the splice/connect sequence is order-dependent. */
CV_IMPL CvSubdiv2DPoint *
cvSubdivDelaunay2DInsert( CvSubdiv2D * subdiv, CvPoint2D32f pt )
{
    CvSubdiv2DPoint *point = 0;
    CvSubdiv2DPointLocation location = CV_PTLOC_ERROR;

    CvSubdiv2DPoint *curr_point = 0, *first_point = 0;
    CvSubdiv2DEdge curr_edge = 0, deleted_edge = 0, base_edge = 0;
    int i, max_edges;

    CV_FUNCNAME( "cvSubdivDelaunay2DInsert" );

    __BEGIN__;

    if( !subdiv )
        CV_ERROR( CV_StsNullPtr, "" );

    if( !CV_IS_SUBDIV2D(subdiv) )
        CV_ERROR_FROM_STATUS( CV_BADFLAG_ERR );

    location = cvSubdiv2DLocate( subdiv, pt, &curr_edge, &curr_point );

    switch (location)
    {
    case CV_PTLOC_ERROR:
        CV_ERROR_FROM_STATUS( CV_BADSIZE_ERR );

    case CV_PTLOC_OUTSIDE_RECT:
        CV_ERROR_FROM_STATUS( CV_BADRANGE_ERR );

    case CV_PTLOC_VERTEX:
        /* the point already exists; return the existing vertex */
        point = curr_point;
        break;

    case CV_PTLOC_ON_EDGE:
        /* remove the edge the point falls on, then treat as INSIDE */
        deleted_edge = curr_edge;
        subdiv->recent_edge = curr_edge = cvSubdiv2DGetEdge( curr_edge, CV_PREV_AROUND_ORG );
        cvSubdiv2DDeleteEdge( subdiv, deleted_edge );
        /* no break */

    case CV_PTLOC_INSIDE:

        assert( curr_edge != 0 );
        subdiv->is_geometry_valid = 0;

        /* connect the new vertex to every vertex of the containing facet */
        curr_point = cvSubdiv2DAddPoint( subdiv, pt, 0 );
        CV_CHECK();

        base_edge = cvSubdiv2DMakeEdge( subdiv );
        first_point = cvSubdiv2DEdgeOrg( curr_edge );
        cvSubdiv2DSetEdgePoints( base_edge, first_point, curr_point );
        cvSubdiv2DSplice( base_edge, curr_edge );

        do
        {
            base_edge = cvSubdiv2DConnectEdges( subdiv, curr_edge,
                                                cvSubdiv2DSymEdge( base_edge ));
            curr_edge = cvSubdiv2DGetEdge( base_edge, CV_PREV_AROUND_ORG );
        }
        while( cvSubdiv2DEdgeDst( curr_edge ) != first_point );

        curr_edge = cvSubdiv2DGetEdge( base_edge, CV_PREV_AROUND_ORG );

        /* restore the Delaunay condition: flip edges whose opposite
           vertex lies inside the circumcircle of the new triangle */
        max_edges = subdiv->quad_edges * 4;

        for( i = 0; i < max_edges; i++ )
        {
            CvSubdiv2DPoint *temp_dst = 0, *curr_org = 0, *curr_dst = 0;
            CvSubdiv2DEdge temp_edge = cvSubdiv2DGetEdge( curr_edge, CV_PREV_AROUND_ORG );

            temp_dst = cvSubdiv2DEdgeDst( temp_edge );
            curr_org = cvSubdiv2DEdgeOrg( curr_edge );
            curr_dst = cvSubdiv2DEdgeDst( curr_edge );

            if( icvIsRightOf( temp_dst->pt, curr_edge ) > 0 &&
                icvIsPtInCircle3( curr_org->pt, temp_dst->pt,
                                  curr_dst->pt, curr_point->pt ) < 0 )
            {
                cvSubdiv2DSwapEdges( curr_edge );
                curr_edge = cvSubdiv2DGetEdge( curr_edge, CV_PREV_AROUND_ORG );
            }
            else if( curr_org == first_point )
            {
                /* full circle around the inserted point: done */
                break;
            }
            else
            {
                curr_edge = cvSubdiv2DGetEdge( cvSubdiv2DNextEdge( curr_edge ),
                                               CV_PREV_AROUND_LEFT );
            }
        }
        break;
    default:
        assert( 0 );
        CV_ERROR_FROM_STATUS( CV_NOTDEFINED_ERR );
    }

    point = curr_point;

    __END__;

    //icvSubdiv2DCheck( subdiv );

    return point;
}
/* Locate pt in the subdivision by walking edges from the most recently
   used edge (legacy error-macro style). Reports whether pt is inside a
   facet, on an edge, on a vertex, or is in error; the found edge/vertex
   are returned through _edge/_point. The walk is bounded by max_edges. */
CV_IMPL CvSubdiv2DPointLocation
cvSubdiv2DLocate( CvSubdiv2D * subdiv, CvPoint2D32f pt,
                  CvSubdiv2DEdge * _edge, CvSubdiv2DPoint ** _point )
{
    CvSubdiv2DEdge edge = 0;
    CvSubdiv2DPoint *point = 0;
    CvSubdiv2DPointLocation location = CV_PTLOC_ERROR;

    int i, max_edges;
    int right_of_curr = 0;

    CV_FUNCNAME( "cvSubdiv2DLocate" );

    __BEGIN__;

    if( !subdiv )
        CV_ERROR( CV_StsNullPtr, "" );

    if( !CV_IS_SUBDIV2D(subdiv) )
        CV_ERROR_FROM_STATUS( CV_BADFLAG_ERR );

    max_edges = subdiv->quad_edges * 4;
    edge = subdiv->recent_edge;

    if( max_edges == 0 )
        CV_ERROR_FROM_STATUS( CV_BADSIZE_ERR );
    if( !edge )
        CV_ERROR_FROM_STATUS( CV_NOTDEFINED_ERR );

    /* pt must be inside the bounding rectangle of the subdivision */
    location = CV_PTLOC_OUTSIDE_RECT;
    if( pt.x < subdiv->topleft.x || pt.y < subdiv->topleft.y ||
        pt.x >= subdiv->bottomright.x || pt.y >= subdiv->bottomright.y )
        CV_ERROR_FROM_STATUS( CV_BADRANGE_ERR );

    location = CV_PTLOC_ERROR;

    /* orient the starting edge so pt is not to its right */
    right_of_curr = icvIsRightOf( pt, edge );
    if( right_of_curr > 0 )
    {
        edge = cvSubdiv2DSymEdge( edge );
        right_of_curr = -right_of_curr;
    }

    for( i = 0; i < max_edges; i++ )
    {
        CvSubdiv2DEdge onext_edge = cvSubdiv2DNextEdge( edge );
        CvSubdiv2DEdge dprev_edge = cvSubdiv2DGetEdge( edge, CV_PREV_AROUND_DST );

        int right_of_onext = icvIsRightOf( pt, onext_edge );
        int right_of_dprev = icvIsRightOf( pt, dprev_edge );

        if( right_of_dprev > 0 )
        {
            /* pt is left of all three edges of the current face */
            if( right_of_onext > 0 || right_of_onext == 0 && right_of_curr == 0 )
            {
                location = CV_PTLOC_INSIDE;
                EXIT;
            }
            else
            {
                right_of_curr = right_of_onext;
                edge = onext_edge;
            }
        }
        else
        {
            if( right_of_onext > 0 )
            {
                if( right_of_dprev == 0 && right_of_curr == 0 )
                {
                    location = CV_PTLOC_INSIDE;
                    EXIT;
                }
                else
                {
                    right_of_curr = right_of_dprev;
                    edge = dprev_edge;
                }
            }
            else if( right_of_curr == 0 &&
                     icvIsRightOf( cvSubdiv2DEdgeDst( onext_edge )->pt, edge ) >= 0 )
            {
                edge = cvSubdiv2DSymEdge( edge );
            }
            else
            {
                right_of_curr = right_of_onext;
                edge = onext_edge;
            }
        }
    }

    __END__;

    /* cache the landing edge to speed up the next query */
    subdiv->recent_edge = edge;

    if( location == CV_PTLOC_INSIDE )
    {
        /* refine: pt may actually coincide with a vertex or lie on
           the landing edge (Manhattan-distance tests below) */
        double t1, t2, t3;
        CvPoint2D32f org_pt = cvSubdiv2DEdgeOrg( edge )->pt;
        CvPoint2D32f dst_pt = cvSubdiv2DEdgeDst( edge )->pt;

        t1 = fabs( pt.x - org_pt.x );
        t1 += fabs( pt.y - org_pt.y );
        t2 = fabs( pt.x - dst_pt.x );
        t2 += fabs( pt.y - dst_pt.y );
        t3 = fabs( org_pt.x - dst_pt.x );
        t3 += fabs( org_pt.y - dst_pt.y );

        if( t1 < FLT_EPSILON )
        {
            location = CV_PTLOC_VERTEX;
            point = cvSubdiv2DEdgeOrg( edge );
            edge = 0;
        }
        else if( t2 < FLT_EPSILON )
        {
            location = CV_PTLOC_VERTEX;
            point = cvSubdiv2DEdgeDst( edge );
            edge = 0;
        }
        else if( (t1 < t3 || t2 < t3) &&
                 fabs( cvTriangleArea( pt, org_pt, dst_pt )) < FLT_EPSILON )
        {
            location = CV_PTLOC_ON_EDGE;
            point = 0;
        }
    }

    if( location == CV_PTLOC_ERROR )
    {
        edge = 0;
        point = 0;
    }

    if( _edge )
        *_edge = edge;
    if( _point )
        *_point = point;

    return location;
}
/* This function uses Delaunay triangulation to populate the triangles matrix */
/* Builds a Delaunay triangulation of the source landmarks, maps each
   triangle's vertices back to landmark indices, de-duplicates triangles via
   a set, and stores the result in `triangles` / `nTriangles`.
   NOTE(review): `img` and `par` are declared but never used; `storage`,
   `subdiv` and the per-iteration `triangleFrame` images are never released
   -- this looks like a memory leak. Confirm before changing. */
void PAW::triangulate(){
    CvMemStorage* storage;
    CvSubdiv2D* subdiv;
    IplImage* img;
    int par;
    std::vector<CvPoint> points;
    vector<int> triangleVertices;
    CvRect rect = { 0, 0, baseImageWidth, baseImageHeight};

    storage = cvCreateMemStorage(0);
    subdiv = cvCreateSubdivDelaunay2D(rect,storage);

    //insert srcLandmark points in Delaunay subdivision
    for(int i=0;i<nLandmarks;i++){
        double x = srcLandmarks.at<int>(i,0);
        double y = srcLandmarks.at<int>(i,1);
        points.push_back(cvPoint(srcLandmarks.at<int>(i,0),srcLandmarks.at<int>(i,1)));
        CvPoint2D32f fp = cvPoint2D32f(x, y);
        cvSubdivDelaunay2DInsert( subdiv, fp );
    }

    /* walk every quad-edge twice, once per face direction, so triangles on
       both sides of each edge are visited */
    CvNextEdgeType triangleDirections[2] = {CV_NEXT_AROUND_LEFT,CV_NEXT_AROUND_RIGHT};
    for(int tdi = 0;tdi<2;tdi++){
        CvNextEdgeType triangleDirection = triangleDirections[tdi];

        IplImage* triangleFrame = cvCreateImage(cvSize(baseImageWidth,baseImageHeight),IPL_DEPTH_32F,3);
        CvScalar delaunay_color, voronoi_color;
        delaunay_color  = CV_RGB( 200,0,0);
        voronoi_color = CV_RGB(0, 200, 0);

        CvSeqReader reader;
        int i, total = subdiv->edges->total;
        int elem_size = subdiv->edges->elem_size;
        cvStartReadSeq( (CvSeq*)(subdiv->edges), &reader, 0 );
        CvPoint buf[3];
        printf("Total %d\n",total);

        for( i = 0; i < total; i++ )
        {
            CvQuadEdge2D* edge = (CvQuadEdge2D*)(reader.ptr);

            if( CV_IS_SET_ELEM( edge ))
            {
                //draw_subdiv_edge( img, (CvSubdiv2DEdge)edge + 1, voronoi_color );
                //TODO optimize this part of code, since we could use a map (and put order) or get points index from delaunay subdiv
                //if(i==par){
                /* gather the 3 vertices of the triangle incident to this edge */
                CvSubdiv2DEdge t = (CvSubdiv2DEdge)edge ;
                int shouldPaint=1;
                for(int j=0;j<3;j++){
                    CvSubdiv2DPoint* pt = cvSubdiv2DEdgeOrg( t );
                    if( !pt ) break;
                    buf[j] = cvPoint( cvRound(pt->pt.x), cvRound(pt->pt.y));
                    t = cvSubdiv2DGetEdge( t, triangleDirection );
                    /* skip triangles touching the bounding vertices outside
                       the image */
                    if((pt->pt.x<0)||(pt->pt.x>baseImageWidth))
                        shouldPaint=0;
                    if((pt->pt.y<0)||(pt->pt.y>baseImageHeight))
                        shouldPaint=0;
                }
                if(shouldPaint){
                    //cvFillConvexPoly( img, buf, 3, CV_RGB(0,.1+10.0/255.0,0), CV_AA, 0 );
                    /* map each triangle vertex back to its landmark index by
                       exact coordinate match */
                    int originalVertices[3];
                    for(int j=0;j<3;j++){
                        int px = buf[j].x;
                        int py = buf[j].y;
                        for(int k=0;k<points.size();k++){
                            if((points[k].x ==px) && (points[k].y==py)){
                                printf("%d ",k);
                                originalVertices[j] = k;
                                triangleVertices.push_back(k);
                                break;//could there be overlapped points
                            }
                        }
                    }
                    printf("\n");
                    //originalVertices stores the correspondence of vertices 0, 1 and 2 of the currently mapped triangle
                    //with their annotated points (which are in pcaSet)
                    /* int p1x = pcaSet.at<double>(imageIndex,originalVertices[0]*2); int p1y = pcaSet.at<double>(imageIndex,originalVertices[0]*2+1); int p2x = pcaSet.at<double>(imageIndex,originalVertices[1]*2); int p2y = pcaSet.at<double>(imageIndex,originalVertices[1]*2+1); int p3x = pcaSet.at<double>(imageIndex,originalVertices[2]*2); int p3y = pcaSet.at<double>(imageIndex,originalVertices[2]*2+1); Point2f srcTri[3]; Point2f dstTri[3]; srcTri[0] = Point2f( p1x, p1y ); srcTri[1] = Point2f( p2x, p2y ); srcTri[2] = Point2f( p3x, p3y ); dstTri[0] = Point2f( buf[0].x, buf[0].y ); dstTri[1] = Point2f( buf[1].x, buf[1].y ); dstTri[2] = Point2f( buf[2].x, buf[2].y ); */
                    //warpTextureFromTriangle(srcTri, originalImage, dstTri, warp_final);
                    /*cvLine(new IplImage(warp_final),cvPoint(p1x,p1y),cvPoint(p2x,p2y),CV_RGB(0,255,0),1,8,0); cvLine(new IplImage(warp_final),cvPoint(p2x,p2y),cvPoint(p3x,p3y),CV_RGB(0,255,0),1,8,0); cvLine(new IplImage(warp_final),cvPoint(p3x,p3y),cvPoint(p1x,p1y),CV_RGB(0,255,0),1,8,0);*/
                }
                //draw_subdiv_edge( triangleFrame, (CvSubdiv2DEdge)edge, delaunay_color );
            }
            CV_NEXT_SEQ_ELEM( elem_size, reader );
        }

        //string num = static_cast<ostringstream*>( &(ostringstream() << countFrame++) )->str();
        //imshow("Warped final "+ num,warp_final);

        //clean up repeated triangles
        set<Triangle> triangleSet;
        for(int i=0;i<triangleVertices.size()/3;i+=1){
            printf("%2d %2d %2d\n",triangleVertices.at(3*i),triangleVertices.at(3*i+1),triangleVertices.at(3*i+2));
            Triangle t(triangleVertices.at(3*i),triangleVertices.at(3*i+1),triangleVertices.at(3*i+2));
            triangleSet.insert(t);
        }

        /* copy the unique triangles into the output matrix */
        triangles = Mat::zeros(triangleSet.size(),3,CV_32S);
        set<Triangle>::iterator it;
        int count=0;
        for (it=triangleSet.begin(); it!=triangleSet.end(); it++){
            cout << (*it).v1 << " " << (*it).v2 << " " << (*it).v3 << endl;
            triangles.at<int>(count,0) = ( (*it).v1);
            triangles.at<int>(count,1) = ( (*it).v2);
            triangles.at<int>(count,2) = ( (*it).v3);
            count++;
        }
        cout << endl;
        nTriangles = count;

        Mat triangleMat(triangleFrame);
        imshow("Triangle frame",triangleMat);
        populatePointTriangleMap();
    }
}
/* Extract the triangle to the left of edge e: V0 is the edge's origin,
   V2 its destination, and V1 the far vertex reached by stepping once
   around the left face. */
void PlanarSubdivisionEdgeToTriangle(CvSubdiv2DEdge e, Triangle2DF* triangle)
{
   CvSubdiv2DEdge leftNext = cvSubdiv2DGetEdge(e, CV_NEXT_AROUND_LEFT);

   triangle->V0 = cvSubdiv2DEdgeOrg(e)->pt;
   triangle->V1 = cvSubdiv2DEdgeDst(leftNext)->pt;
   triangle->V2 = cvSubdiv2DEdgeDst(e)->pt;
}
int main(int argc, char * argv[]) { if(argc < 2) { fprintf(stderr, "%s image1 image2\n", argv[0]); return 1; } char * im1fname = argv[1]; char * im2fname = argv[2]; IplImage * image1 = cvLoadImage(im1fname, CV_LOAD_IMAGE_GRAYSCALE); IplImage * eigenvalues = cvCreateImage(cvGetSize(image1), 32, 1); IplImage * temp = cvCreateImage(cvGetSize(image1), 32, 1); int count = MAX_COUNT; double quality = 0.5; // double min_distance = 2; double min_distance = 50; int block_size = 7; int use_harris = 0; int win_size = 10; int flags = 0; CvPoint2D32f * source_points = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(CvPoint2D32f)); CvPoint2D32f * dest_points = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(CvPoint2D32f)); CvPoint2D32f * delaunay_points = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(CvPoint2D32f)); cvGoodFeaturesToTrack( image1, eigenvalues, temp, source_points, &count, quality, min_distance, 0, block_size, use_harris, 0.04 ); printf("%d features\n",count); setbuf(stdout, NULL); printf("Finding corner subpix..."); cvFindCornerSubPix( image1, source_points, count, cvSize(win_size,win_size), cvSize(-1,-1), cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS, 20, 0.03)); printf("done.\n"); cvReleaseImage(&eigenvalues); cvReleaseImage(&temp); IplImage * image2 = cvLoadImage(im2fname, CV_LOAD_IMAGE_GRAYSCALE); char * status = (char*)cvAlloc(sizeof(char)*MAX_COUNT); IplImage * pyramid = cvCreateImage( cvGetSize(image1), IPL_DEPTH_8U, 1 ); IplImage * second_pyramid = cvCreateImage( cvGetSize(image2), IPL_DEPTH_8U, 1 ); printf("Computing optical flow..."); cvCalcOpticalFlowPyrLK(image1, image2, pyramid, second_pyramid, source_points, dest_points, count, cvSize(win_size,win_size), 4, status, 0, cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS, 20, 0.03), flags); printf("done.\n"); int num_matches = 0; int num_out_matches = 0; int max_dist = 30; int offset = 200; CvMemStorage * storage = cvCreateMemStorage(0); CvSubdiv2D * delaunay = cvCreateSubdivDelaunay2D( 
cvRect(0,0,image1->width,image1->height), storage); cvReleaseImage(&image1); cvReleaseImage(&image2); image1 = cvLoadImage(im1fname, CV_LOAD_IMAGE_COLOR); image2 = cvLoadImage(im2fname, CV_LOAD_IMAGE_COLOR); cvSet( image1, cvScalarAll(255) ); std::map<CvPoint, CvPoint> point_lookup_map; std::vector<std::pair<CvPoint, CvPoint> > point_lookup; // put corners in the point lookup as going to themselves point_lookup_map[cvPoint(0,0)] = cvPoint(0,0); point_lookup_map[cvPoint(0,image1->height-1)] = cvPoint(0,image1->height-1); point_lookup_map[cvPoint(image1->width-1,0)] = cvPoint(image1->width-1,0); point_lookup_map[cvPoint(image1->width-1,image1->height-1)] = cvPoint(image1->width-1,image1->height-1); point_lookup.push_back(std::pair<CvPoint,CvPoint>(cvPoint(0,0), cvPoint(0,0))); point_lookup.push_back(std::pair<CvPoint,CvPoint>(cvPoint(0,image1->height-1), cvPoint(0,image1->height-1))); point_lookup.push_back(std::pair<CvPoint,CvPoint>(cvPoint(image1->width-1,0), cvPoint(image1->width-1,0))); point_lookup.push_back(std::pair<CvPoint,CvPoint>(cvPoint(image1->width-1,image1->height-1), cvPoint(image1->width-1,image1->height-1))); printf("Inserting corners..."); // put corners in the Delaunay subdivision for(unsigned int i = 0; i < point_lookup.size(); i++) { cvSubdivDelaunay2DInsert( delaunay, cvPointTo32f(point_lookup[i].first) ); } printf("done.\n"); CvSubdiv2DEdge proxy_edge; for(int i = 0; i < count; i++) { if(status[i]) { CvPoint source = cvPointFrom32f(source_points[i]); CvPoint dest = cvPointFrom32f(dest_points[i]); if((((int)fabs((double)(source.x - dest.x))) > max_dist) || (((int)fabs((double)(source.y - dest.y))) > max_dist)) { num_out_matches++; } else if((dest.x >= 0) && (dest.y >= 0) && (dest.x < (image1->width)) && (dest.y < (image1->height))) { if(point_lookup_map.find(source) == point_lookup_map.end()) { num_matches++; point_lookup_map[source] = dest; point_lookup.push_back(std::pair<CvPoint,CvPoint>(source,dest)); delaunay_points[i] = 
(cvSubdivDelaunay2DInsert( delaunay, cvPointTo32f(source) ))->pt; cvSetImageROI( image1, cvRect(source.x-8,source.y-8,8*2,8*2) ); cvResetImageROI( image2 ); cvGetRectSubPix( image2, image1, dest_points[i] ); } /* cvSet2D( image1, source.y, source.x, cvGet2D( image2, dest.y, dest.x ) ); cvSet2D( image1, source.y, source.x+1, cvGet2D( image2, dest.y, dest.x+1 ) ); cvSet2D( image1, source.y, source.x-1, cvGet2D( image2, dest.y, dest.x-1 ) ); cvSet2D( image1, source.y+1, source.x, cvGet2D( image2, dest.y+1, dest.x ) ); cvSet2D( image1, source.y-1, source.x, cvGet2D( image2, dest.y-1, dest.x ) ); cvSet2D( image1, source.y+1, source.x+1, cvGet2D( image2, dest.y+1, dest.x+1 ) ); cvSet2D( image1, source.y-1, source.x-1, cvGet2D( image2, dest.y-1, dest.x-1 ) ); cvSet2D( image1, source.y+1, source.x-1, cvGet2D( image2, dest.y+1, dest.x-1 ) ); cvSet2D( image1, source.y-1, source.x+1, cvGet2D( image2, dest.y-1, dest.x+1 ) ); */ // cvCircle( image1, source, 4, CV_RGB(255,0,0), 2, CV_AA ); // cvCircle( image2, dest, 4, CV_RGB(255,0,0), 2, CV_AA ); } /* cvSetImageROI( image1, cvRect(source.x-offset,source.y-offset,offset*2,offset*2) ); cvSetImageROI( image2, cvRect(dest.x-offset,dest.y-offset,offset*2,offset*2) ); cvNamedWindow("image1",0); cvNamedWindow("image2",0); cvShowImage("image1",image1); cvShowImage("image2",image2); printf("%d,%d -> %d,%d\n",source.x,source.y,dest.x,dest.y); cvWaitKey(0); cvDestroyAllWindows(); */ } } printf("%d %d\n",num_matches,num_out_matches); printf("%d lookups\n",point_lookup_map.size()); cvResetImageROI( image1 ); cvSaveImage("sparse.jpg", image1); cvReleaseImage(&image1); image1 = cvLoadImage(im1fname, CV_LOAD_IMAGE_COLOR); cvSet( image1, cvScalarAll(255) ); printf("Warping image..."); CvSeqReader reader; int total = delaunay->edges->total; int elem_size = delaunay->edges->elem_size; cvStartReadSeq( (CvSeq*)(delaunay->edges), &reader, 0 ); std::vector<Triangle> trivec; std::vector<CvMat *> baryinvvec; for( int i = 0; i < total; i++ ) { 
CvQuadEdge2D* edge = (CvQuadEdge2D*)(reader.ptr); if( CV_IS_SET_ELEM( edge )) { CvSubdiv2DEdge curedge = (CvSubdiv2DEdge)edge; CvSubdiv2DEdge t = curedge; Triangle temptri; int count = 0; // construct a triangle from this edge do { CvSubdiv2DPoint* pt = cvSubdiv2DEdgeOrg( t ); if(count < 3) { pt->pt.x = pt->pt.x >= image1->width ? image1->width-1 : pt->pt.x; pt->pt.y = pt->pt.y >= image1->height ? image1->height-1 : pt->pt.y; pt->pt.x = pt->pt.x < 0 ? 0 : pt->pt.x; pt->pt.y = pt->pt.y < 0 ? 0 : pt->pt.y; temptri.points[count] = cvPointFrom32f( pt->pt ); } else { printf("More than 3 edges\n"); } count++; t = cvSubdiv2DGetEdge( t, CV_NEXT_AROUND_LEFT ); } while( t != curedge ); // check that triangle is not already in if( std::find(trivec.begin(), trivec.end(), temptri) == trivec.end() ) { // push triangle in and draw trivec.push_back(temptri); cvLine( image1, temptri.points[0], temptri.points[1], CV_RGB(255,0,0), 1, CV_AA, 0 ); cvLine( image1, temptri.points[1], temptri.points[2], CV_RGB(255,0,0), 1, CV_AA, 0 ); cvLine( image1, temptri.points[2], temptri.points[0], CV_RGB(255,0,0), 1, CV_AA, 0 ); // compute barycentric computation vector for this triangle CvMat * barycen = cvCreateMat( 3, 3, CV_32FC1 ); CvMat * baryceninv = cvCreateMat( 3, 3, CV_32FC1 ); barycen->data.fl[3*0+0] = temptri.points[0].x; barycen->data.fl[3*0+1] = temptri.points[1].x; barycen->data.fl[3*0+2] = temptri.points[2].x; barycen->data.fl[3*1+0] = temptri.points[0].y; barycen->data.fl[3*1+1] = temptri.points[1].y; barycen->data.fl[3*1+2] = temptri.points[2].y; barycen->data.fl[3*2+0] = 1; barycen->data.fl[3*2+1] = 1; barycen->data.fl[3*2+2] = 1; cvInvert( barycen, baryceninv, CV_LU ); baryinvvec.push_back(baryceninv); cvReleaseMat( &barycen ); } } CV_NEXT_SEQ_ELEM( elem_size, reader ); } printf("%d triangles...", trivec.size()); cvSaveImage("triangles.jpg", image1); cvSet( image1, cvScalarAll(255) ); IplImage * clean_nonthresh = cvLoadImage( "conhull-clean.jpg", CV_LOAD_IMAGE_COLOR ); // for 
each triangle for(unsigned int i = 0; i < trivec.size(); i++) { Triangle curtri = trivec[i]; CvMat * curpoints = cvCreateMat( 1, 3, CV_32SC2 ); Triangle target; std::map<CvPoint,CvPoint>::iterator piter[3]; printf("Triangle %d / %d\n",i,trivec.size()); bool is_corner = false; for(int j = 0; j < 3; j++) { /* curpoints->data.i[2*j+0] = curtri.points[j].x; curpoints->data.i[2*j+1] = curtri.points[j].y; */ CV_MAT_ELEM( *curpoints, CvPoint, 0, j ) = curtri.points[j]; printf("%d,%d\n",curtri.points[j].x,curtri.points[j].y); /* if((curtri.points[j] == cvPoint(0,0)) || (curtri.points[j] == cvPoint(0,image1->height)) ||(curtri.points[j] == cvPoint(image1->width,0)) ||(curtri.points[j] == cvPoint(image1->width,image1->height))) { is_corner = true; break; } */ for(unsigned int k = 0; k < point_lookup.size(); k++) { std::pair<CvPoint,CvPoint> thispair = point_lookup[k]; if(thispair.first == curtri.points[j]) { target.points[j] = thispair.second; break; } } /* piter[j] = point_lookup_map.find(curtri.points[j]); if(piter[j] != point_lookup_map.end() ) { target.points[j] = piter[j]->second; } */ } // if((piter[0] != point_lookup_map.end()) && (piter[1] != point_lookup_map.end()) && (piter[2] != point_lookup_map.end())) { if(!is_corner) { CvMat * newcorners = cvCreateMat( 3, 3, CV_32FC1 ); newcorners->data.fl[3*0+0] = target.points[0].x; newcorners->data.fl[3*0+1] = target.points[1].x; newcorners->data.fl[3*0+2] = target.points[2].x; newcorners->data.fl[3*1+0] = target.points[0].y; newcorners->data.fl[3*1+1] = target.points[1].y; newcorners->data.fl[3*1+2] = target.points[2].y; newcorners->data.fl[3*2+0] = 1; newcorners->data.fl[3*2+1] = 1; newcorners->data.fl[3*2+2] = 1; CvContour hdr; CvSeqBlock blk; CvRect trianglebound = cvBoundingRect( cvPointSeqFromMat(CV_SEQ_KIND_CURVE+CV_SEQ_FLAG_CLOSED, curpoints, &hdr, &blk), 1 ); printf("Bounding box: %d,%d,%d,%d\n",trianglebound.x,trianglebound.y,trianglebound.width,trianglebound.height); for(int y = trianglebound.y; (y < 
(trianglebound.y + trianglebound.height)) && ( y < image1->height); y++) { for(int x = trianglebound.x; (x < (trianglebound.x + trianglebound.width)) && (x < image1->width); x++) { // check to see if we're inside this triangle /* CvPoint v0 = cvPoint( curtri.points[2].x - curtri.points[0].x, curtri.points[2].y - curtri.points[0].y ); CvPoint v1 = cvPoint( curtri.points[1].x - curtri.points[0].x, curtri.points[1].y - curtri.points[0].y ); CvPoint v2 = cvPoint( x - curtri.points[0].x, y - curtri.points[0].y ); int dot00 = v0.x * v0.x + v0.y * v0. y; int dot01 = v0.x * v1.x + v0.y * v1. y; int dot02 = v0.x * v2.x + v0.y * v2. y; int dot11 = v1.x * v1.x + v1.y * v1. y; int dot12 = v1.x * v2.x + v1.y * v2. y; double invDenom = 1.0 / (double)(dot00 * dot11 - dot01 * dot01); double u = (double)(dot11 * dot02 - dot01 * dot12) * invDenom; double v = (double)(dot00 * dot12 - dot01 * dot02) * invDenom; */ CvMat * curp = cvCreateMat(3, 1, CV_32FC1); CvMat * result = cvCreateMat(3, 1, CV_32FC1); curp->data.fl[0] = x; curp->data.fl[1] = y; curp->data.fl[2] = 1; cvMatMul( baryinvvec[i], curp, result ); // double u = result->data.fl[0]/result->data.fl[2]; // double v = result->data.fl[1]/result->data.fl[2]; if( (result->data.fl[0] > 0) && (result->data.fl[1] > 0) && (fabs(1.0 - (result->data.fl[0]+result->data.fl[1]+result->data.fl[2])) <= 0.01) ) { // if((u > 0) || (v > 0) /*&& ((u +v) < 1)*/ ) { // printf("Barycentric: %f %f %f\n", result->data.fl[0], result->data.fl[1], result->data.fl[2]); // this point is inside this triangle // printf("Point %d,%d inside %d,%d %d,%d %d,%d\n",x,y,trivec[i].points[0].x,trivec[i].points[0].y, // trivec[i].points[1].x,trivec[i].points[1].y,trivec[i].points[2].x,trivec[i].points[2].y); CvMat * sourcepoint = cvCreateMat(3, 1, CV_32FC1); cvMatMul( newcorners, result, sourcepoint ); double sourcex = sourcepoint->data.fl[0]/*/sourcepoint->data.fl[2]*/; double sourcey = sourcepoint->data.fl[1]/*/sourcepoint->data.fl[2]*/; if((sourcex >= 0) && (sourcey 
>= 0) && (sourcex < (image1->width)) && (sourcey < (image1->height))) { // printf("%d,%d %d,%d\n",x,y,(int)sourcex,(int)sourcey); cvSet2D( image1, y, x, cvGet2D( clean_nonthresh, (int)sourcey, (int)sourcex ) ); } /* if((i == 143) && (y == 3577) && (x > 2055) && (x < 2087)) { printf("%d: %f, %f, %f\t%f, %f, %f\n",x,result->data.fl[0],result->data.fl[1],result->data.fl[2], sourcepoint->data.fl[0],sourcepoint->data.fl[1],sourcepoint->data.fl[2]); } */ cvReleaseMat( &sourcepoint ); // printf("Point %d,%d inside %d,%d %d,%d %d,%d\n",x,y,trivec[i].points[0].x,trivec[i].points[0].y, // trivec[i].points[1].x,trivec[i].points[1].y,trivec[i].points[2].x,trivec[i].points[2].y); } cvReleaseMat( &result ); cvReleaseMat( &curp ); } } cvReleaseMat( &newcorners ); } cvReleaseMat( &curpoints ); } /* for(int y = 0; y < image1->height; y++) { for(int x = 0; x < image1->width; x++) { CvMat * curp = cvCreateMat(3, 1, CV_32FC1); CvMat * result = cvCreateMat(3, 1, CV_32FC1); curp->data.fl[0] = x; curp->data.fl[1] = y; curp->data.fl[2] = 1; for(unsigned int i = 0; i < baryinvvec.size(); i++) { cvMatMul( baryinvvec[i], curp, result ); double u = result->data.fl[0]/result->data.fl[2]; double v = result->data.fl[1]/result->data.fl[2]; if((u > 0) && (v > 0) && (u + v < 1)) { // printf("Point %d,%d inside %d,%d %d,%d %d,%d\n",x,y,trivec[i].points[0].x,trivec[i].points[0].y, // trivec[i].points[1].x,trivec[i].points[1].y,trivec[i].points[2].x,trivec[i].points[2].y); break; } } cvReleaseMat( &result ); cvReleaseMat( &curp ); } } */ cvReleaseImage( &clean_nonthresh ); #ifdef OLD_BUSTED for(int y = 0; y < image1->height; y++) { for(int x = 0; x < image1->width; x++) { CvSubdiv2DPointLocation locate_result; CvSubdiv2DEdge on_edge; CvSubdiv2DPoint * on_vertex; CvPoint curpoint = cvPoint( x, y ); locate_result = cvSubdiv2DLocate( delaunay, cvPointTo32f( curpoint ), &on_edge, &on_vertex ); if( (locate_result != CV_PTLOC_OUTSIDE_RECT) && (locate_result != CV_PTLOC_ERROR) ) { if( locate_result == 
CV_PTLOC_VERTEX ) { // this point is on a vertex for(int i = 0; i < count; i++) { if(((on_vertex->pt).x == delaunay_points[i].x) && ((on_vertex->pt).y == delaunay_points[i].y)) { cvSet2D( image1, y, x, cvGet2D( image2, cvPointFrom32f(dest_points[i]).y, cvPointFrom32f(dest_points[i]).x ) ); break; } } } else if( locate_result == CV_PTLOC_ON_EDGE ) { // this point is on an edge CvSubdiv2DPoint* org_pt; CvSubdiv2DPoint* dst_pt; CvPoint org_pt_warp; CvPoint dst_pt_warp; org_pt = cvSubdiv2DEdgeOrg(on_edge); dst_pt = cvSubdiv2DEdgeDst(on_edge); for(int i = 0; i < count; i++) { if(((org_pt->pt).x == delaunay_points[i].x) && ((org_pt->pt).y == delaunay_points[i].y)) { org_pt_warp = cvPointFrom32f(dest_points[i]); } if(((dst_pt->pt).x == delaunay_points[i].x) && ((dst_pt->pt).y == delaunay_points[i].y)) { dst_pt_warp = cvPointFrom32f(dest_points[i]); } } // compute vector length of original edge and current point double original_length; double cur_length; if( (int)((org_pt->pt).x) == curpoint.x ) { // vertical line original_length = fabs((org_pt->pt).y - (dst_pt->pt).y); cur_length = fabs((org_pt->pt).y - curpoint.y); } else if( (int)((org_pt->pt).y) == curpoint.y ) { // horizontal line original_length = fabs((org_pt->pt).x - (dst_pt->pt).x); cur_length = fabs((org_pt->pt).x - curpoint.x); } else { // sloped line original_length = sqrt(pow((org_pt->pt).x - (dst_pt->pt).x, 2.0) + pow((org_pt->pt).y - (dst_pt->pt).y, 2.0)); cur_length = sqrt(pow((org_pt->pt).x - curpoint.x, 2.0) + pow((org_pt->pt).y - curpoint.y, 2.0)); } // compute ratio of this point on the edge double ratio = cur_length / original_length; // copy this point from the destination edge CvPoint point_in_original; int warped_x = (int)(org_pt_warp.x - dst_pt_warp.x); int warped_y = (int)(org_pt_warp.y - dst_pt_warp.y); if( org_pt_warp.x == curpoint.x ) { // vertical line point_in_original.y = (int)(org_pt_warp.y + (ratio * (org_pt_warp.y - dst_pt_warp.y))); point_in_original.x = org_pt_warp.x; } else 
if(org_pt_warp.y == curpoint.y) { // horizontal line point_in_original.x = (int)(org_pt_warp.x + (ratio * (org_pt_warp.x - dst_pt_warp.x))); point_in_original.y = org_pt_warp.y; } else { // sloped line double destination_length = sqrt(pow((org_pt_warp).x - (dst_pt_warp).x, 2.0) + pow((org_pt_warp).y - (dst_pt_warp).y, 2.0)); double scaled_length = ratio * destination_length; double dest_angle = atan(fabs( (double)warped_y / (double)warped_x )); double xdist = scaled_length * cos(dest_angle); double ydist = scaled_length * sin(dest_angle); xdist = warped_x > 0 ? xdist : xdist * -1; ydist = warped_y > 0 ? ydist : ydist * -1; point_in_original.x = (int)( org_pt_warp.x + xdist); point_in_original.y = (int)( org_pt_warp.y + ydist); } if((point_in_original.x >= 0) && (point_in_original.y >= 0) && (point_in_original.x < (image1->width)) && (point_in_original.y < (image1->height))) { cvSet2D( image1, y, x, cvGet2D( image2, point_in_original.y, point_in_original.x ) ); } else { printf("Edge point outside image\n"); } // cvSet2D( image1, y, x, cvGet2D( image2, (int)(org_pt_warp.x + (ratio * (org_pt_warp.x - dst_pt_warp.x))), // (int)(org_pt_warp.y + (ratio * (org_pt_warp.y - dst_pt_warp.y))) ) ); } else if( locate_result == CV_PTLOC_INSIDE ) { // this point is inside a facet (triangle) /* printf("Point inside facet: %d, %d\n",curpoint.x,curpoint.y); int count = 0; CvPoint * origins = (CvPoint*)malloc(sizeof(CvPoint)*3); CvSubdiv2DEdge t = on_edge; // count number of edges do { CvSubdiv2DPoint* pt = cvSubdiv2DEdgeOrg( t ); if(count < 3) { origins[count] = cvPoint( cvRound(pt->pt.x), cvRound(pt->pt.y)); printf("%d,%d\t",origins[count].x,origins[count].y); } count++; t = cvSubdiv2DGetEdge( t, CV_NEXT_AROUND_LEFT ); } while(t != on_edge); printf("\n"); free(origins); */ } } } } #endif // OLD_BUSTED printf("done.\n"); cvSaveImage("fullwarp.jpg", image1); printf("Drawing subdivisions on warped image..."); draw_subdiv( image1, delaunay, NULL, NULL, 0, NULL ); // draw_subdiv( 
image1, delaunay, delaunay_points, source_points, count, status ); printf("done.\n"); cvSaveImage("edgeswarp.jpg", image1); cvReleaseImage(&image2); image2 = cvLoadImage(im2fname, CV_LOAD_IMAGE_COLOR); // cvCreateImage( cvGetSize(image2), IPL_DEPTH_8U, 3 ); // cvCalcSubdivVoronoi2D( delaunay ); printf("Drawing subdivisions on unwarped image..."); draw_subdiv( image2, delaunay, delaunay_points, dest_points, count, status ); // draw_subdiv( image2, delaunay, NULL, NULL, 0, NULL ); printf("done.\n"); cvSaveImage("edges.jpg",image2); cvReleaseImage(&image1); cvFree(&source_points); cvFree(&dest_points); cvFree(&status); cvReleaseMemStorage(&storage); cvFree(&delaunay_points); cvReleaseImage(&image2); return 0; }
/*
 * Triangle-based texture cleanup.
 *
 * Matches SIFT points between the hard-coded dirty/clean convex-hull images,
 * triangulates (corners only -- see note below), warps "conhull-clean.jpg"
 * through each triangle's affine map, and remaps the 'verts' (x,y) pairs
 * stored in 'clean_texture' (CV_32FC1, 2 floats per vertex) in place.
 * Writes sparse.jpg, triangles.jpg, fullwarp.jpg, edgeswarp.jpg, edges.jpg.
 *
 * Returns 0.
 */
int opticaltri( CvMat * &clean_texture, int verts )
{
    const char * im1fname = "conhull-dirty-thresh.jpg";
    const char * im2fname = "conhull-clean-thresh.jpg";
    int count = MAX_COUNT;
    char * status;
    CvPoint2D32f * source_points;
    CvPoint2D32f * dest_points;
    CvPoint2D32f * delaunay_points =
        (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(CvPoint2D32f));

    // NOTE(review): source_points / dest_points / status are uninitialized
    // here and freed with cvFree below -- presumably findsiftpoints allocates
    // them through reference parameters; confirm its signature.
    count = findsiftpoints( "conhull-dirty.jpg", "conhull-clean.jpg",
                            source_points, dest_points, status );

    IplImage * image1 = cvLoadImage(im1fname, CV_LOAD_IMAGE_COLOR);
    CvMemStorage * storage = cvCreateMemStorage(0);
    CvSubdiv2D * delaunay = cvCreateSubdivDelaunay2D(
        cvRect(0,0,image1->width,image1->height), storage);
    IplImage * image2 = cvLoadImage(im2fname, CV_LOAD_IMAGE_COLOR);
    cvSet( image1, cvScalarAll(255) );

    std::map<CvPoint, CvPoint> point_lookup_map;
    std::vector<std::pair<CvPoint, CvPoint> > point_lookup;
    int num_matches = 0;
    int num_out_matches = 0;
    int max_dist = 50;          // reject matches displaced farther than this

    // Seed the four image corners mapping to themselves.
    point_lookup_map[cvPoint(0,0)] = cvPoint(0,0);
    point_lookup_map[cvPoint(0,image1->height-1)] = cvPoint(0,image1->height-1);
    point_lookup_map[cvPoint(image1->width-1,0)] = cvPoint(image1->width-1,0);
    point_lookup_map[cvPoint(image1->width-1,image1->height-1)] = cvPoint(image1->width-1,image1->height-1);
    point_lookup.push_back(std::make_pair(cvPoint(0,0), cvPoint(0,0)));
    point_lookup.push_back(std::make_pair(cvPoint(0,image1->height-1), cvPoint(0,image1->height-1)));
    point_lookup.push_back(std::make_pair(cvPoint(image1->width-1,0), cvPoint(image1->width-1,0)));
    point_lookup.push_back(std::make_pair(cvPoint(image1->width-1,image1->height-1), cvPoint(image1->width-1,image1->height-1)));

    printf("Inserting corners...");
    for(unsigned int i = 0; i < point_lookup.size(); i++) {
        cvSubdivDelaunay2DInsert( delaunay, cvPointTo32f(point_lookup[i].first) );
    }
    printf("done.\n");

    for(int i = 0; i < count; i++) {
        if(!status[i])
            continue;
        CvPoint source = cvPointFrom32f(source_points[i]);
        CvPoint dest = cvPointFrom32f(dest_points[i]);
        if((((int)fabs((double)(source.x - dest.x))) > max_dist) ||
           (((int)fabs((double)(source.y - dest.y))) > max_dist)) {
            num_out_matches++;
        } else if((dest.x >= 0) && (dest.y >= 0) &&
                  (dest.x < image1->width) && (dest.y < image1->height)) {
            if(point_lookup_map.find(source) == point_lookup_map.end()) {
                num_matches++;
                point_lookup_map[source] = dest;
                point_lookup.push_back(std::make_pair(source, dest));
                // NOTE(review): unlike main(), matched points are NOT inserted
                // into the Delaunay subdivision here (only the four corners
                // are); the insert call was commented out in the original --
                // confirm this is intentional.
                cvSetImageROI( image1, cvRect(source.x-8,source.y-8,8*2,8*2) );
                cvResetImageROI( image2 );
                cvGetRectSubPix( image2, image1, dest_points[i] );
            }
        }
    }
    printf("%d %d\n", num_matches, num_out_matches);
    printf("%zu lookups\n", point_lookup_map.size());  // %zu: size_t (was %d, UB)

    cvResetImageROI( image1 );
    cvSaveImage("sparse.jpg", image1);
    cvReleaseImage(&image1);
    image1 = cvLoadImage(im1fname, CV_LOAD_IMAGE_COLOR);
    cvSet( image1, cvScalarAll(255) );

    printf("Warping image...");
    CvSeqReader reader;
    int total = delaunay->edges->total;
    int elem_size = delaunay->edges->elem_size;
    std::vector<Triangle> trivec;
    std::vector<CvMat *> baryinvvec;

    // Two passes over the edge sequence: the first walks the facet to the
    // LEFT of each edge, the second the facet to the RIGHT, so both facets
    // bordering every edge are collected.
    for( int i = 0; i < total*2; i++ ) {
        if((i == 0) || (i == total)) {
            cvStartReadSeq( (CvSeq*)(delaunay->edges), &reader, 0 );
        }
        CvQuadEdge2D* edge = (CvQuadEdge2D*)(reader.ptr);
        if( CV_IS_SET_ELEM( edge )) {
            CvSubdiv2DEdge curedge = (CvSubdiv2DEdge)edge;
            CvSubdiv2DEdge t = curedge;
            Triangle temptri;
            int vcount = 0;
            do {
                CvSubdiv2DPoint* pt = cvSubdiv2DEdgeOrg( t );
                if(vcount < 3) {
                    // Clamp vertices into the image bounds.
                    pt->pt.x = pt->pt.x >= image1->width ? image1->width-1 : pt->pt.x;
                    pt->pt.y = pt->pt.y >= image1->height ? image1->height-1 : pt->pt.y;
                    pt->pt.x = pt->pt.x < 0 ? 0 : pt->pt.x;
                    pt->pt.y = pt->pt.y < 0 ? 0 : pt->pt.y;
                    temptri.points[vcount] = cvPointFrom32f( pt->pt );
                } else {
                    printf("More than 3 edges\n");
                }
                vcount++;
                if(i < total)
                    t = cvSubdiv2DGetEdge( t, CV_NEXT_AROUND_LEFT );
                else
                    t = cvSubdiv2DGetEdge( t, CV_NEXT_AROUND_RIGHT );
            } while( t != curedge );

            if( std::find(trivec.begin(), trivec.end(), temptri) == trivec.end() ) {
                trivec.push_back(temptri);
                cvLine( image1, temptri.points[0], temptri.points[1], CV_RGB(255,0,0), 1, CV_AA, 0 );
                cvLine( image1, temptri.points[1], temptri.points[2], CV_RGB(255,0,0), 1, CV_AA, 0 );
                cvLine( image1, temptri.points[2], temptri.points[0], CV_RGB(255,0,0), 1, CV_AA, 0 );

                // Basis [x0 x1 x2; y0 y1 y2; 1 1 1]; inverse maps (x,y,1) to
                // barycentric coordinates.
                CvMat * barycen = cvCreateMat( 3, 3, CV_32FC1 );
                CvMat * baryceninv = cvCreateMat( 3, 3, CV_32FC1 );
                barycen->data.fl[3*0+0] = temptri.points[0].x;
                barycen->data.fl[3*0+1] = temptri.points[1].x;
                barycen->data.fl[3*0+2] = temptri.points[2].x;
                barycen->data.fl[3*1+0] = temptri.points[0].y;
                barycen->data.fl[3*1+1] = temptri.points[1].y;
                barycen->data.fl[3*1+2] = temptri.points[2].y;
                barycen->data.fl[3*2+0] = 1;
                barycen->data.fl[3*2+1] = 1;
                barycen->data.fl[3*2+2] = 1;
                cvInvert( barycen, baryceninv, CV_LU );
                baryinvvec.push_back(baryceninv);
                cvReleaseMat( &barycen );
            }
        }
        CV_NEXT_SEQ_ELEM( elem_size, reader );
    }
    printf("%zu triangles...", trivec.size());
    cvSaveImage("triangles.jpg", image1);
    cvSet( image1, cvScalarAll(255) );

    IplImage * clean_nonthresh = cvLoadImage( "conhull-clean.jpg", CV_LOAD_IMAGE_COLOR );

    /* ---- Warp pixels and remap texture vertices, triangle by triangle ---- */
    for(unsigned int i = 0; i < trivec.size(); i++) {
        Triangle curtri = trivec[i];
        CvMat * curpoints = cvCreateMat( 1, 3, CV_32SC2 );
        Triangle target;
        printf("Triangle %u / %zu\n", i, trivec.size());

        int is_corner = 0;
        for(int j = 0; j < 3; j++) {
            CV_MAT_ELEM( *curpoints, CvPoint, 0, j ) = curtri.points[j];
            printf("%d,%d\n", curtri.points[j].x, curtri.points[j].y);
            // Count how many vertices sit exactly on an image corner.
            if((curtri.points[j] == cvPoint(0,0)) ||
               (curtri.points[j] == cvPoint(0,image1->height - 1)) ||
               (curtri.points[j] == cvPoint(image1->width - 1,0)) ||
               (curtri.points[j] == cvPoint(image1->width - 1,image1->height - 1))) {
                is_corner++;
            }
            for(unsigned int k = 0; k < point_lookup.size(); k++) {
                if(point_lookup[k].first == curtri.points[j]) {
                    target.points[j] = point_lookup[k].second;
                    break;
                }
            }
        }

        // Skip triangles whose three vertices are all image corners
        // (identity-mapped, nothing to warp).
        if(is_corner < 3) {
            CvMat * newcorners = cvCreateMat( 3, 3, CV_32FC1 );
            newcorners->data.fl[3*0+0] = target.points[0].x;
            newcorners->data.fl[3*0+1] = target.points[1].x;
            newcorners->data.fl[3*0+2] = target.points[2].x;
            newcorners->data.fl[3*1+0] = target.points[0].y;
            newcorners->data.fl[3*1+1] = target.points[1].y;
            newcorners->data.fl[3*1+2] = target.points[2].y;
            newcorners->data.fl[3*2+0] = 1;
            newcorners->data.fl[3*2+1] = 1;
            newcorners->data.fl[3*2+2] = 1;

            CvContour hdr;
            CvSeqBlock blk;
            CvRect trianglebound = cvBoundingRect(
                cvPointSeqFromMat(CV_SEQ_KIND_CURVE+CV_SEQ_FLAG_CLOSED, curpoints, &hdr, &blk), 1 );
            printf("Bounding box: %d,%d,%d,%d\n",
                   trianglebound.x, trianglebound.y, trianglebound.width, trianglebound.height);

            // Scratch matrices allocated once per triangle instead of once
            // per pixel as the original did.
            CvMat * curp = cvCreateMat(3, 1, CV_32FC1);
            CvMat * result = cvCreateMat(3, 1, CV_32FC1);
            CvMat * sourcepoint = cvCreateMat(3, 1, CV_32FC1);

            for(int y = trianglebound.y;
                (y < (trianglebound.y + trianglebound.height)) && (y < image1->height); y++) {
                for(int x = trianglebound.x;
                    (x < (trianglebound.x + trianglebound.width)) && (x < image1->width); x++) {
                    curp->data.fl[0] = x;
                    curp->data.fl[1] = y;
                    curp->data.fl[2] = 1;
                    cvMatMul( baryinvvec[i], curp, result );
                    // Inside-triangle test: all three barycentric coordinates
                    // above MIN_VAL and summing to ~1.
                    if( (result->data.fl[0] > MIN_VAL) &&
                        (result->data.fl[1] > MIN_VAL) &&
                        (result->data.fl[2] > MIN_VAL) &&
                        (fabs(1.0 - (result->data.fl[0]+result->data.fl[1]+result->data.fl[2])) <= 0.01) ) {
                        cvMatMul( newcorners, result, sourcepoint );
                        double sourcex = sourcepoint->data.fl[0];
                        double sourcey = sourcepoint->data.fl[1];
                        if((sourcex >= 0) && (sourcey >= 0) &&
                           (sourcex < image1->width) && (sourcey < image1->height)) {
                            cvSet2D( image1, y, x,
                                     cvGet2D( clean_nonthresh, (int)sourcey, (int)sourcex ) );
                        }
                    }
                }
            }

            // Remap any clean_texture vertices that fall inside this triangle.
            for(int k = 0; k < verts; k++) {
                double x = clean_texture->data.fl[2*k+0];
                double y = clean_texture->data.fl[2*k+1];
                curp->data.fl[0] = x;
                curp->data.fl[1] = y;
                curp->data.fl[2] = 1;
                cvMatMul( baryinvvec[i], curp, result );
                if( (result->data.fl[0] > MIN_VAL) &&
                    (result->data.fl[1] > MIN_VAL) &&
                    (result->data.fl[2] > MIN_VAL) &&
                    (fabs(1.0 - (result->data.fl[0]+result->data.fl[1]+result->data.fl[2])) <= 0.01) ) {
                    cvMatMul( newcorners, result, sourcepoint );
                    double sourcex = sourcepoint->data.fl[0];
                    double sourcey = sourcepoint->data.fl[1];
                    if((sourcex >= 0) && (sourcey >= 0) &&
                       (sourcex < image1->width) && (sourcey < image1->height)) {
                        clean_texture->data.fl[2*k+0] = sourcex;
                        clean_texture->data.fl[2*k+1] = sourcey;
                    }
                }
            }
            cvReleaseMat( &sourcepoint );
            cvReleaseMat( &result );
            cvReleaseMat( &curp );
            cvReleaseMat( &newcorners );
        }
        cvReleaseMat( &curpoints );
    }
    cvReleaseImage( &clean_nonthresh );

    printf("done.\n");
    cvSaveImage("fullwarp.jpg", image1);

    printf("Drawing subdivisions on warped image...");
    draw_subdiv( image1, delaunay, NULL, NULL, 0, NULL );
    printf("done.\n");
    cvSaveImage("edgeswarp.jpg", image1);

    cvReleaseImage(&image2);
    image2 = cvLoadImage(im2fname, CV_LOAD_IMAGE_COLOR);

    // The unwarped-image overlay draws were disabled in the original; the
    // progress messages are kept for log compatibility.
    printf("Drawing subdivisions on unwarped image...");
    printf("done.\n");
    cvSaveImage("edges.jpg", image2);

    // Cleanup. The original leaked every inverse matrix in baryinvvec.
    for(unsigned int i = 0; i < baryinvvec.size(); i++) {
        cvReleaseMat( &baryinvvec[i] );
    }
    cvReleaseImage(&image1);
    cvFree(&source_points);
    cvFree(&dest_points);
    cvFree(&status);
    cvReleaseMemStorage(&storage);
    cvFree(&delaunay_points);
    cvReleaseImage(&image2);
    return 0;
}
void moCalibrationModule::transformPoints() { // Calibration & triangulation is done. Just convert the point coordinates. moDataGenericList *blobs = static_cast<moDataGenericList*>(input->getData()); moDataGenericList::iterator it; this->blobs.clear(); for (it = blobs->begin(); it != blobs->end(); it++) { // Get the camera/surface coordinates of the blob double blob_x = (*it)->properties["x"]->asDouble(); double blob_y = (*it)->properties["y"]->asDouble(); // Find the closest edge the point falls onto or right to CvSubdiv2DEdge edge; CvSubdiv2DPoint* vertex; CvPoint2D32f P = cvPoint2D32f(blob_x, blob_y); CvSubdiv2DPointLocation loc; loc = cvSubdiv2DLocate(this->subdiv, P, &edge, &vertex); if (loc == CV_PTLOC_VERTEX) { moPoint screen_point = this->delaunayToScreen[vertex]; moDataGenericContainer *blob = new moDataGenericContainer(); blob->properties["type"] = new moProperty("trackedblob"); blob->properties["blob_id"] = new moProperty((*it)->properties["blob_id"]->asInteger()); blob->properties["implements"] = new moProperty((*it)->properties["implements"]->asString()); // FIXME copy all the properties, not just x/y blob->properties["x"] = new moProperty(screen_point.x); blob->properties["y"] = new moProperty(screen_point.y); this->blobs.push_back(blob); continue; } // P is inside the triangle, so we must compute barycentric coords for P with // respect to the triangle. To find the triangle, we traverse the edges // around the right facet, to get the vertices that make up the triangle containing P. // // A, B, C are given in surface coordinates! 
CvSubdiv2DPoint* A = cvSubdiv2DEdgeOrg(edge); edge = cvSubdiv2DGetEdge( edge, CV_NEXT_AROUND_RIGHT ); CvSubdiv2DPoint* B = cvSubdiv2DEdgeOrg(edge); edge = cvSubdiv2DGetEdge( edge, CV_NEXT_AROUND_RIGHT ); CvSubdiv2DPoint* C = cvSubdiv2DEdgeOrg(edge); CvPoint2D32f a = A->pt, b = B->pt, c = C->pt; // XXX Should this computation turn out to be too costly for each blob in each frame, // it can be speeded up by computing a map of surface2screen points for each pixel // of the camera image and then just looking the appropriate coordinates up from the map! // // This is one way to compute the barycentric coordinates. Other attempts use // matrices and determinants or stuff like that. This approach suffices here. float entire_area = (a.x - b.x) * (a.y - c.y) - (a.y - b.y) * (a.x - c.x); float area_a = (blob_x - b.x) * (blob_y - c.y) - (blob_y - b.y) * (blob_x - c.x); float area_b = (a.x - blob_x) * (a.y - c.y) - (a.y - blob_y) * (a.x - c.x); // Compute the barycentric coords alpha, beta, gamma based on the area. float alpha = area_a / entire_area; float beta = area_b / entire_area; // Set alpha + beta + gamma == 1 (True if point inside triangle, // with alpha, beta and gamma each between 0 and 1) float gamma = 1.0f - alpha - beta; moPoint A_screen = this->delaunayToScreen[A]; moPoint B_screen = this->delaunayToScreen[B]; moPoint C_screen = this->delaunayToScreen[C]; // Transform the point into screen space by interpolating the three vertices // of the enclosing triangle in screen space with the barycentric coordinates. moPoint P_transformed; P_transformed.x = alpha*A_screen.x + beta*B_screen.x + gamma*C_screen.x; P_transformed.y = alpha*A_screen.y + beta*B_screen.y + gamma*C_screen.y; // TODO This should copy ALL blob attributes, even unknown ones (from different detectors) // Copy the blob, but adjust x/y // XXX Do we need to adjust w/h too? 
moDataGenericContainer *blob = new moDataGenericContainer(); blob->properties["type"] = new moProperty("trackedblob"); blob->properties["blob_id"] = new moProperty((*it)->properties["blob_id"]->asInteger()); blob->properties["implements"] = new moProperty((*it)->properties["implements"]->asString()); //blob->properties["w"] = new moProperty((*it)->properties["w"]->asDouble()); //blob->properties["h"] = new moProperty((*it)->properties["h"]->asDouble()); blob->properties["x"] = new moProperty(P_transformed.x); blob->properties["y"] = new moProperty(P_transformed.y); this->blobs.push_back(blob); LOG(MO_DEBUG, "transformed Point |" << " in: " << P.x << "," << P.y << " out: " << P_transformed.x << "," << P_transformed.y); } this->output->push(&this->blobs); this->notifyGui(); }