/*! \fn void Camera::SendARtag() * \brief Send the ARtag id and pose info over to remote through udp. * \see ARtag */ void Camera::SendARtag() { Packet packet; memset(&packet, -1, sizeof(packet)); packet.type = DATA; int numARtags = ar->getARtagSize(); numARtags = numARtags < MAXARTAGSEEN ? numARtags:MAXARTAGSEEN; for (int i = 0; i < numARtags; ++i) { ARtag* tag = ar->getARtag(i); packet.u.data.tagId[i] = tag->getId(); cv::Mat pose = tag->getPose(); packet.u.data.x[i] = pose.at<float>(0,3)/1000.f; packet.u.data.y[i] = pose.at<float>(1,3)/1000.f; packet.u.data.z[i] = pose.at<float>(2,3)/1000.f; packet.u.data.yaw[i] = -atan2(pose.at<float>(1,0), pose.at<float>(0,0)); if (packet.u.data.yaw[i] < 0) { packet.u.data.yaw[i] += 6.28; } } if (sendto(_sock, (unsigned char*)&packet, ARTAG_PACKET_SIZE, 0, (const struct sockaddr *)&_artagPort, sizeof(struct sockaddr_in)) < 0) printf("sendto\n"); }
// Slot invoked for each captured FCam frame: converts the frame to RGB,
// builds a grayscale copy, runs ARtag localization, pushes one OverlayText
// record per detected tag to the viewfinder, and broadcasts the packed
// records in a single UDP datagram.
void ObjDetect::imageCaptured ( const FCam::Frame & frame )
{
    IplImage * img = capture_rgb24 ( frame );
    // printf("[imageCaptured]: I got here! yeye!\n");
    // The tag detector works on single-channel images, so convert to grayscale.
    IplImage * gray = cvCreateImage(cvSize(img->width,img->height),IPL_DEPTH_8U,1);
    cvCvtColor(img,gray,CV_RGB2GRAY);
    // NOTE(review): img is stored into the img1 member but never released in
    // this function — confirm ownership/release happens elsewhere, otherwise
    // this leaks one IplImage per frame.
    // if ( !img1 )
    // {
    img1 = img;
    // }
    // else
    // {
    // img2 = img;
    // thread = new ObjDetectThread ( this, img1, img2,
    // ui->getFeatureType (), ui->getMatchType () );
    // // QObject::connect ( thread, SIGNAL ( progressUpdate ( int ) ),
    // // progressBar, SLOT ( setValue ( int ) ) );
    // QObject::connect ( thread, SIGNAL ( finished () ),
    // this, SLOT ( computed () ) );
    // thread->start ();
    // }
    isready = true;
    // Detect tags in the grayscale frame; results are cached inside arloc
    // and read back below via getARtagSize()/getARtag().
    arloc.getARtagPose(gray,img1,0);
    ARtag * ar;
    CvMat * pose;
    OverlayText ot;
    QByteArray ba;
    // ot.u + ot.v + ot.ID + ot.poseX + ot.poseY + ot.poseZ + ot.timestamp ;
    // One fixed-size OverlayText record per detected tag.
    int packetSize = sizeof(struct OverlayText)*arloc.getARtagSize();
    ba.resize(packetSize);
    for (int n = 0; n < arloc.getARtagSize(); ++n)
    {
        ar = arloc.getARtag(n);
        pose = ar->getPose();
        ot.u = ar->getU();
        ot.v = ar->getV();
        ot.ID = ar->getId();
        // Translation column of the 4x4 pose matrix.
        ot.poseX = CV_MAT_ELEM(*pose , float, 0, 3);
        ot.poseY = CV_MAT_ELEM(*pose , float, 1, 3);
        ot.poseZ = CV_MAT_ELEM(*pose , float, 2, 3);
        // Elapsed timer milliseconds converted to seconds.
        ot.timestamp = (double)t.elapsed()/1000.0;
        ui->getViewfinder()->setText(ot);
        // Pack this record into the outgoing datagram buffer.
        memcpy(ba.data()+n*sizeof(struct OverlayText),(char*)&ot,sizeof(struct OverlayText));
    }
    ui->udpSender->broadcastDatagram(ba);
    cvReleaseImage(&gray);
    // NOTE(review): isready was already set to true before detection above;
    // this second assignment is redundant.
    isready = true;
    completed = true;
    // image = QImage ( ( const uchar * )img1->imageData,
    // img1->width,
    // img1->height,
    // img1->widthStep,
    // QImage::Format_RGB888 );
    // canvas = new QLabel ( this );
    // // progressBar->hide ();
    // QHBoxLayout * layout = new QHBoxLayout ( this );
    // layout->addWidget ( canvas );
    // setLayout ( layout );
    // QPicture picture;
    // QPainter painter;
    // painter.begin ( &picture );
    //// painter.drawImage ( QRect ( QPoint ( 0, 0 ), QPoint ( 800, 600 ) ), image );
    // painter.drawText(arloc.getARtag());
    // painter.end ();
    // canvas->setPicture ( picture );
    // canvas->show ();
}
bool ARtagLocalizer::getARtagPose(IplImage* src, IplImage* dst, int camID) { if (!init) { printf("Did not initalize the ARtagLocalizer!!\n"); return NULL; } if (src->width != imgwidth || src->height != imgheight) { printf("src->width: %d src->height %d\n", src->width, src->height); printf("imgwidth: %d imgheight %d\n", imgwidth, imgheight); printf("Image passed in does not match initialized image size!!\n"); return NULL; } if (src->nChannels != 1) { printf("Please pass in grayscale image into ARtagLocalizer! \n"); return NULL; } int numMarkers = 0; ARToolKitPlus::ARMarkerInfo* markers = NULL; if (tracker->arDetectMarker(const_cast<unsigned char*>((unsigned char*)src->imageData), 150, &markers, &numMarkers) < 0) { return false; } mytag.clear(); float modelViewMatrix_[16]; for(int m = 0; m < numMarkers; ++m) { if(markers[m].id != -1 && markers[m].cf >= 0.5) { tracker->calcOpenGLMatrixFromMarker(&markers[m], patternCenter_, patternWidth_, modelViewMatrix_); float x = modelViewMatrix_[12] / 1000.0; float y = modelViewMatrix_[13] / 1000.0; float z = modelViewMatrix_[14] / 1000.0; float yaw = -atan2(modelViewMatrix_[1], modelViewMatrix_[0]); if (yaw < 0) { yaw += 6.28; } if ((x == 0.0 && y == 0.0 && yaw == 0.0) || (x > 10000.0 && y > 10000.0) || (x < -10000.0 && y < -10000.0) || (z <= 0.001)) { // ARTKPlus bug that occurs sometimes continue; } // printf("Id: %d\t Conf: %.2f\n", markers[m].id, markers[m].cf); // printf("x: %.2f \t y: %.2f \t z: %.2f \t yaw: %.2f\n", x,y,z,yaw); // printf("\n"); // char str[30]; // sprintf(str,"%d",markers[m].id); // cvPutText (dst,str,cvPoint( markers[m].pos[0]+25,markers[m].pos[1]+10),&cvFont(3,3),cvScalar(255,0,0)); // sprintf(str,"(%.2f,%.2f,%.2f)", x*fudge + xoffset, -(y*fudge + yoffset), yaw + yawoffset); // cvPutText (dst,str,cvPoint( markers[m].pos[0]+25,markers[m].pos[1]+25),&cvFont(1,1),cvScalar(255,0,0)); cv::Mat PoseM(4, 4, CV_32F, modelViewMatrix_); cv::transpose(PoseM,PoseM); CvMat pose = PoseM; // save artag struct for 
access later if (markers[m].id >= 0 && markers[m].id < 50 && !allStop) { // EnterCriticalSection(&tags_mutex); // ARtag * ar = tags[markers[m].id]; // ar->setId(markers[m].id); // ar->setPose(&pose); // ar->setPoseAge(0); // ar->setCamId(camID); // ar->setLocation(markers[m].pos[0], markers[m].pos[1]); ARtag mt; mt.setId(markers[m].id); mt.setPose(&pose); mt.setPoseAge(0); mt.setCamId(camID); mt.setLocation(markers[m].pos[0], markers[m].pos[1]); // LeaveCriticalSection(&tags_mutex); mytag.push_back(mt); } } } return true; }
bool getARtagPose(IplImage * src, IplImage *dst, vector<ARtag>& tags, int camNum) { if (trackers[camNum] == NULL) { // This camera does not have a tracker object. return false; } if (!init) { printf("Did not initalize the ARtagLocalizer!!\n"); return NULL; } if (src->width != imgwidth || src->height != imgheight) { printf("Image passed in does not match initialized image size!!\n"); return NULL; } if (src->nChannels != 1) { printf("Please pass in grayscale image into ARtagLocalizer! \n"); return NULL; } int n = 0; for(int i = 0; i < src->height; ++i) for(int j = 0; j < src->width; ++j) cameraBuffer[n++] = CV_IMAGE_ELEM(src,uchar,i,j); /*const char* description = tracker->getDescription(); printf("ARToolKitPlus compile-time information:\n%s\n\n", description);*/ int numMarkers = 0; ARToolKitPlus::ARMarkerInfo* markers = NULL; if (trackers[camNum]->arDetectMarker(const_cast<unsigned char*>(cameraBuffer), 150, &markers, &numMarkers) < 0) { return false; } float modelViewMatrix_[16]; for(int m = 0; m < numMarkers; ++m) { if(markers[m].id != -1 && markers[m].cf >= 0.5) { trackers[camNum]->calcOpenGLMatrixFromMarker(&markers[m], patternCenter_, patternWidth_, modelViewMatrix_); ARtag ar; ar.setId(markers[m].id); float x = modelViewMatrix_[12] / 1000.0; float y = modelViewMatrix_[13] / 1000.0; float z = modelViewMatrix_[14] / 1000.0; float yaw = -atan2(modelViewMatrix_[1], modelViewMatrix_[0]); if ((x == 0 && y == 0 && yaw == 0) || (x > 10000 && y > 10000) || (x < -10000 && y < -10000) || z <= 0.01) { // ARTKPlus bug that occurs sometimes continue; } char str[30]; sprintf(str,"%d",markers[m].id); cvPutText (dst,str,cvPoint( markers[m].pos[0]+25,markers[m].pos[1]+10),&cvFont(3,3),cvScalar(0,0,255)); sprintf(str,"(%.2f,%.2f,%.2f)", x, y, yaw); cvPutText (dst,str,cvPoint( markers[m].pos[0]+25,markers[m].pos[1]+25),&cvFont(1,1),cvScalar(0,0,255)); cv::Mat PoseM(4, 4, CV_32F, modelViewMatrix_); cv::transpose(PoseM,PoseM); CvMat pose = PoseM; ar.setPose(&pose); 
ar.setPoseAge(0); tags.push_back(ar); } } //google::protobuf::ShutdownProtobufLibrary(); //printf("\n\n"); return true; }