/*! \fn void Camera::SendARtag()
 *  \brief Send the ARtag IDs and pose information to the remote host over UDP.
 *  \see ARtag
 */
void Camera::SendARtag()
{
    Packet packet;
    memset(&packet, -1, sizeof(packet));  // mark unused tag slots with -1
    packet.type = DATA;

    int numARtags = ar->getARtagSize();
    numARtags = numARtags < MAXARTAGSEEN ? numARtags : MAXARTAGSEEN;

    for (int i = 0; i < numARtags; ++i)
    {
        ARtag* tag = ar->getARtag(i);
        packet.u.data.tagId[i] = tag->getId();

        cv::Mat pose = tag->getPose();
        // Translation part of the pose: convert millimeters to meters.
        packet.u.data.x[i] = pose.at<float>(0, 3) / 1000.f;
        packet.u.data.y[i] = pose.at<float>(1, 3) / 1000.f;
        packet.u.data.z[i] = pose.at<float>(2, 3) / 1000.f;

        // Yaw from the rotation part of the pose, normalized to [0, 2*pi).
        packet.u.data.yaw[i] = -atan2(pose.at<float>(1, 0), pose.at<float>(0, 0));
        if (packet.u.data.yaw[i] < 0)
        {
            packet.u.data.yaw[i] += 2.0 * M_PI;
        }
    }

    if (sendto(_sock, (unsigned char*)&packet, ARTAG_PACKET_SIZE, 0,
               (const struct sockaddr *)&_artagPort, sizeof(struct sockaddr_in)) < 0)
        perror("sendto");
}
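/*
 * Hedged sketch, not part of the original source: the real Packet layout,
 * DATA, MAXARTAGSEEN, ARTAG_PACKET_SIZE and the port number live in the
 * project's shared headers. The struct and constants below are assumptions
 * that only mirror the fields SendARtag() fills, so a remote peer could
 * decode the datagram; field order, types, and padding may differ in the
 * actual code.
 */
#include <arpa/inet.h>
#include <netinet/in.h>
#include <sys/socket.h>
#include <unistd.h>
#include <cstdio>
#include <cstring>

static const int MAXARTAGSEEN = 10;   // assumed value
enum PacketType { DATA = 0 };         // assumed enum

struct Packet {                       // assumed layout, see note above
    int type;
    union {
        struct {
            int   tagId[MAXARTAGSEEN];
            float x[MAXARTAGSEEN];
            float y[MAXARTAGSEEN];
            float z[MAXARTAGSEEN];
            float yaw[MAXARTAGSEEN];
        } data;
    } u;
};

// Minimal blocking receiver: bind to the ARtag port and print each tag's id
// and pose. The sender memsets the packet to -1, so a tagId of -1 marks an
// unused slot under the assumed layout.
int main()
{
    int sock = socket(AF_INET, SOCK_DGRAM, 0);
    if (sock < 0) { perror("socket"); return 1; }

    sockaddr_in addr;
    memset(&addr, 0, sizeof(addr));
    addr.sin_family = AF_INET;
    addr.sin_addr.s_addr = htonl(INADDR_ANY);
    addr.sin_port = htons(5000);      // assumed port
    if (bind(sock, (sockaddr*)&addr, sizeof(addr)) < 0) { perror("bind"); return 1; }

    Packet packet;
    while (recv(sock, &packet, sizeof(packet), 0) > 0) {
        if (packet.type != DATA) continue;
        for (int i = 0; i < MAXARTAGSEEN && packet.u.data.tagId[i] != -1; ++i)
            printf("tag %d: x=%.3f y=%.3f z=%.3f yaw=%.3f\n",
                   packet.u.data.tagId[i],
                   packet.u.data.x[i], packet.u.data.y[i],
                   packet.u.data.z[i], packet.u.data.yaw[i]);
    }
    close(sock);
    return 0;
}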
void ObjDetect::imageCaptured ( const FCam::Frame & frame )
{
    IplImage * img = capture_rgb24 ( frame );
    // printf("[imageCaptured]: I got here! yeye!\n");

    // Convert the captured frame to grayscale for ARtag detection.
    IplImage * gray = cvCreateImage(cvSize(img->width, img->height), IPL_DEPTH_8U, 1);
    cvCvtColor(img, gray, CV_RGB2GRAY);

    // if ( !img1 )
    // {
        img1 = img;
    // }
    // else
    // {
    //     img2 = img;
    //     thread = new ObjDetectThread ( this, img1, img2,
    //                                    ui->getFeatureType (), ui->getMatchType () );
    //     // QObject::connect ( thread, SIGNAL ( progressUpdate ( int ) ),
    //     //                    progressBar, SLOT ( setValue ( int ) ) );
    //     QObject::connect ( thread, SIGNAL ( finished () ),
    //                        this, SLOT ( computed () ) );
    //     thread->start ();
    // }

    isready = true;
    arloc.getARtagPose(gray, img1, 0);

    ARtag * ar;
    CvMat * pose;
    OverlayText ot;
    QByteArray ba;
    // One OverlayText record per detected tag:
    // ot.u + ot.v + ot.ID + ot.poseX + ot.poseY + ot.poseZ + ot.timestamp
    int packetSize = sizeof(struct OverlayText) * arloc.getARtagSize();
    ba.resize(packetSize);

    // Fill one OverlayText per detected tag, draw it on the viewfinder,
    // and pack it into the outgoing datagram.
    for (int n = 0; n < arloc.getARtagSize(); ++n)
    {
        ar = arloc.getARtag(n);
        pose = ar->getPose();
        ot.u = ar->getU();
        ot.v = ar->getV();
        ot.ID = ar->getId();
        ot.poseX = CV_MAT_ELEM(*pose, float, 0, 3);
        ot.poseY = CV_MAT_ELEM(*pose, float, 1, 3);
        ot.poseZ = CV_MAT_ELEM(*pose, float, 2, 3);
        ot.timestamp = (double)t.elapsed() / 1000.0;
        ui->getViewfinder()->setText(ot);
        memcpy(ba.data() + n * sizeof(struct OverlayText), (char*)&ot, sizeof(struct OverlayText));
    }
    ui->udpSender->broadcastDatagram(ba);

    cvReleaseImage(&gray);
    isready = true;
    completed = true;

    // image = QImage ( ( const uchar * )img1->imageData,
    //                  img1->width,
    //                  img1->height,
    //                  img1->widthStep,
    //                  QImage::Format_RGB888 );
    // canvas = new QLabel ( this );
    // // progressBar->hide ();
    // QHBoxLayout * layout = new QHBoxLayout ( this );
    // layout->addWidget ( canvas );
    // setLayout ( layout );
    // QPicture picture;
    // QPainter painter;
    // painter.begin ( &picture );
    //// painter.drawImage ( QRect ( QPoint ( 0, 0 ), QPoint ( 800, 600 ) ), image );
    // painter.drawText(arloc.getARtag());
    // painter.end ();
    // canvas->setPicture ( picture );
    // canvas->show ();
}
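/*
 * Hedged sketch, not part of the original source: OverlayText is defined
 * elsewhere in the project; the struct below is an assumption that only
 * mirrors the fields imageCaptured() fills (u, v, ID, poseX/Y/Z, timestamp).
 * It shows how a peer could unpack the broadcast datagram by walking it in
 * sizeof(OverlayText) strides, matching the memcpy packing above. Field
 * types and padding in the real header may differ.
 */
#include <QtNetwork/QUdpSocket>
#include <QByteArray>
#include <cstdio>
#include <cstring>

struct OverlayText {          // assumed layout, see note above
    int    u, v, ID;
    float  poseX, poseY, poseZ;
    double timestamp;
};

// Reads one pending datagram from the bound socket and unpacks each
// OverlayText record it contains.
void readOverlayDatagram(QUdpSocket &socket)
{
    QByteArray ba(int(socket.pendingDatagramSize()), 0);
    socket.readDatagram(ba.data(), ba.size());

    const int count = ba.size() / int(sizeof(OverlayText));
    for (int n = 0; n < count; ++n) {
        OverlayText ot;
        memcpy(&ot, ba.constData() + n * sizeof(OverlayText), sizeof(OverlayText));
        printf("tag %d at pixel (%d,%d): pose (%.1f, %.1f, %.1f), t=%.3f s\n",
               ot.ID, ot.u, ot.v, ot.poseX, ot.poseY, ot.poseZ, ot.timestamp);
    }
}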