#include "Aria.h"

/* Test for ArMutex unlock-warning times: lock and unlock a mutex with various
   hold times and warning thresholds, then destroy a locked and an unlocked
   mutex. Exactly three warnings should be logged. */
int main(int argc, char **argv)
{
  Aria::init();
  ArMutex mutex;
  mutex.setLogName("test mutex");
  ArLog::log(ArLog::Normal, "This test succeeds if three (and only three) mutex lock/unlock time warnings follow.");

  puts("setting test_mutex warning time to 1 sec");
  mutex.setUnlockWarningTime(1); // 1 sec

  puts("locking and unlocking immediately, should not warn...");
  mutex.lock();
  mutex.unlock();       // should not warn

  puts("locking and unlocking after 2 sec, should warn...");
  mutex.lock();
  ArUtil::sleep(2000);  // 2 sec
  mutex.unlock();       // should warn

  puts("locking and unlocking after 0.5 sec, should not warn...");
  mutex.lock();
  ArUtil::sleep(500);   // 0.5 sec
  mutex.unlock();       // should not warn

  puts("setting test_mutex warning time to 0.5 sec");
  mutex.setUnlockWarningTime(0.5); // 0.5 sec

  puts("locking and unlocking after 0.6 sec, should warn...");
  mutex.lock();
  ArUtil::sleep(600);   // 0.6 sec
  mutex.unlock();       // should warn

  puts("locking and unlocking after 0.2 sec, should not warn...");
  mutex.lock();
  ArUtil::sleep(200);   // 0.2 sec
  mutex.unlock();       // should not warn

  puts("locking and unlocking immediately, should not warn...");
  mutex.lock();
  mutex.unlock();       // should not warn

  puts("setting test_mutex warning time to 0.1 sec");
  mutex.setUnlockWarningTime(0.1); // 0.1 sec

  puts("locking and unlocking after 0.2 sec, should warn...");
  mutex.lock();
  ArUtil::sleep(200);   // 0.2 sec
  mutex.unlock();       // should warn

  mutex.setUnlockWarningTime(0.0); // off
  mutex.lock();
  ArUtil::sleep(100);
  mutex.unlock();       // should not warn

  // Create and destroy a few mutexes, locking them, etc.
  ArMutex *m1 = new ArMutex();
  m1->setLogName("m1");
  m1->lock();
  ArMutex *m2 = new ArMutex();
  m2->lock();
  m2->setLogName("m2");

  puts("unlocking m1 before destroying it...");
  m1->unlock();
  delete m1;

  puts("NOT unlocking m2 before destroying it...");
  delete m2;

  puts("exiting with Aria::exit(0)...");
  Aria::exit(0);
}
/* Capture thread: grab frames from the default camera, convert them to
   grayscale under the video mutex, and flag new data for the streaming
   thread. */
void* VideoServerBase::runThread(void*)
{
  VideoCapture capture(-1);
  //int key=0;
  capture.set(CV_CAP_PROP_FRAME_WIDTH, 640);
  capture.set(CV_CAP_PROP_FRAME_HEIGHT, 480);
  capture.read(cap_img);

  /* run the stream server as a separate thread */
  ss.runAsync();

  while(1 /*key != 'q'*/)  // display routine
  {
    capture.read(cap_img);
    mutex_video.lock();
    cvtColor(cap_img, gray_img, CV_BGR2GRAY);
    //imshow(" ", gray_img); waitKey(1);
    is_data_ready = 1;
    mutex_video.unlock();
  }
}
void PtzCameraExample::handleCameraListReply(ArNetPacket *pkt)
{
  ArTypes::Byte2 numCams = pkt->bufToByte2();
  ArLog::log(ArLog::Normal, "%d cameras in list.", numCams);

  char camName[128];
  char camType[128];
  char displayName[128];
  char displayType[128];
  char cmdDesc[128];
  char cmdName[128];
  ArTypes::Byte4 cmdFreq;
  int dataReqFreq = 100;

  for(ArTypes::Byte2 i = 0; i < numCams; ++i)
  {
    memset(camName, 0, 128);
    memset(camType, 0, 128);
    memset(displayName, 0, 128);
    memset(displayType, 0, 128);

    pkt->bufToStr(camName, 128);      // name
    ArClientHandlerCamera *cam = new ArClientHandlerCamera(myClient, camName);
    pkt->bufToStr(camType, 128);      // type
    cam->type = camType;
    pkt->bufToStr(displayName, 128);  // display name
    cam->displayName = displayName;
    pkt->bufToStr(displayType, 128);  // display type
    cam->displayType = displayType;

    ArTypes::Byte2 numCmds = pkt->bufToByte2();
    ArLog::log(ArLog::Normal, "%d commands for camera '%s' (%s) / '%s' (%s)", numCmds, camName, camType, displayName, displayType);
    for(ArTypes::Byte2 c = 0; c < numCmds; ++c)
    {
      memset(cmdDesc, 0, 128);
      memset(cmdName, 0, 128);
      pkt->bufToStr(cmdDesc, 128);    // description
      pkt->bufToStr(cmdName, 128);    // request name
      cmdFreq = pkt->bufToByte4();    // recommended request frequency
      ArLog::log(ArLog::Normal, "Camera %s has %s command named %s with recommended request frequency %d.", camName, cmdDesc, cmdName, cmdFreq);
      if(strcmp(cmdDesc, "getCameraData") == 0)
        dataReqFreq = cmdFreq;
    }

    ArTypes::Byte2 numParams = pkt->bufToByte2();
    ArLog::log(ArLog::Normal, "Camera %s has %d parameters.", camName, numParams);
    for(ArTypes::Byte2 p = 0; p < numParams; ++p)
    {
      ArClientArg carg;
      ArConfigArg arg;
      if(!carg.bufToArgValue(pkt, arg))
        ArLog::log(ArLog::Normal, "Hmm, error getting ArClientArg for camera %s's parameter #%d.", camName, p);
    }

    cam->requestUpdates(dataReqFreq);

    mutex.lock();
    myCameras.insert(cam);
    mutex.unlock();
  }
}
bool PtzCameraExample::init()
{
  // If the server has the "getCameraList" request, then it's using
  // ArServerHandlerCameraCollection, and might have multiple PTZs/cameras, each
  // with its own set of requests. So send a "getCameraList" request; when its
  // reply is received, the handler will send "getCameraInfo" requests for each.
  // If the server does not have "getCameraList", it only has one PTZ camera, so
  // just send "getCameraInfo". The handler for that will send various control
  // commands.
  // If the server does not have "getCameraInfo" either, then it doesn't provide
  // any access to PTZ cameras.
  if(myClient->dataExists("getCameraList"))
  {
    ArLog::log(ArLog::Normal, "Server may have multiple cameras. Requesting list.");
    myClient->addHandler("getCameraList", &myCameraListReplyFunc);
    myClient->requestOnce("getCameraList");
  }
  else if(myClient->dataExists("getCameraInfo"))
  {
    ArLog::log(ArLog::Normal, "Server does not support multiple cameras. Requesting info for its camera.");
    ArClientHandlerCamera *camClient = new ArClientHandlerCamera(myClient, "");
    camClient->requestUpdates(100);
    mutex.lock();
    myCameras.insert(camClient);
    mutex.unlock();
  }
  else
  {
    ArLog::log(ArLog::Terse, "Error, server does not have any camera control requests. (Was the server run with video features enabled or video forwarding active?)");
    return false;
  }
  return true;
}
/* Callback for "RobotVideo" packets from the server. A frame arrives split
   across several packets, so each packet's data is appended into robot_img
   until a full 640x480 grayscale image has been received, then the frame is
   displayed. The mutex is held from the first packet of a frame until the
   frame is complete. */
void C_RobotVideoCB(ArNetPacket* robVideoPack)
{
  unsigned int width;   // unused
  unsigned int height;  // unused
  unsigned int vType;   // unused

  if(imgSize == 0)
  {
    mutex_robotVideo.lock();
    robImgPtr = robot_img.data;
  }
  else
    robImgPtr = robot_img.data + imgSize;

  robVideoPack->bufToData(robImgPtr, robVideoPack->getDataLength());
  imgSize += robVideoPack->getDataLength();
  //cout << imgSize << " robVideoPack->getDataLength() " << robVideoPack->getDataLength() << endl;

  if (imgSize >= 480*640)
  {
    imgSize = 0;
    mutex_robotVideo.unlock();
    //cout << "received" << endl;
    imshow("Robot1", robot_img);
    waitKey(1);
    //client->requestOnce("RobotVideo");
    //return;
  }
}
void* runThread(void*)
{
  size_t strSize;

  // The socket objects: one for accepting new client connections,
  // and another for communicating with a client after it connects.
  ArSocket serverSock, clientSock;

  // Open the server socket
  if (serverSock.open(VIDEO_PORT, ArSocket::TCP))
    ArLog::log(ArLog::Normal, " Opened the server port %d.", VIDEO_PORT);
  else
    ArLog::log(ArLog::Normal, " Failed to open the server port: %s.", serverSock.getErrorStr().c_str());

  if (serverSock.accept(&clientSock))
    ArLog::log(ArLog::Normal, " Client has connected.");
  else
    ArLog::log(ArLog::Terse, " Error in accepting a connection from the client: %s.", serverSock.getErrorStr().c_str());

  while(1)
  {
    /* send the grayscaled frame, thread safe */
    mutex_video.lock();
    if (is_data_ready)
    {
      int grayImgSize = gray_img.rows * gray_img.cols;
      //cout << " Sending image to the client.grayImgSize= " << grayImgSize;
      if ((strSize = clientSock.write(gray_img.data, grayImgSize)) == grayImgSize)
        ; //ArLog::log(ArLog::Normal, " Sent image to the client.grayImgSize= %d", grayImgSize);
      //else
      //  ArLog::log(ArLog::Normal, " Error in sending hello string to the client.");
      //ArLog::log(ArLog::Normal, " String Size: \"%d\"", strSize);
      is_data_ready = 0;
    }
    mutex_video.unlock();

    if(!clientSock.isOpen())
    {
      if (serverSock.accept(&clientSock))
        ArLog::log(ArLog::Normal, " Client has connected.");
      else
        ArLog::log(ArLog::Terse, " Error in accepting a connection from the client: %s.", serverSock.getErrorStr().c_str());
    }
  }

  // Now lets close the connection to the client
  clientSock.close();
  ArLog::log(ArLog::Normal, " Socket to client closed.");

  // And lets close the server port
  serverSock.close();
  ArLog::log(ArLog::Normal, " Server socket closed.");

  // Uninitialize Aria and exit the program
  //Aria::exit(0);
} //end of runThread
// Method called by accessor methods when properties change. This reconstructs
// the myReply packet sent in response to requests from clients.
void Circle::regenerate()
{
  myMutex.lock();
  myReply.empty();
  myReply.byte4ToBuf(myNumPoints);
  double a = 360.0 / myNumPoints;
  for(unsigned int i = 0; i < myNumPoints; ++i)
  {
    myReply.byte4ToBuf(ArMath::roundInt(myPos.getX() + ArMath::cos(i*a) * myRadius)); // X
    myReply.byte4ToBuf(ArMath::roundInt(myPos.getY() + ArMath::sin(i*a) * myRadius)); // Y
  }
  myMutex.unlock();
}
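/* For reference, the packet built by Circle::regenerate() above is just a
   4-byte point count followed by 4-byte X/Y pairs. The sketch below shows one
   way a client could decode such a reply; the handler name handleCircleReply
   and the printing of points are illustrative only, not part of the example
   above. */
void handleCircleReply(ArNetPacket *pkt)
{
  // Mirror the encoding in Circle::regenerate(): count first, then X/Y pairs.
  ArTypes::Byte4 numPoints = pkt->bufToByte4();
  for (ArTypes::Byte4 i = 0; i < numPoints; ++i)
  {
    ArTypes::Byte4 x = pkt->bufToByte4();
    ArTypes::Byte4 y = pkt->bufToByte4();
    ArLog::log(ArLog::Normal, "circle point %d: (%d, %d)", i, x, y);
  }
}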
void PtzCameraExample::run()
{
  enum { MinPan, MaxPan, Center1, MinTilt, MaxTilt, Center2 } stage;
  stage = MinPan;
  while(myClient->isConnected())
  {
    mutex.lock();
    for(std::set<ArClientHandlerCamera*>::const_iterator i = myCameras.begin(); i != myCameras.end(); ++i)
    {
      ArClientHandlerCamera* c = (*i);
      c->lock();
      switch(stage)
      {
        case MinPan:
          c->request.requestPanTiltAbs(c->minPan, 0);
          stage = MaxPan;
          break;
        case MaxPan:
          c->request.requestPanTiltAbs(c->maxPan, 0);
          stage = Center1;
          break;
        case Center1:
          c->request.requestPanTiltAbs(0, 0);
          stage = MinTilt;
          break;
        case MinTilt:
          c->request.requestPanTiltAbs(0, c->minTilt);
          stage = MaxTilt;
          break;
        case MaxTilt:
          c->request.requestPanTiltAbs(0, c->maxTilt);
          stage = Center2;
          break;
        case Center2:
          c->request.requestPanTiltAbs(0, 0);
          stage = MinPan;
      }
      c->unlock();
    }
    mutex.unlock();
    ArUtil::sleep(3000);
  }
}
/* Glasses camera thread: capture frames from the head-mounted camera, run
   motion-based mode switching, and in object-recognition mode try to recognize
   an object; when one is found, announce it, display the matched image, and
   switch to robot-search mode. */
void* GlassesVideo::runThread(void*)
{
  VideoCapture gl_capture(3);
  gl_capture.set(CV_CAP_PROP_FRAME_WIDTH, 640);
  gl_capture.set(CV_CAP_PROP_FRAME_HEIGHT, 480);
  if(!gl_capture.isOpened())
  {
    cout << "Cannot open glasses video!" << endl;
  }

  Mat gl_img, gl_img_OR;
  Mat curMat, preMat;
  //glassesOR glOR(&gl_img_OR);
  //glOR.stopRunning();
  ObjectRecognition gl_or("g20111105_4.yml.gz");
  Mat gl_img_bk;
  Mat glres_image;   // display result image
  int gl_result = 255;

  RobotSearch robotsearch;
  //robotsearch.create();
  robotsearch.stopRunning();

  //namedWindow("Glasses Video");
  //moveWindow("Glasses Video", 645, 0);
  namedWindow("Video Live");
  moveWindow("Video Live", 645, 0);
  namedWindow("Glasses_result", CV_WINDOW_NORMAL);
  moveWindow("Glasses_result", 1000, 600);
  //G_glassesMode = glassesOR;

  while(1)
  {
    gl_capture >> gl_img;
    cvtColor(gl_img, gl_img_bk, CV_RGB2GRAY);
    imshow("Video Live", gl_img_bk);
    waitKey(1);

    //---------------------------- glasses motion ------------------------
    preMat = gl_img.clone();
    //imshow("preMat", preMat);
    gl_capture >> curMat;
    //imshow("cur", curMat);
    modeSwitch(preMat, curMat);
    //---------------------------------------------------------------------

    if(G_glassesMode == glassesOR)  // OR MODE
    {
      // Run glasses object recognition
      //glOR.runAsync();
      gl_result = 255;
      gl_result = gl_or.find(gl_img_bk, 'G');
      //if(gl_result != 255)
      //{
      //  gl_capture >> gl_img;
      //  cvtColor(gl_img, gl_img_bk, CV_RGB2GRAY);
      //  imshow("Video Live", gl_img);
      //  waitKey(1);
      //  gl_result = 255;
      //  gl_result = gl_or.find(gl_img_bk, 'G');
      //  /*if(gl_result != 255)
      //  {
      //    gl_capture >> gl_img;
      //    cvtColor(gl_img, gl_img_bk, CV_RGB2GRAY);
      //    imshow("Video Live", gl_img);
      //    waitKey(1);
      //    gl_result = 255;
      //    gl_result = gl_or.find(gl_img_bk, 'G');
      //  }
      //  else gl_result = 255;*/
      //}
      //gl_result = 4;
      if(gl_result != 255)
      {
        //------------------------- display the result ------------------------
        robotSpeak(gl_result, "name");
        stringstream ret_src1;  // result src
        ObjectRecognition::loadImage(ret_src1, gl_result, 'G', 1);
        glres_image = imread(ret_src1.str());
        imshow("Glasses_result", glres_image);
        waitKey(1);

        //-------------------- glasses goes to robot search mode ----------------
        GlassesModeMutex.lock();
        CB.clear();
        G_glassesMode = robotSearch;
        G_Search_Step = 0;
        isDoneRobot = true;
        G_Target = gl_result / 5;
        gl_result = 255;
        HelpStartTime = time(NULL);
        //RobotCommand(CameraMotion); // cameraMotion
        GlassesModeMutex.unlock();

        //------------------------- open robot search thread --------------------
        if(!robotsearch.getRunning())
          robotsearch.runAsync();
      }
    }
  }
  //return 0;
}
void Circle::drawingServerCB(ArServerClient *client, ArNetPacket *pkt)
{
  myMutex.lock();
  client->sendPacketUdp(&myReply);
  myMutex.unlock();
}
void* runThread(void*)
{
  size_t strSize;

  // The socket objects (serverSock and clientSock, declared outside this
  // function in this version): one for accepting new client connections,
  // and another for communicating with a client after it connects.

  // Open the server socket
  if (serverSock.open(VIDEO_PORT, ArSocket::TCP))
    ArLog::log(ArLog::Normal, " Opened the server port %d.", VIDEO_PORT);
  else
    ArLog::log(ArLog::Normal, " Failed to open the server port: %s.", serverSock.getErrorStr().c_str());

  if (serverSock.accept(&clientSock))
    ArLog::log(ArLog::Normal, " Client has connected.");
  else
    ArLog::log(ArLog::Terse, " Error in accepting a connection from the client: %s.", serverSock.getErrorStr().c_str());

  //.setCloseCallback(new ArGlobalFunctor1<ArNetPacket*>(&clientCloseCallback));
  //serverSock.setCloseCallback(new ArGlobalFunctor1<ArNetPacket*>(&clientCloseCallback));

  while(1)
  {
    //int clientSocketStatus = serverSock.accept(&clientSock); // MODIFIED by Yang, try to accept a new client.
    //if (clientSocketStatus > 0) {
    //  char buf[1024] = {0};
    //  int receivedInt = clientSock.read(buf, 1024, 0);
    //  cout << receivedInt << endl;
    //  while(true)
    //  {

    /* send the grayscaled frame, thread safe */
    mutex_video.lock();
    if (is_data_ready)
    {
      int grayImgSize = gray_img.rows * gray_img.cols;
      //cout << " Sending image to the client.grayImgSize= " << grayImgSize;
      if ((strSize = clientSock.write(gray_img.data, grayImgSize)) == grayImgSize)
        ; //ArLog::log(ArLog::Normal, " Sent image to the client.grayImgSize= %d", grayImgSize);
      //else
      //  ArLog::log(ArLog::Normal, " Error in sending hello string to the client.");
      //ArLog::log(ArLog::Normal, " String Size: \"%d\"", strSize);
      is_data_ready = 0;
    }
    mutex_video.unlock();

    //if(!clientSock.isOpen())
    //{
    //  clientSock.close();
    //  ArLog::log(ArLog::Normal, " Socket to client closed.");
    //  if (serverSock.accept(&clientSock))
    //    ArLog::log(ArLog::Normal, " Client has connected.");
    //  else
    //    ArLog::log(ArLog::Terse, " Error in accepting a connection from the client: %s.", serverSock.getErrorStr().c_str());
    //}
  }
  //  }
  //}

  // Now lets close the connection to the client
  clientSock.close();
  ArLog::log(ArLog::Normal, " Socket to client closed.");

  // And lets close the server port
  //serverSock.close();
  //ArLog::log(ArLog::Normal, " Server socket closed.");

  // Uninitialize Aria and exit the program
  //Aria::exit(0);
} //end of runThread
void lock() { myMutex.lock(); }
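/* The lock() wrapper above (and the manual lock()/unlock() pairs in the other
   snippets) relies on every code path remembering to call unlock(). Below is a
   minimal RAII sketch that releases an ArMutex automatically when a scope
   exits; ScopedArLock is a hypothetical helper written for illustration, not
   an ARIA class. */
class ScopedArLock
{
public:
  explicit ScopedArLock(ArMutex &m) : myMutex(m) { myMutex.lock(); }
  ~ScopedArLock() { myMutex.unlock(); }
private:
  ScopedArLock(const ScopedArLock &);             // non-copyable
  ScopedArLock &operator=(const ScopedArLock &);
  ArMutex &myMutex;
};

// Usage sketch: the mutex is unlocked even on early return.
// void Circle::regenerate()
// {
//   ScopedArLock guard(myMutex);
//   myReply.empty();
//   ...
// }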
void* RobotVideo::runThread(void* arg)
{
  cvNamedWindow("Robot", CV_WINDOW_AUTOSIZE);
  moveWindow("Robot", 0, 0);

  // Buffer in which to receive one raw 640x480 grayscale frame from the server
  //unsigned char buff[12];
  unsigned char imgBuff[480*640];
  //Mat imgRecieve(480,640,CV_8UC1);

  // Number of bytes received so far for the current frame
  size_t strSize;

  // The socket object
  ArSocket sock;
  unsigned char *ptr;

  // Initialize Aria. It is especially important to do this on Windows,
  // because it will initialize Windows' sockets system.
  // Aria::init();

  // Connect to the server
  //ArLog::log(ArLog::Normal, "socketClientExample: Connecting to localhost TCP port 7777...");
  if (sock.connect(server_ip, ROBOT_VIDEO_PORT, ArSocket::TCP))
    ArLog::log(ArLog::Normal, "Connected to server at %s TCP port %d.", server_ip, ROBOT_VIDEO_PORT);
  else
  {
    ArLog::log(ArLog::Terse, "Error connecting to server at %s TCP port %d: %s", server_ip, ROBOT_VIDEO_PORT, sock.getErrorStr().c_str());
    //return(-1);
  }

  //---------------------------VideoWriter---------------------------------------
  robotVideo.open("robot.avi", CV_FOURCC('M','J','P','G') /* CV_FOURCC('P','I','M','1') */,
                  20 /*inputVideo.get(CV_CAP_PROP_FPS)*/, Size(640, 480), false);
  if (!robotVideo.isOpened())
  {
    cout << "Could not open the output video for write: " /*<< source*/ << endl;
  }

  while(1)
  {
    // Read one full frame (640*480 bytes) from the socket, a few bytes at a time.
    ptr = &imgBuff[0];
    strSize = 0;
    while (strSize < 640 * 480)
    {
      //ArLog::log(ArLog::Normal, "socketClientExample: String Size: \"%d\"", strSize);
      int n = sock.read(ptr, 2);
      if (n <= 0)
        break;       // connection closed or read error
      strSize += n;
      ptr += n;      // advance by the number of bytes actually read
    }

    // Copy the received bytes into robot_img, then display and record the frame.
    ptr = robot_img.datastart;
    mutex_robotVideo.lock();
    for (int i = 0, k = 0; i < robot_img.rows * robot_img.cols; i++)
    {
      *(ptr++) = imgBuff[k++];
    }
    imshow("Robot", robot_img);
    robotVideo << robot_img;
    mutex_robotVideo.unlock();
    waitKey(1); /*0)==27) break;*/
  }

  sock.close();
  //outputVideo.release();

  // Now close the connection to the server
  ArLog::log(ArLog::Normal, "Socket to server closed.");
  destroyWindow("Robot");

  // Uninitialize Aria and exit
  //Aria::exit(0);
  return NULL;
}