// Regression test: an unbuffered Subscriber on a topic must receive a
// message published through a write-only BufferedPort on the same topic.
void testUnbufferedSubscriber() {
    report(0,"Unbuffereded Subscriber test");
    Node n("/node");                      // publisher-side node
    BufferedPort<Bottle> pout;
    pout.setWriteOnly();                  // publisher only: no input traffic
    pout.open("very_interesting_topic");  // NOTE(review): no leading '/' here, unlike the subscriber side -- confirm intended
    {
        // Subscriber lives in an inner scope so it (and its node) shut down
        // before the publisher does.
        Node n2("/node2");
        Subscriber<Bottle> pin("/very_interesting_topic");
        waitForOutput(pout,10);           // wait (up to 10s) for the connection to form
        Bottle& b = pout.prepare();
        b.clear();
        b.addInt(42);
        pout.write();
        Bottle bin;
        bin.addInt(99);                   // sentinel value; must be overwritten by the read
        pin.read(bin);                    // blocking read into bin
        pout.waitForWrite();              // make sure the background write finished
        checkEqual(bin.get(0).asInt(),42,"message is correct");
    }
}
// Releases the owned action object (if any) and closes the module's ports.
// The action pointer is reset after deletion so a repeated close() cannot
// double-delete it (the original left it dangling).
virtual bool close() {
    if (action!=NULL) {
        delete action;
        action=NULL;      // guard against double delete on a second close()
    }
    if (openPorts) {
        inPort.close();
        rpcPort.close();
    }
    return true;
}
bool getPointTrack(double tableHeight) { // Put the input image at the moment of computing out printf("Propagating image!!\n"); ImageOf<PixelRgb> *imgIn = imInPort.read(); // read an image if(imgIn == NULL) { printf("No object tracked \n"); return false; } ImageOf<PixelRgb> &imOut = imOutPort.prepare(); imOut = *imgIn; imOutPort.write(); // Retrieves and relays the 2D coordinates of the object tracked by the tracker //Bottle &out = coordsOutPort.prepare(); printf("Getting 3D coords of tracked object!!\n"); Bottle *trackCoords = trackInPort.read(true); coords2D.resize(2,0); coords2D[0] = trackCoords->get(0).asInt(); coords2D[1] = trackCoords->get(1).asInt(); printf("Point in 2D read: %.2f, %.2f!!\n", coords2D(0), coords2D(1)); Vector table(4); // specify the plane in the root reference frame as ax+by+cz+d=0; z=-tableHeight in this case table[0] = 0.0; table[1] = 0.0; table[2] = 1.0; table[3] = -tableHeight; // d -> so the equation of the table plane is z=-h int camSel; if (camera != "left") { camSel = 1;} else { camSel = 0;} if(igaze->get3DPointOnPlane(camSel,coords2D, table, coords3D)){ printf("Point in 3D computed: %.2f, %.2f, %.2f!!\n", coords3D(0), coords3D(1), coords3D(2)); return true; } else { printf("3D point could not be computed\n"); return false; } }
bool updateModule(){ static int userLastStatus[MAX_USERS] = {-1}; for(int userID = 0; userID < MAX_USERS; userID++){ if(userLastStatus[userID] != _kinect->getSkeletonState(userID)){ userLastStatus[userID] = _kinect->getSkeletonState(userID); printf("USER %d HAS STATUS %d\n",userID,userLastStatus[userID]); } Matrix skeletonOrientation[TOTAL_JOINTS]; Vector skeletonPosition[TOTAL_JOINTS]; double confPosition[TOTAL_JOINTS], confOrientation[TOTAL_JOINTS]; if(!_kinect->getSkeletonOrientation(skeletonOrientation,confOrientation,userID)) continue; _kinect->getSkeletonPosition(skeletonPosition,confPosition,userID); _glWindow->setData(userID,userLastStatus[userID],skeletonOrientation, confPosition,skeletonPosition,confOrientation); _glWindow->redraw(); } ImageOf<PixelInt> img = _kinect->getDepthMap(); imgPort.prepare() = img; imgPort.write(); return true; };
void SamInit(void) { puts("in laser"); laser.setSerialPort("COM10"); puts("set serial \n Starting laser, this may take a minute"); bool TurnedOn = false; while(!TurnedOn) // sometimes the coms are busy, wait untill we get what we want { try{TurnedOn = laser.turnOn();} catch (...){} } puts("laser turned on"); RecognisePort("Out"); StartModule("/Laser"); myfirst.open("/Laser_Out"); //myPortStatus myfirst.setReporter(myPortStatus); }
bool getPointClick() { printf("Click on the object in the image!!\n"); //Bottle &out = coordsOutPort.prepare(); Bottle *obj2Dcoord = coordsInPort.read(true); printf("Point read!!\n"); coords2D.resize(2,0.0); coords2D[0] = obj2Dcoord->get(0).asInt(); coords2D[1] = obj2Dcoord->get(1).asInt(); printf("Point in 2D read: %.2f, %.2f!!\n", coords2D(0), coords2D(1)); return true; // get the projection }
// Reads the module's configuration, opens its ports, and (optionally)
// autoconnects them to the skin/pps producers and iCubGui.
bool configure(ResourceFinder &rf)
{
    // General parameters (defaults shown in the Value() calls).
    context   = rf.check("context",Value("periPersonalSpace")).asString();
    from      = rf.check("from",Value("ppsAggregEventsForiCubGui.ini")).asString();
    name      = rf.check("name",Value("ppsAggregEventsForiCubGui")).asString();
    verbosity = rf.check("verbosity",Value(0)).asInt();

    // on | off switches
    autoconnect = (rf.check("autoconnect",Value("off")).asString()=="on");
    tactile     = (rf.check("tactile",Value("on")).asString()=="on");
    pps         = (rf.check("pps",Value("on")).asString()=="on");
    gain        = rf.check("gain",Value(50.0)).asDouble();

    yInfo("[ppsAggregEventsForiCubGui] Initial Parameters:");
    yInfo("Context: %s \t From: %s \t Name: %s \t Verbosity: %d",
          context.c_str(),from.c_str(),name.c_str(),verbosity);
    yInfo("Autoconnect : %d \n tactile: %d \n pps: %d \n gain: %f \n",
          autoconnect,tactile,pps,gain);

    //open ports
    if(tactile)
    {
        aggregSkinEventsInPort.open("/"+name+"/skin_events_aggreg:i");
    }
    if(pps)
    {
        aggregPPSeventsInPort.open("/"+name+"/pps_events_aggreg:i");
    }
    aggregEventsForiCubGuiPort.open("/"+name+"/contacts:o");

    if (autoconnect)
    {
        Network::connect("/skinEventsAggregator/skin_events_aggreg:o",("/"+name+"/skin_events_aggreg:i").c_str());
        Network::connect("/visuoTactileRF/pps_events_aggreg:o",("/"+name+"/pps_events_aggreg:i").c_str());
        Network::connect(("/"+name+"/contacts:o").c_str(),"/iCubGui/forces");
    }

    return true;
}
// Publishes the 6-component localization result on a transient output port
// ("/<outPortName>/data:o") and closes it again.
// The write is made strict and awaited before close(): the original used an
// asynchronous write() immediately followed by close(), which could drop
// the message.
void Localizer::sendData( const yarp::sig::Vector &ms_particle)
{
    BufferedPort<Bottle> dataOutPort;

    // Resolve the port name; fall back to "output_port" if not configured.
    string outPortName=this->rf->find("outPortName").asString().c_str();
    if(this->rf->find("outPortName").isNull())
        outPortName=this->rf->check("outPortName",Value("output_port")).asString().c_str();

    dataOutPort.open(("/"+outPortName+"/data:o").c_str());

    Bottle &dataOut=dataOutPort.prepare();
    dataOut.clear();
    for(size_t i=0; i<6; i++)
    {
        dataOut.addDouble(ms_particle(i));
    }
    dataOutPort.write(true);     // strict write: do not drop the sample
    dataOutPort.waitForWrite();  // ensure delivery before the port goes away
    dataOutPort.close();
};
// Shuts down the module: closes the input port, frees the scale table and
// all OpenCV image buffers, closes every per-level output port, and releases
// the scale-space engine's resources.
virtual bool close() {
    portIn.close();
    delete scales;                     // NOTE(review): assumes scalar new; confirm it is not new[]
    cvReleaseImage(&infloat);
    cvReleaseImage(&ingray);
    cvReleaseImageHeader(&outfloat);   // header only: pixel data owned elsewhere
    cvReleaseImage(&outgray);
    for(int i = 0; i < levels; i++)    // one output port per pyramid level
        portOut[i].close();
    ss.FreeResources();
    return true;
};
// Function: mdlOutputs ======================================================= // Abstract: // In this function, you compute the outputs of your S-function // block. static void mdlOutputs(SimStruct *S, int_T tid) { BufferedPort<Vector> *port = static_cast<BufferedPort<Vector>*>(ssGetPWork(S)[0]); int_T blocking = ssGetIWork(S)[0]; int_T shouldReadTimestamp = ssGetIWork(S)[1]; int_T isAutoconnect = ssGetIWork(S)[2]; int timeStampPortIndex = 1; int connectionStatusPortIndex = 1; Vector *v = port->read(blocking); // Read from the port. Waits until data arrives. if (v) { if (shouldReadTimestamp) { connectionStatusPortIndex++; yarp::os::Stamp timestamp; port->getEnvelope(timestamp); real_T *pY1 = ssGetOutputPortRealSignal(S, timeStampPortIndex); pY1[0] = (real_T)(timestamp.getCount()); pY1[1] = (real_T)(timestamp.getTime()); } real_T *signal = ssGetOutputPortRealSignal(S, 0); int_T widthPort = ssGetOutputPortWidth(S, 0); for (int i = 0; i < std::min(widthPort, (int_T)v->size()); i++) { signal[i] = (*v)[i]; } if (!isAutoconnect) { unsigned char *statusPort = (unsigned char*)ssGetOutputPortSignal(S, connectionStatusPortIndex); statusPort[0] = 1; //somebody wrote in the port => the port is connected //TODO implement a sort of "timeout" paramter //At the current state this is equal to timeout = inf //Otherwise we can check the timeout and if nobody sent data in the last X secs //we set the port to zero again } } }
bool updateModule() { if (imageIn.getInputCount()>0) { cv::Mat orig = (IplImage *) imageIn.read(true)->getIplImage(); ImageOf<PixelMono> &outImg = imageOut.prepare(); cv::Mat blueOnly = blueFilter(orig); float sumx=0, sumy=0; float num_pixel = 0; for(int x=0; x<blueOnly.cols; x++) { for(int y=0; y<blueOnly.rows; y++) { int val = blueOnly.at<uchar>(y,x); if( val >= 50) { sumx += x; sumy += y; num_pixel++; } } } cv::Point p(sumx/num_pixel, sumy/num_pixel); //cout << cv::Mat(p) << endl; cv::Moments m = cv::moments(blueOnly, false); cv::Point p1(m.m10/m.m00, m.m01/m.m00); //cout << cv::Mat(p1) << endl; cv::circle(blueOnly, p, 5, cv::Scalar(0,0,0), -1); Bottle &target=targetPort.prepare(); target.clear(); if (p.x < 0 && p.y <0) { target.addDouble(0); target.addDouble(0); target.addInt(0); } else { target.addDouble(p.x); target.addDouble(p.y); target.addInt(1); } IplImage tmp = blueOnly; outImg.resize(tmp.width, tmp.height); cvCopyImage( &tmp, (IplImage *) outImg.getIplImage()); imageOut.write(); targetPort.write(); } return true; }
virtual bool threadInit() { // general part name=rf.check("name",Value("localizer")).asString().c_str(); setRate(rf.check("period",Value(20)).asInt()); string camera=rf.check("cameras",Value("left")).asString().c_str(); verbose=rf.check("verbose"); blend=rf.check("blend",Value(0.7)).asDouble(); matched_max_size=rf.check("filter",Value(1)).asInt(); locOutPort.open(("/"+name+"/loc:o").c_str()); locInPort.open(("/"+name+"/loc:i").c_str()); if(camera=="left"||camera=="both") { camCheck[LEFT]=true; imgInPort[LEFT].open(("/"+name+"/left/img:i").c_str()); imgOutPort[LEFT].open(("/"+name+"/left/img:o").c_str()); blobsPort[LEFT].open(("/"+name+"/left/blobs:i").c_str()); bkgSubPort[LEFT].open(("/"+name+"/left/bkg:i").c_str()); } if(camera=="right"||camera=="both") { camCheck[RIGHT]=true; imgInPort[RIGHT].open(("/"+name+"/right/img:i").c_str()); imgOutPort[RIGHT].open(("/"+name+"/right/img:o").c_str()); blobsPort[RIGHT].open(("/"+name+"/right/blobs:i").c_str()); bkgSubPort[RIGHT].open(("/"+name+"/right/bkg:i").c_str()); } track[LEFT]=track[RIGHT]=NULL; tbd_time[LEFT]=tbd_time[RIGHT]=0.0; tbd_list[LEFT].clear(); tbd_list[RIGHT].clear(); tbd_color_list[LEFT].clear(); tbd_color_list[RIGHT].clear(); blob_draw=false; current_draw_name="all"; return true; }
// One control cycle: pick up the latest reference (if any), read the plant
// encoder, step the generated Controller model, command the resulting
// velocity, and publish all signals for logging.
bool updateModule()
{
    LockGuard guard(mutex);   // serialize against other users of the model state
    // Non-blocking read: keep the previous reference when no new one arrived.
    if (Vector *in=dataIn.read(false))
        Controller_U_reference=(*in)[0];
    ienc->getEncoder(joint,&Controller_U_plant_output);
    // Step the model
    double t0=Time::now();
    // Step the model
    Controller_step(Controller_M, Controller_U_reference,
                    Controller_U_compensator_state, Controller_U_plant_output,
                    &Controller_Y_controller_output, &Controller_Y_controller_reference,
                    &Controller_Y_plant_reference, &Controller_Y_error_statistics,
                    &Controller_Y_enable_compensation);
    double t1=Time::now();    // t1-t0 = time spent inside the model step
    ivel->velocityMove(joint,Controller_Y_controller_output);
    // Publish reference, model outputs and diagnostics (7 values).
    Vector &out=dataOut.prepare();
    out.resize(7);
    out[0]=Controller_U_reference;
    out[1]=Controller_Y_plant_reference;
    out[2]=Controller_U_plant_output;
    out[3]=Controller_Y_controller_reference;
    out[4]=Controller_Y_controller_output;
    out[5]=Controller_Y_error_statistics;
    out[6]=Controller_Y_enable_compensation;
    dataOut.write();
    yInfo("time elapsed = %g [us]",(t1-t0)*1e6);
    return true;
}
// Interrupts the module's data ports (only when measurements were not given
// up-front), closes the RPC port and frees the localizer created in state 1.
bool interruptModule()
{
    // When meas_given==1 the input/rpcOut ports were presumably never used,
    // so there is nothing to interrupt -- TODO confirm against configure().
    if(meas_given!=1)
    {
        portIn.interrupt();
        rpcOut.interrupt();
    }
    rpcPort.close();
    if(state==1)
        delete loc5;   // loc5 is only allocated once state 1 is reached
    return true;
}
// Configures the undistortion module: reads the camera intrinsics and
// distortion parameters, builds the remap tables, and opens the image ports.
// Returns false on --help or if the map computation fails.
virtual bool open(Searchable& config)
{
    if (config.check("help","if present, display usage message")) {
        printf("Call with --name </portprefix> --file <configfile.ini>\n");
        return false;
    }

    // Defaults will correspond to a view field of 90 deg.
    // Input geometry.
    _input_lines = config.check("h", 480, "Input image lines").asInt();
    _input_cols  = config.check("w", 640, "Input image columns").asInt();

    // Intrinsics: focal lengths and principal point (pixel units).
    _fx = config.check("fx", 320, "Focal distance (on horizontal pixel size units)").asDouble();
    _fy = config.check("fy", 240, "Focal distance (on vertical pixel size units)").asDouble();
    _cx = config.check("cx", 320, "Image center (on horizontal pixel size units)").asDouble();
    _cy = config.check("cy", 240, "Image center (on vertical pixel size units)").asDouble();

    // Distortion: radial (k1,k2) and tangential (p1,p2).
    _k1 = config.check("k1", 0, "Radial distortion (first parameter)").asDouble();
    _k2 = config.check("k2", 0, "Radial distortion (second parameter)").asDouble();
    _p1 = config.check("p1", 0, "Tangential distortion (first parameter)").asDouble();
    _p2 = config.check("p2", 0, "Tangential distortion (second parameter)").asDouble();

    // Output geometry.
    _output_lines = config.check("oh", 480, "Output image lines").asInt();
    _output_cols  = config.check("ow", 640, "Output image columns").asInt();

    // Per-pixel remap tables (32-bit float, one channel each).
    _mapx = cvCreateImage(cvSize(_output_cols, _output_lines), IPL_DEPTH_32F, 1);
    _mapy = cvCreateImage(cvSize(_output_cols, _output_lines), IPL_DEPTH_32F, 1);

    const bool mapsOk = compute_sp_map(_input_lines, _input_cols,
                                       _output_lines, _output_cols,
                                       _fx, _fy, _cx, _cy,
                                       _k1, _k2, _p1, _p2,
                                       (float*)_mapx->imageData,
                                       (float*)_mapy->imageData);
    if (!mapsOk)
        return false;

    portImgIn.open(getName("in"));
    portImgOut.open(getName("out"));
    return true;
};
// Stops and destroys the velocity-control thread, then closes the device
// driver and input port. Fixes the "Thead" typo in the log message.
virtual bool close()
{
    fprintf(stderr, "Closing module [%s]\n", partName);
    vc->stop();     // stop the control thread first...
    vc->halt();     // ...then halt any residual motion
    delete vc;
    fprintf(stderr, "Thread [%s] stopped\n", partName);
    driver.close();
    input_port.close();
    fprintf(stderr, "Module [%s] closed\n", partName);
    return true;
}
void SamSendVoltage(void) { /* int iVolt=0; if(dVoltageNow<=12.4999)//require recharge iVolt=0; else if(dVoltageNow>=12.5000 && dVoltageNow<14.0000)//fine iVolt=1; else if (dVoltageNow>=14.0000)//fully charged iVolt=2; */ std::string s; // convert double b to string s std::ostringstream ss; ss << dVoltageNow; s = ss.str(); //if(iVoltage!=iVolt) //{ // send back a bottle with current voltage value Bottle& b2 = bRead.prepare();// prepare the bottle/port b2.clear(); //b2.addInt( iVolt );// indicates robot voltage b2.addString(s.c_str()); bRead.writeStrict(); //iVoltage=iVolt; //} std::cout << " voltage " << dVoltageNow << std::endl; }
// Closes the main ports and every per-bin hue/saturation/intensity port,
// deleting the heap-allocated port objects.
// Fixes a copy-paste bug: the last clear() targeted huePorts (already
// cleared) instead of intPorts, leaving intPorts full of dangling pointers.
virtual void threadRelease()
{
    inPort.close();
    outPort.close();
    for(int i=0; i<huePorts.size(); i++)
    {
        huePorts[i]->close();
        delete huePorts[i];
    }
    huePorts.clear();
    for(int i=0; i<satPorts.size(); i++)
    {
        satPorts[i]->close();
        delete satPorts[i];
    }
    satPorts.clear();
    for(int i=0; i<intPorts.size(); i++)
    {
        intPorts[i]->close();
        delete intPorts[i];
    }
    intPorts.clear();   // was huePorts.clear() -- dangling pointers remained
}
// Creates the buffered output port used for iKinGazeCtrl mono commands and
// stores its native handle on the calling Java object.
// Fixes: (1) the JNI UTF buffer from GetStringUTFChars was never released
// (leak per call); (2) setHandle was called with 'javaObj', which is not a
// parameter of this function -- the calling object 'obj' is used instead.
JNIEXPORT jboolean JNICALL Java_com_alecive_yarpdroid_yarpviewFragment_createBufferedMonoIPort
  (JNIEnv *env, jobject obj, jstring _applicationName)
{
    __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "I'm creating the mono port");
    if (putenv("YARP_CONF=/data/data/com.alecive.yarpdroid/files/yarpconf")) {
        __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "Putenv failed %d", errno);
    }

    BufferedPort<Bottle> *MonoPortO = new BufferedPort<Bottle>;

    // Copy the Java string, then release the JNI buffer.
    const char *appNameChars = env->GetStringUTFChars(_applicationName, 0);
    std::string portName = appNameChars;
    env->ReleaseStringUTFChars(_applicationName, appNameChars);
    portName = portName + "/iKinGazeCtrl/mono:o";

    if(!MonoPortO->open(portName.c_str())) {
        __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "Error in opening mono port!");
        delete MonoPortO;
        MonoPortO = 0;
        return (jboolean)false;
    }
    setHandle(env, obj, MonoPortO, "monoLeftHandle");
    return (jboolean)true;
}
void SamIter(void) { //while (bTTS.getPendingReads() > 0) talk = bTTS.read(false); //if(talk!=NULL) //{ // puts("got a msg in rcv"); // puts(talk->toString()); //} //while (bEmo.getPendingReads() > 0) emotion = bEmo.read(false); // if(emotion!=NULL) // { // puts("got a emotion"); // puts(emotion->toString()); // } //while (bTrack.getPendingReads() > 0) track = bTrack.read(false); /*if(track!=NULL) { puts("got a msg"); puts(track->toString()); }*/ }
bool updateModule() { if (!running) return false; if (tracking) { Bottle *coords = coordsInPort.read(true); Bottle coordsSend; coordsSend.addInt(coords->get(0).asInt()); coordsSend.addInt(coords->get(1).asInt()); coordsOutPort.write(coordsSend); } return true; }
// One teleoperation cycle: poll the haptic device's buttons and pose, drive
// the reaching/hand handlers, and manage force feedback (dead-band around
// +/-minForce, saturation at +/-maxForce, rescaled to maxFeedback).
bool updateModule()
{
    Vector buttons,pos,rpy;
    igeo->getButtons(buttons);
    igeo->getPosition(pos);
    igeo->getOrientation(rpy);

    bool b0=(buttons[0]!=0.0);   // button 0 -> reaching
    bool b1=(buttons[1]!=0.0);   // button 1 -> hand open/close

    reachingHandler(b0,pos,rpy);
    handHandler(b1);

    if (!b0 && !b1)
    {
        // No button pressed: cancel any residual force feedback once.
        if (norm(feedback)>0.0)
        {
            igeo->stopFeedback();
            feedback=0.0;
        }
    }
    else if (Bottle *bForce=forceFbPort.read(false))
    {
        // Map measured forces into device feedback:
        //  - zero inside the [-minForce, minForce] dead-band,
        //  - clamp to [-maxForce, maxForce],
        //  - rescale to the device's maxFeedback range.
        size_t sz=std::min(feedback.length(),(size_t)bForce->size());
        for (size_t i=0; i<sz; i++)
        {
            feedback[i]=bForce->get(i).asDouble();
            if ((feedback[i]>=-minForce) && (feedback[i]<=minForce))
                feedback[i]=0.0;
            else if (feedback[i]<=-maxForce)
                feedback[i]=-maxForce;
            else if (feedback[i]>=maxForce)
                feedback[i]=maxForce;
            feedback[i]*=maxFeedback[i]/maxForce;
        }
        igeo->setFeedback(feedback);
        yInfo("feedback = (%s)",feedback.toString(3,3).c_str());
    }

    yInfo("[reaching=%s; pose=%s;] [hand=%s; movement=%s;]",
          stateStr[s0].c_str(),onlyXYZ?"xyz":"full",
          stateStr[s1].c_str(),vels[0]>0.0?"closing":"opening");
    return true;
}
virtual void run() { // get inputs ImageOf<PixelFloat> *pSegIn=portImgIn->read(false); //if there is a seg image, use that if (pSegIn) { ImageOf<PixelRgb> *pImgIn=portRefIn->read(false); ImageOf<PixelRgb> &imgOut= portImgOut->prepare(); Mat * M = new Mat(pSegIn->height(), pSegIn->width(), CV_32F, (void *)pSegIn->getRawImage()); Mat X; vector<vector<Point> > contours; vector<Vec4i> hierarchy; M->convertTo(X,CV_8UC1); findContours(X, contours, hierarchy, CV_RETR_LIST, CV_CHAIN_APPROX_NONE); //pick a random segmented object int iBlob = -1; if (contours.size() > 0) { iBlob = rand() % contours.size(); } if (pImgIn) { imgOut.copy(*pImgIn); } else { imgOut.copy(*pSegIn); } //write the contour pixels of the object to port Matrix &object = portObjOut->prepare(); yarp::sig::Vector xs, ys; vector<Point> cts = contours[iBlob]; for (int i = 0; i < cts.size(); i++) { xs.push_back(cts.at(i).y); ys.push_back(cts.at(i).x); imgOut.pixel(cts.at(i).x,cts.at(i).y) = PixelRgb(255, 0, 0); } object.resize(xs.size(),2); object.setCol(0,xs); object.setCol(1,ys); portObjOut->write(); portImgOut->write(); delete M; } }
void run() { Vector *com_in0 = port_in0.read(true); Vector *com_in1 = port_in1.read(true); Vector *com_in2 = port_in2.read(true); Vector *com_in3 = port_in3.read(true); Vector *com_in4 = port_in4.read(true); Vector *com_in5 = port_in5.read(true); Vector *com_in6 = port_in6.read(true); Vector *com_in7 = port_in7.read(true); Vector *com_in8 = port_in8.read(true); /* //DEBUG ONLY printf ("0 CG: %s\n",com_out0.toString().c_str()); printf ("1 LL: %s\n",com_out1.toString().c_str()); printf ("2 LA: %s\n",com_out2.toString().c_str()); printf ("3 RL: %s\n",com_out3.toString().c_str()); printf ("4 RA: %s\n",com_out4.toString().c_str()); printf ("\n"); */ display_cog("COG", *com_in0,255,0,0); display_cog("LL_COM",*com_in1,0,255,0); display_cog("LA_COM",*com_in2,0,255,0); display_cog("RL_COM",*com_in3,0,255,0); display_cog("RA_COM",*com_in4,0,255,0); display_cog("HD_COM",*com_in5,0,255,0); display_cog("TO_COM",*com_in6,0,255,0); display_cog("UPP_COM",*com_in7,0,155,0); display_cog("LOW_COM",*com_in8,0,155,0); /* static double time_old_wd=Time::now(); double time_wd=Time::now(); fprintf(stdout,"time%+3.3f \n", time_wd-time_old_wd); time_old_wd=time_wd; */ }
virtual void threadRelease() { if (I) delete I; qdPort.interrupt(); voPort.interrupt(); qoPort.interrupt(); qdPort.close(); voPort.close(); qoPort.close(); }
// Opens the telecontroller's input/output ports and connects each one to
// its device counterpart (udp, quiet mode) when that device is active.
void YarpAdapter::initializePorts(bool kinectActivated, bool oculusActivated, bool gloveActivated)
{
    // Local inputs plus the single vibration output.
    inPort[OCULUS_IN].open("/telecontroller/oculus/in");
    inPort[KINECT_IN].open("/telecontroller/kinect/in");
    inPort[GLOVE_IN].open("/telecontroller/glove/in");
    outPort.open("/telecontroller/glove/vibration/out");

    if (oculusActivated)
    {
        Network::connect("/oculusController/oculus_raw/out",
                         "/telecontroller/oculus/in", "udp", true);
    }

    if (kinectActivated)
    {
        Network::connect("/kinectController/kinect_raw/out",
                         "/telecontroller/kinect/in", "udp", true);
    }

    if (gloveActivated)
    {
        // The glove both sends raw data and receives vibration commands.
        Network::connect("/hapticGloveController/glove_raw/out",
                         "/telecontroller/glove/in", "udp", true);
        Network::connect("/telecontroller/glove/vibration/out",
                         "/hapticGloveController/vibration/in", "udp", true);
    }
}
void run() { Bottle *pTarget=port.read(false); if (pTarget!=NULL) { if (pTarget->size()>2) { if (pTarget->get(2).asInt()!=0) { Vector px(2); px[0]=pTarget->get(0).asDouble(); px[1]=pTarget->get(1).asDouble(); // track the moving target within // the camera image igaze->lookAtMonoPixel(0,px); // 0: left image plane is used, 1: for right } } } }
// Configures the action-recognition data dumper: reads options, connects to
// the OPC, opens the dump and RPC ports, and resets the recognition state.
bool configure(ResourceFinder &rf)
{
    // Options with defaults.
    agentName = rf.check("agent-name",Value("partner")).asString();
    period    = rf.check("period",Value(0.05)).asDouble();

    // The OPC connection is mandatory.
    if (!opc.connect("OPC"))
    {
        yError()<<"OPC seems unavailabe!";
        return false;
    }

    dumpPort.open("/actionRecogDataDumper/data/dump:o");
    rpcPort.open("/actionRecogDataDumper/rpc");
    attach(rpcPort);

    // Initial recognition state: nothing tagged, gate closed.
    actionTag = "none";
    objectTag = "none";
    gate = 0;

    return true;
}
// Port callback: receives an image, updates the line position from the
// coordinate port when new fixation data is available, draws a horizontal
// line at y, and forwards the image.
// The image deserialization result is now checked: the original ignored it
// and would process/forward a garbage image on a malformed payload.
virtual bool read(ConnectionReader& connection)
{
    ImageOf<PixelRgb> img;
    if (!img.read(connection))     // malformed or interrupted payload
        return false;

    // Pick up a new fixation point if one arrived (non-blocking).
    Bottle *fixIn = coordPort.read(false);
    if (fixIn != NULL)
    {
        x=fixIn->get(0).asInt();
        y=fixIn->get(1).asInt();
        fprintf(stdout, "\nWill now draw line at x= %d and y=%d \n", x, y);
        draw = true;
    }

    // Keep drawing at the last known y until told otherwise.
    if (draw)
        draw::addSegment(img, PixelRgb(255,0,0), 0, y, img.width(), y);

    imageOutPort.write(img);
    return true;
}
bool updateModule() { Bottle *inputBottleTactileLeft = gtw_tactileLeftInputPort.read(); Bottle *inputBottleTactileRight = gtw_tactileRightInputPort.read(); Bottle &outputBottleTactileLeft = gtw_tactileLeftOutputPort.prepare(); Bottle &outputBottleTactileRight = gtw_tactileRightOutputPort.prepare(); outputBottleTactileLeft = *inputBottleTactileLeft; outputBottleTactileRight = *inputBottleTactileRight; gtw_tactileLeftOutputPort.write(); gtw_tactileRightOutputPort.write(); return true; }