bool DevicePipe::updateService() { IFrameGrabberImage *imgSource; IAudioGrabberSound *sndSource; IAudioVisualGrabber *imgSndSource; IAudioVisualStream *sourceType; IAudioRender *sndSink; IFrameWriterImage *imgSink; IFrameWriterAudioVisual *imgSndSink; IAudioVisualStream *sinkType; source.view(imgSource); source.view(sndSource); source.view(imgSndSource); source.view(sourceType); sink.view(imgSink); sink.view(sndSink); sink.view(imgSndSink); sink.view(sinkType); if (sourceType!=NULL) { if (!(sourceType->hasAudio()&&sourceType->hasVideo())) { imgSndSource = NULL; } } if (sinkType!=NULL) { if (!(sinkType->hasAudio()&&sinkType->hasVideo())) { imgSndSink = NULL; } } if (imgSndSource!=NULL&&imgSndSink!=NULL) { ImageOf<PixelRgb> tmp; Sound tmpSound; imgSndSource->getAudioVisual(tmp,tmpSound); imgSndSink->putAudioVisual(tmp,tmpSound); printf("piped %dx%d image, %dx%d sound\n", tmp.width(), tmp.height(), tmpSound.getSamples(), tmpSound.getChannels()); } else if (imgSource!=NULL&&imgSink!=NULL) { ImageOf<PixelRgb> tmp; imgSource->getImage(tmp); imgSink->putImage(tmp); printf("piped %dx%d image\n", tmp.width(), tmp.height()); } else if (sndSource!=NULL&&sndSink!=NULL) { Sound tmp; sndSource->getSound(tmp); sndSink->renderSound(tmp); printf("piped %dx%d sound\n", tmp.getSamples(), tmp.getChannels()); } else { printf("Don't know how to pipe between these devices.\n"); printf("Piping is very limited at the moment.\n"); printf("You're probably better off writing some short custom code.\n"); return false; } return true; }
/**
 * Open the server wrapper: instantiate the wrapped subdevice, probe its
 * audio/video capabilities, open the network port(s), start the
 * streaming thread, and register the responder's usage messages.
 *
 * Recognized configuration keys (read via config.check below):
 *   subdevice, no_drop, stamp, name, shared-ports, name2,
 *   single_threaded, framerate.
 *
 * @param config searchable configuration for this wrapper and its subdevice
 * @return true on success; false if already open, the subdevice cannot be
 *         created, or the subdevice exposes no grabber-like interface
 */
bool ServerFrameGrabber::open(yarp::os::Searchable& config) {
    // Guard against double-open: this object owns a single port/thread pair.
    if (active) {
        printf("Did you just try to open the same ServerFrameGrabber twice?\n");
        return false;
    }
    // for AV, control whether output goes on a single port or multiple
    bool separatePorts = false;
    // Command/reply traffic arriving on the data port is handled by this
    // object (it acts as the port reader / responder).
    p.setReader(*this);
    yarp::os::Value *name;
    if (config.check("subdevice",name,"name (or nested configuration) of device to wrap")) {
        if (name->isString()) {
            // maybe user isn't doing nested configuration
            // NOTE(review): this local Property shadows the member port 'p'
            // for the rest of this scope — confirm intentional.
            yarp::os::Property p;
            p.setMonitor(config.getMonitor(),
                         name->toString().c_str()); // pass on any monitoring
            // Forward the whole config to the subdevice, swapping the
            // "subdevice" key for a "device" key.
            p.fromString(config.toString());
            p.put("device",name->toString());
            p.unput("subdevice");
            poly.open(p);
        } else {
            // Nested configuration: the subdevice group carries its own
            // option list (first element dropped as the group label).
            Bottle subdevice = config.findGroup("subdevice").tail();
            poly.open(subdevice);
        }
        if (!poly.isValid()) {
            //printf("cannot make <%s>\n", name->toString().c_str());
            return false;
        }
    } else {
        printf("\"--subdevice <name>\" not set for server_framegrabber\n");
        return false;
    }
    if (poly.isValid()) {
        // Ask the subdevice what it can stream; if it does not implement
        // IAudioVisualStream we optimistically assume everything and let
        // the individual view() calls sort it out.
        IAudioVisualStream *str;
        poly.view(str);
        bool a = true;     // has audio
        bool v = true;     // has (rgb) video
        bool vraw = true;  // has raw (mono) video
        if (str!=NULL) {
            a = str->hasAudio();
            v = str->hasVideo();
            vraw = str->hasRawVideo();
        }
        // Acquire only the interfaces the device claims to support.
        if (v) {
            poly.view(fgImage);
        }
        if (vraw) {
            poly.view(fgImageRaw);
        }
        if (a) {
            poly.view(fgSound);
        }
        if (a&&v) {
            poly.view(fgAv);
        }
        poly.view(fgCtrl);
        poly.view(fgTimed);
    }
    // Drop frames by default; --no_drop switches to a strict send policy.
    canDrop = !config.check("no_drop","if present, use strict policy for sending data");
    addStamp = config.check("stamp","if present, add timestamps to data");
    p.promiseType(Type::byName("yarp/image")); // TODO: reflect audio options
    p.setWriteOnly();
    p.open(config.check("name",Value("/grabber"),
                        "name of port to send data on").asString());
    /*
    double framerate=0;
    if (config.check("framerate", name,
                     "maximum rate in Hz to read from subdevice")) {
        framerate=name->asDouble();
    }
    */
    // Audio-visual subdevice: unless the user asked for shared ports,
    // open a second port so images and sound travel separately.
    if (fgAv&&
        !config.check("shared-ports",
                      "If present, send audio and images on same port")) {
        separatePorts = true;
        yAssert(p2==NULL);
        p2 = new Port;
        yAssert(p2!=NULL);
        p2->open(config.check("name2",Value("/grabber2"),
                              "Name of second port to send data on, when audio and images sent separately").asString());
    }
    // Attach the writer matching the richest interface we obtained,
    // in priority order: AV, rgb image, raw image, sound.
    if (fgAv!=NULL) {
        if (separatePorts) {
            yAssert(p2!=NULL);
            thread.attach(new DataWriter2<yarp::sig::ImageOf<yarp::sig::PixelRgb>, yarp::sig::Sound>(p,*p2,*this,canDrop,addStamp));
        } else {
            thread.attach(new DataWriter<ImageRgbSound>(p,*this,canDrop,
                                                        addStamp));
        }
    } else if (fgImage!=NULL) {
        thread.attach(new DataWriter<yarp::sig::ImageOf<yarp::sig::PixelRgb> >(p,*this,canDrop,addStamp,fgTimed));
    } else if (fgImageRaw!=NULL) {
        thread.attach(new DataWriter<yarp::sig::ImageOf<yarp::sig::PixelMono> >(p,*this,canDrop,addStamp,fgTimed));
    } else if (fgSound!=NULL) {
        thread.attach(new DataWriter<yarp::sig::Sound>(p,*this,canDrop));
    } else {
        // name is guaranteed set here: the subdevice branch above either
        // assigned it or returned false.
        printf("subdevice <%s> doesn't look like a framegrabber\n",
               name->toString().c_str());
        return false;
    }
    singleThreaded =
        config.check("single_threaded",
                     "if present, operate in single threaded mode")!=0;
    // Start streaming; framerate 0 means "as fast as the subdevice allows".
    thread.open(config.check("framerate",Value("0"),
                             "maximum rate in Hz to read from subdevice").asDouble(),
                singleThreaded);
    active = true;
    /*
    #define VOCAB_BRIGHTNESS VOCAB3('b','r','i')
    #define VOCAB_EXPOSURE VOCAB4('e','x','p','o')
    #define VOCAB_SHARPNESS VOCAB4('s','h','a','r')
    #define VOCAB_WHITE VOCAB4('w','h','i','t')
    #define VOCAB_HUE VOCAB3('h','u','e')
    #define VOCAB_SATURATION VOCAB4('s','a','t','u')
    #define VOCAB_GAMMA VOCAB4('g','a','m','m')
    #define VOCAB_SHUTTER VOCAB4('s','h','u','t')
    #define VOCAB_GAIN VOCAB4('g','a','i','n')
    #define VOCAB_IRIS VOCAB4('i','r','i','s')
    */
    // Register the command vocabulary for the responder's help output.
    DeviceResponder::makeUsage();
    addUsage("[set] [bri] $fBrightness", "set brightness");
    addUsage("[set] [expo] $fExposure", "set exposure");
    addUsage("[set] [shar] $fSharpness", "set sharpness");
    addUsage("[set] [whit] $fBlue $fRed", "set white balance");
    addUsage("[set] [hue] $fHue", "set hue");
    addUsage("[set] [satu] $fSaturation", "set saturation");
    addUsage("[set] [gamm] $fGamma", "set gamma");
    addUsage("[set] [shut] $fShutter", "set shutter");
    addUsage("[set] [gain] $fGain", "set gain");
    addUsage("[set] [iris] $fIris", "set iris");
    addUsage("[get] [bri]", "get brightness");
    addUsage("[get] [expo]", "get exposure");
    addUsage("[get] [shar]", "get sharpness");
    addUsage("[get] [whit]", "get white balance");
    addUsage("[get] [hue]", "get hue");
    addUsage("[get] [satu]", "get saturation");
    addUsage("[get] [gamm]", "get gamma");
    addUsage("[get] [shut]", "get shutter");
    addUsage("[get] [gain]", "get gain");
    addUsage("[get] [iris]", "get iris");
    addUsage("[get] [w]", "get width of image");
    addUsage("[get] [h]", "get height of image");
    return true;
}