int main(int argc, char *argv[]) { // Open the network Network yarp; BufferedPort<Sound> p; p.open("/receiver"); Network::connect("/sender", "/receiver"); // Get an audio write device. Property conf; conf.put("device","portaudio"); conf.put("samples", "4096"); conf.put("write", "1"); PolyDriver poly(conf); IAudioRender *put; // Check it can write the sound poly.view(put); if (put==NULL) { printf("cannot open interface\n"); return 1; } //Receive and render the sound Sound *s; while (true) { s = p.read(false); if (s!=NULL) put->renderSound(*s); } return 0; }
bool DevicePipe::updateService() { IFrameGrabberImage *imgSource; IAudioGrabberSound *sndSource; IAudioVisualGrabber *imgSndSource; IAudioVisualStream *sourceType; IAudioRender *sndSink; IFrameWriterImage *imgSink; IFrameWriterAudioVisual *imgSndSink; IAudioVisualStream *sinkType; source.view(imgSource); source.view(sndSource); source.view(imgSndSource); source.view(sourceType); sink.view(imgSink); sink.view(sndSink); sink.view(imgSndSink); sink.view(sinkType); if (sourceType!=NULL) { if (!(sourceType->hasAudio()&&sourceType->hasVideo())) { imgSndSource = NULL; } } if (sinkType!=NULL) { if (!(sinkType->hasAudio()&&sinkType->hasVideo())) { imgSndSink = NULL; } } if (imgSndSource!=NULL&&imgSndSink!=NULL) { ImageOf<PixelRgb> tmp; Sound tmpSound; imgSndSource->getAudioVisual(tmp,tmpSound); imgSndSink->putAudioVisual(tmp,tmpSound); printf("piped %dx%d image, %dx%d sound\n", tmp.width(), tmp.height(), tmpSound.getSamples(), tmpSound.getChannels()); } else if (imgSource!=NULL&&imgSink!=NULL) { ImageOf<PixelRgb> tmp; imgSource->getImage(tmp); imgSink->putImage(tmp); printf("piped %dx%d image\n", tmp.width(), tmp.height()); } else if (sndSource!=NULL&&sndSink!=NULL) { Sound tmp; sndSource->getSound(tmp); sndSink->renderSound(tmp); printf("piped %dx%d sound\n", tmp.getSamples(), tmp.getChannels()); } else { printf("Don't know how to pipe between these devices.\n"); printf("Piping is very limited at the moment.\n"); printf("You're probably better off writing some short custom code.\n"); return false; } return true; }
/// Port callback: render an incoming sound packet, first discarding any
/// backlog beyond `padding` packets so playback stays close to real time;
/// optionally saves the frame to disk when `saving` is set.
void onRead(Sound& sound) {
    // Skip ahead if the consumer has fallen behind the producer.
    int backlog = port.getPendingReads();
    while (backlog > padding) {
        backlog = port.getPendingReads();
        printf("Dropping sound packet -- %d packet(s) behind\n", backlog);
        port.read();
    }

    mutex.wait();
    // (An earlier variant zeroed every sample when muted instead of
    // simply skipping playback.)
    if (!muted && put != NULL) {
        put->renderSound(sound);
    }
    if (saving) {
        saveFrame(sound);
    }
    mutex.post();

    Time::yield();
}
/// Port callback: render an incoming sound packet, first discarding any
/// backlog beyond `padding` packets so playback stays close to real time;
/// optionally saves the frame to disk when `saving` is set.
void onRead(Sound& sound) {
#ifdef TEST
    // This block measures the time elapsed between two sound packets.
    // t2 is stamped at the end of the previous call, t1 at the start of
    // the current one, so the elapsed time is t1 - t2.
    // FIX: the original printed t2 - t1, i.e. the NEGATIVE of the
    // inter-packet interval.
    static double t1 = yarp::os::Time::now();
    static double t2 = yarp::os::Time::now();
    t1 = yarp::os::Time::now();
    printf("onread %f\n", t1 - t2);
    t2 = yarp::os::Time::now();
#endif

    // Skip ahead if the consumer has fallen behind the producer.
    int ct = port.getPendingReads();
    while (ct > padding) {
        ct = port.getPendingReads();
        printf("Dropping sound packet -- %d packet(s) behind\n", ct);
        port.read();
    }

    mutex.wait();
    // (An earlier variant zeroed every sample when muted instead of
    // simply skipping playback.)
    if (!muted) {
        if (put != NULL) {
            put->renderSound(sound);
        }
    }
    if (saving) {
        saveFrame(sound);
    }
    mutex.post();

    Time::yield();
}
int main(int argc, char *argv[]) { // Open the network Network yarp; BufferedPort<Sound> pReceiver; pReceiver.open("/receiver"); //Network::connect("/sender", "/receiver"); Port pSender; pSender.open("/sender"); // Get an audio write device. Property conf; conf.put("device","portaudio"); conf.put("samples", "4096"); conf.put("write", "1"); PolyDriver polyRender(conf); if(!polyRender.isValid()) { printf("cannot open interface \n"); return 1; } IAudioRender *put; // Get a portaudio read device. //Property conf; conf.put("device","portaudio"); conf.put("read", ""); //conf.put("samples", 4096); //conf.put("rate", 16000); PolyDriver polyGrabber(conf); if(!polyRender.isValid()) { printf("cannot open interface \n"); return 1; } IAudioGrabberSound *get; /* // Make sure we can write sound polyRender.view(put); if (put==NULL) { printf("cannot open interface\n"); return 1; } //Receive and render Sound *s; while (true) { s = p.read(false); if (s!=NULL) put->renderSound(*s); } return 0; */ /* // Make sure we can read sound polyGrabber.view(get); if (get==NULL) { printf("cannot open interface\n"); return 1; } //Grab and send Sound s; while (true) { get->getSound(s); p.write(s); } return 0; */ // echo from microphone to headphones, superimposing an annoying tone double vv=0; while(true){ Sound s; get->getSound(s); for (int i=0; i<s.getSamples(); i++) { double now = Time::now(); static double first = now; now -= first; if ((long int) (now*2) % 2 == 0) { vv += 0.08; } else { vv += 0.04; } double dv = 500*sin(vv); for (int j=0; j<s.getChannels(); j++) { int v =s.get(i,j); s.set((int)(v+dv+0.5),i,j); } } put->renderSound(s); } Network::fini(); }