//------------------------------------------------------------------------- // MAIN //------------------------------------------------------------------------- int main(int argc, char *argv[]) { char appname[256]; char rcfilename[256]; char* home = getenv("HOME"); snprintf(appname, 255, "%s", basename(argv[0])); snprintf(rcfilename, 255, "%s/.%src", home, appname); GUI* interface = new GTKUI (appname, &argc, &argv); FUI* finterface = new FUI(); DSP.buildUserInterface(interface); DSP.buildUserInterface(finterface); DSP.buildUserInterface(new PrintUI()); #ifdef HTTPCTRL httpdUI* httpdinterface = new httpdUI(appname, argc, argv); DSP.buildUserInterface(httpdinterface); std::cout << "HTTPD is on" << std::endl; #endif #ifdef OSCCTRL GUI* oscinterface = new OSCUI(appname, argc, argv); DSP.buildUserInterface(oscinterface); #endif jackaudio audio; audio.init(appname, &DSP); finterface->recallState(rcfilename); audio.start(); #ifdef HTTPCTRL httpdinterface->run(); #endif #ifdef OSCCTRL oscinterface->run(); #endif interface->run(); audio.stop(); finterface->saveState(rcfilename); // desallocation delete interface; delete finterface; #ifdef HTTPCTRL delete httpdinterface; #endif #ifdef OSCCTRL delete oscinterface; #endif return 0; }
/****************************************************************************** ******************************************************************************* MAIN PLAY THREAD ******************************************************************************* *******************************************************************************/ int main( int argc, char *argv[] ) { char name[256], dst[258]; char rcfilename[256]; char* home = getenv("HOME"); snprintf(name, 255, "%s", basename(argv[0])); snprintf(dst, 257, "/%s/", name); snprintf(rcfilename, 255, "%s/.%src", home, name); QApplication myApp(argc, argv); QTGUI* interface = new QTGUI(); FUI* finterface = new FUI(); DSP.buildUserInterface(interface); DSP.buildUserInterface(finterface); oscdsp osca (dst, argc, argv); OSCUI* oscinterface = new OSCUI(name, argc, argv, &osca); DSP.buildUserInterface(oscinterface); snprintf(dst, 257, "/%s/", oscinterface->getRootName()); osca.setDest (dst); osca.init (name, &DSP); finterface->recallState(rcfilename); osca.start (); oscinterface->run(); interface->run(); myApp.setStyleSheet(interface->styleSheet()); myApp.exec(); interface->stop(); osca.stop(); finterface->saveState(rcfilename); // desallocation delete interface; delete finterface; delete oscinterface; return 0; }
//------------------------------------------------------------------------- // MAIN //------------------------------------------------------------------------- int main(int argc, char *argv[] ) { char appname[256]; char rcfilename[256]; char* home = getenv("HOME"); snprintf(appname, 255, "%s", basename(argv[0])); snprintf(rcfilename, 255, "%s/.%src", home, appname); CMDUI* interface = new CMDUI(argc, argv); FUI* finterface = new FUI(); DSP.buildUserInterface(interface); DSP.buildUserInterface(finterface); #ifdef OSCCTRL GUI* oscinterface = new OSCUI(appname, argc, argv); DSP.buildUserInterface(oscinterface); #endif #ifdef HTTPCTRL httpdUI* httpdinterface = new httpdUI(appname, argc, argv); DSP.buildUserInterface(httpdinterface); #endif jackaudio audio; audio.init(appname, &DSP); interface->process_command(); audio.start(); #ifdef HTTPCTRL httpdinterface->run(); #endif #ifdef OSCCTRL oscinterface->run(); #endif interface->run(); audio.stop(); finterface->saveState(rcfilename); return 0; }
/* * init(samplingRate, bufferFrames) * Initializes the Audio engine and the DSP code * with samplingRate and bufferFrames. * This method also looks for the [style:poly] * metadata in the Faust code and initializes a * polyphonic object or not based on that. init * should be called before start. */ bool init(int samplingRate, int bufferSize) { DSP.init(samplingRate); inChanNumb = DSP.getNumInputs(); outChanNumb = DSP.getNumOutputs(); // configuring the UI DSP.buildUserInterface(&mapUI); DSP.buildUserInterface(&json); jsonString = json.JSON(); if (jsonString.find("keyboard") != std::string::npos || jsonString.find("poly") != std::string::npos){ polyMax = 4; DSPpoly = new mydsp_poly(polyMax, true); DSPpoly->init(samplingRate); } else { polyMax = 0; } return (fAudioDevice.Open(((polyMax > 0) ? DSPpoly : &DSP), inChanNumb, outChanNumb, bufferSize, samplingRate) == 0); }
/* * init(samplingRate, bufferFrames) * Initializes the Audio engine and the DSP code * with samplingRate and bufferFrames. * This method also looks for the [style:poly] * metadata in the Faust code and initializes a * polyphonic object or not based on that. init * should be called before start. */ void init(int samplingRate, int bufferFrames) { // configuring global variables SR = samplingRate; bufferSize = bufferFrames; vecSamps = bufferSize; DSP.init(SR); inChanNumb = DSP.getNumInputs(); outChanNumb = DSP.getNumOutputs(); // configuring the UI DSP.buildUserInterface(&mapUI); DSP.buildUserInterface(&json); jsonString = json.JSON(); if(jsonString.find("keyboard") != std::string::npos || jsonString.find("poly") != std::string::npos){ polyMax = 4; polyCoef = 1.0f / polyMax; DSPpoly = new mydsp_poly(SR, bufferSize, polyMax); } else{ polyMax = 0; } // allocating memory for output channel bufferout = new float *[outChanNumb]; for (int i = 0; i < outChanNumb; i++) { bufferout[i] = new float[vecSamps]; } // allocating memory for input channel if (inChanNumb >= 1) { bufferin = new float *[inChanNumb]; for (int i = 0; i < inChanNumb; i++) { bufferin[i] = new float[vecSamps]; } } }
int main(int argc, char *argv[]) { char appname[256]; char rcfilename[256]; char* home = getenv("HOME"); snprintf(appname, 255, "%s", basename(argv[0])); snprintf(rcfilename, 255, "%s/.%src", home, appname); GUI* interface = new QTGUI(argc, argv); FUI* finterface = new FUI(); DSP.buildUserInterface(interface); DSP.buildUserInterface(finterface); #ifdef OSCCTRL GUI* oscinterface = new OSCUI(appname, argc, argv); DSP.buildUserInterface(oscinterface); #endif long srate = (long)lopt(argv, "--frequency", 44100); int fpb = lopt(argv, "--buffer", 128); portaudio audio (srate, fpb); audio.init(appname, &DSP); finterface->recallState(rcfilename); audio.start(); #ifdef OSCCTRL oscinterface->run(); #endif interface->run(); audio.stop(); finterface->saveState(rcfilename); return 0; }
/* * processDSP(threadID) * Compute the DSP frames of the Faust object. */ void *processDSP(void *threadID) { while (on) { // getting input signal if (inChanNumb >= 1) android_AudioIn(p, bufferin[0], vecSamps); // computing... if (polyMax == 0) DSP.compute(vecSamps, bufferin, bufferout); else DSPpoly->compute(vecSamps, bufferin, bufferout); // sending output signal android_AudioOut(p, bufferout, vecSamps); } }
int main(int argc, char *argv[] ) { float fnbsamples; CMDUI* interface = new CMDUI(argc, argv); DSP.buildUserInterface(interface); interface->addOption("-n", &fnbsamples, 16, 0.0, 100000000.0); if (DSP.getNumInputs() > 0) { fprintf(stderr, "no inputs allowed\n"); exit(1); } // init signal processor and the user interface values DSP.init(44100); // modify the UI values according to the command line options interface->process_command(); int nouts = DSP.getNumOutputs(); channels chan (kFrames, nouts); int nbsamples = int(fnbsamples); while (nbsamples > kFrames) { DSP.compute(kFrames, 0, chan.buffers()); for (int i = 0; i < kFrames; i++) { for (int c = 0; c < nouts; c++) { printf("%8f\t", chan.buffers()[c][i]); } cout << endl; } nbsamples -= kFrames; } DSP.compute(nbsamples, 0, chan.buffers()); for (int i = 0; i < nbsamples; i++) { for (int c = 0; c < nouts; c++) { printf("%8f\t", chan.buffers()[c][i]); } cout << endl; } return 0; }
int main(int argc, char *argv[]) { SNDFILE* in_sf; SNDFILE* out_sf; SF_INFO in_info; SF_INFO out_info; unsigned int nAppend = 0; // number of frames to append beyond input file if (argc < 3) { fprintf(stderr,"*** USAGE: %s input_soundfile output_soundfile\n",argv[0]); exit(1); } nAppend = loptrm(&argc, argv, "--continue", "-c", 0); CMDUI* interface = new CMDUI(argc, argv); DSP.buildUserInterface(interface); interface->process_command(); // open input file in_info.format = 0; in_sf = sf_open(interface->input_file(), SFM_READ, &in_info); if (in_sf == NULL) { fprintf(stderr,"*** Input file not found.\n"); sf_perror(in_sf); exit(1); } // open output file out_info = in_info; out_info.format = in_info.format; out_info.channels = DSP.getNumOutputs(); out_sf = sf_open(interface->output_file(), SFM_WRITE, &out_info); if (out_sf == NULL) { fprintf(stderr,"*** Cannot write output file.\n"); sf_perror(out_sf); exit(1); } // create separator and interleaver Separator sep(kFrames, in_info.channels, DSP.getNumInputs()); Interleaver ilv(kFrames, DSP.getNumOutputs()); // init signal processor DSP.init(in_info.samplerate); //DSP.buildUserInterface(interface); interface->process_init(); // process all samples int nbf; do { nbf = READ_SAMPLE(in_sf, sep.input(), kFrames); sep.separate(); DSP.compute(nbf, sep.outputs(), ilv.inputs()); ilv.interleave(); sf_writef_float(out_sf, ilv.output(), nbf); //sf_write_raw(out_sf, ilv.output(), nbf); } while (nbf == kFrames); sf_close(in_sf); // compute tail, if any if (nAppend>0) { FAUSTFLOAT *input = (FAUSTFLOAT*) calloc(nAppend * DSP.getNumInputs(), sizeof(FAUSTFLOAT)); FAUSTFLOAT *inputs[1] = { input }; Interleaver ailv(nAppend, DSP.getNumOutputs()); DSP.compute(nAppend, inputs, ailv.inputs()); ailv.interleave(); sf_writef_float(out_sf, ailv.output(), nAppend); } sf_close(out_sf); }
// Constructor: sizes the JSON UI description from the DSP's channel counts,
// starts with no polyphonic engine (DSPpoly == 0) and audio switched off
// (presumably created later by init()/start() — confirm against the class).
dsp_faust() : json(DSP.getNumInputs(), DSP.getNumOutputs()),DSPpoly(0),on(false) {}
//------------------------------------------------------------------------- // MAIN //------------------------------------------------------------------------- int main(int argc, char *argv[]) { char appname[256]; char rcfilename[256]; char* home = getenv("HOME"); int celt = lopt(argv, "--celt", -1); const char* master_ip = lopts(argv, "--a", DEFAULT_MULTICAST_IP); int master_port = lopt(argv, "--p", DEFAULT_PORT); int mtu = lopt(argv, "--m", DEFAULT_MTU); int latency = lopt(argv, "--l", 2); snprintf(appname, 255, "%s", basename(argv[0])); snprintf(rcfilename, 255, "%s/.%src", home, appname); CMDUI* interface = new CMDUI(argc, argv); FUI* finterface = new FUI(); DSP.buildUserInterface(interface); DSP.buildUserInterface(finterface); #ifdef OSCCTRL GUI* oscinterface = new OSCUI(appname, argc, argv); DSP.buildUserInterface(oscinterface); #endif #ifdef HTTPCTRL httpdUI* httpdinterface = new httpdUI(appname, argc, argv); DSP.buildUserInterface(httpdinterface); #endif netjackaudio audio(celt, master_ip, master_port, mtu, latency); if (!audio.init(appname, &DSP)) { return 0; } finterface->recallState(rcfilename); if (!audio.start()) { return 0; } #ifdef HTTPCTRL httpdinterface->run(); #endif #ifdef OSCCTRL oscinterface->run(); #endif interface->run(); audio.stop(); finterface->saveState(rcfilename); // desallocation delete interface; delete finterface; #ifdef HTTPCTRL delete httpdinterface; #endif #ifdef OSCCTRL delete oscinterface; #endif return 0; }
//************************************************************** // Native Faust API //************************************************************** #include <android/log.h> #include "dsp_faust.h" #include <stdio.h> #include <string.h> #define FAUSTFLOAT float using namespace std; OPENSL_STREAM *p; // the audio engine mydsp DSP; // the monophonic Faust object mydsp_poly *DSPpoly; // the polyphonic Faust object MapUI mapUI; // the UI description pthread_t audioThread; // native thread for audio JSONUI json(DSP.getNumInputs(), DSP.getNumOutputs()); string jsonString; // Global variables int SR, bufferSize, vecSamps, polyMax, inChanNumb, outChanNumb, on; float **bufferout, **bufferin, polyCoef; /* * init(samplingRate, bufferFrames) * Initializes the Audio engine and the DSP code * with samplingRate and bufferFrames. * This method also looks for the [style:poly]
int main(int argc, char *argv[]) { char name[256]; char rcfilename[256]; char* home = getenv("HOME"); snprintf(name, 255, "%s", basename(argv[0])); snprintf(rcfilename, 255, "%s/.%src", home, basename(argv[0])); long srate = (long)lopt(argv, "--frequency", -1); int fpb = lopt(argv, "--buffer", 512); QApplication myApp(argc, argv); QTGUI* interface = new QTGUI(); DSP.buildUserInterface(interface); FUI* finterface = new FUI(); DSP.buildUserInterface(finterface); #ifdef HTTPCTRL httpdUI* httpdinterface = new httpdUI(name, argc, argv); DSP.buildUserInterface(httpdinterface); #endif #ifdef OSCCTRL GUI* oscinterface = new OSCUI(name, argc, argv); DSP.buildUserInterface(oscinterface); #endif coreaudio audio(srate, fpb); audio.init(name, &DSP); finterface->recallState(rcfilename); audio.start(); #ifdef HTTPCTRL httpdinterface->run(); #ifdef QRCODECTRL interface->displayQRCode( httpdinterface->getTCPPort() ); #endif #endif #ifdef OSCCTRL oscinterface->run(); #endif interface->run(); myApp.setStyleSheet(STYLESHEET); myApp.exec(); interface->stop(); audio.stop(); finterface->saveState(rcfilename); // desallocation delete interface; delete finterface; #ifdef HTTPCTRL delete httpdinterface; #endif #ifdef OSCCTRL delete oscinterface; #endif return 0; }