/* Print every available PortMidi device for the given direction.
 * output != 0 lists output devices, 0 lists input devices.
 * Each device is printed as "id: name (module)". */
static void portMidi_listDevices(CSOUND *csound, int output)
{
    int i, n = listDevices(csound, NULL, output);
    CS_MIDIDEVICE *devs;

    if (n <= 0)                     /* nothing to list; skip the allocation */
      return;
    devs = (CS_MIDIDEVICE *) csound->Malloc(csound, n * sizeof(CS_MIDIDEVICE));
    if (devs == NULL)               /* defensive: fail quietly on OOM */
      return;
    listDevices(csound, devs, output);
    for (i = 0; i < n; i++)
      csound->Message(csound, "%s: %s (%s)\n",
                      devs[i].device_id, devs[i].device_name,
                      devs[i].midi_module);
    csound->Free(csound, devs);
}
int set_bootloader_to_winusb(int verbose) { int opt_silent = verbose; struct wdi_device_info *device, list; char* inf_name = NULL; int r; struct wdi_options_prepare_driver driver_opts = {0}; driver_opts.driver_type = WDI_WINUSB; driver_opts.vendor_name = NULL; driver_opts.device_guid = NULL; driver_opts.disable_cat = FALSE; driver_opts.disable_signing = FALSE; driver_opts.use_wcid_driver = FALSE; list = listDevices(verbose); if (&list != NULL) { oprintf("set_bootloader_to_winusb - valid list\n"); for (device = &list; device != NULL; device = device->next) { if ((device->vid == BOOTLOADER_VID) && (device->pid == BOOTLOADER_PID)) { oprintf("Installing using inf name: %s\n", INF_NAME); if ((wdi_prepare_driver(device, DEFAULT_DIR, INF_NAME, &driver_opts) == WDI_SUCCESS) & (device->driver != "WINUSB")) { oprintf("Prepared driver for install\n"); r=wdi_install_driver(device, DEFAULT_DIR, INF_NAME, NULL); oprintf("got return code: %d=%s \n", r,wdi_strerror(r)); } } } } return r; }
//------------------------------------------------------------------------------ ofxPortaudioSoundStream::ofxPortaudioSoundStream(){ OFSAptr = NULL; audio = NULL; nInputChannels = 0; nOutputChannels = 0; deviceID = 0; deviceName = ""; deviceID = -1; soundOutputPtr = NULL; soundInputPtr = NULL; tickCount= 0; streamInitialised = false; if(!streamInitialised){ PaError err; err = Pa_Initialize(); if( err != paNoError ){ ofLog(OF_LOG_ERROR,"PortAudio error: %s\n",Pa_GetErrorText( err )); }else{ streamInitialised = true; cout << "PortAudio launched in OF" << endl; } } listDevices(); printDeviceListInformation(); printf("sound stream setup finished\n"); err = Pa_Initialize(); }
// Enumerate the attached capture devices and open the first one found.
// Returns the result of the enumeration itself (not of openCamera()).
bool ImageReader::initReader()
{
    const bool enumerated = listDevices(_devices);
    if (!_devices.empty())
        openCamera(_devices.at(0));
    return enumerated;
}
/**
 * Open the serial device at the given enumeration index.
 *
 * @param device  zero-based index into the map returned by listDevices().
 * @return true when the index was valid and the string overload of
 *         setup() succeeded on that device, false otherwise.
 */
bool ttSerialDevice::setup( int device )
{
    map< string, string > devices = listDevices();
    // Cast size() to int before comparing: the old `device < devices.size()`
    // mixed signed and unsigned operands (safe only because device >= 0 is
    // checked first, but it triggers compiler warnings).
    if ( device >= 0 && device < (int) devices.size() )
    {
        // Walk to the device-th entry; std::map iterators are not
        // random-access, so advance one step at a time.
        map< string, string >::iterator it = devices.begin();
        for ( int i = 0; i < device; ++i )
            ++it;
        // it->second holds the device path/name used by the string overload.
        return setup( it->second );
    }
    return false;
}
void Arduino::setup() { listDevices(); //connected = ofSerial::setup(); connected = ofSerial::setup("COM8", 38400); buffer = ""; lastMillis = 0; }
//--------------------------------------------------------------------------- bool ofxKinectContext::init() { if(freenect_init(&kinectContext, NULL) < 0) { ofLog(OF_LOG_ERROR, "ofxKinect: freenect_init failed"); bInited = false; return false; } bInited = true; ofLog(OF_LOG_VERBOSE, "ofxKinect: Context inited"); buildDeviceList(); listDevices(true); return true; }
/**
 * \class MediaMonitorWindows
 *
 * I am assuming, for now, that everything on Windows uses drive letters
 * (e.g. C:, D:). That is probably wrong, though. (other APIs?)
 */
MediaMonitorWindows::MediaMonitorWindows(QObject* par,
                                         unsigned long interval,
                                         bool allowEject)
                   : MediaMonitor(par, interval, allowEject)
{
    // GetLogicalDriveStrings fills the buffer with a packed list of
    // NUL-terminated drive root strings, ended by an empty string.
    char strDrives[128];
    if (!::GetLogicalDriveStrings(sizeof(strDrives), strDrives))
        return;

    // Walk the packed list entry by entry.
    for (char *driveName = strDrives; *driveName;
         driveName += strlen(driveName) + 1)
    {
        uint type = ::GetDriveType(driveName);
        // Only removable drives and CD/DVD drives are monitored.
        if (type != DRIVE_REMOVABLE && type != DRIVE_CDROM)
            continue;

        MythMediaDevice *media = NULL;

        if (type == DRIVE_CDROM)
            media = MythCDROM::get(this, driveName, false, allowEject);
        else
            media = MythHDD::Get(this, driveName, false, allowEject);

        if (!media)
        {
            // NOTE(review): returning here abandons the remaining drives and
            // skips the final device-list log — confirm a single creation
            // failure is really meant to abort the whole scan.
            VERBOSE(VB_IMPORTANT, "Error. Couldn't create MythMediaDevice.");
            return;
        }

        // We store the volume name to improve
        // user activities like ChooseAndEjectMedia().
        char volumeName[MAX_PATH];
        if (GetVolumeInformation(driveName, volumeName, MAX_PATH,
                                 NULL, NULL, NULL, NULL, NULL))
        {
            media->setVolumeID(volumeName);
        }

        AddDevice(media);
    }

    VERBOSE(VB_MEDIA, "Initial device list: " + listDevices());
}
// Model of UDisks block devices exposed to QML.  Registers role names,
// connects to the UDisks daemon over the system D-Bus and populates the
// model with the devices already present.
devicemodel::devicemodel(QObject* parent)
    : QAbstractListModel(parent)
{
    // Role names visible from QML delegates.
    QHash<int, QByteArray> roles;
    roles[devicemodel::DeviceName]  = "deviceName";
    roles[devicemodel::DeviceLabel] = "deviceLabel";
    setRoleNames(roles);

    // D-Bus proxy for the UDisks daemon on the system bus, owned by this.
    m_deviceInterface = new org::freedesktop::UDisks(
        QString("org.freedesktop.UDisks"),
        QString("/org/freedesktop/UDisks"),
        QDBusConnection::systemBus(),
        this);

    // Track hot-plug / removal events from the daemon.
    connect(m_deviceInterface, SIGNAL(DeviceAdded(QDBusObjectPath)),
            this, SLOT(addDevice(QDBusObjectPath)));
    connect(m_deviceInterface, SIGNAL(DeviceRemoved(QDBusObjectPath)),
            this, SLOT(removeDevice(QDBusObjectPath)));

    // Seed the model with the current device set.
    listDevices();
}
//--------------------------------------------------------------------------- bool ofxKinectContext::init() { if(freenect_init(&kinectContext, NULL) < 0) { ofLogError("ofxKinect") << "init(): freenect_init failed"; bInited = false; return false; } #ifdef OFX_KINECT_EXTRA_FW freenect_set_fw_address_nui(kinectContext, ofxKinectExtras::getFWData1473(), ofxKinectExtras::getFWSize1473()); freenect_set_fw_address_k4w(kinectContext, ofxKinectExtras::getFWDatak4w(), ofxKinectExtras::getFWSizek4w()); #endif freenect_set_log_level(kinectContext, FREENECT_LOG_WARNING); freenect_select_subdevices(kinectContext, (freenect_device_flags)(FREENECT_DEVICE_MOTOR | FREENECT_DEVICE_CAMERA)); bInited = true; ofLogVerbose("ofxKinect") << "context inited"; buildDeviceList(); listDevices(true); return true; }
int main(int argc, char **argv) { QCoreApplication app(argc, argv); QCoreApplication::setApplicationName("Problemsolver"); QCoreApplication::setApplicationVersion("0.0.1"); QCommandLineParser parser; parser.setApplicationDescription("Problem Problem solver using OptiX."); parser.addHelpOption(); parser.addVersionOption(); parser.addPositionalArgument("source", "Problem definition input file."); QCommandLineOption deviceOption(QStringList() << "d" << "device", "Device number id. Use -l to list devices.", "device", "0"); QCommandLineOption listOption(QStringList() << "l" << "list", "List present CUDA devices in the machine."); parser.addOption(deviceOption); parser.addOption(listOption); parser.process(app); const QStringList args = parser.positionalArguments(); if(args.isEmpty()) { std::cerr << "Missing source parameter" << std::endl; parser.showHelp(1); } QString inputPath = args.first(); if(parser.isSet(listOption)) // -l, then list available devices. { listDevices(); exit(0); } // parse -d option bool parseOk; int deviceNumber = parser.value(deviceOption).toInt(&parseOk); if(!parseOk) { std::cerr << "Expect a number for device option." << std::endl; parser.showHelp(1); } if(deviceNumber < 0) { std::cerr << "Option --device(-d) can't be negative." << std::endl; parser.showHelp(1); } // find and check selected device ComputeDeviceRepository repository; const std::vector<ComputeDevice> & repo = repository.getComputeDevices(); if(repo.empty()) { std::cerr << "No computing device found" "You must have a CUDA enabled GPU to run this application. " "The CUDA GPUs nVidia developer page(https://developer.nvidia.com/cuda-gpus) has a " "list of all supported devices." << std::endl; exit(1); } if(deviceNumber >= repo.size()) { std::cerr << "Invalid device number " << deviceNumber << "." << std::endl << "Try -l to list available computing devices." 
<< std::endl; exit(1); } ComputeDevice device = repo.at(deviceNumber); // Task parented to the application so that it // will be deleted by the application. Main *main = new Main(&app, inputPath, device); // This will cause the application to exit when // the task signals finished. QObject::connect(main, SIGNAL(finished()), &app, SLOT(quit())); // This will run the task from the application event loop. QTimer::singleShot(0, main, SLOT(run())); return app.exec(); }
/**
 * Main capture window: wires up the UI, the capture pipeline, the
 * graphics scene (video image, crosshair, arrow, laser marker) and the
 * input/filter/laser tool-bar menus.
 *
 * Fix relative to the previous revision: `pd->cap = new Capture( this )`
 * was executed twice — the second allocation leaked the first Capture
 * object and silently discarded the brightness range that had already
 * been configured on it.  A single instance is created and reused.
 */
CaptureWgt::CaptureWgt( QWidget * parent )
    : QMainWindow( parent )
{
    pd = new PD();
    pd->ui.setupUi( this );
    pd->crossFname = "./video.ini";
    pd->modifyLaserPos = false;
    pd->devMenu = 0;
    pd->resMenu = 0;
    pd->imgScale = 1.0;
    pd->drawArrow = false;
    pd->flipX = false;
    pd->flipY = false;
    pd->resolutionAcquired = false;

    // The one and only capture object for this widget.
    pd->cap = new Capture( this );

    // Brightness dock: hidden by default, drives the capture's range.
    pd->ui.brightnessDw->setVisible( false );
    pd->brightness = new BrightnessWgt( this );
    pd->ui.brightnessDw->setWidget( pd->brightness );
    pd->cap->setBrightnessRange( pd->brightness->range() );
    connect( pd->brightness, SIGNAL(rangeChanged(const QPointF &)),
             this,           SLOT(slotBrightnessChanged(const QPointF &)) );
    connect( pd->ui.brightnessRange, SIGNAL(triggered()),
             this,                   SLOT(slotBrightness()) );

    Ui_CaptureWgt & ui = pd->ui;
    connect( ui.capture,  SIGNAL(triggered()), this, SLOT(slotCapture()) );
    connect( ui.settings, SIGNAL(triggered()), this, SLOT(slotSettings()) );
    connect( ui.cross,    SIGNAL(triggered()), this, SLOT(slotCrossEmphasis()) );

    // Graphics scene: video pixmap plus crosshair lines and arrow overlay.
    pd->scene = new QGraphicsScene( pd->ui.view );
    pd->scene->setBackgroundBrush( QBrush( Qt::gray ) );
    pd->ui.view->setScene( pd->scene );
    pd->image = new QGraphicsPixmapItem( 0, pd->scene );
    pd->image->setPos( 0.0, 0.0 );
    pd->lineX1 = new QGraphicsLineItem( pd->image, pd->scene );
    pd->lineY1 = new QGraphicsLineItem( pd->image, pd->scene );
    pd->lineX2 = new QGraphicsLineItem( pd->image, pd->scene );
    pd->lineY2 = new QGraphicsLineItem( pd->image, pd->scene );
    pd->arrow = new QGraphicsPolygonItem( pd->image, pd->scene );

    // This is for laser spot position.
    pd->laserPosImage = new QGraphicsPixmapItem( 0, pd->scene );
    pd->laserPosImage->setPixmap( QPixmap( ":/images/laser.png" ) );
    pd->laserPosImage->setPos( -pd->laserPosImage->pixmap().width()/2,
                               -pd->laserPosImage->pixmap().height()/2 );
    pd->laserPosImage->setVisible( false );

    connect( pd->ui.flipX, SIGNAL(triggered()), this, SLOT(slotFlipX()) );
    connect( pd->ui.flipY, SIGNAL(triggered()), this, SLOT(slotFlipY()) );

    // NOTE: a duplicate `pd->cap = new Capture( this );` was removed here
    // (it leaked the instance created above).
    listDevices();
    connect( pd->cap, SIGNAL(frame(const QImage &)),
             this,    SLOT(slotFrame(const QImage &)) );

    ui.view->installEventFilter( this );

    // Input-source menu: Default / Usb / S-Video / Composite.
    pd->inMenu = new QMenu( this );
    ui.input->setMenu( pd->inMenu );
    QAction * a = new QAction( "Default", this );
    a->setCheckable( true );
    a->setChecked( true );
    pd->inMenu->addAction( a );
    pd->inList << a;
    a = new QAction( "Usb", this );
    a->setCheckable( true );
    pd->inMenu->addAction( a );
    pd->inList << a;
    a = new QAction( "S-Video", this );
    a->setCheckable( true );
    pd->inMenu->addAction( a );
    pd->inList << a;
    a = new QAction( "Composite", this );
    a->setCheckable( true );
    pd->inMenu->addAction( a );
    pd->inList << a;
    for ( int i=0; i<pd->inList.size(); i++ )
    {
        QAction * a = pd->inList[i];
        connect( a, SIGNAL(triggered()), this, SLOT(slotInput()) );
    }

    // Actions corresponding to the image filters.
    pd->filterList << ui.equalizeHist;
    pd->filterList << ui.fullContrast;
    pd->filterList << ui.sobel;
    pd->filterList << ui.median;
    pd->filterList << ui.timeSmooth;
    pd->filterList << ui.highPass;
    pd->filterList << ui.surface;
    pd->filterList << ui.brightnessRange;
    for ( int i=0; i<pd->filterList.size(); i++ )
    {
        QAction * a = pd->filterList[i];
        connect( a, SIGNAL(triggered()), this, SLOT(slotFilter()) );
    }
    connect( ui.pixmap, SIGNAL(triggered()), this, SLOT(slotPixmap()) );

    // Make the tool-bar dropdown buttons open their menus on a single click.
    QToolButton * btn = qobject_cast<QToolButton *>(ui.toolBar->widgetForAction(ui.input));
    if ( btn )
        btn->setPopupMode(QToolButton::InstantPopup);
    btn = qobject_cast<QToolButton *>(ui.toolBar->widgetForAction(ui.device));
    if ( btn )
        btn->setPopupMode(QToolButton::InstantPopup);
    btn = qobject_cast<QToolButton *>(ui.toolBar->widgetForAction(ui.resolution));
    if ( btn )
        btn->setPopupMode(QToolButton::InstantPopup);

    //pd->usbLight = new LightIcon( this );
    //ui.toolBar->addWidget( pd->usbLight );

    // Laser spot handling.
    QMenu * m = new QMenu( this );
    a = m->addAction( "Set laser spot position" );
    connect( a, SIGNAL(triggered()), this, SLOT(slotSetLaserSpot()) );
    btn = qobject_cast<QToolButton *>(ui.toolBar->widgetForAction(ui.laser));
    btn->setPopupMode( QToolButton::MenuButtonPopup );
    ui.laser->setMenu( m );

    updateCrossPos();
}
/* Exercise the OpenNI2 C wrapper: print the library version, register
 * device hot-plug listeners, open the first detected device and dump its
 * sensor capabilities. */
int main(int argc, const char ** argv)
{
    int rc;
    char * uri = NULL;
    oni_Device * device = NULL;
    oni_DeviceConnectedListener listen1;
    oni_DeviceDisconnectedListener listen2;
    oni_DeviceStateChangedListener listen3;

    oni_Version ver = oni_getVersion();
    printf("OpenNI2 build %d, maintenance %d, v%d.%d\n",
           ver.build, ver.maintenance, ver.major, ver.minor);

    /* Hook up the callback functions defined elsewhere in this file. */
    listen1.fnPtr = &deviceConnect;
    listen2.fnPtr = &deviceDisconnect;
    listen3.fnPtr = &deviceStateChange;

    //const char * err = oni_getExtendedError();
    //printf("Extended error: %s\n", err);

    rc = oni_initialize();
    if (rc == oni_STATUS_OK)
    {
        /* NOTE(review): each assignment overwrites rc without checking it —
         * confirm listener-registration failures are safe to ignore. */
        rc = oni_addDeviceConnectedListener(&listen1);
        rc = oni_addDeviceDisconnectedListener(&listen2);
        rc = oni_addDeviceStateChangedListener(&listen3);
        listDevices();
        uri = getFirstUri();
    }

    if (uri != NULL)
    {
        printf("Using URI: %s\n", uri);
        device = oni_new_Device();
    }
    else
    {
        printf("Unable to find device!\n");
    }

    if (device != NULL)
    {
        rc = oni_open(device, uri);
        printf("oni_open: rc=%s\n", oni_getString_Status(rc));
        printf("Device is %s\n", oni_isValid_Device(device) ? "valid" : "invalid");
        printf("Registration mode: %s\n",
               oni_getString_ImageRegistrationMode(
                   oni_getImageRegistrationMode(device)));
        {
            int i;
            bool hasSensor;
            /* Probe the three sensor types known to this wrapper. */
            oni_SensorType types[] = { oni_SENSOR_IR, oni_SENSOR_COLOR, oni_SENSOR_DEPTH };
            for (i = 0; i < 3; ++i)
            {
                hasSensor = oni_hasSensor(device, types[i]);
                if (hasSensor)
                {
                    const oni_SensorInfo * info = oni_getSensorInfo(device, types[i]);
                    if (info == NULL)
                    {
                        printf("Present, but can't get info\n");
                    }
                    else
                    {
                        echoSensorInfo(info);
                    }
                }
                else
                {
                    const char * sensorName = oni_getString_SensorType(types[i]);
                    /* NOTE(review): no trailing newline in this message. */
                    printf("Sensor type %s: Not present", sensorName);
                }
            }
        }
        oni_close(device);
    }
}
int main(int argc, char ** argv) { char * descURL = NULL; int option_index = 0; int c; int devid = -1; char arguments[1025]; arguments[0]='\0'; char action[129]; action[0]='\0'; char verbose = 0; unsigned int operation = OP_NONE; if(argc == 1) { printUsage(argv[0]); return -1; } while(1){ c = getopt_long(argc,argv,"dlva:p:",long_options,&option_index); if(c == -1) break; switch(c){ case 'd': operation = OP_DISCOVER; break; case 'l': operation = OP_LIST; break; case 'a': operation = OP_CALLOP; if(optarg) strncpy(action,optarg,128); break; case 'p': if(optarg) strncpy(arguments,optarg,1024); break; case 'v': verbose = 1; break; default: printUsage(argv[0]); return 0; } } if(verbose) printf("Universal Plug and Play IGD Tool v0.1\n João Paulo Barraca <*****@*****.**>\n\n"); if(operation == OP_NONE) return 0; descURL = upnpDiscover(5000); // timeout = 5secs if(descURL) { struct IGDdatas *data = (struct IGDdatas*) malloc(sizeof(struct IGDdatas)); struct UPNPUrls urls; memset(data, 0, sizeof(struct IGDdatas)); memset(&urls, 0, sizeof(struct UPNPUrls)); char * descXML; int descXMLsize = 0; descXML = miniwget(descURL, &descXMLsize); parserootdesc(descXML, descXMLsize, data); if(descXML) { switch(operation) { case OP_CALLOP: callUPNPVariable(data,action,arguments, verbose); break; case OP_LIST: listServices(data,devid,verbose); break; case OP_DISCOVER: listDevices(data); break; default: printf("Error: Action not implemented (yet)!\n"); break; } free(descXML); } else { printf("Error: Cannot get XML description of the device.\n"); } freeIGD(data); free(descURL); } else { fprintf(stderr, "Error: No IGD UPnP Device found on the network !\n"); } return 0; }
// Loopback-capture smoke test: open the default render device with a
// WASAPI loopback stream, coerce the mix format to 16-bit PCM, and pull
// ten packets of captured data, printing diagnostics via DVAR.
int main(int argc, char *argv[])
{
    CoInitialize(nullptr);
    listDevices();

    IAudioClient *pAudioClient;
    IMMDevice *device;
    getDefaultDevice(&device);

    // Activate an IAudioClient interface on the default device.
    HRESULT hr = device->Activate(__uuidof(IAudioClient), CLSCTX_ALL, nullptr, (void**)&pAudioClient);
    if (FAILED(hr))
    {
        printf("IMMDevice::Activate(IAudioClient) failed: hr = 0x%08x", hr);
        return hr;
    }

    REFERENCE_TIME hnsDefaultDevicePeriod;
    hr = pAudioClient->GetDevicePeriod(&hnsDefaultDevicePeriod, nullptr);
    if (FAILED(hr))
    {
        printf("IAudioClient::GetDevicePeriod failed: hr = 0x%08x\n", hr);
        pAudioClient->Release();
        return hr;
    }

    // get the default device format
    WAVEFORMATEX *pwfx;
    hr = pAudioClient->GetMixFormat(&pwfx);
    if (FAILED(hr))
    {
        printf("IAudioClient::GetMixFormat failed: hr = 0x%08x\n", hr);
        CoTaskMemFree(pwfx);
        pAudioClient->Release();
        return hr;
    }

    DVAR(pwfx->wFormatTag);
    DVAR(pwfx->wBitsPerSample);
    DVAR(pwfx->nBlockAlign);
    DVAR(pwfx->nAvgBytesPerSec);

    // Coerce the shared-mode mix format to 16-bit PCM in place, keeping
    // channel count and sample rate.
    switch (pwfx->wFormatTag)
    {
        case WAVE_FORMAT_IEEE_FLOAT:
            pwfx->wFormatTag = WAVE_FORMAT_PCM;
            pwfx->wBitsPerSample = 16;
            pwfx->nBlockAlign = pwfx->nChannels * pwfx->wBitsPerSample / 8;
            pwfx->nAvgBytesPerSec = pwfx->nBlockAlign * pwfx->nSamplesPerSec;
            break;

        case WAVE_FORMAT_EXTENSIBLE:
        {
            // naked scope for case-local variable
            PWAVEFORMATEXTENSIBLE pEx = reinterpret_cast<PWAVEFORMATEXTENSIBLE>(pwfx);
            if (IsEqualGUID(KSDATAFORMAT_SUBTYPE_IEEE_FLOAT, pEx->SubFormat))
            {
                pEx->SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
                pEx->Samples.wValidBitsPerSample = 16;
                pwfx->wBitsPerSample = 16;
                pwfx->nBlockAlign = pwfx->nChannels * pwfx->wBitsPerSample / 8;
                pwfx->nAvgBytesPerSec = pwfx->nBlockAlign * pwfx->nSamplesPerSec;
            }
            else
            {
                printf("Don't know how to coerce mix format to int-16\n");
                CoTaskMemFree(pwfx);
                pAudioClient->Release();
                return E_UNEXPECTED;
            }
        }
        break;

        default:
            printf("Don't know how to coerce WAVEFORMATEX with wFormatTag = 0x%08x to int-16\n", pwfx->wFormatTag);
            CoTaskMemFree(pwfx);
            pAudioClient->Release();
            return E_UNEXPECTED;
    }

    DVAR(pwfx->wFormatTag);
    DVAR(pwfx->wBitsPerSample);
    DVAR(pwfx->nBlockAlign);
    DVAR(pwfx->nAvgBytesPerSec);

    // Shared-mode loopback stream: captures what the render device plays.
    hr = pAudioClient->Initialize(AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS_LOOPBACK, 0, 0, pwfx, 0 );
    if (FAILED(hr))
    {
        printf("IAudioClient::Initialize failed: hr = 0x%08x\n", hr);
        pAudioClient->Release();
        return hr;
    }

    IAudioCaptureClient *pAudioCaptureClient;
    hr = pAudioClient->GetService(__uuidof(IAudioCaptureClient), (void**)&pAudioCaptureClient);
    if (FAILED(hr))
    {
        printf("IAudioClient::GetService(IAudioCaptureClient) failed: hr 0x%08x\n", hr);
        pAudioClient->Release();
        return hr;
    }

    hr = pAudioClient->Start();
    if (FAILED(hr))
    {
        printf("IAudioClient::Start failed: hr = 0x%08x\n", hr);
        pAudioCaptureClient->Release();
        pAudioClient->Release();
        return hr;
    }

    // Pull ten packets.  The loop polls without waiting on an event,
    // which is acceptable for a short smoke test.
    for (int i = 0; i < 10; ++i)
    {
        UINT32 nNextPacketSize;
        hr = pAudioCaptureClient->GetNextPacketSize(&nNextPacketSize);
        if (FAILED(hr))
        {
            printf("IAudioCaptureClient::GetNextPacketSize failed on pass %u after %u frames: hr = 0x%08x\n", 0, 0, hr);
            pAudioClient->Stop();
            pAudioCaptureClient->Release();
            pAudioClient->Release();
            return hr;
        }

        // get the captured data
        BYTE *pData;
        UINT32 nNumFramesToRead;
        DWORD dwFlags;

        hr = pAudioCaptureClient->GetBuffer(&pData, &nNumFramesToRead, &dwFlags, nullptr, nullptr);
        if (FAILED(hr))
        {
            printf("IAudioCaptureClient::GetBuffer failed on pass %u after %u frames: hr = 0x%08x\n", 0, 0, hr);
            pAudioClient->Stop();
            pAudioCaptureClient->Release();
            pAudioClient->Release();
            return hr;
        }
        DVAR(nNumFramesToRead);

        // if (bFirstPacket && AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY == dwFlags) {
        //     printf("Probably spurious glitch reported on first packet\n");
        if (0 != dwFlags && AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY != dwFlags)
        {
            printf("IAudioCaptureClient::GetBuffer set flags to 0x%08x on pass %u after %u frames\n", dwFlags, 0, 0);
            // pAudioClient->Stop();
            // pAudioCaptureClient->Release();
            // pAudioClient->Release();
            // return E_UNEXPECTED;
        }
        else
            DVAR((int)*pData);

        if (0 == nNumFramesToRead)
        {
            printf("IAudioCaptureClient::GetBuffer said to read 0 frames on pass %u after %u frames\n", 0, 0);
            pAudioClient->Stop();
            pAudioCaptureClient->Release();
            pAudioClient->Release();
            return E_UNEXPECTED;
        }

        UINT32 nBlockAlign = pwfx->nBlockAlign;
        // NOTE(review): lBytesToWrite is computed but never used, and the
        // ReleaseBuffer result overwrites hr unchecked — confirm intended.
        LONG lBytesToWrite = nNumFramesToRead * nBlockAlign;
        hr = pAudioCaptureClient->ReleaseBuffer(nNumFramesToRead);
    }

    pAudioClient->Stop();
    pAudioCaptureClient->Release();
    pAudioClient->Release();
    CoUninitialize();

    return 0;
}
int main(int argc, char * const argv[]) { char *host = (char*)"127.0.0.1"; int port = 3001; int device = -1; // for default device int c; opterr = 0; /* options descriptor */ static struct option longopts[] = { { "help", no_argument , NULL, '?' }, { "list", no_argument , NULL, 'l' }, { "host", required_argument, NULL, 'h' }, { "port", required_argument, &port, 'p' }, { "device", required_argument, &device, 'd' }, { "time-out", required_argument, NULL, 't' }, { "osc-path", required_argument, NULL, 'o' }, { NULL, 0, NULL, 0 } }; while ((c = getopt_long (argc, argv, ":h:p:t:d:l",longopts,NULL)) != -1) switch (c) { case 'h': host = optarg; break; case 'p': port = atoi(optarg); break; case 'd': device = atoi(optarg); break; case 't': timeOut = atoi(optarg)/1000.f; break; case 'o': osc_path = optarg; break; case 'l': listDevices(); return 0; case '?': usage(); return 0; case ':': switch (optopt) { case 'h': case 'p': fprintf (stderr, "Please specify an ip and port to send to.\n"); break; case 't': fprintf (stderr, "Please specify a time out value in milliseconds.\n"); break; case 'd': fprintf (stderr, "-d takes a device index. Use -l to list all available devices.\n"); break; default: if (isprint (optopt)) fprintf (stderr, "Unknown option `-%c'.\n", optopt); else fprintf (stderr, "Unknown option character `\\x%x'.\n", optopt); break; } return 1; default: usage(); return 1; } // attach signal handlers signal(SIGINT, catch_int); // Set up OSC const int osc_buffer_size = 512; char buffer[osc_buffer_size]; UdpTransmitSocket _sock(IpEndpointName(host,port)); osc::OutboundPacketStream _stream(buffer,osc_buffer_size); OSCSender osc_sender(&_sock,_stream); _sock.SetEnableBroadcast(true); std::clog << "Sending OSC to " << host << " on port " << port << " with path '" << osc_path << "'" << std::endl; std::clog << "Time for repeated detection is set to " << ((int)(timeOut*1000)) << " milliseconds." 
<< std::endl; std::clog << "Samplerate is set to " << SAMPLING_RATE << "Hz" << std::endl; // try to open audio: PaError err; err = Pa_Initialize(); if( err != paNoError ) goto error; DTMFSetup(SAMPLING_RATE, BUFFER_SIZE); deviceInfo = Pa_GetDeviceInfo((device == -1) ? Pa_GetDefaultInputDevice() : device); /* Open an audio I/O stream. */ PaStream *stream; err = Pa_OpenDefaultStream( &stream, 1, 0, paInt16, SAMPLING_RATE, BUFFER_SIZE, audioCallback, &osc_sender ); // send a pointer to the OSCSender with it if( err != paNoError ) goto error; err = Pa_StartStream( stream ); if( err != paNoError ) goto error; info = Pa_GetStreamInfo(stream); std::clog << "Audio initialized with:" << std::endl << " Device " << deviceInfo->name << std::endl << " Latency " << info->inputLatency << "ms" << std::endl << " Samplerate " << info->sampleRate << "kHz" << std::endl << " Buffer Size " << BUFFER_SIZE << "samples" << std::endl; std::clog << "Read for detection!" << std::endl; // pause and let the audio thread to the work while(running) pause(); // end program err = Pa_StopStream( stream ); if( err != paNoError ) goto error; err = Pa_CloseStream( stream ); if( err != paNoError ) goto error; // Close Audio: err = Pa_Terminate(); if( err != paNoError ) { error: std::cerr << "PortAudio error: " << Pa_GetErrorText( err ) << std::endl; return -1; } return 0; }
/* Open and configure the JACK client: validate the stream parameters,
 * register ports and callbacks, activate the client, and optionally
 * auto-connect the input/output ports to the first available device
 * (or the device named by inDevName/outDevName).
 *
 * Fixes relative to the previous revision:
 *  - Both auto-connect loops called snprintf(sp, 128-(dev-sp), ...).
 *    Since sp points past dev, (dev-sp) is negative, so the size wrapped
 *    to a huge size_t, disabling the bounds check.  The correct remaining
 *    space is 128-(sp-dev).
 *  - devs[0] was read even when listDevices() reported zero devices.
 *  - The output-port warning wrongly said "input channel".
 */
static void openJackStreams(RtJackGlobals *p)
{
    char buf[256];
    int i, j, k;
    CSOUND *csound = p->csound;

    /* connect to JACK server */
    p->client = jack_client_open(&(p->clientName[0]), JackNoStartServer, NULL);
    if (UNLIKELY(p->client == NULL))
      rtJack_Error(csound, -1, Str("could not connect to JACK server"));

    csound->system_sr(csound, jack_get_sample_rate(p->client));
    csound->Message(csound, "system sr: %f\n", csound->system_sr(csound,0));

    /* check consistency of parameters */
    if (UNLIKELY(p->nChannels < 1 || p->nChannels > 255))
      rtJack_Error(csound, -1, Str("invalid number of channels"));
    if (UNLIKELY(p->sampleRate < 1000 || p->sampleRate > 768000))
      rtJack_Error(csound, -1, Str("invalid sample rate"));
    if (UNLIKELY(p->sampleRate != (int) jack_get_sample_rate(p->client))) {
      snprintf(&(buf[0]), 256, Str("sample rate %d does not match "
                                   "JACK sample rate %d"),
               p->sampleRate, (int) jack_get_sample_rate(p->client));
      rtJack_Error(p->csound, -1, &(buf[0]));
    }
    if (UNLIKELY(p->bufSize < 8 || p->bufSize > 32768))
      rtJack_Error(csound, -1, Str("invalid period size (-b)"));
    if (p->nBuffers < 2)
      p->nBuffers = 2;
    if (UNLIKELY((unsigned int) (p->nBuffers * p->bufSize) > (unsigned int) 65536))
      rtJack_Error(csound, -1, Str("invalid buffer size (-B)"));
    if (UNLIKELY(((p->nBuffers - 1) * p->bufSize)
                 < (int) jack_get_buffer_size(p->client)))
      rtJack_Error(csound, -1, Str("buffer size (-B) is too small"));

    /* register ports */
    rtJack_RegisterPorts(p);

    /* allocate ring buffers if not done yet */
    if (p->bufs == NULL)
      rtJack_AllocateBuffers(p);

    /* initialise ring buffers */
    p->csndBufCnt = 0;
    p->csndBufPos = 0;
    p->jackBufCnt = 0;
    p->jackBufPos = 0;
    for (i = 0; i < p->nBuffers; i++) {
      rtJack_TryLock(p->csound, &(p->bufs[i]->csndLock));
      rtJack_Unlock(p->csound, &(p->bufs[i]->jackLock));
      for (j = 0; j < p->nChannels; j++) {
        if (p->inputEnabled) {
          for (k = 0; k < p->bufSize; k++)
            p->bufs[i]->inBufs[j][k] = (jack_default_audio_sample_t) 0;
        }
        if (p->outputEnabled) {
          for (k = 0; k < p->bufSize; k++)
            p->bufs[i]->outBufs[j][k] = (jack_default_audio_sample_t) 0;
        }
      }
    }
    /* output port buffer pointer cache is invalid initially */
    if (p->outputEnabled)
      p->outPortBufs[0] = (jack_default_audio_sample_t*) NULL;

    /* register callback functions */
    if (UNLIKELY(jack_set_sample_rate_callback(p->client,
                                               sampleRateCallback,
                                               (void*) p) != 0))
      rtJack_Error(csound, -1, Str("error setting sample rate callback"));
    if (UNLIKELY(jack_set_buffer_size_callback(p->client,
                                               bufferSizeCallback,
                                               (void*) p) != 0))
      rtJack_Error(csound, -1, Str("error setting buffer size callback"));
#ifdef LINUX
    if (UNLIKELY(jack_set_freewheel_callback(p->client,
                                             freeWheelCallback,
                                             (void*) p) != 0))
      rtJack_Error(csound, -1, Str("error setting freewheel callback"));
#endif
    if (UNLIKELY(jack_set_xrun_callback(p->client, xrunCallback, (void*) p) != 0))
      rtJack_Error(csound, -1, Str("error setting xrun callback"));
    jack_on_shutdown(p->client, shutDownCallback, (void*) p);
    if (UNLIKELY(jack_set_process_callback(p->client,
                                           processCallback, (void*) p) != 0))
      rtJack_Error(csound, -1, Str("error setting process callback"));

    /* activate client */
    if (UNLIKELY(jack_activate(p->client) != 0))
      rtJack_Error(csound, -1, Str("error activating JACK client"));

    /* connect ports if requested */
    if (p->inputEnabled) {
      char dev[128], *dev_final, *sp;
      {
        int i, n = listDevices(csound, NULL, 0);
        CS_AUDIODEVICE *devs = (CS_AUDIODEVICE *) malloc(n*sizeof(CS_AUDIODEVICE));
        listDevices(csound, devs, 0);
        for (i = 0; i < n; i++)
          csound->Message(csound, " %d: %s (%s)\n",
                          i, devs[i].device_id, devs[i].device_name);
        /* guard n == 0: devs[0] was read unconditionally before */
        if (n > 0) {
          strncpy(dev, devs[0].device_name, 128);
          dev[127] = '\0';
        }
        else
          dev[0] = '\0';
        free(devs);
      }
      if (p->inDevName != NULL) {
        strncpy(dev, p->inDevName, 128);
        dev[127] = '\0';
      }
      dev_final = dev;
      sp = strchr(dev_final, '\0');
      /* skip a leading non-alphabetic character (e.g. a list index) */
      if (!isalpha(dev_final[0]))
        dev_final++;
      for (i = 0; i < p->nChannels; i++) {
        /* remaining space is 128-(sp-dev); the old 128-(dev-sp) was
           negative and wrapped to a huge size_t */
        snprintf(sp, 128 - (sp - dev), "%d", i + 1);
        if (UNLIKELY(jack_connect(p->client, dev_final,
                                  jack_port_name(p->inPorts[i])) != 0)) {
          //rtJack_Error(csound, -1, Str("error connecting input ports"));
          csound->Warning(csound,
                          Str("not autoconnecting input channel %d \n"
                              "(needs manual connection)"), i+1);
        }
      }
      *sp = (char) 0;
    }
    if (p->outputEnabled) {
      char dev[128], *dev_final, *sp;
      {
        int i, n = listDevices(csound, NULL, 1);
        CS_AUDIODEVICE *devs = (CS_AUDIODEVICE *) malloc(n*sizeof(CS_AUDIODEVICE));
        listDevices(csound, devs, 1);
        for (i = 0; i < n; i++)
          csound->Message(csound, " %d: %s (%s)\n",
                          i, devs[i].device_id, devs[i].device_name);
        /* guard n == 0, as above */
        if (n > 0) {
          strncpy(dev, devs[0].device_name, 128);
          dev[127] = '\0';
        }
        else
          dev[0] = '\0';
        free(devs);
      }
      if (p->outDevName != NULL) {
        strncpy(dev, p->outDevName, 128);
        dev[127] = '\0';
      }
      dev_final = dev;
      sp = strchr(dev_final, '\0');
      if (!isalpha(dev_final[0]))
        dev_final++;
      for (i = 0; i < p->nChannels; i++) {
        snprintf(sp, 128 - (sp - dev), "%d", i + 1);
        if (jack_connect(p->client, jack_port_name(p->outPorts[i]),
                         dev_final) != 0) {
          //rtJack_Error(csound, -1, Str("error connecting output ports"));
          /* the old message wrongly said "input channel" here */
          csound->Warning(csound,
                          Str("not autoconnecting output channel %d \n"
                              "(needs manual connection)"), i+1);
        }
      }
      *sp = (char) 0;
    }
    /* stream is now active */
    p->jackState = 0;
}
//---------------------------------------------------------------- void ofSerial::enumerateDevices(){ listDevices(); }
//--------------------------------------------------------------------
// Initialize the QuickTime sequence grabber at w x h: open the grabber,
// allocate the pixel buffers/GWorld, select a capture device (the chosen
// one first, then any available), install the grab-complete callback and
// start previewing.  Returns true on success; any failure after the
// grabber is opened jumps to `bail`, which closes the grabber so the
// next device is not left in a broken state.  Compiled to a stub
// returning false when OF_VIDEO_CAPTURE_QUICKTIME is not defined.
bool ofQuickTimeGrabber::initGrabber(int w, int h){

	//---------------------------------
	#ifdef OF_VIDEO_CAPTURE_QUICKTIME
	//---------------------------------

	//---------------------------------- 1 - open the sequence grabber
	if( !qtInitSeqGrabber() ){
		ofLogError("ofQuickTimeGrabber") << "initGrabber(): unable to initialize the seq grabber";
		return false;
	}

	//---------------------------------- 2 - set the dimensions
	//width 		= w;
	//height 		= h;

	MacSetRect(&videoRect, 0, 0, w, h);

	//---------------------------------- 3 - buffer allocation
	// Create a buffer big enough to hold the video data,
	// make sure the pointer is 32-byte aligned.
	// also the rgb image that people will grab
	offscreenGWorldPixels 	= (unsigned char*)malloc(4 * w * h + 32);
	pixels.allocate(w, h, OF_IMAGE_COLOR);

	// Big-endian OS X captures into the 32-bit ARGB scratch buffer;
	// everywhere else QuickTime writes straight into our RGB pixels.
	#if defined(TARGET_OSX) && defined(__BIG_ENDIAN__)
		QTNewGWorldFromPtr (&(videogworld), k32ARGBPixelFormat, &(videoRect), NULL, NULL, 0, (offscreenGWorldPixels), 4 * w);
	#else
		QTNewGWorldFromPtr (&(videogworld), k24RGBPixelFormat, &(videoRect), NULL, NULL, 0, (pixels.getPixels()), 3 * w);
	#endif

	LockPixels(GetGWorldPixMap(videogworld));
	SetGWorld (videogworld, NULL);
	SGSetGWorld(gSeqGrabber, videogworld, nil);

	//---------------------------------- 4 - device selection
	bool didWeChooseADevice = bChooseDevice;
	bool deviceIsSelected	=  false;

	//if we have a device selected then try first to setup
	//that device
	if(didWeChooseADevice){
		deviceIsSelected = qtSelectDevice(deviceID, true);
		if(!deviceIsSelected && bVerbose)
			ofLogError("ofQuickTimeGrabber") << "initGrabber(): unable to open device[" << deviceID << "], will attempt other devices";
	}

	//if we couldn't select our required device
	//or we aren't specifiying a device to setup
	//then lets try to setup ANY device!
	if(deviceIsSelected == false){
		//lets list available devices
		listDevices();
		setDeviceID(0);
		deviceIsSelected = qtSelectDevice(deviceID, false);
	}

	//if we still haven't been able to setup a device
	//we should error and stop!
	if( deviceIsSelected == false){
		goto bail;
	}

	//---------------------------------- 5 - final initialization steps
	OSStatus err;

	err = SGSetChannelUsage(gVideoChannel,seqGrabPreview);
	if ( err != noErr ) goto bail;

	//----------------- callback method for notifying new frame
	err = SGSetChannelRefCon(gVideoChannel, (long)&bHavePixelsChanged );
	if(!err) {

		VideoBottles vb;
		/* get the current bottlenecks */
		vb.procCount = 9;
		err = SGGetVideoBottlenecks(gVideoChannel, &vb);
		if (!err) {
			/* route frame completion through our own proc so we know
			   when fresh pixels have arrived */
			myGrabCompleteProc = NewSGGrabCompleteBottleUPP(frameIsGrabbedProc);
			vb.grabCompleteProc = myGrabCompleteProc;

			/* add our GrabFrameComplete function */
			err = SGSetVideoBottlenecks(gVideoChannel, &vb);
		}

	}

	err = SGSetChannelBounds(gVideoChannel, &videoRect);
	if ( err != noErr ) goto bail;

	err = SGPrepare(gSeqGrabber, true, false); //theo swapped so preview is true and capture is false
	if ( err != noErr ) goto bail;

	err = SGStartPreview(gSeqGrabber);
	if ( err != noErr ) goto bail;

	bGrabberInited = true;
	loadSettings();

	if( attemptFramerate >= 0 ){
		err = SGSetFrameRate(gVideoChannel, IntToFixed(attemptFramerate) );
		if ( err != noErr ){
			ofLogError("ofQuickTimeGrabber") << "initGrabber: couldn't setting framerate to " << attemptFramerate << ": OSStatus " << err;
		}
	}

	ofLogNotice("ofQuickTimeGrabber") << " inited grabbed ";
	ofLogNotice("ofQuickTimeGrabber") << "-------------------------------------";

	// we are done
	return true;

	//--------------------- (bail) something's wrong -----
	bail:

	ofLogError("ofQuickTimeGrabber") << "***** ofQuickTimeGrabber error *****";
	ofLogError("ofQuickTimeGrabber") << "------------------------------------";

	//if we don't close this - it messes up the next device!
	if(bSgInited) qtCloseSeqGrabber();

	bGrabberInited = false;
	return false;

	//---------------------------------
	#else
	//---------------------------------

	return false;

	//---------------------------------
	#endif
	//---------------------------------

}
//--------------------------------------------------------------------
// Initializes the video capture device and the internal RGB pixel buffer
// for a w x h stream. Exactly one platform backend is expected to be
// compiled in via the OF_VIDEO_CAPTURE_* defines; each backend section
// returns on its own, so control never falls through between sections.
//
// w             requested capture width in pixels
// h             requested capture height in pixels
// setUseTexture when true, allocates a GL_RGB texture and uploads black
//               pixels so drawing before the first frame shows black
// returns       true when the grabber was fully initialized
bool ofVideoGrabber::initGrabber(int w, int h, bool setUseTexture){

	bUseTexture = setUseTexture;

	//---------------------------------
	#ifdef OF_VIDEO_CAPTURE_QUICKTIME
	//---------------------------------

	//---------------------------------- 1 - open the sequence grabber
	if( !qtInitSeqGrabber() ){
		ofLog(OF_LOG_ERROR, "error: unable to initialize the seq grabber");
		return false;
	}

	//---------------------------------- 2 - set the dimensions
	width = w;
	height = h;
	MacSetRect(&videoRect, 0, 0, width, height);

	//---------------------------------- 3 - buffer allocation
	// Create a buffer big enough to hold the video data (32-bit ARGB),
	// over-allocated by 32 bytes so the pointer can be 32-byte aligned,
	// plus the rgb image that people will grab.
	// NOTE(review): neither buffer is freed on the bail path below, so a
	// failed init leaks them — verify teardown happens elsewhere.
	offscreenGWorldPixels = (unsigned char*)malloc(4 * width * height + 32);
	pixels = new unsigned char[width*height*3];
	QTNewGWorldFromPtr (&videogworld, k32ARGBPixelFormat, &videoRect, NULL, NULL, 0, offscreenGWorldPixels, 4 * width);
	LockPixels(GetGWorldPixMap(videogworld));
	SetGWorld (videogworld, NULL);
	SGSetGWorld(gSeqGrabber, videogworld, nil);

	//---------------------------------- 4 - device selection
	bool didWeChooseADevice = bChooseDevice;
	bool deviceIsSelected = false;

	//if we have a device selected then try first to setup
	//that device
	if(didWeChooseADevice){
		deviceIsSelected = qtSelectDevice(deviceID, true);
		if(!deviceIsSelected && bVerbose) ofLog(OF_LOG_WARNING, "unable to open device[%i] - will attempt other devices", deviceID);
	}

	//if we couldn't select our required device
	//or we aren't specifiying a device to setup
	//then lets try to setup ANY device!
	if(deviceIsSelected == false){
		//lets list available devices
		listDevices();
		setDeviceID(0);
		deviceIsSelected = qtSelectDevice(deviceID, false);
	}

	//if we still haven't been able to setup a device
	//we should error and stop!
	if( deviceIsSelected == false){
		goto bail;
	}

	//---------------------------------- 5 - final initialization steps
	OSStatus err;

	// preview-only channel usage; frames are read from the preview GWorld
	err = SGSetChannelUsage(gVideoChannel,seqGrabPreview);
	if ( err != noErr ) goto bail;

	err = SGSetChannelBounds(gVideoChannel, &videoRect);
	if ( err != noErr ) goto bail;

	err = SGPrepare(gSeqGrabber, true, false); //theo swapped so preview is true and capture is false
	if ( err != noErr ) goto bail;

	err = SGStartPreview(gSeqGrabber);
	if ( err != noErr ) goto bail;

	bGrabberInited = true;
	loadSettings();

	ofLog(OF_LOG_NOTICE,"end setup ofVideoGrabber");
	ofLog(OF_LOG_NOTICE,"-------------------------------------\n");

	//---------------------------------- 6 - setup texture if needed
	if (bUseTexture){
		// create the texture, set the pixels to black and
		// upload them to the texture (so at least we see black until the first frame)
		tex.allocate(width,height,GL_RGB);
		memset(pixels, 0, width*height*3);
		tex.loadData(pixels, width, height, GL_RGB);
	}

	// we are done
	return true;

	//--------------------- (bail) something's wrong -----
	bail:

		ofLog(OF_LOG_ERROR, "***** ofVideoGrabber error *****");
		ofLog(OF_LOG_ERROR, "-------------------------------------\n");

		//if we don't close this - it messes up the next device!
		if(bSgInited) qtCloseSeqGrabber();

		bGrabberInited = false;
		return false;

	//---------------------------------
	#endif
	//---------------------------------

	//---------------------------------
	#ifdef OF_VIDEO_CAPTURE_DIRECTSHOW
	//---------------------------------

	if (bChooseDevice){
		device = deviceID;
		ofLog(OF_LOG_NOTICE, "choosing %i", deviceID);
	} else {
		device = 0;
	}

	width = w;
	height = h;
	bGrabberInited = false;

	bool bOk = VI.setupDevice(device, width, height);

	// the driver may negotiate a different size than the one we asked for;
	// remember the request so we can decide whether to resize later
	int ourRequestedWidth = width;
	int ourRequestedHeight = height;

	if (bOk == true){
		bGrabberInited = true;
		width = VI.getWidth(device);
		height = VI.getHeight(device);
		if (width == ourRequestedWidth && height == ourRequestedHeight){
			bDoWeNeedToResize = false;
		} else {
			bDoWeNeedToResize = true;
			width = ourRequestedWidth;
			height = ourRequestedHeight;
		}

		pixels = new unsigned char[width * height * 3];

		if (bUseTexture){
			// create the texture, set the pixels to black and
			// upload them to the texture (so at least we see black until the first frame)
			tex.allocate(width,height,GL_RGB);
			memset(pixels, 0, width*height*3);
			tex.loadData(pixels, width, height, GL_RGB);
		}
		return true;
	} else {
		ofLog(OF_LOG_ERROR, "error allocating a video device");
		ofLog(OF_LOG_ERROR, "please check your camera with AMCAP or other software");
		bGrabberInited = false;
		return false;
	}

	//---------------------------------
	#endif
	//---------------------------------

	//---------------------------------
	#ifdef OF_VIDEO_CAPTURE_UNICAP
	//--------------------------------

	if( !bGrabberInited ){
		if ( !bChooseDevice ){
			deviceID = 0;
		}
		width = w;
		height = h;
		pixels = new unsigned char[width * height * 3];

		if (bUseTexture){
			// create the texture, set the pixels to black and
			// upload them to the texture (so at least we see black until the first frame)
			tex.allocate(width,height,GL_RGB);
			memset(pixels, 0, width*height*3);
			tex.loadData(pixels, width, height, GL_RGB);
		}

		bGrabberInited = ucGrabber.open_device (deviceID);
		if( bGrabberInited ){
			ofLog(OF_LOG_NOTICE, "choosing device %i: %s", deviceID,ucGrabber.device_identifier());
			ucGrabber.set_format(w,h);
			ucGrabber.start_capture();
		}
	}
	return bGrabberInited;

	//---------------------------------
	#endif
	//---------------------------------

	//---------------------------------
	#ifdef OF_VIDEO_CAPTURE_GSTREAMER
	//--------------------------------

	if(gstUtils.initGrabber(w,h)){
		if ( !bChooseDevice ){
			deviceID = 0;
		}
		width = w;
		height = h;
		if (bUseTexture){
			// create the texture and upload the first gstreamer pixels
			// (so at least we see something until the stream settles)
			tex.allocate(width,height,GL_RGB);
			tex.loadData(gstUtils.getPixels(), width, height, GL_RGB);
		}
		bGrabberInited = true;
		ofLog(OF_LOG_VERBOSE, "ofVideoGrabber: initied");
	}else{
		bGrabberInited = false;
		ofLog(OF_LOG_ERROR, "ofVideoGrabber: couldn't init");
	}
	return bGrabberInited;

	//---------------------------------
	#endif
	//---------------------------------

	//---------------------------------
	#ifdef OF_VIDEO_CAPTURE_V4L
	//--------------------------------

	if (bChooseDevice){
		device = deviceID;
	} else {
		device = 0;
	}

	// build the /dev/videoN path for the selected device index
	sprintf(dev_name, "/dev/video%i", device);
	ofLog(OF_LOG_NOTICE, "choosing device %s",dev_name);

	bool bOk = initV4L(w, h, dev_name);

	if (bOk == true){
		bV4LGrabberInited = true;
		// V4L reports the size it actually opened with
		width = getV4L_Width();
		height = getV4L_Height();
		pixels = new unsigned char[width * height * 3];

		if (bUseTexture){
			// create the texture, set the pixels to black and
			// upload them to the texture (so at least we see black until the first frame)
			tex.allocate(width,height,GL_RGB);
			//memset(pixels, 0, width*height*3);
			//tex.loadData(pixels, width, height, GL_RGB);
		}
		ofLog(OF_LOG_NOTICE, "success allocating a video device ");
		return true;
	} else {
		ofLog(OF_LOG_ERROR, "error allocating a video device");
		ofLog(OF_LOG_ERROR, "please check your camera and verify that your driver is correctly installed.");
		return false;
	}

	//---------------------------------
	//---------------------------------
	#endif
	//---------------------------------
}
/**
 * Parses a line of data given to a control channel and dispatches the
 * command it contains. The first character of the client's command buffer
 * selects the command; arguments (when present) are located via the argIx
 * offsets into that buffer. On success "OK" is sent back to the client;
 * failure responses are emitted by the individual command handlers.
 *
 * @param pClient  the client of the channel
 * @param argIx    array with start index of each argument in pClient->cmdbuf
 * @param argCount number of arguments
 * @param cmdlen   total length of the command line (forwarded to openDevice)
 * @return 0 on success, a negative value otherwise
 */
static int ctrlParse(ClientElem_t *pClient, int *argIx, int argCount, int cmdlen) {
  int res = -1;
  char *pCmd = pClient->cmdbuf;
  switch (pCmd[0]) {
  /* Identify this control channel: reply with its index */
  case 'I': {
    SEND("%i", getIndexByElement(pClient))
    res = 0;
    break;
  }
  /* Attach this channel to given control channel and make it a data channel */
  case 'A': {
    int ix = atoi(&pCmd[argIx[1]]);
    ClientElem_t *pOtherClient = getElementByIndex(ix);
    if (pOtherClient == NULL) {
      SEND("ERROR no such channel")
    } else if (pClient == pOtherClient) {
      SEND("ERROR cannot attach to self")
    } else if (pOtherClient->portNumerator == -1) {
      /* the target channel has no device open, nothing to attach to */
      SEND("ERROR channel not connected to device")
    } else {
      /* share the other channel's serial handle and become a data channel */
      pClient->serialHdl = pOtherClient->serialHdl;
      pClient->portNumerator = pOtherClient->portNumerator;
      pClient->type = TYPE_DATA;
      res = 0;
    }
    break;
  }
  /* List devices */
  case 'L': {
    res = listDevices(pClient);
    break;
  }
  /* Open device */
  case 'O': {
    if (openDevice(pClient, argIx, argCount, cmdlen) >= 0) {
      res = 0;
    }
    break;
  }
  /* Configure UART */
  case 'U': {
    res = setUART(pClient, argIx, argCount);
    break;
  }
  /* Kill client, close device */
  case 'C': {
    DBG_PRINT("Killing client");
    pClient->running = 0;
    res = 0;
    break;
  }
  /* Kill server */
  case 'X': {
    DBG_PRINT("Killing server");
    g_serverRunning = 0;
    res = 0;
    break;
  }
  default: {
    /* was "unkown" — fixed the typo in the error reply */
    SEND("ERROR unknown command: %s", pCmd)
  }
  }
  if (res == 0) {
    SEND("OK")
  }
  return res;
}
QSharedPointer<CaptureDevice> TimeLapseCapture::parseArguments() { QCommandLineParser parser; ErrorMessageHelper die(err.device(), &parser); parser.setApplicationDescription("Tool for capture sequence of images from digital camera (V4L or GPhoto2 API)."); parser.addHelpOption(); parser.addVersionOption(); QCommandLineOption outputOption(QStringList() << "o" << "output", QCoreApplication::translate("main", "Output directory."), QCoreApplication::translate("main", "directory")); parser.addOption(outputOption); QCommandLineOption verboseOption(QStringList() << "V" << "verbose", QCoreApplication::translate("main", "Verbose output.")); parser.addOption(verboseOption); QCommandLineOption deviceOption(QStringList() << "d" << "device", QCoreApplication::translate("main", "Capture device."), QCoreApplication::translate("main", "device")); parser.addOption(deviceOption); QCommandLineOption listOption(QStringList() << "l" << "list", QCoreApplication::translate("main", "List available capture devices and exits.")); parser.addOption(listOption); QCommandLineOption intervalOption(QStringList() << "i" << "interval", QCoreApplication::translate("main", "Capture interval (in milliseconds). Default is 10000."), QCoreApplication::translate("main", "interval")); parser.addOption(intervalOption); QCommandLineOption cntOption(QStringList() << "c" << "count", QCoreApplication::translate("main", "How many images should be captured. 
Default value is infinite."), QCoreApplication::translate("main", "count")); parser.addOption(cntOption); QCommandLineOption rowOption(QStringList() << "r" << "raw", QCoreApplication::translate("main", "Store all captured images in raw.")); parser.addOption(rowOption); QCommandLineOption getShutterSpeedOption(QStringList() << "s" << "shutterspeed-options", QCoreApplication::translate("main", "Prints available shutterspeed setting choices and exits.")); parser.addOption(getShutterSpeedOption); QCommandLineOption adaptiveShutterSpeedOption(QStringList() << "a" << "adaptive-shutterspeed", QCoreApplication::translate("main", "Camera shutterspeed will be adaptively changed after exposure metering.\n" "This option setup how many images should be used for exposure metering. \n" "Default value is 0 - it means that shutterspeed will not be changed by capture tool." ), QCoreApplication::translate("main", "count")); parser.addOption(adaptiveShutterSpeedOption); QCommandLineOption shutterSpeedStepOption(QStringList() << "shutterspeed-step", QCoreApplication::translate("main", "How large should be step when changing shutterspeed. \n" "Default value is 1." ), QCoreApplication::translate("main", "step")); parser.addOption(shutterSpeedStepOption); QCommandLineOption minShutterSpeedOption(QStringList() << "min-shutterspeed", QCoreApplication::translate("main", "Minimum shutterspeed (fastest shutter) used by adaptive shutterspeed"), QCoreApplication::translate("main", "shutterspeed")); parser.addOption(minShutterSpeedOption); QCommandLineOption maxShutterSpeedOption(QStringList() << "max-shutterspeed", QCoreApplication::translate("main", "Maximum shutterspeed (slowest shutter) used by adaptive shutterspeed.\n" "If camera supports BULB shutterspeed, it can be defined as \"BULB:XX\" here (it means bulb with XX s exposure)." 
), QCoreApplication::translate("main", "shutterspeed")); parser.addOption(maxShutterSpeedOption); // Process the actual command line arguments given by the user parser.process(*this); // verbose? if (!parser.isSet(verboseOption)) { blackHole = new BlackHoleDevice(); verboseOutput.setDevice(blackHole); } else { // verbose verboseOutput << "Turning on verbose output..." << endl; verboseOutput << applicationName() << " " << applicationVersion() << endl; } // raw? storeRawImages = parser.isSet(rowOption); // interval if (parser.isSet(intervalOption)) { bool ok = false; long i = parser.value(intervalOption).toLong(&ok); if (!ok) die << "Cant parse interval."; if (i <= 0) die << "Interval have to be possitive"; interval = i; } // count if (parser.isSet(cntOption)) { bool ok = false; int i = parser.value(cntOption).toInt(&ok); if (!ok) die << "Cant parse count."; cnt = i; } // list devices? QList<QSharedPointer < CaptureDevice>> devices = listDevices(); QSharedPointer<CaptureDevice> dev; if (parser.isSet(listOption)) { if (devices.isEmpty()) { die << QCoreApplication::translate("main", "No compatible capture device found"); } else { out << "Found devices: " << endl; for (QSharedPointer<CaptureDevice> d : devices) { out << " " << d->toString() << endl; } } std::exit(0); } // capture device bool assigned = false; if (!parser.isSet(deviceOption)) { verboseOutput << "Found devices: " << endl; for (QSharedPointer<CaptureDevice> d : devices) { if (!assigned) { dev = d; assigned = true; } verboseOutput << " " << d->toString() << endl; } if (!assigned) die << "No supported device."; } else { QString devVal = parser.value(deviceOption); for (QSharedPointer<CaptureDevice> d : devices) { if (d->toString().contains(devVal, Qt::CaseInsensitive)) { assigned = true; dev = d; break; } } if (!assigned) { die << QString("No device matching \"%1\" found.").arg(devVal); } } out << "Using device " << dev->toString() << endl; // getShutterSpeedOption ? 
QList<ShutterSpeedChoice> choices = dev->getShutterSpeedChoices(); if (parser.isSet(getShutterSpeedOption)) { if (choices.isEmpty()) { err << "Device " << dev->toShortString() << " don't support shutterspeed setting" << endl; } else { out << "Device " << dev->toShortString() << " shutterspeed choices:" << endl; for (ShutterSpeedChoice ch : choices) { out << " " << ch.toString() << endl; } out << "Current shutter speed: " << dev->currentShutterSpeed().toString() << endl; } std::exit(0); } // automatic chutter speed // this functionality is experimental! if (parser.isSet(adaptiveShutterSpeedOption)) { bool ok = false; int changeThreshold = parser.value(adaptiveShutterSpeedOption).toInt(&ok); if (!ok) die << "Cant parse adaptive shutterspeed option."; if (changeThreshold > 0) { if (choices.isEmpty()) { die << "Camera don't support shutterspeed setting."; } else { ShutterSpeedChoice currentShutterSpeed; ShutterSpeedChoice minShutterSpeed; ShutterSpeedChoice maxShutterSpeed; currentShutterSpeed = dev->currentShutterSpeed(); minShutterSpeed = choices.first(); for (ShutterSpeedChoice ch : choices) { if ((!ch.isBulb()) && ch.toMicrosecond() > maxShutterSpeed.toMicrosecond()) maxShutterSpeed = ch; } int shutterSpeedStep = 1; if (parser.isSet(shutterSpeedStepOption)) { bool ok = false; shutterSpeedStep = parser.value(shutterSpeedStepOption).toInt(&ok); if (!ok) die << "Cant parse shutterspeed step."; if (shutterSpeedStep < 1) die << "Shutterspeed step can be less than one."; } if (parser.isSet(minShutterSpeedOption)) { QString optStr = parser.value(minShutterSpeedOption); minShutterSpeed = getShutterSpeed(optStr, choices, &die); } if (parser.isSet(maxShutterSpeedOption)) { QString optStr = parser.value(maxShutterSpeedOption); maxShutterSpeed = getShutterSpeed(optStr, choices, &die); } out << "Using automatic shutter speed:" << endl; out << " current shutter speed: " << currentShutterSpeed.toString() << endl; out << " min shutter speed: " << minShutterSpeed.toString() << 
endl; out << " max shutter speed: " << maxShutterSpeed.toString() << endl; out << " change threshold: " << changeThreshold << endl; out << " change step: " << shutterSpeedStep << endl; if (minShutterSpeed.toMicrosecond() <= 0 || maxShutterSpeed.toMicrosecond() <= 0 || maxShutterSpeed.toMicrosecond() < minShutterSpeed.toMicrosecond()) die << "Invalid shutter speed configurarion"; if (maxShutterSpeed.toMs() > interval) { err << QString("Warning: Maximum shutter speed (%1 ms) is greater than capture interval (%2 ms)!") .arg(maxShutterSpeed.toMs()) .arg(interval) << endl; } shutterSpdAlg = new MatrixMeteringAlg(choices, currentShutterSpeed, minShutterSpeed, maxShutterSpeed, &err, &verboseOutput, changeThreshold, shutterSpeedStep); } } } // output if (!parser.isSet(outputOption)) die << "Output directory is not set"; output = QDir(parser.value(outputOption)); if (output.exists()) err << "Output directory exists already." << endl; if (!output.mkpath(".")) die << QString("Can't create output directory %1 !").arg(output.path()); return dev; }
int __cdecl main(int argc, char** argv) { static int opt_silent = 1, log_level = WDI_LOG_LEVEL_WARNING; struct wdi_device_info *list; int c,r; BOOL list_usbs=FALSE, bootloaderWinusbInstall=FALSE; BOOL pause=FALSE; char *inf_name = INF_NAME; char *ext_dir = DEFAULT_DIR; char *cert_name = NULL; static struct option long_options[] = { {"verbose", no_argument, 0, 'v'}, {"list", no_argument, 0, 'l'}, {"help", no_argument, 0, 'h'}, {"pause", no_argument, 0, 'p'}, {"bootloader", no_argument, 0, 'b'}, {0, 0, 0, 0} }; while(1) { c = getopt_long(argc, argv, "blhvp", long_options, NULL); oprintf("got argument: %d \n", c); if (c == -1) break; switch(c) { case 'v': opt_silent = 0; break; case 'h': usage(); break; case 'l': list_usbs=TRUE; break; case 'p': pause=TRUE; break; case 'b': bootloaderWinusbInstall = TRUE; break; default: usage(); exit(0); } } wdi_set_log_level(log_level); if (is_x64()) { oprintf("I see you are on a 64 bit system, nice\n"); } else { oprintf("What a lovely 32 bit system you have\n"); } if (list_usbs) { listDevices(opt_silent); } if (bootloaderWinusbInstall) { r=set_bootloader_to_winusb(opt_silent); <<<<<<< HEAD listDevices(opt_silent); // Print these before and after :) printf("RETURN:%d,%s", r, wdi_strerror(r)); //just print the return code for now =======