/**
 * Begin sampling with the requested interface mode and audio format.
 *
 * Configures the I/O pins, applies the mode and format selections on the
 * codec, then resumes capture.
 *
 * @param mode   interface mode to select
 * @param format audio interface format to select
 */
void Alislahish_PCM1808::begin(InterfaceModes mode, AudioInterfaceFormats format) {
    setPinModes();
    //selectSamplingFrequency(freq);   // retained from original: frequency is not selected here
    selectMode(mode);
    selectFormat(format);
    resume();
}
/**
 * (Re)initialize the audio engine for the current operating mode
 * (file playback, tone generation, or live recording).
 *
 * Selects a usable QAudioFormat; when the format actually changed, tears
 * down the existing audio devices and rebuilds the buffer and the
 * input/output devices for the new format. Emits buffer/data length and
 * buffer-changed signals so views can refresh, and emits a mode-specific
 * errorMessage when no usable format exists.
 *
 * @return true on successful (re)initialization, false otherwise.
 *         NOTE(review): if selectFormat() succeeds but the format is
 *         unchanged, result stays false — confirm this is intended.
 */
bool Engine::initialize()
{
    bool result = false;

    QAudioFormat format = m_format; // remember the previous format to detect a change

    if (selectFormat()) {
        if (m_format != format) {
            // Format changed: drop old devices and rebuild for the new format.
            resetAudioDevices();
            if (m_file) {
                // File playback: buffer content comes from the file on demand.
                emit bufferLengthChanged(bufferLength());
                emit dataLengthChanged(dataLength());
                emit bufferChanged(0, 0, m_buffer);
                setRecordPosition(bufferLength());
                result = true;
            } else {
                // Allocate a zero-filled buffer covering BufferDurationUs of audio.
                m_bufferLength = audioLength(m_format, BufferDurationUs);
                m_buffer.resize(m_bufferLength);
                m_buffer.fill(0);
                emit bufferLengthChanged(bufferLength());
                if (m_generateTone) {
                    if (0 == m_tone.endFreq) {
                        // No explicit end frequency: sweep up to the lesser of
                        // SpectrumHighFreq and the Nyquist frequency of the format.
                        const qreal nyquist = nyquistFrequency(m_format);
                        m_tone.endFreq = qMin(qreal(SpectrumHighFreq), nyquist);
                    }
                    // Call function defined in utils.h, at global scope
                    ::generateTone(m_tone, m_format, m_buffer);
                    m_dataLength = m_bufferLength;
                    emit dataLengthChanged(dataLength());
                    emit bufferChanged(0, m_dataLength, m_buffer);
                    setRecordPosition(m_bufferLength);
                    result = true;
                } else {
                    // Live recording from the selected audio input device.
                    emit bufferChanged(0, 0, m_buffer);
                    // Parented to this, so Qt's QObject ownership releases it.
                    m_audioInput = new QAudioInput(m_audioInputDevice, m_format, this);
                    m_audioInput->setNotifyInterval(NotifyIntervalMs);
                    result = true;
                }
            }
            // Output device is needed in every mode; parented to this.
            m_audioOutput = new QAudioOutput(m_audioOutputDevice, m_format, this);
            m_audioOutput->setNotifyInterval(NotifyIntervalMs);
        }
    } else {
        // No usable format could be negotiated — report per mode.
        if (m_file)
            emit errorMessage(tr("Audio format not supported"),
                              formatToString(m_format));
        else if (m_generateTone)
            emit errorMessage(tr("No suitable format found"), "");
        else
            emit errorMessage(tr("No common input / output format found"), "");
    }

    ENGINE_DEBUG << "Engine::initialize" << "m_bufferLength" << m_bufferLength;
    ENGINE_DEBUG << "Engine::initialize" << "m_dataLength" << m_dataLength;
    ENGINE_DEBUG << "Engine::initialize" << "format" << m_format;

    return result;
}
bool ofGstUtils::initGrabber(int w, int h, int framerate){ bpp = 3; if(!camData.bInited) get_video_devices(camData); if(camData.webcam_devices.size()==0){ ofLog(OF_LOG_ERROR,"ofGstUtils: no devices found exiting without initializing"); return false; } ofGstVideoFormat & format = selectFormat(w, h, framerate); ofLog(OF_LOG_NOTICE,"ofGstUtils: selected format: " + ofToString(format.width) + "x" + ofToString(format.height) + " " + format.mimetype + " framerate: " + ofToString(format.choosen_framerate.numerator) + "/" + ofToString(format.choosen_framerate.denominator)); bIsCamera = true; bHavePixelsChanged = false; width = w; height = h; gstData.loop = g_main_loop_new (NULL, FALSE); const char * decodebin = ""; if(format.mimetype != "video/x-raw-yuv" && format.mimetype != "video/x-raw-rgb") decodebin = "decodebin !"; const char * scale = ""; if( format.mimetype != "video/x-raw-rgb" ) scale = "ffmpegcolorspace !"; if( w!=format.width || h!=format.height ) scale = "ffvideoscale method=2 !"; string format_str_pipeline = string("%s name=video_source device=%s ! ") + "%s,width=%d,height=%d,framerate=%d/%d ! " + "%s %s " + "video/x-raw-rgb, width=%d, height=%d, depth=24 ! appsink name=sink caps=video/x-raw-rgb"; gchar* pipeline_string =g_strdup_printf ( format_str_pipeline.c_str(), camData.webcam_devices[deviceID].gstreamer_src.c_str(), camData.webcam_devices[deviceID].video_device.c_str(), format.mimetype.c_str(), format.width, format.height, format.choosen_framerate.numerator, format.choosen_framerate.denominator, decodebin, scale, w,h); ofLog(OF_LOG_NOTICE, "gstreamer pipeline: %s", pipeline_string); GError * error = NULL; gstPipeline = gst_parse_launch (pipeline_string, &error); gstSink = gst_bin_get_by_name(GST_BIN(gstPipeline),"sink"); gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true); if(startPipeline()){ play(); return true; }else{ return false; } }
/** * handle command without reply * @param cmd command in text representation * @param len length of cmd (including '\0') * @return 0 ok, -1 failure */ int CameraV4L2::handleCommand(char *cmd, int len){ int w; int h; enum v4l2_field fld; int ret=-1; unsigned int a,b,c; // backup double dblVal; ret = -1; // error on default PLAYER_MSG1(2,"Got command %s", cmd); switch(cmd[0]){ case 's': case 'S': if (sscanf(cmd+1," %d %d", &w, &h)!=2) break; fg2_stopCapture(fg); ret = tryPixelSettings(fg, w, h, this->v4l2_type_id, fieldType); fg2_startCapture(fg); break; case 'i': case 'I': // todo: rollback if needed if (cmd[2]>='0' && cmd[2]<='9') sscanf(cmd+2,"%d", &(this->source)); else { this->source = -1; } fg2_stopCapture(fg); ret = fg2_set_source(this->fg, this->source, cmd+2); fg2_startCapture(fg); break; case 'n': case 'N': a = this->norm; b = this->width; c = this->height; if (cmd[1]!=' ' || selectFormat(cmd+2)!=0) break; fg2_stopCapture(fg); ret = fg2_set_source_norm(this->fg, this->norm); ret |= tryPixelSettings(fg, this->width, this->height, this->v4l2_type_id, fieldType); if (ret!=0) { this->norm = a; this->width = b; this->height = c; // known working? 
fg2_set_source_norm(this->fg, this->norm); tryPixelSettings(fg, this->width, this->height, this->v4l2_type_id, fieldType); } fg2_startCapture(fg); break; case 'm': case 'M': a = this->data.format; b = this->depth; c = this->v4l2_type_id; if (cmd[1]!=' ' || selectFormat(cmd+2)!=0) break; fg2_stopCapture(fg); ret = tryPixelSettings(fg, this->width, this->height, this->v4l2_type_id, fieldType); if (ret!=0) { this->data.format = a; this->depth = b; this->v4l2_type_id = c; ret = tryPixelSettings(fg, this->width, this->height, this->v4l2_type_id, fieldType); } fg2_startCapture(fg); break; case 'w': case 'W': // todo: rollback if needed if (cmd[2]>='0' && cmd[2]<='9'){ sscanf(cmd+2,"%d", &(this->flip_rb)); ret = 0; } break; case 'f': case 'F': fld = fieldType; if (cmd[1]!=' ' || selectField(cmd+2)!=0) break; fg2_stopCapture(fg); ret = tryPixelSettings(fg, this->width, this->height, this->v4l2_type_id, fieldType); fg2_startCapture(fg); if (ret!=0) { fieldType = fld; } break; case 'c': case 'C': if (cmd[1] == 'i' || cmd[1] == 'I') { if (sscanf(cmd+2, "%lf %u", &dblVal, &a)!=2) break; ret = fg2_setControlValueI(fg, a, dblVal); break; } for(w=2; w<len-1; w++) if (cmd[w]==' ' || cmd[w+1]=='\0') break; if (sscanf(cmd+2,"%lf",&dblVal) != 1) break; ret = fg2_setControlValue(fg, cmd+w+1, dblVal); break; default: PLAYER_WARN1("Unknown command %s",cmd); break; } return ret; }
////////////////////////////////////////////////////////////////////////////////
// Constructor
//
// Reads the driver configuration from the config file section: device path,
// input source (numeric index or channel name), video norm, capture size,
// pixel format, field type, red/blue swap flag, FPS display level, and an
// optional list of (value, name) control pairs. Falls back to the first
// entry of the normy/formaty/fieldy tables when a configured name is not
// recognized. Finally snapshots the resulting settings into the back_*
// members so they can be restored later.
CameraV4L2::CameraV4L2( ConfigFile* cf, int section)
  : Driver(cf, section, PLAYER_CAMERA_CODE, PLAYER_ALL_MODE,
           sizeof(player_camera_data_t), 128, 10, 10)
{
  showFPS = 0;
  sourceCh = 0;
  const char *schary;
  // Camera device defaults to /dev/video0 (norm defaults to PAL below)
  this->device = cf->ReadString(section, "dev_file", "/dev/video0");
  // Input source: a leading digit selects by index, otherwise by channel name
  this->sourceCh = NULL;
  schary = cf->ReadString(section, "input", NULL);
  if (schary!=NULL && schary[0]>='0' && schary[0]<='9')
    this->source = cf->ReadInt(section, "input", 0);
  else {
    this->source = -1;          // -1: resolve by channel name instead
    this->sourceCh = schary;
  }
  // Video norm (e.g. PAL or NTSC); fall back to the first known norm
  schary = cf->ReadString(section, "norm", "pal");
  if (selectNorm(schary)!=0)
    selectNorm(normy[0].name);
  // Capture size; defaults come from the values set by selectNorm() above
  this->width = cf->ReadTupleInt(section, "size", 0, this->width);
  this->height = cf->ReadTupleInt(section, "size", 1, this->height);
  // Palette type; fall back to the first known format
  schary = cf->ReadString(section, "mode", "RGB24");
  if (selectFormat(schary)!=0)
    selectFormat(formaty[0].name);
  // Field type; fall back to the first known field
  schary = cf->ReadString(section, "field", "ANY");
  if (selectField(schary)!=0)
    selectField(fieldy[0].name);
  flip_rb = cf->ReadInt(section, "swap_rb", 0);
  showFPS = cf->ReadInt(section, "show_fps", -1);
  if (showFPS>9) showFPS = 9;   // clamp verbosity level
  //printf("#controls: %d", cf->GetTupleCount(section, "controls") );
  // "controls" is a flat tuple of (value, name) pairs
  numOfCtls = cf->GetTupleCount(section, "controls")/2;
  if (numOfCtls>0){
    int i;
    double k=0;
    ctlNames = new const char*[numOfCtls];
    ctlVals = new double[numOfCtls];
    for(i=0; i<numOfCtls; i++){
      ctlNames[i] = cf->ReadTupleString(section, "controls", i*2+1, "null");
      k = cf->ReadTupleFloat(section, "controls", i*2, 200.0);
      if (k>1.0){
        // Invalid value: truncate the control list at this entry and warn
        numOfCtls = i;
        PLAYER_WARN1("Wrong value format for control %s, need val <= 1.0", ctlNames[i]);
      } else {
        ctlVals[i]=(double)k;   // store normalized control value (<= 1.0)
      }
    }
  }
  // Snapshot the configured settings so they can be restored later
  back_source = source;
  back_norm = norm;
  back_depth = depth;
  back_width = width;
  back_height = height;
  back_fieldType = fieldType;
  back_v4l2_type_id = v4l2_type_id;
  puts("Camerav4l2: Driver object created");
  return;
}
bool ofGstVideoGrabber::initGrabber(int w, int h){ if(!camData.bInited) get_video_devices(camData); if(camData.webcam_devices.size()==0){ ofLog(OF_LOG_ERROR,"ofGstUtils: no devices found exiting without initializing"); return false; } ofGstVideoFormat & format = selectFormat(w, h, attemptFramerate); ofLog(OF_LOG_NOTICE,"ofGstUtils: selected device: " + camData.webcam_devices[deviceID].product_name); ofLog(OF_LOG_NOTICE,"ofGstUtils: selected format: " + ofToString(format.width) + "x" + ofToString(format.height) + " " + format.mimetype + " framerate: " + ofToString(format.choosen_framerate.numerator) + "/" + ofToString(format.choosen_framerate.denominator)); bIsCamera = true; const char * decodebin = ""; if(format.mimetype == "video/x-raw-bayer") decodebin = "bayer2rgb !"; else if(format.mimetype != "video/x-raw-yuv" && format.mimetype != "video/x-raw-rgb") decodebin = "decodebin2 !"; const char * scale = "ffmpegcolorspace "; if( w!=format.width || h!=format.height ) scale = "ffvideoscale method=2 !"; string format_str_pipeline = string("%s name=video_source device=%s ! ") + "%s,width=%d,height=%d,framerate=%d/%d ! " + "%s %s "; gchar* pipeline_string =g_strdup_printf ( format_str_pipeline.c_str(), camData.webcam_devices[deviceID].gstreamer_src.c_str(), camData.webcam_devices[deviceID].video_device.c_str(), format.mimetype.c_str(), format.width, format.height, format.choosen_framerate.numerator, format.choosen_framerate.denominator, decodebin, scale, w,h); int bpp; switch(internalPixelFormat){ case OF_PIXELS_MONO: bpp = 8; break; case OF_PIXELS_RGB: bpp = 24; break; case OF_PIXELS_RGBA: case OF_PIXELS_BGRA: bpp = 32; break; default: bpp=24; break; } if( videoUtils.setPipeline(pipeline_string,bpp,false,w,h) ){ videoUtils.play(); return true; }else{ return false; } }