//-------------------------------------------------------------------- void ofQuickTimeGrabber::videoSettings(void){ //--------------------------------- #ifdef OF_VIDEO_CAPTURE_QUICKTIME //--------------------------------- Rect curBounds, curVideoRect; ComponentResult err; // Get our current state err = SGGetChannelBounds (gVideoChannel, &curBounds); if (err != noErr){ ofLogError("ofQuickTimeGrabber") << "videoSettings(): couldn't get get channel bounds: ComponentResult " << err; return; } err = SGGetVideoRect (gVideoChannel, &curVideoRect); if (err != noErr){ ofLogError("ofQuickTimeGrabber") << "videoSettings(): couldn't get video rect: ComponentResult " << err; return; } // Pause err = SGPause (gSeqGrabber, true); if (err != noErr){ ofLogError("ofQuickTimeGrabber") << "videoSettings(): couldn't set pause: ComponentResult " << err; return; } #ifdef TARGET_OSX //load any saved camera settings from file loadSettings(); static SGModalFilterUPP gSeqGrabberModalFilterUPP = NewSGModalFilterUPP(SeqGrabberModalFilterUPP); ComponentResult result = SGSettingsDialog(gSeqGrabber, gVideoChannel, 0, nil, 0, gSeqGrabberModalFilterUPP, nil); if (result != noErr){ ofLogError("ofQuickTimeGrabber") << "videoSettings(): settings dialog error: ComponentResult " << err; return; } //save any changed settings to file saveSettings(); #else SGSettingsDialog(gSeqGrabber, gVideoChannel, 0, nil, seqGrabSettingsPreviewOnly, NULL, 0); #endif SGSetChannelBounds(gVideoChannel, &videoRect); SGPause (gSeqGrabber, false); //--------------------------------- #endif //--------------------------------- }
//--------------------------------------------------------------------- bool ofVideoGrabber::loadSettings(){ if (bGrabberInited != true || deviceName.length() == 0) return false; ComponentResult err; UserData mySGVideoSettings = NULL; // get the settings using the key "ofVideoSettings-the name of the device" string pref = "ofVideoSettings-"+deviceName; CFStringRef cameraString = CFStringCreateWithCString(kCFAllocatorDefault,pref.c_str(),kCFStringEncodingMacRoman); GetSettingsPreference(cameraString, &mySGVideoSettings); if (mySGVideoSettings){ Rect curBounds, curVideoRect; //we need to make sure the dimensions don't get effected //by our preferences // Get our current state err = SGGetChannelBounds (gVideoChannel, &curBounds); if (err != noErr){ ofLog(OF_LOG_ERROR, "Error in SGGetChannelBounds"); } err = SGGetVideoRect (gVideoChannel, &curVideoRect); if (err != noErr){ ofLog(OF_LOG_ERROR, "Error in SGGetVideoRect"); } // use the saved settings preference to configure the SGChannel err = SGSetChannelSettings(gSeqGrabber, gVideoChannel, mySGVideoSettings, 0); if ( err != noErr ) { ofLog(OF_LOG_ERROR, "Error applying stored settings %i", err); return false; } DisposeUserData(mySGVideoSettings); // Pause err = SGPause (gSeqGrabber, true); if (err != noErr){ ofLog(OF_LOG_ERROR, "Error in SGPause"); } SGSetChannelBounds(gVideoChannel, &videoRect); SGPause (gSeqGrabber, false); }else{ ofLog(OF_LOG_WARNING, "No camera settings to load"); return false; } return true; }
// ###################################################################### static void setVideoChannelBounds(SGChannel videoChannel, const Rect* scaledSourceBounds, const Rect* scaledVideoBounds) { // Notes: see Q&A 1250 // calculate the matrix to transform the // scaledSourceBounds to the source bounds Rect sourceBounds; SGGetSrcVideoBounds(videoChannel, &sourceBounds); MatrixRecord scaledSourceBoundsToSourceBounds; RectMatrix(&scaledSourceBoundsToSourceBounds, scaledSourceBounds, &sourceBounds); // apply the same transform to the // scaledVideoBounds to get the video bounds Rect videoBounds = *scaledVideoBounds; TransformRect(&scaledSourceBoundsToSourceBounds, &videoBounds, 0); if (noErr != SGSetVideoRect(videoChannel, &videoBounds)) { // some video digitizers may only be able to capture full frame // and will return qtParamErr or possibly digiUnimpErr if they // can't handle working with less than full frame SGSetVideoRect(videoChannel, &sourceBounds); } // the channel bounds is scaledVideoBounds offset to (0, 0) Rect channelBounds = *scaledVideoBounds; OffsetRect(&channelBounds, -channelBounds.left, -channelBounds.top); // Note: SGSetChannelBounds merely allows the client to specify it's // preferred bounds. The actual bounds returned by the vDig in the // image description may be different if (noErr != SGSetChannelBounds(videoChannel, &channelBounds)) LFATAL("SGSetChannelBounds() failed"); }
// Create and configure a sequence-grabber video channel for recording.
// No movie file is created; captured frames are delivered to dataProc.
//
//   seqGrab   - opened sequence grabber component instance
//   dataProc  - SGDataUPP callback invoked with captured data
//   drawPort  - graphics port required by SGSetGWorld (not actually drawn to)
//   theRect   - desired channel bounds
//   sgChannel - receives the newly created video channel
//   refCon    - user value passed through to dataProc
//
// Returns noErr on success. On any failure the BailErr macro (defined
// elsewhere) jumps to "bail" and the first failing status is returned.
OSErr CreateNewSGChannelForRecording(ComponentInstance seqGrab, SGDataUPP dataProc, CGrafPtr drawPort, Rect *theRect, SGChannel *sgChannel, long refCon)
{
    OSErr err = noErr;

    BailErr((err = SGInitialize(seqGrab)));

    // tell it we're not making a movie
    BailErr((err = SGSetDataRef(seqGrab,0,0,seqGrabDontMakeMovie)));
    // It wants a port, even if its not drawing to it
    BailErr((err = SGSetGWorld(seqGrab, drawPort, GetMainDevice())));
    BailErr((err = SGNewChannel(seqGrab, VideoMediaType, sgChannel)));
    // let the user configure the video channel
    BailErr((err = SGSettingsDialog(seqGrab, *sgChannel, 0, nil, 0, nil, 0)));
    BailErr((err = SGSetChannelBounds(*sgChannel, theRect)));
    // set usage for new video channel to avoid playthrough
    BailErr((err = SGSetChannelUsage(*sgChannel, seqGrabRecord ))); //note we don't set seqGrabPlayDuringRecord
    // install the data callback, then start capturing immediately
    BailErr((err = SGSetDataProc(seqGrab, dataProc, refCon)));
    BailErr((err = SGStartRecord(seqGrab)));

bail:
    return err;
}
// Create the Sequence Grabber Video Channel
// Opens a video channel on m_gSeqGrabber, points it at the configured
// device/input, forces the PAL input standard on the video digitizer, and
// sizes the channel to the digitizer's active source rect. The current
// GWorld is temporarily switched to the offscreen m_gw during setup and
// restored before returning.
void QuicktimeLiveImageStream::createSequenceGrabberVideoChannel()
{
    // Check capability and setting of Sequence Grabber
    GDHandle origDevice;
    CGrafPtr origPort;
    // Create GWorld
    GetGWorld (&origPort, &origDevice);
    SetGWorld (m_gw, NULL); // set current graphics port to offscreen
    // Setup
    // Get a video channel
    ComponentResult result = SGNewChannel (m_gSeqGrabber, VideoMediaType, &m_gVideoChannel);
    if ((m_gVideoChannel != nil) && (result == noErr))
    {
        result = SGInitChannel(m_gVideoChannel, m_gSeqGrabber);
        Rect gActiveVideoRect;
        // Usage
        // NOTE(review): from here on, result/vid_err are overwritten by each
        // call and never checked, so failures in these setup steps are silent.
        if (g_s_use_sg_record)
            result = SGSetChannelUsage (m_gVideoChannel, seqGrabRecord | seqGrabLowLatencyCapture);
        else
        {
            result = SGSetChannelUsage (m_gVideoChannel, seqGrabPreview);
        }
        // result = SGSetUseScreenBuffer(m_gVideoChannel, FALSE);
        // Set
        OSG_DEBUG << "Setting up vdig from input prefs" << std::endl;
        result = SGSetChannelDevice     ( m_gVideoChannel, m_videoDeviceIDStr);
        result = SGSetChannelDeviceInput( m_gVideoChannel, m_videoDeviceInputID);
        // result = SGSetChannelPlayFlags ( m_gVideoChannel, channelPlayFast | channelPlayHighQuality | channelPlayAllData);
        result = SGSetChannelPlayFlags  ( m_gVideoChannel, channelPlayFast );
        // Talk to the video digitizer component directly to force PAL input
        VideoDigitizerComponent vdig = SGGetVideoDigitizerComponent(m_gVideoChannel);
        VideoDigitizerError vid_err;
        vid_err = VDSetInputStandard (vdig, palIn);
        OSG_DEBUG << "Setup vdig from input prefs:" << std::endl;
        print_video_component_capability(vdig);
        // Tell the sequence grabber the digitizer state changed behind its
        // back, then size the channel to the digitizer's active source rect
        result = SGVideoDigitizerChanged( m_gVideoChannel);
        result = SGGetSrcVideoBounds    ( m_gVideoChannel, &gActiveVideoRect);
        result = SGSetChannelBounds     ( m_gVideoChannel, &gActiveVideoRect);
        result = SGChangedSource        (m_gSeqGrabber, m_gVideoChannel);
        Fixed frame_rate;
        result = SGGetFrameRate (m_gVideoChannel, &frame_rate);
        // NOTE(review): SGSetFrameRate takes a Fixed; the raw integer 100 is
        // ~0.0015 fps in Fixed terms. Confirm whether IntToFixed(100) (or the
        // frame_rate just queried) was intended here.
        result = SGSetFrameRate (m_gVideoChannel, 100);
        //
        // Sound
        /*
        long sound_id;
        Str255 sound_driver_name;
        char* sound_driver_name_cstr;
        vid_err = VDGetSoundInputSource(vdig, (long)m_videoDeviceInputID, &sound_id);
        vid_err = VDGetSoundInputDriver(vdig, sound_driver_name);
        sound_driver_name_cstr = pstr_printable(sound_driver_name);
        OSG_DEBUG << "vdig sound driver name :" << sound_driver_name_cstr << std::endl;
        OSG_DEBUG << "vdig sound driver id :" << sound_id << std::endl;
        */
    }
    else
    {
        OSG_FATAL << "Could not create SGNewChannel for Video Channel" << std::endl;
    }
    // Set GWorld back
    SetGWorld(origPort, origDevice);
}
// Initialize the camera: open the default sequence grabber component,
// create a video channel sized to width x height, and wrap a freshly
// allocated pixel buffer in a GWorld that the grabber previews into.
//
//   width/height - requested capture size in pixels
//   colour       - true for 24-bit RGB, false for 8-bit grayscale
//
// Returns false if no camera is selected or a required setup step fails.
bool macsgCamera::initCamera(int width, int height, bool colour)
{
    if (cameraID < 0) return false;

    this->width = width;
    this->height = height;
    this->colour = colour;
    this->fps = 30;

    // bytes per pixel for the chosen depth; rowlength is the GWorld stride
    bytes = (colour?3:1);
    int rowlength= width*bytes;

    // Pick the GWorld pixel format matching the requested depth
    // (note: this is a switch over a bool — false = gray, true = RGB)
    switch (colour) {
        case false: {
            pixelFormat = k8IndexedGrayPixelFormat;
            //pixelFormat = kYVYU422PixelFormat;
            break;
        }
        case true: {
            pixelFormat = k24RGBPixelFormat;
            break;
        }
    }

    OSErr result;
    // QuickDraw Rect is {top, left, bottom, right}
    Rect srcRect = {0,0, height, width};

    sg = OpenDefaultComponent(SeqGrabComponentType, 0);
    if(sg==NULL){
        fprintf(stderr, "could not open default component\n");
    }
    // NOTE(review): the failures below are only logged (and sg==NULL does not
    // return), so execution continues with a half-initialized grabber; also
    // none of the early 'return false' paths close the opened component.
    result = SGInitialize(sg);
    if(result!=noErr){
        fprintf(stdout, "could not initialize SG\n");
    }
    result = SGSetDataRef(sg, 0, 0, seqGrabDontMakeMovie);
    if (result != noErr){
        fprintf(stdout, "dataref failed\n");
    }

    result = SGNewChannel(sg, VideoMediaType, &vc);
    if(result!=noErr){
        //fprintf(stdout, "could not make new SG channnel\n");
        return false;
    }

    // result = SGSettingsDialog ( sg, vc ,0 ,NULL ,seqGrabSettingsPreviewOnly,NULL,0);
    // if(result!=noErr){
    //     fprintf(stdout, "could not get settings from dialog\n");
    // }

    result = SGSetChannelBounds(vc, &srcRect);
    if(result!=noErr){
        fprintf(stdout, "could not set SG ChannelBounds\n");
    }

    /*result = SGSetFrameRate (vc, fps);
    if(result!=noErr){
        fprintf(stdout, "could not set SG FrameRate\n");
    }*/

    result = SGSetChannelUsage(vc, seqGrabPreview);
    if(result!=noErr){
        fprintf(stdout, "could not set SG ChannelUsage\n");
    }

    result = SGSetChannelPlayFlags(vc, channelPlayAllData);
    if(result!=noErr){
        fprintf(stdout, "could not set SG AllData\n");
    };

    // Frame buffer the grabber renders into; wrapped by srcGWorld below
    buffer = new unsigned char[width*height*bytes];

    result = QTNewGWorldFromPtr (&srcGWorld, pixelFormat, &srcRect, NULL, NULL, 0, buffer, rowlength);
    if (result!= noErr)
    {
        fprintf(stdout, "%d error at QTNewGWorldFromPtr\n", result);
        delete []buffer;
        buffer = NULL;
        return false;
    }

    if (srcGWorld == NULL)
    {
        fprintf(stdout, "Could not allocate off screen\n");
        delete []buffer;
        buffer = NULL;
        return false;
    }

    result = SGSetGWorld(sg,(CGrafPtr)srcGWorld, NULL);
    if (result != noErr)
    {
        fprintf(stdout, "Could not set SGSetGWorld\n");
        delete []buffer;
        buffer = NULL;
        return false;
    }

    // prepare for preview only (no record); failure is non-fatal here
    result = SGPrepare(sg, TRUE, FALSE);
    if (result != noErr)
    {
        fprintf(stderr, "SGPrepare Preview failed\n");
    }

    // secondary buffer handed out to callers
    pbuffer = new unsigned char[width*height*bytes];

    return true;
}
//--------------------------------------------------------------------
// Initialize the capture backend compiled into this build (QuickTime,
// DirectShow, unicap, GStreamer or V4L) at the requested size, allocate
// the RGB pixel buffer, and optionally create the preview texture.
// Returns true when a device was opened and the grabber is ready.
bool ofVideoGrabber::initGrabber(int w, int h, bool setUseTexture){

	bUseTexture = setUseTexture;

	//---------------------------------
	#ifdef OF_VIDEO_CAPTURE_QUICKTIME
	//---------------------------------

		//---------------------------------- 1 - open the sequence grabber
		if( !qtInitSeqGrabber() ){
			ofLog(OF_LOG_ERROR, "error: unable to initialize the seq grabber");
			return false;
		}

		//---------------------------------- 2 - set the dimensions
		width 		= w;
		height 		= h;

		MacSetRect(&videoRect, 0, 0, width, height);

		//---------------------------------- 3 - buffer allocation
		// Create a buffer big enough to hold the video data,
		// make sure the pointer is 32-byte aligned.
		// also the rgb image that people will grab
		// (capture is 32-bit ARGB; pixels holds the 24-bit RGB copy)
		offscreenGWorldPixels 	= (unsigned char*)malloc(4 * width * height + 32);
		pixels					= new unsigned char[width*height*3];
		QTNewGWorldFromPtr (&videogworld, k32ARGBPixelFormat, &videoRect, NULL, NULL, 0, offscreenGWorldPixels, 4 * width);
		LockPixels(GetGWorldPixMap(videogworld));
		SetGWorld (videogworld, NULL);
		SGSetGWorld(gSeqGrabber, videogworld, nil);

		//---------------------------------- 4 - device selection
		bool didWeChooseADevice = bChooseDevice;
		bool deviceIsSelected	=  false;

		//if we have a device selected then try first to setup
		//that device
		if(didWeChooseADevice){
			deviceIsSelected = qtSelectDevice(deviceID, true);
			if(!deviceIsSelected && bVerbose) ofLog(OF_LOG_WARNING, "unable to open device[%i] - will attempt other devices", deviceID);
		}

		//if we couldn't select our required device
		//or we aren't specifiying a device to setup
		//then lets try to setup ANY device!
		if(deviceIsSelected == false){
			//lets list available devices
			listDevices();
			setDeviceID(0);
			deviceIsSelected = qtSelectDevice(deviceID, false);
		}

		//if we still haven't been able to setup a device
		//we should error and stop!
		if( deviceIsSelected == false){
			goto bail;
		}

		//---------------------------------- 5 - final initialization steps
		OSStatus err;

		err = SGSetChannelUsage(gVideoChannel,seqGrabPreview);
		if ( err != noErr ) goto bail;

		err = SGSetChannelBounds(gVideoChannel, &videoRect);
		if ( err != noErr ) goto bail;

		err = SGPrepare(gSeqGrabber, true, false);  //theo swapped so preview is true and capture is false
		if ( err != noErr ) goto bail;

		err = SGStartPreview(gSeqGrabber);
		if ( err != noErr ) goto bail;

		bGrabberInited = true;
		loadSettings();

		ofLog(OF_LOG_NOTICE,"end setup ofVideoGrabber");
		ofLog(OF_LOG_NOTICE,"-------------------------------------\n");

		//---------------------------------- 6 - setup texture if needed
		if (bUseTexture){
			// create the texture, set the pixels to black and
			// upload them to the texture (so at least we see nothing black the callback)
			tex.allocate(width,height,GL_RGB);
			memset(pixels, 0, width*height*3);
			tex.loadData(pixels, width, height, GL_RGB);
		}

		// we are done
		return true;

		//--------------------- (bail) something's wrong -----
		// NOTE(review): the bail path does not free offscreenGWorldPixels /
		// pixels allocated in step 3 — confirm whether the destructor or
		// qtCloseSeqGrabber() reclaims them.
		bail:

			ofLog(OF_LOG_ERROR, "***** ofVideoGrabber error *****");
			ofLog(OF_LOG_ERROR, "-------------------------------------\n");

			//if we don't close this - it messes up the next device!
			if(bSgInited) qtCloseSeqGrabber();

			bGrabberInited = false;
			return false;

	//---------------------------------
	#endif
	//---------------------------------

	//---------------------------------
	#ifdef OF_VIDEO_CAPTURE_DIRECTSHOW
	//---------------------------------

		if (bChooseDevice){
			device = deviceID;
			ofLog(OF_LOG_NOTICE, "choosing %i", deviceID);
		} else {
			device = 0;
		}

		width = w;
		height = h;
		bGrabberInited = false;

		bool bOk = VI.setupDevice(device, width, height);

		// the device may deliver a different size than requested; remember
		// what we asked for so we can resize frames back to it
		int ourRequestedWidth = width;
		int ourRequestedHeight = height;

		if (bOk == true){
			bGrabberInited = true;
			width 	= VI.getWidth(device);
			height 	= VI.getHeight(device);

			if (width == ourRequestedWidth && height == ourRequestedHeight){
				bDoWeNeedToResize = false;
			} else {
				bDoWeNeedToResize = true;
				width = ourRequestedWidth;
				height = ourRequestedHeight;
			}

			pixels	= new unsigned char[width * height * 3];

			if (bUseTexture){
				// create the texture, set the pixels to black and
				// upload them to the texture (so at least we see nothing black the callback)
				tex.allocate(width,height,GL_RGB);
				memset(pixels, 0, width*height*3);
				tex.loadData(pixels, width, height, GL_RGB);
			}

			return true;
		} else {
			ofLog(OF_LOG_ERROR, "error allocating a video device");
			ofLog(OF_LOG_ERROR, "please check your camera with AMCAP or other software");
			bGrabberInited = false;
			return false;
		}

	//---------------------------------
	#endif
	//---------------------------------

	//---------------------------------
	#ifdef OF_VIDEO_CAPTURE_UNICAP
	//--------------------------------

		if( !bGrabberInited ){
			if ( !bChooseDevice ){
				deviceID = 0;
			}

			width 	= w;
			height 	= h;
			pixels	= new unsigned char[width * height * 3];

			if (bUseTexture){
				// create the texture, set the pixels to black and
				// upload them to the texture (so at least we see nothing black the callback)
				tex.allocate(width,height,GL_RGB);
				memset(pixels, 0, width*height*3);
				tex.loadData(pixels, width, height, GL_RGB);
			}

			bGrabberInited = ucGrabber.open_device (deviceID);
			if( bGrabberInited ){
				ofLog(OF_LOG_NOTICE, "choosing device %i: %s", deviceID,ucGrabber.device_identifier());
				ucGrabber.set_format(w,h);
				ucGrabber.start_capture();
			}
		}
		return bGrabberInited;

	//---------------------------------
	#endif
	//---------------------------------

	//---------------------------------
	#ifdef OF_VIDEO_CAPTURE_GSTREAMER
	//--------------------------------

		if(gstUtils.initGrabber(w,h)){
			if ( !bChooseDevice ){
				deviceID = 0;
			}
			width 	= w;
			height 	= h;
			if (bUseTexture){
				// create the texture, set the pixels to black and
				// upload them to the texture (so at least we see nothing black the callback)
				tex.allocate(width,height,GL_RGB);
				tex.loadData(gstUtils.getPixels(), width, height, GL_RGB);
			}
			bGrabberInited = true;
			ofLog(OF_LOG_VERBOSE, "ofVideoGrabber: initied");
		}else{
			bGrabberInited = false;
			ofLog(OF_LOG_ERROR, "ofVideoGrabber: couldn't init");
		}
		return bGrabberInited;

	//---------------------------------
	#endif
	//---------------------------------

	//---------------------------------
	#ifdef OF_VIDEO_CAPTURE_V4L
	//--------------------------------

		if (bChooseDevice){
			device = deviceID;
		} else {
			device = 0;
		}
		sprintf(dev_name, "/dev/video%i", device);
		ofLog(OF_LOG_NOTICE, "choosing device %s",dev_name);

		bool bOk = initV4L(w, h, dev_name);

		if (bOk == true){
			bV4LGrabberInited = true;
			width 	= getV4L_Width();
			height 	= getV4L_Height();
			pixels	= new unsigned char[width * height * 3];
			if (bUseTexture){
				// create the texture, set the pixels to black and
				// upload them to the texture (so at least we see nothing black the callback)
				tex.allocate(width,height,GL_RGB);
				//memset(pixels, 0, width*height*3);
				//tex.loadData(pixels, width, height, GL_RGB);
			}
			ofLog(OF_LOG_NOTICE, "success allocating a video device ");
			return true;
		} else {
			ofLog(OF_LOG_ERROR, "error allocating a video device");
			ofLog(OF_LOG_ERROR, "please check your camera and verify that your driver is correctly installed.");
			return false;
		}
	//---------------------------------
	//---------------------------------
	#endif
	//---------------------------------

}
//-------------------------------------------------------------------- void ofVideoGrabber::videoSettings(void){ //--------------------------------- #ifdef OF_VIDEO_CAPTURE_QUICKTIME //--------------------------------- Rect curBounds, curVideoRect; ComponentResult err; // Get our current state err = SGGetChannelBounds (gVideoChannel, &curBounds); if (err != noErr){ ofLog(OF_LOG_ERROR, "Error in SGGetChannelBounds"); return; } err = SGGetVideoRect (gVideoChannel, &curVideoRect); if (err != noErr){ ofLog(OF_LOG_ERROR, "Error in SGGetVideoRect"); return; } // Pause err = SGPause (gSeqGrabber, true); if (err != noErr){ ofLog(OF_LOG_ERROR, "Error in SGPause"); return; } #ifdef TARGET_OSX //load any saved camera settings from file loadSettings(); static SGModalFilterUPP gSeqGrabberModalFilterUPP = NewSGModalFilterUPP(SeqGrabberModalFilterUPP); ComponentResult result = SGSettingsDialog(gSeqGrabber, gVideoChannel, 0, nil, seqGrabSettingsPreviewOnly, gSeqGrabberModalFilterUPP, nil); if (result != noErr){ ofLog(OF_LOG_ERROR, "error in dialogue"); return; } //save any changed settings to file saveSettings(); #else SGSettingsDialog(gSeqGrabber, gVideoChannel, 0, nil, seqGrabSettingsPreviewOnly, NULL, 0); #endif SGSetChannelBounds(gVideoChannel, &videoRect); SGPause (gSeqGrabber, false); //--------------------------------- #endif //--------------------------------- //--------------------------------- #ifdef OF_VIDEO_CAPTURE_DIRECTSHOW //--------------------------------- if (bGrabberInited == true) VI.showSettingsWindow(device); //--------------------------------- #endif //--------------------------------- //--------------------------------- #ifdef OF_VIDEO_CAPTURE_UNICAP //-------------------------------- ucGrabber.queryUC_imageProperties(); //--------------------------------- #endif //--------------------------------- //--------------------------------- #ifdef OF_VIDEO_CAPTURE_V4L //-------------------------------- queryV4L_imageProperties(); 
//--------------------------------- #endif //--------------------------------- }
//--------------------------------------------------------------------
// Initialize the QuickTime sequence grabber at w x h: open the grabber,
// allocate pixel storage and a GWorld wrapping it, select a capture
// device, install the grab-complete bottleneck callback, and start the
// preview. Returns true on success; on failure the grabber is closed so
// the next device can be opened cleanly.
bool ofQuickTimeGrabber::initGrabber(int w, int h){

	//---------------------------------
	#ifdef OF_VIDEO_CAPTURE_QUICKTIME
	//---------------------------------

		//---------------------------------- 1 - open the sequence grabber
		if( !qtInitSeqGrabber() ){
			ofLogError("ofQuickTimeGrabber") << "initGrabber(): unable to initialize the seq grabber";
			return false;
		}

		//---------------------------------- 2 - set the dimensions
		//width 		= w;
		//height 		= h;

		MacSetRect(&videoRect, 0, 0, w, h);

		//---------------------------------- 3 - buffer allocation
		// Create a buffer big enough to hold the video data,
		// make sure the pointer is 32-byte aligned.
		// also the rgb image that people will grab
		// On big-endian OSX the GWorld captures 32-bit ARGB into a scratch
		// buffer; everywhere else it renders 24-bit RGB directly into pixels.
		offscreenGWorldPixels = (unsigned char*)malloc(4 * w * h + 32);
		pixels.allocate(w, h, OF_IMAGE_COLOR);

		#if defined(TARGET_OSX) && defined(__BIG_ENDIAN__)
			QTNewGWorldFromPtr (&(videogworld), k32ARGBPixelFormat, &(videoRect), NULL, NULL, 0, (offscreenGWorldPixels), 4 * w);
		#else
			QTNewGWorldFromPtr (&(videogworld), k24RGBPixelFormat, &(videoRect), NULL, NULL, 0, (pixels.getPixels()), 3 * w);
		#endif

		LockPixels(GetGWorldPixMap(videogworld));
		SetGWorld (videogworld, NULL);
		SGSetGWorld(gSeqGrabber, videogworld, nil);

		//---------------------------------- 4 - device selection
		bool didWeChooseADevice = bChooseDevice;
		bool deviceIsSelected	=  false;

		//if we have a device selected then try first to setup
		//that device
		if(didWeChooseADevice){
			deviceIsSelected = qtSelectDevice(deviceID, true);
			if(!deviceIsSelected && bVerbose) ofLogError("ofQuickTimeGrabber") << "initGrabber(): unable to open device[" << deviceID << "], will attempt other devices";
		}

		//if we couldn't select our required device
		//or we aren't specifiying a device to setup
		//then lets try to setup ANY device!
		if(deviceIsSelected == false){
			//lets list available devices
			listDevices();
			setDeviceID(0);
			deviceIsSelected = qtSelectDevice(deviceID, false);
		}

		//if we still haven't been able to setup a device
		//we should error and stop!
		if( deviceIsSelected == false){
			goto bail;
		}

		//---------------------------------- 5 - final initialization steps
		OSStatus err;

		err = SGSetChannelUsage(gVideoChannel,seqGrabPreview);
		if ( err != noErr ) goto bail;

		//----------------- callback method for notifying new frame
		// The refCon carries the address of bHavePixelsChanged so the
		// bottleneck proc can flag new frames.
		// NOTE(review): the pointer is cast to long — only safe where
		// sizeof(long) >= sizeof(void*) (true on LP64 macOS).
		err = SGSetChannelRefCon(gVideoChannel, (long)&bHavePixelsChanged );
		if(!err) {

			VideoBottles vb;
			/* get the current bottlenecks */
			vb.procCount = 9;
			err = SGGetVideoBottlenecks(gVideoChannel, &vb);
			if (!err) {
				/* add our GrabFrameComplete function */
				myGrabCompleteProc = NewSGGrabCompleteBottleUPP(frameIsGrabbedProc);
				vb.grabCompleteProc = myGrabCompleteProc;

				err = SGSetVideoBottlenecks(gVideoChannel, &vb);
			}
		}

		err = SGSetChannelBounds(gVideoChannel, &videoRect);
		if ( err != noErr ) goto bail;

		err = SGPrepare(gSeqGrabber, true, false);  //theo swapped so preview is true and capture is false
		if ( err != noErr ) goto bail;

		err = SGStartPreview(gSeqGrabber);
		if ( err != noErr ) goto bail;

		bGrabberInited = true;
		loadSettings();

		if( attemptFramerate >= 0 ){
			err = SGSetFrameRate(gVideoChannel, IntToFixed(attemptFramerate) );
			if ( err != noErr ){
				ofLogError("ofQuickTimeGrabber") << "initGrabber: couldn't setting framerate to " << attemptFramerate << ": OSStatus " << err;
			}
		}

		ofLogNotice("ofQuickTimeGrabber") << " inited grabbed ";
		ofLogNotice("ofQuickTimeGrabber") << "-------------------------------------";

		// we are done
		return true;

		//--------------------- (bail) something's wrong -----
		bail:

			ofLogError("ofQuickTimeGrabber") << "***** ofQuickTimeGrabber error *****";
			ofLogError("ofQuickTimeGrabber") << "------------------------------------";

			//if we don't close this - it messes up the next device!
			if(bSgInited) qtCloseSeqGrabber();

			bGrabberInited = false;
			return false;

	//---------------------------------
	#else
	//---------------------------------

		return false;

	//---------------------------------
	#endif
	//---------------------------------

}
/* Reconfigure the capture pipeline for newly negotiated caps: allocate a
 * 2vuy (k422YpCbCr8) GWorld at the requested size and point the sequence
 * grabber's video channel at it. Returns TRUE on success; on failure the
 * partially created GWorld is detached and disposed. */
static gboolean
gst_osx_video_src_set_caps (GstBaseSrc * src, GstCaps * caps)
{
  GstOSXVideoSrc *self = GST_OSX_VIDEO_SRC (src);
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  gint width, height, framerate_num, framerate_denom;
  float fps;
  ComponentResult err;

  GST_DEBUG_OBJECT (src, "%s", G_STRFUNC);

  /* the grabber must have been opened in start() first */
  if (!self->seq_grab)
    return FALSE;

  /* caps are fixed here, so these fields are guaranteed present */
  gst_structure_get_int (structure, "width", &width);
  gst_structure_get_int (structure, "height", &height);
  gst_structure_get_fraction (structure, "framerate", &framerate_num,
      &framerate_denom);

  fps = (float) framerate_num / framerate_denom;

  GST_DEBUG_OBJECT (src, "changing caps to %dx%d@%f", width, height, fps);

  SetRect (&self->rect, 0, 0, width, height);

  err = QTNewGWorld (&self->world, k422YpCbCr8PixelFormat, &self->rect, 0,
      NULL, 0);
  if (err != noErr) {
    GST_ERROR_OBJECT (self, "QTNewGWorld returned %d", (int) err);
    goto fail;
  }

  /* keep the pixmap from moving while the grabber writes into it */
  if (!LockPixels (GetPortPixMap (self->world))) {
    GST_ERROR_OBJECT (self, "LockPixels failed");
    goto fail;
  }

  err = SGSetGWorld (self->seq_grab, self->world, NULL);
  if (err != noErr) {
    GST_ERROR_OBJECT (self, "SGSetGWorld returned %d", (int) err);
    goto fail;
  }

  err = SGSetChannelBounds (self->video_chan, &self->rect);
  if (err != noErr) {
    GST_ERROR_OBJECT (self, "SGSetChannelBounds returned %d", (int) err);
    goto fail;
  }

  /* ###: if we ever support choosing framerates, do something with this */
  /*err = SGSetFrameRate (self->video_chan, FloatToFixed(fps));
  if (err != noErr) {
    GST_ERROR_OBJECT (self, "SGSetFrameRate returned %d", (int) err);
    goto fail;
  }
  */

  return TRUE;

fail:
  /* detach and dispose whatever part of the world got set up */
  if (self->world) {
    SGSetGWorld (self->seq_grab, NULL, NULL);
    DisposeGWorld (self->world);
    self->world = NULL;
  }

  return FALSE;
}
void pix_videoDarwin :: InitSeqGrabber() { OSErr anErr; Rect m_srcRect = {0,0, m_vidYSize, m_vidXSize}; SGDeviceList devices; short deviceIndex,inputIndex; short deviceCount = 0; SGDeviceInputList theSGInputList = NULL; bool showInputsAsDevices; // UserData *uD; /* int num_components = 0; Component c = 0; ComponentDescription cd; cd.componentType = SeqGrabComponentType; cd.componentSubType = 0; cd.componentManufacturer = 0; cd.componentFlags = 0; cd.componentFlagsMask = 0; while((c = FindNextComponent(c, &cd)) != 0) { num_components++; } // add component c to the list. // post("number of SGcomponents: %d",num_components); */ m_sg = OpenDefaultComponent(SeqGrabComponentType, 0); if(m_sg==NULL){ error("could not open default component"); return; } anErr = SGInitialize(m_sg); if(anErr!=noErr){ error("could not initialize SG error %d",anErr); return; } anErr = SGSetDataRef(m_sg, 0, 0, seqGrabDontMakeMovie); if (anErr != noErr){ error("dataref failed with error %d",anErr); } anErr = SGNewChannel(m_sg, VideoMediaType, &m_vc); if(anErr!=noErr){ error("could not make new SG channnel error %d",anErr); return; } anErr = SGGetChannelDeviceList(m_vc, sgDeviceListIncludeInputs, &devices); if(anErr!=noErr){ error("could not get SG channnel Device List"); }else{ deviceCount = (*devices)->count; deviceIndex = (*devices)->selectedIndex; logpost(NULL, 3, "SG channnel Device List count %d index %d",deviceCount,deviceIndex); int i; for (i = 0; i < deviceCount; i++){ logpost(NULL, 3, "SG channnel Device List %.*s", (*devices)->entry[i].name[0], (*devices)->entry[i].name+1); } SGGetChannelDeviceAndInputNames(m_vc, NULL, NULL, &inputIndex); showInputsAsDevices = ((*devices)->entry[deviceIndex].flags) & sgDeviceNameFlagShowInputsAsDevices; theSGInputList = ((SGDeviceName *)(&((*devices)->entry[deviceIndex])))->inputs; //fugly //we should have device names in big ass undocumented structs //walk through the list //for (i = 0; i < deviceCount; i++){ for (i = 0; i < inputIndex; i++){ 
logpost(NULL, 3, "SG channnel Input Device List %d %.*s", i, (*theSGInputList)->entry[i].name[0], (*theSGInputList)->entry[i].name+1); } } //this call sets the input device if (m_inputDevice > 0 && m_inputDevice < deviceCount) //check that the device is not out of bounds //anErr = SGSetChannelDeviceInput(m_vc,m_inputDevice); logpost(NULL, 3, "SGSetChannelDevice trying %s", (*devices)->entry[m_inputDevice].name[0], (*devices)->entry[m_inputDevice].name+1); anErr = SGSetChannelDevice(m_vc, (*devices)->entry[m_inputDevice].name); if(anErr!=noErr) error("SGSetChannelDevice returned error %d",anErr); anErr = SGSetChannelDeviceInput(m_vc,m_inputDeviceChannel); if(anErr!=noErr) error("SGSetChannelDeviceInput returned error %d",anErr); /* //attempt to save SG settings to disk NewUserData(uD); SGGetSettings(m_sg,uD,0); short uDCount; uDCount = CountUserDataType(*uD,sgClipType); post("UserDataType count %d",uDCount); Handle myHandle; PutUserDataIntoHandle(*uD,myHandle); int myFile; myFile = open("/Users/lincoln/Documents/temp",O_CREAT | O_RDWR, 0600); write(myFile,myHandle,4096); close(myFile); */ //grab the VDIG info from the SGChannel m_vdig = SGGetVideoDigitizerComponent(m_vc); vdigErr = VDGetDigitizerInfo(m_vdig,&m_vdigInfo); //not sure if this is useful Str255 vdigName; memset(vdigName,0,255); vdigErr = VDGetInputName(m_vdig,m_inputDevice,vdigName); logpost(NULL, 3, "vdigName is %s",vdigName); // pascal string? 
Rect vdRect; vdigErr = VDGetDigitizerRect(m_vdig,&vdRect); logpost(NULL, 3, "digitizer rect is top %d bottom %d left %d right %d",vdRect.top,vdRect.bottom,vdRect.left,vdRect.right); vdigErr = VDGetActiveSrcRect(m_vdig,0,&vdRect); logpost(NULL, 3, "active src rect is top %d bottom %d left %d right %d",vdRect.top,vdRect.bottom,vdRect.left,vdRect.right); anErr = SGSetChannelBounds(m_vc, &m_srcRect); if(anErr!=noErr){ error("could not set SG ChannelBounds "); } anErr = SGSetVideoRect(m_vc, &m_srcRect); if(anErr!=noErr){ error("could not set SG Rect "); } anErr = SGSetChannelUsage(m_vc, seqGrabPreview); if(anErr!=noErr){ error("could not set SG ChannelUsage "); } switch (m_quality){ case 0: anErr = SGSetChannelPlayFlags(m_vc, channelPlayNormal); post("set SG NormalQuality"); break; case 1: anErr = SGSetChannelPlayFlags(m_vc, channelPlayHighQuality); post("set SG HighQuality"); break; case 2: anErr = SGSetChannelPlayFlags(m_vc, channelPlayFast); post("set SG FastQuality"); break; case 3: anErr = SGSetChannelPlayFlags(m_vc, channelPlayAllData); post("set SG PlayAlldata"); break; } if (m_colorspace==GL_BGRA_EXT){ m_pixBlock.image.xsize = m_vidXSize; m_pixBlock.image.ysize = m_vidYSize; m_pixBlock.image.setCsizeByFormat(GL_RGBA_GEM); m_pixBlock.image.reallocate(); m_rowBytes = m_vidXSize*4; anErr = QTNewGWorldFromPtr (&m_srcGWorld, k32ARGBPixelFormat, &m_srcRect, NULL, NULL, 0, m_pixBlock.image.data, m_rowBytes); post ("using RGB"); }else{ m_pixBlock.image.xsize = m_vidXSize; m_pixBlock.image.ysize = m_vidYSize; m_pixBlock.image.csize = 2; m_pixBlock.image.format = GL_YCBCR_422_APPLE; #ifdef __VEC__ m_pixBlock.image.type = GL_UNSIGNED_SHORT_8_8_REV_APPLE; #else m_pixBlock.image.type = GL_UNSIGNED_SHORT_8_8_APPLE; #endif m_pixBlock.image.reallocate(); m_rowBytes = m_vidXSize*2; anErr = QTNewGWorldFromPtr (&m_srcGWorld, // k422YpCbCr8CodecType, k422YpCbCr8PixelFormat, // '2vuy', // kComponentVideoUnsigned, &m_srcRect, NULL, NULL, 0, m_pixBlock.image.data, m_rowBytes); post 
("using YUV"); } if (anErr!= noErr) { error("%d error at QTNewGWorldFromPtr", anErr); return; } if (NULL == m_srcGWorld) { error("could not allocate off screen"); return; } SGSetGWorld(m_sg,(CGrafPtr)m_srcGWorld, NULL); SGStartPreview(m_sg); //moved to starttransfer? m_haveVideo = 1; }
/* Bring the sequence grabber up for recording: allocate a 24-bit BGR
 * offscreen GWorld sized to the requested frame, open the default grabber
 * component, create a video channel bound to that rect, install the frame
 * callback, and start recording.
 * Returns 0 on success, -1 on any failure; once partial state exists the
 * failure paths tear it down via v4m_close(). */
static int sequence_grabber_start(V4lState *s)
{
	Rect frame = {0, 0, s->vsize.height, s->vsize.width};
	int status;

	/* offscreen world that captured frames will be rendered into */
	status = QTNewGWorld(&(s->pgworld),   /* returned GWorld */
	                     k24BGRPixelFormat,
	                     &frame,          /* bounding rectangle */
	                     0,               /* color table */
	                     NULL,            /* graphic device handle */
	                     0);              /* flags */
	if (noErr != status)
		return -1;  /* nothing else allocated yet */

	/* pin the pixmap so the grabber can write into it safely */
	if (!LockPixels(GetPortPixMap(s->pgworld))) {
		v4m_close(s);
		return -1;
	}

	s->seqgrab = OpenDefaultComponent(SeqGrabComponentType, 0);

	status = SGInitialize(s->seqgrab);
	if (noErr != status) {
		v4m_close(s);
		return -1;
	}

	/* capture only: no movie file is produced */
	status = SGSetDataRef(s->seqgrab, 0, 0, seqGrabDontMakeMovie);
	if (noErr != status) {
		v4m_close(s);
		return -1;
	}

	status = SGSetGWorld(s->seqgrab, s->pgworld, GetMainDevice());
	if (noErr != status) {
		v4m_close(s);
		return -1;
	}

	status = SGNewChannel(s->seqgrab, VideoMediaType, &s->sgchanvideo);
	if (noErr != status) {
		v4m_close(s);
		return -1;
	}

	status = SGSetChannelBounds(s->sgchanvideo, &frame);
	if (noErr != status) {
		v4m_close(s);
		return -1;
	}

	status = SGSetChannelUsage(s->sgchanvideo, seqGrabRecord);
	if (noErr != status) {
		v4m_close(s);
		return -1;
	}

	/* deliver frames to sgdata_callback with the V4lState as refcon */
	status = SGSetDataProc(s->seqgrab,NewSGDataUPP(sgdata_callback),(long)s);
	if (noErr != status) {
		v4m_close(s);
		return -1;
	}

	status = SGStartRecord(s->seqgrab);
	if (noErr != status) {
		v4m_close(s);
		return -1;
	}

	return 0;
}