/// Store the caller-supplied property bag on this image handle.
/// The properties are only cached here ("nada" — nothing is applied);
/// subclasses/consumers read m_properties later.
void imageBase :: setProperties(gem::Properties&props)
{
  // nada
  m_properties=props;
#if 0
  /* disabled debugging aid: dump every key and its typed value to stderr.
   * NOTE(review): this dead code also erases NONE-typed keys from 'props',
   * which would be a side effect if ever re-enabled. */
  std::vector<std::string> keys=props.keys();
  int i=0;
  for(i=0; i<keys.size(); i++) {
    enum gem::Properties::PropertyType typ=props.type(keys[i]);
    std::cerr << "key["<<keys[i]<<"]: "<<typ<<" :: ";
    switch(typ) {
    case (gem::Properties::NONE):
      props.erase(keys[i]);
      break;
    case (gem::Properties::DOUBLE):
      std::cerr << gem::any_cast<double>(props.get(keys[i]));
      break;
    case (gem::Properties::STRING):
      std::cerr << "'" << gem::any_cast<std::string>(props.get(keys[i])) << "'";
      break;
    default:
      // unknown payload type: print its RTTI name instead of a value
      std::cerr << "<unknown:" << props.get(keys[i]).get_type().name() << ">";
      break;
    }
  }
  std::cerr << std::endl;
#endif
}
/// Cache the property bag and, while no mediaplayer exists yet, apply
/// "width"/"height" (positive values only) to the pixel-block dimensions.
/// Once a player is running, the cached values take effect on restart.
void videoVLC::setProperties(gem::Properties&props)
{
  m_props=props;

  int newWidth = -1;
  int newHeight = -1;
  double value;
  if(props.get("width", value) && value>0) {
    newWidth = value;
  }
  if(props.get("height", value) && value>0) {
    newHeight = value;
  }

  if(m_mediaplayer) {
    // changes will take effect with next restart
    return;
  }
  if(newWidth>0) {
    m_pixBlock.image.xsize=newWidth;
  }
  if(newHeight>0) {
    m_pixBlock.image.ysize=newHeight;
  }
}
/// Synchronize the incoming parameter set 'parms' with the locally cached
/// m_parameter store, invoking setParameter(i[, value]) only for entries
/// whose value actually changed (or, for NONE-typed "bang" parameters,
/// whenever the caller supplied the key).
void setParameters(gem::Properties&parms)
{
  unsigned int i=0;
  for(i=0; i<m_parameterNames.size(); i++) {
    std::string key=m_parameterNames[i];
    std::string s1, s2;
    double d1, d2;
    // dispatch on the *locally known* type of this parameter
    switch(m_parameter.type(key)) {
    case gem::Properties::NONE:
      // valueless trigger: fire if the caller also provided it as NONE;
      // the key is consumed (erased) so it only triggers once
      if(gem::Properties::NONE==parms.type(key)) {
        parms.erase(key);
        setParameter(i);
      }
      break;
    case gem::Properties::DOUBLE:
      // numeric parameter: update cache and notify only on change
      if(m_parameter.get(key, d1) && parms.get(key, d2)) {
        if(d1!=d2) {
          m_parameter.set(key, d2);
          setParameter(i, d2);
        }
      }
      break;
    case gem::Properties::STRING:
      // string parameter: same change-detection scheme as DOUBLE
      if(m_parameter.get(key, s1) && parms.get(key, s2)) {
        if(s1!=s2) {
          m_parameter.set(key, s2);
          setParameter(i, s2);
        }
      }
      break;
    default:
      // unsupported parameter type: ignore
      break;
    }
  }
}
/// Apply model-loading properties:
///  - "textype": texture-coordinate generation mode
///  - "rescale": normalize the model into a 2-unit box centered at origin
///  - "usematerials": toggle use of the model's own materials
/// Triggers a re-render at the end; geometry-affecting changes set m_rebuild.
void modelASSIMP3 :: setProperties(gem::Properties&props)
{
  double d;
#if 0
  /* disabled debugging aid: list all incoming keys and their types */
  std::vector<std::string>keys=props.keys();
  unsigned int i;
  for(i=0; i<keys.size(); i++) {
    post("key[%d]=%s ... %d", i, keys[i].c_str(), props.type(keys[i]));
  }
#endif
  std::string s;
  if(props.get("textype", s)) {
    // if there are NO texcoords, we only accept 'linear' and 'spheremap'
    // else, we also allow 'UV'
    // not-accepted textype, simply use the last one
    // NOTE(review): empty m_textype presumably selects the model's native
    // UV coordinates downstream — confirm against the rebuild code
    if(m_have_texcoords && "UV" == s) m_textype = "";
    else if(("linear" == s) || ("spheremap" == s)) m_textype = s;
    m_rebuild = true;
  }
  if(props.get("rescale", d)) {
    bool b=(bool)d;
    if(b) {
      // scale so the largest bounding-box extent maps to 2 units,
      // and translate the model's center to the origin
      float tmp;
      tmp = m_max.x-m_min.x;
      tmp = aisgl_max(m_max.y - m_min.y,tmp);
      tmp = aisgl_max(m_max.z - m_min.z,tmp);
      m_scale = 2.f / tmp;
      m_offset.x=-m_center.x;
      m_offset.y=-m_center.y;
      m_offset.z=-m_center.z;
    } else {
      // FIXXME shouldn't this be the default???
      m_scale=1.;
      m_offset.x=m_offset.y=m_offset.z=0.f;
    }
  }
  if(props.get("usematerials", d)) {
    bool useMaterial=d;
    // only force a rebuild when the flag actually flips
    if(useMaterial!=m_useMaterial) m_rebuild=true;
    m_useMaterial=useMaterial;
  }
  render();
}
/// Apply film properties; currently only "colorspace" is honoured,
/// which selects the output pixel format for subsequent decoding.
void filmQT4L::setProperties(gem::Properties&props)
{
  double colorspace;
  if(!props.get("colorspace", colorspace)) {
    return;
  }
  m_wantedFormat = colorspace;
}
///////////////////////////////////////////////////////// // really open the file ! (OS dependent) // ///////////////////////////////////////////////////////// bool filmQT4L :: open(const std::string&filename, const gem::Properties&wantProps) { int wantedFormat=GEM_RGBA; double d; unsigned int format=0; if(wantProps.get("format", d)) { format=d; } switch(format) { default: break; case GEM_RGBA: case GEM_YUV: case GEM_GRAY: m_wantedFormat=format; break; } char*cfilename=const_cast<char*>(filename.c_str()); if (quicktime_check_sig(cfilename)) { /* ok, this is quicktime */ if (!(m_quickfile = quicktime_open(filename.c_str(), 1, 0))) { verbose(0, "[GEM:filmQT4L] Unable to open file: %s", filename.c_str()); return false; } m_curFrame = -1; // Get the number of tracks m_numTracks = quicktime_video_tracks(m_quickfile); // Get the length of the movie (on track current track) m_numFrames = quicktime_video_length(m_quickfile, m_curTrack); // Get the frame-rate m_fps = quicktime_frame_rate(m_quickfile, m_curTrack); // Get the video dimensions m_image.image.xsize = quicktime_video_width (m_quickfile, m_curTrack); m_image.image.ysize = quicktime_video_height(m_quickfile, m_curTrack); if (!quicktime_supported_video(m_quickfile, m_curTrack)) { char *codec = quicktime_video_compressor(m_quickfile, m_curTrack); verbose(0, "[GEM:filmQT4L] unsupported CODEC '%s'!", codec); quicktime_close(m_quickfile); m_quickfile=0; return false; } m_image.image.setCsizeByFormat(wantedFormat); m_image.image.reallocate(); m_qtimage.xsize=m_image.image.xsize; m_qtimage.ysize=m_image.image.ysize; m_qtimage.setCsizeByFormat(GEM_RGB); m_qtimage.reallocate(); m_newfilm = true; return true; } goto unsupported; unsupported: close(); return false; }
/// Apply "width"/"height" capture properties; if either was supplied and a
/// capture window already exists, the device is reset so they take effect.
void videoVFW :: setProperties(gem::Properties&props)
{
  double value;
  bool needsReset=false;
  if (props.get("width", value)) {
    m_width=value;
    needsReset=true;
  }
  if (props.get("height", value)) {
    m_height=value;
    needsReset=true;
  }
  if(!needsReset) {
    return;
  }
  if(m_hWndC) {
    reset();
  }
}
/// Save 'image' to 'filename' via ImageMagick (Magick++).
/// Formats other than LUMINANCE/RGB/RGBA/BGRA are first converted to RGB
/// into a temporary imageStruct. Honours an optional "quality" property.
/// Returns true on success; false on any Magick error.
bool imageMAGICK::save(const imageStruct&image, const std::string&filename,
                       const std::string&mimetype,
                       const gem::Properties&props)
{
  imageStruct*img=const_cast<imageStruct*>(&image);
  imageStruct*pImage=img;
  // map the GL pixel format to a Magick channel-order string
  std::string cs;
  switch(img->format) {
  case GL_LUMINANCE:
    cs="K";
    break;
  case GL_RGBA:
    cs="RGBA";
    break;
  default:
    // unknown format: convert a private copy to RGB...
    pImage=new imageStruct();
    pImage->convertFrom(img, GL_RGB);
    /* fall through */ // ...and treat it as RGB below
  case GL_RGB:
    cs="RGB";
    break;
  case GL_BGRA_EXT:
    cs="BGRA";
    break;
  }
  try {
    Magick::Image mimage(pImage->xsize, pImage->ysize, cs,
                         Magick::CharPixel, pImage->data);
    // since openGL is upside down
    if(!pImage->upsidedown) {
      mimage.flip();
    }
    // 8 bits per channel are enough!
    // LATER make this dependent on the image->type
    mimage.depth(8);
    double quality;
    if(props.get("quality", quality)) {
      mimage.quality(quality);
    }
    try {
      // finally convert and export
      mimage.write(filename);
    } catch (const Magick::Warning& e) {
      // BUGFIX: catch by const reference (was by value: slicing + copy)
      verbose(1, "magick saving problem: %s", e.what());
    }
  } catch (const Magick::Exception& e) {
    error("%s", e.what());
    if(pImage!=&image) {
      delete pImage;
    }
    pImage=NULL;
    return false;
  } catch (...) {
    error("imageMAGICK:: uncaught exception!");
    // BUGFIX: this path used to leak the converted copy
    if(pImage!=&image) {
      delete pImage;
    }
    pImage=NULL;
    return false;
  }
  if(pImage!=&image) {
    delete pImage;
  }
  pImage=NULL;
  return true;
}
/// Cache the property bag and apply the ones this test-source understands:
/// positive "width"/"height" resize the pixel block, and "type" selects the
/// generated pattern ("noise"=0, "red"=1, "green"=2, "blue"=3).
void videoTEST::setProperties(gem::Properties&props)
{
  m_props=props;
  double num;
  if(props.get("width", num) && num>0) {
    m_pixBlock.image.xsize = num;
  }
  if(props.get("height", num) && num>0) {
    m_pixBlock.image.ysize = num;
  }
  std::string typname;
  if(props.get("type", typname)) {
    // pattern names in m_type order; unknown names leave m_type untouched
    const char*patterns[] = {"noise", "red", "green", "blue"};
    for(int pat=0; pat<4; pat++) {
      if(patterns[pat]==typname) {
        m_type=pat;
        break;
      }
    }
  }
}
///////////////////////////////////////////////////////// // really open the file ! (OS dependent) // ///////////////////////////////////////////////////////// bool filmMPEG3 :: open(const std::string filename, const gem::Properties&wantProps) { char*cfilename=const_cast<char*>(filename.c_str()); if (mpeg3_check_sig(cfilename)){/* ok, this is mpeg(3) */ #ifdef FILMMPEG3_OPEN17 // new API with more sophisticated error-feedback mpeg_file= mpeg3_open(cfilename, 0); #else // old API mpeg_file= mpeg3_open(cfilename); #endif if(!mpeg_file) { //error("filmMPEG3: this file %s does not seem to hold any video data", filename.c_str()); goto unsupported; } if (!mpeg3_has_video(mpeg_file)){ error("filmMPEG3: this file %s does not seem to hold any video data", filename.c_str()); goto unsupported; } m_numTracks = mpeg3_total_vstreams(mpeg_file); if(m_curTrack>=m_numTracks || m_curTrack<0) m_curTrack=0; m_numFrames = mpeg3_video_frames(mpeg_file, m_curTrack); m_fps = mpeg3_frame_rate(mpeg_file, m_curTrack); m_image.image.xsize=mpeg3_video_width(mpeg_file, m_curTrack); m_image.image.ysize=mpeg3_video_height(mpeg_file, m_curTrack); if (!m_image.image.xsize*m_image.image.ysize)goto unsupported; double d; if(wantProps.get("colorspace", d)) { m_image.image.setCsizeByFormat((int)d); m_wantedFormat=m_image.image.format; } m_image.image.reallocate(); changeImage(0,-1); m_newfilm=true; return true; } goto unsupported; unsupported: close(); return false; }
///////////////////////////////////////////////////////// // open the file // ///////////////////////////////////////////////////////// bool filmAVIPLAY :: open(const std::string filename, const gem::Properties&wantProps) { double d; if(wantProps.get("colorspace", d) && d>0) { m_wantedFormat=d; } // how do we close the avifile ??? automagically ? if (!(m_avifile = CreateIAviReadFile(filename.c_str())))goto unsupported; while(!(*m_avifile).IsOpened()) { struct timeval sleep; sleep.tv_sec=0; sleep.tv_usec=500;/*500us*/ select(0,0,0,0,&sleep); } if (!(*m_avifile).IsValid())goto unsupported; m_numTracks = (*m_avifile).VideoStreamCount(); if (m_numTracks<1)return false; if (m_curTrack>=m_numTracks)m_curTrack = 0; try { m_avistream=(*m_avifile).GetStream(m_curTrack, avm::IStream::StreamType(1)); } catch (const char* string) { m_avistream = 0; } if (!m_avistream)goto unsupported; if ((*m_avistream).StartStreaming()==-1)goto unsupported; m_numFrames = (*m_avistream).GetLength(); m_curFrame = -1; if (1) { avm::StreamInfo *l_info = (*m_avistream).GetStreamInfo(); m_image.image.xsize = (*l_info).GetVideoWidth(); m_image.image.ysize = (*l_info).GetVideoHeight(); m_fps= (*l_info).GetFps(); } m_image.image.setCsizeByFormat(m_wantedFormat); if (!(m_image.image.xsize*m_image.image.ysize*m_image.image.csize))goto unsupported; m_readNext=true; m_newfilm = true; return true; goto unsupported; unsupported: close(); return false; }
/// Plugin-dispatcher open(): try the backends the caller explicitly listed
/// in the "backends" property (matched against m_ids); if none was listed
/// at all, fall back to trying every registered handle in order.
/// Returns true iff some backend accepted the file/device.
virtual bool open(const std::string&name, const gem::Properties&requestprops)
{
  // make sure any previously selected backend is released first
  if(m_handle)close();
  std::vector<std::string> backends;
  if(requestprops.type("backends")!=gem::Properties::UNSET) {
    requestprops.get("backends", backends);
  }
  // requestprops.erase("backends");
  bool tried=false;
  if(!backends.empty()) {
    unsigned int i, j;
    // outer loop stops as soon as one backend succeeded
    for(j=0; !m_handle && j<backends.size(); j++) {
      std::string id=backends[j];
      for(i=0; i<m_handles.size(); i++) {
        /* coverity[assign_where_compare_meant] we set 'tried' to true if we have found at least one matching backend */
        // NOTE: '(tried=true)' is a deliberate assignment inside the
        // condition (always truthy), not a comparison typo
        if(id==m_ids[i]&& (tried=true) && m_handles[i]->open(name, requestprops)) {
          m_handle=m_handles[i];
        }
      }
    }
  }
  // fallback: no requested backend even matched an id — try them all
  if(!m_handle && !tried) {
    if(!backends.empty() && !m_handles.empty()) {
      verbose(2, "no available loader selected, falling back to valid ones");
    }
    unsigned int i=0;
    for(i=0; i<m_handles.size(); i++) {
      if(m_handles[i] && m_handles[i]->open(name, requestprops)) {
        m_handle=m_handles[i];
        break;
      } else { }
    }
  }
  return (NULL!=m_handle);
}
/// Apply DeckLink capture properties:
///  - "format": either a mode name (string) or a mode index (number)
///  - "connection": input connector, by name or index (SDI/HDMI/...)
/// The full property bag is cached in m_props afterwards; a changed
/// connection is pushed to the hardware immediately when m_dlConfig exists.
void videoDECKLINK::setProperties(gem::Properties&props)
{
  std::vector<std::string>keys=props.keys();
  // BUGFIX: loop index was (signed) int, compared against keys.size()
  unsigned int i=0;
  for(i=0; i<keys.size(); i++) {
    const std::string key =keys[i];
    if("format" == key) {
      std::string s;
      double d;
      switch(props.type(key)) {
      case gem::Properties::STRING:
        // select format by name; invalidate any numeric selection
        if(props.get(key, s)) {
          m_formatnum =-1;
          m_formatname=s;
        }
        break;
      case gem::Properties::DOUBLE:
        // select format by index; invalidate any name selection
        if(props.get(key, d)) {
          m_formatnum =(int)d;
          m_formatname="";
        }
        break;
      default:
        // other property types are ignored (also silences -Wswitch)
        break;
      }
    }
    if("connection" == key) {
      BMDVideoConnection vconn = m_connectionType;
      std::string s;
      double d;
      switch(props.type(key)) {
      case gem::Properties::STRING:
        if(props.get(key, s)) {
          if ("SDI" == s) {
            vconn=bmdVideoConnectionSDI;
          } else if ("HDMI" == s) {
            vconn=bmdVideoConnectionHDMI;
          } else if ("OpticalSDI" == s) {
            vconn=bmdVideoConnectionOpticalSDI;
          } else if ("Component" == s) {
            vconn=bmdVideoConnectionComponent;
          } else if ("Composite" == s) {
            vconn=bmdVideoConnectionComposite;
          } else if ("SVideo" == s) {
            vconn=bmdVideoConnectionSVideo;
          }
        }
        break;
      case gem::Properties::DOUBLE:
        if(props.get(key, d)) {
          int idx =(int)d;
          switch(idx) {
          default:
          case 0:
            vconn=bmdVideoConnectionSDI;
            break;
          case 1:
            vconn=bmdVideoConnectionHDMI;
            break;
          case 2:
            vconn=bmdVideoConnectionOpticalSDI;
            break;
          case 3:
            vconn=bmdVideoConnectionComponent;
            break;
          case 4:
            vconn=bmdVideoConnectionComposite;
            break;
          case 5:
            vconn=bmdVideoConnectionSVideo;
            break;
          }
        }
        break;
      default:
        // other property types are ignored (also silences -Wswitch)
        break;
      }
      // push the change to the device only when it actually differs
      if(m_dlConfig && (m_connectionType != vconn)) {
        m_dlConfig->SetInt(bmdDeckLinkConfigVideoInputConnection, vconn);
      }
      m_connectionType = vconn;
    }
  }
  m_props=props;
}
bool imageTIFF::save(const imageStruct&constimage, const std::string&filename, const std::string&mimetype, const gem::Properties&props) { TIFF *tif = NULL; if(GL_YUV422_GEM==constimage.format) { error("don't know how to write YUV-images with libTIFF"); return false; } tif=TIFFOpen(filename.c_str(), "w"); if (tif == NULL) { return false; } imageStruct image; constimage.copy2Image(&image); image.fixUpDown(); uint32 width=image.xsize, height = image.ysize; short bits=8, samps=image.csize; int npixels = width * height; //int planar_conf = PLANARCONFIG_CONTIG; std::string software = "PD/GEM"; std::string artist; std::string hostcomputer; double xresolution = 72., yresolution=72.; short resunit = RESUNIT_INCH; props.get("xresolution", xresolution); props.get("yresolution", yresolution); std::string resunit_s; if(props.get("resolutionunit", resunit_s)) { if(("inch"==resunit_s) || ("english"==resunit_s) || ("imperial"==resunit_s)) resunit=RESUNIT_INCH; else if(("centimeter"==resunit_s) || ("metric"==resunit_s)) resunit=RESUNIT_CENTIMETER; else resunit=RESUNIT_NONE; } props.get("software", software); props.get("artist", artist); props.get("hostcomputer", hostcomputer); TIFFSetField(tif, TIFFTAG_IMAGEWIDTH, width); TIFFSetField(tif, TIFFTAG_IMAGELENGTH, height); TIFFSetField(tif, TIFFTAG_BITSPERSAMPLE, bits); TIFFSetField(tif, TIFFTAG_SAMPLESPERPIXEL, samps); TIFFSetField(tif, TIFFTAG_PLANARCONFIG, 1); TIFFSetField(tif, TIFFTAG_PHOTOMETRIC, PHOTOMETRIC_RGB); TIFFSetField(tif, TIFFTAG_XRESOLUTION, xresolution); // RATIONAL TIFFSetField(tif, TIFFTAG_YRESOLUTION, yresolution); // RATIONAL TIFFSetField(tif, TIFFTAG_RESOLUTIONUNIT, resunit); if(!software.empty()) TIFFSetField(tif, TIFFTAG_SOFTWARE, software.c_str()); if(!artist.empty()) TIFFSetField(tif, TIFFTAG_ARTIST, artist.c_str()); if(!hostcomputer.empty()) TIFFSetField(tif, TIFFTAG_HOSTCOMPUTER, hostcomputer.c_str()); int yStride = image.xsize * image.csize; unsigned char *srcLine = &(image.data[npixels * image.csize]); 
srcLine -= yStride; for (uint32 row = 0; row < height; row++) { unsigned char *buf = srcLine; if (TIFFWriteScanline(tif, buf, row, 0) < 0) { error("GEM: could not write line %d to image %s", row, filename.c_str()); TIFFClose(tif); delete [] buf; return(false); } srcLine -= yStride; } TIFFClose(tif); return true; }
/// Open the configured device/URL (m_devname) with libVLC.
/// Property handling: "width"/"height" size the output buffer; every OTHER
/// property is forwarded verbatim to VLC as a media option ":key[=value]".
/// Registers lock/unlock and format callbacks on the new mediaplayer.
/// Returns false when no device name is set or no media could be created.
bool videoVLC::open(gem::Properties&props)
{
  if(m_mediaplayer) {
    close();
  }
  m_pixBlock.image.xsize=0;
  m_pixBlock.image.ysize=0;
  setProperties(props);
  if(m_devname.empty()) {
    return false;
  }
  // try the name first as a location (URL), then as a plain file path
  libvlc_media_t*media = libvlc_media_new_location (m_instance, m_devname.c_str());
  if(!media) {
    media = libvlc_media_new_path (m_instance, m_devname.c_str());
  }
  if(!media) {
    return false;
  }
  char buf[MAXVLCSTRING];
  libvlc_media_add_option(media,":noaudio");
  libvlc_media_add_option(media,":no-video-title-show");
  int w=m_pixBlock.image.xsize;
  int h=m_pixBlock.image.ysize;
  std::vector<std::string>keys=props.keys();
  unsigned int i;
  for(i=0; i<keys.size(); i++) {
    std::string key=keys[i];
    double d;
    std::string s;
    buf[0]=0;
    if(0) {}
    else if("width"==key) {
      if(props.get(key, d)&&(d>0)) {
        w=d;
      }
    } else if("height"==key) {
      if(props.get(key, d)&&(d>0)) {
        h=d;
      }
    } else {
      // forward any other property as a VLC media option
      gem::Properties::PropertyType type = props.type(key);
      switch(type) {
      case gem::Properties::NONE:
        // valueless flag, e.g. ":novideo"
        snprintf(buf, MAXVLCSTRING, ":%s", key.c_str());
        break;
      case gem::Properties::DOUBLE:
        snprintf(buf, MAXVLCSTRING, ":%s=%g", key.c_str(), d);
        break;
      case gem::Properties::STRING:
        if(props.get(key, s)) {
          /* need to find an option that actually takes strings, so i can test this with spaces */
          snprintf(buf, MAXVLCSTRING, ":%s=%s", key.c_str(), s.c_str());
        }
        break;
      default:
        break;
      }
      if(0!=buf[0]) {
        buf[MAXVLCSTRING-1]=0;   // snprintf truncation guard
        libvlc_media_add_option(media,buf);
      }
    }
  }
  resize(w,h,0);
  m_pixBlock.image.setWhite();
  m_mediaplayer=libvlc_media_player_new_from_media(media);
  libvlc_media_release(media);
  /* helper classes to register callbacks */
  struct _callbackObj {
    // VLC asks for a buffer to render the next frame into
    static void*lock(void*opaque, void**plane ) {
      videoVLC*obj=(videoVLC*)opaque;
      if(obj) {
        return obj->lockFrame(plane);
      }
      return NULL;
    }
    // VLC is done rendering into the buffer
    static void unlock(void*opaque, void*picture, void*const*plane) {
      videoVLC*obj=(videoVLC*)opaque;
      if(obj) {
        obj->unlockFrame(picture, plane);
      }
    }
    // display callback is intentionally a no-op (we pass NULL below)
    static void display(void*opaque, void*picture) {
      videoVLC*obj=(videoVLC*)opaque;
    }
    _callbackObj(videoVLC*data) {
      libvlc_video_set_callbacks(data->m_mediaplayer, lock, unlock, NULL, data);
    }
  };
  struct _formatCallbackObj {
    // negotiate chroma/size/pitch with VLC before playback starts
    static unsigned format(void**opaque, char *chroma, unsigned *width, unsigned *height, unsigned *pitches, unsigned *lines) {
      videoVLC**objptr=(videoVLC**)opaque;
      if(objptr && *objptr) {
        return (*objptr)->setFormat(chroma, *width, *height, *pitches, *lines);
      }
      return 0;
    }
    _formatCallbackObj(videoVLC*data) {
      libvlc_video_set_format_callbacks(data->m_mediaplayer, format, NULL );
    }
  };
  /* instantiate helper-classes (which registers callbacks) */
  _callbackObj(this);
  _formatCallbackObj(this);
  return true;
}
///////////////////////////////////////////////////////// // openDevice // ///////////////////////////////////////////////////////// bool videoVFW :: openDevice(gem::Properties&props) { char driverName[256]; char driverDesc[256]; if (capGetDriverDescription(0, driverName, 256, driverDesc, 256)) post("videoVFW: driver '%s'", driverName); double d; if (props.get("width", d)) m_width=d; if (props.get("height", d)) m_height=d; if(m_hWndC)closeDevice(); // Connect to the daemon m_hWndC = capCreateCaptureWindow ((LPSTR) "GEM video", // window name if pop-up 0, // window style (not visible) 0, 0, m_width, m_height,// window position and dimensions GetDesktopWindow(), 0); if (!m_hWndC) { error("Unable to create capture window"); return false; } if (!capDriverConnect(m_hWndC, 0)) { error("Unable to connect to video driver"); closeDevice(); return false; } CAPTUREPARMS params; if (!capCaptureGetSetup(m_hWndC, ¶ms, sizeof(CAPTUREPARMS))) { error("Unable to get capture parameters"); closeDevice(); return false; } params.fYield = TRUE; params.fCaptureAudio = FALSE; params.wPercentDropForError = 100; params.fLimitEnabled = FALSE; params.AVStreamMaster = AVSTREAMMASTER_NONE; params.fStepCaptureAt2x = FALSE; params.fAbortLeftMouse = FALSE; params.fAbortRightMouse = FALSE; if (!capCaptureSetSetup(m_hWndC, ¶ms, sizeof(CAPTUREPARMS))) { error("Unable to set capture parameters"); closeDevice(); return false; } if (!capSetCallbackOnVideoStream(m_hWndC, videoVFW::videoFrameCallback)) { error("Unable to set frame callback"); closeDevice(); return false; } if (!capSetUserData(m_hWndC, this)) { error("Unable to set user data"); closeDevice(); return false; } DWORD formSize = capGetVideoFormat(m_hWndC, NULL, 0); BITMAPINFO *videoFormat = (BITMAPINFO *)(new char[formSize]); if (!capGetVideoFormat(m_hWndC, videoFormat, formSize)) { error("Unable to get video format"); closeDevice(); return false; } videoFormat->bmiHeader.biWidth = m_width; videoFormat->bmiHeader.biHeight = m_height; 
videoFormat->bmiHeader.biBitCount = 24; videoFormat->bmiHeader.biCompression = BI_RGB; videoFormat->bmiHeader.biClrUsed = 0; videoFormat->bmiHeader.biClrImportant = 0; videoFormat->bmiHeader.biSizeImage = 0; if (!capSetVideoFormat(m_hWndC, videoFormat, formSize)) { error("Unable to set video format"); delete videoFormat; closeDevice(); return false; } if (!capGetVideoFormat(m_hWndC, videoFormat, formSize)) { error("Unable to get video format"); } m_width=static_cast<int>(videoFormat->bmiHeader.biWidth); m_height=static_cast<int>(videoFormat->bmiHeader.biHeight); verbose(1, "Connected with %dx%d @ %d", m_width, m_height, static_cast<int>(videoFormat->bmiHeader.biBitCount)); delete videoFormat; m_image.image.xsize = m_width; m_image.image.ysize = m_height; m_image.image.setCsizeByFormat(GL_RGBA); m_image.image.reallocate(); m_image.image.setBlack(); return true; }
/// Save 'constimage' via a QuickTime graphics-exporter component.
/// The mimetype selects the container (default TIFF); the image is converted
/// to RGBA and, if needed, vertically flipped into a temporary buffer first.
/// Honours a "quality" property (0..100, mapped onto the CodecQ range).
/// NOTE(review): the early 'return false' paths leave 'geComp' open and
/// (after QTNewGWorldFromPtr) 'img' alive — possible handle leaks; confirm.
bool imageQT::save(const imageStruct&constimage, const std::string&filename, const std::string&mimetype, const gem::Properties&props)
{
  OSErr err=noErr;
  ComponentResult cErr = 0;
  GWorldPtr img = NULL;
  GraphicsExportComponent geComp = NULL;
  Rect r;
  FSSpec spec;
  // pick the output container type from the mimetype (TIFF by default)
  OSType osFileType=kQTFileTypeTIFF;
  mime2type(mimetype, osFileType);
  std::string myfilename=filename.c_str();
  const UInt8*filename8=reinterpret_cast<const UInt8*>(myfilename.c_str());
#if defined __APPLE__
  FSRef ref;
  err = ::FSPathMakeRef(filename8, &ref, NULL );
  if (err == fnfErr) {
    // if the file does not yet exist, then let's create the file
    touch(myfilename);
    err = FSPathMakeRef(filename8, &ref, NULL);
  }
  if (err != noErr) {
    verbose(1, "[GEM:imageQT] error#%d in FSPathMakeRef()", err);
  }
  err = ::FSGetCatalogInfo(&ref, kFSCatInfoNodeFlags, NULL, NULL, &spec, NULL);
  if (err != noErr) {
    verbose(1, "[GEM:imageQT] error#%d in FSGetCatalogInfo()", err);
  }
  err = FSMakeFSSpec(spec.vRefNum, spec.parID, filename8, &spec);
  //this always gives an error -37 ???
#elif defined _WIN32
  touch(myfilename);
  err = FSMakeFSSpec (0, 0L, filename8, &spec);
#endif
  // -37 (bdNamErr) is tolerated here, matching the comment above
  if (err != noErr && err != -37) {
    verbose(1, "[GEM:imageQT] error#%d in FSMakeFSSpec()", err);
  }
  err = OpenADefaultComponent(GraphicsExporterComponentType, osFileType, &geComp);
  if (err != noErr) {
    verbose(0, "[GEM:imageQT] error#%d in OpenADefaultComponent()", err);
    return false; // FIXME:
  }
  r.top = 0;
  r.left = 0;
  r.bottom = constimage.ysize;
  r.right = constimage.xsize;
  // QuickTime expects RGBA; convert a private copy
  imageStruct rgbaimg;
  rgbaimg.convertFrom(&constimage, GL_RGBA_GEM);
  unsigned char *data = NULL;
  if(!rgbaimg.upsidedown) {
    // the image is openGL-oriented, not quicktime-oriented! flip it!
    int rowBytes = rgbaimg.xsize * rgbaimg.csize;
    int imageSize = rgbaimg.ysize * rowBytes;
    data = new unsigned char[imageSize];
    InvertGLImage(rgbaimg.data, data, imageSize, rowBytes);
  }
  err = QTNewGWorldFromPtr(&img, IMAGEQT_RGBA_PIXELFORMAT, //k32RGBAPixelFormat,
                           &r, NULL, NULL, 0, (data?data:rgbaimg.data),
                           static_cast<long>(rgbaimg.xsize * rgbaimg.csize));
  // is this the right place to free the "data" buffer (if used)?
  // i don't know, whether quicktime still needs the buffer...
  if (err != noErr) {
    verbose(0, "[GEM:imageQT] error#%d in QTNewGWorldFromPtr()", err);
    if(data) {
      delete[]data;
    }
    return false; // FIXME:
  }
  // Set the input GWorld for the exporter
  cErr = GraphicsExportSetInputGWorld(geComp, img);
  if (cErr != noErr) {
    verbose(0, "[GEM:imageQT] error#%d in GraphicsExportSetInputGWorld()", cErr);
    if(data) {
      delete[]data;
    }
    return false; // FIXME:
  }
  // Set the output file to our FSSpec
  cErr = GraphicsExportSetOutputFile(geComp, &spec);
  if (cErr != noErr) {
    verbose(0, "[GEM:imageQT] error#%d in GraphicsExportSetOutputFile()", cErr);
    if(data) {
      delete[]data;
    }
    return false; // FIXME:
  }
  // Set the compression quality (needed for JPEG, not necessarily for other formats)
  /* codecMinQuality codecLowQuality codecNormalQuality codecHighQuality codecMaxQuality codecLosslessQuality */
  CodecQ quality=codecHighQuality;
  double d=0.;
  if(props.get("quality", d)) {
    // <0 = minqality
    // >=100 = lossless
    if(d<0.) {
      d=0.;
    } else if(d>100.) {
      d=100.;
    }
    // scale the 0..100 user range linearly onto 0..codecLosslessQuality
    CodecQ maxQ=codecLosslessQuality;
    double maxQ_d=(double)maxQ;
    double quality_d=maxQ_d * d / 100.; // 0..maxQ
    quality=(CodecQ)quality_d;
  }
  cErr = GraphicsExportSetCompressionQuality(geComp, quality);
  // Export it
  cErr = GraphicsExportDoExport(geComp, NULL);
  if (cErr != noErr) {
    verbose(0, "[GEM:imageQT] ERROR: %i in GraphicsExportDoExport()", cErr);
    if(data) {
      delete[]data;
    }
    return false; // FIXME:
  }
  // finally, close the component
  if (geComp != NULL) {
    CloseComponent(geComp);
  }
  if(data) {
    delete[]data;
  }
  return true;
}
bool imageJPEG::save(const imageStruct&constimage, const std::string&filename, const std::string&mimetype, const gem::Properties&props) { struct jpeg_compress_struct cinfo; /* More stuff */ FILE * outfile=NULL; /* target file */ JSAMPROW row_pointer; /* pointer to JSAMPLE row[s] */ int row_stride; /* physical row width in image buffer */ // We set up the normal JPEG error routines, then override error_exit my_error_mgr jerr; cinfo.err = jpeg_std_error(&jerr.pub); jerr.pub.error_exit = my_error_exit; // Establish the setjmp return context for my_error_exit to use. if ( setjmp(jerr.setjmp_buffer) ) { // If we get here, the JPEG code has signaled an error. // We need to clean up the JPEG object, close the input file, and return. jpeg_destroy_compress(&cinfo); if(outfile) fclose(outfile); return(false); } double fquality=100; props.get("quality", fquality); int quality=fquality; if(GL_YUV422_GEM==constimage.format) { error("don't know how to write YUV-images with libJPEG"); return false; } /* Now we can initialize the JPEG compression object. */ jpeg_create_compress(&cinfo); if ((outfile = fopen(filename.c_str(), "wb")) == NULL) { error("can't open %s\n", filename.c_str()); return (false); } jpeg_stdio_dest(&cinfo, outfile); imageStruct image; constimage.convertTo(&image, GL_RGB); // image.fixUpDown(); JSAMPLE *image_buffer = image.data; cinfo.image_width = image.xsize; /* image width and height, in pixels */ cinfo.image_height = image.ysize; cinfo.input_components = 3; /* # of color components per pixel */ cinfo.in_color_space = JCS_RGB; /* colorspace of input image */ jpeg_set_defaults(&cinfo); jpeg_set_quality(&cinfo, quality, TRUE /* limit to baseline-JPEG values */); jpeg_start_compress(&cinfo, TRUE); row_stride = image.xsize * image.csize; /* JSAMPLEs per row in image_buffer */ while (cinfo.next_scanline < cinfo.image_height) { /* jpeg_write_scanlines expects an array of pointers to scanlines. 
* Here the array is only one element long, but you could pass * more than one scanline at a time if that's more convenient. */ int rowindex=cinfo.next_scanline; if(!image.upsidedown) rowindex=(cinfo.image_height-cinfo.next_scanline-1); row_pointer = & image_buffer[rowindex * row_stride]; if(jpeg_write_scanlines(&cinfo, &row_pointer, 1) < 0){ error("GEM: could not write line %d to image %s", cinfo.next_scanline, filename.c_str()); jpeg_finish_compress(&cinfo); fclose(outfile); jpeg_destroy_compress(&cinfo); return(false); } } jpeg_finish_compress(&cinfo); fclose(outfile); jpeg_destroy_compress(&cinfo); return true; }
/// Open 'filename' as a QuickTime movie (Carbon/QuickTime API).
/// Resolves the path to an FSSpec, loads the movie, derives frame count
/// from movie-duration / first-sample-duration, sizes the RGBA image
/// buffer from the movie box, and attaches a GWorld for decoding.
/// Honours a "colorspace" property (stored in m_wantedFormat).
bool filmQT :: open(const std::string filename, const gem::Properties&wantProps)
{
  FSSpec theFSSpec;
  OSErr err = noErr;
  Rect m_srcRect;     // NOTE(review): locals shadow member-style names
  long m_rowBytes;
  short refnum = 0;
  long movieDur, movieScale;
  OSType whichMediaType;
  short flags = 0;
  int wantedFormat;
  double d;
  if (filename.empty())return false;
  if (!m_bInit) {
    error("filmQT: object not correctly initialized\n");
    return false;
  }
  if(wantProps.get("colorspace", d)) m_wantedFormat=d;
  wantedFormat= (m_wantedFormat)?m_wantedFormat:GL_RGBA;
  // Clean up any open files:
  closeMess();
  Str255 pstrFilename;
  CopyCStringToPascal(filename.c_str(), pstrFilename); // Convert to Pascal string
  err = FSMakeFSSpec (0, 0L, pstrFilename, &theFSSpec); // Make specification record
#ifdef __APPLE__
  // fall back to resolving the POSIX path when the Pascal-path lookup failed
  if (err != noErr) {
    FSRef ref;
    err = ::FSPathMakeRef((const UInt8*)filename.c_str(), &ref, NULL);
    err = ::FSGetCatalogInfo(&ref, kFSCatInfoNone, NULL, NULL, &theFSSpec, NULL);
  }
#endif
  if (err != noErr) {
    error("filmQT: Unable to find file: %s (%d)", filename.c_str(), err);
    //goto unsupported;
  }
  err = ::OpenMovieFile(&theFSSpec, &refnum, fsRdPerm);
  if (err) {
    error("filmQT: Couldn't open the movie file: %s (%d)", filename.c_str(), err);
    if (refnum) ::CloseMovieFile(refnum);
    goto unsupported;
  }
  err = ::NewMovieFromFile(&m_movie, refnum, NULL, NULL, newMovieActive, NULL);
  if (err) {
    error("filmQT: Couldn't make a movie from file: %s (%d)", filename.c_str(), err);
    if (refnum) ::CloseMovieFile(refnum);
    m_movie=NULL;
    goto unsupported;
  }
  // the file handle is no longer needed once the Movie object exists
  if (refnum) ::CloseMovieFile(refnum);
  m_curFrame = -1;
  m_numTracks = static_cast<int>(GetMovieTrackCount(m_movie));
  // Get the length of the movie
  movieDur = static_cast<long>(GetMovieDuration(m_movie));
  movieScale = static_cast<long>(GetMovieTimeScale(m_movie));
  whichMediaType = VisualMediaCharacteristic;
  // shouldn't the flags be OR'ed instead of ADDed ? (jmz)
  flags = nextTimeMediaSample | nextTimeEdgeOK;
  // query the duration of the first visual sample; frame count is then
  // total-duration / sample-duration
  GetMovieNextInterestingTime( m_movie, flags, static_cast<TimeValue>(1), &whichMediaType, 0,
                               static_cast<Fixed>(1<<16), NULL, &duration);
  m_numFrames = movieDur/duration;
  // NOTE(review): fps is set to the frame COUNT here (movieScale is unused);
  // looks suspicious — confirm against other film backends
  m_fps = m_numFrames;
  // Get the bounds for the movie
  ::GetMovieBox(m_movie, &m_srcRect);
  // OffsetRect(&m_srcRect, -m_srcRect.left, -m_srcRect.top);
  SetMovieBox(m_movie, &m_srcRect);
  m_image.image.xsize = m_srcRect.right - m_srcRect.left;
  m_image.image.ysize = m_srcRect.bottom - m_srcRect.top;
  m_image.image.setCsizeByFormat(GL_RGBA);
  m_image.image.allocate();
  m_rowBytes = m_image.image.xsize * 4;
  // SetMoviePlayHints(m_movie, hintsHighQuality, hintsHighQuality);
  err = SetMovieAudioMute(m_movie, true, 0);
  if(noErr!=err) {
    error("filmQT: unable to mute movie...");
  }
  // decode directly into our image buffer via a wrapping GWorld
  err = QTNewGWorldFromPtr( &m_srcGWorld, FILMQT_DEFAULT_PIXELFORMAT, &m_srcRect, NULL, NULL, 0,
                            m_image.image.data, m_rowBytes);
  if (err) {
    error("filmQT: Couldn't make QTNewGWorldFromPtr %d", err);
    goto unsupported;
  }
  // *** set the graphics world for displaying the movie ***
  ::SetMovieGWorld(m_movie, m_srcGWorld, GetGWorldDevice(m_srcGWorld));
  if(GetMoviesError()) {
    close();
    goto unsupported;
  }
  SetMovieRate(m_movie,X2Fix(1.0));
  // *** this does the actual drawing into the GWorld ***
  ::MoviesTask(m_movie, 0);
  return true;
unsupported:
  return false;
}
/// Apply capture properties to the open unicap device.
/// "width"/"height" are handled locally (and trigger a one-shot stream
/// restart when changed); every other key is looked up as a unicap device
/// property and set by value (numeric types) or by menu item (index or name).
void videoUNICAP :: setProperties(gem::Properties&props)
{
  m_props=props;
  debugPost("handle=%p", m_handle);
  if(!m_handle) {
    return;
  }
  unicap_status_t status = 0;
  bool restart=false;
  unsigned int width=0, height=0;
  std::vector<std::string> keys=props.keys();
  // BUGFIX: loop index was (signed) int, compared against keys.size()
  unsigned int i=0;
  for(i=0; i<keys.size(); i++) {
    std::string key=keys[i];
    double d=0;
    std::string s;
    if(("width"==key) && props.get(key, d)) {
      width=d;
      if(m_width!=width) {
        m_width=width;
        restart=true;
      }
      continue;
    }
    if(("height"==key) && props.get(key, d)) {
      height=d;
      if(m_height!=height) {
        m_height=height;
        restart=true;
      }
      continue;
    }
    unicap_property_t prop;
    // BUGFIX: strncpy() does not NUL-terminate when the source fills the
    // buffer; force termination (identifier is assumed to be char[128], as
    // the original 128-byte copies imply)
    strncpy(prop.identifier, key.c_str(), 128);
    prop.identifier[127]=0;
    status=unicap_get_property(m_handle, &prop );
    if(SUCCESS(status)) {
      switch(prop.type) {
      case UNICAP_PROPERTY_TYPE_VALUE_LIST:
      case UNICAP_PROPERTY_TYPE_FLAGS:
      case UNICAP_PROPERTY_TYPE_RANGE:
        // plain numeric property
        if(props.get(key, d)) {
          prop.value=d;
          status= unicap_set_property(m_handle, &prop );
        }
        break;
      case UNICAP_PROPERTY_TYPE_MENU:
        if(props.get(key, d)) {
          // numeric: interpret as menu index
          if(d>=0 && d < prop.menu.menu_item_count) {
            int item=d;   // renamed from 'i': it shadowed the loop index
            /* unfortunately we must use the symbolic value and cannot simply set using the index... */
            strncpy(prop.menu_item, prop.menu.menu_items[item], 128);
            prop.menu_item[127]=0;
            status= unicap_set_property(m_handle, &prop );
          }
        } else if (props.get(key, s)) {
          // string: use the symbolic menu value directly
          strncpy(prop.menu_item, s.c_str(), 128);
          prop.menu_item[127]=0;
          status= unicap_set_property(m_handle, &prop );
        }
        break;
      default: // ?
        break;
      }
      if(!SUCCESS(status)) {
        verbose(1, "could not set property '%s'", key.c_str());
      }
    }
  }
  // restart the stream (at most once) if a dimension changed
  while(restart) {
    restart=false;
    debugPost("restarting stream due to property change");
    bool running=stop();
    debugPost("running=%d", running);
    if (running) {
      start();
    }
  }
}
/////////////////////////////////////////////////////////
// open the file
//
/////////////////////////////////////////////////////////
// Opens an AVI file via the Video-for-Windows API and prepares a
// decompressor so frames can later be pulled into m_image.
// On any failure, jumps to 'unsupported', which calls close() (releasing
// whatever was acquired so far) and returns false.
// @param filename  path of the AVI file
// @param wantProps may carry "colorspace" (> 0) to override m_wantedFormat
// @return true on success, false if the file cannot be opened/decoded
bool filmAVI :: open(const std::string filename, const gem::Properties&wantProps)
{
  AVISTREAMINFO streaminfo;
  long lSize = 0; // in bytes
  double d;
  // optional caller-requested colorspace
  if(wantProps.get("colorspace", d) && d>0) m_wantedFormat=d;
  if (AVIStreamOpenFromFile(&m_streamVid, filename.c_str(), streamtypeVIDEO, 0, OF_READ, NULL)) {
    verbose(2, "[pix_film:AVI]: Unable to open file: %s", filename.c_str());
    goto unsupported;
  }
  // first query the size of the format header (NULL buffer), then read it
  if( AVIStreamInfo( m_streamVid, &streaminfo, sizeof(streaminfo)) ||
      AVIStreamReadFormat(m_streamVid, AVIStreamStart(m_streamVid), NULL, &lSize)) {
    verbose(2, "[pix_film:AVI]: Unable to read file format: %s", filename.c_str());
    goto unsupported;
  }
  // raw (source) bitmap header: allocated at the size the stream reports,
  // since BITMAPINFOHEADER may be followed by codec-specific data
  m_pbmihRaw = (BITMAPINFOHEADER*) new char[lSize];
  if(AVIStreamReadFormat(m_streamVid, AVIStreamStart(m_streamVid), m_pbmihRaw, &lSize)) {
    verbose(2, "[pix_film:AVI]: Unable to read file format: %s", filename.c_str());
    goto unsupported;
  }
  if ((8 == m_pbmihRaw->biBitCount) ||
      ((40 == m_pbmihRaw->biBitCount) &&
       (mmioFOURCC('c','v','i','d') == m_pbmihRaw->biCompression))) {
    // HACK: attempt to decompress 8 bit films or BW cinepak films to greyscale
    // destination header gets room for a 256-entry palette (3 bytes/entry)
    m_pbmihDst = (BITMAPINFOHEADER*) new char[sizeof(BITMAPINFOHEADER) + 256*3];
    verbose(3, "[pix_film:AVI]: Loading as greyscale");
    *m_pbmihDst = *m_pbmihRaw;
    m_pbmihDst->biSize = sizeof(BITMAPINFOHEADER);
    m_format = GL_LUMINANCE;
    m_pbmihDst->biBitCount = 8;
    m_pbmihDst->biClrUsed = 256;
    m_pbmihDst->biClrImportant = 256;
    // build an identity greyscale palette (entry i -> (i,i,i))
    char* pClrPtr = ((char*)m_pbmihDst) + sizeof(BITMAPINFOHEADER);
    for (int i = 0; i < 256; i++){
      *pClrPtr++ = i;
      *pClrPtr++ = i;
      *pClrPtr++ = i;
    }
  } else {
    // normal case: decompress to 24-bit BGR
    m_pbmihDst = (BITMAPINFOHEADER*) new char[sizeof(BITMAPINFOHEADER)];
    *m_pbmihDst = *m_pbmihRaw;
    m_format = GL_BGR_EXT;
    m_pbmihDst->biBitCount = 24;
    m_pbmihDst->biClrUsed = 0;
    m_pbmihDst->biClrImportant = 0;
  }
  m_pbmihDst->biCompression = BI_RGB; // destination is always uncompressed
  m_pbmihDst->biSizeImage = 0;
  // Get the length of the movie
  m_numFrames = streaminfo.dwLength - 1;
  m_fps = (double)streaminfo.dwRate / streaminfo.dwScale;
  m_image.image.xsize = streaminfo.rcFrame.right - streaminfo.rcFrame.left;
  m_image.image.ysize = streaminfo.rcFrame.bottom - streaminfo.rcFrame.top;
  m_image.image.setCsizeByFormat(m_wantedFormat);
  m_image.image.reallocate();
  // locate a codec that can convert the raw format into our destination
  if (!(m_hic = ICLocate(ICTYPE_VIDEO, 0, m_pbmihRaw, m_pbmihDst, ICMODE_DECOMPRESS))){
    verbose(2, "[pix_film:AVI]: Could not find decompressor: %s", filename.c_str());
    goto unsupported;
  }
  if (m_format==GL_LUMINANCE){
    // hand the greyscale palette to the decompressor; failure here is
    // non-fatal (we only log it)
    if (ICERR_OK != ICDecompressSetPalette(m_hic, m_pbmihDst)){
      verbose(2, "[pix_film:AVI]: Could not set palette: %s", filename.c_str());
    }
  }
  if (ICERR_OK != ICDecompressBegin(m_hic, m_pbmihRaw, m_pbmihDst)){
    verbose(2, "[pix_film:AVI]: Could not begin decompression: %s", filename.c_str());
    goto unsupported;
  }
  //if (!m_pbmihRaw->biSizeImage)
  //  m_pbmihRaw->biSizeImage = m_xsize * m_ysize * m_csize;
  //m_nRawBuffSize = MIN(streaminfo.dwSuggestedBufferSize, m_pbmihRaw->biSizeImage);
  // take the larger of the stream's suggestion and the header's image size
  m_nRawBuffSize = MAX(static_cast<int>(streaminfo.dwSuggestedBufferSize),
                       static_cast<int>(m_pbmihRaw->biSizeImage));
  // fall back to an uncompressed-RGB estimate if both were zero
  if(!m_nRawBuffSize)m_nRawBuffSize = m_image.image.xsize * m_image.image.ysize * 3;
  m_RawBuffer = new unsigned char[m_nRawBuffSize];
  m_frame = new unsigned char[m_nRawBuffSize];
  m_reqFrame = 0;   // next frame requested
  m_curFrame = -1;  // no frame decoded yet
  return true;
unsupported:
  close();
  return false;
}
void videoOptiTrack::setProperties(gem::Properties&props) { std::string s; m_props=props; double d; bool resize=false; if(props.get("width", d)) { if(d>0) { m_pixBlock.image.xsize=d; m_resize=true; } } if(props.get("height", d)) { if(d>0) { m_pixBlock.image.ysize=d; m_resize=true; } } #define SETCAMERAPROP_BOOL(name) do { if(props.get(#name, d)) {bool b=(d>0.5); m_camera->Set##name(b); } } while(0) #define SETCAMERAPROP_INT(name) do { if(props.get(#name, d)) {int i=(int)d; m_camera->Set##name(i); } } while(0) #define SETCAMERAPROP_STR(name) do { if(props.get(#name, s)) {int i=(int)d; m_camera->Set##name(s.c_str()); } } while(0) SETCAMERAPROP_BOOL(AEC); SETCAMERAPROP_BOOL(AGC); SETCAMERAPROP_BOOL(ContinuousIR); SETCAMERAPROP_BOOL(EnableBlockingMask); SETCAMERAPROP_BOOL(HighPowerMode); SETCAMERAPROP_BOOL(IRFilter); SETCAMERAPROP_BOOL(MarkerOverlay); SETCAMERAPROP_BOOL(TextOverlay); SETCAMERAPROP_INT(Exposure); SETCAMERAPROP_INT(FrameDecimation); SETCAMERAPROP_INT(FrameRate); SETCAMERAPROP_INT(GrayscaleDecimation); SETCAMERAPROP_INT(Intensity); SETCAMERAPROP_INT(PrecisionCap); SETCAMERAPROP_INT(ShutterDelay); SETCAMERAPROP_INT(StatusIntensity); SETCAMERAPROP_INT(Threshold); SETCAMERAPROP_STR(Name); #undef SETCAMERAPROP_BOOL #undef SETCAMERAPROP_INT #undef SETCAMERAPROP_STR d=-1; if(props.get("quality", d)) { int quality=d; if(quality!=m_quality) { m_quality=quality; if(m_quality<0) { m_camera->SetVideoType(GrayscaleMode); } else { m_camera->SetVideoType(MJPEGMode); m_camera->SetMJPEGQuality(m_quality); } } } /* SetAllLED(eStatusLEDs); SetLED(eStatusLEDs, bool); SetVideoType(eVideoMode); SetLateMJPEGDecompression(bool); SetMJPEGQuality(int); SetName(const char*); SetNumeric(bool, int); SetWindow(int, int, int, int); SetObjectColor(int); SetBitMaskPixel(int x, int y, bool); SetCameraParameter(char*name, float value); */ }