/* Minimal SDL2 entry point: create a fullscreen window, hand it to the plugin,
 * and start playback of the file/URL given on the command line.
 * initPlugin() and renderVideo() come from elsewhere in the project (headers not shown). */
#include <stdio.h>
#include <SDL.h>   /* SDL2 */

int main(int argc, char *argv[]) {
    if (argc < 2) {
        printf("Usage : video_test <file | url>\n");
        return 1;
    }
    SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER);
    SDL_Window *window = SDL_CreateWindow("wallyd",
                                          SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
                                          0, 0,
                                          SDL_WINDOW_OPENGL | SDL_WINDOW_FULLSCREEN_DESKTOP);
    printf("Initialized plugin : %s\n", initPlugin((void *)window));
    renderVideo(argv[1]);
    return 0;
}
//--------------------------------------------------------------
void ofApp::draw(){
    ofBackground(0, 0, 0);
    ofFill();

    if( myScreen.isReceivingPixels() ){
        renderPixelsReceive();
    }else if(gui.getValueI("CONTENT_TYPE") == 0){
        renderVideo();
    }else if(gui.getValueI("CONTENT_TYPE") == 1){
        renderImage();
    }

    if(activeGUI) {
        ofShowCursor();
        gui.draw();
    }else{
        ofHideCursor();
    }
}
void initModify::modifyVideoSize(myConfig & myconfig)
{
    myVideo sourceVideo(myconfig._sourceFilePath);
    myVideo targetVideo(myconfig._targetFilePath);
    myVideo renderVideo(myconfig._renderFilePath);
    sourceVideo.setFrame(0);
    targetVideo.setFrame(0);
    renderVideo.setFrame(0);

    // Get all the params from the config and the videos.
    int sourceLen = sourceVideo.getVideoLength();
    int targetLen = targetVideo.getVideoLength();
    int renderLen = renderVideo.getVideoLength();
    int len = min(sourceLen, min(targetLen, renderLen));
    // Skip the last frame: reading it triggers a runtime error, so stop one frame early.
    len--;

    double myFps = targetVideo.getFps();
    Size sourceSize = sourceVideo.getVideoSize();
    Size targetSize = targetVideo.getVideoSize();
    sourceSize.width *= myconfig.sourceScale;
    sourceSize.height *= myconfig.sourceScale;
    targetSize.width *= myconfig.targetScale;
    targetSize.height *= myconfig.targetScale;

    // Create the video writers.
    VideoWriter sourceWriter(myconfig.sourceFilePath, CV_FOURCC('D', 'I', 'V', 'X'), myFps, targetSize, true);
    VideoWriter targetWriter(myconfig.targetFilePath, CV_FOURCC('D', 'I', 'V', 'X'), myFps, targetSize, true);
    VideoWriter renderWriter(myconfig.renderFilePath, CV_FOURCC('D', 'I', 'V', 'X'), myFps, targetSize, true);
    VideoWriter compositeWriter(myconfig.compositeFilePath, CV_FOURCC('D', 'I', 'V', 'X'), myFps, targetSize, true);

    int totalPercent = 0;
    for (int i = 0; i < len; i++) {
        Mat sourceFrame, targetFrame, renderFrame;
        sourceVideo >> sourceFrame;
        targetVideo >> targetFrame;
        renderVideo >> renderFrame;
        resize(sourceFrame, sourceFrame, sourceSize);
        resize(targetFrame, targetFrame, targetSize);
        resize(renderFrame, renderFrame, sourceSize);

        int nChannels = sourceFrame.channels();
        Mat outSourceFrame(targetSize, CV_8UC3),
            outTargetFrame = targetFrame.clone(),
            outRenderFrame(targetSize, CV_8UC3, CV_RGB(255, 0, 0)),
            outCompositeFrame = targetFrame.clone();

        // Copy the scaled source and render frames into the target-sized canvases,
        // shifted by the configured offset. Render pixels with red >= 100 keep the
        // red background of outRenderFrame.
        for (int r = 0; r < sourceSize.height; r++) {
            int tr = r + myconfig.offsetY;
            if (tr < 0 || tr >= targetSize.height)
                continue; // destination row is outside the target frame; don't take a row pointer there
            uchar * sFrame = sourceFrame.ptr<uchar>(r);
            uchar * sOutFrame = outSourceFrame.ptr<uchar>(tr);
            uchar * rFrame = renderFrame.ptr<uchar>(r);
            uchar * rOutFrame = outRenderFrame.ptr<uchar>(tr);
            for (int c = 0; c < sourceSize.width; c++) {
                int tc = c + myconfig.offsetX;
                if (tc >= 0 && tc < targetSize.width) {
                    sOutFrame[tc * nChannels]     = sFrame[c * nChannels];
                    sOutFrame[tc * nChannels + 1] = sFrame[c * nChannels + 1];
                    sOutFrame[tc * nChannels + 2] = sFrame[c * nChannels + 2];

                    int red   = rFrame[c * nChannels + 2];
                    int green = rFrame[c * nChannels + 1];
                    int blue  = rFrame[c * nChannels];
                    if (red < 100) {
                        rOutFrame[tc * nChannels + 2] = red;
                        rOutFrame[tc * nChannels + 1] = green;
                        rOutFrame[tc * nChannels]     = blue;
                    }
                    // else: the canvas is already red, so nothing needs to be done.
                }
            }
        }

        outCompositeFrame = myDraw::imageCombine(outSourceFrame, outTargetFrame, outRenderFrame, 0, 0);
        sourceWriter.write(outSourceFrame);
        targetWriter.write(outTargetFrame);
        renderWriter.write(outRenderFrame);
        compositeWriter.write(outCompositeFrame);

        // Report progress in 10% steps.
        int nowPercent = (int)(((double)(i + 1)) / len * 100.0);
        nowPercent = nowPercent / 10 * 10;
        if (nowPercent != totalPercent) {
            fprintf(stdout, "initial video %d%% completed\n", nowPercent);
            totalPercent = nowPercent;
        }
    }
}
void TimeLapseGenMain::on_actionGenerate_Timelapse_triggered()
{
    imageFileInfo info;
    QByteArray data;
    QString outputFilePath;
    QString destName;
    bool needEnhancement;
    QPixmap map;
    FILE *fp;
    int rc;
    enum VideoQuality quality;
    enum VideoResolution resolution;
    int framesPerSec;
    QString *outPutFileName;

    if (selectedFileNames.length() == 0) {
        QMessageBox msg;
        msg.setText("No images loaded, load images first");
        msg.setWindowTitle("Rendering Error!");
        msg.setIcon(QMessageBox::Critical);
        msg.addButton(QMessageBox::Ok);
        msg.exec();
        return;
    }

    preferencesDialog = new Preferences(this);
    if (preferencesDialog->exec() == QDialog::Accepted) {
        quality = preferencesDialog->getQuality();
        resolution = preferencesDialog->getResolution();
        framesPerSec = preferencesDialog->getFramesPerSec();
        outPutFileName = preferencesDialog->getOutFileName();
        outPutFileName->replace(" ", "\\ ");
    } else {
        return;
    }

    if (avconvPresent == false && quality == HighQualityMp4) {
        QMessageBox::critical(this, "Cannot render",
                              "Rendering high quality MP4 requires the avconv utility, please try again with a different quality setting",
                              QMessageBox::Ok);
        return;
    }

    /* First create a sub directory to store the images to render. */
    QFileInfo infoDir(selectedFileNames[0]);
    if (!infoDir.dir().exists("resized"))
        infoDir.dir().mkdir("resized");

    /* Set up the progress bar. */
    QProgressDialog progress("Resizing Images ....", QString(), 0, selectedFileNames.count(), this);
    progress.setWindowModality(Qt::WindowModal);
    progress.show();

    outputFilePath = infoDir.path();

    if (brightness == 0 && contrast == 0 && saturation == 0 && eType == None)
        needEnhancement = false;
    else
        needEnhancement = true;

    /* Now for each image do the following:
     * 1. If brightness, contrast or saturation are specified, enhance the image accordingly.
     * 2. If not, directly scale the image to the specified size.
     * 3. Save the enhanced/resized image to the destination folder, i.e. "resized".
     * 4. Update the progress bar. Since we enhance and resize in the same loop,
     *    change the label of the progress bar to "Enhancing" after 50%.
     */
    for (int i = 0; i < ImageFileInfoList.size(); i++) {
        info = ImageFileInfoList.at(i);
        if (!needEnhancement) {
            map.load(*info.getFilePath());
        } else {
            if (eType != None) {
                switch (eType) {
                case vividImage:
                    data = imageEditor->enhanceImageWithoutScaling(info.getFilePath(), 110, 200, 2000);
                    break;
                case grayScale:
                    data = imageEditor->enhanceImageWithoutScaling(info.getFilePath(), 100, 0, 100);
                    break;
                case autoEnhance:
                    data = imageEditor->enhanceImageWithoutScaling(info.getFilePath(), 110, 100, 1400);
                    break;
                }
            } else {
                data = imageEditor->enhanceImageWithoutScaling(info.getFilePath(), (brightness + 100), (saturation + 100), contrast);
            }
            map.loadFromData(data, "XPM");
        }

        destName.append(outputFilePath.toAscii());
        destName.append("/resized/");
        destName.append(info.getFileName());

        if (resolution == v720p)
            map.scaled(1280, 720, Qt::IgnoreAspectRatio, Qt::SmoothTransformation).save(destName, "JPEG", 95);
        else if (resolution == v1080p)
            map.scaled(1920, 1080, Qt::IgnoreAspectRatio, Qt::SmoothTransformation).save(destName, "JPEG", 95);
        else if (resolution == v1440p)
            map.scaled(2560, 1440, Qt::IgnoreAspectRatio, Qt::SmoothTransformation).save(destName, "JPEG", 95);

        progress.setValue(i);
        if (progress.value() == (progress.maximum() / 2)) {
            if (needEnhancement)
                progress.setLabelText("Enhancing Images....");
        }
        destName.clear();
    }

    /* Now start rendering the timelapse video. */
    /* First list all the files and direct the listing to a file. */
    QString cmd;
    cmd.append("cd ");
    cmd.append(outputFilePath.replace(" ", "\\ "));
    cmd.append("/resized; ls -1rt | grep -v files > files.txt");
    qDebug() << "LS Path : " << cmd.toAscii();
    fp = popen(cmd.toAscii().constData(), "r");
    rc = pclose(fp);
    if (rc != 0) {
        QMessageBox msg;
        msg.setWindowTitle("No Idea!");
        msg.setText("Something went wrong while rendering the timelapse...!");
        msg.setIcon(QMessageBox::Critical);
        msg.addButton(QMessageBox::Ok);
        msg.exec();
        return;
    }

    /* Build the mencoder command. */
    cmd.clear();
    cmd.append("cd ");
    cmd.append(outputFilePath);
    cmd.append("/resized; ");
    cmd.append(mencoderCommandPath->toAscii());
    cmd.append(" -idx -nosound -noskip -ovc lavc -lavcopts ");
    if (quality == HighQualityMp4) {
        cmd.append(tr("vcodec=ljpeg -mf fps=%1 'mf://@files.txt' ").arg(framesPerSec));
    } else if (quality == HighQualityAvi) {
        cmd.append(tr(" vcodec=msmpeg4:vbitrate=10000 -mf type=jpeg -mf fps=%1 'mf://@files.txt' ").arg(framesPerSec));
    } else if (quality == LowQualityMp4) {
        cmd.append(tr("vcodec=mpeg4 -mf fps=%1 'mf://@files.txt' ").arg(framesPerSec));
    }

    if (quality == HighQualityMp4) {
        /* Encode to a temporary AVI first, then transcode it to H.264 with avconv. */
        cmd.append(" -o temp.avi; ");
        cmd.append("avconv -i temp.avi -c:v libx264 -preset slow -crf ");
        cmd.append(tr("%1 ").arg(framesPerSec)); /* note: framesPerSec is passed as the CRF value here */
        cmd.append(*outPutFileName);
    } else {
        cmd.append(tr(" -o %1").arg(*outPutFileName));
    }

    qDebug() << "CMD : " << cmd.toAscii() << endl;

    /* Now execute the mencoder command. */
    renderVideo(cmd);
}