/*
 * Program entry point: builds the emulated computer, attaches a screen,
 * loads the program named on the command line, runs it, and writes RAM
 * dumps before and after execution.
 *
 * Returns 0 on success, -1 when the program argument is missing.
 */
int main(int argc, char *argv[]) {
  /* The program image path is read from argv[1]; the original check
   * (argc <= 0) could never catch a missing argument. */
  if (argc < 2) {
    fprintf(stderr, "usage: %s <program>\n", argv[0]);
    return -1;
  }

  /* %zu is the correct conversion for size_t (the type of sizeof). */
  printf("sizeof Computer: %zu\n", sizeof(struct Computer));

  struct Computer *computer = createComputer();
  struct Screen *screen = createScreen(640, 480, 80, 60);
  computer->screen = screen;

  loadProgram(computer, argv[1]);
  saveDump(&computer->ram, "init_dump.bin");

  DBG_init();
  DBG_register(&computer->cpu);
  run(computer);
  saveDump(&computer->ram, "dump.bin");
  DBG_free();

  freeComputer(computer);
  return 0;
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - bool FrameBufferSoft::initSubsystem() { // Set up the rectangle list to be used in the dirty update delete myRectList; myRectList = new RectList(); delete myOverlayRectList; myOverlayRectList = new RectList(); if(!myRectList || !myOverlayRectList) { cerr << "ERROR: Unable to get memory for SDL rects" << endl; return false; } // Create the screen if(!createScreen()) return false; // Show some info # if 0 //LUDO: if(myOSystem->settings().getBool("showinfo")) cout << "Video rendering: Software mode" << endl << endl; # endif return true; }
// Initializes the loading layer: runs the superclass setup, caches the
// window size, and builds the screen contents.  Returns false when the
// superclass initialization fails.
bool LoadingChan::init()
{
  bool ok = CCLayer::init();
  if (ok) {
    _screenSize = CCDirector::sharedDirector()->getWinSize();
    createScreen();
  }
  return ok;
}
// Creates the platform screen and subscribes this app to its error
// events.  Ownership of the returned screen passes to the caller.
CScreen*
CClientApp::openClientScreen()
{
  CScreen* screen = createScreen();
  EVENTQUEUE->adoptHandler(
      IScreen::getErrorEvent(),
      screen->getEventTarget(),
      new TMethodEventJob<CClientApp>(this, &CClientApp::handleScreenError));
  return screen;
}
void AnsiWidget::insetTextScreen(int x, int y, int w, int h) { if (_back == _screens[TEXT_SCREEN]) { _back = _screens[USER_SCREEN1]; } TextScreen *textScreen = (TextScreen *)createScreen(TEXT_SCREEN); textScreen->inset(x, y, w, h, _front); _front = _back = textScreen; _front->_dirty = true; flush(true); }
// Creates the platform screen and attaches the file-local error handler
// to its error event.  Ownership of the screen passes to the caller.
static CScreen*
openServerScreen()
{
  CScreen* screen = createScreen();
  CFunctionEventJob* errorJob = new CFunctionEventJob(&handleScreenError);
  EVENTQUEUE->adoptHandler(IScreen::getErrorEvent(),
                           screen->getEventTarget(), errorJob);
  return screen;
}
/*
 * Posts the big blind (5 chips) for the given player, announces it on an
 * info screen, and waits for the user to press enter.
 *
 * Returns the amount posted (always 5).
 */
int bigBlindBet (P_PLAYER player) {
  processMoneyBet(player, 5, 5);

  /* snprintf bounds the message; the original strcpy/strcat into a
   * 32-byte buffer could overflow with a long player name. */
  char infoText[64];
  snprintf(infoText, sizeof(infoText), "%s is BIG BLIND", player->name);

  createScreen (infoText, "", "PRESS ENTER TO CONTINUE");
  getchar();
  return 5;
}
// Creates the client screen, applies the drag & drop preference from the
// parsed arguments, and wires the screen's error event to this app.
synergy::Screen*
ClientApp::openClientScreen()
{
  synergy::Screen* screen = createScreen();
  screen->setEnableDragDrop(argsBase().m_enableDragDrop);
  m_events->adoptHandler(
      m_events->forIScreen().error(),
      screen->getEventTarget(),
      new TMethodEventJob<ClientApp>(this, &ClientApp::handleScreenError));
  return screen;
}
void AnsiWidget::insetMenuScreen(int x, int y, int w, int h) { if (_back == _screens[MENU_SCREEN]) { _back = _screens[USER_SCREEN1]; } TextScreen *menuScreen = (TextScreen *)createScreen(MENU_SCREEN); menuScreen->_x = x; menuScreen->_y = y; menuScreen->_width = w; menuScreen->_height = h; menuScreen->setOver(_front); _front = _back = menuScreen; _front->_dirty = true; }
// Sets up the menu layer: screen contents, touch input, per-frame updates
// and looping background music.  Returns false when the superclass
// initialization fails.
bool MenuLayer::init()
{
  if (!CCLayer::init()) {
    return false;
  }
  _screenSize = CCDirector::sharedDirector()->getWinSize();
  createScreen();
  setTouchEnabled(true);
  scheduleUpdate();

  // Loop the menu background track.
  const char *track =
      CCFileUtils::sharedFileUtils()->fullPathFromRelativePath("background.mp3");
  SimpleAudioEngine::sharedEngine()->playBackgroundMusic(track, true);
  return true;
}
// Creates the server screen and routes its error, suspend and resume
// notifications to the matching handlers on this app.
CScreen*
CServerApp::openServerScreen()
{
  CScreen* screen = createScreen();
  void* target = screen->getEventTarget();
  m_events->adoptHandler(m_events->forIScreen().error(), target,
      new TMethodEventJob<CServerApp>(this, &CServerApp::handleScreenError));
  m_events->adoptHandler(m_events->forIScreen().suspend(), target,
      new TMethodEventJob<CServerApp>(this, &CServerApp::handleSuspend));
  m_events->adoptHandler(m_events->forIScreen().resume(), target,
      new TMethodEventJob<CServerApp>(this, &CServerApp::handleResume));
  return screen;
}
void QEGLPlatformIntegration::initialize() { m_display = eglGetDisplay(nativeDisplay()); if (m_display == EGL_NO_DISPLAY) qFatal("Could not open egl display"); EGLint major, minor; if (!eglInitialize(m_display, &major, &minor)) qFatal("Could not initialize egl display"); m_screen = createScreen(); screenAdded(m_screen); m_inputContext = QPlatformInputContextFactory::create(); m_vtHandler.reset(new QFbVtHandler); }
// on "init" you need to initialize your instance bool MainMenuScene::init() { bool bRet = false; do { ////////////////////////////// // 1. super init first CC_BREAK_IF(! CCLayer::init()); createScreen(); bRet = true; } while (0); return bRet; }
// Creates the server screen and subscribes this app to its error,
// suspend and resume events via the global event queue.
CScreen*
CServerApp::openServerScreen()
{
  CScreen* screen = createScreen();
  void* target = screen->getEventTarget();
  EVENTQUEUE->adoptHandler(IScreen::getErrorEvent(), target,
      new TMethodEventJob<CServerApp>(this, &CServerApp::handleScreenError));
  EVENTQUEUE->adoptHandler(IScreen::getSuspendEvent(), target,
      new TMethodEventJob<CServerApp>(this, &CServerApp::handleSuspend));
  EVENTQUEUE->adoptHandler(IScreen::getResumeEvent(), target,
      new TMethodEventJob<CServerApp>(this, &CServerApp::handleResume));
  return screen;
}
int AnsiWidget::insetTextScreen(int x, int y, int w, int h) { int result = 0; for (int i = 0; i < MAX_SCREENS; i++) { if (_front == _screens[i]) { result = i; break; } } if (_back == _screens[TEXT_SCREEN]) { _back = _screens[USER_SCREEN1]; } TextScreen *textScreen = (TextScreen *)createScreen(TEXT_SCREEN); textScreen->inset(x, y, w, h, _front); _front = _back = textScreen; _front->_dirty = true; flush(true); return result; }
void NGLScene::initializeGL() { // we need to initialise the NGL lib which will load all of the OpenGL functions, this must // be done once we have a valid GL context but before we call any GL commands. If we dont do // this everything will crash ngl::NGLInit::instance(); glClearColor(1.0f, 1.0f, 1.0f, 1.0f); // Grey Background // enable depth testing for drawing glEnable(GL_DEPTH_TEST); // enable multisampling for smoother drawing glEnable(GL_MULTISAMPLE); createShader(); createScreen(); }
// Creates the server screen, applies the drag & drop preference, and
// routes its error, suspend and resume events to this app's handlers.
synergy::Screen*
ServerApp::openServerScreen()
{
  synergy::Screen* screen = createScreen();
  screen->setEnableDragDrop(argsBase().m_enableDragDrop);
  void* target = screen->getEventTarget();
  m_events->adoptHandler(m_events->forIScreen().error(), target,
      new TMethodEventJob<ServerApp>(this, &ServerApp::handleScreenError));
  m_events->adoptHandler(m_events->forIScreen().suspend(), target,
      new TMethodEventJob<ServerApp>(this, &ServerApp::handleSuspend));
  m_events->adoptHandler(m_events->forIScreen().resume(), target,
      new TMethodEventJob<ServerApp>(this, &ServerApp::handleResume));
  return screen;
}
// Engine startup: registers debug channels, switches to the interpreter's
// graphics mode, and allocates every engine subsystem.
void GlkEngine::initialize() {
	// Set up debug channels
	DebugMan.addDebugChannel(kDebugCore, "core", "Core engine debug level");
	DebugMan.addDebugChannel(kDebugScripts, "scripts", "Game scripts");
	DebugMan.addDebugChannel(kDebugGraphics, "graphics", "Graphics handling");
	DebugMan.addDebugChannel(kDebugSound, "sound", "Sound and Music handling");

	initGraphicsMode();

	// NOTE: allocation order below matters — the configuration is created
	// before the screen, and the screen before the window manager, which
	// receives it in its constructor.
	_conf = new Conf(getInterpreterType());
	_screen = createScreen();
	_screen->initialize();
	_clipboard = new Clipboard();
	_events = new Events();
	_pcSpeaker = new PCSpeaker(_mixer);
	_pictures = new Pictures();
	_selection = new Selection();
	_sounds = new Sounds();
	_streams = new Streams();
	_windows = new Windows(_screen);
}
int AnsiWidget::insetMenuScreen(int x, int y, int w, int h) { int result = 0; for (int i = 0; i < MAX_SCREENS; i++) { if (_front == _screens[i]) { result = i; break; } } if (_back == _screens[MENU_SCREEN]) { _back = _screens[USER_SCREEN1]; } TextScreen *menuScreen = (TextScreen *)createScreen(MENU_SCREEN); menuScreen->_x = x; menuScreen->_y = y; menuScreen->_width = w; menuScreen->_height = h; menuScreen->setOver(_front); _front = _back = menuScreen; _front->_dirty = true; return result; }
// Constructs the game layer: builds the screen, loads the level list,
// prepares physics and object pools, and enables touch/accelerometer input.
GameLayer::GameLayer()
{
  _screenSize = CCDirector::sharedDirector()->getWinSize();
  _running = false;

  createScreen();

  // Load the level definitions once for the lifetime of the layer.
  std::string levelsFile =
      CCFileUtils::sharedFileUtils()->fullPathForFilename("levels.plist");
  _levels = CCArray::createWithContentsOfFileThreadSafe(levelsFile.c_str());
  _levels->retain(); // create* returns autoreleased; retain to own it

  initPhysics();
  createPools();

  setTouchEnabled(true);
  setAccelerometerEnabled(true);
}
/*
 * Runs one AI betting turn: asks the AI for a bet, applies it, and shows
 * the resulting action on an info screen, waiting for enter.
 *
 * Returns the value from enterBetAI: -1 for fold, 0 for check/call, or
 * the raise amount.
 */
int betAI(P_PLAYER player) {
  int minimum = minimumBet(player);
  int bet = enterBetAI(player, minimum);
  char textBuffer[128];

  processMoneyBet(player, bet, minimum);

  /* snprintf bounds every message; the original sprintf into a 50-byte
   * buffer could overflow with a long player name. */
  switch (bet) {
  case -1:
    snprintf(textBuffer, sizeof(textBuffer), "%s folded", player->name);
    break;
  case 0:
    if (minimum > 0)
      snprintf(textBuffer, sizeof(textBuffer), "%s called to %d",
               player->name, player->moneyBet);
    else
      snprintf(textBuffer, sizeof(textBuffer), "%s checked", player->name);
    break;
  default:
    snprintf(textBuffer, sizeof(textBuffer),
             "%s has raised by %d to bet of %d",
             player->name, bet, player->moneyBet);
    break;
  }

  /* Going all-in overrides whichever message was built above. */
  if (player->allIn) {
    snprintf(textBuffer, sizeof(textBuffer),
             "%s goes ALL IN with bet of %d",
             player->name, player->moneyBet);
  }

  createScreen(textBuffer, "", "PRESS ENTER TO CONTINUE");
  getchar();
  return bet;
}
// Constructs the graphics engine: starts SDL and TTF, zeroes the
// background state, creates the window surface, and sets defaults.
GraphicsEngine::GraphicsEngine(int width, int height, int bpp) {
  // Bring up SDL and its font support.
  SDL_Init(SDL_INIT_EVERYTHING);
  TTF_Init();

  // Used when clearing the window border if the background image is
  // smaller than the window.
  background = NULL;
  smallBG = false;
  sideRect = NULL;
  downRect = NULL;

  // Start the timers at 1 rather than 0 to avoid division by zero.
  updateTime = 1;
  eventTime = 1;
  refreshTime = 1;

  screenWidth = width;
  screenHeight = height;
  screenBPP = bpp;
  createScreen();

  // Default text colour: white.
  textColor.setR(0xFF);
  textColor.setG(0xFF);
  textColor.setB(0xFF);
  textFont = NULL;
}
// Entry point for an OpenSceneGraph + OpenCV AR demo: plays a video file
// as a full-window background via a slave HUD camera (createScreen), loads
// a car model whose named parts are pushed apart ("exploded") after ~300
// frames, and aligns the view matrix with AR markers detected per frame.
int main(int argc, char** argv)
{
    // use an ArgumentParser object to manage the program arguments.
    osg::ArgumentParser arguments(&argc,argv);

    // set up the usage document, in case we need to print out how to use this program.
    arguments.getApplicationUsage()->setApplicationName(arguments.getApplicationName());
    arguments.getApplicationUsage()->setDescription(arguments.getApplicationName()+" example demonstrates the use of ImageStream for rendering movies as textures.");
    arguments.getApplicationUsage()->setCommandLineUsage(arguments.getApplicationName()+" [options] filename ...");
    arguments.getApplicationUsage()->addCommandLineOption("-h or --help","Display this information");
    arguments.getApplicationUsage()->addCommandLineOption("--texture2D","Use Texture2D rather than TextureRectangle.");
    arguments.getApplicationUsage()->addCommandLineOption("--shader","Use shaders to post process the video.");
    arguments.getApplicationUsage()->addCommandLineOption("--interactive","Use camera manipulator to allow movement around movie.");
    arguments.getApplicationUsage()->addCommandLineOption("--flip","Flip the movie so top becomes bottom.");

    // construct the viewer.
    osgViewer::Viewer viewer(arguments);

    // NOTE(review): argc() still counts the program name, so this condition
    // can never be true — a missing filename is not actually caught here.
    if (arguments.argc()<1)
    {
        arguments.getApplicationUsage()->write(std::cout,osg::ApplicationUsage::COMMAND_LINE_OPTION);
        return 1;
    }

    osg::ref_ptr<osg::Group> root = new osg::Group;

    /*
    osg::Light* light = new osg::Light();
    light->setPosition(osg::Vec4d(-500.0, 1000.0, 500.0, 1.0));
    light->setDirection(osg::Vec3d(5.0, -10.0, -5.0));
    light->setSpotCutoff(70);
    light->setAmbient(osg::Vec4d(0.05, 0.05, 0.05, 1.0));
    light->setDiffuse(osg::Vec4d(0.5, 0.5, 0.5, 1.0));
    //light->setQuadraticAttenuation(0.001);
    osg::LightSource* lightSource = new osg::LightSource();
    lightSource->setLight(light);
    //osg::Light * attachedlight = lightSource->getLight();
    //attache light to root group
    root->addChild(lightSource);
    //activate light
    osg::StateSet* stateSet = root->getOrCreateStateSet();
    lightSource->setStateSetModes(*stateSet, osg::StateAttribute::ON);
    osg::StateSet* stateset = root->getOrCreateStateSet();
    stateset->setMode(GL_LIGHTING,osg::StateAttribute::ON);
    */

    osg::ref_ptr<osg::Geode> geode = new osg::Geode;

    //OpenCV-AR
    // Capture source: a pre-recorded video file stands in for the camera.
    CvCapture* cameraCapture;
    CvCapture* fileCapture;
    //cameraCapture = cvCreateCameraCapture(0);
    fileCapture = cvCreateFileCapture("video/whal.avi");
    cameraCapture = fileCapture;
    if(!cameraCapture) {
        fprintf(stderr,"OpenCV: Create camera capture failed\n");
        return 1;
    }

    //printf("%f\n", cvGetCaptureProperty(cameraCapture, CV_CAP_PROP_FPS));
    //cvSetCaptureProperty(cameraCapture, CV_CAP_PROP_FRAME_WIDTH, 1280);
    //cvSetCaptureProperty(cameraCapture, CV_CAP_PROP_FRAME_HEIGHT, 960);
    //cvSetCaptureProperty(cameraCapture, CV_CAP_PROP_FPS, 15);

    // The first frame defines the size/format of the working images.
    IplImage* frame = cvQueryFrame(cameraCapture);
    IplImage* flipFrame = cvCreateImage(cvGetSize(frame), frame->depth, frame->nChannels);

    //osg::Image* image = osgDB::readImageFile("aclib-large.png");
    osg::Image* image = new osg::Image();
    //image->setPixelBufferObject( new osg::PixelBufferObject(image));
    // DYNAMIC: the image data is rewritten from the capture every frame.
    image->setDataVariance( osg::Object::DYNAMIC );
    iplImageToOsgImage(flipFrame, image);

    //load model
    osg::ref_ptr<osg::PositionAttitudeTransform> modelPat = new osg::PositionAttitudeTransform();
    //osg::ref_ptr<osg::Node> loadedModel = osgDB::readNodeFile("models/Cars/AstonMartin-DB9.3ds");
    osg::ref_ptr<osg::Node> loadedModel = osgDB::readNodeFile("models/ferrari_car_2.osg");
    modelPat->addChild(loadedModel);
    modelPat->setScale(osg::Vec3(0.5, 0.5, 0.5));
    // Rotate the car upright (~ -90 degrees about X; 3.14/2 approximates pi/2).
    modelPat->setAttitude(osg::Quat(3.14 / 2, osg::Vec3d(-1.0, 0.0, 0.0)));
    // NOTE(review): loadedModel is passed to addChild above before this null
    // check — the check should precede its first use.
    if (!loadedModel)
    {
        std::cout << "No model data loaded" << std::endl;
        return 1;
    }

    //C_BODY
    // Collect the transform nodes of each named car part so they can be
    // animated independently (roof, doors, wheels, bumpers, engine, ...).
    std::vector<osg::MatrixTransform*> topMtList = getMatrixTransformListByName("C_TOP", loadedModel);
    std::vector<osg::MatrixTransform*> leftDoorMtList = getMatrixTransformListByName("C_LDOOR", loadedModel);
    std::vector<osg::MatrixTransform*> rightDoorMtList = getMatrixTransformListByName("C_RDOOR", loadedModel);
    std::vector<osg::MatrixTransform*> leftWheelsMtList = getMatrixTransformListByName("C_LWS", loadedModel);
    std::vector<osg::MatrixTransform*> rightWheelsMtList = getMatrixTransformListByName("C_RWS", loadedModel);
    std::vector<osg::MatrixTransform*> forwardBumperMtList = getMatrixTransformListByName("C_BUMP_F", loadedModel);
    std::vector<osg::MatrixTransform*> backBumperMtList = getMatrixTransformListByName("C_BUMP_B", loadedModel);
    std::vector<osg::MatrixTransform*> engineMtList = getMatrixTransformListByName("C_ENGINE", loadedModel);
    std::vector<osg::MatrixTransform*> bodyMtList = getMatrixTransformListByName("C_BODY", loadedModel);
    std::vector<osg::MatrixTransform*> salonMtList = getMatrixTransformListByName("C_SALON", loadedModel);

    /*
    //findNodeVisitor findNode("C_BODY");
    FindNamedNode findNode("C_BODY");
    loadedModel->accept(findNode);
    std::vector<osg::Node*> foundNodeList = findNode.getNodeList();
    int listCount = foundNodeList.size();
    printf("%d\n", listCount);
    std::vector<osg::MatrixTransform*> bodyMtList;
    //vector<int>::const_iterator i;
    for(int i = 0; i < listCount; i++) {
        bodyMtList.push_back(new osg::MatrixTransform());
        //obj4Mt->setName("obj4Mt");
        osg::Group* foundNodeParent = foundNodeList[i]->getParent(0);
        bodyMtList[i]->addChild(foundNodeList[i]);
        foundNodeParent->addChild(bodyMtList[i]);
        foundNodeParent->removeChild(foundNodeList[i]);
    }
    */

    osg::Matrix translateMatrix;
    //osg::Node* foundNode = NULL;
    //foundNode = findNamedNode("obj5", loadedModel);
    //osg::ref_ptr<osg::MatrixTransform> obj5Mt = new osg::MatrixTransform();
    //obj4Mt->setName("obj5Mt");
    //osg::Group* foundNodeParent = foundNode->getParent(0);
    //obj5Mt->addChild(foundNode);
    //foundNodeParent->addChild(obj5Mt);
    //foundNodeParent->removeChild(foundNode);

    osg::Matrix rotateMatrix;
    float theta(M_PI * 0.1f);
    osg::Vec3f axis (1.0, 1.0, 0.1);
    osg::Quat wheelAxis( theta, axis);

    // Inflate the model's bounding volume a little and use it to place lights.
    osg::BoundingSphere modelBoundingSphere = modelPat->getBound();
    printf("%f\n", modelBoundingSphere.radius());
    modelBoundingSphere.radius() *= 1.5f;
    osg::BoundingBox modelBoundingBox;
    modelBoundingBox.expandBy(modelBoundingSphere);

    //Light group
    //create light
    root->addChild(createLights(modelBoundingBox, root->getOrCreateStateSet()));

    //collect scene
    // only clear the depth buffer — the video HUD repaints the colour buffer
    viewer.getCamera()->setClearMask(GL_DEPTH_BUFFER_BIT);

    // create a HUD as slave camera attached to the master view.
    viewer.setUpViewAcrossAllScreens();
    osgViewer::Viewer::Windows windows;
    viewer.getWindows(windows);
    if (windows.empty()) return 1;
    osg::Camera* screenCamera = createScreen(image);

    // set up cameras to rendering on the first window available.
    screenCamera->setGraphicsContext(windows[0]);
    screenCamera->setViewport(0,0,windows[0]->getTraits()->width, windows[0]->getTraits()->height);
    //screenCamera->setViewport(0, 0, 6.4, 4.8);
    viewer.addSlave(screenCamera, false);

    //root->addChild( geode.get());
    //root->addChild( createPyramid());
    //root->addChild( createScreen());//100.0, 100.0, image));
    root->addChild(modelPat);
    //root->addChild(objectPat);

    // set the scene to render
    viewer.setSceneData(root.get());
    viewer.realize();
    viewer.getCamera()->setClearColor(osg::Vec4(0.0f,0.0f,0.0f,1.0f));
    /*
    //viewer.getCamera()->setProjameraCaptureectionMatrixAsOrtho(topleft.x(),bottomright.x(),topleft.y(),bottomright.y(), -10, 10);
    //viewer.getCamera()->setProjectionMatrixAsPerspective(60.0, screenAspectRatio, 100.0, -1.0);
    */
    viewer.getCamera()->setViewMatrixAsLookAt(osg::Vec3d(100.0, 100.0, 100.0), osg::Vec3d(0.0, 0.0, 0.0), osg::Vec3d(0.0, 1.0, 0.0));

    //Define vector of OpenCV-AR template
    vector<CvarTemplate> openCvArTemplateList;
    //load template
    CvarTemplate openCvArTemplate1;
    cvarLoadTemplateTag(&openCvArTemplate1,"aclib.png");
    //cvarLoadTemplateTag(&openCvArTemplate1,"markers/431.jpg");
    openCvArTemplateList.push_back(openCvArTemplate1);
    CvarTemplate openCvArTemplate2;
    cvarLoadTemplate(&openCvArTemplate2,"aclib.png",1);
    //cvarLoadTemplate(&openCvArTemplate2,"markers/431.jpg", 1);
    openCvArTemplateList.push_back(openCvArTemplate2);

    //Define OpenCV-AR marker;
    vector<CvarMarker> openCvArMarker;

    //Create OpenCV-AR camera
    CvarCamera openCvArCamera;
    //IplImage* frame = osgImage2IplImage(image);
    //cvarReadCamera("camera.yml", &openCvArCamera);
    // NULL filename — presumably falls back to default camera parameters;
    // verify against the cvarReadCamera implementation.
    cvarReadCamera(NULL, &openCvArCamera);
    cvarCameraScale(&openCvArCamera,frame->width,frame->height);
    viewer.getCamera()->setProjectionMatrix(osg::Matrixd(openCvArCamera.projection));

    //CvarOpticalFlow *flow;
    // srand(time(NULL));
    //int thresh = 60;
    double matchThresh = 0.7;
    //int state = 0;

    int counter = 0;
    // Main loop: one capture frame per rendered frame.  Key handling is
    // effectively disabled (c is always 0 because cvWaitKey is commented out).
    while(!viewer.done()) {
        counter++;
        char c = 0;//cvWaitKey(33);
        //printf("%d\n", c);
        if (c == 27) { // ESC pressed
            printf("esc\n");
            break;
        }
        if (c == 107) { // matchThresh up
            matchThresh = matchThresh + 0.01;
        }
        if (c == 106) { // matchThresh down
            matchThresh = matchThresh - 0.01;
        }
        // From frame 300 to 309, push the car apart into its named pieces
        // (exploded view), moving each group a fixed step per frame.
        if ((counter >= 300) and (counter < 310)) {
            //Top
            translateMatrixTransformList(topMtList, 0.0, -1.2, 0.0);
            //Engine
            translateMatrixTransformList(engineMtList, 0.0, -1.0, 0.0);
            //Body
            translateMatrixTransformList(bodyMtList, 0.0, -0.8, 0.0);
            //Salon
            translateMatrixTransformList(salonMtList, 0.0, -0.4, 0.0);
            //leftWeels
            translateMatrixTransformList(leftWheelsMtList, -0.5, 0.0, 0.0);
            //rightWeels
            translateMatrixTransformList(rightWheelsMtList, 0.5, 0.0, 0.0);
            //Left doors
            translateMatrixTransformList(leftDoorMtList, -0.5, 0.0, 0.0);
            //Right doors
            translateMatrixTransformList(rightDoorMtList, 0.5, 0.0, 0.0);
            //Forward bumper
            translateMatrixTransformList(forwardBumperMtList, 0.0, 0.0, 0.5);
            //back bumper
            translateMatrixTransformList(backBumperMtList, 0.0, 0.0, -0.5);
        }

        //rotateMatrix.makeRotate(rotateMatrix.getRotate() * wheelAxis);
        //obj5Mt->setMatrix(rotateMatrix);
        //thresh = rand() % 256;
        //printf("Match thresh value: %f\n", matchThresh);

        frame = cvQueryFrame(cameraCapture);
        cvCopy(frame, flipFrame);
        cvFlip(flipFrame, flipFrame);
        //cvNamedWindow("Original", 1);
        //cvShowImage("Original", frame);
        iplImageToOsgImage(frame, image);
        // Tell OSG the texture data changed so it is re-uploaded.
        image->dirty();

        //osg::Image* = osg::Image(*image);
        //frame = osgImage2IplImage(image);

        //AR detection
        //GLdouble modelview[16] = {0};
        //Detect marker
        int arDetect = cvarArMultRegistration(flipFrame,&openCvArMarker,openCvArTemplateList,&openCvArCamera, 60, 0.91);
        //printf("Marker found: %d\n", arDetect);

        viewer.getCamera()->setViewMatrixAsLookAt(osg::Vec3d(0.0, 0.0, 100.0), osg::Vec3d(0.0, 0.0, 1000.0), osg::Vec3d(0.0, 1.0, 0.0));

        // Copy each detected marker's 4x4 modelview into OSG form; when
        // several markers are found, the last one wins.
        for(int i=0;i<arDetect;i++) {
            //if(openCvArMarker[i].tpl == 0);
            osg::Matrixf osgModelViewMatrix;
            for (int column = 0; column < 4; column++) {
                for (int row = 0; row < 4; row++) {
                    osgModelViewMatrix(column, row) = openCvArMarker[i].modelview[column * 4 + row];
                }
            }
            viewer.getCamera()->setViewMatrix(osgModelViewMatrix);
        }

        viewer.frame();
    }
    return 0;
}
// Convenience constructor: builds the full-screen quad ("screen") for the
// given dimensions and delegates to the main constructor with it and the
// supplied shader.
// NOTE(review): createScreen runs before this object is constructed, so it
// is presumably a static/free helper — confirm it uses no instance state.
FXAA::FXAA(const unsigned int width, const unsigned int height, const std::shared_ptr<Shader> shader):FXAA(createScreen(width, height), shader) {
}
/*
 * Opens the decoder for stream `stream_index` of `is->pFormatCtx` and
 * wires it into the player state: audio streams get an engine and buffer
 * queue, video streams get a screen, a decode thread and a scaler.
 *
 * Returns 0 on success, -1 on invalid index or codec failure.
 */
int stream_component_open(VideoState *is, int stream_index) {
	AVFormatContext *pFormatCtx = is->pFormatCtx;
	AVCodecContext *codecCtx = NULL;
	AVCodec *codec = NULL;
	AVDictionary *optionsDict = NULL;

	if(stream_index < 0 || stream_index >= pFormatCtx->nb_streams) {
		return -1;
	}

	// Get a pointer to the codec context for the video stream
	codecCtx = pFormatCtx->streams[stream_index]->codec;

	if(codecCtx->codec_type == AVMEDIA_TYPE_AUDIO) {
		is->audio_callback = audio_callback;

		// Set audio settings from codec info
		// NOTE(review): malloc result is not checked for NULL here.
		AudioPlayer *player = malloc(sizeof(AudioPlayer));
		is->audio_player = player;
		createEngine(&is->audio_player);
		createBufferQueueAudioPlayer(&is->audio_player, is, codecCtx->channels, codecCtx->sample_rate);
		//is->audio_hw_buf_size = 4096;
	} else if (codecCtx->codec_type == AVMEDIA_TYPE_VIDEO) {
		// Set video settings from codec info
		// NOTE(review): malloc result is not checked for NULL here either.
		VideoPlayer *player = malloc(sizeof(VideoPlayer));
		is->video_player = player;
		createVideoEngine(&is->video_player);
		// Initial screen with zero size; re-created below once the stream's
		// real dimensions are known.
		createScreen(&is->video_player, is->native_window, 0, 0);
	}

	codec = avcodec_find_decoder(codecCtx->codec_id);
	if(!codec || (avcodec_open2(codecCtx, codec, &optionsDict) < 0)) {
		fprintf(stderr, "Unsupported codec!\n");
		return -1;
	}

	switch(codecCtx->codec_type) {
	case AVMEDIA_TYPE_AUDIO:
		is->audioStream = stream_index;
		is->audio_st = pFormatCtx->streams[stream_index];
		is->audio_buf_size = 0;
		is->audio_buf_index = 0;

		/* averaging filter for audio sync */
		is->audio_diff_avg_coef = exp(log(0.01 / AUDIO_DIFF_AVG_NB));
		is->audio_diff_avg_count = 0;
		/* Correct audio only if larger error than this */
		is->audio_diff_threshold = 2.0 * SDL_AUDIO_BUFFER_SIZE / codecCtx->sample_rate;

		is->sws_ctx_audio = swr_alloc();
		if (!is->sws_ctx_audio) {
			fprintf(stderr, "Could not allocate resampler context\n");
			return -1;
		}

		memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
		packet_queue_init(&is->audioq);
		break;
	case AVMEDIA_TYPE_VIDEO:
		is->videoStream = stream_index;
		is->video_st = pFormatCtx->streams[stream_index];

		is->frame_timer = (double)av_gettime() / 1000000.0;
		is->frame_last_delay = 40e-3;
		is->video_current_pts_time = av_gettime();

		packet_queue_init(&is->videoq);
		// Re-create the screen at the stream's actual dimensions.
		createScreen(&is->video_player, NULL, is->video_st->codec->width, is->video_st->codec->height);
		is->video_tid = malloc(sizeof(*(is->video_tid)));
		// uncomment for video
		pthread_create(is->video_tid, NULL, (void *) &video_thread, is);
		is->sws_ctx = createScaler(&is->video_player, is->video_st->codec);
		codecCtx->get_buffer2 = our_get_buffer;
		break;
	default:
		break;
	}
	return 0;
}