void video_refresh_timer(void *userdata) {

        VideoState *is = (VideoState *) userdata;
        //VideoPicture *vp;

        if (is->video_st) {
                if (is->pictq_size == 0) {
                        schedule_refresh(is, 1);
                } else {
                        //vp = &is->pictq[is->pictq_rindex];
                        /* Normally a ton of timing code goes here.
                           For now we just guess at a delay. You can
                           increase or decrease this value and hard-code
                           the timing, but I don't suggest that ;)
                           We'll learn how to do it for real later. */
                        schedule_refresh(is, 80);

                        /* show the picture! */
                        video_display(is);

                        /* update queue for next picture! */
                        if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE) {
                                is->pictq_rindex = 0;
                        }
                        SDL_LockMutex(is->pictq_mutex);
                        is->pictq_size--;
                        SDL_CondSignal(is->pictq_cond);
                        SDL_UnlockMutex(is->pictq_mutex);
                }
        } else {
                schedule_refresh(is, 100);
        }
}
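Every variant in this listing re-arms its own timer through schedule_refresh(). In the tutorial-style code these examples come from, that helper is just a thin wrapper around SDL 1.2's SDL_AddTimer that posts an FF_REFRESH_EVENT back to the event loop; a minimal sketch (assuming that timer API and the FF_REFRESH_EVENT user event used in the mains further down) looks like this:

static Uint32 sdl_refresh_timer_cb(Uint32 interval, void *opaque) {
    SDL_Event event;
    event.type = FF_REFRESH_EVENT;
    event.user.data1 = opaque;
    SDL_PushEvent(&event);
    return 0; /* returning 0 cancels this timer; a new one is armed per frame */
}

static void schedule_refresh(VideoState *is, int delay) {
    /* post FF_REFRESH_EVENT after 'delay' milliseconds */
    SDL_AddTimer(delay, sdl_refresh_timer_cb, is);
}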
Example 2
void video_refresh_timer(void *userdata) {

  VideoState *is = (VideoState *)userdata;
  VideoPicture *vp;
  double actual_delay, delay, sync_threshold, ref_clock, diff;
  
  if(is->video_ctx) {
    if(is->pictq_size == 0) {
      schedule_refresh(is, 1);
    } else {
      vp = &is->pictq[is->pictq_rindex];

      delay = vp->pts - is->frame_last_pts; /* the pts from last time */
      if(delay <= 0 || delay >= 1.0) {
        /* if incorrect delay, use previous one */
        delay = is->frame_last_delay;
      }
      /* save for next time */
      is->frame_last_delay = delay;
      is->frame_last_pts = vp->pts;

      /* update delay to sync to audio */
      ref_clock = get_audio_clock(is);
      diff = vp->pts - ref_clock;

      /* Skip or repeat the frame, taking the delay into account.
         FFPlay still doesn't "know if this is the best guess." */
      sync_threshold = (delay > AV_SYNC_THRESHOLD) ? delay : AV_SYNC_THRESHOLD;
      if(fabs(diff) < AV_NOSYNC_THRESHOLD) {
        if(diff <= -sync_threshold) {
          delay = 0;
        } else if(diff >= sync_threshold) {
          delay = 2 * delay;
        }
      }
      is->frame_timer += delay;
      /* compute the REAL delay */
      actual_delay = is->frame_timer - (av_gettime() / 1000000.0);
      if(actual_delay < 0.010) {
        /* Really it should skip the picture instead */
        actual_delay = 0.010;
      }
      schedule_refresh(is, (int)(actual_delay * 1000 + 0.5));
      /* show the picture! */
      video_display(is);
      
      /* update queue for next picture! */
      if(++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE) {
        is->pictq_rindex = 0;
      }

      SDL_LockMutex(is->pictq_mutex);
      is->pictq_size--;
      SDL_CondSignal(is->pictq_cond);
      SDL_UnlockMutex(is->pictq_mutex);
    }
  } else {
    schedule_refresh(is, 100);
  }
}
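Example 2 syncs each frame against get_audio_clock(is). In the tutorial code these examples appear to derive from, that clock is the pts of the last decoded audio data minus whatever is still sitting unplayed in the audio buffer. A sketch along those lines (field names such as audio_clock, audio_buf_size and audio_buf_index are assumptions taken from that tutorial, not shown in this listing):

double get_audio_clock(VideoState *is) {
    double pts = is->audio_clock;                                /* pts of last decoded audio data */
    int hw_buf_size = is->audio_buf_size - is->audio_buf_index;  /* bytes decoded but not yet played */
    int bytes_per_sec = 0;
    if (is->audio_st) {
        int n = is->audio_st->codec->channels * 2;               /* 2 bytes per S16 sample */
        bytes_per_sec = is->audio_st->codec->sample_rate * n;
    }
    if (bytes_per_sec)
        pts -= (double)hw_buf_size / bytes_per_sec;              /* back off by the buffered duration */
    return pts;
}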
Example 3
void video_refresh_timer(void* data) {
	VideoState* is = (VideoState*) data;
	VideoPicture* vp;
	double actual_delay, delay, sync_threshold, ref_clock, diff;

	if (is->video_st) {
		if (is->pictq_size == 0) {
			schedule_refresh(is, 1);
		} else {
			vp = &is->pictq[is->pictq_rindex];

			is->video_current_pts = vp->pts;
			is->video_current_pts_time = av_gettime();

			delay = vp->pts - is->frame_last_pts;
			if (delay <= 0 || delay >= 1.0) {
				delay = is->frame_last_delay;
			}
			/* save for next time */
			is->frame_last_delay = delay;
			is->frame_last_pts = vp->pts;

			/* update delay to sync to audio if video is not the master clock */
			if (is->av_sync_type != AV_SYNC_VIDEO_MASTER) {
				ref_clock = get_audio_clock(is);
				diff = vp->pts - ref_clock;
				sync_threshold =
						(delay > AV_SYNC_THRESHOLD) ? delay : AV_SYNC_THRESHOLD;
				if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
					if (diff <= -sync_threshold) {
						delay = 0;
					} else if (diff >= sync_threshold) {
						delay = 2 * delay;
					}
				}
			}
			is->frame_timer += delay;
			/* compute the REAL delay */
			actual_delay = is->frame_timer - (av_gettime() / 1000000.0);
			if (actual_delay < 0.010) {
				actual_delay = 0.010;
			}
			schedule_refresh(is, int(actual_delay * 1000 + 0.5));

			/* show the picture! */
			video_display(is);

			if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE) {
				is->pictq_rindex = 0;
			}

			SDL_LockMutex(is->pictq_mutex);
			is->pictq_size--;
			SDL_CondSignal(is->pictq_cond);
			SDL_UnlockMutex(is->pictq_mutex);
		}
	} else {
		schedule_refresh(is, 100);
	}
}
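Example 3 additionally records video_current_pts and video_current_pts_time before syncing. Those two fields are exactly what a video clock needs: the pts of the frame on screen, advanced by how much wall-clock time has passed since it was stored. A sketch following the same tutorial style (get_video_clock is not shown in this listing):

double get_video_clock(VideoState *is) {
    /* seconds elapsed since video_current_pts was recorded */
    double delta = (av_gettime() - is->video_current_pts_time) / 1000000.0;
    return is->video_current_pts + delta;
}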
Example 4
/*
Called each time the refresh timer fires (the timer posts FF_REFRESH_EVENT, and handling that event calls this function).
A timer fires only once and then expires, but this function arms a new one before returning.
Takes one VideoPicture out of is->pictq for display, then advances the queue's read index by one.
*/
void video_refresh_timer(void *userdata) {

	VideoState *is = (VideoState *)userdata;
	VideoPicture *vp;
	double actual_delay, delay, sync_threshold, ref_clock, diff;

	if (is->video_st) {
		if (is->pictq_size == 0) {
			schedule_refresh(is, 1);
		}
		else {
			vp = &is->pictq[is->pictq_rindex];

			delay = vp->pts - is->frame_last_pts; /* the pts from last time */
			
			is->frame_last_pts = vp->pts;

			/* ----------- */
			/* audio/video synchronization */
			ref_clock = get_audio_clock(is);
			diff = vp->pts - ref_clock;
			if (diff <= -0.015) {
				delay = 0;
			}
			else if (diff >= 0.015) {
				delay = 2 * delay;
			}
			/* ----------- */


			if (delay == 0) {
				count_delay_is_zero++;
				delay = 0.010;
			}
			count_pict++;
			printf("delay==0 percentage is %lf",(double)count_delay_is_zero/count_pict);
			schedule_refresh(is, (int)(delay * 1000 + 0.5));

			/* show the picture! */
			video_display(is);

			/* update queue for next picture! */
			if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE) {
				is->pictq_rindex = 0;
			}
			SDL_LockMutex(is->pictq_mutex);
			is->pictq_size--;
			SDL_CondSignal(is->pictq_cond);
			SDL_UnlockMutex(is->pictq_mutex);
		}
	}
	else {
		schedule_refresh(is, 100);
	}
}
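To make the fixed 0.015 s window in Example 4 concrete: if vp->pts is 0.500 s while the audio clock reads 0.530 s, diff is -0.030 s, which is below -0.015, so delay is forced to 0 and the frame is shown immediately to catch up; if instead the audio clock reads 0.470 s, diff is +0.030 s and the delay is doubled so the video waits for the audio.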
Example 5
void create_videostates(char* filename,int i) {
	channels[i] = av_mallocz(sizeof(VideoState));
	
	channels[i]->channel = i;
	channels[i]->audioChannel = i;
	channels[i]->color_choice=NONE;
	
	
	av_strlcpy(channels[i]->filename, filename, sizeof(channels[i]->filename));
	
	
	channels[i]->pictq_mutex = SDL_CreateMutex();
	channels[i]->pictq_cond = SDL_CreateCond();

	schedule_refresh(channels[i], 40);

	channels[i]->av_sync_type = DEFAULT_AV_SYNC_TYPE;

	channels[i]->parse_tid = SDL_CreateThread(decode_thread, channels[i]);

	if (!channels[i]->parse_tid) {
		av_free(channels[i]);
		exit(0);
	}
	
}
int prepareAsync_l(VideoState **ps) {
	VideoState *is = *ps;

    if (is != 0) {
    	is->pictq_mutex = SDL_CreateMutex();
        is->pictq_cond = SDL_CreateCond();

    	//is->event_tid = malloc(sizeof(*(is->event_tid)));
    	//pthread_create(is->event_tid, NULL, (void *) &event_thread, is);

    	// uncomment for video
    	schedule_refresh(is, 40);

    	is->av_sync_type = DEFAULT_AV_SYNC_TYPE;
    	is->parse_tid = malloc(sizeof(*(is->parse_tid)));

    	if(!is->parse_tid) {
    	    av_free(is);
    	    return UNKNOWN_ERROR;
    	}

    	pthread_create(is->parse_tid, NULL, (void *) &decode_thread, is);

    	av_init_packet(&is->flush_pkt);
    	is->flush_pkt.data = (unsigned char *)"FLUSH";

    	return NO_ERROR;
    }
    return INVALID_OPERATION;
}
void VideoPlayer::playVideo (const std::string &resourceName)
{
  if (mState)
    close();

  mState = new VideoState();

  // Register all formats and codecs
  av_register_all();

  if(SDL_Init(SDL_INIT_AUDIO)) {
    throw std::runtime_error("Failed to initialize SDL");
  }

  mState->refresh = 0;
  mState->resourceName = resourceName;

  schedule_refresh(mState, 40);

  mState->av_sync_type = DEFAULT_AV_SYNC_TYPE;
  mState->parse_thread = boost::thread(decode_thread, mState);
}
Example 8
int main(int argc, char *argv[]) {

  SDL_Event       event;
  double          pts;
  VideoState      *is;

  is = av_mallocz(sizeof(VideoState));

  if(argc < 2) {
    fprintf(stderr, "Usage: test <file>\n");
    exit(1);
  }
  // Register all formats and codecs
  av_register_all();
  
  if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
    fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
    exit(1);
  }

  // Make a screen to put our video
#ifndef __DARWIN__
  screen = SDL_SetVideoMode(640, 480, 0, 0);
#else
  screen = SDL_SetVideoMode(640, 480, 24, 0);
#endif
  if(!screen) {
    fprintf(stderr, "SDL: could not set video mode - exiting\n");
    exit(1);
  }

  pstrcpy(is->filename, sizeof(is->filename), argv[1]);

  is->pictq_mutex = SDL_CreateMutex();
  is->pictq_cond = SDL_CreateCond();

  schedule_refresh(is, 40);

  is->av_sync_type = DEFAULT_AV_SYNC_TYPE;
  is->parse_tid = SDL_CreateThread(decode_thread, is);
  if(!is->parse_tid) {
    av_free(is);
    return -1;
  }

  av_init_packet(&flush_pkt);
  flush_pkt.data = (uint8_t *)"FLUSH";
  
  for(;;) {
    double incr, pos;
    SDL_WaitEvent(&event);
    switch(event.type) {
    case SDL_KEYDOWN:
      switch(event.key.keysym.sym) {
      case SDLK_LEFT:
	incr = -10.0;
	goto do_seek;
      case SDLK_RIGHT:
	incr = 10.0;
	goto do_seek;
      case SDLK_UP:
	incr = 60.0;
	goto do_seek;
      case SDLK_DOWN:
	incr = -60.0;
	goto do_seek;
      do_seek:
	if(global_video_state) {
	  pos = get_master_clock(global_video_state);
	  pos += incr;
	  stream_seek(global_video_state, (int64_t)(pos * AV_TIME_BASE), incr);
	}
	break;
      default:
	break;
      }
      break;
    case FF_QUIT_EVENT:
    case SDL_QUIT:
      is->quit = 1;
      SDL_Quit();
      exit(0);
      break;
    case FF_ALLOC_EVENT:
      alloc_picture(event.user.data1);
      break;
    case FF_REFRESH_EVENT:
      video_refresh_timer(event.user.data1);
      break;
    default:
      break;
    }
  }
  return 0;
}
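The mains above and below all dispatch on FF_QUIT_EVENT, FF_ALLOC_EVENT and FF_REFRESH_EVENT. These are not SDL builtins; in the tutorial-style code they are plain SDL user events, typically defined like the following (the exact numbering is an assumption and varies per codebase):

#define FF_ALLOC_EVENT   (SDL_USEREVENT)
#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
#define FF_QUIT_EVENT    (SDL_USEREVENT + 2)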
int main(int argc, char *argv[]) {

  SDL_Event       event;

  VideoState      *is;

  is = av_mallocz(sizeof(VideoState));

  if(argc < 2) {
    fprintf(stderr, "Usage: test <file>\n");
    exit(1);
  }
  // Register all formats and codecs
  av_register_all();
  
  if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
    fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
    exit(1);
  }

  // Make a screen to put our video
#ifndef __DARWIN__
        screen = SDL_SetVideoMode(640, 480, 0, 0);
#else
        screen = SDL_SetVideoMode(640, 480, 24, 0);
#endif
  if(!screen) {
    fprintf(stderr, "SDL: could not set video mode - exiting\n");
    exit(1);
  }

  pstrcpy(is->filename, sizeof(is->filename), argv[1]);

  is->pictq_mutex = SDL_CreateMutex();
  is->pictq_cond = SDL_CreateCond();

  schedule_refresh(is, 40);

  is->parse_tid = SDL_CreateThread(decode_thread, is);
  if(!is->parse_tid) {
    av_free(is);
    return -1;
  }
  for(;;) {

    SDL_WaitEvent(&event);
    switch(event.type) {
    case FF_QUIT_EVENT:
    case SDL_QUIT:
      is->quit = 1;
      SDL_Quit();
      exit(0);
      break;
    case FF_ALLOC_EVENT:
      alloc_picture(event.user.data1);
      break;
    case FF_REFRESH_EVENT:
      video_refresh_timer(event.user.data1);
      break;
    default:
      break;
    }
  }
  return 0;

}
Example 10
int main(int argc, char *argv[]) {
	double			pos;
    VideoState      *is;


    if(argc < 2) {
        fprintf(stderr, "Usage: test <file>\n");
        exit(1);
    }

#ifdef __MINGW32__
	ptw32_processInitialize();
	//ptw32_processTerminate();
#endif

	//XInitThreads();

    is = (VideoState *)av_mallocz(sizeof(VideoState));

	avcodec_register_all();
	//avdevice_register_all();
	avfilter_register_all();
    av_register_all();
	avformat_network_init();

	if (USE_EVENT_MULTI_THREAD)
	{
		if ( SDLMOD_StartEventLoop(SDLMOD_INIT_EVENTTHREAD) < 0 ) {
			fprintf(stderr, "Could not start SDLMOD event loop (multi thread)\n");
			exit(1);
		}
	}
	else
	{
		if ( SDLMOD_StartEventLoop(0) < 0 ) {
			fprintf(stderr, "Could not start SDLMOD event loop (main thread)\n");
			exit(1);
		}
	}
	if (SDLMOD_TimerInit() != 0)
	{
		fprintf(stderr, "SDLMOD_TimerInit failed\n");
		exit(1);
	}

	g_video_width = 640;
	g_video_height = 480;
	g_video_resized = 0;

	//screen = SDL_SetVideoMode(g_video_width, g_video_height, SDL_VIDEO_MODE_BPP, SDL_VIDEO_MODE_FLAGS);
	screen = SDLMOD_CreateRGBSurface(0,
		g_video_width, g_video_height, SDL_VIDEO_MODE_BPP,
		Rmask, Gmask, Bmask, Amask
		);
	if(!screen) {
		fprintf(stderr, "SDL: could not set video mode - exiting\n");
		exit(1);
	}

    screen_mutex = (pthread_mutex_t *)malloc(sizeof(pthread_mutex_t));
    pthread_mutex_init(screen_mutex, NULL);

    av_strlcpy(is->filename, argv[1], sizeof(is->filename));

	is->pictq_mutex = (pthread_mutex_t *)malloc(sizeof(pthread_mutex_t));
    pthread_mutex_init(is->pictq_mutex, NULL);
    is->pictq_cond = (pthread_cond_t *)malloc(sizeof(pthread_cond_t));
    pthread_cond_init(is->pictq_cond, NULL);
    
	schedule_refresh(is, 40);

	is->av_sync_type = AV_SYNC_VIDEO_MASTER;

    {
		int err;
		is->parse_tid = (pthread_t *)malloc(sizeof(pthread_t));
		err = pthread_create(is->parse_tid, NULL, decodeThread, is);
		if (err!=0)
		{
			free(is->parse_tid);
			printf("can't create thread: %s\n", strerror(err));
			exit(0);
		}
	}
    if (!is->parse_tid) {
        av_free(is);
        return -1;
    }

	av_init_packet(&flush_pkt);
	flush_pkt.data = (uint8_t*)"FLUSH";

#if USE_EVENT_LOOP_MT
	{
		pthread_t pid;
		pthread_create(&pid, NULL, event_loop, is);
		if (0)
		{
			pthread_join(pid, NULL);
		}
		else
		{
#if USE_GL
			glutInit(&argc, argv);
			glutInitDisplayMode(GLUT_DOUBLE|GLUT_RGBA|GLUT_DEPTH);
			glutInitWindowSize(g_video_width, g_video_height);
			glutInitWindowPosition(0, 0);
			
			win = glutCreateWindow(TITLE);
			createMenu();

			glutDisplayFunc(display);
			glutReshapeFunc(reshape);
			glutMouseFunc(mouse);
			glutMotionFunc(motion);
			glutIdleFunc(idle);

			if (initEnvironment())
			{
				glutMainLoop();
			}
#else
			printf(">>>>>>Please input command (exit, quit, save):<<<<<<\n");
			char string[256];
			while(1)
			{
				/* gets() is unsafe (removed in C11); read a line with fgets instead */
				if (fgets(string, sizeof(string), stdin) == NULL)
					break;
				string[strcspn(string, "\n")] = '\0'; /* strip the trailing newline */
				if (strcmp(string, "exit") == 0 || 
				    strcmp(string, "quit") == 0 ||
				    strcmp(string, "e") == 0 ||
				    strcmp(string, "q") == 0
				    )
				{
					break;
				}
				else if (strcmp(string, "save") == 0 ||
					strcmp(string, "s") == 0
				    )
				{
					save_bmp();
					printf("save_bmp() finish.\n");
				}
				else
				{
					printf("Please input command (exit, quit, save):\n");
				}
			}
#endif
		}
	}
#else
	event_loop(is);
#endif

    return 0;
}
int decode_thread(void *arg) {

    VideoState *is = (VideoState *)arg;
    AVFormatContext *pFormatCtx = NULL;
    AVPacket pkt1, *packet = &pkt1;

    AVDictionary *io_dict = NULL;
    AVIOInterruptCB callback;

    int video_index = -1;
    int audio_index = -1;
    int i;

    is->videoStream = -1;
    is->audioStream = -1;
    is->audio_need_resample = 0;

    global_video_state = is;
    // will interrupt blocking functions if we quit!
    callback.callback = decode_interrupt_cb;
    callback.opaque = is;

    if(avio_open2(&is->io_context, is->filename, 0, &callback, &io_dict)) {
        fprintf(stderr, "Unable to open I/O for %s\n", is->filename);
        return -1;
    }

    // Open video file
    if(avformat_open_input(&pFormatCtx, is->filename, NULL, NULL) != 0) {
        return -1;    // Couldn't open file
    }

    is->pFormatCtx = pFormatCtx;

    // Retrieve stream information
    if(avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        return -1;    // Couldn't find stream information
    }

    // Dump information about file onto standard error
    av_dump_format(pFormatCtx, 0, is->filename, 0);

    // Find the first video stream
    for(i = 0; i < pFormatCtx->nb_streams; i++) {
        if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO &&
                video_index < 0) {
            video_index = i;
        }

        if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO &&
                audio_index < 0) {
            audio_index = i;
        }
    }

    if(audio_index >= 0) {
        stream_component_open(is, audio_index);
    }

    if(video_index >= 0) {
        stream_component_open(is, video_index);
    }

    if(is->videoStream < 0 && is->audioStream < 0) {
        fprintf(stderr, "%s: could not open codecs\n", is->filename);
        goto fail;
    }

#ifdef __RESAMPLER__

    if( audio_index >= 0
            && pFormatCtx->streams[audio_index]->codec->sample_fmt != AV_SAMPLE_FMT_S16) {
        is->audio_need_resample = 1;
        is->pResampledOut = NULL;
        is->pSwrCtx = NULL;

        printf("Configure resampler: ");

#ifdef __LIBAVRESAMPLE__
        printf("libAvResample\n");
        is->pSwrCtx = avresample_alloc_context();
#endif

#ifdef __LIBSWRESAMPLE__
        printf("libSwResample\n");
        is->pSwrCtx = swr_alloc();
#endif

        // Some MP3/WAV files don't set the channel layout, so assume
        // they are stereo, not 5.1
        if (pFormatCtx->streams[audio_index]->codec->channel_layout == 0
                && pFormatCtx->streams[audio_index]->codec->channels == 2) {
            pFormatCtx->streams[audio_index]->codec->channel_layout = AV_CH_LAYOUT_STEREO;

        } else if (pFormatCtx->streams[audio_index]->codec->channel_layout == 0
                   && pFormatCtx->streams[audio_index]->codec->channels == 1) {
            pFormatCtx->streams[audio_index]->codec->channel_layout = AV_CH_LAYOUT_MONO;

        } else if (pFormatCtx->streams[audio_index]->codec->channel_layout == 0
                   && pFormatCtx->streams[audio_index]->codec->channels == 0) {
            pFormatCtx->streams[audio_index]->codec->channel_layout = AV_CH_LAYOUT_STEREO;
            pFormatCtx->streams[audio_index]->codec->channels = 2;
        }

        av_opt_set_int(is->pSwrCtx, "in_channel_layout",
                       pFormatCtx->streams[audio_index]->codec->channel_layout, 0);
        av_opt_set_int(is->pSwrCtx, "in_sample_fmt",
                       pFormatCtx->streams[audio_index]->codec->sample_fmt, 0);
        av_opt_set_int(is->pSwrCtx, "in_sample_rate",
                       pFormatCtx->streams[audio_index]->codec->sample_rate, 0);

        av_opt_set_int(is->pSwrCtx, "out_channel_layout", AV_CH_LAYOUT_STEREO, 0);
        av_opt_set_int(is->pSwrCtx, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0);
        av_opt_set_int(is->pSwrCtx, "out_sample_rate", 44100, 0);

#ifdef __LIBAVRESAMPLE__

        if (avresample_open(is->pSwrCtx) < 0) {
#else

        if (swr_init(is->pSwrCtx) < 0) {
#endif
            fprintf(stderr, " ERROR!! From Samplert: %d Hz Sample format: %s\n",
                    pFormatCtx->streams[audio_index]->codec->sample_rate,
                    av_get_sample_fmt_name(pFormatCtx->streams[audio_index]->codec->sample_fmt));
            fprintf(stderr, "         To 44100 Sample format: s16\n");
            is->audio_need_resample = 0;
            is->pSwrCtx = NULL;
        }

    }

#endif

    // main decode loop

    for(;;) {
        if(is->quit) {
            break;
        }

        // seek stuff goes here
        if(is->seek_req) {
            int stream_index = -1;
            int64_t seek_target = is->seek_pos;

            if(is->videoStream >= 0) {
                stream_index = is->videoStream;

            } else if(is->audioStream >= 0) {
                stream_index = is->audioStream;
            }

            if(stream_index >= 0) {
                seek_target = av_rescale_q(seek_target, AV_TIME_BASE_Q, pFormatCtx->streams[stream_index]->time_base);
            }

            if(av_seek_frame(is->pFormatCtx, stream_index, seek_target, is->seek_flags) < 0) {
                fprintf(stderr, "%s: error while seeking\n", is->pFormatCtx->filename);

            } else {
                if(is->audioStream >= 0) {
                    packet_queue_flush(&is->audioq);
                    packet_queue_put(&is->audioq, &flush_pkt);
                }

                if(is->videoStream >= 0) {
                    packet_queue_flush(&is->videoq);
                    packet_queue_put(&is->videoq, &flush_pkt);
                }
            }

            is->seek_req = 0;
        }

        if(is->audioq.size > MAX_AUDIOQ_SIZE ||
                is->videoq.size > MAX_VIDEOQ_SIZE) {
            SDL_Delay(10);
            continue;
        }

        if(av_read_frame(is->pFormatCtx, packet) < 0) {
            if(is->pFormatCtx->pb->error == 0) {
                SDL_Delay(100); /* no error; wait for user input */
                continue;

            } else {
                break;
            }
        }

        // Is this a packet from the video stream?
        if(packet->stream_index == is->videoStream) {
            packet_queue_put(&is->videoq, packet);

        } else if(packet->stream_index == is->audioStream) {
            packet_queue_put(&is->audioq, packet);

        } else {
            av_free_packet(packet);
        }
    }

    /* all done - wait for it */
    while(!is->quit) {
        SDL_Delay(100);
    }

fail: {
        SDL_Event event;
        event.type = FF_QUIT_EVENT;
        event.user.data1 = is;
        SDL_PushEvent(&event);
    }
    return 0;
}
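On a seek, decode_thread above flushes both packet queues and pushes flush_pkt so the decoders know when to reset. packet_queue_flush itself is not shown in this listing; a sketch in the same style (assuming the tutorial's PacketQueue of AVPacketList nodes guarded by an SDL mutex) could be:

static void packet_queue_flush(PacketQueue *q) {
    AVPacketList *pkt, *pkt1;

    SDL_LockMutex(q->mutex);
    for (pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
        pkt1 = pkt->next;
        av_free_packet(&pkt->pkt);   /* drop the queued packet's data */
        av_freep(&pkt);              /* free the list node */
    }
    q->last_pkt = NULL;
    q->first_pkt = NULL;
    q->nb_packets = 0;
    q->size = 0;
    SDL_UnlockMutex(q->mutex);
}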

void stream_seek(VideoState *is, int64_t pos, int rel) {

    if(!is->seek_req) {
        is->seek_pos = pos;
        is->seek_flags = rel < 0 ? AVSEEK_FLAG_BACKWARD : 0;
        is->seek_req = 1;
    }
}

int main(int argc, char *argv[]) {

    SDL_Event       event;
    //double          pts;
    VideoState      *is;

    is = av_mallocz(sizeof(VideoState));

    if(argc < 2) {
        fprintf(stderr, "Usage: test <file>\n");
        exit(1);
    }

    // Register all formats and codecs
    av_register_all();

    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        exit(1);
    }

    // Make a screen to put our video
#ifndef __DARWIN__
    screen = SDL_SetVideoMode(640, 480, 0, 0);
#else
    screen = SDL_SetVideoMode(640, 480, 24, 0);
#endif

    if(!screen) {
        fprintf(stderr, "SDL: could not set video mode - exiting\n");
        exit(1);
    }

    av_strlcpy(is->filename, argv[1], 1024);

    is->pictq_mutex = SDL_CreateMutex();
    is->pictq_cond = SDL_CreateCond();

    schedule_refresh(is, 40);

    is->av_sync_type = DEFAULT_AV_SYNC_TYPE;
    is->parse_tid = SDL_CreateThread(decode_thread, is);

    if(!is->parse_tid) {
        av_free(is);
        return -1;
    }

    av_init_packet(&flush_pkt);
    flush_pkt.data = (unsigned char *)"FLUSH";

    for(;;) {
        double incr, pos;
        SDL_WaitEvent(&event);

        switch(event.type) {
            case SDL_KEYDOWN:
                switch(event.key.keysym.sym) {
                    case SDLK_LEFT:
                        incr = -10.0;
                        goto do_seek;

                    case SDLK_RIGHT:
                        incr = 10.0;
                        goto do_seek;

                    case SDLK_UP:
                        incr = 60.0;
                        goto do_seek;

                    case SDLK_DOWN:
                        incr = -60.0;
                        goto do_seek;
do_seek:

                        if(global_video_state) {
                            pos = get_master_clock(global_video_state);
                            pos += incr;
                            stream_seek(global_video_state, (int64_t)(pos * AV_TIME_BASE), incr);
                        }

                        break;

                    default:
                        break;
                }

                break;

            case FF_QUIT_EVENT:
            case SDL_QUIT:
                is->quit = 1;
                /*
                 * If the video has finished playing, then both the picture and
                 * audio queues are waiting for more data.  Make them stop
                 * waiting and terminate normally.
                 */
                SDL_CondSignal(is->audioq.cond);
                SDL_CondSignal(is->videoq.cond);
                SDL_Quit();
                exit(0);
                break;

            case FF_ALLOC_EVENT:
                alloc_picture(event.user.data1);
                break;

            case FF_REFRESH_EVENT:
                video_refresh_timer(event.user.data1);
                break;

            default:
                break;
        }
    }

    return 0;
}
Example 12
int main(int argc, char *argv[]) {
  SDL_Event       event;
  VideoState      *is;
  is = av_mallocz(sizeof(VideoState));
  if(argc < 2) {
    fprintf(stderr, "Usage: test <file>\n");
    exit(1);
  }
  // Register all formats and codecs
  av_register_all();

  if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
    fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
    exit(1);
  }

  // Make a screen to put our video
#ifndef __DARWIN__
  screen = SDL_SetVideoMode(640, 480, 0, 0);
#else
  screen = SDL_SetVideoMode(640, 480, 24, 0);
#endif
  if(!screen) {
    fprintf(stderr, "SDL: could not set video mode - exiting\n");
    exit(1);
  }

  av_strlcpy(is->filename, argv[1], 1024);


  // Initialize the lock for the video picture buffer (pictq):
  // once the event loop drives the video function it pulls decoded frames out of pictq,
  // while the video decoder thread pushes frames in, and we don't know which happens first.
  is->pictq_mutex = SDL_CreateMutex();
  is->pictq_cond = SDL_CreateCond();

  // schedule_refresh is a function we will define; it tells the system to post an FF_REFRESH_EVENT after the given number of milliseconds.
  schedule_refresh(is, 40);

  is->av_sync_type = DEFAULT_AV_SYNC_TYPE;
  // Spawn a new thread that has full access to the original process's memory and runs the function we pass in; in this case it calls decode_thread() with the VideoState struct as its argument.
  is->parse_tid = SDL_CreateThread(decode_thread, is);
  if(!is->parse_tid) {
    av_free(is);
    return -1;
  }

// Event loop
  for(;;) {

    SDL_WaitEvent(&event);
    switch(event.type) {
    case FF_QUIT_EVENT:
    case SDL_QUIT:
      is->quit = 1;
      /*
       * If the video has finished playing, then both the picture and
       * audio queues are waiting for more data.  Make them stop
       * waiting and terminate normally.
       */
      SDL_CondSignal(is->audioq.cond);
      SDL_CondSignal(is->videoq.cond);
      SDL_Quit();
      exit(0);
      break;
    case FF_ALLOC_EVENT:
      alloc_picture(event.user.data1);
      break;
    case FF_REFRESH_EVENT:
      video_refresh_timer(event.user.data1);
      break;
    default:
      break;
    }
  }
  return 0;
}
Example 13
int main(int argc, char* argv[]) {
//	SetUnhandledExceptionFilter(callback);
	SDL_Event event;
	VideoState* is = NULL;
	is = (VideoState*) av_mallocz(sizeof(VideoState));

	if (argc < 2) {
		fprintf(stderr, "Usage: test <file>\n");
		exit(1);
	}

	av_register_all();

	if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
		fprintf(stderr, "Could't not initialize SDL - %s\n", SDL_GetError());
		exit(1);
	}

	screen = SDL_CreateWindow("Hello World", SDL_WINDOWPOS_CENTERED,
	SDL_WINDOWPOS_CENTERED, WINDOW_WIDTH, WINDOW_HEIGHT, SDL_WINDOW_OPENGL);
	if (!screen) {
		printf("Could not initialize SDL -%s\n", SDL_GetError());
		return -1;
	}
	render = SDL_CreateRenderer(screen, -1, 0);

	screen_mutex = SDL_CreateMutex();

	av_strlcpy(is->filename, argv[1], sizeof(is->filename));

	is->pictq_mutex = SDL_CreateMutex();
	is->pictq_cond = SDL_CreateCond();

	schedule_refresh(is, 40);

	is->av_sync_type = DEFAULT_AV_SYNC_TYPE;
	is->parse_tid = SDL_CreateThread(decode_thread, "decode_thread", is);
	if (!is->parse_tid) {
		av_free(is);
		return -1;
	}

	av_init_packet(&flush_pkt);
	flush_pkt.data = (unsigned char*) "FLUSH";

	for (;;) {
		double incr, pos;
		SDL_WaitEvent(&event);
		switch (event.type) {
		case SDL_KEYDOWN:
			switch (event.key.keysym.sym) {
			case SDLK_LEFT:
				incr = -10.0;
				goto do_seek;
			case SDLK_RIGHT:
				incr = 10.0;
				goto do_seek;
			case SDLK_UP:
				incr = 60.0;
				goto do_seek;
			case SDLK_DOWN:
				incr = -60.0;
				goto do_seek;
				do_seek: if (global_video_state) {
					pos = get_master_clock(global_video_state);
					pos += incr;
					stream_seek(global_video_state,
							(int64_t) (pos * AV_TIME_BASE), incr);
				}
				break;
			default:
				break;
			}
			break;
		case FF_QUIT_EVENT:
		case SDL_QUIT:
			is->quit = 1;
			SDL_Quit();
			return 0;
			break;
		case FF_REFRESH_EVENT:
			video_refresh_timer(event.user.data1);
			break;
		default:
			break;
		}
	}
	return 0;
}
Example 14
int main(int argc, char*argv[])
{
  if (argc > 1 && (strcmp(argv[1], "-h") == 0 || strcmp(argv[1], "--help") == 0))
  {
    usage();
    return 0;
  }
	signal( SIGINT, &onExit);

  ftime(&startTime);

  av_register_all();

  paused = 0;

  // Create space for all the video data
  VideoState      *is;
  is = av_mallocz(sizeof(VideoState));

  // Set up SDL window
  SDL_Event    event;
  memset(&event, 0, sizeof(SDL_Event));

  if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
    fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
    exit(1);
  }

  screen = SDL_SetVideoMode(640, 480, 24, 0);

  if(!screen) {
    fprintf(stderr, "SDL: could not set video mode - exiting\n");
    exit(1);
  }

	char * name = malloc(256);          /* 16 bytes was too small for long server names */
	if(argc > 1){
		strncpy(name, argv[1], 255);
		name[255] = '\0';
  }
	else
	{
	  printf("Enter target name of the server: ");
	  scanf("%255s", name);             /* bound the read to the buffer size */
	}
  char * ip = nameserver_init(name);
  free(name);

  // Get the audio & video stream information
  parse_nameserver_msg(ip);

  global_video_state = is;
  //init_gis(global_video_state);
  init_gis(is);
  establish_peer_connections();

	//control_packet* cp = read_control_packet();
  // Initialize the audio & video streams
  /*
  stream_component_open(global_video_state, &cp->audio_codec_ctx);
  printf("[PLAYER] Opened Audio stream\n");
  stream_component_open(global_video_state, &cp->video_codec_ctx);
  printf("[PLAYER] Opened Video stream\n");
  */

	is->quit = 0;

  // Create thread locks
  is->pictq_mutex = SDL_CreateMutex();
  is->pictq_cond = SDL_CreateCond();

  schedule_refresh(is, 40);

  // Create the decode thread
  is->parse_tid = SDL_CreateThread(decode_thread, is);
  printf("[PLAYER] SDL intialized\n");

/*
  if(!is->parse_tid) {
    av_free(is);
    return -1;
  }
  */

	pthread_create(&keyboard_thread_id, NULL,  captureKeyboard, NULL);
	pthread_create(&stats_thread_id, NULL,  calculate_player_stats, NULL);
//	printf("[PLAYER] Keyboard thread :%d\n", keyboard_thread_id);

//  while(global_video_state->quit == 0) 
  while(1) {
    SDL_WaitEvent(&event);
    switch(event.type)
    {
      // Done decoding
      case FF_QUIT_EVENT:
      case SDL_QUIT:
        is->quit = 1;
        exit(0);
        break;
      // Get frame
      case FF_ALLOC_EVENT:
        alloc_picture(event.user.data1);
        break;

      // Refresh screen
      case FF_REFRESH_EVENT:
        video_refresh_timer(event.user.data1);
        break;

      default:
        break;
    }
  }

  printf("Frames per second: %i\n", 5);
  printf("[MAIN] Quit player\n");
  return 0;
}
Example 15
void video_display(VideoState *is) {
	SDLMOD_Rect rect;
	VideoPicture *vp;
	AVPicture pict;
	float aspect_ratio;
	int w, h, x, y;
	int i;

	vp = &is->pictq[is->pictq_rindex];
#if !USE_SWS_YUV_CONVERT
	if(vp->bmp) {
#else
	if(vp->sfc) {
#endif
		if(is->video_st->codec->sample_aspect_ratio.num == 0) {
			aspect_ratio = 0;
		} else {
			aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio) *
								  is->video_st->codec->width / is->video_st->codec->height;
		}
		if(aspect_ratio <= 0.0) 
			aspect_ratio = (float)is->video_st->codec->width / (float)is->video_st->codec->height;
		
		h = screen->h;
		w = ((int)(h * aspect_ratio)) & -3;
		if(w > screen->w) {
			w = screen->w;
			h = ((int)(w / aspect_ratio)) & -3;
		}
		x = (screen->w - w) / 2;
		y = (screen->h - h) / 2;

		rect.x = x;
		rect.y = y;
		rect.w = w;
		rect.h = h;

#if !USE_SWS_YUV_CONVERT
		SDLMOD_DisplayYUVOverlay(vp->bmp, &rect);
#else
		if (vp->sfc->w > 0 && vp->sfc->h > 0 && rect.w > 0 && rect.h > 0)
		{
			SDLMOD_Rect srcrect;
			srcrect.x = 0;
			srcrect.y = 0;
			srcrect.w = vp->sfc->w;
			srcrect.h = vp->sfc->h;
			SDLMOD_LockSurface(screen);
			//FIXME: SoftStretch doesn't support empty rect (dstrect->h == 0), will crash.
			SDLMOD_SoftStretch(vp->sfc, &srcrect, screen, &rect);
			SDLMOD_UnlockSurface(screen);
		}
#endif
#if USE_GL
		glutPostRedisplay();
#endif
	}
}

void stream_seek(VideoState *is, int64_t pos, int rel) {
	if(!is->seek_req) {
		is->seek_pos = pos;
		is->seek_flags = rel < 0 ? AVSEEK_FLAG_BACKWARD : 0;
		is->seek_req = 1;
	}
}

void video_refresh_timer(void *userdata) {
	VideoState *is = (VideoState*)userdata;
	VideoPicture *vp;
	double actual_delay, delay, sync_threshold, ref_clock, diff;
	if(is->video_st) {
		if(is->pictq_size == 0) {
			schedule_refresh(is, 1);		
		} else {
			vp = &is->pictq[is->pictq_rindex];
			is->video_current_pts = vp->pts;
			is->video_current_pts_time = av_gettime();
			delay = vp->pts - is->frame_last_pts;
			if(delay <= 0 || delay >= 1.0) delay = is->frame_last_delay;
			is->frame_last_delay = delay;
			is->frame_last_pts = vp->pts;
			if(is->av_sync_type != AV_SYNC_VIDEO_MASTER) {
				ref_clock = get_master_clock(is);
				diff = vp->pts - ref_clock;
				sync_threshold = (delay > AV_SYNC_THRESHOLD) ? delay : AV_SYNC_THRESHOLD;
				if(fabs(diff) < AV_NOSYNC_THRESHOLD) {
					if(diff <= -sync_threshold)	{
						delay = 0;
					} else if(diff >= sync_threshold) {
						delay = 2 * delay;
					}
				}
			}
			is->frame_timer += delay;	
			actual_delay = is->frame_timer - (av_gettime() / 1000000.0);
			if(actual_delay < 0.010) actual_delay = 0.010;
			schedule_refresh(is, (int)(actual_delay * 1000 + 0.5));
			video_display(is);
			if(++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE) is->pictq_rindex = 0;
			pthread_mutex_lock(is->pictq_mutex);
			is->pictq_size--;
			//printf("video_refresh_timer signal %d\n", is->pictq_size);
			pthread_cond_signal(is->pictq_cond);
			pthread_mutex_unlock(is->pictq_mutex);
		 }
	} else
	  schedule_refresh(is, 100);
}
Example 16
void video_refresh_timer(void *userdata) {

    VideoState *is = (VideoState *)userdata;
    VideoPicture *vp;
    double actual_delay, delay, sync_threshold, ref_clock, diff;

    if(is->video_st) {
        if(is->pictq_size == 0) {
            fprintf(stderr, "%s pictq_size is 0, schedule another refresh\n", __FUNCTION__);
            schedule_refresh(is, 1);
        } else {
            vp = &is->pictq[is->pictq_rindex];

            delay = vp->pts - is->frame_last_pts; /* the pts from last time */
            fprintf(stderr, "delay 1: %.8f\n", delay);
            if(delay <= 0 || delay >= 1.0) { //larger than 1 seconds or smaller than 0
                /* if incorrect delay, use previous one */
                delay = is->frame_last_delay;
                fprintf(stderr, "delay 2: %.8f\n", delay);
            }
            /* save for next time */
            is->frame_last_delay = delay;
            is->frame_last_pts = vp->pts;

            /* update delay to sync to audio */
            ref_clock = get_audio_clock(is);
            diff = vp->pts - ref_clock;
            fprintf(stderr, "audio video diff: %.8f\n", diff);

            /* Skip or repeat the frame, taking the delay into account.
               FFPlay still doesn't "know if this is the best guess." */
            sync_threshold = (delay > AV_SYNC_THRESHOLD) ? delay : AV_SYNC_THRESHOLD;
            if(fabs(diff) < AV_NOSYNC_THRESHOLD) {
                if(diff <= -sync_threshold) {
                    delay = 0;
                } else if(diff >= sync_threshold) {
                    delay = 2 * delay;
                }
            }
            is->frame_timer += delay;
            /* compute the REAL delay */
            actual_delay = is->frame_timer - (av_gettime() / 1000000.0);
            fprintf(stderr, "actual_delay %.8f\n", actual_delay);
            if(actual_delay < 0.010) { //smaller than 10 ms
                /* Really it should skip the picture instead */
                actual_delay = 0.010;
            }
            // Adding 0.5 before the (int) cast rounds to the nearest millisecond; the same idiom shows up in other multimedia code, such as stagefright.
            fprintf(stderr, "%s, delay: %.8f\n", __FUNCTION__, actual_delay*1000+0.5);
            // Video decodes faster than the audio clock advances, so rendering is delayed:
            // after we show a frame, we figure out when the next frame should be shown.
            schedule_refresh(is, (int)(actual_delay * 1000 + 0.5)); 
            /* show the picture! */
            video_display(is);
            fprintf(stderr, "\n---------------------------------------------------------------------\n");

            /* update queue for next picture! */
            if(++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE) {
                is->pictq_rindex = 0;
            }
            SDL_LockMutex(is->pictq_mutex);
            is->pictq_size--;
            SDL_CondSignal(is->pictq_cond);
            SDL_UnlockMutex(is->pictq_mutex);
        }
    } else {
        fprintf(stderr, "%s, schedule_refresh for another 100 ms\n", __FUNCTION__);
        schedule_refresh(is, 100);
    }
}
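As a concrete trace of those debug prints: if frame_timer has accumulated to 12.345 s and av_gettime()/1000000.0 returns 12.301 s, actual_delay is 0.044 s and schedule_refresh is called with (int)(44.0 + 0.5) = 44 ms; if the subtraction comes out below 0.010 s, the code clamps it to 10 ms instead of dropping the frame.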
Example 17
int main(int argc, char *argv[]) {

    SDL_Event       event;

    VideoState      *is;

    is = av_mallocz(sizeof(VideoState));

    if(argc < 2) {
        fprintf(stderr, "Usage: %s filepath\n", argv[0]);
        exit(1);
    }
    // Register all formats and codecs
    av_register_all();

    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        exit(1);
    }

    // Make a screen to put our video
#ifndef __DARWIN__
    screen = SDL_SetVideoMode(640, 480, 0, 0);
#else
    screen = SDL_SetVideoMode(640, 480, 24, 0);
#endif
    if(!screen) {
        fprintf(stderr, "SDL: could not set video mode - exiting\n");
        exit(1);
    }

    av_strlcpy(is->filename, argv[1], 1024);

    is->pictq_mutex = SDL_CreateMutex();
    is->pictq_cond = SDL_CreateCond();

    schedule_refresh(is, 40);

    is->parse_tid = SDL_CreateThread(decode_thread, is);
    if(!is->parse_tid) {
        av_free(is);
        return -1;
    }
    for(;;) {

        SDL_WaitEvent(&event);
        switch(event.type) {
            case FF_QUIT_EVENT:
            case SDL_QUIT:
                is->quit = 1;
                /*
                 * If the video has finished playing, then both the picture and
                 * audio queues are waiting for more data.  Make them stop
                 * waiting and terminate normally.
                 */
                SDL_CondSignal(is->audioq.cond);
                SDL_CondSignal(is->videoq.cond);
                SDL_Quit();
                exit(0);
                break;

            case FF_ALLOC_EVENT:
                alloc_picture(event.user.data1);
                break;

            case FF_REFRESH_EVENT:
                video_refresh_timer(event.user.data1);
                break;

            default:
                break;
        }
    }
    return 0;

}
Example 18
int main(int argc, char* argv[]) {

	int i, videoStream, audioStream;

	VideoState	*is;
	is = av_mallocz(sizeof(VideoState));

	if(argc < 2) {
		fprintf(stderr, "Usage: test <file>\n");
		exit(1);
	}

	if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
		fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
		exit(1);
	}


	av_register_all();

	AVFormatContext *pFormatCtx = NULL;

	av_strlcpy(is->filename, argv[1], sizeof(is->filename));
	is->pictq_mutex = SDL_CreateMutex();
	is->pictq_cond	= SDL_CreateCond();

	schedule_refresh(is, 40);

	is->parse_tid = SDL_CreateThread(decode_thread, is);
	if(!is->parse_tid) {
		av_free(is);
		return -1;
	}

	// Open video file
	if(avformat_open_input(&pFormatCtx, argv[1], NULL, NULL) != 0) {
		return -1; // Couldn't open file
	}

	// Retrieve stream information
	if(avformat_find_stream_info(pFormatCtx, NULL) < 0) {
		return -1; //Couldn't find stream information
	}

	// Dump information about file onto standard error
	av_dump_format(pFormatCtx, 0, argv[1], 0);

	AVCodecContext *pCodecCtxOrig = NULL;
	AVCodecContext *pCodecCtx = NULL;

	AVCodecContext *aCodecCtxOrig = NULL;
	AVCodecContext *aCodecCtx = NULL;

	// Find the first video stream
	videoStream = -1;
	audioStream = -1;

	for(i=0; i < pFormatCtx->nb_streams; i++) {
		if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO && videoStream < 0) {
			videoStream = i;
		}
	}
	
	for(i=0; i < pFormatCtx->nb_streams; i++) {
		if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO && audioStream < 0) {
			audioStream = i;
		}
	}

	if(videoStream == -1) {
		return -1; // Didn't find a video stream
	}
	if(audioStream == -1) {
		return -1;
	}
	// Get a pointer to the codec context for the video stream
	pCodecCtxOrig = pFormatCtx->streams[videoStream]->codec;
	aCodecCtxOrig = pFormatCtx->streams[audioStream]->codec;

	AVCodec *pCodec = NULL;
	AVCodec *aCodec = NULL;

	//Find the decoder for the video stream
	pCodec = avcodec_find_decoder(pCodecCtxOrig->codec_id);
	if(pCodec == NULL) {
		return -1;
	}
	aCodec = avcodec_find_decoder(aCodecCtxOrig->codec_id);
	if(aCodec == NULL) {
		return -1;
	}
	// Copy context
	pCodecCtx = avcodec_alloc_context3(pCodec);
	if(avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0) {
		return -1;
	}

	aCodecCtx = avcodec_alloc_context3(aCodec);
	if(avcodec_copy_context(aCodecCtx, aCodecCtxOrig) != 0) {
		return -1;
	}
	SDL_AudioSpec wanted_spec, spec;

	wanted_spec.freq = aCodecCtx->sample_rate;
	wanted_spec.format = AUDIO_S16SYS;
	wanted_spec.channels = aCodecCtx->channels;
	wanted_spec.silence = 0;
	wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
	wanted_spec.callback = audio_callback;
	wanted_spec.userdata = aCodecCtx;

	if (SDL_OpenAudio(&wanted_spec, &spec) < 0) {
		fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
		return -1;
	}



	
	// Open codec
	AVDictionary *optionDict = NULL;
	if(avcodec_open2(pCodecCtx, pCodec, &optionDict) < 0) {
		return -1;
	}
	
	if(avcodec_open2(aCodecCtx, aCodec, NULL) < 0) {
		return -1;
	}

	packet_queue_init(&audioq);
	SDL_PauseAudio(0);

	
	// Allocate video frame
	AVFrame *pFrame = NULL;
	pFrame = av_frame_alloc();


	SDL_Surface *screen;
	screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0);
	if(!screen) {
		fprintf(stderr, "SDL: could not set video mode - exiting\n");
		exit(1);
	}

	SDL_Overlay *bmp = NULL;

	bmp = SDL_CreateYUVOverlay(pCodecCtx->width, 
							   pCodecCtx->height, 
							   SDL_YV12_OVERLAY, 
							   screen);


	printf("[loop]==========================\n");

	struct SwsContext *sws_ctx = NULL;
	int frameFinished;
	AVPacket packet;

	//initialize SWS context for software scaling
	sws_ctx = sws_getContext(pCodecCtx->width, 
							pCodecCtx->height,
							pCodecCtx->pix_fmt,
							pCodecCtx->width,
							pCodecCtx->height,
							AV_PIX_FMT_YUV420P,
							SWS_BILINEAR,
							NULL,
							NULL,
							NULL);

	// Read frame and display							
	i = 0;
	while(av_read_frame(pFormatCtx, &packet) >= 0) {
		if(packet.stream_index == videoStream) {
			//Decode video frame
			avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

			// Did we get a video frame?
			if(frameFinished) {
				SDL_LockYUVOverlay(bmp);

				AVPicture pict;
				pict.data[0] = bmp->pixels[0];
				pict.data[1] = bmp->pixels[2];
				pict.data[2] = bmp->pixels[1];

				pict.linesize[0] = bmp->pitches[0];
				pict.linesize[1] = bmp->pitches[2];
				pict.linesize[2] = bmp->pitches[1];

				// Convert the image into YUV format that SDL uses

				sws_scale(sws_ctx, 
						  (uint8_t const * const *)pFrame->data,
						  pFrame->linesize, 
						  0, 
						  pCodecCtx->height, 
						  pict.data, 
						  pict.linesize);

				SDL_UnlockYUVOverlay(bmp);

				SDL_Rect rect;
				rect.x = 0;
				rect.y = 0;
				rect.w = pCodecCtx->width;
				rect.h = pCodecCtx->height;
				SDL_DisplayYUVOverlay(bmp, &rect);
			}
			/* free the packet whether or not a full frame was decoded, to avoid leaking it */
			av_free_packet(&packet);
		} else if (packet.stream_index == audioStream) {
			packet_queue_put(&audioq, &packet);

		} else {
			// Free the packet that was allocated by av_read_frame
			av_free_packet(&packet);
		}

		SDL_Event event;
		SDL_PollEvent(&event);
		switch(event.type) {
		case SDL_QUIT:
			quit = 1;
			SDL_Quit();
			exit(0);
			break;
		default:
			break;
		}
	}


	// Free the YUV frame
	av_free(pFrame);

	// Close the codec
	avcodec_close(pCodecCtx);
	avcodec_close(pCodecCtxOrig);

	// Close the video file
	avformat_close_input(&pFormatCtx);
	return 0;
}
Example 19
int _tmain(int argc, _TCHAR* argv[])
{
    if(argc<2)
    {
        APP_ERROR("Please type %s aviFileName\n", argv[0]);
        return -1;
    }

    int pos;
    MainAVIHeader *psAviH;


    //integrityCheck(argv[1]);

    int nAviStrhPos;
    struct AVI_strh *psAviStrH;
    DWORD nMainAvihSize;

    int nSize;
    BYTE *pHdrl;

    int nVidsNo;

    SDL_Surface *screen; 

    nSize = aviHdrlBufferGet(argv[1], &pHdrl);

    if (nSize < 0)
        return -1;

    pos = aviHeaderGetFromHdrl(pHdrl, nSize, &nMainAvihSize);
    if (pos >= 0)
    {
        psAviH = (MainAVIHeader *)(pHdrl + pos);

        nAviStrhPos = vidsStrhGetFromHdrl(pHdrl, nSize, psAviH->dwStreams, &nVidsNo);
        if (nAviStrhPos > 0)
        {
            psAviStrH = (struct AVI_strh *)(pHdrl + nAviStrhPos);
        }
    }

    //create screen according to W x H.
    screen = sdlMainScreenInit(psAviH->dwWidth, psAviH->dwHeight);

    Uint32 nTicksInc = (psAviH->dwMicroSecPerFrame)/1000;
    Uint32 nTicksIncUs = (psAviH->dwMicroSecPerFrame);
    printf("\tFrame interval is %d Ms\n", nTicksInc);

    //release memory
    free(pHdrl);

    //create decode thread.
    VideoState aviStream;
    memset(&aviStream, 0 , sizeof(aviStream));
    strcpy(aviStream.filename, argv[1]);

    aviStream.screen = screen;
    aviStream.mutex = SDL_CreateMutex();
    aviStream.cond = SDL_CreateCond();
    aviStream.bBlited = 1;
    aviStream.bFliped = 1;
    //aviStream.nTicksInc = nTicksInc;

    aviStream.parse_tid = SDL_CreateThread(decode_thread, &aviStream);
    if(!aviStream.parse_tid) 
    {
        SDL_Quit();
        return -1;
    }

    SDL_Event   event;
    int res;
    Uint32 nRefreshTicks = 0;
    for(;;) 
    {
        SDL_WaitEvent(&event);
        switch(event.type) 
        {
        case SDL_KEYDOWN:
            switch (event.key.keysym.sym) 
            {
                case SDLK_SPACE: 

                    if(!aviStream.pause)
                    {
                        aviStream.pause = 1;
                        printf("\t\t pause!\n");
                    }
                    else
                    {
                        //reset time control 
                        if(REFRESH_TIME_CTRL == TIME_CTRL_0)
                        {
                            nRefreshTicks = 0;
                        }
                        else
                        {
                            aviStream.nCurFrameCnt = 0;
                            aviStream.nRefTicks = 0;
                        }

                        printf("\n\n resume!\n");
                        SDL_LockMutex(aviStream.mutex);
                        aviStream.pause = 0; 
                        SDL_CondSignal(aviStream.cond);
                        SDL_UnlockMutex(aviStream.mutex);
                    }
                    break;
            }
            break;
        case PW_QUIT_EVENT:
        case SDL_QUIT:
            aviStream.quit = 1;
            SDL_CondSignal(aviStream.cond);
            SDL_WaitThread(aviStream.parse_tid, NULL);

            SDL_Quit();
            SDL_DestroyCond(aviStream.cond);
            SDL_DestroyMutex(aviStream.mutex);
            return 0;
            break;

        case PW_FRAME_REPEAT:
                if(REFRESH_TIME_CTRL == TIME_CTRL_0)
                {
                    // we do not plan to support TIME_CTRL_0 mode here.
                }
                else
                {
                    // we just need to advance the count; the next frame refresh will then get the corresponding delay.
                    aviStream.nCurFrameCnt++;

                    // for the repeated-frame case, if it is the first frame, we need to update nRefTicks
                    if(1 == aviStream.nCurFrameCnt)
                    {
                        aviStream.nRefTicks = SDL_GetTicks();
                    }

                    printf("\t\t [%d] frame repeat. frame timing ctrl 1\n", aviStream.nCurFrameCnt);
                }



            break;

        case PW_BLIT_EVENT:
            res = doBlit(event.user.data1);

            // schedule the refresh timer if the blit succeeded, or do nothing if it was postponed.
            if (res)
            {
                if(REFRESH_TIME_CTRL == TIME_CTRL_0)
                {
                    Uint32 ticks;
                    ticks = SDL_GetTicks();
                    int delay; 
                    static int nFrameCnt = 0;

                    delay = (nRefreshTicks > ticks)? (nRefreshTicks - ticks) : 0;  
                    schedule_refresh(&aviStream, delay);

                    printf("\t\t[%d] delay tick = %d Ms. frame timing ctrl 0\n", nFrameCnt, delay);
                    nFrameCnt++;
                }
                else
                {
                    aviStream.nCurFrameCnt++;

                    Uint32 nTargetTicks;
                    nTargetTicks = aviStream.nRefTicks + ((aviStream.nCurFrameCnt)*nTicksIncUs)/1000;

                    Uint32 ticks;
                    ticks = SDL_GetTicks();
                    int delay;

                    delay = (nTargetTicks > ticks)? (nTargetTicks - ticks) : 0;
                    schedule_refresh(&aviStream, delay);

                    //printf("\t\t [%d] delay tick = %d Ms. frame timing ctrl 1\n", aviStream.nCurFrameCnt, delay);
                    printf("\t\t [%d] delay tick = %d Ms. Show at %d Ms. frame timing ctrl 1\n", aviStream.nCurFrameCnt, delay, nTargetTicks-aviStream.nRefTicks);
                }

            }


            break;

        case PW_FLIP_EVENT:
            
            if(REFRESH_TIME_CTRL == TIME_CTRL_0)
            {
                //update nRefreshTicks
                Uint32 ticks;
                ticks = SDL_GetTicks();
                nRefreshTicks = ticks + nTicksInc;
                //printf("\t\t Refresh tick = %d Ms\n", ticks);
            }
            else
            {
                if(1 == aviStream.nCurFrameCnt)
                {
                    aviStream.nRefTicks = SDL_GetTicks();
                }
            }

            doRefresh(event.user.data1);
            break;

        default:
            break;
        }
    }
   
    return 0;

}
Example 20
int main(int argc, char *argv[]) {

	SDL_Event event;
	VideoState *is;
	int i;
	puts("start");
	global_mutex_lock = SDL_CreateMutex();

	is = av_mallocz(sizeof(VideoState));
	if (argc < 2){
		fprintf(stderr, "Usage: test <file>\n");
		exit(1);
	}
		
	av_register_all();		// Register all formats and codecs
	puts("avregister");
	//if (av_register_protocol(&e2URLProtocol) < 0){
	//	printf("Error - URL protocol \n");
	//	exit(-1)
	//}
	if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)){
		fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
		exit(1);
	}
	for(i=0; i<MAX_CHANNELS;i++){
		// Make a screen to put our video
		#ifndef __DARWIN__
		screen[i] = SDL_SetVideoMode(640, 480, 0, 0);
		#else
		screen[i] = SDL_SetVideoMode(640, 480, 24, 0);
		#endif
		if (!screen[i]){
			fprintf(stderr, "SDL: could not set video mode - exiting\n");
			exit(1);
		}
	}
	for(i=0; i<MAX_CHANNELS;i++){
		global_video_state[i] = av_mallocz(sizeof(VideoState));
		global_video_state[i]->videoIndex = i;
		puts("screen created");
		printf("i is: %d\n",i);
		av_strlcpy(global_video_state[i]->filename, argv[i+1], sizeof(global_video_state[i]->filename));
		puts("avstrlcpy");	
		global_video_state[i]->pictq_mutex = SDL_CreateMutex();
		global_video_state[i]->pictq_cond = SDL_CreateCond();
		schedule_refresh(global_video_state[i], 40);
		global_video_state[i]->av_sync_type = DEFAULT_AV_SYNC_TYPE;
		global_video_state[i]->parse_tid = SDL_CreateThread(decode_thread, global_video_state[i]);
		puts("main var created");
		if (!global_video_state[i]->parse_tid) {
			av_free(global_video_state[i]);
			return -1;
		}
	}
	av_init_packet(&f_pkt);
	puts("av_init_packet");
	f_pkt.data = (unsigned char*)"FLUSH";

	for (;;) {
		double inc , pos;
		SDL_WaitEvent(&event);
		switch (event.type) {
			case SDL_KEYDOWN:
				switch (event.key.keysym.sym) {
					case SDLK_LEFT:
						inc = -10.0;
						goto do_seek;
					case SDLK_RIGHT:
						inc = 10.0;
						goto do_seek;
					case SDLK_UP:
						inc = 60.0;
						goto do_seek;
					case SDLK_DOWN:
						inc = -60.0;
						goto do_seek;
					do_seek:
						SDL_LockMutex(global_mutex_lock);
						if (global_video_state[global_videoIndex]){
							pos = get_master_clock(global_video_state[global_videoIndex]);
							pos += inc;
							stream_seek(global_video_state[global_videoIndex],(int64_t)(pos *AV_TIME_BASE),inc);
						}
						SDL_UnlockMutex(global_mutex_lock);
						break;
					case SDLK_b:
						global_video_state[global_videoIndex]->color_req = 'b';
						break;
					case SDLK_r:
						global_video_state[global_videoIndex]->color_req = 'r';
						break;
					case SDLK_g:
						global_video_state[global_videoIndex]->color_req = 'g';
						break;
					case SDLK_w:
						global_video_state[global_videoIndex]->color_req = 'w';
						break;
					case SDLK_n:
						global_video_state[global_videoIndex]->color_req = 'n';
						break;
					case SDLK_1:
						change_channel(1);
						break;
					case SDLK_2:
						change_channel(2);
						break;
					case SDLK_3:
						change_channel(3);
						break;
					case SDLK_4:
						change_vidchannel(1);
						break;
					case SDLK_5:
						change_vidchannel(2);
						break;
					case SDLK_6:
						change_vidchannel(3);
						break;
					case SDLK_7:
						change_audchannel(1);
						break;
					case SDLK_8:
						change_audchannel(2);
						break;
					case SDLK_9:
						change_audchannel(3);
						break;
				default:
					break;
				}
				break;
			case FF_QUIT_EVENT:
			case SDL_QUIT:
				for(i=0; i<MAX_CHANNELS; i++){
					global_video_state[i]->quit = 1;
					SDL_CondSignal(global_video_state[i]->audioq.cond);
					SDL_CondSignal(global_video_state[i]->videoq.cond);
				}
				SDL_Quit();
				exit(0);
				break;
			case FF_ALLOC_EVENT:
				alloc_picture(event.user.data1);
				break;
			case FF_REFRESH_EVENT:
				video_refresh_timer(event.user.data1);
				break;
			default:
				break;
		}
	}
	return 0;

}