Example #1
File: glue.c Project: GWRon/sdl.mod
int bmx_SDL_UpdateYUVTexture(SDL_Texture * texture, Uint8 * yPlane, int yPitch, Uint8 * uPlane, int uPitch, Uint8 * vPlane, int vPitch, int x, int y, int w, int h) {
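	// Negative dimensions are treated as "update the whole texture"
	// (a NULL rect); otherwise x/y/w/h select the region to update.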
	if (x < 0 || y < 0 || w < 0 || h < 0) {
		return SDL_UpdateYUVTexture(texture, 0, yPlane, yPitch, uPlane, uPitch, vPlane, vPitch);
	} else {
		SDL_Rect r = { x, y, w, h };
		return SDL_UpdateYUVTexture(texture, &r, yPlane, yPitch, uPlane, uPitch, vPlane, vPitch);
	}
}
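
For reference, a minimal caller might look like the following sketch (hypothetical names: texture is assumed to be an SDL_PIXELFORMAT_IYUV streaming texture and frame a tightly packed I420 buffer, so the chroma pitch is half the luma pitch):

// Sketch only: negative dimensions make the wrapper fall back to a NULL
// rect and update the whole texture.
int w = 640, h = 480;                        // assumed texture size
Uint8 *yPlane = frame;                       // w*h luma bytes
Uint8 *uPlane = frame + w * h;               // (w/2)*(h/2) chroma bytes
Uint8 *vPlane = uPlane + (w / 2) * (h / 2);  // (w/2)*(h/2) chroma bytes
bmx_SDL_UpdateYUVTexture(texture, yPlane, w, uPlane, w / 2, vPlane, w / 2,
                         -1, -1, -1, -1);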
Example #2
/**
    \fn displayImage
*/
bool sdlRenderImpl::displayImage(ADMImage *pic)
{
    if(!sdl_texture)
        return false;
    if(useYV12)
    {
        int imagePitch[3];
        uint8_t  *imagePtr[3];
        pic->GetPitches(imagePitch);
        pic->GetWritePlanes(imagePtr);
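        // Note: the U and V arguments below are fed from imagePtr[2] and
        // imagePtr[1], i.e. swapped relative to plain Y/U/V indexing,
        // presumably to match the source image's plane order.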
        SDL_UpdateYUVTexture(sdl_texture,
                             NULL,
                             imagePtr[0], imagePitch[0],
                             imagePtr[2], imagePitch[2],
                             imagePtr[1], imagePitch[1]);
    }else
    { // YUYV
        ADM_warning("[SDL] YUYV disabled\n");
        return false;
    }
    SDL_RenderClear(sdl_renderer);
    SDL_RenderCopy(sdl_renderer, sdl_texture, NULL, NULL);
    refresh();
    return true;
}
Example #3
void updateBmp(VideoPlayer **ps, struct SwsContext *sws_ctx, void *bmp, AVFrame *pFrame, int width, int height) {
	VideoPlayer *is = *ps;

	AVPicture pict;
	pict.data[0] = is->yPlane;
	pict.data[1] = is->uPlane;
	pict.data[2] = is->vPlane;
	pict.linesize[0] = width;
	pict.linesize[1] = is->uvPitch;
	pict.linesize[2] = is->uvPitch;

	// Convert the image into YUV format that SDL uses
	sws_scale(sws_ctx, (uint8_t const * const *) pFrame->data,
			pFrame->linesize, 0, height, pict.data,
			pict.linesize);

	SDL_UpdateYUVTexture(
			bmp,
			NULL,
			is->yPlane,
			width,
			is->uPlane,
			is->uvPitch,
			is->vPlane,
			is->uvPitch);
}
Example #4
static int sdl_submit_decode_unit(PDECODE_UNIT decodeUnit) {
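  // Reassemble the decode unit's scattered buffer list into one contiguous
  // buffer, decode it, and render the resulting frame (ffmpeg_buffer and
  // DECODER_BUFFER_SIZE are presumably globals from the surrounding file).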
  if (decodeUnit->fullLength < DECODER_BUFFER_SIZE) {
    PLENTRY entry = decodeUnit->bufferList;
    int length = 0;
    while (entry != NULL) {
      memcpy(ffmpeg_buffer+length, entry->data, entry->length);
      length += entry->length;
      entry = entry->next;
    }

    int ret = ffmpeg_decode(ffmpeg_buffer, length);
    if (ret == 1) {
      AVFrame* frame = ffmpeg_get_frame();

      SDL_UpdateYUVTexture(bmp, NULL, frame->data[0], frame->linesize[0], frame->data[1], frame->linesize[1], frame->data[2], frame->linesize[2]);
      SDL_RenderClear(renderer);
      SDL_RenderCopy(renderer, bmp, NULL, NULL);
      SDL_RenderPresent(renderer);
    }
  } else {
    fprintf(stderr, "Video decode buffer too small\n");
    exit(1);
  }

  return DR_OK;
}
Example #5
int video_prepare_yuv_frame(yuv_frame *src)
{
	if(opengl) return video_gl_prepare_yuv_frame(src);
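
	// Note: src->cr feeds SDL's U-plane argument and src->cb the V-plane
	// argument; the ordering presumably matches this project's plane layout.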

	SDL_UpdateYUVTexture(texture, NULL, src->lum, stored_yuv_mode.width,
	        src->cr, stored_yuv_mode.width/2, src->cb, stored_yuv_mode.width/2);
	return 1;
}
Example #6
void DWVideo::draw()
{
	if(frame_end)
	{
		// use linesize[0] rather than the frame width as the Y pitch:
		// decoders may pad rows beyond the visible width
		SDL_UpdateYUVTexture(m_texture,NULL,m_frame->data[0],m_frame->linesize[0],
			m_frame->data[1],m_frame->linesize[1],m_frame->data[2],m_frame->linesize[2]);

		TextureManager::Instance()->drawVideo(m_texture,
			(int)m_position.getX(), (int)m_position.getY(),m_width, m_height,game::Instance()->getRenderer());
	}
}
Example #7
void Display::refresh(
	std::array<uint8_t*, 3> planes, std::array<size_t, 3> pitches) {
	check_SDL(!SDL_UpdateYUVTexture(
		texture_.get(), nullptr,
		planes[0], pitches[0],
		planes[1], pitches[1],
		planes[2], pitches[2]), "texture update");
	SDL_RenderClear(renderer_.get());
	SDL_RenderCopy(renderer_.get(), texture_.get(), nullptr, nullptr);
	SDL_RenderPresent(renderer_.get());
}
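
check_SDL is a project-specific helper not shown here; a minimal stand-in, assuming it simply throws when its first argument is false, might be:

#include <stdexcept>
#include <string>
#include <SDL2/SDL.h>

// Hypothetical stand-in: the calls above pass a truthy value on success
// (note the ! around SDL_UpdateYUVTexture, which returns 0 on success).
static void check_SDL(bool ok, const std::string &what) {
	if (!ok)
		throw std::runtime_error(what + " failed: " + SDL_GetError());
}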
Example #8
bool SDLVideo::processOneFrame(Data data) {
	SDL_Event event;
	while (SDL_PollEvent(&event)) {
		switch (event.type) {
		case SDL_WINDOWEVENT:
			if (event.window.event == SDL_WINDOWEVENT_RESIZED) {
				SDL_RenderSetViewport(renderer, nullptr);
				displayrect->w = event.window.data1;
				displayrect->h = event.window.data2;
			}
			break;
		case SDL_QUIT:
#ifdef _MSC_VER
			GenerateConsoleCtrlEvent(CTRL_C_EVENT, 0);
#else
			std::raise(SIGTERM);
#endif
			return false;
		}
	}

	auto pic = safe_cast<const DataPicture>(data);
	if (pic->getFormat() != pictureFormat) {
		pictureFormat = pic->getFormat();
		createTexture();
	}
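
	// Pace presentation: wait until the picture's timestamp (plus a fixed
	// preroll) comes due on the module clock before uploading the frame.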

	auto const now = m_clock->now();
	auto const timestamp = pic->getTime() + PREROLL_DELAY; // assume timestamps start at zero
	auto const delay = std::max<int64_t>(0, timestamp - now);
	auto const delayInMs = clockToTimescale(delay, 1000);
	SDL_Delay((Uint32)delayInMs);

	if (pictureFormat.format == YUV420P) {
		SDL_UpdateYUVTexture(texture, nullptr,
		                     pic->getPlane(0), (int)pic->getPitch(0),
		                     pic->getPlane(1), (int)pic->getPitch(1),
		                     pic->getPlane(2), (int)pic->getPitch(2));
	} else {
		SDL_UpdateTexture(texture, nullptr, pic->getPlane(0), (int)pic->getPitch(0));
	}
	SDL_RenderCopy(renderer, texture, nullptr, displayrect.get());
	SDL_RenderPresent(renderer);

	m_NumFrames++;

	return true;
}
Example #9
int queue_picture(VideoState* is, AVFrame* pFrame, AVFrame* pFrameYUV,
		double pts) {
	VideoPicture* vp;

	SDL_LockMutex(is->pictq_mutex);
	while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE && !is->quit) {
		SDL_CondWait(is->pictq_cond, is->pictq_mutex);
	}
	SDL_UnlockMutex(is->pictq_mutex);

	if (is->quit)
		return -1;

	vp = &is->pictq[is->pictq_windex];
	if (!vp->bmp || vp->width != is->video_ctx->width
			|| vp->height != is->video_ctx->height) {
		vp->allocated = 0;
		alloc_picture(is);
		if (is->quit) {
			return -1;
		}
	}
	if (vp->bmp) {
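		// A texture exists for this slot: convert and upload the frame
		// under the screen mutex, then advance the write index.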
		SDL_LockMutex(screen_mutex);
		vp->pts = pts;
		sws_scale(is->sws_ctx, (const uint8_t* const *) pFrame->data,
				pFrame->linesize, 0, is->video_ctx->height, pFrameYUV->data,
				pFrameYUV->linesize);
		SDL_UpdateYUVTexture(vp->bmp, NULL, pFrameYUV->data[0],
				pFrameYUV->linesize[0], pFrameYUV->data[1],
				pFrameYUV->linesize[1], pFrameYUV->data[2],
				pFrameYUV->linesize[2]);
		SDL_UnlockMutex(screen_mutex);

		if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE) {
			is->pictq_windex = 0;
		}
		SDL_LockMutex(is->pictq_mutex);
		is->pictq_size++;
		SDL_UnlockMutex(is->pictq_mutex);
	}
	return 0;
}
Example #10
void sdl_loop() {
  SDL_Event event;
  while(!done && SDL_WaitEvent(&event)) {
    switch (sdlinput_handle_event(&event)) {
    case SDL_QUIT_APPLICATION:
      done = true;
      break;
    case SDL_TOGGLE_FULLSCREEN:
      fullscreen_flags ^= SDL_WINDOW_FULLSCREEN_DESKTOP;
      SDL_SetWindowFullscreen(window, fullscreen_flags);
      break; // without this, toggling fullscreen fell through and re-grabbed the mouse
    case SDL_MOUSE_GRAB:
      SDL_SetRelativeMouseMode(SDL_TRUE);
      break;
    case SDL_MOUSE_UNGRAB:
      SDL_SetRelativeMouseMode(SDL_FALSE);
      break;
    default:
      if (event.type == SDL_QUIT)
        done = true;
      else if (event.type == SDL_USEREVENT) {
        if (event.user.code == SDL_CODE_FRAME) {
          if (++sdlCurrentFrame <= sdlNextFrame - SDL_BUFFER_FRAMES) {
            //Skip frame
          } else if (SDL_LockMutex(mutex) == 0) {
            Uint8** data = ((Uint8**) event.user.data1);
            int* linesize = ((int*) event.user.data2);
            SDL_UpdateYUVTexture(bmp, NULL, data[0], linesize[0], data[1], linesize[1], data[2], linesize[2]);
            SDL_UnlockMutex(mutex);
            SDL_RenderClear(renderer);
            SDL_RenderCopy(renderer, bmp, NULL, NULL);
            SDL_RenderPresent(renderer);
          } else
            fprintf(stderr, "Couldn't lock mutex\n");
        }
      }
    }
  }

  SDL_DestroyWindow(window);
  SDL_Quit();
}
Example #11
static void UpdateTextureFromMSPicture (SDL_Texture **pTex, MSPicture *pic, const char *viewName) {
  int texW, texH;

  if (!pic) {
    if (*pTex) SDL_DestroyTexture (*pTex);
    *pTex = NULL;
  }
  else if (!pic->planes[0] || !pic->w || !pic->h) {
    if (*pTex) SDL_DestroyTexture (*pTex);
    *pTex = NULL;
  }
  else {
    // Check if the texture format is outdated...
    if (*pTex) {
      SDL_QueryTexture (*pTex, NULL, NULL, &texW, &texH);
      if (pic->w != texW || pic->h != texH) {
        // Format changed -> need to recreate the texture...
        SDL_DestroyTexture (*pTex);
        *pTex = NULL;
      }
    }
    // Create texture object (again) if necessary...
    if (!(*pTex)) {
      *pTex = SDL_CreateTexture (sdlRenderer, SDL_PIXELFORMAT_YV12,
                                 SDL_TEXTUREACCESS_STATIC, pic->w, pic->h);
        // Texture parameters taken from https://forums.libsdl.org/viewtopic.php?t=9898, "SDL 2.0 and ffmpeg. How to render texture in new version."
      if (!(*pTex)) {
        printf ("E: 'SDL_CreateTexture' failed for video texture: %s\n", SDL_GetError ());
      }
      SDL_SetTextureBlendMode (*pTex, SDL_BLENDMODE_NONE);
      printf ("I: Received resolution of %s view: %i x %i pixels.\n",
              viewName, pic->w, pic->h);
    }
    // Update the texture...
    if (SDL_UpdateYUVTexture (*pTex, NULL, pic->planes[0], pic->strides[0], pic->planes[1], pic->strides[1], pic->planes[2], pic->strides[2]) != 0) {
      printf ("E: 'SDL_UpdateYUVTexture' failed: %s\n", SDL_GetError ());
    }
  }
}
Example #12
int main(int argc, char *argv[]) {
  AVFormatContext *pFormatCtx = NULL;
  int             i, videoStream, audioStream;
  AVCodecContext  *pCodecCtx = NULL;
  AVCodec         *pCodec = NULL;
  AVFrame         *pFrame = NULL; 
  AVPacket        packet;
  int             frameFinished;
  //float           aspect_ratio;
  
  AVCodecContext  *aCodecCtx = NULL;
  AVCodec         *aCodec = NULL;

    //SDL_Overlay     *bmp = NULL;
    //SDL_Surface     *screen = NULL;
    SDL_Window *m_pWindow = NULL;
    SDL_Renderer *m_pRenderer = NULL;
    
  SDL_Rect        rect;
  SDL_Event       event;
  SDL_AudioSpec   wanted_spec, spec;

  //struct SwsContext   *sws_ctx            = NULL;
  AVDictionary        *videoOptionsDict   = NULL;
  AVDictionary        *audioOptionsDict   = NULL;

  if(argc < 2) {
    fprintf(stderr, "Usage: test <file>\n");
    exit(1);
  }
  // Register all formats and codecs
  av_register_all();
  
  if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER))
  {
    fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
    exit(1);
  }

  // Open video file
  if(avformat_open_input(&pFormatCtx, argv[1], NULL, NULL)!=0)
    return -1; // Couldn't open file
  
  // Retrieve stream information
  if(avformat_find_stream_info(pFormatCtx, NULL)<0)
    return -1; // Couldn't find stream information
  
  // Dump information about file onto standard error
  av_dump_format(pFormatCtx, 0, argv[1], 0);
  
  // Find the first video stream
  videoStream=-1;
  audioStream=-1;
  for(i=0; i<pFormatCtx->nb_streams; i++) {
    if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO &&
       videoStream < 0) {
      videoStream=i;
       // printf("video stream:%d",i);
    }
    if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO &&
       audioStream < 0) {
      audioStream=i;
       // printf("audio stream:%d",i);
    }
  }
    
//    for(i=0; i<pFormatCtx->nb_streams; i++) {
//        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
//            printf("video stream:%d\n",i);
//        }
//        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO ) {
//            printf("audio stream:%d\n",i);
//        }
//    }
  if(videoStream==-1)
    return -1; // Didn't find a video stream
  if(audioStream==-1)
    return -1;
   
  aCodecCtx=pFormatCtx->streams[audioStream]->codec;
    
    
    int count = SDL_GetNumAudioDevices(0);
    
    for (int i = 0; i < count; ++i) {
        SDL_Log("Audio device %d: %s", i, SDL_GetAudioDeviceName(i, 0));
    }
    
    
  // Set audio settings from codec info
  wanted_spec.freq = aCodecCtx->sample_rate;
  wanted_spec.format = AUDIO_S16SYS;
  wanted_spec.channels = aCodecCtx->channels;
  wanted_spec.silence = 0;
  wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
  wanted_spec.callback = audio_callback;
  wanted_spec.userdata = aCodecCtx;
  
//  if(SDL_OpenAudio(&wanted_spec, &spec) < 0)
//  {
//    fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
//    return -1;
//  }
    SDL_AudioDeviceID dev;
    dev = SDL_OpenAudioDevice(NULL, 0, &wanted_spec, &spec, SDL_AUDIO_ALLOW_FORMAT_CHANGE);
    if(dev == 0)
    {
         fprintf(stderr, "Failed to open audio: %s\n", SDL_GetError());
    }
    else
    {
        if(wanted_spec.format != spec.format){
               fprintf(stderr, "We didn't get AUDIO_S16SYS audio format.\n");
               return -1;
        }
    }
    
    
  aCodec = avcodec_find_decoder(aCodecCtx->codec_id);
  if(!aCodec)
  {
    fprintf(stderr, "Unsupported codec!\n");
    return -1;
  }
  avcodec_open2(aCodecCtx, aCodec, &audioOptionsDict);

  // audio_st = pFormatCtx->streams[index]
  packet_queue_init(&audioq);
  //SDL_PauseAudio(0);
    SDL_PauseAudioDevice(dev,0);
    
  // Get a pointer to the codec context for the video stream
  pCodecCtx=pFormatCtx->streams[videoStream]->codec;
  
  // Find the decoder for the video stream
  pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
  if(pCodec==NULL)
  {
    fprintf(stderr, "Unsupported codec!\n");
    return -1; // Codec not found
  }
  // Open codec
  if(avcodec_open2(pCodecCtx, pCodec, &videoOptionsDict)<0)
    return -1; // Could not open codec
  
  // Allocate video frame
    pFrame=av_frame_alloc();
    AVFrame*   m_pFrameYUV = av_frame_alloc();
    //int t_alloc_ret = av_image_alloc(m_pFrameYUV->data,m_pFrameYUV->linesize,pCodecCtx->width,pCodecCtx->height,AV_PIX_FMT_YUV420P,1);
//    int t_size0 = avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->coded_width, pCodecCtx->coded_height);
//    int t_size1 = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->coded_width, pCodecCtx->coded_height,1);
    //uint8_t *  out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->coded_width, pCodecCtx->coded_height));
    uint8_t *  out_buffer = (uint8_t *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1));
    //avpicture_fill((AVPicture *)m_pFrameYUV , out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->coded_width, pCodecCtx->coded_height);
    av_image_fill_arrays(m_pFrameYUV->data , m_pFrameYUV->linesize, out_buffer,AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height,1);
    
    
    struct SwsContext *img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,pCodecCtx->sw_pix_fmt,
                                                        pCodecCtx->width, pCodecCtx->height,AV_PIX_FMT_YUV420P,
                                                        SWS_BICUBIC,
                                                        NULL, NULL, NULL);

  // Make a screen to put our video

//#ifndef __DARWIN__
//        screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0);
//#else
//        screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0);
//#endif
//    
//    
//  if(!screen) {
//    fprintf(stderr, "SDL: could not set video mode - exiting\n");
//    exit(1);
//  }
  
  // Allocate a place to put our YUV image on that screen
//  bmp = SDL_CreateYUVOverlay(pCodecCtx->width,
//				 pCodecCtx->height,
//				 SDL_YV12_OVERLAY,
//				 screen);
    // Make a screen to put our video
    m_pWindow = SDL_CreateWindow("test windows", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,pCodecCtx->width, pCodecCtx->height,SDL_WINDOW_SHOWN);
    if(!m_pWindow)
    {
        printf("SDL: could not create window - exiting:%s\n",SDL_GetError());
        return -1;
    }
    
    m_pRenderer = SDL_CreateRenderer(m_pWindow, -1, 0);
    SDL_RenderClear(m_pRenderer);
    SDL_Texture *m_pSdlTexture = SDL_CreateTexture(m_pRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING,
                                                   pCodecCtx->width, pCodecCtx->height);
    rect.x = 0;
    rect.y = 0;
    rect.w = pCodecCtx->width;
    rect.h = pCodecCtx->height;
    
    
    
    
  // Read packets and render decoded frames
  i=0;
  while(av_read_frame(pFormatCtx, &packet)>=0) {
    // Is this a packet from the video stream?
    if(packet.stream_index==videoStream) {
      // Decode video frame
      avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, 
			   &packet);
      
      // Did we get a video frame?
      if(frameFinished)
      {
    
//SDL_LockTexture(m_pSdlTexture, &rect, m_pFrameYUV->data, m_pFrameYUV->linesize);
          
	//SDL_LockYUVOverlay(bmp);

//	AVPicture pict;
//	pict.data[0] = bmp->pixels[0];
//	pict.data[1] = bmp->pixels[2];
//	pict.data[2] = bmp->pixels[1];
//
//	pict.linesize[0] = bmp->pitches[0];
//	pict.linesize[1] = bmp->pitches[2];
//	pict.linesize[2] = bmp->pitches[1];

	// Convert the image into YUV format that SDL uses
//    sws_scale
//    (
//        sws_ctx, 
//        (uint8_t const * const *)pFrame->data, 
//        pFrame->linesize, 
//        0,
//        pCodecCtx->height,
//        pict.data,
//        pict.linesize
//    );
      sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
                m_pFrameYUV->data, m_pFrameYUV->linesize);
          
	//SDL_UnlockYUVOverlay(bmp);
     // SDL_UnlockTexture(m_pSdlTexture);
          
      SDL_UpdateYUVTexture(m_pSdlTexture, &rect,
                           m_pFrameYUV->data[0], m_pFrameYUV->linesize[0],
                           m_pFrameYUV->data[1], m_pFrameYUV->linesize[1],
                           m_pFrameYUV->data[2], m_pFrameYUV->linesize[2]);
//	rect.x = 0;
//	rect.y = 0;
//	rect.w = pCodecCtx->width;
//	rect.h = pCodecCtx->height;
	//SDL_DisplayYUVOverlay(bmp, &rect);
      SDL_RenderClear( m_pRenderer ); // clear the backbuffer before copying
      SDL_RenderCopy( m_pRenderer, m_pSdlTexture,  NULL, &rect);
      
      SDL_RenderPresent(m_pRenderer);
      
      SDL_Delay(38);
          
      }
//	av_free_packet(&packet);
      // unreference the video packet whether or not a frame was produced
      av_packet_unref(&packet);
    }
    else if(packet.stream_index==audioStream)
    {
      packet_queue_put(&audioq, &packet);
    }
    else
    {
//      av_free_packet(&packet);
      av_packet_unref(&packet);
    }
    // Handle any pending window events
    SDL_PollEvent(&event);
    switch(event.type)
    {
    case SDL_QUIT:
      quit = 1;
      SDL_Quit();
      exit(0);
      break;
    default:
      break;
    }

  }

  // Free the YUV frame
  av_free(pFrame);
  
  // Close the codec
  avcodec_close(pCodecCtx);
  
  // Close the video file
  avformat_close_input(&pFormatCtx);
  
  return 0;
}
Example #13
int queue_picture(VideoState *is, AVFrame *pFrame, double pts) {
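  // Note: m_pFrameYUV and m_pSdlTexture used below are presumably globals
  // defined elsewhere in this tutorial's source.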

  VideoPicture *vp;
  AVPicture pict;

  /* wait until we have space for a new pic */
  SDL_LockMutex(is->pictq_mutex);
  while(is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE &&
	!is->quit) {
    SDL_CondWait(is->pictq_cond, is->pictq_mutex);
  }
  SDL_UnlockMutex(is->pictq_mutex);

  if(is->quit)
    return -1;

  // windex is set to 0 initially
  vp = &is->pictq[is->pictq_windex];

  /* allocate or resize the buffer! */
//  if(!vp->bmp ||
    if(!vp->m_pFrame ||
       vp->width != is->video_st->codec->width ||
       vp->height != is->video_st->codec->height) {
    SDL_Event event;

    vp->allocated = 0;
    /* we have to do it in the main thread */
    event.type = FF_ALLOC_EVENT;
    event.user.data1 = is;
    SDL_PushEvent(&event);

    /* wait until we have a picture allocated */
    SDL_LockMutex(is->pictq_mutex);
    while(!vp->allocated && !is->quit) {
      SDL_CondWait(is->pictq_cond, is->pictq_mutex);
    }
    SDL_UnlockMutex(is->pictq_mutex);
    if(is->quit) {
      return -1;
    }
  }
  /* We have a place to put our picture on the queue */
  /* If we are skipping a frame, do we set this to null 
     but still return vp->allocated = 1? */


//  if(vp->bmp) {
    if(vp->m_pFrame){

//    SDL_LockYUVOverlay(vp->bmp);
    
    /* point pict at the queue */

//    pict.data[0] = vp->bmp->pixels[0];
//    pict.data[1] = vp->bmp->pixels[2];
//    pict.data[2] = vp->bmp->pixels[1];
//    
//    pict.linesize[0] = vp->bmp->pitches[0];
//    pict.linesize[1] = vp->bmp->pitches[2];
//    pict.linesize[2] = vp->bmp->pitches[1];
    
    // Convert the image into YUV format that SDL uses
//    sws_scale
//    (
//        is->sws_ctx,
//        (uint8_t const * const *)pFrame->data,
//        pFrame->linesize,
//        0, 
//        is->video_st->codec->height, 
//        pict.data, 
//        pict.linesize
//    );
    
//    SDL_UnlockYUVOverlay(vp->bmp);
        sws_scale(is->sws_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, is->video_st->codec->height,
                  m_pFrameYUV->data, m_pFrameYUV->linesize);
        SDL_Rect        rect;
        rect.x = 0;
        rect.y = 0;
        rect.w = is->video_st->codec->width;
        rect.h = is->video_st->codec->height;
        SDL_UpdateYUVTexture(m_pSdlTexture, &rect,
                             m_pFrameYUV->data[0], m_pFrameYUV->linesize[0],
                             m_pFrameYUV->data[1], m_pFrameYUV->linesize[1],
                             m_pFrameYUV->data[2], m_pFrameYUV->linesize[2]);
        
    vp->pts = pts;

    /* now we inform our display thread that we have a pic ready */
    if(++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE) {
      is->pictq_windex = 0;
    }
    SDL_LockMutex(is->pictq_mutex);
    is->pictq_size++;
    SDL_UnlockMutex(is->pictq_mutex);
  }
  return 0;
}
Example #14
void CVideoDecoder::decode(void)
{
	// Read packets and render decoded frames
	//if (!bOpen && buf_deque.size() >2){
		if (open() < 0) 
			return;
	//}

	rect_src.x = 0;
	rect_src.y = 0;
	rect_src.w = pCodecCtx->width;
	rect_src.h = pCodecCtx->height;
	int ret = -1;

	while (!bStop && (ret = av_read_frame(pFormatCtx, &packet)) >= 0) {
		// Is this a packet from the video stream?
		if (packet.stream_index == videoStream) {
			// Decode video frame
			avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

			// Did we get a video frame?
			if (frameFinished) {

				if (1){
					// Convert the image into YUV format that SDL uses
					sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
						pFrame->linesize, 0, pCodecCtx->height,
						pFrameYUV->data, pFrameYUV->linesize);

					//SDL_UpdateTexture(bmp, &rect_src, pFrameYUV->data[0], pFrameYUV->linesize[0]);
					SDL_UpdateYUVTexture(texture, &rect_src,
						pFrameYUV->data[0], pFrameYUV->linesize[0],
						pFrameYUV->data[1], pFrameYUV->linesize[1],
						pFrameYUV->data[2], pFrameYUV->linesize[2]);
				} else 
					SDL_UpdateTexture(texture, &rect_src, pFrame->data[0], pFrame->linesize[0]);
				SDL_RenderClear(renderer);
				SDL_RenderCopy(renderer, texture, &rect_src, &rect_dst);
				SDL_RenderPresent(renderer);
				//SDL_Delay(66);
			}
			
		}

		// Free the packet that was allocated by av_read_frame
		av_free_packet(&packet);
		SDL_PollEvent(&event);
		switch (event.type) {
		case SDL_QUIT:
			SDL_Quit();
			return;
			break;
		default:
			break;
		}

	}
	if (ret < 0){
		const char* err = SDL_GetError();
		printf("%s", err);
	}
}
Example #15
int main(int argc, char* argv[])
{
	AVFormatContext	*pFormatCtx;
	int				i, videoindex;
	AVCodecContext	*pCodecCtx;
	AVCodec			*pCodec;
	AVFrame	*pFrame,*pFrameYUV;
	uint8_t *out_buffer;
	AVPacket *packet;
	int y_size;
	int ret, got_picture;
	struct SwsContext *img_convert_ctx;

	char filepath[]="bigbuckbunny_480x272.h265";
	//SDL---------------------------
	int screen_w=0,screen_h=0;
	SDL_Window *screen; 
	SDL_Renderer* sdlRenderer;
	SDL_Texture* sdlTexture;
	SDL_Rect sdlRect;

	FILE *fp_yuv;

	av_register_all();
	avformat_network_init();
	pFormatCtx = avformat_alloc_context();

	if(avformat_open_input(&pFormatCtx,filepath,NULL,NULL)!=0){
		printf("Couldn't open input stream.\n");
		return -1;
	}
	if(avformat_find_stream_info(pFormatCtx,NULL)<0){
		printf("Couldn't find stream information.\n");
		return -1;
	}
	videoindex=-1;
	for(i=0; i<pFormatCtx->nb_streams; i++) 
		if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO){
			videoindex=i;
			break;
		}
	if(videoindex==-1){
		printf("Didn't find a video stream.\n");
		return -1;
	}

	pCodecCtx=pFormatCtx->streams[videoindex]->codec;
	pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
	if(pCodec==NULL){
		printf("Codec not found.\n");
		return -1;
	}
	if(avcodec_open2(pCodecCtx, pCodec,NULL)<0){
		printf("Could not open codec.\n");
		return -1;
	}
	
	pFrame=av_frame_alloc();
	pFrameYUV=av_frame_alloc();
	//AV_PIX_FMT_YUV420P
	out_buffer=(uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
	avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
	//out_buffer=(uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
	//avpicture_fill((AVPicture *)pFrameYUV, out_buffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
	packet=(AVPacket *)av_malloc(sizeof(AVPacket));
	//Output Info-----------------------------
	printf("--------------- File Information ----------------\n");
	av_dump_format(pFormatCtx,0,filepath,0);
	printf("-------------------------------------------------\n");
	//img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, 
	//	pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL); 
	img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, 
		pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL); 

#if OUTPUT_YUV420P 
    fp_yuv=fopen("output.yuv","wb+");  
#endif  
	
	if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {  
		printf( "Could not initialize SDL - %s\n", SDL_GetError()); 
		return -1;
	} 

	screen_w = pCodecCtx->width;
	screen_h = pCodecCtx->height;
	//SDL 2.0 Support for multiple windows
	screen = SDL_CreateWindow("Simplest ffmpeg player's Window", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
		screen_w, screen_h,
		SDL_WINDOW_OPENGL);

	if(!screen) {  
		printf("SDL: could not create window - exiting:%s\n",SDL_GetError());  
		return -1;
	}

	sdlRenderer = SDL_CreateRenderer(screen, -1, 0);  
	//IYUV: Y + U + V  (3 planes)
	//YV12: Y + V + U  (3 planes)
	sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING,pCodecCtx->width,pCodecCtx->height);  

	sdlRect.x=0;
	sdlRect.y=0;
	sdlRect.w=screen_w;
	sdlRect.h=screen_h;

	//SDL End----------------------
	while(av_read_frame(pFormatCtx, packet)>=0){
		if(packet->stream_index==videoindex){
			ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
			if(ret < 0){
				printf("Decode Error.\n");
				return -1;
			}
			if(got_picture)
			{
				sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, 
					pFrameYUV->data, pFrameYUV->linesize);
				
#if OUTPUT_YUV420P
				y_size=pCodecCtx->width*pCodecCtx->height;  
				fwrite(pFrameYUV->data[0],1,y_size,fp_yuv);    //Y 
				fwrite(pFrameYUV->data[1],1,y_size/4,fp_yuv);  //U
				fwrite(pFrameYUV->data[2],1,y_size/4,fp_yuv);  //V
#endif
				//SDL---------------------------
#if 0
				SDL_UpdateTexture( sdlTexture, NULL, pFrameYUV->data[0], pFrameYUV->linesize[0] );  
#else
				SDL_UpdateYUVTexture(sdlTexture, &sdlRect,
				pFrameYUV->data[0], pFrameYUV->linesize[0],
				pFrameYUV->data[1], pFrameYUV->linesize[1],
				pFrameYUV->data[2], pFrameYUV->linesize[2]);
#endif	
				
				SDL_RenderClear( sdlRenderer );  
				SDL_RenderCopy( sdlRenderer, sdlTexture,  NULL, &sdlRect);  
				SDL_RenderPresent( sdlRenderer );  
				//SDL End-----------------------
				//Delay 40ms
				SDL_Delay(40);
			}
		}
		av_free_packet(packet);
	}
	// Flush the decoder: drain the frames remaining in the codec
	while (1) {
		ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
		if (ret < 0)
			break;
		if (!got_picture)
			break;
		sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, 
			pFrameYUV->data, pFrameYUV->linesize);
#if OUTPUT_YUV420P
		int y_size=pCodecCtx->width*pCodecCtx->height;  
		fwrite(pFrameYUV->data[0],1,y_size,fp_yuv);    //Y 
		fwrite(pFrameYUV->data[1],1,y_size/4,fp_yuv);  //U
		fwrite(pFrameYUV->data[2],1,y_size/4,fp_yuv);  //V
#endif
		//SDL---------------------------
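		// SDL_UpdateTexture also works for a YUV texture when the three
		// planes sit contiguously in memory, as they do in out_buffer here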
		SDL_UpdateTexture( sdlTexture, &sdlRect, pFrameYUV->data[0], pFrameYUV->linesize[0] );  
		SDL_RenderClear( sdlRenderer );  
		SDL_RenderCopy( sdlRenderer, sdlTexture,  NULL, &sdlRect);  
		SDL_RenderPresent( sdlRenderer );  
		//SDL End-----------------------
		//Delay 40ms
		SDL_Delay(40);
	}

	sws_freeContext(img_convert_ctx);

#if OUTPUT_YUV420P 
    fclose(fp_yuv);
#endif 

	SDL_Quit();

	av_frame_free(&pFrameYUV);
	av_frame_free(&pFrame);
	avcodec_close(pCodecCtx);
	avformat_close_input(&pFormatCtx);

	return 0;
}
Example #16
int main(int argc, char *argv[]) {
    AVFormatContext *pFormatCtx = NULL;
    int             i, videoStream;
    AVCodecContext  *pCodecCtx = NULL;
    AVCodecParameters       *pCodecParam = NULL;
    AVCodec         *pCodec = NULL;
    AVFrame         *pFrame = NULL;
    AVPacket        packet;
    int             send_packet, receive_frame;
    //float           aspect_ratio;
    AVFrame        *pict;
    /*
    std::unique_ptr<AVFrame, std::function<void(AVFrame*)>> frame_converted{
        av_frame_alloc(),
        [](AVFrame* f){ av_free(f->data[0]); } };
    if (av_frame_copy_props(frame_converted.get(),
        frame_decoded.get()) < 0) {
        throw std::runtime_error("Copying frame properties");
    }
    if (av_image_alloc(
        frame_converted->data, frame_converted->linesize,
        video_decoder_->width(), video_decoder_->height(),
        video_decoder_->pixel_format(), 1) < 0) {
        throw std::runtime_error("Allocating picture");
    }
    */
    AVDictionary    *optionsDict = NULL;
    struct SwsContext *sws_ctx = NULL;

    SDL_Texture*    pTexture = nullptr;
    SDL_Window*     pWindows = nullptr;
    SDL_Renderer*   pRenderer = nullptr;

    SDL_Event       event;

    if (argc < 2) {
        fprintf(stderr, "Usage: test <file>\n");
        exit(1);
    }
    // Register all formats and codecs
    av_register_all();

    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        exit(1);
    }

    // Open video file
    if (avformat_open_input(&pFormatCtx, argv[1], NULL, NULL) != 0)
        return -1; // Couldn't open file

    // Retrieve stream information
    if (avformat_find_stream_info(pFormatCtx, NULL)<0)
        return -1; // Couldn't find stream information

    // Dump information about file onto standard error
    av_dump_format(pFormatCtx, 0, argv[1], 0);

    // Find the first video stream
    videoStream = -1;
    for (i = 0; i<pFormatCtx->nb_streams; i++)
        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    if (videoStream == -1)
        return -1; // Didn't find a video stream

    // Get a pointer to the codec context for the video stream
    // AVCodecContext *codec is deprecated, so use the codecpar struct (AVCodecParameters) instead.
    pCodecParam = pFormatCtx->streams[videoStream]->codecpar;
    // avcodec_open2() still needs an AVCodecContext, so pCodecParam is copied into pCodecCtx below.
    pCodec = avcodec_find_decoder(pCodecParam->codec_id);
    // Find the decoder for the video stream
    if (pCodec == NULL) {
        fprintf(stderr, "Unsupported codec!\n");
        return -1; // Codec not found
    }
    pCodecCtx = avcodec_alloc_context3(pCodec);
    avcodec_parameters_to_context(pCodecCtx, pCodecParam);

    // Open codec
    if (avcodec_open2(pCodecCtx, pCodec, &optionsDict)<0)
        return -1; // Could not open codec

    // Allocate video frame
    pFrame = av_frame_alloc();

    // Make a screen to put our video
#ifndef __DARWIN__
    pWindows = SDL_CreateWindow(argv[1],SDL_WINDOWPOS_CENTERED,SDL_WINDOWPOS_CENTERED,pCodecParam->width, pCodecParam->height,SDL_WINDOW_BORDERLESS|SDL_WINDOW_RESIZABLE);
#else
    screen = SDL_SetVideoMode(pCodecParam->width, pCodecParam->height, 24, 0);
#endif
    if (!pWindows) {
        fprintf(stderr, "SDL: could not set video mode - exiting\n");
        exit(1);
    }
    
    // Allocate a place to put our YUV image on that screen
    pRenderer = SDL_CreateRenderer(pWindows, -1, 0);
    if (!pRenderer) {
        fprintf(stderr, "SDL: could not create renderer - exiting\n");
        exit(1);
    }
    pTexture = SDL_CreateTexture(pRenderer, SDL_PIXELFORMAT_YV12, SDL_TEXTUREACCESS_STREAMING, pCodecParam->width, pCodecParam->height);
    sws_ctx =
        sws_getContext
        (
        pCodecParam->width,
        pCodecParam->height,
        (AVPixelFormat)pCodecParam->format,
        pCodecParam->width,
        pCodecParam->height,
        AV_PIX_FMT_YUV420P,
        SWS_BILINEAR,
        NULL,
        NULL,
        NULL
        );
    pict = av_frame_alloc();
    if (pict == nullptr){
        exit(1);
    }
    // Allocate the destination picture in the format sws_scale writes
    // (YUV420P), not the source format
    if (av_image_alloc(pict->data, pict->linesize,
        pCodecParam->width, pCodecParam->height,
        AV_PIX_FMT_YUV420P, 1) < 0){
        exit(1);
    }


    // Read packets and render decoded frames
    i = 0;
    while (av_read_frame(pFormatCtx, &packet) >= 0) {
        // Is this a packet from the video stream?
        if (packet.stream_index == videoStream) {
            // Decode video frame
            // avcodec_decode_video2 is deprecated; use avcodec_send_packet() and avcodec_receive_frame()
            send_packet = avcodec_send_packet(pCodecCtx, &packet);
            receive_frame = avcodec_receive_frame(pCodecCtx, pFrame);

            // Did we get a video frame?
            if (send_packet == SEND_PACKET_SUCCESS && receive_frame == RECEIVE_FRAME_SUCCESS) {
                //SDL_LockYUVOverlay(bmp);
                //SDL_LockTexture(pTexture,NULL,);
                // Carry the frame metadata over to the converted picture,
                // then convert the image into the YUV format that SDL uses
                if (av_frame_copy_props(pict, pFrame) < 0) {
                    exit(1);
                }

                sws_scale
                    (
                    sws_ctx,
                    pFrame->data,
                    pFrame->linesize,
                    0,
                    pCodecParam->height,
                    pict->data,
                    pict->linesize
                    );
                
                //SDL_UnlockYUVOverlay(bmp);
                SDL_UpdateYUVTexture(pTexture, NULL, pict->data[0], pict->linesize[0], pict->data[1], pict->linesize[1], pict->data[2], pict->linesize[2]);
                SDL_RenderCopy(pRenderer, pTexture, NULL, NULL);
                SDL_RenderPresent(pRenderer);

            }
        }

        // Free the packet that was allocated by av_read_frame
        av_packet_unref(&packet);
        SDL_PollEvent(&event);
        switch (event.type) {
        case SDL_QUIT:
            SDL_DestroyRenderer(pRenderer);
            SDL_DestroyTexture(pTexture);
            SDL_DestroyWindow(pWindows);
            SDL_Quit();
            exit(0);
            break;
        default:
            break;
        }

    }

    // Free the YUV frame
    av_frame_free(&pFrame);
    //free pict
    av_freep(&pict->data[0]);
    av_frame_free(&pict);

    // Close the codec
    avcodec_close(pCodecCtx);

    // Close the video file
    avformat_close_input(&pFormatCtx);

    return 0;
}
Example #17
int main() {

    FILE *fp = fopen("a.yuv","rb");

    int width, height;

    if(!fp) {
        perror("No File");
        return 1;
    }

    width = 640;
    height = 480;

    printf(" Width %d Height %d \n",width,height);

    unsigned char *y = new unsigned char[width * height];
    int usize = width * height / 4;

    unsigned char *u = new unsigned char[usize];
    unsigned char *v = new unsigned char[usize];

    // Read one planar frame: the Y plane, then the U and V planes
    // (assumes a.yuv holds planar I420 data)
    int k = fread(y, 1, width * height, fp);
    printf(" %x %x %d\n", y[0], y[1], k);
    fread(u, 1, usize, fp);
    fread(v, 1, usize, fp);


    // init SDL
    SDL_Surface *screen; // even with SDL2, we can still bring ancient code back
    SDL_Window *window;
    SDL_Surface *image;

    SDL_Renderer *renderer;
    SDL_Texture *texture;

    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        exit(1);
    }
    // create the window like normal
    window = SDL_CreateWindow("YUV Display", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, width, height, 0);

    // but instead of creating a renderer, we can draw directly to the screen
    //screen = SDL_GetWindowSurface(window);

    renderer = SDL_CreateRenderer(window, -1, 0);
    if (!renderer) {
        fprintf(stderr, "SDL: could not create renderer - exiting\n");
        exit(1);
    }

    // Allocate a place to put our YUV image on that screen
    texture = SDL_CreateTexture(
                  renderer,
                  SDL_PIXELFORMAT_IYUV,
                  SDL_TEXTUREACCESS_STREAMING,
                  width,
                  height
              );
    if (!texture) {
        fprintf(stderr, "SDL: could not create texture - exiting\n");
        exit(1);
    }
    // Window, renderer and texture are ready; upload the YUV planes

    SDL_UpdateYUVTexture(
        texture,
        NULL,
        (unsigned char *)y,
        width,
        u,
        width/2,
        v,
        width/2
    );

    SDL_RenderClear(renderer);
    SDL_RenderCopy(renderer, texture, NULL, NULL);
    SDL_RenderPresent(renderer);


    SDL_Delay(8000);
    SDL_DestroyWindow(window);
    SDL_Quit();


    return 0;
}
Example #18
int main(int argc, char* argv[]) {
	printf("Play simple video\n");
	if(argc < 2) {
		printf("Missing input video file\n");
		return -1;
	}
	int ret = -1, i = -1, v_stream_idx = -1;
	char* vf_path = argv[1];
	// Note: fmt_ctx must be initialized to NULL before avformat_open_input
	AVFormatContext* fmt_ctx = NULL;
	AVCodecContext* codec_ctx = NULL;
	AVCodec* codec;
	AVFrame * frame;
	AVPacket packet;

	av_register_all();
	ret = avformat_open_input(&fmt_ctx, vf_path, NULL, NULL);
	if(ret < 0){
		printf("Failed to open video file %s\n", vf_path);
		goto end;
	}
	if(avformat_find_stream_info(fmt_ctx, NULL)<0)
    	goto end;
    av_dump_format(fmt_ctx, 0, vf_path, 0);
    for(i = 0; i< fmt_ctx->nb_streams; i++) {
    	if(fmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
    		v_stream_idx = i;
    		break;
    	}
    }
    if(v_stream_idx == -1) {
		printf("Cannot find video stream\n");
		goto end;
	}

	codec_ctx = avcodec_alloc_context3(NULL);
	avcodec_parameters_to_context(codec_ctx, fmt_ctx->streams[v_stream_idx]->codecpar);
	codec = avcodec_find_decoder(codec_ctx->codec_id);
	if(codec == NULL){
		printf("Unsupported codec for video file\n");
		goto end;
	}
	if(avcodec_open2(codec_ctx, codec, NULL) < 0){
		printf("Can not open codec\n");
		goto end;
	}

    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
    	printf("Could not init SDL due to %s", SDL_GetError());
    	goto end;
    }
    SDL_Window *window;
    SDL_Renderer *renderer;
    SDL_Texture *texture;
    SDL_Event event;
    SDL_Rect r;
    window = SDL_CreateWindow("SDL_CreateTexture", SDL_WINDOWPOS_UNDEFINED,
    	SDL_WINDOWPOS_UNDEFINED, codec_ctx->width, codec_ctx->height,
    	SDL_WINDOW_RESIZABLE);
    r.x = 0;
    r.y = 0;
    r.w = codec_ctx->width;
    r.h = codec_ctx->height;

    renderer = SDL_CreateRenderer(window, -1, 0);
    // texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_RGBA8888, SDL_TEXTUREACCESS_TARGET,
    // 	codec_ctx->width, codec_ctx->height);
    texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YV12, SDL_TEXTUREACCESS_STREAMING,
    	codec_ctx->width, codec_ctx->height);

    struct SwsContext      *sws_ctx = NULL;
    sws_ctx = sws_getContext(codec_ctx->width, codec_ctx->height, codec_ctx->pix_fmt,
    	codec_ctx->width, codec_ctx->height, AV_PIX_FMT_YUV420P, SWS_BILINEAR, NULL, NULL, NULL);

    frame = av_frame_alloc();

    int ret1, ret2;
    AVFrame* pict;
    pict = av_frame_alloc();


	int             numBytes;
	uint8_t         *buffer = NULL;
  	numBytes=avpicture_get_size(AV_PIX_FMT_YUV420P, codec_ctx->width,
			      codec_ctx->height);
  	buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
    // required, or bad dst image pointers
	avpicture_fill((AVPicture *)pict, buffer, AV_PIX_FMT_YUV420P,
		 codec_ctx->width, codec_ctx->height);
    i = 0;
	while (1) {
        SDL_PollEvent(&event);
        if(event.type == SDL_QUIT)
                break;
        ret = av_read_frame(fmt_ctx, &packet);
        if(ret < 0){
        	break; // end of stream or read error; stop the playback loop
        }
        if(packet.stream_index == v_stream_idx) {
			ret1 = avcodec_send_packet(codec_ctx, &packet);
			ret2 = avcodec_receive_frame(codec_ctx, frame);
			if(ret2 < 0 ){
				continue;
	    	}
	    	sws_scale(sws_ctx, (uint8_t const * const *)frame->data,
			      frame->linesize, 0, codec_ctx->height,
			      pict->data, pict->linesize);
	   //  	if(++i <=5 ){
				// save_frame(pict, codec_ctx->width, codec_ctx->height, i);
	   //  	}
	        SDL_UpdateYUVTexture(texture, &r, pict->data[0], pict->linesize[0],
	        	pict->data[1], pict->linesize[1],
	        	pict->data[2], pict->linesize[2]);
	        // SDL_UpdateTexture(texture, &r, pict->data[0], pict->linesize[0]);

	        // r.x=rand()%500;
	        // r.y=rand()%500;

	        // SDL_SetRenderTarget(renderer, texture);
	        // SDL_SetRenderDrawColor(renderer, 0x00, 0x00, 0x00, 0x00);
	        SDL_RenderClear(renderer);
	        // SDL_RenderDrawRect(renderer,&r);
	        // SDL_SetRenderDrawColor(renderer, 0xFF, 0x00, 0x00, 0x00);
	        // SDL_RenderFillRect(renderer, &r);
	        // SDL_SetRenderTarget(renderer, NULL);
	        SDL_RenderCopy(renderer, texture, NULL, NULL);
	        // SDL_RenderCopy(renderer, texture, &r, &r);
	        SDL_RenderPresent(renderer);
	        // SDL_Delay(50);
        }
        av_packet_unref(&packet);
    }

    SDL_DestroyRenderer(renderer);
    SDL_Quit();
	av_frame_free(&frame);
	avcodec_close(codec_ctx);
	avcodec_free_context(&codec_ctx);
    end:
	avformat_close_input(&fmt_ctx);
	printf("Shutdown\n");
	return 0;
}
Example #19
static int consumer_play_video( consumer_sdl self, mlt_frame frame )
{
	// Get the properties of this consumer
	mlt_properties properties = self->properties;

//	mlt_image_format vfmt = mlt_properties_get_int( properties, "mlt_image_format" );
	mlt_image_format vfmt = mlt_image_yuv422;
	int width = self->width, height = self->height;
	uint8_t *image;

	int video_off = mlt_properties_get_int( properties, "video_off" );
	int preview_off = mlt_properties_get_int( properties, "preview_off" );
	int display_off = video_off | preview_off;

	if ( self->running && !display_off )
	{
		// Get the image, width and height
		mlt_frame_get_image( frame, &image, &vfmt, &width, &height, 0 );

		if ( self->running )
		{
			// Determine window's new display aspect ratio
			int x = mlt_properties_get_int( properties, "window_width" );
			if ( x && x != self->window_width )
				self->window_width = x;
			x = mlt_properties_get_int( properties, "window_height" );
			if ( x && x != self->window_height )
				self->window_height = x;
			double this_aspect = ( double )self->window_width / self->window_height;

			// Get the display aspect ratio
			double display_ratio = mlt_properties_get_double( properties, "display_ratio" );

			// Determine frame's display aspect ratio
			double frame_aspect = mlt_frame_get_aspect_ratio( frame ) * width / height;

			// Store the width and height received
			self->width = width;
			self->height = height;

			// If using hardware scaler
			if ( mlt_properties_get( properties, "rescale" ) != NULL &&
				!strcmp( mlt_properties_get( properties, "rescale" ), "none" ) )
			{
				// Use hardware scaler to normalise display aspect ratio
				self->sdl_rect.w = frame_aspect / this_aspect * self->window_width;
				self->sdl_rect.h = self->window_height;
				if ( self->sdl_rect.w > self->window_width )
				{
					self->sdl_rect.w = self->window_width;
					self->sdl_rect.h = this_aspect / frame_aspect * self->window_height;
				}
			}
			// Special case optimisation to negate odd effect of sample aspect ratio
			// not corresponding exactly with image resolution.
			else if ( (int)( this_aspect * 1000 ) == (int)( display_ratio * 1000 ) ) 
			{
				self->sdl_rect.w = self->window_width;
				self->sdl_rect.h = self->window_height;
			}
			// Use hardware scaler to normalise sample aspect ratio
			else if ( self->window_height * display_ratio > self->window_width )
			{
				self->sdl_rect.w = self->window_width;
				self->sdl_rect.h = self->window_width / display_ratio;
			}
			else
			{
				self->sdl_rect.w = self->window_height * display_ratio;
				self->sdl_rect.h = self->window_height;
			}
			
			self->sdl_rect.x = ( self->window_width - self->sdl_rect.w ) / 2;
			self->sdl_rect.y = ( self->window_height - self->sdl_rect.h ) / 2;
			self->sdl_rect.x -= self->sdl_rect.x % 2;

			mlt_properties_set_int( self->properties, "rect_x", self->sdl_rect.x );
			mlt_properties_set_int( self->properties, "rect_y", self->sdl_rect.y );
			mlt_properties_set_int( self->properties, "rect_w", self->sdl_rect.w );
			mlt_properties_set_int( self->properties, "rect_h", self->sdl_rect.h );
		}

		if ( self->running && image )
		{
			unsigned char* planes[4];
			int strides[4];

			mlt_image_format_planes( vfmt, width, height, image, planes, strides );
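			// Planar formats report a non-zero chroma stride; packed formats
			// (e.g. yuv422) take the single-plane update path below.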
			if ( strides[1] ) {
				SDL_UpdateYUVTexture( self->sdl_texture, NULL,
					planes[0], strides[0],
					planes[1], strides[1],
					planes[2], strides[2] );
			} else {
				SDL_UpdateTexture( self->sdl_texture, NULL, planes[0], strides[0] );
			}
			SDL_RenderClear( self->sdl_renderer );
			SDL_RenderCopy( self->sdl_renderer, self->sdl_texture, NULL, &self->sdl_rect );
			SDL_RenderPresent( self->sdl_renderer );
		}

		mlt_events_fire( properties, "consumer-frame-show", frame, NULL );
	}
	else if ( self->running )
	{
		if ( !video_off ) {
			mlt_image_format preview_format = mlt_properties_get_int( properties, "preview_format" );
			vfmt = preview_format == mlt_image_none ? mlt_image_rgb24a : preview_format;
			mlt_frame_get_image( frame, &image, &vfmt, &width, &height, 0 );
		}
		mlt_events_fire( properties, "consumer-frame-show", frame, NULL );
	}

	return 0;
}
Example #20
void * thread_routine(void *arg)
{
	struct mypara *recv_para = (struct mypara *)arg;  // received parameter block
	AVFormatContext	*pFormatCtx;
	int				i, videoindex;
	AVCodecContext	*pCodecCtx;
	AVCodec			*pCodec;
	AVFrame	*pFrame, *pFrameYUV;
	unsigned char *out_buffer;
	AVPacket *packet;
	int y_size;
	int ret, got_picture;
	struct SwsContext *img_convert_ctx;

	//char filepath[]="bigbuckbunny_480x272.h265";
	char filepath[] = "rtsp://192.168.131.4/0";
	//SDL---------------------------
	int screen_w = 0, screen_h = 0;
	SDL_Window *screen;
	SDL_Renderer* sdlRenderer;
	SDL_Texture* sdlTexture;
	SDL_Rect sdlRect, sdlRect_tmp;

	FILE *fp_yuv;

	//av_register_all();
	//avformat_network_init();
	pFormatCtx = avformat_alloc_context();

	if (avformat_open_input(&pFormatCtx, filepath, NULL, NULL) != 0){
		printf("Couldn't open input stream.\n");
		return NULL; // the thread routine returns void *
	}
	if (avformat_find_stream_info(pFormatCtx, NULL) < 0){
		printf("Couldn't find stream information.\n");
		return NULL;
	}
	videoindex = -1;
	for (i = 0; i < pFormatCtx->nb_streams; i++)
		if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO){
			videoindex = i;
			break;
		}
	if (videoindex == -1){
		printf("Didn't find a video stream.\n");
		return NULL;
	}

	pCodecCtx = pFormatCtx->streams[videoindex]->codec;
	pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
	if (pCodec == NULL){
		printf("Codec not found.\n");
		return NULL;
	}
	//pthread_mutex_lock(&mutex);
	if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0){
		printf("Could not open codec.\n");
		return NULL;
	}
	//pthread_mutex_unlock(&mutex);

	pFrame = av_frame_alloc();
	pFrameYUV = av_frame_alloc();
	out_buffer = (unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1));
	av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, out_buffer,
		AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);

	packet = (AVPacket *)av_malloc(sizeof(AVPacket));
	//Output Info-----------------------------
	printf("--------------- File Information ----------------\n");
	av_dump_format(pFormatCtx, 0, filepath, 0);
	printf("-------------------------------------------------\n");
	img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
		pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);

#if OUTPUT_YUV420P 
	fp_yuv = fopen("output.yuv", "wb+");
#endif  

	//if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {  
	//	printf( "Could not initialize SDL - %s\n", SDL_GetError()); 
	//	return -1;
	//} 

	screen_w = pCodecCtx->width;
	screen_h = pCodecCtx->height;
	//SDL 2.0 Support for multiple windows
	//screen = SDL_CreateWindow("Simplest ffmpeg player's Window", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
	//	screen_w*2, screen_h,
	//	SDL_WINDOW_OPENGL);

	screen = (*recv_para).screen; //get the screen
	if (!screen) {
		printf("SDL: could not create window - exiting:%s\n", SDL_GetError());
		return NULL;
	}

	//sdlRenderer = SDL_CreateRenderer(screen, -1, 0);  
	sdlRenderer = (*recv_para).sdlRenderer;//get the sdlRenderer
	//IYUV: Y + U + V  (3 planes)
	//YV12: Y + V + U  (3 planes)
	pthread_mutex_lock(&mutex);
	sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);
	pthread_mutex_unlock(&mutex);


	//temp sdlRect for render copy
	sdlRect_tmp.x = 0;
	sdlRect_tmp.y = 0;
	sdlRect_tmp.w = screen_w;
	sdlRect_tmp.h = screen_h;

	//four rect in one line
	// total 4*4 = 16 rect
	sdlRect.x = 0 + screen_w / 2 * ((*recv_para).id % 4);
	sdlRect.y = 0 + screen_h / 2 * ((*recv_para).id / 4);
	sdlRect.w = screen_w / 2;
	sdlRect.h = screen_h / 2;


	//SDL End----------------------
	while (thread_exit && av_read_frame(pFormatCtx, packet) >= 0){
		if (packet->stream_index == videoindex){
			ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
			if (ret < 0){
				printf("Decode Error.\n");
				return NULL;
			}
			if (got_picture){
				//printf("id:%d\n",(*recv_para).id); // print the thread id
				//printf("x_pos:%d   y_pos:%d\n",sdlRect.x,sdlRect.y); //print rect position
				sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
					pFrameYUV->data, pFrameYUV->linesize);

#if OUTPUT_YUV420P
				y_size = pCodecCtx->width*pCodecCtx->height;
				fwrite(pFrameYUV->data[0], 1, y_size, fp_yuv);    //Y 
				fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_yuv);  //U
				fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_yuv);  //V
#endif
				//SDL---------------------------
#if 0
				SDL_UpdateTexture(sdlTexture, NULL, pFrameYUV->data[0], pFrameYUV->linesize[0]);
#else
				pthread_mutex_lock(&mutex);  //mutex or SEGFAULT
				SDL_UpdateYUVTexture(sdlTexture, &sdlRect_tmp,//sdl tmp
					pFrameYUV->data[0], pFrameYUV->linesize[0],
					pFrameYUV->data[1], pFrameYUV->linesize[1],
					pFrameYUV->data[2], pFrameYUV->linesize[2]);
#endif	
				//SDL_RenderClear( sdlRenderer );  
				SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
				//SDL_RenderCopy( sdlRenderer, sdlTexture,  NULL, &sdlRect1);  
				SDL_RenderPresent(sdlRenderer);
				pthread_mutex_unlock(&mutex);
				//SDL End-----------------------
				//Delay 40ms
				//SDL_Delay(40);
			}
		}
		av_free_packet(packet);
	}
	// Flush the decoder: drain the frames remaining in the codec
	while (1) {
		ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
		if (ret < 0)
			break;
		if (!got_picture)
			break;
		sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
			pFrameYUV->data, pFrameYUV->linesize);
#if OUTPUT_YUV420P
		int y_size = pCodecCtx->width*pCodecCtx->height;
		fwrite(pFrameYUV->data[0], 1, y_size, fp_yuv);    //Y 
		fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_yuv);  //U
		fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_yuv);  //V
#endif
		//SDL---------------------------

		SDL_UpdateTexture(sdlTexture, &sdlRect, pFrameYUV->data[0], pFrameYUV->linesize[0]);
		SDL_RenderClear(sdlRenderer);
		SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
		SDL_RenderPresent(sdlRenderer);
		//SDL End-----------------------
		//Delay 40ms
		//SDL_Delay(40);
	}

	sws_freeContext(img_convert_ctx);

#if OUTPUT_YUV420P 
	fclose(fp_yuv);
#endif 

	SDL_RenderClear(sdlRenderer);
	SDL_Quit();

	av_frame_free(&pFrameYUV);
	av_frame_free(&pFrame);
	avcodec_close(pCodecCtx);
	avformat_close_input(&pFormatCtx);
	return NULL;
}