// Initialize the SDL render target: bind SDL to the host window via the
// SDL_WINDOWID environment variable (or go fullscreen when no drawable is
// given), then create the display surface and a YV12 overlay sized to the
// window.  Returns Video_Render_Err_None on success.
EC_U32 SDL_VideoRender::Init(MediaCtxInfo* pMediaInfo, EC_U32 nScreenWidth,
                             EC_U32 nScreenHight, EC_VOIDP pDrawable)
{
    if (EC_NULL == pMediaInfo) return EC_Err_BadParam;

    AVCodecContext *pCodecCtx = (AVCodecContext*)(pMediaInfo->m_pVideoCodecInfo);
    if ((0 == nScreenWidth) || (0 == nScreenHight))
    {
        // No explicit window size given: default to the native video size.
        nScreenWidth = pCodecCtx->width;
        nScreenHight = pCodecCtx->height;
    }

    m_pDrawable = pDrawable;
    char pEnvStr[256] = { 0 };
    // BUG FIX: "%lx" truncates 64-bit pointers on Win64, where long is 32
    // bits; "%p" prints the full pointer value.  The explicit "0x" prefix
    // keeps the string parseable by SDL's strtol(..., NULL, 0) when it
    // reads SDL_WINDOWID.
    sprintf_s(pEnvStr, 255, "SDL_WINDOWID=0x%p", pDrawable);
    SDL_putenv(pEnvStr);

    if (SDL_Init(SDL_INIT_VIDEO) < 0)
        return Video_Render_Err_InitFail;

    m_nWindowWidth  = nScreenWidth;
    m_nWindowHeight = nScreenHight;
    m_nVideoWidth   = pCodecCtx->width;
    m_nVideoHeight  = pCodecCtx->height;
    m_nScreenWidth  = GetSystemMetrics(SM_CXSCREEN);
    m_nScreenHeight = GetSystemMetrics(SM_CYSCREEN);

    // Without an embedding drawable, render fullscreen.
    EC_U32 nFlag = 0;
    if (m_pDrawable == EC_NULL) nFlag = SDL_FULLSCREEN;
    m_pScreen = SDL_SetVideoMode(m_nScreenWidth, m_nScreenHeight, 0, nFlag);
    if (EC_NULL == m_pScreen) return Video_Render_Err_CreatWindowFail;

    m_pOverlay = SDL_CreateYUVOverlay(m_nWindowWidth, m_nWindowHeight,
                                      SDL_YV12_OVERLAY, m_pScreen);
    if (EC_NULL == m_pOverlay) return Video_Render_Err_CreatRenderFail;

    SetVideoRect(&m_sSDLRect);
    return Video_Render_Err_None;
}
// Apply a new output window size and/or embedding drawable: recreate the
// SDL display surface when the drawable changed, then rebuild the YV12
// overlay for the new window size and repaint the borders.
EC_VOID SDL_VideoRender::UpdateVideoScreen(MediaEngVideoScreen *pVideoScreen)
{
    m_nWindowWidth  = pVideoScreen->nWidth;
    m_nWindowHeight = pVideoScreen->nHeight;

    EC_VOIDP pDrawable = pVideoScreen->pScreen;
    if (pDrawable != m_pDrawable)
    {
        // BUG FIX: decide fullscreen from the NEW drawable, not the stale
        // member (previously m_pDrawable was tested before being updated).
        EC_U32 nFlag = 0;
        if (pDrawable == EC_NULL) nFlag = SDL_FULLSCREEN;
        // BUG FIX: the surface returned by SDL_SetVideoMode() is owned by
        // SDL and freed by SDL_Quit(); calling SDL_FreeSurface() on it is
        // invalid.  SDL_SetVideoMode() itself replaces the current display
        // surface.
        m_pScreen = SDL_SetVideoMode(m_nScreenWidth, m_nScreenHeight, 0, nFlag);
        // BUG FIX: remember the new target so the next call compares
        // against the current drawable.
        m_pDrawable = pDrawable;
    }

    SDL_FreeYUVOverlay(m_pOverlay);
    m_pOverlay = SDL_CreateYUVOverlay(m_nWindowWidth, m_nWindowHeight,
                                      SDL_YV12_OVERLAY, m_pScreen);
    SetVideoRect(&m_sSDLRect);
    EraseVideoRim();
}
/* (Re)allocate the YUV overlay for the picture queue's current write slot.
 * Invoked via a userdata callback so overlay creation happens on the thread
 * that owns the SDL video surface. */
void alloc_picture(void *userdata)
{
    VideoState *is = (VideoState *) userdata;
    VideoPicture *vp;
    vp = &is->pictq[is->pictq_windex];
    if (vp->bmp)
    {
        // we already have one make another, bigger/smaller
        SDL_FreeYUVOverlay(vp->bmp);
    }
    // Allocate a place to put our YUV image on that screen
    /* NOTE(review): SDL_CreateYUVOverlay() can return NULL, yet
     * vp->allocated is set unconditionally below — confirm consumers check
     * vp->bmp before use. */
    vp->bmp = SDL_CreateYUVOverlay(is->video_st->codec->width,
                                   is->video_st->codec->height,
                                   SDL_YV12_OVERLAY, screen[is->videoIndex]);
    vp->width = is->video_st->codec->width;
    vp->height = is->video_st->codec->height;
    /* Publish the allocation under the queue mutex and wake any thread
     * waiting on pictq_cond. */
    SDL_LockMutex(is->pictq_mutex);
    vp->allocated = 1;
    SDL_CondSignal(is->pictq_cond);
    SDL_UnlockMutex(is->pictq_mutex);
}
/**
 * sdlview_configure: Configure this instance of the module. See
 * tcmodule-data.h for function details.
 */
static int sdlview_configure(TCModuleInstance *self,
                             const char *options, vob_t *vob)
{
    SDLPrivateData *pd = NULL;
    int err;

    TC_MODULE_SELF_CHECK(self, "configure");

    pd = self->userdata;

    /* Pick the colourspace conversion first; it logs its own failures. */
    err = configure_colorspace(pd, vob->im_v_codec, verbose);
    if (err != TC_OK)
        return err;

    /* Preview window tracks the export frame geometry. */
    pd->w = vob->ex_v_width;
    pd->h = vob->ex_v_height;

    SDL_WM_SetCaption("transcode SDL preview", NULL);

    pd->surface = SDL_SetVideoMode(pd->w, pd->h, 0, SDL_HWSURFACE);
    if (pd->surface == NULL) {
        tc_log_error(MOD_NAME, "cannot setup SDL Video Mode: %s",
                     SDL_GetError());
        return TC_ERROR;
    }

    pd->overlay = SDL_CreateYUVOverlay(pd->w, pd->h,
                                       SDL_YV12_OVERLAY, pd->surface);
    if (pd->overlay == NULL) {
        tc_log_error(MOD_NAME, "cannot setup SDL YUV overlay: %s",
                     SDL_GetError());
        return TC_ERROR;
    }

    if (verbose)
        tc_log_info(MOD_NAME, "preview window: %ix%i YV12 overlay",
                    pd->w, pd->h);

    return TC_OK;
}
bool VideoFrame::init() { log::info << "Starting the SDL subsystem..." << log::endl; int result = SDL_Init(SDL_INIT_VIDEO); if (result < 0) { log::error << "Could not start SDL (error: " << result << ")" << log::endl; return false; } char driverName[128]; SDL_VideoDriverName(driverName, sizeof (driverName)); log::info << "Using Video Driver : " << driverName << log::endl; screen = SDL_SetVideoMode(width, height, 0, video_format); overlay = SDL_CreateYUVOverlay(width, height, SDL_YV12_OVERLAY, screen); SDL_WM_SetCaption(window_name.c_str(), NULL); return true; }
/* Blit one decoded planar-YUV frame (with `edge` pixels of horizontal
 * padding around the luma plane) onto the global SDL screen through the
 * global yuv_overlay.  Compiled out entirely when SDL_NO_DISPLAY is set. */
void SDL_Display(int edge, int frame_width, int frame_height,
                 unsigned char *Y, unsigned char *U, unsigned char *V)
{
#ifndef SDL_NO_DISPLAY
    // Lock SDL_yuv_overlay
    if ( SDL_MUSTLOCK(screen) ) {
        if ( SDL_LockSurface(screen) < 0 ) return;
    }
    if (SDL_LockYUVOverlay(yuv_overlay) < 0) return;

    /* Recreate screen and overlay when the frame size changed.
     * NOTE(review): the old overlay is replaced while still locked and is
     * never SDL_FreeYUVOverlay()'d — one overlay leaks per resize.
     * NOTE(review): writing screen->w / screen->clip_rect pokes SDL-owned
     * state directly; SDL_SetVideoMode() below already resizes the surface. */
    if (frame_width != screen -> w || frame_height != screen -> h){
        screen -> clip_rect . w = screen -> w = frame_width;
        screen -> clip_rect . h = screen -> h = frame_height;
        screen = SDL_SetVideoMode(frame_width, frame_height, 24,
                                  SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL);
        yuv_overlay -> w = rect . w = frame_width + 2 * edge;
        yuv_overlay -> h = rect . h = frame_height;
        yuv_overlay = SDL_CreateYUVOverlay(frame_width + 2 * edge, frame_height,
                                           SDL_YV12_OVERLAY, screen);
    }
    if ( screen == NULL ) {
        printf("SDL: Couldn't set %dx%d: %s", frame_width, frame_height, SDL_GetError());
        exit(1);
    }

    // let's draw the data (*yuv[3]) on a SDL screen (*screen)
    /* YV12 plane order is Y, V, U: V is copied into pixels[1] and U into
     * pixels[2].  NOTE(review): these memcpy()s assume the overlay pitch
     * equals frame_width + 2*edge — confirm for hardware overlays. */
    memcpy(yuv_overlay->pixels[0], Y, (frame_width + 2 * edge) * frame_height);
    memcpy(yuv_overlay->pixels[1], V, (frame_width + 2 * edge) * frame_height / 4);
    memcpy(yuv_overlay->pixels[2], U, (frame_width + 2 * edge) * frame_height / 4);

    // Unlock SDL_yuv_overlay
    if ( SDL_MUSTLOCK(screen) ) {
        SDL_UnlockSurface(screen);
    }
    SDL_UnlockYUVOverlay(yuv_overlay);

    // Show, baby, show!
    SDL_DisplayYUVOverlay(yuv_overlay, &rect);
#endif
}
/* Initialize SDL (video, audio and timer), open a fixed 1280x720 window and
 * create a YV12 overlay sized to the codec's frame.  Returns the overlay, or
 * NULL after printing the reason on any failure. */
SDL_Overlay * init_sdl_window(AVCodecContext * pCodecCtx, SDL_Overlay * bmp)
{
    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
        fprintf(stderr, "Nao foi possivel inicializar o SDL - %s\n", SDL_GetError());
        return NULL;
    }

    SDL_Surface * screen;
    screen = SDL_SetVideoMode(1280, 720, 0, 0);
    if (!screen) {
        fprintf(stderr, "SDL: Nao foi possivel configurar o modo do video\n");
        return NULL;
    }

    bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height,
                               SDL_YV12_OVERLAY, screen);
    if (!bmp) {
        /* FIX: report overlay-creation failure like the other error paths
         * instead of silently returning NULL. */
        fprintf(stderr, "SDL: Nao foi possivel criar o overlay YUV - %s\n",
                SDL_GetError());
        return NULL;
    }
    return bmp;
}
/*----------------------------------------------------------------------
|   SdlVideoOutput_CreateWindow
+---------------------------------------------------------------------*/
static BLT_Result
SdlVideoOutput_CreateWindow(SdlVideoOutput* self)
{
    /* Bring up the SDL video subsystem before any other video call. */
    if (SDL_Init(SDL_INIT_VIDEO) < 0) {
        ATX_LOG_WARNING("SdlVideoOutput_CreateWindow - cannot init SDL");
        return BLT_FAILURE;
    }

    /* Resizable 1000x600 window, 24 bpp, hardware surface if available. */
    self->screen = SDL_SetVideoMode(1000, 600, 24,
                                    SDL_HWSURFACE | SDL_RESIZABLE);
    if (!self->screen) {
        ATX_LOG_WARNING("SdlVideoOutput_CreateWindow - SDL_SetVideoMode() failed");
        return BLT_FAILURE;
    }

    /* YV12 overlay covering the whole window. */
    self->yuv_overlay = SDL_CreateYUVOverlay(1000, 600,
                                             SDL_YV12_OVERLAY, self->screen);
    if (!self->yuv_overlay) {
        ATX_LOG_WARNING("SdlVideoOutput_CreateWindow - SDL_CreateYUVOverlay() failed");
        return BLT_FAILURE;
    }

    return BLT_SUCCESS;
}
/* Create the SDL window for this output and, for YUV420P input, the YV12
 * overlay used to display it.  Failures are logged and leave the
 * corresponding pointer(s) NULL. */
static void sdl_create_window(SdlOut *obj)
{
    obj->screen = SDL_SetVideoMode(obj->size.width, obj->size.height,
                                   0, SDL_SWSURFACE);
    if (obj->screen == NULL) {
        ms_warning("Couldn't set video mode: %s\n", SDL_GetError());
        return;
    }
    if (obj->screen->flags & SDL_HWSURFACE)
        ms_message("SDL surface created in hardware");
    SDL_WM_SetCaption("Linphone Video", NULL);

    /* Only YUV420P input gets an overlay. */
    if (obj->format != MS_YUV420P)
        return;

    ms_message("Using yuv overlay.");
    obj->overlay = SDL_CreateYUVOverlay(obj->size.width, obj->size.height,
                                        SDL_YV12_OVERLAY, obj->screen);
    if (obj->overlay == NULL) {
        ms_warning("Couldn't create yuv overlay: %s\n", SDL_GetError());
        return;
    }
    if (obj->overlay->hw_overlay)
        ms_message("YUV overlay using hardware acceleration.");
}
/* Prepare the renderer for buffer transfers: read the port definition,
 * create the SDL window sized to the incoming frames, and allocate the
 * YV12 overlay used for display. */
static OMX_ERRORTYPE
sdlivr_prc_prepare_to_transfer (void * ap_obj, OMX_U32 a_pid)
{
  sdlivr_prc_t * p_prc = ap_obj;
  OMX_PARAM_PORTDEFINITIONTYPE portdef;
  TIZ_INIT_OMX_PORT_STRUCT (portdef, ARATELIA_YUV_RENDERER_PORT_INDEX);
  assert (p_prc);

  /* Retrieve port def from port */
  tiz_check_omx (tiz_api_GetParameter (tiz_get_krn (handleOf (p_prc)),
                                       handleOf (p_prc),
                                       OMX_IndexParamPortDefinition, &portdef));
  p_prc->port_def_ = portdef.format.video;

  TIZ_TRACE (
    handleOf (p_prc),
    "nFrameWidth = [%u] nFrameHeight = [%u] "
    "nStride = [%d] nSliceHeight = [%u] nBitrate = [%u] "
    "xFramerate = [%u] eCompressionFormat = [%0x] eColorFormat = [%0x]",
    p_prc->port_def_.nFrameWidth, p_prc->port_def_.nFrameHeight,
    p_prc->port_def_.nStride, p_prc->port_def_.nSliceHeight,
    p_prc->port_def_.nBitrate, p_prc->port_def_.xFramerate,
    p_prc->port_def_.eCompressionFormat, p_prc->port_def_.eColorFormat);

  SDL_WM_SetCaption ("Tizonia YUV renderer", "YUV");

  p_prc->p_surface = SDL_SetVideoMode (
    p_prc->port_def_.nFrameWidth, p_prc->port_def_.nFrameHeight, 0,
    SDL_HWSURFACE | SDL_ASYNCBLIT | SDL_HWACCEL | SDL_RESIZABLE);
  /* BUG FIX: SDL_SetVideoMode() returns NULL on failure; bail out early
     instead of handing a NULL surface to SDL_CreateYUVOverlay(). */
  if (NULL == p_prc->p_surface)
    {
      return OMX_ErrorInsufficientResources;
    }

  assert (!p_prc->p_overlay);
  p_prc->p_overlay
    = SDL_CreateYUVOverlay (p_prc->port_def_.nFrameWidth,
                            p_prc->port_def_.nFrameHeight, SDL_YV12_OVERLAY,
                            p_prc->p_surface);
  return p_prc->p_overlay ? OMX_ErrorNone : OMX_ErrorInsufficientResources;
}
/** Set format on a port */
static MMAL_STATUS_T sdl_port_set_format(MMAL_PORT_T *port)
{
   MMAL_COMPONENT_T *component = port->component;
   MMAL_COMPONENT_MODULE_T *module = component->priv->module;
   MMAL_STATUS_T status;
   uint32_t new_width, new_height;

   status = mmal_sdl_create_surface(module);
   if (status != MMAL_SUCCESS)
      return status;

   /* We only support I420 */
   if (port->format->encoding != MMAL_ENCODING_I420)
      return MMAL_ENOSYS;

   new_width = port->format->es->video.width;
   new_height = port->format->es->video.height;

   /* Keep the current overlay when the geometry is unchanged. */
   if (module->sdl_overlay
       && module->width == new_width && module->height == new_height)
      return MMAL_SUCCESS;

   if (module->sdl_overlay)
      SDL_FreeYUVOverlay(module->sdl_overlay);

   /* Create overlay */
   module->sdl_overlay = SDL_CreateYUVOverlay(new_width, new_height,
                                              SDL_YV12_OVERLAY,
                                              module->sdl_surface);
   if (!module->sdl_overlay)
   {
      LOG_ERROR("cannot create SDL overlay");
      return MMAL_ENOSPC;
   }

   module->width = new_width;
   module->height = new_height;

   /* One full planar 4:2:0 frame per buffer. */
   port->buffer_size_min = new_width * new_height * 3 / 2;
   return MMAL_SUCCESS;
}
Uint32 sdl_init(void) { /* SDL init */ if (SDL_Init(SDL_INIT_VIDEO) < 0) { fprintf(stderr, "Unable to set video mode: %s\n", SDL_GetError()); atexit(SDL_Quit); return 0; } info = SDL_GetVideoInfo(); if (!info) { fprintf(stderr, "SDL ERROR Video query failed: %s\n", SDL_GetError()); SDL_Quit(); return 0; } P.bpp = info->vfmt->BitsPerPixel; if (info->hw_available){ P.vflags = SDL_HWSURFACE; } else { P.vflags = SDL_SWSURFACE; } if ((screen = SDL_SetVideoMode(P.width, P.height, P.bpp, P.vflags)) == 0) { fprintf(stderr, "SDL ERROR Video mode set failed: %s\n", SDL_GetError()); SDL_Quit(); return 0; } my_overlay = SDL_CreateYUVOverlay(P.width, P.height, P.overlay_format, screen); if (!my_overlay) { fprintf(stderr, "Couldn't create overlay\n"); return 0; } return 1; }
/* Constructor for the Overlay type: Overlay(pixelformat, (w, h)).
 * Requires pygame.display to be initialized and a video mode to be set.
 * Returns a new reference, or NULL with an exception set. */
PyObject* Overlay_New (PyTypeObject *type, PyObject *args, PyObject *kwds)
{
    int pixelformat;
    PyGameOverlay *self;
    int w, h;
    SDL_Surface *screen;

    if (!PyArg_ParseTuple (args, "i(ii)", &pixelformat, &w, &h))
        return NULL;

    if (!SDL_WasInit (SDL_INIT_VIDEO))
        return RAISE (PyExc_SDLError,
                      "cannot create overlay without pygame.display initialized");

    screen = SDL_GetVideoSurface ();
    if (!screen)
        return RAISE (PyExc_SDLError, "Display mode not set");

    // Create new Overlay object
    self = (PyGameOverlay *)type->tp_alloc (type, 0);
    if (!self)
        return NULL;

    // Create layer with desired format
    self->cOverlay = SDL_CreateYUVOverlay (w, h, pixelformat, screen);
    if (!self->cOverlay)
    {
        /* BUG FIX: release the half-constructed object before raising;
         * previously it leaked on this path. */
        Py_DECREF ((PyObject*)self);
        return RAISE (PyExc_SDLError, "Cannot create overlay");
    }

    /* Destination rect covers the full overlay at the origin. */
    self->cRect.x = 0;
    self->cRect.y = 0;
    self->cRect.w = w;
    self->cRect.h = h;

    return (PyObject*)self;
}
/* allocate a picture (needs to do that in main thread to avoid potential locking problems */ void alloc_picture(void *userdata) { VideoState *is = (VideoState *)userdata; Frame *vp; vp = &is->pictq.queue[is->pictq.windex]; if (vp->bmp) { // we already have one make another, bigger/smaller SDL_FreeYUVOverlay(vp->bmp); vp->bmp = NULL; } // Allocate a place to put our YUV image on that screen SDL_LockMutex(screen_mutex); vp->bmp = SDL_CreateYUVOverlay(is->video_ctx->width, is->video_ctx->height, SDL_YV12_OVERLAY, screen); SDL_UnlockMutex(screen_mutex); vp->width = is->video_ctx->width; vp->height = is->video_ctx->height; SDL_LockMutex(is->pictq.mutex); vp->allocated = 1; SDL_CondSignal(is->pictq.cond); SDL_UnlockMutex(is->pictq.mutex); }
int main(int argc, char *argv[]) { AVFormatContext *pFormatCtx; int i, videoStream, audioStream; AVCodecContext *pCodecCtx; AVCodec *pCodec; AVFrame *pFrame; AVPacket packet; int frameFinished; float aspect_ratio; struct SwsContext *img_convert_ctx; AVCodecContext *aCodecCtx; AVCodec *aCodec; SDL_Overlay *bmp; SDL_Surface *screen; SDL_Rect rect; SDL_Event event; SDL_AudioSpec wanted_spec, spec; if(argc < 2) { fprintf(stderr, "Usage: test <file>\n"); exit(1); } // Register all formats and codecs av_register_all(); if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) { fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError()); exit(1); } // Open video file if(av_open_input_file(&pFormatCtx, argv[1], NULL, 0, NULL)!=0) return -1; // Couldn't open file // Retrieve stream information if(av_find_stream_info(pFormatCtx)<0) return -1; // Couldn't find stream information // Dump information about file onto standard error dump_format(pFormatCtx, 0, argv[1], 0); // Find the first video stream videoStream=-1; audioStream=-1; for(i=0; i<pFormatCtx->nb_streams; i++) { if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO && videoStream < 0) { videoStream=i; } if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_AUDIO && audioStream < 0) { audioStream=i; } } if(videoStream==-1) return -1; // Didn't find a video stream if(audioStream==-1) return -1; aCodecCtx=pFormatCtx->streams[audioStream]->codec; // Set audio settings from codec info wanted_spec.freq = aCodecCtx->sample_rate; wanted_spec.format = AUDIO_S16SYS; wanted_spec.channels = aCodecCtx->channels; wanted_spec.silence = 0; wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE; wanted_spec.callback = audio_callback; wanted_spec.userdata = aCodecCtx; if(SDL_OpenAudio(&wanted_spec, &spec) < 0) { fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError()); return -1; } aCodec = avcodec_find_decoder(aCodecCtx->codec_id); if(!aCodec) { fprintf(stderr, "Unsupported codec!\n"); return -1; } if 
(avcodec_open(aCodecCtx, aCodec) < 0) { fprintf(stderr, "Cannot open audio codec!\n"); return -1; } // audio_st = pFormatCtx->streams[index] packet_queue_init(&audioq); SDL_PauseAudio(0); // Get a pointer to the codec context for the video stream pCodecCtx=pFormatCtx->streams[videoStream]->codec; // Find the decoder for the video stream pCodec=avcodec_find_decoder(pCodecCtx->codec_id); if(pCodec==NULL) { fprintf(stderr, "Unsupported codec!\n"); return -1; // Codec not found } // Open codec if(avcodec_open(pCodecCtx, pCodec)<0) { fprintf(stderr, "Cannot open video codec!\n"); return -1; // Could not open codec } // construct the scale context, conversing to PIX_FMT_YUV420P img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);// other codes if (img_convert_ctx == NULL) { fprintf(stderr, "Cannot initialize the conversion context!\n"); return -1; } // Allocate video frame pFrame=avcodec_alloc_frame(); // Make a screen to put our video #ifndef __DARWIN__ screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0); #else screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0); #endif if(!screen) { fprintf(stderr, "SDL: could not set video mode - exiting\n"); exit(1); } // Allocate a place to put our YUV image on that screen bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height, SDL_YV12_OVERLAY, screen); // Read frames and save first five frames to disk i=0; while(av_read_frame(pFormatCtx, &packet)>=0) { // Is this a packet from the video stream? if(packet.stream_index==videoStream) { // Decode video frame avcodec_decode_video(pCodecCtx, pFrame, &frameFinished, packet.data, packet.size); // Did we get a video frame? 
if(frameFinished) { SDL_LockYUVOverlay(bmp); AVPicture pict; pict.data[0] = bmp->pixels[0]; pict.data[1] = bmp->pixels[2]; pict.data[2] = bmp->pixels[1]; pict.linesize[0] = bmp->pitches[0]; pict.linesize[1] = bmp->pitches[2]; pict.linesize[2] = bmp->pitches[1]; // Convert the image into YUV format that SDL uses /* img_convert(&pict, PIX_FMT_YUV420P, (AVPicture *)pFrame, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height); */ sws_scale(img_convert_ctx, (const uint8_t * const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pict.data, pict.linesize); SDL_UnlockYUVOverlay(bmp); rect.x = 0; rect.y = 0; rect.w = pCodecCtx->width; rect.h = pCodecCtx->height; SDL_DisplayYUVOverlay(bmp, &rect); av_free_packet(&packet); } } else if(packet.stream_index==audioStream) { packet_queue_put(&audioq, &packet); } else { av_free_packet(&packet); } // Free the packet that was allocated by av_read_frame SDL_PollEvent(&event); switch(event.type) { case SDL_QUIT: quit = 1; SDL_Quit(); exit(0); break; default: break; } } sws_freeContext(img_convert_ctx); // Free the YUV frame av_free(pFrame); // Close the codec avcodec_close(pCodecCtx); // Close the video file av_close_input_file(pFormatCtx); return 0; }
/* Point the movie's video output at a new SDL surface/rect, or detach it
 * when dest is NULL.  The overlay swap happens under dest_mutex so the
 * decoder side never blits through a freed overlay. */
void ffmovie_setdisplay(FFMovie *movie, SDL_Surface *dest, SDL_Rect *rect)
{
    /*MAIN THREAD*/

    if(!movie->video_st || movie->abort_request || movie->context==NULL) {
        /*This movie has no video stream, or finished*/
        return;
    }

    SDL_LockMutex(movie->dest_mutex);
    if(movie->dest_overlay) {
        /*clean any existing overlay*/
        SDL_FreeYUVOverlay(movie->dest_overlay);
        movie->dest_overlay = NULL;
    }

    if(!dest) {
        /*no destination*/
        movie->dest_overlay = NULL;
    } else {
        /* Use the caller's rect when given; otherwise zero out w/h so the
         * clamping below fills them in. */
        if(rect) {
            movie->dest_rect.x = rect->x;
            movie->dest_rect.y = rect->y;
            movie->dest_rect.w = rect->w;
            movie->dest_rect.h = rect->h;
        } else {
            movie->dest_rect.x = 0;
            movie->dest_rect.y = 0;
            movie->dest_rect.w = 0;
            movie->dest_rect.h = 0;
        }
        /* Zero width/height means "as large as fits": clamp the frame size
         * to the destination surface. */
        if(movie->dest_rect.w == 0) {
            movie->dest_rect.w = MIN(movie->video_st->codec.width, dest->w);
        }
        if(movie->dest_rect.h == 0) {
            movie->dest_rect.h = MIN(movie->video_st->codec.height, dest->h);
        }

#if 0
        /* XXX: use generic function */
        /* XXX: disable overlay if no hardware acceleration or if RGB format */
        switch(movie->video_st->codec.pix_fmt) {
        case PIX_FMT_YUV420P:
        case PIX_FMT_YUV422P:
        case PIX_FMT_YUV444P:
        case PIX_FMT_YUV422:
        case PIX_FMT_YUV410P:
        case PIX_FMT_YUV411P:
            is_yuv = 1;
            break;
        default:
            is_yuv = 0;
            break;
        }
#endif
        movie->dest_surface = dest;
        /* The overlay is always full frame size; dest_rect only controls
         * where/how large it is shown. */
        movie->dest_overlay = SDL_CreateYUVOverlay(
            movie->video_st->codec.width,
            movie->video_st->codec.height,
            SDL_YV12_OVERLAY, dest);
    }
    SDL_UnlockMutex(movie->dest_mutex);

    /*set display time to now, force redraw*/
    movie->dest_showtime = get_master_clock(movie);
}
/* w, h is layer resolution */ int plat_sdl_change_video_mode(int w, int h, int force) { static int prev_w, prev_h; if (w == 0) w = prev_w; else prev_w = w; if (h == 0) h = prev_h; else prev_h = h; plat_target.vout_method=vout_mode_overlay; // invalid method might come from config.. if (plat_target.vout_method != 0 && plat_target.vout_method != vout_mode_overlay && plat_target.vout_method != vout_mode_gl) { fprintf(stderr, "invalid vout_method: %d\n", plat_target.vout_method); plat_target.vout_method = 0; } // skip GL recreation if window doesn't change - avoids flicker if (plat_target.vout_method == vout_mode_gl && plat_sdl_gl_active && plat_target.vout_fullscreen == old_fullscreen && !force) { return 0; } if (plat_sdl_overlay != NULL) { SDL_FreeYUVOverlay(plat_sdl_overlay); plat_sdl_overlay = NULL; } if (plat_sdl_gl_active) { gl_finish(); plat_sdl_gl_active = 0; } if (plat_target.vout_method != 0) { Uint32 flags = SDL_RESIZABLE | SDL_SWSURFACE; int win_w = window_w; int win_h = window_h; if (plat_target.vout_fullscreen) { flags |= SDL_FULLSCREEN; win_w = fs_w; win_h = fs_h; } // XXX: workaround some occasional mysterious deadlock in SDL_SetVideoMode // (seen on r-pi) SDL_PumpEvents(); unsigned int user_width = 0; sscanf(getenv("PCSX_WIDTH"), "%d", &user_width); if(user_width == 0) user_width=win_h; plat_sdl_screen = SDL_SetVideoMode(/*win_w*/ /*win_h*4/3*/ /*640*/ user_width*4/3, /*win_h*/ /*512*/ user_width, 0, flags); if (plat_sdl_screen == NULL) { fprintf(stderr, "SDL_SetVideoMode failed: %s\n", SDL_GetError()); plat_target.vout_method = 0; } } if (plat_target.vout_method == vout_mode_overlay) { plat_sdl_overlay = SDL_CreateYUVOverlay(w, h, SDL_UYVY_OVERLAY, plat_sdl_screen); if (plat_sdl_overlay != NULL) { if ((long)plat_sdl_overlay->pixels[0] & 3) fprintf(stderr, "warning: overlay pointer is unaligned\n"); plat_sdl_overlay_clear(); } else { fprintf(stderr, "warning: could not create overlay.\n"); plat_target.vout_method = 0; } } else if 
(plat_target.vout_method == vout_mode_gl) { plat_sdl_gl_active = (gl_init(display, window, &gl_quirks) == 0); if (!plat_sdl_gl_active) { fprintf(stderr, "warning: could not init GL.\n"); plat_target.vout_method = 0; } } if (plat_target.vout_method == 0) { SDL_PumpEvents(); plat_sdl_screen = SDL_SetVideoMode(w, h, 16, SDL_SWSURFACE); if (plat_sdl_screen == NULL) { fprintf(stderr, "SDL_SetVideoMode failed: %s\n", SDL_GetError()); return -1; } } old_fullscreen = plat_target.vout_fullscreen; if (plat_sdl_resize_cb != NULL) plat_sdl_resize_cb(plat_sdl_screen->w, plat_sdl_screen->h); return 0; }
/* One-time SDL platform init: bring up SDL video, size the menu screen from
 * the desktop resolution, probe which output methods (plain SDL window,
 * hardware YUV overlay, OpenGL) actually work on this machine, and publish
 * the list in plat_target.  Returns 0 on success, -1 on failure. */
int plat_sdl_init(void)
{
  static const char *vout_list[] = { NULL, NULL, NULL, NULL };
  const SDL_VideoInfo *info;
  SDL_SysWMinfo wminfo;
  int overlay_works = 0;
  int gl_works = 0;
  int i, ret, h;

  ret = SDL_Init(SDL_INIT_VIDEO | SDL_INIT_NOPARACHUTE);
  if (ret != 0) {
    fprintf(stderr, "SDL_Init failed: %s\n", SDL_GetError());
    return -1;
  }

  info = SDL_GetVideoInfo();
  if (info != NULL) {
    /* Before the first SDL_SetVideoMode(), current_w/h report the desktop
     * resolution — used here as the fullscreen size. */
    fs_w = info->current_w;
    fs_h = info->current_h;
    printf("plat_sdl: using %dx%d as fullscreen resolution\n", fs_w, fs_h);
  }

  /* Menu screen: 640x480, clamped to the desktop. */
  g_menuscreen_w = 640;
  if (fs_w != 0 && g_menuscreen_w > fs_w)
    g_menuscreen_w = fs_w;
  g_menuscreen_h = 480;
  if (fs_h != 0) {
    h = fs_h;
    /* Leave room for window-manager decorations in windowed mode. */
    if (info && info->wm_available && h > WM_DECORATION_H)
      h -= WM_DECORATION_H;
    if (g_menuscreen_h > h)
      g_menuscreen_h = h;
  }

  ret = plat_sdl_change_video_mode(g_menuscreen_w, g_menuscreen_h, 1);
  if (ret != 0) {
    /* Fall back to whatever mode the driver will give us. */
    plat_sdl_screen = SDL_SetVideoMode(0, 0, 16, SDL_SWSURFACE);
    if (plat_sdl_screen == NULL) {
      fprintf(stderr, "SDL_SetVideoMode failed: %s\n", SDL_GetError());
      goto fail;
    }
    if (plat_sdl_screen->w < 320 || plat_sdl_screen->h < 240) {
      fprintf(stderr, "resolution %dx%d is too small, sorry.\n",
              plat_sdl_screen->w, plat_sdl_screen->h);
      goto fail;
    }
  }
  g_menuscreen_w = window_w = plat_sdl_screen->w;
  g_menuscreen_h = window_h = plat_sdl_screen->h;

  // overlay/gl require native bpp in some cases..
  plat_sdl_screen = SDL_SetVideoMode(g_menuscreen_w, g_menuscreen_h,
                                     0, SDL_SWSURFACE);
  if (plat_sdl_screen == NULL) {
    fprintf(stderr, "SDL_SetVideoMode failed: %s\n", SDL_GetError());
    goto fail;
  }

  /* Probe overlay support with a throwaway overlay; only accept it when it
   * is hardware accelerated. */
  plat_sdl_overlay = SDL_CreateYUVOverlay(plat_sdl_screen->w, plat_sdl_screen->h,
                                          SDL_UYVY_OVERLAY, plat_sdl_screen);
  if (plat_sdl_overlay != NULL) {
    printf("plat_sdl: overlay: fmt %x, planes: %d, pitch: %d, hw: %d\n",
           plat_sdl_overlay->format, plat_sdl_overlay->planes,
           *plat_sdl_overlay->pitches, plat_sdl_overlay->hw_overlay);

    if (plat_sdl_overlay->hw_overlay)
      overlay_works = 1;
    else
      fprintf(stderr, "warning: video overlay is not hardware accelerated, "
                      "not going to use it.\n");
    SDL_FreeYUVOverlay(plat_sdl_overlay);
    plat_sdl_overlay = NULL;
  }
  else
    fprintf(stderr, "overlay is not available.\n");

  // get x11 display/window for GL
  SDL_VideoDriverName(vid_drv_name, sizeof(vid_drv_name));
#ifdef SDL_VIDEO_DRIVER_X11
  if (strcmp(vid_drv_name, "x11") == 0) {
    SDL_VERSION(&wminfo.version);
    ret = SDL_GetWMInfo(&wminfo);
    if (ret > 0) {
      display = wminfo.info.x11.display;
      window = (void *)wminfo.info.x11.window;
    }
  }
#endif

  /* Probe GL support: init once, record the capability, tear down. */
  ret = gl_init(display, window, &gl_quirks);
  if (ret == 0) {
    gl_works = 1;
    gl_finish();
  }

  /* Publish the list of working output methods; the last one registered
   * becomes the default vout_method. */
  i = 0;
  vout_list[i++] = "SDL Window";
  if (overlay_works) {
    plat_target.vout_method = vout_mode_overlay = i;
    vout_list[i++] = "Video Overlay";
  }
  if (gl_works) {
    plat_target.vout_method = vout_mode_gl = i;
    vout_list[i++] = "OpenGL";
  }
  plat_target.vout_methods = vout_list;

  return 0;

fail:
  SDL_Quit();
  return -1;
}
/* Must be called with the sdl lock held */
/* (Re)create the SDL screen and YUV overlay for the currently negotiated
 * size and fourcc.  Returns TRUE on success; on failure posts a GStreamer
 * element error and returns FALSE. */
static gboolean
gst_sdlvideosink_create (GstSDLVideoSink * sdlvideosink)
{
  /* Fall back to the media size when no explicit sink size was set. */
  if (GST_VIDEO_SINK_HEIGHT (sdlvideosink) <= 0)
    GST_VIDEO_SINK_HEIGHT (sdlvideosink) = sdlvideosink->height;
  if (GST_VIDEO_SINK_WIDTH (sdlvideosink) <= 0)
    GST_VIDEO_SINK_WIDTH (sdlvideosink) = sdlvideosink->width;

  /* Tear down any previous screen/overlay before re-creating. */
  gst_sdlvideosink_destroy (sdlvideosink);

  /* Ask the application for an X window id to embed into; the sdl lock is
   * dropped while waiting so the app can call back into this element. */
  if (sdlvideosink->is_xwindows && !sdlvideosink->xwindow_id) {
    g_mutex_unlock (sdlvideosink->lock);
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (sdlvideosink));
    g_mutex_lock (sdlvideosink->lock);
  }

  /* create a SDL window of the size requested by the user */
  if (sdlvideosink->full_screen) {
    sdlvideosink->screen =
        SDL_SetVideoMode (GST_VIDEO_SINK_WIDTH (sdlvideosink),
        GST_VIDEO_SINK_HEIGHT (sdlvideosink), 0,
        SDL_SWSURFACE | SDL_FULLSCREEN);
  } else {
    sdlvideosink->screen =
        SDL_SetVideoMode (GST_VIDEO_SINK_WIDTH (sdlvideosink),
        GST_VIDEO_SINK_HEIGHT (sdlvideosink), 0,
        SDL_HWSURFACE | SDL_RESIZABLE);
  }
  if (sdlvideosink->screen == NULL)
    goto no_screen;

  /* create a new YUV overlay */
  sdlvideosink->overlay = SDL_CreateYUVOverlay (sdlvideosink->width,
      sdlvideosink->height, sdlvideosink->format, sdlvideosink->screen);
  if (sdlvideosink->overlay == NULL)
    goto no_overlay;

  GST_DEBUG ("Using a %dx%d %dbpp SDL screen with a %dx%d \'%"
      GST_FOURCC_FORMAT "\' YUV overlay",
      GST_VIDEO_SINK_WIDTH (sdlvideosink),
      GST_VIDEO_SINK_HEIGHT (sdlvideosink),
      sdlvideosink->screen->format->BitsPerPixel, sdlvideosink->width,
      sdlvideosink->height, GST_FOURCC_ARGS (sdlvideosink->format));

  sdlvideosink->rect.x = 0;
  sdlvideosink->rect.y = 0;
  sdlvideosink->rect.w = GST_VIDEO_SINK_WIDTH (sdlvideosink);
  sdlvideosink->rect.h = GST_VIDEO_SINK_HEIGHT (sdlvideosink);

  /*SDL_DisplayYUVOverlay (sdlvideosink->overlay, &(sdlvideosink->rect)); */

  GST_DEBUG ("sdlvideosink: setting %08x (%" GST_FOURCC_FORMAT ")",
      sdlvideosink->format, GST_FOURCC_ARGS (sdlvideosink->format));

  return TRUE;

  /* ERRORS */
no_screen:
  {
    GST_ELEMENT_ERROR (sdlvideosink, LIBRARY, TOO_LAZY, (NULL),
        ("SDL: Couldn't set %dx%d: %s", GST_VIDEO_SINK_WIDTH (sdlvideosink),
            GST_VIDEO_SINK_HEIGHT (sdlvideosink), SDL_GetError ()));
    return FALSE;
  }
no_overlay:
  {
    GST_ELEMENT_ERROR (sdlvideosink, LIBRARY, TOO_LAZY, (NULL),
        ("SDL: Couldn't create SDL YUV overlay (%dx%d \'%" GST_FOURCC_FORMAT
            "\'): %s", sdlvideosink->width, sdlvideosink->height,
            GST_FOURCC_ARGS (sdlvideosink->format), SDL_GetError ()));
    return FALSE;
  }
}
int main(int argc, char *argv[]) { AVFormatContext *pFormatCtx; int i, videoStream; AVCodecContext *pCodecCtx; AVCodec *pCodec; AVFrame *pFrame; AVPacket packet; int frameFinished; float aspect_ratio; struct SwsContext *img_convert_ctx; SDL_Overlay *bmp; SDL_Surface *screen; SDL_Rect rect; SDL_Event event; if(argc < 2) { fprintf(stderr, "Usage: test <file>\n"); exit(1); } // Register all formats and codecs av_register_all(); if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) { fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError()); exit(1); } // Open video file if(av_open_input_file(&pFormatCtx, argv[1], NULL, 0, NULL)!=0) return -1; // Couldn't open file // Retrieve stream information if(av_find_stream_info(pFormatCtx)<0) return -1; // Couldn't find stream information // Dump information about file onto standard error dump_format(pFormatCtx, 0, argv[1], 0); // Find the first video stream videoStream=-1; for(i=0; i<pFormatCtx->nb_streams; i++) if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) { videoStream=i; break; } if(videoStream==-1) return -1; // Didn't find a video stream // Get a pointer to the codec context for the video stream pCodecCtx=pFormatCtx->streams[videoStream]->codec; // Find the decoder for the video stream pCodec=avcodec_find_decoder(pCodecCtx->codec_id); if(pCodec==NULL) { fprintf(stderr, "Unsupported codec!\n"); return -1; // Codec not found } // Open codec if(avcodec_open(pCodecCtx, pCodec)<0) return -1; // Could not open codec // Allocate video frame pFrame=avcodec_alloc_frame(); // Make a screen to put our video #ifndef __DARWIN__ screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0); #else screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0); #endif if(!screen) { fprintf(stderr, "SDL: could not set video mode - exiting\n"); exit(1); } // Allocate a place to put our YUV image on that screen bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height, 
SDL_YV12_OVERLAY, screen); // Read frames and save first five frames to disk i=0; while(av_read_frame(pFormatCtx, &packet)>=0) { // Is this a packet from the video stream? if(packet.stream_index==videoStream) { // Decode video frame avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); // Did we get a video frame? if(frameFinished) { SDL_LockYUVOverlay(bmp); AVPicture pict; pict.data[0] = bmp->pixels[0]; pict.data[1] = bmp->pixels[2]; pict.data[2] = bmp->pixels[1]; pict.linesize[0] = bmp->pitches[0]; pict.linesize[1] = bmp->pitches[2]; pict.linesize[2] = bmp->pitches[1]; // Convert the image into YUV format that SDL uses /* img_convert(&pict, PIX_FMT_YUV420P, (AVPicture *)pFrame, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height); */ int dstFmt; dstFmt = PIX_FMT_YUV420P; img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, dstFmt, SWS_BICUBIC, NULL, NULL, NULL); sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pict.data, pict.linesize); /* printf("*(pict.data[0]: %d\n", *(pict.data[0])); printf("*(pict.data[1]: %d\n", *(pict.data[1])); printf("*(pict.data[2]: %d\n", *(pict.data[2])); printf("*(pict.data[3]: %d\n", *(pict.data[3])); printf("linesize[0]: %d\n", pict.linesize[0]); printf("linesize[1]: %d\n", pict.linesize[1]); printf("linesize[2]: %d\n", pict.linesize[2]); printf("linesize[3]: %d\n", pict.linesize[3]); printf("width: %d\n", pCodecCtx->width); printf("height: %d\n", pCodecCtx->height); */ ++i; if(i>50) if(i<=51) { printf("frame 51\n"); if( *(pict.data[0]) == 20) printf("frame 51, line 0, x=1, 20\n"); } SDL_UnlockYUVOverlay(bmp); rect.x = 0; rect.y = 0; rect.w = pCodecCtx->width; rect.h = pCodecCtx->height; SDL_DisplayYUVOverlay(bmp, &rect); //SDL_Delay(1000); //return 0; } } // Free the packet that was allocated by av_read_frame av_free_packet(&packet); SDL_PollEvent(&event); switch(event.type) { case 
SDL_QUIT: SDL_Quit(); exit(0); break; default: break; } } // Free the YUV frame av_free(pFrame); // Close the codec avcodec_close(pCodecCtx); // Close the video file av_close_input_file(pFormatCtx); return 0; }
/*****************************************************************************
 * NewPicture: allocate a picture
 *****************************************************************************
 * Returns 0 on success, -1 otherwise.
 *
 * Two modes:
 *  - No global overlay (RGB fallback): the single picture aliases the SDL
 *    display surface directly, so only one such picture may exist.
 *  - Overlay mode: a per-picture SDL YUV overlay is created and the plane
 *    descriptors are pointed at its pixel buffers.  The overlay is locked
 *    and stays locked for the picture's lifetime.
 *
 * Fixes over the previous revision:
 *  - unsupported BitsPerPixel no longer leaks p_pic->p_sys;
 *  - the packed-YUV default case now initializes Y_PLANE's visible pitch
 *    (it previously wrote U_PLANE's, although i_planes is 1, leaving the
 *    only real plane's visible pitch undefined).
 *****************************************************************************/
static int NewPicture( vout_thread_t *p_vout, picture_t *p_pic )
{
    int i_width = p_vout->output.i_width;
    int i_height = p_vout->output.i_height;

    if( p_vout->p_sys->p_overlay == NULL )
    {
        /* RGB picture: aliases the display surface, so at most one. */
        if( p_vout->p_sys->i_surfaces )
        {
            /* We already allocated this surface, return */
            return VLC_EGENERIC;
        }

        p_pic->p_sys = malloc( sizeof( picture_sys_t ) );
        if( p_pic->p_sys == NULL )
        {
            return VLC_ENOMEM;
        }

        switch( p_vout->p_sys->p_display->format->BitsPerPixel )
        {
            case 8:
                p_pic->p->i_pixel_pitch = 1;
                break;
            case 15:
            case 16:
                p_pic->p->i_pixel_pitch = 2;
                break;
            case 24:
            case 32:
                p_pic->p->i_pixel_pitch = 4;
                break;
            default:
                /* FIX: do not leak p_pic->p_sys on unsupported depths */
                free( p_pic->p_sys );
                p_pic->p_sys = NULL;
                return VLC_EGENERIC;
        }

        p_pic->p->p_pixels = p_vout->p_sys->p_display->pixels;
        p_pic->p->i_lines = p_vout->p_sys->p_display->h;
        p_pic->p->i_visible_lines = p_vout->p_sys->p_display->h;
        p_pic->p->i_pitch = p_vout->p_sys->p_display->pitch;
        p_pic->p->i_visible_pitch =
            p_pic->p->i_pixel_pitch * p_vout->p_sys->p_display->w;

        p_vout->p_sys->i_surfaces++;

        p_pic->i_planes = 1;
    }
    else
    {
        p_pic->p_sys = malloc( sizeof( picture_sys_t ) );
        if( p_pic->p_sys == NULL )
        {
            return VLC_ENOMEM;
        }

        p_pic->p_sys->p_overlay =
            SDL_CreateYUVOverlay( i_width, i_height,
                                  p_vout->output.i_chroma,
                                  p_vout->p_sys->p_display );
        if( p_pic->p_sys->p_overlay == NULL )
        {
            free( p_pic->p_sys );
            return VLC_EGENERIC;
        }

        /* The overlay stays locked for the whole picture lifetime. */
        SDL_LockYUVOverlay( p_pic->p_sys->p_overlay );

        /* Luma plane is plane 0 in every SDL overlay layout. */
        p_pic->Y_PIXELS = p_pic->p_sys->p_overlay->pixels[0];
        p_pic->p[Y_PLANE].i_lines = p_pic->p_sys->p_overlay->h;
        p_pic->p[Y_PLANE].i_visible_lines = p_pic->p_sys->p_overlay->h;
        p_pic->p[Y_PLANE].i_pitch = p_pic->p_sys->p_overlay->pitches[0];

        switch( p_vout->output.i_chroma )
        {
        case SDL_YV12_OVERLAY:
            /* YV12: V plane is stored before U -> U lives in pixels[2]. */
            p_pic->p[Y_PLANE].i_pixel_pitch = 1;
            p_pic->p[Y_PLANE].i_visible_pitch = p_pic->p_sys->p_overlay->w;

            p_pic->U_PIXELS = p_pic->p_sys->p_overlay->pixels[2];
            p_pic->p[U_PLANE].i_lines = p_pic->p_sys->p_overlay->h / 2;
            p_pic->p[U_PLANE].i_visible_lines = p_pic->p_sys->p_overlay->h / 2;
            p_pic->p[U_PLANE].i_pitch = p_pic->p_sys->p_overlay->pitches[2];
            p_pic->p[U_PLANE].i_pixel_pitch = 1;
            p_pic->p[U_PLANE].i_visible_pitch = p_pic->p_sys->p_overlay->w / 2;

            p_pic->V_PIXELS = p_pic->p_sys->p_overlay->pixels[1];
            p_pic->p[V_PLANE].i_lines = p_pic->p_sys->p_overlay->h / 2;
            p_pic->p[V_PLANE].i_visible_lines = p_pic->p_sys->p_overlay->h / 2;
            p_pic->p[V_PLANE].i_pitch = p_pic->p_sys->p_overlay->pitches[1];
            p_pic->p[V_PLANE].i_pixel_pitch = 1;
            p_pic->p[V_PLANE].i_visible_pitch = p_pic->p_sys->p_overlay->w / 2;

            p_pic->i_planes = 3;
            break;

        case SDL_IYUV_OVERLAY:
            /* IYUV/I420: U plane comes first -> U lives in pixels[1]. */
            p_pic->p[Y_PLANE].i_pixel_pitch = 1;
            p_pic->p[Y_PLANE].i_visible_pitch = p_pic->p_sys->p_overlay->w;

            p_pic->U_PIXELS = p_pic->p_sys->p_overlay->pixels[1];
            p_pic->p[U_PLANE].i_lines = p_pic->p_sys->p_overlay->h / 2;
            p_pic->p[U_PLANE].i_visible_lines = p_pic->p_sys->p_overlay->h / 2;
            p_pic->p[U_PLANE].i_pitch = p_pic->p_sys->p_overlay->pitches[1];
            p_pic->p[U_PLANE].i_pixel_pitch = 1;
            p_pic->p[U_PLANE].i_visible_pitch = p_pic->p_sys->p_overlay->w / 2;

            p_pic->V_PIXELS = p_pic->p_sys->p_overlay->pixels[2];
            p_pic->p[V_PLANE].i_lines = p_pic->p_sys->p_overlay->h / 2;
            p_pic->p[V_PLANE].i_visible_lines = p_pic->p_sys->p_overlay->h / 2;
            p_pic->p[V_PLANE].i_pitch = p_pic->p_sys->p_overlay->pitches[2];
            p_pic->p[V_PLANE].i_pixel_pitch = 1;
            p_pic->p[V_PLANE].i_visible_pitch = p_pic->p_sys->p_overlay->w / 2;

            p_pic->i_planes = 3;
            break;

        default:
            /* Packed YUV (YUY2/UYVY/YVYU): one plane, 2 bytes per pixel.
             * FIX: set the Y plane's visible pitch — the previous code
             * wrote U_PLANE here even though only Y_PLANE exists. */
            p_pic->p[Y_PLANE].i_pixel_pitch = 2;
            p_pic->p[Y_PLANE].i_visible_pitch = p_pic->p_sys->p_overlay->w * 2;

            p_pic->i_planes = 1;
            break;
        }
    }

    return VLC_SUCCESS;
}
/*****************************************************************************
 * OpenDisplay: open and initialize SDL device
 *****************************************************************************
 * Open and initialize display according to preferences specified in the vout
 * thread fields.
 *
 * Flow: pick window size (fullscreen -> output size, else window size),
 * probe a usable bpp with SDL_VideoModeOK, create the display surface and
 * keep it locked, then probe for a YUV overlay.  A tiny 32x32 test overlay
 * is created to discover which SDL chroma the driver supports, trying the
 * render chroma first and then IYUV / YV12 / YUY2 in turn.  If no overlay
 * chroma works at all, the vout falls back to software RGB at the surface's
 * native depth (with a palette callback for the 8bpp case).
 * NOTE(review): the 32x32 probe overlay is intentionally not the final
 * overlay size — per-picture overlays are created later in NewPicture.
 * Returns VLC_SUCCESS or VLC_EGENERIC.
 *****************************************************************************/
static int OpenDisplay( vout_thread_t *p_vout ) { uint32_t i_flags; int i_bpp; /* SDL f****d up fourcc definitions on bigendian machines */ uint32_t i_sdl_chroma; /* Set main window's size */ p_vout->p_sys->i_width = p_vout->b_fullscreen ? p_vout->output.i_width : p_vout->i_window_width; p_vout->p_sys->i_height = p_vout->b_fullscreen ? p_vout->output.i_height : p_vout->i_window_height; /* Initialize flags and cursor */ i_flags = SDL_ANYFORMAT | SDL_HWPALETTE | SDL_HWSURFACE | SDL_DOUBLEBUF; i_flags |= p_vout->b_fullscreen ? SDL_FULLSCREEN : SDL_RESIZABLE; i_bpp = SDL_VideoModeOK( p_vout->p_sys->i_width, p_vout->p_sys->i_height, SDL_DEFAULT_BPP, i_flags ); if( i_bpp == 0 ) { msg_Err( p_vout, "no video mode available" ); return VLC_EGENERIC; } p_vout->p_sys->p_display = SDL_SetVideoMode( p_vout->p_sys->i_width, p_vout->p_sys->i_height, i_bpp, i_flags ); if( p_vout->p_sys->p_display == NULL ) { msg_Err( p_vout, "cannot set video mode" ); return VLC_EGENERIC; } SDL_LockSurface( p_vout->p_sys->p_display ); /* Choose the chroma we will try first. 
*/ switch( p_vout->render.i_chroma ) { case VLC_FOURCC('Y','U','Y','2'): case VLC_FOURCC('Y','U','N','V'): p_vout->output.i_chroma = VLC_FOURCC('Y','U','Y','2'); i_sdl_chroma = SDL_YUY2_OVERLAY; break; case VLC_FOURCC('U','Y','V','Y'): case VLC_FOURCC('U','Y','N','V'): case VLC_FOURCC('Y','4','2','2'): p_vout->output.i_chroma = VLC_FOURCC('U','Y','V','Y'); i_sdl_chroma = SDL_UYVY_OVERLAY; break; case VLC_FOURCC('Y','V','Y','U'): p_vout->output.i_chroma = VLC_FOURCC('Y','V','Y','U'); i_sdl_chroma = SDL_YVYU_OVERLAY; break; case VLC_FOURCC('Y','V','1','2'): case VLC_FOURCC('I','4','2','0'): case VLC_FOURCC('I','Y','U','V'): default: p_vout->output.i_chroma = VLC_FOURCC('Y','V','1','2'); i_sdl_chroma = SDL_YV12_OVERLAY; break; } p_vout->p_sys->p_overlay = SDL_CreateYUVOverlay( 32, 32, i_sdl_chroma, p_vout->p_sys->p_display ); /* FIXME: if the first overlay we find is software, don't stop, * because we may find a hardware one later ... */ /* If this best choice failed, fall back to other chromas */ if( p_vout->p_sys->p_overlay == NULL ) { p_vout->output.i_chroma = VLC_FOURCC('I','Y','U','V'); p_vout->p_sys->p_overlay = SDL_CreateYUVOverlay( 32, 32, SDL_IYUV_OVERLAY, p_vout->p_sys->p_display ); } if( p_vout->p_sys->p_overlay == NULL ) { p_vout->output.i_chroma = VLC_FOURCC('Y','V','1','2'); p_vout->p_sys->p_overlay = SDL_CreateYUVOverlay( 32, 32, SDL_YV12_OVERLAY, p_vout->p_sys->p_display ); } if( p_vout->p_sys->p_overlay == NULL ) { p_vout->output.i_chroma = VLC_FOURCC('Y','U','Y','2'); p_vout->p_sys->p_overlay = SDL_CreateYUVOverlay( 32, 32, SDL_YUY2_OVERLAY, p_vout->p_sys->p_display ); } if( p_vout->p_sys->p_overlay == NULL ) { msg_Warn( p_vout, "no SDL overlay for 0x%.8x (%4.4s)", p_vout->render.i_chroma, (char*)&p_vout->render.i_chroma ); switch( p_vout->p_sys->p_display->format->BitsPerPixel ) { case 8: p_vout->output.i_chroma = VLC_FOURCC('R','G','B','2'); p_vout->output.pf_setpalette = SetPalette; break; case 15: p_vout->output.i_chroma = 
VLC_FOURCC('R','V','1','5'); break; case 16: p_vout->output.i_chroma = VLC_FOURCC('R','V','1','6'); break; case 24: p_vout->output.i_chroma = VLC_FOURCC('R','V','2','4'); break; case 32: p_vout->output.i_chroma = VLC_FOURCC('R','V','3','2'); break; default: msg_Err( p_vout, "unknown screen depth %i", p_vout->p_sys->p_display->format->BitsPerPixel ); SDL_UnlockSurface( p_vout->p_sys->p_display ); SDL_FreeSurface( p_vout->p_sys->p_display ); return VLC_EGENERIC; } p_vout->output.i_rmask = p_vout->p_sys->p_display->format->Rmask; p_vout->output.i_gmask = p_vout->p_sys->p_display->format->Gmask; p_vout->output.i_bmask = p_vout->p_sys->p_display->format->Bmask; SDL_WM_SetCaption( VOUT_TITLE " (software RGB SDL output)", VOUT_TITLE " (software RGB SDL output)" ); } else { if( p_vout->p_sys->p_overlay->hw_overlay ) { SDL_WM_SetCaption( VOUT_TITLE " (hardware YUV SDL output)", VOUT_TITLE " (hardware YUV SDL output)" ); } else { SDL_WM_SetCaption( VOUT_TITLE " (software YUV SDL output)", VOUT_TITLE " (software YUV SDL output)" ); } } SDL_EventState( SDL_KEYUP, SDL_IGNORE ); /* ignore keys up */ return VLC_SUCCESS; }
/**
**  Play a video file.
**
**  @param name  Filename of movie file.
**
**  @return      Non-zero if file isn't a supported movie.
**
**  Opens the file through the engine's virtual file layer, demuxes it as
**  Ogg/Theora (optionally with a Vorbis audio track played as music), and
**  renders frames into a YV12 SDL overlay on TheScreen.  The display
**  rectangle is letterboxed/pillarboxed to preserve the movie's aspect
**  ratio against the current Video.Width/Video.Height.  Event callbacks
**  are swapped for movie-specific ones for the duration of playback and
**  restored on exit.  The playback loop paces itself against
**  theora_granule_time(): more than 100 ms behind -> decode-and-skip,
**  slightly behind -> display the frame, otherwise wait one frame.
**  NOTE(review): returns -1 for open/demux failures but 0 for overlay or
**  audio-format failures — callers treating non-zero as "unsupported"
**  only see the first class; confirm this asymmetry is intended.
*/
int PlayMovie(const std::string &name)
{
	OggData data;
	CFile f;
	SDL_Rect rect;
	SDL_Overlay *yuv_overlay;
	CSample *sample;
	const EventCallback *old_callbacks;
	EventCallback callbacks;
	unsigned int start_ticks;
	int need_data;
	int diff;
	char buffer[PATH_MAX];

	LibraryFileName(name.c_str(), buffer, sizeof(buffer));
	if (f.open(buffer, CL_OPEN_READ) == -1) {
		fprintf(stderr, "Can't open file `%s'\n", name.c_str());
		return -1;
	}

	memset(&data, 0, sizeof(data));
	if (OggInit(&f, &data) || !data.video) {
		OggFree(&data);
		f.close();
		return -1;
	}

	data.File = &f;

	// Choose a destination rectangle that preserves the movie's aspect
	// ratio (the 300/4 vs 100 comparison is frame_w*3/4 > frame_h scaled
	// to avoid integer truncation).
	if (data.tinfo.frame_width * 300 / 4 > data.tinfo.frame_height * 100) {
		rect.w = Video.Width;
		rect.h = Video.Width * data.tinfo.frame_height / data.tinfo.frame_width;
		rect.x = 0;
		rect.y = (Video.Height - rect.h) / 2;
	} else {
		rect.w = Video.Height * data.tinfo.frame_width / data.tinfo.frame_height;
		rect.h = Video.Height;
		rect.x = (Video.Width - rect.w) / 2;
		rect.y = 0;
	}

	yuv_overlay = SDL_CreateYUVOverlay(data.tinfo.frame_width, data.tinfo.frame_height, SDL_YV12_OVERLAY, TheScreen);
	if (yuv_overlay == NULL) {
		fprintf(stderr, "SDL_CreateYUVOverlay: %s\n", SDL_GetError());
		OggFree(&data);
		f.close();
		return 0;
	}

	StopMusic();
	// Optional Vorbis soundtrack: only mono/stereo 16-bit is supported.
	if ((sample = LoadVorbis(buffer, PlayAudioStream))) {
		if ((sample->Channels != 1 && sample->Channels != 2) || sample->SampleSize != 16) {
			fprintf(stderr, "Unsupported sound format in movie\n");
			delete sample;
			SDL_FreeYUVOverlay(yuv_overlay);
			OggFree(&data);
			f.close();
			return 0;
		}
		PlayMusic(sample);
	}

	// Install movie-specific input callbacks; restored before returning.
	callbacks.ButtonPressed = MovieCallbackButtonPressed;
	callbacks.ButtonReleased = MovieCallbackButtonReleased;
	callbacks.MouseMoved = MovieCallbackMouseMove;
	callbacks.MouseExit = MovieCallbackMouseExit;
	callbacks.KeyPressed = MovieCallbackKeyPressed;
	callbacks.KeyReleased = MovieCallbackKeyReleased;
	callbacks.KeyRepeated = MovieCallbackKeyRepeated;
	callbacks.NetworkEvent = NetworkEvent;

	old_callbacks = GetCallbacks();
	SetCallbacks(&callbacks);

	Invalidate();
	RealizeVideoMemory();

	MovieStop = false;
	start_ticks = SDL_GetTicks();
	need_data = 1;
	while (!MovieStop) {
		if (need_data) {
			if (TheoraProcessData(&data)) {
				break;
			}
			need_data = 0;
		}

		// Wall-clock drift against the Theora presentation time (ms).
		diff = SDL_GetTicks() - start_ticks - static_cast<int>( theora_granule_time(&data.tstate, data.tstate.granulepos) * 1000);

		if (diff > 100) {
			// too far behind, skip some frames
			need_data = 1;
			continue;
		}
		if (diff > 0) {
			OutputTheora(&data, yuv_overlay, &rect);
			need_data = 1;
		}

		WaitEventsOneFrame();
	}

	StopMusic();
	SDL_FreeYUVOverlay(yuv_overlay);

	OggFree(&data);
	f.close();

	SetCallbacks(old_callbacks);

	return 0;
}
/**
 * This function initializes SDL vout method.
 *
 * vlc_object_t is expected to be a vout_display_t.  Under the global
 * sdl_lock it verifies SDL video is not already in use, allocates the
 * display sys struct, initializes SDL (with the event thread where
 * supported), creates the display surface sized either to the desktop
 * (fullscreen) or the configured window, then tries to create a YUV
 * overlay: a user-forced chroma first, otherwise the VLC fallback chroma
 * list, preferring hardware overlays on the first pass.  Without any
 * overlay it falls back to software RGB at the surface's depth.  Both the
 * display surface and the overlay are locked once and kept locked.
 * Returns VLC_SUCCESS, VLC_ENOMEM, or VLC_EGENERIC (error path tears down
 * the surface and quits the SDL video subsystem).
 */
static int Open(vlc_object_t *object)
{
    vout_display_t *vd = (vout_display_t *)object;
    vout_display_sys_t *sys;

    /* XXX: check for conflicts with the SDL audio output */
    vlc_mutex_lock(&sdl_lock);

    /* Check if SDL video module has been initialized */
    if (SDL_WasInit(SDL_INIT_VIDEO) != 0) {
        vlc_mutex_unlock(&sdl_lock);
        return VLC_EGENERIC;
    }

    vd->sys = sys = calloc(1, sizeof(*sys));
    if (!sys) {
        vlc_mutex_unlock(&sdl_lock);
        return VLC_ENOMEM;
    }

#ifdef HAVE_SETENV
    /* Allow the user to pick the SDL video backend via sdl-video-driver. */
    char *psz_driver = var_CreateGetNonEmptyString(vd, "sdl-video-driver");
    if (psz_driver) {
        setenv("SDL_VIDEODRIVER", psz_driver, 1);
        free(psz_driver);
    }
#endif

    /* */
    int sdl_flags = SDL_INIT_VIDEO;
#ifndef WIN32
    /* Win32 SDL implementation doesn't support SDL_INIT_EVENTTHREAD yet*/
    sdl_flags |= SDL_INIT_EVENTTHREAD;
#endif
#ifndef NDEBUG
    /* In debug mode you may want vlc to dump a core instead of staying stuck */
    sdl_flags |= SDL_INIT_NOPARACHUTE;
#endif

    /* Initialize library */
    if (SDL_Init(sdl_flags) < 0) {
        vlc_mutex_unlock(&sdl_lock);
        msg_Err(vd, "cannot initialize SDL (%s)", SDL_GetError());
        free(sys);
        return VLC_EGENERIC;
    }
    vlc_mutex_unlock(&sdl_lock);

    /* Translate keys into unicode */
    SDL_EnableUNICODE(1);

    /* Get the desktop resolution */
    /* FIXME: SDL has a problem with virtual desktop */
    sys->desktop_width = SDL_GetVideoInfo()->current_w;
    sys->desktop_height = SDL_GetVideoInfo()->current_h;

    /* */
    video_format_t fmt = vd->fmt;

    /* */
    vout_display_info_t info = vd->info;

    /* Set main window's size */
    int display_width;
    int display_height;
    if (vd->cfg->is_fullscreen) {
        display_width = sys->desktop_width;
        display_height = sys->desktop_height;
    } else {
        display_width = vd->cfg->display.width;
        display_height = vd->cfg->display.height;
    }

    /* Initialize flags and cursor */
    sys->display_flags = SDL_ANYFORMAT | SDL_HWPALETTE | SDL_HWSURFACE | SDL_DOUBLEBUF;
    sys->display_flags |= vd->cfg->is_fullscreen ? SDL_FULLSCREEN : SDL_RESIZABLE;

    sys->display_bpp = SDL_VideoModeOK(display_width, display_height, 16, sys->display_flags);
    if (sys->display_bpp == 0) {
        msg_Err(vd, "no video mode available");
        goto error;
    }

    sys->display = SDL_SetVideoMode(display_width, display_height, sys->display_bpp, sys->display_flags);
    if (!sys->display) {
        msg_Err(vd, "cannot set video mode");
        goto error;
    }

    /* We keep the surface locked forever */
    SDL_LockSurface(sys->display);

    /* Optional user-forced chroma via the sdl-chroma variable. */
    vlc_fourcc_t forced_chroma = 0;
    char *psz_chroma = var_CreateGetNonEmptyString(vd, "sdl-chroma");
    if (psz_chroma) {
        forced_chroma = vlc_fourcc_GetCodecFromString(VIDEO_ES, psz_chroma);
        if (forced_chroma)
            msg_Dbg(vd, "Forcing chroma to 0x%.8x (%4.4s)", forced_chroma, (const char*)&forced_chroma);
        free(psz_chroma);
    }

    /* Try to open an overlay if requested */
    sys->overlay = NULL;
    const bool is_overlay = var_CreateGetBool(vd, "overlay");
    if (is_overlay) {
        static const struct {
            vlc_fourcc_t vlc;
            uint32_t sdl;
        } vlc_to_sdl[] = {
            { VLC_CODEC_YV12, SDL_YV12_OVERLAY },
            { VLC_CODEC_I420, SDL_IYUV_OVERLAY },
            { VLC_CODEC_YUYV, SDL_YUY2_OVERLAY },
            { VLC_CODEC_UYVY, SDL_UYVY_OVERLAY },
            { VLC_CODEC_YVYU, SDL_YVYU_OVERLAY },
            { 0, 0 }
        };
        const vlc_fourcc_t forced_chromas[] = { forced_chroma, 0 };
        const vlc_fourcc_t *fallback_chromas = vlc_fourcc_GetYUVFallback(fmt.i_chroma);
        const vlc_fourcc_t *chromas = forced_chroma ? forced_chromas : fallback_chromas;

        /* Pass 0 accepts only hardware overlays; pass 1 takes anything.
         * A forced chroma skips straight to pass 1. */
        for (int pass = forced_chroma ? 1 : 0; pass < 2 && !sys->overlay; pass++) {
            for (int i = 0; chromas[i] != 0; i++) {
                const vlc_fourcc_t vlc = chromas[i];

                uint32_t sdl = 0;
                for (int j = 0; vlc_to_sdl[j].vlc != 0 && !sdl; j++) {
                    if (vlc_to_sdl[j].vlc == vlc)
                        sdl = vlc_to_sdl[j].sdl;
                }
                if (!sdl)
                    continue;

                sys->overlay = SDL_CreateYUVOverlay(fmt.i_width, fmt.i_height, sdl, sys->display);
                if (sys->overlay && !sys->overlay->hw_overlay && pass == 0) {
                    /* Ignore non hardware overlay surface in first pass */
                    SDL_FreeYUVOverlay(sys->overlay);
                    sys->overlay = NULL;
                }
                if (sys->overlay) {
                    /* We keep the surface locked forever */
                    SDL_LockYUVOverlay(sys->overlay);
                    fmt.i_chroma = vlc;
                    sys->is_uv_swapped = vlc_fourcc_AreUVPlanesSwapped(fmt.i_chroma, vd->fmt.i_chroma);
                    if (sys->is_uv_swapped)
                        fmt.i_chroma = vd->fmt.i_chroma;
                    break;
                }
            }
        }
    } else {
        msg_Warn(vd, "SDL overlay disabled by the user");
    }

    /* */
    vout_display_cfg_t place_cfg = *vd->cfg;
    place_cfg.display.width = display_width;
    place_cfg.display.height = display_height;
    vout_display_PlacePicture(&sys->place, &vd->source, &place_cfg, !sys->overlay);

    /* If no overlay, fallback to software output */
    if (!sys->overlay) {
        /* */
        switch (sys->display->format->BitsPerPixel) {
        case 8:
            fmt.i_chroma = VLC_CODEC_RGB8;
            break;
        case 15:
            fmt.i_chroma = VLC_CODEC_RGB15;
            break;
        case 16:
            fmt.i_chroma = VLC_CODEC_RGB16;
            break;
        case 24:
            fmt.i_chroma = VLC_CODEC_RGB24;
            break;
        case 32:
            fmt.i_chroma = VLC_CODEC_RGB32;
            break;
        default:
            msg_Err(vd, "unknown screen depth %i", sys->display->format->BitsPerPixel);
            goto error;
        }

        /* All we have is an RGB image with square pixels */
        fmt.i_width = display_width;
        fmt.i_height = display_height;
        fmt.i_rmask = sys->display->format->Rmask;
        fmt.i_gmask = sys->display->format->Gmask;
        fmt.i_bmask = sys->display->format->Bmask;
        info.has_pictures_invalid = true;
    }

    if (vd->cfg->display.title)
        SDL_WM_SetCaption(vd->cfg->display.title, vd->cfg->display.title);
    else if (!sys->overlay)
        SDL_WM_SetCaption(VOUT_TITLE " (software RGB SDL output)", VOUT_TITLE " (software RGB SDL output)");
    else if (sys->overlay->hw_overlay)
        SDL_WM_SetCaption(VOUT_TITLE " (hardware YUV SDL output)", VOUT_TITLE " (hardware YUV SDL output)");
    else
        SDL_WM_SetCaption(VOUT_TITLE " (software YUV SDL output)", VOUT_TITLE " (software YUV SDL output)");

    /* Setup events */
    SDL_EventState(SDL_KEYUP, SDL_IGNORE); /* ignore keys up */

    /* Setup vout_display now that everything is fine */
    vd->fmt = fmt;
    vd->info = info;
    vd->get = Get;
    vd->prepare = NULL;
    vd->display = Display;
    vd->control = Control;
    vd->manage = Manage;

    /* */
    vout_display_SendEventDisplaySize(vd, display_width, display_height, vd->cfg->is_fullscreen);
    return VLC_SUCCESS;

error:
    msg_Err(vd, "cannot set up SDL (%s)", SDL_GetError());
    if (sys->display) {
        SDL_UnlockSurface(sys->display);
        SDL_FreeSurface(sys->display);
    }
    vlc_mutex_lock(&sdl_lock);
    SDL_QuitSubSystem(SDL_INIT_VIDEO);
    vlc_mutex_unlock(&sdl_lock);
    free(sys);
    return VLC_EGENERIC;
}
/*
 * Myo-controlled network video viewer.
 *
 * Connects to a Myo armband, opens the video stream described by
 * CVideoSocket::videoStreamUrl with FFmpeg, decodes the first video stream
 * and displays it through an SDL 1.2 YV12 overlay.  Myo gestures (and
 * arrow/letter keys) send camera-control command strings over the socket
 * and drive a software zoom implemented by growing/shrinking the display
 * rectangle (rect_w/rect_h).
 * NOTE(review): `rest` is assigned but never declared here — presumably a
 * global defined elsewhere in this file; confirm.
 * Returns 0 on normal shutdown, -1 on stream/codec setup failure, 1 when a
 * std::exception (e.g. no Myo found) is caught.
 */
int main(int argc, char *argv[])
{
	// Initalizing these to NULL prevents segfaults!
	AVFormatContext *pFormatCtx = NULL;
	int i, videoStream;
	AVCodecContext *pCodecCtxOrig = NULL;
	AVCodecContext *pCodecCtx = NULL; // codec context; used frequently below
	AVCodec *pCodec = NULL; // the codec that will decode the video
	AVFrame *pFrame = NULL; // holds the decoded picture data
	AVPacket packet;
	int frameFinished;
	struct SwsContext *sws_ctx = NULL; // Convert the image into YUV format that SDL uses

	// SDL-related variables
	SDL_Overlay *bmp;
	SDL_Surface *screen;
	SDL_Rect rect;
	SDL_Event event;

	CVideoSocket videoSocket;

	// variables used for zoom in / zoom out
	int rect_w = 0;
	int rect_h = 0;

	// We catch any exceptions that might occur below -- see the catch statement for more details.
	try {
		// Myo initialization starts here
		// First, we create a Hub with our application identifier. Be sure not to use the com.example namespace when
		// publishing your application. The Hub provides access to one or more Myos.
		// Create the hub that connects to the Myo application
		myo::Hub hub("com.example.hello-myo");

		// Searching for a Myo ...
		std::cout << "Attempting to find a Myo..." << std::endl;

		// Next, we attempt to find a Myo to use. If a Myo is already paired in Myo Connect, this will return that Myo
		// immediately.
		// waitForMyo() takes a timeout value in milliseconds. In this case we will try to find a Myo for 10 seconds, and
		// if that fails, the function will return a null pointer.
		// Wait while looking for a Myo
		myo::Myo* myo = hub.waitForMyo(10000);

		// If waitForMyo() returned a null pointer, we failed to find a Myo, so exit with an error message.
		// Error handling when no Myo is found
		if (!myo) {
			throw std::runtime_error("Unable to find a Myo!");
		}

		// We've found a Myo.
		std::cout << "Connected to a Myo armband!" << std::endl << std::endl;

		// Next we construct an instance of our DeviceListener, so that we can register it with the Hub.
		// Class that processes the data obtained from the Myo
		DataCollector collector;

		// Hub::addListener() takes the address of any object whose class inherits from DeviceListener, and will cause
		// Hub::run() to send events to all registered device listeners.
		// Keep receiving data continuously
		hub.addListener(&collector);
		//--- Myo initialization ends here

		// SDL initialization
		InitSDL();

		// Open video file
		// Open the file or data stream
		if (avformat_open_input(&pFormatCtx, videoSocket.videoStreamUrl, NULL, NULL) != 0) {
			return -1; // Couldn't open file
		}

		// Retrieve stream information
		// Retrieve information about the data stream
		if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
			return -1; // Couldn't find stream information
		}

		// Dump information about file onto standard error
		av_dump_format(pFormatCtx, 0, videoSocket.videoStreamUrl, 0);

		// Find the first video stream
		// Determine what kind of data stream it is (ours is fixed to h.264, though...)
		videoStream = -1;
		for (i = 0; (unsigned)i < pFormatCtx->nb_streams; i++) {
			if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
				videoStream = i;
				break;
			}
		}
		if (videoStream == -1) {
			return -1; // Didn't find a video stream
		}

		// Get a pointer to the codec context for the video stream
		pCodecCtxOrig = pFormatCtx->streams[videoStream]->codec;

		// Find the decoder for the video stream
		pCodec = avcodec_find_decoder(pCodecCtxOrig->codec_id);
		if (pCodec == NULL) {
			fprintf(stderr, "Unsupported codec!\n");
			return -1; // Codec not found
		}

		// Copy context
		// Not sure why, but the context is copied rather than used directly
		pCodecCtx = avcodec_alloc_context3(pCodec);
		if (avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0) {
			fprintf(stderr, "Couldn't copy codec context");
			return -1; // Error copying codec context
		}

		// Open codec
		if (avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
			return -1; // Could not open codec
		}

		// Allocate video frame
		pFrame = av_frame_alloc();

		// Make a screen to put our video
		// Create the screen
#ifndef __DARWIN__
		screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0);
#else
		screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0);
#endif
		if (!screen) {
			fprintf(stderr, "SDL: could not set video mode - exiting\n");
			exit(1);
		}

		// Allocate a place to put our YUV image on that screen
		// Draw the image onto the screen
		bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height, SDL_YV12_OVERLAY, screen);

		// initialize SWS context for software scaling
		sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BILINEAR, NULL, NULL, NULL );

		while (av_read_frame(pFormatCtx, &packet) >= 0) { // main loop
			// In each iteration of our main loop, we run the Myo event loop for a set number of milliseconds.
			// Controls how often Myo data is polled.
			// If this value is lowered, receiving video is delayed too, so factor in the desired fps.
			hub.run(1000 / 500);

			// After processing events, we call the print() member function we defined above to print out the values we've
			// obtained from any events that have occurred.
			// Myo status monitoring
			collector.print();
			// End of the Myo polling section

			// Is this a packet from the video stream?
			if (packet.stream_index == videoStream) {
				// Decode video frame
				avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

				// Did we get a video frame?
				// Convert the video frame into a bitmap image
				if (frameFinished) {
					SDL_LockYUVOverlay(bmp);

					// SDL's YV12 stores V before U, hence the swapped indices.
					AVPicture pict;
					pict.data[0] = bmp->pixels[0];
					pict.data[1] = bmp->pixels[2];
					pict.data[2] = bmp->pixels[1];
					pict.linesize[0] = bmp->pitches[0];
					pict.linesize[1] = bmp->pitches[2];
					pict.linesize[2] = bmp->pitches[1];

					// Convert the image into YUV format that SDL uses
					sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pict.data, pict.linesize);

					SDL_UnlockYUVOverlay(bmp);

					// Adjust the frame rectangle to zoom in/out in software
					rect.x = -rect_w/2;
					rect.y = -rect_h/2;
					rect.w = pCodecCtx->width + rect_w;
					rect.h = pCodecCtx->height + rect_h;
					SDL_DisplayYUVOverlay(bmp, &rect);
				}
			}

			// Free the packet that was allocated by av_read_frame
			av_free_packet(&packet);
			SDL_PollEvent(&event);

			//// Check Myo gestures and send control messages
			//// Left/right camera control
			if (collector.currentPose == myo::Pose::waveOut) {
				SendData(videoSocket.ClientSocket, "right", videoSocket.ToServer);
				rest = true;
			}
			if (collector.currentPose == myo::Pose::waveIn) {
				SendData(videoSocket.ClientSocket, "left", videoSocket.ToServer);
				rest = true;
			}
			// Up/down camera control
			if (collector.currentPose == myo::Pose::fingersSpread && collector.pitch_w > 10) {
				SendData(videoSocket.ClientSocket, "up", videoSocket.ToServer);
				rest = true;
			}
			if (collector.currentPose == myo::Pose::fingersSpread && collector.pitch_w < 6) {
				SendData(videoSocket.ClientSocket, "down", videoSocket.ToServer);
				rest = true;
			}
			if (collector.currentPose == myo::Pose::rest &&rest == true) {
				SendData(videoSocket.ClientSocket, "stop", videoSocket.ToServer);
				rest = false;
			}
			if (collector.currentPose == myo::Pose::doubleTap && collector.roll_w <= 5) {
				collector.currentPose = myo::Pose::rest;
				rest = true;
				myo->lock();
			}
			if (collector.currentPose == myo::Pose::doubleTap && collector.roll_w > 5) {
				rest = true;
				myo->unlock(myo::Myo::unlockHold);
			}
			// Check Myo gestures for zoom in / zoom out
			if (collector.currentPose == myo::Pose::fist && collector.roll_w < 6) {
				ZoomOut(rect_w, rect_h, 0);
			}
			if (collector.currentPose == myo::Pose::fist && collector.roll_w > 8) {
				ZoomIn(rect_w, rect_h, 300);
			}

			// Handle keyboard events
			switch (event.type) {
			case SDL_QUIT:
				SDL_Quit();
				exit(0);
				break;
			case SDL_KEYDOWN:
				/* Check the SDLKey values and move change the coords */
				switch (event.key.keysym.sym){
				case SDLK_LEFT:
					// send command string
					SendData(videoSocket.ClientSocket, "left", videoSocket.ToServer);
					break;
				case SDLK_RIGHT:
					// send command string
					SendData(videoSocket.ClientSocket, "right", videoSocket.ToServer);
					break;
				case SDLK_UP:
					SendData(videoSocket.ClientSocket, "up", videoSocket.ToServer);
					break;
				case SDLK_DOWN:
					SendData(videoSocket.ClientSocket, "down", videoSocket.ToServer);
					break;
				case SDLK_q:
					// zoom in
					ZoomIn(rect_w,rect_h,300);
					break;
				case SDLK_w:
					// zoom out
					ZoomOut(rect_w, rect_h, 0);
					break;
				case SDLK_s:
					// motor stop
					SendData(videoSocket.ClientSocket, "stop", videoSocket.ToServer);
					break;
				case SDLK_x:
					// quit the program
					SDL_Quit();
					exit(0);
					break;
				default:
					break;
				}
			default:
				break;
			}
		}

		// Free the YUV frame
		av_frame_free(&pFrame);

		// Close the codecs
		avcodec_close(pCodecCtx);
		avcodec_close(pCodecCtxOrig);

		// Close the video file
		avformat_close_input(&pFormatCtx);

		// Close the socket
		closesocket(videoSocket.ClientSocket);
		WSACleanup();

		return 0;
	}
	// Personally I don't like doing exception handling this way...
	// but the example did it this way, so it's left as-is for now.
	catch (const std::exception& e) {
		std::cerr << "Error: " << e.what() << std::endl;
		std::cerr << "Press enter to continue.";
		std::cin.ignore();
		return 1;
	}
}
int main(int argc ,char **argv) { av_register_all(); AVFormatContext *pFormatCtx = NULL; AVInputFormat *file_iformat = NULL; //avio_set_interrupt_cb(decode_interrupt_cb); //Open video file printf("open video file:%s\n", argv[1]); if(avformat_open_input(&pFormatCtx, argv[1], file_iformat, NULL) < 0) { printf("canot open input file: %s\n", argv[1]); return -1; //Cannot open file } printf("open input file: %s OK\n", argv[1]); //Retrieve stream information if(av_find_stream_info(pFormatCtx) < 0) return -1;//cannot find stream infomation //Dump information about file no to standard error av_dump_format(pFormatCtx, 0, argv[1], 0); int i; int videoStream; int audioStream; videoStream = -1; audioStream = -1; AVCodecContext *vCodecCtx; AVCodecContext *aCodecCtx; //Find the first video stream for(i = 0; i < pFormatCtx->nb_streams; i++) { if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO && videoStream < 0) { videoStream = i; } if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO && audioStream < 0) { audioStream = i; } } if(videoStream == -1) { printf("no video stream\n"); return -1;//Did not find a video stream } if(audioStream == -1) { printf("no audio stream\n"); return -1;//Did not find a audio stream } printf("find video strean: %d\n", videoStream); printf("find audio strean: %d\n", audioStream); //Get a pointer to the codec context for the video stream vCodecCtx = pFormatCtx->streams[videoStream]->codec; AVCodec *vCodec; vCodec = avcodec_find_decoder(vCodecCtx->codec_id); if(vCodec == NULL) { fprintf(stderr, "Unsupported video codec\n"); return -1;//codec not find } //Open video codec if(avcodec_open(vCodecCtx, vCodec) < 0) { fprintf(stderr, "open video codec error\n"); return -1;//Could not open codec } //Get a pointer to the codec context for the audio stream aCodecCtx = pFormatCtx->streams[audioStream]->codec; static SDL_AudioSpec wanted_spec, spec; wanted_spec.freq = aCodecCtx->sample_rate; wanted_spec.format = AUDIO_S16SYS; 
wanted_spec.channels = aCodecCtx->channels; wanted_spec.silence = 0; wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE; wanted_spec.callback = audio_callback; wanted_spec.userdata = aCodecCtx; if(SDL_OpenAudio(&wanted_spec, &spec) < 0) { fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError()); return -1; } AVCodec *aCodec; aCodec = avcodec_find_decoder(aCodecCtx->codec_id); if(aCodec == NULL) { fprintf(stderr, "Unsupport audio codec\n"); return -1;//codec not found } if(avcodec_open(aCodecCtx, aCodec) < 0) { fprintf(stderr, "open avcodec error\n"); return -1; } packet_queue_init(&audioq); SDL_PauseAudio(0); AVFrame *pFrame; //Allocate video frame pFrame = avcodec_alloc_frame(); AVFrame *pFrameRGB; //Allocate an AVFrame structure pFrameRGB = avcodec_alloc_frame(); if(pFrameRGB == NULL) return -1; uint8_t *buffer; int numBytes; //Detemine required buffer size and allocate buffer numBytes = avpicture_get_size(PIX_FMT_RGB24, vCodecCtx->width, vCodecCtx->height); buffer = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t)); //Assign appropriate parts of buffer to image planes in pFrameRGB //Note that pFrameRGB is an AVFrame, but AVFrame is a superset //of AVPicture avpicture_fill((AVPicture*)pFrameRGB, buffer, PIX_FMT_RGB24, vCodecCtx->width, vCodecCtx->height); if((SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER))) { fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError()); exit(1); } SDL_Surface *screen; screen = SDL_SetVideoMode(vCodecCtx->width, vCodecCtx->height, 0, 0); if(!screen) { fprintf(stderr, "SDL: could not set video mode\n"); exit(1); } SDL_Overlay *bmp; bmp = SDL_CreateYUVOverlay(vCodecCtx->width, vCodecCtx->height, SDL_YV12_OVERLAY, screen); int frameFinished; AVPacket packet; SDL_Rect rect; i = 0; while(av_read_frame(pFormatCtx, &packet) >=0) { //is this a packet from video stream? if(packet.stream_index == videoStream) { //Decoder video frame avcodec_decode_video2(vCodecCtx, pFrame, &frameFinished, &packet); //Did we got a video frame? 
if(frameFinished) { usleep(40 * 1000); SDL_LockYUVOverlay(bmp); AVPicture pict; pict.data[0] = bmp->pixels[0]; pict.data[1] = bmp->pixels[2]; pict.data[2] = bmp->pixels[1]; pict.linesize[0] = bmp->pitches[0]; pict.linesize[1] = bmp->pitches[2]; pict.linesize[2] = bmp->pitches[1]; //Convert the image into YUV format that SDL uses static struct SwsContext *img_convert_ctx; img_convert_ctx = sws_getCachedContext(img_convert_ctx, vCodecCtx->width, vCodecCtx->height, vCodecCtx->pix_fmt, vCodecCtx->width, vCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL); sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 0, pFrame->height, pict.data, pict.linesize); SDL_UnlockYUVOverlay(bmp); rect.x = 0; rect.y = 0; rect.w = vCodecCtx->width; rect.h = vCodecCtx->height; SDL_DisplayYUVOverlay(bmp, &rect); } //Free the packet that was allocated by av_read_frame av_free_packet(&packet); SDL_Event event; SDL_PollEvent(&event); switch(event.type) { case SDL_QUIT: quit = 1; SDL_Quit(); exit(0); break; defalut: break; } } else if(packet.stream_index == audioStream) { packet_queue_put(&audioq, &packet); } else { av_free_packet(&packet); } } //Free the RGB image av_free(buffer); av_free(pFrameRGB); //Free the YUV freame av_free(pFrame); //Close the codec avcodec_close(vCodecCtx); //Close the video file avformat_close_input(&pFormatCtx); }
/*
 * video_init - (re)create the SDL display surface and a YUY2 overlay for it.
 *
 * data:    opaque pointer, actually a struct ALL_DATA* carrying the GLOBAL config.
 * pscreen: in/out; if *pscreen is NULL this is treated as the first call and
 *          one-time SDL initialization (driver probe, env tuning, key repeat)
 *          is performed. On return *pscreen points at the new video surface.
 *
 * Returns the newly created SDL_Overlay, or NULL if SDL_SetVideoMode failed.
 * NOTE(review): SDL_CreateYUVOverlay itself can also return NULL; callers must
 * check the result. `info` and `SDL_VIDEO_Flags` are file-scope globals not
 * visible in this chunk — presumably shared with the render loop; confirm.
 */
static SDL_Overlay * video_init( void *data, SDL_Surface **pscreen )
{
    struct ALL_DATA *all_data = (struct ALL_DATA *) data;
    struct GLOBAL *global = all_data->global;
    int width = global->width;    /* requested capture width  */
    int height = global->height;  /* requested capture height */
    if (*pscreen == NULL) //init SDL
    {
        char driver[128];
        /*----------------------------- Test SDL capabilities ---------------------*/
        if (SDL_Init(SDL_INIT_VIDEO|SDL_INIT_TIMER) < 0)
        {
            g_printerr("Couldn't initialize SDL: %s\n", SDL_GetError());
            exit(1);
        }
        /* For this version, we will use hardware acceleration as default*/
        /* Only set the YUV env knobs if the user has not already set them. */
        if(global->hwaccel)
        {
            // set global environmental variables if hw accel available
            if ( ! getenv("SDL_VIDEO_YUV_HWACCEL") )
                putenv("SDL_VIDEO_YUV_HWACCEL=1");
            if ( ! getenv("SDL_VIDEO_YUV_DIRECT") )
                putenv("SDL_VIDEO_YUV_DIRECT=1");
        }
        else
        {
            if ( ! getenv("SDL_VIDEO_YUV_HWACCEL") )
                putenv("SDL_VIDEO_YUV_HWACCEL=0");
            if ( ! getenv("SDL_VIDEO_YUV_DIRECT") )
                putenv("SDL_VIDEO_YUV_DIRECT=0");
        }
        // print the name of the video driver if debugging
        if (SDL_VideoDriverName(driver, sizeof(driver)) && global->debug)
        {
            g_print("Video driver: %s\n", driver);
        }
        info = SDL_GetVideoInfo(); // get camera info
        if (info->wm_available && global->debug) g_print("A window manager is available\n");
        /* Pick surface flags according to what the driver can accelerate. */
        if (info->hw_available)
        {
            if (global->debug)
                g_print("Hardware surfaces are available (%dK video memory)\n", info->video_mem);
            SDL_VIDEO_Flags |= SDL_HWSURFACE;
            SDL_VIDEO_Flags |= SDL_DOUBLEBUF;
        }
        else
        {
            SDL_VIDEO_Flags |= SDL_SWSURFACE;
        }
        if (info->blit_hw)
        {
            if (global->debug) g_print("Copy blits between hardware surfaces are accelerated\n");
            SDL_VIDEO_Flags |= SDL_ASYNCBLIT;
        }
        if(!global->desktop_w) global->desktop_w = info->current_w; //get desktop width
        if(!global->desktop_h) global->desktop_h = info->current_h; //get desktop height
        if (global->debug)
        {
            /* Informational dump of the remaining blit capabilities. */
            if (info->blit_hw_CC)
                g_print ("Colorkey blits between hardware surfaces are accelerated\n");
            if (info->blit_hw_A)
                g_print("Alpha blits between hardware surfaces are accelerated\n");
            if (info->blit_sw)
                g_print ("Copy blits from software surfaces to hardware surfaces are accelerated\n");
            if (info->blit_sw_CC)
                g_print ("Colorkey blits from software surfaces to hardware surfaces are accelerated\n");
            if (info->blit_sw_A)
                g_print("Alpha blits from software surfaces to hardware surfaces are accelerated\n");
            if (info->blit_fill)
                g_print("Color fills on hardware surfaces are accelerated\n");
        }
        SDL_WM_SetCaption(global->WVcaption, NULL);
        /* enable key repeat */
        SDL_EnableKeyRepeat(SDL_DEFAULT_REPEAT_DELAY,SDL_DEFAULT_REPEAT_INTERVAL);
    }
    /*------------------------------ SDL init video ---------------------*/
    if(global->debug)
        g_print("(Desktop resolution = %ix%i)\n", global->desktop_w, global->desktop_h);
    g_print("Checking video mode %ix%i@32bpp : ", width, height);
    int bpp = SDL_VideoModeOK( width, height, 32, SDL_VIDEO_Flags);
    if(!bpp)
    {
        g_print("Not available \n");
        /*resize video mode*/
        /* Fall back to the desktop size, or 800x600 when the request was
           smaller than the desktop but still refused by the driver. */
        if ((width > global->desktop_w) || (height > global->desktop_h))
        {
            width = global->desktop_w; /*use desktop video resolution*/
            height = global->desktop_h;
        }
        else
        {
            width = 800;
            height = 600;
        }
        g_print("Resizing to %ix%i\n", width, height);
    }
    else // success:
    {
        g_print("OK \n");
        if ((bpp != 32) && global->debug) g_print("recomended color depth = %i\n", bpp);
        global->bpp = bpp;
    }
    *pscreen = SDL_SetVideoMode( width, height, global->bpp, SDL_VIDEO_Flags);
    if(*pscreen == NULL)
    {
        return (NULL);
    }
    //use requested resolution for overlay even if not available as video mode
    SDL_Overlay* overlay=NULL;
    overlay = SDL_CreateYUVOverlay(global->width, global->height, SDL_YUY2_OVERLAY, *pscreen);
    SDL_ShowCursor(SDL_DISABLE);
    return (overlay);
}
int main(int argc, char *argv[]) { #ifndef EMBEDED_X210 //PC platform const SDL_VideoInfo *info; char driver[128]; SDL_Surface *pscreen; SDL_Overlay *overlay; SDL_Rect drect; SDL_Event sdlevent; SDL_Thread *mythread; SDL_mutex *affmutex; Uint32 currtime; Uint32 lasttime; #endif int status; unsigned char *p = NULL; int hwaccel = 0; const char *videodevice = NULL; const char *mode = NULL; int format = V4L2_PIX_FMT_MJPEG; int i; int grabmethod = 1; int width = 320; int height = 240; int fps = 15; unsigned char frmrate = 0; char *avifilename = NULL; int queryformats = 0; int querycontrols = 0; int readconfigfile = 0; char *separateur; char *sizestring = NULL; char *fpsstring = NULL; int enableRawStreamCapture = 0; int enableRawFrameCapture = 0; char * pRGBData=NULL; printf("luvcview version %s \n", version); for (i = 1; i < argc; i++) { /* skip bad arguments */ if (argv[i] == NULL || *argv[i] == 0 || *argv[i] != '-') { continue; } if (strcmp(argv[i], "-d") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -d, aborting.\n"); exit(1); } videodevice = strdup(argv[i + 1]); } if (strcmp(argv[i], "-g") == 0) { /* Ask for read instead default mmap */ grabmethod = 0; } if (strcmp(argv[i], "-w") == 0) { /* disable hw acceleration */ hwaccel = 1; } if (strcmp(argv[i], "-f") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -f, aborting.\n"); exit(1); } mode = strdup(argv[i + 1]); if (strncmp(mode, "yuv", 3) == 0) { format = V4L2_PIX_FMT_YUYV; } else if (strncmp(mode, "jpg", 3) == 0) { format = V4L2_PIX_FMT_MJPEG; } else { format = V4L2_PIX_FMT_MJPEG; } } if (strcmp(argv[i], "-s") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -s, aborting.\n"); exit(1); } sizestring = strdup(argv[i + 1]); width = strtoul(sizestring, &separateur, 10); if (*separateur != 'x') { printf("Error in size use -s widthxheight \n"); exit(1); } else { ++separateur; height = strtoul(separateur, &separateur, 10); if (*separateur != 0) printf("hmm.. 
dont like that!! trying this height \n"); printf(" size width: %d height: %d \n", width, height); } } if (strcmp(argv[i], "-i") == 0){ if (i + 1 >= argc) { printf("No parameter specified with -i, aborting. \n"); exit(1); } fpsstring = strdup(argv[i + 1]); fps = strtoul(fpsstring, &separateur, 10); printf(" interval: %d fps \n", fps); } if (strcmp(argv[i], "-S") == 0) { /* Enable raw stream capture from the start */ enableRawStreamCapture = 1; } if (strcmp(argv[i], "-c") == 0) { /* Enable raw frame capture for the first frame */ enableRawFrameCapture = 1; } if (strcmp(argv[i], "-C") == 0) { /* Enable raw frame stream capture from the start*/ enableRawFrameCapture = 2; } if (strcmp(argv[i], "-o") == 0) { /* set the avi filename */ if (i + 1 >= argc) { printf("No parameter specified with -o, aborting.\n"); exit(1); } avifilename = strdup(argv[i + 1]); } if (strcmp(argv[i], "-L") == 0) { /* query list of valid video formats */ queryformats = 1; } if (strcmp(argv[i], "-l") == 0) { /* query list of valid video formats */ querycontrols = 1; } if (strcmp(argv[i], "-r") == 0) { /* query list of valid video formats */ readconfigfile = 1; } if (strcmp(argv[i], "-h") == 0) { printf("usage: uvcview [-h -d -g -f -s -i -c -o -C -S -L -l -r] \n"); printf("-h print this message \n"); printf("-d /dev/videoX use videoX device\n"); printf("-g use read method for grab instead mmap \n"); printf("-w disable SDL hardware accel. 
\n"); printf("-f video format default jpg others options are yuv jpg \n"); printf("-i fps use specified frame interval \n"); printf("-s widthxheight use specified input size \n"); printf("-c enable raw frame capturing for the first frame\n"); printf("-C enable raw frame stream capturing from the start\n"); printf("-S enable raw stream capturing from the start\n"); printf("-o avifile create avifile, default video.avi\n"); printf("-L query valid video formats\n"); printf("-l query valid controls and settings\n"); printf("-r read and set control settings from luvcview.cfg\n"); exit(0); } } #ifndef EMBEDED_X210 //PC platform /************* Test SDL capabilities ************/ if (SDL_Init(SDL_INIT_VIDEO) < 0) { fprintf(stderr, "Couldn't initialize SDL: %s\n", SDL_GetError()); exit(1); } /* For this version, we'll be save and disable hardware acceleration */ if(hwaccel) { if ( ! getenv("SDL_VIDEO_YUV_HWACCEL") ) { putenv("SDL_VIDEO_YUV_HWACCEL=0"); } } if (SDL_VideoDriverName(driver, sizeof(driver))) { printf("Video driver: %s\n", driver); } info = SDL_GetVideoInfo(); if (info->wm_available) { printf("A window manager is available\n"); } if (info->hw_available) { printf("Hardware surfaces are available (%dK video memory)\n", info->video_mem); SDL_VIDEO_Flags |= SDL_HWSURFACE; } if (info->blit_hw) { printf("Copy blits between hardware surfaces are accelerated\n"); SDL_VIDEO_Flags |= SDL_ASYNCBLIT; } if (info->blit_hw_CC) { printf ("Colorkey blits between hardware surfaces are accelerated\n"); } if (info->blit_hw_A) { printf("Alpha blits between hardware surfaces are accelerated\n"); } if (info->blit_sw) { printf ("Copy blits from software surfaces to hardware surfaces are accelerated\n"); } if (info->blit_sw_CC) { printf ("Colorkey blits from software surfaces to hardware surfaces are accelerated\n"); } if (info->blit_sw_A) { printf ("Alpha blits from software surfaces to hardware surfaces are accelerated\n"); } if (info->blit_fill) { printf("Color fills on hardware 
surfaces are accelerated\n"); } if (!(SDL_VIDEO_Flags & SDL_HWSURFACE)) SDL_VIDEO_Flags |= SDL_SWSURFACE; #endif if (videodevice == NULL || *videodevice == 0) { videodevice = "/dev/video0"; } if (avifilename == NULL || *avifilename == 0) { avifilename = "video.avi"; } videoIn = (struct vdIn *) calloc(1, sizeof(struct vdIn)); if ( queryformats ) { /* if we're supposed to list the video formats, do that now and go out */ check_videoIn(videoIn,(char *) videodevice); free(videoIn); #ifndef EMBEDED_X210 SDL_Quit(); #endif exit(1); } if (init_videoIn(videoIn, (char *) videodevice, width, height, fps, format, grabmethod, avifilename) < 0) exit(1); /* if we're supposed to list the controls, do that now */ if ( querycontrols ) enum_controls(videoIn->fd); /* if we're supposed to read the control settings from a configfile, do that now */ if ( readconfigfile ) load_controls(videoIn->fd); #ifdef EMBEDED_X210 #ifdef SOFT_COLOR_CONVERT init_framebuffer(); #else x6410_init_Draw(videoIn->width,videoIn->height); #endif #else pscreen = SDL_SetVideoMode(videoIn->width, videoIn->height+30 , 0,SDL_VIDEO_Flags); overlay =SDL_CreateYUVOverlay(videoIn->width, videoIn->height+30 , SDL_YUY2_OVERLAY, pscreen); p = (unsigned char *) overlay->pixels[0]; drect.x = 0; drect.y = 0; drect.w =pscreen->w; drect.h = pscreen->h; #endif if (enableRawStreamCapture) { videoIn->captureFile = fopen("stream.raw", "wb"); if(videoIn->captureFile == NULL) { perror("Unable to open file for raw stream capturing"); } else { printf("Starting raw stream capturing to stream.raw ...\n"); } } if (enableRawFrameCapture) videoIn->rawFrameCapture = enableRawFrameCapture; initLut(); #ifndef EMBEDED_X210 SDL_WM_SetCaption(title_act[A_VIDEO].title, NULL); lasttime = SDL_GetTicks(); creatButt(videoIn->width, 32); SDL_LockYUVOverlay(overlay); memcpy(p + (videoIn->width * (videoIn->height) * 2), YUYVbutt, videoIn->width * 64); SDL_UnlockYUVOverlay(overlay); /* initialize thread data */ ptdata.ptscreen = &pscreen; 
ptdata.ptvideoIn = videoIn; ptdata.ptsdlevent = &sdlevent; ptdata.drect = &drect; affmutex = SDL_CreateMutex(); ptdata.affmutex = affmutex; mythread = SDL_CreateThread(eventThread, (void *) &ptdata); #endif pRGBData = (unsigned char *)malloc(videoIn->width*videoIn->width*4*sizeof(char)); if(pRGBData==NULL) { return ; } /* main big loop */ while (videoIn->signalquit) { #ifndef EMBEDED_X210 currtime = SDL_GetTicks(); if (currtime - lasttime > 0) { frmrate = 1000/(currtime - lasttime); } lasttime = currtime; #endif if (uvcGrab(videoIn) < 0) { printf("Error grabbing \n"); break; } /* if we're grabbing video, show the frame rate */ if (videoIn->toggleAvi) printf("\rframe rate: %d ",frmrate); #ifndef EMBEDED_X210 SDL_LockYUVOverlay(overlay); memcpy(p, videoIn->framebuffer, videoIn->width * (videoIn->height) * 2); SDL_UnlockYUVOverlay(overlay); SDL_DisplayYUVOverlay(overlay, &drect); #endif #ifdef EMBEDED_X210 #ifdef SOFT_COLOR_CONVERT // yuv to rgb565 ,and to frambuffer process_image(videoIn->framebuffer,fbp,videoIn->width,videoIn->height,vinfo,finfo); // convertYUYVtoRGB565(videoIn->framebuffer,pRGBData,videoIn->width,videoIn->height); // Pyuv422torgb24(videoIn->framebuffer, pRGBData, videoIn->width, videoIn->height); // memcpy(fbp,pRGBData,videoIn->width*videoIn->height*2); #else //X6410 post processor convert yuv to rgb,X210 not suport now. 
/* memcpy(pInbuffer, videoIn->framebuffer, videoIn->width * (videoIn->height) * 2); ioctl(dev_fb0, GET_FB_INFO, &fb_info); pp_param.SrcFrmSt = ioctl(dev_pp, S3C_PP_GET_RESERVED_MEM_ADDR_PHY); //must be physical adress pp_param.DstFrmSt = fb_info.map_dma_f1; //must be physical adress ioctl(dev_pp, S3C_PP_SET_PARAMS, &pp_param); ioctl(dev_pp, S3C_PP_SET_DST_BUF_ADDR_PHY, &pp_param); ioctl(dev_pp, S3C_PP_SET_SRC_BUF_ADDR_PHY, &pp_param); ioctl(dev_pp, S3C_PP_START); */ #endif #endif if (videoIn->getPict) { switch(videoIn->formatIn){ case V4L2_PIX_FMT_MJPEG: get_picture(videoIn->tmpbuffer,videoIn->buf.bytesused); break; case V4L2_PIX_FMT_YUYV: get_pictureYV2(videoIn->framebuffer,videoIn->width,videoIn->height); break; default: break; } videoIn->getPict = 0; printf("get picture !\n"); } #ifndef EMBEDED_X210 SDL_LockMutex(affmutex); ptdata.frmrate = frmrate; SDL_WM_SetCaption(videoIn->status, NULL); SDL_UnlockMutex(affmutex); #endif #ifdef EMBEDED_X210 usleep(10); #else SDL_Delay(10); #endif } #ifndef EMBEDED_X210 SDL_WaitThread(mythread, &status); SDL_DestroyMutex(affmutex); #endif /* if avifile is defined, we made a video: compute the exact fps and set it in the video */ if (videoIn->avifile != NULL) { float fps=(videoIn->framecount/(videoIn->recordtime/1000)); fprintf(stderr,"setting fps to %f\n",fps); AVI_set_video(videoIn->avifile, videoIn->width, videoIn->height, fps, "MJPG"); AVI_close(videoIn->avifile); } close_v4l2(videoIn); #ifdef EMBEDED_X210 #ifdef SOFT_COLOR_CONVERT close_frambuffer(); #else x6410_DeInit_Draw(); #endif #endif free(pRGBData); free(videoIn); destroyButt(); freeLut(); printf(" Clean Up done Quit \n"); #ifndef EMBEDED_X210 SDL_Quit(); #endif }
int main(int argc,char *argv[]) { av_register_all(); // Register all available file formats and codecs with the library if(SDL_Init(SDL_INIT_VIDEO|SDL_INIT_AUDIO|SDL_INIT_TIMER)) { fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError()); exit(1); } AVFormatContext *pFormatCtx = NULL; // Open video file if(avformat_open_input(&pFormatCtx, argv[1], NULL, NULL) != 0) { fprintf(stderr, "Couldn't open the file!\n"); return -1; } // Retrieve stream information if(avformat_find_stream_info(pFormatCtx, NULL) < 0) { fprintf(stderr, "Couldn't find stream information!\n"); return -1; // Couldn't find stream information } // Dump information about file onto standard error av_dump_format(pFormatCtx, 0, argv[1], 0); int i; AVCodecContext *pCodecCtxOrig = NULL; AVCodecContext *pCodecCtx = NULL; // Find the first video stream int videoStream = -1; for(i=0; i < pFormatCtx->nb_streams; i++) if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) { videoStream = i; break; } if(videoStream == -1) { fprintf(stderr, "Didn't find a video stream!\n"); return -1; // Didn't find a video stream } // Get a pointer to the codec context for the video stream pCodecCtxOrig = pFormatCtx->streams[videoStream]->codec; AVCodec *pCodec = NULL; // Find the decoder for the video stream pCodec = avcodec_find_decoder(pCodecCtxOrig->codec_id); if(pCodec == NULL) { fprintf(stderr, "Unsupported codec!\n"); return -1; } // Copy context pCodecCtx = avcodec_alloc_context3(pCodec); if(avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0) { fprintf(stderr, "Couldn't copy codec context"); return -1; // Error copying codec context } // Open codec if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) { fprintf(stderr, "Couldn't open codec!\n"); return -1; // Could not open codec } AVFrame *pFrame = NULL; AVFrame *pFrameRGB = NULL; // Allocate video frame pFrame = av_frame_alloc(); if(pFrame == NULL) { fprintf(stderr, "Couldn't allocate pFrame!\n"); return -1; } // Allocate an AVFrame structure 
pFrameRGB = av_frame_alloc(); if(pFrameRGB == NULL) { fprintf(stderr, "Couldn't allocate pFrameRGB!\n"); return -1; } uint8_t *buffer = NULL; int numBytes; // Determine required buffer size and allocate buffer numBytes = avpicture_get_size(PIX_FMT_RGB24,pCodecCtx->width, pCodecCtx->height); buffer = (uint8_t *)av_malloc(numBytes*sizeof(uint8_t)); // Assign appropriate parts of buffer to image planes in pFrameRGB // Note that pFrameRGB is an AVFrame, but AVFrame is a superset // of AVPicture avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height); int frameFinished; AVPacket packet; SDL_Surface *screen; #ifndef __DARWIN__ screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0); #else screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0); #endif if(!screen) { fprintf(stderr, "SDL: could not set video mode - exiting\n"); exit(1); } SDL_Overlay *bmp = NULL; struct SwsContext *sws_ctx = NULL; bmp = SDL_CreateYUVOverlay(pCodecCtx->width,pCodecCtx->height, SDL_YV12_OVERLAY, screen); // Initialize SWS context for softscaling sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, SWS_BILINEAR, NULL, NULL, NULL ); i = 0; while(av_read_frame(pFormatCtx, &packet) >= 0) { // Is this a packet from the video stream? if(packet.stream_index == videoStream) { // Decode video frame avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); SDL_Rect rect; // Did we get a video frame? 
if(frameFinished) { SDL_LockYUVOverlay(bmp); AVPicture pict; pict.data[0] = bmp->pixels[0]; pict.data[1] = bmp->pixels[2]; pict.data[2] = bmp->pixels[1]; pict.linesize[0] = bmp->pitches[0]; pict.linesize[1] = bmp->pitches[2]; pict.linesize[2] = bmp->pitches[1]; // Convert the image into YUV format that SDL uses sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pict.data, pict.linesize); SDL_UnlockYUVOverlay(bmp); rect.x = 0; rect.y = 0; rect.w = pCodecCtx->width; rect.h = pCodecCtx->height; SDL_DisplayYUVOverlay(bmp, &rect); } } SDL_Event event; // Free the packet that was allocated by av_read_frame av_free_packet(&packet); SDL_PollEvent(&event); switch(event.type) { case SDL_QUIT: SDL_Quit(); exit(0); break; default: break; } } // Free the RGB image av_free(buffer); av_free(pFrameRGB); // Free the YUV frame av_free(pFrame); // Close the codecs avcodec_close(pCodecCtx); avcodec_close(pCodecCtxOrig); // Close the video file avformat_close_input(&pFormatCtx); return 0; }
int main(int argc, char* argv[]) { AVFormatContext *pFormatCtx; int i, videoindex; AVCodecContext *pCodecCtx; AVCodec *pCodec; AVDictionary *optionsDict = NULL; char filepath[] = "file.mp4"; av_register_all();//注册组件 avformat_network_init();//支持网络流 pFormatCtx = avformat_alloc_context();//初始化AVFormatContext if(avformat_open_input(&pFormatCtx,filepath,NULL,NULL)!=0){//打开文件 printf("无法打开文件\n"); return -1; } if(av_find_stream_info(pFormatCtx)<0)//查找流信息 { printf("Couldn't find stream information.\n"); return -1; } videoindex=-1; for(i=0; i<pFormatCtx->nb_streams; i++) //获取视频流ID if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) { videoindex=i; break; } if(videoindex==-1) { printf("Didn't find a video stream.\n"); return -1; } pCodecCtx=pFormatCtx->streams[videoindex]->codec; pCodec=avcodec_find_decoder(pCodecCtx->codec_id);//查找解码器 if(pCodec==NULL) { printf("Codec not found.\n"); return -1; } if(avcodec_open2(pCodecCtx, pCodec, &optionsDict)<0)//打开解码器 { printf("Could not open codec.\n"); return -1; } AVFrame *pFrame,*pFrameYUV; pFrame=avcodec_alloc_frame();//存储解码后AVFrame pFrameYUV=avcodec_alloc_frame();//存储转换后AVFrame(为什么要转换?后文解释) uint8_t *out_buffer; out_buffer=(uint8_t*) malloc(avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));//分配AVFrame所需内存 avpicture_fill((AVPicture *)pFrameYUV, out_buffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);//填充AVFrame //------------SDL初始化-------- if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) { printf( "Could not initialize SDL - %s\n", SDL_GetError()); return -1; } SDL_Surface *screen; screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0); if(!screen) { printf("SDL: could not set video mode - exiting\n"); return -1; } SDL_Overlay *bmp; bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height,SDL_YV12_OVERLAY, screen); SDL_Rect rect; //----------------------------- int ret, got_picture; static struct SwsContext *img_convert_ctx; int y_size = pCodecCtx->width 
* pCodecCtx->height; AVPacket *packet=(AVPacket *)malloc(sizeof(AVPacket));//存储解码前数据包AVPacket av_new_packet(packet, y_size); //输出一下信息----------------------------- printf("文件信息-----------------------------------------\n"); av_dump_format(pFormatCtx,0,filepath,0); printf("-------------------------------------------------\n"); //------------------------------ while(av_read_frame(pFormatCtx, packet)>=0)//循环获取压缩数据包AVPacket { if(packet->stream_index==videoindex) { ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);//解码。输入为AVPacket,输出为AVFrame if(ret < 0) { printf("解码错误\n"); return -1; } if(got_picture) { //像素格式转换。pFrame转换为pFrameYUV。 img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL); sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize); sws_freeContext(img_convert_ctx); //------------SDL显示-------- SDL_LockYUVOverlay(bmp); bmp->pixels[0]=pFrameYUV->data[0]; bmp->pixels[2]=pFrameYUV->data[1]; bmp->pixels[1]=pFrameYUV->data[2]; bmp->pitches[0]=pFrameYUV->linesize[0]; bmp->pitches[2]=pFrameYUV->linesize[1]; bmp->pitches[1]=pFrameYUV->linesize[2]; SDL_UnlockYUVOverlay(bmp); rect.x = 0; rect.y = 0; rect.w = pCodecCtx->width; rect.h = pCodecCtx->height; SDL_DisplayYUVOverlay(bmp, &rect); //延时40ms SDL_Delay(40); //------------SDL----------- } } av_free_packet(packet); } free(out_buffer); av_free(pFrameYUV); avcodec_close(pCodecCtx); avformat_close_input(&pFormatCtx); return 0; }