Example no. 1
/**
 * Draw decoded Theora video data to the overlay.
 */
static int OutputTheora(OggData *data, SDL_Overlay *yuv_overlay, SDL_Rect *rect)
{
    int i;
    yuv_buffer yuv;
    int crop_offset;

    theora_decode_YUVout(&data->tstate, &yuv);

    if (SDL_MUSTLOCK(TheScreen)) {
        if (SDL_LockSurface(TheScreen) < 0) {
            return -1;
        }
    }

    if (SDL_LockYUVOverlay(yuv_overlay) < 0) {
        if (SDL_MUSTLOCK(TheScreen)) {
            SDL_UnlockSurface(TheScreen);
        }
        return -1;
    }

    /* Copy the Y plane, cropping to the encoded frame rectangle. */
    crop_offset = data->tinfo.offset_x + yuv.y_stride * data->tinfo.offset_y;
    for (i = 0; i < yuv_overlay->h; ++i) {
        memcpy(yuv_overlay->pixels[0] + yuv_overlay->pitches[0] * i,
               yuv.y + crop_offset + yuv.y_stride * i, yuv_overlay->w);
    }

    /* Copy the chroma planes. U and V are swapped because the overlay is
       YV12 (plane 1 is V, plane 2 is U), and both planes need the same
       crop offset. */
    crop_offset = (data->tinfo.offset_x / 2) + (yuv.uv_stride) *
                  (data->tinfo.offset_y / 2);
    for (i = 0; i < yuv_overlay->h / 2; ++i) {
        memcpy(yuv_overlay->pixels[1] + yuv_overlay->pitches[1] * i,
               yuv.v + crop_offset + yuv.uv_stride * i, yuv_overlay->w / 2);
        memcpy(yuv_overlay->pixels[2] + yuv_overlay->pitches[2] * i,
               yuv.u + crop_offset + yuv.uv_stride * i, yuv_overlay->w / 2);
    }

    if (SDL_MUSTLOCK(TheScreen)) {
        SDL_UnlockSurface(TheScreen);
    }
    SDL_UnlockYUVOverlay(yuv_overlay);

    SDL_DisplayYUVOverlay(yuv_overlay, rect);

    return 0;
}
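Both copy loops above walk the planes row by row because the overlay pitch
and the decoder stride may differ. A minimal sketch of that recurring
pattern as a reusable helper (hypothetical, not part of any project quoted
here; it assumes only the SDL 1.2 SDL_Overlay layout of per-plane pixels[]
and pitches[] arrays):

#include <string.h>
#include "SDL.h"

/* Copy one image plane into an overlay plane, honouring both strides. */
static void copy_plane(Uint8 *dst, int dst_pitch,
                       const Uint8 *src, int src_stride,
                       int width, int height)
{
    int row;
    for (row = 0; row < height; row++) {
        memcpy(dst + row * dst_pitch, src + row * src_stride, width);
    }
}

Note the plane order: an SDL_YV12_OVERLAY exposes Y in pixels[0], V in
pixels[1] and U in pixels[2], while SDL_IYUV_OVERLAY keeps the natural
Y, U, V order. That is why the Theora examples copy yuv.v into pixels[1].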
Example no. 2
/*
 * Display the image - copy the yuv buffers to the SDL overlay
 * height and width are set via set_image_size call
 */
void CSDLVideo::display_image (const uint8_t *y, const uint8_t *u, 
			       const uint8_t *v,
			       uint32_t yStride, uint32_t uvStride)
{
  unsigned int ix;
  uint8_t *to;
  const uint8_t *from;
  if (yStride == 0) yStride = m_image_w;
  if (uvStride == 0)  uvStride = m_image_w / 2;

  SDL_LockMutex(m_mutex);
  if (SDL_LockYUVOverlay(m_image)) {
    video_message(LOG_ERR, "Failed to lock image");
    SDL_UnlockMutex(m_mutex);
    return;
  } 

  // Must always copy the buffer to memory.  This creates 2 copies of this
  // data (probably a total of 6 - libsock -> rtp -> decoder -> our ring ->
  // sdl -> hardware)
#ifdef OLD_SURFACE
  if (m_fullscreen == 0 && m_video_scale == 4) {
    // when scaling to 200%, don't use the SDL stretch blit;
    // use a smoothing (averaging) blit instead.
    // We only do this on some platforms (e.g. Windows) - otherwise, let SDL do it.
#if 0
    // sorry - too many problems with this...
    FrameDoublerMmx(y, m_image->pixels[0], 
		    m_image_w, m_image_h);
    FrameDoublerMmx(v, m_image->pixels[1], 
		    m_image_w >> 1, m_image_h >> 1);
    FrameDoublerMmx(u, m_image->pixels[2], 
		    m_image_w >> 1, m_image_h >> 1);
#else
    FrameDoubler(y, m_image->pixels[0], 
		 m_image_w, m_image_h, m_image->pitches[0]);
    FrameDoubler(v, m_image->pixels[1], 
		 m_image_w >> 1, m_image_h >> 1, m_image->pitches[1]);
    FrameDoubler(u, m_image->pixels[2], 
		 m_image_w >> 1, m_image_h >> 1, m_image->pitches[2]);
#endif
  } else 
Example no. 3
static void video_write(void){
  int i;
  yuv_buffer yuv;
  int crop_offset;
  theora_decode_YUVout(&td,&yuv);

  /* Lock SDL_yuv_overlay */
  if ( SDL_MUSTLOCK(screen) ) {
    if ( SDL_LockSurface(screen) < 0 ) return;
  }
  if (SDL_LockYUVOverlay(yuv_overlay) < 0) {
    if ( SDL_MUSTLOCK(screen) ) SDL_UnlockSurface(screen);
    return;
  }

  /* let's draw the data (*yuv[3]) on a SDL screen (*screen) */
  /* deal with border stride */
  /* reverse u and v for SDL */
  /* and crop input properly, respecting the encoded frame rect */
  crop_offset=ti.offset_x+yuv.y_stride*ti.offset_y;
  for(i=0;i<yuv_overlay->h;i++)
    memcpy(yuv_overlay->pixels[0]+yuv_overlay->pitches[0]*i,
           yuv.y+crop_offset+yuv.y_stride*i,
           yuv_overlay->w);
  crop_offset=(ti.offset_x/2)+(yuv.uv_stride)*(ti.offset_y/2);
  for(i=0;i<yuv_overlay->h/2;i++){
    memcpy(yuv_overlay->pixels[1]+yuv_overlay->pitches[1]*i,
           yuv.v+crop_offset+yuv.uv_stride*i,
           yuv_overlay->w/2);
    memcpy(yuv_overlay->pixels[2]+yuv_overlay->pitches[2]*i,
           yuv.u+crop_offset+yuv.uv_stride*i,
           yuv_overlay->w/2);
  }

  /* Unlock SDL_yuv_overlay */
  if ( SDL_MUSTLOCK(screen) ) {
    SDL_UnlockSurface(screen);
  }
  SDL_UnlockYUVOverlay(yuv_overlay);


  /* Show, baby, show! */
  SDL_DisplayYUVOverlay(yuv_overlay, &rect);

}
Example no. 4
/**
 * Display a picture
 */
static void Display(vout_display_t *vd, picture_t *p_pic)
{
    vout_display_sys_t *sys = vd->sys;

    if (sys->overlay) {
        SDL_Rect disp;
        disp.x = sys->place.x;
        disp.y = sys->place.y;
        disp.w = sys->place.width;
        disp.h = sys->place.height;

        SDL_UnlockYUVOverlay(sys->overlay);
        SDL_DisplayYUVOverlay(sys->overlay, &disp);
        SDL_LockYUVOverlay(sys->overlay);
    } else {
        SDL_Flip(sys->display);
    }

    picture_Release(p_pic);
}
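SDL 1.2 overlays must not be locked while they are displayed, which is why
Display() above briefly unlocks an overlay that this plugin otherwise keeps
locked between frames (see Open() in Example no. 19). A generic sketch of
that bracket, under the same keep-it-locked invariant:

/* Display an overlay that the caller normally keeps locked. */
static void show_locked_overlay(SDL_Overlay *overlay, SDL_Rect *where)
{
    SDL_UnlockYUVOverlay(overlay);          /* release before the blit */
    SDL_DisplayYUVOverlay(overlay, where);
    SDL_LockYUVOverlay(overlay);            /* restore the invariant */
}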
Example no. 5
static void yuv1_render_graph(struct graphics_data *graphics)
{
  struct yuv_render_data *render_data = graphics->render_data;
  Uint32 mode = graphics->screen_mode;

  SDL_LockYUVOverlay(render_data->overlay);

  if(!mode)
  {
    render_graph32((Uint32 *)render_data->overlay->pixels[0],
     render_data->overlay->pitches[0], graphics, set_colors32[mode]);
  }
  else
  {
    render_graph32s((Uint32 *)render_data->overlay->pixels[0],
     render_data->overlay->pitches[0], graphics, set_colors32[mode]);
  }

  SDL_UnlockYUVOverlay(render_data->overlay);
}
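This renderer addresses the overlay's first plane as 32-bit words: in a
packed YUY2-style overlay one Uint32 holds two horizontal pixels (Y0 U Y1 V),
so a row is pitches[0]/4 words wide; render_graph32/render_graph32s and
set_colors32 are the project's own rasterizers. A minimal sketch of the same
addressing, assuming a packed overlay:

/* Fill a packed (YUY2-style) overlay with one repeated macropixel. */
static void fill_packed_overlay(SDL_Overlay *o, Uint32 macropixel)
{
    int x, y;
    for (y = 0; y < o->h; y++) {
        Uint32 *row = (Uint32 *)(o->pixels[0] + o->pitches[0] * y);
        for (x = 0; x < o->w / 2; x++) { /* two pixels per word */
            row[x] = macropixel;
        }
    }
}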
Example no. 6
void SDL_Display(int edge, int frame_width, int frame_height, unsigned char *Y, unsigned char *U, unsigned char *V){

#ifndef SDL_NO_DISPLAY	

	// Lock SDL_yuv_overlay 
	if ( SDL_MUSTLOCK(screen) ) {
		if ( SDL_LockSurface(screen) < 0 ) return;
	}
	if (SDL_LockYUVOverlay(yuv_overlay) < 0) return;

	if (frame_width != screen->w || frame_height != screen->h){
		// Recreate the video mode and the overlay at the new size.
		// Unlock and free the old overlay first so it isn't leaked,
		// and let SDL_SetVideoMode update screen->w/h itself.
		SDL_UnlockYUVOverlay(yuv_overlay);
		SDL_FreeYUVOverlay(yuv_overlay);
		screen = SDL_SetVideoMode(frame_width, frame_height, 24, SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL);
		if ( screen == NULL ) {
			printf("SDL: Couldn't set %dx%d: %s\n", frame_width, frame_height, SDL_GetError());
			exit(1);
		}
		rect.w = frame_width + 2 * edge;
		rect.h = frame_height;
		yuv_overlay = SDL_CreateYUVOverlay(frame_width + 2 * edge, frame_height, SDL_YV12_OVERLAY, screen);
		if (SDL_LockYUVOverlay(yuv_overlay) < 0) return;
	}

	// let's draw the data (*yuv[3]) on a SDL screen (*screen);
	// note: these flat memcpys assume the overlay pitches equal the padded width
	memcpy(yuv_overlay->pixels[0], Y, (frame_width + 2 * edge) * frame_height);
	memcpy(yuv_overlay->pixels[1], V, (frame_width + 2 * edge) * frame_height / 4);
	memcpy(yuv_overlay->pixels[2], U, (frame_width + 2 * edge) * frame_height / 4);

	// Unlock SDL_yuv_overlay 
	if ( SDL_MUSTLOCK(screen) ) {
		SDL_UnlockSurface(screen);
	}
	SDL_UnlockYUVOverlay(yuv_overlay);

	// Show, baby, show!
	SDL_DisplayYUVOverlay(yuv_overlay, &rect);

#endif
}
Example no. 7
/**
 * Close an SDL video output
 */
static void Close(vlc_object_t *object)
{
    vout_display_t *vd = (vout_display_t *)object;
    vout_display_sys_t *sys = vd->sys;

    if (sys->pool)
        picture_pool_Delete(sys->pool);

    if (sys->overlay) {
        /* The overlay is kept locked between frames (see Open() in
         * Example no. 19), so release it before freeing. */
        SDL_UnlockYUVOverlay(sys->overlay);
        SDL_FreeYUVOverlay(sys->overlay);
    }
    SDL_UnlockSurface (sys->display);
    SDL_FreeSurface(sys->display);

    vlc_mutex_lock(&sdl_lock);
    SDL_QuitSubSystem(SDL_INIT_VIDEO);
    vlc_mutex_unlock(&sdl_lock);

    free(sys);
}
Example no. 8
static void fill_overlay_at_pos(SDL_Overlay *lay, mblk_t *m, int x, int y, int w, int h){
	unsigned char *data=m->b_rptr;
	int i,j;
	int jlim,ilim;
	int off;
	unsigned char *dptr;
	
	ilim=MIN(x+w,lay->w);
	jlim=MIN(y+h,lay->h);
	SDL_LockYUVOverlay(lay);
	/* set Y (rows are addressed with lay->w, which assumes the overlay
	   pitch equals its width) */
	dptr=lay->pixels[0];
	for (j=y;j<jlim;j++){
		off=j*lay->w;
		for (i=x;i<ilim;i++){
			dptr[off + i]=*data;
			data++;
		}
	}
	/* set U and V (YV12 overlay: pixels[2] is the U plane, pixels[1] is V) */
	ilim=ilim/2;
	jlim=jlim/2;
	dptr=lay->pixels[2];
	for (j=y/2;j<jlim;j++){
		off=j*(lay->w/2);
		for (i=x/2;i<ilim;i++){
			dptr[off + i]=*data;
			data++;
		}
	}
	dptr=lay->pixels[1];
	for (j=y/2;j<jlim;j++){
		off=j*(lay->w/2);
		for (i=x/2;i<ilim;i++){
			dptr[off + i]=*data;
			data++;
		}
	}
	SDL_UnlockYUVOverlay(lay);
}
Example no. 9
bool sdl_render::render_one_frame(AVFrame* data, int pix_fmt)
{
	SDL_Rect rect;

	uint8_t** px = m_yuv->pixels;

	SDL_LockYUVOverlay(m_yuv);

	// Alias the overlay's plane pointers to the decoded frame instead of
	// copying. This only works while the AVFrame linesizes match the
	// overlay pitches; the safe alternative is a per-plane, row-by-row
	// memcpy (see the sketch below).
	m_yuv->pixels = data->data;
	
	SDL_UnlockYUVOverlay(m_yuv);
	rect.x = 0;
	rect.y = 0;
	rect.w = m_yuv->w;
	rect.h = m_yuv->h;
	SDL_DisplayYUVOverlay(m_yuv, &rect);
	m_yuv->pixels =	px;
	return true;
}
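A copy-based sketch of the same frame upload, which stays valid even when
the overlay pitches differ from the AVFrame linesizes. It assumes the same
m_yuv member, an AV_PIX_FMT_YUV420P frame and a YV12 overlay, and belongs
inside the Lock/Unlock pair:

// AVFrame keeps its planes in Y,U,V order; a YV12 overlay keeps Y,V,U,
// so the chroma planes swap on the overlay side (0->0, 1->2, 2->1).
int plane, row;
for (plane = 0; plane < 3; ++plane) {
    int dst = (plane == 0) ? 0 : 3 - plane;
    int rows = (plane == 0) ? m_yuv->h : m_yuv->h / 2;
    int bytes = (plane == 0) ? m_yuv->w : m_yuv->w / 2;
    for (row = 0; row < rows; ++row)
        memcpy(m_yuv->pixels[dst] + row * m_yuv->pitches[dst],
               data->data[plane] + row * data->linesize[plane], bytes);
}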
Example no. 10
void ConvertRGBtoYV12(SDL_Surface *s, SDL_Overlay *o, int monochrome, int luminance)
{
    int x,y;
    int yuv[3];
    Uint8 *p,*op[3];

    SDL_LockSurface(s);
    SDL_LockYUVOverlay(o);

    /* Black initialization */
    /*
    memset(o->pixels[0],0,o->pitches[0]*o->h);
    memset(o->pixels[1],128,o->pitches[1]*((o->h+1)/2));
    memset(o->pixels[2],128,o->pitches[2]*((o->h+1)/2));
    */

    /* Convert */
    for(y=0; y<s->h && y<o->h; y++)
    {
        p=((Uint8 *) s->pixels)+s->pitch*y;
        op[0]=o->pixels[0]+o->pitches[0]*y;
        op[1]=o->pixels[1]+o->pitches[1]*(y/2);
        op[2]=o->pixels[2]+o->pitches[2]*(y/2);
        for(x=0; x<s->w && x<o->w; x++)
        {
            RGBtoYUV(p, yuv, monochrome, luminance);
            *(op[0]++)=yuv[0];
            if(x%2==0 && y%2==0)
            {
                *(op[1]++)=yuv[2];
                *(op[2]++)=yuv[1];
            }
            p+=s->format->BytesPerPixel;
        }
    }

    SDL_UnlockYUVOverlay(o);
    SDL_UnlockSurface(s);
}
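RGBtoYUV() is defined elsewhere in the same test program; a hypothetical
stand-in showing the usual BT.601 studio-swing integer approximation such a
helper computes (the monochrome/luminance options are omitted, and the R/G/B
byte order really depends on the surface's pixel format):

/* Convert one RGB pixel to Y'CbCr (BT.601, studio swing). */
static void rgb_to_yuv_sketch(const Uint8 *p, int yuv[3])
{
    int r = p[0], g = p[1], b = p[2];
    yuv[0] = (( 66 * r + 129 * g +  25 * b + 128) >> 8) + 16;  /* Y */
    yuv[1] = ((-38 * r -  74 * g + 112 * b + 128) >> 8) + 128; /* U */
    yuv[2] = ((112 * r -  94 * g -  18 * b + 128) >> 8) + 128; /* V */
}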
Example no. 11
int AVIWrapper::drawFrame(CImage* image)
{
    if (image == NULL) return -1;

    unsigned int i, j;
    uint32_t comp = image->GetFmt()->biCompression;

    unsigned char* buf = image->Data();
    SDL_LockYUVOverlay(screen_overlay);
    Uint8* dst_y = screen_overlay->pixels[0];
    Uint8* dst_u = screen_overlay->pixels[1];
    Uint8* dst_v = screen_overlay->pixels[2];

    if (comp == 0) { // BGR
        image->ToYUV();
        for (i = 0; i < width * height; i++)
            *dst_y++ = buf[i * 3];

        for (i = 0; i < height / 2; i++) {
            for (j = 0; j < width / 2; j++) {
                *dst_v++ = buf[(i * width * 2 + j * 2) * 3 + 1];
                *dst_u++ = buf[(i * width * 2 + j * 2) * 3 + 2];
            }
        }
    }
    else if (comp == IMG_FMT_YUY2) {
        memcpy(dst_y, buf, width * height * 2);
    }
    else if (comp == IMG_FMT_YV12) {
        memcpy(dst_y, buf, width * height + width * height / 2);
    }

    SDL_UnlockYUVOverlay(screen_overlay);
    SDL_DisplayYUVOverlay(screen_overlay, &screen_rect);

    return 0;
}
Example no. 12
static int queue_picture(FFMovie *movie, AVFrame *src_frame)
{
/*DECODE LOOP*/
    AVPicture pict;

    SDL_LockMutex(movie->dest_mutex);

    /* if the frame was not skipped, display it */

    if (movie->dest_overlay) {
        /* get a pointer on the bitmap */
        SDL_LockYUVOverlay(movie->dest_overlay);

        pict.data[0] = movie->dest_overlay->pixels[0];
        pict.data[1] = movie->dest_overlay->pixels[2];
        pict.data[2] = movie->dest_overlay->pixels[1];
        pict.linesize[0] = movie->dest_overlay->pitches[0];
        pict.linesize[1] = movie->dest_overlay->pitches[2];
        pict.linesize[2] = movie->dest_overlay->pitches[1];

/*
  The first fields of AVFrame match AVPicture, so it appears safe to
  cast here (at least as of ffmpeg-0.4.8; this is how ffplay does it).
  AVPicture is just a container for 4 pixel pointers and 4 strides.
*/
        img_convert(&pict, PIX_FMT_YUV420P,
                    (AVPicture *)src_frame, movie->video_st->codec.pix_fmt,
                    movie->video_st->codec.width, movie->video_st->codec.height);

        SDL_UnlockYUVOverlay(movie->dest_overlay);

        video_refresh_timer(movie);
    }
    SDL_UnlockMutex(movie->dest_mutex);

    return 0;
}
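img_convert() was removed from ffmpeg long ago; newer code performs the same
conversion through libswscale, as the later examples in this collection do.
A sketch of the equivalent call, assuming a context created once elsewhere
and stored in a hypothetical movie->sws_ctx field:

/* Hypothetical libswscale replacement for the img_convert() call above.
   movie->sws_ctx is assumed to have been created once with
   sws_getContext(..., PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL). */
sws_scale(movie->sws_ctx,
          (const uint8_t * const *)src_frame->data, src_frame->linesize,
          0, movie->video_st->codec.height,
          pict.data, pict.linesize);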
Example no. 13
static gboolean
gst_sdlvideosink_lock (GstSDLVideoSink * sdlvideosink)
{
  /* assure that we've got a screen */
  if (!sdlvideosink->screen || !sdlvideosink->overlay)
    goto no_setup;

  /* Lock SDL/yuv-overlay */
  if (SDL_MUSTLOCK (sdlvideosink->screen)) {
    if (SDL_LockSurface (sdlvideosink->screen) < 0)
      goto could_not_lock;
  }
  if (SDL_LockYUVOverlay (sdlvideosink->overlay) < 0)
    goto lock_yuv;

  return TRUE;

  /* ERRORS */
no_setup:
  {
    GST_ELEMENT_ERROR (sdlvideosink, LIBRARY, TOO_LAZY, (NULL),
        ("Tried to lock screen without being set-up"));
    return FALSE;
  }
could_not_lock:
  {
    GST_ELEMENT_ERROR (sdlvideosink, LIBRARY, TOO_LAZY, (NULL),
        ("SDL: couldn't lock the SDL video window: %s", SDL_GetError ()));
    return FALSE;
  }
lock_yuv:
  {
    GST_ELEMENT_ERROR (sdlvideosink, LIBRARY, TOO_LAZY, (NULL),
        ("SDL: couldn\'t lock the SDL YUV overlay: %s", SDL_GetError ()));
    return FALSE;
  }
}
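The sink presumably has a matching unlock path; a minimal sketch of it,
mirroring the lock order above (an assumption, not the verbatim GStreamer
function):

static void
gst_sdlvideosink_unlock (GstSDLVideoSink * sdlvideosink)
{
  /* release in the reverse order of gst_sdlvideosink_lock() */
  SDL_UnlockYUVOverlay (sdlvideosink->overlay);
  if (SDL_MUSTLOCK (sdlvideosink->screen))
    SDL_UnlockSurface (sdlvideosink->screen);
}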
Example no. 14
    void VideoFrame::showFrame(const vpx_image_t* image) {
        std::lock_guard<std::mutex> lock(sdl_lock);
        SDL_LockYUVOverlay(overlay);

        // Copy each plane through the overlay's own plane pointers and
        // pitches rather than walking one pointer across pixels[0]: the
        // planes are not guaranteed to be contiguous, and the pitch is
        // not guaranteed to equal the width. Plane order here assumes an
        // IYUV (Y, U, V) overlay; a YV12 overlay swaps pixels[1]/pixels[2].
        const unsigned char *inputY = image->planes[VPX_PLANE_Y];
        unsigned char *outputY = overlay->pixels[0];
        for (int iY = 0; iY < height; iY++) {
            memcpy(outputY, inputY, width);
            inputY += image->stride[VPX_PLANE_Y];
            outputY += overlay->pitches[0];
        }

        const unsigned char *inputU = image->planes[VPX_PLANE_U];
        unsigned char *outputU = overlay->pixels[1];
        for (int iU = 0; iU < height / 2; iU++) {
            memcpy(outputU, inputU, width / 2);
            inputU += image->stride[VPX_PLANE_U];
            outputU += overlay->pitches[1];
        }

        const unsigned char *inputV = image->planes[VPX_PLANE_V];
        unsigned char *outputV = overlay->pixels[2];
        for (int iV = 0; iV < height / 2; iV++) {
            memcpy(outputV, inputV, width / 2);
            inputV += image->stride[VPX_PLANE_V];
            outputV += overlay->pitches[2];
        }

        SDL_UnlockYUVOverlay(overlay);
        SDL_DisplayYUVOverlay(overlay, &screen_rect);
    }
Example no. 15
// The goal of the following function is to redraw the SDL display a few times
// per second, so that the image stays current during window movement or when
// another window momentarily covers the display while no new images arrive.
void
ConditionalTimeoutRedraw(chain_t* service)
{
  displaythread_info_t *info=NULL;
  float interval;
  info=(displaythread_info_t*)service->data;

  if (service->current_buffer->frame.size[0]!=-1) {
    info->redraw_current_time=times(&info->redraw_tms_buf);
    interval=fabs((float)(info->redraw_current_time-info->redraw_prev_time)/sysconf(_SC_CLK_TCK));
    if (interval>(1.0/service->camera->prefs.display_redraw_rate)) { // redraw e.g. 4 times per second
#ifdef HAVE_SDLLIB
      if (SDL_LockYUVOverlay(info->sdloverlay) == 0) {
	//MainWarning("Conditional display redraw");
	convert_to_yuv_for_SDL(&service->current_buffer->frame, info->sdloverlay, preferences.overlay_byte_order);
	SDLDisplayArea(service);
	SDL_UnlockYUVOverlay(info->sdloverlay);
	SDL_DisplayYUVOverlay(info->sdloverlay, &info->sdlvideorect);
      }
#endif
      info->redraw_prev_time=times(&info->redraw_tms_buf);
    }
  }
}
Example no. 16
int main(int argc, char *argv[])
{
#ifndef EMBEDED_X210  //PC platform
    const SDL_VideoInfo *info;
    char driver[128];
    SDL_Surface *pscreen;
    SDL_Overlay *overlay;
    SDL_Rect drect;
    SDL_Event sdlevent;
    SDL_Thread *mythread;
    SDL_mutex *affmutex;
    Uint32 currtime;
    Uint32 lasttime;
#endif
    int status;

    unsigned char *p = NULL;
    int hwaccel = 0;
    const char *videodevice = NULL;
    const char *mode = NULL;
    int format = V4L2_PIX_FMT_MJPEG;
    int i;
    int grabmethod = 1;
    int width = 320;
    int height = 240;
    int fps = 15;
    unsigned char frmrate = 0;
    char *avifilename = NULL;
    int queryformats = 0;
    int querycontrols = 0;
    int readconfigfile = 0;
    char *separateur;
    char *sizestring = NULL;
    char *fpsstring  = NULL;
    int enableRawStreamCapture = 0;
    int enableRawFrameCapture = 0;
    char * pRGBData=NULL;



    printf("luvcview version %s \n", version);
    for (i = 1; i < argc; i++)
    {
        /* skip bad arguments */
        if (argv[i] == NULL || *argv[i] == 0 || *argv[i] != '-') {
            continue;
        }
        if (strcmp(argv[i], "-d") == 0) {
            if (i + 1 >= argc) {
                printf("No parameter specified with -d, aborting.\n");
                exit(1);
            }
            videodevice = strdup(argv[i + 1]);
        }
        if (strcmp(argv[i], "-g") == 0) {
            /* Ask for read instead default  mmap */
            grabmethod = 0;
        }
        if (strcmp(argv[i], "-w") == 0) {
            /* disable hw acceleration */
            hwaccel = 1;
        }
        if (strcmp(argv[i], "-f") == 0) {
            if (i + 1 >= argc) {
                printf("No parameter specified with -f, aborting.\n");
                exit(1);
            }
            mode = strdup(argv[i + 1]);

            if (strncmp(mode, "yuv", 3) == 0) {
                format = V4L2_PIX_FMT_YUYV;

            } else if (strncmp(mode, "jpg", 3) == 0) {
                format = V4L2_PIX_FMT_MJPEG;

            } else {
                format = V4L2_PIX_FMT_MJPEG;

            }
        }
        if (strcmp(argv[i], "-s") == 0) {
            if (i + 1 >= argc) {
                printf("No parameter specified with -s, aborting.\n");
                exit(1);
            }

            sizestring = strdup(argv[i + 1]);

            width = strtoul(sizestring, &separateur, 10);
            if (*separateur != 'x') {
                printf("Error in size use -s widthxheight \n");
                exit(1);
            } else {
                ++separateur;
                height = strtoul(separateur, &separateur, 10);
                if (*separateur != 0)
                    printf("hmm.. dont like that!! trying this height \n");
                printf(" size width: %d height: %d \n", width, height);
            }
        }
        if (strcmp(argv[i], "-i") == 0){
            if (i + 1 >= argc) {
                printf("No parameter specified with -i, aborting. \n");
                exit(1);
            }
            fpsstring = strdup(argv[i + 1]);
            fps = strtoul(fpsstring, &separateur, 10);
            printf(" interval: %d fps \n", fps);
        }
        if (strcmp(argv[i], "-S") == 0) {
            /* Enable raw stream capture from the start */
            enableRawStreamCapture = 1;
        }
        if (strcmp(argv[i], "-c") == 0) {
            /* Enable raw frame capture for the first frame */
            enableRawFrameCapture = 1;
        }
        if (strcmp(argv[i], "-C") == 0) {
            /* Enable raw frame stream capture from the start*/
            enableRawFrameCapture = 2;
        }
        if (strcmp(argv[i], "-o") == 0) {
            /* set the avi filename */
            if (i + 1 >= argc) {
                printf("No parameter specified with -o, aborting.\n");
                exit(1);
            }
            avifilename = strdup(argv[i + 1]);
        }
        if (strcmp(argv[i], "-L") == 0) {
            /* query list of valid video formats */
            queryformats = 1;
        }
        if (strcmp(argv[i], "-l") == 0) {
            /* query list of valid video formats */
            querycontrols = 1;
        }

        if (strcmp(argv[i], "-r") == 0) {
            /* query list of valid video formats */
            readconfigfile = 1;
        }
        if (strcmp(argv[i], "-h") == 0) {
            printf("usage: uvcview [-h -d -g -f -s -i -c -o -C -S -L -l -r] \n");
            printf("-h	print this message \n");
            printf("-d	/dev/videoX       use videoX device\n");
            printf("-g	use read method for grab instead mmap \n");
            printf("-w	disable SDL hardware accel. \n");
            printf("-f	video format  default jpg  others options are yuv jpg \n");
            printf("-i	fps           use specified frame interval \n");
            printf("-s	widthxheight      use specified input size \n");
            printf("-c	enable raw frame capturing for the first frame\n");
            printf("-C	enable raw frame stream capturing from the start\n");
            printf("-S	enable raw stream capturing from the start\n");
            printf("-o	avifile  create avifile, default video.avi\n");
            printf("-L	query valid video formats\n");
            printf("-l	query valid controls and settings\n");
            printf("-r	read and set control settings from luvcview.cfg\n");
            exit(0);
        }
    }

#ifndef   EMBEDED_X210 //PC platform

    /************* Test SDL capabilities ************/
    if (SDL_Init(SDL_INIT_VIDEO) < 0) {
        fprintf(stderr, "Couldn't initialize SDL: %s\n", SDL_GetError());
        exit(1);
    }
    
    /* For this version, we'll play it safe and disable hardware acceleration */
    if(hwaccel)
    {
        if ( ! getenv("SDL_VIDEO_YUV_HWACCEL") )
        {
            putenv("SDL_VIDEO_YUV_HWACCEL=0");
        }
    }

    if (SDL_VideoDriverName(driver, sizeof(driver)))
    {
        printf("Video driver: %s\n", driver);
    }
    info = SDL_GetVideoInfo();

    if (info->wm_available) {
        printf("A window manager is available\n");
    }
    if (info->hw_available) {
        printf("Hardware surfaces are available (%dK video memory)\n",
               info->video_mem);
        SDL_VIDEO_Flags |= SDL_HWSURFACE;
    }
    if (info->blit_hw) {
        printf("Copy blits between hardware surfaces are accelerated\n");
        SDL_VIDEO_Flags |= SDL_ASYNCBLIT;
    }
    if (info->blit_hw_CC) {
        printf
                ("Colorkey blits between hardware surfaces are accelerated\n");
    }
    if (info->blit_hw_A) {
        printf("Alpha blits between hardware surfaces are accelerated\n");
    }
    if (info->blit_sw) {
        printf
                ("Copy blits from software surfaces to hardware surfaces are accelerated\n");
    }
    if (info->blit_sw_CC) {
        printf
                ("Colorkey blits from software surfaces to hardware surfaces are accelerated\n");
    }
    if (info->blit_sw_A) {
        printf
                ("Alpha blits from software surfaces to hardware surfaces are accelerated\n");
    }
    if (info->blit_fill) {
        printf("Color fills on hardware surfaces are accelerated\n");
    }



    if (!(SDL_VIDEO_Flags & SDL_HWSURFACE))
        SDL_VIDEO_Flags |= SDL_SWSURFACE;

#endif

    if (videodevice == NULL || *videodevice == 0) {
        videodevice = "/dev/video0";
    }

    if (avifilename == NULL || *avifilename == 0) {
        avifilename = "video.avi";
    }

    videoIn = (struct vdIn *) calloc(1, sizeof(struct vdIn));
    if ( queryformats ) {
        /* if we're supposed to list the video formats, do that now and go out */
        check_videoIn(videoIn,(char *) videodevice);
        free(videoIn);
#ifndef EMBEDED_X210
        SDL_Quit();
#endif
        exit(1);
    }

    if (init_videoIn(videoIn, (char *) videodevice, width, height, fps, format, grabmethod, avifilename) < 0)
        exit(1);
    /* if we're supposed to list the controls, do that now */
    if ( querycontrols )
        enum_controls(videoIn->fd);
    
    /* if we're supposed to read the control settings from a configfile, do that now */
    if ( readconfigfile )
        load_controls(videoIn->fd);


#ifdef EMBEDED_X210
#ifdef SOFT_COLOR_CONVERT
    init_framebuffer();
#else
    x6410_init_Draw(videoIn->width,videoIn->height);
#endif

#else
    pscreen = SDL_SetVideoMode(videoIn->width, videoIn->height+30, 0, SDL_VIDEO_Flags);
    overlay = SDL_CreateYUVOverlay(videoIn->width, videoIn->height+30, SDL_YUY2_OVERLAY, pscreen);
    p = (unsigned char *) overlay->pixels[0];

    drect.x = 0;
    drect.y = 0;
    drect.w =pscreen->w;
    drect.h = pscreen->h;

#endif

    if (enableRawStreamCapture)
    {
        videoIn->captureFile = fopen("stream.raw", "wb");
        if(videoIn->captureFile == NULL) {
            perror("Unable to open file for raw stream capturing");
        } else {
            printf("Starting raw stream capturing to stream.raw ...\n");
        }
    }
    if (enableRawFrameCapture)
        videoIn->rawFrameCapture = enableRawFrameCapture;

    initLut();

#ifndef EMBEDED_X210
    SDL_WM_SetCaption(title_act[A_VIDEO].title, NULL);
    lasttime = SDL_GetTicks();
    creatButt(videoIn->width, 32);
    SDL_LockYUVOverlay(overlay);
    memcpy(p + (videoIn->width * (videoIn->height) * 2), YUYVbutt,
           videoIn->width * 64);
    SDL_UnlockYUVOverlay(overlay);

    /* initialize thread data */
    ptdata.ptscreen = &pscreen;
    ptdata.ptvideoIn = videoIn;
    ptdata.ptsdlevent = &sdlevent;
    ptdata.drect = &drect;
    affmutex = SDL_CreateMutex();
    ptdata.affmutex = affmutex;
    mythread = SDL_CreateThread(eventThread, (void *) &ptdata);
#endif



    pRGBData = (unsigned char *)malloc(videoIn->width*videoIn->height*4*sizeof(char));
    if(pRGBData==NULL)
    {
        return 1;
    }
    /* main big loop */
    while (videoIn->signalquit)
    {
#ifndef EMBEDED_X210
        currtime = SDL_GetTicks();
        if (currtime - lasttime > 0) {
            frmrate = 1000/(currtime - lasttime);
        }
        lasttime = currtime;
#endif
        if (uvcGrab(videoIn) < 0) {
            printf("Error grabbing \n");
            break;
        }

        /* if we're grabbing video, show the frame rate */
        if (videoIn->toggleAvi)
            printf("\rframe rate: %d     ",frmrate);

#ifndef EMBEDED_X210
        SDL_LockYUVOverlay(overlay);
        memcpy(p, videoIn->framebuffer, videoIn->width * (videoIn->height) * 2);
        SDL_UnlockYUVOverlay(overlay);
        SDL_DisplayYUVOverlay(overlay, &drect);
#endif

#ifdef EMBEDED_X210
#ifdef SOFT_COLOR_CONVERT
        // yuv to rgb565 ,and to frambuffer
        process_image(videoIn->framebuffer,fbp,videoIn->width,videoIn->height,vinfo,finfo);
    
    //    convertYUYVtoRGB565(videoIn->framebuffer,pRGBData,videoIn->width,videoIn->height);

   //   Pyuv422torgb24(videoIn->framebuffer, pRGBData, videoIn->width, videoIn->height);
    //    memcpy(fbp,pRGBData,videoIn->width*videoIn->height*2);
     
     

#else   //X6410 post processor convert yuv to rgb,X210 not suport now.

        /*
        memcpy(pInbuffer, videoIn->framebuffer, videoIn->width * (videoIn->height) * 2);

        ioctl(dev_fb0, GET_FB_INFO, &fb_info);

        pp_param.SrcFrmSt = ioctl(dev_pp, S3C_PP_GET_RESERVED_MEM_ADDR_PHY); //must be  physical adress
        pp_param.DstFrmSt = fb_info.map_dma_f1; //must be physical adress

        ioctl(dev_pp, S3C_PP_SET_PARAMS, &pp_param);
        ioctl(dev_pp, S3C_PP_SET_DST_BUF_ADDR_PHY, &pp_param);
        ioctl(dev_pp, S3C_PP_SET_SRC_BUF_ADDR_PHY, &pp_param);
        ioctl(dev_pp, S3C_PP_START);
        */
#endif
#endif
        if (videoIn->getPict)
        {
            switch(videoIn->formatIn){
            case V4L2_PIX_FMT_MJPEG:
                get_picture(videoIn->tmpbuffer,videoIn->buf.bytesused);
                break;
            case V4L2_PIX_FMT_YUYV:
                get_pictureYV2(videoIn->framebuffer,videoIn->width,videoIn->height);
                break;
            default:
                break;
            }
            videoIn->getPict = 0;
            printf("get picture !\n");
        }

#ifndef EMBEDED_X210
        SDL_LockMutex(affmutex);
        ptdata.frmrate = frmrate;
        SDL_WM_SetCaption(videoIn->status, NULL);
        SDL_UnlockMutex(affmutex);
#endif

#ifdef  EMBEDED_X210
        usleep(10);
#else
        SDL_Delay(10);
#endif


    }
#ifndef EMBEDED_X210
    SDL_WaitThread(mythread, &status);
    SDL_DestroyMutex(affmutex);
#endif
    /* if avifile is defined, we made a video: compute the exact fps and
       set it in the video */
    if (videoIn->avifile != NULL) {
        float fps = (videoIn->framecount * 1000.0f) / videoIn->recordtime;
        fprintf(stderr,"setting fps to %f\n",fps);
        AVI_set_video(videoIn->avifile, videoIn->width, videoIn->height,
                      fps, "MJPG");
        AVI_close(videoIn->avifile);
    }

    close_v4l2(videoIn);

#ifdef EMBEDED_X210
#ifdef SOFT_COLOR_CONVERT
    close_frambuffer();
#else
    x6410_DeInit_Draw();
#endif

#endif
    free(pRGBData);
    free(videoIn);
    destroyButt();
    freeLut();
    printf(" Clean Up done Quit \n");
#ifndef EMBEDED_X210
    SDL_Quit();
#endif
}
Example no. 17
int main_loop(void *data)
{
	int ret=0;
	int i,j,k,l,m,n,o;
	unsigned int YUVMacroPix;
	unsigned char *pix8 = (unsigned char *)&YUVMacroPix;
	Pix *pix2;
	char *pix;
	if ((pix2= malloc(sizeof(Pix)))==NULL) {
		printf("couldn't allocate memory for: pix2\n");
		ret=1;
		return(ret);
	}
	//fprintf(stderr,"Thread started...\n");
	/*
		ImageSurf=SDL_CreateRGBSurface(SDL_SWSURFACE, overlay->w,
		   overlay->h, 24, 0x00ff0000,0x0000ff00,0x000000ff,0);
	*/
	while (videoIn->signalquit) {
	 currtime = SDL_GetTicks();
	  if (currtime - lasttime > 0) {
		frmrate = 1000/(currtime - lasttime);
	 }
	 lasttime = currtime;
	
	// sprintf(capt,"Frame Rate: %d",frmrate);
	// SDL_WM_SetCaption(capt, NULL);
	
	 if (uvcGrab(videoIn) < 0) {
	    printf("Error grabbing=> Frame Rate is %d\n",frmrate);
	    videoIn->signalquit=0;
		ret = 2;
	 }
	
	 SDL_LockYUVOverlay(overlay);
	 memcpy(p, videoIn->framebuffer,
	       videoIn->width * (videoIn->height) * 2);
	 SDL_UnlockYUVOverlay(overlay);
	 SDL_DisplayYUVOverlay(overlay, &drect);
	
	 /*capture Image*/
	 if (videoIn->capImage){

		if((pim= malloc((pscreen->w)*(pscreen->h)*3))==NULL){ /*24 bits -> 3 bytes; 32 bits -> 4 bytes*/
			printf("Couldn't allocate memory for: pim\n");
			videoIn->signalquit=0;
			ret = 3;
			break; /* don't fall through and write into a NULL buffer */
		}
		
		//char *ppmheader = "P6\n# Generated by guvcview\n320 240\n255\n";
		//FILE * out = fopen("Yimage.ppm", "wb"); //saving as ppm
		//fprintf(out, ppmheader);
		
		k=overlay->h;
		//printf("overlay->h is %i\n",overlay->h);
		//printf("and pitches[0] is %i\n",overlay->pitches[0]);
	
			
		for(j=0;j<(overlay->h);j++){

			l=j*overlay->pitches[0];/*must add lines already writen=*/
						/*pitches is the overlay number */
						/*off bytes in a line (2*width) */
						
			m=(k*3*overlay->pitches[0])>>1;/*must add lines already writen=   */
						      /*for this case (rgb) every pixel   */
						      /*as 3 bytes (3*width=3*pitches/2)  */
						      /* >>1 = (/2) divide by 2 (?faster?)*/
			for (i=0;i<((overlay->pitches[0])>>2);i++){ /*>>2 = (/4)*/
				/*iterate every 4 bytes (32 bits)*/
				/*Y-U-V-Y1 =>2 pixel (4 bytes)   */
				
				n=i<<2;/*<<2 = (*4) multiply by 4 (?faster?)*/					
				pix8[0] = p[n+l];
				pix8[1] = p[n+1+l];
				pix8[2] = p[n+2+l];
				pix8[3] = p[n+3+l];
			
				/*get RGB data*/
				pix2=yuv2rgb(YUVMacroPix,0,pix2);
			
				/*In BitMaps lines are upside down and*/
				/*pixel format is bgr                 */
				
				o=i*6;				
			
				/*first pixel*/
				pim[o+m]=pix2->b;
				pim[o+1+m]=pix2->g;
				pim[o+2+m]=pix2->r;
				/*second pixel*/
				pim[o+3+m]=pix2->b1;
				pim[o+4+m]=pix2->g1;
				pim[o+5+m]=pix2->r1;	
		  	}
			k--;
	    	}
       /* SDL_LockSurface(ImageSurf);	
		memcpy(pix, pim,(pscreen->w)*(pscreen->h)*3); //24 bits -> 3bytes 32 bits ->4 bytes
		SDL_UnlockSurface(ImageSurf);*/
	

	    if(SaveBPM(videoIn->ImageFName, width, height, 24, pim)) {
	      fprintf (stderr,"Error: Couldn't capture Image to %s \n",
			     videoIn->ImageFName);
	    } 
	    else {	  
          printf ("Capture Image to %s \n",videoIn->ImageFName);
        }
		free(pim);
	    videoIn->capImage=FALSE;	
	  }
	  
	  /*capture AVI */
	  if (videoIn->capAVI && videoIn->signalquit){
	   long framesize;		
	   switch (AVIFormat) {
		case 1:
	  	   framesize=(pscreen->w)*(pscreen->h)*2; /*YUY2 -> 2 bytes per pixel */
	           if (AVI_write_frame (AviOut,
			       p, framesize) < 0)
	                printf ("write error on avi out \n");
		   break;
		case 2:
		    framesize=(pscreen->w)*(pscreen->h)*3; /*DIB 24/32 -> 3/4 bytes per pixel*/ 
		    if((pim= malloc(framesize))==NULL){
				printf("Couldn't allocate memory for: pim\n");
				videoIn->signalquit=0;
				ret = 4;
				break; /* leaves the switch; don't write into a NULL buffer */
			}
		    k=overlay->h;
					
		for(j=0;j<(overlay->h);j++){

			l=j*overlay->pitches[0];/*must add lines already writen=*/
						/*pitches is the overlay number */
						/*off bytes in a line (2*width) */
						
			m=(k*3*overlay->pitches[0])>>1;/*must add lines already writen=   */
						      /*for this case (rgb) every pixel   */
						      /*as 3 bytes (3*width=3*pitches/2)  */
						      /* >>1 = (/2) divide by 2 (?faster?)*/
			for (i=0;i<((overlay->pitches[0])>>2);i++){ /*>>2 = (/4)*/
				/*iterate every 4 bytes (32 bits)*/
				/*Y-U-V-Y1 =>2 pixel (4 bytes)   */
				
				n=i<<2;/*<<2 = (*4) multiply by 4 (?faster?)*/					
				pix8[0] = p[n+l];
				pix8[1] = p[n+1+l];
				pix8[2] = p[n+2+l];
				pix8[3] = p[n+3+l];
			
				/*get RGB data*/
				pix2=yuv2rgb(YUVMacroPix,0,pix2);
			
				/*In BitMaps lines are upside down and*/
				/*pixel format is bgr                 */
				
				o=i*6;				
			
				/*first pixel*/
				pim[o+m]=pix2->b;
				pim[o+1+m]=pix2->g;
				pim[o+2+m]=pix2->r;
				/*second pixel*/
				pim[o+3+m]=pix2->b1;
				pim[o+4+m]=pix2->g1;
				pim[o+5+m]=pix2->r1;	
		  	}
			k--;
	    	}
		     
		     if (AVI_write_frame (AviOut,
			       pim, framesize) < 0)
	                printf ("write error on avi out \n");
		     free(pim);
		     break;


		} 
	   framecount++;	   
		  
	  } 
	  SDL_Delay(SDL_WAIT_TIME);
	
  }
  free(pix2);
  return (ret);
}
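The capture paths above unpack YUY2 data one macropixel at a time: four
bytes Y0 U Y1 V describe two horizontally adjacent pixels sharing a single
U/V pair, which is why every iteration emits six RGB bytes. A hypothetical
stand-in for the yuv2rgb() helper they call (integer BT.601, studio-swing
levels; the project's real Pix struct carries the two RGB triplets):

/* Expand one YUY2 macropixel m[4] = {Y0, U, Y1, V} into two RGB pixels. */
static void yuy2_to_rgb(const unsigned char m[4],
                        unsigned char rgb0[3], unsigned char rgb1[3])
{
    int c0 = m[0] - 16, c1 = m[2] - 16;   /* Y0, Y1 */
    int d = m[1] - 128, e = m[3] - 128;   /* U, V   */
    int v0[3] = { (298 * c0 + 409 * e + 128) >> 8,
                  (298 * c0 - 100 * d - 208 * e + 128) >> 8,
                  (298 * c0 + 516 * d + 128) >> 8 };
    int v1[3] = { (298 * c1 + 409 * e + 128) >> 8,
                  (298 * c1 - 100 * d - 208 * e + 128) >> 8,
                  (298 * c1 + 516 * d + 128) >> 8 };
    int i;
    for (i = 0; i < 3; i++) {
        rgb0[i] = v0[i] < 0 ? 0 : (v0[i] > 255 ? 255 : v0[i]);
        rgb1[i] = v1[i] < 0 ? 0 : (v1[i] > 255 ? 255 : v1[i]);
    }
}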
Example no. 18
/*****************************************************************************
 * NewPicture: allocate a picture
 *****************************************************************************
 * Returns 0 on success, -1 otherwise
 *****************************************************************************/
static int NewPicture( vout_thread_t *p_vout, picture_t *p_pic )
{
    int i_width  = p_vout->output.i_width;
    int i_height = p_vout->output.i_height;

    if( p_vout->p_sys->p_overlay == NULL )
    {
        /* RGB picture */
        if( p_vout->p_sys->i_surfaces )
        {
            /* We already allocated this surface, return */
            return VLC_EGENERIC;
        }

        p_pic->p_sys = malloc( sizeof( picture_sys_t ) );

        if( p_pic->p_sys == NULL )
        {
            return VLC_ENOMEM;
        }

        switch( p_vout->p_sys->p_display->format->BitsPerPixel )
        {
            case 8:
                p_pic->p->i_pixel_pitch = 1;
                break;
            case 15:
            case 16:
                p_pic->p->i_pixel_pitch = 2;
                break;
            case 24:
            case 32:
                p_pic->p->i_pixel_pitch = 4;
                break;
            default:
                return VLC_EGENERIC;
        }

        p_pic->p->p_pixels = p_vout->p_sys->p_display->pixels;
        p_pic->p->i_lines = p_vout->p_sys->p_display->h;
        p_pic->p->i_visible_lines = p_vout->p_sys->p_display->h;
        p_pic->p->i_pitch = p_vout->p_sys->p_display->pitch;
        p_pic->p->i_visible_pitch =
            p_pic->p->i_pixel_pitch * p_vout->p_sys->p_display->w;

        p_vout->p_sys->i_surfaces++;

        p_pic->i_planes = 1;
    }
    else
    {
        p_pic->p_sys = malloc( sizeof( picture_sys_t ) );

        if( p_pic->p_sys == NULL )
        {
            return VLC_ENOMEM;
        }

        p_pic->p_sys->p_overlay =
            SDL_CreateYUVOverlay( i_width, i_height,
                                  p_vout->output.i_chroma,
                                  p_vout->p_sys->p_display );

        if( p_pic->p_sys->p_overlay == NULL )
        {
            free( p_pic->p_sys );
            return VLC_EGENERIC;
        }

        SDL_LockYUVOverlay( p_pic->p_sys->p_overlay );

        p_pic->Y_PIXELS = p_pic->p_sys->p_overlay->pixels[0];
        p_pic->p[Y_PLANE].i_lines = p_pic->p_sys->p_overlay->h;
        p_pic->p[Y_PLANE].i_visible_lines = p_pic->p_sys->p_overlay->h;
        p_pic->p[Y_PLANE].i_pitch = p_pic->p_sys->p_overlay->pitches[0];

        switch( p_vout->output.i_chroma )
        {
        case SDL_YV12_OVERLAY:
            p_pic->p[Y_PLANE].i_pixel_pitch = 1;
            p_pic->p[Y_PLANE].i_visible_pitch = p_pic->p_sys->p_overlay->w;

            p_pic->U_PIXELS = p_pic->p_sys->p_overlay->pixels[2];
            p_pic->p[U_PLANE].i_lines = p_pic->p_sys->p_overlay->h / 2;
            p_pic->p[U_PLANE].i_visible_lines = p_pic->p_sys->p_overlay->h / 2;
            p_pic->p[U_PLANE].i_pitch = p_pic->p_sys->p_overlay->pitches[2];
            p_pic->p[U_PLANE].i_pixel_pitch = 1;
            p_pic->p[U_PLANE].i_visible_pitch = p_pic->p_sys->p_overlay->w / 2;

            p_pic->V_PIXELS = p_pic->p_sys->p_overlay->pixels[1];
            p_pic->p[V_PLANE].i_lines = p_pic->p_sys->p_overlay->h / 2;
            p_pic->p[V_PLANE].i_visible_lines = p_pic->p_sys->p_overlay->h / 2;
            p_pic->p[V_PLANE].i_pitch = p_pic->p_sys->p_overlay->pitches[1];
            p_pic->p[V_PLANE].i_pixel_pitch = 1;
            p_pic->p[V_PLANE].i_visible_pitch = p_pic->p_sys->p_overlay->w / 2;

            p_pic->i_planes = 3;
            break;

        case SDL_IYUV_OVERLAY:
            p_pic->p[Y_PLANE].i_pixel_pitch = 1;
            p_pic->p[Y_PLANE].i_visible_pitch = p_pic->p_sys->p_overlay->w;

            p_pic->U_PIXELS = p_pic->p_sys->p_overlay->pixels[1];
            p_pic->p[U_PLANE].i_lines = p_pic->p_sys->p_overlay->h / 2;
            p_pic->p[U_PLANE].i_visible_lines = p_pic->p_sys->p_overlay->h / 2;
            p_pic->p[U_PLANE].i_pitch = p_pic->p_sys->p_overlay->pitches[1];
            p_pic->p[U_PLANE].i_pixel_pitch = 1;
            p_pic->p[U_PLANE].i_visible_pitch = p_pic->p_sys->p_overlay->w / 2;

            p_pic->V_PIXELS = p_pic->p_sys->p_overlay->pixels[2];
            p_pic->p[V_PLANE].i_lines = p_pic->p_sys->p_overlay->h / 2;
            p_pic->p[V_PLANE].i_visible_lines = p_pic->p_sys->p_overlay->h / 2;
            p_pic->p[V_PLANE].i_pitch = p_pic->p_sys->p_overlay->pitches[2];
            p_pic->p[V_PLANE].i_pixel_pitch = 1;
            p_pic->p[V_PLANE].i_visible_pitch = p_pic->p_sys->p_overlay->w / 2;

            p_pic->i_planes = 3;
            break;

        default:
            /* packed YUV formats use a single plane */
            p_pic->p[Y_PLANE].i_pixel_pitch = 2;
            p_pic->p[Y_PLANE].i_visible_pitch = p_pic->p_sys->p_overlay->w * 2;

            p_pic->i_planes = 1;
            break;
        }
    }

    return VLC_SUCCESS;
}
Example no. 19
/**
 * This function initializes SDL vout method.
 */
static int Open(vlc_object_t *object)
{
    vout_display_t *vd = (vout_display_t *)object;
    vout_display_sys_t *sys;

    /* XXX: check for conflicts with the SDL audio output */
    vlc_mutex_lock(&sdl_lock);

    /* Check if SDL video module has been initialized */
    if (SDL_WasInit(SDL_INIT_VIDEO) != 0) {
        vlc_mutex_unlock(&sdl_lock);
        return VLC_EGENERIC;
    }

    vd->sys = sys = calloc(1, sizeof(*sys));
    if (!sys) {
        vlc_mutex_unlock(&sdl_lock);
        return VLC_ENOMEM;
    }

#ifdef HAVE_SETENV
    char *psz_driver = var_CreateGetNonEmptyString(vd, "sdl-video-driver");
    if (psz_driver) {
        setenv("SDL_VIDEODRIVER", psz_driver, 1);
        free(psz_driver);
    }
#endif

    /* */
    int sdl_flags = SDL_INIT_VIDEO;
#ifndef WIN32
    /* Win32 SDL implementation doesn't support SDL_INIT_EVENTTHREAD yet */
    sdl_flags |= SDL_INIT_EVENTTHREAD;
#endif
#ifndef NDEBUG
    /* In debug mode you may want vlc to dump a core instead of staying stuck */
    sdl_flags |= SDL_INIT_NOPARACHUTE;
#endif

    /* Initialize library */
    if (SDL_Init(sdl_flags) < 0) {
        vlc_mutex_unlock(&sdl_lock);

        msg_Err(vd, "cannot initialize SDL (%s)", SDL_GetError());
        free(sys);
        return VLC_EGENERIC;
    }
    vlc_mutex_unlock(&sdl_lock);

    /* Translate keys into unicode */
    SDL_EnableUNICODE(1);

    /* Get the desktop resolution */
    /* FIXME: SDL has a problem with virtual desktop */
    sys->desktop_width  = SDL_GetVideoInfo()->current_w;
    sys->desktop_height = SDL_GetVideoInfo()->current_h;

    /* */
    video_format_t fmt = vd->fmt;

    /* */
    vout_display_info_t info = vd->info;

    /* Set main window's size */
    int display_width;
    int display_height;
    if (vd->cfg->is_fullscreen) {
        display_width  = sys->desktop_width;
        display_height = sys->desktop_height;
    } else {
        display_width  = vd->cfg->display.width;
        display_height = vd->cfg->display.height;
    }

    /* Initialize flags and cursor */
    sys->display_flags = SDL_ANYFORMAT | SDL_HWPALETTE | SDL_HWSURFACE | SDL_DOUBLEBUF;
    sys->display_flags |= vd->cfg->is_fullscreen ? SDL_FULLSCREEN : SDL_RESIZABLE;

    sys->display_bpp = SDL_VideoModeOK(display_width, display_height,
                                       16, sys->display_flags);
    if (sys->display_bpp == 0) {
        msg_Err(vd, "no video mode available");
        goto error;
    }

    sys->display = SDL_SetVideoMode(display_width, display_height,
                                    sys->display_bpp, sys->display_flags);
    if (!sys->display) {
        msg_Err(vd, "cannot set video mode");
        goto error;
    }

    /* We keep the surface locked forever */
    SDL_LockSurface(sys->display);

    /* */
    vlc_fourcc_t forced_chroma = 0;
    char *psz_chroma = var_CreateGetNonEmptyString(vd, "sdl-chroma");
    if (psz_chroma) {
        forced_chroma = vlc_fourcc_GetCodecFromString(VIDEO_ES, psz_chroma);
        if (forced_chroma)
            msg_Dbg(vd, "Forcing chroma to 0x%.8x (%4.4s)",
                    forced_chroma, (const char*)&forced_chroma);
        free(psz_chroma);
    }

    /* Try to open an overlay if requested */
    sys->overlay = NULL;
    const bool is_overlay = var_CreateGetBool(vd, "overlay");
    if (is_overlay) {
        static const struct
        {
            vlc_fourcc_t vlc;
            uint32_t     sdl;
        } vlc_to_sdl[] = {
            { VLC_CODEC_YV12, SDL_YV12_OVERLAY },
            { VLC_CODEC_I420, SDL_IYUV_OVERLAY },
            { VLC_CODEC_YUYV, SDL_YUY2_OVERLAY },
            { VLC_CODEC_UYVY, SDL_UYVY_OVERLAY },
            { VLC_CODEC_YVYU, SDL_YVYU_OVERLAY },

            { 0, 0 }
        };
        const vlc_fourcc_t forced_chromas[] = {
            forced_chroma, 0
        };
        const vlc_fourcc_t *fallback_chromas =
            vlc_fourcc_GetYUVFallback(fmt.i_chroma);
        const vlc_fourcc_t *chromas = forced_chroma ? forced_chromas : fallback_chromas;

        for (int pass = forced_chroma ? 1 : 0; pass < 2 && !sys->overlay; pass++) {
            for (int i = 0; chromas[i] != 0; i++) {
                const vlc_fourcc_t vlc = chromas[i];

                uint32_t sdl = 0;
                for (int j = 0; vlc_to_sdl[j].vlc != 0 && !sdl; j++) {
                    if (vlc_to_sdl[j].vlc == vlc)
                        sdl = vlc_to_sdl[j].sdl;
                }
                if (!sdl)
                    continue;

                sys->overlay = SDL_CreateYUVOverlay(fmt.i_width, fmt.i_height,
                                                    sdl, sys->display);
                if (sys->overlay && !sys->overlay->hw_overlay && pass == 0) {
                    /* Ignore non hardware overlay surface in first pass */
                    SDL_FreeYUVOverlay(sys->overlay);
                    sys->overlay = NULL;
                }
                if (sys->overlay) {
                    /* We keep the surface locked forever */
                    SDL_LockYUVOverlay(sys->overlay);

                    fmt.i_chroma = vlc;
                    sys->is_uv_swapped = vlc_fourcc_AreUVPlanesSwapped(fmt.i_chroma,
                                                                       vd->fmt.i_chroma);
                    if (sys->is_uv_swapped)
                        fmt.i_chroma = vd->fmt.i_chroma;
                    break;
                }
            }
        }
    } else {
        msg_Warn(vd, "SDL overlay disabled by the user");
    }

    /* */
    vout_display_cfg_t place_cfg = *vd->cfg;
    place_cfg.display.width  = display_width;
    place_cfg.display.height = display_height;
    vout_display_PlacePicture(&sys->place, &vd->source, &place_cfg, !sys->overlay);

    /* If no overlay, fallback to software output */
    if (!sys->overlay) {
        /* */
        switch (sys->display->format->BitsPerPixel) {
        case 8:
            fmt.i_chroma = VLC_CODEC_RGB8;
            break;
        case 15:
            fmt.i_chroma = VLC_CODEC_RGB15;
            break;
        case 16:
            fmt.i_chroma = VLC_CODEC_RGB16;
            break;
        case 24:
            fmt.i_chroma = VLC_CODEC_RGB24;
            break;
        case 32:
            fmt.i_chroma = VLC_CODEC_RGB32;
            break;
        default:
            msg_Err(vd, "unknown screen depth %i",
                    sys->display->format->BitsPerPixel);
            goto error;
        }

        /* All we have is an RGB image with square pixels */
        fmt.i_width  = display_width;
        fmt.i_height = display_height;
        fmt.i_rmask = sys->display->format->Rmask;
        fmt.i_gmask = sys->display->format->Gmask;
        fmt.i_bmask = sys->display->format->Bmask;

        info.has_pictures_invalid = true;
    }

    if (vd->cfg->display.title)
        SDL_WM_SetCaption(vd->cfg->display.title,
                          vd->cfg->display.title);
    else if (!sys->overlay)
        SDL_WM_SetCaption(VOUT_TITLE " (software RGB SDL output)",
                          VOUT_TITLE " (software RGB SDL output)");
    else if (sys->overlay->hw_overlay)
        SDL_WM_SetCaption(VOUT_TITLE " (hardware YUV SDL output)",
                          VOUT_TITLE " (hardware YUV SDL output)");
    else
        SDL_WM_SetCaption(VOUT_TITLE " (software YUV SDL output)",
                          VOUT_TITLE " (software YUV SDL output)");

    /* Setup events */
    SDL_EventState(SDL_KEYUP, SDL_IGNORE);               /* ignore keys up */

    /* Setup vout_display now that everything is fine */
    vd->fmt = fmt;
    vd->info = info;

    vd->get     = Get;
    vd->prepare = NULL;
    vd->display = Display;
    vd->control = Control;
    vd->manage  = Manage;

    /* */
    vout_display_SendEventDisplaySize(vd, display_width, display_height, vd->cfg->is_fullscreen);
    return VLC_SUCCESS;

error:
    msg_Err(vd, "cannot set up SDL (%s)", SDL_GetError());

    if (sys->display) {
        SDL_UnlockSurface(sys->display);
        SDL_FreeSurface(sys->display);
    }

    vlc_mutex_lock(&sdl_lock);
    SDL_QuitSubSystem(SDL_INIT_VIDEO);
    vlc_mutex_unlock(&sdl_lock);

    free(sys);
    return VLC_EGENERIC;
}
Example no. 20
int main(int argc, char *argv[]) 
{
	
	// Initalizing these to NULL prevents segfaults!
	AVFormatContext   *pFormatCtx = NULL;
	int               i, videoStream;
	AVCodecContext    *pCodecCtxOrig = NULL;
	AVCodecContext    *pCodecCtx = NULL; // codec context; this is what gets used most below
	AVCodec           *pCodec = NULL; // the codec that will decode the video
	AVFrame           *pFrame = NULL; // holds the decoded frame data
	AVPacket          packet;
	int               frameFinished;
	struct SwsContext *sws_ctx = NULL; // Convert the image into YUV format that SDL uses

	// SDL-related variables
	SDL_Overlay     *bmp;
	SDL_Surface     *screen;
	SDL_Rect        rect;
	SDL_Event       event;

	CVideoSocket videoSocket;
	
	// variables for zoom in / zoom out
	int rect_w = 0;
	int rect_h = 0;
	
	// We catch any exceptions that might occur below -- see the catch statement for more details.
	try 
	{
	// Myo initialization starts here
	// First, we create a Hub with our application identifier. Be sure not to use the com.example namespace when
	// publishing your application. The Hub provides access to one or more Myos.
	// create the hub that connects to the Myo runtime application
	myo::Hub hub("com.example.hello-myo");

	// searching for a Myo ...
	std::cout << "Attempting to find a Myo..." << std::endl;

	// Next, we attempt to find a Myo to use. If a Myo is already paired in Myo Connect, this will return that Myo
	// immediately.
	// waitForMyo() takes a timeout value in milliseconds. In this case we will try to find a Myo for 10 seconds, and
	// if that fails, the function will return a null pointer.
	// wait (with a timeout) while searching for a Myo
	myo::Myo* myo = hub.waitForMyo(10000);

	// If waitForMyo() returned a null pointer, we failed to find a Myo, so exit with an error message.
	// bail out if no Myo was found
	if (!myo) 
	{
		throw std::runtime_error("Unable to find a Myo!");
	}

	// We've found a Myo.
	std::cout << "Connected to a Myo armband!" << std::endl << std::endl;

	// Next we construct an instance of our DeviceListener, so that we can register it with the Hub.
	// class that post-processes the data received from the Myo
	DataCollector collector;

	// Hub::addListener() takes the address of any object whose class inherits from DeviceListener, and will cause
	// Hub::run() to send events to all registered device listeners.
	// keep receiving data continuously
	hub.addListener(&collector);

	//--- Myo initialization ends here
	
	// initialize SDL
	InitSDL();

	// Open video file
	// opens the file or the data stream
	if (avformat_open_input(&pFormatCtx, videoSocket.videoStreamUrl, NULL, NULL) != 0)
	{
		return -1; // Couldn't open file
	}
	// Retrieve stream information
	// retrieves information about the data stream
	if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
	{
		return -1; // Couldn't find stream information
	}
	// Dump information about file onto standard error
	av_dump_format(pFormatCtx, 0, videoSocket.videoStreamUrl, 0);

	// Find the first video stream
	// find the video stream, i.e. determine what kind of data stream this is (ours is fixed to H.264, but...)
	videoStream = -1;
	for (i = 0; (unsigned)i < pFormatCtx->nb_streams; i++)
	{
		if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) 
		{
			videoStream = i;
			break;
		}
	}	
	if (videoStream == -1)
	{
		return -1; // Didn't find a video stream
	}
	// Get a pointer to the codec context for the video stream
	pCodecCtxOrig = pFormatCtx->streams[videoStream]->codec;
	// Find the decoder for the video stream
	pCodec = avcodec_find_decoder(pCodecCtxOrig->codec_id);
	if (pCodec == NULL) 
	{
		fprintf(stderr, "Unsupported codec!\n");
		return -1; // Codec not found
	}
	// Copy context
	// not sure why, but the context is copied instead of being used directly
	pCodecCtx = avcodec_alloc_context3(pCodec);
	if (avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0) 
	{
		fprintf(stderr, "Couldn't copy codec context");
		return -1; // Error copying codec context
	}

	// Open codec
	if (avcodec_open2(pCodecCtx, pCodec, NULL)<0)
	{
		return -1; // Could not open codec
	}
	
	// Allocate video frame
	pFrame = av_frame_alloc();

	// Make a screen to put our video
	// create the screen
#ifndef __DARWIN__
	screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0);
#else
	screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0);
#endif
	if (!screen) 
	{
		fprintf(stderr, "SDL: could not set video mode - exiting\n");
		exit(1);
	}

	// Allocate a place to put our YUV image on that screen
	// draws the image onto the screen
	bmp = SDL_CreateYUVOverlay(pCodecCtx->width,
		pCodecCtx->height,
		SDL_YV12_OVERLAY,
		screen);

	// initialize SWS context for software scaling
	sws_ctx = sws_getContext(pCodecCtx->width,
		pCodecCtx->height,
		pCodecCtx->pix_fmt,
		pCodecCtx->width,
		pCodecCtx->height,
		AV_PIX_FMT_YUV420P,
		SWS_BILINEAR,
		NULL,
		NULL,
		NULL
		);

	while (av_read_frame(pFormatCtx, &packet) >= 0) 
	{
		// main loop

		// In each iteration of our main loop, we run the Myo event loop for a set number of milliseconds.
		// this sets how often Myo data is polled;
		// if the value is too low the video feed lags as well, so factor in the desired fps
		hub.run(1000 / 500);
		// After processing events, we call the print() member function we defined above to print out the values we've
		// obtained from any events that have occurred.
		// Myo status monitoring
		collector.print();

		// end of the Myo loop


		// Is this a packet from the video stream?
		if (packet.stream_index == videoStream) 
		{
			// Decode video frame
			avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

			// Did we get a video frame?
			// convert the video frame to a bitmap image
			if (frameFinished) 
			{
				SDL_LockYUVOverlay(bmp);

				AVPicture pict;
				pict.data[0] = bmp->pixels[0];
				pict.data[1] = bmp->pixels[2];
				pict.data[2] = bmp->pixels[1];

				pict.linesize[0] = bmp->pitches[0];
				pict.linesize[1] = bmp->pitches[2];
				pict.linesize[2] = bmp->pitches[1];

				
				// Convert the image into YUV format that SDL uses
				sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
					pFrame->linesize, 0, pCodecCtx->height,
					pict.data, pict.linesize);

				SDL_UnlockYUVOverlay(bmp);

				// resize the video frame to implement software zoom in/out
				rect.x = -rect_w/2;
				rect.y = -rect_h/2;
				rect.w = pCodecCtx->width + rect_w;
				rect.h = pCodecCtx->height + rect_h;
				SDL_DisplayYUVOverlay(bmp, &rect);

			}
		}

		// Free the packet that was allocated by av_read_frame
		av_free_packet(&packet);
		SDL_PollEvent(&event);

		//// check the Myo gesture and send the corresponding message
		//// left/right camera control
		if (collector.currentPose == myo::Pose::waveOut)
		{
			SendData(videoSocket.ClientSocket, "right", videoSocket.ToServer);
			rest = true;
		}	
		if (collector.currentPose == myo::Pose::waveIn)
		{
			SendData(videoSocket.ClientSocket, "left", videoSocket.ToServer);
			rest = true;
		}
		// up/down camera control
		if (collector.currentPose == myo::Pose::fingersSpread && collector.pitch_w > 10)
		{
			SendData(videoSocket.ClientSocket, "up", videoSocket.ToServer);
			rest = true;
		}
		if (collector.currentPose == myo::Pose::fingersSpread && collector.pitch_w < 6)
		{
			SendData(videoSocket.ClientSocket, "down", videoSocket.ToServer);
			rest = true;
		}
		if (collector.currentPose == myo::Pose::rest &&rest == true)
		{
			SendData(videoSocket.ClientSocket, "stop", videoSocket.ToServer);
			rest = false;
		}
		if (collector.currentPose == myo::Pose::doubleTap && collector.roll_w <= 5)
		{
			collector.currentPose = myo::Pose::rest;
			rest = true;
			myo->lock();						
		}
		if (collector.currentPose == myo::Pose::doubleTap && collector.roll_w > 5)
		{
			rest = true;
			myo->unlock(myo::Myo::unlockHold);
		}
		// check the Myo gesture for zoom in / zoom out
		if (collector.currentPose == myo::Pose::fist && collector.roll_w < 6)
		{
			ZoomOut(rect_w, rect_h, 0);
		}
		if (collector.currentPose == myo::Pose::fist && collector.roll_w > 8)
		{
			ZoomIn(rect_w, rect_h, 300);
		}
		// handle key events
		switch (event.type) 
		{
		case SDL_QUIT:
			SDL_Quit();
			exit(0);
			break;
		case SDL_KEYDOWN:
			/* Check the SDLKey values and move change the coords */
			switch (event.key.keysym.sym){
			case SDLK_LEFT:
				// send the command string
				SendData(videoSocket.ClientSocket, "left", videoSocket.ToServer);
				break;
			case SDLK_RIGHT:
				// send the command string
				SendData(videoSocket.ClientSocket, "right", videoSocket.ToServer);
				break;
			case SDLK_UP:
				SendData(videoSocket.ClientSocket, "up", videoSocket.ToServer);
				break;
			case SDLK_DOWN:
				SendData(videoSocket.ClientSocket, "down", videoSocket.ToServer);
				break;
			case SDLK_q: // zoom in
				ZoomIn(rect_w,rect_h,300);			
				break;
			case SDLK_w: // zoom out
				ZoomOut(rect_w, rect_h, 0);								
				break;
			case SDLK_s: // stop the motor
				SendData(videoSocket.ClientSocket, "stop", videoSocket.ToServer);
				break;
			case SDLK_x: // quit the program
				SDL_Quit();
				exit(0);
				break;
			default:
				break;
			}
		default:
			break;
		}

	}
	
	// Free the YUV frame
	av_frame_free(&pFrame);

	// Close the codecs
	avcodec_close(pCodecCtx);
	avcodec_close(pCodecCtxOrig);

	// Close the video file
	avformat_close_input(&pFormatCtx);

	// close the socket
	closesocket(videoSocket.ClientSocket);
	WSACleanup();

	return 0;

	}
	// Personally I don't like doing exception handling this way,
	// but the sample code did it like this, so it stays for now.
	catch (const std::exception& e) 
	{
		std::cerr << "Error: " << e.what() << std::endl;
		std::cerr << "Press enter to continue.";
		std::cin.ignore();
		return 1;
	}
	
}
Example no. 21
int main(int argc, char *argv[]) {
    AVFormatContext *pFormatCtx;
    int             i, videoStream, audioStream;
    AVCodecContext  *pCodecCtx;
    AVCodec         *pCodec;
    AVFrame         *pFrame;
    AVPacket        packet;
    int             frameFinished;
    float           aspect_ratio;
    struct SwsContext *img_convert_ctx;

    AVCodecContext  *aCodecCtx;
    AVCodec         *aCodec;

    SDL_Overlay     *bmp;
    SDL_Surface     *screen;
    SDL_Rect        rect;
    SDL_Event       event;
    SDL_AudioSpec   wanted_spec, spec;

    if(argc < 2) {
        fprintf(stderr, "Usage: test <file>\n");
        exit(1);
    }
    // Register all formats and codecs
    av_register_all();

    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        exit(1);
    }

    // Open video file
    if(av_open_input_file(&pFormatCtx, argv[1], NULL, 0, NULL)!=0)
        return -1; // Couldn't open file

    // Retrieve stream information
    if(av_find_stream_info(pFormatCtx)<0)
        return -1; // Couldn't find stream information

    // Dump information about file onto standard error
    dump_format(pFormatCtx, 0, argv[1], 0);

    // Find the first video stream
    videoStream=-1;
    audioStream=-1;
    for(i=0; i<pFormatCtx->nb_streams; i++) {
        if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO &&
                videoStream < 0) {
            videoStream=i;
        }
        if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_AUDIO &&
                audioStream < 0) {
            audioStream=i;
        }
    }
    if(videoStream==-1)
        return -1; // Didn't find a video stream
    if(audioStream==-1)
        return -1;

    aCodecCtx=pFormatCtx->streams[audioStream]->codec;
    // Set audio settings from codec info
    wanted_spec.freq = aCodecCtx->sample_rate;
    wanted_spec.format = AUDIO_S16SYS;
    wanted_spec.channels = aCodecCtx->channels;
    wanted_spec.silence = 0;
    wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
    wanted_spec.callback = audio_callback;
    wanted_spec.userdata = aCodecCtx;

    if(SDL_OpenAudio(&wanted_spec, &spec) < 0) {
        fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
        return -1;
    }
    aCodec = avcodec_find_decoder(aCodecCtx->codec_id);
    if(!aCodec) {
        fprintf(stderr, "Unsupported codec!\n");
        return -1;
    }
    
    if (avcodec_open(aCodecCtx, aCodec) < 0) {
		fprintf(stderr, "Cannot open audio codec!\n");
		return -1;
	}

    // audio_st = pFormatCtx->streams[index]
    packet_queue_init(&audioq);
    SDL_PauseAudio(0);

    // Get a pointer to the codec context for the video stream
    pCodecCtx=pFormatCtx->streams[videoStream]->codec;

    // Find the decoder for the video stream
    pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
    if(pCodec==NULL) {
        fprintf(stderr, "Unsupported codec!\n");
        return -1; // Codec not found
    }
    // Open codec
    if(avcodec_open(pCodecCtx, pCodec)<0) {
		fprintf(stderr, "Cannot open video codec!\n");
        return -1; // Could not open codec
	}
	
	// construct the scale context, converting to PIX_FMT_YUV420P
    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                                     pCodecCtx->pix_fmt,
                                     pCodecCtx->width, pCodecCtx->height,
                                     PIX_FMT_YUV420P, SWS_BICUBIC,
                                     NULL, NULL, NULL);
	if (img_convert_ctx == NULL) {
		fprintf(stderr, "Cannot initialize the conversion context!\n");
		return -1;
	}

    // Allocate video frame
    pFrame=avcodec_alloc_frame();

    // Make a screen to put our video

#ifndef __DARWIN__
    screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0);
#else
    screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0);
#endif
    if(!screen) {
        fprintf(stderr, "SDL: could not set video mode - exiting\n");
        exit(1);
    }

    // Allocate a place to put our YUV image on that screen
    bmp = SDL_CreateYUVOverlay(pCodecCtx->width,
                               pCodecCtx->height,
                               SDL_YV12_OVERLAY,
                               screen);


    // Read frames, decode them, and display the video
    i=0;
    while(av_read_frame(pFormatCtx, &packet)>=0) {
        // Is this a packet from the video stream?
        if(packet.stream_index==videoStream) {
            // Decode video frame
            avcodec_decode_video(pCodecCtx, pFrame, &frameFinished,
                                 packet.data, packet.size);

            // Did we get a video frame?
            if(frameFinished) {
                SDL_LockYUVOverlay(bmp);

                AVPicture pict;
                pict.data[0] = bmp->pixels[0];
                pict.data[1] = bmp->pixels[2];
                pict.data[2] = bmp->pixels[1];

                pict.linesize[0] = bmp->pitches[0];
                pict.linesize[1] = bmp->pitches[2];
                pict.linesize[2] = bmp->pitches[1];

                // Convert the image into YUV format that SDL uses
                /*
                img_convert(&pict, PIX_FMT_YUV420P,
                            (AVPicture *)pFrame, pCodecCtx->pix_fmt,
                            pCodecCtx->width, pCodecCtx->height);
				*/
				sws_scale(img_convert_ctx, (const uint8_t * const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pict.data, pict.linesize);
				
                SDL_UnlockYUVOverlay(bmp);

                rect.x = 0;
                rect.y = 0;
                rect.w = pCodecCtx->width;
                rect.h = pCodecCtx->height;
                SDL_DisplayYUVOverlay(bmp, &rect);
            }
            // Free the packet whether or not a full frame was produced
            av_free_packet(&packet);
        } else if(packet.stream_index==audioStream) {
            packet_queue_put(&audioq, &packet);
        } else {
            av_free_packet(&packet);
        }
        // Poll for one pending SDL event per packet
        SDL_PollEvent(&event);
        switch(event.type) {
        case SDL_QUIT:
            quit = 1;
            SDL_Quit();
            exit(0);
            break;
        default:
            break;
        }

    }
    
    sws_freeContext(img_convert_ctx);

    // Free the YUV frame
    av_free(pFrame);

    // Close the codec
    avcodec_close(pCodecCtx);

    // Close the video file
    av_close_input_file(pFormatCtx);

    return 0;
}
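This listing targets a pre-1.0 FFmpeg API: av_open_input_file, av_find_stream_info, dump_format, avcodec_open and avcodec_decode_video were all deprecated and later removed. A sketch of the current names for the setup calls, keeping the example's variable names:

/* Modern FFmpeg equivalents of the deprecated calls above. */
if (avformat_open_input(&pFormatCtx, argv[1], NULL, NULL) != 0)
    return -1;                              /* was av_open_input_file() */
if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
    return -1;                              /* was av_find_stream_info() */
av_dump_format(pFormatCtx, 0, argv[1], 0);  /* was dump_format() */
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
    return -1;                              /* was avcodec_open() */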
Esempio n. 22
0
int main(int argc, char* argv[])
{
    AVFormatContext	*pFormatCtx;
    int				i, videoindex;
    AVCodecContext	*pCodecCtx;
    AVCodec			*pCodec;
    AVDictionary    *optionsDict = NULL;
    char filepath[] = "file.mp4";
    av_register_all();//register all formats and codecs
    avformat_network_init();//enable network stream support
    pFormatCtx = avformat_alloc_context();//allocate the AVFormatContext
    if(avformat_open_input(&pFormatCtx,filepath,NULL,NULL)!=0){//open the file
        printf("Could not open the file\n");
        return -1;
    }
    if(av_find_stream_info(pFormatCtx)<0)//read the stream information
    {
        printf("Couldn't find stream information.\n");
        return -1;
    }
    videoindex=-1;
    for(i=0; i<pFormatCtx->nb_streams; i++) //find the video stream index
        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
        {
            videoindex=i;
            break;
        }
    if(videoindex==-1)
    {
        printf("Didn't find a video stream.\n");
        return -1;
    }
    pCodecCtx=pFormatCtx->streams[videoindex]->codec;
    pCodec=avcodec_find_decoder(pCodecCtx->codec_id);//find the decoder
    if(pCodec==NULL)
    {
        printf("Codec not found.\n");
        return -1;
    }
    if(avcodec_open2(pCodecCtx, pCodec, &optionsDict)<0)//open the decoder
    {
        printf("Could not open codec.\n");
        return -1;
    }
    AVFrame	*pFrame,*pFrameYUV;
    pFrame=avcodec_alloc_frame();//holds the decoded AVFrame
    pFrameYUV=avcodec_alloc_frame();//holds the converted AVFrame (why convert? explained later)
    uint8_t *out_buffer;
    out_buffer=(uint8_t*) malloc(avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));//allocate the memory the AVFrame needs
    avpicture_fill((AVPicture *)pFrameYUV, out_buffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);//attach the buffer to the AVFrame
    //------------SDL init--------
    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {  
        printf( "Could not initialize SDL - %s\n", SDL_GetError()); 
        return -1;
    } 
    SDL_Surface *screen; 
    screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0);
    if(!screen) {  
        printf("SDL: could not set video mode - exiting\n");  
        return -1;
    }
    SDL_Overlay *bmp; 
    bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height,SDL_YV12_OVERLAY, screen); 
    SDL_Rect rect;
    //-----------------------------
    int ret, got_picture;
    static struct SwsContext *img_convert_ctx;
    int y_size = pCodecCtx->width * pCodecCtx->height;

    AVPacket *packet=(AVPacket *)malloc(sizeof(AVPacket));//holds a compressed AVPacket before decoding
    av_new_packet(packet, y_size);
    //print some file information-----------------------------
    printf("File information-----------------------------------------\n");
    av_dump_format(pFormatCtx,0,filepath,0);
    printf("-------------------------------------------------\n");
    //------------------------------
    while(av_read_frame(pFormatCtx, packet)>=0)//loop, fetching compressed AVPackets
    {
        if(packet->stream_index==videoindex)
        {
            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);//decode: AVPacket in, AVFrame out
            if(ret < 0)
            {
                printf("Decode error\n");
                return -1;
            }
            if(got_picture)
            {
                //pixel format conversion: pFrame is converted to pFrameYUV
                img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL); 
                sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
                sws_freeContext(img_convert_ctx);
                //------------SDL display--------
                SDL_LockYUVOverlay(bmp);
                bmp->pixels[0]=pFrameYUV->data[0];
                bmp->pixels[2]=pFrameYUV->data[1];
                bmp->pixels[1]=pFrameYUV->data[2];     
                bmp->pitches[0]=pFrameYUV->linesize[0];
                bmp->pitches[2]=pFrameYUV->linesize[1];   
                bmp->pitches[1]=pFrameYUV->linesize[2];
                SDL_UnlockYUVOverlay(bmp); 
                rect.x = 0;    
                rect.y = 0;    
                rect.w = pCodecCtx->width;    
                rect.h = pCodecCtx->height;    
                SDL_DisplayYUVOverlay(bmp, &rect); 
                //delay 40 ms
                SDL_Delay(40);
                //------------SDL-----------
            }
        }
        av_free_packet(packet);
    }
    free(out_buffer);
    av_free(pFrameYUV);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);

    return 0;
}
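One inefficiency worth noting in the example above: sws_getContext and sws_freeContext run once per decoded frame even though the parameters never change. A sketch of the usual fix, letting sws_getCachedContext reuse the context across frames (assumes <libswscale/swscale.h> is already included):

/* Sketch: convert one decoded frame, reusing a cached scaler context.
 * sws_getCachedContext() returns the existing context untouched while
 * the parameters stay the same, so nothing is rebuilt per frame. */
static struct SwsContext *cached_ctx = NULL;

static void convert_frame(AVCodecContext *cc, AVFrame *src, AVFrame *dst)
{
    cached_ctx = sws_getCachedContext(cached_ctx,
            cc->width, cc->height, cc->pix_fmt,
            cc->width, cc->height, PIX_FMT_YUV420P,
            SWS_BICUBIC, NULL, NULL, NULL);
    sws_scale(cached_ctx, (const uint8_t *const *)src->data, src->linesize,
              0, cc->height, dst->data, dst->linesize);
}
/* Call sws_freeContext(cached_ctx) once, after the read loop ends. */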
Esempio n. 23
0
static OMX_ERRORTYPE
sdlivr_prc_render_buffer (const sdlivr_prc_t * ap_prc,
                          OMX_BUFFERHEADERTYPE * p_hdr)
{
  assert (ap_prc);

  if (ap_prc->p_overlay)
    {
      const OMX_VIDEO_PORTDEFINITIONTYPE * p_vpd = &(ap_prc->port_def_);

      /* AVPicture pict; */
      SDL_Rect rect;
      uint8_t * y;
      uint8_t * u;
      uint8_t * v;
      unsigned int bytes;
      int pitch0, pitch1;

      if (p_vpd->nStride == 0)
        {
          /* align pitch on 16-pixel boundary. */
          pitch0 = (p_vpd->nFrameWidth + 15) & ~15;
        }
      else
        {
          pitch0 = p_vpd->nStride;
        }
      pitch1 = pitch0 / 2;

      /* hard-coded to be YUV420 planar */
      y = p_hdr->pBuffer;
      u = y + pitch0 * p_vpd->nFrameHeight;
      v = u + pitch1 * p_vpd->nFrameHeight / 2;

      SDL_LockYUVOverlay (ap_prc->p_overlay);

      if (ap_prc->p_overlay->pitches[0] != pitch0
          || ap_prc->p_overlay->pitches[1] != pitch1
          || ap_prc->p_overlay->pitches[2] != pitch1)
        {
          int hh;
          uint8_t * y2;
          uint8_t * u2;
          uint8_t * v2;

          y2 = ap_prc->p_overlay->pixels[0];
          u2 = ap_prc->p_overlay->pixels[2];
          v2 = ap_prc->p_overlay->pixels[1];

          for (hh = 0; hh < p_vpd->nFrameHeight; hh++)
            {
              memcpy (y2, y, ap_prc->p_overlay->pitches[0]);
              y2 += ap_prc->p_overlay->pitches[0];
              y += pitch0;
            }
          for (hh = 0; hh < p_vpd->nFrameHeight / 2; hh++)
            {
              memcpy (u2, u, ap_prc->p_overlay->pitches[2]);
              u2 += ap_prc->p_overlay->pitches[2];
              u += pitch1;
            }
          for (hh = 0; hh < p_vpd->nFrameHeight / 2; hh++)
            {
              memcpy (v2, v, ap_prc->p_overlay->pitches[1]);
              v2 += ap_prc->p_overlay->pitches[1];
              v += pitch1;
            }
        }
      else
        {
          bytes = pitch0 * p_vpd->nFrameHeight;
          memcpy (ap_prc->p_overlay->pixels[0], y, bytes);

          bytes = pitch1 * p_vpd->nFrameHeight / 2;
          memcpy (ap_prc->p_overlay->pixels[2], u, bytes);

          bytes = pitch1 * p_vpd->nFrameHeight / 2;
          memcpy (ap_prc->p_overlay->pixels[1], v, bytes);
        }

      SDL_UnlockYUVOverlay (ap_prc->p_overlay);

      rect.x = 0;
      rect.y = 0;
      rect.w = p_vpd->nFrameWidth;
      rect.h = p_vpd->nFrameHeight;
      SDL_DisplayYUVOverlay (ap_prc->p_overlay, &rect);
    }

  p_hdr->nFilledLen = 0;

  return OMX_ErrorNone;
}
Esempio n. 24
0
int main(int argc ,char **argv)
{
	av_register_all();
	AVFormatContext *pFormatCtx = NULL;
	AVInputFormat *file_iformat = NULL;
	
	//avio_set_interrupt_cb(decode_interrupt_cb);	
	//Open video file
	printf("open video file:%s\n", argv[1]);
	if(avformat_open_input(&pFormatCtx, argv[1], file_iformat, NULL) < 0)
	{
		printf("canot open input file: %s\n", argv[1]);
		return -1; //Cannot open file
	}
	printf("open input file: %s OK\n", argv[1]);
	//Retrieve stream information
	if(av_find_stream_info(pFormatCtx) < 0)
		return -1;//cannot find stream information
	//Dump information about the file onto standard error
	av_dump_format(pFormatCtx, 0, argv[1], 0);

	int i;
	int videoStream;
	int audioStream;
	videoStream = -1;
	audioStream = -1;
	AVCodecContext *vCodecCtx;
	AVCodecContext *aCodecCtx;
	//Find the first video stream
	for(i = 0; i < pFormatCtx->nb_streams; i++)
	{
		if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO && videoStream < 0) 
		{
			videoStream = i;
		}
		if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO && audioStream < 0)
		{
			audioStream = i;	
		}
	}
	if(videoStream == -1)
	{
		printf("no video stream\n");
		return -1;//Did not find a video stream
	}
	if(audioStream == -1)
	{
		printf("no audio stream\n");
		return -1;//Did not find a audio stream
	}
	printf("find video strean: %d\n", videoStream);
	printf("find audio strean: %d\n", audioStream);

	//Get a pointer to the codec context for the video stream
	vCodecCtx = pFormatCtx->streams[videoStream]->codec;
	AVCodec *vCodec;
	vCodec = avcodec_find_decoder(vCodecCtx->codec_id);
	if(vCodec == NULL)
	{
		fprintf(stderr, "Unsupported video codec\n");
		return -1;//codec not found
	}
	//Open video codec
	if(avcodec_open(vCodecCtx, vCodec) < 0)
	{
		fprintf(stderr, "open video codec error\n");
		return -1;//Could not open codec
	}
	//Get a pointer to the codec context for the audio stream
	aCodecCtx = pFormatCtx->streams[audioStream]->codec;
	static SDL_AudioSpec wanted_spec, spec;
	wanted_spec.freq = aCodecCtx->sample_rate;
	wanted_spec.format = AUDIO_S16SYS;
	wanted_spec.channels = aCodecCtx->channels;
	wanted_spec.silence = 0;
	wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
	wanted_spec.callback = audio_callback;
	wanted_spec.userdata = aCodecCtx;
	if(SDL_OpenAudio(&wanted_spec, &spec) < 0)
	{	
		fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
		return -1;
	}
	AVCodec *aCodec;
	aCodec = avcodec_find_decoder(aCodecCtx->codec_id);
	if(aCodec == NULL)
	{
		fprintf(stderr, "Unsupport audio codec\n");
		return -1;//codec not found
	}
	if(avcodec_open(aCodecCtx, aCodec) < 0)
	{
		fprintf(stderr, "open avcodec error\n");
		return -1;
	}
	packet_queue_init(&audioq);
	SDL_PauseAudio(0);

	AVFrame *pFrame;
	//Allocate video frame
	pFrame = avcodec_alloc_frame();
	AVFrame *pFrameRGB;
	//Allocate an AVFrame structure
	pFrameRGB = avcodec_alloc_frame();
	if(pFrameRGB == NULL)
		return -1;
	uint8_t *buffer;
	int numBytes;
	//Determine required buffer size and allocate buffer
	numBytes = avpicture_get_size(PIX_FMT_RGB24, vCodecCtx->width, vCodecCtx->height);
	buffer = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));
	//Assign appropriate parts of buffer to image planes in pFrameRGB
	//Note that pFrameRGB is an AVFrame, but AVFrame is a superset
	//of AVPicture
	avpicture_fill((AVPicture*)pFrameRGB, buffer, PIX_FMT_RGB24, vCodecCtx->width, vCodecCtx->height);
	
	if((SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)))
	{
		fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
		exit(1);
	}
	SDL_Surface *screen;
	screen = SDL_SetVideoMode(vCodecCtx->width, vCodecCtx->height, 0, 0);
	if(!screen)
	{
		fprintf(stderr, "SDL: could not set video mode\n");
		exit(1);
	}
	SDL_Overlay *bmp;
	bmp = SDL_CreateYUVOverlay(vCodecCtx->width, vCodecCtx->height, SDL_YV12_OVERLAY, screen);

	int frameFinished;
	AVPacket packet;
	SDL_Rect rect;
	i = 0;
	while(av_read_frame(pFormatCtx, &packet) >=0)
	{
		//is this a packet from video stream?
		if(packet.stream_index == videoStream)
		{
			//Decode video frame
			avcodec_decode_video2(vCodecCtx, pFrame, &frameFinished, &packet);
			//Did we get a video frame?
			if(frameFinished)
			{
				usleep(40 * 1000);
				SDL_LockYUVOverlay(bmp);
				AVPicture pict;
				pict.data[0] = bmp->pixels[0];
				pict.data[1] = bmp->pixels[2];
				pict.data[2] = bmp->pixels[1];
				pict.linesize[0] = bmp->pitches[0];
				pict.linesize[1] = bmp->pitches[2];
				pict.linesize[2] = bmp->pitches[1];
				//Convert the image into YUV format that SDL uses
				static struct SwsContext *img_convert_ctx;
				img_convert_ctx = sws_getCachedContext(img_convert_ctx,
		                   vCodecCtx->width, vCodecCtx->height, vCodecCtx->pix_fmt,
			               vCodecCtx->width, vCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
		        sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize,
		                   0, pFrame->height, pict.data, pict.linesize);
				SDL_UnlockYUVOverlay(bmp); 
				rect.x = 0;
				rect.y = 0;
				rect.w = vCodecCtx->width;
				rect.h = vCodecCtx->height;
				SDL_DisplayYUVOverlay(bmp, &rect);
			}
			//Free the packet that was allocated by av_read_frame
			av_free_packet(&packet);	
			SDL_Event event;
			SDL_PollEvent(&event);
			switch(event.type)
			{
				case SDL_QUIT:
					quit = 1;
					SDL_Quit();
					exit(0);
					break;
				default:
					break;
			}
		}
		else if(packet.stream_index == audioStream)
		{
			packet_queue_put(&audioq, &packet);
		}
		else
		{
			av_free_packet(&packet);
		}
	}
	//Free the RGB image
	av_free(buffer);
	av_free(pFrameRGB);
	//Free the YUV frame
	av_free(pFrame);
	//Close the codec
	avcodec_close(vCodecCtx);
	//Close the video file
	avformat_close_input(&pFormatCtx);
	return 0;
}
int queue_picture(VideoState *is, AVFrame *pFrame, double pts) {

    VideoPicture *vp;
    //int dst_pix_fmt;
    AVPicture pict;

    /* wait until we have space for a new pic */
    SDL_LockMutex(is->pictq_mutex);

    while(is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE &&
            !is->quit) {
        SDL_CondWait(is->pictq_cond, is->pictq_mutex);
    }

    SDL_UnlockMutex(is->pictq_mutex);

    if(is->quit) {
        return -1;
    }

    // windex is set to 0 initially
    vp = &is->pictq[is->pictq_windex];

    /* allocate or resize the buffer! */
    if(!vp->bmp ||
            vp->width != is->video_st->codec->width ||
            vp->height != is->video_st->codec->height) {
        SDL_Event event;

        vp->allocated = 0;
        /* we have to do it in the main thread */
        event.type = FF_ALLOC_EVENT;
        event.user.data1 = is;
        SDL_PushEvent(&event);

        /* wait until we have a picture allocated */
        SDL_LockMutex(is->pictq_mutex);

        while(!vp->allocated && !is->quit) {
            SDL_CondWait(is->pictq_cond, is->pictq_mutex);
        }

        SDL_UnlockMutex(is->pictq_mutex);

        if(is->quit) {
            return -1;
        }
    }

    /* We have a place to put our picture on the queue */
    /* If we are skipping a frame, do we set this to null
       but still return vp->allocated = 1? */


    if(vp->bmp) {

        SDL_LockYUVOverlay(vp->bmp);

        //dst_pix_fmt = PIX_FMT_YUV420P;
        /* point pict at the queue */

        pict.data[0] = vp->bmp->pixels[0];
        pict.data[1] = vp->bmp->pixels[2];
        pict.data[2] = vp->bmp->pixels[1];

        pict.linesize[0] = vp->bmp->pitches[0];
        pict.linesize[1] = vp->bmp->pitches[2];
        pict.linesize[2] = vp->bmp->pitches[1];

        // Convert the image into YUV format that SDL uses
        sws_scale
        (
            is->sws_ctx,
            (uint8_t const * const *)pFrame->data,
            pFrame->linesize,
            0,
            is->video_st->codec->height,
            pict.data,
            pict.linesize
        );

        SDL_UnlockYUVOverlay(vp->bmp);
        vp->pts = pts;

        /* now we inform our display thread that we have a pic ready */
        if(++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE) {
            is->pictq_windex = 0;
        }

        SDL_LockMutex(is->pictq_mutex);
        is->pictq_size++;
        SDL_UnlockMutex(is->pictq_mutex);
    }

    return 0;
}
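queue_picture pushes FF_ALLOC_EVENT and blocks because SDL 1.2 overlays must be created on the main thread. The matching handler is not part of this listing; a minimal sketch of what it typically looks like in this VideoState pattern (the global screen and exact field names are assumptions):

/* Sketch: runs on the main thread in response to FF_ALLOC_EVENT. */
void alloc_picture(void *userdata) {
    VideoState *is = (VideoState *)userdata;
    VideoPicture *vp = &is->pictq[is->pictq_windex];

    if(vp->bmp)
        SDL_FreeYUVOverlay(vp->bmp);   // drop any old, wrongly sized overlay
    vp->bmp = SDL_CreateYUVOverlay(is->video_st->codec->width,
                                   is->video_st->codec->height,
                                   SDL_YV12_OVERLAY, screen);
    vp->width  = is->video_st->codec->width;
    vp->height = is->video_st->codec->height;

    SDL_LockMutex(is->pictq_mutex);
    vp->allocated = 1;                 // wake the decoder thread waiting above
    SDL_CondSignal(is->pictq_cond);
    SDL_UnlockMutex(is->pictq_mutex);
}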
int main(int argc, char *argv[]) {
    AVFormatContext *pFormatCtx;
    int             i, videoStream;
    AVCodecContext  *pCodecCtx;
    AVCodec         *pCodec;
    AVFrame         *pFrame;
    AVPacket        packet;
    int             frameFinished;
    float           aspect_ratio;
    struct SwsContext *img_convert_ctx;

    SDL_Overlay     *bmp;
    SDL_Surface     *screen;
    SDL_Rect        rect;
    SDL_Event       event;

    if(argc < 2) {
        fprintf(stderr, "Usage: test <file>\n");
        exit(1);
    }
    // Register all formats and codecs
    av_register_all();

    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        exit(1);
    }

    // Open video file
    if(av_open_input_file(&pFormatCtx, argv[1], NULL, 0, NULL)!=0)
        return -1; // Couldn't open file

    // Retrieve stream information
    if(av_find_stream_info(pFormatCtx)<0)
        return -1; // Couldn't find stream information

    // Dump information about file onto standard error
    dump_format(pFormatCtx, 0, argv[1], 0);

    // Find the first video stream
    videoStream=-1;
    for(i=0; i<pFormatCtx->nb_streams; i++)
        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
            videoStream=i;
            break;
        }
    if(videoStream==-1)
        return -1; // Didn't find a video stream

    // Get a pointer to the codec context for the video stream
    pCodecCtx=pFormatCtx->streams[videoStream]->codec;

    // Find the decoder for the video stream
    pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
    if(pCodec==NULL) {
        fprintf(stderr, "Unsupported codec!\n");
        return -1; // Codec not found
    }

    // Open codec
    if(avcodec_open(pCodecCtx, pCodec)<0)
        return -1; // Could not open codec

    // Allocate video frame
    pFrame=avcodec_alloc_frame();

    // Make a screen to put our video
#ifndef __DARWIN__
    screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0);
#else
    screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0);
#endif
    if(!screen) {
        fprintf(stderr, "SDL: could not set video mode - exiting\n");
        exit(1);
    }

    // Allocate a place to put our YUV image on that screen
    bmp = SDL_CreateYUVOverlay(pCodecCtx->width,
                               pCodecCtx->height,
                               SDL_YV12_OVERLAY,
                               screen);


    // Read frames, decode them, and display the video
    i=0;
    while(av_read_frame(pFormatCtx, &packet)>=0) {
        // Is this a packet from the video stream?
        if(packet.stream_index==videoStream) {
            // Decode video frame
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished,
                                  &packet);

            // Did we get a video frame?
            if(frameFinished) {
                SDL_LockYUVOverlay(bmp);

                AVPicture pict;
                pict.data[0] = bmp->pixels[0];
                pict.data[1] = bmp->pixels[2];
                pict.data[2] = bmp->pixels[1];

                pict.linesize[0] = bmp->pitches[0];
                pict.linesize[1] = bmp->pitches[2];
                pict.linesize[2] = bmp->pitches[1];

                // Convert the image into YUV format that SDL uses
                /*
                	img_convert(&pict, PIX_FMT_YUV420P,
                                    (AVPicture *)pFrame, pCodecCtx->pix_fmt,
                		    pCodecCtx->width, pCodecCtx->height);
                */
                int dstFmt;
                dstFmt = PIX_FMT_YUV420P;

                img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                                                 pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
                                                 dstFmt, SWS_BICUBIC, NULL, NULL, NULL);

                sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize,
                          0, pCodecCtx->height, pict.data, pict.linesize);

                /*
                	printf("*(pict.data[0]: %d\n", *(pict.data[0]));
                	printf("*(pict.data[1]: %d\n", *(pict.data[1]));
                	printf("*(pict.data[2]: %d\n", *(pict.data[2]));
                	printf("*(pict.data[3]: %d\n", *(pict.data[3]));
                	printf("linesize[0]: %d\n", pict.linesize[0]);
                	printf("linesize[1]: %d\n", pict.linesize[1]);
                	printf("linesize[2]: %d\n", pict.linesize[2]);
                	printf("linesize[3]: %d\n", pict.linesize[3]);
                	printf("width: %d\n", pCodecCtx->width);
                	printf("height: %d\n", pCodecCtx->height);
                */
                ++i;
                if(i == 51) {
                    printf("frame 51\n");
                    if( *(pict.data[0]) == 20)
                        printf("frame 51, line 0, x=1, 20\n");
                }

                SDL_UnlockYUVOverlay(bmp);

                rect.x = 0;
                rect.y = 0;
                rect.w = pCodecCtx->width;
                rect.h = pCodecCtx->height;
                SDL_DisplayYUVOverlay(bmp, &rect);
//SDL_Delay(1000);
//return 0;
            }
        }

        // Free the packet that was allocated by av_read_frame
        av_free_packet(&packet);
        SDL_PollEvent(&event);
        switch(event.type) {
        case SDL_QUIT:
            SDL_Quit();
            exit(0);
            break;
        default:
            break;
        }

    }

    // Free the YUV frame
    av_free(pFrame);

    // Close the codec
    avcodec_close(pCodecCtx);

    // Close the video file
    av_close_input_file(pFormatCtx);

    return 0;
}
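As in the other mains here, SDL_PollEvent runs at most once per packet, so input events can pile up while decoding is busy (this listing also recreates the SwsContext on every frame; the sws_getCachedContext pattern shown after Esempio n. 22 applies here as well). A sketch of a helper that drains the queue each iteration instead:

/* Sketch: drain every pending SDL event instead of at most one per packet. */
static void pump_events(void)
{
    SDL_Event event;
    while (SDL_PollEvent(&event)) {
        if (event.type == SDL_QUIT) {
            SDL_Quit();
            exit(0);
        }
    }
}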
Esempio n. 27
0
int video_thread(void *arg)
{
	VideoState *is = (VideoState *)arg;
	AVPacket pkt1, *pkt = &pkt1;
	int got_frame = 0;
	AVFrame *frame = av_frame_alloc();
	Frame *vp;


	for (;;) {
		if (packet_queue_get(&is->videoq, pkt, 1) < 0) {
			// means we quit getting packets
			break;
		}
		// Decode video frame
		avcodec_decode_video2(is->video_ctx, frame, &got_frame, pkt);
		// Did we get a video frame?
		if (got_frame) 
		{
			vp = frame_queue_peek_writable(&is->pictq);
			if (!vp->bmp || !vp->allocated ||
				vp->width != frame->width ||
				vp->height != frame->height) 
			{
				SDL_Event event;
				vp->allocated = 0;
				vp->width = frame->width;
				vp->height = frame->height;
				/* the allocation must be done in the main thread to avoid
				locking problems. */
				event.type = FF_ALLOC_EVENT;
				event.user.data1 = is;
				SDL_PushEvent(&event);
				/* wait until the picture is allocated */
				SDL_LockMutex(is->pictq.mutex);
				while (!vp->allocated) {
					SDL_CondWait(is->pictq.cond, is->pictq.mutex);
				}
				SDL_UnlockMutex(is->pictq.mutex);
			}
			/* if the frame is not skipped, then display it */
			if (vp->bmp) 
			{
				AVPicture pict = { { 0 } };
				/* get a pointer on the bitmap */
				SDL_LockYUVOverlay(vp->bmp);
				pict.data[0] = vp->bmp->pixels[0];
				pict.data[1] = vp->bmp->pixels[2];
				pict.data[2] = vp->bmp->pixels[1];
				pict.linesize[0] = vp->bmp->pitches[0];
				pict.linesize[1] = vp->bmp->pitches[2];
				pict.linesize[2] = vp->bmp->pitches[1];
				is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx,
					vp->width, vp->height, is->video_ctx->pix_fmt, vp->width, vp->height,
					AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
				sws_scale(is->img_convert_ctx, frame->data, frame->linesize,
					0, vp->height, pict.data, pict.linesize);
				SDL_UnlockYUVOverlay(vp->bmp);
				/* advance the queue's write pointer */
				frame_queue_push(&is->pictq);
			}
		}
		// free the packet whether or not a frame was produced
		av_free_packet(pkt);
	}
	av_frame_free(&frame);
	return 0;
}
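On current FFmpeg av_free_packet no longer exists; the same cleanup is spelled av_packet_unref. A sketch of the loop body with the replacement call (decode API left unchanged for brevity):

	/* Sketch of the same loop body on FFmpeg releases where
	 * av_free_packet() has been removed: */
	for (;;) {
		if (packet_queue_get(&is->videoq, pkt, 1) < 0)
			break;
		avcodec_decode_video2(is->video_ctx, frame, &got_frame, pkt);
		av_packet_unref(pkt); /* unref whether or not a frame came out */
		if (got_frame) {
			/* queue the picture exactly as above */
		}
	}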
Esempio n. 28
0
int main(int argc,char *argv[])
{
    av_register_all();  // Register all available file formats and codecs with the library
    if(SDL_Init(SDL_INIT_VIDEO|SDL_INIT_AUDIO|SDL_INIT_TIMER))
    {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        exit(1);
    }

    AVFormatContext *pFormatCtx = NULL;

    // Open video file
    if(avformat_open_input(&pFormatCtx, argv[1], NULL, NULL) != 0)
    {
        fprintf(stderr, "Couldn't open the file!\n");
        return -1;
    }

    // Retrieve stream information
    if(avformat_find_stream_info(pFormatCtx, NULL) < 0)
    {
        fprintf(stderr, "Couldn't find stream information!\n");
        return -1;  // Couldn't find stream information
    }

    // Dump information about file onto standard error
    av_dump_format(pFormatCtx, 0, argv[1], 0);

    int i;
    AVCodecContext *pCodecCtxOrig = NULL;
    AVCodecContext *pCodecCtx = NULL;

    // Find the first video stream
    int videoStream = -1;
    for(i=0; i < pFormatCtx->nb_streams; i++)
        if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            videoStream = i;
            break;
        }
    if(videoStream == -1)
    {
        fprintf(stderr, "Didn't find a video stream!\n");
        return -1;  // Didn't find a video stream
    }

    // Get a pointer to the codec context for the video stream
    pCodecCtxOrig = pFormatCtx->streams[videoStream]->codec;

    AVCodec *pCodec = NULL;

    // Find the decoder for the video stream
    pCodec = avcodec_find_decoder(pCodecCtxOrig->codec_id);
    if(pCodec == NULL)
    {
        fprintf(stderr, "Unsupported codec!\n");
        return -1;
    }
    // Copy context
    pCodecCtx = avcodec_alloc_context3(pCodec);

    if(avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0)
    {
        fprintf(stderr, "Couldn't copy codec context");
        return -1;  // Error copying codec context
    }
    // Open codec
    if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
    {
        fprintf(stderr, "Couldn't open codec!\n");
        return -1;  // Could not open codec
    }

    AVFrame *pFrame = NULL;
    AVFrame *pFrameRGB = NULL;

    // Allocate video frame
    pFrame = av_frame_alloc();
    if(pFrame == NULL)
    {
        fprintf(stderr, "Couldn't allocate pFrame!\n");
        return -1;
    }
    // Allocate an AVFrame structure
    pFrameRGB = av_frame_alloc();
    if(pFrameRGB == NULL)
    {
        fprintf(stderr, "Couldn't allocate pFrameRGB!\n");
        return -1;
    }

    uint8_t *buffer = NULL;
    int numBytes;
    // Determine required buffer size and allocate buffer
    numBytes = avpicture_get_size(PIX_FMT_RGB24,pCodecCtx->width,
            pCodecCtx->height);
    buffer = (uint8_t *)av_malloc(numBytes*sizeof(uint8_t));

    // Assign appropriate parts of buffer to image planes in pFrameRGB
    // Note that pFrameRGB is an AVFrame, but AVFrame is a superset
    // of AVPicture
    avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24,
            pCodecCtx->width, pCodecCtx->height);

    int frameFinished;
    AVPacket packet;

    SDL_Surface *screen;

#ifndef __DARWIN__
    screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0);
#else
    screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0);
#endif
    if(!screen)
    {
        fprintf(stderr, "SDL: could not set video mode - exiting\n");
        exit(1);
    }

    SDL_Overlay *bmp = NULL;
    struct SwsContext *sws_ctx = NULL;

    bmp = SDL_CreateYUVOverlay(pCodecCtx->width,pCodecCtx->height,
            SDL_YV12_OVERLAY, screen);

    // Initialize SWS context for softscaling
    sws_ctx = sws_getContext(pCodecCtx->width,
            pCodecCtx->height,
            pCodecCtx->pix_fmt,
            pCodecCtx->width,
            pCodecCtx->height,
            PIX_FMT_YUV420P,
            SWS_BILINEAR,
            NULL,
            NULL,
            NULL
            );

    i = 0;
    while(av_read_frame(pFormatCtx, &packet) >= 0)
    {
        // Is this a packet from the video stream?
        if(packet.stream_index == videoStream)
        {
            // Decode video frame
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

            SDL_Rect rect;

            // Did we get a video frame?
            if(frameFinished)
            {
                SDL_LockYUVOverlay(bmp);

                AVPicture pict;
                pict.data[0] = bmp->pixels[0];
                pict.data[1] = bmp->pixels[2];
                pict.data[2] = bmp->pixels[1];

                pict.linesize[0] = bmp->pitches[0];
                pict.linesize[1] = bmp->pitches[2];
                pict.linesize[2] = bmp->pitches[1];
                // Convert the image into YUV format that SDL uses
                sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
                        pFrame->linesize, 0, pCodecCtx->height,
                        pict.data, pict.linesize);

                SDL_UnlockYUVOverlay(bmp);
                rect.x = 0;
                rect.y = 0;
                rect.w = pCodecCtx->width;
                rect.h = pCodecCtx->height;
                SDL_DisplayYUVOverlay(bmp, &rect);
            }
        }

        SDL_Event event;

        // Free the packet that was allocated by av_read_frame
        av_free_packet(&packet);
        SDL_PollEvent(&event);
        switch(event.type)
        {
            case SDL_QUIT:
                SDL_Quit();
                exit(0);
                break;
            default:
                break;
        }
    }

    // Free the RGB image
    av_free(buffer);
    av_free(pFrameRGB);

    // Free the YUV frame
    av_free(pFrame);

    // Close the codecs
    avcodec_close(pCodecCtx);
    avcodec_close(pCodecCtxOrig);

    // Close the video file
    avformat_close_input(&pFormatCtx);

    return 0;
}
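This player has no clock: frames appear as fast as av_read_frame and the decoder deliver them. A crude pacing sketch, assuming the stream exposes a usable average frame rate (real players derive delays from each frame's PTS instead):

    /* Sketch: sleep one nominal frame duration after each displayed frame. */
    AVRational fr = pFormatCtx->streams[videoStream]->avg_frame_rate;
    int delay_ms = 40;                     /* fallback: assume 25 fps */
    if (fr.num > 0 && fr.den > 0)
        delay_ms = (int)(1000 * fr.den / fr.num);
    SDL_DisplayYUVOverlay(bmp, &rect);
    SDL_Delay(delay_ms);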
Esempio n. 29
0
/* run in a thread (SDL overlay)*/
void *main_loop(void *data)
{
    struct ALL_DATA *all_data = (struct ALL_DATA *) data;

    struct VidState *s = all_data->s;
    struct paRecordData *pdata = all_data->pdata;
    struct GLOBAL *global = all_data->global;
    struct focusData *AFdata = all_data->AFdata;
    struct vdIn *videoIn = all_data->videoIn;

    struct particle* particles = NULL; //for the particles video effect

    SDL_Event event;
    /*the main SDL surface*/
    SDL_Surface *pscreen = NULL;
    SDL_Overlay *overlay = NULL;
    SDL_Rect drect;

    int width = global->width;
    int height = global->height;
    int format = global->format;

    SAMPLE vuPeak[2];  // The maximum vuLevel seen recently
    int vuPeakFreeze[2]; // The vuPeak values will be frozen for this many frames.
    vuPeak[0] = vuPeak[1] = 0;
    vuPeakFreeze[0] = vuPeakFreeze[1] = 0;

    BYTE *p = NULL;

    Control *focus_control = NULL;
    int last_focus = 0;

    if (global->AFcontrol)
    {
        focus_control = get_ctrl_by_id(s->control_list, AFdata->id);
        get_ctrl(videoIn->fd, s->control_list, AFdata->id, all_data);
        last_focus = focus_control->value;
        /*make sure we wait for focus to settle on first check*/
        if (last_focus < 0) last_focus = AFdata->f_max;
    }

    gboolean capVid = FALSE;
    gboolean signalquit = FALSE;

    /*------------------------------ SDL init video ---------------------*/
    if(!global->no_display)
    {
        overlay = video_init(data, &(pscreen));

        if(overlay == NULL)
        {
            g_print("FATAL: Couldn't create yuv overlay - please disable hardware accelaration\n");
            signalquit = TRUE; /*exit video thread*/
        }
        else
        {
            p = (unsigned char *) overlay->pixels[0];

            drect.x = 0;
            drect.y = 0;
            drect.w = pscreen->w;
            drect.h = pscreen->h;
        }
    }

    while (!signalquit)
    {
        __LOCK_MUTEX(__VMUTEX);
            capVid = videoIn->capVid;
            signalquit = videoIn->signalquit;
        __UNLOCK_MUTEX(__VMUTEX);

        /*-------------------------- Grab Frame ----------------------------------*/
        if (uvcGrab(videoIn, format, width, height, &global->fps, &global->fps_num) < 0)
        {
            g_printerr("Error grabbing image \n");
            continue;
        }
        else
        {
            if(!videoIn->timestamp)
            {
                global->skip_n++; //skip this frame
            }

            if(capVid)
            {
                if(global->framecount < 1)
                {
					/*reset video start time to first frame capture time */
					global->Vidstarttime = videoIn->timestamp;
					/** set current time for audio ts(0) reference (MONOTONIC)
					 *  only used if we have no audio capture before video
					 */
					__LOCK_MUTEX(__AMUTEX);
						pdata->ts_ref = ns_time_monotonic();
					__UNLOCK_MUTEX(__AMUTEX);
					//printf("video ts ref: %llu audio ts_ ref: %llu\n",global->Vidstarttime, pdata->ts_ref);
					global->v_ts = 0;
                }
                else
                {
                    global->v_ts = videoIn->timestamp - global->Vidstarttime;
                    /*always use the last frame time stamp for video stop time*/
                    global->Vidstoptime = videoIn->timestamp;
                }
            }

            if (global->FpsCount && !global->no_display)
            {/* sets fps count in window title bar */
                global->frmCount++;
                if (global->DispFps>0)
                { /*set every 2 sec*/
                    g_snprintf(global->WVcaption,24,"GUVCVideo - %3.2f fps",global->DispFps);
                    SDL_WM_SetCaption(global->WVcaption, NULL);

                    global->frmCount=0;/*resets*/
                    global->DispFps=0;
                }
            }

            /*---------------- autofocus control ------------------*/

            if (global->AFcontrol && (global->autofocus || AFdata->setFocus))
            { /*AFdata = NULL if no focus control*/
                if (AFdata->focus < 0)
                {
                    /*starting autofocus*/
                    AFdata->focus = AFdata->left; /*start left*/
                    focus_control->value = AFdata->focus;
                    if (set_ctrl (videoIn->fd, s->control_list, AFdata->id) != 0)
                        g_printerr("ERROR: couldn't set focus to %d\n", AFdata->focus);
                    /*number of frames until focus is stable*/
                    /*1.4 ms focus time - every 1 step*/
                    AFdata->focus_wait = (int) abs(AFdata->focus-last_focus)*1.4/(1000/global->fps)+1;
                    last_focus = AFdata->focus;
                }
                else
                {
                    if (AFdata->focus_wait == 0)
                    {
                        AFdata->sharpness=getSharpness (videoIn->framebuffer, width, height, 5);
                        if (global->debug)
                            g_print("sharp=%d focus_sharp=%d foc=%d right=%d left=%d ind=%d flag=%d\n",
                                AFdata->sharpness,AFdata->focus_sharpness,
                                AFdata->focus, AFdata->right, AFdata->left,
                                AFdata->ind, AFdata->flag);
                        AFdata->focus=getFocusVal (AFdata);
                        if ((AFdata->focus != last_focus))
                        {
                            focus_control->value = AFdata->focus;
                            if (set_ctrl (videoIn->fd, s->control_list, AFdata->id) != 0)
                                g_printerr("ERROR: couldn't set focus to %d\n",
                                    AFdata->focus);
                            /*number of frames until focus is stable*/
                            /*1.4 ms focus time - every 1 step*/
                            AFdata->focus_wait = (int) abs(AFdata->focus-last_focus)*1.4/(1000/global->fps)+1;
                        }
                        last_focus = AFdata->focus;
                    }
                    else
                    {
                        AFdata->focus_wait--;
                        if (global->debug) g_print("Wait Frame: %d\n",AFdata->focus_wait);
                    }
                }
            }
        }
        /*------------------------- Filter Frame ---------------------------------*/
        __LOCK_MUTEX(__GMUTEX);
        if(global->Frame_Flags>0)
        {
            if((global->Frame_Flags & YUV_PARTICLES)==YUV_PARTICLES)
                particles = particles_effect(videoIn->framebuffer, width, height, 20, 4, particles);

            if((global->Frame_Flags & YUV_MIRROR)==YUV_MIRROR)
                yuyv_mirror(videoIn->framebuffer, width, height);

            if((global->Frame_Flags & YUV_UPTURN)==YUV_UPTURN)
                yuyv_upturn(videoIn->framebuffer, width, height);

            if((global->Frame_Flags & YUV_NEGATE)==YUV_NEGATE)
                yuyv_negative (videoIn->framebuffer, width, height);

            if((global->Frame_Flags & YUV_MONOCR)==YUV_MONOCR)
                yuyv_monochrome (videoIn->framebuffer, width, height);

            if((global->Frame_Flags & YUV_PIECES)==YUV_PIECES)
                pieces (videoIn->framebuffer, width, height, 16 );

        }
        __UNLOCK_MUTEX(__GMUTEX);
        /*-------------------------capture Image----------------------------------*/
        if (videoIn->capImage)
        {
            /*
             * format and resolution can change(enabled) while capturing the frame
             * but you would need to be speedy gonzalez to press two buttons
             * at almost the same time :D
             */
            int ret = 0;
            if((ret=store_picture(all_data)) < 0)
                g_printerr("saved image to:%s ...Failed \n",videoIn->ImageFName);
            else if (!ret && global->debug) g_print("saved image to:%s ...OK \n",videoIn->ImageFName);

            videoIn->capImage=FALSE;
        }
        /*---------------------------capture Video---------------------------------*/
        if (capVid && !(global->skip_n))
        {
            __LOCK_MUTEX(__VMUTEX);
                if(videoIn->VidCapStop) videoIn->VidCapStop = FALSE;
            __UNLOCK_MUTEX(__VMUTEX);
            int res=0;

			/* format and resolution don't change(disabled) while capturing video
			 * store_video_frame may sleep if needed to avoid buffer overrun
			 */
            if((res=store_video_frame(all_data))<0) g_printerr("WARNING: droped frame (%i)\n",res);

        } /*video and audio capture have stopped */
        else
        {
            __LOCK_MUTEX(__VMUTEX);
                if(!(videoIn->VidCapStop)) videoIn->VidCapStop=TRUE;
            __UNLOCK_MUTEX(__VMUTEX);
        }

        /* decrease skip frame count */
        if (global->skip_n > 0)
        {
            if (global->debug && capVid) g_print("skiping frame %d...\n", global->skip_n);
            global->skip_n--;
        }

        __LOCK_MUTEX( __AMUTEX );
            if (global->Sound_enable && capVid) pdata->skip_n = global->skip_n;
        __UNLOCK_MUTEX( __AMUTEX );

        /*------------------------- Display Frame --------------------------------*/
        if(!global->no_display)
        {
			if (global->osdFlags && pdata->audio_buff[0])
			{
				draw_vu_meter(width, height, vuPeak, vuPeakFreeze, data);
			}
            SDL_LockYUVOverlay(overlay);
            memcpy(p, videoIn->framebuffer, width * height * 2);
            SDL_UnlockYUVOverlay(overlay);
            SDL_DisplayYUVOverlay(overlay, &drect);

            /*------------------------- Read Key events ------------------------------*/
            /* Poll for events */
            while( SDL_PollEvent(&event) )
            {
                //printf("event type:%i  event key:%i\n", event.type, event.key.keysym.scancode);
                if(event.type==SDL_KEYDOWN)
                {
                    if (videoIn->PanTilt)
                    {
                        switch( event.key.keysym.sym )
                        {
                            /* Keyboard event */
                            /* Pass the event data onto PrintKeyInfo() */
                            case SDLK_DOWN:
                                /*Tilt Down*/
                                uvcPanTilt (videoIn->fd, s->control_list, 0, 1);
                                break;

                            case SDLK_UP:
                                /*Tilt UP*/
                                uvcPanTilt (videoIn->fd, s->control_list, 0, -1);
                                break;

                            case SDLK_LEFT:
                                /*Pan Left*/
                                uvcPanTilt (videoIn->fd, s->control_list, 1, 1);
                                break;

                            case SDLK_RIGHT:
                                /*Pan Right*/
                                uvcPanTilt (videoIn->fd, s->control_list, 1, -1);
                                break;
                            default:
                                break;
                        }
                    }
                    switch( event.key.keysym.scancode )
                    {
                        case 220: /*webcam button*/
                            //gdk_threads_enter();
                            if (all_data->global->default_action == 0)
                                g_main_context_invoke(NULL, image_capture_callback, (gpointer) all_data);
                            else
                                g_main_context_invoke(NULL, video_capture_callback, (gpointer) all_data);
                            break;
                    }
                    switch( event.key.keysym.sym )
                    {
                        case SDLK_q:
                            //shutDown
                            g_timeout_add(200, shutd_timer, all_data);
                            g_print("q pressed - Quiting...\n");
                            break;
                        case SDLK_SPACE:
                            if (global->AFcontrol > 0)
                                setfocus_clicked(NULL, all_data);
                            break;
                        case SDLK_i:
                            g_main_context_invoke(NULL, image_capture_callback, (gpointer) all_data);
                            break;
                        case SDLK_v:
                            g_main_context_invoke(NULL, video_capture_callback, (gpointer) all_data);
                            break;
                        default:
                            break;
                    }
                }
                if(event.type==SDL_VIDEORESIZE)
                {
                    pscreen =
                        SDL_SetVideoMode(event.resize.w,
                                 event.resize.h,
                                 global->bpp,
                                 SDL_VIDEO_Flags);
                    drect.w = event.resize.w;
                    drect.h = event.resize.h;
                }
                if(event.type==SDL_QUIT)
                {
                    //shutDown
                    g_timeout_add(200, shutd_timer, all_data);
                }
            }
        }
        /* if set make the thread sleep - default no sleep (full throttle)*/
        if(global->vid_sleep) sleep_ms(global->vid_sleep);

        /*------------------------------------------*/
        /*  restart video (new resolution/format)   */
        /*------------------------------------------*/
        if (global->change_res)
        {
            g_print("setting new resolution (%d x %d)\n", global->width, global->height);
            /*clean up */

            if(particles) g_free(particles);
            particles = NULL;

            if (global->debug) g_print("cleaning buffer allocations\n");
            fflush(NULL);//flush all output buffers

            if(!global->no_display)
            {
                SDL_FreeYUVOverlay(overlay);
                overlay = NULL;
            }
            /*init device*/
            restart_v4l2(videoIn, global);
            /*set new resolution for video thread*/
            width = global->width;
            height = global->height;
            format = global->format;
            /* restart SDL with new values*/
            if(!global->no_display)
            {
                overlay = video_init(data, &(pscreen));
                if(overlay == NULL)
                {
                    g_print("FATAL: Couldn't create yuv overlay - please disable hardware accelaration\n");
                    signalquit = TRUE; /*exit video thread*/
                }
                else
                {
                    if (global->debug) g_print("yuv overlay created (%ix%i).\n", overlay->w, overlay->h);
                    p = (unsigned char *) overlay->pixels[0];

                    drect.x = 0;
                    drect.y = 0;
                    drect.w = pscreen->w;
                    drect.h = pscreen->h;

                    global->change_res = FALSE;
                }
            }
            else global->change_res = FALSE;
        }

    }/*loop end*/

    __LOCK_MUTEX(__VMUTEX);
        capVid = videoIn->capVid;
    __UNLOCK_MUTEX(__VMUTEX);
    /*check if thread exited while in Video capture mode*/
    if (capVid)
    {
        /*stop capture*/
        if (global->debug) g_print("stoping Video capture\n");
        //global->Vidstoptime = ns_time_monotonic(); /*this is set in IO thread*/
        videoIn->VidCapStop=TRUE;
        capVid = FALSE;
        __LOCK_MUTEX(__VMUTEX);
            videoIn->capVid = capVid;
        __UNLOCK_MUTEX(__VMUTEX);
        __LOCK_MUTEX(__AMUTEX);
            pdata->capVid = capVid;
        __UNLOCK_MUTEX(__AMUTEX);
        /*join IO thread*/
        if (global->debug) g_print("Shuting Down IO Thread\n");
        __THREAD_JOIN( all_data->IO_thread );
        if (global->debug) g_print("IO Thread finished\n");
    }

    if (global->debug) g_print("Thread terminated...\n");
    p = NULL;
    if(particles) g_free(particles);
    particles=NULL;

    if (global->debug) g_print("cleaning Thread allocations: 100%%\n");
    fflush(NULL);//flush all output buffers

    if(!global->no_display)
    {
        if(overlay)
            SDL_FreeYUVOverlay(overlay);
        //SDL_FreeSurface(pscreen);

        SDL_Quit();
    }

    if (global->debug) g_print("Video thread completed\n");

    global = NULL;
    AFdata = NULL;
    videoIn = NULL;
    return ((void *) 0);
}
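The display path above copies width * height * 2 bytes straight into overlay->pixels[0], which assumes a packed-YUYV overlay whose pitch is exactly twice the width. A pitch-safe variant, sketched as a helper, copies row by row in case SDL padded the overlay:

/* Sketch: pitch-safe copy of a packed YUYV frame into an SDL overlay. */
static void copy_yuyv(SDL_Overlay *ov, const unsigned char *src, int w, int h)
{
    int row;
    for (row = 0; row < h; row++)
        memcpy(ov->pixels[0] + row * ov->pitches[0],  /* dest row, padded  */
               src + row * w * 2,                     /* source row, tight */
               w * 2);
}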
Esempio n. 30
0
int open_window_sdl (void) {
	const SDL_VideoInfo *video_info;
	int video_bpp;

	if(SDL_Init(SDL_INIT_VIDEO) < 0) goto no_sdl;

	/* Get the "native" video mode */
	video_info = SDL_GetVideoInfo();
	switch (video_info->vfmt->BitsPerPixel) {
		case 16:
		case 32:
			video_bpp = video_info->vfmt->BitsPerPixel;
			break;
		default:
			video_bpp = 16;
			break;
	} 

	full_screen_width = video_info->current_w;
	full_screen_height = video_info->current_h;

	sdl_rect.x = 0;
	sdl_rect.y = 0;
	sdl_rect.h = ffctv_height;
	sdl_rect.w = ffctv_width;

	sdl_screen = SDL_SetVideoMode(sdl_rect.w, sdl_rect.h, video_bpp,MYSDLFLAGS);
	SDL_WM_SetCaption("xjadeo", "xjadeo");

	newsrc_sdl();

	if (!sdl_overlay)
		fprintf(stderr, "NO OVERLAY\n");
	if (!sdl_overlay || SDL_LockYUVOverlay(sdl_overlay) < 0) {
		printf("OVERLAY error.\n");
		goto no_overlay;
	}

	resized_sdl();

	if (sdl_overlay->pitches[0] != movie_width ||
			sdl_overlay->pitches[1] != sdl_overlay->pitches[2] ) {
		fprintf(stderr,"unsupported SDL YV12.\n"); 
		goto no_overlay;
	}  

	if (start_ontop) {
		sdl_set_ontop(1);
	}
	if (start_fullscreen) {
		sdl_toggle_fullscreen(1);
	}

	return(0);
 
no_overlay: 
	if(sdl_overlay) SDL_FreeYUVOverlay(sdl_overlay); 
	SDL_Quit();
no_sdl:
	return 1;
}
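Note that open_window_sdl returns with the overlay still locked on the success path. In SDL 1.2 each SDL_LockYUVOverlay is normally paired with SDL_UnlockYUVOverlay before SDL_DisplayYUVOverlay; presumably xjadeo's frame-rendering path (not shown here) performs the unlock, along these lines:

/* Typical per-frame pairing for an SDL 1.2 YUV overlay. */
SDL_LockYUVOverlay(sdl_overlay);
/* write the Y, V and U planes into sdl_overlay->pixels[0..2] here */
SDL_UnlockYUVOverlay(sdl_overlay);
SDL_DisplayYUVOverlay(sdl_overlay, &sdl_rect);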