static int sdlview_filter_video(TCModuleInstance *self, vframe_list_t *frame)
{
    int ret = 0;
    uint8_t *src_planes[3] = { NULL, NULL, NULL };
    uint8_t *dst_planes[3] = { NULL, NULL, NULL };
    SDLPrivateData *pd = NULL;

    TC_MODULE_SELF_CHECK(self, "filter_video");
    TC_MODULE_SELF_CHECK(frame, "filter_video");

    pd = self->userdata;

    SDL_LockYUVOverlay(pd->overlay);

    YUV_INIT_PLANES(src_planes, frame->video_buf,
                    pd->src_fmt, pd->w, pd->h);
    dst_planes[0] = pd->overlay->pixels[0];
    dst_planes[1] = pd->overlay->pixels[1];
    dst_planes[2] = pd->overlay->pixels[2];

    ret = ac_imgconvert(src_planes, pd->src_fmt, dst_planes, IMG_YV12,
                        pd->w, pd->h);

    SDL_UnlockYUVOverlay(pd->overlay);

    if (!ret) {
        /* conversion failed; nothing sensible to display */
        return TC_ERROR;
    }

    pd->rectangle.x = 0;
    pd->rectangle.y = 0;
    pd->rectangle.w = pd->w;
    pd->rectangle.h = pd->h;

    /* SDL_DisplayYUVOverlay() returns 0 on success, -1 on failure;
     * the result is not checked here. */
    SDL_DisplayYUVOverlay(pd->overlay, &(pd->rectangle));

    return TC_OK;
}
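Nearly every example in this listing follows the same SDL 1.2 lifecycle: initialize the video subsystem, create a screen surface and a YUV overlay once, then for each frame lock the overlay, write planar data through pixels[]/pitches[], unlock, and hand a destination SDL_Rect to SDL_DisplayYUVOverlay(). The sketch below is not taken from any of the projects quoted here; the 320x240 size, the YV12 format, and the flat gray fill are arbitrary choices for illustration.

#include <stdio.h>
#include <string.h>
#include <SDL/SDL.h>

int main(void)
{
    const int w = 320, h = 240;
    SDL_Surface *screen;
    SDL_Overlay *overlay;
    SDL_Rect rect = { 0, 0, w, h };
    int row;

    if (SDL_Init(SDL_INIT_VIDEO) < 0)
        return 1;

    screen = SDL_SetVideoMode(w, h, 0, SDL_SWSURFACE);
    overlay = screen ? SDL_CreateYUVOverlay(w, h, SDL_YV12_OVERLAY, screen) : NULL;
    if (!overlay) {
        fprintf(stderr, "SDL: %s\n", SDL_GetError());
        SDL_Quit();
        return 1;
    }

    /* Per frame: lock, fill the planes (luma ramp, neutral chroma), unlock, display. */
    SDL_LockYUVOverlay(overlay);
    for (row = 0; row < h; row++)
        memset(overlay->pixels[0] + row * overlay->pitches[0], row & 0xff, w);
    for (row = 0; row < h / 2; row++) {
        memset(overlay->pixels[1] + row * overlay->pitches[1], 128, w / 2);
        memset(overlay->pixels[2] + row * overlay->pitches[2], 128, w / 2);
    }
    SDL_UnlockYUVOverlay(overlay);

    if (SDL_DisplayYUVOverlay(overlay, &rect) != 0)   /* 0 on success, -1 on failure */
        fprintf(stderr, "SDL_DisplayYUVOverlay: %s\n", SDL_GetError());

    SDL_Delay(2000);
    SDL_FreeYUVOverlay(overlay);
    SDL_Quit();
    return 0;
}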
Example #2
void
Draw()
{
    SDL_Rect rect;
    int i;
    int disp;

    if (!scale) {
        rect.w = overlay->w;
        rect.h = overlay->h;
        for (i = 0; i < h - rect.h && i < w - rect.w; i++) {
            rect.x = i;
            rect.y = i;
            SDL_DisplayYUVOverlay(overlay, &rect);
        }
    } else {
        rect.w = overlay->w / 2;
        rect.h = overlay->h / 2;
        rect.x = (w - rect.w) / 2;
        rect.y = (h - rect.h) / 2;
        disp = rect.y - 1;
        for (i = 0; i < disp; i++) {
            rect.w += 2;
            rect.h += 2;
            rect.x--;
            rect.y--;
            SDL_DisplayYUVOverlay(overlay, &rect);
        }
    }
    printf("Displayed %d times.\n", i);
}
    bool put_frame(AVFrame *frame)
    {
        if (frame)
        {
            SDL_LockYUVOverlay(overlay_);
            pict_.data[0] = overlay_->pixels[0];
            pict_.data[1] = overlay_->pixels[2];
            pict_.data[2] = overlay_->pixels[1];

            pict_.linesize[0] = overlay_->pitches[0];
            pict_.linesize[1] = overlay_->pitches[2];
            pict_.linesize[2] = overlay_->pitches[1];

            // Convert the image into YUV format that SDL uses
            if (sws_context_)
            {
                sws_scale(sws_context_, (uint8_t const * const *)frame->data,
                          frame->linesize, 0, height_,
                          pict_.data, pict_.linesize);
            }
            SDL_UnlockYUVOverlay(overlay_);
            SDL_DisplayYUVOverlay(overlay_, &rect_);
            return true;
        }
        return false;
    }
Example #4
static void open_video(void){
  /* taken from player_sample.c test file for theora alpha */

  if ( SDL_Init(SDL_INIT_VIDEO) < 0 ) {
    printf("Unable to initialize SDL: %s\n", SDL_GetError());
    exit(1);
  }
  
  screen = SDL_SetVideoMode(ti.frame_width, ti.frame_height, 0, SDL_SWSURFACE);
  if ( screen == NULL ) {
    printf("Unable to set %dx%d video mode: %s\n", 
           ti.frame_width,ti.frame_height,SDL_GetError());
    exit(1);
  }
  
  yuv_overlay = SDL_CreateYUVOverlay(ti.frame_width, ti.frame_height,
				     SDL_YV12_OVERLAY,
				     screen);
  if ( yuv_overlay == NULL ) {
    printf("SDL: Couldn't create SDL_yuv_overlay: %s\n", 
	   SDL_GetError());
    exit(1);
  }
  rect.x = 0;
  rect.y = 0;
  rect.w = ti.frame_width;
  rect.h = ti.frame_height;

  SDL_DisplayYUVOverlay(yuv_overlay, &rect);
}
Example #5
void video_display(VideoState *is) {

    SDL_Rect rect;
    VideoPicture *vp;
    float aspect_ratio;
    int w, h, x, y;
    
    vp = &is->pictq[is->pictq_rindex];
    if(vp->bmp) {
        if(is->video_st->codec->sample_aspect_ratio.num == 0) {
            aspect_ratio = 0;
        } else {
            aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio) *
                is->video_st->codec->width / is->video_st->codec->height;
        }
        if(aspect_ratio <= 0.0) {
            aspect_ratio = (float)is->video_st->codec->width /
                (float)is->video_st->codec->height;
        }
        h = screen->h;
        w = ((int)rint(h * aspect_ratio)) & -3;
        if(w > screen->w) {
            w = screen->w;
            h = ((int)rint(w / aspect_ratio)) & -3;
        }
        x = (screen->w - w) / 2;
        y = (screen->h - h) / 2;

        rect.x = x;
        rect.y = y;
        rect.w = w;
        rect.h = h;
        SDL_DisplayYUVOverlay(vp->bmp, &rect);
    }
}
Example #6
int Init_SDL(int edge, int frame_width, int frame_height){
	
#ifndef SDL_NO_DISPLAY	
	int screenwidth = 0, screenheight = 0;
	unsigned char *yuv[3];
	char *window_title = "SDL Display";


	/* First, initialize SDL's video subsystem. */
	if( SDL_Init( SDL_INIT_VIDEO ) < 0 ) {
		/* Failed, exit. */
		printf("Video initialization failed: %s\n", SDL_GetError( ) );
		exit(1);
	}

	// set window title 
	SDL_WM_SetCaption(window_title, NULL);

	// yuv params
	yuv[0] = malloc((frame_width + 2 * edge) * frame_height * sizeof(unsigned char));
	yuv[1] = malloc((frame_width + edge) * frame_height / 4 * sizeof(unsigned char));
	yuv[2] = malloc((frame_width + edge) * frame_height / 4 * sizeof(unsigned char));

	screenwidth = frame_width;
	screenheight = frame_height;

	
	screen = SDL_SetVideoMode(screenwidth, screenheight, 24, SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL);

	if ( screen == NULL ) {
		printf("SDL: Couldn't set %dx%d: %s", screenwidth, screenheight, SDL_GetError());
		exit(1);
	}
	else {
		printf("SDL: Set %dx%d @ %d bpp \n",screenwidth, screenheight, screen->format->BitsPerPixel);
	}

	// Since IYUV ordering is not supported by the Xv acceleration on
	// maddog's system (Matrox G400 --- the alias I420 is, but SDL does
	// not recognize it), we use YV12 instead, which is identical except
	// for the ordering of the Cb and Cr planes...
	// We swap those when we copy the data to the display buffer...

	yuv_overlay = SDL_CreateYUVOverlay(frame_width + 2 * edge, frame_height, SDL_YV12_OVERLAY, screen);

	if ( yuv_overlay == NULL ) {
		printf("SDL: Couldn't create SDL_yuv_overlay: %s",SDL_GetError());
		exit(1);
	}


	rect.x = 0;
	rect.y = 0;
	rect.w = screenwidth + 2 * edge;
	rect.h = screenheight;
	SDL_UnlockYUVOverlay(yuv_overlay);

	SDL_DisplayYUVOverlay(yuv_overlay, &rect);
#endif
	return 0;
}
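The comment in Init_SDL() above captures the detail most of these examples revolve around: an SDL_YV12_OVERLAY stores its planes in Y, Cr, Cb order (pixels[0], pixels[1], pixels[2]), whereas I420/YUV420P sources and most converters emit planes in Y, Cb, Cr order, so the chroma planes have to be swapped either when copying the data or when setting up the destination pointers. Below is a minimal sketch of the pointer-swapping variant; the helper name is hypothetical and not taken from any of the projects quoted here.

#include <stdint.h>
#include <SDL/SDL.h>

/* Expose a YV12 overlay (Y, Cr, Cb) as an I420-ordered (Y, Cb, Cr) set of
 * destination planes and strides, so a converter that writes I420 output
 * lands each plane in the right place. */
static void yv12_overlay_as_i420(SDL_Overlay *ov,
                                 uint8_t *planes[3], int strides[3])
{
    planes[0] = ov->pixels[0];  strides[0] = ov->pitches[0];  /* Y  */
    planes[1] = ov->pixels[2];  strides[1] = ov->pitches[2];  /* Cb */
    planes[2] = ov->pixels[1];  strides[2] = ov->pitches[1];  /* Cr */
}

Several of the sws_scale()-based examples in this listing do the same swap inline by filling pict.data[]/pict.linesize[] from pixels[0], pixels[2], pixels[1].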
Example #7
int vj_sdl_update_yuv_overlay(vj_sdl * vjsdl, uint8_t ** yuv420)
{
	if (!vj_sdl_lock(vjsdl))
		return 0;
#ifdef HAVE_SDL_TTF
	if( veejay_log_to_ringbuffer() ) {
		vj_sdl_draw_to_buffer( vjsdl->font, vjsdl->width, vjsdl->height );
		vj_sdl_font_logging( vjsdl->font, yuv420, vjsdl->width, vjsdl->height );
	}
#endif
	VJFrame *src_frame = (VJFrame*) vjsdl->src_frame;
	VJFrame *dst_frame = (VJFrame*) vjsdl->dst_frame;

	src_frame->data[0] = yuv420[0];
	src_frame->data[1] = yuv420[1];
	src_frame->data[2] = yuv420[2];
	dst_frame->data[0] = vjsdl->yuv_overlay->pixels[0];

	yuv_convert_and_scale_packed( vjsdl->scaler, vjsdl->src_frame,dst_frame );

	if (!vj_sdl_unlock(vjsdl))
		return 0;

	SDL_DisplayYUVOverlay(vjsdl->yuv_overlay, &(vjsdl->rectangle));

	return 1;
}
static void open_video(void){
  if ( SDL_Init(SDL_INIT_VIDEO) < 0 ) {
    fprintf(stderr, "Unable to init SDL: %s\n", SDL_GetError());
    exit(1);
  }

  screen = SDL_SetVideoMode(ti.frame_width, ti.frame_height, 0, SDL_SWSURFACE);
  if ( screen == NULL ) {
    fprintf(stderr, "Unable to set %dx%d video: %s\n",
            ti.frame_width,ti.frame_height,SDL_GetError());
    exit(1);
  }

  yuv_overlay = SDL_CreateYUVOverlay(ti.frame_width, ti.frame_height,
                                     SDL_YV12_OVERLAY,
                                     screen);
  if ( yuv_overlay == NULL ) {
    fprintf(stderr, "SDL: Couldn't create SDL_yuv_overlay: %s\n",
            SDL_GetError());
    exit(1);
  }
  rect.x = 0;
  rect.y = 0;
  rect.w = ti.frame_width;
  rect.h = ti.frame_height;

  SDL_DisplayYUVOverlay(yuv_overlay, &rect);
}
Example #9
void render_sdl (uint8_t *mybuffer) {
	/* http://www.fourcc.org/indexyuv.htm */

	size_t Ylen= movie_width * movie_height;
	size_t UVlen= movie_width/2 * movie_height/2; 

	// decode ffmpeg - YUV 
	uint8_t *Yptr=mybuffer;
	uint8_t *Uptr=Yptr + Ylen;
	uint8_t *Vptr=Uptr + UVlen;

	if (sdl_pic_format == SDL_YV12_OVERLAY) { 
	// encode SDL YV12
		stride_memcpy(sdl_overlay->pixels[0],Yptr,movie_width,movie_height,sdl_overlay->pitches[0],movie_width);//Y
		stride_memcpy(sdl_overlay->pixels[1],Vptr,movie_width/2,movie_height/2,sdl_overlay->pitches[1],movie_width/2);//V
		stride_memcpy(sdl_overlay->pixels[2],Uptr,movie_width/2,movie_height/2,sdl_overlay->pitches[2],movie_width/2);//U
	} else {
	// encode SDL YUV
		stride_memcpy(sdl_overlay->pixels[0],Yptr,movie_width,movie_height,sdl_overlay->pitches[0],movie_width);//Y
		stride_memcpy(sdl_overlay->pixels[1],Uptr,movie_width/2,movie_height/2,sdl_overlay->pitches[1],movie_width/2);//U
		stride_memcpy(sdl_overlay->pixels[2],Vptr,movie_width/2,movie_height/2,sdl_overlay->pitches[2],movie_width/2);//V
	}

	SDL_UnlockYUVOverlay(sdl_overlay);
	SDL_DisplayYUVOverlay(sdl_overlay, &sdl_dest_rect);
	SDL_LockYUVOverlay(sdl_overlay);
}
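render_sdl() above depends on a stride_memcpy() helper that is not shown in this listing. Judging from the call sites, its arguments are destination, source, row width, row count, destination pitch, and source pitch, and it is a plain row-by-row copy between planes whose line strides differ; combined with the swapped pixels[1]/pixels[2] indices in the YV12 branch it performs the Cb/Cr swap discussed earlier. A possible implementation under that assumed signature:

#include <string.h>
#include <stdint.h>

/* Copy `height` rows of `width` bytes from `src` (line stride `src_pitch`)
 * into `dst` (line stride `dst_pitch`). Signature inferred from the call
 * sites in render_sdl(); the real helper may differ. */
static void stride_memcpy(uint8_t *dst, const uint8_t *src,
                          int width, int height,
                          int dst_pitch, int src_pitch)
{
    int row;
    for (row = 0; row < height; row++)
        memcpy(dst + row * dst_pitch, src + row * src_pitch, width);
}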
Example #10
//==================Video======================
static void SDL_VideoDisplayThread(void *)
{
    SDL_Rect rect;
    while(runFlag)
    {
        Sleep(GUISLEEPTIME);

        AVFrame* frame;
        char *data;
        int size;
        if(tunnel.isServerConnected()&&tunnel.getVideoData(&data,&size))
        {
            if(vdecoder.decodeVideoFrame((char*)data,size,&frame))
            {
                SDL_LockYUVOverlay(screenOverlay);
                screenOverlay->pixels[0]=frame->data[0];
                screenOverlay->pixels[1]=frame->data[1];
                screenOverlay->pixels[2]=frame->data[2];
                screenOverlay->pitches[0]=frame->linesize[0];
                screenOverlay->pitches[1]=frame->linesize[1];
                screenOverlay->pitches[2]=frame->linesize[2];
                rect.w = RWIDTH;
                rect.h = RHEIGHT;
                rect.x=0;
                rect.y=0;
                SDL_DisplayYUVOverlay(screenOverlay, &rect);
                SDL_UnlockYUVOverlay(screenOverlay);
            }
            free(data);
        }

    }

}
Example #11
int display_received_frame(codec_state *cs, AVFrame *r_video_frame)
{
    AVPicture pict;
    SDL_LockYUVOverlay(cs->video_picture.bmp);

    pict.data[0] = cs->video_picture.bmp->pixels[0];
    pict.data[1] = cs->video_picture.bmp->pixels[2];
    pict.data[2] = cs->video_picture.bmp->pixels[1];
    pict.linesize[0] = cs->video_picture.bmp->pitches[0];
    pict.linesize[1] = cs->video_picture.bmp->pitches[2];
    pict.linesize[2] = cs->video_picture.bmp->pitches[1];

    /* Convert the image into YUV format that SDL uses */
    sws_scale(cs->sws_SDL_r_ctx, (uint8_t const * const *)r_video_frame->data, r_video_frame->linesize, 0,
              cs->video_decoder_ctx->height, pict.data, pict.linesize );

    SDL_UnlockYUVOverlay(cs->video_picture.bmp);
    SDL_Rect rect;
    rect.x = 0;
    rect.y = 0;
    rect.w = cs->video_decoder_ctx->width;
    rect.h = cs->video_decoder_ctx->height;
    SDL_DisplayYUVOverlay(cs->video_picture.bmp, &rect);
    return 1;
}
Example #12
inline void sdlemu_draw_overlay(SDL_Surface *s, int size, int width, int height)
{
        static SDL_Rect src;
        static SDL_Rect dest;

        src.x  = 0;
        src.y  = 0;
        src.w  = width;
        src.h  = height;
        dest.x = 0;
        dest.y = 0;
        dest.w = width;
        dest.h = height;

        SDL_LockYUVOverlay(overlay);

        Convert32bit(s);

        overlay_rect.x = 0;
        overlay_rect.y = 0;
        overlay_rect.w = width  * size;
        overlay_rect.h = height * size;

        SDL_DisplayYUVOverlay(overlay, &overlay_rect);
        SDL_UnlockYUVOverlay(overlay);
}
Example #13
/*****************************************************************************
 * Display: displays previously rendered output
 *****************************************************************************
 * This function sends the currently rendered image to the display.
 *****************************************************************************/
static void Display( vout_thread_t *p_vout, picture_t *p_pic )
{
    unsigned int x, y, w, h;
    SDL_Rect disp;

    vout_PlacePicture( p_vout, p_vout->p_sys->i_width, p_vout->p_sys->i_height,
                       &x, &y, &w, &h );
    disp.x = x;
    disp.y = y;
    disp.w = w;
    disp.h = h;

    if( p_vout->p_sys->p_overlay == NULL )
    {
        /* RGB picture */
        SDL_Flip( p_vout->p_sys->p_display );
    }
    else
    {
        /* Overlay picture */
        SDL_UnlockYUVOverlay( p_pic->p_sys->p_overlay);
        SDL_DisplayYUVOverlay( p_pic->p_sys->p_overlay , &disp );
        SDL_LockYUVOverlay( p_pic->p_sys->p_overlay);
    }
}
void
av_sync(void)
{
	int num_rect = 0;
	SDL_Rect r;

#ifdef PROFILE_GRAPHICS
	float tot_area = 0;
	int i = 0;
	Uint32 ticks = SDL_GetTicks();
#endif

	SDL_Scale2x(screen_surf, screen_surf2x);
	/* copy palette and handle fading! */
	transform_palette();
	SDL_SetColors(screen_surf2x, pal_colors, 0, 256);
	SDL_BlitSurface(screen_surf2x, NULL, display, NULL);
	if (video_rect.h && video_rect.w)
	{
		av_need_update(&video_rect);
		r.h = 2 * video_rect.h;
		r.w = 2 * video_rect.w;
		r.x = 2 * video_rect.x;
		r.y = 2 * video_rect.y;
		SDL_DisplayYUVOverlay(video_overlay, &r);
	}
	if (news_rect.h && news_rect.w)
	{
		av_need_update(&news_rect);
		r.h = 2 * news_rect.h;
		r.w = 2 * news_rect.w;
		r.x = 2 * news_rect.x;
		r.y = 2 * news_rect.y;
		SDL_DisplayYUVOverlay(news_overlay, &r);
	}
	num_rect = get_dirty_rect_list();
	SDL_UpdateRects(display, num_rect, dirty_rect_list);
#ifdef PROFILE_GRAPHICS
	for (i = 0; i < num_rect; ++i)
		tot_area += dirty_rect_list[i].w * dirty_rect_list[i].h;
	tot_area = tot_area * 100 / (2 * MAX_X) / (2 * MAX_Y);
	TRACE4("%3d rects (%6.2f%%) updated in ~%3ums\n",
		num_rect, tot_area, SDL_GetTicks() - ticks);
#endif
	screen_dirty = 0;
}
Example #15
/** Show image from given colorspace.
 * @param colorspace colorspace of the supplied buffer
 * @param buffer image buffer
 */
void
ImageDisplay::show(colorspace_t colorspace, unsigned char *buffer)
{
	SDL_LockYUVOverlay(_overlay);
	convert(colorspace, YUV422_PACKED, buffer, _overlay->pixels[0], _width, _height);
	SDL_UnlockYUVOverlay(_overlay);
	SDL_DisplayYUVOverlay(_overlay, _rect);
}
Example #16
static PyObject*
Overlay_Display (PyGameOverlay *self, PyObject *args)
{
    SDL_Rect cRect;
    // Parse data params for frame
    int ls_y, ls_u, ls_v, y;
    unsigned char *src_y=0, *src_u=0, *src_v=0;

    if (PyTuple_Size (args))
    {
        if (!PyArg_ParseTuple (args, "(s#s#s#)", &src_y, &ls_y, &src_u, &ls_u,
                               &src_v, &ls_v))
            return NULL;
    }

    if (src_y)
    {
        Uint8 *dst_y=0, *dst_u=0, *dst_v=0;
        SDL_LockYUVOverlay (self->cOverlay);

        // No clipping at this time( only support for YUV420 )

        dst_y = self->cOverlay->pixels[0];
        dst_v = self->cOverlay->pixels[1];
        dst_u = self->cOverlay->pixels[2];

        for (y = 0; y < self->cOverlay->h; y++)
        {
            memcpy (dst_y, src_y, self->cOverlay->w);

            src_y += ls_y / self->cOverlay->h;
            dst_y += self->cOverlay->pitches[0];

            if (!(y & 1))
            {
                src_u += (ls_u * 2)/self->cOverlay->h;
                src_v += (ls_v * 2)/self->cOverlay->h;
                dst_u += self->cOverlay->pitches[ 1 ];
                dst_v += self->cOverlay->pitches[ 2 ];
            }
            else
            {
                memcpy (dst_u, src_u, (ls_u * 2) / self->cOverlay->h);
                memcpy (dst_v, src_v, (ls_v * 2) / self->cOverlay->h);
            }
        }

        SDL_UnlockYUVOverlay (self->cOverlay);
    }

    cRect.x = self->cRect.x;
    cRect.y = self->cRect.y;
    cRect.w = self->cRect.w;
    cRect.h = self->cRect.h;
    SDL_DisplayYUVOverlay (self->cOverlay, &cRect);

    Py_RETURN_NONE;
}
Example #17
void VideoMonitor::run()
{
	//if not inited or already running, return
	if(!recorder || stop_monitor || capture_thread) return;

	///start capture_thread
	capture_thread = Glib::Thread::create(sigc::mem_fun(*this, &VideoMonitor::capture), true);

	///monitor thread itself
	Glib::Dispatcher &emit_update_meters = *signal_update_meters;
	const unsigned char *tmp = NULL;
	while(true)
	{
		///check if we are to stop
		Glib::Mutex::Lock lock(mutex); ///< lock monitor
		if(stop_monitor) break;

		recorder->lock(); ///< lock recorder to get a picture and its info
		///get info
		motion = recorder->getMotion();
		peak   = recorder->getPeak();
		lock.release(); ///< unlock monitor

		///get picture
		if(highlight_motion) tmp = recorder->getMBuffer();
		else tmp = recorder->getVBuffer();
		memcpy(buffer, tmp, recorder->getVBSize());
		recorder->unlock(); ///< unlock recorder

		///display grabbed image
		if(screen && overlay && sws)
		{
			SDL_LockYUVOverlay(overlay);

			//fill in the picture
			avpicture_fill((AVPicture*)in_picture, buffer, in_fmt,
							width, height);

			//Convert the image into YUV format that SDL uses
			sws_scale(sws, in_picture->data, in_picture->linesize, 0,
					  height, overlay_frame.data, overlay_frame.linesize);

			SDL_UnlockYUVOverlay(overlay);

			//display the overlay
			SDL_DisplayYUVOverlay(overlay, &screen_rect);

			//poll events (or they'll pile up and freeze the app O_O)
			while(SDL_PollEvent(&event));
		}
		emit_update_meters();
		usleep(recorder->getFrameInterval()*1000000);
	}

	stop_capture();
	cleanup();
}
Example #18
int play_video(void *arg)
{
    SDL_Rect rect;
    VideoFrame *vf;
    int w, h, x, y;
    float aspect_ratio;
    Media *video = (Media *)arg;
    vf = &video->frame_buf[video->frame_display_index];
    //put our pict on the queue
    if(vf->bmp)
    {
	if(video->stream->codec->sample_aspect_ratio.num == 0) {
	    aspect_ratio = 0;
	} else {
	    aspect_ratio = av_q2d(video->stream->codec->sample_aspect_ratio) * video->stream->codec->width / video->stream->codec->height;
	}
	if(aspect_ratio <= 0.0) {
	    aspect_ratio = (float)video->stream->codec->width / (float)video->stream->codec->height;
	}
	h = screen->h;
	w = ((int)rint(h * aspect_ratio)) & -3;
	if(w > screen->w) {
	    w = screen->w;
	    h = ((int)rint(w / aspect_ratio)) & -3;
	}
	x = (screen->w - w) / 2;
	y = (screen->h - h) / 2;
#ifdef ORIGINAL_SIZE
	rect.x = 0;
	rect.y = 0;
	rect.w = video->stream->codec->width;
	rect.h = video->stream->codec->height;
	SDL_DisplayYUVOverlay(vf->bmp, &rect);
#else
	rect.x = 0;
	rect.y = 0;
	rect.w = screen->w;
	rect.h = screen->h;
	SDL_DisplayYUVOverlay(vf->bmp, &rect);
#endif //ORIGINAL_SIZE
    }

    return 0;
}
Example #19
/** Show image from YUV422_PLANAR colorspace.
 * @param yuv422_planar_buffer YUV422_PLANAR encoded image.
 */
void
ImageDisplay::show(unsigned char *yuv422_planar_buffer)
{
	SDL_LockYUVOverlay(_overlay);

	yuv422planar_to_yuv422packed(yuv422_planar_buffer, _overlay->pixels[0], _width, _height);

	SDL_UnlockYUVOverlay(_overlay);
	SDL_DisplayYUVOverlay(_overlay, _rect);
}
Example #20
static void video_display(FFMovie *movie)
{
/*DECODE THREAD - from video_refresh_timer*/

    SDL_LockMutex(movie->dest_mutex);
    if (movie->dest_overlay) {
        SDL_DisplayYUVOverlay(movie->dest_overlay, &movie->dest_rect);
    }
    SDL_UnlockMutex(movie->dest_mutex);
}
Example #21
void sdl_put(unsigned char *image, int width, int height)
{
    SDL_Event event;

    if (screen && overlay) {
        SDL_Rect rect;
        float aspect_ratio = (float)width / height;
        int pic_width, pic_height;

        if (width != cur_width || height != cur_height)
            sdl_video_open(width, height);

        if (SDL_MUSTLOCK(screen))
            if (SDL_LockSurface(screen) < 0) return;

        SDL_LockYUVOverlay(overlay);
        memcpy(overlay->pixels[0], image, width * height);
        memcpy(overlay->pixels[2], image + (width * height), (width * height / 4));
        memcpy(overlay->pixels[1], image + (width * height * 5 / 4), (width * height / 4));
        SDL_UnlockYUVOverlay(overlay);

        if (SDL_MUSTLOCK(screen))
            SDL_UnlockSurface(screen);

        pic_height = screen->h;
        pic_width = pic_height * aspect_ratio;
        if (pic_width > screen->w) {
            pic_width = screen->w;
            pic_height = pic_width / aspect_ratio;
        }
        rect.x = (screen->w - pic_width) / 2;
        rect.y = (screen->h - pic_height) / 2;
        rect.w = pic_width;
        rect.h = pic_height;

        if (SDL_DisplayYUVOverlay(overlay, &rect))
            MOTION_LOG(ERR, TYPE_ALL, SHOW_ERRNO, "%s: SDL_DisplayYUVOverlay: %s",
                        SDL_GetError());

        if (SDL_PollEvent(&event)) {
            if ((event.type == SDL_QUIT ||
                (event.type == SDL_KEYDOWN &&
                 event.key.keysym.sym == SDLK_ESCAPE)))
                sdl_stop();
            else if (event.type == SDL_KEYDOWN && event.key.keysym.sym == SDLK_f) {
                is_full_screen = !is_full_screen;
                sdl_video_open(width, height);
            }
            else if (event.type == SDL_VIDEORESIZE)
                screen = SDL_SetVideoMode(event.resize.w, event.resize.h,
                                          screen->format->BitsPerPixel,
                                          screen->flags);
        }
    }
}
Example #22
void draw_frame(void)
{
    SDL_LockYUVOverlay(my_overlay);
    (*drawer[FORMAT])();
    set_zoom_rect();
    video_rect.x = 0;
    video_rect.y = 0;
    video_rect.w = P.zoom_width;
    video_rect.h = P.zoom_height;
    SDL_UnlockYUVOverlay(my_overlay);
    SDL_DisplayYUVOverlay(my_overlay, &video_rect);
}
Example #23
static mrb_value mrb_sdl_video_display_yuv_overlay (mrb_state *mrb, mrb_value self) {
  mrb_value arg_overlay = mrb_nil_value();
  mrb_value arg_rect = mrb_nil_value();

  mrb_get_args(mrb, "|o", &arg_overlay);
  mrb_get_args(mrb, "|o", &arg_rect);
  
  SDL_Overlay* overlay = mrb_value_to_sdl_overlay(mrb, arg_overlay);
  SDL_Rect* rect = mrb_value_to_sdl_rect(mrb, arg_rect);

  return mrb_fixnum_value(SDL_DisplayYUVOverlay(overlay, rect));
}
Example #24
void vid_end()
{
	if (overlay)
	{
		SDL_UnlockYUVOverlay(overlay);
		if (fb.enabled)
			SDL_DisplayYUVOverlay(overlay, &overlay_rect);
		return;
	}
	SDL_UnlockSurface(screen);
	if (fb.enabled) SDL_Flip(screen);
}
Example #25
int show_thread(void *opaque){
	SDLParam *sdlparam=(SDLParam *)opaque;

	if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {  
		printf( "Could not initialize SDL - %s\n", SDL_GetError()); 
		return 0;
	} 

	sdlparam->screen = SDL_SetVideoMode(sdlparam->show_w, sdlparam->show_h, 0, 0);
	if(!sdlparam->screen) {  
		printf("SDL: could not set video mode - exiting\n");  
		return 0;
	}
	sdlparam->bmp = SDL_CreateYUVOverlay(sdlparam->pixel_w, sdlparam->pixel_h,SDL_YV12_OVERLAY, sdlparam->screen); 

	if(sdlparam->graphically_si){
		SDL_WM_SetCaption("Spatial perceptual Information",NULL);
	}else if(sdlparam->graphically_ti){
		SDL_WM_SetCaption("Temporal perceptual Information",NULL);
	}

	sdlparam->rect.x = 0;    
	sdlparam->rect.y = 0;    
	sdlparam->rect.w = sdlparam->show_w;    
	sdlparam->rect.h = sdlparam->show_h;    

	SDL_Event event;
	while(mark_exit==0) {
		SDL_WaitEvent(&event);
		switch(event.type){
		case REFRESH_EVENT:{
			SDL_LockYUVOverlay(sdlparam->bmp);
			sdlparam->bmp->pixels[0]=(Uint8 *)sdlparam->show_YBuffer;
			sdlparam->bmp->pixels[2]=(Uint8 *)sdlparam->show_UVBuffer;
			sdlparam->bmp->pixels[1]=(Uint8 *)sdlparam->show_UVBuffer+sdlparam->pixel_w*sdlparam->pixel_h/4;     
			sdlparam->bmp->pitches[0]=sdlparam->pixel_w;
			sdlparam->bmp->pitches[2]=sdlparam->pixel_w/2;   
			sdlparam->bmp->pitches[1]=sdlparam->pixel_w/2;
			SDL_UnlockYUVOverlay(sdlparam->bmp); 
			SDL_DisplayYUVOverlay(sdlparam->bmp, &sdlparam->rect); 
			break;
						   }
		case SDL_QUIT:{
			mark_exit=1;
			break;
					  }

		}
	}
	return 0;
}
Example #26
static void sdl_draw_frame (vo_instance_t * _instance,
			    uint8_t * const * buf, void * id)
{
    sdl_instance_t * instance = (sdl_instance_t *) _instance;
    SDL_Overlay * overlay = (SDL_Overlay *) id;
    SDL_Event event;

    while (SDL_PollEvent (&event))
	if (event.type == SDL_VIDEORESIZE)
	    instance->surface =
		SDL_SetVideoMode (event.resize.w, event.resize.h,
				  instance->bpp, instance->sdlflags);
    SDL_DisplayYUVOverlay (overlay, &(instance->surface->clip_rect));
}
Example #27
EC_VOID SDL_VideoRender::DrawFrame(VideoFrame* pFrame)
{
    if (EC_NULL == pFrame) return;

    AVFrame *pFrameYUV = (AVFrame*)pFrame;
    m_pOverlay->pixels[0] = pFrameYUV->data[0];
    m_pOverlay->pixels[2] = pFrameYUV->data[1];
    m_pOverlay->pixels[1] = pFrameYUV->data[2];
    m_pOverlay->pitches[0] = pFrameYUV->linesize[0];
    m_pOverlay->pitches[2] = pFrameYUV->linesize[1];
    m_pOverlay->pitches[1] = pFrameYUV->linesize[2];

    SDL_DisplayYUVOverlay(m_pOverlay, &m_sSDLRect);
}
Example #28
int SdlAbstract::ShowPicture (AVFrame *pFrame) {
    if (mpScreen == NULL || mpBmp == NULL || mpImgConvertCtx == NULL) {
        ERROR ("maybe init error priorly!");
        return -1;
    }

    if (pFrame == NULL) {
        ERROR ("pFrame == NULL!");
        return -1;
    }

    //int dst_pix_fmt = PIX_FMT_YUV420P;
    AVPicture pict;
    SDL_LockYUVOverlay (mpBmp);
    
    /* point pict at the queue */
    pict.data[0] = mpBmp->pixels[0];
    pict.data[1] = mpBmp->pixels[2];
    pict.data[2] = mpBmp->pixels[1];
    
    pict.linesize[0] = mpBmp->pitches[0];
    pict.linesize[1] = mpBmp->pitches[2];
    pict.linesize[2] = mpBmp->pitches[1];

    #ifdef TEST_RGB565    
    if (mpImgCvtTmp && mpBufTmp) {
        AVPicture pict2;
        pict2.data[0] = mpBufTmp;
        pict2.linesize[0] = miWidth;
        DEBUG ("begin scale RGB565");
        sws_scale (mpImgCvtTmp, pFrame->data, pFrame->linesize,
                   0, miHeight, pict2.data, pict2.linesize);
        DEBUG ("end scale RGB565");
    }
    #endif
    // Convert the image into YUV format that SDL uses
    sws_scale (mpImgConvertCtx, pFrame->data, pFrame->linesize,
               0, miHeight, pict.data, pict.linesize);
    //DEBUG ("pict(%d, %d, %d, %d)", pict.linesize[0],pict.linesize[1],pict.linesize[2],pict.linesize[3]);
    
    SDL_UnlockYUVOverlay (mpBmp);
    SDL_Rect rect;
    rect.x = 0;
    rect.y = 0;
    rect.w = miWidth;
    rect.h = miHeight;
    SDL_DisplayYUVOverlay(mpBmp, &rect);

    return 0;
}
Example #29
void
av_sync(void)
{
    SDL_Rect r;

#ifdef PROFILE_GRAPHICS
    float tot_area = 0;
    int i = 0;
    Uint32 ticks = SDL_GetTicks();
#endif

    SDL_Scale2x(display::graphics.screen()->surface(), display::graphics.scaledScreenSurface());
    /* copy palette and handle fading! */
    transform_palette();
    SDL_SetColors(display::graphics.scaledScreenSurface(), pal_colors, 0, 256);
    SDL_BlitSurface(display::graphics.scaledScreenSurface(), NULL, display::graphics.displaySurface(), NULL);

    if (display::graphics.videoRect().h && display::graphics.videoRect().w) {
        r.h = 2 * display::graphics.videoRect().h;
        r.w = 2 * display::graphics.videoRect().w;
        r.x = 2 * display::graphics.videoRect().x;
        r.y = 2 * display::graphics.videoRect().y;
        SDL_DisplayYUVOverlay(display::graphics.videoOverlay(), &r);
    }

    if (display::graphics.newsRect().h && display::graphics.newsRect().w) {
        r.h = 2 * display::graphics.newsRect().h;
        r.w = 2 * display::graphics.newsRect().w;
        r.x = 2 * display::graphics.newsRect().x;
        r.y = 2 * display::graphics.newsRect().y;
        SDL_DisplayYUVOverlay(display::graphics.newsOverlay(), &r);
    }

    //TODO: Since we're not always tracking the right dirty area (due to the graphics refactoring)
    // for now we update the entire display every time.
    SDL_UpdateRect(display::graphics.displaySurface(), 0, 0, 640, 400);
}
Example #30
void video_display(VideoState *is) {
	SDL_Rect rect;
	Frame *vp;
	vp = frame_queue_peek(&is->pictq);
	if (vp->bmp) {
		rect.x = 0;
		rect.y = 0;
		rect.w = screen->w;
		rect.h = screen->h;
		SDL_LockMutex(screen_mutex);
		SDL_DisplayYUVOverlay(vp->bmp, &rect);
		SDL_UnlockMutex(screen_mutex);
	}
	frame_queue_next(&is->pictq);
}