Example #1
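Blit routine from an SDL video output driver (the GF_* types suggest GPAC): YUV sources are rendered through an SDL YUV overlay, while RGB/RGBA sources go through pooled intermediate SDL surfaces.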
static GF_Err SDL_Blit(GF_VideoOutput *dr, GF_VideoSurface *video_src, GF_Window *src_wnd, GF_Window *dst_wnd, u32 overlay_type)
{
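	/*SDLVID() is expected to declare the driver context (ctx) from dr->opaque*/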
	SDLVID();
	u32 amask = 0;
	u32 bpp, i;
	u8 *dst, *src;
	SDL_Rect srcrc, dstrc;
	SDL_Surface **pool;

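	/*overlay path: copy the YUV source into an SDL YUV overlay and let SDL scale it on display*/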
	if (overlay_type) {
		if (!video_src) {
			if (ctx->yuv_overlay) {
				SDL_FreeYUVOverlay(ctx->yuv_overlay);
				ctx->yuv_overlay=NULL;
			}
			return GF_OK;
		}
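		/*(re)create the overlay whenever the source dimensions change*/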
		if (!ctx->yuv_overlay || (ctx->yuv_overlay->w != src_wnd->w) || (ctx->yuv_overlay->h != src_wnd->h) ) {
			if (ctx->yuv_overlay) SDL_FreeYUVOverlay(ctx->yuv_overlay);

			ctx->yuv_overlay = SDL_CreateYUVOverlay(src_wnd->w, src_wnd->h, SDL_YV12_OVERLAY, ctx->screen);
			if (!ctx->yuv_overlay) return GF_NOT_SUPPORTED;
		}
		/*copy pixels*/
		SDL_LockYUVOverlay(ctx->yuv_overlay);

		copy_yuv(ctx->yuv_overlay->pixels[0], ctx->yuv_overlay->pixels[1], ctx->yuv_overlay->pixels[2], GF_PIXEL_YV12, ctx->yuv_overlay->pitches[0], 
			video_src->video_buffer, video_src->pitch_y, video_src->pixel_format,
			video_src->width, video_src->height, src_wnd);

		SDL_UnlockYUVOverlay(ctx->yuv_overlay);

		dstrc.w = dst_wnd->w;
		dstrc.h = dst_wnd->h;
		dstrc.x = dst_wnd->x;
		dstrc.y = dst_wnd->y;
		SDL_DisplayYUVOverlay(ctx->yuv_overlay, &dstrc);
		return GF_OK;
	}

	/*SDL doesn't support stretching ...*/
	if ((src_wnd->w != dst_wnd->w) || (src_wnd->h!=dst_wnd->h)) return GF_NOT_SUPPORTED;

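	/*map the source pixel format onto one of the pooled RGB surfaces*/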
	switch (video_src->pixel_format) {
	case GF_PIXEL_RGB_24:
		pool = &ctx->pool_rgb;
		bpp = 3;
		break;
	case GF_PIXEL_RGBA:
		pool = &ctx->pool_rgba;
		amask = 0xFF000000;
		bpp = 4;
		break;
	default:
		return GF_NOT_SUPPORTED;
	}

	if (! *pool || ((*pool)->w < (int) src_wnd->w) || ((*pool)->h < (int) src_wnd->h) ) {
		if ((*pool)) SDL_FreeSurface((*pool));
		(*pool) = SDL_CreateRGBSurface(ctx->use_systems_memory ? SDL_SWSURFACE : SDL_HWSURFACE, 
						src_wnd->w, src_wnd->h, 8*bpp, 
						0x000000FF, 0x0000FF00, 0x00FF0000, amask);
		if (! (*pool) ) return GF_IO_ERR;
	}

	SDL_LockSurface(*pool);

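	/*copy the source rectangle line by line, honoring each side's pitch*/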
	dst = (u8 *) ( (*pool)->pixels);
	src = video_src->video_buffer + video_src->pitch_y*src_wnd->y + src_wnd->x*bpp;
	for (i=0; i<src_wnd->h; i++) {
		memcpy(dst, src, bpp * src_wnd->w);
		src += video_src->pitch_y;
		dst += (*pool)->pitch;
	}
	SDL_UnlockSurface(*pool);

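	/*1:1 blit from the pool surface to the back buffer*/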
	srcrc.w = src_wnd->w;
	srcrc.h = src_wnd->h;
	srcrc.x = 0;
	srcrc.y = 0;

	dstrc.w = dst_wnd->w;
	dstrc.h = dst_wnd->h;
	dstrc.x = dst_wnd->x;
	dstrc.y = dst_wnd->y;

	SDL_BlitSurface(*pool, &srcrc, ctx->back_buffer, &dstrc);
	return GF_OK;
}
Example #2
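Worker thread apparently from the unicap ucil Theora file input (suggested by the ucil_theora_* identifiers): it decodes Ogg/Theora packets and delivers frames at the stream's frame rate.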
static void *ucil_theora_worker_thread( ucil_theora_input_file_object_t *vobj )
{
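   /* worker thread: decode Ogg/Theora packets and deliver frames until asked to quit */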
   unicap_data_buffer_t new_frame_buffer;

   struct timeval ltime;
   int eos = 0;

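   /* scratch buffer that always holds the most recently decoded frame */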
   unicap_copy_format( &new_frame_buffer.format, &vobj->format );
   new_frame_buffer.type = UNICAP_BUFFER_TYPE_SYSTEM;
   new_frame_buffer.buffer_size = new_frame_buffer.format.buffer_size;
   new_frame_buffer.data = malloc( new_frame_buffer.format.buffer_size );

   gettimeofday( &ltime, NULL );
   
   while( !vobj->quit_capture_thread )
   {
      struct timespec abs_timeout;
      struct timeval  ctime;
      GList *entry;
      ogg_page og;
      ogg_packet op;
      size_t bytes;

      int buffer_ready = 0;
      
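      /* try to pull the next complete Theora packet and decode it into the scratch buffer */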
      if( !eos && ( ogg_stream_packetout( &vobj->os, &op ) > 0 ) )
      {
	 yuv_buffer yuv;

	 theora_decode_packetin( &vobj->th, &op );
	 theora_decode_YUVout( &vobj->th, &yuv );
	 copy_yuv( new_frame_buffer.data, &yuv, &vobj->ti );

	 buffer_ready = 1;
      }
      else if( !eos )
      {
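	 /* no complete packet available: feed more file data into the Ogg sync layer */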
	 bytes = buffer_data( vobj->f, &vobj->oy );
	 if( !bytes )
	 {
	    TRACE( "End of stream\n" );
	    eos = 1;
	 }

	 while( ogg_sync_pageout( &vobj->oy, &og ) > 0 )
	 {
	    ogg_stream_pagein( &vobj->os, &og );
	 }
	 continue;
      }
      else
      {
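	 /* end of stream: keep redelivering the last decoded frame */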
	 buffer_ready = 1;
      }

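      /* take the semaphore (1 s absolute timeout) before touching the shared queues */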
      gettimeofday( &ctime, NULL );
      abs_timeout.tv_sec = ctime.tv_sec + 1;
      abs_timeout.tv_nsec = ctime.tv_usec * 1000;
      if( sem_timedwait( &vobj->sema, &abs_timeout ) )
      {
	 TRACE( "SEM_WAIT FAILED\n" );
	 continue;
      }

      if( buffer_ready && vobj->event_callback )
      {
	 vobj->event_callback( vobj->event_unicap_handle, UNICAP_EVENT_NEW_FRAME, &new_frame_buffer );
	 TRACE( "New frame\n" );
      }
      
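      /* if the application queued an input buffer, copy the frame into it and pass it on */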
      unicap_data_buffer_t *data_buffer = g_queue_pop_head( vobj->in_queue );
      if( data_buffer )
      {
	 unicap_copy_format( &data_buffer->format, &vobj->format );
	 memcpy( data_buffer->data, new_frame_buffer.data, vobj->format.buffer_size );
	 
	 g_queue_push_tail( vobj->out_queue, data_buffer );
      }

      sem_post( &vobj->sema );
      
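      /* pace delivery: wait out the remainder of the frame interval */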
      if( buffer_ready )
      {
	 /* elapsed = ctime - ltime, borrowing on the microsecond field if needed */
	 gettimeofday( &ctime, NULL );
	 if( ctime.tv_usec < ltime.tv_usec )
	 {
	    ctime.tv_usec += 1000000;
	    ctime.tv_sec -= 1;
	 }

	 ctime.tv_usec -= ltime.tv_usec;
	 ctime.tv_sec -= ltime.tv_sec;

	 if( ( ctime.tv_sec == 0 ) &&
	     ( ctime.tv_usec < vobj->frame_intervall ) )
	 {
	    usleep( vobj->frame_intervall - ctime.tv_usec );
	 }

	 gettimeofday( &ltime, NULL );
      }
   }

   free( new_frame_buffer.data );
   return NULL;
}