Example #1
File: jpegplay.c Project: emon/emon
int 
jpeg_display_yuv(int argc, char *argv[])
{
	u_int8_t       *y_buf = NULL, *u_buf = NULL, *v_buf = NULL;
	u_int8_t       *yuv_buf_tmp;
	u_int8_t       *jpeg_buf;
	buf_t           jpeg_src_buf;
	size_t          read_size;	/* read size from file */
	int             i, x, y, offU, offV;
	int             row_stride;
	int             w, h, d;/* display size and depth(byte) */
	boolean         draw_screen;	/* draw screen surface or another
					 * surface */

	/* for jpeg library */
	struct jpeg_decompress_struct cinfo;
	struct jpeg_error_mgr jerr;

	/* for SDL */
	SDL_Surface    *screen;
	SDL_Overlay    *sdl_overlay = NULL;
	SDL_Rect        dstrect;
	u_int32_t       tick_start, tick_now;

	/* for realtime play */
	struct timeval  tv_start,tv_now;
	int64_t         ts_display,ts_diff;
	u_int32_t       ts_nowblk,ts_lastblk;

	/* for debug */
	struct timeval  tv_tmp1, tv_tmp2;

	/* SDL init */
	if (SDL_Init(SDL_INIT_VIDEO) < 0) {
		ComplainAndExit();
	}
	SDL_WM_SetCaption(OPT.title, OPT.title);
	atexit(SDL_Quit);

	/* jpeg library init / get image size and depth */
	cinfo.err = jpeg_std_error(&jerr);
	cinfo.err->error_exit = my_jpeg_abort_decompress;
	jpeg_create_decompress(&cinfo);

#ifdef USE_JPEG_MEM_SRC
	jpeg_buf = jpeg_mem_src_init(&cinfo, JPEG_BUF_MAX);	/* init */
#else				/* Mr.Okamura's */
	jpeg_buf = (u_int8_t *) malloc(JPEG_BUF_MAX);
#endif

	decoder_buf_read();	/* pipe -> buffer */
	read_size = decoder_buf_get(jpeg_buf, JPEG_BUF_MAX,&ts_nowblk);	/* buffer -> mem */

#ifdef USE_JPEG_MEM_SRC		/* emon's original */
	jpeg_mem_src(&cinfo, jpeg_buf, read_size);
#else				/* Mr.Okamura's */
	jpeg_src_buf.buf = jpeg_buf;
	jpeg_buf_src(&cinfo, jpeg_buf, read_size);
#endif
	jpeg_read_header(&cinfo, TRUE);
	w = cinfo.image_width;
	h = cinfo.image_height;
	d = cinfo.num_components;
	jpeg_abort_decompress(&cinfo);
	offU = (w * h) * 4 / 4;
	offV = (w * h) * 5 / 4;

	d_printf("\nJPEG info: image=(%d,%d), output=(%d,%d), Bpp=%d\n",
		 cinfo.image_width, cinfo.image_height, w, h, d);

	/* SDL setup */
	draw_screen = FALSE;	/* can we draw into video memory directly? */
	if ((screen = SDL_SetVideoMode(w, h, 0, SDL_ASYNCBLIT)) == NULL) {
		ComplainAndExit();
	}
	d1_printf("\nSDL screen  info: bpp=%d, Bpp=%d, "
		  "R/G/B-mask=%06x/%06x/%06x\n",
		  screen->format->BitsPerPixel,
		  screen->format->BytesPerPixel,
		  screen->format->Rmask,
		  screen->format->Gmask,
		  screen->format->Bmask
		);

	if ((sdl_overlay = my_SDL_CreateYUVOverlay(w, h, screen)) == NULL) {
		ComplainAndExit();
	}
	d1_printf("\nSDL surface info: format=0x%08x, planes=%d, "
		  "hw_overlay_flag=%d\n",
		  sdl_overlay->format,
		  sdl_overlay->planes,
		  sdl_overlay->hw_overlay
		);

	row_stride = JPEG_YCbCr_PITCH * w;
	yuv_buf_tmp = malloc(JPEG_YCbCr_PITCH * w * h);

	STAT.skip_count = 0;
	STAT.wait_count = 0;
	tick_start = SDL_GetTicks();	/* get start time */
	gettimeofday(&STAT.start, NULL);

	STAT.f_sigint = 0;
	signal(SIGINT, sigint_trap);
	for (STAT.frame_count = 1;; STAT.frame_count++) {
		int             sleep_usec = 0;
		if (STAT.f_sigint)
			sigint_quit();

		jpeg_has_error = 0;	/* reset flag */

		decoder_buf_read();
		read_size = decoder_buf_get(jpeg_buf, JPEG_BUF_MAX,&ts_nowblk);
#ifdef REALTIME_PLAY
		tick_now = SDL_GetTicks();
		if (tick_now - tick_start > STAT.frame_count * OPT.mspf) {
			/* skip frame because it's too late */
			STAT.skip_count++;
			d2_printf("s");
			continue;
		}
#endif
		if (read_size == 0) {	/* buffer empty */
			usleep(OPT.mspf * 1000 * 9 / 10);	/* wait roughly 90% of one frame period; multiply before dividing so integer math does not truncate to 0 */
			STAT.frame_count--;	/* no skip, no display */
			continue;
		} else if (read_size == -1) {
			break;	/* end of all files */
		}
#ifdef REALTIME_PLAY
		sleep_usec =
			((STAT.frame_count - 1) * OPT.mspf
			 - (tick_now - tick_start)) * 1000;
		if (sleep_usec > 0) {
			usleep(sleep_usec);
			STAT.wait_count++;
			//decoder_buf_read();	/* check new data */
		}
#endif
#ifdef USE_JPEG_MEM_SRC
		jpeg_mem_src(&cinfo, jpeg_buf, read_size);
#else
		jpeg_src_buf.buf = jpeg_buf;
		jpeg_buf_src(&cinfo, jpeg_buf, read_size);
#endif
		jpeg_read_header(&cinfo, TRUE);

		cinfo.output_width = w;
		cinfo.output_height = h;
		cinfo.out_color_space = JCS_YCbCr;
		/* faster decompression */
		cinfo.dct_method = JDCT_FASTEST;
		//cinfo.dct_method = JDCT_FLOAT;
		cinfo.do_fancy_upsampling = FALSE;

		if (!jpeg_has_error) {
			jpeg_start_decompress(&cinfo);
		}
		/* JPEG decode start */
		gettimeofday(&tv_tmp1, NULL);
		while (cinfo.output_scanline < cinfo.output_height &&
		       !jpeg_has_error) {
			JSAMPLE        *yuv_scanline;
			yuv_scanline = &(yuv_buf_tmp[cinfo.output_scanline *
						     row_stride]);
			jpeg_read_scanlines(&cinfo, &yuv_scanline, 1);
		}
		jpeg_finish_decompress(&cinfo);
		/* JPEG decode finish */
		gettimeofday(&tv_tmp2, NULL);
		STAT.decode_usec += timeval_diff_usec(&tv_tmp2, &tv_tmp1);

		decoder_buf_read();	/* check new data */

		switch (sdl_overlay->format) {

		case SDL_YV12_OVERLAY:
#ifdef  SDL_1_1_5		/* for < SDL 1.1.5 */
			y_buf = (u_int8_t *) sdl_overlay->pixels;
			u_buf = &(y_buf[offU]);
			v_buf = &(y_buf[offV]);
#else
			y_buf = sdl_overlay->pixels[0];
			u_buf = sdl_overlay->pixels[1];
			v_buf = sdl_overlay->pixels[2];
#endif
			for (i = 0; i < w * h; i++) {
				y_buf[i] = yuv_buf_tmp[i * JPEG_YCbCr_PITCH];
			}
			for (i = 0, y = 0; y < h; y += 2) {
				for (x = 0; x < w; x += 2, i++) {
					const int       p =
					(y * w + x) * JPEG_YCbCr_PITCH;
					u_buf[i] = yuv_buf_tmp[p + 2];
					v_buf[i] = yuv_buf_tmp[p + 1];
				}
			}
			break;
		case SDL_IYUV_OVERLAY:
#ifdef  SDL_1_1_5		/* for < SDL 1.1.5 */
			y_buf = (u_int8_t *) sdl_overlay->pixels;
			v_buf = &(y_buf[offU]);
			u_buf = &(y_buf[offV]);
#else
			y_buf = sdl_overlay->pixels[0];
			v_buf = sdl_overlay->pixels[1];
			u_buf = sdl_overlay->pixels[2];
#endif
			for (i = 0; i < w * h; i++) {
				y_buf[i] = yuv_buf_tmp[i * JPEG_YCbCr_PITCH];
			}
			for (i = 0, y = 0; y < h; y += 2) {
				for (x = 0; x < w; x += 2, i++) {
					const int       p =
					(y * w + x) * JPEG_YCbCr_PITCH;
					u_buf[i] = yuv_buf_tmp[p + 2];
					v_buf[i] = yuv_buf_tmp[p + 1];
				}
			}
			break;
		default:
			d_printf("\nI'm sorry. not support YUV format: %0x08x\n",
				 sdl_overlay->format);
			exit(1);
			break;
		}

		if (OPT.f_disp) {	/* display flag check */
			dstrect.x = 0;
			dstrect.y = 0;
			dstrect.w = sdl_overlay->w;
			dstrect.h = sdl_overlay->h;

			if (SDL_LockYUVOverlay(sdl_overlay) < 0) {
				ComplainAndExit();
			}
			if (SDL_DisplayYUVOverlay(sdl_overlay, &dstrect) < 0) {
				SDL_FreeYUVOverlay(sdl_overlay);
				ComplainAndExit();
			}
			SDL_UnlockYUVOverlay(sdl_overlay);
		}
	}			/* end of per-frame loop over all JPEG data */

	STAT.frame_count--;	/* counting started at 1 */
	tick_now = SDL_GetTicks();
	if (!draw_screen) {
		SDL_FreeYUVOverlay(sdl_overlay);
	}
	jpeg_destroy_decompress(&cinfo);
	statistics_print(&STAT);
	return 0;
}
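
The overlay-fill loops above repack libjpeg's interleaved YCbCr output (three bytes per pixel) into the planar 4:2:0 layout an SDL YUV overlay expects: a full-resolution luma plane plus two quarter-resolution chroma planes, keeping the chroma sample of every even (x, y) position. The following is a minimal standalone sketch of that repacking, assuming a fixed pitch of 3; the helper name yuv_interleaved_to_planar420 and the test driver are illustrative only, and the caller still has to hand the two chroma planes to the overlay in whichever order its format (YV12 vs. IYUV) requires, exactly as the switch above does.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical helper (not from jpegplay.c): repack interleaved YCbCr
 * samples (Y,Cb,Cr per pixel, pitch 3) into a full-size Y plane and two
 * quarter-size chroma planes, keeping the chroma sample of every even
 * (x, y) position.  w and h are assumed to be even. */
static void yuv_interleaved_to_planar420(const uint8_t *src, int w, int h,
					 uint8_t *y_plane,
					 uint8_t *cb_plane, uint8_t *cr_plane)
{
	int x, y, i;

	for (i = 0; i < w * h; i++)
		y_plane[i] = src[i * 3];	/* luma: every pixel */

	for (i = 0, y = 0; y < h; y += 2)
		for (x = 0; x < w; x += 2, i++) {
			const int p = (y * w + x) * 3;
			cb_plane[i] = src[p + 1];	/* Cb (U) */
			cr_plane[i] = src[p + 2];	/* Cr (V) */
		}
}

int main(void)
{
	enum { W = 4, H = 2 };
	uint8_t src[W * H * 3];
	uint8_t yp[W * H], cb[W * H / 4], cr[W * H / 4];
	int     i;

	for (i = 0; i < (int)sizeof(src); i++)	/* fill with a test pattern */
		src[i] = (uint8_t)i;

	yuv_interleaved_to_planar420(src, W, H, yp, cb, cr);
	printf("Y[0]=%u Cb[0]=%u Cr[0]=%u\n",
	       (unsigned)yp[0], (unsigned)cb[0], (unsigned)cr[0]);
	return 0;
}
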
Example #2
File: jpegplay.c Project: emon/emon
int 
jpeg_display_rgb(int argc, char *argv[])
{
	u_int8_t       *rgb_buf;/* RGB data */
	u_int8_t       *jpeg_buf;	/* JPEG data */
	buf_t           jpeg_src_buf;
	size_t          read_size;	/* read size from file */
	int             row_stride;
	int             w, h, d;/* display size and depth(byte) */
	boolean         draw_screen;	/* draw screen surface or another
					 * surface */
	int             screen_status; /* 0=general 1=blue 2=red */
	/* for jpeg library */
	struct jpeg_decompress_struct cinfo;
	struct jpeg_error_mgr jerr;

	/* for SDL */
	SDL_Surface    *screen;
	SDL_Surface    *sdl_image = NULL;
//	SDL_Rect        dstrect;

	/* for visualize packet loss */
	SDL_Surface    *sdl_img_blue = NULL;
	SDL_Surface    *sdl_img_red  = NULL;
	u_int8_t        pixel_blue[]={0,0,0xff};
	u_int8_t        pixel_red[]={0xff,0,0};
	u_int32_t       tick_start, tick_now;
	int             buf_status; /* -1=full */

	/* for realtime play */
	struct timeval  tv_start,tv_now;
	int64_t         ts_display,ts_diff;
	u_int32_t       ts_nowblk,ts_lastblk=0;
	
	/* for debug */
	struct timeval  tv_tmp1, tv_tmp2;
	

	/* SDL init */
	if (SDL_Init(SDL_INIT_VIDEO) < 0) {
		ComplainAndExit();
	}
	atexit(SDL_Quit);

	/* jpeg library init / get image size and depth */
	cinfo.err = jpeg_std_error(&jerr);
	cinfo.err->error_exit = my_jpeg_abort_decompress;
	jpeg_create_decompress(&cinfo);

#ifdef USE_JPEG_MEM_SRC
	jpeg_buf = jpeg_mem_src_init(&cinfo, JPEG_BUF_MAX);	/* init */
#else
	jpeg_buf = (u_int8_t *) malloc(JPEG_BUF_MAX);
#endif

	decoder_buf_read();
	read_size = decoder_buf_get(jpeg_buf, JPEG_BUF_MAX,&ts_nowblk);

#ifdef USE_JPEG_MEM_SRC
	jpeg_mem_src(&cinfo, jpeg_buf, read_size);	/* read from memory */
#else
	jpeg_src_buf.buf = jpeg_buf;
	jpeg_buf_src(&cinfo, jpeg_buf, read_size);	/* read from memory */
#endif

	jpeg_read_header(&cinfo, TRUE);
	w = cinfo.image_width;
	h = cinfo.image_height;
	d = cinfo.num_components;
	jpeg_abort_decompress(&cinfo);

	d_printf("\nJPEG info: image=(%d,%d), output=(%d,%d), Bpp=%d\n",
		 cinfo.image_width, cinfo.image_height, w, h, d);

	/* SDL setup / cleanup screen-surface */
	screen = SDL_SetVideoMode(w, h, 0, SDL_HWSURFACE);
	if (screen == NULL) {
		ComplainAndExit();
	}
	if (OPT.loss_visual){
		sdl_img_blue = SDL_CreateRGBSurface(
			SDL_HWSURFACE, w, h, 24
			,OPT.sdl_mask_R, OPT.sdl_mask_G
			,OPT.sdl_mask_B, OPT.sdl_mask_A);
		sdl_img_red = SDL_CreateRGBSurface(
			SDL_HWSURFACE, w, h, 24
			,OPT.sdl_mask_R, OPT.sdl_mask_G
			,OPT.sdl_mask_B, OPT.sdl_mask_A);
		jpeg_fillimg1color(sdl_img_red,pixel_red,3);
		jpeg_fillimg1color(sdl_img_blue,pixel_blue,3);
	}
	if (screen->format->BytesPerPixel == RGB_PIXELSIZE &&
	    screen->format->Rmask == OPT.sdl_mask_R &&
	    screen->format->Gmask == OPT.sdl_mask_G &&
	    screen->format->Bmask == OPT.sdl_mask_B) {
		draw_screen = TRUE;
	} else {
		draw_screen = FALSE;
	}

	d1_printf("\nSDL screen  info: bpp=%d, Bpp=%d, "
		  "R/G/B-mask=%06x/%06x/%06x, Direct=%s",
		screen->format->BitsPerPixel, screen->format->BytesPerPixel,
		  screen->format->Rmask, screen->format->Gmask,
		  screen->format->Bmask,
		  draw_screen ? "ON" : "OFF");

	if (draw_screen==TRUE) {
		/* RGB_PIXELSIZE is defined in jmorecfg.h */
		row_stride = screen->pitch;
	        rgb_buf = (u_int8_t *) screen->pixels;
	} else {
		sdl_image = SDL_CreateRGBSurface(
						 SDL_HWSURFACE, w, h, 24,	/* depth (bit per pixel) */
					     OPT.sdl_mask_R, OPT.sdl_mask_G,
					    OPT.sdl_mask_B, OPT.sdl_mask_A);
		row_stride = sdl_image->pitch;
		d1_printf("\nSDL surface info: bpp=%d, Bpp=%d, "
			  "R/G/B-mask=%06x/%06x/%06x\n",
			  sdl_image->format->BitsPerPixel,
			  sdl_image->format->BytesPerPixel,
			  sdl_image->format->Rmask,
			  sdl_image->format->Gmask,
			  sdl_image->format->Bmask
			);
		rgb_buf = (u_int8_t *) sdl_image->pixels;
	}

	STAT.skip_count = 0;
	STAT.wait_count = 0;

	tick_start = SDL_GetTicks();	/* get start time */
	gettimeofday(&STAT.start, NULL);
	gettimeofday(&tv_start, NULL);
	ts_display=0;

	STAT.f_sigint = 0;
	signal(SIGINT, sigint_trap);



	jpeg_fillimg1color(screen,pixel_blue,3);
	screen_status=1;

	for (STAT.frame_count = 1;; STAT.frame_count++) {
		if (STAT.f_sigint)
			sigint_quit();

		jpeg_has_error = 0;	/* reset flag */
		buf_status=decoder_buf_read();
#if 0
		read_size = decoder_buf_get(jpeg_buf, JPEG_BUF_MAX);
#ifdef REALTIME_PLAY
		tick_now = SDL_GetTicks();
		if (tick_now - tick_start > STAT.frame_count * OPT.mspf) {
			/* skip frame because it's too late */
			STAT.skip_count++;
			continue;
		}
#endif
#else
		read_size = decoder_buf_get(jpeg_buf, JPEG_BUF_MAX,&ts_nowblk);
#ifdef REALTIME_PLAY
		if(buf_status==-1){
			/* need more cpu power */
			if(OPT.loss_visual){
				jpeg_blitimg2screen(sdl_img_red,screen);
				screen_status=2;
			}
			d1_printf("INFO:buffer skip\n");
			STAT.skip_count+=decoder_buf_get_datanum()/2;
			STAT.frame_count+=decoder_buf_get_datanum()/2;
			decoder_buf_rm(decoder_buf_get_datanum()/2);
			ts_display=0;
			gettimeofday(&tv_start, NULL);
			continue;
		}
#endif
#endif
		if (read_size == 0) {	/* buffer empty */
			d1_printf("INFO:buffer empty\n");
			STAT.frame_count--;	/* no skip, no display */
			if(OPT.loss_visual && screen_status!=1){
			  jpeg_blitimg2screen(sdl_img_blue,screen);
			  screen_status=1;
			}
			decoder_buf_prebuf();
			ts_display=0;
			gettimeofday(&tv_start, NULL);
			continue;
		} else if (read_size == -2) {
		    break;	/* end of all files */
		}
#ifdef USE_JPEG_MEM_SRC
		jpeg_mem_src(&cinfo, jpeg_buf, read_size);
#else
		jpeg_src_buf.buf = jpeg_buf;
		jpeg_buf_src(&cinfo, jpeg_buf, read_size);
#endif
		jpeg_read_header(&cinfo, TRUE);

		cinfo.output_width = w;
		cinfo.output_height = h;
		cinfo.out_color_space = JCS_RGB;	/* default */
		cinfo.output_components = d;
		/* faster decompression */
		cinfo.dct_method = JDCT_FASTEST;
		//cinfo.dct_method = JDCT_FLOAT;
		cinfo.do_fancy_upsampling = FALSE;

		jpeg_start_decompress(&cinfo);

		/* screen surface lock */

		if (SDL_MUSTLOCK(screen)) {
			if (SDL_LockSurface(screen) < 0) {
				ComplainAndExit();
			}
		}
		/* JPEG decode start */
		gettimeofday(&tv_tmp1, NULL);
		while (cinfo.output_scanline < cinfo.output_height &&
		       !jpeg_has_error) {
			JSAMPLE        *rgb_scanline;
			rgb_scanline = &(rgb_buf[cinfo.output_scanline *
						 row_stride]);
			jpeg_read_scanlines(&cinfo, &rgb_scanline, 1);
		}
		/* screen surface unlock */
		if (SDL_MUSTLOCK(screen)) {
			SDL_UnlockSurface(screen);
		}
		jpeg_finish_decompress(&cinfo);
		/* JPEG decode finish */
		gettimeofday(&tv_tmp2, NULL);
		STAT.decode_usec += timeval_diff_usec(&tv_tmp2, &tv_tmp1);
#ifdef REALTIME_PLAY
		wait4rtdisplay(&tv_start,ts_display);

		if(ts_display==0){
		  ts_diff=OPT.freq;
		}else{
		  ts_diff=(u_int32_t)(ts_nowblk-ts_lastblk);
		}
		ts_lastblk=ts_nowblk;

		if(ts_diff<OPT.freq){
	 	  e_printf("INFO:reset TS interval\n");
		}
		if(ts_diff>OPT.freq){
		  d2_printf("blue back start for %u TS\n",(u_int32_t)ts_diff-OPT.freq);
		  ts_display+=(ts_diff-OPT.freq);
		  if(OPT.loss_visual && screen_status!=1){
  		    jpeg_blitimg2screen(sdl_img_blue,screen);
		    screen_status=1;
		  }
		  wait4rtdisplay(&tv_start,ts_display);
		  d2_printf("blue back end.\n");
		}else{
		  d3_printf("no blue back .\n");
		}
		ts_display+= OPT.freq;
#endif
		if (draw_screen==TRUE) {
			SDL_UpdateRect(screen, 0, 0, 0, 0);
			screen_status=0;
		} else {
			jpeg_blitimg2screen(sdl_image,screen);
			screen_status=0;
#if 0
			dstrect.x = 0;
			dstrect.y = 0;
			dstrect.w = sdl_image->w;
			dstrect.h = sdl_image->h;
			if (SDL_BlitSurface(sdl_image, NULL, screen, &dstrect) < 0) {
				SDL_FreeSurface(sdl_image);
				ComplainAndExit();
			}
#if 0
			if (SDL_MUSTLOCK(screen)) {
				SDL_UnlockSurface(screen);
			}
#endif
			SDL_UpdateRects(screen, 1, &dstrect);
#endif 
		}
	}			/* end of per-frame loop over all JPEG data */
	STAT.frame_count--;	/* counting started at 1 */
	tick_now = SDL_GetTicks();

	if (!draw_screen) {
		SDL_FreeSurface(sdl_image);
	}
	jpeg_destroy_decompress(&cinfo);
	statistics_print(&STAT);
	return 0;
}
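
Example #2 paces playback with wait4rtdisplay(&tv_start, ts_display), where ts_display accumulates OPT.freq timestamp ticks per displayed frame. That helper is not part of this excerpt; the sketch below is only a plausible stand-in, assuming ts_display counts ticks of a clock running at freq ticks per second since tv_start (the 90 kHz value and 30 fps step in the driver are likewise just examples).

#include <stdint.h>
#include <sys/time.h>
#include <unistd.h>

/* Hypothetical stand-in for wait4rtdisplay(): sleep until ts_ticks
 * timestamp units (at freq ticks per second) have elapsed since *start.
 * This is a sketch of the pacing idea the loop above relies on, not the
 * project's actual implementation. */
static void wait_for_display(const struct timeval *start,
			     int64_t ts_ticks, int64_t freq)
{
	struct timeval now;
	int64_t elapsed_usec, target_usec;

	gettimeofday(&now, NULL);
	elapsed_usec = (int64_t)(now.tv_sec - start->tv_sec) * 1000000
		     + (now.tv_usec - start->tv_usec);
	target_usec = ts_ticks * 1000000 / freq;

	if (target_usec > elapsed_usec)
		usleep((useconds_t)(target_usec - elapsed_usec));	/* early: wait */
	/* late: return immediately and let the caller decide whether to skip */
}

int main(void)
{
	struct timeval start;
	int64_t        ts = 0;
	const int64_t  freq = 90000;	/* e.g. a 90 kHz RTP-style clock */
	int            frame;

	gettimeofday(&start, NULL);
	for (frame = 0; frame < 3; frame++) {
		ts += freq / 30;	/* pretend 30 frames per second */
		wait_for_display(&start, ts, freq);
	}
	return 0;
}
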
Example #3
// decode a frame
static mp_image_t* decode(sh_video_t *sh,void* data,int len,int flags){
 mp_image_t * mpi = NULL;
 int	      width,height,depth,i;

 if ( len <= 0 ) return NULL; // skipped frame

 cinfo.err=jpeg_std_error( &jerr.pub );
 jerr.pub.error_exit=my_error_exit;
 if( setjmp( jerr.setjmp_buffer ) )
  {
   mp_msg( MSGT_DECVIDEO,MSGL_ERR,"[ijpg] setjmp error ...\n" );
   return NULL;
  }
  
 jpeg_create_decompress( &cinfo );
 jpeg_buf_src( &cinfo,data,len );
 jpeg_read_header( &cinfo,TRUE );
 sh->disp_w=width=cinfo.image_width;
 sh->disp_h=height=cinfo.image_height;
 jpeg_start_decompress( &cinfo );
 depth=cinfo.output_components * 8;

 switch( depth ) {
   case 8:
   case 24: break;
   default: mp_msg( MSGT_DECVIDEO,MSGL_ERR,"Sorry, unsupported JPEG colorspace: %d.\n",depth ); return NULL;
 }

 if ( last_w!=width || last_h!=height )
  {
   if(!mpcodecs_config_vo( sh,width,height, IMGFMT_RGB24 )) return NULL;
   if(temp_row) free(temp_row);
   temp_row=malloc(3*width+16);
   last_w=width; last_h=height;
  }

 mpi=mpcodecs_get_image( sh,MP_IMGTYPE_TEMP,MP_IMGFLAG_ACCEPT_STRIDE,width,height );
 if ( !mpi ) return NULL;

 row_stride=cinfo.output_width * cinfo.output_components;

 for ( i=0;i < height;i++ )
  {
   unsigned char * drow = mpi->planes[0] + mpi->stride[0] * i;
   unsigned char * row = (mpi->imgfmt==IMGFMT_RGB24 && depth==24) ? drow : temp_row;
   jpeg_read_scanlines( &cinfo,(JSAMPLE**)&row,1 );
   if(depth==8){
       // grayscale -> rgb/bgr 24/32
       int x;
       if(mpi->bpp==32)
         for(x=0;x<width;x++) ((uint32_t*)drow)[x]=0x010101*row[x]; // replicate the gray value into the R, G and B bytes of the 32-bit pixel
       else
         for(x=0;x<width;x++) drow[3*x+0]=drow[3*x+1]=drow[3*x+2]=row[x];
   } else {
       int x;
       switch(mpi->imgfmt){
       // rgb24 -> bgr24
       case IMGFMT_BGR24:
           for(x=0;x<3*width;x+=3){
	       drow[x+0]=row[x+2];
	       drow[x+1]=row[x+1];
	       drow[x+2]=row[x+0];
	   }
	   break;
       // rgb24 -> bgr32
       case IMGFMT_BGR32:
           for(x=0;x<width;x++){
#ifdef WORDS_BIGENDIAN
	       drow[4*x+1]=row[3*x+0];
	       drow[4*x+2]=row[3*x+1];
	       drow[4*x+3]=row[3*x+2];
#else
	       drow[4*x+0]=row[3*x+2];
	       drow[4*x+1]=row[3*x+1];
	       drow[4*x+2]=row[3*x+0];
#endif
	   }
	   break;
       }
   }
  }
  
 jpeg_finish_decompress(&cinfo);
 jpeg_destroy_decompress(&cinfo);

 return mpi;
}
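
The IMGFMT_BGR32 branch above swizzles each packed RGB24 scanline from libjpeg into 4-byte BGR32 pixels, with the WORDS_BIGENDIAN variant shifting the colour bytes up by one position. Below is a minimal standalone sketch of that per-row conversion; the helper name rgb24_row_to_bgr32 and the test driver are illustrative, and unlike the decoder above it also zeroes the padding byte that the original leaves untouched.

#include <stdint.h>
#include <stdio.h>

/* Sketch of the per-row swizzle used above: expand a packed RGB24 scanline
 * into 4-byte BGR32 pixels, mirroring the WORDS_BIGENDIAN branch of the
 * decoder.  Illustrative only. */
static void rgb24_row_to_bgr32(const uint8_t *row, uint8_t *drow, int width)
{
	int x;
	for (x = 0; x < width; x++) {
#ifdef WORDS_BIGENDIAN
		drow[4*x + 0] = 0;		/* padding byte */
		drow[4*x + 1] = row[3*x + 0];	/* R */
		drow[4*x + 2] = row[3*x + 1];	/* G */
		drow[4*x + 3] = row[3*x + 2];	/* B */
#else
		drow[4*x + 0] = row[3*x + 2];	/* B */
		drow[4*x + 1] = row[3*x + 1];	/* G */
		drow[4*x + 2] = row[3*x + 0];	/* R */
		drow[4*x + 3] = 0;		/* padding byte */
#endif
	}
}

int main(void)
{
	const uint8_t rgb[3] = { 0x11, 0x22, 0x33 };	/* one pixel: R=0x11 G=0x22 B=0x33 */
	uint8_t       bgr32[4];

	rgb24_row_to_bgr32(rgb, bgr32, 1);
	printf("%02x %02x %02x %02x\n", bgr32[0], bgr32[1], bgr32[2], bgr32[3]);
	return 0;
}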