Example 1
/* /////////////////////////////////////////////////////////////////////////////
 * Write AVI file
 */
int TTAVIWriter::writeAVI(int startIndex, int endIndex, const QFileInfo& aviFInfo)
{
  QTime       searchTime;
  TFrameInfo* frameInfo;
  int         frameCount = endIndex - startIndex + 1;

  log->debugMsg(__FILE__, __LINE__, "---------------------------------------------");
  log->debugMsg(__FILE__, __LINE__, QString("write AVI start %1 / end %2 / count %3").
      arg(startIndex).arg(endIndex).arg(frameCount));

  avi_file = AVI_open_output_file(aviFInfo.absoluteFilePath().toLatin1().data());

  //TODO: go back to previous sequence to ensure correct encoding of
  //      open GOP's
  current_frame = decoder->moveToFrameIndex(startIndex);

  // decode the current slice
  //frameInfo = decoder->decodeMPEG2Frame( formatYV12 );
  frameInfo = decoder->getFrameInfo();

  log->debugMsg(__FILE__, __LINE__, QString("AVI %1 width %2 x height %3").
      arg(aviFInfo.absoluteFilePath()).
      arg(frameInfo->width).
      arg(frameInfo->height));

  AVI_set_video(avi_file, frameInfo->width, frameInfo->height, mFrameRate, (char*)"YV12");

  ref_data = new quint8[frameInfo->size+2*frameInfo->chroma_size];

  //for (int i = start_frame_pos; i <= end_frame_pos; i++)
  for (int i = 1; i <= frameCount; i++)
  {
    // get decoded mpeg-frame data
    decoder->getCurrentFrameData(ref_data);

    log->debugMsg(__FILE__, __LINE__, QString("write frame %1 with frame-type %2").
        arg(startIndex+i-1).
        arg(frameInfo->type));

    // write the mpeg-frame data to AVI file
    AVI_write_frame(avi_file, (char*)ref_data, frameInfo->size+2*frameInfo->chroma_size, 2);

    // decode the next mpeg-frame
    decoder->moveToFrameIndex(startIndex+i);
    frameInfo = decoder->getFrameInfo();
    //frameInfo = decoder->decodeMPEG2Frame( formatYV12 );

  }
  log->debugMsg(__FILE__, __LINE__, "---------------------------------------------");

  delete [] ref_data;

  return frameCount;
}
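
All of the snippets in this listing follow the same sequence: AVI_open_output_file, AVI_set_video (and optionally AVI_set_audio) to fill in the header, AVI_write_frame per frame, then AVI_close. Below is a minimal sketch of that pattern, assuming the transcode-style avilib API used by most of the examples (Example 6 uses a guvcview variant whose AVI_open_output_file takes a pre-allocated struct avi_t *); the helper name and the YV12 size arithmetic are illustrative only. Note that, unlike this sketch, Example 1 above never calls AVI_close, which avilib needs in order to write the index and finalize the header.

/* Minimal avilib write loop (sketch, not the code of any example above). */
#include "avilib.h"

static int write_yv12_frames(const char *path, unsigned char **frames,
                             int n_frames, int width, int height, double fps)
{
    avi_t *avi = AVI_open_output_file((char *) path);
    if (avi == NULL)
        return -1;

    /* one video stream, fourcc "YV12" as in Example 1 */
    AVI_set_video(avi, width, height, fps, (char *) "YV12");

    /* planar YV12: full-resolution Y plane plus two quarter-size chroma planes */
    long frame_size = (long) width * height + 2L * (width / 2) * (height / 2);

    for (int i = 0; i < n_frames; i++) {
        /* last argument marks the chunk as a keyframe; raw frames always are */
        if (AVI_write_frame(avi, (char *) frames[i], frame_size, 1) < 0) {
            AVI_close(avi);
            return -1;
        }
    }

    /* AVI_close writes the idx1 index and patches the header sizes */
    return AVI_close(avi);
}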
Example 2
Revel_Error Revel_BaseEncoder::EncodeStart(const char *filename)
{
    if (m_isEncoding)
        return REVEL_ERR_BUSY;

    // check params
    if (m_params.width == 0 || m_params.height == 0)
    {
        Reset();
        return REVEL_ERR_PARAMS;
    }

    // Create output file
    m_outFile = AVI_open_output_file( (char*)filename );
    if (m_outFile == NULL)
    {
        Reset();
        return REVEL_ERR_FILE;
    }
    AVI_set_video(m_outFile, m_params.width, m_params.height, m_params.frameRate, "XVID");
    if (m_params.hasAudio == 1)
    {
        // Sanity-check the audio settings first, since there's a good chance
        // the caller will leave all these fields uninitialized when updating
        // from Revel 1.0.x.
        if (m_params.audioBits < 0 || m_params.audioBits > 256 ||
            m_params.audioChannels < 0 || m_params.audioChannels > 256 ||
            m_params.audioRate < 0)
        {
            Reset();
            return REVEL_ERR_PARAMS;
        }
        AVI_set_audio(m_outFile, m_params.audioChannels, m_params.audioRate,
            m_params.audioBits, m_params.audioSampleFormat);
    }
    else
    {
        AVI_set_audio(m_outFile, 0, 44100, 16, WAVE_FORMAT_UNKNOWN);
    }

    // Allocate frame buffer
    m_encodedFrame = new char[m_params.width*m_params.height*4];
    if (m_encodedFrame == NULL)
    {
        Reset();
        return REVEL_ERR_MEMORY;
    }

    m_outFilename = std::string(filename);
    m_totalOutBytes = 0;
    m_isEncoding = true;

    return REVEL_ERR_NONE;
}
Example 3
static void
capture_avi (GtkButton * CapAVIButt, GtkWidget * AVIFNameEntry)
{
	char *filename = gtk_entry_get_text(GTK_ENTRY(AVIFNameEntry));
	char *compression="YUY2";

	switch (AVIFormat) {
	 case 1:
		compression="YUY2";
		break;
	 case 2:
		compression="DIB ";
		break;
	 default:
		compression="YUY2";
	}	
	if(videoIn->capAVI) {
		printf("stopping AVI capture\n");
		gtk_button_set_label(CapAVIButt, "Capture");
		AVIstoptime = ms_time();
		printf("AVI stop time:%d\n", AVIstoptime);
		videoIn->capAVI = FALSE;
		aviClose();
	}
	else {
		if (!(videoIn->signalquit)) {
			/* thread exited while in AVI capture mode,
			 * so we have to close the AVI */
			printf("close AVI since thread has exited\n");
			gtk_button_set_label(CapAVIButt, "Capture");
			printf("AVI stop time:%d\n", AVIstoptime);
			aviClose();
		}
		else {
			/* thread is running, so start AVI capture */
			printf("starting AVI capture to %s\n", filename);
			gtk_button_set_label(CapAVIButt, "Stop");
			AviOut = AVI_open_output_file(filename);
			/* 4CC compression: "YUY2" (YUV) or "DIB " (RGB24) or whatever */
			AVI_set_video(AviOut, videoIn->width, videoIn->height, videoIn->fps, compression);

			videoIn->AVIFName = filename;
			AVIstarttime = ms_time();
			printf("AVI start time:%d\n", AVIstarttime);
			videoIn->capAVI = TRUE;
		}
	}
}
Example 4
/* ///////////////////////////////////////////////////////////////////////////// 
 * Write AVI file
 */
int TTAVIWriter::writeAVI(int start_frame_pos, int end_frame_pos, const QFileInfo& avi_finfo)
{
  QTime       searchTime;
  TFrameInfo* frameInfo;
  int         frame_count = end_frame_pos - start_frame_pos;

  //qDebug( "%s------------------------------------------------",c_name );
  //qDebug( "%swrite AVI: start: %ld | end: %ld | count: %d",c_name,start_frame_pos,end_frame_pos,frame_count );
  //qDebug( "%s------------------------------------------------",c_name );

  avi_file = AVI_open_output_file( avi_finfo.absoluteFilePath().toLatin1().data() );


  //TODO: go back to previous sequence to ensure correct encoding of
  //      open GOP's
  // move decode position to "ref_frame_pos"
  current_frame = decoder->moveToFrameIndex( start_frame_pos );
  
  // decode the current slice
  frameInfo = decoder->decodeMPEG2Frame( formatYV12 );

  //qDebug( "%sAVI frame info: width: %d x height: %d",c_name,frameInfo->width,frameInfo->height );

  //TODO: avoid setting hard coded frame rate!
  AVI_set_video(avi_file, frameInfo->width, frameInfo->height, 25.0, (char*)"YV12");
  
  ref_data = new quint8[frameInfo->size+2*frameInfo->chroma_size];
  

  for (int i = start_frame_pos; i <= end_frame_pos; i++)
  {
    // get decoded mpeg-frame data
    decoder->getCurrentFrameData( ref_data );

    // write the mpeg-frame data to AVI file
    AVI_write_frame(avi_file, (char*)ref_data, frameInfo->size+2*frameInfo->chroma_size, 2);

    // decode the next mpeg-frame
    frameInfo = decoder->decodeMPEG2Frame( formatYV12 );    
  }
  //qDebug( "%s------------------------------------------------",c_name );
  
  delete [] ref_data;

  return frame_count;  
}
Example 5
static void avr_on_video_reconfig ( void *udta, u32 width, u32 height )
{

    GF_AVRedirect *avr = ( GF_AVRedirect * ) udta;

    if ((avr->srcHeight != height || avr->srcWidth != width))
    {
#ifdef AVR_DUMP_RAW_AVI
        char comp[5];
        comp[0] = comp[1] = comp[2] = comp[3] = comp[4] = 0;
        AVI_set_video ( avr->avi_out, width, height, 30, comp );
#endif /* AVR_DUMP_RAW_AVI */
        GF_LOG ( GF_LOG_INFO, GF_LOG_MODULE, ( "[AVRedirect] Video reconfig width %d height %d\n", width, height ) );
        gf_mx_p(avr->frameMutex);
        if ( avr->frame ) gf_free ( avr->frame );
        avr->size = 3*width*height;
        avr->frame = gf_malloc ( sizeof ( char ) *avr->size );
        avr->srcWidth = width;
        avr->srcHeight = height;
        avr->swsContext = sws_getCachedContext ( avr->swsContext, avr->srcWidth, avr->srcHeight, PIX_FMT_RGB24, avr->srcWidth, avr->srcHeight, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL );
        gf_mx_v(avr->frameMutex);
    }
}
Example 6
static int initVideoFile(struct ALL_DATA *all_data)
{
	struct GWIDGET *gwidget = all_data->gwidget;
	struct paRecordData *pdata = all_data->pdata;
	struct GLOBAL *global = all_data->global;
	struct vdIn *videoIn = all_data->videoIn;
	struct VideoFormatData *videoF = all_data->videoF;
	
	const char *compression= get_vid4cc(global->VidCodec);
	videoF->vcodec = get_vcodec_id(global->VidCodec);
	videoF->acodec = CODEC_ID_NONE;
	videoF->keyframe = 0;
	int ret = 0;
	
	__LOCK_MUTEX(__VMUTEX);
		gboolean capVid = videoIn->capVid;
	__UNLOCK_MUTEX(__VMUTEX);
	
	/*alloc video ring buffer*/
	alloc_videoBuff(all_data);
	
	switch (global->VidFormat)
	{
		case AVI_FORMAT:
			if(videoF->AviOut != NULL)
			{
				g_free(videoF->AviOut);
				videoF->AviOut = NULL;
			}
			videoF->AviOut = g_new0(struct avi_t, 1);
			
			if(AVI_open_output_file(videoF->AviOut, videoIn->VidFName)<0) 
			{
				g_printerr("Error: Couldn't create Video.\n");
				capVid = FALSE; /*don't start video capture*/
				__LOCK_MUTEX(__VMUTEX);
					videoIn->capVid = capVid;
				__UNLOCK_MUTEX(__VMUTEX);
				pdata->capVid = capVid;
				return(-1);
			} 
			else 
			{
				AVI_set_video(videoF->AviOut, global->width, global->height, 
					global->fps, compression);
		  
				/* start video capture*/
				capVid = TRUE;
				__LOCK_MUTEX(__VMUTEX);
					videoIn->capVid = capVid;
				__UNLOCK_MUTEX(__VMUTEX);
				pdata->capVid = capVid;
				
				/* start sound capture*/
				if(global->Sound_enable > 0) 
				{
					/*get channels and sample rate*/
					set_sound(global, pdata);
					/*set audio header for avi*/
					AVI_set_audio(videoF->AviOut, global->Sound_NumChan, 
						global->Sound_SampRate,
						get_aud_bit_rate(get_ind_by4cc(global->Sound_Format)), /*bit rate*/
						get_aud_bits(get_ind_by4cc(global->Sound_Format)),     /*sample size - only used for PCM*/
						global->Sound_Format);
					/* Initialize sound (open stream)*/
					if(init_sound (pdata)) 
					{
						g_printerr("Audio initialization error\n");
						global->Sound_enable=0;
						if(!(global->no_display))
                        {
						    gdk_threads_enter();
						    gtk_toggle_button_set_active(GTK_TOGGLE_BUTTON(gwidget->SndEnable),0);
						    gdk_flush();
						    gdk_threads_leave();
						}
						else
                            capture_vid(NULL, all_data);
					} 
				}
			}
			break;
			
		case MKV_FORMAT:
			if(global->Sound_enable > 0) 
			{
				/*set channels, sample rate and allocate buffers*/
				set_sound(global, pdata);
			}
			if(init_FormatContext((void *) all_data)<0)
			{
				capVid = FALSE;
				__LOCK_MUTEX(__VMUTEX);
					videoIn->capVid = capVid;
				__UNLOCK_MUTEX(__VMUTEX);
				pdata->capVid = capVid;
				return (-1);
			}
			
			videoF->old_apts = 0;
			videoF->apts = 0;
			videoF->vpts = 0;
			
			/* start video capture*/
			capVid = TRUE;
			__LOCK_MUTEX(__VMUTEX);
				videoIn->capVid = capVid;
			__UNLOCK_MUTEX(__VMUTEX);
			pdata->capVid = capVid;
			
			
			/* start sound capture*/
			if(global->Sound_enable > 0) 
			{
				/* Initialize sound (open stream)*/
				if(init_sound (pdata)) 
				{
					g_printerr("Audio initialization error\n");
					global->Sound_enable=0;
					if(!(global->no_display))
                    {
					    /*will this work with the checkbox disabled?*/
					    gdk_threads_enter();
					    gtk_toggle_button_set_active(GTK_TOGGLE_BUTTON(gwidget->SndEnable),0);
					    gdk_flush();
					    gdk_threads_leave();
					}
					else
					    capture_vid(NULL, all_data);
				}
			}
			break;
//WebM = MKV + WebVTT
//add META data capture
		case WEBM_FORMAT:
                     
			if(global->Sound_enable > 0) 
			{
				/*set channels, sample rate and allocate buffers*/
				set_sound(global, pdata);
			}
			if(init_FormatContext((void *) all_data)<0)
			{
				capVid = FALSE;
				__LOCK_MUTEX(__VMUTEX);
					videoIn->capVid = capVid;
				__UNLOCK_MUTEX(__VMUTEX);
				pdata->capVid = capVid;
				return (-1);
			}
			
			videoF->old_apts = 0;
			videoF->apts = 0;
			videoF->vpts = 0;
			
			/* start video capture*/
			capVid = TRUE;
			__LOCK_MUTEX(__VMUTEX);
				videoIn->capVid = capVid;
			__UNLOCK_MUTEX(__VMUTEX);
			pdata->capVid = capVid;
			
			
			/* start sound capture*/
			if(global->Sound_enable > 0) 
			{
				/* Initialize sound (open stream)*/
				if(init_sound (pdata)) 
				{
					g_printerr("Audio initialization error\n");
					global->Sound_enable=0;
					if(!(global->no_display))
                    {
					    /*will this work with the checkbox disabled?*/
					    gdk_threads_enter();
					    gtk_toggle_button_set_active(GTK_TOGGLE_BUTTON(gwidget->SndEnable),0);
					    gdk_flush();
					    gdk_threads_leave();
					}
					else
					    capture_vid(NULL, all_data);
				}
			}
			break;            
		default:
			
			break;
	}
	
	return (ret);
}
Example 7
int main(int argc, char *argv[])
{
#ifndef EMBEDED_X210  //PC platform
    const SDL_VideoInfo *info;
    char driver[128];
    SDL_Surface *pscreen;
    SDL_Overlay *overlay;
    SDL_Rect drect;
    SDL_Event sdlevent;
    SDL_Thread *mythread;
    SDL_mutex *affmutex;
    Uint32 currtime;
    Uint32 lasttime;
#endif
    int status;

    unsigned char *p = NULL;
    int hwaccel = 0;
    const char *videodevice = NULL;
    const char *mode = NULL;
    int format = V4L2_PIX_FMT_MJPEG;
    int i;
    int grabmethod = 1;
    int width = 320;
    int height = 240;
    int fps = 15;
    unsigned char frmrate = 0;
    char *avifilename = NULL;
    int queryformats = 0;
    int querycontrols = 0;
    int readconfigfile = 0;
    char *separateur;
    char *sizestring = NULL;
    char *fpsstring  = NULL;
    int enableRawStreamCapture = 0;
    int enableRawFrameCapture = 0;
    char * pRGBData=NULL;



    printf("luvcview version %s \n", version);
    for (i = 1; i < argc; i++)
    {
        /* skip bad arguments */
        if (argv[i] == NULL || *argv[i] == 0 || *argv[i] != '-') {
            continue;
        }
        if (strcmp(argv[i], "-d") == 0) {
            if (i + 1 >= argc) {
                printf("No parameter specified with -d, aborting.\n");
                exit(1);
            }
            videodevice = strdup(argv[i + 1]);
        }
        if (strcmp(argv[i], "-g") == 0) {
            /* Ask for read instead default  mmap */
            grabmethod = 0;
        }
        if (strcmp(argv[i], "-w") == 0) {
            /* disable hw acceleration */
            hwaccel = 1;
        }
        if (strcmp(argv[i], "-f") == 0) {
            if (i + 1 >= argc) {
                printf("No parameter specified with -f, aborting.\n");
                exit(1);
            }
            mode = strdup(argv[i + 1]);

            if (strncmp(mode, "yuv", 3) == 0) {
                format = V4L2_PIX_FMT_YUYV;

            } else if (strncmp(mode, "jpg", 3) == 0) {
                format = V4L2_PIX_FMT_MJPEG;

            } else {
                format = V4L2_PIX_FMT_MJPEG;

            }
        }
        if (strcmp(argv[i], "-s") == 0) {
            if (i + 1 >= argc) {
                printf("No parameter specified with -s, aborting.\n");
                exit(1);
            }

            sizestring = strdup(argv[i + 1]);

            width = strtoul(sizestring, &separateur, 10);
            if (*separateur != 'x') {
                printf("Error in size use -s widthxheight \n");
                exit(1);
            } else {
                ++separateur;
                height = strtoul(separateur, &separateur, 10);
                if (*separateur != 0)
                    printf("hmm.. dont like that!! trying this height \n");
                printf(" size width: %d height: %d \n", width, height);
            }
        }
        if (strcmp(argv[i], "-i") == 0){
            if (i + 1 >= argc) {
                printf("No parameter specified with -i, aborting. \n");
                exit(1);
            }
            fpsstring = strdup(argv[i + 1]);
            fps = strtoul(fpsstring, &separateur, 10);
            printf(" interval: %d fps \n", fps);
        }
        if (strcmp(argv[i], "-S") == 0) {
            /* Enable raw stream capture from the start */
            enableRawStreamCapture = 1;
        }
        if (strcmp(argv[i], "-c") == 0) {
            /* Enable raw frame capture for the first frame */
            enableRawFrameCapture = 1;
        }
        if (strcmp(argv[i], "-C") == 0) {
            /* Enable raw frame stream capture from the start*/
            enableRawFrameCapture = 2;
        }
        if (strcmp(argv[i], "-o") == 0) {
            /* set the avi filename */
            if (i + 1 >= argc) {
                printf("No parameter specified with -o, aborting.\n");
                exit(1);
            }
            avifilename = strdup(argv[i + 1]);
        }
        if (strcmp(argv[i], "-L") == 0) {
            /* query list of valid video formats */
            queryformats = 1;
        }
        if (strcmp(argv[i], "-l") == 0) {
            /* query list of valid video formats */
            querycontrols = 1;
        }

        if (strcmp(argv[i], "-r") == 0) {
            /* query list of valid video formats */
            readconfigfile = 1;
        }
        if (strcmp(argv[i], "-h") == 0) {
            printf("usage: uvcview [-h -d -g -f -s -i -c -o -C -S -L -l -r] \n");
            printf("-h	print this message \n");
            printf("-d	/dev/videoX       use videoX device\n");
            printf("-g	use read method for grab instead mmap \n");
            printf("-w	disable SDL hardware accel. \n");
            printf("-f	video format  default jpg  others options are yuv jpg \n");
            printf("-i	fps           use specified frame interval \n");
            printf("-s	widthxheight      use specified input size \n");
            printf("-c	enable raw frame capturing for the first frame\n");
            printf("-C	enable raw frame stream capturing from the start\n");
            printf("-S	enable raw stream capturing from the start\n");
            printf("-o	avifile  create avifile, default video.avi\n");
            printf("-L	query valid video formats\n");
            printf("-l	query valid controls and settings\n");
            printf("-r	read and set control settings from luvcview.cfg\n");
            exit(0);
        }
    }

#ifndef   EMBEDED_X210 //PC platform

    /************* Test SDL capabilities ************/
    if (SDL_Init(SDL_INIT_VIDEO) < 0) {
        fprintf(stderr, "Couldn't initialize SDL: %s\n", SDL_GetError());
        exit(1);
    }
    
    /* For this version, we'll be safe and disable hardware acceleration */
    if(hwaccel)
    {
        if ( ! getenv("SDL_VIDEO_YUV_HWACCEL") )
        {
            putenv("SDL_VIDEO_YUV_HWACCEL=0");
        }
    }

    if (SDL_VideoDriverName(driver, sizeof(driver)))
    {
        printf("Video driver: %s\n", driver);
    }
    info = SDL_GetVideoInfo();

    if (info->wm_available) {
        printf("A window manager is available\n");
    }
    if (info->hw_available) {
        printf("Hardware surfaces are available (%dK video memory)\n",
               info->video_mem);
        SDL_VIDEO_Flags |= SDL_HWSURFACE;
    }
    if (info->blit_hw) {
        printf("Copy blits between hardware surfaces are accelerated\n");
        SDL_VIDEO_Flags |= SDL_ASYNCBLIT;
    }
    if (info->blit_hw_CC) {
        printf("Colorkey blits between hardware surfaces are accelerated\n");
    }
    if (info->blit_hw_A) {
        printf("Alpha blits between hardware surfaces are accelerated\n");
    }
    if (info->blit_sw) {
        printf("Copy blits from software surfaces to hardware surfaces are accelerated\n");
    }
    if (info->blit_sw_CC) {
        printf("Colorkey blits from software surfaces to hardware surfaces are accelerated\n");
    }
    if (info->blit_sw_A) {
        printf("Alpha blits from software surfaces to hardware surfaces are accelerated\n");
    }
    if (info->blit_fill) {
        printf("Color fills on hardware surfaces are accelerated\n");
    }



    if (!(SDL_VIDEO_Flags & SDL_HWSURFACE))
        SDL_VIDEO_Flags |= SDL_SWSURFACE;

#endif

    if (videodevice == NULL || *videodevice == 0) {
        videodevice = "/dev/video0";
    }

    if (avifilename == NULL || *avifilename == 0) {
        avifilename = "video.avi";
    }

    videoIn = (struct vdIn *) calloc(1, sizeof(struct vdIn));
    if ( queryformats ) {
        /* if we're supposed to list the video formats, do that now and go out */
        check_videoIn(videoIn,(char *) videodevice);
        free(videoIn);
#ifndef EMBEDED_X210
        SDL_Quit();
#endif
        exit(1);
    }

    if (init_videoIn(videoIn, (char *) videodevice, width, height, fps, format, grabmethod, avifilename) < 0)
        exit(1);
    /* if we're supposed to list the controls, do that now */
    if ( querycontrols )
        enum_controls(videoIn->fd);
    
    /* if we're supposed to read the control settings from a configfile, do that now */
    if ( readconfigfile )
        load_controls(videoIn->fd);


#ifdef EMBEDED_X210
#ifdef SOFT_COLOR_CONVERT
    init_framebuffer();
#else
    x6410_init_Draw(videoIn->width,videoIn->height);
#endif

#else
    pscreen = SDL_SetVideoMode(videoIn->width, videoIn->height+30 , 0,SDL_VIDEO_Flags);
    overlay =SDL_CreateYUVOverlay(videoIn->width, videoIn->height+30 , SDL_YUY2_OVERLAY, pscreen);
    p = (unsigned char *) overlay->pixels[0];

    drect.x = 0;
    drect.y = 0;
    drect.w =pscreen->w;
    drect.h = pscreen->h;

#endif

    if (enableRawStreamCapture)
    {
        videoIn->captureFile = fopen("stream.raw", "wb");
        if(videoIn->captureFile == NULL) {
            perror("Unable to open file for raw stream capturing");
        } else {
            printf("Starting raw stream capturing to stream.raw ...\n");
        }
    }
    if (enableRawFrameCapture)
        videoIn->rawFrameCapture = enableRawFrameCapture;

    initLut();

#ifndef EMBEDED_X210
    SDL_WM_SetCaption(title_act[A_VIDEO].title, NULL);
    lasttime = SDL_GetTicks();
    creatButt(videoIn->width, 32);
    SDL_LockYUVOverlay(overlay);
    memcpy(p + (videoIn->width * (videoIn->height) * 2), YUYVbutt,
           videoIn->width * 64);
    SDL_UnlockYUVOverlay(overlay);

    /* initialize thread data */
    ptdata.ptscreen = &pscreen;
    ptdata.ptvideoIn = videoIn;
    ptdata.ptsdlevent = &sdlevent;
    ptdata.drect = &drect;
    affmutex = SDL_CreateMutex();
    ptdata.affmutex = affmutex;
    mythread = SDL_CreateThread(eventThread, (void *) &ptdata);
#endif



    /* scratch buffer for the (optional) RGB conversion: 4 bytes per pixel */
    pRGBData = (char *)malloc(videoIn->width * videoIn->height * 4 * sizeof(char));
    if (pRGBData == NULL)
    {
        return -1;
    }
    /* main big loop */
    while (videoIn->signalquit)
    {
#ifndef EMBEDED_X210
        currtime = SDL_GetTicks();
        if (currtime - lasttime > 0) {
            frmrate = 1000/(currtime - lasttime);
        }
        lasttime = currtime;
#endif
        if (uvcGrab(videoIn) < 0) {
            printf("Error grabbing \n");
            break;
        }

        /* if we're grabbing video, show the frame rate */
        if (videoIn->toggleAvi)
            printf("\rframe rate: %d     ",frmrate);

#ifndef EMBEDED_X210
        SDL_LockYUVOverlay(overlay);
        memcpy(p, videoIn->framebuffer, videoIn->width * (videoIn->height) * 2);
        SDL_UnlockYUVOverlay(overlay);
        SDL_DisplayYUVOverlay(overlay, &drect);
#endif

#ifdef EMBEDED_X210
#ifdef SOFT_COLOR_CONVERT
        // yuv to rgb565 ,and to frambuffer
        process_image(videoIn->framebuffer,fbp,videoIn->width,videoIn->height,vinfo,finfo);
    
    //    convertYUYVtoRGB565(videoIn->framebuffer,pRGBData,videoIn->width,videoIn->height);

   //   Pyuv422torgb24(videoIn->framebuffer, pRGBData, videoIn->width, videoIn->height);
    //    memcpy(fbp,pRGBData,videoIn->width*videoIn->height*2);
     
     

#else   //X6410 post processor convert yuv to rgb,X210 not suport now.

        /*
        memcpy(pInbuffer, videoIn->framebuffer, videoIn->width * (videoIn->height) * 2);

        ioctl(dev_fb0, GET_FB_INFO, &fb_info);

        pp_param.SrcFrmSt = ioctl(dev_pp, S3C_PP_GET_RESERVED_MEM_ADDR_PHY); //must be  physical adress
        pp_param.DstFrmSt = fb_info.map_dma_f1; //must be physical adress

        ioctl(dev_pp, S3C_PP_SET_PARAMS, &pp_param);
        ioctl(dev_pp, S3C_PP_SET_DST_BUF_ADDR_PHY, &pp_param);
        ioctl(dev_pp, S3C_PP_SET_SRC_BUF_ADDR_PHY, &pp_param);
        ioctl(dev_pp, S3C_PP_START);
        */
#endif
#endif
        if (videoIn->getPict)
        {
            switch(videoIn->formatIn){
            case V4L2_PIX_FMT_MJPEG:
                get_picture(videoIn->tmpbuffer,videoIn->buf.bytesused);
                break;
            case V4L2_PIX_FMT_YUYV:
                get_pictureYV2(videoIn->framebuffer,videoIn->width,videoIn->height);
                break;
            default:
                break;
            }
            videoIn->getPict = 0;
            printf("get picture !\n");
        }

#ifndef EMBEDED_X210
        SDL_LockMutex(affmutex);
        ptdata.frmrate = frmrate;
        SDL_WM_SetCaption(videoIn->status, NULL);
        SDL_UnlockMutex(affmutex);
#endif

#ifdef  EMBEDED_X210
        usleep(10);
#else
        SDL_Delay(10);
#endif


    }
#ifndef EMBEDED_X210
    SDL_WaitThread(mythread, &status);
    SDL_DestroyMutex(affmutex);
#endif
    /* if avifile is defined, we made a video: compute the exact fps and
       set it in the video */
    if (videoIn->avifile != NULL) {
        float fps=(videoIn->framecount/(videoIn->recordtime/1000));
        fprintf(stderr,"setting fps to %f\n",fps);
        AVI_set_video(videoIn->avifile, videoIn->width, videoIn->height,
                      fps, "MJPG");
        AVI_close(videoIn->avifile);
    }

    close_v4l2(videoIn);

#ifdef EMBEDED_X210
#ifdef SOFT_COLOR_CONVERT
    close_frambuffer();
#else
    x6410_DeInit_Draw();
#endif

#endif
    free(pRGBData);
    free(videoIn);
    destroyButt();
    freeLut();
    printf(" Clean Up done Quit \n");
#ifndef EMBEDED_X210
    SDL_Quit();
#endif
}
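
Examples 7 and 8 both finish a capture the same way: compute the effective frame rate from the recorded frame count and elapsed record time, rewrite the header with AVI_set_video, and only then call AVI_close. A condensed sketch of that idiom follows, assuming the luvcview struct vdIn fields used above (avifile, framecount, recordtime, width, height); the explicit floating-point casts and the guard against a zero recordtime are additions in this sketch.

/* Sketch: finalize an MJPG capture by fixing the header fps before closing. */
#include <stdio.h>
#include "avilib.h"
#include "v4l2uvc.h"   /* struct vdIn, as used by the mains above (header name assumed) */

static void finish_avi_capture(struct vdIn *vd)
{
    if (vd->avifile == NULL)
        return;                                   /* nothing was recorded */

    double fps = 15.0;                            /* fall back to the nominal rate */
    if (vd->recordtime > 0)
        fps = (double) vd->framecount / ((double) vd->recordtime / 1000.0);

    fprintf(stderr, "setting fps to %f\n", fps);
    AVI_set_video(vd->avifile, vd->width, vd->height, fps, "MJPG");
    AVI_close(vd->avifile);                       /* writes index and final header */
    vd->avifile = NULL;
}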
Example 8
int main(int argc, char *argv[]) {
	char driver[128];
	int status;
	//Uint32 currtime;
	//Uint32 lasttime;
	unsigned char *p = NULL;
	int hwaccel = 0;
	const char *videodevice = NULL;
	const char *mode = NULL;
	int format = V4L2_PIX_FMT_MJPEG;
	int i;
	int grabmethod = 1;
	int width = 320;
	int height = 240;
	int fps = 15;
	unsigned char frmrate = 0;
	char *avifilename = NULL;
	int queryformats = 0;
	int querycontrols = 0;
	int readconfigfile = 0;
	char *separateur;
	char *sizestring = NULL;
	char *fpsstring = NULL;
	int enableRawStreamCapture = 0;
	int enableRawFrameCapture = 0;

	printf("luvcview version %s \n", version);
	for (i = 1; i < argc; i++) {
		/* skip bad arguments */
		if (argv[i] == NULL || *argv[i] == 0 || *argv[i] != '-') {
			continue;
		}
		if (strcmp(argv[i], "-d") == 0) {
			if (i + 1 >= argc) {
				printf("No parameter specified with -d, aborting.\n");
				exit(1);
			}
			videodevice = strdup(argv[i + 1]);
		}
		if (strcmp(argv[i], "-g") == 0) {
			/* Ask for read instead default  mmap */
			grabmethod = 0;
		}
		if (strcmp(argv[i], "-w") == 0) {
			/* disable hw acceleration */
			hwaccel = 1;
		}
		if (strcmp(argv[i], "-f") == 0) {
			if (i + 1 >= argc) {
				printf("No parameter specified with -f, aborting.\n");
				exit(1);
			}
			mode = strdup(argv[i + 1]);

			if (strncmp(mode, "yuv", 3) == 0) {
				format = V4L2_PIX_FMT_YUYV;

			} else if (strncmp(mode, "jpg", 3) == 0) {
				format = V4L2_PIX_FMT_MJPEG;

			} else {
				format = V4L2_PIX_FMT_JPEG;

			}
		}
		if (strcmp(argv[i], "-s") == 0) {
			if (i + 1 >= argc) {
				printf("No parameter specified with -s, aborting.\n");
				exit(1);
			}

			sizestring = strdup(argv[i + 1]);

			width = strtoul(sizestring, &separateur, 10);
			if (*separateur != 'x') {
				printf("Error in size use -s widthxheight \n");
				exit(1);
			} else {
				++separateur;
				height = strtoul(separateur, &separateur, 10);
				if (*separateur != 0)
					printf("hmm.. dont like that!! trying this height \n");
				printf(" size width: %d height: %d \n", width, height);
			}
		}
		if (strcmp(argv[i], "-i") == 0) {
			if (i + 1 >= argc) {
				printf("No parameter specified with -i, aborting. \n");
				exit(1);
			}
			fpsstring = strdup(argv[i + 1]);
			fps = strtoul(fpsstring, &separateur, 10);
			printf(" interval: %d fps \n", fps);
		}
		if (strcmp(argv[i], "-S") == 0) {
			/* Enable raw stream capture from the start */
			enableRawStreamCapture = 1;
		}
		if (strcmp(argv[i], "-c") == 0) {
			/* Enable raw frame capture for the first frame */
			enableRawFrameCapture = 1;
		}
		if (strcmp(argv[i], "-C") == 0) {
			/* Enable raw frame stream capture from the start*/
			enableRawFrameCapture = 2;
		}
		if (strcmp(argv[i], "-o") == 0) {
			/* set the avi filename */
			if (i + 1 >= argc) {
				printf("No parameter specified with -o, aborting.\n");
				exit(1);
			}
			avifilename = strdup(argv[i + 1]);
		}
		if (strcmp(argv[i], "-L") == 0) {
			/* query list of valid video formats */
			queryformats = 1;
		}
		if (strcmp(argv[i], "-l") == 0) {
			/* query list of valid video formats */
			querycontrols = 1;
		}

		if (strcmp(argv[i], "-r") == 0) {
			/* query list of valid video formats */
			readconfigfile = 1;
		}

		if (strcmp(argv[i], "-O") == 0) {
			/* get picture */
			getpictureflag = 1;
		}

		if (strcmp(argv[i], "-h") == 0) {
			printf("usage: uvcview [-h -d -g -f -s -i -c -o -C -S -L -l -r] \n");
			printf("-h	print this message \n");
			printf("-d	/dev/videoX       use videoX device\n");
			printf("-g	use read method for grab instead mmap \n");
			printf("-w	disable SDL hardware accel. \n");
			printf("-f	video format  default jpg  others options are yuv jpg \n");
			printf("-i	fps           use specified frame interval \n");
			printf("-s	widthxheight      use specified input size \n");
			printf("-c	enable raw frame capturing for the first frame\n");
			printf("-C	enable raw frame stream capturing from the start\n");
			printf("-S	enable raw stream capturing from the start\n");
			printf("-o	avifile  create avifile, default video.avi\n");
			printf("-L	query valid video formats\n");
			printf("-l	query valid controls and settings\n");
			printf("-r	read and set control settings from luvcview.cfg\n");
			printf("-O	get picture.\n");
			exit(0);
		}
	}

	if (videodevice == NULL || *videodevice == 0) {
		videodevice = "/dev/video0";
	}

	if (avifilename == NULL || *avifilename == 0) {
		avifilename = "video.avi";
	}

	videoIn = (struct vdIn *) calloc(1, sizeof(struct vdIn));
	if (queryformats) {
		/* if we're supposed to list the video formats, do that now and go out */
		check_videoIn(videoIn, (char *) videodevice);
		free(videoIn);
		exit(1);
	}
	if (init_videoIn(videoIn, (char *) videodevice, width, height, fps, format,
			grabmethod, avifilename) < 0)
		exit(1);
	/* if we're supposed to list the controls, do that now */
	if (querycontrols)
		enum_controls(videoIn->fd);

	/* if we're supposed to read the control settings from a configfile, do that now */
	if (readconfigfile)
		load_controls(videoIn->fd);

    printf("Enable Raw Stream Capture\n");
	if (enableRawStreamCapture) {
		videoIn->captureFile = fopen("stream.raw", "wb");
		if (videoIn->captureFile == NULL) {
			perror("Unable to open file for raw stream capturing");
		} else {
			printf("Starting raw stream capturing to stream.raw ...\n");
		}
	}
	if (enableRawFrameCapture)
		videoIn->rawFrameCapture = enableRawFrameCapture;

	initLut();

    printf("Begin main big loop\n");
    int loopNum = 0;
	/* main big loop */
	while (videoIn->signalquit) {
		//
		if (uvcGrab(videoIn) < 0) {
			printf("Error grabbing \n");
			break;
		}
		//
		/* if we're grabbing video, show the frame rate */
		if (videoIn->toggleAvi)
			printf("\rframe rate: %d ", frmrate);
		//

		if (getpictureflag) {
			//if (videoIn->getPict) { 
			switch (videoIn->formatIn) {
			case V4L2_PIX_FMT_MJPEG:
				get_picture(videoIn->tmpbuffer, videoIn->buf.bytesused);
				break;
			case V4L2_PIX_FMT_YUYV:
				printf("get picture yuv...\n");
				get_pictureYV2(videoIn->framebuffer, videoIn->width,
						videoIn->height);
				break;
			default:
				break;
			}
			videoIn->getPict = 0;
			printf("get picture !\n");
		}
        printf("loop number %d\n",loopNum);
        loopNum ++;
	}

	/* if avifile is defined, we made a video: compute the exact fps and
	 set it in the video */
	if (videoIn->avifile != NULL) {
		float fps = (videoIn->framecount / (videoIn->recordtime / 1000));
		fprintf(stderr, "setting fps to %f\n", fps);
		AVI_set_video(videoIn->avifile, videoIn->width, videoIn->height, fps,
				"MJPG");
		AVI_close(videoIn->avifile);
	}

	close_v4l2(videoIn);
	free(videoIn);
	freeLut();
	printf(" Clean Up done Quit \n");
}
Example 9
void bifs_to_vid(M4File *file, char *szConfigFile, u32 width, u32 height, char *rad_name, u32 dump_type, char *out_dir, Float fps, s32 frameID, s32 dump_time)
{
	M4User client;
	int N;
	BIFSVID b2v;
	u16 es_id;
	Bool first_dump, needs_raw;
	u32 i, j, di, count, timescale, frameNum;
	Float duration, cur_time;
	M4VideoSurface fb;
	M4Err e;
	char old_driv[1024], *test;
	char config_path[M4_MAX_PATH];
	avi_t *avi_out; 
	Bool reset_fps;
	ESDescriptor *esd;
	char comp[5];
	char *conv_buf;

	memset(config_path, 0, M4_MAX_PATH);
	memset(&client, 0, sizeof(M4User));
	if (szConfigFile && strlen(szConfigFile)) {
		client.config = NewIniFile(szConfigFile, GPAC_CFG_FILE);
	} else {
		client.config = loadconfigfile();
	}

	if (!client.config) {
		fprintf(stdout, "Configuration File \"GPAC.cfg\" not found\nPlease enter full path to config file:\n");
		scanf("%s", config_path);
		client.config = NewIniFile(config_path, GPAC_CFG_FILE);
		if (!client.config) {
			fprintf(stdout, "Error: Configuration File \"%s\" not found in %s\n", GPAC_CFG_FILE, config_path);
			return;
		}
	}

	avi_out = NULL;
	conv_buf = NULL;
	needs_raw = 0;
	test = IF_GetKey(client.config, "General", "PluginsDirectory");
	client.plugins = NewPluginManager((const unsigned char *) test, client.config);
	strcpy(old_driv, "raw_out");
	if (!PM_GetPluginsCount(client.plugins)) {
		printf("Error: no plugins found\n");
		goto err_exit;
	}

	/*switch driver to raw_driver*/
	test = IF_GetKey(client.config, "Video", "DriverName");
	if (test) strcpy(old_driv, test);

	test = IF_GetKey(client.config, "Rendering", "RendererName");
	/*since we only support RGB24 for MP42AVI force using RAW out with 2D driver*/
	if (test && strstr(test, "2D")) {
		IF_SetKey(client.config, "Video", "DriverName", "raw_out");
		needs_raw = 1;
	}

	b2v.sr = NewSceneRender(&client, 0, 1, NULL);
	SR_SetOption(b2v.sr, M4O_Visible, 0);

	b2v.sg = NewSceneGraph();
	SG_SetSceneTimeCallback(b2v.sg, get_scene_time, &b2v);
	SG_SetInitCallback(b2v.sg, node_init, &b2v);
	SG_SetModifiedCallback(b2v.sg, node_modif, &b2v);

	/*load config*/
	SR_SetOption(b2v.sr, M4O_ReloadConfig, 1);

	b2v.bifs = BIFS_NewDecoder(b2v.sg, 0);
	BIFS_SetClock(b2v.bifs, get_scene_time, &b2v);

	if (needs_raw) {
		test = IF_GetKey(client.config, "Video", "DriverName");
		if (stricmp(test, "raw_out") && stricmp(test, "Raw Video Output")) {
			printf("couldn't load raw output driver (%s used)\n", test);
			goto err_exit;
		}
	}

	strcpy(config_path, "");
	if (out_dir) {
		strcat(config_path, out_dir);
		if (config_path[strlen(config_path)-1] != '\\') strcat(config_path, "\\");
	}
	strcat(config_path, rad_name);
	strcat(config_path, "_bifs");
	if (!dump_type) {
		strcat(config_path, ".avi");
		avi_out = AVI_open_output_file(config_path);
		comp[0] = comp[1] = comp[2] = comp[3] = comp[4] = 0;
		if (!avi_out) goto err_exit;
	}


	for (i=0; i<M4_GetTrackCount(file); i++) {
		esd = M4_GetStreamDescriptor(file, i+1, 1);
		if (!esd) continue;
		if (!esd->dependsOnESID && (esd->decoderConfig->streamType == M4ST_SCENE)) break;
		OD_DeleteDescriptor((Descriptor **) &esd);
	}
	if (!esd) {
		printf("no bifs track found\n");
		goto err_exit;
	}

	b2v.duration = (u32) M4_GetMediaDuration(file, i+1);
	timescale = M4_GetMediaTimeScale(file, i+1);
	es_id = (u16) M4_GetTrackID(file, i+1);

	e = BIFS_ConfigureStream(b2v.bifs, es_id, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, esd->decoderConfig->objectTypeIndication);

	if (e) {
		printf("BIFS init error %s\n", M4ErrToString(e));
		OD_DeleteDescriptor((Descriptor **) &esd);
		goto err_exit;
	}
	if (dump_time>=0) dump_time = dump_time *1000 / timescale;

	SR_SetSceneGraph(b2v.sr, b2v.sg);
	count = M4_GetSampleCount(file, i+1);

	reset_fps = 0;
	if (!fps) { 
		fps = (Float) (count * timescale);
		fps /= b2v.duration;
		printf("Estimated BIFS FrameRate %g\n", fps);
		reset_fps = 1;
	}

	if (!width || !height) {
		SG_GetSizeInfo(b2v.sg, &width, &height);
	}
	/*we work in RGB24, and we must make sure the pitch is %4*/
	if ((width*3)%4) {
		printf("Adjusting width (%d) to have a stride multiple of 4\n", width);
		while ((width*3)%4) width--;
	}

	SR_SetSize(b2v.sr, width, height);

	SR_GetScreenBuffer(b2v.sr, &fb);
	width = fb.width;
	height = fb.height;
	if (avi_out) {
		AVI_set_video(avi_out, width, height, fps, comp);
		conv_buf = malloc(sizeof(char) * width * height * 3);
	}
	printf("Dumping at BIFS resolution %d x %d\n\n", width, height);
	SR_ReleaseScreenBuffer(b2v.sr, &fb);

	cur_time = 0;

	duration = 1 / fps;
	if (reset_fps) fps = 0;

	frameNum = 1;
	first_dump = 1;
	N = 0;
	for (j=0; j<count+N; j++) {
		if(j < count)
		{
			M4Sample *samp = M4_GetSample(file, i+1, j+1, &di);

			b2v.cts = samp->DTS + samp->CTS_Offset;
			/*apply command*/
			BIFS_DecodeAU(b2v.bifs, es_id, samp->data, samp->dataLength);
			M4_DeleteSample(&samp);

			if ((frameID>=0) && (j<(u32)frameID)) continue;
			if ((dump_time>=0) && ((u32) dump_time>b2v.cts)) continue;
		}
		/*render frame*/
		SR_RenderFrame(b2v.sr);
		/*needed for background2D !!*/
		if (first_dump) {
			SR_RenderFrame(b2v.sr);
			first_dump = 0;
		}

		if (fps) {
			Float bifs_time = (Float) b2v.cts;
			bifs_time /= timescale;
			if (cur_time > bifs_time) continue;

			while (1) {
				printf("dumped frame time %g (frame %d - sample %d)\r", cur_time, frameNum, j+1);
				dump_frame(b2v, conv_buf, config_path, dump_type, avi_out, frameNum);
				frameNum++;
				cur_time += duration;
				if (cur_time > bifs_time) break;
			}
		} else {
			dump_frame(b2v, conv_buf, config_path, dump_type, avi_out, (frameID>=0) ? frameID : frameNum);
			if (frameID>=0 || dump_time>=0) break;
			frameNum++;
			printf("dumped frame %d / %d\r", j+1, count);
		}

	}
	OD_DeleteDescriptor((Descriptor **) &esd);

	/*destroy everything*/
	BIFS_DeleteDecoder(b2v.bifs);
	SG_Delete(b2v.sg);
	SR_SetSceneGraph(b2v.sr, NULL);
	SR_Delete(b2v.sr);

err_exit:
	if (avi_out) AVI_close(avi_out);
	if (conv_buf) free(conv_buf);
	if (client.plugins) PM_Delete(client.plugins);
	if (needs_raw) IF_SetKey(client.config, "Video", "DriverName", old_driv);
	IF_Delete(client.config);
}
Example 10
File: main.c Project: Bevara/GPAC
void bifs_to_vid(GF_ISOFile *file, char *szConfigFile, u32 width, u32 height, char *rad_name, u32 dump_type, char *out_dir, Double fps, s32 frameID, s32 dump_time)
{
	GF_User user;
	BIFSVID b2v;
	u16 es_id;
	Bool first_dump, needs_raw;
	u32 i, j, di, count, timescale, frameNum;
	u32 duration, cur_time;
	GF_VideoSurface fb;
	GF_Err e;
	char old_driv[1024];
	const char *test;
	char config_path[GF_MAX_PATH];
	avi_t *avi_out;
	Bool reset_fps;
	GF_ESD *esd;
	char comp[5];
	char *conv_buf;

	memset(&user, 0, sizeof(GF_User));
	if (szConfigFile && strlen(szConfigFile)) {
		user.config = gf_cfg_init(szConfigFile, NULL);
	} else {
		user.config = gf_cfg_init(NULL, NULL);
	}

	if (!user.config) {
		fprintf(stdout, "Error: Configuration File \"%s\" not found in %s\n", GPAC_CFG_FILE, config_path);
		return;
	}
	avi_out = NULL;
	conv_buf = NULL;
	esd = NULL;
	needs_raw = 0;
	test = gf_cfg_get_key(user.config, "General", "ModulesDirectory");
	user.modules = gf_modules_new((const unsigned char *) test, user.config);
	strcpy(old_driv, "raw_out");
	if (!gf_modules_get_count(user.modules)) {
		printf("Error: no modules found\n");
		goto err_exit;
	}

	/*switch driver to raw_driver*/
	test = gf_cfg_get_key(user.config, "Video", "DriverName");
	if (test) strcpy(old_driv, test);

	test = gf_cfg_get_key(user.config, "Compositor", "RendererName");
	/*since we only support RGB24 for MP42AVI force using RAW out with 2D driver*/
	if (test && strstr(test, "2D")) {
		gf_cfg_set_key(user.config, "Video", "DriverName", "Raw Video Output");
		needs_raw = 1;
	}

	needs_raw = 0;
	user.init_flags = GF_TERM_NO_AUDIO | GF_TERM_FORCE_3D;
	b2v.sr = gf_sc_new(&user, 0, NULL);
	gf_sc_set_option(b2v.sr, GF_OPT_VISIBLE, 0);

	b2v.sg = gf_sg_new();
	gf_sg_set_scene_time_callback(b2v.sg, get_scene_time, &b2v);
	gf_sg_set_init_callback(b2v.sg, node_init, &b2v);
	gf_sg_set_modified_callback(b2v.sg, node_modif, &b2v);

	/*load config*/
	gf_sc_set_option(b2v.sr, GF_OPT_RELOAD_CONFIG, 1);

	b2v.bifs = gf_bifs_decoder_new(b2v.sg, 0);

	if (needs_raw) {
		test = gf_cfg_get_key(user.config, "Video", "DriverName");
		if (stricmp(test, "raw_out") && stricmp(test, "Raw Video Output")) {
			printf("couldn't load raw output driver (%s used)\n", test);
			goto err_exit;
		}
	}

	strcpy(config_path, "");
	if (out_dir) {
		strcat(config_path, out_dir);
		if (config_path[strlen(config_path)-1] != '\\') strcat(config_path, "\\");
	}
	strcat(config_path, rad_name);
	strcat(config_path, "_bifs");
	if (!dump_type) {
		strcat(config_path, ".avi");
		avi_out = AVI_open_output_file(config_path);
		comp[0] = comp[1] = comp[2] = comp[3] = comp[4] = 0;
		if (!avi_out) goto err_exit;
	}


	for (i=0; i<gf_isom_get_track_count(file); i++) {
		esd = gf_isom_get_esd(file, i+1, 1);
		if (!esd) continue;
		if (!esd->dependsOnESID && (esd->decoderConfig->streamType == GF_STREAM_SCENE)) break;
		gf_odf_desc_del((GF_Descriptor *) esd);
		esd = NULL;
	}
	if (!esd) {
		printf("no bifs track found\n");
		goto err_exit;
	}

	b2v.duration = gf_isom_get_media_duration(file, i+1);
	timescale = gf_isom_get_media_timescale(file, i+1);
	es_id = (u16) gf_isom_get_track_id(file, i+1);
	e = gf_bifs_decoder_configure_stream(b2v.bifs, es_id, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, esd->decoderConfig->objectTypeIndication);
	if (e) {
		printf("BIFS init error %s\n", gf_error_to_string(e));
		gf_odf_desc_del((GF_Descriptor *) esd);
		esd = NULL;
		goto err_exit;
	}
	if (dump_time>=0) dump_time = dump_time *1000 / timescale;

	gf_sc_set_scene(b2v.sr, b2v.sg);
	count = gf_isom_get_sample_count(file, i+1);

	reset_fps = 0;
	if (!fps) {
		fps = (Float) (count * timescale);
		fps /= (Double) (s64) b2v.duration;
		printf("Estimated BIFS FrameRate %g\n", fps);
		reset_fps = 1;
	}

	if (!width || !height) {
		gf_sg_get_scene_size_info(b2v.sg, &width, &height);
	}
	/*we work in RGB24, and we must make sure the pitch is %4*/
	if ((width*3)%4) {
		printf("Adjusting width (%d) to have a stride multiple of 4\n", width);
		while ((width*3)%4) width--;
	}

	gf_sc_set_size(b2v.sr, width, height);
	gf_sc_draw_frame(b2v.sr);

	gf_sc_get_screen_buffer(b2v.sr, &fb);
	width = fb.width;
	height = fb.height;
	if (avi_out) {
		AVI_set_video(avi_out, width, height, fps, comp);
		conv_buf = gf_malloc(sizeof(char) * width * height * 3);
	}
	printf("Dumping at BIFS resolution %d x %d\n\n", width, height);
	gf_sc_release_screen_buffer(b2v.sr, &fb);

	cur_time = 0;

	duration = (u32)(timescale / fps);
	if (reset_fps) fps = 0;

	frameNum = 1;
	first_dump = 1;
	for (j=0; j<count; j++) {
		GF_ISOSample *samp = gf_isom_get_sample(file, i+1, j+1, &di);

		b2v.cts = samp->DTS + samp->CTS_Offset;
		/*apply command*/
		gf_bifs_decode_au(b2v.bifs, es_id, samp->data, samp->dataLength, ((Double)(s64)b2v.cts)/1000.0);
		gf_isom_sample_del(&samp);

		if ((frameID>=0) && (j<(u32)frameID)) continue;
		if ((dump_time>=0) && ((u32) dump_time>b2v.cts)) continue;
		/*render frame*/
		gf_sc_draw_frame(b2v.sr);
		/*needed for background2D !!*/
		if (first_dump) {
			gf_sc_draw_frame(b2v.sr);
			first_dump = 0;
		}

		if (fps) {
			if (cur_time > b2v.cts) continue;

			while (1) {
				printf("dumped frame time %f (frame %d - sample %d)\r", ((Float)cur_time)/timescale, frameNum, j+1);
				dump_frame(b2v, conv_buf, config_path, dump_type, avi_out, frameNum);
				frameNum++;
				cur_time += duration;
				if (cur_time > b2v.cts) break;
			}
		} else {
			dump_frame(b2v, conv_buf, config_path, dump_type, avi_out, (frameID>=0) ? frameID : frameNum);
			if (frameID>=0 || dump_time>=0) break;
			frameNum++;
			printf("dumped frame %d / %d\r", j+1, count);
		}

	}
	gf_odf_desc_del((GF_Descriptor *) esd);

	/*destroy everything*/
	gf_bifs_decoder_del(b2v.bifs);
	gf_sg_del(b2v.sg);
	gf_sc_set_scene(b2v.sr, NULL);
	gf_sc_del(b2v.sr);

err_exit:
	if (avi_out) AVI_close(avi_out);
	if (conv_buf) gf_free(conv_buf);
	if (user.modules) gf_modules_del(user.modules);
	if (needs_raw) gf_cfg_set_key(user.config, "Video", "DriverName", old_driv);
	gf_cfg_del(user.config);
}
Example 11
int main(int argc, char *argv[])
{
  avi_t *avifile, *avifile1, *avifile2;

  char *outfile=NULL, *infile=NULL, *audfile=NULL;

  long rate, mp3rate;

  int j, ch, cc=0, track_num=0, out_track_num=-1;
  int width, height, format=0, format_add, chan, bits, aud_error=0;

  double fps;

  char *codec;

  long offset, frames, n, bytes, aud_offset=0;

  int key;

  int aud_tracks;

  // for mp3 audio
  FILE *f=NULL;
  int len, headlen, chan_i, rate_i, mp3rate_i;
  unsigned long vid_chunks=0;
  char head[8];
  off_t pos;
  double aud_ms = 0.0, vid_ms = 0.0;
  double aud_ms_w[AVI_MAX_TRACKS];

  ac_init(AC_ALL);

  if(argc==1) usage(EXIT_FAILURE);

  while ((ch = getopt(argc, argv, "A:a:b:ci:o:p:f:x:?hv")) != -1) {

    switch (ch) {

    case 'i':

      if(optarg[0]=='-') usage(EXIT_FAILURE);
      infile = optarg;

      break;

    case 'A':

      if(optarg[0]=='-') usage(EXIT_FAILURE);
      out_track_num = atoi(optarg);

      if(out_track_num<-1) usage(EXIT_FAILURE);

      break;

    case 'a':

      if(optarg[0]=='-') usage(EXIT_FAILURE);
      track_num = atoi(optarg);

      if(track_num<0) usage(EXIT_FAILURE);

      break;

    case 'b':

      if(optarg[0]=='-') usage(EXIT_FAILURE);
      is_vbr = atoi(optarg);

      if(is_vbr<0) usage(EXIT_FAILURE);

      break;

    case 'c':

      drop_video = 1;

      break;

    case 'o':

      if(optarg[0]=='-') usage(EXIT_FAILURE);
      outfile = optarg;

      break;

    case 'p':

      if(optarg[0]=='-') usage(EXIT_FAILURE);
      audfile = optarg;

      break;

    case 'f':

      if(optarg[0]=='-') usage(EXIT_FAILURE);
      comfile = optarg;

      break;


    case 'x':

      if(optarg[0]=='-') usage(EXIT_FAILURE);
      indexfile = optarg;

      break;

    case 'v':
      version();
      exit(EXIT_SUCCESS);
    case 'h':
      usage(EXIT_SUCCESS);
    default:
      usage(EXIT_FAILURE);
    }
  }

  if(outfile == NULL || infile == NULL) usage(EXIT_FAILURE);

  printf("scanning file %s for video/audio parameter\n", infile);

  // open first file for video/audio info read only
  if(indexfile) {
      if (NULL == (avifile1 = AVI_open_input_indexfile(infile,0,indexfile))) {
	  AVI_print_error("AVI open with index file");
      }
  }
  else if(NULL == (avifile1 = AVI_open_input_file(infile,1))) {
      AVI_print_error("AVI open");
      exit(1);
  }

  AVI_info(avifile1);

  // safety checks

  if(strcmp(infile, outfile)==0) {
    printf("error: output filename conflicts with input filename\n");
    exit(1);
  }

  ch = optind;

  while (ch < argc) {

    if(tc_file_check(argv[ch]) != 0) {
      printf("error: file not found\n");
      exit(1);
    }

    if(strcmp(argv[ch++], outfile)==0) {
      printf("error: output filename conflicts with input filename\n");
      exit(1);
    }
  }

  // open output file
  if(NULL == (avifile = AVI_open_output_file(outfile))) {
    AVI_print_error("AVI open");
    exit(1);
  }


  // read video info;

  width  =  AVI_video_width(avifile1);
  height =  AVI_video_height(avifile1);

  fps    =  AVI_frame_rate(avifile1);
  codec  =  AVI_video_compressor(avifile1);

  //set video in outputfile
  AVI_set_video(avifile, width, height, fps, codec);

  if (comfile!=NULL)
    AVI_set_comment_fd(avifile, open(comfile, O_RDONLY));

  //multi audio tracks?
  aud_tracks = AVI_audio_tracks(avifile1);
  if (out_track_num < 0) out_track_num = aud_tracks;

  for(j=0; j<aud_tracks; ++j) {

      if (out_track_num == j) continue;
      AVI_set_audio_track(avifile1, j);

      rate   =  AVI_audio_rate(avifile1);
      chan   =  AVI_audio_channels(avifile1);
      bits   =  AVI_audio_bits(avifile1);

      format =  AVI_audio_format(avifile1);
      mp3rate=  AVI_audio_mp3rate(avifile1);
      //printf("TRACK %d MP3RATE %ld VBR %ld\n", j, mp3rate, AVI_get_audio_vbr(avifile1));

      //set next track of output file
      AVI_set_audio_track(avifile, j);
      AVI_set_audio(avifile, chan, rate, bits, format, mp3rate);
      AVI_set_audio_vbr(avifile, AVI_get_audio_vbr(avifile1));
  }

  if(audfile!=NULL) goto audio_merge;

  // close reopen in merger function
  AVI_close(avifile1);

  //-------------------------------------------------------------

  printf("merging multiple AVI-files (concatenating) ...\n");

  // extract and write to new files

  printf ("file %02d %s\n", ++cc, infile);
  merger(avifile, infile);

  while (optind < argc) {

    printf ("file %02d %s\n", ++cc, argv[optind]);
    merger(avifile, argv[optind++]);
  }

  // close new AVI file

  AVI_close(avifile);

  printf("... done merging %d file(s) in %s\n", cc, outfile);

  // reopen file for video/audio info
  if(NULL == (avifile = AVI_open_input_file(outfile,1))) {
    AVI_print_error("AVI open");
    exit(1);
  }
  AVI_info(avifile);

  return(0);

  //-------------------------------------------------------------


// *************************************************
// Merge the audio track of an additional AVI file
// *************************************************

 audio_merge:

  printf("merging audio %s track %d (multiplexing) into %d ...\n", audfile, track_num, out_track_num);

  // open audio file read only
  if(NULL == (avifile2 = AVI_open_input_file(audfile,1))) {
    int f=open(audfile, O_RDONLY), ret=0;
    char head[1024], *c;
    c = head;
    if (f>0 && (1024 == read(f, head, 1024)) ) {
      while ((c-head<1024-8) && (ret = tc_probe_audio_header(c, 8))<=0 ) {
	c++;
      }
      close(f);

      if (ret > 0) {
	aud_offset = c-head;
	//printf("found atrack 0x%x off=%ld\n", ret, aud_offset);
	goto merge_mp3;
      }
    }

    AVI_print_error("AVI open");
    exit(1);
  }

  AVI_info(avifile2);

  //switch to requested track

  if(AVI_set_audio_track(avifile2, track_num)<0) {
    fprintf(stderr, "invalid audio track\n");
  }

  rate   =  AVI_audio_rate(avifile2);
  chan   =  AVI_audio_channels(avifile2);
  bits   =  AVI_audio_bits(avifile2);

  format =  AVI_audio_format(avifile2);
  mp3rate=  AVI_audio_mp3rate(avifile2);

  //set next track
  AVI_set_audio_track(avifile, out_track_num);
  AVI_set_audio(avifile, chan, rate, bits, format, mp3rate);
  AVI_set_audio_vbr(avifile, AVI_get_audio_vbr(avifile2));

  AVI_seek_start(avifile1);
  frames =  AVI_video_frames(avifile1);
  offset = 0;

  printf ("file %02d %s\n", ++cc, infile);

  for (n=0; n<AVI_MAX_TRACKS; n++)
    aud_ms_w[n] = 0.0;
  vid_chunks=0;

  for (n=0; n<frames; ++n) {

    // video
    bytes = AVI_read_frame(avifile1, data, &key);

    if(bytes < 0) {
      AVI_print_error("AVI read video frame");
      return(-1);
    }

    if(AVI_write_frame(avifile, data, bytes, key)<0) {
      AVI_print_error("AVI write video frame");
      return(-1);
    }
    ++vid_chunks;
    vid_ms = vid_chunks*1000.0/fps;

    for(j=0; j<aud_tracks; ++j) {

      if (j == out_track_num) continue;
      AVI_set_audio_track(avifile1, j);
      AVI_set_audio_track(avifile, j);
      chan   = AVI_audio_channels(avifile1);

      // audio
      chan = AVI_audio_channels(avifile1);
      if(chan) {
	  sync_audio_video_avi2avi(vid_ms, &aud_ms_w[j], avifile1, avifile);
      }
    }


    // merge additional track

    // audio
    chan = AVI_audio_channels(avifile2);
    AVI_set_audio_track(avifile, out_track_num);

    if(chan) {
	sync_audio_video_avi2avi(vid_ms, &aud_ms, avifile2, avifile);
    }

    // progress
    fprintf(stderr, "[%s] (%06ld-%06ld)\r", outfile, offset, offset + n);

  }

  fprintf(stderr,"\n");

  offset = frames;

  //more files to merge?

  AVI_close(avifile1);

  while (optind < argc) {

    printf ("file %02d %s\n", ++cc, argv[optind]);

    if(NULL == ( avifile1 = AVI_open_input_file(argv[optind++],1))) {
      AVI_print_error("AVI open");
      goto finish;
    }

    AVI_seek_start(avifile1);
    frames =  AVI_video_frames(avifile1);

    for (n=0; n<frames; ++n) {

      // video
      bytes = AVI_read_frame(avifile1, data, &key);

      if(bytes < 0) {
	AVI_print_error("AVI read video frame");
	return(-1);
      }

      if(AVI_write_frame(avifile, data, bytes, key)<0) {
	AVI_print_error("AVI write video frame");
	return(-1);
      }

      ++vid_chunks;
      vid_ms = vid_chunks*1000.0/fps;

      // audio
      for(j=0; j<aud_tracks; ++j) {

	if (j == out_track_num) continue;
	AVI_set_audio_track(avifile1, j);
	AVI_set_audio_track(avifile, j);

	chan   = AVI_audio_channels(avifile1);

	if(chan) {
	  sync_audio_video_avi2avi(vid_ms, &aud_ms_w[j], avifile1, avifile);
	}
      }

      // merge additional track

      chan   = AVI_audio_channels(avifile2);
      AVI_set_audio_track(avifile, out_track_num);

      if(chan) {
	  sync_audio_video_avi2avi(vid_ms, &aud_ms, avifile2, avifile);
      } // chan

      // progress
      fprintf(stderr, "[%s] (%06ld-%06ld)\r", outfile, offset, offset + n);
    }

    fprintf(stderr, "\n");

    offset += frames;
    AVI_close(avifile1);
  }

 finish:

  // close new AVI file

  printf("... done multiplexing in %s\n", outfile);

  AVI_info(avifile);
  AVI_close(avifile);

  return(0);


// *************************************************
// Merge a raw audio file which is either MP3 or AC3
// *************************************************

merge_mp3:

  f = fopen(audfile,"rb");
  if (!f) { perror ("fopen"); exit(1); }

  fseek(f, aud_offset, SEEK_SET);
  len = fread(head, 1, 8, f);
  format_add  = tc_probe_audio_header(head, len);
  headlen = tc_get_audio_header(head, len, format_add, &chan_i, &rate_i, &mp3rate_i);
  fprintf(stderr, "... this looks like a %s track ...\n", (format_add==0x55)?"MP3":"AC3");

  fseek(f, aud_offset, SEEK_SET);

  //set next track
  AVI_set_audio_track(avifile, out_track_num);
  AVI_set_audio(avifile, chan_i, rate_i, 16, format_add, mp3rate_i);
  AVI_set_audio_vbr(avifile, is_vbr);

  AVI_seek_start(avifile1);
  frames =  AVI_video_frames(avifile1);
  offset = 0;

  for (n=0; n<AVI_MAX_TRACKS; ++n)
      aud_ms_w[n] = 0.0;

  for (n=0; n<frames; ++n) {

    // video
    bytes = AVI_read_frame(avifile1, data, &key);

    if(bytes < 0) {
      AVI_print_error("AVI read video frame");
      return(-1);
    }

    if(AVI_write_frame(avifile, data, bytes, key)<0) {
      AVI_print_error("AVI write video frame");
      return(-1);
    }

    vid_chunks++;
    vid_ms = vid_chunks*1000.0/fps;

    for(j=0; j<aud_tracks; ++j) {

      if (j == out_track_num) continue;
      AVI_set_audio_track(avifile1, j);
      AVI_set_audio_track(avifile, j);
      chan   = AVI_audio_channels(avifile1);

      if(chan) {
	sync_audio_video_avi2avi(vid_ms, &aud_ms_w[j], avifile1, avifile);
      }
    }


    // merge additional track

    if(headlen>4 && !aud_error) {
      while (aud_ms < vid_ms) {
	//printf("reading Audio Chunk ch(%ld) vms(%lf) ams(%lf)\n", vid_chunks, vid_ms, aud_ms);
	pos = ftell(f);

	len = fread (head, 1, 8, f);
	if (len<=0) { //eof
	  fprintf(stderr, "EOF in %s; continuing ..\n", audfile);
	  aud_error=1;
	  break;
	}

	if ( (headlen = tc_get_audio_header(head, len, format_add, NULL, NULL, &mp3rate_i))<0) {
	  fprintf(stderr, "Broken %s track #(%d)? skipping\n", (format_add==0x55?"MP3":"AC3"), aud_tracks);
	  aud_ms = vid_ms;
	  aud_error=1;
	} else { // look in import/tcscan.c for explanation
	  aud_ms += (headlen*8.0)/(mp3rate_i);
	}

	fseek (f, pos, SEEK_SET);

	len = fread (data, headlen, 1, f);
	if (len<=0) { //eof
	  fprintf(stderr, "EOF in %s; continuing ..\n", audfile);
	  aud_error=1;
	  break;
	}

	AVI_set_audio_track(avifile, out_track_num);

	if(AVI_write_audio(avifile, data, headlen)<0) {
	  AVI_print_error("AVI write audio frame");
	  return(-1);
	}

      }
    }

    // progress
    fprintf(stderr, "[%s] (%06ld-%06ld)\r", outfile, offset, offset + n);

  }

  fprintf(stderr,"\n");
  offset = frames;

  // more files?
  while (optind < argc) {

    printf ("file %02d %s\n", ++cc, argv[optind]);

    if(NULL == ( avifile1 = AVI_open_input_file(argv[optind++],1))) {
      AVI_print_error("AVI open");
      goto finish;
    }

    AVI_seek_start(avifile1);
    frames =  AVI_video_frames(avifile1);

    for (n=0; n<frames; ++n) {

      // video
      bytes = AVI_read_frame(avifile1, data, &key);

      if(bytes < 0) {
	AVI_print_error("AVI read video frame");
	return(-1);
      }

      if(AVI_write_frame(avifile, data, bytes, key)<0) {
	AVI_print_error("AVI write video frame");
	return(-1);
      }

      vid_chunks++;
      vid_ms = vid_chunks*1000.0/fps;

      for(j=0; j<aud_tracks; ++j) {

	if (j == out_track_num) continue;
	AVI_set_audio_track(avifile1, j);
	AVI_set_audio_track(avifile, j);
	chan   = AVI_audio_channels(avifile1);

	if(chan) {
	  sync_audio_video_avi2avi(vid_ms, &aud_ms_w[j], avifile1, avifile);
	}
      }

      // merge additional track
      // audio

      if(headlen>4 && !aud_error) {
	while (aud_ms < vid_ms) {
	  //printf("reading Audio Chunk ch(%ld) vms(%lf) ams(%lf)\n", vid_chunks, vid_ms, aud_ms);
	  pos = ftell(f);

	  len = fread (head, 1, 8, f);
	  if (len<=0) { //eof
	    fprintf(stderr, "EOF in %s; continuing ..\n", audfile);
	    aud_error=1; break;
	  }

	  if ( (headlen = tc_get_audio_header(head, len, format_add, NULL, NULL, &mp3rate_i))<0) {
	    fprintf(stderr, "Broken %s track #(%d)?\n", (format_add==0x55?"MP3":"AC3"), aud_tracks);
	    aud_ms = vid_ms;
	    aud_error=1;
	    break;
	  } else { // look in import/tcscan.c for explanation
	    aud_ms += (headlen*8.0)/(mp3rate_i);
	  }

	  fseek (f, pos, SEEK_SET);

	  len = fread (data, headlen, 1, f);
	  if (len<=0) { //eof
	    fprintf(stderr, "EOF in %s; continuing ..\n", audfile);
	    aud_error=1; break;
	  }

	  AVI_set_audio_track(avifile, out_track_num);

	  if(AVI_write_audio(avifile, data, headlen)<0) {
	    AVI_print_error("AVI write audio frame");
	    return(-1);
	  }

	}
      }

      // progress
      fprintf(stderr, "[%s] (%06ld-%06ld)\r", outfile, offset, offset + n);
    }

    fprintf(stderr, "\n");

    offset += frames;
    AVI_close(avifile1);
  }


  if (f) fclose(f);

  printf("... done multiplexing in %s\n", outfile);

  AVI_close(avifile);

  return(0);
}
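
A minimal standalone sketch of the timing rule the merge loops above rely on: a compressed audio frame of framebytes bytes at kbitrate kbit/s lasts framebytes*8/kbitrate milliseconds, and frames are appended until the audio clock catches up with the video clock vid_chunks*1000/fps. The frame rate, bitrate, and frame size below are illustrative assumptions, not values taken from the tool.

#include <stdio.h>

/* duration in ms of one MP3/AC3 frame of framebytes bytes at kbitrate kbit/s */
static double frame_ms(long framebytes, int kbitrate)
{
    return (framebytes * 8.0) / (double)kbitrate;  /* bits / (kbit/s) == ms */
}

int main(void)
{
    double fps = 25.0;        /* assumed video frame rate        */
    int    kbitrate = 128;    /* assumed audio bitrate in kbit/s */
    long   framebytes = 417;  /* assumed MP3 frame size in bytes */
    double vid_ms = 0.0, aud_ms = 0.0;
    long   vid_chunks, written = 0;

    for (vid_chunks = 1; vid_chunks <= 25; ++vid_chunks) {
        vid_ms = vid_chunks * 1000.0 / fps;  /* video clock, as in the loop above */
        while (aud_ms < vid_ms) {            /* same catch-up test as above       */
            aud_ms += frame_ms(framebytes, kbitrate);
            ++written;
        }
    }
    printf("wrote %ld audio frames for %.0f ms of video\n", written, vid_ms);
    return 0;
}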
Esempio n. 12
0
evid_handler_t *evid_create_handler(frame_par_t *frame_par, evid_xvidpar_t *evid_xvidpar, double framerate, char *filename, char **errstr) {
  xvid_enc_create_t xvid_enc_create;
  handler_t *handler;

  handler=(handler_t *)x_malloc(sizeof(handler_t));
  // frame_par
  memcpy(&(handler->frame_par), frame_par, sizeof(frame_par_t));
  // xvid_par
  memcpy(&(handler->evid_xvidpar), evid_xvidpar, sizeof(evid_xvidpar_t));
  // sets file name
  handler->filename=(char *)x_malloc(strlen(filename)+1);
  strcpy(handler->filename, filename);

  // avilib
  if((handler->avilib.avi=AVI_open_output_file(filename))==NULL) {
    x_asprintf(errstr, "%s.\n", AVI_strerror());
    free(handler);
    return NULL;
  }

  AVI_set_video(handler->avilib.avi, frame_par->width, frame_par->height, framerate, "xvid");
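  // no real audio in this file: declare a dummy track with zero channels and an unknown WAVE format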
  AVI_set_audio(handler->avilib.avi, 0, 44100, 16, WAVE_FORMAT_UNKNOWN);

  // xvid encoder
  memset(&xvid_enc_create, 0, sizeof(xvid_enc_create));
  xvid_enc_create.profile=XVID_PROFILE_S_L0; // XVID_PROFILE_AS_L4; // not sure about this...
  xvid_enc_create.version=XVID_VERSION;
  xvid_enc_create.width=frame_par->width;
  xvid_enc_create.height=frame_par->height;
  xvid_enc_create.num_zones=0;
  xvid_enc_create.zones=NULL;
  xvid_enc_create.num_plugins=0;
  xvid_enc_create.plugins=NULL;
  xvid_enc_create.num_threads=0;
  xvid_enc_create.max_bframes=0;
  xvid_enc_create.global=0;
  xvid_enc_create.global|=XVID_GLOBAL_CLOSED_GOP;
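  // frame period as the rational fincr/fbase: 1000/(framerate*1000) = 1/framerate seconds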
  xvid_enc_create.fincr=1000;
  xvid_enc_create.fbase=(int)(framerate*1000);
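  // at most framerate*10 frames between keyframes, i.e. a keyframe at least every ~10 seconds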
  xvid_enc_create.max_key_interval=(int)(framerate*10);
  xvid_enc_create.frame_drop_ratio=0;
  xvid_enc_create.bquant_ratio=150;
  xvid_enc_create.bquant_offset=100;
  xvid_enc_create.min_quant[0]=2;
  xvid_enc_create.max_quant[0]=31;

  if(xvid_encore(NULL, XVID_ENC_CREATE, &xvid_enc_create, NULL)) {
    x_asprintf(errstr, "Error initializing XviD encore.\n");
    if(AVI_close(handler->avilib.avi)) {
      free((*errstr));
      x_asprintf(errstr, "Error initializing XviD encore / Error closing AVI file: %s.\n", AVI_strerror());
    }
    unlink(filename); // no error checking here
    free(handler);
    return NULL;
  }
  handler->xvid.handle=xvid_enc_create.handle;
  // bitstream
  handler->xvid.bitstream=x_malloc((size_t)(handler->frame_par.width*handler->frame_par.height*PALETTE_DEPTH));

  // errstr
  handler->errstr=NULL;
  return (evid_handler_t *)handler;
}
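
A small sketch, separate from the handler above, of how a frame rate maps onto the fincr/fbase pair in xvid_enc_create_t. The handler simply uses fincr=1000 and fbase=framerate*1000; the variant below also reduces the fraction, an optional refinement the original code does not perform.

#include <stdio.h>

static int gcd(int a, int b) { return b ? gcd(b, a % b) : a; }

/* express a frame rate in fps as the rational fbase/fincr (frame period = fincr/fbase s) */
static void fps_to_xvid(double framerate, int *fincr, int *fbase)
{
    int incr = 1000;
    int base = (int)(framerate * 1000.0 + 0.5);  /* e.g. 29.97 fps -> 29970 */
    int g = gcd(base, incr);
    *fincr = incr / g;
    *fbase = base / g;
}

int main(void)
{
    int fincr, fbase;
    fps_to_xvid(29.97, &fincr, &fbase);
    printf("29.97 fps -> fincr=%d fbase=%d (%.3f fps)\n",
           fincr, fbase, (double)fbase / fincr);
    return 0;
}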
Esempio n. 13
0
int main(int argc, char *argv[])
{

  avi_t *avifile1=NULL;
  avi_t *avifile2=NULL;
  avi_t *avifile3=NULL;

  char *in_file=NULL, *out_file=NULL;

  long frames, bytes;

  double fps;

  char *codec;

  int track_num=0, aud_tracks;
  int encode_null=0;

  int i, j, n, key, shift=0;

  int ch, preload=0;

  long rate, mp3rate;

  int width, height, format, chan, bits;

  int be_quiet = 0;
  FILE *status_fd = stderr;

  /* for null frame encoding */
  char nulls[32000];
  long nullbytes=0;
  char tmp0[] = "/tmp/nullfile.00.avi"; /* XXX: use mktemp*() */

  buffer_list_t *ptr;

  double vid_ms = 0.0, shift_ms = 0.0, one_vid_ms = 0.0;
  double aud_ms [ AVI_MAX_TRACKS ];
  int aud_bitrate = 0;
  int aud_chunks = 0;

  ac_init(AC_ALL);

  if(argc==1) usage(EXIT_FAILURE);

  while ((ch = getopt(argc, argv, "a:b:vf:i:o:n:Nq?h")) != -1)
    {

	switch (ch) {

	case 'i':

	     if(optarg[0]=='-') usage(EXIT_FAILURE);
	    in_file=optarg;

	    break;

	case 'a':

	  if(optarg[0]=='-') usage(EXIT_FAILURE);
	  track_num = atoi(optarg);

	  if(track_num<0) usage(EXIT_FAILURE);

	  break;

	case 'b':

	  if(optarg[0]=='-') usage(EXIT_FAILURE);
	  is_vbr = atoi(optarg);

	  if(is_vbr<0) usage(EXIT_FAILURE);

	  break;

	case 'o':

	    if(optarg[0]=='-') usage(EXIT_FAILURE);
	    out_file=optarg;

	    break;

	case 'f':

	    if(optarg[0]=='-') usage(EXIT_FAILURE);
	    comfile = optarg;

	    break;

	case 'n':

	    if(sscanf(optarg,"%d", &shift)!=1) {
		fprintf(stderr, "invalid parameter for option -n\n");
		usage(EXIT_FAILURE);
	    }
	    break;

	case 'N':
	    encode_null=1;
	    break;
	case 'q':
	    be_quiet = 1;
	    break;
	case 'v':
	    version();
	    exit(0);
	    break;
      case 'h':
	usage(EXIT_SUCCESS);
      default:
	usage(EXIT_FAILURE);
      }
    }

  // check
  if(in_file==NULL || out_file == NULL) usage(EXIT_FAILURE);

  if(shift == 0) fprintf(stderr, "no sync requested - exit\n");

  memset (nulls, 0, sizeof(nulls));


  // open file
  if(NULL == (avifile1 = AVI_open_input_file(in_file,1))) {
      AVI_print_error("AVI open");
      exit(1);
  }

  if(strcmp(in_file, out_file)==0) {
      printf("error: output filename conflicts with input filename\n");
      exit(1);
  }

  if(NULL == (avifile2 = AVI_open_output_file(out_file))) {
    AVI_print_error("AVI open");
    exit(1);
  }

  if (be_quiet) {
    if (!(status_fd = fopen("/dev/null", "w"))) {
      fprintf(stderr, "Can't open /dev/null\n");
      exit(1);
    }
  }

  // print input file info

  AVI_info(avifile1);

  // read video parameters

  frames =  AVI_video_frames(avifile1);
   width =  AVI_video_width(avifile1);
  height =  AVI_video_height(avifile1);

  fps    =  AVI_frame_rate(avifile1);
  codec  =  AVI_video_compressor(avifile1);

  //set video in outputfile
  AVI_set_video(avifile2, width, height, fps, codec);

  if (comfile!=NULL)
    AVI_set_comment_fd(avifile2, open(comfile, O_RDONLY));

  aud_tracks = AVI_audio_tracks(avifile1);

  for(j=0; j<aud_tracks; ++j) {

    AVI_set_audio_track(avifile1, j);

    rate   =  AVI_audio_rate(avifile1);
    chan   =  AVI_audio_channels(avifile1);
    bits   =  AVI_audio_bits(avifile1);

    format =  AVI_audio_format(avifile1);
    mp3rate=  AVI_audio_mp3rate(avifile1);

    //set next track of output file
    AVI_set_audio_track(avifile2, j);
    AVI_set_audio(avifile2, chan, rate, bits, format, mp3rate);
    AVI_set_audio_vbr(avifile2, is_vbr);
  }

  //switch to requested audio_channel

  if(AVI_set_audio_track(avifile1, track_num)<0) {
    fprintf(stderr, "invalid auto track\n");
  }

  AVI_set_audio_track(avifile2, track_num);

  if (encode_null) {
      char cmd[1024];

      rate   =  AVI_audio_rate(avifile2);
      chan   =  AVI_audio_channels(avifile2);
      bits   =  AVI_audio_bits(avifile2);
      format =  AVI_audio_format(avifile2);
      mp3rate=  AVI_audio_mp3rate(avifile2);

      if (bits==0) bits=16;
      if (mp3rate%2) mp3rate++;

      fprintf(status_fd, "Creating silent mp3 frame with current parameter\n");
      memset (cmd, 0, sizeof(cmd));
      tc_snprintf(cmd, sizeof(cmd), "transcode -i /dev/zero -o %s -x raw,raw"
	      " -n 0x1 -g 16x16 -y raw,raw -c 0-5 -e %ld,%d,%d -b %ld -q0",
	      tmp0, rate,bits,chan, mp3rate);

      printf("%s\n", cmd);
      system(cmd);

      if(NULL == (avifile3 = AVI_open_input_file(tmp0,1))) {
	  AVI_print_error("AVI open");
	  exit(1);
      }

      nullbytes = AVI_audio_size(avifile3, 3);

      /* just read a few frames */
      if(AVI_read_audio(avifile3, nulls, nullbytes) < 0) {
	  AVI_print_error("AVI audio read frame");
	  return(-1);
      }
      memset (nulls, 0, sizeof(nulls));
      if(AVI_read_audio(avifile3, nulls, nullbytes) < 0) {
	  AVI_print_error("AVI audio read frame");
	  return(-1);
      }
      memset (nulls, 0, sizeof(nulls));
      if(AVI_read_audio(avifile3, nulls, nullbytes) < 0) {
	  AVI_print_error("AVI audio read frame");
	  return(-1);
      }


      /*
      printf("\nBytes (%ld): \n", nullbytes);
      {
	  int asd=0;
	  for (asd=0; asd<nullbytes; asd++){
	      printf("%x ",(unsigned char)nulls[asd]);
	  }
	  printf("\n");
      }
      */

  }

  vid_ms   = 0.0;
  shift_ms = 0.0;
  for (n=0; n<AVI_MAX_TRACKS; ++n)
      aud_ms[n] = 0.0;

  // ---------------------------------------------------------------------

  for (n=0; n<frames; ++n) {

    // video unchanged
    bytes = AVI_read_frame(avifile1, data, &key);

    if(bytes < 0) {
      AVI_print_error("AVI read video frame");
      return(-1);
    }

    if(AVI_write_frame(avifile2, data, bytes, key)<0) {
      AVI_print_error("AVI write video frame");
      return(-1);
    }

    vid_ms = (n+1)*1000.0/fps;


    // Pass-through all other audio tracks.
    for(j=0; j<aud_tracks; ++j) {

	// skip track we want to modify
	if (j == track_num) continue;

	// switch to track
	AVI_set_audio_track(avifile1, j);
	AVI_set_audio_track(avifile2, j);
	sync_audio_video_avi2avi(vid_ms, &aud_ms[j], avifile1, avifile2);
    }

    //switch to requested audio_channel
    if(AVI_set_audio_track(avifile1, track_num)<0) {
	fprintf(stderr, "invalid auto track\n");
    }
    AVI_set_audio_track(avifile2, track_num);
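    // requested shift expressed in ms; one_vid_ms is the duration of a single video frame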
    shift_ms = (double)shift*1000.0/fps;
    one_vid_ms = 1000.0/fps;
    format = AVI_audio_format(avifile1);
    rate   = AVI_audio_rate(avifile1);
    chan   = AVI_audio_channels(avifile1);
    bits   = AVI_audio_bits(avifile1);
    bits   = bits==0?16:bits;
    mp3rate= AVI_audio_mp3rate(avifile1);


    if(shift>0) {

      // for n < shift, shift audio frames are discarded

      if(!preload) {

	if (tc_format_ms_supported(format)) {
	  for(i=0;i<shift;++i) {
	      //fprintf (stderr, "shift (%d) i (%d) n (%d) a (%d)\n", shift, i, n, aud_chunks);
	    while (aud_ms[track_num] < vid_ms + one_vid_ms*(double)i) {
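		// PCM (0x1) and AC-3 (0x2000) have a known bitrate, so skip per-frame header parsing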

		aud_bitrate = (format==0x1||format==0x2000)?1:0;
		aud_chunks++;
		if( (bytes = AVI_read_audio_chunk(avifile1, data)) <= 0) {
		    aud_ms[track_num] = vid_ms + one_vid_ms*i;
		    if (bytes == 0) continue;
		    AVI_print_error("AVI 2 audio read frame");
		    break;
		}

		if ( !aud_bitrate && tc_get_audio_header(data, bytes, format, NULL, NULL, &aud_bitrate)<0) {
		    // if this is the last frame of the file, slurp in audio chunks
		    if (n == frames-1) continue;
		    aud_ms[track_num] = vid_ms + one_vid_ms*i;
		} else
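		    // PCM: bytes*8/(rate*chan*bits/1000) ms; AC-3: use the stored mp3rate; otherwise use the bitrate parsed from the frame header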
		    aud_ms[track_num] += (bytes*8.0)/(format==0x1?((double)(rate*chan*bits)/1000.0):
				       (format==0x2000?(double)(mp3rate):aud_bitrate));
	    }
	  }

	} else { // fallback
	    bytes=0;
	    for(i=0;i<shift;++i) {
		do {
		    if( (bytes = AVI_read_audio_chunk(avifile1, data)) < 0) {
			AVI_print_error("AVI audio read frame");
			return(-1);
		    }
		} while (AVI_can_read_audio(avifile1));
	    }
	}
	preload=1;
      }


      // copy rest of the track
      if(n<frames-shift) {
	if (tc_format_ms_supported(format)) {

	    while (aud_ms[track_num] < vid_ms + shift_ms) {

		aud_chunks++;
		aud_bitrate = (format==0x1||format==0x2000)?1:0;

		if( (bytes = AVI_read_audio_chunk(avifile1, data)) < 0) {
		    aud_ms[track_num] = vid_ms + shift_ms;
		    AVI_print_error("AVI 3 audio read frame");
		    break;
		}

		if(AVI_write_audio(avifile2, data, bytes) < 0) {
		    AVI_print_error("AVI 3 write audio frame");
		    return(-1);
		}

		fprintf(status_fd, "V [%05d][%08.2f] | A [%05d][%08.2f] [%05ld]\r", n, vid_ms, aud_chunks, aud_ms[track_num], bytes);

		if (bytes == 0) {
		    aud_ms[track_num] = vid_ms + shift_ms;
		    continue;
		}

		if(n>=frames-2*shift) {

		    // save audio frame for later
		    ptr = buffer_register(n);

		    if(ptr==NULL) {
			fprintf(stderr,"buffer allocation failed\n");
			break;
		    }

		    ac_memcpy(ptr->data, data, bytes);
		    ptr->size = bytes;
		    ptr->status = BUFFER_READY;
		}


		if ( !aud_bitrate && tc_get_audio_header(data, bytes, format, NULL, NULL, &aud_bitrate)<0) {
		    if (n == frames-1) continue;
		    aud_ms[track_num] = vid_ms + shift_ms;
		} else
		    aud_ms[track_num] += (bytes*8.0)/(format==0x1?((double)(rate*chan*bits)/1000.0):
				       (format==0x2000?(double)(mp3rate):aud_bitrate));
	    }

	} else { // fallback
	bytes = AVI_audio_size(avifile1, n+shift-1);

	do {
	    if( (bytes = AVI_read_audio_chunk(avifile1, data)) < 0) {
		AVI_print_error("AVI audio read frame");
		return(-1);
	    }

	    if(AVI_write_audio(avifile2, data, bytes) < 0) {
		AVI_print_error("AVI write audio frame");
		return(-1);
	    }

	    fprintf(status_fd, "V [%05d] | A [%05d] [%05ld]\r", n, n+shift, bytes);

	    if(n>=frames-2*shift) {

		// save audio frame for later
		ptr = buffer_register(n);

		if(ptr==NULL) {
		    fprintf(stderr,"buffer allocation failed\n");
		    break;
		}

		ac_memcpy(ptr->data, data, bytes);
		ptr->size = bytes;
		ptr->status = BUFFER_READY;
	    }
	} while (AVI_can_read_audio(avifile1));
	}
      }

      // padding at the end
      if(n>=frames-shift) {

	if (!ptrlen) {
	    ptr = buffer_retrieve();
	    ac_memcpy (ptrdata, ptr->data, ptr->size);
	    ptrlen = ptr->size;
	}

	if (tc_format_ms_supported(format)) {

	    while (aud_ms[track_num] < vid_ms + shift_ms) {

		aud_bitrate = (format==0x1||format==0x2000)?1:0;

		// mute this -- check if can mute (valid A header)!
		if (tc_probe_audio_header(ptrdata, ptrlen) > 0)
		    tc_format_mute(ptrdata, ptrlen, format);

		if(AVI_write_audio(avifile2, ptrdata, ptrlen) < 0) {
		    AVI_print_error("AVI write audio frame");
		    return(-1);
		}

		fprintf(status_fd, " V [%05d][%08.2f] | A [%05d][%08.2f] [%05ld]\r", n, vid_ms, n+shift, aud_ms[track_num], bytes);

		if ( !aud_bitrate && tc_get_audio_header(ptrdata, ptrlen, format, NULL, NULL, &aud_bitrate)<0) {
		    //if (n == frames-1) continue;
		    aud_ms[track_num] = vid_ms + shift_ms;
		} else
		    aud_ms[track_num] += (ptrlen*8.0)/(format==0x1?((double)(rate*chan*bits)/1000.0):
				       (format==0x2000?(double)(mp3rate):aud_bitrate));
	    }

	} else { // fallback

	// get next audio frame
	ptr = buffer_retrieve();

	while (1) {
	    printf("ptr->id (%d) ptr->size (%d)\n", ptr->id, ptr->size);

	    if(ptr==NULL) {
		fprintf(stderr,"no buffer found\n");
		break;
	    }

	    if (encode_null) {
		if(AVI_write_audio(avifile2, nulls, nullbytes)<0) {
		    AVI_print_error("AVI write audio frame");
		    return(-1);
		}
	    } else {
		// simple keep old frames to force exact time delay
		if(AVI_write_audio(avifile2, ptr->data, ptr->size)<0) {
		    AVI_print_error("AVI write audio frame");
		    return(-1);
		}
	    }

	    fprintf(status_fd, "V [%05d] | padding\r", n);

	    if (ptr->next && ptr->next->id == ptr->id) {
		buffer_remove(ptr);
		ptr = buffer_retrieve();
		continue;
	    }

	    buffer_remove(ptr);
	    break;
	}  // 1
      }
      }


// *************************************
// negative shift (pad audio at start)
// *************************************

    } else {

      if (tc_format_ms_supported(format)) {
	/*
	fprintf(status_fd, "n(%d) -shift(%d) shift_ms (%.2lf) vid_ms(%.2lf) aud_ms[%d](%.2lf) v-s(%.2lf)\n",
	    n, -shift, shift_ms, vid_ms, track_num, aud_ms[track_num], vid_ms + shift_ms);
	    */

	// shift<0 -> shift_ms<0 !
	while (aud_ms[track_num] < vid_ms) {
	    /*
	  fprintf(stderr, " 1 (%02d) %s frame_read len=%4ld (A/V) (%8.2f/%8.2f)\n",
	      n, format==0x55?"MP3":"AC3", bytes, aud_ms[track_num], vid_ms);
	      */

	  aud_bitrate = (format==0x1||format==0x2000)?1:0;

	  if( (bytes = AVI_read_audio_chunk(avifile1, data)) < 0) {
	    AVI_print_error("AVI 2 audio read frame");
	    aud_ms[track_num] = vid_ms;
	    break;
	    //return(-1);
	  }

	  // save audio frame for later
	  ptr = buffer_register(n);

	  if(ptr==NULL) {
	    fprintf(stderr,"buffer allocation failed\n");
	    break;
	  }

	  ac_memcpy(ptr->data, data, bytes);
	  ptr->size = bytes;
	  ptr->status = BUFFER_READY;

	  if(n<-shift) {

	    // mute this -- check if can mute!
	    if (tc_probe_audio_header(data, bytes) > 0)
		tc_format_mute(data, bytes, format);

	    // simple keep old frames to force exact time delay
	    if(AVI_write_audio(avifile2, data, bytes)<0) {
	      AVI_print_error("AVI write audio frame");
	      return(-1);
	    }

	    fprintf(status_fd, "V [%05d] | padding\r", n);

	  } else {
	    if (n==-shift)
		fprintf(status_fd, "\n");

	    // get next audio frame
	    ptr = buffer_retrieve();

	    if(ptr==NULL) {
	      fprintf(stderr,"no buffer found\n");
	      break;
	    }

	    if(AVI_write_audio(avifile2, ptr->data, ptr->size)<0) {
	      AVI_print_error("AVI write audio frame");
	      return(-1);
	    }
	    bytes = ptr->size;
	    ac_memcpy (data, ptr->data, bytes);

	    fprintf(status_fd, "V [%05d] | A [%05d]\r", n, ptr->id);

	    buffer_remove(ptr);
	  }

	  if ( !aud_bitrate && tc_get_audio_header(data, bytes, format, NULL, NULL, &aud_bitrate)<0) {
	    if (n == frames-1) continue;
	    aud_ms[track_num] = vid_ms;
	  } else
	    aud_ms[track_num] += (bytes*8.0)/(format==0x1?((double)(rate*chan*bits)/1000.0):
				       (format==0x2000?(double)(mp3rate):aud_bitrate));

	  /*
	  fprintf(stderr, " 1 (%02d) %s frame_read len=%4ld (A/V) (%8.2f/%8.2f)\n",
	      n, format==0x55?"MP3":"AC3", bytes, aud_ms[track_num], vid_ms);
	      */

	}

      } else { // no supported format

      bytes = AVI_audio_size(avifile1, n);


      if(bytes > SIZE_RGB_FRAME) {
	fprintf(stderr, "invalid frame size\n");
	return(-1);
      }

      if(AVI_read_audio(avifile1, data, bytes) < 0) {
	AVI_print_error("AVI audio read frame");
	return(-1);
      }

      // save audio frame for later
      ptr = buffer_register(n);

      if(ptr==NULL) {
	fprintf(stderr,"buffer allocation failed\n");
	break;
      }

      ac_memcpy(ptr->data, data, bytes);
      ptr->size = bytes;
      ptr->status = BUFFER_READY;


      if(n<-shift) {

	if (encode_null) {
	    if(AVI_write_audio(avifile2, nulls, nullbytes)<0) {
		AVI_print_error("AVI write audio frame");
		return(-1);
	    }
	} else {
	// simple keep old frames to force exact time delay
	    if(AVI_write_audio(avifile2, data, bytes)<0) {
		AVI_print_error("AVI write audio frame");
		return(-1);
	    }
	}

	fprintf(status_fd, "V [%05d] | padding\r", n);

      } else {

	// get next audio frame
	ptr = buffer_retrieve();

	if(ptr==NULL) {
	  fprintf(stderr,"no buffer found\n");
	  break;
	}

	if(AVI_write_audio(avifile2, ptr->data, ptr->size)<0) {
	  AVI_print_error("AVI write audio frame");
	  return(-1);
	}

	fprintf(status_fd, "V [%05d] | A [%05d]\r", n, ptr->id);

	buffer_remove(ptr);
      }
    }
    }
  }

  fprintf(status_fd, "\n");

  if (be_quiet) {
    fclose(status_fd);
  }

  AVI_close(avifile1);
  AVI_close(avifile2);

  if (avifile3) {
      memset(nulls, 0, sizeof(nulls));
      tc_snprintf(nulls, sizeof(nulls), "rm -f %s", tmp0);
      system(nulls);
      AVI_close(avifile3);
  }

  return(0);
}
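
A self-contained sketch of the millisecond bookkeeping the main() above depends on: a shift of N video frames equals N*1000/fps ms, and an uncompressed PCM chunk (format 0x1) advances the audio clock by bytes*8/(rate*channels*bits/1000) ms, the same formula used in the loops above. The frame rate, sample rate, channel count, and shift below are illustrative assumptions.

#include <stdio.h>

/* milliseconds of audio covered by one PCM chunk (format 0x1) */
static double pcm_chunk_ms(long bytes, long rate, int channels, int bits)
{
    return (bytes * 8.0) / ((double)(rate * channels * bits) / 1000.0);
}

int main(void)
{
    double fps   = 25.0;      /* assumed video frame rate        */
    int    shift = 5;         /* assumed -n shift in frames      */
    long   rate  = 48000;     /* assumed audio sample rate       */
    int    chan  = 2, bits = 16;
    long   chunk = 192000;    /* 1 s of 48 kHz stereo 16-bit PCM */

    double shift_ms = shift * 1000.0 / fps;  /* same formula as shift_ms above */

    printf("a shift of %d frames is %.1f ms of audio\n", shift, shift_ms);
    printf("a %ld byte PCM chunk advances the audio clock by %.1f ms\n",
           chunk, pcm_chunk_ms(chunk, rate, chan, bits));
    return 0;
}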