Example #1
/* run in a thread (SDL overlay)*/
void *main_loop(void *data)
{
    struct ALL_DATA *all_data = (struct ALL_DATA *) data;

    struct VidState *s = all_data->s;
    struct paRecordData *pdata = all_data->pdata;
    struct GLOBAL *global = all_data->global;
    struct focusData *AFdata = all_data->AFdata;
    struct vdIn *videoIn = all_data->videoIn;

    struct particle* particles = NULL; //for the particles video effect

    SDL_Event event;
    /*the main SDL surface*/
    SDL_Surface *pscreen = NULL;
    SDL_Overlay *overlay = NULL;
    SDL_Rect drect;

    int width = global->width;
    int height = global->height;
    int format = global->format;

    SAMPLE vuPeak[2];  // The maximum vuLevel seen recently
    int vuPeakFreeze[2]; // The vuPeak values will be frozen for this many frames.
    vuPeak[0] = vuPeak[1] = 0;
    vuPeakFreeze[0] = vuPeakFreeze[1] = 0;

    BYTE *p = NULL;

    Control *focus_control = NULL;
    int last_focus = 0;

    if (global->AFcontrol)
    {
        focus_control = get_ctrl_by_id(s->control_list, AFdata->id);
        get_ctrl(videoIn->fd, s->control_list, AFdata->id, all_data);
        last_focus = focus_control->value;
        /*make sure we wait for focus to settle on first check*/
        if (last_focus < 0) last_focus = AFdata->f_max;
    }

    gboolean capVid = FALSE;
    gboolean signalquit = FALSE;

    /*------------------------------ SDL init video ---------------------*/
    if(!global->no_display)
    {
        overlay = video_init(data, &(pscreen));

        if(overlay == NULL)
        {
            g_print("FATAL: Couldn't create yuv overlay - please disable hardware accelaration\n");
            signalquit = TRUE; /*exit video thread*/
        }
        else
        {
            p = (unsigned char *) overlay->pixels[0];

            drect.x = 0;
            drect.y = 0;
            drect.w = pscreen->w;
            drect.h = pscreen->h;
        }
    }

    while (!signalquit)
    {
        __LOCK_MUTEX(__VMUTEX);
            capVid = videoIn->capVid;
            signalquit = videoIn->signalquit;
        __UNLOCK_MUTEX(__VMUTEX);

        /*-------------------------- Grab Frame ----------------------------------*/
        if (uvcGrab(videoIn, format, width, height, &global->fps, &global->fps_num) < 0)
        {
            g_printerr("Error grabbing image \n");
            continue;
        }
        else
        {
            if(!videoIn->timestamp)
            {
                global->skip_n++; //skip this frame
            }

            if(capVid)
            {
                if(global->framecount < 1)
                {
                    /*reset video start time to first frame capture time */
                    global->Vidstarttime = videoIn->timestamp;
                    /** set current time for audio ts(0) reference (MONOTONIC)
                     *  only used if we have no audio capture before video
                     */
                    __LOCK_MUTEX(__AMUTEX);
                        pdata->ts_ref = ns_time_monotonic();
                    __UNLOCK_MUTEX(__AMUTEX);
                    //printf("video ts ref: %llu audio ts_ ref: %llu\n",global->Vidstarttime, pdata->ts_ref);
                    global->v_ts = 0;
                }
                else
                {
                    global->v_ts = videoIn->timestamp - global->Vidstarttime;
                    /*always use the last frame time stamp for video stop time*/
                    global->Vidstoptime = videoIn->timestamp;
                }
            }

            if (global->FpsCount && !global->no_display)
            {/* sets fps count in window title bar */
                global->frmCount++;
                if (global->DispFps>0)
                { /*set every 2 sec*/
                    g_snprintf(global->WVcaption,24,"GUVCVideo - %3.2f fps",global->DispFps);
                    SDL_WM_SetCaption(global->WVcaption, NULL);

                    global->frmCount=0;/*resets*/
                    global->DispFps=0;
                }
            }

            /*---------------- autofocus control ------------------*/

            if (global->AFcontrol && (global->autofocus || AFdata->setFocus))
            { /*AFdata = NULL if no focus control*/
                if (AFdata->focus < 0)
                {
                    /*starting autofocus*/
                    AFdata->focus = AFdata->left; /*start left*/
                    focus_control->value = AFdata->focus;
                    if (set_ctrl (videoIn->fd, s->control_list, AFdata->id) != 0)
                        g_printerr("ERROR: couldn't set focus to %d\n", AFdata->focus);
                    /*number of frames until focus is stable*/
                    /*1.4 ms focus time - every 1 step*/
                    AFdata->focus_wait = (int) abs(AFdata->focus-last_focus)*1.4/(1000/global->fps)+1;
                    last_focus = AFdata->focus;
                }
                else
                {
                    if (AFdata->focus_wait == 0)
                    {
                        AFdata->sharpness=getSharpness (videoIn->framebuffer, width, height, 5);
                        if (global->debug)
                            g_print("sharp=%d focus_sharp=%d foc=%d right=%d left=%d ind=%d flag=%d\n",
                                AFdata->sharpness,AFdata->focus_sharpness,
                                AFdata->focus, AFdata->right, AFdata->left,
                                AFdata->ind, AFdata->flag);
                        AFdata->focus=getFocusVal (AFdata);
                        if ((AFdata->focus != last_focus))
                        {
                            focus_control->value = AFdata->focus;
                            if (set_ctrl (videoIn->fd, s->control_list, AFdata->id) != 0)
                                g_printerr("ERROR: couldn't set focus to %d\n",
                                    AFdata->focus);
                            /*number of frames until focus is stable*/
                            /*1.4 ms focus time - every 1 step*/
                            AFdata->focus_wait = (int) abs(AFdata->focus-last_focus)*1.4/(1000/global->fps)+1;
                        }
                        last_focus = AFdata->focus;
                    }
                    else
                    {
                        AFdata->focus_wait--;
                        if (global->debug) g_print("Wait Frame: %d\n",AFdata->focus_wait);
                    }
                }
            }
        }
        /*------------------------- Filter Frame ---------------------------------*/
        __LOCK_MUTEX(__GMUTEX);
        if(global->Frame_Flags>0)
        {
            if((global->Frame_Flags & YUV_PARTICLES)==YUV_PARTICLES)
                particles = particles_effect(videoIn->framebuffer, width, height, 20, 4, particles);

            if((global->Frame_Flags & YUV_MIRROR)==YUV_MIRROR)
                yuyv_mirror(videoIn->framebuffer, width, height);

            if((global->Frame_Flags & YUV_UPTURN)==YUV_UPTURN)
                yuyv_upturn(videoIn->framebuffer, width, height);

            if((global->Frame_Flags & YUV_NEGATE)==YUV_NEGATE)
                yuyv_negative (videoIn->framebuffer, width, height);

            if((global->Frame_Flags & YUV_MONOCR)==YUV_MONOCR)
                yuyv_monochrome (videoIn->framebuffer, width, height);

            if((global->Frame_Flags & YUV_PIECES)==YUV_PIECES)
                pieces (videoIn->framebuffer, width, height, 16 );

        }
        __UNLOCK_MUTEX(__GMUTEX);
        /*-------------------------capture Image----------------------------------*/
        if (videoIn->capImage)
        {
            /*
             * format and resolution can change(enabled) while capturing the frame
             * but you would need to be speedy gonzalez to press two buttons
             * at almost the same time :D
             */
            int ret = 0;
            if((ret=store_picture(all_data)) < 0)
                g_printerr("saved image to:%s ...Failed \n",videoIn->ImageFName);
            else if (!ret && global->debug) g_print("saved image to:%s ...OK \n",videoIn->ImageFName);

            videoIn->capImage=FALSE;
        }
        /*---------------------------capture Video---------------------------------*/
        if (capVid && !(global->skip_n))
        {
            __LOCK_MUTEX(__VMUTEX);
                if(videoIn->VidCapStop) videoIn->VidCapStop = FALSE;
            __UNLOCK_MUTEX(__VMUTEX);
            int res=0;

            /* format and resolution don't change(disabled) while capturing video
             * store_video_frame may sleep if needed to avoid buffer overrun
             */
            if((res=store_video_frame(all_data))<0) g_printerr("WARNING: dropped frame (%i)\n",res);

        } /*video and audio capture have stopped */
        else
        {
            __LOCK_MUTEX(__VMUTEX);
                if(!(videoIn->VidCapStop)) videoIn->VidCapStop=TRUE;
            __UNLOCK_MUTEX(__VMUTEX);
        }

        /* decrease skip frame count */
        if (global->skip_n > 0)
        {
            if (global->debug && capVid) g_print("skiping frame %d...\n", global->skip_n);
            global->skip_n--;
        }

        __LOCK_MUTEX( __AMUTEX );
            if (global->Sound_enable && capVid) pdata->skip_n = global->skip_n;
        __UNLOCK_MUTEX( __AMUTEX );

        /*------------------------- Display Frame --------------------------------*/
        if(!global->no_display)
        {
            if (global->osdFlags && pdata->audio_buff[0])
            {
                draw_vu_meter(width, height, vuPeak, vuPeakFreeze, data);
            }
            SDL_LockYUVOverlay(overlay);
            memcpy(p, videoIn->framebuffer, width * height * 2);
            SDL_UnlockYUVOverlay(overlay);
            SDL_DisplayYUVOverlay(overlay, &drect);

            /*------------------------- Read Key events ------------------------------*/
            /* Poll for events */
            while( SDL_PollEvent(&event) )
            {
                //printf("event type:%i  event key:%i\n", event.type, event.key.keysym.scancode);
                if(event.type==SDL_KEYDOWN)
                {
                    if (videoIn->PanTilt)
                    {
                        switch( event.key.keysym.sym )
                        {
                            /* Keyboard event */
                            /* Pass the event data onto PrintKeyInfo() */
                            case SDLK_DOWN:
                                /*Tilt Down*/
                                uvcPanTilt (videoIn->fd, s->control_list, 0, 1);
                                break;

                            case SDLK_UP:
                                /*Tilt UP*/
                                uvcPanTilt (videoIn->fd, s->control_list, 0, -1);
                                break;

                            case SDLK_LEFT:
                                /*Pan Left*/
                                uvcPanTilt (videoIn->fd, s->control_list, 1, 1);
                                break;

                            case SDLK_RIGHT:
                                /*Pan Right*/
                                uvcPanTilt (videoIn->fd, s->control_list, 1, -1);
                                break;
                            default:
                                break;
                        }
                    }
                    switch( event.key.keysym.scancode )
                    {
                        case 220: /*webcam button*/
                            //gdk_threads_enter();
                            if (all_data->global->default_action == 0)
                                g_main_context_invoke(NULL, image_capture_callback, (gpointer) all_data);
                            else
                                g_main_context_invoke(NULL, video_capture_callback, (gpointer) all_data);
                            break;
                    }
                    switch( event.key.keysym.sym )
                    {
                        case SDLK_q:
                            //shutDown
                            g_timeout_add(200, shutd_timer, all_data);
                            g_print("q pressed - Quiting...\n");
                            break;
                        case SDLK_SPACE:
                            if(global->AFcontrol > 0)
                                setfocus_clicked(NULL, all_data);
                            break;
                        case SDLK_i:
                            g_main_context_invoke(NULL, image_capture_callback, (gpointer) all_data);
                            break;
                        case SDLK_v:
                            g_main_context_invoke(NULL, video_capture_callback, (gpointer) all_data);
                            break;
                        default:
                            break;
                    }
                }
                if(event.type==SDL_VIDEORESIZE)
                {
                    pscreen =
                        SDL_SetVideoMode(event.resize.w,
                                 event.resize.h,
                                 global->bpp,
                                 SDL_VIDEO_Flags);
                    drect.w = event.resize.w;
                    drect.h = event.resize.h;
                }
                if(event.type==SDL_QUIT)
                {
                    //shutDown
                    g_timeout_add(200, shutd_timer, all_data);
                }
            }
        }
        /* if set make the thread sleep - default no sleep (full throttle)*/
        if(global->vid_sleep) sleep_ms(global->vid_sleep);

        /*------------------------------------------*/
        /*  restart video (new resolution/format)   */
        /*------------------------------------------*/
        if (global->change_res)
        {
            g_print("setting new resolution (%d x %d)\n", global->width, global->height);
            /*clean up */

            if(particles) g_free(particles);
            particles = NULL;

            if (global->debug) g_print("cleaning buffer allocations\n");
            fflush(NULL);//flush all output buffers

            if(!global->no_display)
            {
                SDL_FreeYUVOverlay(overlay);
                overlay = NULL;
            }
            /*init device*/
            restart_v4l2(videoIn, global);
            /*set new resolution for video thread*/
            width = global->width;
            height = global->height;
            format = global->format;
            /* restart SDL with new values*/
            if(!global->no_display)
            {
                overlay = video_init(data, &(pscreen));
                if(overlay == NULL)
                {
                    g_print("FATAL: Couldn't create yuv overlay - please disable hardware accelaration\n");
                    signalquit = TRUE; /*exit video thread*/
                }
                else
                {
                    if (global->debug) g_print("yuv overlay created (%ix%i).\n", overlay->w, overlay->h);
                    p = (unsigned char *) overlay->pixels[0];

                    drect.x = 0;
                    drect.y = 0;
                    drect.w = pscreen->w;
                    drect.h = pscreen->h;

                    global->change_res = FALSE;
                }
            }
            else global->change_res = FALSE;
        }

    }/*loop end*/

    __LOCK_MUTEX(__VMUTEX);
        capVid = videoIn->capVid;
    __UNLOCK_MUTEX(__VMUTEX);
    /*check if thread exited while in Video capture mode*/
    if (capVid)
    {
        /*stop capture*/
        if (global->debug) g_print("stoping Video capture\n");
        //global->Vidstoptime = ns_time_monotonic(); /*this is set in IO thread*/
        videoIn->VidCapStop=TRUE;
        capVid = FALSE;
        __LOCK_MUTEX(__VMUTEX);
            videoIn->capVid = capVid;
        __UNLOCK_MUTEX(__VMUTEX);
        __LOCK_MUTEX(__AMUTEX);
            pdata->capVid = capVid;
        __UNLOCK_MUTEX(__AMUTEX);
        /*join IO thread*/
        if (global->debug) g_print("Shuting Down IO Thread\n");
        __THREAD_JOIN( all_data->IO_thread );
        if (global->debug) g_print("IO Thread finished\n");
    }

    if (global->debug) g_print("Thread terminated...\n");
    p = NULL;
    if(particles) g_free(particles);
    particles=NULL;

    if (global->debug) g_print("cleaning Thread allocations: 100%%\n");
    fflush(NULL);//flush all output buffers

    if(!global->no_display)
    {
        if(overlay)
            SDL_FreeYUVOverlay(overlay);
        //SDL_FreeSurface(pscreen);

        SDL_Quit();
    }

    if (global->debug) g_print("Video thread completed\n");

    global = NULL;
    AFdata = NULL;
    videoIn = NULL;
    return ((void *) 0);
}
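
The loop above runs until videoIn->signalquit is set, so it is meant to live on its own thread. A minimal launcher sketch, assuming pthreads and the same ALL_DATA/mutex macros as above (start_video_thread and stop_video_thread are illustrative names, not part of the source):

#include <pthread.h>

static pthread_t video_thread;

int start_video_thread(struct ALL_DATA *all_data)
{
    /* main_loop() polls videoIn->signalquit under __VMUTEX on every iteration */
    return pthread_create(&video_thread, NULL, main_loop, (void *) all_data);
}

void stop_video_thread(struct ALL_DATA *all_data)
{
    /* request termination, then wait for the loop to finish its cleanup */
    __LOCK_MUTEX(__VMUTEX);
        all_data->videoIn->signalquit = TRUE;
    __UNLOCK_MUTEX(__VMUTEX);
    __THREAD_JOIN(video_thread);
}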
Example #2
/*
 * audio record callback
 * args:
 *   s - pointer to pa_stream
 *   length - buffer length
 *   data - pointer to user data
 *
 * asserts:
 *   none
 *
 * returns: none
 */
static void stream_request_cb(pa_stream *s, size_t length, void *data)
{

    audio_context_t *audio_ctx = (audio_context_t *) data;

	if(audio_ctx->channels == 0)
	{
		fprintf(stderr, "AUDIO: (pulseaudio) stream_request_cb failed: channels = 0\n");
		return;
	}
	
	if(audio_ctx->samprate == 0)
	{
		fprintf(stderr, "AUDIO: (pulseaudio) stream_request_cb failed: samprate = 0\n");
		return;
	}
	
	uint64_t frame_length = NSEC_PER_SEC / audio_ctx->samprate; /*in nanosec*/
	int64_t ts = 0;
	int64_t buff_ts = 0;
	uint32_t i = 0;

	while (pa_stream_readable_size(s) > 0)
	{
		const void *inputBuffer;
		size_t length;

		/*read from stream*/
		if (pa_stream_peek(s, &inputBuffer, &length) < 0)
		{
			fprintf(stderr, "AUDIO: (pulseaudio) pa_stream_peek() failed\n");
			return;
		}

		if(length == 0)
		{
			fprintf(stderr, "AUDIO: (pulseaudio) empty buffer!\n");
			return; /*buffer is empty*/
		}

		get_latency(s);

		ts = ns_time_monotonic() - (latency * 1000);

		if(audio_ctx->last_ts <= 0)
			audio_ctx->last_ts = ts;


		uint32_t numSamples = (uint32_t) length / sizeof(sample_t);

		const sample_t *rptr = (const sample_t*) inputBuffer;
		sample_t *capture_buff = (sample_t *) audio_ctx->capture_buff;

		int chan = 0;
		/*store capture samples or silence if inputBuffer == NULL (hole)*/
		for( i = 0; i < numSamples; ++i )
		{
			capture_buff[sample_index] = inputBuffer ? *rptr++ : 0;

			/*store peak value (check the sample just written,
			 * before advancing the write index)*/
			if(audio_ctx->capture_buff_level[chan] < capture_buff[sample_index])
				audio_ctx->capture_buff_level[chan] = capture_buff[sample_index];

			sample_index++;
			chan++;
			if(chan >= audio_ctx->channels)
				chan = 0;

			if(sample_index >= audio_ctx->capture_buff_size)
			{
				buff_ts = ts + ( i / audio_ctx->channels ) * frame_length;

				audio_fill_buffer(audio_ctx, buff_ts);

				/*reset*/
				audio_ctx->capture_buff_level[0] = 0;
				audio_ctx->capture_buff_level[1] = 0;
				sample_index = 0;
			}
		}

		pa_stream_drop(s); /*clean the samples*/
	}

}
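
For context, a callback with this signature is hooked up through pa_stream_set_read_callback(). A minimal registration sketch (the stream creation and connection code is omitted; register_capture_callback is an illustrative name):

static void register_capture_callback(pa_stream *stream, audio_context_t *audio_ctx)
{
	/* PulseAudio invokes stream_request_cb whenever captured samples
	 * become readable on the stream */
	pa_stream_set_read_callback(stream, stream_request_cb, (void *) audio_ctx);
}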
Example #3
int
init_sound(struct paRecordData* pdata)
{
	int err = paNoError;
	int i = 0;
	int j = 0;

	/*alloc audio ring buffers*/
    if(!(pdata->audio_buff[0]))
    {
		for(j=0; j< AUDBUFF_NUM; j++)
		{
			pdata->audio_buff[j] = g_new0(AudBuff, AUDBUFF_SIZE);
			for(i=0; i<AUDBUFF_SIZE; i++)
			{
				pdata->audio_buff[j][i].frame = g_new0(SAMPLE, pdata->aud_numSamples);
				pdata->audio_buff[j][i].used = FALSE;
				pdata->audio_buff[j][i].time_stamp = 0;
			}
			pdata->audio_buff_flag[j] = AUD_READY;
		}
	}

	/*alloc the callback buffer*/
	pdata->recordedSamples = g_new0(SAMPLE, pdata->aud_numSamples);

	switch(pdata->api)
	{
#ifdef PULSEAUDIO
		case PULSE:
			err = pulse_init_audio(pdata);
			if(err)
				goto error;
			break;
#endif
		case PORT:
		default:
			err = port_init_audio(pdata);
			if(err)
				goto error;
			break;
	}

	/*sound start time - used to sync with video*/
	pdata->snd_begintime = ns_time_monotonic();

	return (0);

error:
	pdata->streaming=FALSE;
	pdata->flush=0;
	pdata->delay=0;

	if(pdata->recordedSamples) g_free( pdata->recordedSamples );
	pdata->recordedSamples=NULL;
	if(pdata->audio_buff)
	{

		for(j=0; j< AUDBUFF_NUM; j++)
		{
			for(i=0; i<AUDBUFF_SIZE; i++)
			{
				g_free(pdata->audio_buff[j][i].frame);
			}
			g_free(pdata->audio_buff[j]);
			pdata->audio_buff[j] = NULL;
		}
	}
	/*lavc is always checked and cleaned up when the worker thread finishes*/
	return(-1);
}
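
A hypothetical call site: init_sound() frees its partial allocations before returning -1, so the caller only needs to check the return value (the error handling shown here is illustrative, not from the source):

	if (init_sound(pdata) < 0)
	{
		g_printerr("AUDIO: init_sound failed - disabling sound capture\n");
		global->Sound_enable = 0; /* same flag the video loop checks */
	}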
Example #4
/*
 * start portaudio stream capture
 * args:
 *   audio_ctx - pointer to audio context data
 *
 * asserts:
 *   audio_ctx is not null
 *
 * returns: error code
 */
int audio_start_portaudio(audio_context_t *audio_ctx)
{
	/*assertions*/
	assert(audio_ctx != NULL);

	PaError err = paNoError;
	PaStream *stream = (PaStream *) audio_ctx->stream;

	if(stream)
	{
		if( !(Pa_IsStreamStopped( stream )))
		{
			Pa_AbortStream( stream );
			Pa_CloseStream( stream );
			audio_ctx->stream = NULL;
			stream = audio_ctx->stream;
		}
	}

	PaStreamParameters inputParameters;

	inputParameters.device = audio_ctx->list_devices[audio_ctx->device].id;
	inputParameters.channelCount = audio_ctx->channels;
	inputParameters.sampleFormat = paFloat32; /*sample_t - float*/

	if (Pa_GetDeviceInfo( inputParameters.device ))
	{
		inputParameters.suggestedLatency = Pa_GetDeviceInfo( inputParameters.device )->defaultLowInputLatency;
		//inputParameters.suggestedLatency = Pa_GetDeviceInfo( inputParameters.device )->defaultHighInputLatency;
	}
	else
		inputParameters.suggestedLatency = DEFAULT_LATENCY_DURATION/1000.0;
	inputParameters.hostApiSpecificStreamInfo = NULL;

	/*---------------------------- start recording Audio. ----------------------------- */
	audio_ctx->snd_begintime = ns_time_monotonic();

	audio_ctx->stream_flag = AUDIO_STRM_ON;

	err = Pa_OpenStream(
		&stream,                     /* stream */
		&inputParameters,            /* inputParameters    */
		NULL,                        /* outputParameters   */
		audio_ctx->samprate,         /* sample rate        */
		paFramesPerBufferUnspecified,/* buffer in frames (use API optimal)*/
		paNoFlag,                    /* paNoFlag - default clipping and dithering */
		recordCallback,              /* sound callback     */
		audio_ctx );                 /* callback userData  */

	if( err == paNoError )
	{
		err = Pa_StartStream( stream );
		audio_ctx->stream = (void *) stream; /* store stream pointer*/
	}

	if( err != paNoError )
	{
		fprintf(stderr, "AUDIO: An error occured while starting the portaudio API\n" );
		fprintf(stderr, "       Error number: %d\n", err );
		fprintf(stderr, "       Error message: %s\n", Pa_GetErrorText( err ) );

		if(stream) Pa_AbortStream( stream );
		audio_ctx->stream_flag = AUDIO_STRM_OFF;

		return(-1);
	}

	const PaStreamInfo* stream_info = Pa_GetStreamInfo (stream);
	if(verbosity > 1)
		printf("AUDIO: latency of %8.3f msec\n", 1000 * stream_info->inputLatency);

	return 0;
}
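
A matching stop routine is not shown here; a sketch using the same PortAudio calls (Pa_StopStream() waits for pending buffers, Pa_CloseStream() releases the stream; audio_stop_portaudio is an illustrative name):

int audio_stop_portaudio(audio_context_t *audio_ctx)
{
	PaError err = paNoError;
	PaStream *stream = (PaStream *) audio_ctx->stream;

	if(stream)
	{
		if(!Pa_IsStreamStopped( stream ))
			err = Pa_StopStream( stream ); /* drains buffers still playing/recording */
		Pa_CloseStream( stream );
		audio_ctx->stream = NULL;
	}

	audio_ctx->stream_flag = AUDIO_STRM_OFF;
	return (err == paNoError) ? 0 : -1;
}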
Example #5
/*--------------------------- sound callback ------------------------------*/
int
record_sound ( const void *inputBuffer, unsigned long numSamples, void *userData )
{
	struct paRecordData *pdata = (struct paRecordData*)userData;

	__LOCK_MUTEX( __AMUTEX );
		gboolean capVid = pdata->capVid;
		int channels = pdata->channels;
		int skip_n = pdata->skip_n;
	__UNLOCK_MUTEX( __AMUTEX );

	const SAMPLE *rptr = (const SAMPLE*) inputBuffer;
	int i;

	UINT64 numFrames = numSamples / channels;
	/* buffer ends at timestamp "now", calculate beginning timestamp */
	UINT64 nsec_per_frame = G_NSEC_PER_SEC / pdata->samprate;
	UINT64 ts = ns_time_monotonic() - numFrames * nsec_per_frame;

	if (skip_n > 0) /*skip audio while we're skipping video frames*/
	{
		if(capVid)
		{
			__LOCK_MUTEX( __AMUTEX );
				pdata->snd_begintime = ns_time_monotonic(); /*reset first time stamp*/
			__UNLOCK_MUTEX( __AMUTEX );
			return (0); /*still capturing*/
		}
		else
		{
			__LOCK_MUTEX( __AMUTEX );
				pdata->streaming=FALSE;
			__UNLOCK_MUTEX( __AMUTEX );
			return (-1); /*capture has stopped*/
		}
	}

	// __LOCK_MUTEX( __AMUTEX );
        // pdata->streaming=TRUE;
    // __UNLOCK_MUTEX( __AMUTEX );

    for( i=0; i<numSamples; i++ )
    {
        pdata->recordedSamples[pdata->sampleIndex] = inputBuffer ? *rptr++ : 0;
        pdata->sampleIndex++;

        fill_audio_buffer(pdata, ts);

        /* increment timestamp accordingly while copying */
        if (i % channels == 0)
            ts += nsec_per_frame;
    }


    if(capVid) return (0); /*still capturing*/
    else
    {
        __LOCK_MUTEX( __AMUTEX );
            pdata->streaming=FALSE;
            /* mark current buffer as ready to process */
            pdata->audio_buff_flag[pdata->bw_ind] = AUD_PROCESS;
        __UNLOCK_MUTEX( __AMUTEX );
    }

	return(-1); /* audio capture stopped*/
}
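
record_sound() takes a raw interleaved sample count, while a PortAudio callback receives a frame count. A thin adapter of the following shape bridges the two (a sketch only; the name recordCallback echoes example #4, though that example uses a different context struct):

static int recordCallback(const void *inputBuffer, void *outputBuffer,
	unsigned long framesPerBuffer,
	const PaStreamCallbackTimeInfo *timeInfo,
	PaStreamCallbackFlags statusFlags,
	void *userData)
{
	struct paRecordData *pdata = (struct paRecordData *) userData;

	/* frames * channels = interleaved samples (unlocked read; sketch only) */
	int ret = record_sound(inputBuffer, framesPerBuffer * pdata->channels, userData);

	/* record_sound() returns -1 once capture has stopped */
	return (ret < 0) ? paComplete : paContinue;
}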