Example #1
Bool validator_on_event_play(void *udta, GF_Event *event, Bool consumed_by_compositor)
{
	GF_Validator *validator = (GF_Validator *)udta;
	switch (event->type) {
	case GF_EVENT_CONNECT:
		if (event->connect.is_connected) {
			gf_sc_add_video_listener(validator->term->compositor, &validator->video_listener);
			validator->ck = validator->term->root_scene->scene_codec ?
			                validator->term->root_scene->scene_codec->ck :
			                validator->term->root_scene->dyn_ck;
		}
		break;
	case GF_EVENT_CLICK:
	case GF_EVENT_MOUSEUP:
	case GF_EVENT_MOUSEDOWN:
	case GF_EVENT_MOUSEOVER:
	case GF_EVENT_MOUSEOUT:
	case GF_EVENT_MOUSEMOVE:
	case GF_EVENT_MOUSEWHEEL:
	case GF_EVENT_KEYDOWN:
	case GF_EVENT_TEXTINPUT:
		return 1;
	case GF_EVENT_KEYUP:
		/* Ctrl+End quits playback via the video output */
		if ((event->key.key_code == GF_KEY_END) && (event->key.flags & GF_KEY_MOD_CTRL)) {
			GF_Event evt;
			memset(&evt, 0, sizeof(GF_Event));
			evt.type = GF_EVENT_QUIT;
			validator->term->compositor->video_out->on_event(validator->term->compositor->video_out->evt_cbk_hdl, &evt);
		}
		return 1;
	}
	return 0;
}
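Callbacks with this signature (also used by the recorder variant in Example #3) are plugged into the terminal as event filters. The sketch below only illustrates that wiring; it assumes the GF_TermEventFilter structure (udta + on_event) and the gf_term_add_event_filter() call from the same GPAC source tree, and my_on_event/install_event_filter are hypothetical names.

#include <string.h>
#include <gpac/terminal.h>

/* Hypothetical callback with the same signature as validator_on_event_play() */
static Bool my_on_event(void *udta, GF_Event *event, Bool consumed_by_compositor)
{
	/* return 1 to swallow the event, 0 to let the terminal keep processing it */
	return 0;
}

static GF_TermEventFilter my_filter;	/* assumed type, see note above */

static void install_event_filter(GF_Terminal *term, void *udta)
{
	memset(&my_filter, 0, sizeof(my_filter));
	my_filter.udta = udta;	/* passed back as the first callback argument */
	my_filter.on_event = my_on_event;
	gf_term_add_event_filter(term, &my_filter);
}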
Example #2
static void validator_test_open(GF_Validator *validator)
{
    char filename[GF_MAX_PATH];
    /* Build the test file path, optionally prefixed with the test base directory */
    if (validator->test_base)
        snprintf(filename, sizeof(filename), "%s%c%s", validator->test_base, GF_PATH_SEPARATOR, validator->test_filename);
    else
        snprintf(filename, sizeof(filename), "%s", validator->test_filename);
    gf_sc_add_video_listener(validator->term->compositor, &validator->video_listener);
    if (validator->is_recording)
        validator->snapshot_next_frame = 1;
    gf_term_connect(validator->term, filename);
    validator->ck = validator->term->root_scene->scene_codec ?
                    validator->term->root_scene->scene_codec->ck :
                    validator->term->root_scene->dyn_ck;
}
Example #3
Bool validator_on_event_record(void *udta, GF_Event *event, Bool consumed_by_compositor)
{
	GF_Validator *validator = (GF_Validator *)udta;
	Bool rec_event = 1;
	switch (event->type) {
	case GF_EVENT_CONNECT:
		if (event->connect.is_connected) {
			if (!validator->trace_mode) {
				gf_sc_add_video_listener(validator->term->compositor, &validator->video_listener);
			}
			validator->ck = validator->term->root_scene->scene_codec ? validator->term->root_scene->scene_codec->ck : validator->term->root_scene->dyn_ck;
		}
		break;
	case GF_EVENT_KEYDOWN:
		/* do not record key presses that are used as validator shortcuts */
		if (event->key.key_code == GF_KEY_INSERT) {
			rec_event = 0;
		} else if (event->key.key_code == GF_KEY_PAGEDOWN) {
			rec_event = 0;
		} else if (event->key.key_code == GF_KEY_PAGEUP) {
			rec_event = 0;
		} else if (event->key.key_code == GF_KEY_END) {
			rec_event = 0;
		} else if (event->key.key_code == GF_KEY_CONTROL) {
			rec_event = 0;
		} else if (event->key.flags & GF_KEY_MOD_CTRL) {
			rec_event = 0;
		}
		break;
	case GF_EVENT_KEYUP:
		if (event->key.flags & GF_KEY_MOD_CTRL) {
			/* Ctrl shortcuts drive the validator itself and are not recorded */
			rec_event = 0;
			if (event->key.key_code == GF_KEY_INSERT) {
				/* Ctrl+Insert: snapshot the current frame and record it at the current clock time */
				char *snap_name = validator_create_snapshot(validator);
				validator_xvs_add_snapshot_node(validator, snap_name, gf_clock_time(validator->ck));
				gf_free(snap_name);
			} else if (event->key.key_code == GF_KEY_END) {
				/* Ctrl+End: request termination through the video output */
				GF_Event evt;
				memset(&evt, 0, sizeof(GF_Event));
				evt.type = GF_EVENT_QUIT;
				validator->term->compositor->video_out->on_event(validator->term->compositor->video_out->evt_cbk_hdl, &evt);
			} else if (event->key.key_code == GF_KEY_F1) {
				/* Ctrl+F1: snapshot the next composed frame */
				validator->snapshot_next_frame = 1;
			}
		} else if (event->key.key_code == GF_KEY_PAGEDOWN) {
			/* PageDown: close the current test and move to the next one */
			rec_event = 0;
			validator_xvs_close(validator);
			gf_term_disconnect(validator->term);
			gf_sc_remove_video_listener(validator->term->compositor, &validator->video_listener);
			validator_xvs_next(validator, 0);
		} else if (event->key.key_code == GF_KEY_PAGEUP) {
			/* PageUp: close the current test and go back to the previous one */
			rec_event = 0;
			validator_xvs_close(validator);
			gf_term_disconnect(validator->term);
			gf_sc_remove_video_listener(validator->term->compositor, &validator->video_listener);
			validator_xvs_next(validator, 1);
		} else if (event->key.key_code == GF_KEY_CONTROL) {
			rec_event = 0;
		}
		break;
	}
	if (rec_event) {
		validator_xvs_add_event_dom(validator, event);
	}
	return 0;
}
Example #4
/**
 * This thread sends the composed video frames to the TS muxer.
 * \param param The GF_AVRedirect pointer
 */
static Bool video_encoding_thread_run(void *param)
{
    GF_AVRedirect * avr = (GF_AVRedirect*) param;
    u64 currentFrameTimeProcessed = 0;
    u32 lastEncodedFrameTime = 0;
    AVCodecContext * ctx = NULL;
    assert( avr );
    gf_sc_add_video_listener ( avr->term->compositor, &avr->video_listen );
    /* Wait until the TS muxer exposes its video codec context and the scaler is configured */
    while (avr->is_running && (!ctx || !avr->swsContext)) {
        ctx = ts_get_video_codec_context(avr->ts_implementation);
        gf_sleep(16);
    }
    if (!ctx) {
        goto exit;
    }
    printf("******* Video Codec Context = %d/%d, start="LLU"\n", ctx->time_base.num, ctx->time_base.den, ctx->timecode_frame_start);
    while (avr->is_running) {
        {
            gf_mx_p(avr->frameMutex);
            /* Wait until the compositor publishes a new frame time; sleep outside the lock */
            while (!avr->frameTime || currentFrameTimeProcessed == avr->frameTime) {
                gf_mx_v(avr->frameMutex);
                if (!avr->is_running) {
                    goto exit;
                }
                gf_sleep(1);
                gf_mx_p(avr->frameMutex);
            }
            assert( currentFrameTimeProcessed != avr->frameTime);
            currentFrameTimeProcessed = avr->frameTime;
            {
                avpicture_fill ( ( AVPicture * ) avr->RGBpicture, avr->frame, PIX_FMT_RGB24, avr->srcWidth, avr->srcHeight );
                assert( avr->swsContext );
                sws_scale ( avr->swsContext,
#ifdef USE_AVCODEC2
                            ( const uint8_t * const * )
#else
                            ( uint8_t ** )
#endif /* USE_AVCODEC2 */
                            avr->RGBpicture->data, avr->RGBpicture->linesize,
                            0, avr->srcHeight,
                            avr->YUVpicture->data, avr->YUVpicture->linesize );
#ifdef AVR_DUMP_RAW_AVI
                if ( AVI_write_frame ( avr->avi_out, avr->frame, avr->size, 1 ) <0 )
                {
                    GF_LOG ( GF_LOG_ERROR, GF_LOG_MODULE, ( "[AVRedirect] Error writing video frame\n" ) );
                }
#endif /* AVR_DUMP_RAW_AVI */
                gf_mx_v(avr->frameMutex);
                /* encode outside the frame mutex so the compositor is not blocked while avcodec runs */
                if (avr->encode)
                {
                    int written;
                    //u32 sysclock = gf_sys_clock();
                    avr->YUVpicture->pts = currentFrameTimeProcessed;
                    //printf("Encoding frame PTS="LLU", frameNum=%u, time=%u...", avr->YUVpicture->pts, avr->YUVpicture->coded_picture_number, currentFrameTimeProcessed);
                    written = avcodec_encode_video ( ctx, avr->videoOutbuf, avr->videoOutbufSize, avr->YUVpicture );
                    //ctx->coded_frame->pts = currentFrameTimeProcessed;
                    if ( written < 0 )
                    {
                        GF_LOG ( GF_LOG_ERROR, GF_LOG_MODULE, ( "[AVRedirect] Error while encoding video frame =%d\n", written ) );
                    } else
                        if ( written > 0 )
                        {
                            ts_encode_video_frame(avr->ts_implementation, avr->videoOutbuf, written);
                        }
                    lastEncodedFrameTime = currentFrameTimeProcessed;
                }
            }
        }
        avr->frameTimeEncoded = currentFrameTimeProcessed;
        gf_sleep(1);
    } /* End of main loop */
exit:
    GF_LOG(GF_LOG_INFO, GF_LOG_MODULE, ("[AVRedirect] Ending video encoding thread...\n"));
    if (avr->term)
        gf_sc_remove_video_listener ( avr->term->compositor, &avr->video_listen );
    return 0;
}
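All of the examples register a video listener on the compositor so the module is called back once per composed frame, and unregister it with gf_sc_remove_video_listener() when the connection is torn down. The following is a minimal sketch of that wiring; the GF_VideoListener field names (udta, on_video_frame, on_video_reconfig), the callback signatures and the header path are assumptions based on the modules above and may differ across GPAC versions, and all my_* names are placeholders.

#include <gpac/internal/compositor_dev.h>   /* assumed location of GF_VideoListener and gf_sc_*_video_listener() */

/* Hypothetical frame callback: invoked by the compositor after a frame has been drawn */
static void my_on_video_frame(void *udta, u32 time)
{
	/* grab, dump or encode the freshly composed frame here */
}

/* Hypothetical reconfig callback: invoked when the video output size or depth changes */
static void my_on_video_reconfig(void *udta, u32 width, u32 height, u8 bpp)
{
	/* reallocate per-frame buffers for the new dimensions */
}

static GF_VideoListener my_listener;

static void attach_video_listener(GF_Compositor *compositor, void *udta)
{
	my_listener.udta = udta;
	my_listener.on_video_frame = my_on_video_frame;
	my_listener.on_video_reconfig = my_on_video_reconfig;
	gf_sc_add_video_listener(compositor, &my_listener);
}

static void detach_video_listener(GF_Compositor *compositor)
{
	gf_sc_remove_video_listener(compositor, &my_listener);
}

In Examples #1 to #3 the listener is attached when the scene connects or when a test is opened and detached before switching to another test; Example #4 attaches it for the lifetime of the encoding thread.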