Example #1
/*access the first available CU (composition unit) for rendering;
this is a blocking call since input may change the output (temporal scalability)*/
GF_CMUnit *gf_cm_get_output(GF_CompositionMemory *cb)
{
	GF_CMUnit *out = NULL;

	/*if paused or stop or buffering, do nothing*/
	switch (cb->Status) {
	case CB_BUFFER:
		return NULL;
	case CB_STOP:
	case CB_PAUSE:
		/*only visual buffers deliver data when paused*/
		if (cb->odm->codec->type != GF_STREAM_VISUAL) goto exit;
		break;
	case CB_BUFFER_DONE:
		cb->Status = CB_PLAY;
		break;
	}
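	/*status lifecycle as implemented above: CB_BUFFER delivers nothing,
	CB_BUFFER_DONE switches to CB_PLAY, and CB_STOP/CB_PAUSE deliver only
	for visual streams so the last frame can still be redrawn*/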

	/*no output*/
	if (!cb->output->dataLength) {
		if ((cb->Status != CB_STOP) && cb->HasSeenEOS && (cb->odm && cb->odm->codec)) {
			GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] Switching composition memory to stop state - time %d\n", cb->odm->OD->objectDescriptorID, (u32) cb->odm->media_stop_time));

			cb->Status = CB_STOP;
			cb->odm->current_time = (u32) cb->odm->media_stop_time;
#ifndef GPAC_DISABLE_VRML
			/*force update of media time*/
			mediasensor_update_timing(cb->odm, 1);
#endif
		}
		goto exit;
	}

	/*update the timing*/
	if ((cb->Status != CB_STOP) && cb->odm && cb->odm->codec) {
		cb->odm->current_time = cb->output->TS;

		/*handle visual object - EOS if no more data (we keep the last CU for rendering, so check the next one)*/
		if (cb->HasSeenEOS && (!cb->output->next->dataLength || (cb->Capacity==1))) {
			GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] Switching composition memory to stop state - time %d\n", cb->odm->OD->objectDescriptorID, (u32) cb->odm->media_stop_time));
			cb->Status = CB_STOP;
			cb->odm->current_time = (u32) cb->odm->media_stop_time;
#ifndef GPAC_DISABLE_VRML
			/*force update of media time*/
			mediasensor_update_timing(cb->odm, 1);
#endif
			gf_odm_signal_eos(cb->odm);
		}
	}
	out = cb->output;

	//assert(out->TS >= cb->LastRenderedTS);

exit:
	return out;
}
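
A minimal usage sketch of this API: compose_pass and render_frame are hypothetical placeholders, and the GF_CMUnit data pointer is assumed to sit alongside the dataLength field used above; only gf_cm_get_output itself is taken from the example.

/*hypothetical consumer: poll the composition memory once per compositor pass*/
void compose_pass(GF_CompositionMemory *cb)
{
	GF_CMUnit *cu = gf_cm_get_output(cb);
	/*NULL means buffering, stopped for a non-visual stream, or no decoded data yet*/
	if (!cu) return;
	/*placeholder renderer - the CU stays in the composition memory for
	redisplay and is released later through the composition memory API*/
	render_frame(cu->data, cu->dataLength, cu->TS);
}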
Example #2
/*render: set up the media sensor and update timing for inline scenes*/
void RenderMediaSensor(GF_Node *node, void *rs, Bool is_destroy)
{
	GF_TraverseState *tr_state = (GF_TraverseState *)rs;
	GF_Clock *ck;
	Bool do_update_clock = 1;
	MediaSensorStack *st = (MediaSensorStack *)gf_node_get_private(node);

	if (is_destroy) {
		/*unlink from OD*/
		if (st->stream && st->stream->odm)
			gf_list_del_item(st->stream->odm->ms_stack, st);

		gf_list_del(st->seg);
		gf_free(st);
		return;
	}
	//we need to disable culling, otherwise we may never be called back again
	tr_state->disable_cull = 1;

	if (!st->stream) st->stream = gf_mo_register(node, &st->sensor->url, 0, 0);
	if (!st->stream || !st->stream->odm) return;

	if (!st->is_init) {
		gf_list_add(st->stream->odm->ms_stack, st);
		gf_odm_init_segments(st->stream->odm, st->seg, &st->sensor->url);
		st->is_init = 1;
		st->active_seg = 0;
	}
	/*a media sensor bound to natural media (audio, video) is updated when
	fetching the stream data for rendering*/

	ck = NULL;
	/*check inline scenes - if the scene is set to restart, DON'T MODIFY THE SENSOR:
	since we need two render passes to restart an inline scene, the scene is considered not running*/
	if (st->stream->odm->subscene && !st->stream->odm->subscene->needs_restart) {
		if (st->stream->odm->subscene->scene_codec) ck = st->stream->odm->subscene->scene_codec->ck;
		/*dynamic scene*/
		else ck = st->stream->odm->subscene->dyn_ck;
		if (st->stream->odm->subscene->is_dynamic_scene) do_update_clock = 0;
	}
	/*check anim streams*/
	else if (st->stream->odm->codec && (st->stream->odm->codec->type==GF_STREAM_SCENE)) ck = st->stream->odm->codec->ck;
	/*check OCR streams*/
	else if (st->stream->odm->ocr_codec) ck = st->stream->odm->ocr_codec->ck;

	if (ck && gf_clock_is_started(ck) ) {
		if (do_update_clock)
			st->stream->odm->media_current_time = gf_clock_media_time(ck);
		mediasensor_update_timing(st->stream->odm, 0);
	}
	//if main addon is VoD and selected and clock is paused, fire a timeshift update
	else if (st->stream->odm->subscene && st->stream->odm->subscene->sys_clock_at_main_activation) {
		GF_Event evt;
		memset(&evt, 0, sizeof(evt));
		evt.type = GF_EVENT_TIMESHIFT_UPDATE;
		gf_term_send_event(st->stream->odm->term, &evt);
	}
}
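
For context, a sketch of how this callback is attached to the node at initialization; init_media_sensor is a hypothetical name, while GF_SAFEALLOC, gf_list_new, gf_node_set_private and gf_node_set_callback_function are standard GPAC scene-graph facilities.

/*hypothetical initializer: allocate the private stack and register the render callback*/
void init_media_sensor(GF_Node *node)
{
	MediaSensorStack *st;
	GF_SAFEALLOC(st, MediaSensorStack);	/*zero-initialized, so is_init starts at 0*/
	if (!st) return;
	st->seg = gf_list_new();	/*segment list, freed in the is_destroy branch above*/
	st->sensor = (M_MediaSensor *)node;
	gf_node_set_private(node, st);	/*retrieved via gf_node_get_private above*/
	gf_node_set_callback_function(node, RenderMediaSensor);
}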
Example #3
/*render: set up the media sensor and update timing for inline scenes*/
void RenderMediaSensor(GF_Node *node, void *rs, Bool is_destroy)
{
	GF_Clock *ck;
	MediaSensorStack *st = (MediaSensorStack *)gf_node_get_private(node);

	if (is_destroy) {
		/*unlink from OD*/
		if (st->stream && st->stream->odm) 
			gf_list_del_item(st->stream->odm->ms_stack, st);

		gf_list_del(st->seg);
		gf_free(st);
		return;
	}

	if (!st->stream) st->stream = gf_mo_register(node, &st->sensor->url, 0, 0);
	if (!st->stream || !st->stream->odm) return;

	if (!st->is_init) {
		gf_list_add(st->stream->odm->ms_stack, st);
		gf_odm_init_segments(st->stream->odm, st->seg, &st->sensor->url);
		st->is_init = 1;
		st->active_seg = 0;
	}
	/*a media sensor bound to natural media (audio, video) is updated when
	fetching the stream data for rendering*/

	ck = NULL;
	/*check inline scenes - if the scene is set to restart, DON'T MODIFY THE SENSOR:
	since we need two render passes to restart an inline scene, the scene is considered not running*/
	if (st->stream->odm->subscene && !st->stream->odm->subscene->needs_restart) {
		if (st->stream->odm->subscene->scene_codec) ck = st->stream->odm->subscene->scene_codec->ck;
		/*dynamic scene*/
		else ck = st->stream->odm->subscene->dyn_ck;
		/*since audio may be used alone through an inline scene, we need to refresh the graph*/
		if (ck && !ck->has_seen_eos && st->stream->odm->state) gf_term_invalidate_compositor(st->stream->odm->term);
	}
	/*check anim streams*/
	else if (st->stream->odm->codec && (st->stream->odm->codec->type==GF_STREAM_SCENE)) ck = st->stream->odm->codec->ck;
	/*check OCR streams*/
	else if (st->stream->odm->ocr_codec) ck = st->stream->odm->ocr_codec->ck;

	if (ck && gf_clock_is_started(ck) ) {
		st->stream->odm->current_time = gf_clock_time(ck);
		mediasensor_update_timing(st->stream->odm, 0);
	}
}
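
A brief runtime sketch for completeness: the compositor re-runs this callback on each traversal of the scene graph. gf_node_traverse is GPAC's dispatch to a node's registered callback; the wrapper below is purely illustrative.

/*illustrative only: each compositor pass revisits the node, invoking
RenderMediaSensor with is_destroy=0; node destruction invokes it once
with is_destroy=1 to unlink from the ODM and free the stack*/
void frame_pass(GF_Node *media_sensor_node, GF_TraverseState *tr_state)
{
	gf_node_traverse(media_sensor_node, tr_state);
}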
Example #4
/*access the first available CU (composition unit) for rendering;
this is a blocking call since input may change the output (temporal scalability)*/
GF_CMUnit *gf_cm_get_output(GF_CompositionMemory *cb)
{
	/*if paused or stop or buffering, do nothing*/
	switch (cb->Status) {
	case CB_BUFFER:
	case CB_STOP:
		/*only visual buffers deliver data when buffering or stop*/
		if (cb->odm->codec->type != GF_STREAM_VISUAL) return NULL;
		break;
	case CB_BUFFER_DONE:
		//for non-visual output, move to the play state upon fetch
		if (cb->odm->codec->type != GF_STREAM_VISUAL)
			cb->Status = CB_PLAY;
		break;
	//we always deliver in pause; it is up to the caller to decide whether to consume the frame
	case CB_PAUSE:
		break;
	}

	/*no output*/
	if (!cb->UnitCount || !cb->output->dataLength) {
		if ((cb->Status != CB_STOP) && cb->HasSeenEOS && (cb->odm && cb->odm->codec)) {
			GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] Switching composition memory to stop state - time %d\n", cb->odm->OD->objectDescriptorID, (u32) cb->odm->media_stop_time));

			if ((cb->Status==CB_BUFFER_DONE) && (cb->odm->codec->type == GF_STREAM_VISUAL) ){
				gf_clock_buffer_off(cb->odm->codec->ck);
			}
			cb->Status = CB_STOP;
			cb->odm->media_current_time = (u32) cb->odm->media_stop_time;
#ifndef GPAC_DISABLE_VRML
			/*force update of media time*/
			mediasensor_update_timing(cb->odm, 1);
#endif
			gf_odm_signal_eos(cb->odm);
		}
		return NULL;
	}

	/*update the timing*/
	if ((cb->Status != CB_STOP) && cb->odm && cb->odm->codec) {
		if (cb->odm->codec->ck->has_media_time_shift) {
			cb->odm->media_current_time = cb->output->TS + cb->odm->codec->ck->media_time_at_init - cb->odm->codec->ck->init_time;
		} else {
			cb->odm->media_current_time = cb->output->TS;
		}

		/*handle visual object - EOS if no more data (we keep the last CU for rendering, so check the next one)*/
		if (cb->HasSeenEOS && (cb->odm->codec->type == GF_STREAM_VISUAL) && (!cb->output->next->dataLength || (cb->Capacity==1))) {
			GF_LOG(GF_LOG_INFO, GF_LOG_MEDIA, ("[ODM%d] Switching composition memory to stop state - time %d\n", cb->odm->OD->objectDescriptorID, (u32) cb->odm->media_stop_time));
			if (cb->Status==CB_BUFFER_DONE) {
				gf_clock_buffer_off(cb->odm->codec->ck);
			}
			cb->Status = CB_STOP;
			cb->odm->media_current_time = (u32) cb->odm->media_stop_time;
#ifndef GPAC_DISABLE_VRML
			/*force update of media time*/
			mediasensor_update_timing(cb->odm, 1);
#endif
			gf_odm_signal_eos(cb->odm);
		}
	}
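	/*track end-to-end delay: remember the sender NTP timestamp of the
	delivered CU and its offset from local NTP time in milliseconds*/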
	if (cb->output->sender_ntp) {
		cb->LastRenderedNTPDiff = gf_net_get_ntp_diff_ms(cb->output->sender_ntp);
		cb->LastRenderedNTP = cb->output->sender_ntp;
	}

	return cb->output;
}
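
The timestamp rebasing above can be read as a small pure function; this restatement is illustrative only and uses just the GF_Clock fields referenced in the example.

/*illustrative restatement of the mapping used above: when the clock carries
a media-time shift, the composition timestamp is rebased by the media time
observed at clock init minus the clock init time*/
static u32 cu_media_time(GF_Clock *ck, u32 cu_ts)
{
	if (ck->has_media_time_shift)
		return cu_ts + ck->media_time_at_init - ck->init_time;
	return cu_ts;
}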