Example #1
0
GF_EXPORT
GF_Err gf_term_process_flush(GF_Terminal *term)
{
	u32 i;
	CodecEntry *ce;
	if (!(term->flags & GF_TERM_NO_COMPOSITOR_THREAD) ) return GF_BAD_PARAM;

	/*update until the frame is mature*/
	while (1) {

		if (term->flags & GF_TERM_NO_DECODER_THREAD) {
			gf_term_handle_services(term);
			gf_mx_p(term->mm_mx);
			i=0;
			while ((ce = (CodecEntry*)gf_list_enum(term->codecs, &i))) {
				gf_codec_process(ce->dec, 10000);
			}
			gf_mx_v(term->mm_mx);
		}

		if (!gf_sc_draw_frame(term->compositor, NULL))
			break;

		if (! (term->user->init_flags & GF_TERM_NO_REGULATION))
			break;
	}
	return GF_OK;
}
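
A minimal caller sketch for the flush call above, assuming the public GPAC terminal API (gf_term_play_from_time, gf_error_to_string) and a terminal created with GF_TERM_NO_COMPOSITOR_THREAD; seek_and_flush is a hypothetical helper name, not part of GPAC:

/* Hedged usage sketch, not part of the GPAC sources: the application owns the
   rendering loop and flushes after a seek so the next frame is fully drawn. */
#include <stdio.h>
#include <gpac/terminal.h>

static void seek_and_flush(GF_Terminal *term, u32 seek_time_ms)
{
	GF_Err e;
	/* jump to the new position */
	gf_term_play_from_time(term, seek_time_ms, 0);
	/* decode and draw until the frame is mature; returns GF_BAD_PARAM
	   when a compositor thread owns the rendering loop */
	e = gf_term_process_flush(term);
	if (e) fprintf(stderr, "flush failed: %s\n", gf_error_to_string(e));
}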
Example #2
0
u32 RunSingleDec(void *ptr)
{
	GF_Err e;
	u64 time_taken;
	CodecEntry *ce = (CodecEntry *) ptr;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[MediaDecoder %d] Entering thread ID %d\n", ce->dec->odm->OD->objectDescriptorID, gf_th_id() ));

	while (ce->flags & GF_MM_CE_RUNNING) {
		time_taken = gf_sys_clock_high_res();
		if (!ce->dec->force_cb_resize) {
			gf_mx_p(ce->mx);
			e = gf_codec_process(ce->dec, ce->dec->odm->term->frame_duration);
			if (e) gf_term_message(ce->dec->odm->term, ce->dec->odm->net_service->url, "Decoding Error", e);
			gf_mx_v(ce->mx);
		}
		time_taken = gf_sys_clock_high_res() - time_taken;


		/*no priority boost this way for systems codecs; their priority is set dynamically by
		not releasing the graph when late and moving on*/
		if (!ce->dec->CB || (ce->dec->CB->UnitCount == ce->dec->CB->Capacity))
			ce->dec->PriorityBoost = 0;

		/*while the boost is on, don't sleep*/
		if (ce->dec->PriorityBoost) continue;

		if (time_taken<20) {
			gf_sleep(1);
		}
	}
	ce->flags |= GF_MM_CE_DEAD;
	return 0;
}
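
A hedged lifecycle sketch for the decoder loop above: the CodecEntry fields and GF_MM_CE_* flags are internal to the media manager, the `thread` member is assumed, and start_codec_thread/stop_codec_thread are hypothetical helper names; only gf_th_new, gf_th_run, gf_th_del and gf_sleep are the public GPAC thread API:

/* Hedged sketch, not part of the GPAC sources: shows how RunSingleDec is
   typically driven through the GF_MM_CE_RUNNING / GF_MM_CE_DEAD flags. */
#include <gpac/thread.h>
#include <gpac/tools.h>

static void start_codec_thread(CodecEntry *ce)
{
	ce->flags |= GF_MM_CE_RUNNING;
	ce->thread = gf_th_new("MediaDecoder");   /* assumes a 'thread' member on CodecEntry */
	gf_th_run(ce->thread, RunSingleDec, ce);  /* RunSingleDec loops until RUNNING is cleared */
}

static void stop_codec_thread(CodecEntry *ce)
{
	ce->flags &= ~GF_MM_CE_RUNNING;
	/* wait for RunSingleDec to leave its loop and flag itself dead */
	while (!(ce->flags & GF_MM_CE_DEAD)) gf_sleep(1);
	gf_th_del(ce->thread);
	ce->thread = NULL;
}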
Example #3
0
GF_EXPORT
GF_Err gf_term_process_flush(GF_Terminal *term)
{
	u32 i;
	CodecEntry *ce;
	if (!(term->flags & GF_TERM_NO_COMPOSITOR_THREAD) ) return GF_BAD_PARAM;

	/*update until the frame is mature*/
	while (1) {

		if (term->flags & GF_TERM_NO_DECODER_THREAD) {
			gf_term_handle_services(term);
			gf_mx_p(term->mm_mx);
			i=0;
			while ((ce = (CodecEntry*)gf_list_enum(term->codecs, &i))) {
				gf_codec_process(ce->dec, 10000);
			}
			gf_mx_v(term->mm_mx);
		}

		if (!gf_sc_draw_frame(term->compositor, 1, NULL)) {
			if (!term->root_scene || !term->root_scene->root_od)
				break;

			if (gf_list_count(term->media_queue) ) 
				continue;

			//wait for audio to be flushed
			if (gf_sc_check_audio_pending(term->compositor) ) 
				continue;

			//force end of buffer
			if (gf_scene_check_clocks(term->root_scene->root_od->net_service, term->root_scene, 1))
				break;
		}

		if (! (term->user->init_flags & GF_TERM_NO_REGULATION))
			break;
	}
	return GF_OK;
}
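
A hedged sketch of the calling side for this extended flush, assuming a terminal created with GF_TERM_NO_COMPOSITOR_THREAD and GF_TERM_NO_REGULATION so the application paces the pipeline itself; gf_term_process_step is the public single-step call and app_render_loop is a hypothetical name:

/* Hedged sketch, not part of the GPAC sources. */
#include <gpac/terminal.h>

static void app_render_loop(GF_Terminal *term, volatile Bool *running)
{
	while (*running) {
		/* one network/decode/compose pass; the application does its own pacing */
		gf_term_process_step(term);
	}
	/* drain pending media and audio so the last frame is fully presented */
	gf_term_process_flush(term);
}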
Example #4
0
static u32 MM_SimulationStep_Decoder(GF_Terminal *term, u32 *nb_active_decs)
{
	CodecEntry *ce;
	GF_Err e;
	u32 count, remain;
	u32 time_taken, time_slice, time_left;

#ifndef GPAC_DISABLE_LOG
	term->compositor->networks_time = gf_sys_clock();
#endif

//	GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Media Manager] Entering simulation step\n"));
	gf_term_handle_services(term);

#ifndef GPAC_DISABLE_LOG
	term->compositor->networks_time = gf_sys_clock() - term->compositor->networks_time;
#endif

#ifndef GPAC_DISABLE_LOG
	term->compositor->decoders_time = gf_sys_clock();
#endif
	gf_mx_p(term->mm_mx);

	count = gf_list_count(term->codecs);
	time_left = term->frame_duration;
	*nb_active_decs = 0;

	if (term->last_codec >= count) term->last_codec = 0;
	remain = count;
	/*this is ultra basic; a nicer scheduling system would be much better*/
	while (remain) {
		ce = (CodecEntry*)gf_list_get(term->codecs, term->last_codec);
		if (!ce) break;

		if (!(ce->flags & GF_MM_CE_RUNNING) || (ce->flags & GF_MM_CE_THREADED) || ce->dec->force_cb_resize) {
			remain--;
			if (!remain) break;
			term->last_codec = (term->last_codec + 1) % count;
			continue;
		}
		time_slice = ce->dec->Priority * time_left / term->cumulated_priority;
		if (ce->dec->PriorityBoost) time_slice *= 2;
		time_taken = gf_sys_clock();
		(*nb_active_decs) ++;
		e = gf_codec_process(ce->dec, time_slice);
		time_taken = gf_sys_clock() - time_taken;
		/*avoid signaling errors too often...*/
#ifndef GPAC_DISABLE_LOG
		if (e) {
			GF_LOG(GF_LOG_WARNING, GF_LOG_CODEC, ("[ODM%d] Decoding Error %s\n", ce->dec->odm->OD->objectDescriptorID, gf_error_to_string(e) ));
		} else {
			//GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] Decode time slice %d ms out of %d ms\n", ce->dec->decio ? ce->dec->decio->module_name : "RAW", time_taken, time_left ));
		}
#endif
		if (ce->flags & GF_MM_CE_DISCARDED) {
			gf_free(ce);
			gf_list_rem(term->codecs, term->last_codec);
			count--;
			if (!count)
				break;
		} else {
			if (ce->dec->CB && (ce->dec->CB->UnitCount >= ce->dec->CB->Min)) ce->dec->PriorityBoost = 0;
		}
		term->last_codec = (term->last_codec + 1) % count;

		remain -= 1;
		if (time_left > time_taken) {
			time_left -= time_taken;
			if (!remain) break;
		} else {
			time_left = 0;
			break;
		}
	}
	gf_mx_v(term->mm_mx);
#ifndef GPAC_DISABLE_LOG
	term->compositor->decoders_time = gf_sys_clock() - term->compositor->decoders_time;
#endif

	return time_left;
}
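
A worked sketch of the time-slice computation used in the loop above; codec_time_slice is a hypothetical helper and the numbers are purely illustrative. Each running codec gets a share of the remaining frame budget proportional to its Priority, doubled when PriorityBoost is set:

/* Hedged sketch, not part of the GPAC sources. */
#include <gpac/tools.h>

static u32 codec_time_slice(u32 priority, Bool priority_boost, u32 time_left, u32 cumulated_priority)
{
	u32 slice = priority * time_left / cumulated_priority;
	if (priority_boost) slice *= 2;  /* boosted codecs get a double slice to refill their CB */
	return slice;
}

/* e.g. with frame_duration = 33 ms and two codecs of Priority 2 and 1
   (cumulated_priority = 3):
     codec_time_slice(2, 0, 33, 3) == 22 ms
     codec_time_slice(1, 0, 33, 3) == 11 ms
     codec_time_slice(2, 1, 33, 3) == 44 ms   (PriorityBoost set) */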
Example #5
0
/*dispatch the AU in the DB*/
static void Channel_DispatchAU(GF_Channel *ch, u32 duration)
{
	u32 time;
	GF_DBUnit *au;

	if (!ch->buffer || !ch->len) {
		if (ch->buffer) {
			gf_free(ch->buffer);
			ch->buffer = NULL;
		}
		return;
	}

	au = gf_db_unit_new();
	if (!au) {
		gf_free(ch->buffer);
		ch->buffer = NULL;
		ch->len = 0;
		return;
	}

	au->CTS = ch->CTS;
	au->DTS = ch->DTS;
	if (ch->IsRap) au->flags |= GF_DB_AU_RAP;
	if (ch->CTS_past_offset) {
		au->CTS = ch->CTS_past_offset;
		au->flags |= GF_DB_AU_CTS_IN_PAST;
		ch->CTS_past_offset = 0;
	}
	if (ch->no_timestamps) {
		au->flags |= GF_DB_AU_NO_TIMESTAMPS;
		ch->no_timestamps=0;
	}
	au->data = ch->buffer;
	au->dataLength = ch->len;
	au->PaddingBits = ch->padingBits;

	ch->IsRap = 0;
	ch->padingBits = 0;
	au->next = NULL;
	ch->buffer = NULL;

	if (ch->len + ch->media_padding_bytes != ch->allocSize) {
		au->data = (char*)gf_realloc(au->data, sizeof(char) * (au->dataLength + ch->media_padding_bytes));
	}
	if (ch->media_padding_bytes) memset(au->data + au->dataLength, 0, sizeof(char)*ch->media_padding_bytes);
	
	ch->len = ch->allocSize = 0;

	gf_es_lock(ch, 1);

	if (ch->service && ch->service->cache) {
		GF_SLHeader slh;
		memset(&slh, 0, sizeof(GF_SLHeader));
		slh.accessUnitEndFlag = slh.accessUnitStartFlag = 1;
		slh.compositionTimeStampFlag = slh.decodingTimeStampFlag = 1;
		slh.decodingTimeStamp = ch->net_dts;
		slh.compositionTimeStamp = ch->net_cts;
		slh.randomAccessPointFlag = (au->flags & GF_DB_AU_RAP) ? 1 : 0;
		ch->service->cache->Write(ch->service->cache, ch, au->data, au->dataLength, &slh);
	}

	if (!ch->AU_buffer_first) {
		ch->AU_buffer_first = au;
		ch->AU_buffer_last = au;
		ch->AU_Count = 1;
	} else {
		if (ch->AU_buffer_last->DTS<=au->DTS) {
			ch->AU_buffer_last->next = au;
			ch->AU_buffer_last = ch->AU_buffer_last->next;
		}
		/*enable deinterleaving only for audio channels (some video transports may not be able to compute DTS, cf MPEG1-2/RTP).
		HOWEVER, we must recompute a monotonically increasing DTS in case the decoder performs frame reordering,
		in which case the DTS is used as the presentation time!!*/
		else if (ch->esd->decoderConfig->streamType!=GF_STREAM_AUDIO) {
#if 0
			GF_DBUnit *au_prev, *ins_au;
			u32 DTS;
#endif
			au->DTS = 0;
			/*append AU*/
			ch->AU_buffer_last->next = au;
			ch->AU_buffer_last = ch->AU_buffer_last->next;

#if 0
			GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] Media deinterleaving OD %d ch %d\n", ch->esd->ESID, ch->odm->OD->objectDescriptorID));

			DTS = au->DTS;
			au_prev = ch->AU_buffer_first;
			/*locate first AU in buffer with DTS greater than new unit CTS*/
			while (au_prev->next && (au_prev->DTS < DTS) ) au_prev = au_prev->next;
			/*remember insertion point*/
			ins_au = au_prev;
			/*shift all following frames DTS*/
			while (au_prev->next) {
				au_prev->next->DTS = au_prev->DTS;
				au_prev = au_prev->next;
			}
			/*and apply*/
			ins_au->DTS = DTS;
#endif
		} else {
			GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] Audio deinterleaving OD %d ch %d\n", ch->esd->ESID, ch->odm->OD->objectDescriptorID));
			/*de-interleaving of AUs*/
			if (ch->AU_buffer_first->DTS > au->DTS) {
				au->next = ch->AU_buffer_first;
				ch->AU_buffer_first = au;
			} else {
				GF_DBUnit *au_prev = ch->AU_buffer_first;
				while (au_prev->next && au_prev->next->DTS<au->DTS) {
					au_prev = au_prev->next;
				}
				assert(au_prev);
				if (au_prev->next->DTS==au->DTS) {
					gf_free(au->data);
					gf_free(au);
				} else {
					au->next = au_prev->next;
					au_prev->next = au;
				}
			}
		}
		ch->AU_Count += 1;
	}

	Channel_UpdateBufferTime(ch);
	ch->au_duration = 0;
	if (duration) ch->au_duration = (u32) ((u64)1000 * duration / ch->ts_res);

	GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d - Dispatch AU DTS %d - CTS %d - size %d time %d Buffer %d Nb AUs %d - First AU relative timing %d\n", ch->esd->ESID, au->DTS, au->CTS, au->dataLength, gf_clock_real_time(ch->clock), ch->BufferTime, ch->AU_Count, ch->AU_buffer_first ? ch->AU_buffer_first->DTS - gf_clock_time(ch->clock) : 0 ));

	/*little optimisation: if direct dispatching is possible, try to decode the AU.
	We must lock the media scheduler to avoid deadlocks with other codecs accessing the scene or
	media resources*/
	if (ch->dispatch_after_db) {
		u32 retry = 100;
		u32 current_frame;
		GF_Terminal *term = ch->odm->term;
		ch_buffer_off(ch);

		gf_es_lock(ch, 0);
		if (gf_mx_try_lock(term->mm_mx)) {
			switch (ch->esd->decoderConfig->streamType) {
			case GF_STREAM_OD:
				gf_codec_process(ch->odm->subscene->od_codec, 100);
				break;
			case GF_STREAM_SCENE:
				if (ch->odm->codec) 
					gf_codec_process(ch->odm->codec, 100);
				else
					gf_codec_process(ch->odm->subscene->scene_codec, 100);
				break;
			}
			gf_mx_v(term->mm_mx);
		}
		gf_es_lock(ch, 1);

		current_frame = term->compositor->frame_number;
		/*wait for the initial setup to complete before handing control back to the calling service*/
		while (retry) {
			/*Scene bootstrap: if the scene is attached, wait for the first frame to complete so that the initial PLAY
			on objects can be evaluated*/
			if (term->compositor->scene && (term->compositor->frame_number==current_frame) ) {
				retry--;
				gf_sleep(1);
				continue;
			}
			/*Media bootstrap: wait until all pending requests on media objects are processed*/
			if (gf_list_count(term->media_queue)) {
				retry--;
				gf_sleep(1);
				continue;
			}
			break;
		}
	}

	time = gf_term_get_time(ch->odm->term);
	if (ch->BufferOn) {
		ch->last_au_time = time;
		Channel_UpdateBuffering(ch, 1);
	} else {
		/*report data progress at most once every 500 ms*/
		if (ch->last_au_time + 500 < time) {
			gf_term_service_media_event(ch->odm, GF_EVENT_MEDIA_PROGRESS);
			ch->last_au_time = time;
		}
	}

	gf_es_lock(ch, 0);
}
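
The audio branch above performs a DTS-ordered insertion into a singly linked AU list; below is a standalone sketch of that algorithm, with MyAU as a hypothetical simplified stand-in for GF_DBUnit (duplicate DTS values are dropped, as in the original):

/* Hedged sketch, not part of the GPAC sources. */
#include <stdlib.h>

typedef struct _my_au {
	unsigned int DTS;
	struct _my_au *next;
} MyAU;

static MyAU *insert_by_dts(MyAU *first, MyAU *au)
{
	MyAU *prev;
	if (!first || (first->DTS > au->DTS)) {
		au->next = first;   /* becomes the new head of the list */
		return au;
	}
	prev = first;
	while (prev->next && (prev->next->DTS < au->DTS)) prev = prev->next;
	if (prev->next && (prev->next->DTS == au->DTS)) {
		free(au);           /* duplicate timestamp: drop the new unit */
		return first;
	}
	au->next = prev->next;  /* insert between prev and prev->next (or append) */
	prev->next = au;
	return first;
}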