Example 1
static void term_on_data_packet(GF_ClientService *service, LPNETCHANNEL netch, char *data, u32 data_size, GF_SLHeader *hdr, GF_Err reception_status)
{
	GF_Channel *ch;

	ch = gf_term_get_channel(service, netch);
	if (!ch)
		return;

	if (reception_status==GF_EOS) {
		gf_es_on_eos(ch);
		return;
	}
	/*otherwise dispatch with error code*/
	gf_es_receive_sl_packet(service, ch, data, data_size, hdr, reception_status);
}
Example 2
/*scalable browsing of input channels: find the AU with the lowest DTS on all input channels*/
static void Decoder_GetNextAU(GF_Codec *codec, GF_Channel **activeChannel, GF_DBUnit **nextAU)
{
	GF_Channel *ch;
	GF_DBUnit *AU;
	u32 count, minDTS, i;
	count = gf_list_count(codec->inChannels);
	*nextAU = NULL;
	*activeChannel = NULL;

	if (!count) return;

	minDTS = (u32) -1;
	/*reverse browsing to make sure we fill enhancement before base layer*/
	for (i=count;i>0;i--) {
		ch = (GF_Channel*)gf_list_get(codec->inChannels, i-1);

		if ((codec->type==GF_STREAM_OCR) && ch->IsClockInit) {
			/*check duration - we assume that scalable OCR streams are just pure nonsense...*/
			if (ch->is_pulling && codec->odm->duration) {
				if (gf_clock_time(codec->ck) > codec->odm->duration)
					gf_es_on_eos(ch);
			}
			return;
		}

		AU = gf_es_get_au(ch);
		if (!AU) {
			if (! (*activeChannel)) *activeChannel = ch;
			continue;
		}

		/*we use <= to make sure we first fetch data on base layer if same
		DTS (which is possible in spatial scalability)*/
		if (AU->DTS <= minDTS) {
			minDTS = AU->DTS;
			*activeChannel = ch;
			*nextAU = AU;
		}
	}

	/*FIXME - we're breaking sync (couple of frames delay)*/
	if (codec->is_reordering && *nextAU)
		(*nextAU)->CTS = (*nextAU)->DTS;
}
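
The selection rule above can be isolated in a short, self-contained sketch: channels are visited from the last (enhancement layer) to the first (base layer), and the "<=" comparison lets the base layer, visited last, claim an AU that shares its DTS with an enhancement-layer AU. The my_channel/my_au types below are illustrative stand-ins, not GPAC structures.

#include <stdio.h>

typedef struct { unsigned dts; int valid; } my_au;
typedef struct { const char *name; my_au au; } my_channel;

/* visit from last (enhancement) to first (base); "<=" makes the base layer,
   visited last, win ties on equal DTS */
static const my_channel *pick_next_au(const my_channel *ch, unsigned count)
{
	unsigned min_dts = (unsigned) -1;
	const my_channel *picked = NULL;
	unsigned i;
	for (i = count; i > 0; i--) {
		const my_channel *c = &ch[i-1];
		if (!c->au.valid) continue;
		if (c->au.dts <= min_dts) {
			min_dts = c->au.dts;
			picked = c;
		}
	}
	return picked;
}

int main(void)
{
	my_channel chans[2] = {
		{ "base",        { 40, 1 } },
		{ "enhancement", { 40, 1 } }  /* same DTS: the base layer must win */
	};
	const my_channel *next = pick_next_au(chans, 2);
	printf("next AU comes from the %s channel (DTS %u)\n", next->name, next->au.dts);
	return 0;
}
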
Example 3
/*handles reception of an SL-PDU, logical or physical*/
void gf_es_receive_sl_packet(GF_ClientService *serv, GF_Channel *ch, char *payload, u32 payload_size, GF_SLHeader *header, GF_Err reception_status)
{
	GF_SLHeader hdr;
	u32 nbAU, OldLength, size, AUSeqNum;
	Bool EndAU, NewAU;

	if (ch->bypass_sl_and_db) {
		GF_SceneDecoder *sdec;
		ch->IsClockInit = 1;
		if (ch->odm->subscene) {
			sdec = (GF_SceneDecoder *)ch->odm->subscene->scene_codec->decio;
		} else {
			sdec = (GF_SceneDecoder *)ch->odm->codec->decio;
		}
		gf_mx_p(ch->mx);
		sdec->ProcessData(sdec, payload, payload_size, ch->esd->ESID, 0, 0);
		gf_mx_v(ch->mx);
		return;
	}

	if (ch->es_state != GF_ESM_ES_RUNNING) return;

	if (ch->skip_sl) {
		Channel_ReceiveSkipSL(serv, ch, payload, payload_size);
		return;
	}
	if (ch->is_raw_channel) {
		ch->CTS = ch->DTS = (u32) (ch->ts_offset + (header->compositionTimeStamp - ch->seed_ts) * 1000 / ch->ts_res);
		if (!ch->IsClockInit) {
			gf_es_check_timing(ch);
		}
		if (payload)
			gf_es_dispatch_raw_media_au(ch, payload, payload_size, ch->CTS);
		return;
	}

	/*physical SL-PDU - depacketize*/
	if (!header) {
		u32 SLHdrLen;
		if (!payload_size) return;
		gf_sl_depacketize(ch->esd->slConfig, &hdr, payload, payload_size, &SLHdrLen);
		payload_size -= SLHdrLen;
		payload += SLHdrLen;
	} else {
		hdr = *header;
	}

	/*OCR handling - the clock is only initialized from the first OCR; subsequent OCRs are logged but not applied for the moment*/
	if (hdr.OCRflag) {
		if (!ch->IsClockInit) {
			/*channel is the OCR, re-initialize the clock with the proper OCR*/
			if (gf_es_owns_clock(ch)) {
				u32 OCR_TS;

				/*timestamps of the PCR stream have been shifted - shift the OCR as well*/
				if (ch->seed_ts) {
					u64 diff_ts;
					Double scale = hdr.m2ts_pcr ? 27000000 : ch->esd->slConfig->OCRResolution;
					scale /= ch->ts_res;
					diff_ts = (u64) (ch->seed_ts * scale);
					hdr.objectClockReference -= diff_ts;
				}

				/*if the SL header is mapped from the network module (e.g. not coded), OCR=PCR shall be given in 27MHz units*/
				if (hdr.m2ts_pcr) {
					OCR_TS = (u32) ( hdr.objectClockReference / 27000);
				} else {
					OCR_TS = (u32) ( (s64) (hdr.objectClockReference) * ch->ocr_scale);
				}
				OCR_TS += ch->ts_offset;
				ch->clock->clock_init = 0;

				gf_clock_set_time(ch->clock, OCR_TS);
				/*many TS streams deployed with HLS have broken PCRs - we will check their consistency
				when receiving the first AU with DTS/CTS on this channel*/
				ch->clock->probe_ocr = 1;
				GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: initializing clock at STB %d from OCR TS %d (original TS "LLD") - %d buffering - OTB %d\n", ch->esd->ESID, gf_term_get_time(ch->odm->term), OCR_TS, hdr.objectClockReference, ch->clock->Buffering, gf_clock_time(ch->clock) ));
				if (ch->clock->clock_init) ch->IsClockInit = 1;

			}
		}
#if 0
		/*adjust clock if M2TS PCR discontinuity*/
		else if (hdr.m2ts_pcr==2) {
			u32 ck;
			u32 OCR_TS = (u32) ( hdr.objectClockReference / 27000);
			ck = gf_clock_time(ch->clock);
			GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d: At OTB %d - OCR Discontinuity OCR: adjusting to %d (original TS "LLD") - original clock %d\n", ch->esd->ESID, gf_clock_real_time(ch->clock), OCR_TS, hdr.objectClockReference, ck));
//			gf_clock_set_time(ch->clock, (u32) OCR_TS);
		}
		/*compute clock drift*/
		else {
			u32 ck;
			u32 OCR_TS;
			if (hdr.m2ts_pcr) {
				OCR_TS = (u32) ( hdr.objectClockReference / 27000);
			} else {
				OCR_TS = (u32) ( (s64) (hdr.objectClockReference) * ch->ocr_scale);
			}
			ck = gf_clock_time(ch->clock);
			GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d: At OTB %d adjusting OCR to %d (original TS "LLD") - diff %d\n", ch->esd->ESID, gf_clock_real_time(ch->clock), OCR_TS, hdr.objectClockReference, (s32) OCR_TS - (s32) ck));
//			gf_clock_set_time(ch->clock, (u32) OCR_TS);
		}
#else
		{
			u32 ck;
			u32 OCR_TS;
			if (hdr.m2ts_pcr) {
				OCR_TS = (u32) ( hdr.objectClockReference / 27000);
			} else {
				OCR_TS = (u32) ( (s64) (hdr.objectClockReference) * ch->ocr_scale);
			}
			ck = gf_clock_time(ch->clock);
			GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d: At OTB %d got OCR %d (original TS "LLD") - diff %d%s\n", ch->esd->ESID, gf_clock_real_time(ch->clock), OCR_TS, hdr.objectClockReference, (s32) OCR_TS - (s32) ck, (hdr.m2ts_pcr==2) ? " - PCR Discontinuity flag" : "" ));
		}
#endif
		if (!payload_size) return;
	}


	/*check state*/
	if (!ch->codec_resilient && (reception_status==GF_CORRUPTED_DATA)) {
		Channel_WaitRAP(ch);
		return;
	}

	if (!ch->esd->slConfig->useAccessUnitStartFlag) {
		/*no AU signaling - each packet is an AU*/
		if (!ch->esd->slConfig->useAccessUnitEndFlag) 
			hdr.accessUnitEndFlag = hdr.accessUnitStartFlag = 1;
		/*otherwise AUs are signaled by the end of the previous packet*/
		else
			hdr.accessUnitStartFlag = ch->NextIsAUStart;
	}

	/*get RAP*/
	if (ch->esd->slConfig->hasRandomAccessUnitsOnlyFlag) {
		hdr.randomAccessPointFlag = 1;
	} else if ((ch->carousel_type!=GF_ESM_CAROUSEL_MPEG2) && (!ch->esd->slConfig->useRandomAccessPointFlag || ch->codec_resilient) ) {
		ch->stream_state = 0;
	}

	if (ch->esd->slConfig->packetSeqNumLength) {
		if (ch->pck_sn && hdr.packetSequenceNumber) {
			/*repeated -> drop*/
			if (ch->pck_sn == hdr.packetSequenceNumber) {
				GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: repeated packet, dropping\n", ch->esd->ESID));
				return;
			}
			/*if the codec has no resiliency, check for packet drops*/
			if (!ch->codec_resilient && !hdr.accessUnitStartFlag) {
				if (ch->pck_sn == (u32) (1<<ch->esd->slConfig->packetSeqNumLength) ) {
					if (hdr.packetSequenceNumber) {
						GF_LOG(GF_LOG_WARNING, GF_LOG_SYNC, ("[SyncLayer] ES%d: packet loss, dropping & waiting for RAP\n", ch->esd->ESID));
						Channel_WaitRAP(ch);
						return;
					}
				} else if (ch->pck_sn + 1 != hdr.packetSequenceNumber) {
					GF_LOG(GF_LOG_WARNING, GF_LOG_SYNC, ("[SyncLayer] ES%d: packet loss, dropping & waiting for RAP\n", ch->esd->ESID));
					Channel_WaitRAP(ch);
					return;
				}
			}
		}
		ch->pck_sn = hdr.packetSequenceNumber;
	}

	/*if empty, skip the packet*/
	if (hdr.paddingFlag && !hdr.paddingBits) {
		GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d: Empty packet - skipping\n", ch->esd->ESID));
		return;
	}
	/*IDLE stream shall be processed*/

	NewAU = 0;
	if (hdr.accessUnitStartFlag) {
		NewAU = 1;
		ch->NextIsAUStart = 0;
		ch->skip_carousel_au = 0;

		/*if we have a pending AU, add it*/
		if (ch->buffer) {
			if (ch->esd->slConfig->useAccessUnitEndFlag) {
				GF_LOG(GF_LOG_WARNING, GF_LOG_SYNC, ("[SyncLayer] ES%d: missed end of AU (DTS %d)\n", ch->esd->ESID, ch->DTS));
			}
			if (ch->codec_resilient) {
				if (!ch->IsClockInit && !ch->skip_time_check_for_pending) gf_es_check_timing(ch);
				Channel_DispatchAU(ch, 0);
			} else {
				gf_free(ch->buffer);
				ch->buffer = NULL;
				ch->AULength = 0;
				ch->len = ch->allocSize = 0;
			}
		}
		ch->skip_time_check_for_pending = 0;
		AUSeqNum = hdr.AU_sequenceNumber;
		/*Get CTS */
		if (ch->esd->slConfig->useTimestampsFlag) {
			if (hdr.compositionTimeStampFlag) {
				ch->net_dts = ch->net_cts = hdr.compositionTimeStamp;
				/*get DTS */
				if (hdr.decodingTimeStampFlag) ch->net_dts = hdr.decodingTimeStamp;

#if 0
				/*while the clock is not initialized, check the seed ts*/
				if (!ch->IsClockInit && (ch->net_dts < ch->seed_ts)) 
					ch->seed_ts = ch->net_dts;
#endif
				

				if (ch->net_cts<ch->seed_ts) {
					u64 diff = ch->seed_ts - ch->net_cts;
					ch->CTS_past_offset = (u32) (diff * 1000 / ch->ts_res) + ch->ts_offset;

					ch->net_dts = ch->net_cts = 0;
					ch->CTS = ch->DTS = gf_clock_time(ch->clock);
				} else {
					if (ch->net_dts>ch->seed_ts) ch->net_dts -= ch->seed_ts;
					else ch->net_dts=0;
					ch->net_cts -= ch->seed_ts;
					ch->CTS_past_offset = 0;

					/*TS wrapping not tested*/
					ch->CTS = (u32) (ch->ts_offset + (s64) (ch->net_cts) * 1000 / ch->ts_res);
					ch->DTS = (u32) (ch->ts_offset + (s64) (ch->net_dts) * 1000 / ch->ts_res);
				}

				if (ch->clock->probe_ocr && gf_es_owns_clock(ch)) {
					s32 diff_ts = ch->DTS;
					diff_ts -= ch->clock->init_time;
					if (ABS(diff_ts) > 10000) {
						GF_LOG(GF_LOG_ERROR, GF_LOG_SYNC, ("[SyncLayer] ES%d: invalid clock reference detected - DTS %d but OCR %d - using DTS as OCR\n", ch->esd->ESID, ch->DTS, ch->clock->init_time));
						ch->clock->clock_init = 0;
						gf_clock_set_time(ch->clock, ch->DTS-1000);
					}
					ch->clock->probe_ocr = 0;
				}

				ch->no_timestamps = 0;
			} else {
				ch->no_timestamps = 1;
			}
		} else {
			/*use CU duration*/
			if (!ch->IsClockInit)
				ch->DTS = ch->CTS = ch->ts_offset;

			if (!ch->esd->slConfig->AUSeqNumLength) {
				if (!ch->au_sn) {
					ch->CTS = ch->ts_offset;
					ch->au_sn = 1;
				} else {
					ch->CTS += ch->esd->slConfig->CUDuration;
				}
			} else {
				//use the sequence number to get the TS
				if (AUSeqNum < ch->au_sn) {
					nbAU = ( (1<<ch->esd->slConfig->AUSeqNumLength) - ch->au_sn) + AUSeqNum;
				} else {
					nbAU = AUSeqNum - ch->au_sn;
				}
				ch->CTS += nbAU * ch->esd->slConfig->CUDuration;
			}
		}

		/*if the AU Length is carried in SL, get its size*/
		if (ch->esd->slConfig->AULength > 0) {
			ch->AULength = hdr.accessUnitLength;
		} else {
			ch->AULength = 0;
		}
		/*carousel for repeated AUs.*/
		if (ch->carousel_type) {
/* not used :			Bool use_rap = hdr.randomAccessPointFlag; */

			if (ch->carousel_type==GF_ESM_CAROUSEL_MPEG2) {
				AUSeqNum = hdr.m2ts_version_number_plus_one-1;
				/*MPEG-2 section carouseling does not take the RAP nature of the tables into account*/


				if (AUSeqNum==ch->au_sn) {
					if (ch->stream_state) {
						ch->stream_state=0;
						GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: MPEG-2 Carousel: tuning in\n", ch->esd->ESID));
					} else {
						ch->skip_carousel_au = 1;
						GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d: MPEG-2 Carousel: repeated AU (TS %d) - skipping\n", ch->esd->ESID, ch->CTS));
						return;
					}
				} else {
					GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d: MPEG-2 Carousel: updated AU (TS %d)\n", ch->esd->ESID, ch->CTS));
					ch->stream_state=0;
					ch->au_sn = AUSeqNum;
				}
			} else {
				if (hdr.randomAccessPointFlag) {
					/*initial tune-in*/
					if (ch->stream_state==1) {
						GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: RAP Carousel found (TS %d) - tuning in\n", ch->esd->ESID, ch->CTS));
						ch->au_sn = AUSeqNum;
						ch->stream_state = 0;
					}
					/*carousel RAP*/
					else if (AUSeqNum == ch->au_sn) {
						/*error recovery*/
						if (ch->stream_state==2) {
							GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: RAP Carousel found (TS %d) - recovering\n", ch->esd->ESID, ch->CTS));
							ch->stream_state = 0;
						} 
						else {
							ch->skip_carousel_au = 1;
							GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: RAP Carousel found (TS %d) - skipping\n", ch->esd->ESID, ch->CTS));
							return;
						}
					}
					/*regular RAP*/
					else {
						if (ch->stream_state==2) {
							GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: RAP Carousel found (TS %d) - recovering from previous errors\n", ch->esd->ESID, ch->CTS));
						}
 						ch->au_sn = AUSeqNum;
						ch->stream_state = 0;
					}
				} 
				/*regular AU but waiting for RAP*/
				else if (ch->stream_state) {
#if 0
					ch->skip_carousel_au = 1;
					GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: Waiting for RAP Carousel - skipping\n", ch->esd->ESID));
					return;
#else
					GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: Tuning in before RAP\n", ch->esd->ESID));
#endif
				}
				/*previous packet(s) lost: check whether the AU is critical or non-critical*/
				else if (reception_status == GF_REMOTE_SERVICE_ERROR) { 
					if (ch->au_sn == AUSeqNum) {
						GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: Lost a non critical packet\n", ch->esd->ESID));
					} 
					/*lost packets are critical*/
					else {
						ch->stream_state = 2;
						GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: Lost a critical packet - skipping\n", ch->esd->ESID));
						return;
					}
				} else {
					ch->au_sn = AUSeqNum;
					GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d: NON-RAP AU received (TS %d)\n", ch->esd->ESID, ch->DTS));
				}
			}
		}

		/*no carousel signaling, tune-in at first RAP*/
		else if (hdr.randomAccessPointFlag) {
			ch->stream_state = 0;
			GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d: RAP AU received\n", ch->esd->ESID));
		}
		/*waiting for RAP, return*/
		else if (ch->stream_state) {
			GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[SyncLayer] ES%d: Waiting for RAP - skipping AU (DTS %d)\n", ch->esd->ESID, ch->DTS));
			return;
		}
	}

	/*update the RAP marker on a per-packet basis (to cope with AVC/H.264 NALU->AU reconstruction)*/
	if (hdr.randomAccessPointFlag) ch->IsRap = 1;

	/*get AU end state*/	
	OldLength = ch->buffer ? ch->len : 0;
	EndAU = hdr.accessUnitEndFlag;
	if (ch->AULength == OldLength + payload_size) EndAU = 1;
	if (EndAU) ch->NextIsAUStart = 1;

	if (EndAU && !ch->IsClockInit) gf_es_check_timing(ch);

	/* we need to skip all the packets of the current AU in the carousel scenario */
	if (ch->skip_carousel_au == 1) return;

	if (!payload_size && EndAU && ch->buffer) {
		GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d: Empty packet, flushing buffer\n", ch->esd->ESID));
		Channel_DispatchAU(ch, 0);
		return;
	}
	if (!payload_size) return;

	/*missed the beginning of the AU, unusable*/
	if (!ch->buffer && !NewAU) {
		if (ch->esd->slConfig->useAccessUnitStartFlag) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_SYNC, ("[SyncLayer] ES%d: missed begin of AU\n", ch->esd->ESID));
		}
		if (ch->codec_resilient) NewAU = 1;
		else return;
	}

	/*Write the Packet payload to the buffer*/
	if (NewAU) {
		/*we should NEVER have a pending buffer at this stage*/
		assert(!ch->buffer);
		/*ignore length fields*/
		size = payload_size + ch->media_padding_bytes;
		ch->buffer = (char*)gf_malloc(sizeof(char) * size);
		if (!ch->buffer) {
			assert(0);
			return;
		}

		ch->allocSize = size;
		memset(ch->buffer, 0, sizeof(char) * size);
		ch->len = 0;
	}
	if (!ch->esd->slConfig->usePaddingFlag) hdr.paddingFlag = 0;
	
	if (ch->ipmp_tool) {
		GF_Err e;
		GF_IPMPEvent evt;
		memset(&evt, 0, sizeof(evt));
		evt.event_type=GF_IPMP_TOOL_PROCESS_DATA;
		evt.channel = ch;
		evt.data = payload;
		evt.data_size = payload_size;
		evt.is_encrypted = hdr.isma_encrypted;
		evt.isma_BSO = hdr.isma_BSO;
		e = ch->ipmp_tool->process(ch->ipmp_tool, &evt);

		/*we discard undecrypted AU*/
		if (e) {
			if (e==GF_EOS) {
				gf_es_on_eos(ch);
				/*restart*/
				if (evt.restart_requested) {
					if (ch->odm->parentscene->is_dynamic_scene) {
						gf_scene_restart_dynamic(ch->odm->parentscene, 0);
					} else {
						mediacontrol_restart(ch->odm);
					}
				}
			}
			return;
		}
	}

	gf_es_lock(ch, 1);

	if (hdr.paddingFlag && !EndAU) {	
		/*to do - this shouldn't happen anyway */

	} else {
		/*check if enough space*/
		size = ch->allocSize;
		if (size && (payload_size + ch->len <= size)) {
			memcpy(ch->buffer+ch->len, payload, payload_size);
			ch->len += payload_size;
		} else {
			size = payload_size + ch->len + ch->media_padding_bytes;
			ch->buffer = (char*)gf_realloc(ch->buffer, sizeof(char) * size);
			memcpy(ch->buffer+ch->len, payload, payload_size);
			ch->allocSize = size;
			ch->len += payload_size;
		}
		if (hdr.paddingFlag) ch->padingBits = hdr.paddingBits;
	}

	if (EndAU) Channel_DispatchAU(ch, hdr.au_duration);


	gf_es_lock(ch, 0);
}
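
A recurring detail in the function above is timestamp rescaling: transport timestamps are mapped onto the clock's millisecond timeline as ts_offset + (ts - seed_ts) * 1000 / ts_res, and MPEG-2 PCRs, delivered in 27 MHz units, are divided by 27000. The sketch below isolates those two conversions; the helper names are illustrative, not GPAC API, and timestamps earlier than the seed are simply treated as zero here (the dispatcher has a dedicated past-offset path for that case).

#include <stdint.h>
#include <stdio.h>

/* map a transport timestamp to the clock's millisecond timeline */
static uint32_t ts_to_ms(uint64_t ts, uint64_t seed_ts, uint32_t ts_res, uint32_t ts_offset)
{
	uint64_t rel = (ts > seed_ts) ? (ts - seed_ts) : 0;
	return (uint32_t) (ts_offset + rel * 1000 / ts_res);
}

/* MPEG-2 PCRs are carried in 27 MHz units: 27000 ticks per millisecond */
static uint32_t pcr_27mhz_to_ms(uint64_t pcr)
{
	return (uint32_t) (pcr / 27000);
}

int main(void)
{
	/* a 90 kHz CTS two seconds after the seed, with a 500 ms offset -> 2500 ms */
	printf("CTS -> %u ms\n", ts_to_ms(270000, 90000, 90000, 500));
	/* 54,000,000 ticks at 27 MHz -> 2000 ms */
	printf("PCR -> %u ms\n", pcr_27mhz_to_ms(54000000));
	return 0;
}
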
Example 4
GF_DBUnit *gf_es_get_au(GF_Channel *ch)
{
	Bool comp, is_new_data;
	GF_Err e, state;
	GF_SLHeader slh;

	if (ch->es_state != GF_ESM_ES_RUNNING) return NULL;

	if (!ch->is_pulling) {
		/*we must update buffering before fetching in order to stop buffering for streams with very few
		updates (especially streams with a single update, like most OD streams)*/
		if (ch->BufferOn) Channel_UpdateBuffering(ch, 0);
		if (ch->first_au_fetched && ch->BufferOn) return NULL;
		return ch->AU_buffer_first;
	}

	/*pull from stream - resume clock if needed*/
	ch_buffer_off(ch);

	memset(&slh, 0, sizeof(GF_SLHeader));

	e = gf_term_channel_get_sl_packet(ch->service, ch, (char **) &ch->AU_buffer_pull->data, &ch->AU_buffer_pull->dataLength, &slh, &comp, &state, &is_new_data);
	if (e) state = e;
	switch (state) {
	case GF_EOS:
		gf_es_on_eos(ch);
		return NULL;
	case GF_OK:
		break;
	default:
	{
		char m[100];
		sprintf(m, "Data reception failure on channel %d", ch->esd->ESID);
		gf_term_message(ch->odm->term, ch->service->url, m, state);
		return NULL;
	}
	}
	assert(!comp);
	/*new stream data: update timing by passing the SL header only (no payload is dispatched here)*/
	if (is_new_data) {
		gf_es_receive_sl_packet(ch->service, ch, NULL, 0, &slh, GF_OK);
		
		if (ch->ipmp_tool) {
			GF_IPMPEvent evt;
			memset(&evt, 0, sizeof(evt));
			evt.event_type=GF_IPMP_TOOL_PROCESS_DATA;
			evt.data = ch->AU_buffer_pull->data;
			evt.data_size = ch->AU_buffer_pull->dataLength;
			evt.is_encrypted = slh.isma_encrypted;
			evt.isma_BSO = slh.isma_BSO;
			evt.channel = ch;
			e = ch->ipmp_tool->process(ch->ipmp_tool, &evt);

			/*we discard undecrypted AU*/
			if (e) {
				if (e==GF_EOS) {
					gf_es_on_eos(ch);
					/*restart*/
					if (evt.restart_requested) {
						if (ch->odm->parentscene->is_dynamic_scene) {
							gf_scene_restart_dynamic(ch->odm->parentscene, 0);
						} else {
							mediacontrol_restart(ch->odm);
						}
					}
				}
				gf_term_channel_release_sl_packet(ch->service, ch);
				return NULL;
			}
		}
	}

	/*this may happen in file streaming when data has not arrived yet, in which case we discard the AU*/
	if (!ch->AU_buffer_pull->data) {
		gf_term_channel_release_sl_packet(ch->service, ch);
		return NULL;
	}
	ch->AU_buffer_pull->CTS = (u32) ch->CTS;
	ch->AU_buffer_pull->DTS = (u32) ch->DTS;
	ch->AU_buffer_pull->PaddingBits = ch->padingBits;
	if (ch->IsRap) ch->AU_buffer_pull->flags |= GF_DB_AU_RAP;
	return ch->AU_buffer_pull;
}
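
The branch at the top of gf_es_get_au separates the push model (AUs already dispatched into ch->AU_buffer_first, held back while the channel is rebuffering) from the pull model (an AU borrowed from the source and released after use). Below is a compact sketch of that split, assuming placeholder types and fetch/release callbacks rather than GPAC's service interface.

#include <stddef.h>

typedef struct au { struct au *next; unsigned dts, cts; } au_t;

typedef struct {
	int   is_pulling;
	int   buffering;             /* push model: channel is still (re)buffering */
	int   first_au_fetched;      /* push model: first AU already delivered */
	au_t *au_list;               /* push model: AUs already dispatched to the channel */
	au_t *(*fetch)(void *src);   /* pull model: borrow the next AU from the source */
	void  (*release)(void *src); /* pull model: give the borrowed AU back */
	void *src;
} channel_t;

au_t *channel_get_au(channel_t *ch)
{
	if (!ch->is_pulling) {
		/* push model: hold data back while rebuffering, except for the very first AU */
		if (ch->buffering && ch->first_au_fetched) return NULL;
		return ch->au_list;
	}
	/* pull model: the caller must hand the AU back with ch->release(ch->src)
	   once it has been consumed or rejected */
	return ch->fetch(ch->src);
}
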
Example 5
/*special handling of decoders not using ESM*/
static GF_Err PrivateScene_Process(GF_Codec *codec, u32 TimeAvailable)
{
	Bool resume_clock;
	u32 now;
	GF_Channel *ch;
	GF_Scene *scene_locked;
	GF_SceneDecoder *sdec = (GF_SceneDecoder *)codec->decio;
	GF_Err e = GF_OK;

	/*muting a systems codec means we don't decode at all until mute is off - side effects will likely be visible,
	but there is no other way to mute system AUs without modifying the content, whereas muting a visual stream simply hides its output...*/
	if (codec->Muted) return GF_OK;

	if (codec->Status == GF_ESM_CODEC_EOS) {
		gf_term_stop_codec(codec);
		return GF_OK;
	}

	scene_locked = codec->odm->subscene ? codec->odm->subscene : codec->odm->parentscene;

	ch = (GF_Channel*)gf_list_get(codec->inChannels, 0);
	if (!ch) return GF_OK;
	resume_clock = 0;
	/*init channel clock*/
	if (!ch->IsClockInit) {
		Bool started;
		/*signal seek*/
		if (!gf_mx_try_lock(scene_locked->root_od->term->compositor->mx)) return GF_OK;
		gf_es_init_dummy(ch);

		sdec->ProcessData(sdec, NULL, 0, ch->esd->ESID, -1, GF_CODEC_LEVEL_NORMAL);
		gf_mx_v(scene_locked->root_od->term->compositor->mx);
		started = gf_clock_is_started(ch->clock);
		/*let's be nice to the scene loader (which usually involves quite a lot of parsing): pause the clock while
		parsing*/
		gf_clock_pause(ch->clock);
		codec->last_unit_dts = 0;
		if (!started) return GF_OK;
	}

	codec->odm->current_time = codec->last_unit_cts = gf_clock_time(codec->ck);

	/*lock scene*/
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[PrivateDec] Codec %s Processing at %d\n", sdec->module_name , codec->odm->current_time));

	if (!gf_mx_try_lock(scene_locked->root_od->term->compositor->mx)) return GF_OK;
	now = gf_term_get_time(codec->odm->term);
	e = sdec->ProcessData(sdec, NULL, 0, ch->esd->ESID, codec->odm->current_time, GF_CODEC_LEVEL_NORMAL);
	now = gf_term_get_time(codec->odm->term) - now;
	codec->last_unit_dts ++;
	/*resume on error*/
	if (e && (codec->last_unit_dts<2) ) {
		gf_clock_resume(ch->clock);
		codec->last_unit_dts = 2;
	}
	/*resume clock on 2nd decode (we assume parsing is done in 2 steps, one for first frame display, one for complete parse)*/
	else if (codec->last_unit_dts==2) {
		gf_clock_resume(ch->clock);
	}

	codec_update_stats(codec, 0, now);

	gf_mx_v(scene_locked->root_od->term->compositor->mx);

	if (e==GF_EOS) {
		/*first end of stream, evaluate duration*/
		//if (!codec->odm->duration) gf_odm_set_duration(codec->odm, ch, codec->odm->current_time);
		gf_es_on_eos(ch);
		return GF_OK;
	}
	return e;
}
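
The last_unit_dts counter above acts as a small two-pass gate: the clock is paused before the scene loader parses, then resumed on the second decode pass, or immediately (with the counter forced to 2) if the first pass fails. A toy version of that gate is sketched below, with placeholder clock calls rather than gf_clock_pause/gf_clock_resume.

typedef struct { int paused; } toy_clock;

void toy_clock_pause(toy_clock *ck)  { ck->paused = 1; }
void toy_clock_resume(toy_clock *ck) { ck->paused = 0; }

typedef struct { toy_clock ck; unsigned pass; } toy_codec;

/* mirror of the clock-init branch: freeze the timeline before heavy parsing */
void begin_parse(toy_codec *c)
{
	toy_clock_pause(&c->ck);
	c->pass = 0;
}

/* called after each ProcessData pass; err is non-zero when the pass failed */
void end_of_pass(toy_codec *c, int err)
{
	c->pass++;
	if (err && (c->pass < 2)) {
		/* error on the first pass: do not leave the clock paused */
		toy_clock_resume(&c->ck);
		c->pass = 2;
	} else if (c->pass == 2) {
		/* second pass done: parsing is assumed complete */
		toy_clock_resume(&c->ck);
	}
}
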
Example 6
File: decoder.c Project: erelh/gpac
static void MediaDecoder_GetNextAU(GF_Codec *codec, GF_Channel **activeChannel, GF_DBUnit **nextAU)
{
	GF_Channel *ch;
	GF_DBUnit *AU;
	u32 count, minDTS, i;
	count = gf_list_count(codec->inChannels);
	*nextAU = NULL;
	*activeChannel = NULL;

	if (!count) return;

	minDTS = 0;

	/*browse from base to top layer*/
	for (i=0;i<count;i++) {
		ch = (GF_Channel*)gf_list_get(codec->inChannels, i);

		if ((codec->type==GF_STREAM_OCR) && ch->IsClockInit) {
			/*check duration - we assume that scalable OCR streams are just pure nonsense...*/
			if (ch->is_pulling && codec->odm->duration) {
				if (gf_clock_time(codec->ck) > codec->odm->duration)
					gf_es_on_eos(ch);
			}
			return;
		}

		AU = gf_es_get_au(ch);
		if (!AU) {
			if (! (*activeChannel)) *activeChannel = ch;
			continue;
		}

		/*aggregate all AUs with the same timestamp on the base AU and delete the upper layers*/
		if (! *nextAU) {
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d#CH%d AU CTS %d selected as first layer\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, AU->CTS));
			*nextAU = AU;
			*activeChannel = ch;
			minDTS = AU->DTS;
		} else if (AU->DTS == minDTS) {
			GF_DBUnit *baseAU = *nextAU;
			assert(baseAU);
			baseAU->data = gf_realloc(baseAU->data, baseAU->dataLength + AU->dataLength);
			memcpy(baseAU->data + baseAU->dataLength , AU->data, AU->dataLength);
			baseAU->dataLength += AU->dataLength;
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d#CH%d AU CTS %d reaggregated on base layer %d\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, AU->CTS, (*activeChannel)->esd->ESID));
			gf_es_drop_au(ch);
		} else {
			break;
		}
	}

	if (codec->is_reordering && *nextAU && codec->first_frame_dispatched) {
		if ((*activeChannel)->esd->slConfig->no_dts_signaling) {
			u32 CTS = (*nextAU)->CTS;
			/*reordering !!*/
			u32 prev_ts_diff;
			u32 diff = 0;
			if (codec->recomputed_cts && (codec->recomputed_cts > (*nextAU)->CTS)) {
				diff = codec->recomputed_cts - CTS;
			}

			prev_ts_diff = (CTS > codec->last_unit_cts) ? (CTS - codec->last_unit_cts) : (codec->last_unit_cts - CTS);
			if (!diff) diff = prev_ts_diff;
			else if (prev_ts_diff && (prev_ts_diff < diff) ) diff = prev_ts_diff;

			if (!codec->min_au_duration || (diff < codec->min_au_duration))
				codec->min_au_duration = diff;
		} else {
			codec->min_au_duration = 0;
			/*FIXME - we're breaking sync (couple of frames delay)*/
			(*nextAU)->CTS = (*nextAU)->DTS;
		}
	}
}
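
The aggregation step above grows the base AU with realloc and appends the enhancement payload with memcpy. The self-contained sketch below reproduces just that append; simple_au is an illustrative stand-in for GF_DBUnit, and, unlike the original, the sketch checks the realloc result before committing it.

#include <stdlib.h>
#include <string.h>
#include <stdio.h>

typedef struct {
	char    *data;
	unsigned dataLength;
	unsigned dts;
} simple_au;

/* append enh->data to base->data; returns 0 on success, -1 on allocation failure */
static int aggregate_on_base(simple_au *base, const simple_au *enh)
{
	char *grown = realloc(base->data, base->dataLength + enh->dataLength);
	if (!grown) return -1;
	memcpy(grown + base->dataLength, enh->data, enh->dataLength);
	base->data = grown;
	base->dataLength += enh->dataLength;
	return 0;
}

int main(void)
{
	simple_au base, enh;
	base.data = malloc(4); base.dataLength = 4; base.dts = 100;
	enh.data  = malloc(2); enh.dataLength  = 2; enh.dts  = 100;
	if (!base.data || !enh.data) return 1;
	memcpy(base.data, "BASE", 4);
	memcpy(enh.data, "EN", 2);

	/* same DTS on both layers: fold the enhancement payload onto the base AU */
	if ((base.dts == enh.dts) && (aggregate_on_base(&base, &enh) == 0))
		printf("aggregated AU: %u bytes at DTS %u\n", base.dataLength, base.dts);

	free(base.data);
	free(enh.data);
	return 0;
}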