Example #1
//Write a sample to the file - this is only called for self-contained media
GF_Err WriteSample(MovieWriter *mw, u32 size, u64 offset, u8 isEdited, GF_BitStream *bs)
{
	GF_DataMap *map;
	u32 bytes;

	if (size>mw->size) {
		mw->buffer = (char*)gf_realloc(mw->buffer, size);
		mw->size = size;
	}

	if (!mw->buffer) return GF_OUT_OF_MEM;

	if (isEdited) {
		map = mw->movie->editFileMap;
	} else {
		map = mw->movie->movieFileMap;
	}
	//get the payload...
	bytes = gf_isom_datamap_get_data(map, mw->buffer, size, offset);
	if (bytes != size) return GF_IO_ERR;
	//write it to our stream...
	bytes = gf_bs_write_data(bs, mw->buffer, size);
	if (bytes != size) return GF_IO_ERR;

	mw->nb_done++;
	gf_set_progress("ISO File Writing", mw->nb_done, mw->total_samples);
	return GF_OK;
}
Example #2
int main(int argc, char **argv)
{
	char data[188];
	u32 size, fsize, fdone;
	GF_M2TS_Demuxer *ts;
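	/*dest, has_seen_pat and on_m2ts_event are assumed to be defined elsewhere in the full source:
	  on_m2ts_event is the demuxer callback, which sets has_seen_pat once a PAT has been parsed
	  and writes the extracted PES payload to dest; the file is read twice - a first pass to find
	  the PAT, then a full pass that reports progress*/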

	FILE *src = gf_fopen(argv[1], "rb");
	ts = gf_m2ts_demux_new();
	ts->on_event = on_m2ts_event;

	fseek(src, 0, SEEK_END);
	fsize = ftell(src);
	fseek(src, 0, SEEK_SET);
	fdone = 0;

	while (!feof(src)) {
		size = fread(data, 1, 188, src);
		if (size<188) break;

		gf_m2ts_process_data(ts, data, size);
		if (has_seen_pat) break;
	}

	dest = gf_fopen("pes.mp3", "wb");
	gf_m2ts_reset_parsers(ts);
	gf_fseek(src, 0, SEEK_SET);
	fdone = 0;
	while (!feof(src)) {
		size = fread(data, 1, 188, src);
		if (size<188) break;

		gf_m2ts_process_data(ts, data, size);

		fdone += size;
		gf_set_progress("MPEG-2 TS Parsing", fdone, fsize);
	}
	gf_set_progress("MPEG-2 TS Parsing", fsize, fsize);

	gf_fclose(src);
	gf_m2ts_demux_del(ts);
	if (dest) gf_fclose(dest);
	return 0;
}
Example #3
GF_Err WriteToFile(GF_ISOFile *movie)
{
	FILE *stream;
	GF_BitStream *bs;
	MovieWriter mw;
	GF_Err e = GF_OK;
	if (!movie) return GF_BAD_PARAM;

	if (movie->openMode == GF_ISOM_OPEN_READ) return GF_BAD_PARAM;

	e = gf_isom_insert_copyright(movie);
	if (e) return e;

	memset(&mw, 0, sizeof(mw));
	mw.movie = movie;
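	/*mw holds the shared writer state (sample buffer, progress counters) passed to the WriteFlat/WriteInterleaved helpers*/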

	//capture mode: we don't need a new bitstream
	if (movie->openMode == GF_ISOM_OPEN_WRITE) {
		e = WriteFlat(&mw, 0, movie->editFileMap->bs);
	} else {
		//OK, we need a new bitstream
		stream = gf_f64_open(movie->finalName, "w+b");
		if (!stream) return GF_IO_ERR;
		bs = gf_bs_from_file(stream, GF_BITSTREAM_WRITE);
		if (!bs) {
			fclose(stream);
			return GF_OUT_OF_MEM;
		}

		switch (movie->storageMode) {
		case GF_ISOM_STORE_TIGHT:
		case GF_ISOM_STORE_INTERLEAVED:
			e = WriteInterleaved(&mw, bs, 0);
			break;
		case GF_ISOM_STORE_DRIFT_INTERLEAVED:
			e = WriteInterleaved(&mw, bs, 1);
			break;
		case GF_ISOM_STORE_STREAMABLE:
			e = WriteFlat(&mw, 1, bs);
			break;
		default:
			e = WriteFlat(&mw, 0, bs);
			break;
		}
		
		gf_bs_del(bs);
		fclose(stream);
	}
	if (mw.buffer) gf_free(mw.buffer);
	if (mw.nb_done<mw.total_samples) {
		gf_set_progress("ISO File Writing", mw.total_samples, mw.total_samples);
	}
	return e;
}
Example #4
GF_EXPORT
GF_Err gf_hinter_track_process(GF_RTPHinter *tkHint)
{
	GF_Err e;
	u32 i, descIndex, duration;
	u64 ts;
	u8 PadBits;
	Double ft;
	GF_ISOSample *samp;

	tkHint->HintSample = tkHint->RTPTime = 0;

	tkHint->TotalSample = gf_isom_get_sample_count(tkHint->file, tkHint->TrackNum);
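	/*ft converts timestamps from the media timescale to the SL/RTP timestamp resolution*/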
	ft = tkHint->rtp_p->sl_config.timestampResolution;
	ft /= tkHint->OrigTimeScale;
	
	e = GF_OK;
	for (i=0; i<tkHint->TotalSample; i++) {
		samp = gf_isom_get_sample(tkHint->file, tkHint->TrackNum, i+1, &descIndex);
		if (!samp) return GF_IO_ERR;

		//setup SL
		tkHint->CurrentSample = i + 1;

		/*keep the same AU indicator if sync shadow - TODO FIXME: this assumes shadows are interleaved with
		the track content, which is the case for GPAC scene carousel generation, but may not always be true*/
		if (samp->IsRAP==2) {
			tkHint->rtp_p->sl_header.AU_sequenceNumber -= 1;
			samp->IsRAP = 1;
		}

		ts = (u64) (ft * (s64) (samp->DTS+samp->CTS_Offset));
		tkHint->rtp_p->sl_header.compositionTimeStamp = ts;

		ts = (u64) (ft * (s64)(samp->DTS));
		tkHint->rtp_p->sl_header.decodingTimeStamp = ts;
		tkHint->rtp_p->sl_header.randomAccessPointFlag = samp->IsRAP;

		tkHint->base_offset_in_sample = 0;
		/*crypted*/
		if (tkHint->rtp_p->slMap.IV_length) {
			GF_ISMASample *s = gf_isom_get_ismacryp_sample(tkHint->file, tkHint->TrackNum, samp, descIndex);
			/*one byte is taken by the selective_enc flag*/
			if (s->flags & GF_ISOM_ISMA_USE_SEL_ENC) tkHint->base_offset_in_sample += 1;
			if (s->flags & GF_ISOM_ISMA_IS_ENCRYPTED) tkHint->base_offset_in_sample += s->IV_length + s->KI_length;
			gf_free(samp->data);
			samp->data = s->data;
			samp->dataLength = s->dataLength;
			gp_rtp_builder_set_cryp_info(tkHint->rtp_p, s->IV, (char*)s->key_indicator, (s->flags & GF_ISOM_ISMA_IS_ENCRYPTED) ? 1 : 0);
			s->data = NULL;
			s->dataLength = 0;
			gf_isom_ismacryp_delete_sample(s);
		}

		if (tkHint->rtp_p->sl_config.usePaddingFlag) {
			gf_isom_get_sample_padding_bits(tkHint->file, tkHint->TrackNum, i+1, &PadBits);
			tkHint->rtp_p->sl_header.paddingBits = PadBits;
		} else {
			tkHint->rtp_p->sl_header.paddingBits = 0;
		}
		
		duration = gf_isom_get_sample_duration(tkHint->file, tkHint->TrackNum, i+1);
		ts = (u32) (ft * (s64) (duration));

		/*unpack nal units*/
		if (tkHint->avc_nalu_size) {
			u32 v, size;
			u32 remain = samp->dataLength;
			char *ptr = samp->data;

			tkHint->rtp_p->sl_header.accessUnitStartFlag = 1;
			tkHint->rtp_p->sl_header.accessUnitEndFlag = 0;
			while (remain) {
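				/*read the avc_nalu_size-byte big-endian NAL length field, then packetize that NAL unit*/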
				size = 0;
				v = tkHint->avc_nalu_size;
				while (v) {
					size |= (u8) *ptr;
					ptr++;
					remain--;
					v-=1;
					if (v) size<<=8;
				}
				tkHint->base_offset_in_sample = samp->dataLength-remain;
				remain -= size;
				tkHint->rtp_p->sl_header.accessUnitEndFlag = remain ? 0 : 1;
				e = gf_rtp_builder_process(tkHint->rtp_p, ptr, size, (u8) !remain, samp->dataLength, duration, (u8) (descIndex + GF_RTP_TX3G_SIDX_OFFSET) );
				ptr += size;
				tkHint->rtp_p->sl_header.accessUnitStartFlag = 0;
			}
		} else {
			e = gf_rtp_builder_process(tkHint->rtp_p, samp->data, samp->dataLength, 1, samp->dataLength, duration, (u8) (descIndex + GF_RTP_TX3G_SIDX_OFFSET) );
		}
		tkHint->rtp_p->sl_header.packetSequenceNumber += 1;

		//signal some progress
		gf_set_progress("Hinting", tkHint->CurrentSample, tkHint->TotalSample);

		tkHint->rtp_p->sl_header.AU_sequenceNumber += 1;
		gf_isom_sample_del(&samp);

		if (e) return e;
	}

	//flush
	gf_rtp_builder_process(tkHint->rtp_p, NULL, 0, 1, 0, 0, 0);

	gf_isom_end_hint_sample(tkHint->file, tkHint->HintTrack, (u8) tkHint->SampleIsRAP);
	return GF_OK;
}
Example #5
GF_Err gf_webvtt_parser_parse(GF_WebVTTParser *parser, u32 duration)
{
	char            szLine[2048];
	char            *sOK;
	u32             len;
	GF_Err          e;
	Bool            do_parse = GF_TRUE;
	GF_WebVTTCue    *cue = NULL;
	u32             start = 0;
	u32             end = 0;
	char            *prevLine = NULL;
	char            *header = NULL;
	u32             header_len = 0;
	Bool            had_marks = GF_FALSE;

	if (!parser) return GF_BAD_PARAM;
	if (parser->is_srt) {
		parser->on_header_parsed(parser->user, gf_strdup("WEBVTT\n"));
	}
	while (do_parse) {
		sOK = gf_text_get_utf8_line(szLine, 2048, parser->vtt_in, parser->unicode_type);
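		/*strip any trailing CR/LF; REM_TRAIL_MARKS is expected to set had_marks when characters were removed*/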
		REM_TRAIL_MARKS(szLine, "\r\n")
		len = (u32) strlen(szLine);
		switch (parser->state) {
		case WEBVTT_PARSER_STATE_WAITING_SIGNATURE:
			if (!sOK || len < 6 || strnicmp(szLine, "WEBVTT", 6) || (len > 6 && szLine[6] != ' ' && szLine[6] != '\t')) {
				e = GF_CORRUPTED_DATA;
				parser->report_message(parser->user, e, "Bad WEBVTT file signature %s", szLine);
				goto exit;
			} else {
				if (had_marks) {
					szLine[len] = '\n';
					len++;
				}
				header = gf_strdup(szLine);
				header_len = len;
				parser->state = WEBVTT_PARSER_STATE_WAITING_HEADER;
			}
			break; /* proceed to next line */
		case WEBVTT_PARSER_STATE_WAITING_HEADER:
			if (prevLine) {
				u32 prev_len = (u32) strlen(prevLine);
				header = (char *)gf_realloc(header, header_len + prev_len + 1);
				strcpy(header+header_len,prevLine);
				header_len += prev_len;
				gf_free(prevLine);
				prevLine = NULL;
			}
			if (sOK && len) {
				if (strstr(szLine, "-->")) {
					parser->on_header_parsed(parser->user, header);
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE_TIMESTAMP;
					/* no break: fall through to the next state */
				} else {
					if (had_marks) {
						szLine[len] = '\n';
						len++;
					}
					prevLine = gf_strdup(szLine);
					break; /* proceed to next line */
				}
			} else {
				parser->on_header_parsed(parser->user, header);
				if (!sOK) {
					/* end of file, parsing is done */
					do_parse = GF_FALSE;
					break;
				} else {
					/* empty line means end of header */
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE;
					/* no break, continue to the next state*/
				}
			}
		case WEBVTT_PARSER_STATE_WAITING_CUE:
			if (sOK && len) {
				if (strstr(szLine, "-->")) {
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE_TIMESTAMP;
					/* continue to the next state without breaking */
				} else {
					/* discard the previous line */
					/* should we do something with it ? callback ?*/
					if (prevLine) {
						gf_free(prevLine);
						prevLine = NULL;
					}
					/* save this new line */
					if (had_marks) {
						szLine[len] = '\n';
						len++;
					}
					prevLine = gf_strdup(szLine);
					/* stay in the same state */
					break;
				}
			} else {
				/* discard the previous line */
				/* should we do something with it ? callback ?*/
				if (prevLine) {
					gf_free(prevLine);
					prevLine = NULL;
				}
				if (!sOK) {
					do_parse = GF_FALSE;
					break;
				} else {
					/* remove empty lines and stay in the same state */
					break;
				}
			}
		case WEBVTT_PARSER_STATE_WAITING_CUE_TIMESTAMP:
			if (sOK && len) {
				if (cue == NULL) {
					cue   = gf_webvtt_cue_new();
				}
				if (prevLine) {
					gf_webvtt_cue_add_property(cue, WEBVTT_ID, prevLine, (u32) strlen(prevLine));
					gf_free(prevLine);
					prevLine = NULL;
				}
				e = gf_webvtt_parser_parse_timings_settings(parser, cue, szLine, len);
				if (e) {
					if (cue) gf_webvtt_cue_del(cue);
					cue = NULL;
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE;
				} else {
					start = (u32)gf_webvtt_timestamp_get(&cue->start);
					end   = (u32)gf_webvtt_timestamp_get(&cue->end);
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE_PAYLOAD;
				}
			} else {
				/* not possible */
				assert(0);
			}
			break;
		case WEBVTT_PARSER_STATE_WAITING_CUE_PAYLOAD:
			if (sOK && len) {
				if (had_marks) {
					szLine[len] = '\n';
					len++;
				}
				gf_webvtt_cue_add_property(cue, WEBVTT_PAYLOAD, szLine, len);
				/* remain in the same state as a cue payload can have multiple lines */
				break;
			} else {
				/* end of the current cue */
				gf_webvtt_add_cue_to_samples(parser, parser->samples, cue);
				cue = NULL;

				gf_set_progress("Importing WebVTT", gf_ftell(parser->vtt_in), parser->file_size);
				if ((duration && (end >= duration)) || !sOK) {
					do_parse = GF_FALSE;
					break;
				} else {
					/* empty line, move to next cue */
					parser->state = WEBVTT_PARSER_STATE_WAITING_CUE;
					break;
				}
			}
		}
		if (duration && (start >= duration)) {
			do_parse = GF_FALSE;
			break;
		}
	}


	/* no more cues to come, flush everything */
	if (cue) {
		gf_webvtt_add_cue_to_samples(parser, parser->samples, cue);
		cue = NULL;
	}
	while (gf_list_count(parser->samples) > 0) {
		GF_WebVTTSample *sample = (GF_WebVTTSample *)gf_list_get(parser->samples, 0);
		parser->last_duration = sample->end - sample->start;
		gf_list_rem(parser->samples, 0);
		parser->on_sample_parsed(parser->user, sample);
	}
	gf_set_progress("Importing WebVTT", parser->file_size, parser->file_size);
	e = GF_OK;
exit:
	if (cue) gf_webvtt_cue_del(cue);
	if (prevLine) gf_free(prevLine);
	if (header) gf_free(header);
	return e;
}
Example #6
static GF_Err SVG_ProcessData(GF_SceneDecoder *plug, const char *inBuffer, u32 inBufferLength,
                              u16 ES_ID, u32 stream_time, u32 mmlevel)
{
	GF_Err e = GF_OK;
	SVGIn *svgin = (SVGIn *)plug->privateStack;

	if (stream_time==(u32)-1) {
		if (svgin->src) gzclose(svgin->src);
		svgin->src = NULL;
		gf_sm_load_done(&svgin->loader);
		svgin->loader.fileName = NULL;
		svgin->file_pos = 0;
		gf_sg_reset(svgin->scene->graph);
		return GF_OK;
	}

	switch (svgin->oti) {
	/*!OTI for SVG dummy stream (dsi = file name) - GPAC internal*/
	case GPAC_OTI_PRIVATE_SCENE_SVG:
		/*full doc parsing*/
		if ((svgin->sax_max_duration==(u32) -1) && svgin->file_size) {
			/*init step*/
			if (!svgin->loader.fileName) {
				/*not done yet*/
				if (!svg_check_download(svgin)) return GF_OK;
				svgin->loader.fileName = svgin->file_name;
				e = gf_sm_load_init(&svgin->loader);
			} else {
				e = gf_sm_load_run(&svgin->loader);
			}
		}
		/*chunk parsing*/
		else {
			u32 entry_time;
			char file_buf[SVG_PROGRESSIVE_BUFFER_SIZE+2];
			/*initial load*/
			if (!svgin->src && !svgin->file_pos) {
				svgin->src = gzopen(svgin->file_name, "rb");
				if (!svgin->src) return GF_URL_ERROR;
				svgin->loader.fileName = svgin->file_name;
				gf_sm_load_init(&svgin->loader);
			}
			e = GF_OK;
			entry_time = gf_sys_clock();

			while (1) {
				u32 diff;
				s32 nb_read;
				nb_read = gzread(svgin->src, file_buf, SVG_PROGRESSIVE_BUFFER_SIZE);
				/*we may have read nothing, but we still need to call the parser in case it was suspended*/
				if (nb_read<=0) {
					nb_read = 0;
					if ((e==GF_EOS) && gzeof(svgin->src)) {
						gf_set_progress("SVG Parsing", svgin->file_pos, svgin->file_size);
						gzclose(svgin->src);
						svgin->src = NULL;
						gf_sm_load_done(&svgin->loader);
					}
					goto exit;
				}

				file_buf[nb_read] = file_buf[nb_read+1] = 0;

				e = gf_sm_load_string(&svgin->loader, file_buf, 0);
				svgin->file_pos += nb_read;

				/*decompressed data can exceed the reported (compressed) file size - adjust it so the progress total stays consistent*/
				if (svgin->file_pos > svgin->file_size) svgin->file_size = svgin->file_pos + 1;
				if (e) break;

				gf_set_progress("SVG Parsing", svgin->file_pos, svgin->file_size);
				diff = gf_sys_clock() - entry_time;
				if (diff > svgin->sax_max_duration) {
					break;
				}
			}
		}
		break;

	/*!OTI for streaming SVG - GPAC internal*/
	case GPAC_OTI_SCENE_SVG:
		e = gf_sm_load_string(&svgin->loader, inBuffer, 0);
		break;

	/*!OTI for streaming SVG + gz - GPAC internal*/
	case GPAC_OTI_SCENE_SVG_GZ:
		e = svgin_deflate(svgin, inBuffer, inBufferLength);
		break;

	/*!OTI for DIMS (dsi = 3GPP DIMS configuration) - GPAC internal*/
	case GPAC_OTI_SCENE_DIMS:
	{
		u8 prev, dims_hdr;
		u32 nb_bytes, size;
		u64 pos;
		char * buf2 = gf_malloc(inBufferLength);
		GF_BitStream *bs = gf_bs_new(inBuffer, inBufferLength, GF_BITSTREAM_READ);
		memcpy(buf2, inBuffer, inBufferLength);
//			FILE *f = gf_f64_open("dump.svg", "wb");
//
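		/*each DIMS unit is framed as a 2-byte size (0 means a 4-byte extended size follows), a 1-byte
		  DIMS header and the unit payload; compressed units are inflated before being loaded*/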
		while (gf_bs_available(bs)) {
			pos = gf_bs_get_position(bs);
			size = gf_bs_read_u16(bs);
			nb_bytes = 2;
			/*GPAC internal hack*/
			if (!size) {
				size = gf_bs_read_u32(bs);
				nb_bytes = 6;
			}
//	            gf_fwrite( inBuffer + pos + nb_bytes + 1, 1, size - 1, f );

			dims_hdr = gf_bs_read_u8(bs);
			prev = buf2[pos + nb_bytes + size];

			buf2[pos + nb_bytes + size] = 0;
			if (dims_hdr & GF_DIMS_UNIT_C) {
				e = svgin_deflate(svgin, buf2 + pos + nb_bytes + 1, size - 1);
			} else {
				e = gf_sm_load_string(&svgin->loader, buf2 + pos + nb_bytes + 1, 0);
			}
			buf2[pos + nb_bytes + size] = prev;
			gf_bs_skip_bytes(bs, size-1);

		}
//          fclose(f);
		gf_bs_del(bs);
	}
	break;

	default:
		return GF_BAD_PARAM;
	}

exit:
	if ((e>=GF_OK) && (svgin->scene->graph_attached!=1) && (gf_sg_get_root_node(svgin->loader.scene_graph)!=NULL) ) {
		gf_scene_attach_to_compositor(svgin->scene);
	}
	/*prepare for next playback*/
	if (e) {
		gf_sm_load_done(&svgin->loader);
		svgin->loader.fileName = NULL;
		e = GF_EOS;
	}
	return e;
}
Example #7
//-------------------------------
Bool CNativeWrapper::GPAC_EventProc(void *cbk, GF_Event *evt) {
	if (cbk)
	{
		CNativeWrapper* ptr = (CNativeWrapper*)cbk;
		char msg[4096];
		msg[0] = 0;
		LOGD("GPAC_EventProc() Message=%d", evt->type);
		switch (evt->type) {
		case GF_EVENT_CLICK:
		case GF_EVENT_MOUSEUP:
		case GF_EVENT_MOUSEDOWN:
		case GF_EVENT_MOUSEOVER:
		case GF_EVENT_MOUSEOUT:
		case GF_EVENT_MOUSEMOVE:
		case GF_EVENT_MOUSEWHEEL:
		case GF_EVENT_KEYUP:
		case GF_EVENT_KEYDOWN:
		case GF_EVENT_LONGKEYPRESS:
		case GF_EVENT_TEXTINPUT:
			/* We ignore all these events */
			break;
		case GF_EVENT_MEDIA_SETUP_BEGIN:
		case GF_EVENT_MEDIA_SETUP_DONE:
		case GF_EVENT_MEDIA_LOAD_START:
		case GF_EVENT_MEDIA_PLAYING:
		case GF_EVENT_MEDIA_WAITING:
		case GF_EVENT_MEDIA_PROGRESS:
		case GF_EVENT_MEDIA_LOAD_DONE:
		case GF_EVENT_ABORT:
		case GF_EVENT_ERROR:
			LOGD("GPAC_EventProc() Media Event detected = [index=%d]", evt->type - GF_EVENT_MEDIA_SETUP_BEGIN);
			break;
		case GF_EVENT_MESSAGE:
		{
			ptr->debug_log("GPAC_EventProc start");
			if ( evt->message.message )
			{
				strcat(msg, evt->message.message);
				strcat(msg, ": ");
			}
			strcat(msg, gf_error_to_string(evt->message.error));

			ptr->debug_log(msg);
			//ptr->MessageBox(msg, evt->message.service ? evt->message.service : "GF_EVENT_MESSAGE", evt->message.error);
			ptr->debug_log("GPAC_EventProc end");
		}
		break;
		case GF_EVENT_CONNECT:
			if (evt->connect.is_connected)
				ptr->MessageBox("Connected", "Connected to scene", GF_OK);
			else
				ptr->MessageBox("Disconnected", "Disconnected from scene.", GF_OK);
			break;
		case GF_EVENT_PROGRESS:
		{
			const char *szTitle;
			if (evt->progress.progress_type==0)
				szTitle = "Buffering";
			else if (evt->progress.progress_type==1)
				szTitle = "Downloading...";
			else if (evt->progress.progress_type==2)
				szTitle = "Import ";
			else
				szTitle = "Unknown Progress Event";
			ptr->Osmo4_progress_cbk(ptr, szTitle, evt->progress.done, evt->progress.total);
			gf_set_progress(szTitle, evt->progress.done, evt->progress.total);
		}
		break;
		case GF_EVENT_TEXT_EDITING_START:
		case GF_EVENT_TEXT_EDITING_END:
		{
			JavaEnvTh * env = ptr->getEnv();
			if (!env || !env->cbk_showKeyboard)
				return GF_FALSE;
			LOGI("Needs to display/hide the Virtual Keyboard (%d)", evt->type);
			env->env->CallVoidMethod(env->cbk_obj, env->cbk_showKeyboard, GF_EVENT_TEXT_EDITING_START == evt->type);
			LOGV("Done showing virtual keyboard (%d)", evt->type);
		}
		break;
		case GF_EVENT_EOS:
			LOGI("EOS Reached (%d)", evt->type);
			break;
		case GF_EVENT_DISCONNECT:
			/* FIXME : not sure about this behaviour */
			if (ptr)
				ptr->disconnect();
			break;
		case GF_EVENT_NAVIGATE:
			ptr->navigate( evt);
			break;
		default:
			LOGI("Unknown Message %d", evt->type);
		}
	}
	return GF_FALSE;
}
Example #8
static GF_Err gf_sm_load_run_isom(GF_SceneLoader *load)
{
	GF_Err e;
	FILE *logs;
	u32 i, j, di, nbBifs, nbLaser, nb_samp, samp_done, init_offset;
	GF_StreamContext *sc;
	GF_ESD *esd;
	GF_ODCodec *od_dec;
#ifndef GPAC_DISABLE_BIFS
	GF_BifsDecoder *bifs_dec;
#endif
#ifndef GPAC_DISABLE_LASER
	GF_LASeRCodec *lsr_dec;
#endif

	if (!load || !load->isom) return GF_BAD_PARAM;

	nbBifs = nbLaser = 0;
	e = GF_OK;
#ifndef GPAC_DISABLE_BIFS
	bifs_dec = gf_bifs_decoder_new(load->scene_graph, 1);
	gf_bifs_decoder_set_extraction_path(bifs_dec, load->localPath, load->fileName);
#endif
	od_dec = gf_odf_codec_new();
	logs = NULL;
#ifndef GPAC_DISABLE_LASER
	lsr_dec = gf_laser_decoder_new(load->scene_graph);
#endif
	esd = NULL;
	/*load each stream*/
	nb_samp = 0;
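	/*first pass: count the scene/OD samples so that loading progress can be reported against a total*/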
	for (i=0; i<gf_isom_get_track_count(load->isom); i++) {
		u32 type = gf_isom_get_media_type(load->isom, i+1);
		switch (type) {
		case GF_ISOM_MEDIA_SCENE:
		case GF_ISOM_MEDIA_OD:
			nb_samp += gf_isom_get_sample_count(load->isom, i+1);
			break;
		default:
			break;
		}
	}
	samp_done = 1;
	gf_isom_text_set_streaming_mode(load->isom, 1);

	for (i=0; i<gf_isom_get_track_count(load->isom); i++) {
		u32 type = gf_isom_get_media_type(load->isom, i+1);
		switch (type) {
		case GF_ISOM_MEDIA_SCENE:
		case GF_ISOM_MEDIA_OD:
			break;
		default:
			continue;
		}
		esd = gf_isom_get_esd(load->isom, i+1, 1);
		if (!esd) continue;


		if ((esd->decoderConfig->objectTypeIndication == GPAC_OTI_SCENE_AFX) ||
		        (esd->decoderConfig->objectTypeIndication == GPAC_OTI_SCENE_SYNTHESIZED_TEXTURE)
		   ) {
			nb_samp += gf_isom_get_sample_count(load->isom, i+1);
			continue;
		}
		sc = gf_sm_stream_new(load->ctx, esd->ESID, esd->decoderConfig->streamType, esd->decoderConfig->objectTypeIndication);
		sc->streamType = esd->decoderConfig->streamType;
		sc->ESID = esd->ESID;
		sc->objectType = esd->decoderConfig->objectTypeIndication;
		sc->timeScale = gf_isom_get_media_timescale(load->isom, i+1);

		/*we still need to reconfig the BIFS*/
		if (esd->decoderConfig->streamType==GF_STREAM_SCENE) {
#ifndef GPAC_DISABLE_BIFS
			/*BIFS*/
			if (esd->decoderConfig->objectTypeIndication<=2) {
				if (!esd->dependsOnESID && nbBifs && !i)
					mp4_report(load, GF_OK, "several scene namespaces used or improper scene dependencies in file - import may be incorrect");
				if (!esd->decoderConfig->decoderSpecificInfo) {
					/* Hack for T-DMB non compliant streams */
					e = gf_bifs_decoder_configure_stream(bifs_dec, esd->ESID, NULL, 0, esd->decoderConfig->objectTypeIndication);
				} else {
					e = gf_bifs_decoder_configure_stream(bifs_dec, esd->ESID, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, esd->decoderConfig->objectTypeIndication);
				}
				if (e) goto exit;
				nbBifs++;
			}
#endif

#ifndef GPAC_DISABLE_LASER
			/*LASER*/
			if (esd->decoderConfig->objectTypeIndication==0x09) {
				if (!esd->dependsOnESID && nbBifs && !i)
					mp4_report(load, GF_OK, "several scene namespaces used or improper scene dependencies in file - import may be incorrect");
				e = gf_laser_decoder_configure_stream(lsr_dec, esd->ESID, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
				if (e) goto exit;
				nbLaser++;
			}
#endif
		}

		init_offset = 0;
		/*dump all AUs*/
		for (j=0; j<gf_isom_get_sample_count(load->isom, i+1); j++) {
			GF_AUContext *au;
			GF_ISOSample *samp = gf_isom_get_sample(load->isom, i+1, j+1, &di);
			if (!samp) {
				mp4_report(load, gf_isom_last_error(load->isom), "Unable to fetch sample %d from track ID %d - aborting track import", j+1, gf_isom_get_track_id(load->isom, i+1));
				break;
			}
			/*check if track has initial offset*/
			if (!j && gf_isom_get_edit_segment_count(load->isom, i+1)) {
				u64 EditTime, dur, mtime;
				u8 mode;
				gf_isom_get_edit_segment(load->isom, i+1, 1, &EditTime, &dur, &mtime, &mode);
				if (mode==GF_ISOM_EDIT_EMPTY) {
					init_offset = (u32) (dur * sc->timeScale / gf_isom_get_timescale(load->isom) );
				}
			}
			samp->DTS += init_offset;

			au = gf_sm_stream_au_new(sc, samp->DTS, ((Double)(s64) samp->DTS) / sc->timeScale, (samp->IsRAP==RAP) ? 1 : 0);

			if (esd->decoderConfig->streamType==GF_STREAM_SCENE) {
#ifndef GPAC_DISABLE_BIFS
				if (esd->decoderConfig->objectTypeIndication<=2)
					e = gf_bifs_decode_command_list(bifs_dec, esd->ESID, samp->data, samp->dataLength, au->commands);
#endif
#ifndef GPAC_DISABLE_LASER
				if (esd->decoderConfig->objectTypeIndication==0x09)
					e = gf_laser_decode_command_list(lsr_dec, esd->ESID, samp->data, samp->dataLength, au->commands);
#endif
			} else {
				e = gf_odf_codec_set_au(od_dec, samp->data, samp->dataLength);
				if (!e) e = gf_odf_codec_decode(od_dec);
				if (!e) {
					while (1) {
						GF_ODCom *odc = gf_odf_codec_get_com(od_dec);
						if (!odc) break;
						/*update ESDs if any*/
						UpdateODCommand(load->isom, odc);
						gf_list_add(au->commands, odc);
					}
				}
			}
			gf_isom_sample_del(&samp);
			if (e) {
				mp4_report(load, gf_isom_last_error(load->isom), "decoding sample %d from track ID %d failed", j+1, gf_isom_get_track_id(load->isom, i+1));
				goto exit;
			}

			samp_done++;
			gf_set_progress("MP4 Loading", samp_done, nb_samp);
		}
		gf_odf_desc_del((GF_Descriptor *) esd);
		esd = NULL;
	}
	gf_isom_text_set_streaming_mode(load->isom, 0);

exit:
#ifndef GPAC_DISABLE_BIFS
	gf_bifs_decoder_del(bifs_dec);
#endif
	gf_odf_codec_del(od_dec);
#ifndef GPAC_DISABLE_LASER
	gf_laser_decoder_del(lsr_dec);
#endif
	if (esd) gf_odf_desc_del((GF_Descriptor *) esd);
	if (logs) gf_fclose(logs);
	return e;
}
Example #9
Bool Osmo4_EventProc(void *priv, GF_Event *evt)
{
	u32 dur;
	Osmo4 *gpac = (Osmo4 *) priv;
	CMainFrame *pFrame = (CMainFrame *) gpac->m_pMainWnd;
	/*shutdown*/
	if (!pFrame) return 0;

	switch (evt->type) {
	case GF_EVENT_DURATION:
		dur = (u32) (1000 * evt->duration.duration);
		//if (dur<1100) dur = 0;
		pFrame->m_pPlayList->SetDuration((u32) evt->duration.duration );
		gpac->max_duration = dur;
		gpac->can_seek = evt->duration.can_seek;
		if (!gpac->can_seek) {
			pFrame->m_Sliders.m_PosSlider.EnableWindow(FALSE);
		} else {
			pFrame->m_Sliders.m_PosSlider.EnableWindow(TRUE);
			pFrame->m_Sliders.m_PosSlider.SetRangeMin(0);
			pFrame->m_Sliders.m_PosSlider.SetRangeMax(dur);
		}
		break;

	case GF_EVENT_MESSAGE:
		if (!evt->message.service || !strcmp(evt->message.service, (LPCSTR) pFrame->m_pPlayList->GetURL() )) {
			pFrame->console_service = "main service";
		} else {
			pFrame->console_service = evt->message.service;
		}
		if (evt->message.error!=GF_OK) {
			if (evt->message.error<GF_OK || !gpac->m_NoConsole) {
				pFrame->console_err = evt->message.error;
				pFrame->console_message = evt->message.message;
				gpac->m_pMainWnd->PostMessage(WM_CONSOLEMSG, 0, 0);

				/*any error before connection confirm is a service connection error*/
				if (!gpac->m_isopen) pFrame->m_pPlayList->SetDead();
			}
			return 0;
		}
		if (gpac->m_NoConsole) return 0;

		/*process user message*/
		pFrame->console_err = GF_OK;
		pFrame->console_message = evt->message.message;
		gpac->m_pMainWnd->PostMessage(WM_CONSOLEMSG, 0, 0);
		break;
	case GF_EVENT_PROGRESS:
		char *szType;
		if (evt->progress.progress_type==0) szType = "Buffer ";
		else if (evt->progress.progress_type==1) szType = "Download ";
		else if (evt->progress.progress_type==2) szType = "Import ";
		else szType = "Progress ";	/*fallback so szType is never read uninitialized*/
		gf_set_progress(szType, evt->progress.done, evt->progress.total);
		break;
	case GF_EVENT_NAVIGATE_INFO:
		pFrame->console_message = evt->navigate.to_url;
		gpac->m_pMainWnd->PostMessage(WM_CONSOLEMSG, 1000, 0);
		break;

	case GF_EVENT_SCENE_SIZE:
		if (evt->size.width && evt->size.height) {
			gpac->orig_width = evt->size.width;
			gpac->orig_height = evt->size.height;
			if (gpac->m_term && !pFrame->m_bFullScreen) 
				pFrame->PostMessage(WM_SETSIZE, evt->size.width, evt->size.height);
		}
		break;
	/*don't resize on win32 msg notif*/
#if 0
	case GF_EVENT_SIZE:
		if (/*gpac->m_term && !pFrame->m_bFullScreen && */gpac->orig_width && (evt->size.width < W32_MIN_WIDTH) ) 
			pFrame->PostMessage(WM_SETSIZE, W32_MIN_WIDTH, (W32_MIN_WIDTH*gpac->orig_height) / gpac->orig_width);
		else
			pFrame->PostMessage(WM_SETSIZE, evt->size.width, evt->size.height);
		break;
#endif

	case GF_EVENT_CONNECT:
//		if (pFrame->m_bStartupFile) return 0;

		pFrame->BuildStreamList(1);
		if (evt->connect.is_connected) {
			pFrame->BuildChapterList(0);
			gpac->m_isopen = 1;
			//resetting sliders when opening a new file creates a deadlock on the window thread which is disconnecting
			pFrame->m_wndToolBar.SetButtonInfo(5, ID_FILE_PLAY, TBBS_BUTTON, gpac->m_isopen ? 4 : 3);
			pFrame->m_Sliders.m_PosSlider.SetPos(0);
			pFrame->SetProgTimer(1);
		} else {
			gpac->max_duration = 0;
			gpac->m_isopen = 0;
			pFrame->BuildChapterList(1);
		}
		if (!pFrame->m_bFullScreen) {
			pFrame->SetFocus();
			pFrame->SetForegroundWindow();
		}
		break;

	case GF_EVENT_QUIT:
		pFrame->PostMessage(WM_CLOSE, 0L, 0L);
		break;
	case GF_EVENT_MIGRATE:
	{
	}
		break;
	case GF_EVENT_KEYDOWN:
		gf_term_process_shortcut(gpac->m_term, evt);
		/*update volume control*/
		pFrame->m_Sliders.SetVolume();

		switch (evt->key.key_code) {
		case GF_KEY_HOME:
			gf_term_set_option(gpac->m_term, GF_OPT_NAVIGATION_TYPE, 1);
			break;
		case GF_KEY_ESCAPE:
			pFrame->PostMessage(WM_COMMAND, ID_VIEW_FULLSCREEN);
			break;
		case GF_KEY_MEDIANEXTTRACK:
			pFrame->m_pPlayList->PlayNext();
			break;
		case GF_KEY_MEDIAPREVIOUSTRACK:
			pFrame->m_pPlayList->PlayPrev();
			break;
		case GF_KEY_H:
			if ((evt->key.flags & GF_KEY_MOD_CTRL) && gpac->m_isopen)
				gf_term_switch_quality(gpac->m_term, 1);
			break;
		case GF_KEY_L:
			if ((evt->key.flags & GF_KEY_MOD_CTRL) && gpac->m_isopen)
				gf_term_switch_quality(gpac->m_term, 0);
			break;
		}
		break;
	case GF_EVENT_NAVIGATE:
		/*fixme - a proper browser would require checking mime type & co*/
		/*store URL since it may be destroyed, and post message*/
		gpac->m_navigate_url = evt->navigate.to_url;
		pFrame->PostMessage(WM_NAVIGATE, NULL, NULL);
		return 1;
	case GF_EVENT_VIEWPOINTS:
		pFrame->BuildViewList();
		return 0;
	case GF_EVENT_STREAMLIST:
		pFrame->BuildStreamList(0);
		return 0;
	case GF_EVENT_SET_CAPTION:
		pFrame->SetWindowText(evt->caption.caption);
		break;
	case GF_EVENT_DBLCLICK:
		pFrame->PostMessage(WM_COMMAND, ID_VIEW_FULLSCREEN);
		return 0;
	case GF_EVENT_AUTHORIZATION:
	{
		UserPassDialog passdlg;
		return passdlg.GetPassword(evt->auth.site_url, evt->auth.user, evt->auth.password);
	}
	}
	return 0;
}