// Start playback at the requested time (in milliseconds): locate the
// corresponding sync sample in the track and reset the stream's timebase to it.
void CMp4ByteStream::play (uint64_t start)
{
  m_play_start_time = start;

  MP4Timestamp mp4_ts;
  MP4SampleId mp4_sampleId;

  m_parent->lock_file_mutex();
  mp4_ts = MP4ConvertToTrackTimestamp(m_parent->get_file(),
				      m_track,
				      start,
				      MP4_MSECS_TIME_SCALE);
  mp4_sampleId = MP4GetSampleIdFromTime(m_parent->get_file(),
					m_track,
					mp4_ts, 
					TRUE);
  uint64_t ts;
  MP4Timestamp sampleTime;

  sampleTime = MP4GetSampleTime(m_parent->get_file(),
				m_track,
				mp4_sampleId);
  ts = MP4ConvertFromTrackTimestamp(m_parent->get_file(),
				    m_track,
				    sampleTime,
				    MP4_MSECS_TIME_SCALE);
  m_parent->unlock_file_mutex();
#ifdef DEBUG_MP4_FRAME
  mp4f_message(LOG_DEBUG, "%s searching timestamp "U64" gives "U64,
	       m_name, start, mp4_ts);
  mp4f_message(LOG_DEBUG, "%s values are sample time "U64" ts "U64,
	       m_name, sampleTime, ts);
#endif
  set_timebase(mp4_sampleId);
}
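// Seek the hint track to the nearest sync (key) frame at or before the given
// time in milliseconds; returns that frame's time in milliseconds, or
// MP4_INVALID_TIMESTAMP if no sync sample was found.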
QWORD MP4RtpTrack::SeekNearestSyncFrame(QWORD time)
{
	//Reset us
	Reset();
	//Get time in track units
	MP4Duration when = time*timeScale/1000;
	//Get nearest sample
	sampleId = MP4GetSampleIdFromTime(mp4,hint,when,false);
	//Check
	if (sampleId == MP4_INVALID_SAMPLE_ID)
		//Nothing
		return MP4_INVALID_TIMESTAMP;
	//Find nearest sync
	while(sampleId>0)
	{
		//If it is a sync frame
		if (MP4GetSampleSync(mp4,hint,sampleId)>0)
		{
			//Get sample time
			when = MP4GetSampleTime(mp4,hint,sampleId);
			//And convert it back to milliseconds
			return when*1000/timeScale;
		}
		//Try the previous sample
		sampleId--;
	}
	//No sync sample found
	return MP4_INVALID_TIMESTAMP;
}
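// Read the current text sample, hand the decoded text payload to the listener
// as a text frame, advance to the next sample and return its time.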
QWORD MP4TextTrack::Read(Listener *listener)
{

	// Get duration of this sample (in track timescale units)
	frameSamples = MP4GetSampleDuration(mp4, track, sampleId);

	// Get size of sample
	frameSize = MP4GetSampleSize(mp4, track, sampleId);

	// Get sample timestamp
	frameTime = MP4GetSampleTime(mp4, track, sampleId);
	//Convert to milliseconds
	frameTime = MP4ConvertFromTrackTimestamp(mp4, track, frameTime, 1000);

	// Allocate a buffer for the sample data
	BYTE *data = (BYTE*)malloc(frameSize);
	//Set the max data length
	DWORD dataLen = frameSize;

	MP4Timestamp	startTime;
	MP4Duration	duration;
	MP4Duration	renderingOffset;

	// Read the text sample
	if (!MP4ReadSample(
				mp4,				// MP4FileHandle hFile
				track,				// MP4TrackId trackId
				sampleId++,			// MP4SampleId sampleId,
				(u_int8_t **) &data,		// u_int8_t** ppBytes
				(u_int32_t *) &dataLen,		// u_int32_t* pNumBytes
				&startTime,			// MP4Timestamp* pStartTime
				&duration,			// MP4Duration* pDuration
				&renderingOffset,		// MP4Duration* pRenderingOffset
				NULL				// bool* pIsSyncSample
	))
	{
		//Free the buffer before bailing out
		free(data);
		//No more samples
		return MP4_INVALID_TIMESTAMP;
	}

	//Log("Got text frame [time:%d,start:%d,duration:%d,lenght:%d,offset:%d\n",frameTime,startTime,duration,dataLen,renderingOffset);
	//Dump(data,dataLen);
	//Get length
	if (dataLen>2)
	{
		//Get string length
		DWORD len = data[0]<<8 | data[1];
		//Set frame
		frame.SetFrame(startTime,data+2+renderingOffset,len-renderingOffset-2);
		//call listener
		if (listener)
			//Call it
			listener->onTextFrame(frame);
	}
	
	//Free the sample buffer
	free(data);

	// Return the time of the next sample to send
	return GetNextFrameTime();
}
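// Return the time of the current hint sample converted to milliseconds, or
// MP4_INVALID_TIMESTAMP at the end of the track.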
QWORD MP4RtpTrack::GetNextFrameTime()
{
	QWORD ts = MP4GetSampleTime(mp4, hint, sampleId);
	//Check it
	if (ts==MP4_INVALID_TIMESTAMP)
		//Return it
		return ts;
	//Convert to milliseconds
	ts = MP4ConvertFromTrackTimestamp(mp4, hint, ts, 1000);

	//Get next timestamp
	return ts;
}
	bool Context::getPacket(MP4TrackId hint, RuntimeProperties & rt,
				bool header, void * buffer, u_int & size, u_int & ts)
	{
		if (rt.frame == 0 || rt.packet == rt.packetsPerFrame) {
			++rt.frame;
			if(!MP4ReadRtpHint(fh, hint, rt.frame, &rt.packetsPerFrame))
				return false;
			rt.packet = 0;
			rt.last_frame = MP4GetSampleTime(fh, hint, rt.frame);
		}

		ts = rt.last_frame;
		if (!MP4ReadRtpPacket(fh, hint, rt.packet, (u_int8_t **) &buffer, &size, 0, header, true))
			return false;
		++rt.packet;
		return true;
	}
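// Seek the text track to the sample nearest to the given time in milliseconds
// and return that sample's time, converted back to milliseconds.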
QWORD MP4TextTrack::Seek(QWORD time)
{
	//Reset us
	Reset();
	//Get time in track units
	MP4Duration when = time*timeScale/1000;
	//Get nearest sample
	sampleId = MP4GetSampleIdFromTime(mp4,track,when,false);
	//Check
	if (sampleId == MP4_INVALID_SAMPLE_ID)
		//Nothing
		return MP4_INVALID_TIMESTAMP;
	//Get sample time
	when = MP4GetSampleTime(mp4,track,sampleId);
	//And convert it back to milliseconds
	return when*1000/timeScale;
}
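// Return the time of the current text sample, in track timescale units.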
QWORD MP4TextTrack::GetNextFrameTime()
{
	//Get next timestamp
	return MP4GetSampleTime(mp4, track, sampleId);
}
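// Read the next RTP packet of the current hint sample. On the first packet of
// a frame the underlying media sample is also read and delivered to the
// listener as a media frame; each RTP packet is delivered via onRTPPacket().
// Returns the send time of the current frame while packets remain, the time of
// the next frame once the last packet has been sent, or MP4_INVALID_TIMESTAMP
// on error.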
QWORD MP4RtpTrack::Read(Listener *listener)
{
	int last = 0;
	uint8_t* data;
	bool isSyncSample;

	// If it's the first packet of a frame
	if (!numHintSamples)
	{
		// Get number of rtp packets for this sample
		if (!MP4ReadRtpHint(mp4, hint, sampleId, &numHintSamples))
		{
			//Print error
			Error("Error reading hintt");
			//Exit
			return MP4_INVALID_TIMESTAMP;
		}

		// Get duration of this sample (in track timescale units)
		frameSamples = MP4GetSampleDuration(mp4, hint, sampleId);

		// Get size of sample
		frameSize = MP4GetSampleSize(mp4, hint, sampleId);

		// Get sample timestamp
		frameTime = MP4GetSampleTime(mp4, hint, sampleId);
		//Convert to milliseconds
		frameTime = MP4ConvertFromTrackTimestamp(mp4, hint, frameTime, 1000);

		// Check if it is H264 and it is a Sync frame
		if (codec==VideoCodec::H264 && MP4GetSampleSync(mp4,track,sampleId))
			// Send SEI info
			SendH263SEI(listener);

		//Sample data buffer and max length
		BYTE *data = NULL;
		DWORD dataLen = 0;
		MP4Timestamp	startTime;
		MP4Duration	duration;
		MP4Duration	renderingOffset;

		//Get values
		data	= frame->GetData();
		dataLen = frame->GetMaxMediaLength();
		
		// Read the media sample
		if (!MP4ReadSample(
			mp4,				// MP4FileHandle hFile
			track,				// MP4TrackId trackId
			sampleId,			// MP4SampleId sampleId,
			(u_int8_t **) &data,		// u_int8_t** ppBytes
			(u_int32_t *) &dataLen,		// u_int32_t* pNumBytes
			&startTime,			// MP4Timestamp* pStartTime
			&duration,			// MP4Duration* pDuration
			&renderingOffset,		// MP4Duration* pRenderingOffset
			&isSyncSample			// bool* pIsSyncSample
			))
		{
			Error("Error reading sample");
			//Last
			return MP4_INVALID_TIMESTAMP;
		}

		//Check type
		if (media == MediaFrame::Video)
		{
			//Get video frame
			VideoFrame *video = (VideoFrame*)frame;
			//Set length
			video->SetLength(dataLen);
			//Timestamp
			video->SetTimestamp(startTime*90000/timeScale);
			//Set intra
			video->SetIntra(isSyncSample);
		} else {
			//Get Audio frame
			AudioFrame *audio = (AudioFrame*)frame;
			//Set length
			audio->SetLength(dataLen);
			//Timestamp
			audio->SetTimestamp(startTime*8000/timeScale);
		}

		//Check listener
		if (listener)
			//Frame callback
			listener->onMediaFrame(*frame);
	}

	// If it's the last packet of the frame
	if (packetIndex + 1 == numHintSamples)
		//Set last mark
		last = 1;
	
	// Set mark bit
	rtp.SetMark(last);

	// Get data pointer
	data = rtp.GetMediaData();
	//Get max data length
	DWORD dataLen = rtp.GetMaxMediaLength();

	// Read next rtp packet
	if (!MP4ReadRtpPacket(
				mp4,				// MP4FileHandle hFile
				hint,				// MP4TrackId hintTrackId
				packetIndex++,			// u_int16_t packetIndex
				(u_int8_t **) &data,		// u_int8_t** ppBytes
				(u_int32_t *) &dataLen,		// u_int32_t* pNumBytes
				0,				// u_int32_t ssrc DEFAULT(0)
				0,				// bool includeHeader DEFAULT(true)
				1				// bool includePayload DEFAULT(true)
	))
	{
		//Error
		Error("Error reading packet [%d,%d,%d]\n", hint, track,packetIndex);
		//Exit
		return MP4_INVALID_TIMESTAMP;
	}

	//Check
	if (dataLen>rtp.GetMaxMediaLength())
	{
		//Error
		Error("RTP packet too big [%u,%u]\n",dataLen,rtp.GetMaxMediaLength());
		//Exit
		return MP4_INVALID_TIMESTAMP;
	}
	
	//Set length
	rtp.SetMediaLength(dataLen);
	//Check listener
	if (listener)
		// Write frame
		listener->onRTPPacket(rtp);

	// Are we the last packet in a hint?
	if (last)
	{
		// Reset the packet index for the next hint
		packetIndex = 0;
		// Go for next sample
		sampleId++;
		numHintSamples = 0;
		//Return next frame time
		return GetNextFrameTime();
	}

	// More packets remain in this frame, keep its send time
	return frameTime;
}