示例#1
0
/* Returns the timecode of the next subtitle packet, rescaled into this
   list's timecode base, or TIMECODE_UNKNOWN if the source can't tell.
   When the active source is exhausted, peeks at the next source in the
   list; after the last source it reports that source's end time. */
__int64 SUBTITLESOURCELIST::GetNextTimecode()
{
	SUBTITLESOURCE* source;

	if (!info.active_source->IsEndOfStream()) {
		// active source still has data pending
		source = info.active_source;
	} else if (info.iActiveSource < info.iCount - 1) {
		// active source exhausted -> the next packet comes from the
		// following source in the list
		source = info.subtitles[info.iActiveSource + 1];
	} else {
		// no more sources: next timecode is the end of the final source
		// (its unscaled bias plus its duration, brought into our base)
		source = info.subtitles[info.iActiveSource];
		return (source->GetBias(BIAS_UNSCALED)
			+ source->GetDuration() * source->GetTimecodeScale()) / GetTimecodeScale();
	}

	__int64 next = source->GetNextTimecode();
	if (next == TIMECODE_UNKNOWN)
		return TIMECODE_UNKNOWN;

	// rescale from the source's timecode base into the list's base
	return next * source->GetTimecodeScale() / GetTimecodeScale();
}
示例#2
0
/* Returns the next timecode of the underlying matroska subtitle track,
   rescaled into this stream's timecode base with the bias applied, or
   TIMECODE_UNKNOWN when the demuxer can't provide one. */
__int64 SUBTITLESFROMMATROSKA::GetNextTimecode()
{
	__int64 next = info.m->GetNextTimecode(info.iStream);
	if (next == TIMECODE_UNKNOWN)
		return TIMECODE_UNKNOWN;

	// matroska base -> our base, then shift by the configured bias
	next = next * info.m->GetTimecodeScale() / GetTimecodeScale();
	return next + GetBias();
}
示例#3
0
/* Reads one subtitle packet from the active source.  On success the
   timecode and duration are rescaled into the list's timecode base and,
   for SSA subtitles, the packet is rewritten with a remapped style name.
   Transparently advances to the next source when the active one is
   exhausted.  Returns SUBS_OK on success, SUBS_ERR otherwise. */
int SUBTITLESOURCELIST::Read(void* lpDest, int* iSize, __int64* lpiTimecode,
							ADVANCEDREAD_INFO* lpAARI)
{
	int i;

	SUBTITLESOURCE* s = info.active_source;
	if (!s->IsEndOfStream()) {
		if (s->Read(lpDest,iSize,lpiTimecode,lpAARI)<=0) {
			// report an empty packet, *then* fail (this assignment used to
			// sit after the return and was unreachable dead code)
			if (iSize) *iSize = 0;
			return SUBS_ERR;
		}
		if (lpiTimecode) {
			// translate the source-local timecode into the list's base
			SetCurrentTimecode(*lpiTimecode * s->GetTimecodeScale(), TIMECODE_UNSCALED);
			*lpiTimecode = GetCurrentTimecode();
			// lpAARI is an optional out-parameter -> guard before touching
			// it (old code dereferenced it unconditionally here)
			if (lpAARI)
				lpAARI->iDuration = lpAARI->iDuration * s->GetTimecodeScale() / GetTimecodeScale();

			// do style mapping for SSA
			if (s->GetFormat() == SUBFORMAT_SSA) {
				SSA_STYLE*  style = *(SSA_STYLE**)lpDest;
				char*		cText = ((char*)lpDest)+4;

				char		cFinal[1024]; cFinal[0] = 0;

				// rebuild the dialogue line: running display order, the
				// mapped style name (falling back to "Default"), then text
				sprintf(cFinal,"%d,,%s,%s",++iDisplayOrderCount,
					(style && (style=FindSSAStyle(style)))?style->sssStruct.lpcName:"Default",cText);
				i = strlen(cFinal);
				if (iSize) *iSize = i;
				memcpy(lpDest,cFinal,i+1);
			}

			return SUBS_OK;
		}
	} else {
		if (info.iActiveSource < info.iCount - 1) {
			// current source is done -> activate the next one and retry
			info.active_source = info.subtitles[++info.iActiveSource];
			return Read(lpDest,iSize,lpiTimecode,lpAARI);
		} else {
			// no sources left
			return SUBS_ERR;
		}
	}

	// NOTE(review): a successful read with lpiTimecode == NULL falls
	// through to here and reports an error; this mirrors the original
	// control flow -- confirm callers always pass lpiTimecode
	return SUBS_ERR;
}
示例#4
0
/* Called whenever the video stream has buffered a new frame.  Decides
   whether the frame starts a new cluster candidate (key frames always
   do; non-key frames only after more than one second of media), and
   triggers cluster creation once the audio stream -- if any -- has
   caught up to the video timecode. */
void Context::NotifyVideoFrame(
    StreamVideo* pVideo,
    StreamVideo::VideoFrame* pFrame)
{
    pVideo;  // referenced so builds with asserts disabled see no unused-parameter warning
    assert(pVideo);
    assert(pVideo == m_pVideo);
    assert(pFrame);
    assert(m_file.GetStream());  //TODO

    StreamVideo::frames_t& vframes = pVideo->GetFrames();
    assert(!vframes.empty());
    assert(vframes.back() == pFrame);

    const ULONG vt = pFrame->GetTimecode();

    StreamVideo::frames_t& rframes = m_pVideo->GetKeyFrames();

    // very first frame always becomes a cluster candidate
    if (rframes.empty())
    {
        rframes.push_back(pFrame);
        return;
    }

    if (!pFrame->IsKey())
    {
        // non-key frame: only promote it when more than one second of
        // media time has passed since the last candidate
        const StreamVideo::VideoFrame* const pLast = rframes.back();
        assert(pLast);

        const ULONG vt0 = pLast->GetTimecode();
        assert(vt >= vt0);

        const LONGLONG dt = LONGLONG(vt) - LONGLONG(vt0);
        assert(dt >= 0);

        const LONGLONG scale = GetTimecodeScale();
        assert(scale >= 1);

        const LONGLONG ns = scale * dt;

        //TODO: allow this to be parameterized
        if (ns <= 1000000000)  //1 sec
            return;
    }

    rframes.push_back(pFrame);

    //At this point, we have at least 2 rframes, which means
    //at least one cluster is potentially available to be written
    //to the file.  (Here the constraints that the video stream
    //needs to satisfy have been satisfied.  We might still have
    //to wait for the audio stream to satisfy its constraints.)

    // no audio (or audio already finished): write immediately
    if ((m_pAudio == 0) || m_bEOSAudio)
    {
        CreateNewCluster(pFrame);
        return;
    }

    const StreamAudio::frames_t& aframes = m_pAudio->GetFrames();

    // wait until audio has buffered something at all ...
    if (aframes.empty())
        return;

    const StreamAudio::AudioFrame* const pLastAudio = aframes.back();
    assert(pLastAudio);

    // ... and has reached at least the video frame's timecode
    if (pLastAudio->GetTimecode() < vt)
        return;

    CreateNewCluster(pFrame);
}
示例#5
0
/* Total unstretched duration of all audio sources in the list, each
   rescaled from its own timecode base into the list's base. */
__int64 AUDIOSOURCELIST::GetUnstretchedDuration()
{
	__int64 total = 0;
	int index = 0;
	while (index < info.iCount) {
		AUDIOSOURCE* src = info.audiosources[index++];
		total += src->GetUnstretchedDuration() * src->GetTimecodeScale() / GetTimecodeScale();
	}
	return total;
}
示例#6
0
/* Reads audio data from the list, transparently switching to the next
   source when the active one is exhausted.  Timecodes are translated
   from the active source's base into the list's own base; for seamless
   joins, the next source's bias is set so playback continues without a
   gap.  Returns the underlying source's read result, or AS_ERR when
   every source has been consumed. */
int AUDIOSOURCELIST::Read(void* lpDest, DWORD dwMicrosecDesired, DWORD* lpdwMicrosecRead,
						  __int64* lpqwNanosecRead, __int64* lpiTimecode, ADVANCEDREAD_INFO* lpAARI)
{
	__int64 j;
	// make lpqwNanosecRead always dereferenceable: the seamless-bias
	// computation below needs the nanoseconds actually read
	if (!lpqwNanosecRead) lpqwNanosecRead = &j;

	// can read sth from current source?
	if (!info.active_source->IsEndOfStream()) {
		int res = info.active_source->Read(lpDest,dwMicrosecDesired,
			lpdwMicrosecRead,lpqwNanosecRead,lpiTimecode,lpAARI);

		/* if no real data was read and suddenly the end of the stream
		   has been reached, the end was crap -> retry; the else branch
		   below will then advance to the next source (or fail) */
		if (res == 0 && info.active_source->IsEndOfStream()) {
			return Read(lpDest, dwMicrosecDesired, lpdwMicrosecRead,
				lpqwNanosecRead, lpiTimecode, lpAARI);
		}

		if (lpiTimecode) {
			// translate the source-local timecode into the list's base
			SetCurrentTimecode(*lpiTimecode * info.active_source->GetTimecodeScale(), TIMECODE_UNSCALED);
			*lpiTimecode = GetCurrentTimecode();
		}
		if (lpAARI) {
			if (lpAARI->iNextTimecode != TIMECODE_UNKNOWN) {
	//			lpAARI->iNextTimecode = lpAARI->iNextTimecode*info.active_source->GetTimecodeScale() / GetTimecodeScale() + GetBias();
			}
		}
		if (lpAARI) {
			// end reached -> current audio frame was last one of active source
			if (info.active_source->IsEndOfStream()) {
				// is there another audio source?
				if (info.iActiveSource < info.iCount-1) {
					AUDIOSOURCE* next = info.audiosources[info.iActiveSource+1];
					// join seemless? -> set BIAS for next source accordingly
					// (current position plus the nanoseconds just read)
					if (IsSeamless()) {
						next->SetBias(GetCurrentTimecode() * GetTimecodeScale()+
							*lpqwNanosecRead, BIAS_UNSCALED);
					}
					// next packet starts where the next source's bias says
					lpAARI->iNextTimecode = next->GetBias(BIAS_UNSCALED) / GetTimecodeScale();
				}
			} else {
				// still mid-source: rescale the source-reported next
				// timecode into the list's base and apply the bias
				if (lpAARI->iNextTimecode != TIMECODE_UNKNOWN) {
					lpAARI->iNextTimecode *= info.active_source->GetTimecodeScale();
					lpAARI->iNextTimecode /= GetTimecodeScale();
					lpAARI->iNextTimecode += GetBias();
				}
			}
		}

		return res;
	} else {
		// end of list?
		if (info.iActiveSource >= info.iCount-1) {
			return AS_ERR;
		} else {
		// one more file available

			/* create log entry */
			char cTime[64]; memset(cTime, 0, sizeof(cTime));
			Millisec2Str(info.active_source->GetCurrentTimecode() *
				info.active_source->GetTimecodeScale() / 1000000, cTime);

			char cName[1024]; memset(cName, 0, sizeof(cName));
			GetName(cName);

			char cMsg[2048]; memset(cMsg, 0, sizeof(cMsg));
			sprintf(cMsg, "End of stream encountered\nName: %s\nTimecode: %s",
				cName, cTime);

		//	GetApplicationTraceFile()->Trace(TRACE_LEVEL_NOTE, "End of stream", cMsg);


			// switch to the next source, carrying the timecode over when
			// the join is seamless, then retry the read on it
			info.active_source = info.audiosources[++info.iActiveSource];
			if (IsSeamless()) {
				info.active_source->SetBias(info.audiosources[info.iActiveSource-1]->GetCurrentTimecode());
			}
			return Read(lpDest,dwMicrosecDesired,lpdwMicrosecRead,lpqwNanosecRead,lpiTimecode,lpAARI);
		}
	}
}
示例#7
0
int SUBTITLESFROMMATROSKA::Read(void* lpDest, int* iSize, __int64* lpiTimecode,
							ADVANCEDREAD_INFO* lpAARI)
{
	READ_INFO	r;

	// if SRT: read text and return as result
	if (GetFormat() == SUBFORMAT_SRT) {
		info.m->SetActiveTrack(info.iStream);
		if (info.m->Read(&r)==READBL_OK) {
			if (iSize) *iSize = r.pData->GetSize();
			if (lpDest) memcpy(lpDest,r.pData->GetData(),r.pData->GetSize());
			if (lpiTimecode) {
				*lpiTimecode = (r.qwTimecode*info.m->GetTimecodeScale() + GetBias(BIAS_UNSCALED))/GetTimecodeScale();
			}
			if (lpAARI) {
				lpAARI->iDuration = r.qwDuration * info.m->GetTimecodeScale() / GetTimecodeScale();
			}
			DecBufferRefCount(&r.pData);
		} else {
			if (iSize) *iSize = 0;
		}
	}
	// if SRT: read text and return as result
	if (GetFormat() == SUBFORMAT_VOBSUB) {
		info.m->SetActiveTrack(info.iStream);
		if (info.m->Read(&r)==READBL_OK) {
			if (iSize) *iSize = r.pData->GetSize();
			if (lpDest) memcpy(lpDest,r.pData->GetData(),r.pData->GetSize());
			if (lpiTimecode) {
				*lpiTimecode = (r.qwTimecode*info.m->GetTimecodeScale() + GetBias(BIAS_UNSCALED))/GetTimecodeScale();
			}
			if (lpAARI) {
				lpAARI->iDuration = r.qwDuration * info.m->GetTimecodeScale() / GetTimecodeScale();
			}
			DecBufferRefCount(&r.pData);
		} else {
			if (iSize) *iSize = 0;
		}
	}

	// if SSA: split style apart!
	if (GetFormat() == SUBFORMAT_SSA) {
		info.m->SetActiveTrack(info.iStream);
		if (info.m->Read(&r)==READBL_OK) {
			char* lpcDest = (char*)lpDest;
			
			int j = 2; char* c = (char*)r.pData->GetData(); char* cStyle;
			while (*c++ != ',' || --j);
			cStyle = c;
			while (*c++ != ',');
			*(c-1)=0;

			int i = strlen(c);
			if (iSize) *iSize = i;
			if (lpDest) memcpy(lpcDest+4,c,i+1);
			memset(lpcDest,0,4);

			if (lpiTimecode) {
				*lpiTimecode = (r.qwTimecode*info.m->GetTimecodeScale() + GetBias(BIAS_UNSCALED))/GetTimecodeScale();
			}
//			if (lpiTimecode) *lpiTimecode = r.qwTimecode + GetBias();
			if (lpAARI) {
				lpAARI->iDuration = r.qwDuration * info.m->GetTimecodeScale() / GetTimecodeScale();
			}

			SSA_STYLE* style = NULL;
			
			for (i=0;i<GetSSAStyleCount();i++) {
				char* stn = cStyle;
				if (!strcmp(GetSSAStyle(i)->sssStruct.lpcName,cStyle)) {
					style = GetSSAStyle(i);
				}
				if (stn[0]=='*' && !strcmp(stn+1,GetSSAStyle(i)->sssStruct.lpcName)) {
					style = GetSSAStyle(i);
				}
			}
			memcpy(lpDest,&style,sizeof(style));

			DecBufferRefCount(&r.pData);
		} else return SUBS_ERR;
	}

	return SUBS_OK;
}