FrameParserStatus_t   FrameParser_VideoDvp_c::ReadHeaders( void )
{
Buffer_t		 Buffer;
BufferStructure_t	*BufferStructure;
BufferStatus_t		 BufferStatus;
StreamInfo_t		*StreamInfo;   
PlayerSequenceType_t	 SequenceType;

    //
    // Find the stream info structure, and extract the buffer
    //

    StreamInfo	= (StreamInfo_t*)BufferData;
    Buffer	= (Buffer_t)StreamInfo->buffer_class;

    //
    // Switch the ownership of the buffer
    //

    Buffer->TransferOwnership( IdentifierFrameParser );

    //
    // Modify the buffer structure to match the actual capture
    //

    BufferStatus = Buffer->ObtainMetaDataReference( Player->MetaDataBufferStructureType, (void**)&BufferStructure );
    if (BufferStatus != BufferNoError)
    {
	report( severity_error, "FrameParser_VideoDvp_c::RevPlayPurgeDecodeStacks - Unable to access buffer structure parameters %x.\n", BufferStatus);
	return FrameParserError;
    }

    //
    // Fill out appropriate frame and video parameters
    //

    ParsedFrameParameters->NewStreamParameters 			= false;
    ParsedFrameParameters->SizeofStreamParameterStructure 	= sizeof(StreamInfo_t);
    ParsedFrameParameters->StreamParameterStructure		= StreamInfo;

    ParsedFrameParameters->FirstParsedParametersForOutputFrame 	= true;
    ParsedFrameParameters->FirstParsedParametersAfterInputJump	= false;
    ParsedFrameParameters->SurplusDataInjected			= false;
    ParsedFrameParameters->ContinuousReverseJump		= false;
    ParsedFrameParameters->KeyFrame				= true;
    ParsedFrameParameters->ReferenceFrame			= true;		// Turn off autogeneration of DTS
    ParsedFrameParameters->IndependentFrame			= true;
    ParsedFrameParameters->NumberOfReferenceFrameLists		= 0;
    
    ParsedFrameParameters->NewFrameParameters			= true;
    ParsedVideoParameters->Content.PixelAspectRatio		= Rational_t(StreamInfo->pixel_aspect_ratio.Numerator, StreamInfo->pixel_aspect_ratio.Denominator);

    ParsedVideoParameters->Content.Width			= StreamInfo->width;
    ParsedVideoParameters->Content.Height			= StreamInfo->height;
    ParsedVideoParameters->Content.DisplayWidth			= StreamInfo->width;
    ParsedVideoParameters->Content.DisplayHeight		= StreamInfo->height;
    ParsedVideoParameters->Content.Progressive			= !StreamInfo->interlaced;
    ParsedVideoParameters->Content.FrameRate			= Rational_t(StreamInfo->FrameRateNumerator, StreamInfo->FrameRateDenominator);
    ParsedVideoParameters->Content.OverscanAppropriate 		= 0;
    ParsedVideoParameters->Content.VideoFullRange		= (StreamInfo->VideoFullRange != 0);
    ParsedVideoParameters->Content.ColourMatrixCoefficients	= ((StreamInfo->ColourMode == DVP_COLOUR_MODE_601) ? MatrixCoefficients_ITU_R_BT601 :
								  ((StreamInfo->ColourMode == DVP_COLOUR_MODE_709) ? MatrixCoefficients_ITU_R_BT709 :
														     MatrixCoefficients_Undefined));

    ParsedVideoParameters->InterlacedFrame			= StreamInfo->interlaced;    
    ParsedVideoParameters->DisplayCount[0]			= 1;
    ParsedVideoParameters->DisplayCount[1]			= ParsedVideoParameters->InterlacedFrame ? 1 : 0;
    ParsedVideoParameters->SliceType				= SliceTypeI;
    ParsedVideoParameters->TopFieldFirst			= StreamInfo->top_field_first;
    ParsedVideoParameters->PictureStructure			= StructureFrame;
 
    ParsedVideoParameters->PanScan.Count			= 0;

    FirstDecodeOfFrame						= true;
    FrameToDecode						= true;

    //
    // Do we need to update any of the window sizes
    //

    SequenceType	= (NextDecodeFrameIndex == 0) ? SequenceTypeImmediate : SequenceTypeBeforePlaybackTime;

    if( memcmp( &StreamInfo->InputWindow, &InputWindow, sizeof(DvpRectangle_t) ) != 0 )
    {
	InputWindow	= StreamInfo->InputWindow;

	Player->CallInSequence( Stream, SequenceType, CodedFramePlaybackTime, 
				ManifestorVideoFnSetInputWindow, 
				InputWindow.X, InputWindow.Y, InputWindow.Width, InputWindow.Height );
    }

//

    if( memcmp( &StreamInfo->OutputWindow, &OutputWindow, sizeof(DvpRectangle_t) ) != 0 )
    {
	OutputWindow	= StreamInfo->OutputWindow;

	Player->CallInSequence( Stream, SequenceType, CodedFramePlaybackTime, 
				ManifestorVideoFnSetOutputWindow, 
				OutputWindow.X, OutputWindow.Y, OutputWindow.Width, OutputWindow.Height );
    }

//

    return FrameParserNoError;
}
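
For reference, these are the StreamInfo_t fields that ReadHeaders() (and Codec_DvpVideo_c::Input() further down) actually dereference. The struct below is a sketch reconstructed purely from those accesses; the real definition lives in the DVP capture headers and will contain more members and possibly different types.

// Sketch only - field list inferred from the code above, not the real DVP header.
typedef struct DvpRectangle_s
{
    unsigned int	 X;
    unsigned int	 Y;
    unsigned int	 Width;
    unsigned int	 Height;
} DvpRectangle_t;

typedef struct StreamInfo_s
{
    void		*buffer_class;		// the captured decode buffer (cast to Buffer_t)
    struct
    {
	unsigned int	 Numerator;
	unsigned int	 Denominator;
    }			 pixel_aspect_ratio;
    unsigned int	 width;
    unsigned int	 height;
    unsigned int	 interlaced;
    unsigned int	 top_field_first;
    unsigned int	 FrameRateNumerator;
    unsigned int	 FrameRateDenominator;
    unsigned int	 VideoFullRange;
    unsigned int	 ColourMode;		// DVP_COLOUR_MODE_601, DVP_COLOUR_MODE_709 or other
    DvpRectangle_t	 InputWindow;
    DvpRectangle_t	 OutputWindow;
} StreamInfo_t;
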
void   Player_Generic_c::ProcessDecodeToManifest(       PlayerStream_t            Stream )
{
unsigned int                      i;
PlayerStatus_t                    Status;
RingStatus_t                      RingStatus;
unsigned int                      AccumulatedBufferTableOccupancy;
PlayerBufferRecord_t             *AccumulatedBufferTable;
Buffer_t                          Buffer = NULL;
Buffer_t                          OriginalCodedFrameBuffer;
BufferType_t                      BufferType;
PlayerControlStructure_t         *ControlStructure;
ParsedFrameParameters_t          *ParsedFrameParameters;
unsigned int                      LowestIndex;
unsigned int                      LowestDisplayFrameIndex;
unsigned int                      DesiredFrameIndex;
unsigned int			  PossibleDecodeBuffers;
unsigned int                      MaxDecodesOutOfOrder;
PlayerSequenceNumber_t           *SequenceNumberStructure;
unsigned long long                LastEntryTime;
unsigned long long                SequenceNumber;
unsigned long long                MinumumSequenceNumberAccumulated;
unsigned long long                MaximumActualSequenceNumberSeen;
unsigned long long                Time;
unsigned int                      AccumulatedBeforeControlMessagesCount;
unsigned int                      AccumulatedAfterControlMessagesCount;
bool                              SequenceCheck;
bool                              ProcessNow;
unsigned int                     *Count;
PlayerBufferRecord_t             *Table;
Buffer_t                          MarkerFrameBuffer;
bool                              FirstFrame;
bool                              DiscardBuffer;
bool				  LastPreManifestDiscardBuffer;
unsigned char                     SubmitInitialFrame;
Buffer_t                          InitialFrameBuffer;

    //
    // Set parameters
    //

    AccumulatedBufferTableOccupancy             = 0;
    AccumulatedBufferTable                      = Stream->AccumulatedDecodeBufferTable;

    LastEntryTime                               = OS_GetTimeInMicroSeconds();
    SequenceNumber                              = INVALID_SEQUENCE_VALUE;
    Time                                        = INVALID_TIME;
    AccumulatedBeforeControlMessagesCount       = 0;
    AccumulatedAfterControlMessagesCount        = 0;

    MinumumSequenceNumberAccumulated		= 0xffffffffffffffffULL;
    MaximumActualSequenceNumberSeen             = 0;
    DesiredFrameIndex                           = 0;
    FirstFrame                                  = true;

    MarkerFrameBuffer                           = NULL;
    InitialFrameBuffer				= NULL;

    LastPreManifestDiscardBuffer		= false;

    //
    // Signal we have started
    //

    OS_LockMutex( &Lock );

    Stream->ProcessRunningCount++;

    if( Stream->ProcessRunningCount == Stream->ExpectedProcessCount )
	OS_SetEvent( &Stream->StartStopEvent );

    OS_UnLockMutex( &Lock );

    //
    // Main Loop
    //

    while( !Stream->Terminating )
    {
	//
	// Buffer re-ordering loop
	//

	while( !Stream->Terminating )
	{
	    //
	    // Scan the list of accumulated buffers to see if the next display frame is available
	    // (whether because it was already accumulated, or because its display frame index has been updated)
	    //

	    MinumumSequenceNumberAccumulated    = 0xffffffffffffffffULL;
	    if( AccumulatedBufferTableOccupancy != 0 )
	    {
		LowestIndex                     = INVALID_INDEX;
		LowestDisplayFrameIndex         = INVALID_INDEX;
		for( i=0; i<Stream->NumberOfDecodeBuffers; i++ )
		    if( AccumulatedBufferTable[i].Buffer != NULL )
		    {
			MinumumSequenceNumberAccumulated        = min(MinumumSequenceNumberAccumulated, AccumulatedBufferTable[i].SequenceNumber);

			if( (AccumulatedBufferTable[i].ParsedFrameParameters->DisplayFrameIndex != INVALID_INDEX)       &&
			    ((LowestIndex == INVALID_INDEX) || (AccumulatedBufferTable[i].ParsedFrameParameters->DisplayFrameIndex < LowestDisplayFrameIndex)) )
			{
			    LowestDisplayFrameIndex     = AccumulatedBufferTable[i].ParsedFrameParameters->DisplayFrameIndex;
			    LowestIndex                 = i;
			}
		    }

		Stream->Manifestor->GetDecodeBufferCount( &PossibleDecodeBuffers );
		MaxDecodesOutOfOrder		= (Stream->Playback->Direction == PlayForward) ? (PossibleDecodeBuffers >> 1) : (3 * PossibleDecodeBuffers)/4;
		MaxDecodesOutOfOrder		= min( (PossibleDecodeBuffers - PLAYER_MINIMUM_NUMBER_OF_WORKING_DECODE_BUFFERS), MaxDecodesOutOfOrder );
		if( Stream->Playback->Direction == PlayForward )
		    MaxDecodesOutOfOrder	= min( PLAYER_LIMIT_ON_OUT_OF_ORDER_DECODES, MaxDecodesOutOfOrder );
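
		// Illustrative numbers only (the real limits come from constants in the player
		// headers): with PossibleDecodeBuffers = 8, forward play starts from 8 >> 1 = 4
		// out-of-order frames and reverse play from (3 * 8) / 4 = 6; both are then reduced
		// so that PLAYER_MINIMUM_NUMBER_OF_WORKING_DECODE_BUFFERS buffers stay free for
		// decoding, and forward play is finally capped at PLAYER_LIMIT_ON_OUT_OF_ORDER_DECODES.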

		if( (LowestIndex != INVALID_INDEX) &&
		    (   (LowestDisplayFrameIndex == DesiredFrameIndex) || 
			(AccumulatedBufferTableOccupancy >= MaxDecodesOutOfOrder) ||
			(AccumulatedBufferTable[LowestIndex].ParsedFrameParameters->CollapseHolesInDisplayIndices) ||
			(MarkerFrameBuffer != NULL) ) )
		{
		    Buffer                                                      = AccumulatedBufferTable[LowestIndex].Buffer;
		    ParsedFrameParameters                                       = AccumulatedBufferTable[LowestIndex].ParsedFrameParameters;
		    SequenceNumber                                              = AccumulatedBufferTable[LowestIndex].SequenceNumber;
		    BufferType                                                  = Stream->DecodeBufferType;
		    AccumulatedBufferTable[LowestIndex].Buffer                  = NULL;
		    AccumulatedBufferTable[LowestIndex].ParsedFrameParameters   = NULL;
		    AccumulatedBufferTableOccupancy--;
		    break;
		}
	    }

	    //
	    // Skip any frame indices that were unused
	    //

	    while( CheckForNonDecodedFrame( Stream, DesiredFrameIndex ) )
		DesiredFrameIndex++;

	    //
	    // If we get here with a marker frame, then we have emptied our accumulated list
	    //
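	    // (The MarkerFrameBuffer != NULL clause in the release test above forces any parked
	    //  frames out regardless of ordering gaps, which is why reaching this point with a
	    //  marker in hand means the table has been drained.)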

	    if( MarkerFrameBuffer != NULL )
	    {
		SequenceNumber                          = SequenceNumberStructure->Value;
		MaximumActualSequenceNumberSeen		= max(SequenceNumber, MaximumActualSequenceNumberSeen);
		ProcessAccumulatedControlMessages(  Stream, 
						    &AccumulatedBeforeControlMessagesCount,
						    PLAYER_MAX_DTOM_MESSAGES,
						    Stream->AccumulatedBeforeDtoMControlMessages, 
						    SequenceNumber, Time );

		ProcessAccumulatedControlMessages(  Stream,
						    &AccumulatedAfterControlMessagesCount,
						    PLAYER_MAX_DTOM_MESSAGES,
						    Stream->AccumulatedAfterDtoMControlMessages, 
						    SequenceNumber, Time );

		Stream->ManifestedBufferRing->Insert( (unsigned int)MarkerFrameBuffer );        // Pass on the marker
		MarkerFrameBuffer                       = NULL;
		Stream->DiscardingUntilMarkerFrameDtoM  = false;
		continue;
	    }

	    //
	    // Get a new buffer (continue will still perform the scan)
	    //

	    RingStatus  = Stream->DecodedFrameRing->Extract( (unsigned int *)(&Buffer), PLAYER_NEXT_FRAME_EVENT_WAIT );
	    if( (RingStatus == RingNothingToGet) || (Buffer == NULL) || Stream->Terminating )
		continue;

	    Buffer->TransferOwnership( IdentifierProcessDecodeToManifest );
	    Buffer->GetType( &BufferType );
	    if( BufferType != Stream->DecodeBufferType )
		break;

	    //
	    // Obtain a sequence number from the buffer
	    //

	    Status      = Buffer->ObtainAttachedBufferReference( Stream->CodedFrameBufferType, &OriginalCodedFrameBuffer );
	    if( Status != PlayerNoError )
	    {
		report( severity_error, "Player_Generic_c::ProcessDecodeToManifest - Unable to obtain the the original coded frame buffer - Implementation error\n" );
		Buffer->DecrementReferenceCount( IdentifierProcessDecodeToManifest );
		continue;
	    }

	    Status      = OriginalCodedFrameBuffer->ObtainMetaDataReference( MetaDataSequenceNumberType, (void **)(&SequenceNumberStructure) );
	    if( Status != PlayerNoError )
	    {
		report( severity_error, "Player_Generic_c::ProcessDecodeToManifest - Unable to obtain the meta data \"SequenceNumber\" - Implementation error\n" );
		Buffer->DecrementReferenceCount( IdentifierProcessDecodeToManifest );
		continue;
	    }

	    SequenceNumberStructure->TimeEntryInProcess2        = OS_GetTimeInMicroSeconds();
	    SequenceNumberStructure->DeltaEntryInProcess2       = SequenceNumberStructure->TimeEntryInProcess2 - LastEntryTime;
	    LastEntryTime                                       = SequenceNumberStructure->TimeEntryInProcess2;
	    SequenceNumber                                      = SequenceNumberStructure->Value;

	    //
	    // Check, is this a marker frame
	    //

	    if( SequenceNumberStructure->MarkerFrame )
	    {
		MarkerFrameBuffer       = Buffer;
		continue;                       // allow us to empty the accumulated buffer list
	    }

	    //
	    // If this is the first seen decode buffer do we wish to offer it up as an initial frame
	    //
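	    // (When the PolicyManifestFirstFrameEarly policy applies, the frame is displayed
	    //  immediately and an extra reference is held in InitialFrameBuffer; that reference
	    //  is released once a normally timed buffer has been queued to the manifestor below,
	    //  or replaced if a later initial frame is submitted.)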

	    if( FirstFrame )
	    {
		FirstFrame              = false;
		SubmitInitialFrame      = PolicyValue( Stream->Playback, Stream, PolicyManifestFirstFrameEarly );
		if( SubmitInitialFrame == PolicyValueApply )
		{
		    Status      	= Stream->Manifestor->InitialFrame( Buffer );
		    if( Status != ManifestorNoError )
			report( severity_error, "Player_Generic_c::ProcessDecodeToManifest - Failed to apply InitialFrame action.\n" );

		    if( InitialFrameBuffer != NULL )
			Stream->Codec->ReleaseDecodeBuffer( InitialFrameBuffer );

		    InitialFrameBuffer	= Buffer;
		    InitialFrameBuffer->IncrementReferenceCount();
		}
	    }

	    //
	    // Do we want to insert this in the table
	    //

	    Status      = Buffer->ObtainMetaDataReference( MetaDataParsedFrameParametersReferenceType, (void **)(&ParsedFrameParameters) );
	    if( Status != PlayerNoError )
	    {
		report( severity_error, "Player_Generic_c::ProcessDecodeToManifest - Unable to obtain the meta data \"ParsedFrameParametersReference\" - Implementation error\n" );
		Buffer->DecrementReferenceCount( IdentifierProcessDecodeToManifest );
		continue;
	    }

#if 0
{
unsigned int A, B;
	Stream->DecodeBufferPool->GetPoolUsage( &A, &B, NULL, NULL, NULL );

report( severity_info, "Got(%d) %3d (R = %d, K = %d) %d, %016llx - %d %d/%d\n", Stream->StreamType, ParsedFrameParameters->DecodeFrameIndex, ParsedFrameParameters->ReferenceFrame, ParsedFrameParameters->KeyFrame, ParsedFrameParameters->DisplayFrameIndex, ParsedFrameParameters->NormalizedPlaybackTime, AccumulatedBufferTableOccupancy, B, A );
}
#endif
	    if( ParsedFrameParameters->DisplayFrameIndex <= DesiredFrameIndex )
		break;

	    for( i=0; i<Stream->NumberOfDecodeBuffers; i++ )
		if( AccumulatedBufferTable[i].Buffer == NULL )
		{
		    AccumulatedBufferTable[i].Buffer                    = Buffer;
		    AccumulatedBufferTable[i].SequenceNumber            = SequenceNumber;
		    AccumulatedBufferTable[i].ParsedFrameParameters     = ParsedFrameParameters;
		    AccumulatedBufferTableOccupancy++;
		    break;
		}

	    if( i >= Stream->NumberOfDecodeBuffers )
	    {
		report( severity_error, "Player_Generic_c::ProcessDecodeToManifest - Unable to insert buffer in table - Implementation error.\n" );
		break;  // Assume it is immediate, as an implementation error this is pretty nasty
	    }
	}

	if( Stream->Terminating )
	    continue;

	// --------------------------------------------------------------------------------------------
	// We now have a buffer after frame re-ordering
	//

	//
	// Deal with a coded frame buffer 
	//

	if( BufferType == Stream->DecodeBufferType )
	{
	    //
	    // Report any re-ordering problems
	    //

	    if( ParsedFrameParameters->CollapseHolesInDisplayIndices && (ParsedFrameParameters->DisplayFrameIndex > DesiredFrameIndex) )
		DesiredFrameIndex	= ParsedFrameParameters->DisplayFrameIndex;

	    if( ParsedFrameParameters->DisplayFrameIndex > DesiredFrameIndex )
		report( severity_error, "Player_Generic_c::ProcessDecodeToManifest - Hole in display frame indices (Got %d Expected %d).\n", ParsedFrameParameters->DisplayFrameIndex, DesiredFrameIndex );

	    if( ParsedFrameParameters->DisplayFrameIndex < DesiredFrameIndex )
		report( severity_error, "Player_Generic_c::ProcessDecodeToManifest - Frame re-ordering failure (Got %d Expected %d) - Implementation error.\n", ParsedFrameParameters->DisplayFrameIndex, DesiredFrameIndex );

	    //
	    // First calculate the sequence number that applies to this frame.
	    // This calculation may appear odd; the idea is this: assume you
	    // have a video stream IPBB with sequence numbers 0 1 2 3. Frame
	    // re-ordering will yield sequence numbers 0 2 3 1, i.e. any command
	    // to be executed at the end of the stream would appear one frame
	    // early. The calculations below re-map the sequence numbers to
	    // 0 1 1 3, causing the signal to occur at the correct point.
	    //

	    MaximumActualSequenceNumberSeen     = max(SequenceNumber, MaximumActualSequenceNumberSeen);
	    SequenceNumber                      = min(MaximumActualSequenceNumberSeen, MinumumSequenceNumberAccumulated );
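	    // In the IPBB example above, the P frame (sequence number 1) sits in the accumulated
	    // table while the two B frames (sequence numbers 2 and 3) pass through, so
	    // MinumumSequenceNumberAccumulated holds the clamp at 1 for those frames rather than
	    // letting the raw maximum through early.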

	    Time                                = ParsedFrameParameters->NativePlaybackTime;

	    //
	    // Process any outstanding control messages to be applied before this buffer
	    //

	    ProcessAccumulatedControlMessages(  Stream, 
						&AccumulatedBeforeControlMessagesCount,
						PLAYER_MAX_DTOM_MESSAGES,
						Stream->AccumulatedBeforeDtoMControlMessages, 
						SequenceNumber, Time );

	    //
	    // If we are paused, then we loop waiting for something to happen
	    //

	    if( Stream->Playback->Speed == 0 )
	    {
		while( (Stream->Playback->Speed == 0) && !Stream->Step && !Stream->Terminating && !Stream->DiscardingUntilMarkerFrameDtoM )
		{
		    OS_WaitForEvent( &Stream->SingleStepMayHaveHappened, PLAYER_NEXT_FRAME_EVENT_WAIT );
		    OS_ResetEvent( &Stream->SingleStepMayHaveHappened );
		}

		Stream->Step    = false;
	    }

	    //
	    // If we are not discarding everything, then proceed to process the buffer
	    //

	    DiscardBuffer       = Stream->DiscardingUntilMarkerFrameDtoM;

	    //
	    // Handle output timing functions, await entry into the decode window,
	    // then check for frame drop (whether due to trick mode, or because
	    // we are running late).
	    // NOTE1: indicating we are not before decode means reference frames
	    // can be dropped; we will simply not display them.
	    // NOTE2: we may block in these functions, so it is important to
	    // re-check the flags afterwards.
	    //

	    if( !DiscardBuffer )
	    {
		Status  = Stream->OutputTimer->TestForFrameDrop( Buffer, OutputTimerBeforeOutputTiming );

		if( Status == OutputTimerNoError )
		{
		    //
		    // Note we loop here if we are engaged in re-timing the decoded frames
		    //

		    while( !Stream->Terminating && Stream->ReTimeQueuedFrames )
			OS_SleepMilliSeconds( PLAYER_RETIMING_WAIT );

		    Stream->OutputTimer->GenerateFrameTiming( Buffer );
		    Status  = Stream->OutputTimer->TestForFrameDrop( Buffer, OutputTimerBeforeManifestation );
		}

		if( Stream->DiscardingUntilMarkerFrameDtoM ||
		    Stream->Terminating ||
		    (Status != OutputTimerNoError) )
		    DiscardBuffer       = true;

		if( (DiscardBuffer != LastPreManifestDiscardBuffer) &&
		    (Status        == OutputTimerUntimedFrame) )
		{
		    report( severity_error, "Discarding untimed frames.\n" );
		}
		LastPreManifestDiscardBuffer	= DiscardBuffer;

#if 0
		// Nick debug data
		if( Status != OutputTimerNoError )
		    report( severity_info, "Timer Discard(%d) %3d (before Manifest) %08x\n", Stream->StreamType, ParsedFrameParameters->DecodeFrameIndex, Status );
#endif
	    }

	    //
	    // Pass the buffer to the manifestor for manifestation.
	    // We do not release our hold on this buffer; buffers passed
	    // to the manifestor always re-appear on its output ring.
	    // NOTE: calculate the next desired frame index before we
	    // give away the buffer, because ParsedFrameParameters
	    // can become invalid after either of the calls below.
	    //

	    DesiredFrameIndex   = ParsedFrameParameters->DisplayFrameIndex + 1;

	    if( !DiscardBuffer )
	    {
		SequenceNumberStructure->TimePassToManifestor   = OS_GetTimeInMicroSeconds();

#if 0
{
static unsigned long long         LastOutputTime = 0;
static unsigned long long         LastOutputTime1 = 0;
VideoOutputTiming_t              *OutputTiming;
unsigned int                      C0,C1,C2,C3;

Buffer->ObtainMetaDataReference( MetaDataVideoOutputTimingType, (void **)&OutputTiming );
Stream->CodedFrameBufferPool->GetPoolUsage( &C0, &C1, NULL, NULL, NULL );
Stream->DecodeBufferPool->GetPoolUsage( &C2, &C3, NULL, NULL, NULL );
report( severity_info, "Ord %3d (R = %d, K = %d) %d, %6lld %6lld %6lld %6lld (%d/%d %d/%d) (%d %d) %6lld %6lld\n",
	ParsedFrameParameters->DecodeFrameIndex, ParsedFrameParameters->ReferenceFrame, ParsedFrameParameters->KeyFrame, ParsedFrameParameters->DisplayFrameIndex,
	OutputTiming->SystemPlaybackTime - SequenceNumberStructure->TimePassToManifestor,
	SequenceNumberStructure->TimePassToManifestor - SequenceNumberStructure->TimeEntryInProcess2,
	SequenceNumberStructure->TimePassToManifestor - SequenceNumberStructure->TimeEntryInProcess1,
	SequenceNumberStructure->TimePassToManifestor - SequenceNumberStructure->TimeEntryInProcess0,
	C0, C1, C2, C3,
	Stream->FramesToManifestorCount, Stream->FramesFromManifestorCount,
	OutputTiming->SystemPlaybackTime - LastOutputTime, ParsedFrameParameters->NormalizedPlaybackTime - LastOutputTime1 );

//Buffer->TransferOwnership( IdentifierProcessDecodeToManifest, IdentifierManifestor );
//if( (OutputTiming->SystemPlaybackTime - SequenceNumberStructure->TimePassToManifestor) > 0xffffffffULL )
//    Stream->DecodeBufferPool->Dump( DumpAll );

    LastOutputTime = OutputTiming->SystemPlaybackTime;
    LastOutputTime1 = ParsedFrameParameters->NormalizedPlaybackTime;

}
if( Stream->FramesToManifestorCount >= 55 )
{
OS_SleepMilliSeconds( 1000 );
report( severity_info, "Ord(%d) %3d (R = %d, K = %d) %d, %016llx %016llx\n", Stream->StreamType, ParsedFrameParameters->DecodeFrameIndex, ParsedFrameParameters->ReferenceFrame, ParsedFrameParameters->KeyFrame, ParsedFrameParameters->DisplayFrameIndex, ParsedFrameParameters->NormalizedPlaybackTime, ParsedFrameParameters->NativePlaybackTime );
OS_SleepMilliSeconds( 4000 );
}
#endif
		Stream->FramesToManifestorCount++;
		Status	= Stream->Manifestor->QueueDecodeBuffer( Buffer );

		if( Status != ManifestorNoError )
		    DiscardBuffer	= true;

		if( InitialFrameBuffer != NULL )
		{
		    Stream->Codec->ReleaseDecodeBuffer( InitialFrameBuffer );
		    InitialFrameBuffer	= NULL;
		}
	    }

	    if( DiscardBuffer )
	    {
		Stream->Codec->ReleaseDecodeBuffer( Buffer );

		if( Stream->Playback->Speed == 0 )
		    Stream->Step	= true;
	    }

	    //
	    // Process any outstanding control messages to be applied after this buffer
	    //

	    ProcessAccumulatedControlMessages(  Stream,
						&AccumulatedAfterControlMessagesCount,
						PLAYER_MAX_DTOM_MESSAGES,
						Stream->AccumulatedAfterDtoMControlMessages, 
						SequenceNumber, Time );
	}

	//
	// Deal with a player control structure
	//

	else if( BufferType == BufferPlayerControlStructureType )
	{
	    Buffer->ObtainDataReference( NULL, NULL, (void **)(&ControlStructure) );

	    ProcessNow  = (ControlStructure->SequenceType == SequenceTypeImmediate);
	    if( !ProcessNow )
	    {
		SequenceCheck   = (ControlStructure->SequenceType == SequenceTypeBeforeSequenceNumber) ||
				  (ControlStructure->SequenceType == SequenceTypeAfterSequenceNumber);

		ProcessNow      = SequenceCheck ? ((SequenceNumber != INVALID_SEQUENCE_VALUE) && (ControlStructure->SequenceValue <= MaximumActualSequenceNumberSeen)) :
						  ((Time           != INVALID_SEQUENCE_VALUE) && (ControlStructure->SequenceValue <= Time));
	    }

	    if( ProcessNow )
		ProcessControlMessage( Stream, Buffer, ControlStructure );
	    else
	    {
		if( (ControlStructure->SequenceType == SequenceTypeBeforeSequenceNumber) ||
		    (ControlStructure->SequenceType == SequenceTypeBeforePlaybackTime) )
		{
		    Count       = &AccumulatedBeforeControlMessagesCount;
		    Table       = Stream->AccumulatedBeforeDtoMControlMessages;
		}
		else
		{
		    Count       = &AccumulatedAfterControlMessagesCount;
		    Table       = Stream->AccumulatedAfterDtoMControlMessages;
		}

		AccumulateControlMessage( Buffer, ControlStructure, Count, PLAYER_MAX_DTOM_MESSAGES, Table );
	    }
	}
	else
	{
	    report( severity_error, "Player_Generic_c::ProcessDecodeToManifest - Unknown buffer type received - Implementation error.\n" );
	    Buffer->DecrementReferenceCount();
	}
    }

    // NOTE: the remainder of ProcessDecodeToManifest (post-loop cleanup of any still-accumulated
    // buffers and the termination signalling) is omitted from this excerpt.
}
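
The re-ordering loop above is easier to follow in isolation. The following is a minimal, self-contained sketch (plain C++, not player code) of the same hold-back idea: frames arrive in decode order carrying a display index, are parked while they are early, and are released strictly in display-index order.

#include <cstddef>
#include <cstdio>
#include <vector>

struct Frame { int DisplayIndex; };

// Park early frames and release them in display-index order.
static void ReorderToDisplayOrder( const std::vector<Frame> &DecodeOrder )
{
    std::vector<Frame>	Held;			// stand-in for AccumulatedBufferTable
    int			DesiredIndex	= 0;	// stand-in for DesiredFrameIndex

    for( const Frame &Arrival : DecodeOrder )
    {
	Held.push_back( Arrival );

	bool Released;
	do
	{
	    Released	= false;
	    for( size_t i = 0; i < Held.size(); i++ )
		if( Held[i].DisplayIndex == DesiredIndex )
		{
		    printf( "Manifest display index %d\n", Held[i].DisplayIndex );
		    Held.erase( Held.begin() + i );
		    DesiredIndex++;
		    Released	= true;
		    break;
		}
	} while( Released );
    }
}

int main( void )
{
    // IPBB example: decode order carries display indices 0 3 1 2; output is 0 1 2 3.
    ReorderToDisplayOrder( { {0}, {3}, {1}, {2} } );
    return 0;
}
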
void   Player_Generic_c::ProcessPostManifest(	PlayerStream_t		  Stream )
{
PlayerStatus_t			  Status;
RingStatus_t			  RingStatus;
Buffer_t			  Buffer;
Buffer_t			  OriginalCodedFrameBuffer;
BufferType_t			  BufferType;
PlayerControlStructure_t	 *ControlStructure;
ParsedFrameParameters_t		 *ParsedFrameParameters;
PlayerSequenceNumber_t		 *SequenceNumberStructure;
unsigned long long		  LastEntryTime;
unsigned long long		  SequenceNumber;
unsigned long long                MaximumActualSequenceNumberSeen;
unsigned long long 		  Time;
unsigned int			  AccumulatedBeforeControlMessagesCount;
unsigned int			  AccumulatedAfterControlMessagesCount;
bool				  ProcessNow;
unsigned int			 *Count;
PlayerBufferRecord_t		 *Table;
VideoOutputTiming_t		 *OutputTiming;
unsigned long long		  Now;

//

    LastEntryTime				= OS_GetTimeInMicroSeconds();
    SequenceNumber				= INVALID_SEQUENCE_VALUE;
    MaximumActualSequenceNumberSeen             = 0;
    Time					= INVALID_TIME;
    AccumulatedBeforeControlMessagesCount	= 0;
    AccumulatedAfterControlMessagesCount	= 0;

    //
    // Signal we have started
    //

    OS_LockMutex( &Lock );

    Stream->ProcessRunningCount++;

    if( Stream->ProcessRunningCount == Stream->ExpectedProcessCount )
	OS_SetEvent( &Stream->StartStopEvent );

    OS_UnLockMutex( &Lock );

    //
    // Main Loop
    //

    while( !Stream->Terminating )
    {
	RingStatus	= Stream->ManifestedBufferRing->Extract( (unsigned int *)(&Buffer), PLAYER_MAX_EVENT_WAIT );

	Now	= OS_GetTimeInMicroSeconds();
	if( Stream->ReTimeQueuedFrames && ((Now - Stream->ReTimeStart) > PLAYER_MAX_TIME_IN_RETIMING) )
	    Stream->ReTimeQueuedFrames	= false;

	if( RingStatus == RingNothingToGet )
	    continue;

	Buffer->GetType( &BufferType );
	Buffer->TransferOwnership( IdentifierProcessPostManifest );

	//
	// Deal with a coded frame buffer 
	//

	if( BufferType == Stream->DecodeBufferType )
	{
	    Stream->FramesFromManifestorCount++;

#if 0  
{
	static unsigned long long         LastTime = 0;
	static unsigned long long         LastActualTime = 0;
	AudioOutputTiming_t              *OutputTiming;
	
	Buffer->ObtainMetaDataReference( MetaDataAudioOutputTimingType, (void **)&OutputTiming);
	
	report( severity_info, "Post Dn = %d, DS= %6lld, DAS = %6lld, S = %016llx,AS = %016llx\n",
	                OutputTiming->DisplayCount,
	                OutputTiming->SystemPlaybackTime - LastTime,
	                OutputTiming->ActualSystemPlaybackTime - LastActualTime,
	                OutputTiming->SystemPlaybackTime,
					OutputTiming->ActualSystemPlaybackTime );

    LastTime            = OutputTiming->SystemPlaybackTime;
    LastActualTime      = OutputTiming->ActualSystemPlaybackTime;
}
#endif
#if 0
{
static unsigned long long	  LastTime = 0;
static unsigned long long	  LastActualTime = 0;
VideoOutputTiming_t     	 *OutputTiming;

Buffer->ObtainMetaDataReference( MetaDataVideoOutputTimingType, (void **)&OutputTiming );

report( severity_info, "Post Dn = %d %d, I = %d, TFF = %d, DS= %6lld, DAS = %6lld, S = %016llx, AS = %016llx\n",
		OutputTiming->DisplayCount[0], OutputTiming->DisplayCount[1],
		OutputTiming->Interlaced, OutputTiming->TopFieldFirst,
		OutputTiming->SystemPlaybackTime - LastTime,
		OutputTiming->ActualSystemPlaybackTime - LastActualTime,
		OutputTiming->SystemPlaybackTime, OutputTiming->ActualSystemPlaybackTime );

    LastTime 		= OutputTiming->SystemPlaybackTime;
    LastActualTime 	= OutputTiming->ActualSystemPlaybackTime;
}
#endif

	    //
	    // Obtain a sequence number from the buffer
	    //

	    Status	= Buffer->ObtainAttachedBufferReference( Stream->CodedFrameBufferType, &OriginalCodedFrameBuffer );
	    if( Status != PlayerNoError )
	    {
	        report( severity_error, "Player_Generic_c::ProcessPostManifest - Unable to obtain the the original coded frame buffer - Implementation error\n" );
		Buffer->DecrementReferenceCount( IdentifierProcessPostManifest );
		continue;
	    }

	    Status	= OriginalCodedFrameBuffer->ObtainMetaDataReference( MetaDataSequenceNumberType, (void **)(&SequenceNumberStructure) );
	    if( Status != PlayerNoError )
	    {
	        report( severity_error, "Player_Generic_c::ProcessPostManifest - Unable to obtain the meta data \"SequenceNumber\" - Implementation error\n" );
		Buffer->DecrementReferenceCount( IdentifierProcessPostManifest );
		continue;
	    }

	    Status	= Buffer->ObtainMetaDataReference( MetaDataParsedFrameParametersReferenceType, (void **)(&ParsedFrameParameters) );
	    if( Status != PlayerNoError )
	    {
	        report( severity_error, "Player_Generic_c::ProcessPostManifest - Unable to obtain the meta data \"ParsedFrameParametersReference\" - Implementation error\n" );
		Buffer->DecrementReferenceCount( IdentifierProcessPostManifest );
		continue;
	    }

	    //
	    // Check for whether or not we are in re-timing
	    //
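	    // (While ReTimeQueuedFrames is set, buffers coming back from the manifestor that have
	    //  not actually been displayed yet (no valid ActualSystemPlaybackTime) are given fresh
	    //  timings and queued again; the first buffer that has genuinely been displayed ends
	    //  the re-timing pass.)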

	    if( Stream->ReTimeQueuedFrames && !SequenceNumberStructure->MarkerFrame )
	    {
		Status	= Buffer->ObtainMetaDataReference( (Stream->StreamType == StreamTypeVideo ? MetaDataVideoOutputTimingType : MetaDataAudioOutputTimingType),
							   (void **)&OutputTiming );
		if( Status != PlayerNoError )
		{
	            report( severity_error, "Player_Generic_c::ProcessPostManifest - Unable to obtain the meta data \"%s\" - Implementation error\n",
				(Stream->StreamType == StreamTypeVideo ? "VideoOutputTiming" : "AudioOutputTiming") );
		    Buffer->DecrementReferenceCount( IdentifierProcessPostManifest );
		    continue;
		}

		if( ValidTime(OutputTiming->ActualSystemPlaybackTime) )
		{
		    Stream->ReTimeQueuedFrames	= false;
		}
		else
		{
		    Stream->OutputTimer->GenerateFrameTiming( Buffer );
		    Status  = Stream->OutputTimer->TestForFrameDrop( Buffer, OutputTimerBeforeManifestation );
		    if( !Stream->Terminating && (Status == OutputTimerNoError) )
		    {
			Stream->FramesToManifestorCount++;
			Stream->Manifestor->QueueDecodeBuffer( Buffer );
			continue;
		    }
		}
	    }

	    //
	    // Extract the sequence number, and write the timing statistics
	    //

//report( severity_info, "MQ Post Man %d - %d\n", Stream->StreamType, ParsedFrameParameters->DisplayFrameIndex );

	    SequenceNumberStructure->TimeEntryInProcess3	= OS_GetTimeInMicroSeconds();
	    SequenceNumberStructure->DeltaEntryInProcess3	= SequenceNumberStructure->TimeEntryInProcess3 - LastEntryTime;
	    LastEntryTime					= SequenceNumberStructure->TimeEntryInProcess3;
	    SequenceNumber					= SequenceNumberStructure->Value;
	    MaximumActualSequenceNumberSeen			= max(SequenceNumber, MaximumActualSequenceNumberSeen);
	    Time						= ParsedFrameParameters->NativePlaybackTime;

#ifndef __TDT__
	    ProcessStatistics( Stream, SequenceNumberStructure );
#endif

	    if( SequenceNumberStructure->MarkerFrame )
	    {
		Stream->DiscardingUntilMarkerFramePostM	= false;
		Time					= INVALID_TIME;
	    }

	    //
	    // Process any outstanding control messages to be applied before this buffer
	    //

	    ProcessAccumulatedControlMessages( 	Stream, 
						&AccumulatedBeforeControlMessagesCount,
						PLAYER_MAX_POSTM_MESSAGES,
						Stream->AccumulatedBeforePostMControlMessages, 
						SequenceNumber, Time );

	    //
	    // Pass buffer back into output timer
	    // and release the buffer.
	    //

	    if( !SequenceNumberStructure->MarkerFrame )
	    {
		Stream->OutputTimer->RecordActualFrameTiming( Buffer );
		Stream->Codec->ReleaseDecodeBuffer( Buffer );
	    }
	    else
		Buffer->DecrementReferenceCount( IdentifierProcessPostManifest );

	    //
	    // Process any outstanding control messages to be applied after this buffer
	    //

	    ProcessAccumulatedControlMessages( 	Stream,
						&AccumulatedAfterControlMessagesCount,
						PLAYER_MAX_POSTM_MESSAGES,
						Stream->AccumulatedAfterPostMControlMessages, 
						SequenceNumber, Time );
	}

	//
	// Deal with a player control structure
	//

	else if( BufferType == BufferPlayerControlStructureType )
	{
	    Buffer->ObtainDataReference( NULL, NULL, (void **)(&ControlStructure) );

	    ProcessNow	= (ControlStructure->SequenceType == SequenceTypeImmediate) ||
			  ((SequenceNumber != INVALID_SEQUENCE_VALUE) && (ControlStructure->SequenceValue <= MaximumActualSequenceNumberSeen));

	    if( ProcessNow )
		ProcessControlMessage( Stream, Buffer, ControlStructure );
	    else
	    {
		if( (ControlStructure->SequenceType == SequenceTypeBeforeSequenceNumber) ||
		    (ControlStructure->SequenceType == SequenceTypeBeforePlaybackTime) )
		{
		    Count	= &AccumulatedBeforeControlMessagesCount;
		    Table	= Stream->AccumulatedBeforePostMControlMessages;
		}
		else
		{
		    Count	= &AccumulatedAfterControlMessagesCount;
		    Table	= Stream->AccumulatedAfterPostMControlMessages;
		}

		AccumulateControlMessage( Buffer, ControlStructure, Count, PLAYER_MAX_POSTM_MESSAGES, Table );
	    }
	}
	else
	{
	    report( severity_error, "Player_Generic_c::ProcessPostManifest - Unknown buffer type received - Implementation error.\n" );
	    Buffer->DecrementReferenceCount();
	}
    }

    report( severity_info, "3333 Holding control strutures %d\n", AccumulatedBeforeControlMessagesCount + AccumulatedAfterControlMessagesCount );

    //
    // Make sure no one will wait for these
    //

    Stream->ReTimeQueuedFrames	= false;

    //
    // Signal we have terminated
    //

    OS_LockMutex( &Lock );

    Stream->ProcessRunningCount--;

    if( Stream->ProcessRunningCount == 0 )
	OS_SetEvent( &Stream->StartStopEvent );

    OS_UnLockMutex( &Lock );
}
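
Both processes above use the same accumulate/flush pattern for control messages that are tied to a sequence number or playback time. The sketch below uses assumed, simplified types (not the player's own ProcessAccumulatedControlMessages / AccumulateControlMessage; the message text and trigger value are invented for illustration): a message is parked until the running sequence value reaches its trigger, then processed and removed.

#include <cstddef>
#include <cstdio>
#include <vector>

struct Message { unsigned long long SequenceValue; const char *Text; };

// Flush every parked message whose trigger value has now been reached.
static void FlushDue( std::vector<Message> &Table, unsigned long long SequenceNumber )
{
    for( size_t i = 0; i < Table.size(); )
	if( Table[i].SequenceValue <= SequenceNumber )
	{
	    printf( "Process '%s' at sequence %llu\n", Table[i].Text, SequenceNumber );
	    Table.erase( Table.begin() + i );
	}
	else
	    i++;
}

int main( void )
{
    std::vector<Message>	Before;

    Before.push_back( { 2, "hypothetical: switch output window" } );	// accumulated, not yet due

    for( unsigned long long SequenceNumber = 0; SequenceNumber < 4; SequenceNumber++ )
	FlushDue( Before, SequenceNumber );				// fires once sequence 2 is reached

    return 0;
}
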
Example #4
CodecStatus_t   Codec_DvpVideo_c::Input(Buffer_t CodedBuffer)
{
	CodecStatus_t            Status;
	unsigned int             CodedDataLength;
	StreamInfo_t            *StreamInfo;
	Buffer_t             MarkerBuffer;
	BufferStructure_t        BufferStructure;
	ParsedFrameParameters_t     *ParsedFrameParameters;
	ParsedVideoParameters_t     *ParsedVideoParameters;
	Buffer_t             CapturedBuffer;
	ParsedVideoParameters_t     *CapturedParsedVideoParameters;
	//
	// Extract the useful coded data information
	//
	Status      = CodedBuffer->ObtainDataReference(NULL, &CodedDataLength, (void **)(&StreamInfo), CachedAddress);
	if (Status != PlayerNoError)
	{
		report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Unable to obtain data reference.\n");
		return Status;
	}
	Status      = CodedBuffer->ObtainMetaDataReference(Player->MetaDataParsedFrameParametersType, (void **)(&ParsedFrameParameters));
	if (Status != PlayerNoError)
	{
		report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Unable to obtain the meta data \"ParsedFrameParameters\".\n");
		return Status;
	}
	Status      = CodedBuffer->ObtainMetaDataReference(Player->MetaDataParsedVideoParametersType, (void**)&ParsedVideoParameters);
	if (Status != PlayerNoError)
	{
		report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Unable to obtain the meta data \"ParsedVideoParameters\".\n");
		return Status;
	}
	//
	// Handle the special case of a marker frame
	//
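	// A marker frame is a zero-length input with no new stream or frame parameters; it is
	// turned into a FormatMarkerFrame decode buffer here and passed down the output ring so
	// that the downstream processes (see the MarkerFrame handling in ProcessDecodeToManifest
	// above) know the preceding data has drained.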
	if ((CodedDataLength == 0) && !ParsedFrameParameters->NewStreamParameters && !ParsedFrameParameters->NewFrameParameters)
	{
		//
		// Get a marker buffer
		//
		memset(&BufferStructure, 0x00, sizeof(BufferStructure_t));
		BufferStructure.Format  = FormatMarkerFrame;
		Status      = Manifestor->GetDecodeBuffer(&BufferStructure, &MarkerBuffer);
		if (Status != ManifestorNoError)
		{
			report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Failed to get marker decode buffer from manifestor.\n");
			return Status;
		}
		MarkerBuffer->TransferOwnership(IdentifierCodec);
		Status      = MarkerBuffer->AttachMetaData(Player->MetaDataParsedFrameParametersReferenceType, UNSPECIFIED_SIZE, (void *)ParsedFrameParameters);
		if (Status != PlayerNoError)
		{
			report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Unable to attach a reference to \"ParsedFrameParameters\" to the marker buffer.\n");
			return Status;
		}
		MarkerBuffer->AttachBuffer(CodedBuffer);
		//
		// Queue/pass on the buffer
		//
		OutputRing->Insert((uintptr_t)MarkerBuffer);
		return CodecNoError;
	}
	//
	// Attach the coded data fields to the decode/captured buffer
	//
	CapturedBuffer  = (Buffer_t)StreamInfo->buffer_class;
	if (CapturedBuffer == NULL)
	{
		report(severity_fatal, "Codec_DvpVideo_c::Input(DVP) - NULL Buffer\n");
		return CodecNoError;
	}
//
	Status      = CapturedBuffer->ObtainMetaDataReference(Player->MetaDataParsedVideoParametersType, (void**)&CapturedParsedVideoParameters);
	if (Status != PlayerNoError)
	{
		report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Unable to obtain the meta data \"ParsedVideoParameters\" from the captured buffer.\n");
		return Status;
	}
	memcpy(CapturedParsedVideoParameters, ParsedVideoParameters, sizeof(ParsedVideoParameters_t));
//
	Status      = CapturedBuffer->AttachMetaData(Player->MetaDataParsedFrameParametersReferenceType, UNSPECIFIED_SIZE, (void *)ParsedFrameParameters);
	if (Status != BufferNoError)
	{
		report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Failed to attach Frame Parameters\n");
		return Status;
	}
	//
	// Switch the ownership hierarchy, and allow the captured buffer to exist on its own.
	//
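	// (The extra reference below is taken before DetachBuffer() so the captured buffer cannot
	//  be released while it is momentarily unattached; re-attaching the coded buffer underneath
	//  it then inverts the original parent/child relationship.)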
	CapturedBuffer->IncrementReferenceCount();
	Status  = CodedBuffer->DetachBuffer(CapturedBuffer);
	if (Status != BufferNoError)
	{
		report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Failed to detach captured buffer from coded frame buffer\n");
		return Status;
	}
	Status      = CapturedBuffer->AttachBuffer(CodedBuffer);
	if (Status != BufferNoError)
	{
		report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Failed to attach captured buffer to Coded Frame Buffer\n");
		return Status;
	}
	//
	// Pass the captured buffer on
	//
	OutputRing->Insert((uintptr_t)CapturedBuffer);
	return CodecNoError;
}