////////////////////////////////////////////////////////////////////////////
//
//  ReleaseDecodeBuffer
//
//  For DVP video there is no codec-private state tied to a decode
//  buffer, so releasing one is simply a matter of dropping our
//  reference and letting the pool reclaim it.
//
CodecStatus_t Codec_DvpVideo_c::ReleaseDecodeBuffer(Buffer_t Buffer)
{
#if 0
    // Debug aid (disabled): scrub the buffer with 0x10 so that any
    // stale picture data shows up clearly if the buffer is ever
    // displayed again before being rewritten.
    unsigned int    Length;
    unsigned char  *Pointer;

    Buffer->ObtainDataReference(&Length, NULL, (void **)(&Pointer), CachedAddress);
    memset(Pointer, 0x10, 0xa8c00);
#endif

    Buffer->DecrementReferenceCount();

    return CodecNoError;
}
////////////////////////////////////////////////////////////////////////////
//
//  Demux
//
//  Base-class portion of the demux entry point: latch the input
//  descriptor meta data and the raw data extent of the supplied buffer
//  into the demultiplexor context, ready for a derived class to parse.
//
DemultiplexorStatus_t Demultiplexor_Base_c::Demux(
                        PlayerPlayback_t       Playback,
                        DemultiplexorContext_t Context,
                        Buffer_t               Buffer)
{
    DemultiplexorBaseContext_t  BaseContext  = (DemultiplexorBaseContext_t)Context;
    PlayerStatus_t              LookupStatus;

    //
    // Record the input descriptor attached to the buffer
    //

    LookupStatus = Buffer->ObtainMetaDataReference(Player->MetaDataInputDescriptorType, (void **)(&BaseContext->Descriptor));
    if (LookupStatus != PlayerNoError)
    {
        report(severity_error, "Demultiplexor_Base_c::Demux - Unable to obtain the meta data input descriptor.\n");
        return LookupStatus;
    }

    //
    // Record where the buffer's data lives and how much there is
    //

    LookupStatus = Buffer->ObtainDataReference(NULL, &BaseContext->BufferLength, (void **)(&BaseContext->BufferData));
    if (LookupStatus != PlayerNoError)
    {
        report(severity_error, "Demultiplexor_Base_c::Demux - unable to obtain data reference.\n");
        return LookupStatus;
    }

    return DemultiplexorNoError;
}
////////////////////////////////////////////////////////////////////////////
//
//  CallInSequence
//
//  Queue a deferred call to a component function so that it executes in
//  stream order rather than immediately.  A control structure buffer is
//  obtained from the pool, filled in with the function identifier and its
//  variadic payload, and inserted onto the inter-process ring belonging to
//  the pipeline stage at which the call must take effect.
//
//  Stream        - stream whose processing sequence the call joins
//  SequenceType  - how SequenceValue is to be interpreted
//  SequenceValue - the point in the sequence at which to perform the call
//  Fn            - identifier of the component function to invoke
//  ...           - per-function payload (see the switch below)
//
//  Returns PlayerNoError on success, a buffer-pool status on allocation
//  failure, or PlayerNotSupported for an unrecognised function id.
//
PlayerStatus_t Player_Generic_c::CallInSequence(
                        PlayerStream_t            Stream,
                        PlayerSequenceType_t      SequenceType,
                        PlayerSequenceValue_t     SequenceValue,
                        PlayerComponentFunction_t Fn,
                        ... )
{
    va_list                   List;
    BufferStatus_t            Status;
    Buffer_t                  ControlStructureBuffer;
    PlayerControlStructure_t *ControlStructure;
    Ring_t                    DestinationRing;

    //
    // Garner a control structure, fill it in
    //

    Status = PlayerControlStructurePool->GetBuffer( &ControlStructureBuffer, IdentifierInSequenceCall );
    if( Status != PlayerNoError )
    {
        report( severity_error, "Player_Generic_c::CallInSequence - Failed to get a control structure buffer.\n" );
        return Status;
    }

    ControlStructureBuffer->ObtainDataReference( NULL, NULL, (void **)(&ControlStructure) );
    ControlStructure->Action        = ActionInSequenceCall;
    ControlStructure->SequenceType  = SequenceType;
    ControlStructure->SequenceValue = SequenceValue;
    ControlStructure->InSequence.Fn = Fn;

    //
    // Select the destination ring (which pipeline process will perform
    // the call) and capture the variadic payload each function expects.
    // NOTE(review): the va_arg extraction order here is a contract with
    // the process that later unpacks the control structure - do not
    // reorder these reads.
    //

    DestinationRing = NULL;

    switch( Fn )
    {
        // Payload: <size, block> copied into the control structure
        case FrameParserFnSetModuleParameters:
                DestinationRing = Stream->CollatedFrameRing;
                va_start( List, Fn );
                ControlStructure->InSequence.UnsignedInt = va_arg( List, unsigned int );
                memcpy( ControlStructure->InSequence.Block, va_arg( List, void * ), ControlStructure->InSequence.UnsignedInt );
                va_end( List );
                break;

        // No payload
        case CodecFnOutputPartialDecodeBuffers:
                DestinationRing = Stream->ParsedFrameRing;
                break;

        // Payload: a single unsigned int (the reference frame index)
        case CodecFnReleaseReferenceFrame:
                DestinationRing = Stream->ParsedFrameRing;
                va_start( List, Fn );
                ControlStructure->InSequence.UnsignedInt = va_arg( List, unsigned int );
                va_end( List );
                //report(severity_info, "Requesting a release %d\n", ControlStructure->InSequence.UnsignedInt );
                break;

        // Payload: <size, block>
        case CodecFnSetModuleParameters:
                DestinationRing = Stream->ParsedFrameRing;
                va_start( List, Fn );
                ControlStructure->InSequence.UnsignedInt = va_arg( List, unsigned int );
                memcpy( ControlStructure->InSequence.Block, va_arg( List, void * ), ControlStructure->InSequence.UnsignedInt );
                va_end( List );
                break;

        // Payload: <size, block>
        case ManifestorFnSetModuleParameters:
                DestinationRing = Stream->DecodedFrameRing;
                va_start( List, Fn );
                ControlStructure->InSequence.UnsignedInt = va_arg( List, unsigned int );
                memcpy( ControlStructure->InSequence.Block, va_arg( List, void * ), ControlStructure->InSequence.UnsignedInt );
                va_end( List );
                break;

        // Payload: a PlayerEventRecord_t copied by value
        case ManifestorFnQueueEventSignal:
                DestinationRing = Stream->DecodedFrameRing;
                va_start( List, Fn );
                memcpy( &ControlStructure->InSequence.Event, va_arg( List, PlayerEventRecord_t * ), sizeof(PlayerEventRecord_t) );
                va_end( List );
                break;

        // Payload: four unsigned ints (window x, y, width, height)
        case ManifestorVideoFnSetInputWindow:
        case ManifestorVideoFnSetOutputWindow:
                DestinationRing = Stream->DecodedFrameRing;
                va_start( List, Fn );
                {
                    unsigned int *Words = (unsigned int *)ControlStructure->InSequence.Block;

                    Words[0] = va_arg( List, unsigned int );
                    Words[1] = va_arg( List, unsigned int );
                    Words[2] = va_arg( List, unsigned int );
                    Words[3] = va_arg( List, unsigned int );
                }
                va_end( List );
                break;

        // No payload
        case OutputTimerFnResetTimeMapping:
                DestinationRing = Stream->DecodedFrameRing;
                break;

        // Payload: <size, block>
        case OutputTimerFnSetModuleParameters:
                DestinationRing = Stream->DecodedFrameRing;
                va_start( List, Fn );
                ControlStructure->InSequence.UnsignedInt = va_arg( List, unsigned int );
                memcpy( ControlStructure->InSequence.Block, va_arg( List, void * ), ControlStructure->InSequence.UnsignedInt );
                va_end( List );
                break;

        // Payload: pointer to the OS event to set
        case OSFnSetEventOnManifestation:
                DestinationRing = Stream->DecodedFrameRing;           // This is where manifestation would take place
                va_start( List, Fn );
                ControlStructure->InSequence.Pointer = (void *)va_arg( List, OS_Event_t * );
                va_end( List );
                break;

        // Payload: pointer to the OS event to set
        case OSFnSetEventOnPostManifestation:
                DestinationRing = Stream->ManifestedBufferRing;
                va_start( List, Fn );
                ControlStructure->InSequence.Pointer = (void *)va_arg( List, OS_Event_t * );
                va_end( List );
                break;

        // Component-switch calls: payload is the stream pointer, and the
        // destination ring is the stage at which the switch must occur.
        case PlayerFnSwitchFrameParser:
                DestinationRing = Stream->CollatedFrameRing;
                va_start( List, Fn );
                ControlStructure->InSequence.Pointer = (void *)va_arg( List, PlayerStream_t );
                va_end( List );
                break;

        case PlayerFnSwitchCodec:
                DestinationRing = Stream->ParsedFrameRing;
                va_start( List, Fn );
                ControlStructure->InSequence.Pointer = (void *)va_arg( List, PlayerStream_t );
                va_end( List );
                break;

        case PlayerFnSwitchOutputTimer:
                DestinationRing = Stream->DecodedFrameRing;
                va_start( List, Fn );
                ControlStructure->InSequence.Pointer = (void *)va_arg( List, PlayerStream_t );
                va_end( List );
                break;

        case PlayerFnSwitchComplete:
                DestinationRing = Stream->ManifestedBufferRing;
                va_start( List, Fn );
                ControlStructure->InSequence.Pointer = (void *)va_arg( List, PlayerStream_t );
                va_end( List );
                break;

        default:
                // Unknown function - give the control structure back and fail
                report( severity_error, "Player_Generic_c::CallInSequence - Unsupported function call.\n" );
                ControlStructureBuffer->DecrementReferenceCount( IdentifierInSequenceCall );
                return PlayerNotSupported;
    }

    //
    // Send it to the appropriate process
    //

    DestinationRing->Insert( (unsigned int)ControlStructureBuffer );

    return PlayerNoError;
}
//}}}
//{{{  FillOutSetStreamParametersCommand
// /////////////////////////////////////////////////////////////////////////
//
//      Function to fill out the stream parameters
//      structure for an Rmv mme transformer.
//
//      Derives the firmware format identifier and RV8/RV9 flag from the
//      bitstream version pair in the sequence header, records the stream
//      dimensions and RPR sizes in InitializationParameters, and flags
//      that the transformer must be restarted with the new parameters.
//
CodecStatus_t Codec_MmeVideoRmv_c::FillOutSetStreamParametersCommand( void )
{
    RmvStreamParameters_t*  Parsed          = (RmvStreamParameters_t*)ParsedFrameParameters->StreamParameterStructure;
    RmvVideoSequence_t*     SequenceHeader  = &Parsed->SequenceHeader;
    unsigned int            MaxWidth;
    unsigned int            MaxHeight;
    RV89Dec_fid_t           FormatId;
    int                     IsRV8           = 1;
    unsigned int            NumRPRSizes;
    unsigned int            i;

    MaxWidth    = SequenceHeader->MaxWidth;
    MaxHeight   = SequenceHeader->MaxHeight;

    // Map the bitstream (major, minor) version pair onto the firmware
    // format identifier and the RV8/RV9 flag.
    if ((SequenceHeader->BitstreamVersion      == RV9_BITSTREAM_VERSION) &&
        (SequenceHeader->BitstreamMinorVersion == RV9_BITSTREAM_MINOR_VERSION))
    {
        // NOTE(review): RV9 uses the same format id as RV8 below, with only
        // IsRV8 distinguishing them - confirm against the firmware interface.
        FormatId    = RV89DEC_FID_REALVIDEO30;
        IsRV8       = 0;
    }
    else if ((SequenceHeader->BitstreamVersion      == RV8_BITSTREAM_VERSION) &&
             (SequenceHeader->BitstreamMinorVersion == RV8_BITSTREAM_MINOR_VERSION))
    {
        FormatId    = RV89DEC_FID_REALVIDEO30;
        IsRV8       = 1;
    }
    else if (SequenceHeader->BitstreamMinorVersion == RV89_RAW_BITSTREAM_MINOR_VERSION)
    {
        // Raw bitstream: hand it to the combined RV8/RV9 decoder
        FormatId    = RV89DEC_FID_RV89COMBO;
        if (SequenceHeader->BitstreamVersion == RV8_BITSTREAM_VERSION)
            IsRV8   = 1;
    }
    else
    {
        CODEC_ERROR ("Invalid Bitstream versions (%d, %d)\n",
                     SequenceHeader->BitstreamVersion, SequenceHeader->BitstreamMinorVersion);
        return CodecError;
    }

    // RPR (reduced picture resolution) sizes only apply to RV8 streams
    NumRPRSizes = IsRV8 ? SequenceHeader->NumRPRSizes : 0;

#if 0
    // Disabled: would restart the transformer only when a parameter
    // actually changed, rather than unconditionally as below.
    if ((MaxWidth    != InitializationParameters.MaxWidth)               ||
        (MaxHeight   != InitializationParameters.MaxHeight)              ||
        (FormatId    != InitializationParameters.StreamFormatIdentifier) ||
        (IsRV8       != InitializationParameters.isRV8)                  ||
        (NumRPRSizes != InitializationParameters.NumRPRSizes))
    {
#endif

    InitializationParameters.MaxWidth                   = MaxWidth;
    InitializationParameters.MaxHeight                  = MaxHeight;
    InitializationParameters.StreamFormatIdentifier     = FormatId;
    InitializationParameters.isRV8                      = IsRV8;
    InitializationParameters.NumRPRSizes                = NumRPRSizes;

    // RPR sizes are stored as flat (width, height) pairs
    for (i=0; i<(NumRPRSizes*2); i+=2)
    {
        InitializationParameters.RPRSize[i]             = SequenceHeader->RPRSize[i];
        InitializationParameters.RPRSize[i+1]           = SequenceHeader->RPRSize[i+1];
    }

    InitializationParameters.pIntraMBInfo               = NULL;

    RestartTransformer  = true;

    return CodecNoError;
}
//}}}
//{{{  FillOutDecodeCommand
// /////////////////////////////////////////////////////////////////////////
//
//      Function to fill out the decode parameters
//      structure for an rmv mme transformer.
//
//      Builds the transform parameters for one picture: locates the coded
//      data (layout depends on the firmware interface version selected by
//      the defines below), publishes the segment list through a side
//      buffer, obtains a raster-format output buffer from the manifestor,
//      wires up current/previous reference frame addresses, and finally
//      fills in the generic MME command.
//
//#define RV89_INTERFACE_V0_0_4
CodecStatus_t Codec_MmeVideoRmv_c::FillOutDecodeCommand( void )
{
    RmvCodecDecodeContext_t*    Context     = (RmvCodecDecodeContext_t*)DecodeContext;
    RmvFrameParameters_t*       Frame       = (RmvFrameParameters_t*)ParsedFrameParameters->FrameParameterStructure;
    RV89Dec_TransformParams_t*  Param;
    RmvVideoSegmentList_t*      SegmentList;
    Buffer_t                    SegmentInfoBuffer;
    RV89Dec_Segment_Info*       SegmentInfo;
    Buffer_t                    RasterBuffer;
    BufferStructure_t           RasterBufferStructure;
    unsigned char*              RasterBufferBase;
    CodecStatus_t               Status;
    unsigned int                i;

    // For rmv we do not do slice decodes.
    KnownLastSliceInFieldFrame  = true;

    Param                       = &Context->DecodeParameters;
    SegmentList                 = &Frame->SegmentList;

    // Fillout the straight forward command parameters
#if defined (RV89_INTERFACE_V0_0_4)
    Param->InBuffer.pCompressedData             = (unsigned char*)CodedData;
    Param->InBuffer.CompressedDataBufferSize    = CodedDataLength;
#elif defined (SMALL_CIRCULAR_BUFFER)
    // The first two assume a circular buffer arrangement
    Param->InBuffer.pStartPtr                   = (unsigned char*)CodedData;
    Param->InBuffer.pEndPtr                     = Param->InBuffer.pStartPtr + CodedDataLength + 4096;
    Param->InBuffer.PictureOffset               = 0;
    Param->InBuffer.PictureSize                 = CodedDataLength;
#else
    // The first two assume a circular buffer arrangement
    // (whole address space treated as the "circular" window, with the
    // picture located by its absolute address as the offset)
    Param->InBuffer.pStartPtr                   = (unsigned char*)0x00;
    Param->InBuffer.pEndPtr                     = (unsigned char*)0xffffffff;
    Param->InBuffer.PictureOffset               = (unsigned int)CodedData;
    Param->InBuffer.PictureSize                 = CodedDataLength;
#endif

    // Get the segment list buffer - sized for the stream's segments plus
    // one terminating entry.
    Status      = SegmentListPool->GetBuffer (&SegmentInfoBuffer, (sizeof(RV89Dec_Segment_Info))*(SegmentList->NumSegments+1));
    if (Status != BufferNoError)
    {
        CODEC_ERROR ("Failed to get segment info buffer.\n" );
        return Status;
    }

    // Copy segment list
    //Param->InBuffer.NumSegments               = SegmentList->NumSegments;
    Param->InBuffer.NumSegments                 = SegmentList->NumSegments;

    SegmentInfoBuffer->ObtainDataReference      (NULL, NULL, (void**)&SegmentInfo, UnCachedAddress);
    for (i=0; i<SegmentList->NumSegments; i++)
    {
        SegmentInfo[i].is_valid                 = 1;
        SegmentInfo[i].offset                   = SegmentList->Segment[i].Offset;
    }
    // Terminating entry: invalid, offset = end of coded data
    SegmentInfo[i].is_valid                     = 0;
    SegmentInfo[i].offset                       = CodedDataLength;

    // Tell far side how to find list (firmware needs the physical address)
    SegmentInfoBuffer->ObtainDataReference      (NULL, NULL, (void **)&Param->InBuffer.pSegmentInfo, PhysicalAddress);

    // Attach allocated segment list buffer to decode context and let go of it
    // (it will now live exactly as long as the decode context does)
    DecodeContextBuffer->AttachBuffer           (SegmentInfoBuffer);
    SegmentInfoBuffer->DecrementReferenceCount  ();

    Status      = FillOutDecodeBufferRequest (&RasterBufferStructure);
    if (Status != BufferNoError)
    {
        report (severity_error, "Codec_MmeVideoRmv_c::FillOutDecodeCommand - Failed to fill out a buffer request structure.\n");
        return Status;
    }

    // Override the format so we get one sized for raster rather than macroblock
    RasterBufferStructure.Format                = FormatVideo420_Planar;
    RasterBufferStructure.ComponentBorder[0]    = 16;
    RasterBufferStructure.ComponentBorder[1]    = 16;

    // Ask the manifestor for a buffer of the new format
    Status      = Manifestor->GetDecodeBuffer (&RasterBufferStructure, &RasterBuffer);
    if (Status != BufferNoError)
    {
        report (severity_error, "Codec_MmeVideoRmv_c::FillOutDecodeCommand - Failed to obtain a decode buffer from the manifestor.\n");
        return Status;
    }
    RasterBuffer->ObtainDataReference           (NULL, NULL, (void **)&RasterBufferBase, PhysicalAddress);

    // Fill in all buffer luma and chroma pointers
    Param->Outbuffer.pLuma                      = (RV89Dec_LumaAddress_t)BufferState[CurrentDecodeBufferIndex].BufferLumaPointer;
    Param->Outbuffer.pChroma                    = (RV89Dec_ChromaAddress_t)BufferState[CurrentDecodeBufferIndex].BufferChromaPointer;

#if defined (RV89_INTERFACE_V0_0_4)
    // Move pointer to first byte inside border
    RasterBufferStructure.ComponentOffset[0]    = RasterBufferStructure.Dimension[0] * 16 + 16;
    RasterBufferStructure.ComponentOffset[1]   += RasterBufferStructure.Dimension[0] * 8 + 8;
#endif

#if 0
    // Initialise decode buffers to bright pink (debug aid, disabled)
    unsigned char*  LumaBuffer;
    unsigned char*  ChromaBuffer;
    unsigned int    LumaSize    = InitializationParameters.MaxWidth*InitializationParameters.MaxHeight;
    CurrentDecodeBuffer->ObtainDataReference( NULL, NULL, (void**)&LumaBuffer, UnCachedAddress);
    ChromaBuffer                = LumaBuffer+LumaSize;
    memset (LumaBuffer,   0x00, LumaSize);
    memset (ChromaBuffer, 0x80, LumaSize/2);
    RasterBuffer->ObtainDataReference( NULL, NULL, (void**)&LumaBuffer, UnCachedAddress);
    LumaSize                    = RasterBufferStructure.ComponentOffset[1];
    ChromaBuffer                = LumaBuffer+LumaSize;
    memset (LumaBuffer,   0xff, LumaSize);
    memset (ChromaBuffer, 0xff, LumaSize/2);
#endif

    // Current decode destination: luma/chroma planes inside the raster buffer
    Param->CurrDecFrame.pLuma                   = (RV89Dec_LumaAddress_t)(RasterBufferBase + RasterBufferStructure.ComponentOffset[0]);
    Param->CurrDecFrame.pChroma                 = (RV89Dec_ChromaAddress_t)(RasterBufferBase + RasterBufferStructure.ComponentOffset[1]);

    // Attach planar buffer to decode buffer and let go of it
    CurrentDecodeBuffer->AttachBuffer           (RasterBuffer);
    RasterBuffer->DecrementReferenceCount       ();

    // Preserve raster buffer pointers for later use as reference frames
    // (note: the chroma pointer is stashed in BufferMacroblockStructurePointer)
    BufferState[CurrentDecodeBufferIndex].BufferRasterPointer               = Param->CurrDecFrame.pLuma;
    BufferState[CurrentDecodeBufferIndex].BufferMacroblockStructurePointer  = Param->CurrDecFrame.pChroma;

    // Fillout the reference frame lists - default to self if not present
    if ((ParsedFrameParameters->NumberOfReferenceFrameLists == 0) ||
        (DecodeContext->ReferenceFrameList[0].EntryCount == 0))
    {
        Param->PrevRefFrame.pLuma               = Param->CurrDecFrame.pLuma;
        Param->PrevRefFrame.pChroma             = Param->CurrDecFrame.pChroma;
        Param->PrevMinusOneRefFrame.pLuma       = Param->CurrDecFrame.pLuma;
        Param->PrevMinusOneRefFrame.pChroma     = Param->CurrDecFrame.pChroma;
    }
    else
    {
        i                                       = DecodeContext->ReferenceFrameList[0].EntryIndicies[0];
        Param->PrevRefFrame.pLuma               = (RV89Dec_LumaAddress_t)BufferState[i].BufferRasterPointer;
        Param->PrevRefFrame.pChroma             = (RV89Dec_ChromaAddress_t)BufferState[i].BufferMacroblockStructurePointer;
        // NOTE(review): EntryIndicies[1] is read even when EntryCount == 1 -
        // presumably the unused slot holds a benign value; verify.
        i                                       = DecodeContext->ReferenceFrameList[0].EntryIndicies[1];
        Param->PrevMinusOneRefFrame.pLuma       = (RV89Dec_LumaAddress_t)BufferState[i].BufferRasterPointer;
        Param->PrevMinusOneRefFrame.pChroma     = (RV89Dec_ChromaAddress_t)BufferState[i].BufferMacroblockStructurePointer;
    }

    //{{{  DEBUG
    {
        report( severity_info, "Codec Picture No %d, Picture type %d\n", PictureNo++, Frame->PictureHeader.PictureCodingType);
#if 0
        report( severity_info, "Codec Picture No %d, Picture type %d\n", PictureNo++, Frame->PictureHeader.PictureCodingType);
        report( severity_info, " InBuffer.pCompressedData = %08x\n", Param->InBuffer.pCompressedData);
        report( severity_info, " InBuffer.CompressedDataBufferSize = %d\n", Param->InBuffer.CompressedDataBufferSize);
        report( severity_info, " InBuffer.NumSegments = %d\n", Param->InBuffer.NumSegments);
        report( severity_info, " InBuffer.pSegmentInfo = %08x\n", Param->InBuffer.pSegmentInfo);
        for (i=0; i<Param->InBuffer.NumSegments+1; i++)
        {
            report( severity_info, " InBuffer.SegmentInfo[%d] = %d, %d\n", i, SegmentInfo[i].is_valid, SegmentInfo[i].offset);
        }
        report( severity_info, " CurrDecFrame.pLuma = %08x\n", Param->CurrDecFrame.pLuma);
        report( severity_info, " CurrDecFrame.pChroma = %08x\n", Param->CurrDecFrame.pChroma);
        report( severity_info, " Outbuffer.pLuma = %08x\n", Param->Outbuffer.pLuma);
        report( severity_info, " Outbuffer.pChroma = %08x\n", Param->Outbuffer.pChroma);
        report( severity_info, " PrevRefFrame.pLuma = %08x\n", Param->PrevRefFrame.pLuma);
        report( severity_info, " PrevRefFrame.pChroma = %08x\n", Param->PrevRefFrame.pChroma);
        report( severity_info, " PrevMinusOneRefFrame.pLuma = %08x\n", Param->PrevMinusOneRefFrame.pLuma);
        report( severity_info, " PrevMinusOneRefFrame.pChroma = %08x\n", Param->PrevMinusOneRefFrame.pChroma);
#endif
    }
    //}}}

    // Fillout the actual command
    memset( &Context->BaseContext.MMECommand, 0x00, sizeof(MME_Command_t) );

    Context->BaseContext.MMECommand.CmdStatus.AdditionalInfoSize    = sizeof(RV89Dec_TransformStatusAdditionalInfo_t);
    Context->BaseContext.MMECommand.CmdStatus.AdditionalInfo_p      = (MME_GenericParams_t)(&Context->DecodeStatus);
    Context->BaseContext.MMECommand.ParamSize                       = sizeof(RV89Dec_TransformParams_t);
    Context->BaseContext.MMECommand.Param_p                         = (MME_GenericParams_t)(&Context->DecodeParameters);

    return CodecNoError;
}
////////////////////////////////////////////////////////////////////////////
//
//  InjectData
//
//  Entry point for injected data: routes the buffer either directly to
//  the stream's collator (un-muxed data) or to the first registered
//  demultiplexor that handles the descriptor's mux type.  The buffer
//  reference obtained at injection time is released before returning.
//
PlayerStatus_t Player_Generic_c::InjectData(PlayerPlayback_t Playback, Buffer_t Buffer)
{
    unsigned int             i;
    unsigned int             Length;
    void                    *Data;
    PlayerInputMuxType_t     MuxType;
    PlayerStatus_t           Status;
#ifdef __TDT__
    DemultiplexorStatus_t    DemuxStatus = NULL;
#endif
    PlayerInputDescriptor_t *Descriptor;

    //
    // The input descriptor tells us how this buffer's data is packaged
    //

    Status = Buffer->ObtainMetaDataReference(MetaDataInputDescriptorType, (void **)(&Descriptor));
    if (Status != PlayerNoError)
    {
        report(severity_error, "Player_Generic_c::InjectData - Unable to obtain the meta data input descriptor.\n");
        return Status;
    }

    //
    if (Descriptor->MuxType == MuxTypeUnMuxed)
    {
        //
        // Un muxed data, call the appropriate collator
        //
        Status = Buffer->ObtainDataReference(NULL, &Length, &Data);
        if (Status != PlayerNoError)
        {
            report(severity_error, "Player_Generic_c::InjectData - unable to obtain data reference.\n");
            return Status;
        }
        Status = Descriptor->UnMuxedStream->Collator->Input(Descriptor, Length, Data);
    }
    else
    {
        //
        // Data is muxed - seek a demultiplexor and pass on the call
        //
        for (i = 0; i < DemultiplexorCount; i++)
        {
            Demultiplexors[i]->GetHandledMuxType(&MuxType);
            if (MuxType == Descriptor->MuxType)
                break;
        }

        if (i < DemultiplexorCount)
        {
#ifdef __TDT__
            // NOTE(review): on __TDT__ builds the demux result goes into
            // DemuxStatus only (used below for overflow recovery), so the
            // function returns the earlier Status (PlayerNoError) even if
            // Demux failed - confirm this is intended.
            DemuxStatus = Demultiplexors[i]->Demux(Playback, Descriptor->DemultiplexorContext, Buffer);
#else
            Status = Demultiplexors[i]->Demux(Playback, Descriptor->DemultiplexorContext, Buffer);
#endif
        }
        else
        {
            report(severity_error, "Player_Generic_c::InjectData - No suitable demultiplexor registerred for this MuxType (%d).\n", Descriptor->MuxType);
            Status = PlayerUnknowMuxType;
        }
    }

#ifdef __TDT__
    // On demux overflow, discard whatever every stream's collator has
    // accumulated so the playback can resynchronise.
    if (DemuxStatus == DemultiplexorBufferOverflow)
        for (PlayerStream_t Stream = Playback->ListOfStreams; Stream != NULL; Stream = Stream->Next)
            Stream->Collator->DiscardAccumulatedData();
#endif

    //
    // Release the buffer
    //

    Buffer->DecrementReferenceCount(IdentifierGetInjectBuffer);

    return Status;
}
////////////////////////////////////////////////////////////////////////////
//
//  FillOutDecodeCommand
//
//  Build the MME decode command for one AVS picture: coded-data extent,
//  decode/decimated output addresses, macroblock-structure buffer,
//  reference picture addresses, per-picture header fields, decimation
//  policy, and (depending on TRANSFORMER_AVSDEC_HD) either the HD slice
//  list or the SD raster buffers passed via MME scatter pages.
//
CodecStatus_t Codec_MmeVideoAvs_c::FillOutDecodeCommand( void )
{
    AvsCodecDecodeContext_t*    Context       = (AvsCodecDecodeContext_t *)DecodeContext;
    AvsFrameParameters_t*       Parsed        = (AvsFrameParameters_t *)ParsedFrameParameters->FrameParameterStructure;
    AvsVideoPictureHeader_t*    PictureHeader = &Parsed->PictureHeader;
    MME_AVSVideoDecodeParams_t* Param;
    AVS_StartCodecsParam_t*     StartCodes;
    AVS_DecodedBufferAddress_t* Decode;
    AVS_RefPicListAddress_t*    RefList;
    unsigned int                Entry;

    CODEC_DEBUG("%s\n", __FUNCTION__);

    // For avs we do not do slice decodes.
    KnownLastSliceInFieldFrame  = true;

    Param       = &Context->DecodeParameters;
    Decode      = &Param->DecodedBufferAddr;
    RefList     = &Param->RefPicListAddr;
    StartCodes  = &Param->StartCodecs;

    // Coded data extent: HD builds start at the top field, SD builds at
    // the beginning of the coded data.
#if defined (TRANSFORMER_AVSDEC_HD)
    Param->PictureStartAddr_p   = (AVS_CompressedData_t)(CodedData + PictureHeader->top_field_offset);
#else
    Param->PictureStartAddr_p   = (AVS_CompressedData_t)CodedData;
#endif
    Param->PictureEndAddr_p     = (AVS_CompressedData_t)(CodedData + CodedDataLength);

    // Output addresses for the current decode buffer, plus decimated
    // planes only when the decimation policy is enabled.
    Decode->Luma_p              = (AVS_LumaAddress_t)BufferState[CurrentDecodeBufferIndex].BufferLumaPointer;
    Decode->Chroma_p            = (AVS_ChromaAddress_t)BufferState[CurrentDecodeBufferIndex].BufferChromaPointer;
    if (Player->PolicyValue( Playback, this->Stream, PolicyDecimateDecoderOutput) != PolicyValueDecimateDecoderOutputDisabled)
    {
        Decode->LumaDecimated_p     = (AVS_LumaAddress_t)BufferState[CurrentDecodeBufferIndex].DecimatedLumaPointer;
        Decode->ChromaDecimated_p   = (AVS_ChromaAddress_t)BufferState[CurrentDecodeBufferIndex].DecimatedChromaPointer;
    }
    else
    {
        Decode->LumaDecimated_p     = NULL;
        Decode->ChromaDecimated_p   = NULL;
    }

    //{{{  Obtain MBStruct buffer
#if defined (AVS_MBSTRUCT)
    if (PictureHeader->ReversePlay)
    {
        // In reverse play each non-B picture gets its own macroblock
        // structure buffer (B pictures reuse a reference's - see below).
        if (PictureHeader->picture_coding_type != AVS_PICTURE_CODING_TYPE_B)
        {
            // Get the macroblock structure buffer
            Buffer_t        AvsMbStructBuffer;
            unsigned int    Size;
            CodecStatus_t   Status;

            // 6 words per 16x16 macroblock
            Size    = ((DecodingWidth + 15) / 16) * ((DecodingHeight + 15) / 16) * 6 * sizeof(unsigned int);
            Status  = AvsMbStructPool->GetBuffer (&AvsMbStructBuffer, Size);
            if( Status != BufferNoError )
            {
                CODEC_ERROR ("Failed to get macroblock structure buffer.\n" );
                return Status;
            }

            AvsMbStructBuffer->ObtainDataReference (NULL, NULL, (void **)&Decode->MBStruct_p, PhysicalAddress);
            CurrentDecodeBuffer->AttachBuffer (AvsMbStructBuffer);      // Attach to decode buffer (so it will be freed at the same time)
            AvsMbStructBuffer->DecrementReferenceCount();               // and release ownership of the buffer to the decode buffer

            // Remember the MBStruct pointer in case we have a second field to follow
            BufferState[CurrentDecodeBufferIndex].BufferMacroblockStructurePointer  = (unsigned char*)Decode->MBStruct_p;
        }
    }
    else
        Decode->MBStruct_p  = (U32*)AllocatorPhysicalAddress (MbStructMemoryDevice);
#else
    Decode->MBStruct_p      = (U32*)AllocatorPhysicalAddress (MbStructMemoryDevice);
#endif
    //}}}

    //{{{  Initialise decode buffers to bright pink (debug aid, disabled)
#if 0
    unsigned int    LumaSize    = DecodingWidth*DecodingHeight;
    unsigned char*  LumaBuffer;
    unsigned char*  ChromaBuffer;
    static unsigned int Colour;
    CurrentDecodeBuffer->ObtainDataReference( NULL, NULL, (void**)&LumaBuffer, UnCachedAddress);
    ChromaBuffer    = LumaBuffer+LumaSize;
    memset (LumaBuffer, 0xff, LumaSize);
    memset (ChromaBuffer, Colour++ & 0xff, LumaSize/2);
#endif
    //}}}

    //{{{  Fillout the reference frame lists
    if (ParsedFrameParameters->NumberOfReferenceFrameLists != 0)
    {
        // Entry 0 is the backward reference
        if (DecodeContext->ReferenceFrameList[0].EntryCount > 0)
        {
            Entry                           = DecodeContext->ReferenceFrameList[0].EntryIndicies[0];
            RefList->BackwardRefLuma_p      = (AVS_LumaAddress_t)BufferState[Entry].BufferLumaPointer;
            RefList->BackwardRefChroma_p    = (AVS_ChromaAddress_t)BufferState[Entry].BufferChromaPointer;
            //Param->Picture_structure_bwd  = (AVS_PicStruct_t)BufferState[i].PictureSyntax);
#if defined (AVS_MBSTRUCT)
            // Reverse play: reuse the backward reference's MB structure
            if (PictureHeader->ReversePlay)
                Decode->MBStruct_p          = (U32*)BufferState[Entry].BufferMacroblockStructurePointer;
#endif
        }
        // Entry 1 is the forward reference
        if( DecodeContext->ReferenceFrameList[0].EntryCount > 1 )
        {
            Entry                           = DecodeContext->ReferenceFrameList[0].EntryIndicies[1];
            RefList->ForwardRefLuma_p       = (AVS_LumaAddress_t)BufferState[Entry].BufferLumaPointer;
            RefList->ForwardRefChroma_p     = (AVS_ChromaAddress_t)BufferState[Entry].BufferChromaPointer;
#if defined (AVS_MBSTRUCT)
            // Reverse-play P pictures take the forward reference's MB structure
            if ((PictureHeader->ReversePlay) && (PictureHeader->picture_coding_type == AVS_PICTURE_CODING_TYPE_P))
                Decode->MBStruct_p          = (U32*)BufferState[Entry].BufferMacroblockStructurePointer;
#endif
        }
    }
    //}}}

    //{{{  Fill in remaining fields (straight copies from the picture header)
    Param->Progressive_frame            = (AVS_FrameSyntax_t)PictureHeader->progressive_frame;
    Param->MainAuxEnable                = AVS_MAINOUT_EN;
    Param->HorizontalDecimationFactor   = AVS_HDEC_1;
    Param->VerticalDecimationFactor     = AVS_VDEC_1;
    Param->AebrFlag                     = 0;
    Param->Picture_structure            = (AVS_PicStruct_t)PictureHeader->picture_structure;
    Param->Picture_structure_bwd        = (AVS_PicStruct_t)PictureHeader->picture_structure;
    Param->Fixed_picture_qp             = (MME_UINT)PictureHeader->fixed_picture_qp;
    Param->Picture_qp                   = (MME_UINT)PictureHeader->picture_qp;
    Param->Skip_mode_flag               = (AVS_SkipMode_t)PictureHeader->skip_mode_flag;
    Param->Loop_filter_disable          = (MME_UINT)PictureHeader->loop_filter_disable;
    Param->alpha_offset                 = (S32)PictureHeader->alpha_c_offset;
    Param->beta_offset                  = (S32)PictureHeader->beta_offset;
    Param->Picture_ref_flag             = (AVS_PicRef_t)PictureHeader->picture_reference_flag;
    Param->tr                           = (S32)PictureHeader->tr;
    Param->imgtr_next_P                 = (S32)PictureHeader->imgtr_next_P;
    Param->imgtr_last_P                 = (S32)PictureHeader->imgtr_last_P;
    Param->imgtr_last_prev_P            = (S32)PictureHeader->imgtr_last_prev_P;
    // To do
    Param->field_flag                   = (AVS_FieldSyntax_t)0;
    Param->topfield_pos                 = (U32)PictureHeader->top_field_offset;
    Param->botfield_pos                 = (U32)PictureHeader->bottom_field_offset;
    Param->DecodingMode                 = AVS_NORMAL_DECODE;
    Param->AdditionalFlags              = (MME_UINT)0;
    Param->FrameType                    = (AVS_PictureType_t)PictureHeader->picture_coding_type;
    //}}}

    //{{{  Fill in decimation values if required
    switch (Player->PolicyValue (Playback, this->Stream, PolicyDecimateDecoderOutput))
    {
        case PolicyValueDecimateDecoderOutputDisabled:
        {
            // Normal Case
            Param->MainAuxEnable                = AVS_MAINOUT_EN;
            Param->HorizontalDecimationFactor   = AVS_HDEC_1;
            Param->VerticalDecimationFactor     = AVS_VDEC_1;
            break;
        }
        case PolicyValueDecimateDecoderOutputHalf:
        {
            Param->MainAuxEnable                = AVS_AUX_MAIN_OUT_EN;
            Param->HorizontalDecimationFactor   = AVS_HDEC_ADVANCED_2;
            // Vertical halving uses a different mode for progressive frames
            if (Param->Progressive_frame)
                Param->VerticalDecimationFactor = AVS_VDEC_ADVANCED_2_PROG;
            else
                Param->VerticalDecimationFactor = AVS_VDEC_ADVANCED_2_INT;
            break;
        }
        case PolicyValueDecimateDecoderOutputQuarter:
        {
            Param->MainAuxEnable                = AVS_AUX_MAIN_OUT_EN;
            Param->HorizontalDecimationFactor   = AVS_HDEC_ADVANCED_4;
            Param->VerticalDecimationFactor     = AVS_VDEC_ADVANCED_2_INT;
            break;
        }
    }
    //}}}

    //{{{  Fill out slice list if HD decode
#if defined (TRANSFORMER_AVSDEC_HD)
    StartCodes->SliceCount  = Parsed->SliceHeaderList.no_slice_headers;
    for (unsigned int i=0; i<StartCodes->SliceCount; i++)
    {
        StartCodes->SliceArray[i].SliceStartAddrCompressedBuffer_p  = (AVS_CompressedData_t)(CodedData + Parsed->SliceHeaderList.slice_array[i].slice_offset);
        StartCodes->SliceArray[i].SliceAddress                      = Parsed->SliceHeaderList.slice_array[i].slice_start_code;
    }
#endif
    //}}}

    //{{{  Set up raster buffers if SD decode
#if !defined (TRANSFORMER_AVSDEC_HD)
    {
        // AVS SD uses both raster and Omega 2 buffers so obtain a raster buffer from the pool as well.
        // Pass details to the firmware in the scatterpages
        BufferStatus_t      Status;
        Buffer_t            RasterBuffer;
        BufferStructure_t   RasterBufferStructure;
        unsigned char*      RasterBufferBase;

        Status  = FillOutDecodeBufferRequest (&RasterBufferStructure);
        if (Status != BufferNoError)
        {
            CODEC_ERROR("%s - Failed to fill out a buffer request structure.\n", __FUNCTION__);
            return Status;
        }

        // Override the format so we get one sized for raster rather than macroblock
        RasterBufferStructure.Format                = FormatVideo420_PlanarAligned;
        RasterBufferStructure.ComponentBorder[0]    = 32;
        RasterBufferStructure.ComponentBorder[1]    = 32;

        // Ask the manifestor for a buffer of the new format
        Status  = Manifestor->GetDecodeBuffer (&RasterBufferStructure, &RasterBuffer);
        if (Status != BufferNoError)
        {
            CODEC_ERROR("%s - Failed to obtain a decode buffer from the manifestor.\n", __FUNCTION__);
            return Status;
        }
        RasterBuffer->ObtainDataReference (NULL, NULL, (void **)&RasterBufferBase, UnCachedAddress);

        //{{{  Fill in details for all buffers
        for (int i = 0; i < AVS_NUM_MME_BUFFERS; i++)
        {
            DecodeContext->MMEBufferList[i]                 = &DecodeContext->MMEBuffers[i];
            DecodeContext->MMEBuffers[i].StructSize         = sizeof (MME_DataBuffer_t);
            DecodeContext->MMEBuffers[i].UserData_p         = NULL;
            DecodeContext->MMEBuffers[i].Flags              = 0;
            DecodeContext->MMEBuffers[i].StreamNumber       = 0;
            DecodeContext->MMEBuffers[i].NumberOfScatterPages = 1;
            DecodeContext->MMEBuffers[i].ScatterPages_p     = &DecodeContext->MMEPages[i];
            DecodeContext->MMEBuffers[i].StartOffset        = 0;
        }
        //}}}

        // Then overwrite bits specific to other buffers
        DecodeContext->MMEBuffers[AVS_MME_CURRENT_FRAME_BUFFER].ScatterPages_p[0].Page_p    = RasterBufferBase + RasterBufferStructure.ComponentOffset[0];
        DecodeContext->MMEBuffers[AVS_MME_CURRENT_FRAME_BUFFER].TotalSize                   = RasterBufferStructure.Size;

        // Preserve raster buffer pointers for later use as reference frames
        BufferState[CurrentDecodeBufferIndex].BufferRasterPointer                           = RasterBufferBase + RasterBufferStructure.ComponentOffset[0];

        if (ParsedFrameParameters->NumberOfReferenceFrameLists != 0)
        {
            if (DecodeContext->ReferenceFrameList[0].EntryCount > 0)
            {
                Entry   = DecodeContext->ReferenceFrameList[0].EntryIndicies[0];
                DecodeContext->MMEBuffers[AVS_MME_BACKWARD_REFERENCE_FRAME_BUFFER].ScatterPages_p[0].Page_p = BufferState[Entry].BufferRasterPointer;
                DecodeContext->MMEBuffers[AVS_MME_BACKWARD_REFERENCE_FRAME_BUFFER].TotalSize                = RasterBufferStructure.Size;
            }
            if (DecodeContext->ReferenceFrameList[0].EntryCount > 1)
            {
                Entry   = DecodeContext->ReferenceFrameList[0].EntryIndicies[1];
                DecodeContext->MMEBuffers[AVS_MME_FORWARD_REFERENCE_FRAME_BUFFER].ScatterPages_p[0].Page_p  = BufferState[Entry].BufferRasterPointer;
                DecodeContext->MMEBuffers[AVS_MME_FORWARD_REFERENCE_FRAME_BUFFER].TotalSize                 = RasterBufferStructure.Size;
            }
        }

        //{{{  Initialise remaining scatter page values
        for (int i = 0; i < AVS_NUM_MME_BUFFERS; i++)
        {
            // Only one scatterpage, so size = totalsize
            DecodeContext->MMEBuffers[i].ScatterPages_p[0].Size         = DecodeContext->MMEBuffers[i].TotalSize;
            DecodeContext->MMEBuffers[i].ScatterPages_p[0].BytesUsed    = 0;
            DecodeContext->MMEBuffers[i].ScatterPages_p[0].FlagsIn      = 0;
            DecodeContext->MMEBuffers[i].ScatterPages_p[0].FlagsOut     = 0;
        }
        //}}}

        // Attach planar buffer to decode buffer and let go of it
        CurrentDecodeBuffer->AttachBuffer (RasterBuffer);
        RasterBuffer->DecrementReferenceCount ();

        //{{{  Initialise raster decode buffers to bright pink (debug aid, disabled)
#if 0
        {
            unsigned int    LumaSize        = (DecodingWidth+32)*(DecodingHeight+32);
            unsigned char*  LumaBuffer      = (unsigned char*)DecodeContext->MMEBuffers[AVS_MME_CURRENT_FRAME_BUFFER].ScatterPages_p[0].Page_p;
            unsigned char*  ChromaBuffer    = &LumaBuffer[LumaSize];
            memset (LumaBuffer, 0xff, LumaSize);
            memset (ChromaBuffer, 0xff, LumaSize/2);
        }
#endif
        //}}}
    }
#endif
    //}}}

    // Fillout the actual command
    memset( &Context->BaseContext.MMECommand, 0x00, sizeof(MME_Command_t) );

    Context->BaseContext.MMECommand.CmdStatus.AdditionalInfoSize    = sizeof(Context->DecodeStatus);
    Context->BaseContext.MMECommand.CmdStatus.AdditionalInfo_p      = (MME_GenericParams_t)(&Context->DecodeStatus);
    Context->BaseContext.MMECommand.ParamSize                       = sizeof(Context->DecodeParameters);
    Context->BaseContext.MMECommand.Param_p                         = (MME_GenericParams_t)(&Context->DecodeParameters);

#if !defined (TRANSFORMER_AVSDEC_HD)
    // SD decode passes the raster buffers to the firmware as data buffers
    DecodeContext->MMECommand.NumberInputBuffers                    = AVS_NUM_MME_INPUT_BUFFERS;
    DecodeContext->MMECommand.NumberOutputBuffers                   = AVS_NUM_MME_OUTPUT_BUFFERS;
    DecodeContext->MMECommand.DataBuffers_p                         = (MME_DataBuffer_t**)DecodeContext->MMEBufferList;
#endif

    return CodecNoError;
}
// /////////////////////////////////////////////////////////////////////////
//
//      ProcessPostManifest
//
//      Stream process body: drains buffers appearing on the stream's
//      ManifestedBufferRing (buffers the manifestor has finished with)
//      until Stream->Terminating is set.
//
//      Decode buffers have their actual frame timing recorded and are
//      handed back to the codec; control-structure buffers are either
//      executed immediately or accumulated to run before/after a given
//      sequence number / playback time.
//
void   Player_Generic_c::ProcessPostManifest( PlayerStream_t Stream )
{
PlayerStatus_t            Status;
RingStatus_t              RingStatus;
Buffer_t                  Buffer;
Buffer_t                  OriginalCodedFrameBuffer;
BufferType_t              BufferType;
PlayerControlStructure_t *ControlStructure;
ParsedFrameParameters_t  *ParsedFrameParameters;
PlayerSequenceNumber_t   *SequenceNumberStructure;
unsigned long long        LastEntryTime;
unsigned long long        SequenceNumber;
unsigned long long        MaximumActualSequenceNumberSeen;
unsigned long long        Time;
unsigned int              AccumulatedBeforeControlMessagesCount;
unsigned int              AccumulatedAfterControlMessagesCount;
bool                      ProcessNow;
unsigned int             *Count;
PlayerBufferRecord_t     *Table;
VideoOutputTiming_t      *OutputTiming;
unsigned long long        Now;

//

    LastEntryTime                         = OS_GetTimeInMicroSeconds();
    SequenceNumber                        = INVALID_SEQUENCE_VALUE;
    MaximumActualSequenceNumberSeen       = 0;
    Time                                  = INVALID_TIME;
    AccumulatedBeforeControlMessagesCount = 0;
    AccumulatedAfterControlMessagesCount  = 0;

    //
    // Signal we have started
    //

    OS_LockMutex( &Lock );
    Stream->ProcessRunningCount++;

    if( Stream->ProcessRunningCount == Stream->ExpectedProcessCount )
	OS_SetEvent( &Stream->StartStopEvent );

    OS_UnLockMutex( &Lock );

    //
    // Main Loop
    //

    while( !Stream->Terminating )
    {
	// NOTE(review): the ring traffics in buffer pointers cast through
	// unsigned int - this assumes pointers fit in 32 bits; verify on
	// 64-bit builds.
	RingStatus = Stream->ManifestedBufferRing->Extract( (unsigned int *)(&Buffer), PLAYER_MAX_EVENT_WAIT );

	// Time-bound the re-timing window, whether or not a buffer arrived
	Now = OS_GetTimeInMicroSeconds();
	if( Stream->ReTimeQueuedFrames && ((Now - Stream->ReTimeStart) > PLAYER_MAX_TIME_IN_RETIMING) )
	    Stream->ReTimeQueuedFrames = false;

	if( RingStatus == RingNothingToGet )
	    continue;

	Buffer->GetType( &BufferType );
	Buffer->TransferOwnership( IdentifierProcessPostManifest );

	//
	// Deal with a coded frame buffer
	//

	if( BufferType == Stream->DecodeBufferType )
	{
	    Stream->FramesFromManifestorCount++;

#if 0
	    {
		static unsigned long long LastTime = 0;
		static unsigned long long LastActualTime = 0;
		AudioOutputTiming_t *OutputTiming;

		Buffer->ObtainMetaDataReference( MetaDataAudioOutputTimingType, (void **)&OutputTiming);
		report( severity_info, "Post Dn = %d, DS= %6lld, DAS = %6lld, S = %016llx,AS = %016llx\n",
			OutputTiming->DisplayCount, OutputTiming->SystemPlaybackTime - LastTime,
			OutputTiming->ActualSystemPlaybackTime - LastActualTime,
			OutputTiming->SystemPlaybackTime, OutputTiming->ActualSystemPlaybackTime );
		LastTime = OutputTiming->SystemPlaybackTime;
		LastActualTime = OutputTiming->ActualSystemPlaybackTime;
	    }
#endif
#if 0
	    {
		static unsigned long long LastTime = 0;
		static unsigned long long LastActualTime = 0;
		VideoOutputTiming_t *OutputTiming;

		Buffer->ObtainMetaDataReference( MetaDataVideoOutputTimingType, (void **)&OutputTiming );
		report( severity_info, "Post Dn = %d %d, I = %d, TFF = %d, DS= %6lld, DAS = %6lld, S = %016llx, AS = %016llx\n",
			OutputTiming->DisplayCount[0], OutputTiming->DisplayCount[1],
			OutputTiming->Interlaced, OutputTiming->TopFieldFirst,
			OutputTiming->SystemPlaybackTime - LastTime,
			OutputTiming->ActualSystemPlaybackTime - LastActualTime,
			OutputTiming->SystemPlaybackTime, OutputTiming->ActualSystemPlaybackTime );
		LastTime = OutputTiming->SystemPlaybackTime;
		LastActualTime = OutputTiming->ActualSystemPlaybackTime;
	    }
#endif

	    //
	    // Obtain a sequence number from the buffer
	    //

	    Status = Buffer->ObtainAttachedBufferReference( Stream->CodedFrameBufferType, &OriginalCodedFrameBuffer );
	    if( Status != PlayerNoError )
	    {
		report( severity_error, "Player_Generic_c::ProcessPostManifest - Unable to obtain the the original coded frame buffer - Implementation error\n" );
		Buffer->DecrementReferenceCount( IdentifierProcessPostManifest );
		continue;
	    }

	    Status = OriginalCodedFrameBuffer->ObtainMetaDataReference( MetaDataSequenceNumberType, (void **)(&SequenceNumberStructure) );
	    if( Status != PlayerNoError )
	    {
		report( severity_error, "Player_Generic_c::ProcessPostManifest - Unable to obtain the meta data \"SequenceNumber\" - Implementation error\n" );
		Buffer->DecrementReferenceCount( IdentifierProcessPostManifest );
		continue;
	    }

	    Status = Buffer->ObtainMetaDataReference( MetaDataParsedFrameParametersReferenceType, (void **)(&ParsedFrameParameters) );
	    if( Status != PlayerNoError )
	    {
		report( severity_error, "Player_Generic_c::ProcessPostManifest - Unable to obtain the meta data \"ParsedFrameParametersReference\" - Implementation error\n" );
		Buffer->DecrementReferenceCount( IdentifierProcessPostManifest );
		continue;
	    }

	    //
	    // Check for whether or not we are in re-timing
	    //

	    if( Stream->ReTimeQueuedFrames && !SequenceNumberStructure->MarkerFrame )
	    {
		Status = Buffer->ObtainMetaDataReference( (Stream->StreamType == StreamTypeVideo ? MetaDataVideoOutputTimingType : MetaDataAudioOutputTimingType), (void **)&OutputTiming );
		if( Status != PlayerNoError )
		{
		    report( severity_error, "Player_Generic_c::ProcessPostManifest - Unable to obtain the meta data \"%s\" - Implementation error\n",
			    (Stream->StreamType == StreamTypeVideo ? "VideoOutputTiming" : "AudioOutputTiming") );
		    Buffer->DecrementReferenceCount( IdentifierProcessPostManifest );
		    continue;
		}

		if( ValidTime(OutputTiming->ActualSystemPlaybackTime) )
		{
		    // Frame was actually manifested - re-timing pass is over
		    Stream->ReTimeQueuedFrames = false;
		}
		else
		{
		    // Not yet manifested - re-time it and queue it again
		    // (unless it is to be dropped or we are terminating)
		    Stream->OutputTimer->GenerateFrameTiming( Buffer );
		    Status = Stream->OutputTimer->TestForFrameDrop( Buffer, OutputTimerBeforeManifestation );

		    if( !Stream->Terminating && (Status == OutputTimerNoError) )
		    {
			Stream->FramesToManifestorCount++;
			Stream->Manifestor->QueueDecodeBuffer( Buffer );
			continue;
		    }
		}
	    }

	    //
	    // Extract the sequence number, and write the timing statistics
	    //

	    //report( severity_info, "MQ Post Man %d - %d\n", Stream->StreamType, ParsedFrameParameters->DisplayFrameIndex );

	    SequenceNumberStructure->TimeEntryInProcess3  = OS_GetTimeInMicroSeconds();
	    SequenceNumberStructure->DeltaEntryInProcess3 = SequenceNumberStructure->TimeEntryInProcess3 - LastEntryTime;
	    LastEntryTime                                 = SequenceNumberStructure->TimeEntryInProcess3;
	    SequenceNumber                                = SequenceNumberStructure->Value;
	    MaximumActualSequenceNumberSeen               = max(SequenceNumber, MaximumActualSequenceNumberSeen);
	    Time                                          = ParsedFrameParameters->NativePlaybackTime;

#ifndef __TDT__
	    ProcessStatistics( Stream, SequenceNumberStructure );
#endif

	    if( SequenceNumberStructure->MarkerFrame )
	    {
		Stream->DiscardingUntilMarkerFramePostM = false;
		Time                                    = INVALID_TIME;
	    }

	    //
	    // Process any outstanding control messages to be applied before this buffer
	    //

	    ProcessAccumulatedControlMessages(  Stream,
						&AccumulatedBeforeControlMessagesCount,
						PLAYER_MAX_POSTM_MESSAGES,
						Stream->AccumulatedBeforePostMControlMessages,
						SequenceNumber, Time );

	    //
	    // Pass buffer back into output timer, and release the buffer.
	    // Marker frames were never timed, so they are simply released.
	    //

	    if( !SequenceNumberStructure->MarkerFrame )
	    {
		Stream->OutputTimer->RecordActualFrameTiming( Buffer );
		Stream->Codec->ReleaseDecodeBuffer( Buffer );
	    }
	    else
		Buffer->DecrementReferenceCount( IdentifierProcessPostManifest );

	    //
	    // Process any outstanding control messages to be applied after this buffer
	    //

	    ProcessAccumulatedControlMessages(  Stream,
						&AccumulatedAfterControlMessagesCount,
						PLAYER_MAX_POSTM_MESSAGES,
						Stream->AccumulatedAfterPostMControlMessages,
						SequenceNumber, Time );
	}

	//
	// Deal with a player control structure
	//

	else if( BufferType == BufferPlayerControlStructureType )
	{
	    Buffer->ObtainDataReference( NULL, NULL, (void **)(&ControlStructure) );

	    ProcessNow = (ControlStructure->SequenceType == SequenceTypeImmediate) ||
			 ((SequenceNumber != INVALID_SEQUENCE_VALUE) && (ControlStructure->SequenceValue <= MaximumActualSequenceNumberSeen));

	    if( ProcessNow )
		ProcessControlMessage( Stream, Buffer, ControlStructure );
	    else
	    {
		// Park the message in the appropriate before/after table
		if( (ControlStructure->SequenceType == SequenceTypeBeforeSequenceNumber) ||
		    (ControlStructure->SequenceType == SequenceTypeBeforePlaybackTime) )
		{
		    Count = &AccumulatedBeforeControlMessagesCount;
		    Table = Stream->AccumulatedBeforePostMControlMessages;
		}
		else
		{
		    Count = &AccumulatedAfterControlMessagesCount;
		    Table = Stream->AccumulatedAfterPostMControlMessages;
		}

		AccumulateControlMessage( Buffer, ControlStructure, Count, PLAYER_MAX_POSTM_MESSAGES, Table );
	    }
	}
	else
	{
	    // NOTE(review): unlike the paths above, this release passes no
	    // owner identifier - confirm this is intentional.
	    report( severity_error, "Player_Generic_c::ProcessPostManifest - Unknown buffer type received - Implementation error.\n" );
	    Buffer->DecrementReferenceCount();
	}
    }

    // NOTE(review): "strutures" typo lives in the runtime string, so it is
    // deliberately left untouched here.
    report( severity_info, "3333 Holding control strutures %d\n",
	    AccumulatedBeforeControlMessagesCount + AccumulatedAfterControlMessagesCount );

    //
    // Make sure no one will wait for these
    //

    Stream->ReTimeQueuedFrames = false;

    //
    // Signal we have terminated
    //

    OS_LockMutex( &Lock );
    Stream->ProcessRunningCount--;

    if( Stream->ProcessRunningCount == 0 )
	OS_SetEvent( &Stream->StartStopEvent );

    OS_UnLockMutex( &Lock );
}
// /////////////////////////////////////////////////////////////////////////
//
//      ProcessDecodeToManifest
//
//      Stream process body: extracts decoded frames from the
//      DecodedFrameRing, re-orders them into display order via the
//      AccumulatedDecodeBufferTable, applies output timing / frame-drop
//      decisions, and queues surviving frames to the manifestor.
//      Control-structure buffers are executed or accumulated around the
//      re-ordered sequence numbers.
//
void   Player_Generic_c::ProcessDecodeToManifest( PlayerStream_t Stream )
{
unsigned int              i;
PlayerStatus_t            Status;
RingStatus_t              RingStatus;
unsigned int              AccumulatedBufferTableOccupancy;
PlayerBufferRecord_t     *AccumulatedBufferTable;
Buffer_t                  Buffer = NULL;
Buffer_t                  OriginalCodedFrameBuffer;
BufferType_t              BufferType;
PlayerControlStructure_t *ControlStructure;
ParsedFrameParameters_t  *ParsedFrameParameters;
unsigned int              LowestIndex;
unsigned int              LowestDisplayFrameIndex;
unsigned int              DesiredFrameIndex;
unsigned int              PossibleDecodeBuffers;
unsigned int              MaxDecodesOutOfOrder;
PlayerSequenceNumber_t   *SequenceNumberStructure;
unsigned long long        LastEntryTime;
unsigned long long        SequenceNumber;
unsigned long long        MinumumSequenceNumberAccumulated;
unsigned long long        MaximumActualSequenceNumberSeen;
unsigned long long        Time;
unsigned int              AccumulatedBeforeControlMessagesCount;
unsigned int              AccumulatedAfterControlMessagesCount;
bool                      SequenceCheck;
bool                      ProcessNow;
unsigned int             *Count;
PlayerBufferRecord_t     *Table;
Buffer_t                  MarkerFrameBuffer;
bool                      FirstFrame;
bool                      DiscardBuffer;
bool                      LastPreManifestDiscardBuffer;
unsigned char             SubmitInitialFrame;
Buffer_t                  InitialFrameBuffer;

    //
    // Set parameters
    //

    AccumulatedBufferTableOccupancy       = 0;
    AccumulatedBufferTable                = Stream->AccumulatedDecodeBufferTable;
    LastEntryTime                         = OS_GetTimeInMicroSeconds();
    SequenceNumber                        = INVALID_SEQUENCE_VALUE;
    Time                                  = INVALID_TIME;
    AccumulatedBeforeControlMessagesCount = 0;
    AccumulatedAfterControlMessagesCount  = 0;
    MinumumSequenceNumberAccumulated      = 0xffffffffffffffffULL;
    MaximumActualSequenceNumberSeen       = 0;
    DesiredFrameIndex                     = 0;
    FirstFrame                            = true;
    MarkerFrameBuffer                     = NULL;
    InitialFrameBuffer                    = NULL;
    LastPreManifestDiscardBuffer          = false;

    //
    // Signal we have started
    //

    OS_LockMutex( &Lock );
    Stream->ProcessRunningCount++;

    if( Stream->ProcessRunningCount == Stream->ExpectedProcessCount )
	OS_SetEvent( &Stream->StartStopEvent );

    OS_UnLockMutex( &Lock );

    //
    // Main Loop
    //

    while( !Stream->Terminating )
    {
	//
	// Buffer re-ordering loop
	//

	while( !Stream->Terminating )
	{
	    //
	    // Scan the list of accumulated buffers to see if the next display frame is available
	    // (whether because it was already accumulated, or because its display frame index has been updated)
	    //

	    MinumumSequenceNumberAccumulated = 0xffffffffffffffffULL;
	    if( AccumulatedBufferTableOccupancy != 0 )
	    {
		LowestIndex             = INVALID_INDEX;
		LowestDisplayFrameIndex = INVALID_INDEX;

		for( i=0; i<Stream->NumberOfDecodeBuffers; i++ )
		    if( AccumulatedBufferTable[i].Buffer != NULL )
		    {
			MinumumSequenceNumberAccumulated = min(MinumumSequenceNumberAccumulated, AccumulatedBufferTable[i].SequenceNumber);

			if( (AccumulatedBufferTable[i].ParsedFrameParameters->DisplayFrameIndex != INVALID_INDEX) &&
			    ((LowestIndex == INVALID_INDEX) || (AccumulatedBufferTable[i].ParsedFrameParameters->DisplayFrameIndex < LowestDisplayFrameIndex)) )
			{
			    LowestDisplayFrameIndex = AccumulatedBufferTable[i].ParsedFrameParameters->DisplayFrameIndex;
			    LowestIndex             = i;
			}
		    }

		// Limit how far out of order we will hold frames; the bound
		// depends on playback direction and available decode buffers
		Stream->Manifestor->GetDecodeBufferCount( &PossibleDecodeBuffers );

		MaxDecodesOutOfOrder = (Stream->Playback->Direction == PlayForward) ? (PossibleDecodeBuffers >> 1) : (3 * PossibleDecodeBuffers)/4;
		MaxDecodesOutOfOrder = min( (PossibleDecodeBuffers - PLAYER_MINIMUM_NUMBER_OF_WORKING_DECODE_BUFFERS), MaxDecodesOutOfOrder );

		if( Stream->Playback->Direction == PlayForward )
		    MaxDecodesOutOfOrder = min( PLAYER_LIMIT_ON_OUT_OF_ORDER_DECODES, MaxDecodesOutOfOrder );

		if( (LowestIndex != INVALID_INDEX) &&
		    ( (LowestDisplayFrameIndex == DesiredFrameIndex) ||
		      (AccumulatedBufferTableOccupancy >= MaxDecodesOutOfOrder) ||
		      (AccumulatedBufferTable[LowestIndex].ParsedFrameParameters->CollapseHolesInDisplayIndices) ||
		      (MarkerFrameBuffer != NULL) ) )
		{
		    // Take this frame out of the table and go process it
		    Buffer                = AccumulatedBufferTable[LowestIndex].Buffer;
		    ParsedFrameParameters = AccumulatedBufferTable[LowestIndex].ParsedFrameParameters;
		    SequenceNumber        = AccumulatedBufferTable[LowestIndex].SequenceNumber;
		    BufferType            = Stream->DecodeBufferType;

		    AccumulatedBufferTable[LowestIndex].Buffer                = NULL;
		    AccumulatedBufferTable[LowestIndex].ParsedFrameParameters = NULL;
		    AccumulatedBufferTableOccupancy--;
		    break;
		}
	    }

	    //
	    // Skip any frame indices that were unused
	    //

	    while( CheckForNonDecodedFrame( Stream, DesiredFrameIndex ) )
		DesiredFrameIndex++;

	    //
	    // If we get here with a marker frame, then we have emptied our accumulated list
	    //

	    if( MarkerFrameBuffer != NULL )
	    {
		// SequenceNumberStructure still refers to the marker's meta data,
		// captured when the marker was extracted below
		SequenceNumber                  = SequenceNumberStructure->Value;
		MaximumActualSequenceNumberSeen = max(SequenceNumber, MaximumActualSequenceNumberSeen);

		ProcessAccumulatedControlMessages(  Stream,
						    &AccumulatedBeforeControlMessagesCount,
						    PLAYER_MAX_DTOM_MESSAGES,
						    Stream->AccumulatedBeforeDtoMControlMessages,
						    SequenceNumber, Time );
		ProcessAccumulatedControlMessages(  Stream,
						    &AccumulatedAfterControlMessagesCount,
						    PLAYER_MAX_DTOM_MESSAGES,
						    Stream->AccumulatedAfterDtoMControlMessages,
						    SequenceNumber, Time );

		Stream->ManifestedBufferRing->Insert( (unsigned int)MarkerFrameBuffer );	// Pass on the marker
		MarkerFrameBuffer                      = NULL;
		Stream->DiscardingUntilMarkerFrameDtoM = false;
		continue;
	    }

	    //
	    // Get a new buffer (continue will still perform the scan)
	    //

	    RingStatus = Stream->DecodedFrameRing->Extract( (unsigned int *)(&Buffer), PLAYER_NEXT_FRAME_EVENT_WAIT );
	    if( (RingStatus == RingNothingToGet) || (Buffer == NULL) || Stream->Terminating )
		continue;

	    Buffer->TransferOwnership( IdentifierProcessDecodeToManifest );
	    Buffer->GetType( &BufferType );

	    if( BufferType != Stream->DecodeBufferType )
		break;

	    //
	    // Obtain a sequence number from the buffer
	    //

	    Status = Buffer->ObtainAttachedBufferReference( Stream->CodedFrameBufferType, &OriginalCodedFrameBuffer );
	    if( Status != PlayerNoError )
	    {
		report( severity_error, "Player_Generic_c::ProcessDecodeToManifest - Unable to obtain the the original coded frame buffer - Implementation error\n" );
		Buffer->DecrementReferenceCount( IdentifierProcessDecodeToManifest );
		continue;
	    }

	    Status = OriginalCodedFrameBuffer->ObtainMetaDataReference( MetaDataSequenceNumberType, (void **)(&SequenceNumberStructure) );
	    if( Status != PlayerNoError )
	    {
		report( severity_error, "Player_Generic_c::ProcessDecodeToManifest - Unable to obtain the meta data \"SequenceNumber\" - Implementation error\n" );
		Buffer->DecrementReferenceCount( IdentifierProcessDecodeToManifest );
		continue;
	    }

	    SequenceNumberStructure->TimeEntryInProcess2  = OS_GetTimeInMicroSeconds();
	    SequenceNumberStructure->DeltaEntryInProcess2 = SequenceNumberStructure->TimeEntryInProcess2 - LastEntryTime;
	    LastEntryTime                                 = SequenceNumberStructure->TimeEntryInProcess2;
	    SequenceNumber                                = SequenceNumberStructure->Value;

	    //
	    // Check, is this a marker frame
	    //

	    if( SequenceNumberStructure->MarkerFrame )
	    {
		MarkerFrameBuffer = Buffer;
		continue;		// allow us to empty the accumulated buffer list
	    }

	    //
	    // If this is the first seen decode buffer do we wish to offer it up as an initial frame
	    //

	    if( FirstFrame )
	    {
		FirstFrame         = false;
		SubmitInitialFrame = PolicyValue( Stream->Playback, Stream, PolicyManifestFirstFrameEarly );

		if( SubmitInitialFrame == PolicyValueApply )
		{
		    Status = Stream->Manifestor->InitialFrame( Buffer );
		    if( Status != ManifestorNoError )
			report( severity_error, "Player_Generic_c::ProcessDecodeToManifest - Failed to apply InitialFrame action.\n" );

		    // Keep a reference so the initial frame survives until a
		    // real frame reaches the manifestor
		    if( InitialFrameBuffer != NULL )
			Stream->Codec->ReleaseDecodeBuffer( InitialFrameBuffer );

		    InitialFrameBuffer = Buffer;
		    InitialFrameBuffer->IncrementReferenceCount();
		}
	    }

	    //
	    // Do we want to insert this in the table
	    //

	    Status = Buffer->ObtainMetaDataReference( MetaDataParsedFrameParametersReferenceType, (void **)(&ParsedFrameParameters) );
	    if( Status != PlayerNoError )
	    {
		report( severity_error, "Player_Generic_c::ProcessDecodeToManifest - Unable to obtain the meta data \"ParsedFrameParametersReference\" - Implementation error\n" );
		Buffer->DecrementReferenceCount( IdentifierProcessDecodeToManifest );
		continue;
	    }

#if 0
	    {
		unsigned int A, B;

		Stream->DecodeBufferPool->GetPoolUsage( &A, &B, NULL, NULL, NULL );
		report( severity_info, "Got(%d) %3d (R = %d, K = %d) %d, %016llx - %d %d/%d\n",
			Stream->StreamType, ParsedFrameParameters->DecodeFrameIndex,
			ParsedFrameParameters->ReferenceFrame, ParsedFrameParameters->KeyFrame,
			ParsedFrameParameters->DisplayFrameIndex, ParsedFrameParameters->NormalizedPlaybackTime,
			AccumulatedBufferTableOccupancy, B, A );
	    }
#endif

	    if( ParsedFrameParameters->DisplayFrameIndex <= DesiredFrameIndex )
		break;

	    for( i=0; i<Stream->NumberOfDecodeBuffers; i++ )
		if( AccumulatedBufferTable[i].Buffer == NULL )
		{
		    AccumulatedBufferTable[i].Buffer                = Buffer;
		    AccumulatedBufferTable[i].SequenceNumber        = SequenceNumber;
		    AccumulatedBufferTable[i].ParsedFrameParameters = ParsedFrameParameters;
		    AccumulatedBufferTableOccupancy++;
		    break;
		}

	    if( i >= Stream->NumberOfDecodeBuffers )
	    {
		report( severity_error, "Player_Generic_c::ProcessDecodeToManifest - Unable to insert buffer in table - Implementation error.\n" );
		break;			// Assume it is immediate, as an implementation error this is pretty nasty
	    }
	}

	if( Stream->Terminating )
	    continue;

	// --------------------------------------------------------------------------------------------
	// We now have a buffer after frame re-ordering
	//
	// First calculate the sequence number that applies to this frame
	// this calculation may appear weird, the idea is this, assume you
	// have a video stream IPBB, sequence numbers 0 1 2 3, frame reordering
	// will yield sequence numbers 0 2 3 1 IE any command to be executed at
	// the end of the stream will appear 1 frame early, the calculations
	// below will re-wossname the sequence numbers to 0 1 1 3 causing the
	// signal to occur at the correct point.
	//

	//
	// Deal with a coded frame buffer
	//

	if( BufferType == Stream->DecodeBufferType )
	{
	    //
	    // Report any re-ordering problems
	    //

	    if( ParsedFrameParameters->CollapseHolesInDisplayIndices && (ParsedFrameParameters->DisplayFrameIndex > DesiredFrameIndex) )
		DesiredFrameIndex = ParsedFrameParameters->DisplayFrameIndex;

	    if( ParsedFrameParameters->DisplayFrameIndex > DesiredFrameIndex )
		report( severity_error, "Player_Generic_c::ProcessDecodeToManifest - Hole in display frame indices (Got %d Expected %d).\n",
			ParsedFrameParameters->DisplayFrameIndex, DesiredFrameIndex );

	    if( ParsedFrameParameters->DisplayFrameIndex < DesiredFrameIndex )
		report( severity_error, "Player_Generic_c::ProcessDecodeToManifest - Frame re-ordering failure (Got %d Expected %d) - Implementation error.\n",
			ParsedFrameParameters->DisplayFrameIndex, DesiredFrameIndex );

	    //
	    // First calculate the sequence number that applies to this frame
	    // this calculation may appear weird, the idea is this, assume you
	    // have a video stream IPBB, sequence numbers 0 1 2 3, frame reordering
	    // will yield sequence numbers 0 2 3 1 IE any command to be executed at
	    // the end of the stream will appear 1 frame early, the calculations
	    // below will re-wossname the sequence numbers to 0 1 1 3 causing the
	    // signal to occur at the correct point.
	    //

	    MaximumActualSequenceNumberSeen = max(SequenceNumber, MaximumActualSequenceNumberSeen);
	    SequenceNumber                  = min(MaximumActualSequenceNumberSeen, MinumumSequenceNumberAccumulated );
	    Time                            = ParsedFrameParameters->NativePlaybackTime;

	    //
	    // Process any outstanding control messages to be applied before this buffer
	    //

	    ProcessAccumulatedControlMessages(  Stream,
						&AccumulatedBeforeControlMessagesCount,
						PLAYER_MAX_DTOM_MESSAGES,
						Stream->AccumulatedBeforeDtoMControlMessages,
						SequenceNumber, Time );

	    //
	    // If we are paused, then we loop waiting for something to happen
	    //

	    if( Stream->Playback->Speed == 0 )
	    {
		while( (Stream->Playback->Speed == 0) && !Stream->Step && !Stream->Terminating && !Stream->DiscardingUntilMarkerFrameDtoM )
		{
		    OS_WaitForEvent( &Stream->SingleStepMayHaveHappened, PLAYER_NEXT_FRAME_EVENT_WAIT );
		    OS_ResetEvent( &Stream->SingleStepMayHaveHappened );
		}

		Stream->Step = false;
	    }

	    //
	    // If we are not discarding everything, then procede to process the buffer
	    //

	    DiscardBuffer = Stream->DiscardingUntilMarkerFrameDtoM;

	    //
	    // Handle output timing functions, await entry into the decode window,
	    // Then check for frame drop (whether due to trick mode, or because
	    // we are running late).
	    // NOTE1 Indicating we are not before decode, means
	    // reference frames can be dropped, we will simply not display them
	    // NOTE2 We may block in these functions, so it is important to
	    // recheck flags
	    //

	    if( !DiscardBuffer )
	    {
		Status = Stream->OutputTimer->TestForFrameDrop( Buffer, OutputTimerBeforeOutputTiming );
		if( Status == OutputTimerNoError )
		{
		    //
		    // Note we loop here if we are engaged in re-timing the decoded frames
		    //

		    while( !Stream->Terminating && Stream->ReTimeQueuedFrames )
			OS_SleepMilliSeconds( PLAYER_RETIMING_WAIT );

		    Stream->OutputTimer->GenerateFrameTiming( Buffer );
		    Status = Stream->OutputTimer->TestForFrameDrop( Buffer, OutputTimerBeforeManifestation );
		}

		if( Stream->DiscardingUntilMarkerFrameDtoM || Stream->Terminating || (Status != OutputTimerNoError) )
		    DiscardBuffer = true;

		// Report only on transitions to avoid log spam
		if( (DiscardBuffer != LastPreManifestDiscardBuffer) && (Status == OutputTimerUntimedFrame) )
		{
		    report( severity_error, "Discarding untimed frames.\n" );
		}
		LastPreManifestDiscardBuffer = DiscardBuffer;

#if 0		// Nick debug data
		if( Status != OutputTimerNoError )
		    report( severity_info, "Timer Discard(%d) %3d (before Manifest) %08x\n", Stream->StreamType, ParsedFrameParameters->DecodeFrameIndex, Status );
#endif
	    }

	    //
	    // Pass the buffer to the manifestor for manifestation
	    // we do not release our hold on this buffer, buffers passed
	    // to the manifestor always re-appear on its output ring.
	    // NOTE calculate next desired frame index before we
	    // give away the buffer, because ParsedFrameParameters
	    // can become invalid after either of the calls below.
	    //

	    DesiredFrameIndex = ParsedFrameParameters->DisplayFrameIndex + 1;

	    if( !DiscardBuffer )
	    {
		SequenceNumberStructure->TimePassToManifestor = OS_GetTimeInMicroSeconds();

#if 0
		{
		    static unsigned long long LastOutputTime = 0;
		    static unsigned long long LastOutputTime1 = 0;
		    VideoOutputTiming_t *OutputTiming;
		    unsigned int C0,C1,C2,C3;

		    Buffer->ObtainMetaDataReference( MetaDataVideoOutputTimingType, (void **)&OutputTiming );
		    Stream->CodedFrameBufferPool->GetPoolUsage( &C0, &C1, NULL, NULL, NULL );
		    Stream->DecodeBufferPool->GetPoolUsage( &C2, &C3, NULL, NULL, NULL );
		    report( severity_info, "Ord %3d (R = %d, K = %d) %d, %6lld %6lld %6lld %6lld (%d/%d %d/%d) (%d %d) %6lld %6lld\n",
			    ParsedFrameParameters->DecodeFrameIndex,
			    ParsedFrameParameters->ReferenceFrame, ParsedFrameParameters->KeyFrame,
			    ParsedFrameParameters->DisplayFrameIndex,
			    OutputTiming->SystemPlaybackTime - SequenceNumberStructure->TimePassToManifestor,
			    SequenceNumberStructure->TimePassToManifestor - SequenceNumberStructure->TimeEntryInProcess2,
			    SequenceNumberStructure->TimePassToManifestor - SequenceNumberStructure->TimeEntryInProcess1,
			    SequenceNumberStructure->TimePassToManifestor - SequenceNumberStructure->TimeEntryInProcess0,
			    C0, C1, C2, C3,
			    Stream->FramesToManifestorCount, Stream->FramesFromManifestorCount,
			    OutputTiming->SystemPlaybackTime - LastOutputTime,
			    ParsedFrameParameters->NormalizedPlaybackTime - LastOutputTime1 );

		    //Buffer->TransferOwnership( IdentifierProcessDecodeToManifest, IdentifierManifestor );
		    //if( (OutputTiming->SystemPlaybackTime - SequenceNumberStructure->TimePassToManifestor) > 0xffffffffULL )
		    //	Stream->DecodeBufferPool->Dump( DumpAll );

		    LastOutputTime = OutputTiming->SystemPlaybackTime;
		    LastOutputTime1 = ParsedFrameParameters->NormalizedPlaybackTime;
		}

		if( Stream->FramesToManifestorCount >= 55 )
		{
		    OS_SleepMilliSeconds( 1000 );
		    report( severity_info, "Ord(%d) %3d (R = %d, K = %d) %d, %016llx %016llx\n",
			    Stream->StreamType, ParsedFrameParameters->DecodeFrameIndex,
			    ParsedFrameParameters->ReferenceFrame, ParsedFrameParameters->KeyFrame,
			    ParsedFrameParameters->DisplayFrameIndex, ParsedFrameParameters->NormalizedPlaybackTime,
			    ParsedFrameParameters->NativePlaybackTime );
		    OS_SleepMilliSeconds( 4000 );
		}
#endif

		Stream->FramesToManifestorCount++;
		Status = Stream->Manifestor->QueueDecodeBuffer( Buffer );
		if( Status != ManifestorNoError )
		    DiscardBuffer = true;

		// A real frame has reached the manifestor, drop our hold on
		// any early initial frame
		if( InitialFrameBuffer != NULL )
		{
		    Stream->Codec->ReleaseDecodeBuffer( InitialFrameBuffer );
		    InitialFrameBuffer = NULL;
		}
	    }

	    if( DiscardBuffer )
	    {
		Stream->Codec->ReleaseDecodeBuffer( Buffer );

		if( Stream->Playback->Speed == 0 )
		    Stream->Step = true;
	    }

	    //
	    // Process any outstanding control messages to be applied after this buffer
	    //

	    ProcessAccumulatedControlMessages(  Stream,
						&AccumulatedAfterControlMessagesCount,
						PLAYER_MAX_DTOM_MESSAGES,
						Stream->AccumulatedAfterDtoMControlMessages,
						SequenceNumber, Time );
	}

	//
	// Deal with a player control structure
	//

	else if( BufferType == BufferPlayerControlStructureType )
	{
	    Buffer->ObtainDataReference( NULL, NULL, (void **)(&ControlStructure) );

	    ProcessNow = (ControlStructure->SequenceType == SequenceTypeImmediate);
	    if( !ProcessNow )
	    {
		SequenceCheck = (ControlStructure->SequenceType == SequenceTypeBeforeSequenceNumber) ||
				(ControlStructure->SequenceType == SequenceTypeAfterSequenceNumber);

		// NOTE(review): the time branch tests Time against
		// INVALID_SEQUENCE_VALUE although Time is initialised with
		// INVALID_TIME - confirm the two constants are equivalent,
		// otherwise this is a latent bug.
		ProcessNow = SequenceCheck ?
			     ((SequenceNumber != INVALID_SEQUENCE_VALUE) && (ControlStructure->SequenceValue <= MaximumActualSequenceNumberSeen)) :
			     ((Time != INVALID_SEQUENCE_VALUE) && (ControlStructure->SequenceValue <= Time));
	    }

	    if( ProcessNow )
		ProcessControlMessage( Stream, Buffer, ControlStructure );
	    else
	    {
		// Park the message in the appropriate before/after table
		if( (ControlStructure->SequenceType == SequenceTypeBeforeSequenceNumber) ||
		    (ControlStructure->SequenceType == SequenceTypeBeforePlaybackTime) )
		{
		    Count = &AccumulatedBeforeControlMessagesCount;
		    Table = Stream->AccumulatedBeforeDtoMControlMessages;
		}
		else
		{
		    Count = &AccumulatedAfterControlMessagesCount;
		    Table = Stream->AccumulatedAfterDtoMControlMessages;
		}

		AccumulateControlMessage( Buffer, ControlStructure, Count, PLAYER_MAX_DTOM_MESSAGES, Table );
	    }
	}
	else
	{
	    report( severity_error, "Player_Generic_c::ProcessDecodeToManifest - Unknown buffer type received - Implementation error.\n" );
	    Buffer->DecrementReferenceCount();
	}
    }
// NOTE(review): no termination epilogue (ProcessRunningCount--/StartStopEvent,
// as in the sibling processes) or function-closing brace is visible at this
// point in the chunk - the file appears truncated here; confirm against the
// full source before relying on this function's tail.
// /////////////////////////////////////////////////////////////////////////
//
//      Input
//
//      Accepts a coded buffer from the DVP capture path.  A zero-length
//      buffer with no new stream/frame parameters is treated as a marker
//      frame and forwarded via a manifestor-supplied marker buffer.
//      Otherwise the already-captured decode buffer referenced by the
//      StreamInfo is annotated with the parsed parameters, re-parented so
//      it owns the coded buffer, and passed on via the output ring.
//
CodecStatus_t   Codec_DvpVideo_c::Input(Buffer_t CodedBuffer)
{
CodecStatus_t             Status;
unsigned int              CodedDataLength;
StreamInfo_t             *StreamInfo;
Buffer_t                  MarkerBuffer;
BufferStructure_t         BufferStructure;
ParsedFrameParameters_t  *ParsedFrameParameters;
ParsedVideoParameters_t  *ParsedVideoParameters;
Buffer_t                  CapturedBuffer;
ParsedVideoParameters_t  *CapturedParsedVideoParameters;

    //
    // Extract the useful coded data information
    //

    Status      = CodedBuffer->ObtainDataReference(NULL, &CodedDataLength, (void **)(&StreamInfo), CachedAddress);
    if (Status != PlayerNoError)
    {
	report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Unable to obtain data reference.\n");
	return Status;
    }

    Status      = CodedBuffer->ObtainMetaDataReference(Player->MetaDataParsedFrameParametersType, (void **)(&ParsedFrameParameters));
    if (Status != PlayerNoError)
    {
	report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Unable to obtain the meta data \"ParsedFrameParameters\".\n");
	return Status;
    }

    Status      = CodedBuffer->ObtainMetaDataReference(Player->MetaDataParsedVideoParametersType, (void**)&ParsedVideoParameters);
    if (Status != PlayerNoError)
    {
	report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Unable to obtain the meta data \"ParsedVideoParameters\".\n");
	return Status;
    }

    //
    // Handle the special case of a marker frame
    //

    if ((CodedDataLength == 0) && !ParsedFrameParameters->NewStreamParameters && !ParsedFrameParameters->NewFrameParameters)
    {
	//
	// Get a marker buffer
	//

	memset(&BufferStructure, 0x00, sizeof(BufferStructure_t));
	BufferStructure.Format  = FormatMarkerFrame;

	Status  = Manifestor->GetDecodeBuffer(&BufferStructure, &MarkerBuffer);
	if (Status != ManifestorNoError)
	{
	    report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Failed to get marker decode buffer from manifestor.\n");
	    return Status;
	}

	MarkerBuffer->TransferOwnership(IdentifierCodec);

	Status  = MarkerBuffer->AttachMetaData(Player->MetaDataParsedFrameParametersReferenceType, UNSPECIFIED_SIZE, (void *)ParsedFrameParameters);
	if (Status != PlayerNoError)
	{
	    report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Unable to attach a reference to \"ParsedFrameParameters\" to the marker buffer.\n");
	    return Status;
	}

	MarkerBuffer->AttachBuffer(CodedBuffer);

	//
	// Queue/pass on the buffer
	//

	OutputRing->Insert((uintptr_t)MarkerBuffer);
	return CodecNoError;
    }

    //
    // Attach the coded data fields to the decode/captured buffer
    //

    CapturedBuffer      = (Buffer_t)StreamInfo->buffer_class;
    if (CapturedBuffer == NULL)
    {
	report(severity_fatal, "Codec_DvpVideo_c::Input(DVP) - NULL Buffer\n");
	return CodecNoError;
    }

    // Copy the parsed video parameters into the captured buffer's own meta data
    Status      = CapturedBuffer->ObtainMetaDataReference(Player->MetaDataParsedVideoParametersType, (void**)&CapturedParsedVideoParameters);
    if (Status != PlayerNoError)
    {
	report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Unable to obtain the meta data \"ParsedVideoParameters\" from the captured buffer.\n");
	return Status;
    }

    memcpy(CapturedParsedVideoParameters, ParsedVideoParameters, sizeof(ParsedVideoParameters_t));

    //

    Status      = CapturedBuffer->AttachMetaData(Player->MetaDataParsedFrameParametersReferenceType, UNSPECIFIED_SIZE, (void *)ParsedFrameParameters);
    if (Status != BufferNoError)
    {
	report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Failed to attach Frame Parameters\n");
	return Status;
    }

    //
    // Switch the ownership hierarchy, and allow the captured buffer to exist on its own.
    // Order matters: take our own reference before detaching from the coded
    // buffer, then make the captured buffer the parent of the coded buffer.
    //

    CapturedBuffer->IncrementReferenceCount();

    Status      = CodedBuffer->DetachBuffer(CapturedBuffer);
    if (Status != BufferNoError)
    {
	report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Failed to detach captured buffer from coded frame buffer\n");
	return Status;
    }

    Status      = CapturedBuffer->AttachBuffer(CodedBuffer);
    if (Status != BufferNoError)
    {
	report(severity_error, "Codec_DvpVideo_c::Input(DVP) - Failed to attach captured buffer to Coded Frame Buffer\n");
	return Status;
    }

    //
    // Pass the captured buffer on
    //

    OutputRing->Insert((uintptr_t)CapturedBuffer);
    return CodecNoError;
}