//{{{  ReadQuantizationMatrices
// //////////////////////////////////////////////////////////////////////////////////////////////////
//
//      Private - Read the quantization matrices.
//
FrameParserStatus_t   FrameParser_VideoMjpeg_c::ReadQuantizationMatrices( void )
{
    int                 DataSize;
    unsigned char       Temporary;
    unsigned int        Table;
    unsigned int        Precision;

    // The DQT segment length includes its own two byte length field, so
    // remove that before counting down the table payload.
    DataSize            = (int)GetShort() - 2;
    while (DataSize > 0)
    {
        // Each table is introduced by a packed byte: low nibble is the
        // destination table index, high nibble is the element precision
        // (0 => 8 bit entries, non-zero => 16 bit entries).
        Temporary       = GetByte();
        Table           = Temporary & 0xf;
        Precision       = Temporary >> 4;

        if (Table >= MAX_QUANTIZATION_MATRICES)
        {
            // FRAME_ERROR (not FRAME_TRACE) for consistency with the other
            // parse failure paths in this file.
            FRAME_ERROR("%s - Table index too large (%d)\n", __FUNCTION__, Table);
            return FrameParserError;
        }

        if (Precision != 0)
        {
            // 16-bit entries, de-zigzagged into natural order as they are read.
            for(int i=0; i<QUANTIZATION_MATRIX_SIZE; i++)
                QuantizationMatrices[Table][MjpegCoefficientMatrixNaturalOrder[i]]      = GetShort();

            DataSize   -= 1 + (QUANTIZATION_MATRIX_SIZE*sizeof(unsigned short int));
        }
        else
        {
            // 8-bit entries, widened to the 16-bit matrix storage.
            for(int i=0; i<QUANTIZATION_MATRIX_SIZE; i++)
                QuantizationMatrices[Table][MjpegCoefficientMatrixNaturalOrder[i]]      = (unsigned short int)GetByte();

            DataSize   -= 1 + (QUANTIZATION_MATRIX_SIZE*sizeof(unsigned char));
        }
    }


#ifdef DUMP_HEADERS
    FRAME_TRACE( "    MJPEG_DQT  (Quantization tables)\n" );
    FRAME_TRACE( "        %04x %04x %04x %04x - %04x %04x %04x %04x - %04x %04x %04x %04x - %04x %04x %04x %04x\n",
                QuantizationMatrices[0][0], QuantizationMatrices[0][1], QuantizationMatrices[0][2], QuantizationMatrices[0][3],
                QuantizationMatrices[1][0], QuantizationMatrices[1][1], QuantizationMatrices[1][2], QuantizationMatrices[1][3], 
                QuantizationMatrices[2][0], QuantizationMatrices[2][1], QuantizationMatrices[2][2], QuantizationMatrices[2][3], 
                QuantizationMatrices[3][0], QuantizationMatrices[3][1], QuantizationMatrices[3][2], QuantizationMatrices[3][3] );
#endif

    return FrameParserNoError;

}
//
//      Private - Read the restart interval (DRI segment).
//
FrameParserStatus_t   FrameParser_VideoMjpeg_c::ReadRestartInterval( void )
{
    // Consume the segment length field; the value itself is not needed
    // because a DRI segment always carries exactly one 16-bit interval,
    // but the read must happen to advance past the length bytes.
    (void)GetShort();
    CodecStreamParameters.RestartInterval       = GetShort();

#ifdef DUMP_HEADERS
    FRAME_TRACE( "    MJPEG_DRI (Restart interval)\n" );
    FRAME_TRACE( "        %04x\n", CodecStreamParameters.RestartInterval );
#endif

    return FrameParserNoError;

}
//
//      Private - Read the start of frame (SOF) header into the frame parameters.
//
FrameParserStatus_t   FrameParser_VideoMjpeg_c::ReadStartOfFrame( void )
{
    MjpegVideoPictureHeader_t*  PictureHeader;
    FrameParserStatus_t         Status;

    // Acquire a frame parameters record on demand.
    if( FrameParameters == NULL )
    {
        Status  = GetNewFrameParameters( (void **)&FrameParameters );
        if( Status != FrameParserNoError )
            return Status;
    }

    PictureHeader                               = &FrameParameters->PictureHeader;
    memset( PictureHeader, 0x00, sizeof(MjpegVideoPictureHeader_t) );

    // Fixed portion of the segment: length, sample precision, frame
    // dimensions and the component count.
    PictureHeader->length                       = Bits.Get(16);
    PictureHeader->sample_precision             = Bits.Get(8);
    PictureHeader->frame_height                 = Bits.Get(16);
    PictureHeader->frame_width                  = Bits.Get(16);
    PictureHeader->number_of_components         = Bits.Get(8);

    if (PictureHeader->number_of_components >= MJPEG_MAX_COMPONENTS)
    {
        FRAME_ERROR("%s - Found more than supported number of components (%d)\n", __FUNCTION__, PictureHeader->number_of_components);
        return FrameParserError;
    }

    // Per-component parameters; the two sampling factors share one byte and
    // are read as consecutive 4-bit fields.
    for (unsigned int Component=0; Component<PictureHeader->number_of_components; Component++)
    {
        PictureHeader->components[Component].id                                 = Bits.Get(8);
        PictureHeader->components[Component].vertical_sampling_factor           = Bits.Get(4);
        PictureHeader->components[Component].horizontal_sampling_factor         = Bits.Get(4);
        PictureHeader->components[Component].quantization_table_index           = Bits.Get(8);
    }

    FrameParameters->PictureHeaderPresent       = true;

#ifdef DUMP_HEADERS
    FRAME_TRACE( "Start of frame header:\n" );
    FRAME_TRACE( "        Length              %d\n", PictureHeader->length);
    FRAME_TRACE( "        Precision           %d\n", PictureHeader->sample_precision);
    FRAME_TRACE( "        FrameHeight         %d\n", PictureHeader->frame_height);
    FRAME_TRACE( "        FrameWidth          %d\n", PictureHeader->frame_width);
    FRAME_TRACE( "        NumberOfComponents  %d\n", PictureHeader->number_of_components);
    for (unsigned int Component=0; Component<PictureHeader->number_of_components; Component++)
        FRAME_TRACE( "            Id = %d, HSF = %d, VSF = %d, QTI = %d\n",
                PictureHeader->components[Component].id,
                PictureHeader->components[Component].horizontal_sampling_factor,
                PictureHeader->components[Component].vertical_sampling_factor,
                PictureHeader->components[Component].quantization_table_index);
#endif

    return FrameParserNoError;

}
//{{{  ReadStreamMetadata
/// /////////////////////////////////////////////////////////////////////////
///
/// \brief      Read in stream generic information
///
/// /////////////////////////////////////////////////////////////////////////
FrameParserStatus_t   FrameParser_VideoMjpeg_c::ReadStreamMetadata (void)
{
    MjpegVideoSequence_t*       Header;
    char                        VendorId[64];
    FrameParserStatus_t         Status          = FrameParserNoError;

    // Read the NUL-terminated vendor string (up to the buffer size).
    for (unsigned int i=0; i<sizeof (VendorId); i++)
    {
        VendorId[i]             = Bits.Get(8);
        if (VendorId[i] == 0)
            break;
    }
    // Guarantee NUL termination: if the loop filled the whole buffer without
    // finding a terminator, the strcmp below would read past the end of
    // VendorId (undefined behaviour).
    VendorId[sizeof(VendorId)-1]                = '\0';

    if (strcmp (VendorId, "STMicroelectronics") != 0)
    {
        // Not our private metadata - nothing to extract, but not an error.
        FRAME_TRACE("    VendorId          : %s\n", VendorId);
        return FrameParserNoError;
    }
    Status                      = GetNewStreamParameters ((void**)&StreamParameters);
    if (Status != FrameParserNoError)
        return Status;

    StreamParameters->UpdatedSinceLastFrame     = true;
    Header                      = &StreamParameters->SequenceHeader;
    memset (Header, 0x00, sizeof(MjpegVideoSequence_t));

    // Frame rate information: time_scale / time_delta.
    Header->time_scale          = Bits.Get(32);
    Header->time_delta          = Bits.Get(32);

    StreamParameters->SequenceHeaderPresent             = true;

#ifdef DUMP_HEADERS
    FRAME_TRACE("StreamMetadata :- \n");
    FRAME_TRACE("    VendorId          : %s\n", VendorId);
    FRAME_TRACE("    time_scale        : %6d\n", Header->time_scale);
    FRAME_TRACE("    time_delta        : %6d\n", Header->time_delta);
#endif

    return FrameParserNoError;
}
//}}}
//{{{  ReadHuffmanTables
// //////////////////////////////////////////////////////////////////////////////////////////////////
//
//      Private - Read the huffman encoding tables
//
FrameParserStatus_t   FrameParser_VideoMjpeg_c::ReadHuffmanTables( void )
{
    int                 DataSize;
    unsigned int        TableSize;
    int                 Index;

    // The DHT segment length includes its own two byte length field.
    DataSize            = (int)GetShort() - 2;

    // Each table costs at least one Id byte plus the bits field, hence the
    // SIZEOF_HUFFMAN_BITS_FIELD+1 minimum in the loop condition.
    for (Index=0; (DataSize >= (SIZEOF_HUFFMAN_BITS_FIELD+1)); Index++)
    {
        if (Index >= MAX_SUPPORTED_HUFFMAN_TABLES)
        {
            FRAME_ERROR("%s - Found more than supported number of Huffman tables (%d)\n", __FUNCTION__, Index );
            BufferPointer      += DataSize;
            return FrameParserError;
        }

        HuffmanTables[Index].Id         = GetByte();
        ReadArray( HuffmanTables[Index].BitsTable, SIZEOF_HUFFMAN_BITS_FIELD );
        // Account for the Id byte as well as the bits field - previously only
        // the bits field was subtracted, so DataSize drifted one byte high
        // per table and the loop could over-run into garbage data.
        DataSize                       -= 1 + SIZEOF_HUFFMAN_BITS_FIELD;

        // The bits field gives the number of codes of each length; their sum
        // is the size of the following value table.
        TableSize                       = 0;
        for (int i=0; i<SIZEOF_HUFFMAN_BITS_FIELD; i++)
            TableSize                  += HuffmanTables[Index].BitsTable[i];

        HuffmanTables[Index].DataSize   = TableSize;
        ReadArray( HuffmanTables[Index].DataTable, TableSize );
        DataSize                       -= TableSize;
    }

#ifdef DUMP_HEADERS
        FRAME_TRACE( "    MJPEG_DHT  (Huffman tables)\n" );
        // Only dump the tables actually parsed in this segment.
        for (int i=0; i<Index; i++)
            FRAME_TRACE( "        Id = %d, DataSize = %d\n", HuffmanTables[i].Id, HuffmanTables[i].DataSize );
#endif

    return FrameParserNoError;
}
//}}}
//{{{  ReadStartOfScan
// //////////////////////////////////////////////////////////////////////////////////////////////////
//
//      Private - Read the start of scan header
//
FrameParserStatus_t   FrameParser_VideoMjpeg_c::ReadStartOfScan( void )
{
    unsigned char       Temporary;

    // Consume the segment length field; the SOS payload layout is derived
    // from the component count rather than from this value, but the read
    // must happen to advance past the length bytes.
    (void)GetShort();

    StartOfScan.NumberOfComponents      = GetByte();

    // The scan must describe exactly the components declared in the frame
    // header (which has already been range checked).
    if (StartOfScan.NumberOfComponents != StartOfFrame.NumberOfComponents)
    {
        FRAME_ERROR("%s - Frame and Scan headers have differing numbers of components (%d, %d)\n", __FUNCTION__, StartOfFrame.NumberOfComponents, StartOfScan.NumberOfComponents);
        return FrameParserError;
    }

    // Per-component entropy table selectors: the packed byte carries the AC
    // table index in the low nibble and the DC index in the high nibble.
    for (int i=0; i<StartOfScan.NumberOfComponents; i++)
    {
        StartOfScan.Components[i].Id                    = GetByte();
        Temporary                                       = GetByte();
        StartOfScan.Components[i].HuffmanACIndex        = Temporary & 0xf;
        StartOfScan.Components[i].HuffmanDCIndex        = Temporary >> 4;
    }

    // Spectral selection and successive approximation parameters.
    StartOfScan.StartOfSpectralSelection                = GetByte();
    StartOfScan.EndOfSpectralSelection                  = GetByte();
    Temporary                                           = GetByte();
    StartOfScan.ApproximationBitPositionLow             = Temporary & 0xf;
    StartOfScan.ApproximationBitPositionHigh            = Temporary >> 4;

#ifdef DUMP_HEADERS
    FRAME_TRACE( "    MJPEG_SOS  (Start of scan)\n" );
    FRAME_TRACE( "        NumberOfComponents  %d\n", StartOfScan.NumberOfComponents );
    for (int i=0; i<StartOfScan.NumberOfComponents; i++)
        FRAME_TRACE( "            Id = %d, Huffman DC table = %d, AC table = %d\n",
                        StartOfScan.Components[i].Id,
                        StartOfScan.Components[i].HuffmanDCIndex,
                        StartOfScan.Components[i].HuffmanACIndex );
    FRAME_TRACE( "        StartSpectral       %02x\n", StartOfScan.StartOfSpectralSelection );
    FRAME_TRACE( "        EndSpectral         %02x\n", StartOfScan.EndOfSpectralSelection );
    FRAME_TRACE( "        ApproximationHigh   %d\n", StartOfScan.ApproximationBitPositionHigh );
    FRAME_TRACE( "        ApproximationLow    %d\n", StartOfScan.ApproximationBitPositionLow );
#endif

    return FrameParserNoError;

}
////////////////////////////////////////////////////////////////////////////
///
/// Examine the mlp globbed audio frames, and extract the correct metadata 
/// to be attached to the frame buffer
///
/// \return Frame parser status code, FrameParserNoError indicates success.
///
FrameParserStatus_t FrameParser_AudioMlp_c::ParseFrameHeader( unsigned char *FrameHeaderBytes, 
							      MlpAudioParsedFrameHeader_t *LocalParsedFrameHeader,
							      int GivenFrameSize )
{
  int StreamIndex =  0, FrameSize =  0 ;
  MlpAudioParsedFrameHeader_t NextParsedFrameHeader;

  LocalParsedFrameHeader->AudioFrameNumber = 0;
  LocalParsedFrameHeader->NumberOfSamples = 0;

  // Walk the globbed buffer one MLP access unit at a time until the whole
  // collated frame has been consumed.
  do 
  {
    unsigned char* NextFrameHeader = &FrameHeaderBytes[StreamIndex];
    FrameParserStatus_t Status;
    memset(&NextParsedFrameHeader, 0, sizeof(MlpAudioParsedFrameHeader_t));

    // parse a single frame
    Status = FrameParser_AudioMlp_c::ParseSingleFrameHeader( NextFrameHeader, &NextParsedFrameHeader );

    if (Status !=  FrameParserNoError) 
    {
      return (Status);
    }

    // A zero length frame would leave StreamIndex unchanged and spin this
    // loop forever, so treat it as a parse error.
    if (NextParsedFrameHeader.Length == 0)
    {
      FRAME_ERROR("Zero length frame, aborting parse\n");
      return FrameParserError;
    }

    LocalParsedFrameHeader->AudioFrameNumber += 1;

    if (NextParsedFrameHeader.IsMajorSync)
    {
      if (IsFirstMajorFrame)
      {
	// store the stream properties from the first major sync
	LocalParsedFrameHeader->SamplingFrequency = NextParsedFrameHeader.SamplingFrequency;
	LocalParsedFrameHeader->NumberOfSamples = NextParsedFrameHeader.NumberOfSamples;
	IsFirstMajorFrame = false;

	FRAME_TRACE("Mlp stream properties: Sampling Freq: %d, Nb of samples: %d\n",
		    MlpSamplingFreq[LocalParsedFrameHeader->SamplingFrequency],
		    LocalParsedFrameHeader->NumberOfSamples);
      }
      else
      {
	// Deliberately non-fatal: log the anomaly but keep the properties
	// captured from the first major sync.
	if (NextParsedFrameHeader.SamplingFrequency != LocalParsedFrameHeader->SamplingFrequency)
	{
	  FRAME_ERROR("Unauthorized sampling frequency update\n");
	}
      }
    }

    FrameSize += NextParsedFrameHeader.Length;

    StreamIndex += NextParsedFrameHeader.Length;

  } while (StreamIndex < GivenFrameSize);

  LocalParsedFrameHeader->Length = FrameSize;
  // Total sample count = per-frame sample count for this sampling frequency
  // times the number of frames found in the glob.
  LocalParsedFrameHeader->NumberOfSamples = MlpSampleCount[LocalParsedFrameHeader->SamplingFrequency] * 
    LocalParsedFrameHeader->AudioFrameNumber;

  FRAME_DEBUG("SamplingFrequency: %d, FrameSize: %d, NumberOfSamples: %d, NbFrames: %d \n",
	      MlpSamplingFreq[LocalParsedFrameHeader->SamplingFrequency], 
	      LocalParsedFrameHeader->Length, 
	      LocalParsedFrameHeader->NumberOfSamples,
	      LocalParsedFrameHeader->AudioFrameNumber);

  return FrameParserNoError;
}
////////////////////////////////////////////////////////////////////////////
///
/// Parse the frame header and store the results for when we emit the frame.
///
FrameParserStatus_t   FrameParser_AudioAac_c::ReadHeaders(void)
{
	FrameParserStatus_t Status;
	//
	// Perform the common portion of the read headers function
	//
	// NOTE(review): the status returned by the common implementation is
	// discarded here - presumably it cannot fail for this stream type,
	// but confirm against FrameParser_Audio_c::ReadHeaders().
	FrameParser_Audio_c::ReadHeaders();
	//
	// save the previous frame parameters
	// (kept so a property-less frame below can inherit them)
	AacAudioParsedFrameHeader_t LastParsedFrameHeader;
	memcpy(&LastParsedFrameHeader, &ParsedFrameHeader, sizeof(AacAudioParsedFrameHeader_t));
	Status = ParseFrameHeader(BufferData, &ParsedFrameHeader, BufferLength, AAC_GET_FRAME_PROPERTIES, true);
	if (Status != FrameParserNoError)
	{
		if (Status == FrameParserHeaderUnplayable)
		{
			// Count consecutive unplayable headers; only give up on the
			// stream once the threshold is crossed (a one-off is treated
			// as a passing bit error).
			NumHeaderUnplayableErrors++;
			if (NumHeaderUnplayableErrors >= UnplayabilityThreshold)
			{
				// this is clearly not a passing bit error
				FRAME_ERROR("Too many unplayability reports, marking stream unplayable\n");
				Player->MarkStreamUnPlayable(Stream);
			}
		}
		else
		{
			FRAME_ERROR("Failed to parse frame header, bad collator selected?\n");
		}
		return Status;
	}
	// The collator should deliver exactly one frame per buffer.
	if (ParsedFrameHeader.Length != BufferLength)
	{
		FRAME_ERROR("Buffer length is inconsistent with frame header, bad collator selected?\n");
		return FrameParserError;
	}
	// One-shot trace of the stream properties on the first good frame.
	if (isFirstFrame)
	{
		isFirstFrame = false;
		FRAME_TRACE("AAC Frame type: %s, FrameSize %d, Number of samples: %d, SamplingFrequency %d, \n",
					FrameTypeName[ParsedFrameHeader.Type],
					ParsedFrameHeader.Length,
					ParsedFrameHeader.NumberOfSamples,
					ParsedFrameHeader.SamplingFrequency);
	}
	if ((ParsedFrameHeader.SamplingFrequency == 0) || (ParsedFrameHeader.NumberOfSamples == 0))
	{
		// the current frame has no such properties, we must refer to the previous frame...
		if ((LastParsedFrameHeader.SamplingFrequency == 0) || (LastParsedFrameHeader.NumberOfSamples == 0))
		{
			// the previous frame has no properties either, we cannot decode this frame...
			FrameToDecode = false;
			FRAME_ERROR("This frame should not be sent for decode (it relies on previous frame for audio parameters)\n");
			return FrameParserError;
		}
		else
		{
			FrameToDecode = true;
			// make the previous frame properties the current ones..
			memcpy(&ParsedFrameHeader, &LastParsedFrameHeader, sizeof(AacAudioParsedFrameHeader_t));
		}
	}
	else
	{
		FrameToDecode = true;
	}
	// A good header resets the consecutive-unplayable counter.
	NumHeaderUnplayableErrors = 0;
	Status = GetNewFrameParameters((void **) &FrameParameters);
	if (Status != FrameParserNoError)
	{
		FRAME_ERROR("Cannot get new frame parameters\n");
		return Status;
	}
	// Nick inserted some default values here
	ParsedFrameParameters->FirstParsedParametersForOutputFrame          = true;
	ParsedFrameParameters->FirstParsedParametersAfterInputJump          = FirstDecodeAfterInputJump;
	ParsedFrameParameters->SurplusDataInjected                          = SurplusDataInjected;
	ParsedFrameParameters->ContinuousReverseJump                        = ContinuousReverseJump;
	// Audio frames are always self-contained key frames.
	ParsedFrameParameters->KeyFrame                                     = true;
	ParsedFrameParameters->ReferenceFrame                               = false;
	ParsedFrameParameters->NewFrameParameters        = true;
	ParsedFrameParameters->SizeofFrameParameterStructure = sizeof(AacAudioFrameParameters_t);
	ParsedFrameParameters->FrameParameterStructure       = FrameParameters;
	FrameParameters->FrameSize = ParsedFrameHeader.Length;
	FrameParameters->Type     = ParsedFrameHeader.Type;
	ParsedAudioParameters->Source.BitsPerSample = 0; // filled in by codec
	ParsedAudioParameters->Source.ChannelCount = 0;  // filled in by codec
	ParsedAudioParameters->Source.SampleRateHz = ParsedFrameHeader.SamplingFrequency;
	ParsedAudioParameters->SampleCount = ParsedFrameHeader.NumberOfSamples;
	ParsedAudioParameters->Organisation = 0; // filled in by codec
	return FrameParserNoError;
}
FrameParserStatus_t FrameParser_AudioEAc3_c::ParseFrameHeader(unsigned char *FrameHeaderBytes,
							      EAc3AudioParsedFrameHeader_t *ParsedFrameHeader,
							      int GivenFrameSize)
{
	int StreamIndex = 0, FrameSize = 0 ;
	EAc3AudioParsedFrameHeader_t NextParsedFrameHeader;
	int NumberOfIndependantSubStreams = 0;
	int NumberOfDependantSubStreams = 0;
	int NumberOfSamples = 0;
	memset(ParsedFrameHeader, 0, sizeof(EAc3AudioParsedFrameHeader_t));
	do
	{
		// At this point we got the independent substream or stream length, now search for
		// the other dependent substreams
		unsigned char *NextFrameHeader = &FrameHeaderBytes[StreamIndex];
		FrameParserStatus_t Status;
		memset(&NextParsedFrameHeader, 0, sizeof(EAc3AudioParsedFrameHeader_t));
		// parse a single frame
		Status = FrameParser_AudioEAc3_c::ParseSingleFrameHeader(NextFrameHeader, &NextParsedFrameHeader, false);
		if (Status != FrameParserNoError)
		{
			return (Status);
		}
		// A zero length substream would leave StreamIndex unchanged and
		// spin this loop forever, so treat it as a parse error.
		if (NextParsedFrameHeader.Length == 0)
		{
			FRAME_ERROR("Zero length substream, aborting parse\n");
			return FrameParserError;
		}
		if ((NextParsedFrameHeader.Type == TypeEac3Ind) || (NextParsedFrameHeader.Type == TypeAc3))
		{
			if (NumberOfIndependantSubStreams == 0)
			{
				/* get the first independant stream properties */
				memcpy(ParsedFrameHeader, &NextParsedFrameHeader, sizeof(EAc3AudioParsedFrameHeader_t));
			}
			NumberOfIndependantSubStreams++;
			FrameSize += NextParsedFrameHeader.Length;
			// Only independent substreams contribute decoded samples.
			NumberOfSamples += NextParsedFrameHeader.NumberOfSamples;
		}
		else if (NextParsedFrameHeader.Type == TypeEac3Dep)
		{
			if (NumberOfIndependantSubStreams == 0)
			{
				FRAME_ERROR("Dependant subframe found before independant one, should not occur...\n");
				/* we met a dependant substream first, this is a frame parsing error ....*/
				return FrameParserError;
			}
			FrameSize += NextParsedFrameHeader.Length;
			NumberOfDependantSubStreams++;
		}
		else
		{
			// what else could it be? raise an error!
			FRAME_ERROR("Bad substream type: %d...\n", NextParsedFrameHeader.Type);
			return FrameParserError;
		}
		StreamIndex += NextParsedFrameHeader.Length;
	}
	while (StreamIndex < GivenFrameSize);
	// The substream lengths must tile the collated frame exactly.
	if (FrameSize != GivenFrameSize)
	{
		FRAME_ERROR("Given frame size mismatch: %d (expected:%d)\n", FrameSize, GivenFrameSize);
		return FrameParserError;
	}
	if (NumberOfSamples != EAC3_NBSAMPLES_NEEDED)
	{
		// Report the constant actually compared against (previously this
		// printed EAC3_BYTES_NEEDED, which made the log misleading).
		FRAME_ERROR("Number of samples mismatch: %d (expected:%d)\n", NumberOfSamples, EAC3_NBSAMPLES_NEEDED);
		return FrameParserError;
	}
	// remember the last frame header type to know if this stream is DD+ or Ac3 (dependant eac3 substreams
	// are always located after eac3 independant substreams or ac3 substreams)
	ParsedFrameHeader->Type = NextParsedFrameHeader.Type;
	ParsedFrameHeader->Length = FrameSize;
	ParsedFrameHeader->NumberOfSamples = NumberOfSamples;
	FRAME_DEBUG("SamplingFrequency %d, FrameSize %d, Indp substreams: %d, Dep substreams: %d\n",
		    ParsedFrameHeader->SamplingFrequency,
		    ParsedFrameHeader->Length,
		    NumberOfIndependantSubStreams,
		    NumberOfDependantSubStreams);
	// One-shot trace of the stream properties on the first parsed frame.
	if (FirstTime)
	{
		if (ParsedFrameHeader->Type == TypeAc3)
		{
			FRAME_TRACE("AC3 stream properties: SamplingFrequency %d, FrameSize %d\n",
				    ParsedFrameHeader->SamplingFrequency,
				    ParsedFrameHeader->Length);
		}
		else
		{
			FRAME_TRACE("DD+ stream properties: SamplingFrequency %d, FrameSize %d, Indp substreams: %d, Dep substreams: %d\n",
				    ParsedFrameHeader->SamplingFrequency,
				    ParsedFrameHeader->Length,
				    NumberOfIndependantSubStreams,
				    NumberOfDependantSubStreams);
		}
		FirstTime = false;
	}
	return FrameParserNoError;
}