StreamInfo::FrameType ParserMpeg2Video::parsePicture(unsigned char* data, int length) {
    int frametype = GetFrameType(data, length);

    // get I,P frames distance
    if(frametype < 3 && m_curDts != DVD_NOPTS_VALUE && m_curPts != DVD_NOPTS_VALUE) {
        m_frameDifference = m_curPts - m_curDts;
        m_lastDts = m_curDts;
        return ConvertFrameType(frametype);
    }

    // extrapolate DTS
    if(m_curDts == DVD_NOPTS_VALUE && m_duration != 0) {
        m_curDts = PtsAdd(m_lastDts, m_duration);
        m_lastDts = m_curDts;
    }

    // B frames have DTS = PTS
    if(frametype == 3 && m_curPts == DVD_NOPTS_VALUE) {
        m_curPts = m_curDts;
    }

    // extrapolate PTS of I/P frame
    if(frametype < 3 && m_curPts == DVD_NOPTS_VALUE) {
        m_curPts = PtsAdd(m_curDts, m_frameDifference);
    }

    return ConvertFrameType(frametype);
}
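A minimal standalone sketch of the extrapolation arithmetic above, with DVD_NOPTS_VALUE, PtsAdd and the parser's member variables replaced by simplified stand-ins (all names below are illustrative, not the parser's API):

#include <cstdint>

// Sketch only: mirrors parsePicture()'s timestamp handling.  NOPTS stands in for
// DVD_NOPTS_VALUE; PtsAddSketch ignores the PTS wrap handling the real PtsAdd
// presumably provides.
static const int64_t NOPTS = -1;
static int64_t PtsAddSketch(int64_t a, int64_t b) { return a + b; }

struct TimestampState {
    int64_t lastDts         = NOPTS;
    int64_t frameDifference = 0;     // learned I/P PTS-DTS distance
    int64_t duration        = 3600;  // e.g. one 25 fps frame in 90 kHz ticks
};

// frametype < 3 is taken to mean I/P, frametype == 3 a B frame, as above.
static void Extrapolate(TimestampState& s, int frametype, int64_t& dts, int64_t& pts) {
    if (frametype < 3 && dts != NOPTS && pts != NOPTS) {   // both stamps present: learn distance
        s.frameDifference = pts - dts;
        s.lastDts = dts;
        return;
    }
    if (dts == NOPTS && s.duration != 0)                    // extrapolate DTS from the last one
        dts = s.lastDts = PtsAddSketch(s.lastDts, s.duration);
    if (frametype == 3 && pts == NOPTS)                     // B frames: PTS equals DTS
        pts = dts;
    if (frametype < 3 && pts == NOPTS)                      // I/P: PTS = DTS + learned distance
        pts = PtsAddSketch(dts, s.frameDifference);
}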
Example 2
/****************************************************************************
 * 
 *  ROUTINE       :     GetEstimatedBpb
 *
 *  INPUTS        :     CP_INSTANCE *cpi, A Value of Q. 
 *                      
 *
 *  OUTPUTS       :     None.
 *
 *  RETURNS       :     The current estimate for the number of bytes per block
 *                      at the current Q.
 *
 *  FUNCTION      :     Returns an estimate of the bytes per block that will
 *                      be achieved at the given Q
 *
 *  SPECIAL NOTES :     None. 
 *
 *
 *  ERRORS        :     None.
 *
 ****************************************************************************/
double GetEstimatedBpb( CP_INSTANCE *cpi, UINT32 TargetQ )
{
	UINT32 i;
	INT32 ThreshTableIndex = Q_TABLE_SIZE - 1;
    double BytesPerBlock;

	/* Search for the Q table index that matches the given Q. */
	for ( i = 0; i < Q_TABLE_SIZE; i++ )
	{
		if ( TargetQ >= cpi->pb.QThreshTable[i] )
		{
			ThreshTableIndex = i;
			break;
		}
	}

    // Adjust according to Q shift and type of frame
    if ( GetFrameType(&cpi->pb) == BASE_FRAME )
    {
        // Get primary prediction
        BytesPerBlock = KfBpbTable[ThreshTableIndex];
    }
    else 
    {
        // Get primary prediction
        BytesPerBlock = BpbTable[ThreshTableIndex];
        BytesPerBlock = BytesPerBlock * cpi->BpbCorrectionFactor;
    }

	return BytesPerBlock;
}
/*!
 * \brief  Get a pointer to the ASDU part of the message, i.e. obtain one ASDU101
 * \param  None
 * \return Pointer to the ASDU part of the message
 * \author zzy
 * \date   2015/5/25
 */
ASDU101 *CIEC104Response::GetAsdu()
{
    if(GetFrameType() != IEC104_I_TYPE)
        return NULL;
    if(GetInfoSize()<10)
        return NULL;
    return (ASDU101 *)GetBuffer(6);
}
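For context, the constants above follow the IEC 104 APDU layout: a 6-byte APCI (start byte 0x68, a length octet, then four control octets) followed by the ASDU, which is why the ASDU pointer is taken at offset 6. A minimal sketch of that header (field names and packing are assumptions; the real ASDU101 type is defined elsewhere in this code base):

// Illustrative layout only.
struct APCI_Sketch {
    BYTE start;        // always 0x68
    BYTE apduLength;   // bytes following this octet (control field + ASDU)
    BYTE control[4];   // I-, S- or U-format control octets
};                     // 6 bytes total, so GetBuffer(6) points at the first ASDU octet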
ASDU101 *CIEC104DeliverQuery::GetAsdu()
{
    //Function: obtain one ASDU101
    //Parameters:
    //Return value:
    //Created: 2007-08-29
    //Modified:
    //Author:
    //Notes:
    if(GetFrameType() != IEC104_I_TYPE)
        return NULL;
    if(GetInfoSize()<10)
        return NULL;
    return (ASDU101 *)GetBuffer(6);
}
void CAudioEncoder::ForwardEncodedAudioFrames(void)
{
  u_int8_t* pFrame;
  u_int32_t frameLength;
  u_int32_t frameNumSamples;

  while (GetEncodedFrame(&pFrame, 
			 &frameLength, 
			 &frameNumSamples)) {

    // sanity check
    if (pFrame == NULL || frameLength == 0) {
#ifdef DEBUG_SYNC
      debug_message("%s:No frame", Profile()->GetName());
#endif
      break;
    }

    //debug_message("Got encoded frame");

    // output has frame start timestamp
    Timestamp output = DstSamplesToTicks(m_audioDstSampleNumber);

    m_audioDstFrameNumber++;
    m_audioDstSampleNumber += frameNumSamples;
    m_audioDstElapsedDuration = DstSamplesToTicks(m_audioDstSampleNumber);

    //debug_message("m_audioDstSampleNumber = %llu", m_audioDstSampleNumber);

    // forward the encoded frame to sinks

#ifdef DEBUG_SYNC
    debug_message("%s:audio forwarding "U64, 
		  Profile()->GetName(), output);
#endif
    CMediaFrame* pMediaFrame =
      new CMediaFrame(
		      GetFrameType(),
		      pFrame, 
		      frameLength,
		      m_audioStartTimestamp + output,
		      frameNumSamples,
		      m_audioDstSampleRate);

    ForwardFrame(pMediaFrame);
  }
}
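A hedged sketch of the sample-count-to-timestamp conversion the loop above relies on; the tick rate below is an assumed constant for illustration, not taken from the encoder:

#include <stdint.h>

// Sketch only: how a DstSamplesToTicks()-style conversion can be expressed.  The
// frame timestamp above is then m_audioStartTimestamp + the returned value.
static const uint64_t TICKS_PER_SEC = 90000;   // assumption for illustration

static uint64_t SamplesToTicksSketch(uint64_t samples, uint32_t sampleRate) {
  return (samples * TICKS_PER_SEC) / sampleRate;   // elapsed samples -> clock ticks
}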
Example 6
void CDealFrame18::Deal( CRCClient* poClinet,UINT32 dwSerialNumber,UINT32 dwAreaID,UINT32 dwPlayerID,map<string,string> &mapField )
{
    if (NULL == poClinet)
    {
        return;
    }

    if (mapField[ALL_GOLD_COUNT_PARAM].empty())
    {
        string strErrorMsg = GetRsponeResult(ERR_GM_PARM_INFO::ID_PARAM_ERR);
        poClinet->Rspone(strErrorMsg.c_str());
        return;
    }

    UINT32 qwParam1 = SDAtou(mapField[ALL_GOLD_COUNT_PARAM].c_str());
    UINT16 wErrCode = CGMProcessor::GmReq(dwSerialNumber,(UINT8)GetFrameType(), 0,qwParam1,0,0,"",poClinet->GetCliSessionID(), mapField["desc"].c_str());
    if (ERR_GM_PARM_INFO::ID_PLAYER_NOT_MEMORT == wErrCode)
        return;
    const CID2PlayerMap &mapID2PlayerMap = CPlayerMgr::Instance()->GetID2PlayerMap();

    if (0 == mapID2PlayerMap.size())
    {
        string strErrorMsg = GetRsponeResult(ERR_GM_PARM_INFO::ID_NOT_DATA);
        poClinet->Rspone(strErrorMsg.c_str());
        return;
    }

    CID2PlayerMapConstItr iter;

    for (iter = mapID2PlayerMap.begin(); iter != mapID2PlayerMap.end(); iter++)
    {
        CPlayer *pPlayer = iter->second;

        if (NULL == pPlayer)
        {
            continue;
        }

        pPlayer->AddGold(qwParam1,CRecordMgr::EGGT_GETGIVEGM);
    }


    string strRetInfo = GetRsponeResult(ERR_GM_PARM_INFO::ID_SUCCESS);
    poClinet->Rspone(strRetInfo.c_str());
}
Example 7
void CDealFrame33::Deal( CRCClient* poClinet, UINT32 dwSerialNumber, UINT32 dwAreaID, UINT32 dwPlayerID, map<string, string> &mapField )
{
    if (NULL == poClinet)
    {
        return;
    }

    if (mapField[PLAYER_ID].empty() || mapField[DEC_COIN_COUNT_PARAM].empty())
    {
        string strErrorMsg = GetRsponeResult(ERR_GM_PARM_INFO::ID_PARAM_ERR);
        poClinet->Rspone(strErrorMsg.c_str());
        return;
    }

    UINT32 qwParam1 = SDAtou(mapField[DEC_COIN_COUNT_PARAM].c_str());
    UINT32 unPlayerId = SDAtou(mapField[PLAYER_ID].c_str());
    UINT16 wErrCode = CGMProcessor::GmReq(dwSerialNumber, (UINT8)GetFrameType(), unPlayerId, qwParam1, 0, 0, "", poClinet->GetCliSessionID(), mapField["desc"].c_str());

    string strErrorMsg = GetRsponeResult(wErrCode);
    poClinet->Rspone(strErrorMsg.c_str());
}
void CAudioEncoder::Initialize (void)
{
  // called from derived classes init function from the start function
  // in the media flow
  m_audioSrcFrameNumber = 0;
  m_audioDstFrameNumber = 0;
  m_audioDstSampleNumber = 0;
  m_audioSrcElapsedDuration = 0;
  m_audioDstElapsedDuration = 0;

  // destination parameters are from the audio profile
  m_audioDstType = GetFrameType();
  m_audioDstSampleRate = m_pConfig->GetIntegerValue(CFG_AUDIO_SAMPLE_RATE);
  m_audioDstChannels = m_pConfig->GetIntegerValue(CFG_AUDIO_CHANNELS);
  m_audioDstSamplesPerFrame = GetSamplesPerFrame();

  // if we need to resample
  if (m_audioDstSampleRate != m_audioSrcSampleRate) {
    // create a resampler for each audio destination channel - 
    // we will combine the channels before resampling
    m_audioResample = (resample_t *)malloc(sizeof(resample_t) *
					   m_audioDstChannels);
    for (int ix = 0; ix < m_audioDstChannels; ix++) {
      m_audioResample[ix] = st_resample_start(m_audioSrcSampleRate, 
					      m_audioDstSampleRate);
    }
  }

  // this calculation doesn't take into consideration the resampling
  // size of the src.  4 times might not be enough - we need most likely
  // 2 times the max of the src samples and the dest samples

  m_audioPreEncodingBufferLength = 0;
  m_audioPreEncodingBufferMaxLength =
    4 * DstSamplesToBytes(m_audioDstSamplesPerFrame);

  m_audioPreEncodingBuffer = (u_int8_t*)realloc(
						m_audioPreEncodingBuffer,
						m_audioPreEncodingBufferMaxLength);
}
/*!
 * \brief  Whether this is a test frame
 * \param  None
 * \return true if this is a test frame, false otherwise
 * \author zzy
 * \date   2015/5/25
 */
bool CIEC104Response::IsTestFrame()
{
    Q_ASSERT(GetFrameType() == IEC104_U_TYPE );
    BYTE nControlByte1 = GetUInt(2,1);
    return (nControlByte1&0x80)!=0;
}
Example 10
static void UpdateFrame(CP_INSTANCE *cpi){

  double CorrectionFactor;

  /* Reset the DC predictors. */
  cpi->pb.LastIntraDC = 0;
  cpi->pb.InvLastIntraDC = 0;
  cpi->pb.LastInterDC = 0;
  cpi->pb.InvLastInterDC = 0;

  /* Initialise bit packing mechanism. */
#ifndef LIBOGG2
  oggpackB_reset(cpi->oggbuffer);
#else
  oggpackB_writeinit(cpi->oggbuffer, cpi->oggbufferstate);
#endif
  /* mark as video frame */
  oggpackB_write(cpi->oggbuffer,0,1);

  /* Write out the frame header information including size. */
  WriteFrameHeader(cpi);

  /* Copy back any extra frags that are to be updated by the codec
     as part of the background cleanup task */
  CopyBackExtraFrags(cpi);

  /* Encode the data.  */
  EncodeData(cpi);

  /* Adjust drop frame trigger. */
  if ( GetFrameType(&cpi->pb) != KEY_FRAME ) {
    /* Apply decay factor then add in the last frame size. */
    cpi->DropFrameTriggerBytes =
      ((cpi->DropFrameTriggerBytes * (DF_CANDIDATE_WINDOW-1)) /
       DF_CANDIDATE_WINDOW) + oggpackB_bytes(cpi->oggbuffer);
  }else{
    /* Increase cpi->DropFrameTriggerBytes a little. Just after a key
       frame may actually be a good time to drop a frame. */
    cpi->DropFrameTriggerBytes =
      (cpi->DropFrameTriggerBytes * DF_CANDIDATE_WINDOW) /
      (DF_CANDIDATE_WINDOW-1);
  }

  /* Test for overshoot which may require a dropped frame next time
     around.  If we are already in a drop frame condition but the
     previous frame was not dropped then the threshold for continuing
     to allow dropped frames is reduced. */
  if ( cpi->DropFrameCandidate ) {
    if ( cpi->DropFrameTriggerBytes >
         (cpi->frame_target_rate * (DF_CANDIDATE_WINDOW+1)) )
      cpi->DropFrameCandidate = 1;
    else
      cpi->DropFrameCandidate = 0;
  } else {
    if ( cpi->DropFrameTriggerBytes >
         (cpi->frame_target_rate * ((DF_CANDIDATE_WINDOW*2)-2)) )
      cpi->DropFrameCandidate = 1;
    else
      cpi->DropFrameCandidate = 0;
  }

  /* Update the BpbCorrectionFactor variable according to whether or
     not we were close enough with our selection of DCT quantiser.  */
  if ( GetFrameType(&cpi->pb) != KEY_FRAME ) {
    /* Work out a size correction factor. */
    CorrectionFactor = (double)oggpackB_bytes(cpi->oggbuffer) /
      (double)cpi->ThisFrameTargetBytes;

    if ( (CorrectionFactor > 1.05) &&
         (cpi->pb.ThisFrameQualityValue <
          cpi->pb.QThreshTable[cpi->Configuration.ActiveMaxQ]) ) {
      CorrectionFactor = 1.0 + ((CorrectionFactor - 1.0)/2);
      if ( CorrectionFactor > 1.5 )
        cpi->BpbCorrectionFactor *= 1.5;
      else
        cpi->BpbCorrectionFactor *= CorrectionFactor;

      /* Keep BpbCorrectionFactor within limits */
      if ( cpi->BpbCorrectionFactor > MAX_BPB_FACTOR )
        cpi->BpbCorrectionFactor = MAX_BPB_FACTOR;
    } else if ( (CorrectionFactor < 0.95) &&
                (cpi->pb.ThisFrameQualityValue > VERY_BEST_Q) ){
      CorrectionFactor = 1.0 - ((1.0 - CorrectionFactor)/2);
      if ( CorrectionFactor < 0.75 )
        cpi->BpbCorrectionFactor *= 0.75;
      else
        cpi->BpbCorrectionFactor *= CorrectionFactor;

      /* Keep BpbCorrectionFactor within limits */
      if ( cpi->BpbCorrectionFactor < MIN_BPB_FACTOR )
        cpi->BpbCorrectionFactor = MIN_BPB_FACTOR;
    }
  }

  /* Adjust carry over and or key frame context. */
  if ( GetFrameType(&cpi->pb) == KEY_FRAME ) {
    /* Adjust the key frame context unless the key frame was very small */
    AdjustKeyFrameContext(cpi);
  } else {
    /* Update the frame carry over */
    cpi->CarryOver += ((ogg_int32_t)cpi->frame_target_rate -
                       (ogg_int32_t)oggpackB_bytes(cpi->oggbuffer));
  }
  cpi->TotalByteCount += oggpackB_bytes(cpi->oggbuffer);
}
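A short numeric sketch of the decay applied to DropFrameTriggerBytes above; the window size used here is an assumed value, not the codec's DF_CANDIDATE_WINDOW:

/* Sketch: each non-key frame keeps (N-1)/N of the running total and then adds the
   new frame's size, so the trigger behaves roughly like an N-frame sliding sum of
   coded bytes.  N = 5 is illustrative only. */
static unsigned DecayTriggerSketch( unsigned trigger, unsigned frame_bytes ){
  const unsigned N = 5;
  return ((trigger * (N - 1)) / N) + frame_bytes;   /* e.g. 40000, 9000 -> 41000 */
}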
Example 11
static void ExpandBlock ( PB_INSTANCE *pbi, ogg_int32_t FragmentNumber ){
  unsigned char *LastFrameRecPtr;   /* Pointer into previous frame
                                       reconstruction. */
  unsigned char *LastFrameRecPtr2;  /* Pointer into previous frame
                                       reconstruction for 1/2 pixel MC. */

  ogg_uint32_t   ReconPixelsPerLine; /* Pixels per line */
  ogg_int32_t    ReconPixelIndex;    /* Offset for block into a
                                        reconstruction buffer */
  ogg_int32_t    ReconPtr2Offset;    /* Offset for second
                                        reconstruction in half pixel
                                        MC */
  ogg_int32_t    MVOffset;           /* Baseline motion vector offset */
  ogg_int32_t    MvShift  ;          /* Shift to correct to 1/2 or 1/4 pixel */
  ogg_int32_t    MvModMask;          /* Mask to determine whether 1/2
                                        pixel is used */

  /* Get coding mode for this block */
  if ( GetFrameType(pbi) == BASE_FRAME ){
    pbi->CodingMode = CODE_INTRA;
  }else{
    /* Get Motion vector and mode for this block. */
    pbi->CodingMode = pbi->FragCodingMethod[FragmentNumber];
  }

  /* Select the appropriate inverse Q matrix and line stride */
  if ( FragmentNumber<(ogg_int32_t)pbi->YPlaneFragments ) {
    ReconPixelsPerLine = pbi->YStride;
    MvShift = 1;
    MvModMask = 0x00000001;

    /* Select appropriate dequantiser matrix. */
    if ( pbi->CodingMode == CODE_INTRA )
      pbi->dequant_coeffs = pbi->dequant_Y_coeffs;
    else
      pbi->dequant_coeffs = pbi->dequant_Inter_coeffs;
  }else{
    ReconPixelsPerLine = pbi->UVStride;
    MvShift = 2;
    MvModMask = 0x00000003;

    /* Select appropriate dequantiser matrix. */
    if ( pbi->CodingMode == CODE_INTRA )
      pbi->dequant_coeffs = pbi->dequant_UV_coeffs;
    else
      pbi->dequant_coeffs = pbi->dequant_Inter_coeffs;
  }

  /* Set up pointer into the quantisation buffer. */
  pbi->quantized_list = &pbi->QFragData[FragmentNumber][0];

  /* Invert quantisation and DCT to get pixel data. */
  switch(pbi->FragCoefEOB[FragmentNumber]){
  case 0:case 1:
    IDct1( pbi->quantized_list, pbi->dequant_coeffs, pbi->ReconDataBuffer );
    break;
  case 2: case 3:case 4:case 5:case 6:case 7:case 8: case 9:case 10:
    IDct10( pbi->quantized_list, pbi->dequant_coeffs, pbi->ReconDataBuffer );
    break;
  default:
    IDctSlow( pbi->quantized_list, pbi->dequant_coeffs, pbi->ReconDataBuffer );
  }

  /* Convert fragment number to a pixel offset in a reconstruction buffer. */
  ReconPixelIndex = pbi->recon_pixel_index_table[FragmentNumber];

  /* Action depends on decode mode. */
  if ( pbi->CodingMode == CODE_INTER_NO_MV ){
    /* Inter with no motion vector */
    /* Reconstruct the pixel data using the last frame reconstruction
       and change data when the motion vector is (0,0), the recon is
       based on the lastframe without loop filtering---- for testing */
    ReconInter( pbi, &pbi->ThisFrameRecon[ReconPixelIndex],
                &pbi->LastFrameRecon[ReconPixelIndex],
                pbi->ReconDataBuffer, ReconPixelsPerLine );

  }else if ( ModeUsesMC[pbi->CodingMode] ) {
    /* The mode uses a motion vector. */
    /* Get vector from list */
    pbi->MVector.x = pbi->FragMVect[FragmentNumber].x;
    pbi->MVector.y = pbi->FragMVect[FragmentNumber].y;

    /* Work out the base motion vector offset and the 1/2 pixel offset
       if any.  For the U and V planes the MV specifies 1/4 pixel
       accuracy. This is adjusted to 1/2 pixel as follows ( 0->0,
       1/4->1/2, 1/2->1/2, 3/4->1/2 ). */
    MVOffset = 0;
    ReconPtr2Offset = 0;
    if ( pbi->MVector.x > 0 ){
      MVOffset = pbi->MVector.x >> MvShift;
      if ( pbi->MVector.x & MvModMask )
        ReconPtr2Offset += 1;
    } else if ( pbi->MVector.x < 0 ) {
Example 12
/****************************************************************************
 * 
 *  ROUTINE       :     RegulateQ
 *
 *  INPUTS        :     INT32 BlocksToUpdate
 *
 *  OUTPUTS       :     None.
 *
 *  RETURNS       :     None.
 *
 *  FUNCTION      :     If appropriate this function regulates the DCT
 *                      coefficients to match the stream size to the    
 *                      available bandwidth (within defined limits). 
 *
 *  SPECIAL NOTES :     None. 
 *
 *
 *  ERRORS        :     None.
 *
 ****************************************************************************/
void RegulateQ( CP_INSTANCE *cpi, INT32 UpdateScore ) 
{   
    double TargetUnitScoreBytes = (double)cpi->ThisFrameTargetBytes / (double)UpdateScore;
    double PredUnitScoreBytes;
    double LastBitError = 10000.0;       // Silly high number
    UINT32 QIndex = Q_TABLE_SIZE - 1;
    UINT32 i;

    // Search for the best Q for the target bitrate.
	for ( i = 0; i < Q_TABLE_SIZE; i++ )
	{
        PredUnitScoreBytes = GetEstimatedBpb( cpi, cpi->pb.QThreshTable[i] );
        if ( PredUnitScoreBytes > TargetUnitScoreBytes )
        {
            if ( (PredUnitScoreBytes - TargetUnitScoreBytes) <= LastBitError )
            {
                QIndex = i;
            }
            else
            {
                QIndex = i - 1;
            }
            break;
        }
        else
        {
            LastBitError = TargetUnitScoreBytes - PredUnitScoreBytes;
        }
    }

    // QIndex should now indicate the optimal Q.
    cpi->pb.ThisFrameQualityValue = cpi->pb.QThreshTable[QIndex];
    
    // Apply range restrictions for key frames.
    if ( GetFrameType(&cpi->pb) == BASE_FRAME )
    {
        if ( cpi->pb.ThisFrameQualityValue > cpi->pb.QThreshTable[20] )
            cpi->pb.ThisFrameQualityValue = cpi->pb.QThreshTable[20];
        else if ( cpi->pb.ThisFrameQualityValue < cpi->pb.QThreshTable[50] )
            cpi->pb.ThisFrameQualityValue = cpi->pb.QThreshTable[50];
    }
    
    // Limit the Q value to the maximum available value
    if (cpi->pb.ThisFrameQualityValue > cpi->pb.QThreshTable[cpi->Configuration.ActiveMaxQ])
        //if (cpi->pb.ThisFrameQualityValue > QThreshTable[cpi->Configuration.ActiveMaxQ])
    {
        cpi->pb.ThisFrameQualityValue = (UINT32)cpi->pb.QThreshTable[cpi->Configuration.ActiveMaxQ];
    }  
    
    if(cpi->FixedQ)
    {
        if ( GetFrameType(&cpi->pb) == BASE_FRAME )
        {
            cpi->pb.ThisFrameQualityValue = cpi->pb.QThreshTable[43];
            cpi->pb.ThisFrameQualityValue = cpi->FixedQ;
        }
        else
        {
            cpi->pb.ThisFrameQualityValue = cpi->FixedQ;
        }
    }
    
    // If the quantiser value has changed then re-initialise it
    if ( cpi->pb.ThisFrameQualityValue != cpi->pb.LastFrameQualityValue )
    {                    
        /* Initialise quality tables. */
        UpdateQC( cpi, cpi->pb.ThisFrameQualityValue );
        cpi->pb.LastFrameQualityValue = cpi->pb.ThisFrameQualityValue;
    }

}
WORD CIEC104DeliverQuery::GetSendFrameNo()
{
    Q_ASSERT(GetFrameType() == IEC104_I_TYPE );
    WORD nSendFrameNo = GetUInt(2,2);
    return nSendFrameNo >>1;
}
WORD CIEC104DeliverQuery::GetReceiveFrameNo()
{
    Q_ASSERT(GetFrameType() != IEC104_U_TYPE );
    WORD nReceiveFrameNo = GetUInt(4,2);
    return nReceiveFrameNo>>1;
}
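For context, the >>1 above discards the low control bit of each 16-bit field: in the IEC 104 control field the send and receive sequence numbers occupy bits 1..15 while bit 0 marks the frame format. A minimal sketch of that packing (helper names are illustrative; WORD as used in the code above):

// Sketch only: pack/unpack a 15-bit sequence number the way the getters above read it.
WORD PackIFormatSeqSketch(WORD seq)      { return (WORD)(seq << 1); }      // bit 0 stays 0 for I-format
WORD UnpackSeqFieldSketch(WORD rawField) { return (WORD)(rawField >> 1); } // mirrors GetSendFrameNo()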
Example 15
/****************************************************************************
 * 
 *  ROUTINE       :     DecodeMVectors
 *
 *  INPUTS        :     None. 
 *                      
 *  OUTPUTS       :     None.
 *
 *  RETURNS       :     None.
 *
 *  FUNCTION      :     Decodes the motion vectors for this frame.
 *
 *  SPECIAL NOTES :     None. 
 *
 *
 *  ERRORS        :     None.
 *
 ****************************************************************************/
void DecodeMVectors ( PB_INSTANCE *pbi, UINT32 SBRows, UINT32 SBCols, UINT32 HExtra, UINT32 VExtra )
{
	INT32	FragIndex;			// Fragment number
	UINT32	MB;		    		// Macro-Block, Block indices
	UINT32	SBrow;				// Super-Block row number
	UINT32	SBcol;				// Super-Block row number
	UINT32	SB=0;			    // Super-Block index
    UINT32  CodingMethod;       // Temp Storage for coding mode.

    MOTION_VECTOR MVect[6];     // temp storage for motion vector
    MOTION_VECTOR TmpMVect;
    MOTION_VECTOR LastInterMV;      // storage for last used Inter frame MB motion vector
    MOTION_VECTOR PriorLastInterMV; // storage for previous last used Inter frame MB motion vector
	INT32 (*ExtractMVectorComponent)(PB_INSTANCE *pbi);

    UINT32  UVRow;
    UINT32  UVColumn;
    UINT32  UVFragOffset;

    UINT32  MBListIndex = 0;

    UINT32  MVCode = 0;         // Temporary storage while decoding the MV

    // Should not be decoding motion vectors if in INTRA only mode.
    if ( GetFrameType(pbi) == BASE_FRAME )
    {
        return;
    }

    // set the default motion vector to 0,0
    MVect[0].x = 0;
    MVect[0].y = 0;
    LastInterMV.x = 0;
    LastInterMV.y = 0;
    PriorLastInterMV.x = 0;
    PriorLastInterMV.y = 0;

    // Read the entropy method used and set up the appropriate decode option
    if ( bitread1( &pbi->br) == 0 )
        ExtractMVectorComponent = ExtractMVectorComponentA;
    else
        ExtractMVectorComponent = ExtractMVectorComponentB;

    // Unravel the quad-tree
	for ( SBrow=0; SBrow<SBRows; SBrow++ )
	{
		for ( SBcol=0; SBcol<SBCols; SBcol++ )
		{
			for ( MB=0; MB<4; MB++ )
			{
				// There may be MB's lying out of frame
				// which must be ignored. For these MB's
				// the top left block will have a negative Fragment Index.
				if ( QuadMapToMBTopLeft(pbi->BlockMap, SB,MB) >= 0 )
				{
					// Is the Macro-Block further coded:
					if ( pbi->MBCodedFlags[MBListIndex++] )
					{
                        // Unpack the block level modes and motion vectors
                        FragIndex = QuadMapToMBTopLeft( pbi->BlockMap, SB, MB );

                        // Clear the motion vector before we start.
                        MVect[0].x = 0;
                        MVect[0].y = 0;
                            
                        // Unpack the mode (and motion vectors if necessary).
   		                CodingMethod = pbi->FragCodingMethod[FragIndex];

                        // Read the motion vector or vectors if present. 
                        if ( (CodingMethod == CODE_INTER_PLUS_MV) || 
                             (CodingMethod == CODE_GOLDEN_MV) )
                        {
                            MVect[0].x = ExtractMVectorComponent(pbi); 
                            MVect[1].x = MVect[0].x;
                            MVect[2].x = MVect[0].x;
                            MVect[3].x = MVect[0].x;
                            MVect[4].x = MVect[0].x;
                            MVect[5].x = MVect[0].x;
                            MVect[0].y = ExtractMVectorComponent(pbi); 
                            MVect[1].y = MVect[0].y;
                            MVect[2].y = MVect[0].y;
                            MVect[3].y = MVect[0].y;
                            MVect[4].y = MVect[0].y;
                            MVect[5].y = MVect[0].y;
                        }
                        else if ( CodingMethod == CODE_INTER_FOURMV )
                        {
                            // Extract the 4 Y MVs
                            MVect[0].x = ExtractMVectorComponent(pbi);
                            MVect[0].y = ExtractMVectorComponent(pbi);

                            MVect[1].x = ExtractMVectorComponent(pbi);
                            MVect[1].y = ExtractMVectorComponent(pbi);
                            
                            MVect[2].x = ExtractMVectorComponent(pbi);
                            MVect[2].y = ExtractMVectorComponent(pbi);
                            
                            MVect[3].x = ExtractMVectorComponent(pbi);
                            MVect[3].y = ExtractMVectorComponent(pbi);

                            // Calculate the U and V plane MVs as the average of the Y plane MVs.
                            // First .x component
                            MVect[4].x = MVect[0].x + MVect[1].x + MVect[2].x + MVect[3].x;
                            if ( MVect[4].x >= 0 )
                                MVect[4].x = (MVect[4].x + 2) / 4;
                            else
                                MVect[4].x = (MVect[4].x - 2) / 4;
                            MVect[5].x = MVect[4].x;
                            // Then .y component
                            MVect[4].y = MVect[0].y + MVect[1].y + MVect[2].y + MVect[3].y;
                            if ( MVect[4].y >= 0 )
                                MVect[4].y = (MVect[4].y + 2) / 4;
                            else
                                MVect[4].y = (MVect[4].y - 2) / 4;
                            MVect[5].y = MVect[4].y;
                        }

                        // Keep track of last and prior last inter motion vectors.
                        if ( CodingMethod == CODE_INTER_PLUS_MV )
                        {
                            PriorLastInterMV.x = LastInterMV.x;
                            PriorLastInterMV.y = LastInterMV.y;
                            LastInterMV.x = MVect[0].x;
                            LastInterMV.y = MVect[0].y;
                        }
                        else if ( CodingMethod == CODE_INTER_LAST_MV )
                        {
                            // Use the last coded Inter motion vector.
                            MVect[0].x = LastInterMV.x;
                            MVect[1].x = MVect[0].x;
                            MVect[2].x = MVect[0].x;
                            MVect[3].x = MVect[0].x;
                            MVect[4].x = MVect[0].x;
                            MVect[5].x = MVect[0].x;
                            MVect[0].y = LastInterMV.y;
                            MVect[1].y = MVect[0].y;
                            MVect[2].y = MVect[0].y;
                            MVect[3].y = MVect[0].y;
                            MVect[4].y = MVect[0].y;
                            MVect[5].y = MVect[0].y;
                        }
                        else if ( CodingMethod == CODE_INTER_PRIOR_LAST )
                        {
                            // Use the last coded Inter motion vector.
                            MVect[0].x = PriorLastInterMV.x;
                            MVect[1].x = MVect[0].x;
                            MVect[2].x = MVect[0].x;
                            MVect[3].x = MVect[0].x;
                            MVect[4].x = MVect[0].x;
                            MVect[5].x = MVect[0].x;
                            MVect[0].y = PriorLastInterMV.y;
                            MVect[1].y = MVect[0].y;
                            MVect[2].y = MVect[0].y;
                            MVect[3].y = MVect[0].y;
                            MVect[4].y = MVect[0].y;
                            MVect[5].y = MVect[0].y;

                            // Swap the prior and last MV cases over
                            TmpMVect.x = PriorLastInterMV.x;
                            TmpMVect.y = PriorLastInterMV.y;
                            PriorLastInterMV.x = LastInterMV.x;
                            PriorLastInterMV.y = LastInterMV.y;
                            LastInterMV.x = TmpMVect.x;
                            LastInterMV.y = TmpMVect.y;
                        }
                        else if ( CodingMethod == CODE_INTER_FOURMV )
                        {
                            // Update last MV and prior last mv
                            PriorLastInterMV.x = LastInterMV.x;
                            PriorLastInterMV.y = LastInterMV.y;
                            LastInterMV.x = MVect[3].x;
                            LastInterMV.y = MVect[3].y;
                        }

                        // Note the coding mode and vector for each block in the current macro block.
                        pbi->FragMVect[FragIndex].x = MVect[0].x;
                        pbi->FragMVect[FragIndex].y = MVect[0].y;
                
                        pbi->FragMVect[FragIndex + 1].x = MVect[1].x;
                        pbi->FragMVect[FragIndex + 1].y = MVect[1].y;

                        pbi->FragMVect[FragIndex + pbi->HFragments].x = MVect[2].x;
                        pbi->FragMVect[FragIndex + pbi->HFragments].y = MVect[2].y;

                        pbi->FragMVect[FragIndex + pbi->HFragments + 1].x = MVect[3].x;
                        pbi->FragMVect[FragIndex + pbi->HFragments + 1].y = MVect[3].y;

                        // Matching fragments in the U and V planes
                        UVRow = (FragIndex / (pbi->HFragments * 2));
                        UVColumn = (FragIndex % pbi->HFragments) / 2;
                        UVFragOffset = (UVRow * (pbi->HFragments / 2)) + UVColumn;

                        pbi->FragMVect[pbi->YPlaneFragments + UVFragOffset].x = MVect[4].x;
                        pbi->FragMVect[pbi->YPlaneFragments + UVFragOffset].y = MVect[4].y;

                        pbi->FragMVect[pbi->YPlaneFragments + pbi->UVPlaneFragments + UVFragOffset].x = MVect[5].x;
                        pbi->FragMVect[pbi->YPlaneFragments + pbi->UVPlaneFragments + UVFragOffset].y = MVect[5].y;
					}
				}
			}

			// Next Super-Block
			SB++;
		}
	}
}
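A brief sketch of the Y-to-chroma fragment mapping used above (and again in DecodeModes later in this listing), assuming 4:2:0 sampling where each U/V fragment covers a 2x2 group of Y fragments:

/* Sketch: map the top-left Y fragment index of a macro block to the offset of the
   matching fragment in the U (or V) plane, as computed above. */
UINT32 YFragToUVOffset( UINT32 FragIndex, UINT32 HFragments )
{
    UINT32 UVRow    = FragIndex / (HFragments * 2);   /* two Y rows per UV row    */
    UINT32 UVColumn = (FragIndex % HFragments) / 2;   /* two Y columns per UV col */
    return (UVRow * (HFragments / 2)) + UVColumn;     /* UV plane is half as wide */
}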
Example 16
/****************************************************************************
 * 
 *  ROUTINE       :     QuadDecodeDisplayFragments2
 *
 *  INPUTS        :     PB instance
 *
 *  OUTPUTS       :     Mapping table BlockMap[SuperBlock][MacroBlock][Block]
 *
 *  RETURNS       :     None.
 *
 *  FUNCTION      :     Creates mapping table between (SuperBlock, MacroBlock, Block)
 *						triplet and corresponding Fragment Index.
 *
 *  SPECIAL NOTES :     None. 
 *
 *
 *  ERRORS        :     None.
 *
 ****************************************************************************/
void QuadDecodeDisplayFragments2 ( PB_INSTANCE *pbi )
{
	UINT32	SB, MB, B;	// Super-block, Macro-block and Block values
    BOOL    DataToDecode = FALSE; 

	INT32   dfIndex;
	UINT32  MBIndex = 0;

	UINT8 * MBFully;
	UINT8 * MBCoded;

	UINT8   BPattern;

	// Set up local pointers
	MBFully = pbi->MBFullyFlags;
	MBCoded = pbi->MBCodedFlags;

	// Reset various data structures common to key frames and inter frames.
	pbi->CodedBlockIndex = 0;
	memset( pbi->display_fragments, 0, pbi->UnitFragments );

    // For "Key frames" mark all blocks as coded and return.
    // Else initialise the ArrayPtr array to 0 (all blocks uncoded by default) 
	if ( GetFrameType(pbi) == BASE_FRAME )
    {
        memset( MBFully, 1, pbi->MacroBlocks );
    }
    else
    {
        memset( MBFully, 0, pbi->MacroBlocks );
	    memset( MBCoded, 0, pbi->MacroBlocks );

		// Un-pack MBlist1
		GetNextMbInit(pbi);
		for( MB = 0; MB < pbi->MacroBlocks; MB++)
		{
			MBFully[MB] = GetNextMbBit (pbi);
		}

		// If there are any macro blocks that are not fully coded then there is more to do.
		for( MB = 0; MB < pbi->MacroBlocks; MB++ )
		{
			if ( !MBFully[MB] )
			{
				DataToDecode = TRUE;
				break;
			}
		}

		// Build the coded MB list (Fully or partially coded)
		if ( DataToDecode )
		{
			// Un-pack MBlist2
			GetNextMbInit(pbi);
			for( MB = 0; MB < pbi->MacroBlocks; MB++)
			{
				if ( !MBFully[MB] )
					MBCoded[MB] = GetNextMbBit( pbi );
			}

		}
	}

	// Complete the block and macro block coded data structures.
	// Initialise the block pattern reader.
	ReadBlockPatternInit(pbi);

	// Follow the quad tree structure
	for ( SB=0; SB < pbi->SuperBlocks; SB++ )
	{
		for ( MB=0; MB<4; MB++ )
		{
            // If MB is in the frame
			if ( QuadMapToMBTopLeft(pbi->BlockMap, SB,MB) >= 0 )
            {
				// Set or read the block pattern for the macro block
				if ( MBFully[MBIndex] )
				{
					MBCoded[MBIndex] = 1;
					BPattern = 0x0F;
				}
				else if ( MBCoded[MBIndex] )
				{
					BPattern = ReadNextBlockPattern(pbi);
				}
				else
				{
					BPattern = 0;
				}

				for ( B=0; B<4; B++ )
			    {
                    // If block is valid (in frame)...
                    dfIndex = QuadMapToIndex2( pbi->BlockMap, SB, MB, B );
				    if ( dfIndex >= 0 )
				    {
						// Work out if this block is coded or not.
						pbi->display_fragments[dfIndex] = (BPattern & BlockPatternMask[B]) ? 1 : 0;
				    }
			    }

				for ( B=0; B<4; B++ )
			    {
                    // If block is valid (in frame)...
                    dfIndex = QuadMapToIndex1( pbi->BlockMap, SB, MB, B );
				    if ( dfIndex >= 0 )
				    {
						if ( pbi->display_fragments[dfIndex] )
						{
							pbi->CodedBlockList[pbi->CodedBlockIndex] = dfIndex;
							pbi->CodedBlockIndex++;
						}
				    }
			    }

				// Increment the MB index.
				MBIndex++;
            }
		}
    }
}
Example 17
int CTextEncoder::ThreadMain (void)
{
  CMsg* pMsg;
  bool stop = false;
  
  Init();

  m_textDstType = GetFrameType();
  double temp = Profile()->GetFloatValue(CFG_TEXT_REPEAT_TIME_SECS);
  temp *= 1000.0;
  uint32_t wait_time = (uint32_t)temp;

  while (stop == false) {
    int rc = SDL_SemWaitTimeout(m_myMsgQueueSemaphore, wait_time);
    if (rc == 0) {
      pMsg = m_myMsgQueue.get_message();
      if (pMsg != NULL) {
	switch (pMsg->get_value()) {
	case MSG_NODE_STOP_THREAD:
	  debug_message("text %s stop received", 
			Profile()->GetName());
	  DoStopText();
	  stop = true;
	  break;
	case MSG_NODE_START:
	  // DoStartTransmit();  Anything ?
	  break;
	case MSG_NODE_STOP:
	  DoStopText();
	  break;
	case MSG_SINK_FRAME: {
	  uint32_t dontcare;
	  CMediaFrame *mf = (CMediaFrame*)pMsg->get_message(dontcare);
	  if (m_stop_thread == false)
	    ProcessTextFrame(mf);
	  if (mf->RemoveReference()) {
	    delete mf;
	  }
	  break;
	}
	}
      
	delete pMsg;
      } 
    } else if (rc == SDL_MUTEX_TIMEDOUT) {
      SendFrame(GetTimestamp());
    }
  }
  while ((pMsg = m_myMsgQueue.get_message()) != NULL) {
    if (pMsg->get_value() == MSG_SINK_FRAME) {
      uint32_t dontcare;
      CMediaFrame *mf = (CMediaFrame*)pMsg->get_message(dontcare);
      if (mf->RemoveReference()) {
	delete mf;
      }
    }
    delete pMsg;
  }
  debug_message("text encoder %s exit", Profile()->GetName());
  return 0;
}
Example 18
void QuadDecodeDisplayFragments ( PB_INSTANCE *pbi ){
  ogg_uint32_t  SB, MB, B;
  int    DataToDecode;

  ogg_int32_t   dfIndex;
  ogg_uint32_t  MBIndex = 0;

  /* Reset various data structures common to key frames and inter frames. */
  pbi->CodedBlockIndex = 0;
  memset ( pbi->display_fragments, 0, pbi->UnitFragments );

  /* For "Key frames" mark all blocks as coded and return. */
  /* Else initialise the ArrayPtr array to 0 (all blocks uncoded by default) */
  if ( GetFrameType(pbi) == BASE_FRAME ) {
    memset( pbi->SBFullyFlags, 1, pbi->SuperBlocks );
    memset( pbi->SBCodedFlags, 1, pbi->SuperBlocks );
        memset( pbi->MBCodedFlags, 0, pbi->MacroBlocks );
  }else{
    memset( pbi->SBFullyFlags, 0, pbi->SuperBlocks );
    memset( pbi->MBCodedFlags, 0, pbi->MacroBlocks );

    /* Un-pack the list of partially coded Super-Blocks */
    GetNextSbInit(pbi);
    for( SB = 0; SB < pbi->SuperBlocks; SB++){
      pbi->SBCodedFlags[SB] = GetNextSbBit (pbi);
    }

    /* Scan through the list of super blocks.  Unless all are marked
       as partially coded we have more to do. */
    DataToDecode = 0;
    for ( SB=0; SB<pbi->SuperBlocks; SB++ ) {
      if ( !pbi->SBCodedFlags[SB] ) {
        DataToDecode = 1;
        break;
      }
    }

    /* Are there further block map bits to decode ? */
    if ( DataToDecode ) {
      /* Un-pack the Super-Block fully coded flags. */
      GetNextSbInit(pbi);
      for( SB = 0; SB < pbi->SuperBlocks; SB++) {
        /* Skip blocks already marked as partially coded */
        while( (SB < pbi->SuperBlocks) && pbi->SBCodedFlags[SB] )
          SB++;

        if ( SB < pbi->SuperBlocks ) {
          pbi->SBFullyFlags[SB] = GetNextSbBit (pbi);

          if ( pbi->SBFullyFlags[SB] )       /* If SB is fully coded. */
            pbi->SBCodedFlags[SB] = 1;       /* Mark the SB as coded */
        }
      }
    }

    /* Scan through the list of coded super blocks.  If at least one
       is marked as partially coded then we have a block list to
       decode. */
    for ( SB=0; SB<pbi->SuperBlocks; SB++ ) {
      if ( pbi->SBCodedFlags[SB] && !pbi->SBFullyFlags[SB] ) {
        /* Initialise the block list decoder. */
        GetNextBInit(pbi);
        break;
      }
    }
  }

  /* Decode the block data from the bit stream. */
  for ( SB=0; SB<pbi->SuperBlocks; SB++ ){
    for ( MB=0; MB<4; MB++ ){
      /* If MB is in the frame */
      if ( QuadMapToMBTopLeft(pbi->BlockMap, SB,MB) >= 0 ){
        /* Only read block level data if SB was fully or partially coded */
        if ( pbi->SBCodedFlags[SB] ) {
          for ( B=0; B<4; B++ ){
            /* If block is valid (in frame)... */
            dfIndex = QuadMapToIndex1( pbi->BlockMap, SB, MB, B );
            if ( dfIndex >= 0 ){
              if ( pbi->SBFullyFlags[SB] )
                pbi->display_fragments[dfIndex] = 1;
              else
                pbi->display_fragments[dfIndex] = GetNextBBit(pbi);

              /* Create linear list of coded block indices */
              if ( pbi->display_fragments[dfIndex] ) {
                pbi->MBCodedFlags[MBIndex] = 1;
                pbi->CodedBlockList[pbi->CodedBlockIndex] = dfIndex;
                pbi->CodedBlockIndex++;
              }
            }
          }
        }
        MBIndex++;

      }
    }
  }
}
Example 19
/****************************************************************************
 * 
 *  ROUTINE       :     QuadDecodeDisplayFragments
 *
 *  INPUTS        :     PB instance
 *
 *  OUTPUTS       :     Mapping table BlockMap[SuperBlock][MacroBlock][Block]
 *
 *  RETURNS       :     None.
 *
 *  FUNCTION      :     Creates mapping table between (SuperBlock, MacroBlock, Block)
 *						triplet and corresponding Fragment Index.
 *
 *  SPECIAL NOTES :     None. 
 *
 *
 *  ERRORS        :     None.
 *
 ****************************************************************************/
void QuadDecodeDisplayFragments ( PB_INSTANCE *pbi )
{
	UINT32	SB, MB, B;		// Super-block, Macro-block and Block values
    BOOL    DataToDecode; 

    INT32   dfIndex;
	UINT32  MBIndex = 0;

	// Reset various data structures common to key frames and inter frames.
	pbi->CodedBlockIndex = 0;
	memset ( pbi->display_fragments, 0, pbi->UnitFragments );

    // For "Key frames" mark all blocks as coded and return.
    // Else initialise the ArrayPtr array to 0 (all blocks uncoded by default) 
	if ( GetFrameType(pbi) == BASE_FRAME )
    {
        memset( pbi->SBFullyFlags, 1, pbi->SuperBlocks );
        memset( pbi->SBCodedFlags, 1, pbi->SuperBlocks );
        memset( pbi->MBCodedFlags, 0, pbi->MacroBlocks );
    }
    else
    {
        memset( pbi->SBFullyFlags, 0, pbi->SuperBlocks );
        memset( pbi->MBCodedFlags, 0, pbi->MacroBlocks );

		// Un-pack the list of partially coded Super-Blocks
		GetNextSbInit(pbi);
		for( SB = 0; SB < pbi->SuperBlocks; SB++)
		{
			pbi->SBCodedFlags[SB] = GetNextSbBit (pbi);
		}

		// Scan through the list of super blocks. 
		// Unless all are marked as partially coded we have more to do.
		DataToDecode = FALSE; 
		for ( SB=0; SB<pbi->SuperBlocks; SB++ )
		{
			if ( !pbi->SBCodedFlags[SB] )
			{
				DataToDecode = TRUE;
				break;
			}
		}

		// Are there further block map bits to decode ?
		if ( DataToDecode )
		{
			// Un-pack the Super-Block fully coded flags.
			GetNextSbInit(pbi);
			for( SB = 0; SB < pbi->SuperBlocks; SB++)
			{
				// Skip blocks already marked as partially coded
				while( (SB < pbi->SuperBlocks) && pbi->SBCodedFlags[SB] )  
					SB++;

				if ( SB < pbi->SuperBlocks )
				{
					pbi->SBFullyFlags[SB] = GetNextSbBit (pbi);

					if ( pbi->SBFullyFlags[SB] )         // If SB is fully coded.
						pbi->SBCodedFlags[SB] = 1;       // Mark the SB as coded
				}
			}
		}

		// Scan through the list of coded super blocks.
		// If at least one is marked as partially coded then we have a block list to decode.
		for ( SB=0; SB<pbi->SuperBlocks; SB++ )
		{
			if ( pbi->SBCodedFlags[SB] && !pbi->SBFullyFlags[SB] )
			{
				// Initialise the block list decoder.
				GetNextBInit(pbi);
				break;
			}
		}
	}

	// Decode the block data from the bit stream.
	for ( SB=0; SB<pbi->SuperBlocks; SB++ )
	{
		for ( MB=0; MB<4; MB++ )
		{
            // If MB is in the frame
			if ( QuadMapToMBTopLeft(pbi->BlockMap, SB,MB) >= 0 )
            {
				// Only read block level data if SB was fully or partially coded
				if ( pbi->SBCodedFlags[SB] )
				{
					for ( B=0; B<4; B++ )
					{
						// If block is valid (in frame)...
						dfIndex = QuadMapToIndex1( pbi->BlockMap, SB, MB, B );
						if ( dfIndex >= 0 )
						{
							if ( pbi->SBFullyFlags[SB] )
								pbi->display_fragments[dfIndex] = 1;
							else
								pbi->display_fragments[dfIndex] = GetNextBBit(pbi);

							// Create linear list of coded block indices
							if ( pbi->display_fragments[dfIndex] )
							{
								pbi->MBCodedFlags[MBIndex] = 1;
								pbi->CodedBlockList[pbi->CodedBlockIndex] = dfIndex;
								pbi->CodedBlockIndex++;
							}
						}
					}
				}
				MBIndex++;

            }
		}
    }
}
Example 20
/*!
 * \brief  Get the send sequence number
 * \param  None
 * \return The send sequence number carried in the message
 * \author zzy
 * \date   2015/5/25
 */
WORD CIEC104Response::GetSendFrameNo()
{
    Q_ASSERT(GetFrameType() == IEC104_I_TYPE );
    WORD nSendFrameNo = GetUInt(2,2);
    return nSendFrameNo >>1;
}
Example 21
static s32 pre_recv_entry(union recv_frame *precvframe, u8 *pphy_status)
{	
	s32 ret=_SUCCESS;
#ifdef CONFIG_CONCURRENT_MODE	
	u8 *secondary_myid, *paddr1;
	union recv_frame	*precvframe_if2 = NULL;
	_adapter *primary_padapter = precvframe->u.hdr.adapter;
	_adapter *secondary_padapter = primary_padapter->pbuddy_adapter;
	struct recv_priv *precvpriv = &primary_padapter->recvpriv;
	_queue *pfree_recv_queue = &precvpriv->free_recv_queue;
	u8	*pbuf = precvframe->u.hdr.rx_data;
	
	if(!secondary_padapter)
		return ret;
	
	paddr1 = GetAddr1Ptr(pbuf);

	if(IS_MCAST(paddr1) == _FALSE)//unicast packets
	{
		secondary_myid = adapter_mac_addr(secondary_padapter);

		if(_rtw_memcmp(paddr1, secondary_myid, ETH_ALEN))
		{			
			//change to secondary interface
			precvframe->u.hdr.adapter = secondary_padapter;
		}	

		//ret = recv_entry(precvframe);

	}
	else // Handle BC/MC Packets	
	{
		
		u8 clone = _TRUE;
#if 0		
		u8 type, subtype, *paddr2, *paddr3;
	
		type =  GetFrameType(pbuf);
		subtype = GetFrameSubType(pbuf); //bit(7)~bit(2)
		
		switch (type)
		{
			case WIFI_MGT_TYPE: //Handle BC/MC mgnt Packets
				if(subtype == WIFI_BEACON)
				{
					paddr3 = GetAddr3Ptr(precvframe->u.hdr.rx_data);
				
					if (check_fwstate(&secondary_padapter->mlmepriv, _FW_LINKED) &&
						_rtw_memcmp(paddr3, get_bssid(&secondary_padapter->mlmepriv), ETH_ALEN))
					{
						//change to secondary interface
						precvframe->u.hdr.adapter = secondary_padapter;
						clone = _FALSE;
					}

					if(check_fwstate(&primary_padapter->mlmepriv, _FW_LINKED) &&
						_rtw_memcmp(paddr3, get_bssid(&primary_padapter->mlmepriv), ETH_ALEN))
					{
						if(clone==_FALSE)
						{
							clone = _TRUE;									
						}	
						else
						{
							clone = _FALSE;
						}

						precvframe->u.hdr.adapter = primary_padapter;	
					}

					if(check_fwstate(&primary_padapter->mlmepriv, _FW_UNDER_SURVEY|_FW_UNDER_LINKING) ||
						check_fwstate(&secondary_padapter->mlmepriv, _FW_UNDER_SURVEY|_FW_UNDER_LINKING))
					{
						clone = _TRUE;
						precvframe->u.hdr.adapter = primary_padapter;	
					}
				
				}
				else if(subtype == WIFI_PROBEREQ)
				{
					//probe req frame is only for interface2
					//change to secondary interface
					precvframe->u.hdr.adapter = secondary_padapter;
					clone = _FALSE;
				}			
				break;
			case WIFI_CTRL_TYPE: // Handle BC/MC ctrl Packets
			
				break;
			case WIFI_DATA_TYPE: //Handle BC/MC data Packets
					//Notes: AP MODE never rx BC/MC data packets
			
				paddr2 = GetAddr2Ptr(precvframe->u.hdr.rx_data);

				if(_rtw_memcmp(paddr2, get_bssid(&secondary_padapter->mlmepriv), ETH_ALEN))
				{
					//change to secondary interface
					precvframe->u.hdr.adapter = secondary_padapter;
					clone = _FALSE;
				}

				break;
			default:
			
				break;			
		}
#endif

		if(_TRUE == clone)
		{
			//clone/copy to if2		
			struct rx_pkt_attrib *pattrib = NULL;
		
			precvframe_if2 = rtw_alloc_recvframe(pfree_recv_queue);
			if(precvframe_if2)
			{
				precvframe_if2->u.hdr.adapter = secondary_padapter;
		
				_rtw_init_listhead(&precvframe_if2->u.hdr.list);	
				precvframe_if2->u.hdr.precvbuf = NULL;	//can't access the precvbuf for new arch.
				precvframe_if2->u.hdr.len=0;

				_rtw_memcpy(&precvframe_if2->u.hdr.attrib, &precvframe->u.hdr.attrib, sizeof(struct rx_pkt_attrib));

				pattrib = &precvframe_if2->u.hdr.attrib;

				if(rtw_os_alloc_recvframe(secondary_padapter, precvframe_if2, pbuf, NULL) == _SUCCESS)				
				{						
					recvframe_put(precvframe_if2, pattrib->pkt_len);
					//recvframe_pull(precvframe_if2, drvinfo_sz + RXDESC_SIZE);

					if (pattrib->physt && pphy_status)
						rx_query_phy_status(precvframe_if2, pphy_status);
	
					ret = rtw_recv_entry(precvframe_if2);				
				}	
				else
				{
					rtw_free_recvframe(precvframe_if2, pfree_recv_queue);
					DBG_8192C("%s()-%d: alloc_skb() failed!\n", __FUNCTION__, __LINE__);	
				}

			}
			
		}
		
	}
	//if (precvframe->u.hdr.attrib.physt)
	//	rx_query_phy_status(precvframe, pphy_status);

	//ret = rtw_recv_entry(precvframe);

#endif

	return ret;

}
Example 22
/****************************************************************************
 * 
 *  ROUTINE       :     DecodeModes
 *
 *  INPUTS        :     None. 
 *                      
 *  OUTPUTS       :     Reconstructed frame.
 *
 *  RETURNS       :     None.
 *
 *  FUNCTION      :     Decodes the coding mode list for this frame.
 *
 *  SPECIAL NOTES :     None. 
 *
 *
 *  ERRORS        :     None.
 *
 ****************************************************************************/
void DecodeModes( PB_INSTANCE *pbi, UINT32 SBRows, UINT32 SBCols, UINT32 HExtra, UINT32 VExtra )
{
	INT32	FragIndex;			// Fragment number
	UINT32	MB;	    			// Macro-Block, Block indices
	UINT32	SBrow;				// Super-Block row number
	UINT32	SBcol;				// Super-Block row number
	UINT32	SB=0;			    // Super-Block index
    CODING_MODE  CodingMethod;  // Temp Storage for coding mode.

    UINT32  UVRow;
    UINT32  UVColumn;
    UINT32  UVFragOffset;
    
    UINT32  CodingScheme;       // Coding scheme used to code modes.

    UINT32  MBListIndex = 0;

    UINT32  i;

    // If the frame is an intra frame then all blocks have mode intra.
    if ( GetFrameType(pbi) == BASE_FRAME )
    {
        for ( i = 0; i < pbi->UnitFragments; i++ )
        {
            pbi->FragCodingMethod[i] = CODE_INTRA;
        }
    }
    else
    {
        UINT32  ModeEntry;                            // Mode bits read  

        // Read the coding method
        CodingScheme = bitread( &pbi->br,  MODE_METHOD_BITS );  

        // If the coding method is method 0 then we have to read in a custom coding scheme
        if ( CodingScheme == 0 )
        {
            // Read the coding scheme.
            for ( i = 0; i < MAX_MODES; i++ )
            {
                ModeAlphabet[0][ bitread( &pbi->br,  MODE_BITS) ] = (CODING_MODE)i;
            }
        }

	    // Unravel the quad-tree
	    for ( SBrow=0; SBrow<SBRows; SBrow++ )
	    {
		    for ( SBcol=0; SBcol<SBCols; SBcol++ )
		    {
				for ( MB=0; MB<4; MB++ )
				{
					// There may be MB's lying out of frame
					// which must be ignored. For these MB's
					// top left block will have a negative Fragment Index.
    				if ( QuadMapToMBTopLeft(pbi->BlockMap, SB,MB) >= 0 )
					{
						// Is the Macro-Block coded:
						if ( pbi->MBCodedFlags[MBListIndex++] )
						{
                            // Unpack the block level modes and motion vectors
                            FragIndex = QuadMapToMBTopLeft( pbi->BlockMap, SB, MB );
                        
                            // Unpack the mode.
                            if ( CodingScheme == (MODE_METHODS-1) )
                            {
                                // This is the fall back coding scheme.
                                // Simply MODE_BITS bits per mode entry.
                                CodingMethod = (CODING_MODE)bitread( &pbi->br,  MODE_BITS );
                            }
                            else
                            {
                                ModeEntry = FrArrayUnpackMode(pbi);
                                CodingMethod =  ModeAlphabet[CodingScheme][ ModeEntry ];
                            }

                            // Note the coding mode for each block in macro block.
                            pbi->FragCodingMethod[FragIndex] = CodingMethod;
                            pbi->FragCodingMethod[FragIndex + 1] = CodingMethod;
                            pbi->FragCodingMethod[FragIndex + pbi->HFragments] = CodingMethod;
                            pbi->FragCodingMethod[FragIndex + pbi->HFragments + 1] = CodingMethod;

                            // Matching fragments in the U and V planes
                            UVRow = (FragIndex / (pbi->HFragments * 2));
                            UVColumn = (FragIndex % pbi->HFragments) / 2;
                            UVFragOffset = (UVRow * (pbi->HFragments / 2)) + UVColumn;

                            pbi->FragCodingMethod[pbi->YPlaneFragments + UVFragOffset] = CodingMethod;
                            pbi->FragCodingMethod[pbi->YPlaneFragments + pbi->UVPlaneFragments + UVFragOffset] = CodingMethod;

						}
					}
			    }

			    // Next Super-Block
			    SB++;
		    }
	    }
    }
}