Code example #1
void RCInitChromaQP(AVCEncObject *encvid)
{
    AVCCommonObj *video = encvid->common;
    AVCMacroblock *currMB = video->currMB;
    int q_bits;

    /* we have to do the same thing for AVC_CLIP3(0,51,video->QSy) */

    /* (x*43)>>8 equals x/6 for x in [0,51], so QP/6 and QP%6 are derived
       without an integer division */
    video->QPy_div_6 = (currMB->QPy * 43) >> 8;
    video->QPy_mod_6 = currMB->QPy - 6 * video->QPy_div_6;
    /* map luma QP plus the PPS chroma offset to chroma QP via the lookup table */
    currMB->QPc = video->QPc = mapQPi2QPc[AVC_CLIP3(0, 51, currMB->QPy + video->currPicParams->chroma_qp_index_offset)];
    video->QPc_div_6 = (video->QPc * 43) >> 8;
    video->QPc_mod_6 = video->QPc - 6 * video->QPc_div_6;

    /* pre-calculate the quantization rounding offsets to save computation:
       682 ~ 2048/3 (intra) and 342 ~ 2048/6 (inter), scaled by 2^(QP/6) */
    q_bits = 4 + video->QPy_div_6;
    if (video->slice_type == AVC_I_SLICE)
    {
        encvid->qp_const = 682 << q_bits;       // intra
    }
    else
    {
        encvid->qp_const = 342 << q_bits;       // inter
    }

    q_bits = 4 + video->QPc_div_6;
    if (video->slice_type == AVC_I_SLICE)
    {
        encvid->qp_const_c = 682 << q_bits;    // intra
    }
    else
    {
        encvid->qp_const_c = 342 << q_bits;    // inter
    }

    /* Lagrange multipliers for mode decision and motion search, derived from QP */
    encvid->lambda_mode = QP2QUANT[AVC_MAX(0, currMB->QPy-SHIFT_QP)];
    encvid->lambda_motion = LAMBDA_FACTOR(encvid->lambda_mode);

    return ;
}
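
A minimal standalone check (a hypothetical harness, not part of the codec) of the two arithmetic shortcuts used above: the multiply-shift replacement for division by 6, and the quantization rounding offsets 682 ~ 2048/3 (intra) and 342 ~ 2048/6 (inter).

#include <stdio.h>

int main(void)
{
    int qp;

    /* (qp*43)>>8 must equal qp/6 over the legal H.264 QP range 0..51 */
    for (qp = 0; qp <= 51; qp++)
    {
        int div6 = (qp * 43) >> 8;
        int mod6 = qp - 6 * div6;
        if (div6 != qp / 6 || mod6 != qp % 6)
        {
            printf("mismatch at qp=%d\n", qp);
            return 1;
        }
    }
    printf("division-by-6 shortcut holds for qp = 0..51\n");

    /* the rounding offsets are fixed-point approximations of 1/3 and 1/6 */
    printf("intra: 682/2048 = %.4f (target 1/3 = %.4f)\n", 682.0 / 2048, 1.0 / 3);
    printf("inter: 342/2048 = %.4f (target 1/6 = %.4f)\n", 342.0 / 2048, 1.0 / 6);
    return 0;
}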
Code example #2
File: slice.cpp Project: acassis/emlinux-ssd1935
/* update video->mb_skip_run */
AVCDec_Status DecodeMB(AVCDecObject *decvid)
{
    AVCDec_Status status;
    AVCCommonObj *video = decvid->common;
    AVCDecBitstream *stream = decvid->bitstream;
    AVCMacroblock *currMB = video->currMB;
    uint mb_type;
    int slice_type = video->slice_type;
    int temp;

    currMB->QPy = video->QPy;
    currMB->QPc = video->QPc;

    if (slice_type == AVC_P_SLICE)
    {
        if (video->mb_skip_run < 0)
        {
            ue_v(stream, (uint *)&(video->mb_skip_run));
        }

        if (video->mb_skip_run == 0)
        {
            /* this will not handle the case where the slice ends with a mb_skip_run == 0 and no following MB data  */
            ue_v(stream, &mb_type);
            if (mb_type > 30)
            {
                return AVCDEC_FAIL;
            }
            InterpretMBModeP(currMB, mb_type);
            video->mb_skip_run = -1;
        }
        else
        {
            /* see subclause 7.4.4 for more details on how
            mb_field_decoding_flag is derived in case of skipped MB */

            currMB->mb_intra = FALSE;

            /* a skipped MB is treated as a single 16x16 inter partition */
            currMB->mbMode = AVC_SKIP;
            currMB->MbPartWidth = currMB->MbPartHeight = 16;
            currMB->NumMbPart = 1;
            currMB->NumSubMbPart[0] = currMB->NumSubMbPart[1] =
                                          currMB->NumSubMbPart[2] = currMB->NumSubMbPart[3] = 1;
            currMB->SubMbPartWidth[0] = currMB->SubMbPartWidth[1] =
                                            currMB->SubMbPartWidth[2] = currMB->SubMbPartWidth[3] = currMB->MbPartWidth;
            currMB->SubMbPartHeight[0] = currMB->SubMbPartHeight[1] =
                                             currMB->SubMbPartHeight[2] = currMB->SubMbPartHeight[3] = currMB->MbPartHeight;

            oscl_memset(currMB->nz_coeff, 0, sizeof(uint8)*NUM_BLKS_IN_MB);

            currMB->CBP = 0;
            video->cbp4x4 = 0;
            /* for skipped MB, always look at the first entry in RefPicList */
            currMB->RefIdx[0] = currMB->RefIdx[1] =
                                    currMB->RefIdx[2] = currMB->RefIdx[3] = video->RefPicList0[0]->RefIdx;
            InterMBPrediction(video);
            video->mb_skip_run--;
            return AVCDEC_SUCCESS;
        }

    }
    else
    {
        /* I slice: decode mb_type and interpret the intra mode */
        ue_v(stream, &mb_type);
        if (mb_type > 25)
        {
            return AVCDEC_FAIL;
        }
        InterpretMBModeI(currMB, mb_type);
    }


    if (currMB->mbMode != AVC_I_PCM)
    {

        /* 8x8 sub-macroblock partitions carry their own prediction syntax */
        if (currMB->mbMode == AVC_P8 || currMB->mbMode == AVC_P8ref0)
        {
            status = sub_mb_pred(video, currMB, stream);
        }
        else
        {
            status = mb_pred(video, currMB, stream);
        }

        if (status != AVCDEC_SUCCESS)
        {
            return status;
        }

        if (currMB->mbMode != AVC_I16)
        {
            /* decode coded_block_pattern */
            status = DecodeCBP(currMB, stream);
            if (status != AVCDEC_SUCCESS)
            {
                return status;
            }
        }

        if (currMB->CBP > 0 || currMB->mbMode == AVC_I16)
        {
            /* decode mb_qp_delta; QP is updated modulo 52 using the
               divisionless approximation (x*79)>>12 ~ x/52 */
            se_v(stream, &temp);
            if (temp)
            {
                temp += (video->QPy + 52);
                currMB->QPy = video->QPy = temp - 52 * (temp * 79 >> 12);
                if (currMB->QPy > 51 || currMB->QPy < 0)
                {
                    /* out-of-range QP from a corrupt stream: clamp rather than abort */
                    video->QPy = AVC_CLIP3(0, 51, video->QPy);
//                  return AVCDEC_FAIL;
                }
                video->QPy_div_6 = (video->QPy * 43) >> 8;
                video->QPy_mod_6 = video->QPy - 6 * video->QPy_div_6;
                currMB->QPc = video->QPc = mapQPi2QPc[AVC_CLIP3(0, 51, video->QPy + video->currPicParams->chroma_qp_index_offset)];
                video->QPc_div_6 = (video->QPc * 43) >> 8;
                video->QPc_mod_6 = video->QPc - 6 * video->QPc_div_6;
            }
        }
        /* decode residue and inverse transform */
        status = residual(decvid, currMB);
        if (status != AVCDEC_SUCCESS)
        {
            return status;
        }
    }