// Submits all parameter buffers for one picture, renders it, and updates the
// short-term reference list. Returns ENCODE_FAIL on any intermediate failure.
Encode_Status VaapiEncoderH264::encodePicture(const PicturePtr& picture)
{
    Encode_Status ret = ENCODE_FAIL;
    // Reconstructed surface the hardware writes the decoded result to; it
    // later becomes the reference surface for subsequent pictures.
    SurfacePtr reconstruct = createSurface();
    if (!reconstruct)
        return ret;
    {
        // All parameter-buffer construction happens under m_paramLock so a
        // concurrent parameter update cannot interleave with it.
        AutoLock locker(m_paramLock);
#ifdef __BUILD_GET_MV__
        // Optional motion-vector output buffer (build-time feature).
        uint32_t size;
        void *buffer = NULL;
        getMVBufferSize(&size);
        if (!picture->editMVBuffer(buffer, &size))
            return ret;
#endif
        if (!ensureSequence (picture))
            return ret;
        if (!ensureMiscParams (picture.get()))
            return ret;
        if (!ensurePicture(picture, reconstruct))
            return ret;
        if (!ensureSlices (picture))
            return ret;
    }
    // The actual encode is issued outside the parameter lock.
    if (!picture->encode())
        return ret;
    if (!referenceListUpdate (picture, reconstruct))
        return ret;
    return ENCODE_SUCCESS;
}
// Consumer-thread loop: waits for a target-position change, optionally seeks,
// drains queued frames until the presentation time catches up with the
// target, then fulfils the pending promise with the resulting frame.
void FFMPEGMovie::_consume()
{
    while( !_stopConsuming )
    {
        {
            std::unique_lock<std::mutex> lock( _targetMutex );
            // Loop around wait() to tolerate spurious wakeups; only a real
            // target change (flag set) releases us.
            while( !_targetChangedSent )
                _targetChanged.wait( lock );
            _targetChangedSent = false;

            if( _stopConsuming )
                return;

            if( _seekTo( _targetTimestamp ))
                _ptsPosition = UNDEFINED_PTS; // Reset position after seeking
        }
        PicturePtr frame;
        // Dequeue (and discard) frames while we are still more than one
        // frame duration away from the target, stopping at end-of-file.
        while( !_stopConsuming && _getPtsDelta() >= getFrameDuration() &&
               !isAtEOF( ))
        {
            frame = _queue.dequeue();
            _ptsPosition = _videoStream->getPositionInSec( frame->getTimestamp( ));
        }
        if( !frame )
        {
            // No frame was dequeued (e.g. already at EOF) — propagate the
            // failure through the promise so the waiting side unblocks.
            auto exception = std::runtime_error( "Frame unavailable error" );
            _promise.set_exception( std::make_exception_ptr( exception ));
            return;
        }
        // NOTE(review): _promise appears to be re-armed elsewhere between
        // iterations — set_value on an already-satisfied promise would
        // throw; confirm against the class that owns _promise.
        _promise.set_value( frame );
    }
}
// Loads an IPicture from an embedded resource and wraps it in a Picture.
// Returns an empty pointer on any failure; COM objects acquired on the way
// are released whenever ownership is not transferred to the result.
PicturePtr Picture::FromResource(HWND hwnd, HINSTANCE instance, const char *type, const char *name)
{
    PicturePtr result;

    IStream *stream = StreamFromResource(instance, type, name);
    if (!stream)
        return result;

    IPicture *pic = PictureFromStream(stream);
    stream->Release();
    if (!pic)
        return result;

    HDC hdc = GetDC(hwnd);
    if (hdc) {
        const int width = MakeWidth(hdc, pic);
        const int height = MakeHeight(hdc, pic);
        ReleaseDC(hwnd, hdc);
        // Only a picture with a usable size is kept.
        if (width && height)
            result.reset(new Picture(pic, width, height));
    }
    // The Picture (if created) now owns pic; otherwise drop our reference.
    if (!result)
        pic->Release();
    return result;
}
// Acquires one free surface from the base decoder's m_surfacePool and wraps
// it in a new decode picture. Returns an empty pointer when no surface is
// available.
PicturePtr VaapiDecoderBase::createPicture(int64_t timeStamp /* , VaapiPictureStructure structure = VAAPI_PICTURE_STRUCTURE_FRAME */)
{
    SurfacePtr surface = createSurface();
    if (!surface) {
        ERROR("create surface failed");
        return PicturePtr();
    }
    return PicturePtr(new VaapiDecPicture(m_context, surface, timeStamp));
}
// Replaces the reference flags on |picture| and, when |otherField| is set,
// mirrors the change onto its complementary field if that field is still
// alive (weak pointer still lockable).
static void setH264PictureReference(VaapiDecPictureH264* picture, uint32_t referenceFlags, bool otherField)
{
    VAAPI_PICTURE_FLAG_UNSET(picture, VAAPI_PICTURE_FLAGS_REFERENCE);
    VAAPI_PICTURE_FLAG_SET(picture, referenceFlags);

    if (!otherField)
        return;
    PicturePtr strong = picture->m_otherField.lock();
    if (!strong)
        return;

    VaapiDecPictureH264* other = strong.get();
    VAAPI_PICTURE_FLAG_UNSET(other, VAAPI_PICTURE_FLAGS_REFERENCE);
    VAAPI_PICTURE_FLAG_SET(other, referenceFlags);
}
// Sliding-window reference marking (H.264 8.2.5.3): when the DPB already
// holds the maximum number of reference frames, evict the short-term
// reference with the smallest FrameNumWrap.
bool VaapiDPBManager::execRefPicMarkingSlidingWindow(const PicturePtr& picture)
{
    H264PPS *const pps = picture->m_pps;
    H264SPS *const sps = pps->sequence;
    VaapiDecPictureH264 *refPicture;
    uint32_t i, m, maxNumRefFrames;

    // Run only once per frame (on the first field).
    if (!VAAPI_PICTURE_IS_FIRST_FIELD(picture))
        return true;

    maxNumRefFrames = sps->num_ref_frames;
    if (maxNumRefFrames == 0)
        maxNumRefFrames = 1;
    // For field pictures each reference frame contributes two fields.
    if (!VAAPI_PICTURE_IS_FRAME(picture))
        maxNumRefFrames <<= 1;

    // Window not full yet — nothing to evict.
    if (DPBLayer->shortRefCount + DPBLayer->longRefCount < maxNumRefFrames)
        return true;
    // Full window but no short-term reference to evict: marking fails.
    if (DPBLayer->shortRefCount < 1)
        return false;

    // Select the short-term reference with the lowest m_frameNumWrap.
    for (m = 0, i = 1; i < DPBLayer->shortRefCount; i++) {
        VaapiDecPictureH264 *const pic = DPBLayer->shortRef[i];
        if (pic->m_frameNumWrap < DPBLayer->shortRef[m]->m_frameNumWrap)
            m = i;
    }

    refPicture = DPBLayer->shortRef[m];
    // Unmark both fields as references (second arg 0 = no reference flags).
    setH264PictureReference(refPicture, 0, true);
    ARRAY_REMOVE_INDEX(DPBLayer->shortRef, m);

    /* Both fields need to be marked as "unused for reference", so remove the
       other field from the shortRef[] list as well */
    if (!VAAPI_PICTURE_IS_FRAME(picture)) {
        PicturePtr strong = refPicture->m_otherField.lock();
        VaapiDecPictureH264* other = strong.get();
        if (other) {
            for (i = 0; i < DPBLayer->shortRefCount; i++) {
                if (DPBLayer->shortRef[i] == other) {
                    ARRAY_REMOVE_INDEX(DPBLayer->shortRef, i);
                    break;
                }
            }
        }
    }
    return true;
}
bool VaapiEncoderH264::ensureSequence(const PicturePtr& picture) { VAEncSequenceParameterBufferH264* seqParam; if (!picture->editSequence(seqParam) || !fill(seqParam)) { ERROR("failed to create sequence parameter buffer (SPS)"); return false; } if (picture->isIdr() && !ensureSequenceHeader(picture, seqParam)) { ERROR ("failed to create packed sequence header buffer"); return false; } return true; }
// Selects the active HEVC scaling lists and uploads them into the VA IQ
// matrix buffer. Priority: PPS-level data, then SPS-level data; when the
// SPS enables scaling lists but carries no explicit data the PPS structure
// is used (presumably holding derived lists — TODO confirm against the
// parser). If scaling lists are disabled entirely the driver default is
// used and no buffer is sent.
bool VaapiDecoderH265::fillIqMatrix(const PicturePtr& picture, const H265SliceHdr* const slice)
{
    H265PPS* pps = slice->pps;
    H265SPS* sps = pps->sps;
    H265ScalingList* scalingList;
    if (pps->scaling_list_data_present_flag) {
        scalingList = &pps->scaling_list;
    } else if (sps->scaling_list_enabled_flag) {
        if (sps->scaling_list_data_present_flag) {
            scalingList = &sps->scaling_list;
        } else {
            scalingList = &pps->scaling_list;
        }
    } else {
        //default scaling list
        return true;
    }
    VAIQMatrixBufferHEVC* iqMatrix;
    if (!picture->editIqMatrix(iqMatrix))
        return false;
    // Upload every block size plus the DC coefficients for 16x16/32x32.
    fillScalingList4x4(iqMatrix, scalingList);
    fillScalingList8x8(iqMatrix, scalingList);
    fillScalingList16x16(iqMatrix, scalingList);
    fillScalingList32x32(iqMatrix, scalingList);
    fillScalingListDc16x16(iqMatrix, scalingList);
    fillScalingListDc32x32(iqMatrix, scalingList);
    return true;
}
bool VaapiDPBManager::execRefPicMarking(const PicturePtr& pic, bool * hasMMCO5) { *hasMMCO5 = false; if (!VAAPI_PICTURE_IS_REFERENCE(pic)) { return true; } if (!VAAPI_H264_PICTURE_IS_IDR(pic)) { H264SliceHdr* header = pic->getLastSliceHeader(); H264DecRefPicMarking *const decRefPicMarking = &header->dec_ref_pic_marking; if (decRefPicMarking->adaptive_ref_pic_marking_mode_flag) { if (!execRefPicMarkingAdaptive(pic, decRefPicMarking, hasMMCO5)) return false; } else { if (!execRefPicMarkingSlidingWindow(pic)) return false; } } return true; }
// Hands the picture's surface, tagged with its timestamp, to the surface
// pool for output.
YamiStatus VaapiDecoderBase::outputPicture(const PicturePtr& picture)
{
    //TODO: reorder poc
    const bool queued =
        m_surfacePool->output(picture->getSurface(), picture->m_timeStamp);
    return queued ? YAMI_SUCCESS : YAMI_FAIL;
}
/* fill quant parameter buffers functions*/ bool VaapiDecoderVP8::ensureQuantMatrix(const PicturePtr& pic) { Vp8Segmentation *seg = &m_parser.segmentation; VAIQMatrixBufferVP8 *iqMatrix; int32_t baseQI, i; if (!pic->editIqMatrix(iqMatrix)) return false; for (i = 0; i < 4; i++) { int32_t tempIndex; const int32_t MAX_QI_INDEX = 127; if (seg->segmentation_enabled) { baseQI = seg->quantizer_update_value[i]; if (!seg->segment_feature_mode) // 0 means delta update baseQI += m_frameHdr.quant_indices.y_ac_qi;; } else baseQI = m_frameHdr.quant_indices.y_ac_qi; // the first component is y_ac_qi tempIndex = baseQI < 0 ? 0 : (baseQI > MAX_QI_INDEX ? MAX_QI_INDEX : baseQI); iqMatrix->quantization_index[i][0] = tempIndex; tempIndex = baseQI + m_frameHdr.quant_indices.y_dc_delta; tempIndex = tempIndex < 0 ? 0 : (tempIndex > MAX_QI_INDEX ? MAX_QI_INDEX : tempIndex); iqMatrix->quantization_index[i][1] = tempIndex; tempIndex = baseQI + m_frameHdr.quant_indices.y2_dc_delta; tempIndex = tempIndex < 0 ? 0 : (tempIndex > MAX_QI_INDEX ? MAX_QI_INDEX : tempIndex); iqMatrix->quantization_index[i][2] = tempIndex; tempIndex = baseQI + m_frameHdr.quant_indices.y2_ac_delta; tempIndex = tempIndex < 0 ? 0 : (tempIndex > MAX_QI_INDEX ? MAX_QI_INDEX : tempIndex); iqMatrix->quantization_index[i][3] = tempIndex; tempIndex = baseQI + m_frameHdr.quant_indices.uv_dc_delta; tempIndex = tempIndex < 0 ? 0 : (tempIndex > MAX_QI_INDEX ? MAX_QI_INDEX : tempIndex); iqMatrix->quantization_index[i][4] = tempIndex; tempIndex = baseQI + m_frameHdr.quant_indices.uv_ac_delta; tempIndex = tempIndex < 0 ? 0 : (tempIndex > MAX_QI_INDEX ? MAX_QI_INDEX : tempIndex); iqMatrix->quantization_index[i][5] = tempIndex; } return true; }
/* Adds slice headers to picture */
// Partitions the picture's macroblocks into m_numSlices slices and creates
// one VA slice parameter buffer per slice. Slice sizes differ by at most
// one MB: the division remainder is spread over the leading slices.
bool VaapiEncoderH264::addSliceHeaders (const PicturePtr& picture) const
{
    VAEncSliceParameterBufferH264 *sliceParam;
    uint32_t sliceOfMbs, sliceModMbs, curSliceMbs;
    uint32_t mbSize;
    uint32_t lastMbIndex;

    assert (picture);

    if (picture->m_type != VAAPI_PICTURE_TYPE_I) {
        /* have one reference frame at least */
        assert(m_refList0.size() > 0);
    }

    mbSize = m_mbWidth * m_mbHeight;
    assert (m_numSlices && m_numSlices < mbSize);
    sliceOfMbs = mbSize / m_numSlices;
    sliceModMbs = mbSize % m_numSlices;
    lastMbIndex = 0;
    for (uint32_t i = 0; i < m_numSlices; ++i) {
        curSliceMbs = sliceOfMbs;
        // Distribute the remainder: one extra MB per leading slice.
        if (sliceModMbs) {
            ++curSliceMbs;
            --sliceModMbs;
        }
        if (!picture->newSlice(sliceParam))
            return false;

        sliceParam->macroblock_address = lastMbIndex;
        sliceParam->num_macroblocks = curSliceMbs;
        sliceParam->macroblock_info = VA_INVALID_ID;
        sliceParam->slice_type = h264_get_slice_type (picture->m_type);
        assert (sliceParam->slice_type != -1);
        sliceParam->idr_pic_id = m_idrNum;
        sliceParam->pic_order_cnt_lsb = picture->m_poc;

        // Per-slice active reference counts override the PPS defaults.
        sliceParam->num_ref_idx_active_override_flag = 1;
        if (picture->m_type != VAAPI_PICTURE_TYPE_I && m_refList0.size() > 0)
            sliceParam->num_ref_idx_l0_active_minus1 = m_refList0.size() - 1;
        if (picture->m_type == VAAPI_PICTURE_TYPE_B && m_refList1.size() > 0)
            sliceParam->num_ref_idx_l1_active_minus1 = m_refList1.size() - 1;
        fillReferenceList(sliceParam);

        // QP delta relative to pic_init_qp, capped at +4.
        sliceParam->slice_qp_delta = initQP() - minQP();
        if (sliceParam->slice_qp_delta > 4)
            sliceParam->slice_qp_delta = 4;
        sliceParam->slice_alpha_c0_offset_div2 = 2;
        sliceParam->slice_beta_offset_div2 = 2;

        /* set calculation for next slice */
        lastMbIndex += curSliceMbs;
    }
    assert (lastMbIndex == mbSize);
    return true;
}
// Fills the VA slice parameter buffer for one HEVC slice segment.
// Dependent slice segments inherit most header fields from the previous
// independent slice (m_prevSlice), so |slice| is re-pointed after the
// dependent flag itself has been copied.
bool VaapiDecoderH265::fillSlice(const PicturePtr& picture, const H265SliceHdr* const theSlice, const H265NalUnit* const nalu)
{
    const H265SliceHdr* slice = theSlice;
    VASliceParameterBufferHEVC* sliceParam;
    if (!picture->newSlice(sliceParam, nalu->data + nalu->offset, nalu->size))
        return false;
    sliceParam->slice_data_byte_offset =
        getSliceDataByteOffset(slice, nalu->header_bytes);
    sliceParam->slice_segment_address = slice->segment_address;

#define FILL_LONG(f) sliceParam->LongSliceFlags.fields.f = slice->f
#define FILL_LONG_SLICE(f) sliceParam->LongSliceFlags.fields.slice_##f = slice->f
    //how to fill this
    //LastSliceOfPic
    FILL_LONG(dependent_slice_segment_flag);

    //follow spec
    // From here on a dependent segment reads its fields from the previous
    // independent slice header.
    if (slice->dependent_slice_segment_flag) {
        slice = m_prevSlice.get();
    }
    if (!fillReferenceIndex(sliceParam, slice))
        return false;

    FILL_LONG_SLICE(type);
    sliceParam->LongSliceFlags.fields.color_plane_id = slice->colour_plane_id;
    FILL_LONG_SLICE(sao_luma_flag);
    FILL_LONG_SLICE(sao_chroma_flag);
    FILL_LONG(mvd_l1_zero_flag);
    FILL_LONG(cabac_init_flag);
    FILL_LONG_SLICE(temporal_mvp_enabled_flag);
    // Deblocking: a slice-level override wins, otherwise take the PPS value.
    if (slice->deblocking_filter_override_flag)
        FILL_LONG_SLICE(deblocking_filter_disabled_flag);
    else
        sliceParam->LongSliceFlags.fields.slice_deblocking_filter_disabled_flag =
            slice->pps->deblocking_filter_disabled_flag;
    FILL_LONG(collocated_from_l0_flag);
    FILL_LONG_SLICE(loop_filter_across_slices_enabled_flag);

#define FILL(f) sliceParam->f = slice->f
#define FILL_SLICE(f) sliceParam->slice_##f = slice->f
    FILL(collocated_ref_idx);
    /* following fields fill in fillReference
       num_ref_idx_l0_active_minus1
       num_ref_idx_l1_active_minus1*/
    FILL_SLICE(qp_delta);
    FILL_SLICE(cb_qp_offset);
    FILL_SLICE(cr_qp_offset);
    FILL_SLICE(beta_offset_div2);
    FILL_SLICE(tc_offset_div2);
    if (!fillPredWeightTable(sliceParam, slice))
        return false;
    FILL(five_minus_max_num_merge_cand);
    return true;
}
// Pops the next encoded picture and copies its coded data into |outBuffer|.
// When the output queue is empty, the status from checkEmpty is returned
// unchanged.
Encode_Status VaapiEncoderBase::getOutput(VideoEncOutputBuffer * outBuffer, bool withWait)
{
    FUNC_ENTER();

    bool isEmpty;
    Encode_Status status = checkEmpty(outBuffer, &isEmpty);
    if (isEmpty)
        return status;

    PicturePtr picture;
    getPicture(picture);
    status = picture->getOutput(outBuffer);
    if (status != ENCODE_SUCCESS)
        return status;

    checkCodecData(outBuffer);
    return ENCODE_SUCCESS;
}
void VaapiDPBManager::removeShortReference(const PicturePtr& picture) { VaapiDecPictureH264 *refPicture; uint32_t i; uint32_t frameNum = picture->m_frameNum; PicturePtr strong = picture->m_otherField.lock(); VaapiDecPictureH264* other = strong.get(); for (i = 0; i < DPBLayer->shortRefCount; ++i) { if (DPBLayer->shortRef[i]->m_frameNum == frameNum) { refPicture = DPBLayer->shortRef[i]; if (refPicture != other) { setH264PictureReference(refPicture, 0, false); ARRAY_REMOVE_INDEX(DPBLayer->shortRef, i); } return; } } }
bool VaapiEncoderH264::ensurePicture (const PicturePtr& picture, const SurfacePtr& surface) { VAEncPictureParameterBufferH264 *picParam; if (!pictureReferenceListSet(picture)) { ERROR ("reference list reorder failed"); return false; } if (!picture->editPicture(picParam) || !fill(picParam, picture, surface)) { ERROR("failed to create picture parameter buffer (PPS)"); return false; } if (picture->isIdr() && !ensurePictureHeader (picture, picParam)) { ERROR ("set picture packed header failed"); return false; } return true; }
/* fill quant parameter buffers functions*/ bool VaapiDecoderVP8::ensureProbabilityTable(const PicturePtr& pic) { VAProbabilityDataBufferVP8 *probTable = NULL; // XXX, create/render VAProbabilityDataBufferVP8 in base class if (!pic->editProbTable(probTable)) return false; memcpy (probTable->dct_coeff_probs, m_frameHdr.token_probs.prob, sizeof (m_frameHdr.token_probs.prob)); return true; }
// Creates the decode picture for the current slice, derives its
// NoRaslOutputFlag / pic-output flag, and computes its POC.
PicturePtr VaapiDecoderH265::createPicture(const H265SliceHdr* const slice, const H265NalUnit* const nalu)
{
    PicturePtr picture;
    SurfacePtr surface = createSurface();
    if (!surface)
        return picture;
    picture.reset(new VaapiDecPictureH265(m_context, surface, m_currentPTS));

    // NoRaslOutputFlag: true for IDR/BLA, for the first picture of a new
    // stream, or immediately after an end-of-sequence NAL.
    const bool noRaslOutput =
        isIdr(nalu) || isBla(nalu) || m_newStream || m_endOfSequence;
    picture->m_noRaslOutputFlag = noRaslOutput;
    m_noRaslOutputFlag = noRaslOutput;
    if (isIrap(nalu))
        m_associatedIrapNoRaslOutputFlag = noRaslOutput;

    // RASL pictures tied to such an IRAP are never output; otherwise the
    // slice header decides.
    if (isRasl(nalu) && m_associatedIrapNoRaslOutputFlag)
        picture->m_picOutputFlag = false;
    else
        picture->m_picOutputFlag = slice->pic_output_flag;

    getPoc(picture, slice, nalu);
    return picture;
}
// Decodes one VP9 frame: ensures the decode context matches the header,
// allocates and sizes the target picture, short-circuits
// show_existing_frame repeats, then submits picture/slice parameters and
// triggers decoding. Reference slots are refreshed after a successful
// decode.
Decode_Status VaapiDecoderVP9::decode(const Vp9FrameHdr* hdr, const uint8_t* data, uint32_t size, uint64_t timeStamp)
{
    Decode_Status ret;
    ret = ensureContext(hdr);
    if (ret != DECODE_SUCCESS)
        return ret;

    PicturePtr picture = createPicture(timeStamp);
    if (!picture)
        return DECODE_MEMORY_FAIL;

    if (!picture->getSurface()->resize(hdr->width, hdr->height)) {
        ERROR("resize to %dx%d failed", hdr->width, hdr->height);
        return DECODE_MEMORY_FAIL;
    }

    if (hdr->show_existing_frame) {
        // The frame only re-displays a previously decoded reference: no
        // decoding, just re-output the referenced surface.
        SurfacePtr& surface = m_reference[hdr->frame_to_show];
        if (!surface) {
            // Invalid slot is tolerated (treated as success without output).
            ERROR("frame to show is invalid, idx = %d", hdr->frame_to_show);
            return DECODE_SUCCESS;
        }
        picture->setSurface(surface);
        return outputPicture(picture);
    }

    if (!ensurePicture(picture, hdr))
        return DECODE_FAIL;
    if (!ensureSlice(picture, data, size))
        return DECODE_FAIL;
    ret = picture->decode();
    if (ret != DECODE_SUCCESS)
        return ret;
    updateReference(picture, hdr);
    // Only visible frames are queued for output.
    if (hdr->show_frame)
        return outputPicture(picture);
    return DECODE_SUCCESS;
}
// calls immediately after reorder, // it makes sure I frame are encoded immediately, so P frames can be pushed to the front of the m_reorderFrameList. // it also makes sure input thread and output thread runs in parallel Encode_Status VaapiEncoderH264::doEncode(const SurfacePtr& surface, uint64_t timeStamp, bool forceKeyFrame) { FUNC_ENTER(); Encode_Status ret; ret = reorder(surface, timeStamp, forceKeyFrame); if (ret != ENCODE_SUCCESS) return ret; while (m_reorderState == VAAPI_ENC_REORD_DUMP_FRAMES) { if (!m_maxCodedbufSize) ensureCodedBufferSize(); CodedBufferPtr codedBuffer = VaapiCodedBuffer::create(m_context, m_maxCodedbufSize); if (!codedBuffer) return ENCODE_NO_MEMORY; PicturePtr picture = m_reorderFrameList.front(); m_reorderFrameList.pop_front(); picture->m_codedBuffer = codedBuffer; if (m_reorderFrameList.empty()) m_reorderState = VAAPI_ENC_REORD_WAIT_FRAMES; ret = encodePicture(picture); if (ret != ENCODE_SUCCESS) { return ret; } codedBuffer->setFlag(ENCODE_BUFFERFLAG_ENDOFFRAME); INFO("picture->m_type: 0x%x\n", picture->m_type); if (picture->isIdr()) { codedBuffer->setFlag(ENCODE_BUFFERFLAG_SYNCFRAME); } if (!output(picture)) return ENCODE_INVALID_PARAMS; } INFO(); return ENCODE_SUCCESS; }
// Marks a "non-existing" dummy frame (frame_num gap handling): initializes
// its reference lists and pic nums, runs sliding-window marking, removes
// any stale entry with the same frame_num, then inserts the dummy into the
// short-term reference list.
bool VaapiDPBManager::execDummyPictureMarking(const PicturePtr& dummyPic,
    const SliceHeaderPtr& sliceHdr, int32_t frameNum)
{
    initPictureRefLists(dummyPic);
    initPictureRefsPicNum(dummyPic, sliceHdr, frameNum);
    if (!execRefPicMarkingSlidingWindow(dummyPic))
        return false;

    removeShortReference(dummyPic);
    /* add to short reference */
    DPBLayer->shortRef[DPBLayer->shortRefCount++] = dummyPic.get();
    return true;
}
// Like getOutput(outBuffer, withWait) but additionally copies the picture's
// motion-vector data into |MVBuffer|. Returns the checkEmpty status when no
// output is pending, the picture's own status on output failure, and
// ENCODE_FAIL when the MV buffer cannot be mapped.
Encode_Status VaapiEncoderBase::getOutput(VideoEncOutputBuffer * outBuffer, VideoEncMVBuffer * MVBuffer, bool withWait)
{
    void *data = NULL;
    uint32_t mappedSize;
    bool isEmpty;
    PicturePtr picture;
    Encode_Status ret;

    FUNC_ENTER();
    ret = checkEmpty(outBuffer, &isEmpty);
    if (isEmpty)
        return ret;

    getPicture(picture);
    ret = picture->getOutput(outBuffer);
    if (ret != ENCODE_SUCCESS)
        return ret;

    // BUGFIX: this previously returned `ret`, which equals ENCODE_SUCCESS
    // at this point, so an editMVBuffer failure was silently reported as
    // success with no MV data copied. Report the failure explicitly.
    if (!picture->editMVBuffer(data, &mappedSize))
        return ENCODE_FAIL;
    if (data)
        memcpy(MVBuffer->data, data, mappedSize);
    checkCodecData(outBuffer);
    return ENCODE_SUCCESS;
}
// Fills the VA VP9 picture parameter buffer from the parsed frame header
// and parser state: frame geometry, reference slots, per-frame flag bits,
// loop-filter settings, tiling, and the segmentation probability tables.
bool VaapiDecoderVP9::ensurePicture(const PicturePtr& picture, const Vp9FrameHdr* hdr)
{
    VADecPictureParameterBufferVP9* param;
    if (!picture->editPicture(param))
        return false;

    param->frame_width = hdr->width;
    param->frame_height = hdr->height;
    if (!fillReference(param, hdr))
        return false;

    // Mirror the frame-header bit fields one-to-one into pic_fields.
#define FILL_PIC_FIELD(field) param->pic_fields.bits.field = hdr->field;
    FILL_PIC_FIELD(subsampling_x)
    FILL_PIC_FIELD(subsampling_y)
    FILL_PIC_FIELD(frame_type)
    FILL_PIC_FIELD(show_frame)
    FILL_PIC_FIELD(error_resilient_mode)
    FILL_PIC_FIELD(intra_only)
    FILL_PIC_FIELD(allow_high_precision_mv)
    FILL_PIC_FIELD(mcomp_filter_type)
    FILL_PIC_FIELD(frame_parallel_decoding_mode)
    FILL_PIC_FIELD(reset_frame_context)
    FILL_PIC_FIELD(refresh_frame_context)
    FILL_PIC_FIELD(frame_context_idx)
#undef FILL_PIC_FIELD

    // Segmentation flags live in a sub-structure of the header, and
    // lossless comes from the parser, so they are copied explicitly.
    param->pic_fields.bits.segmentation_enabled = hdr->segmentation.enabled;
    param->pic_fields.bits.segmentation_temporal_update = hdr->segmentation.temporal_update;
    param->pic_fields.bits.segmentation_update_map = hdr->segmentation.update_map;
    param->pic_fields.bits.lossless_flag = m_parser->lossless_flag;

    param->filter_level = hdr->loopfilter.filter_level;
    param->sharpness_level = hdr->loopfilter.sharpness_level;

#define FILL_FIELD(field) param->field = hdr->field;
    FILL_FIELD(log2_tile_rows);
    FILL_FIELD(log2_tile_columns);
    FILL_FIELD(frame_header_length_in_bytes)
    FILL_FIELD(first_partition_size)
#undef FILL_FIELD

    // Probability tables are copied wholesale; the asserts guard against
    // struct layout drift between libva and the parser.
    assert(sizeof(param->mb_segment_tree_probs) == sizeof(m_parser->mb_segment_tree_probs));
    assert(sizeof(param->segment_pred_probs) == sizeof(m_parser->segment_pred_probs));
    memcpy(param->mb_segment_tree_probs, m_parser->mb_segment_tree_probs,
        sizeof(m_parser->mb_segment_tree_probs));
    memcpy(param->segment_pred_probs, m_parser->segment_pred_probs,
        sizeof(m_parser->segment_pred_probs));
    return true;
}
bool VaapiEncoderH264:: referenceListUpdate (const PicturePtr& picture, const SurfacePtr& surface) { if (VAAPI_PICTURE_TYPE_B == picture->m_type) { return true; } if (picture->isIdr()) { m_refList.clear(); } else if (m_refList.size() >= m_maxRefFrames) { m_refList.pop_back(); } ReferencePtr ref(new VaapiEncoderH264Ref(picture, surface)); m_refList.push_front(ref); // descending order for short-term reference list assert (m_refList.size() <= m_maxRefFrames); return true; }
// Refreshes the reference-surface slots selected by the frame header's
// refresh mask; a key frame refreshes every slot.
void VaapiDecoderVP9::updateReference(const PicturePtr& picture, const Vp9FrameHdr* hdr)
{
    const uint8_t refreshMask =
        (hdr->frame_type == VP9_KEY_FRAME) ? 0xff : hdr->refresh_frame_flags;

    for (int slot = 0; slot < VP9_REF_FRAMES; slot++) {
        if (refreshMask & (1 << slot))
            m_reference[slot] = picture->getSurface();
    }
}
/* Fills in VA picture parameter buffer */
// Fills the VA picture parameter buffer: current surface + POC, the
// short-term reference frame array (for non-I pictures), coded buffer,
// QP/ref-count defaults, and the per-picture flag bits.
bool VaapiEncoderH264::fill(VAEncPictureParameterBufferH264* picParam, const PicturePtr& picture,
    const SurfacePtr& surface) const
{
    uint32_t i = 0;

    /* reference list, */
    picParam->CurrPic.picture_id = surface->getID();
    picParam->CurrPic.TopFieldOrderCnt = picture->m_poc;

    // I pictures reference nothing; otherwise copy the short-term list.
    if (picture->m_type != VAAPI_PICTURE_TYPE_I) {
        for (i = 0; i < m_refList.size(); i++) {
            picParam->ReferenceFrames[i].picture_id = m_refList[i]->m_pic->getID();
            picParam->ReferenceFrames[i].TopFieldOrderCnt = m_refList[i]->m_poc;
            picParam->ReferenceFrames[i].flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
        }
    }
    // Invalidate the unused tail of the 16-entry reference array.
    for (; i < 16; ++i) {
        picParam->ReferenceFrames[i].picture_id = VA_INVALID_ID;
    }

    picParam->coded_buf = picture->m_codedBuffer->getID();
    picParam->pic_parameter_set_id = 0;
    picParam->seq_parameter_set_id = 0;
    picParam->last_picture = 0; /* means last encoding picture */
    picParam->frame_num = picture->m_frameNum;
    picParam->pic_init_qp = initQP();
    picParam->num_ref_idx_l0_active_minus1 =
        (m_maxRefList0Count ? (m_maxRefList0Count - 1) : 0);
    picParam->num_ref_idx_l1_active_minus1 =
        (m_maxRefList1Count ? (m_maxRefList1Count - 1) : 0);
    picParam->chroma_qp_index_offset = 0;
    picParam->second_chroma_qp_index_offset = 0;

    /* set picture fields */
    picParam->pic_fields.bits.idr_pic_flag = picture->isIdr();
    // Only non-B pictures are used as references by this encoder.
    picParam->pic_fields.bits.reference_pic_flag = (picture->m_type != VAAPI_PICTURE_TYPE_B);
    picParam->pic_fields.bits.entropy_coding_mode_flag = m_useCabac;
    picParam->pic_fields.bits.transform_8x8_mode_flag = m_useDct8x8;
    /* enable debloking */
    picParam->pic_fields.bits.deblocking_filter_control_present_flag = TRUE;

    return TRUE;
}
bool VaapiEncoderH264::ensureSequence(const PicturePtr& picture) { if (picture->m_type != VAAPI_PICTURE_TYPE_I) { return true; } VAEncSequenceParameterBufferH264* seqParam; if (!picture->editSequence(seqParam) || !fill(seqParam)) { ERROR("failed to create sequence parameter buffer (SPS)"); return false; } if (!ensureSequenceHeader(picture, seqParam)) { ERROR ("failed to create packed sequence header buffer"); return false; } return true; }
// Marks |picture| as no longer needing output and, once every picture in
// |frameStore| has been consumed (its m_outputNeeded count reaches zero),
// forwards the stored frame to the decoder's output path. A null
// |frameStore| forwards an empty frame.
bool VaapiDPBManager::outputDPB(const VaapiFrameStore::Ptr &frameStore, const PicturePtr& picture)
{
    picture->m_outputNeeded = false;
    PicturePtr frame;
    if (frameStore) {
        // Wait until all fields/pictures of this store have been output.
        if (--frameStore->m_outputNeeded > 0)
            return true;
        frame = frameStore->m_buffers[0];
    }
    DEBUG("DPB: output picture(Addr:%p, Poc:%d)", picture.get(), picture->m_POC);
    //FIXME:
#if 0
    if (!frameStore)
        picture->m_surfBuf->status &= ~SURFACE_DECODING;
#endif
    return m_decoder->outputPicture(frame) == DECODE_SUCCESS;
}
// Creates the VA VP9 slice parameter buffer over |data| and copies the
// per-segment parameters (filter levels, quant scales, reference flags)
// from the parser's segmentation state.
bool VaapiDecoderVP9::ensureSlice(const PicturePtr& picture, const void* data, int size)
{
#define FILL_FIELD(field) vaseg.field = seg.field;
    VASliceParameterBufferVP9* slice;
    if (!picture->newSlice(slice, data, size))
        return false;
    for (int i = 0; i < VP9_MAX_SEGMENTS; i++) {
        VASegmentParameterVP9& vaseg = slice->seg_param[i];
        Vp9Segmentation& seg = m_parser->segmentation[i];
        memcpy(vaseg.filter_level, seg.filter_level, sizeof(seg.filter_level));
        FILL_FIELD(luma_ac_quant_scale)
        FILL_FIELD(luma_dc_quant_scale)
        FILL_FIELD(chroma_ac_quant_scale)
        FILL_FIELD(chroma_dc_quant_scale)
        // Flag names differ between parser and libva, so copy explicitly.
        vaseg.segment_flags.fields.segment_reference_skipped = seg.reference_skip;
        vaseg.segment_flags.fields.segment_reference_enabled = seg.reference_frame_enabled;
        vaseg.segment_flags.fields.segment_reference = seg.reference_frame;
    }
#undef FILL_FIELD
    return true;
}
// Executes a single adaptive memory-management control operation
// (MMCO 1..6, H.264 8.2.5.4) against the DPB reference lists.
bool VaapiDPBManager::execRefPicMarkingAdaptive1(const PicturePtr& picture,
    H264RefPicMarking *refPicMarking, uint32_t MMCO)
{
    uint32_t picNumX, i;
    int32_t longTermFrameIdx;
    VaapiDecPictureH264 *refPicture;
    int32_t foundIdx = 0;

    switch (MMCO) {
    // MMCO 1: mark a short-term picture "unused for reference".
    case 1:
        {
            picNumX = getPicNumX(picture, refPicMarking);
            foundIdx = findShortRermReference(picNumX);
            if (foundIdx < 0)
                return false;

            i = (uint32_t) foundIdx;
            setH264PictureReference(DPBLayer->shortRef[i], 0,
                VAAPI_PICTURE_IS_FRAME(picture));
            ARRAY_REMOVE_INDEX(DPBLayer->shortRef, i);
        }
        break;
    // MMCO 2: mark a long-term picture "unused for reference".
    case 2:
        {
            foundIdx = findLongTermReference(refPicMarking->long_term_pic_num);
            if (foundIdx < 0)
                return false;

            i = (uint32_t) foundIdx;
            setH264PictureReference(DPBLayer->longRef[i], 0,
                VAAPI_PICTURE_IS_FRAME(picture));
            ARRAY_REMOVE_INDEX(DPBLayer->longRef, i);
        }
        break;
    // MMCO 3: convert a short-term picture into a long-term reference with
    // the given long_term_frame_idx (evicting any previous holder of that
    // index first).
    case 3:
        {
            for (i = 0; i < DPBLayer->longRefCount; i++) {
                if ((int32_t) DPBLayer->longRef[i]->m_longTermFrameIdx ==
                    refPicMarking->long_term_frame_idx)
                    break;
            }
            if (i != DPBLayer->longRefCount) {
                setH264PictureReference(DPBLayer->longRef[i], 0, true);
                ARRAY_REMOVE_INDEX(DPBLayer->longRef, i);
            }

            picNumX = getPicNumX(picture, refPicMarking);
            foundIdx = findShortRermReference(picNumX);
            if (foundIdx < 0)
                return false;

            i = (uint32_t) foundIdx;
            refPicture = DPBLayer->shortRef[i];
            ARRAY_REMOVE_INDEX(DPBLayer->shortRef, i);
            DPBLayer->longRef[DPBLayer->longRefCount++] = refPicture;

            refPicture->m_longTermFrameIdx = refPicMarking->long_term_frame_idx;
            setH264PictureReference(refPicture,
                VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
                VAAPI_PICTURE_IS_FRAME(picture));
        }
        break;
    // MMCO 4: set MaxLongTermFrameIdx; drop long-term refs above it.
    case 4:
        {
            longTermFrameIdx = refPicMarking->max_long_term_frame_idx_plus1 - 1;

            for (i = 0; i < DPBLayer->longRefCount; i++) {
                if (DPBLayer->longRef[i]->m_longTermFrameIdx <= longTermFrameIdx)
                    continue;
                setH264PictureReference(DPBLayer->longRef[i], 0, false);
                ARRAY_REMOVE_INDEX(DPBLayer->longRef, i);
                i--; // stay on this index after the in-place removal
            }
        }
        break;
    // MMCO 5: flush the DPB and reset frame_num / POC of the current picture.
    case 5:
        {
            flushDPB();
            /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
            picture->m_frameNum = 0;

            /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
            if (picture->m_structure != VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
                picture->m_fieldPoc[TOP_FIELD] -= picture->m_POC;
            if (picture->m_structure != VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
                picture->m_fieldPoc[BOTTOM_FIELD] -= picture->m_POC;
            picture->m_POC = 0;

            if (VAAPI_H264_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
                removeShortReference(picture);
        }
        break;
    // MMCO 6: mark the current picture itself as a long-term reference.
    case 6:
        {
            picture->m_longTermFrameIdx = refPicMarking->long_term_frame_idx;
            setH264PictureReference(picture.get(),
                VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE, false);
        }
        break;
    default:
        ERROR("unsupported MMCO type %d", MMCO);
        break;
    }
    return true;
}