// FFmpeg draw-slice callback for VDPAU pass-through decoding: no pixels are
// drawn here — a VDPAU "frame" carries a vdpau_render_state pointer in
// data[0] (zeroed linesizes/offsets), and this callback submits its queued
// bitstream buffers to the hardware decoder.
// offset/y/type/height belong to the FFmpeg callback signature and are only
// used for validation here.
void CVDPAU::FFDrawSlice(struct AVCodecContext *s, const AVFrame *src, int offset[4], int y, int type, int height)
{
  CDVDVideoCodecFFmpeg* ctx = (CDVDVideoCodecFFmpeg*)s->opaque;
  CVDPAU*               vdp = (CVDPAU*)ctx->GetHardware();

  // while we are waiting to recover we can't do anything
  // Hold the decoder section for the whole submission; the display section is
  // taken only inside the inner scope to sample the display state and is
  // released before rendering. Lock order here is m_DecoderSection before
  // m_DisplaySection — NOTE(review): confirm the rest of the file acquires
  // these two locks in the same order to avoid deadlock.
  CSharedLock lock(vdp->m_DecoderSection);
  {
    CSharedLock dLock(vdp->m_DisplaySection);
    if(vdp->m_DisplayState != VDPAU_OPEN)
      return;
  }

  // Sanity check: a VDPAU frame must not carry real plane data.
  if(src->linesize[0] || src->linesize[1] || src->linesize[2]
  || offset[0] || offset[1] || offset[2])
  {
    CLog::Log(LOGERROR, "CVDPAU::FFDrawSlice - invalid linesizes or offsets provided");
    return;
  }

  VdpStatus vdp_st;
  vdpau_render_state * render;

  // data[0] holds the render state allocated by the get_buffer callback.
  render = (vdpau_render_state*)src->data[0];
  if(!render)
  {
    CLog::Log(LOGERROR, "CVDPAU::FFDrawSlice - invalid context handle provided");
    return;
  }

  // ffmpeg vc-1 decoder does not flush, make sure the data buffer is still valid
  if (!vdp->IsSurfaceValid(render))
  {
    CLog::Log(LOGWARNING, "CVDPAU::FFDrawSlice - ignoring invalid buffer");
    return;
  }

  // H.264 may need a deeper reference-frame pool than other codecs.
  uint32_t max_refs = 0;
  if(s->pix_fmt == PIX_FMT_VDPAU_H264)
    max_refs = render->info.h264.num_ref_frames;

  // (Re)configure the VDPAU decoder if it does not exist yet, was never
  // configured, or the stream now requires more reference frames.
  if(vdp->decoder == VDP_INVALID_HANDLE
  || vdp->vdpauConfigured == false
  || vdp->max_references < max_refs)
  {
    if(!vdp->ConfigVDPAU(s, max_refs))
      return;
  }

  // Hand all queued bitstream buffers for this picture to the hardware.
  vdp_st = vdp->vdp_decoder_render(vdp->decoder,
                                   render->surface,
                                   (VdpPictureInfo const *)&(render->info),
                                   render->bitstream_buffers_used,
                                   render->bitstream_buffers);
  vdp->CheckStatus(vdp_st, __LINE__);
}
// NOTE(review): this appears to be a second, older definition of
// CVDPAU::FFDrawSlice (guarded by the simple `recover` flag instead of the
// decoder/display lock sections, and lacking the IsSurfaceValid check).
// Two definitions of the same member function in one translation unit will
// not compile — this looks like a merge/concatenation artifact; confirm that
// only one variant is actually built.
//
// FFmpeg draw-slice callback for VDPAU pass-through decoding: no pixels are
// drawn — the frame's data[0] holds a vdpau_render_state whose queued
// bitstream buffers are submitted to the hardware decoder.
void CVDPAU::FFDrawSlice(struct AVCodecContext *s, const AVFrame *src, int offset[4], int y, int type, int height)
{
  CDVDVideoCodecFFmpeg* ctx = (CDVDVideoCodecFFmpeg*)s->opaque;
  CVDPAU*               vdp = (CVDPAU*)ctx->GetHardware();

  /* while we are waiting to recover we can't do anything */
  if(vdp->recover)
    return;

  // Sanity check: a VDPAU frame must not carry real plane data.
  if(src->linesize[0] || src->linesize[1] || src->linesize[2]
  || offset[0] || offset[1] || offset[2])
  {
    CLog::Log(LOGERROR, "CVDPAU::FFDrawSlice - invalid linesizes or offsets provided");
    return;
  }

  VdpStatus vdp_st;
  vdpau_render_state * render;

  // data[0] holds the render state allocated by the get_buffer callback.
  render = (vdpau_render_state*)src->data[0];
  if(!render)
  {
    CLog::Log(LOGERROR, "CVDPAU::FFDrawSlice - invalid context handle provided");
    return;
  }

  // H.264 may need a deeper reference-frame pool than other codecs.
  uint32_t max_refs = 0;
  if(s->pix_fmt == PIX_FMT_VDPAU_H264)
    max_refs = render->info.h264.num_ref_frames;

  // (Re)configure the VDPAU decoder if it does not exist yet, was never
  // configured, or the stream now requires more reference frames.
  if(vdp->decoder == VDP_INVALID_HANDLE
  || vdp->vdpauConfigured == false
  || vdp->max_references < max_refs)
  {
    if(!vdp->ConfigVDPAU(s, max_refs))
      return;
  }

  // Hand all queued bitstream buffers for this picture to the hardware.
  vdp_st = vdp->vdp_decoder_render(vdp->decoder,
                                   render->surface,
                                   (VdpPictureInfo const *)&(render->info),
                                   render->bitstream_buffers_used,
                                   render->bitstream_buffers);
  vdp->CheckStatus(vdp_st, __LINE__);
}
int CVDPAU::FFGetBuffer(AVCodecContext *avctx, AVFrame *pic) { //CLog::Log(LOGNOTICE,"%s",__FUNCTION__); CDVDVideoCodecFFmpeg* ctx = (CDVDVideoCodecFFmpeg*)avctx->opaque; CVDPAU* vdp = (CVDPAU*)ctx->GetHardware(); struct pictureAge* pA = &vdp->picAge; // while we are waiting to recover we can't do anything if(vdp->recover) { CLog::Log(LOGWARNING, "CVDPAU::FFGetBuffer - returning due to awaiting recovery"); return -1; } vdpau_render_state * render = NULL; // find unused surface for(unsigned int i = 0; i < vdp->m_videoSurfaces.size(); i++) { if(!(vdp->m_videoSurfaces[i]->state & (FF_VDPAU_STATE_USED_FOR_REFERENCE | FF_VDPAU_STATE_USED_FOR_RENDER))) { render = vdp->m_videoSurfaces[i]; render->state = 0; break; } } VdpStatus vdp_st = VDP_STATUS_ERROR; if (render == NULL) { // create a new surface VdpDecoderProfile profile; ReadFormatOf(avctx->pix_fmt, profile, vdp->vdp_chroma_type); render = (vdpau_render_state*)calloc(sizeof(vdpau_render_state), 1); vdp_st = vdp->vdp_video_surface_create(vdp->vdp_device, vdp->vdp_chroma_type, avctx->width, avctx->height, &render->surface); vdp->CheckStatus(vdp_st, __LINE__); if (vdp_st != VDP_STATUS_OK) { free(render); CLog::Log(LOGERROR, "CVDPAU::FFGetBuffer - No Video surface available could be created"); return -1; } vdp->m_videoSurfaces.push_back(render); } if (render == NULL) return -1; pic->data[1] = pic->data[2] = NULL; pic->data[0]= (uint8_t*)render; pic->linesize[0] = pic->linesize[1] = pic->linesize[2] = 0; if(pic->reference) { pic->age = pA->ip_age[0]; pA->ip_age[0]= pA->ip_age[1]+1; pA->ip_age[1]= 1; pA->b_age++; } else { pic->age = pA->b_age; pA->ip_age[0]++; pA->ip_age[1]++; pA->b_age = 1; } pic->type= FF_BUFFER_TYPE_USER; render->state |= FF_VDPAU_STATE_USED_FOR_REFERENCE; pic->reordered_opaque= avctx->reordered_opaque; return 0; }