void VDPAUDecoder::setupDecoder(AVCodecContext* pContext)
{
    // Creates the VDPAU decoder and the video mixer for the stream described
    // by pContext. Failures are fatal (asserted), so this must only be called
    // with a pixel format this class advertised support for.
    VdpStatus status;

    // Map the ffmpeg VDPAU pixel format onto the matching decoder profile.
    VdpDecoderProfile profile = 0;
    switch (pContext->pix_fmt) {
        case PIX_FMT_VDPAU_MPEG1:
            profile = VDP_DECODER_PROFILE_MPEG1;
            break;
        case PIX_FMT_VDPAU_MPEG2:
            profile = VDP_DECODER_PROFILE_MPEG2_MAIN;
            break;
        case PIX_FMT_VDPAU_H264:
            profile = VDP_DECODER_PROFILE_H264_HIGH;
            break;
        case PIX_FMT_VDPAU_WMV3:
            profile = VDP_DECODER_PROFILE_VC1_SIMPLE;
            break;
        case PIX_FMT_VDPAU_VC1:
            profile = VDP_DECODER_PROFILE_VC1_SIMPLE;
            break;
        default:
            AVG_ASSERT(false);
    }
    // 16 reference frames is the H.264 worst case and a safe upper bound for
    // the other profiles as well.
    status = vdp_decoder_create(getVDPAUDevice(), profile, m_Size.x, m_Size.y, 16,
            &m_VDPDecoder);
    AVG_ASSERT(status == VDP_STATUS_OK);
    m_PixFmt = pContext->pix_fmt;

    VdpVideoMixerFeature features[] = {
        VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL,
        VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL_SPATIAL,
    };
    VdpVideoMixerParameter params[] = {
        VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH,
        VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT,
        VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE,
        VDP_VIDEO_MIXER_PARAMETER_LAYERS
    };
    VdpChromaType chroma = VDP_CHROMA_TYPE_420;
    // VDP_VIDEO_MIXER_PARAMETER_LAYERS is read through the value pointer as a
    // uint32_t, so declare it as such rather than as a plain int.
    uint32_t numLayers = 0;
    void const* paramValues[] = {
        &m_Size.x,
        &m_Size.y,
        &chroma,
        &numLayers
    };
    // Derive the counts from the arrays themselves so adding a feature or
    // parameter later can't silently desynchronize from hard-coded literals.
    status = vdp_video_mixer_create(getVDPAUDevice(),
            sizeof(features)/sizeof(features[0]), features,
            sizeof(params)/sizeof(params[0]), params,
            paramValues, &m_VDPMixer);
    AVG_ASSERT(status == VDP_STATUS_OK);
}
bool CVDPAU::ConfigVDPAU(AVCodecContext* avctx, int ref_frames)
{
  // Discard any previous output configuration before rebuilding.
  FiniVDPAUOutput();

  VdpStatus status;
  VdpDecoderProfile decoderProfile;

  vid_width = avctx->width;
  vid_height = avctx->height;
  surface_width = avctx->coded_width;
  surface_height = avctx->coded_height;

  past[1] = past[0] = current = future = NULL;

  CLog::Log(LOGNOTICE, " (VDPAU) screenWidth:%i vidWidth:%i surfaceWidth:%i",OutWidth,vid_width,surface_width);
  CLog::Log(LOGNOTICE, " (VDPAU) screenHeight:%i vidHeight:%i surfaceHeight:%i",OutHeight,vid_height,surface_height);

  ReadFormatOf(avctx->pix_fmt, decoderProfile, vdp_chroma_type);

  // H.264 gets a stream-derived reference count clamped into [5, 16];
  // every other codec only ever needs two reference surfaces.
  if (avctx->pix_fmt == PIX_FMT_VDPAU_H264)
  {
    max_references = ref_frames;
    if (max_references > 16)
      max_references = 16;
    else if (max_references < 5)
      max_references = 5;
  }
  else
  {
    max_references = 2;
  }

  // Decode into the coded (padded) dimensions, not the display dimensions.
  status = vdp_decoder_create(vdp_device,
                              decoderProfile,
                              surface_width,
                              surface_height,
                              max_references,
                              &decoder);
  if (CheckStatus(status, __LINE__))
    return false;

  m_vdpauOutputMethod = OUTPUT_NONE;

  vdpauConfigured = true;
  return true;
}
static int Init(vlc_va_t *va, void **ctxp, vlc_fourcc_t *chromap,
                int width, int height)
{
    vlc_va_sys_t *sys = va->sys;
    VdpStatus st;

    /* Round up to VDPAU surface alignment: even width, height multiple of 4. */
    width = (width + 1) & ~1;
    height = (height + 3) & ~3;
    sys->width = width;
    sys->height = height;

    /* H.264 may reference up to 16 frames; other codecs get by with two. */
    unsigned nsurfaces;
    switch (sys->profile)
    {
        case VDP_DECODER_PROFILE_H264_BASELINE:
        case VDP_DECODER_PROFILE_H264_MAIN:
        case VDP_DECODER_PROFILE_H264_HIGH:
            nsurfaces = 16;
            break;
        default:
            nsurfaces = 2;
            break;
    }

    st = vdp_decoder_create(sys->vdp, sys->device, sys->profile,
                            width, height, nsurfaces, &sys->context->decoder);
    if (st != VDP_STATUS_OK)
    {
        msg_Err(va, "%s creation failure: %s", "decoder",
                vdp_get_error_string(sys->vdp, st));
        sys->context->decoder = VDP_INVALID_HANDLE;
        return VLC_EGENERIC;
    }

    *ctxp = sys->context;
    /* TODO: select better chromas when appropriate */
    *chromap = VLC_CODEC_VDPAU_VIDEO_420;
    return VLC_SUCCESS;
}
bool CVDPAU::ConfigVDPAU(AVCodecContext* avctx, int ref_frames)
{
  // Drop any previous output state before building the new configuration.
  FiniVDPAUOutput();

  VdpStatus st;
  VdpDecoderProfile profile;

  vid_width = avctx->width;
  vid_height = avctx->height;

  past[1] = past[0] = current = future = NULL;

  CLog::Log(LOGNOTICE, " (VDPAU) screenWidth:%i vidWidth:%i",OutWidth,vid_width);
  CLog::Log(LOGNOTICE, " (VDPAU) screenHeight:%i vidHeight:%i",OutHeight,vid_height);

  ReadFormatOf(avctx->pix_fmt, profile, vdp_chroma_type);

  // H.264 reference count comes from the stream, clamped into [5, 16];
  // other codecs only ever need two references.
  if (avctx->pix_fmt == PIX_FMT_VDPAU_H264)
  {
    max_references = ref_frames;
    if (max_references > 16) max_references = 16;
    if (max_references < 5)  max_references = 5;
  }
  else
  {
    max_references = 2;
  }

  st = vdp_decoder_create(vdp_device, profile, vid_width, vid_height,
                          max_references, &decoder);
  CHECK_VDPAU_RETURN(st, false);

  // Present into the intermediate pixmap rather than a window directly.
  st = vdp_presentation_queue_target_create_x11(vdp_device,
                                                m_Pixmap, //x_window,
                                                &vdp_flip_target);
  CHECK_VDPAU_RETURN(st, false);

  st = vdp_presentation_queue_create(vdp_device, vdp_flip_target,
                                     &vdp_flip_queue);
  CHECK_VDPAU_RETURN(st, false);

  // Full-HD streams get a smaller output pool to limit video memory use.
  totalAvailableOutputSurfaces = 0;
  int maxOutSurfaces = NUM_OUTPUT_SURFACES;
  if (vid_width == FULLHD_WIDTH)
    maxOutSurfaces = NUM_OUTPUT_SURFACES_FOR_FULLHD;

  // Creation of outputSurfaces
  for (int i = 0; i < NUM_OUTPUT_SURFACES && i < maxOutSurfaces; i++)
  {
    st = vdp_output_surface_create(vdp_device,
                                   VDP_RGBA_FORMAT_B8G8R8A8,
                                   OutWidth,
                                   OutHeight,
                                   &outputSurfaces[i]);
    CHECK_VDPAU_RETURN(st, false);
    totalAvailableOutputSurfaces++;
  }
  CLog::Log(LOGNOTICE, " (VDPAU) Total Output Surfaces Available: %i of a max (tmp: %i const: %i)",
            totalAvailableOutputSurfaces, maxOutSurfaces, NUM_OUTPUT_SURFACES);

  surfaceNum = presentSurfaceNum = 0;
  outputSurface = outputSurfaces[surfaceNum];

  vdpauConfigured = true;
  return true;
}
// Entry point for enabling VDPAU on a codec context: loads libvdpau, creates
// the device, probes that the hardware can decode this codec at this size,
// and installs our buffer/draw callbacks on avctx.
// Returns false (caller stays on the software path) if dimensions are missing,
// libvdpau can't be loaded, no device exists, or the probe decoder fails.
bool CVDPAU::Open(AVCodecContext* avctx, const enum PixelFormat, unsigned int surfaces)
{
  // Can't size surfaces without known dimensions.
  if(avctx->width == 0 || avctx->height == 0)
  {
    CLog::Log(LOGWARNING,"(VDPAU) no width/height available, can't init");
    return false;
  }

  // Lazily load the VDPAU driver library once per process.
  if (!dl_handle)
  {
    dl_handle = dlopen("libvdpau.so.1", RTLD_LAZY);
    if (!dl_handle)
    {
      const char* error = dlerror();
      if (!error)
        error = "dlerror() returned NULL";
      CLog::Log(LOGNOTICE,"(VDPAU) Unable to get handle to libvdpau: %s", error);
      //g_application.m_guiDialogKaiToast.QueueNotification(CGUIDialogKaiToast::Error, "VDPAU", error, 10000);
      return false;
    }
  }

  // Resolve the VDPAU entry points; sets vdp_device on success.
  InitVDPAUProcs();

  if (vdp_device != VDP_INVALID_HANDLE)
  {
    SpewHardwareAvailable();

    // Only H.264 (and MPEG-4 ASP, when the header defines the profile) are
    // probed here; profile stays 0 for everything else and the probe is
    // skipped.
    VdpDecoderProfile profile = 0;
    if(avctx->codec_id == CODEC_ID_H264)
      profile = VDP_DECODER_PROFILE_H264_HIGH;
#ifdef VDP_DECODER_PROFILE_MPEG4_PART2_ASP
    else if(avctx->codec_id == CODEC_ID_MPEG4)
      profile = VDP_DECODER_PROFILE_MPEG4_PART2_ASP;
#endif
    if(profile)
    {
      if (!CDVDCodecUtils::IsVP3CompatibleWidth(avctx->width))
        CLog::Log(LOGWARNING,"(VDPAU) width %i might not be supported because of hardware bug", avctx->width);

      /* attempt to create a decoder with this width/height, some sizes are not supported by hw */
      VdpStatus vdp_st;
      vdp_st = vdp_decoder_create(vdp_device, profile, avctx->width, avctx->height, 5, &decoder);
      if(vdp_st != VDP_STATUS_OK)
      {
        CLog::Log(LOGERROR, " (VDPAU) Error: %s(%d) checking for decoder support\n", vdp_get_error_string(vdp_st), vdp_st);
        // Probe failed: tear the device back down and fall back to software.
        FiniVDPAUProcs();
        return false;
      }
      // The probe decoder is discarded immediately; the real one is created
      // later (presumably in ConfigVDPAU — confirm against the caller).
      vdp_decoder_destroy(decoder);
      CheckStatus(vdp_st, __LINE__);
    }

    InitCSCMatrix(avctx->height);
    MakePixmap(avctx->width,avctx->height);

    /* finally setup ffmpeg */
    avctx->get_buffer = CVDPAU::FFGetBuffer;
    avctx->release_buffer = CVDPAU::FFReleaseBuffer;
    avctx->draw_horiz_band = CVDPAU::FFDrawSlice;
    avctx->slice_flags=SLICE_FLAG_CODED_ORDER|SLICE_FLAG_ALLOW_FIELD;
    return true;
  }
  // No usable VDPAU device. NOTE(review): dl_handle is intentionally kept
  // open for later retries — confirm this matches the class's teardown.
  return false;
}
// Wires the producer's video codec context up to VDPAU: installs the vdpau_*
// ffmpeg callbacks, creates the hardware decoder, and pre-allocates a deque
// of video surfaces sized from the stream's reference frame count.
// Returns 1 on success, 0 on failure (with the surface deque rolled back).
static int vdpau_decoder_init( producer_avformat self )
{
	mlt_log_debug( MLT_PRODUCER_SERVICE(self->parent), "vdpau_decoder_init\n" );
	int success = 1;

	// Route ffmpeg's buffer management and rendering through our VDPAU hooks.
	self->video_codec->opaque = self;
	self->video_codec->get_format = vdpau_get_format;
	self->video_codec->get_buffer = vdpau_get_buffer;
	self->video_codec->release_buffer = vdpau_release_buffer;
	self->video_codec->draw_horiz_band = vdpau_draw_horiz;
	self->video_codec->slice_flags = SLICE_FLAG_CODED_ORDER | SLICE_FLAG_ALLOW_FIELD;
	self->video_codec->pix_fmt = PIX_FMT_VDPAU_H264;

	// H.264 only; the reference count comes from the parsed stream.
	VdpDecoderProfile profile = VDP_DECODER_PROFILE_H264_HIGH;
	uint32_t max_references = self->video_codec->refs;
	// Decoder creation is serialized on the SDL mutex (shared with the
	// consumer side).
	pthread_mutex_lock( &mlt_sdl_mutex );
	VdpStatus status = vdp_decoder_create( self->vdpau->device,
		profile, self->video_codec->width, self->video_codec->height, max_references, &self->vdpau->decoder );
	pthread_mutex_unlock( &mlt_sdl_mutex );

	if ( status == VDP_STATUS_OK )
	{
		// Pool size: refs + 2 working surfaces, capped at MAX_VDPAU_SURFACES.
		int i, n = FFMIN( self->video_codec->refs + 2, MAX_VDPAU_SURFACES );
		self->vdpau->deque = mlt_deque_init();
		for ( i = 0; i < n; i++ )
		{
			if ( VDP_STATUS_OK == vdp_surface_create( self->vdpau->device, VDP_CHROMA_TYPE_420,
				self->video_codec->width, self->video_codec->height, &self->vdpau->render_states[i].surface ) )
			{
				mlt_log_debug( MLT_PRODUCER_SERVICE(self->parent), "successfully created VDPAU surface %x\n",
					self->vdpau->render_states[i].surface );
				mlt_deque_push_back( self->vdpau->deque, &self->vdpau->render_states[i] );
			}
			else
			{
				// Partial failure: destroy every surface created so far and
				// close the deque, then report failure.
				// NOTE(review): the decoder created above is left alive here —
				// confirm the caller destroys it on failure.
				mlt_log_info( MLT_PRODUCER_SERVICE(self->parent), "failed to create VDPAU surface %dx%d\n",
					self->video_codec->width, self->video_codec->height );
				while ( mlt_deque_count( self->vdpau->deque ) )
				{
					struct vdpau_render_state *render = mlt_deque_pop_front( self->vdpau->deque );
					vdp_surface_destroy( render->surface );
				}
				mlt_deque_close( self->vdpau->deque );
				success = 0;
				break;
			}
		}
		// NOTE(review): self->vdpau was already dereferenced above, so this
		// null check is redundant as written.
		if ( self->vdpau )
			self->vdpau->b_age = self->vdpau->ip_age[0] = self->vdpau->ip_age[1] = 256*256*256*64; // magic from Avidemux
	}
	else
	{
		success = 0;
		self->vdpau->decoder = VDP_INVALID_HANDLE;
		mlt_log_error( MLT_PRODUCER_SERVICE(self->parent), "VDPAU failed to initialize decoder (%s)\n",
			vdp_get_error_string( status ) );
	}
	return success;
}
// Standalone VDPAU H.264 decode/present test harness.
// Opens an X11 window, creates a VDPAU device/decoder/mixer/presentation
// queue for a hard-coded 1280x544 stream, then mmaps the file named by
// argv[1] (length-prefixed NAL units), hand-parses just enough of each
// slice header to fill VdpPictureInfoH264, and decodes + displays one
// frame per second while maintaining a 6-entry reference frame list.
int main(int argc, char **argv) {
  int width = 1280, height = 544;

  // --- X11 window setup -------------------------------------------------
  Display *display = XOpenDisplay(NULL);
  Window root = XDefaultRootWindow(display);
  Window window = XCreateSimpleWindow(display, root, 0, 0, 1280, 544, 0, 0, 0);
  XSelectInput(display, window, ExposureMask | KeyPressMask);
  XMapWindow(display, window);
  XSync(display, 0);

  // --- VDPAU device + entry points --------------------------------------
  VdpDevice dev;
  mark("vdp_device_create_x11\n");
  int ret = vdp_device_create_x11(display, 0, &dev, &vdp_get_proc_address);
  assert(ret == VDP_STATUS_OK);

  // Resolve each VDPAU function pointer through vdp_get_proc_address.
#define get(id, func) \
  ret = vdp_get_proc_address(dev, id, (void **)&func); \
  assert(ret == VDP_STATUS_OK);
  get(VDP_FUNC_ID_DECODER_CREATE, vdp_decoder_create);
  get(VDP_FUNC_ID_DECODER_DESTROY, vdp_decoder_destroy);
  get(VDP_FUNC_ID_DECODER_RENDER, vdp_decoder_render);
  get(VDP_FUNC_ID_VIDEO_MIXER_CREATE, vdp_video_mixer_create);
  get(VDP_FUNC_ID_VIDEO_MIXER_DESTROY, vdp_video_mixer_destroy);
  get(VDP_FUNC_ID_VIDEO_MIXER_RENDER, vdp_video_mixer_render);
  get(VDP_FUNC_ID_VIDEO_SURFACE_CREATE, vdp_video_surface_create);
  get(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY, vdp_video_surface_destroy);
  get(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, vdp_video_surface_get_bits_ycbcr);
  get(VDP_FUNC_ID_OUTPUT_SURFACE_CREATE, vdp_output_surface_create);
  get(VDP_FUNC_ID_OUTPUT_SURFACE_DESTROY, vdp_output_surface_destroy);
  get(VDP_FUNC_ID_OUTPUT_SURFACE_GET_BITS_NATIVE, vdp_output_surface_get_bits_native);
  get(VDP_FUNC_ID_PRESENTATION_QUEUE_CREATE, vdp_presentation_queue_create);
  get(VDP_FUNC_ID_PRESENTATION_QUEUE_DESTROY, vdp_presentation_queue_destroy);
  get(VDP_FUNC_ID_PRESENTATION_QUEUE_DISPLAY, vdp_presentation_queue_display);
  get(VDP_FUNC_ID_PRESENTATION_QUEUE_TARGET_CREATE_X11, vdp_presentation_queue_target_create_x11);
  get(VDP_FUNC_ID_PRESENTATION_QUEUE_BLOCK_UNTIL_SURFACE_IDLE, vdp_presentation_queue_block_until_surface_idle);
  get(VDP_FUNC_ID_PRESENTATION_QUEUE_GET_TIME, vdp_presentation_queue_get_time);
#undef get

  // --- VDPAU object creation --------------------------------------------
  VdpDecoder dec;
  VdpVideoSurface video[16];          // decode targets, cycled round-robin
  VdpOutputSurface output;
  VdpPresentationQueue queue;
  VdpPresentationQueueTarget target;
  VdpVideoMixer mixer;
  // NOTE(review): empty array is a GCC extension; mixer uses no features.
  VdpVideoMixerFeature mixer_features[] = { };
  VdpVideoMixerParameter mixer_params[] = {
    VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH,
    VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT,
    VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE
  };
  // NOTE(review): 0 here stands in for VDP_CHROMA_TYPE_420 (value 0), passed
  // through an int where VDPAU reads a uint32_t — works on this ABI.
  int zero = 0;
  const void *mixer_param_vals[] = { &width, &height, &zero };

  mark("vdp_decoder_create\n");
  ret = vdp_decoder_create(dev, VDP_DECODER_PROFILE_H264_MAIN, 1280, 544, 6, &dec);
  assert(ret == VDP_STATUS_OK);

  int i;
  for (i = 0; i < 16; i++) {
    mark("vdp_video_surface_create: %d\n", i);
    ret = vdp_video_surface_create(dev, VDP_CHROMA_TYPE_420, 1280, 544, &video[i]);
    assert(ret == VDP_STATUS_OK);
    mark(" <-- %d\n", video[i]);
  }

  mark("vdp_output_surface_create\n");
  ret = vdp_output_surface_create(dev, VDP_RGBA_FORMAT_B8G8R8A8, 1280, 544, &output);
  assert(ret == VDP_STATUS_OK);

  mark("vdp_presentation_queue_target_create_x11\n");
  ret = vdp_presentation_queue_target_create_x11(dev, window, &target);
  assert(ret == VDP_STATUS_OK);

  mark("vdp_presentation_queue_create\n");
  ret = vdp_presentation_queue_create(dev, target, &queue);
  assert(ret == VDP_STATUS_OK);

  mark("vdp_video_mixer_create\n");
  ret = vdp_video_mixer_create(dev,
      sizeof(mixer_features)/sizeof(mixer_features[0]), mixer_features,
      sizeof(mixer_params)/sizeof(mixer_params[0]), mixer_params,
      mixer_param_vals, &mixer);
  assert(ret == VDP_STATUS_OK);

  // --- Input file: mmap the whole length-prefixed NAL stream -------------
  assert(argc > 1);
  int fd = open(argv[1], O_RDONLY);
  struct stat statbuf;
  assert(fstat(fd, &statbuf) == 0);
  void *addr = mmap(NULL, statbuf.st_size, PROT_READ, MAP_SHARED, fd, 0);
  void *orig_addr = addr;
  mark("mmap file addr: 0x%p size: 0x%lx\n", addr, statbuf.st_size);
  //printf("mmap'd file of size: %ld\n", statbuf.st_size);

  // Picture parameters are hard-coded to match the test stream's SPS/PPS
  // (frame_mbs_only, CABAC, log2_max_frame_num = 9, log2_max_poc_lsb = 10).
  VdpPictureInfoH264 info = {
    .slice_count = 1,
    .field_order_cnt = { 65536, 65536 },
    .is_reference = 1,
    .frame_num = -1,
    .field_pic_flag = 0,
    .bottom_field_flag = 0,
    .num_ref_frames = 6,
    .mb_adaptive_frame_field_flag = 0,
    .constrained_intra_pred_flag = 0,
    .weighted_pred_flag = 0,
    .weighted_bipred_idc = 0,
    .frame_mbs_only_flag = 1,
    .transform_8x8_mode_flag = 0,
    .chroma_qp_index_offset = 0,
    .second_chroma_qp_index_offset = 0,
    .pic_init_qp_minus26 = 0,
    .num_ref_idx_l0_active_minus1 = 0,
    .num_ref_idx_l1_active_minus1 = 0,
    .log2_max_frame_num_minus4 = 5,
    .pic_order_cnt_type = 0,
    .log2_max_pic_order_cnt_lsb_minus4 = 6,
    .delta_pic_order_always_zero_flag = 0,
    .direct_8x8_inference_flag = 1,
    .entropy_coding_mode_flag = 1,
    .pic_order_present_flag = 0,
    .deblocking_filter_control_present_flag = 1,
    .redundant_pic_cnt_present_flag = 0,
  };

  // Flat (identity) scaling lists: every entry 16.
  int j;
  for (j = 0; j < 6; ++j) {
    int k;
    for (k = 0; k < 16; ++k)
      info.scaling_lists_4x4[j][k] = 16;
  }
  for (j = 0; j < 2; ++j) {
    int k;
    for (k = 0; k < 64; ++k)
      info.scaling_lists_8x8[j][k] = 16;
  }
  // Empty DPB to start with.
  for (j = 0; j < 16; ++j)
    info.referenceFrames[j].surface = VDP_INVALID_HANDLE;

  mark("vdp_presentation_queue_get_time\n");
  VdpTime t;
  ret = vdp_presentation_queue_get_time(queue, &t);
  assert(ret == VDP_STATUS_OK);
  // NOTE(review): VdpTime is a 64-bit unsigned type; %ld is the wrong
  // format specifier on 32-bit targets — should be %llu/PRIu64.
  fprintf(stderr, "Start time: %ld\n", t);

  // --- Main decode loop: one NAL unit per iteration ----------------------
  int vframe = 0;
  while ((addr - orig_addr) < statbuf.st_size) {  // void* arithmetic: GNU extension
    // Each NAL is prefixed by a 4-byte big-endian length.
    int size = ntohl(*(int *)addr);
    addr += 4;
    int nal_type = (*(char *)addr) & 0x1F;
    int nal_ref_idc = (*(char *)addr) >> 5;
    // Only coded slices (type 1) and IDR slices (type 5) are decoded;
    // SPS/PPS/SEI etc. are skipped (their parameters are hard-coded above).
    if (nal_type != 1 && nal_type != 5) {
      //fprintf(stderr, "Skipping NAL type %d, size: %d\n", nal_type, size);
      addr += size;
      continue;
    }
    //fprintf(stderr, "Processing NAL type %d, ref_idc: %d, size: %d\n", nal_type, nal_ref_idc, size);

    // Minimal slice-header parse, starting after the 1-byte NAL header.
    // Per the H.264 slice layout this reads (presumably) first_mb_in_slice,
    // slice_type, pic_parameter_set_id, then frame_num.
    int bit_offset = 8;
    ue(addr, &bit_offset);
    int slice_type = ue(addr, &bit_offset);
    mark("nal_type: %d, ref_idc: %d, size: %d, slice_type: %d\n",
         nal_type, nal_ref_idc, size, slice_type);
    //fprintf(stderr, "Slice type: %d\n", slice_type);
    ue(addr, &bit_offset);
    info.frame_num = read_bits(addr, &bit_offset, info.log2_max_frame_num_minus4 + 4);
    if (nal_type == 5) {
      // IDR: skip idr_pic_id, reset frame_num and flush the reference list.
      ue(addr, &bit_offset);
      info.frame_num = 0;
      for (j = 0; j < 16; ++j)
        info.referenceFrames[j].surface = VDP_INVALID_HANDLE;
    }
    uint32_t poc_lsb = read_bits(addr, &bit_offset,
                                 info.log2_max_pic_order_cnt_lsb_minus4 + 4);
    // POC offset by 1<<16 to match the initial field_order_cnt values.
    info.field_order_cnt[0] = (1 << 16) + poc_lsb;
    info.field_order_cnt[1] = (1 << 16) + poc_lsb;
    info.is_reference = nal_ref_idc != 0;

    // Feed the decoder a synthetic 00 00 01 start code plus the raw NAL.
    VdpBitstreamBuffer buffer[2];
    static const char header[3] = {0, 0, 1};
    buffer[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
    buffer[0].bitstream = header;
    buffer[0].bitstream_bytes = sizeof(header);
    buffer[1].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
    buffer[1].bitstream = addr;
    buffer[1].bitstream_bytes = size;

    mark("vdp_decoder_render: %d\n", video[vframe]);
    ret = vdp_decoder_render(dec, video[vframe], (void*)&info, 2, buffer);
    assert(ret == VDP_STATUS_OK);

    mark("vdp_video_mixer_render\n");
    ret = vdp_video_mixer_render(
        mixer, VDP_INVALID_HANDLE, NULL,
        VDP_VIDEO_MIXER_PICTURE_STRUCTURE_FRAME,
        0, NULL, video[vframe], 0, NULL, NULL,
        output, NULL, NULL, 0, NULL);
    assert(ret == VDP_STATUS_OK);

    // Schedule each frame one second after the previous (nanosecond clock).
    t += 1000000000ULL;
    mark("vdp_presentation_queue_display\n");
    ret = vdp_presentation_queue_display(queue, output, 1280, 544, t);
    assert(ret == VDP_STATUS_OK);

    addr += size;

    /* uint32_t pitches[2] = {1280, 640 * 2}; uint8_t *data[2]; for (i = 0; i < 2; i++) { data[i] = malloc(1280 * 544 / (i ? 2 : 1)); assert(data[i]); } ret = vdp_video_surface_get_bits_ycbcr(video[vframe], VDP_YCBCR_FORMAT_NV12, (void **)data, pitches); assert(ret == VDP_STATUS_OK); write(1, data[0], 1280 * 544); for (i = 0; i < 1280 * 544 / 2; i+=2) write(1, data[1] + i, 1); for (i = 0; i < 1280 * 544 / 2; i+=2) write(1, data[1] + i + 1, 1); */

    // Maintain a sliding 6-entry reference list (matches num_ref_frames):
    // shift everything down one slot and insert the just-decoded surface.
    if (info.is_reference) {
      for (j = 5; j > 0; --j)
        memcpy(&info.referenceFrames[j], &info.referenceFrames[j-1],
               sizeof(info.referenceFrames[0]));
      info.referenceFrames[0].surface = video[vframe];
      memcpy(info.referenceFrames[0].field_order_cnt, info.field_order_cnt,
             2 * sizeof(uint32_t));
      info.referenceFrames[0].frame_idx = info.frame_num;
      info.referenceFrames[0].top_is_reference = 1;
      info.referenceFrames[0].bottom_is_reference = 1;
    }
    // Cycle through the 16 pre-created decode surfaces.
    vframe = (vframe + 1) % 16;
    //if (vframe > 10) break;
  }
  return 0;
}