/*
 * Validate the H.264 encode picture parameters: resolve the reconstructed
 * surface, the coded buffer and the reference list into driver objects.
 * Returns VA_STATUS_ERROR_INVALID_PARAMETER on any inconsistency.
 */
static VAStatus
intel_encoder_check_avc_parameter(VADriverContextP ctx,
                                  struct encode_state *encode_state,
                                  struct intel_encoder_context *encoder_context)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    VAEncPictureParameterBufferH264 *pic_param =
        (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
    struct object_surface *surface;
    struct object_buffer *coded_buffer;
    int ref_idx;

    assert(!(pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID));

    if (pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID)
        goto error;

    /* It is possible the store buffer isn't allocated yet */
    surface = SURFACE(pic_param->CurrPic.picture_id);
    assert(surface);

    if (!surface)
        goto error;

    encode_state->reconstructed_object = surface;

    coded_buffer = BUFFER(pic_param->coded_buf);
    assert(coded_buffer && coded_buffer->buffer_store && coded_buffer->buffer_store->bo);

    if (!coded_buffer || !coded_buffer->buffer_store || !coded_buffer->buffer_store->bo)
        goto error;

    encode_state->coded_buf_object = coded_buffer;

    /* Resolve the reference list, stopping at the first invalid entry. */
    for (ref_idx = 0; ref_idx < 16; ref_idx++) {
        const VAPictureH264 *ref_pic = &pic_param->ReferenceFrames[ref_idx];

        if ((ref_pic->flags & VA_PICTURE_H264_INVALID) ||
            ref_pic->picture_id == VA_INVALID_SURFACE)
            break;

        surface = SURFACE(ref_pic->picture_id);
        assert(surface);

        if (!surface)
            goto error;

        /* FIXME: Warning or Error ??? */
        encode_state->reference_objects[ref_idx] = surface->bo ? surface : NULL;
    }

    /* Clear the unused tail of the reference array. */
    while (ref_idx < 16)
        encode_state->reference_objects[ref_idx++] = NULL;

    return VA_STATUS_SUCCESS;

error:
    return VA_STATUS_ERROR_INVALID_PARAMETER;
}
/*
 * Entry point for decoder input validation: resolve the render target
 * surface, then dispatch to the codec-specific parameter checker for
 * the given profile.
 */
VAStatus
intel_decoder_sanity_check_input(VADriverContextP ctx,
                                 VAProfile profile,
                                 struct decode_state *decode_state)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct object_surface *render_surface;
    VAStatus status = VA_STATUS_ERROR_INVALID_PARAMETER;

    /* A decode target surface is mandatory. */
    if (decode_state->current_render_target == VA_INVALID_SURFACE)
        goto out;

    render_surface = SURFACE(decode_state->current_render_target);

    if (!render_surface)
        goto out;

    decode_state->render_object = render_surface;

    switch (profile) {
    case VAProfileMPEG2Simple:
    case VAProfileMPEG2Main:
        status = intel_decoder_check_mpeg2_parameter(ctx, decode_state);
        break;

    case VAProfileH264ConstrainedBaseline:
    case VAProfileH264Main:
    case VAProfileH264High:
    case VAProfileH264StereoHigh:
    case VAProfileH264MultiviewHigh:
        status = intel_decoder_check_avc_parameter(ctx, profile, decode_state);
        break;

    case VAProfileVC1Simple:
    case VAProfileVC1Main:
    case VAProfileVC1Advanced:
        status = intel_decoder_check_vc1_parameter(ctx, decode_state);
        break;

    case VAProfileJPEGBaseline:
        /* No extra parameter validation for baseline JPEG. */
        status = VA_STATUS_SUCCESS;
        break;

    case VAProfileVP8Version0_3:
        status = intel_decoder_check_vp8_parameter(ctx, decode_state);
        break;

    default:
        status = VA_STATUS_ERROR_INVALID_PARAMETER;
        break;
    }

out:
    return status;
}
/*
 * Validate VC-1 decode parameters and populate reference_objects[].
 * Interlaced streams are rejected; references depend on the picture type
 * (per VAPictureParameterBufferVC1: 0=I, 1=P, 2=B, 3=BI, 4=skipped).
 */
static VAStatus
intel_decoder_check_vc1_parameter(VADriverContextP ctx,
                                  struct decode_state *decode_state)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    VAPictureParameterBufferVC1 *pic_param =
        (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
    struct object_surface *ref_surface;
    int n = 0;

    /* frame-interlace or field-interlace */
    if (pic_param->sequence_fields.bits.interlace == 1 &&
        pic_param->picture_fields.bits.frame_coding_mode != 0)
        return VA_STATUS_ERROR_DECODING_ERROR;

    switch (pic_param->picture_fields.bits.picture_type) {
    case 0: /* I */
    case 3: /* BI */
        break;

    case 1: /* P */
    case 4: /* skipped */
        ref_surface = SURFACE(pic_param->forward_reference_picture);
        decode_state->reference_objects[n++] =
            (ref_surface && ref_surface->bo) ? ref_surface : NULL;
        break;

    case 2: /* B: forward reference first, backward second */
        ref_surface = SURFACE(pic_param->forward_reference_picture);
        decode_state->reference_objects[n++] =
            (ref_surface && ref_surface->bo) ? ref_surface : NULL;

        ref_surface = SURFACE(pic_param->backward_reference_picture);
        decode_state->reference_objects[n++] =
            (ref_surface && ref_surface->bo) ? ref_surface : NULL;
        break;

    default:
        return VA_STATUS_ERROR_INVALID_PARAMETER;
    }

    /* Clear the remaining slots. */
    while (n < 16)
        decode_state->reference_objects[n++] = NULL;

    return VA_STATUS_SUCCESS;
}
/*
 * Validate MPEG-2 decode parameters and populate reference_objects[]:
 * I pictures have no references, P pictures use the forward reference,
 * B pictures use forward then backward.
 */
static VAStatus
intel_decoder_check_mpeg2_parameter(VADriverContextP ctx,
                                    struct decode_state *decode_state)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    VAPictureParameterBufferMPEG2 *pic_param =
        (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
    struct object_surface *ref_surface;
    int n = 0;

    switch (pic_param->picture_coding_type) {
    case MPEG_I_PICTURE:
        /* Intra pictures carry no references. */
        break;

    case MPEG_P_PICTURE:
    case MPEG_B_PICTURE:
        ref_surface = SURFACE(pic_param->forward_reference_picture);
        decode_state->reference_objects[n++] =
            (ref_surface && ref_surface->bo) ? ref_surface : NULL;

        if (pic_param->picture_coding_type == MPEG_B_PICTURE) {
            ref_surface = SURFACE(pic_param->backward_reference_picture);
            decode_state->reference_objects[n++] =
                (ref_surface && ref_surface->bo) ? ref_surface : NULL;
        }
        break;

    default:
        return VA_STATUS_ERROR_INVALID_PARAMETER;
    }

    /* Clear the remaining slots. */
    while (n < 16)
        decode_state->reference_objects[n++] = NULL;

    return VA_STATUS_SUCCESS;
}
/*
 * Validate VP8 decode parameters and populate reference_objects[] from
 * the last / golden / alternate reference frames, in that order.
 * Entries equal to VA_INVALID_SURFACE are skipped entirely.
 */
static VAStatus
intel_decoder_check_vp8_parameter(VADriverContextP ctx,
                                  struct decode_state *decode_state)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    VAPictureParameterBufferVP8 *pic_param =
        (VAPictureParameterBufferVP8 *)decode_state->pic_param->buffer;
    const VASurfaceID ref_ids[3] = {
        pic_param->last_ref_frame,
        pic_param->golden_ref_frame,
        pic_param->alt_ref_frame,
    };
    int n = 0, k;

    for (k = 0; k < 3; k++) {
        struct object_surface *ref_surface;

        if (ref_ids[k] == VA_INVALID_SURFACE)
            continue;

        ref_surface = SURFACE(ref_ids[k]);
        decode_state->reference_objects[n++] =
            (ref_surface && ref_surface->bo) ? ref_surface : NULL;
    }

    /* Clear the remaining slots. */
    while (n < 16)
        decode_state->reference_objects[n++] = NULL;

    return VA_STATUS_SUCCESS;
}
/* Ensure the segmentation buffer is large enough for the supplied number
   of MBs, or re-allocate it */
bool
intel_ensure_vp8_segmentation_buffer(VADriverContextP ctx, GenBuffer *buf,
                                     unsigned int mb_width, unsigned int mb_height)
{
    struct i965_driver_data * const i965 = i965_driver_data(ctx);
    /* The segmentation map is a 64-byte aligned linear buffer, with each
       cache line holding only 8 bits for 4 continuous MBs */
    const unsigned int required_size = ((mb_width + 3) / 4) * 64 * mb_height;

    /* Keep the existing BO when it is already big enough. */
    if (buf->valid && buf->bo && buf->bo->size >= required_size)
        return true;

    if (buf->valid) {
        drm_intel_bo_unreference(buf->bo);
        buf->valid = false;
    }

    buf->bo = drm_intel_bo_alloc(i965->intel.bufmgr, "segmentation map",
                                 required_size, 0x1000);
    buf->valid = buf->bo != NULL;
    return buf->valid;
}
/*
 * Make sure the encoder input surface is a Y-tiled NV12 surface.
 * If the current render target already qualifies it is used directly;
 * otherwise a temporary NV12 surface is created and the source picture
 * is converted into it (is_tmp_id marks the surface for later release).
 *
 * Returns VA_STATUS_SUCCESS on success, or an error status if the source
 * surface is invalid or the conversion fails.
 */
static VAStatus
intel_encoder_check_yuv_surface(VADriverContextP ctx,
                                VAProfile profile,
                                struct encode_state *encode_state,
                                struct intel_encoder_context *encoder_context)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct i965_surface src_surface, dst_surface;
    struct object_surface *obj_surface;
    VAStatus status;
    VARectangle rect;

    /* Release the temporary surface left over from the previous frame. */
    if (encoder_context->is_tmp_id) {
        i965_DestroySurfaces(ctx, &encoder_context->input_yuv_surface, 1);
        encode_state->input_yuv_object = NULL;
    }

    encoder_context->is_tmp_id = 0;
    obj_surface = SURFACE(encode_state->current_render_target);
    assert(obj_surface && obj_surface->bo);

    if (!obj_surface || !obj_surface->bo)
        return VA_STATUS_ERROR_INVALID_PARAMETER;

    /* A Y-tiled NV12 surface can be fed to the encoder as-is. */
    if (obj_surface->fourcc == VA_FOURCC_NV12) {
        unsigned int tiling = 0, swizzle = 0;

        dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);

        if (tiling == I915_TILING_Y) {
            encoder_context->input_yuv_surface = encode_state->current_render_target;
            encode_state->input_yuv_object = obj_surface;
            return VA_STATUS_SUCCESS;
        }
    }

    /* Otherwise convert the source into a freshly created NV12 surface. */
    rect.x = 0;
    rect.y = 0;
    rect.width = obj_surface->orig_width;
    rect.height = obj_surface->orig_height;

    src_surface.base = (struct object_base *)obj_surface;
    src_surface.type = I965_SURFACE_TYPE_SURFACE;
    src_surface.flags = I965_SURFACE_FLAG_FRAME;

    status = i965_CreateSurfaces(ctx,
                                 obj_surface->orig_width,
                                 obj_surface->orig_height,
                                 VA_RT_FORMAT_YUV420,
                                 1,
                                 &encoder_context->input_yuv_surface);
    assert(status == VA_STATUS_SUCCESS);

    if (status != VA_STATUS_SUCCESS)
        return status;

    obj_surface = SURFACE(encoder_context->input_yuv_surface);
    encode_state->input_yuv_object = obj_surface;
    assert(obj_surface);

    /* Fix: in NDEBUG builds the assert above vanishes and a NULL surface
     * was dereferenced below; fail cleanly instead. */
    if (!obj_surface) {
        i965_DestroySurfaces(ctx, &encoder_context->input_yuv_surface, 1);
        encode_state->input_yuv_object = NULL;
        return VA_STATUS_ERROR_INVALID_SURFACE;
    }

    i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC_NV12, SUBSAMPLE_YUV420);

    dst_surface.base = (struct object_base *)obj_surface;
    dst_surface.type = I965_SURFACE_TYPE_SURFACE;
    dst_surface.flags = I965_SURFACE_FLAG_FRAME;

    status = i965_image_processing(ctx, &src_surface, &rect, &dst_surface, &rect);
    assert(status == VA_STATUS_SUCCESS);

    /* Fix: propagate a conversion failure instead of reporting success
     * with a garbage input surface. */
    if (status != VA_STATUS_SUCCESS) {
        i965_DestroySurfaces(ctx, &encoder_context->input_yuv_surface, 1);
        encode_state->input_yuv_object = NULL;
        return status;
    }

    encoder_context->is_tmp_id = 1;

    return VA_STATUS_SUCCESS;
}
/*
 * Validate the MPEG-2 encode picture parameters: resolve the reconstructed
 * surface, the coded buffer and the reference pictures (forward for P,
 * forward then backward for B) into driver objects.
 */
static VAStatus
intel_encoder_check_mpeg2_parameter(VADriverContextP ctx,
                                    struct encode_state *encode_state,
                                    struct intel_encoder_context *encoder_context)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    VAEncPictureParameterBufferMPEG2 *pic_param =
        (VAEncPictureParameterBufferMPEG2 *)encode_state->pic_param_ext->buffer;
    struct object_surface *surface;
    struct object_buffer *coded_buffer;
    int n = 0;

    /* It is possible the store buffer isn't allocated yet */
    surface = SURFACE(pic_param->reconstructed_picture);
    assert(surface);

    if (!surface)
        goto error;

    encode_state->reconstructed_object = surface;

    coded_buffer = BUFFER(pic_param->coded_buf);
    assert(coded_buffer && coded_buffer->buffer_store && coded_buffer->buffer_store->bo);

    if (!coded_buffer || !coded_buffer->buffer_store || !coded_buffer->buffer_store->bo)
        goto error;

    encode_state->coded_buf_object = coded_buffer;

    switch (pic_param->picture_type) {
    case VAEncPictureTypeIntra:
        /* Intra pictures carry no references. */
        break;

    case VAEncPictureTypePredictive:
        assert(pic_param->forward_reference_picture != VA_INVALID_SURFACE);
        surface = SURFACE(pic_param->forward_reference_picture);
        assert(surface && surface->bo);

        if (!surface || !surface->bo)
            goto error;

        encode_state->reference_objects[n++] = surface;
        break;

    case VAEncPictureTypeBidirectional:
        /* Forward reference occupies slot 0, backward slot 1. */
        assert(pic_param->forward_reference_picture != VA_INVALID_SURFACE);
        surface = SURFACE(pic_param->forward_reference_picture);
        assert(surface && surface->bo);

        if (!surface || !surface->bo)
            goto error;

        encode_state->reference_objects[n++] = surface;

        assert(pic_param->backward_reference_picture != VA_INVALID_SURFACE);
        surface = SURFACE(pic_param->backward_reference_picture);
        assert(surface && surface->bo);

        if (!surface || !surface->bo)
            goto error;

        encode_state->reference_objects[n++] = surface;
        break;

    default:
        goto error;
    }

    /* Clear the unused tail of the reference array. */
    while (n < 16)
        encode_state->reference_objects[n++] = NULL;

    return VA_STATUS_SUCCESS;

error:
    return VA_STATUS_ERROR_INVALID_PARAMETER;
}
/* Build MPEG-2 reference frames array */
/*
 * Populates ref_frames[0..3] for the current picture:
 *   - slots 0/1 are filled first (top-field frame stores), slot 0 seeded
 *     with VA_INVALID_ID and unfilled slots duplicated from slot 0;
 *   - for interlaced (non-progressive) content, slots 2/3 are filled the
 *     same way, seeded from slot 2.
 * set_ref_frame() presumably returns 1 when it records a valid surface and
 * 0 otherwise, which is how n advances — TODO confirm against its definition.
 */
void
mpeg2_set_reference_surfaces(
    VADriverContextP ctx,
    GenFrameStore ref_frames[MAX_GEN_REFERENCE_FRAMES],
    struct decode_state *decode_state,
    VAPictureParameterBufferMPEG2 *pic_param
)
{
    struct i965_driver_data * const i965 = i965_driver_data(ctx);
    VASurfaceID va_surface;
    unsigned pic_structure, is_second_field, n = 0;

    pic_structure = pic_param->picture_coding_extension.bits.picture_structure;
    /* A field picture that is not flagged as the first field is the second
       field of the current frame. */
    is_second_field = pic_structure != MPEG_FRAME &&
        !pic_param->picture_coding_extension.bits.is_first_field;

    ref_frames[0].surface_id = VA_INVALID_ID;

    /* Reference frames are indexed by frame store ID (0:top, 1:bottom) */
    switch (pic_param->picture_coding_type) {
    case MPEG_P_PICTURE:
        /* The second (bottom) field of a P picture may predict from the
           first field already decoded into the current render target. */
        if (is_second_field && pic_structure == MPEG_BOTTOM_FIELD) {
            va_surface = decode_state->current_render_target;
            n += set_ref_frame(i965, &ref_frames[n], va_surface);
        }
        va_surface = pic_param->forward_reference_picture;
        n += set_ref_frame(i965, &ref_frames[n], va_surface);
        break;

    case MPEG_B_PICTURE:
        va_surface = pic_param->forward_reference_picture;
        n += set_ref_frame(i965, &ref_frames[n], va_surface);
        va_surface = pic_param->backward_reference_picture;
        n += set_ref_frame(i965, &ref_frames[n], va_surface);
        break;
    }

    /* Pad slots up to 2 by duplicating slot 0 (possibly VA_INVALID_ID). */
    while (n != 2)
        ref_frames[n++].surface_id = ref_frames[0].surface_id;

    /* Progressive frames only use the first two frame stores. */
    if (pic_param->picture_coding_extension.bits.progressive_frame)
        return;

    ref_frames[2].surface_id = VA_INVALID_ID;

    /* Bottom field pictures used as reference */
    switch (pic_param->picture_coding_type) {
    case MPEG_P_PICTURE:
        /* Mirror of the top-field case above: a second (top) field may
           predict from the bottom field in the current render target. */
        if (is_second_field && pic_structure == MPEG_TOP_FIELD) {
            va_surface = decode_state->current_render_target;
            n += set_ref_frame(i965, &ref_frames[n], va_surface);
        }
        va_surface = pic_param->forward_reference_picture;
        n += set_ref_frame(i965, &ref_frames[n], va_surface);
        break;

    case MPEG_B_PICTURE:
        va_surface = pic_param->forward_reference_picture;
        n += set_ref_frame(i965, &ref_frames[n], va_surface);
        va_surface = pic_param->backward_reference_picture;
        n += set_ref_frame(i965, &ref_frames[n], va_surface);
        break;
    }

    /* Pad slots up to 4 by duplicating slot 2 (possibly VA_INVALID_ID). */
    while (n != 4)
        ref_frames[n++].surface_id = ref_frames[2].surface_id;
}
/*
 * Validate the H.264 decode picture parameters: the current picture must be
 * valid and match the render target, FMO/ASO features are rejected for
 * non-baseline profiles, the ReferenceFrames[] list is resolved 1:1 into
 * reference_objects[] (invalid entries become NULL), and the slice
 * parameters are checked for strictly increasing first_mb_in_slice.
 */
static VAStatus
intel_decoder_check_avc_parameter(VADriverContextP ctx,
                                  VAProfile h264_profile,
                                  struct decode_state *decode_state)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    VAPictureParameterBufferH264 *pic_param =
        (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
    VAStatus va_status;
    struct object_surface *obj_surface;
    int i;
    VASliceParameterBufferH264 *slice_param, *next_slice_param, *next_slice_group_param;
    int j;

    assert(!(pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID));
    assert(pic_param->CurrPic.picture_id != VA_INVALID_SURFACE);

    if (pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID ||
        pic_param->CurrPic.picture_id == VA_INVALID_SURFACE)
        goto error;

    assert(pic_param->CurrPic.picture_id == decode_state->current_render_target);

    if (pic_param->CurrPic.picture_id != decode_state->current_render_target)
        goto error;

    if ((h264_profile != VAProfileH264Baseline)) {
        if (pic_param->num_slice_groups_minus1 ||
            pic_param->pic_fields.bits.redundant_pic_cnt_present_flag) {
            WARN_ONCE("Unsupported the FMO/ASO constraints!!!\n");
            goto error;
        }
    }

    /* Fill in the reference objects array with the actual VA surface
       objects with 1:1 correspondance with any entry in ReferenceFrames[],
       i.e. including "holes" for invalid entries, that are expanded to
       NULL in the reference_objects[] array */
    for (i = 0; i < ARRAY_ELEMS(pic_param->ReferenceFrames); i++) {
        const VAPictureH264 * const va_pic = &pic_param->ReferenceFrames[i];

        obj_surface = NULL;

        if (!(va_pic->flags & VA_PICTURE_H264_INVALID) &&
            va_pic->picture_id != VA_INVALID_ID) {
            obj_surface = SURFACE(pic_param->ReferenceFrames[i].picture_id);

            if (!obj_surface)
                return VA_STATUS_ERROR_INVALID_SURFACE;

            /*
             * Sometimes a dummy frame comes from the upper layer
             * library, call i965_check_alloc_surface_bo() to make
             * sure the store buffer is allocated for this reference
             * frame
             */
            va_status = avc_ensure_surface_bo(ctx, decode_state, obj_surface,
                                              pic_param);

            if (va_status != VA_STATUS_SUCCESS)
                return va_status;
        }
        decode_state->reference_objects[i] = obj_surface;
    }

    /* Reject streams whose slices are not in increasing MB order. */
    for (j = 0; j < decode_state->num_slice_params; j++) {
        assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
        slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;

        if (j == decode_state->num_slice_params - 1)
            next_slice_group_param = NULL;
        else
            next_slice_group_param =
                (VASliceParameterBufferH264 *)decode_state->slice_params[j + 1]->buffer;

        for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
            if (i < decode_state->slice_params[j]->num_elements - 1)
                next_slice_param = slice_param + 1;
            else
                next_slice_param = next_slice_group_param;

            if (next_slice_param != NULL) {
                /* If the mb position of next_slice is less than or equal to the current slice,
                 * discard the current frame.
                 */
                if (next_slice_param->first_mb_in_slice <= slice_param->first_mb_in_slice) {
                    WARN_ONCE("!!!incorrect slice_param. The first_mb_in_slice of next_slice is less"
                              " than or equal to that in current slice\n");
                    goto error;
                }
            }

            /* Fix: advance to the next slice element. Without this the loop
             * kept comparing against the first element of the group, so
             * out-of-order slices beyond the first pair went undetected. */
            slice_param++;
        }
    }

    return VA_STATUS_SUCCESS;

error:
    return VA_STATUS_ERROR_INVALID_PARAMETER;
}
/* Ensure the supplied VA surface has valid storage for decoding the
   current picture */
/*
 * Maps the stream's chroma_format_idc (0 = grayscale, 1 = 4:2:0; anything
 * else is rejected) to a HW surface format allowed by the VA config's
 * chroma_formats mask, (re)allocates the surface BO if the format changed,
 * and, when grayscale is emulated on top of NV12, fills the UV plane with
 * the neutral value 0x80.
 */
VAStatus
avc_ensure_surface_bo(
    VADriverContextP ctx,
    struct decode_state *decode_state,
    struct object_surface *obj_surface,
    const VAPictureParameterBufferH264 *pic_param
)
{
    VAStatus va_status;
    uint32_t hw_fourcc, fourcc, subsample, chroma_format;

    /* Validate chroma format */
    switch (pic_param->seq_fields.bits.chroma_format_idc) {
    case 0: // Grayscale
        fourcc = VA_FOURCC_Y800;
        subsample = SUBSAMPLE_YUV400;
        chroma_format = VA_RT_FORMAT_YUV400;
        break;
    case 1: // YUV 4:2:0
        fourcc = VA_FOURCC_NV12;
        subsample = SUBSAMPLE_YUV420;
        chroma_format = VA_RT_FORMAT_YUV420;
        break;
    default:
        return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
    }

    /* Determine the HW surface format, bound to VA config needs */
    if ((decode_state->base.chroma_formats & chroma_format) == chroma_format)
        hw_fourcc = fourcc;
    else {
        hw_fourcc = 0;
        switch (fourcc) {
        case VA_FOURCC_Y800: // Implement with an NV12 surface
            if (decode_state->base.chroma_formats & VA_RT_FORMAT_YUV420) {
                hw_fourcc = VA_FOURCC_NV12;
                subsample = SUBSAMPLE_YUV420;
            }
            break;
        }
    }
    if (!hw_fourcc)
        return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;

    /* (Re-)allocate the underlying surface buffer store, if necessary */
    if (!obj_surface->bo || obj_surface->fourcc != hw_fourcc) {
        struct i965_driver_data * const i965 = i965_driver_data(ctx);

        i965_destroy_surface_storage(obj_surface);

        va_status = i965_check_alloc_surface_bo(ctx, obj_surface,
                                                i965->codec_info->has_tiled_surface,
                                                hw_fourcc, subsample);
        if (va_status != VA_STATUS_SUCCESS)
            return va_status;
    }

    /* Fake chroma components if grayscale is implemented on top of NV12 */
    if (fourcc == VA_FOURCC_Y800 && hw_fourcc == VA_FOURCC_NV12) {
        /* NV12 layout: UV plane follows the Y plane at width*height, at
           half the Y-plane size. NOTE(review): the map call's return value
           is not checked, and the pointer arithmetic on bo->virtual relies
           on the GNU void* extension — presumably fine for this driver's
           toolchain, but worth confirming. */
        const uint32_t uv_offset = obj_surface->width * obj_surface->height;
        const uint32_t uv_size = obj_surface->width * obj_surface->height / 2;

        drm_intel_gem_bo_map_gtt(obj_surface->bo);
        memset(obj_surface->bo->virtual + uv_offset, 0x80, uv_size);
        drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
    }
    return VA_STATUS_SUCCESS;
}