/* Fills in the VA-API inverse quantization matrix (VAIQMatrixBufferVP8)
 * for @picture from the parsed frame header, applying per-segment
 * quantizer updates when segmentation is enabled.
 *
 * Returns: GST_VAAPI_DECODER_STATUS_SUCCESS on success, or
 *   GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED if the IQ matrix
 *   object could not be allocated.
 */
static GstVaapiDecoderStatus
ensure_quant_matrix (GstVaapiDecoderVp8 * decoder, GstVaapiPicture * picture)
{
  GstVaapiDecoderVp8Private *const priv = &decoder->priv;
  GstVp8FrameHdr *const frame_hdr = &priv->frame_hdr;
  GstVp8Segmentation *const seg = &priv->parser.segmentation;
  VAIQMatrixBufferVP8 *iq_matrix;
  /* Use full-width gint temporaries: in delta-update mode qi_base can
   * reach 254 (127 + 127), which would overflow a gint8 and wrap
   * negative before CLAMP could saturate it to QI_MAX. */
  const gint QI_MAX = 127;
  gint qi, qi_base;
  gint i;

  picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW (VP8, decoder);
  if (!picture->iq_matrix) {
    GST_ERROR ("failed to allocate IQ matrix");
    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
  }
  iq_matrix = picture->iq_matrix->param;

  /* Fill in VAIQMatrixBufferVP8: one row of six indices per segment */
  for (i = 0; i < 4; i++) {
    if (seg->segmentation_enabled) {
      qi_base = seg->quantizer_update_value[i];
      if (!seg->segment_feature_mode)   /* 0 means delta update */
        qi_base += frame_hdr->quant_indices.y_ac_qi;
    } else
      qi_base = frame_hdr->quant_indices.y_ac_qi;

    qi = qi_base;
    iq_matrix->quantization_index[i][0] = CLAMP (qi, 0, QI_MAX);
    qi = qi_base + frame_hdr->quant_indices.y_dc_delta;
    iq_matrix->quantization_index[i][1] = CLAMP (qi, 0, QI_MAX);
    qi = qi_base + frame_hdr->quant_indices.y2_dc_delta;
    iq_matrix->quantization_index[i][2] = CLAMP (qi, 0, QI_MAX);
    qi = qi_base + frame_hdr->quant_indices.y2_ac_delta;
    iq_matrix->quantization_index[i][3] = CLAMP (qi, 0, QI_MAX);
    qi = qi_base + frame_hdr->quant_indices.uv_dc_delta;
    iq_matrix->quantization_index[i][4] = CLAMP (qi, 0, QI_MAX);
    qi = qi_base + frame_hdr->quant_indices.uv_ac_delta;
    iq_matrix->quantization_index[i][5] = CLAMP (qi, 0, QI_MAX);
  }
  return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Builds the VA-API quantiser-table buffer (VAIQMatrixBufferJPEGBaseline)
 * for @picture from the decoder's currently parsed quantization tables,
 * falling back to the JPEG default tables when no DQT segment was seen.
 * Consumed tables are marked invalid so they are not re-uploaded.
 *
 * Returns: GST_VAAPI_DECODER_STATUS_SUCCESS on success, or an error
 *   status on allocation failure / unsupported table precision.
 */
static GstVaapiDecoderStatus
fill_quantization_table (GstVaapiDecoderJpeg * decoder,
    GstVaapiPicture * picture)
{
  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
  VAIQMatrixBufferJPEGBaseline *iq_matrix;
  guint t, k, n_tables;

  if (!VALID_STATE (decoder, GOT_IQ_TABLE))
    gst_jpeg_get_default_quantization_tables (&priv->quant_tables);

  picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW (JPEGBaseline, decoder);
  if (!picture->iq_matrix) {
    GST_ERROR ("failed to allocate quantiser table");
    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
  }
  iq_matrix = picture->iq_matrix->param;

  n_tables = MIN (G_N_ELEMENTS (iq_matrix->quantiser_table),
      GST_JPEG_MAX_QUANT_ELEMENTS);

  for (t = 0; t < n_tables; t++) {
    GstJpegQuantTable *const src = &priv->quant_tables.quant_tables[t];

    /* Skip slots that carry no (new) table data */
    if (!src->valid) {
      iq_matrix->load_quantiser_table[t] = 0;
      continue;
    }

    if (src->quant_precision != 0) {
      // Only Baseline profile is supported, thus 8-bit Qk values
      GST_ERROR ("unsupported quantization table element precision");
      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
    }

    for (k = 0; k < GST_JPEG_MAX_QUANT_ELEMENTS; k++)
      iq_matrix->quantiser_table[t][k] = src->quant_table[k];
    iq_matrix->load_quantiser_table[t] = 1;

    /* Table uploaded; don't send it again for subsequent pictures */
    src->valid = FALSE;
  }
  return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Builds the VA-API quantiser-table buffer (VAIQMatrixBufferJPEGBaseline)
 * for @picture from the decoder's parsed quantization tables, falling
 * back to the JPEG default tables when none were seen in the stream.
 * Consumed tables are marked invalid so they are not re-uploaded.
 *
 * Returns: TRUE on success, FALSE on allocation failure or when a table
 *   uses 16-bit element precision (only Baseline 8-bit is supported).
 */
static gboolean
fill_quantization_table(
    GstVaapiDecoderJpeg *decoder,
    GstVaapiPicture     *picture
)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    VAIQMatrixBufferJPEGBaseline *iq_matrix;
    guint i, j, num_tables;

    if (!priv->has_quant_table)
        gst_jpeg_get_default_quantization_tables(&priv->quant_tables);

    picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(JPEGBaseline, decoder);
    /* Allocation can legitimately fail at runtime; the previous
     * g_assert() compiled out under G_DISABLE_ASSERT and the NULL
     * pointer was then dereferenced below. Fail gracefully instead. */
    if (!picture->iq_matrix) {
        GST_ERROR("failed to allocate quantiser table");
        return FALSE;
    }
    iq_matrix = picture->iq_matrix->param;

    num_tables = MIN(G_N_ELEMENTS(iq_matrix->quantiser_table),
                     GST_JPEG_MAX_QUANT_ELEMENTS);

    for (i = 0; i < num_tables; i++) {
        GstJpegQuantTable * const quant_table =
            &priv->quant_tables.quant_tables[i];

        iq_matrix->load_quantiser_table[i] = quant_table->valid;
        if (!iq_matrix->load_quantiser_table[i])
            continue;

        /* Only Baseline profile is supported, thus 8-bit Qk values.
         * This is bitstream input, so reject it instead of asserting. */
        if (quant_table->quant_precision != 0) {
            GST_ERROR("unsupported quantization table element precision");
            return FALSE;
        }

        for (j = 0; j < GST_JPEG_MAX_QUANT_ELEMENTS; j++)
            iq_matrix->quantiser_table[i][j] = quant_table->quant_table[j];
        iq_matrix->load_quantiser_table[i] = 1;

        /* Table uploaded; don't send it again for subsequent pictures */
        quant_table->valid = FALSE;
    }
    return TRUE;
}