static gboolean
ensure_huffman_table (GstVaapiEncoderJpeg * encoder,
    GstVaapiEncPicture * picture)
{
  g_assert (picture);

  if (!fill_huffman_table (encoder, picture))
    return FALSE;
  return TRUE;
}
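For context, the encoder typically pairs this wrapper with an analogous one for the quantization tables. The helper below is a minimal sketch of that same ensure_*() pattern; the name ensure_quantization_table and the fill_quantization_table callee are assumptions for illustration, not taken from the code above.

static gboolean
ensure_quantization_table (GstVaapiEncoderJpeg * encoder,
    GstVaapiEncPicture * picture)
{
  /* Sketch only: mirrors ensure_huffman_table() above, assuming a
   * fill_quantization_table() helper with the same calling convention. */
  g_assert (picture);

  if (!fill_quantization_table (encoder, picture))
    return FALSE;
  return TRUE;
}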
static GstVaapiDecoderStatus
decode_scan (GstVaapiDecoderJpeg * decoder, GstJpegSegment * seg)
{
  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
  GstVaapiPicture *const picture = priv->current_picture;
  GstVaapiSlice *slice;
  VASliceParameterBufferJPEGBaseline *slice_param;
  GstJpegScanHdr scan_hdr;
  guint scan_hdr_size, scan_data_size;
  guint i, h_max, v_max, mcu_width, mcu_height;

  if (!VALID_STATE (decoder, GOT_SOF))
    return GST_VAAPI_DECODER_STATUS_SUCCESS;

  scan_hdr_size = (seg->data[seg->offset] << 8) | seg->data[seg->offset + 1];
  scan_data_size = seg->size - scan_hdr_size;

  memset (&scan_hdr, 0, sizeof (scan_hdr));
  if (!gst_jpeg_segment_parse_scan_header (seg, &scan_hdr)) {
    GST_ERROR ("failed to parse scan header");
    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
  }

  slice = GST_VAAPI_SLICE_NEW (JPEGBaseline, decoder,
      seg->data + seg->offset + scan_hdr_size, scan_data_size);
  if (!slice) {
    GST_ERROR ("failed to allocate slice");
    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
  }
  gst_vaapi_picture_add_slice (picture, slice);

  if (!VALID_STATE (decoder, GOT_HUF_TABLE))
    gst_jpeg_get_default_huffman_tables (&priv->huf_tables);

  /* Update VA Huffman table if it changed for this scan */
  if (huffman_tables_updated (&priv->huf_tables)) {
    slice->huf_table = GST_VAAPI_HUFFMAN_TABLE_NEW (JPEGBaseline, decoder);
    if (!slice->huf_table) {
      GST_ERROR ("failed to allocate Huffman tables");
      huffman_tables_reset (&priv->huf_tables);
      return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    fill_huffman_table (slice->huf_table, &priv->huf_tables);
    huffman_tables_reset (&priv->huf_tables);
  }

  slice_param = slice->param;
  slice_param->num_components = scan_hdr.num_components;
  for (i = 0; i < scan_hdr.num_components; i++) {
    slice_param->components[i].component_selector =
        scan_hdr.components[i].component_selector;
    slice_param->components[i].dc_table_selector =
        scan_hdr.components[i].dc_selector;
    slice_param->components[i].ac_table_selector =
        scan_hdr.components[i].ac_selector;
  }
  slice_param->restart_interval = priv->mcu_restart;
  slice_param->slice_horizontal_position = 0;
  slice_param->slice_vertical_position = 0;

  get_max_sampling_factors (&priv->frame_hdr, &h_max, &v_max);
  mcu_width = 8 * h_max;
  mcu_height = 8 * v_max;

  if (scan_hdr.num_components == 1) {   /* Non-interleaved */
    const guint Csj = slice_param->components[0].component_selector;
    const GstJpegFrameComponent *const fcp =
        get_component (&priv->frame_hdr, Csj);

    if (!fcp || fcp->horizontal_factor == 0 || fcp->vertical_factor == 0) {
      GST_ERROR ("failed to validate image component %u", Csj);
      return GST_VAAPI_DECODER_STATUS_ERROR_INVALID_PARAMETER;
    }
    mcu_width /= fcp->horizontal_factor;
    mcu_height /= fcp->vertical_factor;
  }
  slice_param->num_mcus =
      ((priv->frame_hdr.width + mcu_width - 1) / mcu_width) *
      ((priv->frame_hdr.height + mcu_height - 1) / mcu_height);

  priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOS;
  return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
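The num_mcus computation above rounds the frame dimensions up to whole MCUs, where an interleaved scan uses an MCU of 8*h_max by 8*v_max samples and a non-interleaved scan scales that down by the component's own sampling factors. Below is a minimal, self-contained sketch of just that arithmetic; the helper name is hypothetical.

/* Sketch: compute the MCU count the same way decode_scan() does.
 * Example: a 640x480 4:2:0 frame (h_max = v_max = 2) has 16x16 MCUs
 * for an interleaved scan, giving 40 * 30 = 1200 MCUs. */
static unsigned int
jpeg_scan_num_mcus (unsigned int width, unsigned int height,
    unsigned int h_max, unsigned int v_max)
{
  const unsigned int mcu_width = 8 * h_max;
  const unsigned int mcu_height = 8 * v_max;

  return ((width + mcu_width - 1) / mcu_width) *
      ((height + mcu_height - 1) / mcu_height);
}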
static GstVaapiDecoderStatus
decode_scan(
    GstVaapiDecoderJpeg *decoder,
    guchar              *scan_header,
    guint                scan_header_size,
    guchar              *scan_data,
    guint                scan_data_size)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstVaapiPicture *picture = priv->current_picture;
    VASliceParameterBufferJPEGBaseline *slice_param;
    GstVaapiSlice *gst_slice;
    guint total_h_samples, total_v_samples;
    GstJpegScanHdr scan_hdr;
    guint i;

    if (!picture) {
        GST_ERROR("no current picture to decode the scan into");
        return GST_VAAPI_DECODER_STATUS_ERROR_INVALID_SURFACE;
    }

    if (!fill_quantization_table(decoder, picture)) {
        GST_ERROR("failed to fill in quantization table");
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    if (!fill_huffman_table(decoder, picture)) {
        GST_ERROR("failed to fill in Huffman table");
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    memset(&scan_hdr, 0, sizeof(scan_hdr));
    if (!gst_jpeg_parse_scan_hdr(&scan_hdr, scan_header, scan_header_size, 0)) {
        GST_DEBUG("failed to parse scan header");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }

    gst_slice = GST_VAAPI_SLICE_NEW(JPEGBaseline, decoder,
        scan_data, scan_data_size);
    gst_vaapi_picture_add_slice(picture, gst_slice);

    slice_param = gst_slice->param;
    slice_param->num_components = scan_hdr.num_components;
    for (i = 0; i < scan_hdr.num_components; i++) {
        slice_param->components[i].component_selector =
            scan_hdr.components[i].component_selector;
        slice_param->components[i].dc_table_selector =
            scan_hdr.components[i].dc_selector;
        slice_param->components[i].ac_table_selector =
            scan_hdr.components[i].ac_selector;
    }
    slice_param->restart_interval = priv->mcu_restart;

    if (scan_hdr.num_components == 1) {
        /* Non-interleaved scan */
        slice_param->slice_horizontal_position = 0;
        slice_param->slice_vertical_position = 0;
        if (slice_param->components[0].component_selector ==
            priv->frame_hdr.components[0].identifier) {
            /* Y MCU count */
            slice_param->num_mcus =
                (priv->frame_hdr.width / 8) * (priv->frame_hdr.height / 8);
        }
        else {
            /* Cb/Cr MCU count */
            slice_param->num_mcus =
                (priv->frame_hdr.width / 16) * (priv->frame_hdr.height / 16);
        }
    }
    else {
        /* Interleaved scan */
        slice_param->slice_horizontal_position = 0;
        slice_param->slice_vertical_position = 0;
        total_v_samples = get_max_vertical_samples(&priv->frame_hdr);
        total_h_samples = get_max_horizontal_samples(&priv->frame_hdr);
        slice_param->num_mcus =
            ((priv->frame_hdr.width + total_h_samples * 8 - 1) /
             (total_h_samples * 8)) *
            ((priv->frame_hdr.height + total_v_samples * 8 - 1) /
             (total_v_samples * 8));
    }

    if (picture->slices && picture->slices->len)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;
    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
}
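For comparison with the newer decode_scan() above: this older path hard-codes a 4:2:0-style layout for non-interleaved scans (luma MCUs at width/8 by height/8, chroma at width/16 by height/16) and truncates the division, whereas the newer path derives the MCU size from the component's sampling factors and rounds up. A small illustrative check of that difference, assuming nothing beyond integer arithmetic (the function name is hypothetical):

#include <assert.h>

/* Sketch: for a 100x100 frame, truncating division (older code) and
 * rounding-up division (newer code) disagree on the luma MCU count. */
void
compare_mcu_counts(void)
{
    const unsigned int width = 100, height = 100;

    /* Older non-interleaved luma count: truncates to 12 * 12 = 144 MCUs. */
    assert((width / 8) * (height / 8) == 144);

    /* Newer count with 8x8 MCUs: rounds up to 13 * 13 = 169 MCUs. */
    assert(((width + 7) / 8) * ((height + 7) / 8) == 169);
}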