Example #1
static GstFlowReturn
gst_rsvg_dec_parse (GstVideoDecoder * decoder, GstVideoCodecFrame * frame,
    GstAdapter * adapter, gboolean at_eos)
{
  GstRsvgDec *rsvg = GST_RSVG_DEC (decoder);
  gboolean completed = FALSE;
  const guint8 *data;
  guint size;
  guint i;

  GST_LOG_OBJECT (rsvg, "parse start");
  size = gst_adapter_available (adapter);

  /* "<svg></svg>" */
  if (size < 5 + 6)
    return GST_VIDEO_DECODER_FLOW_NEED_DATA;

  data = gst_adapter_map (adapter, size);
  if (data == NULL) {
    GST_ERROR_OBJECT (rsvg, "Unable to map memory");
    return GST_FLOW_ERROR;
  }
  for (i = 0; i < size - 4; i++) {
    if (memcmp (data + i, "<svg", 4) == 0) {
      gst_adapter_flush (adapter, i);

      size = gst_adapter_available (adapter);
      if (size < 5 + 6)
        return GST_VIDEO_DECODER_FLOW_NEED_DATA;
      data = gst_adapter_map (adapter, size);
      if (data == NULL) {
        GST_ERROR_OBJECT (rsvg, "Unable to map memory");
        return GST_FLOW_ERROR;
      }
      break;
    }
  }
  /* If start wasn't found: */
  if (i == size - 4) {
    gst_adapter_flush (adapter, size - 4);
    return GST_VIDEO_DECODER_FLOW_NEED_DATA;
  }

  for (i = size - 6; i >= 5; i--) {
    if (memcmp (data + i, "</svg>", 6) == 0) {
      completed = TRUE;
      size = i + 6;
      break;
    }
  }

  if (completed) {

    GST_LOG_OBJECT (rsvg, "have complete svg of %u bytes", size);

    gst_video_decoder_add_to_frame (decoder, size);
    return gst_video_decoder_have_frame (decoder);
  }
  return GST_VIDEO_DECODER_FLOW_NEED_DATA;
}
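All of these parsers implement the parse() virtual method of GstVideoDecoderClass. A minimal sketch of how such a function is installed, with hypothetical gst_my_dec_* names standing in for a real subclass:

/* Hedged sketch: wiring a parse() implementation into a GstVideoDecoder
 * subclass. GstMyDecClass and gst_my_dec_parse are placeholder names,
 * not code from the examples above. */
static void
gst_my_dec_class_init (GstMyDecClass * klass)
{
  GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);

  /* The base class calls parse() with an adapter of pending input; the
   * implementation claims bytes with gst_video_decoder_add_to_frame(),
   * completes a frame with gst_video_decoder_have_frame(), or returns
   * GST_VIDEO_DECODER_FLOW_NEED_DATA to wait for more input. */
  decoder_class->parse = gst_my_dec_parse;
}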
Example #2
/* If there is something in GstVideoDecoder's output adapter, then
   submit the frame for decoding */
static inline void
gst_vaapidecode_flush_output_adapter (GstVaapiDecode * decode)
{
  if (decode->current_frame_size == 0)
    return;
  gst_video_decoder_have_frame (GST_VIDEO_DECODER (decode));
  decode->current_frame_size = 0;
}
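A helper like this is typically called from the decoder's drain and flush paths before flushing the hardware decoder itself (Example #6 below inlines the same logic). A hedged sketch of such a caller, using a placeholder name rather than the upstream one:

/* Hedged sketch of a drain vfunc built on the helper above;
 * gst_vaapidecode_drain_sketch is a placeholder name. */
static GstFlowReturn
gst_vaapidecode_drain_sketch (GstVideoDecoder * vdec)
{
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);

  if (!decode->decoder)
    return GST_FLOW_OK;

  /* Submit any partially assembled frame before flushing the decoder. */
  gst_vaapidecode_flush_output_adapter (decode);

  if (gst_vaapi_decoder_flush (decode->decoder) !=
      GST_VAAPI_DECODER_STATUS_SUCCESS)
    return GST_FLOW_ERROR;
  return GST_FLOW_OK;
}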
Example #3
static GstFlowReturn
gst_vaapidecode_parse_frame (GstVideoDecoder * vdec,
    GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos)
{
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
  GstVaapiDecoderStatus status;
  GstFlowReturn ret;
  guint got_unit_size;
  gboolean got_frame;

  status = gst_vaapi_decoder_parse (decode->decoder, frame,
      adapter, at_eos, &got_unit_size, &got_frame);

  switch (status) {
    case GST_VAAPI_DECODER_STATUS_SUCCESS:
      if (got_unit_size > 0) {
        gst_video_decoder_add_to_frame (vdec, got_unit_size);
        decode->current_frame_size += got_unit_size;
      }
      if (got_frame) {
        ret = gst_video_decoder_have_frame (vdec);
        decode->current_frame_size = 0;
      } else
        ret = GST_VAAPI_DECODE_FLOW_PARSE_DATA;
      break;
    case GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA:
      ret = GST_VIDEO_DECODER_FLOW_NEED_DATA;
      break;
    case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC:
    case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE:
    case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT:
      GST_WARNING ("parse error %d", status);
      ret = GST_FLOW_NOT_SUPPORTED;
      decode->current_frame_size = 0;
      break;
    default:
      GST_WARNING ("parse error %d", status);
      /* just keep parsing, the decoder should have flushed the broken unit */
      ret = GST_VAAPI_DECODE_FLOW_PARSE_DATA;
      decode->current_frame_size = 0;

      GST_INFO ("requesting upstream a key unit");
      gst_pad_push_event (GST_VIDEO_DECODER_SINK_PAD (decode),
          gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
              FALSE, 0));
      break;
  }
  return ret;
}
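GST_VAAPI_DECODE_FLOW_PARSE_DATA is a gstreamer-vaapi private return value telling the base class that parsing consumed data but no complete frame is ready yet. Custom flow returns like this are carved out of GStreamer's custom-success range; a plausible definition is shown below, but the exact slot is an assumption, so check gstvaapidecode.c for the real one:

/* Assumed definition of the private flow return used above; GStreamer
 * reserves GST_FLOW_CUSTOM_SUCCESS through GST_FLOW_CUSTOM_SUCCESS_2
 * for element-specific success codes. */
#define GST_VAAPI_DECODE_FLOW_PARSE_DATA GST_FLOW_CUSTOM_SUCCESS_2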
Example #4
static GstFlowReturn
theora_dec_parse (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos)
{
  gint av;
  const guint8 *data;

  av = gst_adapter_available (adapter);

  if (av > 0) {
    data = gst_adapter_map (adapter, 1);
    /* check for keyframe; must not be a header packet */
    if (!(data[0] & 0x80) && (data[0] & 0x40) == 0)
      GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
    gst_adapter_unmap (adapter);
  }

  /* and pass along all */
  gst_video_decoder_add_to_frame (decoder, av);
  return gst_video_decoder_have_frame (decoder);
}
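The bit tests follow the Theora packet layout: the top bit of the first byte marks a header packet, and in data packets the 0x40 bit marks an inter (delta) frame. The same check, isolated as a standalone sketch:

/* Hedged helper sketch (not gst-plugins-base code): TRUE when the first
 * byte of a Theora packet starts an intra (key) frame. Per the Theora
 * bitstream format, 0x80 set => header packet, 0x40 set => inter frame. */
static gboolean
theora_first_byte_is_keyframe (guint8 b)
{
  return (b & 0x80) == 0 && (b & 0x40) == 0;
}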
Example #5
static GstFlowReturn
gst_vaapidecode_parse_frame (GstVideoDecoder * vdec,
    GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos)
{
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
  GstVaapiDecoderStatus status;
  GstFlowReturn ret;
  guint got_unit_size;
  gboolean got_frame;

  status = gst_vaapi_decoder_parse (decode->decoder, frame,
      adapter, at_eos, &got_unit_size, &got_frame);

  switch (status) {
    case GST_VAAPI_DECODER_STATUS_SUCCESS:
      if (got_unit_size > 0) {
        gst_video_decoder_add_to_frame (vdec, got_unit_size);
        decode->current_frame_size += got_unit_size;
      }
      if (got_frame) {
        ret = gst_video_decoder_have_frame (vdec);
        decode->current_frame_size = 0;
      } else
        ret = GST_VAAPI_DECODE_FLOW_PARSE_DATA;
      break;
    case GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA:
      ret = GST_VIDEO_DECODER_FLOW_NEED_DATA;
      break;
    case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC:
    case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE:
    case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT:
      GST_WARNING ("parse error %d", status);
      ret = GST_FLOW_NOT_SUPPORTED;
      decode->current_frame_size = 0;
      break;
    default:
      GST_ERROR ("parse error %d", status);
      ret = GST_FLOW_EOS;
      decode->current_frame_size = 0;
      break;
  }
  return ret;
}
Example #6
static gboolean
gst_vaapidecode_internal_flush (GstVideoDecoder * vdec)
{
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
  GstVaapiDecoderStatus status;

  if (!decode->decoder)
    return TRUE;

  /* If there is something in GstVideoDecoder's output adapter, then
     submit the frame for decoding */
  if (decode->current_frame_size) {
    gst_video_decoder_have_frame (vdec);
    decode->current_frame_size = 0;
  }

  status = gst_vaapi_decoder_flush (decode->decoder);
  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
    GST_WARNING_OBJECT (decode, "failed to flush decoder (status %d)", status);
    return FALSE;
  }

  return TRUE;
}
Example #7
static GstFlowReturn
gst_pngdec_parse (GstVideoDecoder * decoder, GstVideoCodecFrame * frame,
    GstAdapter * adapter, gboolean at_eos)
{
  gsize toadd = 0;
  GstByteReader reader;
  gconstpointer data;
  guint64 signature;
  gsize size;
  GstPngDec *pngdec = (GstPngDec *) decoder;

  GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);

  /* FIXME : The overhead of using scan_uint32 is massive */

  size = gst_adapter_available (adapter);
  GST_DEBUG ("Parsing PNG image data (%" G_GSIZE_FORMAT " bytes)", size);

  if (size < 8)
    goto need_more_data;

  data = gst_adapter_map (adapter, size);
  gst_byte_reader_init (&reader, data, size);

  if (pngdec->read_data == 0) {
    if (!gst_byte_reader_peek_uint64_be (&reader, &signature))
      goto need_more_data;

    if (signature != PNG_SIGNATURE) {
      for (;;) {
        guint offset;

        offset = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
            0x89504E47, 0, gst_byte_reader_get_remaining (&reader));

        if (offset == -1) {
          gst_adapter_flush (adapter,
              gst_byte_reader_get_remaining (&reader) - 4);
          goto need_more_data;
        }

        if (!gst_byte_reader_skip (&reader, offset))
          goto need_more_data;

        if (!gst_byte_reader_peek_uint64_be (&reader, &signature))
          goto need_more_data;

        if (signature == PNG_SIGNATURE) {
          /* We're skipping, go out, we'll be back */
          gst_adapter_flush (adapter, gst_byte_reader_get_pos (&reader));
          goto need_more_data;
        }
        if (!gst_byte_reader_skip (&reader, 4))
          goto need_more_data;
      }
    }
    pngdec->read_data = 8;
  }

  if (!gst_byte_reader_skip (&reader, pngdec->read_data))
    goto need_more_data;

  for (;;) {
    guint32 length;
    guint32 code;

    if (!gst_byte_reader_get_uint32_be (&reader, &length))
      goto need_more_data;
    if (!gst_byte_reader_get_uint32_le (&reader, &code))
      goto need_more_data;

    if (!gst_byte_reader_skip (&reader, length + 4))
      goto need_more_data;

    if (code == GST_MAKE_FOURCC ('I', 'E', 'N', 'D')) {
      /* Have complete frame */
      toadd = gst_byte_reader_get_pos (&reader);
      GST_DEBUG_OBJECT (decoder, "Have complete frame of size %" G_GSIZE_FORMAT,
          toadd);
      pngdec->read_data = 0;
      goto have_full_frame;
    } else
      pngdec->read_data += length + 12;
  }

  g_assert_not_reached ();
  return GST_FLOW_ERROR;

need_more_data:
  return GST_VIDEO_DECODER_FLOW_NEED_DATA;

have_full_frame:
  if (toadd)
    gst_video_decoder_add_to_frame (decoder, toadd);
  return gst_video_decoder_have_frame (decoder);
}
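The chunk walk relies on the PNG container layout: every chunk is a 4-byte big-endian length, a 4-byte type code, length bytes of data, and a 4-byte CRC, i.e. length + 12 bytes in total, which is exactly what the read_data bookkeeping above accumulates. A hedged standalone sketch of the same walk over a fully buffered image:

/* Hedged sketch (not pngdec code): return the offset just past the IEND
 * chunk of a complete in-memory PNG, or 0 if the data is truncated. */
static gsize
png_find_iend_sketch (const guint8 * data, gsize size)
{
  gsize pos = 8;                /* skip the 8-byte PNG signature */

  while (pos + 12 <= size) {
    guint32 length = GST_READ_UINT32_BE (data + pos);

    if (pos + 12 + length > size)
      return 0;                 /* chunk runs past the buffer */
    if (memcmp (data + pos + 4, "IEND", 4) == 0)
      return pos + 12 + length; /* include IEND's own CRC */
    pos += 12 + length;
  }
  return 0;
}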
Example #8
static GstFlowReturn
gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
    GstAdapter * adapter, gboolean at_eos)
{
  guint size;
  gint toadd = 0;
  gboolean resync;
  gint offset = 0, noffset;
  GstJpegDec *dec = (GstJpegDec *) bdec;

  /* FIXME : The overhead of using scan_uint32 is massive */

  size = gst_adapter_available (adapter);
  GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);

  if (at_eos) {
    GST_DEBUG ("Flushing all data out");
    toadd = size;

    /* If we have leftover data, throw it away */
    if (!dec->saw_header)
      goto drop_frame;
    goto have_full_frame;
  }

  if (size < 8)
    goto need_more_data;

  if (!dec->saw_header) {
    gint ret;
    /* we expect at least 4 bytes, the first of which is the start marker */
    ret =
        gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
        size - 4);

    GST_DEBUG ("ret:%d", ret);
    if (ret < 0)
      goto need_more_data;

    if (ret) {
      gst_adapter_flush (adapter, ret);
      size -= ret;
    }
    dec->saw_header = TRUE;
  }

  while (1) {
    guint frame_len;
    guint32 value;

    GST_DEBUG ("offset:%d, size:%d", offset, size);

    noffset =
        gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
        offset, size - offset, &value);

    /* lost sync if 0xff marker not where expected */
    if ((resync = (noffset != offset))) {
      GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
    }
    /* may have marker, but could have been resyncing */
    resync = resync || dec->parse_resync;
    /* Skip over extra 0xff */
    while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
      noffset++;
      noffset =
          gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
          noffset, size - noffset, &value);
    }
    /* enough bytes left for marker? (we need 0xNN after the 0xff) */
    if (noffset < 0) {
      GST_DEBUG ("at end of input and no EOI marker found, need more data");
      goto need_more_data;
    }

    /* now lock on the marker we found */
    offset = noffset;
    value = value & 0xff;
    if (value == 0xd9) {
      GST_DEBUG ("0x%08x: EOI marker", offset + 2);
      /* clear parse state */
      dec->saw_header = FALSE;
      dec->parse_resync = FALSE;
      toadd = offset + 4;
      goto have_full_frame;
    }
    if (value == 0xd8) {
      /* Skip this frame if we found another SOI marker */
      GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
      dec->parse_resync = FALSE;
      /* FIXME : Need to skip data */
      toadd -= offset + 2;
      goto have_full_frame;
    }


    if (value >= 0xd0 && value <= 0xd7)
      frame_len = 0;
    else {
      /* peek tag and subsequent length */
      if (offset + 2 + 4 > size)
        goto need_more_data;
      else
        gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
            &frame_len);
      frame_len = frame_len & 0xffff;
    }
    GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
    /* the frame length includes the 2 bytes for the length; here we want at
     * least 2 more bytes at the end for an end marker */
    if (offset + 2 + 2 + frame_len + 2 > size) {
      goto need_more_data;
    }

    if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
      guint eseglen = dec->parse_entropy_len;

      GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
          offset + 2, eseglen);
      if (size < offset + 2 + frame_len + eseglen)
        goto need_more_data;
      noffset = offset + 2 + frame_len + dec->parse_entropy_len;
      while (1) {
        GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
            noffset, size, size - noffset);
        noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
            0x0000ff00, noffset, size - noffset, &value);
        if (noffset < 0) {
          /* need more data */
          dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
          goto need_more_data;
        }
        if ((value & 0xff) != 0x00) {
          eseglen = noffset - offset - frame_len - 2;
          break;
        }
        noffset++;
      }
      dec->parse_entropy_len = 0;
      frame_len += eseglen;
      GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
          frame_len);
    }
    if (resync) {
      /* check if we will still be in sync if we interpret
       * this as a sync point and skip this frame */
      noffset = offset + frame_len + 2;
      noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
          noffset, 4);
      if (noffset < 0) {
        /* ignore and continue resyncing until we hit the end
         * of our data or find a sync point that looks okay */
        offset++;
        continue;
      }
      GST_DEBUG ("found sync at 0x%x", offset + 2);
    }

    /* Add current data to output buffer */
    toadd += frame_len + 2;
    offset += frame_len + 2;
  }

need_more_data:
  if (toadd)
    gst_video_decoder_add_to_frame (bdec, toadd);
  return GST_VIDEO_DECODER_FLOW_NEED_DATA;

have_full_frame:
  if (toadd)
    gst_video_decoder_add_to_frame (bdec, toadd);
  return gst_video_decoder_have_frame (bdec);

drop_frame:
  gst_adapter_flush (adapter, size);
  return GST_FLOW_OK;
}
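The offset arithmetic throughout reflects the JPEG marker format: a marker is 0xFF followed by a marker byte, and most markers carry a 2-byte big-endian length that counts itself but not the marker, hence the recurring frame_len + 2. Restart markers 0xD0-0xD7, SOI (0xD8) and EOI (0xD9) carry no length field. A hedged sketch of that arithmetic:

/* Hedged sketch (not jpegdec code): total byte size of the marker
 * segment at data[pos] (with data[pos] == 0xFF), for markers that carry
 * a 16-bit big-endian length counting itself but not the marker. */
static guint
jpeg_segment_size_sketch (const guint8 * data, guint pos)
{
  guint16 len = GST_READ_UINT16_BE (data + pos + 2);

  return 2 + len;               /* 0xFF + marker byte + length payload */
}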
Example #9
static GstFlowReturn
gst_pnmdec_parse (GstVideoDecoder * decoder, GstVideoCodecFrame * frame,
    GstAdapter * adapter, gboolean at_eos)
{
  gsize size;
  GstPnmdec *s = GST_PNMDEC (decoder);
  GstFlowReturn r = GST_FLOW_OK;
  guint offset = 0;
  GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;
  const guint8 *raw_data;
  GstVideoCodecState *output_state;

  GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);

  size = gst_adapter_available (adapter);
  if (size < 8) {
    goto need_more_data;
  }
  raw_data = gst_adapter_map (adapter, size);

  if (s->mngr.info.fields != GST_PNM_INFO_FIELDS_ALL) {
    GstPnmInfoMngrResult res;

    res = gst_pnm_info_mngr_scan (&s->mngr, raw_data, size);

    switch (res) {
      case GST_PNM_INFO_MNGR_RESULT_FAILED:
        r = GST_FLOW_ERROR;
        goto out;
      case GST_PNM_INFO_MNGR_RESULT_READING:
        r = GST_FLOW_OK;
        goto out;
      case GST_PNM_INFO_MNGR_RESULT_FINISHED:
        switch (s->mngr.info.type) {
          case GST_PNM_TYPE_BITMAP:
            if (s->mngr.info.encoding == GST_PNM_ENCODING_ASCII) {
              r = GST_FLOW_ERROR;
              goto out;
            }
            s->size = s->mngr.info.width * s->mngr.info.height * 1;
            format = GST_VIDEO_FORMAT_GRAY8;
            break;
          case GST_PNM_TYPE_GRAYMAP:
            s->size = s->mngr.info.width * s->mngr.info.height * 1;
            format = GST_VIDEO_FORMAT_GRAY8;
            break;
          case GST_PNM_TYPE_PIXMAP:
            s->size = s->mngr.info.width * s->mngr.info.height * 3;
            format = GST_VIDEO_FORMAT_RGB;
            break;
        }
        output_state =
            gst_video_decoder_set_output_state (GST_VIDEO_DECODER (s), format,
            s->mngr.info.width, s->mngr.info.height, s->input_state);
        gst_video_codec_state_unref (output_state);
        if (gst_video_decoder_negotiate (GST_VIDEO_DECODER (s)) == FALSE) {
          r = GST_FLOW_NOT_NEGOTIATED;
          goto out;
        }

        if (s->mngr.info.encoding == GST_PNM_ENCODING_ASCII) {
          s->mngr.data_offset++;
          /* The size of the ASCII input cannot be known up front, so we
             parse it, count the pixels produced, and decide from that
             when we have a full frame */
          s->buf = gst_buffer_new_and_alloc (s->size);
        }
        offset = s->mngr.data_offset;
        gst_adapter_flush (adapter, offset);
        size = size - offset;
    }
  }

  if (s->mngr.info.encoding == GST_PNM_ENCODING_ASCII) {
    /* Parse the ASCII data and populate s->current_size with the number
       of bytes actually parsed from the input */
    r = gst_pnmdec_parse_ascii (s, raw_data + offset, size);
  } else {
    /* A bitmap packs 8 pixels into each byte */
    if (s->mngr.info.type == GST_PNM_TYPE_BITMAP)
      s->current_size += (size * 8);
    else
      s->current_size += size;
  }

  gst_video_decoder_add_to_frame (decoder, size);
  if (s->size <= s->current_size) {
    goto have_full_frame;
  }

need_more_data:
  return GST_VIDEO_DECODER_FLOW_NEED_DATA;

have_full_frame:
  return gst_video_decoder_have_frame (decoder);

out:
  return r;
}
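The full-frame test compares current_size against s->size, the expected raw frame size computed from the parsed header. That computation, isolated as a hedged sketch:

/* Hedged sketch mirroring the size computation above: expected decoded
 * frame size in bytes for the binary PNM types handled by the parser. */
static gsize
pnm_frame_size_sketch (GstPnmType type, guint width, guint height)
{
  switch (type) {
    case GST_PNM_TYPE_BITMAP:   /* 1 bit/pixel on the wire, decoded to GRAY8 */
    case GST_PNM_TYPE_GRAYMAP:  /* 8-bit gray */
      return (gsize) width * height;
    case GST_PNM_TYPE_PIXMAP:   /* 8-bit RGB */
      return (gsize) width * height * 3;
    default:
      return 0;
  }
}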