static GstCaps *
generate_sink_template (void)
{
    GstCaps *caps;
    GstStructure *struc;

    caps = gst_caps_new_empty ();

    struc = gst_structure_new ("video/x-raw-yuv",
                               "width", GST_TYPE_INT_RANGE, 16, 4096,
                               "height", GST_TYPE_INT_RANGE, 16, 4096,
                               "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30, 1,
                               NULL);

    {
        GValue list = { 0 };
        GValue val = { 0 };

        g_value_init (&list, GST_TYPE_LIST);
        g_value_init (&val, GST_TYPE_FOURCC);

        gst_value_set_fourcc (&val, GST_MAKE_FOURCC ('I', '4', '2', '0'));
        gst_value_list_append_value (&list, &val);

        gst_value_set_fourcc (&val, GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'));
        gst_value_list_append_value (&list, &val);

        gst_value_set_fourcc (&val, GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'));
        gst_value_list_append_value (&list, &val);

        gst_structure_set_value (struc, "format", &list);

        g_value_unset (&val);
        g_value_unset (&list);
    }

    gst_caps_append_structure (caps, struc);

    return caps;
}
Example #2
static void
gst_vtenc_base_init (GstVTEncClass * klass)
{
  const GstVTEncoderDetails *codec_details =
      GST_VTENC_CLASS_GET_CODEC_DETAILS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  const int min_width = 1, max_width = G_MAXINT;
  const int min_height = 1, max_height = G_MAXINT;
  const int min_fps_n = 0, max_fps_n = G_MAXINT;
  const int min_fps_d = 1, max_fps_d = 1;
  GstPadTemplate *sink_template, *src_template;
  GstCaps *src_caps;
  GstElementDetails details;

  details.longname = g_strdup_printf ("%s encoder", codec_details->name);
  details.klass = g_strdup_printf ("Codec/Encoder/Video");
  details.description = g_strdup_printf ("%s encoder", codec_details->name);

  gst_element_class_set_details_simple (element_class,
      details.longname, details.klass, details.description,
      "Ole André Vadla Ravnås <*****@*****.**>");

  g_free (details.longname);
  g_free (details.klass);
  g_free (details.description);

  sink_template = gst_pad_template_new ("sink",
      GST_PAD_SINK,
      GST_PAD_ALWAYS,
      gst_caps_new_simple ("video/x-raw-yuv",
          "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('N', 'V', '1', '2'),
          "width", GST_TYPE_INT_RANGE, min_width, max_width,
          "height", GST_TYPE_INT_RANGE, min_height, max_height,
          "framerate", GST_TYPE_FRACTION_RANGE,
          min_fps_n, min_fps_d, max_fps_n, max_fps_d, NULL));
  gst_element_class_add_pad_template (element_class, sink_template);
  gst_object_unref (sink_template);

  src_caps = gst_caps_new_simple (codec_details->mimetype,
      "width", GST_TYPE_INT_RANGE, min_width, max_width,
      "height", GST_TYPE_INT_RANGE, min_height, max_height,
      "framerate", GST_TYPE_FRACTION_RANGE,
      min_fps_n, min_fps_d, max_fps_n, max_fps_d, NULL);
  if (codec_details->format_id == kVTFormatH264) {
    gst_structure_set (gst_caps_get_structure (src_caps, 0),
        "stream-format", G_TYPE_STRING, "avc", NULL);
  }
  src_template = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
      src_caps);
  gst_element_class_add_pad_template (element_class, src_template);
  gst_object_unref (src_template);
}
Example #3
File: gstdspvenc.c, Project: EQ4/gst-dsp
static inline GstCaps *
generate_sink_template(void)
{
	GstCaps *caps;
	GstStructure *struc;

	caps = gst_caps_new_empty();

	struc = gst_structure_new("video/x-raw-yuv",
				  "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('U', 'Y', 'V', 'Y'),
				  NULL);

	gst_caps_append_structure(caps, struc);

	struc = gst_structure_new("video/x-raw-yuv",
				  "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('I', '4', '2', '0'),
				  NULL);

	gst_caps_append_structure(caps, struc);

	return caps;
}
Example #4
static gboolean
gst_aiff_parse_parse_file_header (GstAiffParse * aiff, GstBuffer * buf)
{
  guint8 *data;
  guint32 header, type = 0;

  if (GST_BUFFER_SIZE (buf) < 12) {
    GST_WARNING_OBJECT (aiff, "Buffer too short");
    goto not_aiff;
  }

  data = GST_BUFFER_DATA (buf);

  header = GST_READ_UINT32_LE (data);
  type = GST_READ_UINT32_LE (data + 8);

  if (header != GST_MAKE_FOURCC ('F', 'O', 'R', 'M'))
    goto not_aiff;

  if (type == GST_MAKE_FOURCC ('A', 'I', 'F', 'F'))
    aiff->is_aifc = FALSE;
  else if (type == GST_MAKE_FOURCC ('A', 'I', 'F', 'C'))
    aiff->is_aifc = TRUE;
  else
    goto not_aiff;

  gst_buffer_unref (buf);
  return TRUE;

  /* ERRORS */
not_aiff:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, WRONG_TYPE, (NULL),
        ("File is not an AIFF file: %" GST_FOURCC_FORMAT,
            GST_FOURCC_ARGS (type)));
    gst_buffer_unref (buf);
    return FALSE;
  }
}
Example #5
static GstFlowReturn
gst_ivf_parse_handle_frame_start (GstIvfParse * ivf, GstBaseParseFrame * frame,
    gint * skipsize)
{
  GstBuffer *const buffer = frame->buffer;
  GstMapInfo map;
  GstFlowReturn ret = GST_FLOW_OK;

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  if (map.size >= IVF_FILE_HEADER_SIZE) {
    guint32 magic = GST_READ_UINT32_LE (map.data);
    guint16 version = GST_READ_UINT16_LE (map.data + 4);
    guint16 header_size = GST_READ_UINT16_LE (map.data + 6);
    guint32 fourcc = GST_READ_UINT32_LE (map.data + 8);
    guint16 width = GST_READ_UINT16_LE (map.data + 12);
    guint16 height = GST_READ_UINT16_LE (map.data + 14);
    guint32 fps_n = GST_READ_UINT32_LE (map.data + 16);
    guint32 fps_d = GST_READ_UINT32_LE (map.data + 20);
#ifndef GST_DISABLE_GST_DEBUG
    guint32 num_frames = GST_READ_UINT32_LE (map.data + 24);
#endif

    if (magic != GST_MAKE_FOURCC ('D', 'K', 'I', 'F') ||
        version != 0 || header_size != 32 ||
        fourcc_to_media_type (fourcc) == NULL) {
      GST_ELEMENT_ERROR (ivf, STREAM, WRONG_TYPE, (NULL), (NULL));
      ret = GST_FLOW_ERROR;
      goto end;
    }

    ivf->fourcc = fourcc;

    gst_ivf_parse_set_size (ivf, width, height);
    gst_ivf_parse_set_framerate (ivf, fps_n, fps_d);

    GST_LOG_OBJECT (ivf, "Stream has %d frames", num_frames);

    /* move along */
    ivf->state = GST_IVF_PARSE_DATA;
    gst_base_parse_set_min_frame_size (GST_BASE_PARSE_CAST (ivf),
        IVF_FRAME_HEADER_SIZE);
    *skipsize = IVF_FILE_HEADER_SIZE;
  } else {
    GST_LOG_OBJECT (ivf, "Header data not yet available.");
    *skipsize = 0;
  }

end:
  gst_buffer_unmap (buffer, &map);
  return ret;
}
Example #6
/******************************************************************************
 * gst_tiprepencbuf_transform_caps
 *   Most platforms require that the exact same caps are used on both ends.
 *   DM6467 can also convert from Y8C8/NV16 to NV12.
 *****************************************************************************/
static GstCaps*
gst_tiprepencbuf_transform_caps(GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps)
{
    GstTIPrepEncBuf *prepencbuf = GST_TIPREPENCBUF(trans);
    GstCaps         *supported_caps;
    GstStructure    *caps_struct;

    g_return_val_if_fail(GST_IS_CAPS(caps), NULL);

    /* We always support the same caps on both sides */
    supported_caps = gst_caps_copy(caps);

    /* On DM6467, we also support conversion from Y8C8/NV16 to NV12 */
    if (prepencbuf->device == Cpu_Device_DM6467) {
        switch (direction) {
            case GST_PAD_SINK:
                caps_struct =
                    gst_structure_copy(gst_caps_get_structure(caps, 0));
                gst_structure_set(caps_struct, "format", GST_TYPE_FOURCC,
                    GST_MAKE_FOURCC('N', 'V', '1', '2'), (gchar *) NULL);
                gst_caps_append_structure(supported_caps, caps_struct);
                break;
            case GST_PAD_SRC:
                caps_struct =
                    gst_structure_copy(gst_caps_get_structure(caps, 0));
                gst_structure_set(caps_struct, "format", GST_TYPE_FOURCC,
                    GST_MAKE_FOURCC('N', 'V', '1', '6'), (gchar *) NULL);
                gst_caps_append_structure(supported_caps, caps_struct);
                break;
            case GST_PAD_UNKNOWN:
                break;
        }
    }

    return supported_caps;
}
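
For context, a transform_caps implementation like the one above only takes effect once it is installed on the element's GstBaseTransform class vtable. A minimal sketch of that hookup against the 0.10 API, assuming the usual GstTIPrepEncBufClass type name for this element:

static void
gst_tiprepencbuf_class_init (GstTIPrepEncBufClass * klass)
{
    GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);

    /* let GstBaseTransform query which caps this element can convert between */
    trans_class->transform_caps =
        GST_DEBUG_FUNCPTR (gst_tiprepencbuf_transform_caps);
}
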
static GstStructure *
gst_mio_video_device_format_basics_to_structure (GstMIOVideoDevice * self,
    GstMIOVideoFormat * format)
{
  GstStructure *s;

  switch (format->type) {
    case kCVPixelFormatType_422YpCbCr8:
    case kCVPixelFormatType_422YpCbCr8Deprecated:
    {
      guint fcc;

      if (format->type == kCVPixelFormatType_422YpCbCr8)
        fcc = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
      else
        fcc = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');

      s = gst_structure_new ("video/x-raw-yuv",
          "format", GST_TYPE_FOURCC, fcc,
          "width", G_TYPE_INT, format->dim.width,
          "height", G_TYPE_INT, format->dim.height, NULL);
      break;
    }
    case kFigVideoCodecType_JPEG_OpenDML:
    {
      s = gst_structure_new ("image/jpeg",
          "width", G_TYPE_INT, format->dim.width,
          "height", G_TYPE_INT, format->dim.height, NULL);
      break;
    }
    default:
      s = NULL;
      break;
  }

  return s;
}
Example #8
static void
gst_sdlvideosink_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
  GstCaps *capslist;
  gint i;
  guint32 formats[] = {
    GST_MAKE_FOURCC ('I', '4', '2', '0'),
    GST_MAKE_FOURCC ('Y', 'V', '1', '2'),
    GST_MAKE_FOURCC ('Y', 'U', 'Y', '2')
        /*
           GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'),
           GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y')
         */
  };

  /* make a list of all available caps */
  capslist = gst_caps_new_empty ();
  for (i = 0; i < G_N_ELEMENTS (formats); i++) {
    gst_caps_append_structure (capslist,
        gst_structure_new ("video/x-raw-yuv",
            "format", GST_TYPE_FOURCC, formats[i],
            "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
            "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
            "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL));
  }

  sink_template = gst_pad_template_new ("sink",
      GST_PAD_SINK, GST_PAD_ALWAYS, capslist);

  gst_element_class_add_pad_template (element_class, sink_template);
  gst_element_class_set_details_simple (element_class, "SDL video sink",
      "Sink/Video", "An SDL-based videosink",
      "Ronald Bultje <*****@*****.**>, "
      "Edgard Lima <*****@*****.**>, "
      "Jan Schmidt <*****@*****.**>");
}
Example #9
static GstStructure *
gst_decklink_mode_get_structure (GstDecklinkModeEnum e)
{
  const GstDecklinkMode *mode = &modes[e];

  return gst_structure_new ("video/x-raw-yuv",
      "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'),
      "width", G_TYPE_INT, mode->width,
      "height", G_TYPE_INT, mode->height,
      "framerate", GST_TYPE_FRACTION, mode->fps_n, mode->fps_d,
      "interlaced", G_TYPE_BOOLEAN, mode->interlaced,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, mode->par_n, mode->par_d,
      "color-matrix", G_TYPE_STRING, mode->is_hdtv ? "hdtv" : "sdtv",
      "chroma-site", G_TYPE_STRING, "mpeg2", NULL);
}
Example #10
static void
gst_mim_enc_create_tcp_header (GstMimEnc * mimenc, guint8 * p,
    guint32 payload_size, GstClockTime ts, gboolean keyframe, gboolean paused)
{
  p[0] = 24;
  p[1] = paused ? 1 : 0;
  GST_WRITE_UINT16_LE (p + 2, mimenc->width);
  GST_WRITE_UINT16_LE (p + 4, mimenc->height);
  GST_WRITE_UINT16_LE (p + 6, keyframe ? 1 : 0);
  GST_WRITE_UINT32_LE (p + 8, payload_size);
  GST_WRITE_UINT32_LE (p + 12, paused ? 0 :
      GST_MAKE_FOURCC ('M', 'L', '2', '0'));
  GST_WRITE_UINT32_LE (p + 16, 0);
  GST_WRITE_UINT32_LE (p + 20, ts / GST_MSECOND);
}
Example #11
static MXFMetadataFileDescriptor *
mxf_up_get_descriptor (GstPadTemplate * tmpl, GstCaps * caps,
    MXFEssenceElementWriteFunc * handler, gpointer * mapping_data)
{
  GstStructure *s;

  s = gst_caps_get_structure (caps, 0);
  if (strcmp (gst_structure_get_name (s), "video/x-raw-rgb") == 0) {
    return mxf_up_get_rgba_descriptor (tmpl, caps, handler, mapping_data);
  } else if (strcmp (gst_structure_get_name (s), "video/x-raw-yuv") == 0) {
    guint32 fourcc;

    if (!gst_structure_get_fourcc (s, "format", &fourcc))
      return NULL;

    if (fourcc == GST_MAKE_FOURCC ('A', 'Y', 'U', 'V') ||
        fourcc == GST_MAKE_FOURCC ('v', '3', '0', '8'))
      return mxf_up_get_rgba_descriptor (tmpl, caps, handler, mapping_data);

    return mxf_up_get_cdci_descriptor (tmpl, caps, handler, mapping_data);
  }

  g_assert_not_reached ();
}
Example #12
gboolean
gst_vdp_video_buffer_calculate_size (guint32 fourcc, gint width, gint height,
    guint * size)
{
  switch (fourcc) {
    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
    {
      *size = gst_video_format_get_size (GST_VIDEO_FORMAT_YV12, width, height);
      break;
    }
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
    {
      *size = gst_video_format_get_size (GST_VIDEO_FORMAT_YV12, width, height);
      break;
    }
    case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
    {
      *size = width * height + width * height / 2;
      break;
    }
    case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
    {
      *size = gst_video_format_get_size (GST_VIDEO_FORMAT_UYVY, width, height);
      break;
    }
    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
    {
      *size = gst_video_format_get_size (GST_VIDEO_FORMAT_YUY2, width, height);
      break;
    }
    default:
      return FALSE;
  }

  return TRUE;
}
Example #13
bool tcam_gst_is_fourcc_rgb (const unsigned int fourcc)
{
    if (fourcc == GST_MAKE_FOURCC('R', 'G', 'B', 'x')
        || fourcc == GST_MAKE_FOURCC('x', 'R', 'G', 'B')
        || fourcc == GST_MAKE_FOURCC('B', 'G', 'R', 'x')
        || fourcc == GST_MAKE_FOURCC('x', 'B', 'G', 'R')
        || fourcc == GST_MAKE_FOURCC('R', 'G', 'B', 'A')
        || fourcc == GST_MAKE_FOURCC('A', 'R', 'G', 'B')
        || fourcc == GST_MAKE_FOURCC('B', 'G', 'R', 'A')
        || fourcc == GST_MAKE_FOURCC('A', 'B', 'G', 'R')
        || fourcc == FOURCC_BGR24
        || fourcc == FOURCC_RGB32
        || fourcc == FOURCC_RGB64)
    {
        return TRUE;
    }

    return FALSE;
}
static GstCaps * create_src_caps (GstOmxBaseFilter21 *omx_base)
{
    GstCaps *caps;    
    GstOmxBaseFilter21 *self;
    int width, height;
    GstStructure *struc;

    self = GST_OMX_BASE_FILTER21 (omx_base);
    caps = gst_pad_peer_get_caps (omx_base->srcpad);

    if (gst_caps_is_empty (caps))
    {

        width = 1920;
        height = 1088;

    }
    else
    {
        GstStructure *s;

        s = gst_caps_get_structure (caps, 0);

        if (!(gst_structure_get_int (s, "width", &width) &&
            gst_structure_get_int (s, "height", &height)))
        {

            width = 1920;
            height = 1088;

        }
    }

    caps = gst_caps_new_empty ();
    struc = gst_structure_new (("video/x-raw-yuv"),
            "width",  G_TYPE_INT, width,
            "height", G_TYPE_INT, height,
            "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'),
            NULL);


    gst_caps_append_structure (caps, struc);

    return caps;
}
Example #15
static void
transform_value (GValue * dest)
{
  GValue fourcc = { 0 };

  g_value_init (dest, GST_TYPE_LIST);
  g_value_init (&fourcc, GST_TYPE_FOURCC);

  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('I', '4', '2', '0'));
  gst_value_list_append_value (dest, &fourcc);

  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('Y', 'V', '1', '2'));
  gst_value_list_append_value (dest, &fourcc);

  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'));
  gst_value_list_append_value (dest, &fourcc);

  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'));
  gst_value_list_append_value (dest, &fourcc);

  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'));
  gst_value_list_append_value (dest, &fourcc);

  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('Y', '4', '2', 'B'));
  gst_value_list_append_value (dest, &fourcc);

  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('Y', '4', '4', '4'));
  gst_value_list_append_value (dest, &fourcc);

  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('v', '2', '1', '0'));
  gst_value_list_append_value (dest, &fourcc);

  gst_value_set_fourcc (&fourcc, GST_MAKE_FOURCC ('v', '2', '1', '6'));
  gst_value_list_append_value (dest, &fourcc);

  g_value_unset (&fourcc);
}
Example #16
static gboolean
gst_csp_structure_is_alpha (GstStructure * s)
{
  GQuark name;

  name = gst_structure_get_name_id (s);

  if (name == _QRAWRGB) {
    return gst_structure_id_has_field (s, _QALPHAMASK);
  } else if (name == _QRAWYUV) {
    guint32 fourcc;

    if (!gst_structure_get_fourcc (s, "format", &fourcc))
      return FALSE;

    return (fourcc == GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'));
  }

  return FALSE;
}
Example #17
static gboolean
gst_ffmpegcsp_structure_is_alpha (GstStructure * s)
{
  const gchar *name;

  name = gst_structure_get_name (s);

  if (g_str_equal (name, "video/x-raw-rgb")) {
    return gst_structure_has_field (s, "alpha_mask");
  } else if (g_str_equal (name, "video/x-raw-yuv")) {
    guint32 fourcc;

    if (!gst_structure_get_fourcc (s, "format", &fourcc))
      return FALSE;

    return (fourcc == GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'));
  }

  return FALSE;
}
static guint8 *
gss_adaptive_assemble_chunk (GssTransaction * t, GssAdaptive * adaptive,
    GssAdaptiveLevel * level, GssIsomFragment * fragment)
{
  GError *error = NULL;
  guint8 *mdat_data;
  int fd;
  gboolean ret;

  g_return_val_if_fail (t != NULL, NULL);
  g_return_val_if_fail (adaptive != NULL, NULL);
  g_return_val_if_fail (level != NULL, NULL);
  g_return_val_if_fail (fragment != NULL, NULL);

  fd = open (level->filename, O_RDONLY);
  if (fd < 0) {
    GST_WARNING ("failed to open \"%s\", error=\"%s\", broken manifest?",
        level->filename, g_strerror (errno));
    gss_transaction_error_not_found (t,
        "failed to open file (broken manifest?)");
    return NULL;
  }

  mdat_data = g_malloc (fragment->mdat_size);

  GST_WRITE_UINT32_BE (mdat_data, fragment->mdat_size);
  GST_WRITE_UINT32_LE (mdat_data + 4, GST_MAKE_FOURCC ('m', 'd', 'a', 't'));

  ret = gss_sglist_load (fragment->sglist, fd, mdat_data + 8, &error);
  if (!ret) {
    gss_transaction_error_not_found (t, error->message);
    g_error_free (error);
    g_free (mdat_data);
    close (fd);
    return NULL;
  }

  close (fd);

  return mdat_data;
}
void
gss_adaptive_convert_isoff_ondemand (GssAdaptive * adaptive,
    GssIsomMovie * movie, GssIsomTrack * track, GssDrmType drm_type)
{
  int i;
  guint64 offset = 0;
  gboolean is_video;

  is_video = (track->hdlr.handler_type == GST_MAKE_FOURCC ('v', 'i', 'd', 'e'));

  GST_DEBUG ("stsd entries %d", track->stsd.entry_count);

  if (drm_type != GSS_DRM_CLEAR) {
    movie->pssh.data = adaptive->drm_info.data;
    movie->pssh.len = adaptive->drm_info.data_len;
    memcpy (movie->pssh.uuid,
        gss_drm_get_drm_uuid (adaptive->drm_info.drm_type), 16);
    movie->pssh.present = TRUE;
  }

  if (drm_type == GSS_DRM_PLAYREADY) {
    track->is_encrypted = TRUE;
  }
  for (i = 0; i < track->n_fragments; i++) {
    GssIsomFragment *fragment = track->fragments[i];

    fragment->offset = offset;
    gss_isom_fragment_serialize (fragment, &fragment->moof_data,
        &fragment->moof_size, is_video);
    offset += fragment->moof_size;
    offset += fragment->mdat_header_size;
    offset += fragment->mdat_size;
  }
  track->dash_size = offset;

  gss_isom_movie_serialize_track_dash (movie, track,
      &track->dash_header_data, &track->dash_header_size,
      &track->dash_header_and_sidx_size);

  track->dash_size += track->dash_header_and_sidx_size;
}
Example #20
/**
 * gst_vaapi_profile_get_codec:
 * @profile: a #GstVaapiProfile
 *
 * Extracts the #GstVaapiCodec from @profile.
 *
 * Return value: the #GstVaapiCodec from @profile
 */
GstVaapiCodec
gst_vaapi_profile_get_codec(GstVaapiProfile profile)
{
    GstVaapiCodec codec;

    switch (profile) {
    case GST_VAAPI_PROFILE_VC1_SIMPLE:
    case GST_VAAPI_PROFILE_VC1_MAIN:
        codec = GST_VAAPI_CODEC_WMV3;
        break;
    case GST_VAAPI_PROFILE_VC1_ADVANCED:
        codec = GST_VAAPI_CODEC_VC1;
        break;
    case GST_VAAPI_PROFILE_JPEG_BASELINE:
        codec = GST_VAAPI_CODEC_JPEG;
        break;
    default:
        codec = (guint32)profile & GST_MAKE_FOURCC(0xff,0xff,0xff,0);
        break;
    }
    return codec;
}
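
The default branch above relies on GST_MAKE_FOURCC packing its first argument into the least significant byte, so masking with GST_MAKE_FOURCC(0xff,0xff,0xff,0) keeps the first three characters of the profile fourcc (the codec family) and clears the fourth. A minimal standalone sketch of that masking, using a made-up profile value purely for illustration:

#include <gst/gst.h>

int
main (void)
{
    /* hypothetical profile fourcc: codec family "AVC", profile variant 'H' */
    guint32 profile = GST_MAKE_FOURCC ('A', 'V', 'C', 'H');
    guint32 codec   = profile & GST_MAKE_FOURCC (0xff, 0xff, 0xff, 0);

    g_assert (codec == GST_MAKE_FOURCC ('A', 'V', 'C', 0));
    g_print ("profile 0x%08x -> codec 0x%08x\n", profile, codec);
    return 0;
}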
Example #21
static gboolean
gst_rtp_j2k_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
{
  GstStructure *structure;
  gint clock_rate;
  GstCaps *outcaps;
  gboolean res;

  structure = gst_caps_get_structure (caps, 0);

  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
    clock_rate = 90000;
  depayload->clock_rate = clock_rate;

  outcaps =
      gst_caps_new_simple ("image/x-jpc", "framerate", GST_TYPE_FRACTION, 0, 1,
      "fields", G_TYPE_INT, 1, "fourcc", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('s',
          'Y', 'U', 'V'), NULL);
  res = gst_pad_set_caps (depayload->srcpad, outcaps);
  gst_caps_unref (outcaps);

  return res;
}
Example #22
static GstBuffer *
gst_mimenc_create_tcp_header (GstMimEnc * mimenc, guint32 payload_size,
    GstClockTime timestamp, gboolean keyframe, gboolean paused)
{
  // 24 bytes
  GstBuffer *buf_header = gst_buffer_new_and_alloc (24);
  guchar *p = (guchar *) GST_BUFFER_DATA (buf_header);

  GST_BUFFER_TIMESTAMP (buf_header) = timestamp;

  p[0] = 24;
  p[1] = paused ? 1 : 0;
  GST_WRITE_UINT16_LE (p + 2, mimenc->width);
  GST_WRITE_UINT16_LE (p + 4, mimenc->height);
  GST_WRITE_UINT16_LE (p + 6, keyframe ? 1 : 0);
  GST_WRITE_UINT32_LE (p + 8, payload_size);
  GST_WRITE_UINT32_LE (p + 12, paused ? 0 :
      GST_MAKE_FOURCC ('M', 'L', '2', '0'));
  GST_WRITE_UINT32_LE (p + 16, 0);
  GST_WRITE_UINT32_LE (p + 20, timestamp / GST_MSECOND);

  return buf_header;
}
Example #23
/**
 * gst_video_format_from_fourcc:
 * @fourcc: a FOURCC value representing raw YUV video
 *
 * Converts a FOURCC value into the corresponding #GstVideoFormat.
 * If the FOURCC cannot be represented by #GstVideoFormat,
 * #GST_VIDEO_FORMAT_UNKNOWN is returned.
 *
 * Since: 0.10.16
 *
 * Returns: the #GstVideoFormat describing the FOURCC value
 */
GstVideoFormat
gst_video_format_from_fourcc (guint32 fourcc)
{
  switch (fourcc) {
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
      return GST_VIDEO_FORMAT_I420;
    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
      return GST_VIDEO_FORMAT_YV12;
    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
      return GST_VIDEO_FORMAT_YUY2;
    case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
      return GST_VIDEO_FORMAT_UYVY;
    case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
      return GST_VIDEO_FORMAT_AYUV;
    case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
      return GST_VIDEO_FORMAT_Y41B;
    case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
      return GST_VIDEO_FORMAT_Y42B;
    default:
      return GST_VIDEO_FORMAT_UNKNOWN;
  }
}
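
A typical caller pattern for the helper above is to pull the "format" fourcc out of a video/x-raw-yuv caps structure first and then map it; a minimal sketch against the 0.10 API (the wrapper name is illustrative):

static GstVideoFormat
format_from_yuv_structure (GstStructure * s)
{
  guint32 fourcc;

  /* "format" is a GST_TYPE_FOURCC field on video/x-raw-yuv caps */
  if (!gst_structure_get_fourcc (s, "format", &fourcc))
    return GST_VIDEO_FORMAT_UNKNOWN;

  return gst_video_format_from_fourcc (fourcc);
}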
Example #24
/**
 * gst_video_format_to_fourcc:
 * @format: a #GstVideoFormat video format
 *
 * Converts a #GstVideoFormat value into the corresponding FOURCC.  Only
 * a few YUV formats have corresponding FOURCC values.  If @format has
 * no corresponding FOURCC value, 0 is returned.
 *
 * Since: 0.10.16
 *
 * Returns: the FOURCC corresponding to @format
 */
guint32
gst_video_format_to_fourcc (GstVideoFormat format)
{
  g_return_val_if_fail (format != GST_VIDEO_FORMAT_UNKNOWN, 0);

  switch (format) {
    case GST_VIDEO_FORMAT_I420:
      return GST_MAKE_FOURCC ('I', '4', '2', '0');
    case GST_VIDEO_FORMAT_YV12:
      return GST_MAKE_FOURCC ('Y', 'V', '1', '2');
    case GST_VIDEO_FORMAT_YUY2:
      return GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');
    case GST_VIDEO_FORMAT_UYVY:
      return GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
    case GST_VIDEO_FORMAT_AYUV:
      return GST_MAKE_FOURCC ('A', 'Y', 'U', 'V');
    case GST_VIDEO_FORMAT_Y41B:
      return GST_MAKE_FOURCC ('Y', '4', '1', 'B');
    case GST_VIDEO_FORMAT_Y42B:
      return GST_MAKE_FOURCC ('Y', '4', '2', 'B');
    default:
      return 0;
  }
}
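
Going the other way, the fourcc returned above is usually dropped straight into raw-YUV caps; a minimal sketch against the 0.10 API (the wrapper name is illustrative):

static GstCaps *
yuv_caps_for_format (GstVideoFormat format, gint width, gint height)
{
  guint32 fourcc = gst_video_format_to_fourcc (format);

  /* formats without a FOURCC mapping return 0 */
  if (fourcc == 0)
    return NULL;

  return gst_caps_new_simple ("video/x-raw-yuv",
      "format", GST_TYPE_FOURCC, fourcc,
      "width", G_TYPE_INT, width,
      "height", G_TYPE_INT, height, NULL);
}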
Example #25
static GstFlowReturn
gst_png_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize)
{
  GstPngParse *pngparse = GST_PNG_PARSE (parse);
  GstMapInfo map;
  GstByteReader reader;
  GstFlowReturn ret = GST_FLOW_OK;
  guint64 signature;
  guint width = 0, height = 0;

  gst_buffer_map (frame->buffer, &map, GST_MAP_READ);
  gst_byte_reader_init (&reader, map.data, map.size);

  if (!gst_byte_reader_peek_uint64_be (&reader, &signature))
    goto beach;

  if (signature != PNG_SIGNATURE) {
    for (;;) {
      guint offset;

      offset = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
          0x89504E47, 0, gst_byte_reader_get_remaining (&reader));

      if (offset == -1) {
        *skipsize = gst_byte_reader_get_remaining (&reader) - 4;
        goto beach;
      }

      gst_byte_reader_skip (&reader, offset);

      if (!gst_byte_reader_peek_uint64_be (&reader, &signature))
        goto beach;

      if (signature == PNG_SIGNATURE) {
        /* We're skipping, go out, we'll be back */
        *skipsize = gst_byte_reader_get_pos (&reader);
        goto beach;
      }
      gst_byte_reader_skip (&reader, 4);
    }
  }

  gst_byte_reader_skip (&reader, 8);

  for (;;) {
    guint32 length;
    guint32 code;

    if (!gst_byte_reader_get_uint32_be (&reader, &length))
      goto beach;
    if (!gst_byte_reader_get_uint32_le (&reader, &code))
      goto beach;

    GST_TRACE_OBJECT (parse, "%" GST_FOURCC_FORMAT " chunk, %u bytes",
        GST_FOURCC_ARGS (code), length);

    if (code == GST_MAKE_FOURCC ('I', 'H', 'D', 'R')) {
      if (!gst_byte_reader_get_uint32_be (&reader, &width))
        goto beach;
      if (!gst_byte_reader_get_uint32_be (&reader, &height))
        goto beach;
      length -= 8;
    } else if (code == GST_MAKE_FOURCC ('I', 'D', 'A', 'T')) {
      gst_base_parse_set_min_frame_size (parse,
          gst_byte_reader_get_pos (&reader) + 4 + length + 12);
    }

    if (!gst_byte_reader_skip (&reader, length + 4))
      goto beach;

    if (code == GST_MAKE_FOURCC ('I', 'E', 'N', 'D')) {
      /* the start code and at least 2 empty frames (IHDR and IEND) */
      gst_base_parse_set_min_frame_size (parse, 8 + 12 + 12);

      if (pngparse->width != width || pngparse->height != height) {
        GstCaps *caps, *sink_caps;

        pngparse->height = height;
        pngparse->width = width;

        caps = gst_caps_new_simple ("image/png",
            "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);

        sink_caps =
            gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (pngparse));

        if (sink_caps) {
          GstStructure *st;
          gint fr_num, fr_denom;

          st = gst_caps_get_structure (sink_caps, 0);
          if (st
              && gst_structure_get_fraction (st, "framerate", &fr_num,
                  &fr_denom)) {
            gst_caps_set_simple (caps,
                "framerate", GST_TYPE_FRACTION, fr_num, fr_denom, NULL);
          } else {
            GST_WARNING_OBJECT (pngparse, "No framerate set");
          }

          gst_caps_unref (sink_caps);
        }

        if (!gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps))
          ret = GST_FLOW_NOT_NEGOTIATED;

        gst_caps_unref (caps);

        if (ret != GST_FLOW_OK)
          goto beach;
      }


      gst_buffer_unmap (frame->buffer, &map);
      return gst_base_parse_finish_frame (parse, frame,
          gst_byte_reader_get_pos (&reader));
    }
  }

beach:

  gst_buffer_unmap (frame->buffer, &map);

  return ret;
}
Example #26
void QGstreamerGLTextureRenderer::setFallbackBuffer(GstBuffer *buffer)
{
#ifdef GL_TEXTURE_SINK_DEBUG
    qDebug() << Q_FUNC_INFO << buffer;
#endif
    m_fallbackImage = QImage();

    if (!buffer)
        return;

    GstCaps *caps = GST_BUFFER_CAPS(buffer);
    const uchar *data = GST_BUFFER_DATA(buffer);

    if (!(caps && data))
        return;

    const GstStructure *structure = gst_caps_get_structure(caps, 0);
    guint32 fourcc;
    gst_structure_get_fourcc(structure, "format", &fourcc);

    if (fourcc != GST_MAKE_FOURCC('Y', 'U', 'Y', '2') &&
        fourcc != GST_MAKE_FOURCC('U', 'Y', 'V', 'Y'))
        return;

    QSize resolution = QGstUtils::capsResolution(caps);
    m_fallbackImage = QImage(resolution, QImage::Format_RGB32);

    for (int y=0; y<resolution.height(); y++) {
        const uchar *src = data + y*GST_ROUND_UP_4(resolution.width())*2;
        quint32 *dst = (quint32 *)m_fallbackImage.scanLine(y);

        int y1, y2;
        int u, v;

        int r1, g1, b1;
        int r2, g2, b2;

        int r, g, b;

        for (int x=0; x<resolution.width(); x+=2) {
            if (fourcc == GST_MAKE_FOURCC('Y', 'U', 'Y', '2')) {
                y1 = *src; src++;
                u  = *src; src++;
                y2 = *src; src++;
                v  = *src; src++;
            } else {
                u  = *src; src++;
                y1 = *src; src++;
                v  = *src; src++;
                y2 = *src; src++;
            }

            y1 = 298*y1/256;
            y2 = 298*y2/256;

            r = 408*v/256 - 223;
            g = - 100*u/256 - 208*v/256 + 136;
            b = 516*u/256 - 276;

            r1 = qBound(0, y1 + r, 255);
            g1 = qBound(0, y1 + g, 255);
            b1 = qBound(0, y1 + b, 255);

            *dst = qRgb(r1, g1, b1); dst++;

            r2 = qBound(0, y2 + r, 255);
            g2 = qBound(0, y2 + g, 255);
            b2 = qBound(0, y2 + b, 255);

            *dst = qRgb(r2, g2, b2); dst++;
        }
    }
}
Example #27
static GstFlowReturn
pad_chain(GstPad *pad, GstBuffer *buf)
{
	struct obj *self;
	GstFlowReturn ret = GST_FLOW_OK;
	AVCodecContext *ctx;
	AVFrame *frame;
	int got_pic;
	AVPacket pkt;
	int read;

	self = (struct obj *)((GstObject *)pad)->parent;
	ctx = self->av_ctx;

	if (G_UNLIKELY(!self->initialized)) {
		GstCaps *new_caps;
		GstStructure *struc;

		self->initialized = true;
		if (gst_av_codec_open(ctx, self->codec) < 0) {
			ret = GST_FLOW_ERROR;
			goto leave;
		}

		if (self->parse_func)
			self->parse_func(self, buf);

		new_caps = gst_caps_new_empty();

		struc = gst_structure_new("video/x-raw-yuv",
				"width", G_TYPE_INT, ctx->width,
				"height", G_TYPE_INT, ctx->height,
				"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('I','4','2','0'),
				NULL);

		if (ctx->time_base.num)
			gst_structure_set(struc,
					"framerate", GST_TYPE_FRACTION,
					ctx->time_base.den, ctx->time_base.num,
					NULL);

		if (ctx->sample_aspect_ratio.num)
			gst_structure_set(struc,
					"pixel-aspect-ratio", GST_TYPE_FRACTION,
					ctx->sample_aspect_ratio.num, ctx->sample_aspect_ratio.den,
					NULL);

		gst_caps_append_structure(new_caps, struc);

		GST_INFO_OBJECT(self, "caps are: %" GST_PTR_FORMAT, new_caps);
		gst_pad_set_caps(self->srcpad, new_caps);
		gst_caps_unref(new_caps);
	}

	av_init_packet(&pkt);
	pkt.data = buf->data;
	pkt.size = buf->size;

	frame = avcodec_alloc_frame();

	read = avcodec_decode_video2(ctx, frame, &got_pic, &pkt);
	if (read < 0) {
		GST_WARNING_OBJECT(self, "error: %i", read);
		goto leave;
	}

	if (got_pic) {
		GstBuffer *out_buf;
		out_buf = convert_frame(self, frame);
		out_buf->timestamp = buf->timestamp;
		out_buf->duration = buf->duration;
		ret = gst_pad_push(self->srcpad, out_buf);
	}

leave:
	gst_buffer_unref(buf);

	return ret;
}
Example #28
static gboolean
sink_setcaps(GstPad *pad, GstCaps *caps)
{
	struct obj *self;
	GstStructure *in_struc;
	const char *name;
	int codec_id;
	const GValue *codec_data;
	GstBuffer *buf;
	AVCodecContext *ctx;

	self = (struct obj *)((GstObject *)pad)->parent;
	ctx = self->av_ctx;

	in_struc = gst_caps_get_structure(caps, 0);

	gst_structure_get_int(in_struc, "width", &ctx->width);
	gst_structure_get_int(in_struc, "height", &ctx->height);

	gst_structure_get_fraction(in_struc, "pixel-aspect-ratio",
			&ctx->sample_aspect_ratio.num, &ctx->sample_aspect_ratio.den);

	gst_structure_get_fraction(in_struc, "framerate",
			&ctx->time_base.den, &ctx->time_base.num);

	/* bug in xvimagesink? */
	if (!ctx->time_base.num)
		ctx->time_base = (AVRational){ 1, 0 };

	name = gst_structure_get_name(in_struc);
	if (strcmp(name, "video/x-h263") == 0)
		codec_id = CODEC_ID_H263;
	else if (strcmp(name, "video/x-h264") == 0)
		codec_id = CODEC_ID_H264;
	else if (strcmp(name, "video/mpeg") == 0) {
		int version;
		gst_structure_get_int(in_struc, "mpegversion", &version);
		switch (version) {
		case 4:
			codec_id = CODEC_ID_MPEG4;
			break;
		case 2:
			codec_id = CODEC_ID_MPEG2VIDEO;
			break;
		case 1:
			codec_id = CODEC_ID_MPEG1VIDEO;
			break;
		default:
			codec_id = CODEC_ID_NONE;
			break;
		}
	}
	else if (strcmp(name, "video/x-divx") == 0) {
		int version;
		gst_structure_get_int(in_struc, "divxversion", &version);
		switch (version) {
		case 5:
		case 4:
			codec_id = CODEC_ID_MPEG4;
			break;
		case 3:
			codec_id = CODEC_ID_MSMPEG4V3;
			break;
		default:
			codec_id = CODEC_ID_NONE;
			break;
		}
	}
	else if (strcmp(name, "video/x-xvid") == 0)
		codec_id = CODEC_ID_MPEG4;
	else if (strcmp(name, "video/x-3ivx") == 0)
		codec_id = CODEC_ID_MPEG4;
	else if (strcmp(name, "video/x-vp8") == 0)
		codec_id = CODEC_ID_VP8;
	else if (strcmp(name, "video/x-wmv") == 0) {
		int version;
		gst_structure_get_int(in_struc, "wmvversion", &version);
		switch (version) {
		case 3: {
			guint32 fourcc;
			codec_id = CODEC_ID_WMV3;
			if (gst_structure_get_fourcc(in_struc, "fourcc", &fourcc) ||
					gst_structure_get_fourcc(in_struc, "format", &fourcc))
			{
				if (fourcc == GST_MAKE_FOURCC('W', 'V', 'C', '1'))
					codec_id = CODEC_ID_VC1;
			}
			break;
		}
		case 2:
			codec_id = CODEC_ID_WMV2;
			break;
		case 1:
			codec_id = CODEC_ID_WMV1;
			break;
		default:
			codec_id = CODEC_ID_NONE;
			break;
		}

	}
	else
		codec_id = CODEC_ID_NONE;

	self->codec = avcodec_find_decoder(codec_id);
	if (!self->codec)
		return false;

	switch (codec_id) {
	case CODEC_ID_H263:
		self->parse_func = gst_av_h263_parse;
		break;
	case CODEC_ID_H264:
		self->parse_func = gst_av_h264_parse;
		break;
	case CODEC_ID_MPEG4:
		self->parse_func = gst_av_mpeg4_parse;
		break;
	}

	codec_data = gst_structure_get_value(in_struc, "codec_data");
	if (!codec_data)
		goto next;
	buf = gst_value_get_buffer(codec_data);
	if (!buf)
		goto next;
	ctx->extradata = malloc(buf->size + FF_INPUT_BUFFER_PADDING_SIZE);
	memcpy(ctx->extradata, buf->data, buf->size);
	ctx->extradata_size = buf->size;

	if (self->parse_func)
		self->parse_func(self, buf);

next:
	return true;
}
Example #29
static GstFlowReturn
gst_aiff_parse_stream_headers (GstAiffParse * aiff)
{
  GstFlowReturn res;
  GstBuffer *buf;
  guint32 tag, size;
  gboolean gotdata = FALSE;
  gboolean done = FALSE;
  GstEvent **event_p;
  GstFormat bformat;
  gint64 upstream_size = 0;

  bformat = GST_FORMAT_BYTES;
  gst_pad_query_peer_duration (aiff->sinkpad, &bformat, &upstream_size);
  GST_DEBUG_OBJECT (aiff, "upstream size %" G_GUINT64_FORMAT, upstream_size);

  /* loop headers until we get data */
  while (!done) {
    if (aiff->streaming) {
      if (!gst_aiff_parse_peek_chunk_info (aiff, &tag, &size))
        return GST_FLOW_OK;
    } else {
      if ((res =
              gst_pad_pull_range (aiff->sinkpad, aiff->offset, 8,
                  &buf)) != GST_FLOW_OK)
        goto header_read_error;
      tag = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf));
      size = GST_READ_UINT32_BE (GST_BUFFER_DATA (buf) + 4);
    }

    GST_INFO_OBJECT (aiff,
        "Got TAG: %" GST_FOURCC_FORMAT ", offset %" G_GUINT64_FORMAT,
        GST_FOURCC_ARGS (tag), aiff->offset);

    /* We just keep reading chunks until we find the one we're interested in.
     */
    switch (tag) {
      case GST_MAKE_FOURCC ('C', 'O', 'M', 'M'):{
        if (aiff->streaming) {
          if (!gst_aiff_parse_peek_chunk (aiff, &tag, &size))
            return GST_FLOW_OK;

          gst_adapter_flush (aiff->adapter, 8);
          aiff->offset += 8;

          buf = gst_adapter_take_buffer (aiff->adapter, size);
        } else {
          if ((res = gst_aiff_parse_read_chunk (aiff,
                      &aiff->offset, &tag, &buf)) != GST_FLOW_OK)
            return res;
        }

        if (!gst_aiff_parse_parse_comm (aiff, buf)) {
          gst_buffer_unref (buf);
          goto parse_header_error;
        }

        gst_buffer_unref (buf);

        /* do sanity checks of header fields */
        if (aiff->channels == 0)
          goto no_channels;
        if (aiff->rate == 0)
          goto no_rate;

        GST_DEBUG_OBJECT (aiff, "creating the caps");

        aiff->caps = gst_aiff_parse_create_caps (aiff);
        if (!aiff->caps)
          goto unknown_format;

        gst_pad_set_caps (aiff->srcpad, aiff->caps);

        aiff->bytes_per_sample = aiff->channels * aiff->width / 8;
        aiff->bps = aiff->bytes_per_sample * aiff->rate;

        if (aiff->bytes_per_sample <= 0)
          goto no_bytes_per_sample;

        aiff->got_comm = TRUE;
        break;
      }
      case GST_MAKE_FOURCC ('S', 'S', 'N', 'D'):{
        GstFormat fmt;
        GstBuffer *ssndbuf = NULL;
        const guint8 *ssnddata = NULL;
        guint32 datasize;

        GST_DEBUG_OBJECT (aiff, "Got 'SSND' TAG, size : %d", size);

        /* Now, read the 8-byte header in the SSND chunk */
        if (aiff->streaming) {
          if (!gst_aiff_parse_peek_data (aiff, 16, &ssnddata))
            return GST_FLOW_OK;
        } else {
          gst_buffer_unref (buf);
          if ((res =
                  gst_pad_pull_range (aiff->sinkpad, aiff->offset, 16,
                      &ssndbuf)) != GST_FLOW_OK)
            goto header_read_error;
          ssnddata = GST_BUFFER_DATA (ssndbuf);
        }

        aiff->ssnd_offset = GST_READ_UINT32_BE (ssnddata + 8);
        aiff->ssnd_blocksize = GST_READ_UINT32_BE (ssnddata + 12);

        gotdata = TRUE;
        if (aiff->streaming) {
          gst_adapter_flush (aiff->adapter, 16);
        } else {
          gst_buffer_unref (ssndbuf);
        }
        /* 8 byte chunk header, 16 byte SSND header */
        aiff->offset += 24;

        datasize = size - 16;

        aiff->datastart = aiff->offset + aiff->ssnd_offset;
        /* file might be truncated */
        fmt = GST_FORMAT_BYTES;
        if (upstream_size) {
          size = MIN (datasize, (upstream_size - aiff->datastart));
        }
        aiff->datasize = (guint64) datasize;
        aiff->dataleft = (guint64) datasize;
        aiff->end_offset = datasize + aiff->datastart;
        if (!aiff->streaming) {
          /* We will continue looking at chunks until the end - to read tags,
           * etc. */
          aiff->offset += datasize;
        }
        GST_DEBUG_OBJECT (aiff, "datasize = %d", datasize);
        if (aiff->streaming) {
          done = TRUE;
        }
        break;
      }
      default:
        gst_aiff_parse_ignore_chunk (aiff, buf, tag, size);
    }

    if (upstream_size && (aiff->offset >= upstream_size)) {
      /* Now we have gone through the whole file */
      done = TRUE;
    }
  }

  /* We read all the chunks (in pull mode) or reached the SSND chunk
   * (in push mode). We must have both COMM and SSND now; error out 
   * otherwise.
   */
  if (!aiff->got_comm) {
    GST_WARNING_OBJECT (aiff, "Failed to find COMM chunk");
    goto no_header;
  }
  if (!gotdata) {
    GST_WARNING_OBJECT (aiff, "Failed to find SSND chunk");
    goto no_data;
  }

  GST_DEBUG_OBJECT (aiff, "Finished parsing headers");

  if (gst_aiff_parse_calculate_duration (aiff)) {
    gst_segment_init (&aiff->segment, GST_FORMAT_TIME);
    gst_segment_set_duration (&aiff->segment, GST_FORMAT_TIME, aiff->duration);
  } else {
    /* no bitrate, let downstream peer do the math, we'll feed it bytes. */
    gst_segment_init (&aiff->segment, GST_FORMAT_BYTES);
    gst_segment_set_duration (&aiff->segment, GST_FORMAT_BYTES, aiff->datasize);
  }

  /* now we have all the info to perform a pending seek if any, if no
   * event, this will still do the right thing and it will also send
   * the right newsegment event downstream. */
  gst_aiff_parse_perform_seek (aiff, aiff->seek_event);
  /* remove pending event */
  event_p = &aiff->seek_event;
  gst_event_replace (event_p, NULL);

  /* we just started, we are discont */
  aiff->discont = TRUE;

  aiff->state = AIFF_PARSE_DATA;

  return GST_FLOW_OK;

  /* ERROR */
no_header:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, TYPE_NOT_FOUND, (NULL),
        ("Invalid AIFF header (no COMM found)"));
    return GST_FLOW_ERROR;
  }
no_data:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, TYPE_NOT_FOUND, (NULL),
        ("Invalid AIFF: no SSND found"));
    return GST_FLOW_ERROR;
  }
parse_header_error:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, DEMUX, (NULL),
        ("Couldn't parse audio header"));
    return GST_FLOW_ERROR;
  }
no_channels:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, FAILED, (NULL),
        ("Stream claims to contain no channels - invalid data"));
    return GST_FLOW_ERROR;
  }
no_rate:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, FAILED, (NULL),
        ("Stream with sample_rate == 0 - invalid data"));
    return GST_FLOW_ERROR;
  }
no_bytes_per_sample:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, FAILED, (NULL),
        ("Could not caluclate bytes per sample - invalid data"));
    return GST_FLOW_ERROR;
  }
unknown_format:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, TYPE_NOT_FOUND, (NULL),
        ("No caps found for format 0x%x, %d channels, %d Hz",
            aiff->format, aiff->channels, aiff->rate));
    return GST_FLOW_ERROR;
  }
header_read_error:
  {
    GST_ELEMENT_ERROR (aiff, STREAM, DEMUX, (NULL),
        ("Couldn't read in header"));
    return GST_FLOW_ERROR;
  }
}
Example #30
static GstFlowReturn
gst_mim_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstMimDec *mimdec = GST_MIM_DEC (parent);
  GstBuffer *out_buf;
  const guchar *header, *frame_body;
  guint32 fourcc;
  guint16 header_size;
  gint width, height;
  GstCaps *caps;
  GstFlowReturn res = GST_FLOW_OK;
  GstClockTime in_time = GST_BUFFER_TIMESTAMP (buf);
  GstEvent *event = NULL;
  gboolean result = TRUE;
  guint32 payload_size;
  guint32 current_ts;
  GstMapInfo map;

  gst_adapter_push (mimdec->adapter, buf);


  /* do we have enough bytes to read a header */
  while (gst_adapter_available (mimdec->adapter) >= 24) {
    header = gst_adapter_map (mimdec->adapter, 24);
    header_size = header[0];
    if (header_size != 24) {
      gst_adapter_unmap (mimdec->adapter);
      gst_adapter_flush (mimdec->adapter, 24);
      GST_ELEMENT_ERROR (mimdec, STREAM, DECODE, (NULL),
          ("invalid frame: header size %d incorrect", header_size));
      return GST_FLOW_ERROR;
    }

    if (header[1] == 1) {
      /* This is a paused frame, skip it */
      gst_adapter_unmap (mimdec->adapter);
      gst_adapter_flush (mimdec->adapter, 24);
      continue;
    }

    fourcc = GUINT32_FROM_LE (*((guint32 *) (header + 12)));
    if (GST_MAKE_FOURCC ('M', 'L', '2', '0') != fourcc) {
      gst_adapter_unmap (mimdec->adapter);
      gst_adapter_flush (mimdec->adapter, 24);
      GST_ELEMENT_ERROR (mimdec, STREAM, WRONG_TYPE, (NULL),
          ("invalid frame: unknown FOURCC code %X (%" GST_FOURCC_FORMAT ")",
              fourcc, GST_FOURCC_ARGS (fourcc)));
      return GST_FLOW_ERROR;
    }

    payload_size = GUINT32_FROM_LE (*((guint32 *) (header + 8)));

    current_ts = GUINT32_FROM_LE (*((guint32 *) (header + 20)));

    gst_adapter_unmap (mimdec->adapter);

    GST_LOG_OBJECT (mimdec, "Got packet, payload size %d", payload_size);

    if (gst_adapter_available (mimdec->adapter) < payload_size + 24)
      return GST_FLOW_OK;

    /* We have a whole packet and have read the header, lets flush it out */
    gst_adapter_flush (mimdec->adapter, 24);

    frame_body = gst_adapter_map (mimdec->adapter, payload_size);

    if (mimdec->buffer_size < 0) {
      /* Check if its a keyframe, otherwise skip it */
      if (GUINT32_FROM_LE (*((guint32 *) (frame_body + 12))) != 0) {
        gst_adapter_unmap (mimdec->adapter);
        gst_adapter_flush (mimdec->adapter, payload_size);
        return GST_FLOW_OK;
      }

      if (!mimic_decoder_init (mimdec->dec, frame_body)) {
        gst_adapter_unmap (mimdec->adapter);
        gst_adapter_flush (mimdec->adapter, payload_size);
        GST_ELEMENT_ERROR (mimdec, LIBRARY, INIT, (NULL),
            ("mimic_decoder_init error"));
        return GST_FLOW_ERROR;
      }

      if (!mimic_get_property (mimdec->dec, "buffer_size",
              &mimdec->buffer_size)) {
        gst_adapter_unmap (mimdec->adapter);
        gst_adapter_flush (mimdec->adapter, payload_size);
        GST_ELEMENT_ERROR (mimdec, LIBRARY, INIT, (NULL),
            ("mimic_get_property('buffer_size') error"));
        return GST_FLOW_ERROR;
      }

      mimic_get_property (mimdec->dec, "width", &width);
      mimic_get_property (mimdec->dec, "height", &height);
      GST_DEBUG_OBJECT (mimdec,
          "Initialised decoder with %d x %d payload size %d buffer_size %d",
          width, height, payload_size, mimdec->buffer_size);
      caps = gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, "RGB",
          "framerate", GST_TYPE_FRACTION, 0, 1,
          "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);
      gst_pad_set_caps (mimdec->srcpad, caps);
      gst_caps_unref (caps);
    }


    if (mimdec->need_segment) {
      GstSegment segment;

      gst_segment_init (&segment, GST_FORMAT_TIME);

      if (GST_CLOCK_TIME_IS_VALID (in_time))
        segment.start = in_time;
      else
        segment.start = current_ts * GST_MSECOND;
      event = gst_event_new_segment (&segment);
    }
    mimdec->need_segment = FALSE;

    if (event)
      result = gst_pad_push_event (mimdec->srcpad, event);
    event = NULL;

    if (!result) {
      GST_WARNING_OBJECT (mimdec, "gst_pad_push_event failed");
      return GST_FLOW_ERROR;
    }


    out_buf = gst_buffer_new_allocate (NULL, mimdec->buffer_size, NULL);
    gst_buffer_map (out_buf, &map, GST_MAP_READWRITE);

    if (!mimic_decode_frame (mimdec->dec, frame_body, map.data)) {
      GST_WARNING_OBJECT (mimdec, "mimic_decode_frame error\n");

      gst_adapter_flush (mimdec->adapter, payload_size);

      gst_buffer_unmap (out_buf, &map);
      gst_buffer_unref (out_buf);
      GST_ELEMENT_ERROR (mimdec, STREAM, DECODE, (NULL),
          ("mimic_decode_frame error"));
      return GST_FLOW_ERROR;
    }
    gst_buffer_unmap (out_buf, &map);
    gst_adapter_flush (mimdec->adapter, payload_size);

    if (GST_CLOCK_TIME_IS_VALID (in_time))
      GST_BUFFER_TIMESTAMP (out_buf) = in_time;
    else
      GST_BUFFER_TIMESTAMP (out_buf) = current_ts * GST_MSECOND;

    res = gst_pad_push (mimdec->srcpad, out_buf);

    if (res != GST_FLOW_OK)
      break;
  }

  return res;
}