Example #1
0
/*
 * gst_v4lsrc_set_caps:
 * @src: the v4l source element
 * @caps: fixed caps to configure the device with
 *
 * Configure the v4l device for the negotiated caps: stop and deallocate
 * any running capture, read format/size/framerate from @caps, program
 * the frame-rate index into the video_window flags, map the fourcc (or
 * RGB depth/bpp) to a v4l palette and expected buffer size, verify the
 * mode with try_capture, then (re)initialise and restart capture.
 *
 * Returns: TRUE on success, FALSE on any failure.
 */
static gboolean
gst_v4lsrc_set_caps (GstBaseSrc * src, GstCaps * caps)
{
    GstV4lSrc *v4lsrc;
    guint32 fourcc = 0;         /* initialised: get_fourcc below can fail */
    gint bpp, depth, w, h, palette = -1;
    const GValue *new_fps;
    gint cur_fps_n, cur_fps_d;
    GstStructure *structure;
    struct video_window *vwin;

    v4lsrc = GST_V4LSRC (src);
    vwin = &GST_V4LELEMENT (v4lsrc)->vwin;

    /* if we're not open, punt -- we'll get setcaps'd later via negotiate */
    if (!GST_V4L_IS_OPEN (v4lsrc))
        return FALSE;

    /* make sure we stop capturing and dealloc buffers */
    if (GST_V4L_IS_ACTIVE (v4lsrc)) {
        if (!gst_v4lsrc_capture_stop (v4lsrc))
            return FALSE;
        if (!gst_v4lsrc_capture_deinit (v4lsrc))
            return FALSE;
    }

    /* caps are fixed, so there is exactly one structure */
    structure = gst_caps_get_structure (caps, 0);

    /* if get_fourcc fails here, fourcc stays 0 and palette stays -1,
     * which makes us refuse the link below instead of reading garbage */
    if (strcmp (gst_structure_get_name (structure), "video/x-raw-yuv") == 0)
        gst_structure_get_fourcc (structure, "format", &fourcc);
    else
        fourcc = GST_MAKE_FOURCC ('R', 'G', 'B', ' ');

    gst_structure_get_int (structure, "width", &w);
    gst_structure_get_int (structure, "height", &h);
    new_fps = gst_structure_get_value (structure, "framerate");

    /* set framerate if it's not already correct */
    if (!gst_v4lsrc_get_fps (v4lsrc, &cur_fps_n, &cur_fps_d))
        return FALSE;

    if (new_fps) {
        GST_DEBUG_OBJECT (v4lsrc, "linking with %dx%d at %d/%d fps", w, h,
                          gst_value_get_fraction_numerator (new_fps),
                          gst_value_get_fraction_denominator (new_fps));

        if (gst_value_get_fraction_numerator (new_fps) != cur_fps_n ||
                gst_value_get_fraction_denominator (new_fps) != cur_fps_d) {
            int fps_index = (gst_value_get_fraction_numerator (new_fps) * 16) /
                            (gst_value_get_fraction_denominator (new_fps) * 15);

            GST_DEBUG_OBJECT (v4lsrc, "Trying to set fps index %d", fps_index);
            /* clear bits 16 to 21 (the fps index field).  The previous
             * mask (0x3F00 - 1 == 0x3EFF) cleared the wrong bits. */
            vwin->flags &= ~(0x3F << 16);
            /* set bits 16 to 21 to the index */
            vwin->flags |= fps_index << 16;
            if (!gst_v4l_set_window_properties (GST_V4LELEMENT (v4lsrc))) {
                return FALSE;
            }
        }
    }

    /* map the fourcc to a v4l palette and compute the expected buffer
     * size; widths/heights are rounded up to the format's alignment */
    switch (fourcc) {
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
        palette = VIDEO_PALETTE_YUV420P;
        v4lsrc->buffer_size = ((w + 1) & ~1) * ((h + 1) & ~1) * 1.5;
        break;
    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
        palette = VIDEO_PALETTE_YUV422;
        v4lsrc->buffer_size = ((w + 1) & ~1) * h * 2;
        break;
    case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
        palette = VIDEO_PALETTE_UYVY;
        v4lsrc->buffer_size = ((w + 1) & ~1) * h * 2;
        break;
    case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
        palette = VIDEO_PALETTE_YUV411P;
        v4lsrc->buffer_size = ((w + 3) & ~3) * h * 1.5;
        break;
    case GST_MAKE_FOURCC ('Y', '4', '1', 'P'):
        palette = VIDEO_PALETTE_YUV411;
        v4lsrc->buffer_size = ((w + 3) & ~3) * h * 1.5;
        break;
    case GST_MAKE_FOURCC ('Y', 'U', 'V', '9'):
        palette = VIDEO_PALETTE_YUV410P;
        v4lsrc->buffer_size = ((w + 3) & ~3) * ((h + 3) & ~3) * 1.125;
        break;
    case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
        palette = VIDEO_PALETTE_YUV422P;
        v4lsrc->buffer_size = ((w + 1) & ~1) * h * 2;
        break;
    case GST_MAKE_FOURCC ('R', 'G', 'B', ' '):
        /* RGB needs depth (and, for 24-bit, bpp) to pick the palette */
        gst_structure_get_int (structure, "depth", &depth);
        switch (depth) {
        case 15:
            palette = VIDEO_PALETTE_RGB555;
            v4lsrc->buffer_size = w * h * 2;
            break;
        case 16:
            palette = VIDEO_PALETTE_RGB565;
            v4lsrc->buffer_size = w * h * 2;
            break;
        case 24:
            gst_structure_get_int (structure, "bpp", &bpp);
            switch (bpp) {
            case 24:
                palette = VIDEO_PALETTE_RGB24;
                v4lsrc->buffer_size = w * h * 3;
                break;
            case 32:
                palette = VIDEO_PALETTE_RGB32;
                v4lsrc->buffer_size = w * h * 4;
                break;
            default:
                break;
            }
            break;
        default:
            break;
        }
        break;
    default:
        break;
    }

    if (palette == -1) {
        GST_WARNING_OBJECT (v4lsrc, "palette for fourcc %" GST_FOURCC_FORMAT
                            " is -1, refusing link", GST_FOURCC_ARGS (fourcc));
        return FALSE;
    }

    GST_DEBUG_OBJECT (v4lsrc, "trying to set_capture %dx%d, palette %d",
                      w, h, palette);
    /* this only fills in v4lsrc->mmap values */
    if (!gst_v4lsrc_set_capture (v4lsrc, w, h, palette)) {
        GST_WARNING_OBJECT (v4lsrc, "could not set_capture %dx%d, palette %d",
                            w, h, palette);
        return FALSE;
    }

    /* first try the negotiated settings using try_capture */
    if (!gst_v4lsrc_try_capture (v4lsrc, w, h, palette)) {
        GST_DEBUG_OBJECT (v4lsrc, "failed trying palette %d for %dx%d", palette,
                          w, h);
        return FALSE;
    }

    if (!gst_v4lsrc_capture_init (v4lsrc))
        return FALSE;

    if (!gst_v4lsrc_capture_start (v4lsrc))
        return FALSE;

    return TRUE;
}
// Build a software RGB32 fallback image from a packed 4:2:2 YUV buffer
// (YUY2 or UYVY).  Any earlier fallback image is discarded first; on any
// unsupported or malformed input the fallback is simply left empty.
void QGstreamerGLTextureRenderer::setFallbackBuffer(GstBuffer *buffer)
{
#ifdef GL_TEXTURE_SINK_DEBUG
    qDebug() << Q_FUNC_INFO << buffer;
#endif
    m_fallbackImage = QImage();

    if (!buffer)
        return;

    GstCaps *caps = GST_BUFFER_CAPS(buffer);
    const uchar *data = GST_BUFFER_DATA(buffer);

    if (!(caps && data))
        return;

    const GstStructure *structure = gst_caps_get_structure(caps, 0);

    // Initialise and check the return value: previously fourcc was read
    // uninitialised (undefined behaviour) when the caps had no "format".
    guint32 fourcc = 0;
    if (!gst_structure_get_fourcc(structure, "format", &fourcc))
        return;

    // Only the two packed 4:2:2 layouts are handled by the loop below.
    if (fourcc != GST_MAKE_FOURCC('Y', 'U', 'Y', '2') &&
        fourcc != GST_MAKE_FOURCC('U', 'Y', 'V', 'Y'))
        return;

    QSize resolution = QGstUtils::capsResolution(caps);
    m_fallbackImage = QImage(resolution, QImage::Format_RGB32);

    // Convert two pixels per iteration: each macropixel carries two luma
    // samples sharing one U/V pair.
    // NOTE(review): assumes an even frame width — confirm upstream caps.
    for (int y=0; y<resolution.height(); y++) {
        // Source stride: width rounded up to 4, two bytes per pixel.
        const uchar *src = data + y*GST_ROUND_UP_4(resolution.width())*2;
        quint32 *dst = (quint32 *)m_fallbackImage.scanLine(y);

        int y1, y2;
        int u, v;

        int r1, g1, b1;
        int r2, g2, b2;

        int r, g, b;

        for (int x=0; x<resolution.width(); x+=2) {
            // Byte order differs between the two formats.
            if (fourcc == GST_MAKE_FOURCC('Y', 'U', 'Y', '2')) {
                y1 = *src; src++;
                u  = *src; src++;
                y2 = *src; src++;
                v  = *src; src++;
            } else {
                u  = *src; src++;
                y1 = *src; src++;
                v  = *src; src++;
                y2 = *src; src++;
            }

            // Fixed-point integer YUV->RGB conversion.
            y1 = 298*y1/256;
            y2 = 298*y2/256;

            r = 408*v/256 - 223;
            g = - 100*u/256 - 208*v/256 + 136;
            b = 516*u/256 - 276;

            r1 = qBound(0, y1 + r, 255);
            g1 = qBound(0, y1 + g, 255);
            b1 = qBound(0, y1 + b, 255);

            *dst = qRgb(r1, g1, b1); dst++;

            r2 = qBound(0, y2 + r, 255);
            g2 = qBound(0, y2 + g, 255);
            b2 = qBound(0, y2 + b, 255);

            *dst = qRgb(r2, g2, b2); dst++;
        }
    }
}
Example #3
0
/*
 * gst_divxenc_setcaps:
 * @pad: the encoder's sink pad
 * @caps: the negotiated input caps
 *
 * Tear down any previous encoder state, read size / format / framerate
 * from @caps, validate the input colourspace, set up the encoder and
 * propose matching video/x-divx caps on the source pad.
 *
 * Returns: TRUE if the encoder was set up and src caps accepted.
 */
static gboolean
gst_divxenc_setcaps (GstPad * pad, GstCaps * caps)
{
  GstDivxEnc *divxenc;
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  gint w, h;
  const GValue *fps;
  guint32 fourcc = 0;           /* stays 0 (rejected below) if "format" absent */
  guint32 divx_cs;
  gint bitcnt = 0;
  gboolean ret = FALSE;

  divxenc = GST_DIVXENC (gst_pad_get_parent (pad));

  /* if there's something old around, remove it */
  gst_divxenc_unset (divxenc);

  gst_structure_get_int (structure, "width", &w);
  gst_structure_get_int (structure, "height", &h);
  gst_structure_get_fourcc (structure, "format", &fourcc);

  fps = gst_structure_get_value (structure, "framerate");
  if (fps != NULL && GST_VALUE_HOLDS_FRACTION (fps)) {
    divxenc->fps_n = gst_value_get_fraction_numerator (fps);
    divxenc->fps_d = gst_value_get_fraction_denominator (fps);
  } else {
    /* no usable framerate: also reset the denominator so the caps
     * built below never carry a stale value from a previous setcaps */
    divxenc->fps_n = -1;
    divxenc->fps_d = 1;
  }

  /* only these raw colourspaces are accepted; the divx colourspace id
   * happens to be the same fourcc */
  switch (fourcc) {
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
      divx_cs = GST_MAKE_FOURCC ('I', '4', '2', '0');
      break;
    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
      divx_cs = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');
      break;
    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
      divx_cs = GST_MAKE_FOURCC ('Y', 'V', '1', '2');
      break;
    case GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'):
      divx_cs = GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U');
      break;
    case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
      divx_cs = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
      break;
    default:
      ret = FALSE;
      goto done;
  }

  divxenc->csp = divx_cs;
  divxenc->bitcnt = bitcnt;
  divxenc->width = w;
  divxenc->height = h;

  /* try it */
  if (gst_divxenc_setup (divxenc)) {
    GstCaps *new_caps = NULL;

    new_caps = gst_caps_new_simple ("video/x-divx",
        "divxversion", G_TYPE_INT, 5,
        "width", G_TYPE_INT, w,
        "height", G_TYPE_INT, h,
        "framerate", GST_TYPE_FRACTION, divxenc->fps_n, divxenc->fps_d, NULL);

    if (new_caps) {

      if (!gst_pad_set_caps (divxenc->srcpad, new_caps)) {
        /* fix: new_caps was leaked on this error path */
        gst_caps_unref (new_caps);
        gst_divxenc_unset (divxenc);
        ret = FALSE;
        goto done;
      }
      gst_caps_unref (new_caps);
      ret = TRUE;
      goto done;

    }

  }

  /* if we got here - it's not good */

  ret = FALSE;

done:
  gst_object_unref (divxenc);
  return ret;
}
Example #4
0
/* Halve the resolution of each incoming video frame.
 *
 * The input buffer is wrapped in a SchroFrame, packed formats are first
 * unpacked to planar, then horizontal and vertical 2:1 downsamplers are
 * chained as virtual frames, and finally the chain is rendered (re-packed
 * first for packed formats) into the output buffer.  Every intermediate
 * frame is pushed onto a small stack so it can be unreffed at the end. */
static GstFlowReturn
gst_schrodownsample_transform (GstBaseTransform * base_transform,
    GstBuffer *inbuf, GstBuffer *outbuf)
{
  GstSchrodownsample *compress;
  SchroFrame *outframe;
  /* geometry/format of the *input* frame, read from inbuf's caps */
  int width, height;
  uint32_t format;
  /* stack of frames to unref once rendering is done */
  SchroFrame *stack[10];
  int n = 0;
  
  g_return_val_if_fail (GST_IS_SCHRODOWNSAMPLE (base_transform), GST_FLOW_ERROR);
  compress = GST_SCHRODOWNSAMPLE (base_transform);

  gst_structure_get_fourcc (gst_caps_get_structure(inbuf->caps, 0),
      "format", &format);
  gst_structure_get_int (gst_caps_get_structure(inbuf->caps, 0),
      "width", &width);
  gst_structure_get_int (gst_caps_get_structure(inbuf->caps, 0),
      "height", &height);

  /* wrap input and output buffers; packed formats (YUY2/UYVY/AYUV) get
   * an extra unpack stage so the downsamplers can work on planar data */
  switch (format) {
    case GST_MAKE_FOURCC('I','4','2','0'):
      stack[n] = schro_frame_new_from_data_I420 (GST_BUFFER_DATA(inbuf),
          width, height);
      n++;
      outframe = schro_frame_new_from_data_I420 (GST_BUFFER_DATA(outbuf),
          width/2, height/2);
      break;
    case GST_MAKE_FOURCC('Y','V','1','2'):
      stack[n] = schro_frame_new_from_data_YV12 (GST_BUFFER_DATA(inbuf),
          width, height);
      n++;
      outframe = schro_frame_new_from_data_YV12 (GST_BUFFER_DATA(outbuf),
          width/2, height/2);
      break;
    case GST_MAKE_FOURCC('Y','U','Y','2'):
      stack[n] = schro_frame_new_from_data_YUY2 (GST_BUFFER_DATA(inbuf),
          width, height);
      n++;
      stack[n] = schro_virt_frame_new_unpack (stack[n-1]);
      n++;
      outframe = schro_frame_new_from_data_YUY2 (GST_BUFFER_DATA(outbuf),
          width/2, height/2);
      break;
    case GST_MAKE_FOURCC('U','Y','V','Y'):
      stack[n] = schro_frame_new_from_data_UYVY (GST_BUFFER_DATA(inbuf),
          width, height);
      n++;
      stack[n] = schro_virt_frame_new_unpack (stack[n-1]);
      n++;
      outframe = schro_frame_new_from_data_UYVY (GST_BUFFER_DATA(outbuf),
          width/2, height/2);
      break;
    case GST_MAKE_FOURCC('A','Y','U','V'):
      stack[n] = schro_frame_new_from_data_AYUV (GST_BUFFER_DATA(inbuf),
          width, height);
      n++;
      stack[n] = schro_virt_frame_new_unpack (stack[n-1]);
      n++;
      outframe = schro_frame_new_from_data_AYUV (GST_BUFFER_DATA(outbuf),
          width/2, height/2);
      break;
    default:
      /* caps negotiation should make this unreachable; note that with
       * assertions compiled out outframe would be left uninitialized */
      g_assert_not_reached();
  }

  /* chain the actual 2:1 downsampling stages */
  stack[n] = schro_virt_frame_new_horiz_downsample (stack[n-1]);
  n++;
  stack[n] = schro_virt_frame_new_vert_downsample (stack[n-1]);
  n++;

  /* packed output formats need a re-pack stage before rendering */
  switch (format) {
    case GST_MAKE_FOURCC('Y','U','Y','2'):
      stack[n] = schro_virt_frame_new_pack_YUY2 (stack[n-1]);
      n++;
      break;
    case GST_MAKE_FOURCC('U','Y','V','Y'):
      stack[n] = schro_virt_frame_new_pack_UYVY (stack[n-1]);
      n++;
      break;
    case GST_MAKE_FOURCC('A','Y','U','V'):
      stack[n] = schro_virt_frame_new_pack_AYUV (stack[n-1]);
      n++;
      break;
    default:
      break;
  }

  /* evaluate the whole virtual-frame chain into the output buffer */
  schro_virt_frame_render (stack[n-1], outframe);
  /* NOTE(review): outframe itself is not unreffed here — confirm that
   * schro frame wrappers around external data need no unref, or that
   * the caller takes ownership */

  while(n>0) {
    schro_frame_unref (stack[n-1]);
    n--;
  }

  return GST_FLOW_OK;
}
/* Sink-pad setcaps for the VDPAU post-processor.
 *
 * Accepts either raw YUV caps (converted to native video caps and backed
 * by a video buffer pool) or already-native caps.  Reads geometry, chroma
 * type, interlacing and pixel-aspect-ratio from the (converted) caps,
 * intersects the possible output caps with what downstream allows,
 * fixates the result — doubling the framerate for interlaced input —
 * and sets it on the source pad.
 *
 * Returns TRUE on success; all error paths funnel through "done" which
 * releases the element ref and the converted video caps. */
static gboolean
gst_vdp_vpp_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  GstStructure *structure;
  GstCaps *video_caps = NULL;
  gboolean res = FALSE;

  GstCaps *allowed_caps, *output_caps, *src_caps;

  /* check if the input is non native */
  structure = gst_caps_get_structure (caps, 0);
  if (gst_structure_has_name (structure, "video/x-raw-yuv")) {
    if (!gst_structure_get_fourcc (structure, "format", &vpp->fourcc))
      goto done;
    vpp->native_input = FALSE;
    /* translate YUV caps into the native video caps we work with */
    video_caps = gst_vdp_yuv_to_video_caps (caps);
    if (!video_caps)
      goto done;

    /* non-native input needs a pool of video buffers to upload into */
    if (!vpp->vpool)
      vpp->vpool = gst_vdp_video_buffer_pool_new (vpp->device);

    gst_vdp_buffer_pool_set_caps (vpp->vpool, video_caps);

  } else {
    vpp->native_input = TRUE;
    /* keep our own ref so the unified unref in "done" is correct */
    video_caps = gst_caps_ref (caps);

    /* native input does not go through the pool; drop any old one */
    if (vpp->vpool) {
      g_object_unref (vpp->vpool);
      vpp->vpool = NULL;
    }
  }

  /* from here on, read properties from the (possibly converted) caps */
  structure = gst_caps_get_structure (video_caps, 0);
  if (!gst_structure_get_int (structure, "width", &vpp->width) ||
      !gst_structure_get_int (structure, "height", &vpp->height) ||
      !gst_structure_get_int (structure, "chroma-type",
          (gint *) & vpp->chroma_type))
    goto done;


  /* get interlaced flag */
  gst_structure_get_boolean (structure, "interlaced", &vpp->interlaced);

  /* extract par */
  if (gst_structure_has_field_typed (structure, "pixel-aspect-ratio",
          GST_TYPE_FRACTION)) {
    gst_structure_get_fraction (structure, "pixel-aspect-ratio", &vpp->par_n,
        &vpp->par_d);
    vpp->got_par = TRUE;
  } else
    vpp->got_par = FALSE;

  /* what can downstream actually take? */
  allowed_caps = gst_pad_get_allowed_caps (vpp->srcpad);
  if (G_UNLIKELY (!allowed_caps))
    goto null_allowed_caps;
  if (G_UNLIKELY (gst_caps_is_empty (allowed_caps)))
    goto empty_allowed_caps;
  GST_DEBUG ("allowed_caps: %" GST_PTR_FORMAT, allowed_caps);

  /* intersect our possible output caps with downstream's */
  output_caps = gst_vdp_video_to_output_caps (video_caps);
  src_caps = gst_caps_intersect (output_caps, allowed_caps);
  gst_caps_unref (allowed_caps);
  gst_caps_unref (output_caps);

  if (gst_caps_is_empty (src_caps))
    goto not_negotiated;

  gst_pad_fixate_caps (vpp->srcpad, src_caps);


  /* deinterlacing doubles the output framerate (one frame per field) */
  if (gst_vdp_vpp_is_interlaced (vpp)) {
    gint fps_n, fps_d;

    if (gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)) {
      gst_fraction_double (&fps_n, &fps_d);
      gst_caps_set_simple (src_caps, "framerate", GST_TYPE_FRACTION, fps_n,
          fps_d, NULL);
      /* duration of a single field, used for output timestamping */
      vpp->field_duration = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
    }

    gst_caps_set_simple (src_caps, "interlaced", G_TYPE_BOOLEAN, FALSE, NULL);
  }

  GST_DEBUG ("src_caps: %" GST_PTR_FORMAT, src_caps);

  res = gst_pad_set_caps (vpp->srcpad, src_caps);
  gst_caps_unref (src_caps);

done:
  /* common cleanup for success and all error paths */
  gst_object_unref (vpp);
  if (video_caps)
    gst_caps_unref (video_caps);

  return res;

null_allowed_caps:
  GST_ERROR_OBJECT (vpp, "Got null from gst_pad_get_allowed_caps");
  goto done;

empty_allowed_caps:
  GST_ERROR_OBJECT (vpp, "Got EMPTY caps from gst_pad_get_allowed_caps");

  gst_caps_unref (allowed_caps);
  goto done;

not_negotiated:
  gst_caps_unref (src_caps);
  GST_ERROR_OBJECT (vpp, "Couldn't find suitable output format");
  goto done;
}
/* Convert a GstCaps (video/raw) to a FFMPEG PixFmt
 */
static enum PixelFormat
gst_ffmpeg_caps_to_pixfmt(const GstCaps* caps) {
    GstStructure* structure;
    enum PixelFormat pix_fmt = PIX_FMT_NONE;

    GST_DEBUG("converting caps %" GST_PTR_FORMAT, caps);
    g_return_val_if_fail(gst_caps_get_size(caps) == 1, PIX_FMT_NONE);
    structure = gst_caps_get_structure(caps, 0);

    if (strcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
        guint32 fourcc;

        if (gst_structure_get_fourcc(structure, "format", &fourcc)) {
            switch (fourcc) {
            case GST_MAKE_FOURCC('Y', 'U', 'Y', '2'):
                pix_fmt = PIX_FMT_YUYV422;
                break;

            case GST_MAKE_FOURCC('U', 'Y', 'V', 'Y'):
                pix_fmt = PIX_FMT_UYVY422;
                break;

            case GST_MAKE_FOURCC('I', '4', '2', '0'):
                pix_fmt = PIX_FMT_YUV420P;
                break;

            case GST_MAKE_FOURCC('Y', '4', '1', 'B'):
                pix_fmt = PIX_FMT_YUV411P;
                break;

            case GST_MAKE_FOURCC('Y', '4', '2', 'B'):
                pix_fmt = PIX_FMT_YUV422P;
                break;

            case GST_MAKE_FOURCC('Y', 'U', 'V', '9'):
                pix_fmt = PIX_FMT_YUV410P;
                break;
            }
        }
    } else if (strcmp(gst_structure_get_name(structure),
                      "video/x-raw-rgb") == 0) {
        gint bpp = 0, rmask = 0, endianness = 0;

        if (gst_structure_get_int(structure, "bpp", &bpp) &&
                gst_structure_get_int(structure, "endianness", &endianness) &&
                endianness == G_BIG_ENDIAN) {
            if (gst_structure_get_int(structure, "red_mask", &rmask)) {
                switch (bpp) {
                case 32:
                    if (rmask == 0x00ff0000) {
                        pix_fmt = PIX_FMT_ARGB;
                    } else if (rmask == 0xff000000) {
                        pix_fmt = PIX_FMT_RGBA;
                    } else if (rmask == 0xff00) {
                        pix_fmt = PIX_FMT_BGRA;
                    } else if (rmask == 0xff) {
                        pix_fmt = PIX_FMT_ABGR;
                    }

                    break;

                case 24:
                    if (rmask == 0x0000FF) {
                        pix_fmt = PIX_FMT_BGR24;
                    } else {
                        pix_fmt = PIX_FMT_RGB24;
                    }

                    break;

                case 16:
                    if (endianness == G_BYTE_ORDER) {
                        pix_fmt = PIX_FMT_RGB565;
                    }

                    break;

                case 15:
                    if (endianness == G_BYTE_ORDER) {
                        pix_fmt = PIX_FMT_RGB555;
                    }

                    break;

                default:
                    /* nothing */
                    break;
                }
            } else {
                if (bpp == 8) {
                    pix_fmt = PIX_FMT_PAL8;
                }
            }
        }
    }

    return pix_fmt;
}
/* Accept new caps on the sink: verify they intersect our template caps,
 * read geometry, framerate and pixel-aspect-ratio, determine the pixel
 * format (YV12 / I420 / AYUV by fourcc, otherwise RGB by colour masks)
 * and find a renderer that can display it.  Returns FALSE if the caps
 * are unusable or no renderer supports the format. */
static gboolean
clutter_gst_video_sink_set_caps (GstBaseSink *bsink,
                                 GstCaps     *caps)
{
  ClutterGstVideoSink        *sink;
  ClutterGstVideoSinkPrivate *priv;
  GstCaps                    *intersection;
  GstStructure               *structure;
  gboolean                    ret;
  const GValue               *fps;
  const GValue               *par;
  gint                        width, height;
  guint32                     fourcc;
  /* initialised: the get_int calls below may fail and leave them unset */
  int                         red_mask = 0, blue_mask = 0;

  sink = CLUTTER_GST_VIDEO_SINK(bsink);
  priv = sink->priv;

  intersection = gst_caps_intersect (priv->caps, caps);
  if (gst_caps_is_empty (intersection))
    {
      /* fix: the intersection caps were leaked on this early return */
      gst_caps_unref (intersection);
      return FALSE;
    }

  gst_caps_unref (intersection);

  structure = gst_caps_get_structure (caps, 0);

  /* width, height and framerate are mandatory */
  ret  = gst_structure_get_int (structure, "width", &width);
  ret &= gst_structure_get_int (structure, "height", &height);
  fps  = gst_structure_get_value (structure, "framerate");
  ret &= (fps != NULL);

  par  = gst_structure_get_value (structure, "pixel-aspect-ratio");

  if (!ret)
    return FALSE;

  priv->width  = width;
  priv->height = height;

  /* We dont yet use fps or pixel aspect into but handy to have */
  priv->fps_n  = gst_value_get_fraction_numerator (fps);
  priv->fps_d  = gst_value_get_fraction_denominator (fps);

  if (par) 
    {
      priv->par_n = gst_value_get_fraction_numerator (par);
      priv->par_d = gst_value_get_fraction_denominator (par);
    } 
  else 
    priv->par_n = priv->par_d = 1;

  /* YUV formats are identified by fourcc, everything else is treated
   * as RGB and classified by its colour masks */
  ret = gst_structure_get_fourcc (structure, "format", &fourcc);
  if (ret && (fourcc == GST_MAKE_FOURCC ('Y', 'V', '1', '2')))
    {
      priv->format = CLUTTER_GST_YV12;
    }
  else if (ret && (fourcc == GST_MAKE_FOURCC ('I', '4', '2', '0')))
    {
      priv->format = CLUTTER_GST_I420;
    }
  else if (ret && (fourcc == GST_MAKE_FOURCC ('A', 'Y', 'U', 'V')))
    {
      priv->format = CLUTTER_GST_AYUV;
      priv->bgr = FALSE;
    }
  else
    {
      guint32 mask;
      gst_structure_get_int (structure, "red_mask", &red_mask);
      gst_structure_get_int (structure, "blue_mask", &blue_mask);
      
      /* a mask below 0x1000000 means 24-bit (no alpha byte) */
      mask = red_mask | blue_mask;
      if (mask < 0x1000000)
        {
          priv->format = CLUTTER_GST_RGB24;
          priv->bgr = (red_mask == 0xff0000) ? FALSE : TRUE;
        }
      else
        {
          priv->format = CLUTTER_GST_RGB32;
          priv->bgr = (red_mask == 0xff000000) ? FALSE : TRUE;
        }
    }

  /* find a renderer that can display our format */
  priv->renderer = clutter_gst_find_renderer_by_format (sink, priv->format);
  if (G_UNLIKELY (priv->renderer == NULL))
    {
      GST_ERROR_OBJECT (sink, "could not find a suitable renderer");
      return FALSE;
    }

  GST_INFO_OBJECT (sink, "using the %s renderer", priv->renderer->name);

  return TRUE;
}
Example #8
0
/* Apply negotiated caps to the Aravis (GenICam) camera.
 *
 * Stops acquisition, translates the caps (size, depth, format) into an
 * Aravis pixel format, programs region/binning/format/frame-rate and the
 * gain and exposure policies, rebuilds the fixed caps and the stream with
 * fresh buffers, then restarts acquisition.  Always returns TRUE. */
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
	GstAravis* gst_aravis = GST_ARAVIS(src);
	GstStructure *structure;
	ArvPixelFormat pixel_format;
	int height, width;
	int bpp, depth;
	const GValue *frame_rate;
	const char *caps_string;
	unsigned int i;
	guint32 fourcc;

	GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

	/* the camera must not be streaming while it is reconfigured */
	arv_camera_stop_acquisition (gst_aravis->camera);

	if (gst_aravis->stream != NULL)
		g_object_unref (gst_aravis->stream);

	structure = gst_caps_get_structure (caps, 0);

	gst_structure_get_int (structure, "width", &width);
	gst_structure_get_int (structure, "height", &height);
	frame_rate = gst_structure_get_value (structure, "framerate");
	gst_structure_get_int (structure, "bpp", &bpp);
	gst_structure_get_int (structure, "depth", &depth);

	/* the "format" field may be a string or a fourcc depending on the
	 * media type; normalise both to a fourcc (0 if absent) */
	if (gst_structure_get_field_type (structure, "format") == G_TYPE_STRING) {
		const char *string;

	       	string = gst_structure_get_string (structure, "format");
		fourcc = GST_STR_FOURCC (string);
	} else if (gst_structure_get_field_type (structure, "format") == GST_TYPE_FOURCC) {
		gst_structure_get_fourcc (structure, "format", &fourcc);
	} else
		fourcc = 0;

	pixel_format = arv_pixel_format_from_gst_0_10_caps (gst_structure_get_name (structure), bpp, depth, fourcc);

	/* program the camera: capture region, binning and pixel format */
	arv_camera_set_region (gst_aravis->camera, gst_aravis->offset_x, gst_aravis->offset_y, width, height);
	arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
	arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

	if (frame_rate != NULL) {
		double dbl_frame_rate;

		dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
			(double) gst_value_get_fraction_denominator (frame_rate);

		GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
		arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

		/* allow up to three frame periods before timing out a buffer */
		if (dbl_frame_rate > 0.0)
			gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
							     3e6 / dbl_frame_rate);
		else
			gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
	} else
		gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

	GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %" G_GUINT64_FORMAT " µs", gst_aravis->buffer_timeout_us);

	GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz", arv_camera_get_frame_rate (gst_aravis->camera));

	/* gain: continuous auto, or a fixed manual value when >= 0 */
	if(gst_aravis->gain_auto) {
		arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		GST_DEBUG_OBJECT (gst_aravis, "Auto Gain = continuous");
	} else {
		if (gst_aravis->gain >= 0) {
			GST_DEBUG_OBJECT (gst_aravis, "Gain = %g", gst_aravis->gain);
			arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual gain = %g", arv_camera_get_gain (gst_aravis->camera));
	}

	/* exposure: continuous auto, or a fixed manual time when > 0 */
	if(gst_aravis->exposure_auto) {
		arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		GST_DEBUG_OBJECT (gst_aravis, "Auto Exposure = continuous");
	} else {
		if (gst_aravis->exposure_time_us > 0.0) {
			GST_DEBUG_OBJECT (gst_aravis, "Exposure = %g µs", gst_aravis->exposure_time_us);
			arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual exposure = %g µs", arv_camera_get_exposure_time (gst_aravis->camera));
	}

	if (gst_aravis->fixed_caps != NULL)
		gst_caps_unref (gst_aravis->fixed_caps);

	/* rebuild the fixed caps from the programmed pixel format */
	caps_string = arv_pixel_format_to_gst_0_10_caps_string (pixel_format);
	if (caps_string != NULL) {
		GstStructure *structure;
		GstCaps *caps;

		caps = gst_caps_new_empty ();
		/* NOTE(review): gst_structure_from_string can return NULL on a
		 * malformed string — confirm caps_string is always parseable */
		structure = gst_structure_from_string (caps_string, NULL);
		gst_structure_set (structure,
				   "width", G_TYPE_INT, width,
				   "height", G_TYPE_INT, height,
				   NULL);

		if (frame_rate != NULL)
			gst_structure_set_value (structure, "framerate", frame_rate);

		gst_caps_append_structure (caps, structure);

		gst_aravis->fixed_caps = caps;
	} else
		gst_aravis->fixed_caps = NULL;

	/* recreate the stream and pre-queue empty buffers for capture */
	gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
	gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

	if (ARV_IS_GV_STREAM (gst_aravis->stream) && gst_aravis->packet_resend)
		g_object_set (gst_aravis->stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_ALWAYS, NULL);
	else
		g_object_set (gst_aravis->stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER, NULL);

	for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
		arv_stream_push_buffer (gst_aravis->stream,
					arv_buffer_new (gst_aravis->payload, NULL));

	GST_LOG_OBJECT (gst_aravis, "Start acquisition");
	arv_camera_start_acquisition (gst_aravis->camera);

	gst_aravis->timestamp_offset = 0;
	gst_aravis->last_timestamp = 0;

	return TRUE;
}
Example #9
0
/* Sink-pad setcaps for the DMO (DirectX Media Object) audio decoder.
 *
 * Destroys any existing decoder context, reads the audio parameters from
 * the caps, builds a WAVEFORMATEX header (with any codec data appended
 * directly after it), opens the decoder DLL, queries its buffer
 * requirements and negotiates the raw output caps on the source pad.
 *
 * Returns TRUE on success; all failures funnel through "beach". */
static gboolean  
dmo_audiodec_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  DMOAudioDec *dec = (DMOAudioDec *) gst_pad_get_parent (pad);
  DMOAudioDecClass *klass = (DMOAudioDecClass *) G_OBJECT_GET_CLASS (dec);
  GstStructure *s = gst_caps_get_structure (caps, 0);
  gchar *dll;
  gint size;
  WAVEFORMATEX * hdr = NULL;
  const GValue *v;
  GstBuffer *extradata = NULL;
  GstCaps *out;
  gboolean ret = FALSE;
  guint32 fourcc;
  
  GST_DEBUG_OBJECT (dec, "setcaps called with %" GST_PTR_FORMAT, caps);

  Check_FS_Segment ();

  /* throw away any previously opened decoder */
  if (dec->ctx) {
    DMO_AudioDecoder_Destroy (dec->ctx);
    dec->ctx = NULL;
  }

  /* read data -- all five fields are mandatory */
  if (!gst_structure_get_int (s, "bitrate", &dec->bitrate) ||
      !gst_structure_get_int (s, "block_align", &dec->block_align) ||
      !gst_structure_get_int (s, "rate", &dec->rate) ||
      !gst_structure_get_int (s, "channels", &dec->channels) ||
      !gst_structure_get_int (s, "depth", &dec->depth)) {
    goto beach;
  }
  
  if ((v = gst_structure_get_value (s, "codec_data")))
    extradata = gst_value_get_buffer (v);

  /* fall back to the class's registered format when caps carry none */
  if (!gst_structure_get_fourcc (s, "format", &fourcc))
    fourcc = klass->entry->format;

  /* set up dll initialization */
  dll = g_strdup_printf ("%s.dll", klass->entry->dll);
  /* header is allocated large enough to hold the codec data behind it */
  size = sizeof (WAVEFORMATEX) +
      (extradata ? GST_BUFFER_SIZE (extradata) : 0);
  hdr = g_malloc0 (size);
  if (extradata) { /* Codec data is appended after our header */
    memcpy (((guchar *) hdr) + sizeof (WAVEFORMATEX),
  	        GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata));
    hdr->cbSize = GST_BUFFER_SIZE (extradata);
  }
  hdr->wFormatTag = fourcc;
  hdr->nChannels = dec->channels;
  hdr->nSamplesPerSec = dec->rate;
  hdr->nAvgBytesPerSec = dec->bitrate / 8;    /* bits/s -> bytes/s */
  hdr->nBlockAlign = dec->block_align;
  hdr->wBitsPerSample = dec->depth;
  GST_DEBUG ("Will now open %s using %d bps %d channels", dll, dec->bitrate,
             dec->channels);
  if (!(dec->ctx = DMO_AudioDecoder_Open (dll, &klass->entry->guid, hdr))) {
    GST_ERROR ("Failed to open DLL %s", dll);
    g_free (dll);
    g_free (hdr);
    goto beach;
  }
  g_free (dll);
  g_free (hdr);
  
  /* ask the decoder for its buffer sizes / alignment requirements */
  DMO_AudioDecoder_GetOutputInfos (dec->ctx, &dec->out_buffer_size,
                                   &dec->out_align);
  DMO_AudioDecoder_GetInputInfos (dec->ctx, &dec->in_buffer_size,
                                  &dec->in_align, &dec->lookahead);
  
  /* negotiate output */
  out = gst_caps_from_string (klass->entry->srccaps);
  gst_caps_set_simple (out,
      "width", G_TYPE_INT, dec->depth,
      "depth", G_TYPE_INT, dec->depth,
      "rate", G_TYPE_INT, dec->rate,
      "channels", G_TYPE_INT, dec->channels,
      NULL);
  /* multichannel output needs explicit speaker positions */
  if (dec->channels > 2 && dec->channels <= 11) {
    GstAudioChannelPosition pos[] = {
      GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
      GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
      GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
      GST_AUDIO_CHANNEL_POSITION_LFE,
      GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
      GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT,
      GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER,
      GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER,
      GST_AUDIO_CHANNEL_POSITION_REAR_CENTER,
      GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
      GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT
    };
    gst_audio_set_channel_positions (gst_caps_get_structure (out, 0), pos);
  }
  if (!gst_pad_set_caps (dec->srcpad, out)) {
    gst_caps_unref (out);
    GST_ERROR ("Failed to negotiate output");
    goto beach;
  }
  gst_caps_unref (out);
  
  ret = TRUE;

beach:
  gst_object_unref (dec);
  
  return ret;
}
Example #10
0
/**
 * gst_video_format_parse_caps:
 * @caps: the #GstCaps to parse
 * @format: the #GstVideoFormat of the video represented by @caps (output)
 * @width: the width of the video represented by @caps (output)
 * @height: the height of the video represented by @caps (output)
 *
 * Determines the #GstVideoFormat of @caps and places it in the location
 * pointed to by @format.  Extracts the size of the video and places it
 * in the location pointed to by @width and @height.  If @caps does not
 * represent one of the raw video formats listed in #GstVideoFormat, the
 * function will fail and return FALSE.  Each output pointer may be NULL
 * to skip that piece of information.
 *
 * Since: 0.10.16
 *
 * Returns: TRUE if @caps was parsed correctly.
 */
gboolean
gst_video_format_parse_caps (GstCaps * caps, GstVideoFormat * format,
    int *width, int *height)
{
  GstStructure *s;
  gboolean ok = TRUE;

  /* unfixed caps cannot describe a single concrete format */
  if (!gst_caps_is_fixed (caps))
    return FALSE;

  s = gst_caps_get_structure (caps, 0);

  if (format) {
    if (gst_structure_has_name (s, "video/x-raw-yuv")) {
      guint32 fourcc = 0;

      /* YUV formats are identified directly by their fourcc */
      ok &= gst_structure_get_fourcc (s, "format", &fourcc);
      *format = gst_video_format_from_fourcc (fourcc);
      ok &= (*format != GST_VIDEO_FORMAT_UNKNOWN);
    } else if (gst_structure_has_name (s, "video/x-raw-rgb")) {
      int depth = 0;
      int bpp = 0;
      int endianness = 0;
      int red_mask = 0;
      int green_mask = 0;
      int blue_mask = 0;
      int alpha_mask = 0;
      gboolean have_alpha;

      /* RGB formats are identified by depth, bpp and colour masks */
      ok &= gst_structure_get_int (s, "depth", &depth);
      ok &= gst_structure_get_int (s, "bpp", &bpp);
      ok &= gst_structure_get_int (s, "endianness", &endianness);
      ok &= gst_structure_get_int (s, "red_mask", &red_mask);
      ok &= gst_structure_get_int (s, "green_mask", &green_mask);
      ok &= gst_structure_get_int (s, "blue_mask", &blue_mask);
      /* the alpha mask is optional; its presence selects RGBA parsing */
      have_alpha = gst_structure_get_int (s, "alpha_mask", &alpha_mask);

      if (depth == 24 && bpp == 32 && endianness == G_BIG_ENDIAN) {
        *format = gst_video_format_from_rgb32_masks (red_mask, green_mask,
            blue_mask);
        ok &= (*format != GST_VIDEO_FORMAT_UNKNOWN);
      } else if (depth == 32 && bpp == 32 && endianness == G_BIG_ENDIAN &&
          have_alpha) {
        *format = gst_video_format_from_rgba32_masks (red_mask, green_mask,
            blue_mask, alpha_mask);
        ok &= (*format != GST_VIDEO_FORMAT_UNKNOWN);
      } else if (depth == 24 && bpp == 24 && endianness == G_BIG_ENDIAN) {
        *format = gst_video_format_from_rgb24_masks (red_mask, green_mask,
            blue_mask);
        ok &= (*format != GST_VIDEO_FORMAT_UNKNOWN);
      } else {
        ok = FALSE;
      }
    } else {
      /* neither raw YUV nor raw RGB */
      ok = FALSE;
    }
  }

  if (width)
    ok &= gst_structure_get_int (s, "width", width);

  if (height)
    ok &= gst_structure_get_int (s, "height", height);

  return ok;
}
Example #11
0
/* Pad setcaps handler: reads width/height/format from the fixed caps,
 * programs the OMX component, and pushes matching caps on the source pad.
 * Returns FALSE when the pixel format is unsupported or downstream
 * rejects the caps. */
static gboolean
gst_goo_encjpeg_setcaps (GstPad* pad, GstCaps* caps)
{
	GstGooEncJpeg* self = GST_GOO_ENCJPEG (gst_pad_get_parent (pad));
	GstGooEncJpegPrivate* priv = GST_GOO_ENCJPEG_GET_PRIVATE (self);

	GstStructure* structure;
	const GValue* framerate;
	/* bugfix: initialize so a missing "format" field hits the error
	 * path below instead of switching on an indeterminate value */
	guint32 fourcc = 0;
	GstPad* otherpad;
	GstCaps* othercaps;
	gboolean ret;

	otherpad = (pad == self->srcpad) ? self->sinkpad : self->srcpad;
	othercaps = gst_caps_copy (gst_pad_get_pad_template_caps (otherpad));

	structure = gst_caps_get_structure (caps, 0);

	gst_structure_get_int (structure, "width", &priv->width);
	gst_structure_get_int (structure, "height", &priv->height);
	gst_structure_get_fourcc (structure, "format", &fourcc);

	switch (fourcc)
	{
	case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
		priv->colorformat = OMX_COLOR_FormatCbYCrY;
		break;
	case GST_MAKE_FOURCC ('I', '4', '2', '0'):
		priv->colorformat = OMX_COLOR_FormatYUV420PackedPlanar;
		break;
	default:
		GST_ERROR ("format not supported");
		/* bugfix: the early return used to leak both othercaps and
		 * the element ref taken by gst_pad_get_parent() above */
		gst_caps_unref (othercaps);
		gst_object_unref (self);
		return FALSE;
	}

	g_object_set (self->component, "width", priv->width, NULL);
	g_object_set (self->component, "height", priv->height, NULL);

	/* UYVY is 2 bytes/pixel, I420 is 1.5 bytes/pixel */
	priv->omxbufsiz = (priv->colorformat == OMX_COLOR_FormatCbYCrY) ?
		priv->width * priv->height * 2 :
		priv->width * priv->height * 1.5;

	framerate = gst_structure_get_value (structure, "framerate");

	if (G_LIKELY (framerate))
	{
		gst_caps_set_simple
			(othercaps,
			 "width", G_TYPE_INT, priv->width,
			 "height", G_TYPE_INT, priv->height,
			 "framerate", GST_TYPE_FRACTION,
			 gst_value_get_fraction_numerator (framerate),
			 gst_value_get_fraction_denominator (framerate),
			 NULL);
	}
	else
	{
		gst_caps_set_simple
			(othercaps,
			 "width", G_TYPE_INT, priv->width,
			 "height", G_TYPE_INT, priv->height,
			 NULL);
	}

	ret = gst_pad_set_caps (self->srcpad, othercaps);
	gst_caps_unref (othercaps);

	/* bugfix: gst_pad_set_caps() returns a gboolean, not a
	 * GstPadLinkReturn; GST_PAD_LINK_SUCCESSFUL(FALSE) evaluated to
	 * TRUE, so omx_start() could run even after the caps were
	 * rejected */
	if (ret &&
	    goo_component_get_state (self->component) == OMX_StateLoaded)
	{
		omx_start (self);
	}

	gst_object_unref (self);

	return ret;
}
Beispiel #12
0
/* Map a raw-video caps structure to the corresponding XviD colorspace
 * constant (XVID_CSP_*).  Returns -1 when the format is not recognized. */
gint
gst_xvid_structure_to_csp (GstStructure * structure)
{
  const gchar *mime = gst_structure_get_name (structure);
  gint xvid_cs = -1;

  if (!strcmp (mime, "video/x-raw-yuv")) {
    /* bugfix: initialize — gst_structure_get_fourcc() leaves the output
     * untouched when the field is absent, and the switch below would
     * then read an indeterminate value */
    guint32 fourcc = 0;

    gst_structure_get_fourcc (structure, "format", &fourcc);
    switch (fourcc) {
      case GST_MAKE_FOURCC ('I', '4', '2', '0'):
        xvid_cs = XVID_CSP_I420;
        break;
      case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
        xvid_cs = XVID_CSP_YUY2;
        break;
      case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
        xvid_cs = XVID_CSP_YV12;
        break;
      case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
        xvid_cs = XVID_CSP_UYVY;
        break;
      case GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'):
        xvid_cs = XVID_CSP_YVYU;
        break;
    }
  } else {
    /* bugfix: same uninitialized-read hazard for RGB caps missing
     * depth/bpp/red_mask fields; zero maps to the -1 fallthrough */
    gint depth = 0, bpp = 0, r_mask = 0;

    gst_structure_get_int (structure, "depth", &depth);
    gst_structure_get_int (structure, "bpp", &bpp);
    gst_structure_get_int (structure, "red_mask", &r_mask);

    switch (depth) {
      case 15:
        xvid_cs = XVID_CSP_RGB555;
        break;
      case 16:
        xvid_cs = XVID_CSP_RGB565;
        break;
      case 24:
        if (bpp == 24) {
          xvid_cs = XVID_CSP_BGR;
        } else {
          /* 32 bpp: byte order is derived from the red channel mask */
          switch (r_mask) {
            case 0xff000000:
              xvid_cs = XVID_CSP_RGBA;
              break;
#ifdef XVID_CSP_ARGB
            case 0x00ff0000:
              xvid_cs = XVID_CSP_ARGB;
              break;
#endif
            case 0x0000ff00:
              xvid_cs = XVID_CSP_BGRA;
              break;
            case 0x000000ff:
              xvid_cs = XVID_CSP_ABGR;
              break;
          }
        }
        break;
      default:
        break;
    }

  }

  return xvid_cs;
}
Beispiel #13
0
/* Sink pad setcaps handler: tears down any previous decoder context,
 * maps the caps media type to an FFmpeg codec id, allocates a fresh
 * AVCodecContext, and copies codec_data into extradata.
 * Returns false only when no decoder exists for the negotiated codec. */
static gboolean
sink_setcaps(GstPad *pad, GstCaps *caps)
{
	struct obj *self;
	GstStructure *in_struc;
	const char *name;
	int codec_id;
	const GValue *codec_data;
	GstBuffer *buf;
	AVCodecContext *ctx;

	self = (struct obj *)((GstObject *)pad)->parent;
	ctx = self->av_ctx;

	if (ctx) {
		/* reset: flush delayed frames and free the old context */
		get_delayed(self);
		gst_av_codec_close(ctx);
		av_freep(&ctx->extradata);
		av_freep(&self->av_ctx);
		self->initialized = false;
	}

	in_struc = gst_caps_get_structure(caps, 0);

	name = gst_structure_get_name(in_struc);
	if (strcmp(name, "video/x-h263") == 0)
		codec_id = CODEC_ID_H263;
	else if (strcmp(name, "video/x-h264") == 0)
		codec_id = CODEC_ID_H264;
	else if (strcmp(name, "video/mpeg") == 0) {
		/* bugfix: initialize — gst_structure_get_int() leaves the
		 * output untouched when the field is absent, and the switch
		 * would read an indeterminate value */
		int version = 0;
		gst_structure_get_int(in_struc, "mpegversion", &version);
		switch (version) {
		case 4:
			codec_id = CODEC_ID_MPEG4;
			break;
		case 2:
			codec_id = CODEC_ID_MPEG2VIDEO;
			break;
		case 1:
			codec_id = CODEC_ID_MPEG1VIDEO;
			break;
		default:
			codec_id = CODEC_ID_NONE;
			break;
		}
	}
	else if (strcmp(name, "video/x-divx") == 0) {
		int version = 0; /* bugfix: see mpegversion above */
		gst_structure_get_int(in_struc, "divxversion", &version);
		switch (version) {
		case 5:
		case 4:
			codec_id = CODEC_ID_MPEG4;
			break;
		case 3:
			codec_id = CODEC_ID_MSMPEG4V3;
			break;
		default:
			codec_id = CODEC_ID_NONE;
			break;
		}
	}
	else if (strcmp(name, "video/x-xvid") == 0)
		codec_id = CODEC_ID_MPEG4;
	else if (strcmp(name, "video/x-3ivx") == 0)
		codec_id = CODEC_ID_MPEG4;
	else if (strcmp(name, "video/x-vp8") == 0)
		codec_id = CODEC_ID_VP8;
	else if (strcmp(name, "video/x-theora") == 0)
		codec_id = CODEC_ID_THEORA;
	else if (strcmp(name, "video/x-wmv") == 0) {
		int version = 0; /* bugfix: see mpegversion above */
		gst_structure_get_int(in_struc, "wmvversion", &version);
		switch (version) {
		case 3: {
			guint32 fourcc;
			codec_id = CODEC_ID_WMV3;
			/* WVC1 advanced profile is signalled via fourcc */
			if (gst_structure_get_fourcc(in_struc, "fourcc", &fourcc) ||
					gst_structure_get_fourcc(in_struc, "format", &fourcc))
			{
				if (fourcc == GST_MAKE_FOURCC('W', 'V', 'C', '1'))
					codec_id = CODEC_ID_VC1;
			}
			break;
		}
		case 2:
			codec_id = CODEC_ID_WMV2;
			break;
		case 1:
			codec_id = CODEC_ID_WMV1;
			break;
		default:
			codec_id = CODEC_ID_NONE;
			break;
		}

	}
	else
		codec_id = CODEC_ID_NONE;

	self->codec = avcodec_find_decoder(codec_id);
	if (!self->codec)
		return false;

	switch (codec_id) {
	case CODEC_ID_H263:
		self->parse_func = gst_av_h263_parse;
		break;
	case CODEC_ID_H264:
		self->parse_func = gst_av_h264_parse;
		break;
	case CODEC_ID_MPEG4:
		self->parse_func = gst_av_mpeg4_parse;
		break;
	}

	self->av_ctx = ctx = avcodec_alloc_context3(self->codec);

	ctx->get_buffer = get_buffer;
	ctx->release_buffer = release_buffer;
	ctx->reget_buffer = reget_buffer;
	ctx->opaque = self;
	ctx->flags |= CODEC_FLAG_EMU_EDGE;

	gst_structure_get_int(in_struc, "width", &ctx->width);
	gst_structure_get_int(in_struc, "height", &ctx->height);

	gst_structure_get_fraction(in_struc, "pixel-aspect-ratio",
			&ctx->sample_aspect_ratio.num, &ctx->sample_aspect_ratio.den);

	/* note: framerate num/den are swapped on purpose: AVCodecContext
	 * time_base is the inverse of the frame rate */
	gst_structure_get_fraction(in_struc, "framerate",
			&ctx->time_base.den, &ctx->time_base.num);

	/* bug in xvimagesink? */
	if (!ctx->time_base.num)
		ctx->time_base = (AVRational){ 1, 0 };

	if (codec_id == CODEC_ID_THEORA) {
		get_theora_extradata(ctx, in_struc);
		goto next;
	}

	codec_data = gst_structure_get_value(in_struc, "codec_data");
	if (!codec_data)
		goto next;
	buf = gst_value_get_buffer(codec_data);
	if (!buf)
		goto next;
	ctx->extradata = av_malloc(buf->size + FF_INPUT_BUFFER_PADDING_SIZE);
	/* bugfix: av_malloc() failure used to crash in memcpy(); decode
	 * without extradata instead of dereferencing NULL */
	if (!ctx->extradata)
		goto next;
	memcpy(ctx->extradata, buf->data, buf->size);
	memset(ctx->extradata + buf->size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
	ctx->extradata_size = buf->size;

	if (self->parse_func)
		self->parse_func(self, buf);

next:
	return true;
}
/* Fixate outgoing caps against the fixed incoming caps: propagate the
 * nearest width/height and latch the bayer pattern (GR/RG/GB/BG) from
 * the incoming format field for later whitebalancing. */
static void gst_tiswhitebalance_fixate_caps (GstBaseTransform* base,
                                             GstPadDirection direction,
                                             GstCaps* incoming,
                                             GstCaps* outgoing)
{
    GstTisWhiteBalance* self = GST_TISWHITEBALANCE(base);
    GstStructure* in_s;
    GstStructure* out_s;
    gint dim;

    g_return_if_fail (gst_caps_is_fixed (incoming));

    GST_DEBUG_OBJECT (base, "trying to fixate outgoing caps %" GST_PTR_FORMAT
                      " based on caps %" GST_PTR_FORMAT, outgoing, incoming);

    in_s = gst_caps_get_structure (incoming, 0);
    out_s = gst_caps_get_structure (outgoing, 0);

    /* carry width over and remember it for processing */
    if (gst_structure_get_int (in_s, "width", &dim))
    {
        if (gst_structure_has_field (out_s, "width"))
            gst_structure_fixate_field_nearest_int (out_s, "width", dim);
        self->width = dim;
    }

    /* same for height */
    if (gst_structure_get_int (in_s, "height", &dim))
    {
        if (gst_structure_has_field (out_s, "height"))
            gst_structure_fixate_field_nearest_int (out_s, "height", dim);
        self->height = dim;
    }

    if (g_strcmp0 (gst_structure_get_name (in_s), "video/x-raw-bayer") != 0)
    {
        gst_debug_log (gst_tiswhitebalance_debug_category,
                       GST_LEVEL_INFO,
                       "gst_tiswhitebalance",
                       "gst_tiswhitebalance_fixate_caps",
                       0,
                       NULL,
                       "Not a bayer format. White balance will be disabled.");
        return;
    }

    /* the format field may be a string (newer caps) or a fourcc */
    guint fcc = 0;
    GType field_type = gst_structure_get_field_type (in_s, "format");

    if (field_type == G_TYPE_STRING)
        fcc = GST_STR_FOURCC (gst_structure_get_string (in_s, "format"));
    else if (field_type == GST_TYPE_FOURCC)
        gst_structure_get_fourcc (in_s, "format", &fcc);

    if (fcc == 0)
    {
        gst_debug_log (gst_tiswhitebalance_debug_category,
                       GST_LEVEL_ERROR,
                       "gst_tiswhitebalance",
                       "gst_tiswhitebalance_fixate_caps",
                       0,
                       NULL,
                       "Unable to determine video format.");
        return;
    }

    /* fourcc -> bayer pattern lookup */
    static const struct { guint fcc; gint pattern; } pattern_map[] = {
        { MAKE_FOURCC ('g', 'r', 'b', 'g'), GR },
        { MAKE_FOURCC ('r', 'g', 'g', 'b'), RG },
        { MAKE_FOURCC ('g', 'b', 'r', 'g'), GB },
        { MAKE_FOURCC ('b', 'g', 'g', 'r'), BG },
    };

    guint idx;
    for (idx = 0; idx < G_N_ELEMENTS (pattern_map); ++idx)
    {
        if (pattern_map[idx].fcc == fcc)
        {
            self->pattern = pattern_map[idx].pattern;
            break;
        }
    }

    if (idx == G_N_ELEMENTS (pattern_map))
    {
        gst_debug_log (gst_tiswhitebalance_debug_category,
                       GST_LEVEL_ERROR,
                       "gst_tiswhitebalance",
                       "gst_tiswhitebalance_fixate_caps",
                       0,
                       NULL,
                       "Unable to determine bayer pattern.");
        return;
    }

    gst_debug_log (gst_tiswhitebalance_debug_category,
                   GST_LEVEL_INFO,
                   "gst_tiswhitebalance",
                   "gst_tiswhitebalance_fixate_caps",
                   0,
                   NULL,
                   "Using bayer format %s for whitebalancing.", bayer_to_string(self->pattern));
}
/* Sink pad setcaps handler: parses width/height/framerate/format from
 * the caps, maps the fourcc to a Y4M colorspace string, stores optional
 * pixel-aspect-ratio and interlacing info, then sets template caps on
 * the source pad.  Returns FALSE on missing/unsupported fields. */
static gboolean
gst_y4m_encode_setcaps (GstPad * pad, GstCaps * vscaps)
{
  GstY4mEncode *filter;
  GstStructure *structure;
  gboolean res;
  gint w, h;
  /* bugfix: initialize — when "format" is absent the switch below used
   * to read an indeterminate value (res was already FALSE, but the read
   * itself is undefined behavior); 0 falls into the default branch */
  guint32 fourcc = 0;
  const GValue *fps, *par, *interlaced;

  filter = GST_Y4M_ENCODE (GST_PAD_PARENT (pad));

  structure = gst_caps_get_structure (vscaps, 0);

  res = gst_structure_get_int (structure, "width", &w);
  res &= gst_structure_get_int (structure, "height", &h);
  res &= ((fps = gst_structure_get_value (structure, "framerate")) != NULL);
  res &= gst_structure_get_fourcc (structure, "format", &fourcc);

  switch (fourcc) {             /* Translate fourcc to Y4M colorspace code */
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
    case GST_MAKE_FOURCC ('I', 'Y', 'U', 'V'):
      filter->colorspace = "420";
      break;
    case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
      filter->colorspace = "422";
      break;
    case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
      filter->colorspace = "411";
      break;
    case GST_MAKE_FOURCC ('Y', '4', '4', '4'):
      filter->colorspace = "444";
      break;
    default:
      res = FALSE;
      break;
  }

  if (!res || w <= 0 || h <= 0 || !GST_VALUE_HOLDS_FRACTION (fps))
    return FALSE;

  /* optional interlaced info */
  interlaced = gst_structure_get_value (structure, "interlaced");

  /* optional par info */
  par = gst_structure_get_value (structure, "pixel-aspect-ratio");

  filter->width = w;
  filter->height = h;
  filter->fps_num = gst_value_get_fraction_numerator (fps);
  filter->fps_den = gst_value_get_fraction_denominator (fps);
  if ((par != NULL) && GST_VALUE_HOLDS_FRACTION (par)) {
    filter->par_num = gst_value_get_fraction_numerator (par);
    filter->par_den = gst_value_get_fraction_denominator (par);
  } else {                      /* indicates unknown */
    filter->par_num = 0;
    filter->par_den = 0;
  }
  if ((interlaced != NULL) && G_VALUE_HOLDS (interlaced, G_TYPE_BOOLEAN)) {
    filter->interlaced = g_value_get_boolean (interlaced);
  } else {
    /* assume progressive if no interlaced property in caps */
    filter->interlaced = FALSE;
  }
  /* the template caps will do for the src pad, should always accept */
  return gst_pad_set_caps (filter->srcpad,
      gst_static_pad_template_get_caps (&y4mencode_src_factory));
}
/* returns static descriptions and dynamic ones (such as video/x-raw-yuv),
 * or NULL if caps aren't known at all */
static gchar *
format_info_get_desc (const FormatInfo * info, const GstCaps * caps)
{
  const GstStructure *s;

  g_assert (info != NULL);

  if (info->desc != NULL)
    return g_strdup (_(info->desc));

  s = gst_caps_get_structure (caps, 0);

  if (strcmp (info->type, "video/x-raw-yuv") == 0) {
    const gchar *ret = NULL;
    guint32 fourcc = 0;

    gst_structure_get_fourcc (s, "format", &fourcc);
    switch (fourcc) {
      case GST_MAKE_FOURCC ('I', '4', '2', '0'):
        ret = _("Uncompressed planar YUV 4:2:0");
        break;
      case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
        ret = _("Uncompressed planar YVU 4:2:0");
        break;
      case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
        ret = _("Uncompressed packed YUV 4:2:2");
        break;
      case GST_MAKE_FOURCC ('Y', 'U', 'V', '9'):
        ret = _("Uncompressed packed YUV 4:1:0");
        break;
      case GST_MAKE_FOURCC ('Y', 'V', 'U', '9'):
        ret = _("Uncompressed packed YVU 4:1:0");
        break;
      case GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'):
      case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
        ret = _("Uncompressed packed YUV 4:2:2");
        break;
      case GST_MAKE_FOURCC ('Y', '4', '1', 'P'):
        ret = _("Uncompressed packed YUV 4:1:1");
        break;
      case GST_MAKE_FOURCC ('I', 'Y', 'U', '2'):
        ret = _("Uncompressed packed YUV 4:4:4");
        break;
      case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
        ret = _("Uncompressed planar YUV 4:2:2");
        break;
      case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
        ret = _("Uncompressed planar YUV 4:1:1");
        break;
      case GST_MAKE_FOURCC ('Y', '8', '0', '0'):
        ret = _("Uncompressed black and white Y-plane");
        break;
      default:
        ret = _("Uncompressed YUV");
        break;
    }
    return g_strdup (ret);
  } else if (strcmp (info->type, "video/x-raw-rgb") == 0) {
    const gchar *rgb_str;
    gint depth = 0;

    gst_structure_get_int (s, "depth", &depth);
    rgb_str = gst_structure_has_field (s, "alpha_mask") ? "RGBA" : "RGB";
    if (gst_structure_has_field (s, "paletted_data")) {
      return g_strdup_printf (_("Uncompressed palettized %d-bit %s"), depth,
          rgb_str);
    } else {
      return g_strdup_printf ("Uncompressed %d-bit %s", depth, rgb_str);
    }
  } else if (strcmp (info->type, "video/x-h263") == 0) {
    const gchar *variant, *ret;

    variant = gst_structure_get_string (s, "variant");
    if (variant == NULL)
      ret = "H.263";
    else if (strcmp (variant, "itu") == 0)
      ret = "ITU H.26n";        /* why not ITU H.263? (tpm) */
    else if (strcmp (variant, "lead") == 0)
      ret = "Lead H.263";
    else if (strcmp (variant, "microsoft") == 0)
      ret = "Microsoft H.263";
    else if (strcmp (variant, "vdolive") == 0)
      ret = "VDOLive";
    else if (strcmp (variant, "vivo") == 0)
      ret = "Vivo H.263";
    else if (strcmp (variant, "xirlink") == 0)
      ret = "Xirlink H.263";
    else {
      GST_WARNING ("Unknown H263 variant '%s'", variant);
      ret = "H.263";
    }
    return g_strdup (ret);
  } else if (strcmp (info->type, "video/x-h264") == 0) {
    const gchar *variant, *ret;

    variant = gst_structure_get_string (s, "variant");
    if (variant == NULL)
      ret = "H.264";
    else if (strcmp (variant, "itu") == 0)
      ret = "ITU H.264";
    else if (strcmp (variant, "videosoft") == 0)
      ret = "Videosoft H.264";
    else if (strcmp (variant, "lead") == 0)
      ret = "Lead H.264";
    else {
      GST_WARNING ("Unknown H264 variant '%s'", variant);
      ret = "H.264";
    }
    return g_strdup (ret);
  } else if (strcmp (info->type, "video/x-divx") == 0) {
    gint ver = 0;

    if (!gst_structure_get_int (s, "divxversion", &ver) || ver <= 2) {
      GST_WARNING ("Unexpected DivX version in %" GST_PTR_FORMAT, caps);
      return g_strdup ("DivX MPEG-4");
    }
    return g_strdup_printf (_("DivX MPEG-4 Version %d"), ver);
  } else if (strcmp (info->type, "video/x-msmpeg") == 0) {
    gint ver = 0;

    if (!gst_structure_get_int (s, "msmpegversion", &ver) ||
        ver < 40 || ver > 49) {
      GST_WARNING ("Unexpected msmpegversion in %" GST_PTR_FORMAT, caps);
      return g_strdup ("Microsoft MPEG-4 4.x");
    }
    return g_strdup_printf ("Microsoft MPEG-4 4.%d", ver % 10);
  } else if (strcmp (info->type, "video/x-truemotion") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "trueversion", &ver);
    switch (ver) {
      case 1:
        return g_strdup_printf ("Duck TrueMotion 1");
      case 2:
        return g_strdup_printf ("TrueMotion 2.0");
      default:
        GST_WARNING ("Unexpected trueversion in %" GST_PTR_FORMAT, caps);
        break;
    }
    return g_strdup_printf ("TrueMotion");
  } else if (strcmp (info->type, "video/x-xan") == 0) {
    gint ver = 0;

    if (!gst_structure_get_int (s, "wcversion", &ver) || ver < 1) {
      GST_WARNING ("Unexpected wcversion in %" GST_PTR_FORMAT, caps);
      return g_strdup ("Xan Wing Commander");
    }
    return g_strdup_printf ("Xan Wing Commander %u", ver);
  } else if (strcmp (info->type, "video/x-indeo") == 0) {
    gint ver = 0;

    if (!gst_structure_get_int (s, "indeoversion", &ver) || ver < 2) {
      GST_WARNING ("Unexpected indeoversion in %" GST_PTR_FORMAT, caps);
      return g_strdup ("Intel Indeo");
    }
    return g_strdup_printf ("Intel Indeo %u", ver);
  } else if (strcmp (info->type, "audio/x-wma") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "wmaversion", &ver);
    switch (ver) {
      case 1:
      case 2:
      case 3:
        return g_strdup_printf ("Windows Media Audio %d", ver + 6);
      default:
        break;
    }
    GST_WARNING ("Unexpected wmaversion in %" GST_PTR_FORMAT, caps);
    return g_strdup ("Windows Media Audio");
  } else if (strcmp (info->type, "video/x-wmv") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "wmvversion", &ver);
    switch (ver) {
      case 1:
      case 2:
      case 3:
        return g_strdup_printf ("Windows Media Video %d", ver + 6);
      default:
        break;
    }
    GST_WARNING ("Unexpected wmvversion in %" GST_PTR_FORMAT, caps);
    return g_strdup ("Windows Media Video");
  } else if (strcmp (info->type, "audio/x-mace") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "maceversion", &ver);
    if (ver == 3 || ver == 6) {
      return g_strdup_printf ("MACE-%d", ver);
    } else {
      GST_WARNING ("Unexpected maceversion in %" GST_PTR_FORMAT, caps);
      return g_strdup ("MACE");
    }
  } else if (strcmp (info->type, "video/x-svq") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "svqversion", &ver);
    if (ver == 1 || ver == 3) {
      return g_strdup_printf ("Sorensen Video %d", ver);
    } else {
      GST_WARNING ("Unexpected svqversion in %" GST_PTR_FORMAT, caps);
      return g_strdup ("Sorensen Video");
    }
  } else if (strcmp (info->type, "video/x-asus") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "asusversion", &ver);
    if (ver == 1 || ver == 2) {
      return g_strdup_printf ("Asus Video %d", ver);
    } else {
      GST_WARNING ("Unexpected asusversion in %" GST_PTR_FORMAT, caps);
      return g_strdup ("Asus Video");
    }
  } else if (strcmp (info->type, "video/x-ati-vcr") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "vcrversion", &ver);
    if (ver == 1 || ver == 2) {
      return g_strdup_printf ("ATI VCR %d", ver);
    } else {
      GST_WARNING ("Unexpected acrversion in %" GST_PTR_FORMAT, caps);
      return g_strdup ("ATI VCR");
    }
  } else if (strcmp (info->type, "audio/x-adpcm") == 0) {
    const GValue *layout_val;

    layout_val = gst_structure_get_value (s, "layout");
    if (layout_val != NULL && G_VALUE_HOLDS_STRING (layout_val)) {
      const gchar *layout;

      if ((layout = g_value_get_string (layout_val))) {
        gchar *layout_upper, *ret;

        if (strcmp (layout, "swf") == 0)
          return g_strdup ("Shockwave ADPCM");
        if (strcmp (layout, "microsoft") == 0)
          return g_strdup ("Microsoft ADPCM");
        if (strcmp (layout, "quicktime") == 0)
          return g_strdup ("Quicktime ADPCM");
        if (strcmp (layout, "westwood") == 0)
          return g_strdup ("Westwood ADPCM");
        if (strcmp (layout, "yamaha") == 0)
          return g_strdup ("Yamaha ADPCM");
        /* FIXME: other layouts: sbpro2, sbpro3, sbpro4, ct, g726, ea,
         * adx, xa, 4xm, smjpeg, dk4, dk3, dvi */
        layout_upper = g_ascii_strup (layout, -1);
        ret = g_strdup_printf ("%s ADPCM", layout_upper);
        g_free (layout_upper);
        return ret;
      }
    }
    return g_strdup ("ADPCM");
  } else if (strcmp (info->type, "audio/mpeg") == 0) {
    gint ver = 0, layer = 0;

    gst_structure_get_int (s, "mpegversion", &ver);

    switch (ver) {
      case 1:
        gst_structure_get_int (s, "layer", &layer);
        switch (layer) {
          case 1:
          case 2:
          case 3:
            return g_strdup_printf ("MPEG-1 Layer %d (MP%d)", layer, layer);
          default:
            break;
        }
        GST_WARNING ("Unexpected MPEG-1 layer in %" GST_PTR_FORMAT, caps);
        return g_strdup ("MPEG-1 Audio");
      case 4:
        return g_strdup ("MPEG-4 AAC");
      default:
        break;
    }
    GST_WARNING ("Unexpected audio mpegversion in %" GST_PTR_FORMAT, caps);
    return g_strdup ("MPEG Audio");
  } else if (strcmp (info->type, "audio/x-pn-realaudio") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "raversion", &ver);
    switch (ver) {
      case 1:
        return g_strdup ("RealAudio 14k4bps");
      case 2:
        return g_strdup ("RealAudio 28k8bps");
      case 8:
        return g_strdup ("RealAudio G2 (Cook)");
      default:
        break;
    }
    GST_WARNING ("Unexpected raversion in %" GST_PTR_FORMAT, caps);
    return g_strdup ("RealAudio");
  } else if (strcmp (info->type, "video/x-pn-realvideo") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "rmversion", &ver);
    switch (ver) {
      case 1:
        return g_strdup ("RealVideo 1.0");
      case 2:
        return g_strdup ("RealVideo 2.0");
      case 3:
        return g_strdup ("RealVideo 3.0");
      case 4:
        return g_strdup ("RealVideo 4.0");
      default:
        break;
    }
    GST_WARNING ("Unexpected rmversion in %" GST_PTR_FORMAT, caps);
    return g_strdup ("RealVideo");
  } else if (strcmp (info->type, "video/mpeg") == 0) {
    gboolean sysstream;
    gint ver = 0;

    if (!gst_structure_get_boolean (s, "systemstream", &sysstream) ||
        !gst_structure_get_int (s, "mpegversion", &ver) || ver < 1 || ver > 4) {
      GST_WARNING ("Missing fields in mpeg video caps %" GST_PTR_FORMAT, caps);
    } else {
      if (sysstream) {
        return g_strdup_printf ("MPEG-%d System Stream", ver);
      } else {
        return g_strdup_printf ("MPEG-%d Video", ver);
      }
    }
    return g_strdup ("MPEG Video");
  } else if (strcmp (info->type, "audio/x-raw-int") == 0) {
    gint bitdepth = 0;

    /* 8-bit pcm might not have depth field (?) */
    if (!gst_structure_get_int (s, "depth", &bitdepth))
      gst_structure_get_int (s, "width", &bitdepth);
    if (bitdepth != 0)
      return g_strdup_printf (_("Raw %d-bit PCM audio"), bitdepth);
    else
      return g_strdup (_("Raw PCM audio"));
  } else if (strcmp (info->type, "audio/x-raw-float") == 0) {
    gint bitdepth = 0;

    gst_structure_get_int (s, "width", &bitdepth);
    if (bitdepth != 0)
      return g_strdup_printf (_("Raw %d-bit floating-point audio"), bitdepth);
    else
      return g_strdup (_("Raw floating-point audio"));
  }

  return NULL;
}
Beispiel #17
0
/* BaseSrc set_caps handler: stops acquisition, programs the camera
 * (region, binning, pixel format, frame rate, gain, exposure), builds
 * the fixed caps for later negotiation, creates a fresh stream with
 * preallocated buffers, and restarts acquisition.  Always returns TRUE. */
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
    GstAravis* gst_aravis = GST_ARAVIS(src);
    GstStructure *structure;
    ArvPixelFormat pixel_format;
    int height, width;
    /* bugfix: initialize — the gst_structure_get_* calls below leave
     * their outputs untouched when the field is absent, and the values
     * were then passed uninitialized to arv_pixel_format_from_gst_caps() */
    int bpp = 0, depth = 0;
    const GValue *frame_rate;
    const char *caps_string;
    unsigned int i;
    guint32 fourcc = 0;

    GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

    arv_camera_stop_acquisition (gst_aravis->camera);

    if (gst_aravis->stream != NULL)
        g_object_unref (gst_aravis->stream);

    structure = gst_caps_get_structure (caps, 0);

    gst_structure_get_int (structure, "width", &width);
    gst_structure_get_int (structure, "height", &height);
    frame_rate = gst_structure_get_value (structure, "framerate");
    gst_structure_get_fourcc (structure, "format", &fourcc);
    gst_structure_get_int (structure, "bpp", &bpp);
    gst_structure_get_int (structure, "depth", &depth);

    pixel_format = arv_pixel_format_from_gst_caps (gst_structure_get_name (structure), bpp, depth, fourcc);

    arv_camera_set_region (gst_aravis->camera, 0, 0, width, height);
    arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
    arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

    if (frame_rate != NULL) {
        double dbl_frame_rate;

        dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
                         (double) gst_value_get_fraction_denominator (frame_rate);

        GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
        arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

        /* wait for at least ~3 frame periods before declaring a buffer lost */
        if (dbl_frame_rate > 0.0)
            gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
                                                 3e6 / dbl_frame_rate);
        else
            gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
    } else
        gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

    /* bugfix: "%Ld" is not a standard printf conversion; use the GLib
     * 64-bit macro (assumes buffer_timeout_us is a guint64, as in
     * upstream aravis — confirm if the field type differs) */
    GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %" G_GUINT64_FORMAT " µs",
                      gst_aravis->buffer_timeout_us);

    GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz", arv_camera_get_frame_rate (gst_aravis->camera));

    GST_DEBUG_OBJECT (gst_aravis, "Gain       = %d", gst_aravis->gain);
    arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
    GST_DEBUG_OBJECT (gst_aravis, "Actual gain       = %d", arv_camera_get_gain (gst_aravis->camera));

    GST_DEBUG_OBJECT (gst_aravis, "Exposure   = %g µs", gst_aravis->exposure_time_us);
    arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
    GST_DEBUG_OBJECT (gst_aravis, "Actual exposure   = %g µs", arv_camera_get_exposure_time (gst_aravis->camera));

    if (gst_aravis->fixed_caps != NULL)
        gst_caps_unref (gst_aravis->fixed_caps);

    caps_string = arv_pixel_format_to_gst_caps_string (pixel_format);
    if (caps_string != NULL) {
        /* renamed from structure/caps: the old names shadowed the outer
         * locals of the same name (-Wshadow hazard) */
        GstStructure *new_structure;
        GstCaps *new_caps;

        new_caps = gst_caps_new_empty ();
        new_structure = gst_structure_from_string (caps_string, NULL);
        gst_structure_set (new_structure,
                           "width", G_TYPE_INT, width,
                           "height", G_TYPE_INT, height,
                           NULL);

        if (frame_rate != NULL)
            gst_structure_set_value (new_structure, "framerate", frame_rate);

        gst_caps_append_structure (new_caps, new_structure);

        gst_aravis->fixed_caps = new_caps;
    } else
        gst_aravis->fixed_caps = NULL;

    gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
    gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

    for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
        arv_stream_push_buffer (gst_aravis->stream,
                                arv_buffer_new (gst_aravis->payload, NULL));

    GST_LOG_OBJECT (gst_aravis, "Start acquisition");
    arv_camera_start_acquisition (gst_aravis->camera);

    gst_aravis->timestamp_offset = 0;
    gst_aravis->last_timestamp = 0;

    return TRUE;
}
static gboolean
gst_rtp_vraw_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
{
    GstRtpVRawPay *rtpvrawpay;
    GstStructure *s;
    gboolean res;
    const gchar *name;
    gint width, height;
    gint yp, up, vp;
    gint pgroup, ystride, uvstride = 0, xinc, yinc;
    GstVideoFormat sampling;
    const gchar *depthstr, *samplingstr, *colorimetrystr;
    gchar *wstr, *hstr;
    gboolean interlaced;
    const gchar *color_matrix;
    gint depth;

    rtpvrawpay = GST_RTP_VRAW_PAY (payload);

    s = gst_caps_get_structure (caps, 0);

    /* start parsing the format */
    name = gst_structure_get_name (s);

    /* these values are the only thing we can do */
    depthstr = "8";

    /* parse common width/height */
    res = gst_structure_get_int (s, "width", &width);
    res &= gst_structure_get_int (s, "height", &height);
    if (!res)
        goto missing_dimension;

    if (!gst_structure_get_boolean (s, "interlaced", &interlaced))
        interlaced = FALSE;

    color_matrix = gst_structure_get_string (s, "color-matrix");
    colorimetrystr = "SMPTE240M";
    if (color_matrix) {
        if (g_str_equal (color_matrix, "sdtv")) {
            /* BT.601 implies a bit more than just color-matrix */
            colorimetrystr = "BT601-5";
        } else if (g_str_equal (color_matrix, "hdtv")) {
            colorimetrystr = "BT709-2";
        }
    }

    yp = up = vp = 0;
    xinc = yinc = 1;

    if (!strcmp (name, "video/x-raw-rgb")) {
        gint amask, rmask;
        gboolean has_alpha;

        has_alpha = gst_structure_get_int (s, "alpha_mask", &amask);
        depth = 8;

        if (!gst_structure_get_int (s, "red_mask", &rmask))
            goto unknown_mask;

        if (has_alpha) {
            pgroup = 4;
            ystride = width * 4;
            if (rmask == 0xFF000000) {
                sampling = GST_VIDEO_FORMAT_RGBA;
                samplingstr = "RGBA";
            } else {
                sampling = GST_VIDEO_FORMAT_BGRA;
                samplingstr = "BGRA";
            }
        } else {
            pgroup = 3;
            ystride = GST_ROUND_UP_4 (width * 3);
            if (rmask == 0x00FF0000) {
                sampling = GST_VIDEO_FORMAT_RGB;
                samplingstr = "RGB";
            } else {
                sampling = GST_VIDEO_FORMAT_BGR;
                samplingstr = "BGR";
            }
        }
    } else if (!strcmp (name, "video/x-raw-yuv")) {
        guint32 fourcc;

        if (!gst_structure_get_fourcc (s, "format", &fourcc))
            goto unknown_fourcc;

        GST_LOG_OBJECT (payload, "have fourcc %" GST_FOURCC_FORMAT,
                        GST_FOURCC_ARGS (fourcc));

        switch (fourcc) {
        case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
            sampling = GST_VIDEO_FORMAT_AYUV;
            samplingstr = "YCbCr-4:4:4";
            pgroup = 3;
            ystride = width * 4;
            depth = 8;
            break;
        case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
            sampling = GST_VIDEO_FORMAT_UYVY;
            samplingstr = "YCbCr-4:2:2";
            pgroup = 4;
            xinc = 2;
            ystride = GST_ROUND_UP_2 (width) * 2;
            depth = 8;
            break;
        case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
            sampling = GST_VIDEO_FORMAT_Y41B;
            samplingstr = "YCbCr-4:1:1";
            pgroup = 6;
            xinc = 4;
            ystride = GST_ROUND_UP_4 (width);
            uvstride = GST_ROUND_UP_8 (width) / 4;
            up = ystride * height;
            vp = up + uvstride * height;
            depth = 8;
            break;
        case GST_MAKE_FOURCC ('I', '4', '2', '0'):
            sampling = GST_VIDEO_FORMAT_I420;
            samplingstr = "YCbCr-4:2:0";
            pgroup = 6;
            xinc = yinc = 2;
            ystride = GST_ROUND_UP_4 (width);
            uvstride = GST_ROUND_UP_8 (width) / 2;
            up = ystride * GST_ROUND_UP_2 (height);
            vp = up + uvstride * GST_ROUND_UP_2 (height) / 2;
            depth = 8;
            break;
        case GST_MAKE_FOURCC ('U', 'Y', 'V', 'P'):
#define GST_VIDEO_FORMAT_UYVP GST_VIDEO_FORMAT_UYVY     /* FIXME */
            sampling = GST_VIDEO_FORMAT_UYVP;
            samplingstr = "YCbCr-4:2:2";
            pgroup = 4;
            xinc = 2;
            ystride = GST_ROUND_UP_2 (width) * 2;
            depth = 10;
            break;
        default:
            goto unknown_fourcc;
        }
    } else
        goto unknown_format;

    if (interlaced) {
        yinc *= 2;
    }
    if (depth == 10) {
        depthstr = "10";
    }

    rtpvrawpay->width = width;
    rtpvrawpay->height = height;
    rtpvrawpay->sampling = sampling;
    rtpvrawpay->pgroup = pgroup;
    rtpvrawpay->xinc = xinc;
    rtpvrawpay->yinc = yinc;
    rtpvrawpay->yp = yp;
    rtpvrawpay->up = up;
    rtpvrawpay->vp = vp;
    rtpvrawpay->ystride = ystride;
    rtpvrawpay->uvstride = uvstride;
    rtpvrawpay->interlaced = interlaced;
    rtpvrawpay->depth = depth;

    GST_DEBUG_OBJECT (payload, "width %d, height %d, sampling %d", width, height,
                      sampling);
    GST_DEBUG_OBJECT (payload, "yp %d, up %d, vp %d", yp, up, vp);
    GST_DEBUG_OBJECT (payload, "pgroup %d, ystride %d, uvstride %d", pgroup,
                      ystride, uvstride);

    wstr = g_strdup_printf ("%d", rtpvrawpay->width);
    hstr = g_strdup_printf ("%d", rtpvrawpay->height);

    gst_basertppayload_set_options (payload, "video", TRUE, "RAW", 90000);
    if (interlaced) {
        res = gst_basertppayload_set_outcaps (payload, "sampling", G_TYPE_STRING,
                                              samplingstr, "depth", G_TYPE_STRING, depthstr, "width", G_TYPE_STRING,
                                              wstr, "height", G_TYPE_STRING, hstr, "colorimetry", G_TYPE_STRING,
                                              colorimetrystr, "interlace", G_TYPE_STRING, "true", NULL);
    } else {
        res = gst_basertppayload_set_outcaps (payload, "sampling", G_TYPE_STRING,
                                              samplingstr, "depth", G_TYPE_STRING, depthstr, "width", G_TYPE_STRING,
                                              wstr, "height", G_TYPE_STRING, hstr, "colorimetry", G_TYPE_STRING,
                                              colorimetrystr, NULL);
    }
    g_free (wstr);
    g_free (hstr);

    return res;

    /* ERRORS */
unknown_mask:
    {
        GST_ERROR_OBJECT (payload, "unknown red mask specified");
        return FALSE;
    }
unknown_format:
    {
        GST_ERROR_OBJECT (payload, "unknown caps format");
        return FALSE;
    }
unknown_fourcc:
    {
        GST_ERROR_OBJECT (payload, "invalid or missing fourcc");
        return FALSE;
    }
missing_dimension:
    {
        GST_ERROR_OBJECT (payload, "missing width or height property");
        return FALSE;
    }
}
Beispiel #19
0
/* Sink setcaps: configure the Theora encoder from raw-video caps.
 * Returns TRUE on success, FALSE when mandatory caps fields are missing
 * or the pixel format is unsupported. */
static gboolean
theora_enc_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  GstTheoraEnc *enc = GST_THEORA_ENC (gst_pad_get_parent (pad));
  guint32 fourcc = 0;
  const GValue *par;
  gint fps_n, fps_d;

  /* format, width, height and framerate are all mandatory for raw video.
   * Previously these return values were ignored, which left fourcc (and
   * the dimensions/framerate) uninitialized on malformed caps. */
  if (!gst_structure_get_fourcc (structure, "format", &fourcc) ||
      !gst_structure_get_int (structure, "width", &enc->width) ||
      !gst_structure_get_int (structure, "height", &enc->height) ||
      !gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d))
    goto invalid_caps;

  par = gst_structure_get_value (structure, "pixel-aspect-ratio");

  th_info_clear (&enc->info);
  th_info_init (&enc->info);
  /* Theora has a divisible-by-sixteen restriction for the encoded video size but
   * we can define a picture area using pic_width/pic_height */
  enc->info.frame_width = GST_ROUND_UP_16 (enc->width);
  enc->info.frame_height = GST_ROUND_UP_16 (enc->height);
  enc->info.pic_width = enc->width;
  enc->info.pic_height = enc->height;
  switch (fourcc) {
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
      enc->info.pixel_fmt = TH_PF_420;
      break;
    case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
      enc->info.pixel_fmt = TH_PF_422;
      break;
    case GST_MAKE_FOURCC ('Y', '4', '4', '4'):
      enc->info.pixel_fmt = TH_PF_444;
      break;
    default:
      /* refuse the caps gracefully instead of aborting the whole process
       * via g_assert_not_reached() */
      goto invalid_caps;
  }

  enc->info.fps_numerator = enc->fps_n = fps_n;
  enc->info.fps_denominator = enc->fps_d = fps_d;
  if (par) {
    enc->info.aspect_numerator = gst_value_get_fraction_numerator (par);
    enc->info.aspect_denominator = gst_value_get_fraction_denominator (par);
  } else {
    /* setting them to 0 indicates that the decoder can chose a good aspect
     * ratio, defaulting to 1/1 */
    enc->info.aspect_numerator = 0;
    enc->info.aspect_denominator = 0;
  }

  enc->info.colorspace = TH_CS_UNSPECIFIED;

  /* as done in theora */
  enc->info.keyframe_granule_shift = _ilog (enc->keyframe_force - 1);
  GST_DEBUG_OBJECT (enc,
      "keyframe_frequency_force is %d, granule shift is %d",
      enc->keyframe_force, enc->info.keyframe_granule_shift);

  theora_enc_reset (enc);
  enc->initialised = TRUE;

  gst_object_unref (enc);

  return TRUE;

  /* ERRORS */
invalid_caps:
  {
    GST_WARNING_OBJECT (enc, "could not accept caps %" GST_PTR_FORMAT, caps);
    /* drop the parent ref taken by gst_pad_get_parent() */
    gst_object_unref (enc);
    return FALSE;
  }
}
Beispiel #20
0
/* Sink setcaps: parse the incoming JPEG2000 caps and configure the
 * Jasper decoder (format id, box strip size, colorspace, codec data,
 * framerate).  Returns TRUE when the caps are acceptable. */
static gboolean
gst_jasper_dec_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstJasperDec *dec = GST_JASPER_DEC (GST_PAD_PARENT (pad));
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  const gchar *media_type = gst_structure_get_name (structure);
  const GValue *fps_value;
  guint32 colorspace_fourcc;

  /* reset negotiation state before parsing the new caps */
  dec->fmt = -1;
  dec->strip = 0;
  dec->format = GST_VIDEO_FORMAT_UNKNOWN;
  if (dec->codec_data) {
    gst_buffer_unref (dec->codec_data);
    dec->codec_data = NULL;
  }

  if (strcmp (media_type, "image/x-j2c") == 0 ||
      strcmp (media_type, "image/x-jpc") == 0) {
    const GValue *codec_data;
    gint fields;

    /* we only handle single field, packetized input */
    if (gst_structure_get_value (structure, "framerate") == NULL)
      goto refuse_caps;
    if (gst_structure_get_int (structure, "fields", &fields) && fields != 1)
      goto refuse_caps;

    /* the fourcc tells us the colorspace of the codestream */
    if (!gst_structure_get_fourcc (structure, "fourcc", &colorspace_fourcc))
      goto refuse_caps;
    if (colorspace_fourcc == GST_MAKE_FOURCC ('s', 'R', 'G', 'B'))
      dec->clrspc = JAS_CLRSPC_SRGB;
    else if (colorspace_fourcc == GST_MAKE_FOURCC ('s', 'Y', 'U', 'V'))
      dec->clrspc = JAS_CLRSPC_SYCBCR;
    else
      goto refuse_caps;

    dec->fmt = jas_image_strtofmt ((char *) "jpc");
    /* strip the j2c box stuff it is embedded in */
    dec->strip = (strcmp (media_type, "image/x-jpc") == 0) ? 0 : 8;

    codec_data = gst_structure_get_value (structure, "codec_data");
    if (codec_data != NULL) {
      dec->codec_data = gst_value_get_buffer (codec_data);
      gst_buffer_ref (dec->codec_data);
    }
  } else if (strcmp (media_type, "image/jp2") == 0) {
    dec->fmt = jas_image_strtofmt ((char *) "jp2");
  }

  if (dec->fmt < 0)
    goto refuse_caps;

  /* a framerate implies packetized mode; otherwise assume one image */
  fps_value = gst_structure_get_value (structure, "framerate");
  if (fps_value != NULL) {
    dec->framerate_numerator = gst_value_get_fraction_numerator (fps_value);
    dec->framerate_denominator = gst_value_get_fraction_denominator (fps_value);
    GST_DEBUG_OBJECT (dec, "got framerate of %d/%d fps => packetized mode",
        dec->framerate_numerator, dec->framerate_denominator);
  } else {
    dec->framerate_numerator = 0;
    dec->framerate_denominator = 1;
    GST_DEBUG_OBJECT (dec, "no framerate, assuming single image");
  }

  return TRUE;

refuse_caps:
  {
    GST_WARNING_OBJECT (dec, "refused caps %" GST_PTR_FORMAT, caps);
    return FALSE;
  }
}
/* Setcaps handler shared by both pads: parses the video caps, forwards
 * (possibly framerate-doubled) caps to the opposite pad, and derives the
 * field/frame geometry used by the deinterlacer.
 * NOTE(review): frame_rate_n is not checked for 0 before being used as a
 * scale denominator below — presumably upstream never negotiates 0/1
 * framerates here; confirm. */
static gboolean
gst_deinterlace2_setcaps (GstPad * pad, GstCaps * caps)
{
  gboolean res = TRUE;
  GstDeinterlace2 *self = GST_DEINTERLACE2 (gst_pad_get_parent (pad));
  GstPad *otherpad;
  GstStructure *structure;
  GstVideoFormat fmt;
  guint32 fourcc;
  GstCaps *othercaps;

  /* caps may arrive on either pad; negotiate towards the opposite one */
  otherpad = (pad == self->srcpad) ? self->sinkpad : self->srcpad;

  structure = gst_caps_get_structure (caps, 0);

  /* all four fields are mandatory; any failure refuses the caps */
  res = gst_structure_get_int (structure, "width", &self->frame_width);
  res &= gst_structure_get_int (structure, "height", &self->frame_height);
  res &=
      gst_structure_get_fraction (structure, "framerate", &self->frame_rate_n,
      &self->frame_rate_d);
  res &= gst_structure_get_fourcc (structure, "format", &fourcc);
  /* TODO: get interlaced, field_layout, field_order */
  if (!res)
    goto invalid_caps;

  if (self->fields == GST_DEINTERLACE2_ALL) {
    gint fps_n = self->frame_rate_n, fps_d = self->frame_rate_d;

    /* outputting every field doubles the rate downstream (halves it when
     * the caps came from the src side) */
    if (!gst_fraction_double (&fps_n, &fps_d, otherpad != self->srcpad))
      goto invalid_caps;

    othercaps = gst_caps_copy (caps);

    gst_caps_set_simple (othercaps, "framerate", GST_TYPE_FRACTION, fps_n,
        fps_d, NULL);
  } else {
    othercaps = gst_caps_ref (caps);
  }

  /* othercaps is ours; unref it whether the peer accepts it or not */
  if (!gst_pad_set_caps (otherpad, othercaps))
    goto caps_not_accepted;
  gst_caps_unref (othercaps);

  /* TODO: introduce self->field_stride */
  self->field_height = self->frame_height / 2;

  fmt = gst_video_format_from_fourcc (fourcc);

  /* TODO: only true if fields are subbuffers of interlaced frames,
     change when the buffer-fields concept has landed */
  self->field_stride =
      gst_video_format_get_row_stride (fmt, 0, self->frame_width) * 2;
  self->output_stride =
      gst_video_format_get_row_stride (fmt, 0, self->frame_width);

  /* in bytes */
  self->line_length =
      gst_video_format_get_row_stride (fmt, 0, self->frame_width);
  self->frame_size =
      gst_video_format_get_size (fmt, self->frame_width, self->frame_height);

  /* a field lasts 1/frame_rate when every field becomes an output frame
   * (src side), otherwise half a frame duration */
  if (self->fields == GST_DEINTERLACE2_ALL && otherpad == self->srcpad)
    self->field_duration =
        gst_util_uint64_scale (GST_SECOND, self->frame_rate_d,
        self->frame_rate_n);
  else
    self->field_duration =
        gst_util_uint64_scale (GST_SECOND, self->frame_rate_d,
        2 * self->frame_rate_n);

  GST_DEBUG_OBJECT (self, "Set caps: %" GST_PTR_FORMAT, caps);

done:

  /* drop the parent ref taken by gst_pad_get_parent() on every path */
  gst_object_unref (self);
  return res;

invalid_caps:
  res = FALSE;
  GST_ERROR_OBJECT (pad, "Invalid caps: %" GST_PTR_FORMAT, caps);
  goto done;

caps_not_accepted:
  res = FALSE;
  GST_ERROR_OBJECT (pad, "Caps not accepted: %" GST_PTR_FORMAT, othercaps);
  gst_caps_unref (othercaps);
  goto done;
}
/* Default implementation of the method video_dualencoder_sink_set_caps.
 * Determines which encoder instance (low/high resolution) the pad feeds,
 * fills a GstTIDmaiVideoInfo from the caps and initializes the codec.
 * Returns TRUE on success, FALSE when the caps are refused. */
gboolean
gst_tidmai_base_video_dualencoder_sink_set_caps (GstPad * pad, GstCaps * caps)
{
  GstStructure *capStruct;
  gint framerateNum;
  gint framerateDen;
  guint32 fourcc;
  GstTIDmaiVideoInfo *video_info = NULL;        /* owned here until handed off */
  GstTIDmaiDualEncInstance *actual_encoder_instance;
  GstTIDmaiBaseDualEncoder *base_dualencoder =
      (GstTIDmaiBaseDualEncoder *) gst_pad_get_parent (pad);
  GstTIDmaiBaseVideoDualEncoder *video_dualencoder =
      GST_TI_DMAI_BASE_VIDEO_DUALENCODER (base_dualencoder);

  GST_DEBUG ("Entry default_sink_set_caps base video dualencoder");

  /* Lock the entry: setcaps can arrive concurrently on both sink pads */
  GMUTEX_LOCK (video_dualencoder->set_caps_mutex);

  /* Check the current encoder instance */
  if (base_dualencoder->low_resolution_encoder->collect->pad == pad) {
    actual_encoder_instance = base_dualencoder->low_resolution_encoder;
    GST_DEBUG ("Actual instance: low resolution");
  } else if (base_dualencoder->high_resolution_encoder->collect->pad == pad) {
    actual_encoder_instance = base_dualencoder->high_resolution_encoder;
    GST_DEBUG ("Actual instance: high resolution");
  } else {
    GST_WARNING ("Fail in determinate the actual encoder instance!");
    goto refuse_caps;
  }

  /* Init the video info */
  video_info = g_malloc0 (sizeof (GstTIDmaiVideoInfo));

  capStruct = gst_caps_get_structure (caps, 0);

  /* framerate is mandatory */
  if (gst_structure_get_fraction (capStruct, "framerate", &framerateNum,
          &framerateDen)) {
    video_info->framerateN = framerateNum;
    video_info->framerateD = framerateDen;
  } else {
    GST_WARNING ("Problems for obtain framerate!");
    goto refuse_caps;
  }

  /* width/height/pitch are optional; default to 0 when absent */
  if (!gst_structure_get_int (capStruct, "height", &video_info->height)) {
    video_info->height = 0;
  }

  if (!gst_structure_get_int (capStruct, "width", &video_info->width)) {
    video_info->width = 0;
  }

  if (!gst_structure_get_int (capStruct, "pitch", &video_info->pitch)) {
    video_info->pitch = 0;
  }

  if (gst_structure_get_fourcc (capStruct, "format", &fourcc)) {
    switch (fourcc) {
      case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
        video_info->colorSpace = ColorSpace_YUV420PSEMI;
        /*base_dualencoder->inBufSize = (video_info->height * video_info->width) * (3 / 2);*/ /* The encoder instance B (high resolution), set this field adequately */
        break;
      default:
        GST_WARNING ("Unsupported fourcc in video stream!");
        goto refuse_caps;
    }
  }

  /* ownership of video_info transfers to the encoder instance */
  actual_encoder_instance->media_info = video_info;
  video_info = NULL;

  /* We are ready to init the codec */
  if (!gst_tidmai_base_dualencoder_init_codec (base_dualencoder,
          actual_encoder_instance))
    goto refuse_caps;

  /* save the caps for then update the caps */
  actual_encoder_instance->sink_caps = caps;

  /* drop the parent ref taken by gst_pad_get_parent() */
  gst_object_unref (base_dualencoder);

  GST_DEBUG ("Leave default_sink_set_caps base video dualencoder");

  /* Un-lock the entry */
  GMUTEX_UNLOCK (video_dualencoder->set_caps_mutex);

  return TRUE;

  /* ERRORS */
refuse_caps:
  {
    GST_ERROR ("refused caps %" GST_PTR_FORMAT, caps);

    /* don't leak a half-filled video_info (g_free is NULL-safe) nor the
     * parent reference taken by gst_pad_get_parent() */
    g_free (video_info);
    gst_object_unref (base_dualencoder);

    /* Un-lock the entry */
    GMUTEX_UNLOCK (video_dualencoder->set_caps_mutex);

    return FALSE;
  }
}
Beispiel #23
0
/* Sink setcaps for the DSP video encoder: builds the matching src caps for
 * the configured algorithm, sizes the input/output buffers, and on first
 * negotiation allocates ports, creates the DSP node and starts it.
 * Returns TRUE on success, FALSE otherwise. */
static gboolean
sink_setcaps(GstPad *pad,
	     GstCaps *caps)
{
	GstDspVEnc *self;
	GstDspBase *base;
	GstStructure *in_struc;
	GstCaps *out_caps;
	GstStructure *out_struc;
	gint width = 0, height = 0;
	GstCaps *allowed_caps;
	gint tgt_level = -1;
	struct td_codec *codec;

	self = GST_DSP_VENC(GST_PAD_PARENT(pad));
	base = GST_DSP_BASE(self);
	codec = base->codec;

	if (!codec)
		return FALSE;

#ifdef DEBUG
	{
		gchar *str = gst_caps_to_string(caps);
		pr_info(self, "sink caps: %s", str);
		g_free(str);
	}
#endif

	in_struc = gst_caps_get_structure(caps, 0);

	out_caps = gst_caps_new_empty();

	switch (base->alg) {
	case GSTDSP_JPEGENC:
		out_struc = gst_structure_new("image/jpeg",
					      NULL);
		break;
	case GSTDSP_H263ENC:
		out_struc = gst_structure_new("video/x-h263",
					      "variant", G_TYPE_STRING, "itu",
					      NULL);
		break;
	case GSTDSP_MP4VENC:
		out_struc = gst_structure_new("video/mpeg",
					      "mpegversion", G_TYPE_INT, 4,
					      "systemstream", G_TYPE_BOOLEAN, FALSE,
					      NULL);
		break;
	case GSTDSP_H264ENC:
		out_struc = gst_structure_new("video/x-h264",
					      "alignment", G_TYPE_STRING, "au",
					      NULL);
		break;
	default:
		/* previously leaked out_caps on unknown algorithms */
		gst_caps_unref(out_caps);
		return FALSE;
	}

	if (gst_structure_get_int(in_struc, "width", &width))
		gst_structure_set(out_struc, "width", G_TYPE_INT, width, NULL);
	if (gst_structure_get_int(in_struc, "height", &height))
		gst_structure_set(out_struc, "height", G_TYPE_INT, height, NULL);
	gst_structure_get_fourcc(in_struc, "format", &self->color_format);

	switch (base->alg) {
	case GSTDSP_H263ENC:
	case GSTDSP_MP4VENC:
	case GSTDSP_H264ENC:
		base->output_buffer_size = width * height / 2;
		break;
	case GSTDSP_JPEGENC:
		/* JPEG needs even dimensions */
		if (width % 2 || height % 2) {
			/* previously leaked out_struc and out_caps */
			gst_structure_free(out_struc);
			gst_caps_unref(out_caps);
			return FALSE;
		}
		if (self->color_format == GST_MAKE_FOURCC('I', '4', '2', '0'))
			base->input_buffer_size = ROUND_UP(width, 16) * ROUND_UP(height, 16) * 3 / 2;
		else
			base->input_buffer_size = ROUND_UP(width, 16) * ROUND_UP(height, 16) * 2;
		base->output_buffer_size = width * height;
		if (self->quality < 10)
			base->output_buffer_size /= 10;
		else if (self->quality < 100)
			base->output_buffer_size /= (100 / self->quality);
		break;
	default:
		break;
	}

	/* ports are only allocated once; renegotiation keeps them */
	if (base->node)
		goto skip_setup;

	switch (base->alg) {
	case GSTDSP_JPEGENC:
		du_port_alloc_buffers(base->ports[0], 1);
#if SN_API > 1
		du_port_alloc_buffers(base->ports[1], 2);
#else
		/* old versions of the sn can't handle 2 buffers */
		/*
		 * Some constrained pipelines might starve because of this. You
		 * might want to try enable-last-buffer=false on some sinks.
		 * TODO Is there any way around this?
		 */
		du_port_alloc_buffers(base->ports[1], 1);
#endif
		break;
	default:
		du_port_alloc_buffers(base->ports[0], 2);
		du_port_alloc_buffers(base->ports[1], 4);
		break;
	}

skip_setup:
	self->width = width;
	self->height = height;

	{
		const GValue *framerate = NULL;
		framerate = gst_structure_get_value(in_struc, "framerate");
		if (framerate) {
			gst_structure_set_value(out_struc, "framerate", framerate);
			/* calculate nearest integer */
			self->framerate = (gst_value_get_fraction_numerator(framerate) * 2 /
					   gst_value_get_fraction_denominator(framerate) + 1) / 2;
		}
	}

	/* see if downstream caps express something */
	allowed_caps = gst_pad_get_allowed_caps(base->srcpad);
	if (allowed_caps) {
		if (gst_caps_get_size(allowed_caps) > 0) {
			GstStructure *s;
			s = gst_caps_get_structure(allowed_caps, 0);
			gst_structure_get_int(s, "level", &tgt_level);
			if (base->alg == GSTDSP_H264ENC) {
				const char *stream_format;
				stream_format = gst_structure_get_string(s, "stream-format");
				if (stream_format && !strcmp(stream_format, "avc"))
					self->priv.h264.bytestream = false;
				else
					stream_format = "byte-stream";
				gst_structure_set(out_struc, "stream-format", G_TYPE_STRING, stream_format, NULL);
			}
		}
		gst_caps_unref(allowed_caps);
	}

	check_supported_levels(self, tgt_level);

	/* clamp the configured bitrate to what the selected level allows */
	if (self->bitrate == 0)
		self->bitrate = self->max_bitrate;
	else if (self->bitrate > self->max_bitrate)
		self->bitrate = self->max_bitrate;

	/* out_struc ownership passes to out_caps here */
	gst_caps_append_structure(out_caps, out_struc);

#ifdef DEBUG
	{
		gchar *str = gst_caps_to_string(out_caps);
		pr_info(self, "src caps: %s", str);
		g_free(str);
	}
#endif

	/* gst_pad_take_caps() consumes out_caps even on failure */
	if (!gst_pad_take_caps(base->srcpad, out_caps))
		return FALSE;

	if (base->node)
		return TRUE;

	base->node = create_node(self);
	if (!base->node) {
		pr_err(self, "dsp node creation failed");
		return FALSE;
	}

	if (codec->setup_params)
		codec->setup_params(base);

	if (!gstdsp_start(base)) {
		pr_err(self, "dsp start failed");
		return FALSE;
	}

	if (codec->send_params)
		codec->send_params(base, base->node);

	return TRUE;
}