/* GObject property getter: expose the chunks-per-frame setting. */
static void
gst_rtp_vraw_pay_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstRtpVRawPay *pay = GST_RTP_VRAW_PAY (object);

  if (prop_id == PROP_CHUNKS_PER_FRAME)
    g_value_set_int (value, pay->chunks_per_frame);
  else
    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
}
/* Payload one raw video frame (GStreamer 1.0 path) into RTP packets per
 * RFC 4175.  Two passes per packet: first write the 6-byte line headers
 * that fit in the MTU, then copy the corresponding pixel data behind them.
 * For interlaced input each field is payloaded separately; the second
 * field's PTS is offset by half the buffer duration.  When
 * chunks-per-frame allows it, packets are batched in GstBufferLists.
 *
 * Takes ownership of @buffer.  Returns the flow result of the last push.
 */
static GstFlowReturn
gst_rtp_vraw_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
{
  GstRtpVRawPay *rtpvrawpay;
  GstFlowReturn ret = GST_FLOW_OK;
  gfloat packets_per_packline;
  guint pgroups_per_packet;
  guint packlines_per_list, buffers_per_list;
  guint lines_delay;            /* after how many packed lines we push out a buffer list */
  guint last_line;              /* last pack line number we pushed out a buffer list     */
  guint line, offset;
  guint8 *p0, *yp, *up, *vp;
  guint ystride, uvstride;
  guint xinc, yinc;
  guint pgroup;
  guint mtu;
  guint width, height;
  gint field, fields;
  GstVideoFormat format;
  GstVideoFrame frame;
  gint interlaced;
  gboolean use_buffer_lists;
  GstBufferList *list = NULL;
  GstRTPBuffer rtp = { NULL, };

  rtpvrawpay = GST_RTP_VRAW_PAY (payload);

  /* map the frame for reading; bail out instead of touching an unmapped
   * frame when mapping fails */
  if (!gst_video_frame_map (&frame, &rtpvrawpay->vinfo, buffer, GST_MAP_READ))
    goto invalid_frame;

  GST_LOG_OBJECT (rtpvrawpay, "new frame of %" G_GSIZE_FORMAT " bytes",
      gst_buffer_get_size (buffer));

  /* get pointer and strides of the planes */
  p0 = GST_VIDEO_FRAME_PLANE_DATA (&frame, 0);
  yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
  up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
  vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);

  ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
  uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);

  mtu = GST_RTP_BASE_PAYLOAD_MTU (payload);

  /* amount of bytes for one pixel */
  pgroup = rtpvrawpay->pgroup;
  width = GST_VIDEO_INFO_WIDTH (&rtpvrawpay->vinfo);
  height = GST_VIDEO_INFO_HEIGHT (&rtpvrawpay->vinfo);

  interlaced = GST_VIDEO_INFO_IS_INTERLACED (&rtpvrawpay->vinfo);

  format = GST_VIDEO_INFO_FORMAT (&rtpvrawpay->vinfo);

  yinc = rtpvrawpay->yinc;
  xinc = rtpvrawpay->xinc;

  /* after how many packed lines we push out a buffer list */
  lines_delay = GST_ROUND_UP_4 (height / rtpvrawpay->chunks_per_frame);

  /* calculate how many buffers we expect to store in a single buffer list */
  pgroups_per_packet = (mtu - (12 + 14)) / pgroup;
  packets_per_packline = width / (xinc * pgroups_per_packet * 1.0);
  packlines_per_list = height / (yinc * rtpvrawpay->chunks_per_frame);
  buffers_per_list = packlines_per_list * packets_per_packline;
  buffers_per_list = GST_ROUND_UP_8 (buffers_per_list);

  use_buffer_lists = (rtpvrawpay->chunks_per_frame < (height / yinc));

  fields = 1 + interlaced;

  /* start with line 0, offset 0 */
  for (field = 0; field < fields; field++) {
    line = field;
    offset = 0;
    last_line = 0;

    if (use_buffer_lists)
      list = gst_buffer_list_new_sized (buffers_per_list);

    /* write all lines */
    while (line < height) {
      guint left, pack_line;
      GstBuffer *out;
      guint8 *outdata, *headers;
      gboolean next_line, complete = FALSE;
      guint length, cont, pixels;

      /* get the max allowed payload length size, we try to fill the complete MTU */
      left = gst_rtp_buffer_calc_payload_len (mtu, 0, 0);
      out = gst_rtp_buffer_new_allocate (left, 0, 0);

      /* second field is delayed by half a frame duration */
      if (field == 0) {
        GST_BUFFER_PTS (out) = GST_BUFFER_PTS (buffer);
      } else {
        GST_BUFFER_PTS (out) = GST_BUFFER_PTS (buffer) +
            GST_BUFFER_DURATION (buffer) / 2;
      }

      gst_rtp_buffer_map (out, GST_MAP_WRITE, &rtp);
      outdata = gst_rtp_buffer_get_payload (&rtp);

      GST_LOG_OBJECT (rtpvrawpay, "created buffer of size %u for MTU %u", left,
          mtu);

      /*
       *   0                   1                   2                   3
       *   0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |   Extended Sequence Number    |            Length             |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |F|          Line No            |C|           Offset            |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |            Length             |F|          Line No            |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |C|           Offset            |                               .
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+                               .
       *  .                                                               .
       *  .                 Two (partial) lines of video data             .
       *  .                                                               .
       *  +---------------------------------------------------------------+
       */

      /* need 2 bytes for the extended sequence number */
      *outdata++ = 0;
      *outdata++ = 0;
      left -= 2;

      /* the headers start here */
      headers = outdata;

      /* make sure we can fit at least *one* header and pixel */
      if (!(left > (6 + pgroup))) {
        gst_rtp_buffer_unmap (&rtp);
        gst_buffer_unref (out);
        goto too_small;
      }

      /* while we can fit at least one header and one pixel */
      while (left > (6 + pgroup)) {
        /* we need a 6 bytes header */
        left -= 6;

        /* get how may bytes we need for the remaining pixels */
        pixels = width - offset;
        length = (pixels * pgroup) / xinc;

        if (left >= length) {
          /* pixels and header fit completely, we will write them and skip to the
           * next line. */
          next_line = TRUE;
        } else {
          /* line does not fit completely, see how many pixels fit */
          pixels = (left / pgroup) * xinc;
          length = (pixels * pgroup) / xinc;
          next_line = FALSE;
        }
        GST_LOG_OBJECT (rtpvrawpay, "filling %u bytes in %u pixels", length,
            pixels);
        left -= length;

        /* write length */
        *outdata++ = (length >> 8) & 0xff;
        *outdata++ = length & 0xff;

        /* write line no */
        *outdata++ = ((line >> 8) & 0x7f) | ((field << 7) & 0x80);
        *outdata++ = line & 0xff;

        if (next_line) {
          /* go to next line we do this here to make the check below easier */
          line += yinc;
        }

        /* calculate continuation marker */
        cont = (left > (6 + pgroup) && line < height) ? 0x80 : 0x00;

        /* write offset and continuation marker */
        *outdata++ = ((offset >> 8) & 0x7f) | cont;
        *outdata++ = offset & 0xff;

        if (next_line) {
          /* reset offset */
          offset = 0;
          GST_LOG_OBJECT (rtpvrawpay, "go to next line %u", line);
        } else {
          offset += pixels;
          GST_LOG_OBJECT (rtpvrawpay, "next offset %u", offset);
        }

        if (!cont)
          break;
      }
      GST_LOG_OBJECT (rtpvrawpay, "consumed %u bytes",
          (guint) (outdata - headers));

      /* second pass, read headers and write the data */
      while (TRUE) {
        guint offs, lin;

        /* read length and cont */
        length = (headers[0] << 8) | headers[1];
        lin = ((headers[2] & 0x7f) << 8) | headers[3];
        offs = ((headers[4] & 0x7f) << 8) | headers[5];
        cont = headers[4] & 0x80;
        pixels = length / pgroup;
        headers += 6;

        GST_LOG_OBJECT (payload,
            "writing length %u, line %u, offset %u, cont %d", length, lin, offs,
            cont);

        switch (format) {
          case GST_VIDEO_FORMAT_RGB:
          case GST_VIDEO_FORMAT_RGBA:
          case GST_VIDEO_FORMAT_BGR:
          case GST_VIDEO_FORMAT_BGRA:
          case GST_VIDEO_FORMAT_UYVY:
          case GST_VIDEO_FORMAT_UYVP:
            /* packed formats: copy the line span as-is */
            offs /= xinc;
            memcpy (outdata, p0 + (lin * ystride) + (offs * pgroup), length);
            outdata += length;
            break;
          case GST_VIDEO_FORMAT_AYUV:
          {
            gint i;
            guint8 *datap;

            datap = p0 + (lin * ystride) + (offs * 4);

            /* drop the alpha channel: emit Y, U, V (4:4:4) */
            for (i = 0; i < pixels; i++) {
              *outdata++ = datap[2];
              *outdata++ = datap[1];
              *outdata++ = datap[3];
              datap += 4;
            }
            break;
          }
          case GST_VIDEO_FORMAT_I420:
          {
            gint i;
            guint uvoff;
            guint8 *yd1p, *yd2p, *udp, *vdp;

            /* interleave two luma lines with shared chroma (4:2:0) */
            yd1p = yp + (lin * ystride) + (offs);
            yd2p = yd1p + ystride;
            uvoff = (lin / yinc * uvstride) + (offs / xinc);
            udp = up + uvoff;
            vdp = vp + uvoff;

            for (i = 0; i < pixels; i++) {
              *outdata++ = *yd1p++;
              *outdata++ = *yd1p++;
              *outdata++ = *yd2p++;
              *outdata++ = *yd2p++;
              *outdata++ = *udp++;
              *outdata++ = *vdp++;
            }
            break;
          }
          case GST_VIDEO_FORMAT_Y41B:
          {
            gint i;
            guint uvoff;
            guint8 *ydp, *udp, *vdp;

            /* pack 4:1:1 pgroups: Cb Y Y Cr Y Y */
            ydp = yp + (lin * ystride) + offs;
            uvoff = (lin / yinc * uvstride) + (offs / xinc);
            udp = up + uvoff;
            vdp = vp + uvoff;

            for (i = 0; i < pixels; i++) {
              *outdata++ = *udp++;
              *outdata++ = *ydp++;
              *outdata++ = *ydp++;
              *outdata++ = *vdp++;
              *outdata++ = *ydp++;
              *outdata++ = *ydp++;
            }
            break;
          }
          default:
            gst_rtp_buffer_unmap (&rtp);
            gst_buffer_unref (out);
            goto unknown_sampling;
        }

        if (!cont)
          break;
      }

      if (line >= height) {
        GST_LOG_OBJECT (rtpvrawpay, "field/frame complete, set marker");
        gst_rtp_buffer_set_marker (&rtp, TRUE);
        complete = TRUE;
      }
      gst_rtp_buffer_unmap (&rtp);
      if (left > 0) {
        GST_LOG_OBJECT (rtpvrawpay, "we have %u bytes left", left);
        gst_buffer_resize (out, 0, gst_buffer_get_size (out) - left);
      }

      /* Now either push out the buffer directly */
      if (!use_buffer_lists) {
        ret = gst_rtp_base_payload_push (payload, out);
        continue;
      }

      /* or add the buffer to buffer list ... */
      gst_buffer_list_add (list, out);

      /* .. and check if we need to push out the list */
      pack_line = (line - field) / fields;
      if (complete || (pack_line > last_line && pack_line % lines_delay == 0)) {
        GST_LOG_OBJECT (rtpvrawpay, "pushing list of %u buffers up to pack "
            "line %u", gst_buffer_list_length (list), pack_line);
        ret = gst_rtp_base_payload_push_list (payload, list);
        list = NULL;
        if (!complete)
          list = gst_buffer_list_new_sized (buffers_per_list);
        last_line = pack_line;
      }
    }

  }

  gst_video_frame_unmap (&frame);
  gst_buffer_unref (buffer);

  return ret;

  /* ERRORS */
invalid_frame:
  {
    GST_ERROR_OBJECT (rtpvrawpay, "could not map video frame");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
unknown_sampling:
  {
    GST_ELEMENT_ERROR (payload, STREAM, FORMAT,
        (NULL), ("unimplemented sampling"));
    /* don't leak a partially filled buffer list */
    if (list)
      gst_buffer_list_unref (list);
    gst_video_frame_unmap (&frame);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_SUPPORTED;
  }
too_small:
  {
    GST_ELEMENT_ERROR (payload, RESOURCE, NO_SPACE_LEFT,
        (NULL), ("not enough space to send at least one pixel"));
    /* don't leak a partially filled buffer list */
    if (list)
      gst_buffer_list_unref (list);
    gst_video_frame_unmap (&frame);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_SUPPORTED;
  }
}
/* Legacy GStreamer 0.10 variant of the buffer handler: same RFC 4175
 * two-pass packetization as above, but using GST_BUFFER_DATA/SIZE and the
 * plane offsets/strides cached by the 0.10 setcaps handler.
 *
 * Takes ownership of @buffer.  Returns the flow result of the last push.
 */
static GstFlowReturn
gst_rtp_vraw_pay_handle_buffer (GstBaseRTPPayload * payload, GstBuffer * buffer)
{
    GstRtpVRawPay *rtpvrawpay;
    GstFlowReturn ret = GST_FLOW_OK;
    guint line, offset;
    guint8 *data, *yp, *up, *vp;
    guint ystride, uvstride;
    guint size, pgroup;
    guint mtu;
    guint width, height;
    gint field;

    rtpvrawpay = GST_RTP_VRAW_PAY (payload);

    data = GST_BUFFER_DATA (buffer);
    size = GST_BUFFER_SIZE (buffer);

    GST_LOG_OBJECT (rtpvrawpay, "new frame of %u bytes", size);

    /* get pointer and strides of the planes */
    yp = data + rtpvrawpay->yp;
    up = data + rtpvrawpay->up;
    vp = data + rtpvrawpay->vp;

    ystride = rtpvrawpay->ystride;
    uvstride = rtpvrawpay->uvstride;

    mtu = GST_BASE_RTP_PAYLOAD_MTU (payload);

    /* amount of bytes for one pixel */
    pgroup = rtpvrawpay->pgroup;
    width = rtpvrawpay->width;
    height = rtpvrawpay->height;

    /* start with line 0, offset 0 */

    for (field = 0; field < 1 + rtpvrawpay->interlaced; field++) {
        line = field;
        offset = 0;

        /* write all lines */
        while (line < height) {
            guint left;
            GstBuffer *out;
            guint8 *outdata, *headers;
            gboolean next_line;
            guint length, cont, pixels;

            /* get the max allowed payload length size, we try to fill the complete MTU */
            left = gst_rtp_buffer_calc_payload_len (mtu, 0, 0);
            out = gst_rtp_buffer_new_allocate (left, 0, 0);

            /* second field is delayed by half a frame duration */
            if (field == 0) {
                GST_BUFFER_TIMESTAMP (out) = GST_BUFFER_TIMESTAMP (buffer);
            } else {
                GST_BUFFER_TIMESTAMP (out) = GST_BUFFER_TIMESTAMP (buffer) +
                                             GST_BUFFER_DURATION (buffer) / 2;
            }

            outdata = gst_rtp_buffer_get_payload (out);

            GST_LOG_OBJECT (rtpvrawpay, "created buffer of size %u for MTU %u", left,
                            mtu);

            /*
             *   0                   1                   2                   3
             *   0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
             *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
             *  |   Extended Sequence Number    |            Length             |
             *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
             *  |F|          Line No            |C|           Offset            |
             *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
             *  |            Length             |F|          Line No            |
             *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
             *  |C|           Offset            |                               .
             *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+                               .
             *  .                                                               .
             *  .                 Two (partial) lines of video data             .
             *  .                                                               .
             *  +---------------------------------------------------------------+
             */

            /* need 2 bytes for the extended sequence number */
            *outdata++ = 0;
            *outdata++ = 0;
            left -= 2;

            /* the headers start here */
            headers = outdata;

            /* make sure we can fit at least *one* header and pixel; without
             * this guard the header loop below would not run at all and the
             * second pass would read uninitialised header bytes */
            if (!(left > (6 + pgroup))) {
                gst_buffer_unref (out);
                goto too_small;
            }

            /* while we can fit at least one header and one pixel */
            while (left > (6 + pgroup)) {
                /* we need a 6 bytes header */
                left -= 6;

                /* get how may bytes we need for the remaining pixels */
                pixels = width - offset;
                length = (pixels * pgroup) / rtpvrawpay->xinc;

                if (left >= length) {
                    /* pixels and header fit completely, we will write them and skip to the
                     * next line. */
                    next_line = TRUE;
                } else {
                    /* line does not fit completely, see how many pixels fit */
                    pixels = (left / pgroup) * rtpvrawpay->xinc;
                    length = (pixels * pgroup) / rtpvrawpay->xinc;
                    next_line = FALSE;
                }
                GST_LOG_OBJECT (rtpvrawpay, "filling %u bytes in %u pixels", length,
                                pixels);
                left -= length;

                /* write length */
                *outdata++ = (length >> 8) & 0xff;
                *outdata++ = length & 0xff;

                /* write line no */
                *outdata++ = ((line >> 8) & 0x7f) | ((field << 7) & 0x80);
                *outdata++ = line & 0xff;

                if (next_line) {
                    /* go to next line we do this here to make the check below easier */
                    line += rtpvrawpay->yinc;
                }

                /* calculate continuation marker */
                cont = (left > (6 + pgroup) && line < height) ? 0x80 : 0x00;

                /* write offset and continuation marker */
                *outdata++ = ((offset >> 8) & 0x7f) | cont;
                *outdata++ = offset & 0xff;

                if (next_line) {
                    /* reset offset */
                    offset = 0;
                    GST_LOG_OBJECT (rtpvrawpay, "go to next line %u", line);
                } else {
                    offset += pixels;
                    GST_LOG_OBJECT (rtpvrawpay, "next offset %u", offset);
                }

                if (!cont)
                    break;
            }
            GST_LOG_OBJECT (rtpvrawpay, "consumed %u bytes",
                            (guint) (outdata - headers));

            /* second pass, read headers and write the data */
            while (TRUE) {
                guint offs, lin;

                /* read length and cont */
                length = (headers[0] << 8) | headers[1];
                lin = ((headers[2] & 0x7f) << 8) | headers[3];
                offs = ((headers[4] & 0x7f) << 8) | headers[5];
                cont = headers[4] & 0x80;
                pixels = length / pgroup;
                headers += 6;

                GST_LOG_OBJECT (payload,
                                "writing length %u, line %u, offset %u, cont %d", length, lin, offs,
                                cont);

                switch (rtpvrawpay->sampling) {
                case GST_VIDEO_FORMAT_RGB:
                case GST_VIDEO_FORMAT_RGBA:
                case GST_VIDEO_FORMAT_BGR:
                case GST_VIDEO_FORMAT_BGRA:
                case GST_VIDEO_FORMAT_UYVY:
                    /* packed formats: copy the line span as-is */
                    offs /= rtpvrawpay->xinc;
                    memcpy (outdata, yp + (lin * ystride) + (offs * pgroup), length);
                    outdata += length;
                    break;
                case GST_VIDEO_FORMAT_AYUV:
                {
                    gint i;
                    guint8 *datap;

                    datap = yp + (lin * ystride) + (offs * 4);

                    /* drop the alpha channel: emit Y, U, V (4:4:4) */
                    for (i = 0; i < pixels; i++) {
                        *outdata++ = datap[2];
                        *outdata++ = datap[1];
                        *outdata++ = datap[3];
                        datap += 4;
                    }
                    break;
                }
                case GST_VIDEO_FORMAT_I420:
                {
                    gint i;
                    guint uvoff;
                    guint8 *yd1p, *yd2p, *udp, *vdp;

                    /* interleave two luma lines with shared chroma (4:2:0) */
                    yd1p = yp + (lin * ystride) + (offs);
                    yd2p = yd1p + ystride;
                    uvoff =
                        (lin / rtpvrawpay->yinc * uvstride) + (offs / rtpvrawpay->xinc);
                    udp = up + uvoff;
                    vdp = vp + uvoff;

                    for (i = 0; i < pixels; i++) {
                        *outdata++ = *yd1p++;
                        *outdata++ = *yd1p++;
                        *outdata++ = *yd2p++;
                        *outdata++ = *yd2p++;
                        *outdata++ = *udp++;
                        *outdata++ = *vdp++;
                    }
                    break;
                }
                case GST_VIDEO_FORMAT_Y41B:
                {
                    gint i;
                    guint uvoff;
                    guint8 *ydp, *udp, *vdp;

                    /* pack 4:1:1 pgroups: Cb Y Y Cr Y Y */
                    ydp = yp + (lin * ystride) + offs;
                    uvoff =
                        (lin / rtpvrawpay->yinc * uvstride) + (offs / rtpvrawpay->xinc);
                    udp = up + uvoff;
                    vdp = vp + uvoff;

                    for (i = 0; i < pixels; i++) {
                        *outdata++ = *udp++;
                        *outdata++ = *ydp++;
                        *outdata++ = *ydp++;
                        *outdata++ = *vdp++;
                        *outdata++ = *ydp++;
                        *outdata++ = *ydp++;
                    }
                    break;
                }
                default:
                    gst_buffer_unref (out);
                    goto unknown_sampling;
                }

                if (!cont)
                    break;
            }

            if (line >= height) {
                GST_LOG_OBJECT (rtpvrawpay, "field/frame complete, set marker");
                gst_rtp_buffer_set_marker (out, TRUE);
            }
            if (left > 0) {
                GST_LOG_OBJECT (rtpvrawpay, "we have %u bytes left", left);
                GST_BUFFER_SIZE (out) -= left;
            }

            /* push buffer */
            ret = gst_basertppayload_push (payload, out);
        }

    }
    gst_buffer_unref (buffer);

    return ret;

    /* ERRORS */
unknown_sampling:
    {
        GST_ELEMENT_ERROR (payload, STREAM, FORMAT,
                           (NULL), ("unimplemented sampling"));
        gst_buffer_unref (buffer);
        return GST_FLOW_NOT_SUPPORTED;
    }
too_small:
    {
        GST_ELEMENT_ERROR (payload, RESOURCE, NO_SPACE_LEFT,
                           (NULL), ("not enough space to send at least one pixel"));
        gst_buffer_unref (buffer);
        return GST_FLOW_NOT_SUPPORTED;
    }
}
/* Negotiate input caps (GStreamer 1.0 path).
 *
 * Parses the caps into a GstVideoInfo, derives the RFC 4175 SDP parameters
 * (sampling, depth, width, height, colorimetry, interlace) plus the internal
 * packing parameters (pgroup bytes per pixel group, xinc/yinc subsampling
 * steps), stores them on the payloader and sets the output caps.
 *
 * Returns TRUE when the caps could be parsed and the outcaps were accepted.
 */
static gboolean
gst_rtp_vraw_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
  GstRtpVRawPay *pay = GST_RTP_VRAW_PAY (payload);
  GstVideoInfo vinfo;
  const gchar *samplingstr, *colorimetrystr, *depthstr;
  gchar *width_str, *height_str;
  gint pgroup, xinc, yinc;
  gboolean res;

  if (!gst_video_info_from_caps (&vinfo, caps))
    goto invalid_caps;

  pay->vinfo = vinfo;

  /* map the colorimetry onto one of the RFC 4175 colorimetry names;
   * anything not recognised is advertised as SMPTE240M */
  colorimetrystr = "SMPTE240M";
  if (gst_video_colorimetry_matches (&vinfo.colorimetry,
          GST_VIDEO_COLORIMETRY_BT601))
    colorimetrystr = "BT601-5";
  else if (gst_video_colorimetry_matches (&vinfo.colorimetry,
          GST_VIDEO_COLORIMETRY_BT709))
    colorimetrystr = "BT709-2";

  /* defaults: no subsampling, 8 bits per component */
  xinc = yinc = 1;
  depthstr = "8";

  /* per-format sampling name, bytes per pixel group and subsampling steps */
  switch (GST_VIDEO_INFO_FORMAT (&vinfo)) {
    case GST_VIDEO_FORMAT_RGBA:
      samplingstr = "RGBA";
      pgroup = 4;
      break;
    case GST_VIDEO_FORMAT_BGRA:
      samplingstr = "BGRA";
      pgroup = 4;
      break;
    case GST_VIDEO_FORMAT_RGB:
      samplingstr = "RGB";
      pgroup = 3;
      break;
    case GST_VIDEO_FORMAT_BGR:
      samplingstr = "BGR";
      pgroup = 3;
      break;
    case GST_VIDEO_FORMAT_AYUV:
      samplingstr = "YCbCr-4:4:4";
      pgroup = 3;
      break;
    case GST_VIDEO_FORMAT_UYVY:
      samplingstr = "YCbCr-4:2:2";
      pgroup = 4;
      xinc = 2;
      break;
    case GST_VIDEO_FORMAT_Y41B:
      samplingstr = "YCbCr-4:1:1";
      pgroup = 6;
      xinc = 4;
      break;
    case GST_VIDEO_FORMAT_I420:
      samplingstr = "YCbCr-4:2:0";
      pgroup = 6;
      xinc = yinc = 2;
      break;
    case GST_VIDEO_FORMAT_UYVP:
      samplingstr = "YCbCr-4:2:2";
      pgroup = 5;
      xinc = 2;
      depthstr = "10";
      break;
    default:
      goto unknown_format;
      break;
  }

  /* interlaced: a packed line steps over one line of each field */
  if (GST_VIDEO_INFO_IS_INTERLACED (&vinfo))
    yinc *= 2;

  pay->pgroup = pgroup;
  pay->xinc = xinc;
  pay->yinc = yinc;

  GST_DEBUG_OBJECT (payload, "width %d, height %d, sampling %s",
      GST_VIDEO_INFO_WIDTH (&vinfo), GST_VIDEO_INFO_HEIGHT (&vinfo),
      samplingstr);
  GST_DEBUG_OBJECT (payload, "xinc %d, yinc %d, pgroup %d", xinc, yinc, pgroup);

  width_str = g_strdup_printf ("%d", GST_VIDEO_INFO_WIDTH (&vinfo));
  height_str = g_strdup_printf ("%d", GST_VIDEO_INFO_HEIGHT (&vinfo));

  gst_rtp_base_payload_set_options (payload, "video", TRUE, "RAW", 90000);
  if (GST_VIDEO_INFO_IS_INTERLACED (&vinfo)) {
    res = gst_rtp_base_payload_set_outcaps (payload, "sampling", G_TYPE_STRING,
        samplingstr, "depth", G_TYPE_STRING, depthstr, "width", G_TYPE_STRING,
        width_str, "height", G_TYPE_STRING, height_str, "colorimetry",
        G_TYPE_STRING, colorimetrystr, "interlace", G_TYPE_STRING, "true",
        NULL);
  } else {
    res = gst_rtp_base_payload_set_outcaps (payload, "sampling", G_TYPE_STRING,
        samplingstr, "depth", G_TYPE_STRING, depthstr, "width", G_TYPE_STRING,
        width_str, "height", G_TYPE_STRING, height_str, "colorimetry",
        G_TYPE_STRING, colorimetrystr, NULL);
  }
  g_free (width_str);
  g_free (height_str);

  return res;

  /* ERRORS */
invalid_caps:
  {
    GST_ERROR_OBJECT (payload, "could not parse caps");
    return FALSE;
  }
unknown_format:
  {
    GST_ERROR_OBJECT (payload, "unknown caps format");
    return FALSE;
  }
}
/* Legacy GStreamer 0.10 variant of the setcaps handler: parses the raw
 * video format by hand from the caps structure (video/x-raw-rgb masks and
 * video/x-raw-yuv fourccs) instead of using GstVideoInfo, derives the
 * plane offsets and strides itself, caches everything on the payloader
 * instance and sets the RFC 4175 output caps.
 *
 * Returns TRUE when the caps were understood and the outcaps accepted. */
static gboolean
gst_rtp_vraw_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
{
    GstRtpVRawPay *rtpvrawpay;
    GstStructure *s;
    gboolean res;
    const gchar *name;
    gint width, height;
    gint yp, up, vp;            /* byte offsets of the Y/U/V planes in the frame */
    gint pgroup, ystride, uvstride = 0, xinc, yinc;
    GstVideoFormat sampling;
    const gchar *depthstr, *samplingstr, *colorimetrystr;
    gchar *wstr, *hstr;
    gboolean interlaced;
    const gchar *color_matrix;
    gint depth;

    rtpvrawpay = GST_RTP_VRAW_PAY (payload);

    s = gst_caps_get_structure (caps, 0);

    /* start parsing the format */
    name = gst_structure_get_name (s);

    /* these values are the only thing we can do */
    depthstr = "8";

    /* parse common width/height */
    res = gst_structure_get_int (s, "width", &width);
    res &= gst_structure_get_int (s, "height", &height);
    if (!res)
        goto missing_dimension;

    /* absent "interlaced" field means progressive */
    if (!gst_structure_get_boolean (s, "interlaced", &interlaced))
        interlaced = FALSE;

    /* map the 0.10 "color-matrix" string onto an RFC 4175 colorimetry name;
     * unknown/missing matrix is advertised as SMPTE240M */
    color_matrix = gst_structure_get_string (s, "color-matrix");
    colorimetrystr = "SMPTE240M";
    if (color_matrix) {
        if (g_str_equal (color_matrix, "sdtv")) {
            /* BT.601 implies a bit more than just color-matrix */
            colorimetrystr = "BT601-5";
        } else if (g_str_equal (color_matrix, "hdtv")) {
            colorimetrystr = "BT709-2";
        }
    }

    /* defaults: single packed plane at offset 0, no subsampling */
    yp = up = vp = 0;
    xinc = yinc = 1;

    if (!strcmp (name, "video/x-raw-rgb")) {
        gint amask, rmask;
        gboolean has_alpha;

        /* presence of alpha_mask distinguishes RGBA/BGRA from RGB/BGR */
        has_alpha = gst_structure_get_int (s, "alpha_mask", &amask);
        depth = 8;

        if (!gst_structure_get_int (s, "red_mask", &rmask))
            goto unknown_mask;

        if (has_alpha) {
            pgroup = 4;
            ystride = width * 4;
            /* red mask in the top byte means RGBA ordering */
            if (rmask == 0xFF000000) {
                sampling = GST_VIDEO_FORMAT_RGBA;
                samplingstr = "RGBA";
            } else {
                sampling = GST_VIDEO_FORMAT_BGRA;
                samplingstr = "BGRA";
            }
        } else {
            pgroup = 3;
            ystride = GST_ROUND_UP_4 (width * 3);
            if (rmask == 0x00FF0000) {
                sampling = GST_VIDEO_FORMAT_RGB;
                samplingstr = "RGB";
            } else {
                sampling = GST_VIDEO_FORMAT_BGR;
                samplingstr = "BGR";
            }
        }
    } else if (!strcmp (name, "video/x-raw-yuv")) {
        guint32 fourcc;

        if (!gst_structure_get_fourcc (s, "format", &fourcc))
            goto unknown_fourcc;

        GST_LOG_OBJECT (payload, "have fourcc %" GST_FOURCC_FORMAT,
                        GST_FOURCC_ARGS (fourcc));

        switch (fourcc) {
        case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
            /* 4 bytes stored per pixel, but only 3 (Y,U,V) are payloaded */
            sampling = GST_VIDEO_FORMAT_AYUV;
            samplingstr = "YCbCr-4:4:4";
            pgroup = 3;
            ystride = width * 4;
            depth = 8;
            break;
        case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
            sampling = GST_VIDEO_FORMAT_UYVY;
            samplingstr = "YCbCr-4:2:2";
            pgroup = 4;
            xinc = 2;
            ystride = GST_ROUND_UP_2 (width) * 2;
            depth = 8;
            break;
        case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
            /* planar 4:1:1: chroma planes follow the luma plane */
            sampling = GST_VIDEO_FORMAT_Y41B;
            samplingstr = "YCbCr-4:1:1";
            pgroup = 6;
            xinc = 4;
            ystride = GST_ROUND_UP_4 (width);
            uvstride = GST_ROUND_UP_8 (width) / 4;
            up = ystride * height;
            vp = up + uvstride * height;
            depth = 8;
            break;
        case GST_MAKE_FOURCC ('I', '4', '2', '0'):
            /* planar 4:2:0: chroma planes are half height */
            sampling = GST_VIDEO_FORMAT_I420;
            samplingstr = "YCbCr-4:2:0";
            pgroup = 6;
            xinc = yinc = 2;
            ystride = GST_ROUND_UP_4 (width);
            uvstride = GST_ROUND_UP_8 (width) / 2;
            up = ystride * GST_ROUND_UP_2 (height);
            vp = up + uvstride * GST_ROUND_UP_2 (height) / 2;
            depth = 8;
            break;
        case GST_MAKE_FOURCC ('U', 'Y', 'V', 'P'):
            /* NOTE(review): UYVP is aliased to UYVY here, so the buffer
             * handler copies it with UYVY packing.  RFC 4175 10-bit 4:2:2
             * uses a 5-octet pgroup (the 1.0 setcaps path uses pgroup = 5);
             * pgroup = 4 here looks suspicious — confirm before relying on
             * UYVP output from this legacy path. */
#define GST_VIDEO_FORMAT_UYVP GST_VIDEO_FORMAT_UYVY     /* FIXME */
            sampling = GST_VIDEO_FORMAT_UYVP;
            samplingstr = "YCbCr-4:2:2";
            pgroup = 4;
            xinc = 2;
            ystride = GST_ROUND_UP_2 (width) * 2;
            depth = 10;
            break;
        default:
            goto unknown_fourcc;
        }
    } else
        goto unknown_format;

    /* interlaced: a packed line steps over one line of each field */
    if (interlaced) {
        yinc *= 2;
    }
    if (depth == 10) {
        depthstr = "10";
    }

    /* cache everything the buffer handler needs on the instance */
    rtpvrawpay->width = width;
    rtpvrawpay->height = height;
    rtpvrawpay->sampling = sampling;
    rtpvrawpay->pgroup = pgroup;
    rtpvrawpay->xinc = xinc;
    rtpvrawpay->yinc = yinc;
    rtpvrawpay->yp = yp;
    rtpvrawpay->up = up;
    rtpvrawpay->vp = vp;
    rtpvrawpay->ystride = ystride;
    rtpvrawpay->uvstride = uvstride;
    rtpvrawpay->interlaced = interlaced;
    rtpvrawpay->depth = depth;

    GST_DEBUG_OBJECT (payload, "width %d, height %d, sampling %d", width, height,
                      sampling);
    GST_DEBUG_OBJECT (payload, "yp %d, up %d, vp %d", yp, up, vp);
    GST_DEBUG_OBJECT (payload, "pgroup %d, ystride %d, uvstride %d", pgroup,
                      ystride, uvstride);

    wstr = g_strdup_printf ("%d", rtpvrawpay->width);
    hstr = g_strdup_printf ("%d", rtpvrawpay->height);

    /* advertise the stream as RAW/90000 and set the SDP-style outcaps */
    gst_basertppayload_set_options (payload, "video", TRUE, "RAW", 90000);
    if (interlaced) {
        res = gst_basertppayload_set_outcaps (payload, "sampling", G_TYPE_STRING,
                                              samplingstr, "depth", G_TYPE_STRING, depthstr, "width", G_TYPE_STRING,
                                              wstr, "height", G_TYPE_STRING, hstr, "colorimetry", G_TYPE_STRING,
                                              colorimetrystr, "interlace", G_TYPE_STRING, "true", NULL);
    } else {
        res = gst_basertppayload_set_outcaps (payload, "sampling", G_TYPE_STRING,
                                              samplingstr, "depth", G_TYPE_STRING, depthstr, "width", G_TYPE_STRING,
                                              wstr, "height", G_TYPE_STRING, hstr, "colorimetry", G_TYPE_STRING,
                                              colorimetrystr, NULL);
    }
    g_free (wstr);
    g_free (hstr);

    return res;

    /* ERRORS */
unknown_mask:
    {
        GST_ERROR_OBJECT (payload, "unknown red mask specified");
        return FALSE;
    }
unknown_format:
    {
        GST_ERROR_OBJECT (payload, "unknown caps format");
        return FALSE;
    }
unknown_fourcc:
    {
        GST_ERROR_OBJECT (payload, "invalid or missing fourcc");
        return FALSE;
    }
missing_dimension:
    {
        GST_ERROR_OBJECT (payload, "missing width or height property");
        return FALSE;
    }
}