static GstVideoOverlayComposition *
gst_dvbsub_overlay_subs_to_comp (GstDVBSubOverlay * overlay,
    DVBSubtitles * subs)
{
  GstVideoOverlayComposition *comp = NULL;
  GstVideoOverlayRectangle *rect;
  gint width, height, dw, dh, wx, wy;
  gint i;

  g_return_val_if_fail (subs != NULL && subs->num_rects > 0, NULL);

  width = GST_VIDEO_INFO_WIDTH (&overlay->info);
  height = GST_VIDEO_INFO_HEIGHT (&overlay->info);

  dw = subs->display_def.display_width;
  dh = subs->display_def.display_height;

  GST_LOG_OBJECT (overlay,
      "converting %d rectangles for display %dx%d -> video %dx%d",
      subs->num_rects, dw, dh, width, height);

  if (subs->display_def.window_flag) {
    wx = subs->display_def.window_x;
    wy = subs->display_def.window_y;
    GST_LOG_OBJECT (overlay, "display window %dx%d @ (%d, %d)",
        subs->display_def.window_width, subs->display_def.window_height,
        wx, wy);
  } else {
    wx = 0;
    wy = 0;
  }

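  /* Convert each subtitle region into an AYUV overlay rectangle and collect
   * the rectangles in a single composition. */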
  for (i = 0; i < subs->num_rects; i++) {
    DVBSubtitleRect *srect = &subs->rects[i];
    GstBuffer *buf;
    gint w, h;
    guint8 *in_data;
    guint32 *palette, *data;
    gint rx, ry, rw, rh, stride;
    gint k, l;
    GstMapInfo map;

    GST_LOG_OBJECT (overlay, "rectangle %d: %dx%d @ (%d, %d)", i,
        srect->w, srect->h, srect->x, srect->y);

    w = srect->w;
    h = srect->h;

    buf = gst_buffer_new_and_alloc (w * h * 4);
    gst_buffer_map (buf, &map, GST_MAP_WRITE);
    data = (guint32 *) map.data;
    in_data = srect->pict.data;
    palette = srect->pict.palette;
    stride = srect->pict.rowstride;
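    /* Expand the 8-bit palette indices to 32-bit AYUV.  Writing each
     * palette entry big-endian produces the packed A, Y, U, V byte order
     * that GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_YUV expects. */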
    for (k = 0; k < h; k++) {
      for (l = 0; l < w; l++) {
        guint32 ayuv;

        ayuv = palette[*in_data];
        GST_WRITE_UINT32_BE (data, ayuv);
        in_data++;
        data++;
      }
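      /* Skip any padding at the end of the source row (rowstride may be
       * larger than the rectangle width). */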
      in_data += stride - w;
    }
    gst_buffer_unmap (buf, &map);

    /* Subtitle rectangle coordinates are relative to the display window
     * (if one was signalled), which itself is placed inside a display of
     * dimensions dw x dh.  Translate them to display coordinates and scale
     * those to the actual dimensions of the video we are dealing with
     * here. */
    rx = gst_util_uint64_scale (wx + srect->x, width, dw);
    ry = gst_util_uint64_scale (wy + srect->y, height, dh);
    rw = gst_util_uint64_scale (srect->w, width, dw);
    rh = gst_util_uint64_scale (srect->h, height, dh);

    GST_LOG_OBJECT (overlay, "rectangle %d rendered: %dx%d @ (%d, %d)", i,
        rw, rh, rx, ry);

    /* gst_video_overlay_rectangle_new_raw() requires video meta describing
     * the format and dimensions of the pixel data in the buffer. */
    gst_buffer_add_video_meta (buf, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_YUV, w, h);
    rect = gst_video_overlay_rectangle_new_raw (buf, rx, ry, rw, rh,
        GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
    g_assert (rect);
    if (comp) {
      gst_video_overlay_composition_add_rectangle (comp, rect);
    } else {
      comp = gst_video_overlay_composition_new (rect);
    }
    gst_video_overlay_rectangle_unref (rect);
    gst_buffer_unref (buf);
  }

  return comp;
}
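
/* Sink pad chain function: hands each incoming buffer to the application
 * via the "draw" signal and then either appends the returned composition to
 * an existing meta, attaches it as a new meta, or blends it directly into
 * the video frame. */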
static GstFlowReturn
gst_overlay_composition_sink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstOverlayComposition *self = GST_OVERLAY_COMPOSITION (parent);
  GstVideoOverlayComposition *compo = NULL;
  GstVideoOverlayCompositionMeta *upstream_compo_meta;

  if (gst_pad_check_reconfigure (self->srcpad)) {
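    /* The source pad was marked for reconfiguration; renegotiate with
     * downstream before processing this buffer. */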
    if (!gst_overlay_composition_negotiate (self, NULL)) {
      gst_pad_mark_reconfigure (self->srcpad);
      gst_buffer_unref (buffer);
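      /* Negotiation also fails while the pad is flushing; report FLUSHING
       * rather than NOT_NEGOTIATED in that case. */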
      GST_OBJECT_LOCK (self->srcpad);
      if (GST_PAD_IS_FLUSHING (self->srcpad)) {
        GST_OBJECT_UNLOCK (self->srcpad);
        return GST_FLOW_FLUSHING;
      }
      GST_OBJECT_UNLOCK (self->srcpad);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

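  /* Wrap the buffer together with the current caps and segment in a
   * GstSample for the application; reuse the existing sample where possible
   * instead of allocating a new one for every buffer. */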
  if (!self->sample) {
    self->sample = gst_sample_new (buffer, self->caps, &self->segment, NULL);
  } else {
    self->sample = gst_sample_make_writable (self->sample);
    gst_sample_set_buffer (self->sample, buffer);
    gst_sample_set_caps (self->sample, self->caps);
    gst_sample_set_segment (self->sample, &self->segment);
  }

  g_signal_emit (self, overlay_composition_signals[SIGNAL_DRAW], 0,
      self->sample, &compo);

  /* Don't keep the buffer in the sample any longer, otherwise it will not
   * be writable below, as we would hold one reference in the sample and one
   * in this function.
   *
   * If the sample itself is not writable, the application kept a reference
   * to it. */
  if (gst_sample_is_writable (self->sample)) {
    gst_sample_set_buffer (self->sample, NULL);
  }

  if (!compo) {
    GST_DEBUG_OBJECT (self->sinkpad,
        "Application did not provide an overlay composition");
    return gst_pad_push (self->srcpad, buffer);
  }

  /* If upstream already attached a composition meta, we can safely append
   * our own rectangles to it; upstream must have checked that downstream
   * supports the meta. */
  upstream_compo_meta = gst_buffer_get_video_overlay_composition_meta (buffer);
  if (upstream_compo_meta) {
    GstVideoOverlayComposition *merged_compo =
        gst_video_overlay_composition_copy (upstream_compo_meta->overlay);
    guint i, n;

    GST_DEBUG_OBJECT (self->sinkpad,
        "Appending to upstream overlay composition");

    n = gst_video_overlay_composition_n_rectangles (compo);
    for (i = 0; i < n; i++) {
      GstVideoOverlayRectangle *rect =
          gst_video_overlay_composition_get_rectangle (compo, i);
      gst_video_overlay_composition_add_rectangle (merged_compo, rect);
    }

    gst_video_overlay_composition_unref (compo);
    gst_video_overlay_composition_unref (upstream_compo_meta->overlay);
    upstream_compo_meta->overlay = merged_compo;
  } else if (self->attach_compo_to_buffer) {
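    /* attach_compo_to_buffer was decided during negotiation: downstream can
     * handle the overlay composition meta itself, so attach the composition
     * instead of blending it. */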
    GST_DEBUG_OBJECT (self->sinkpad, "Attaching as meta");

    buffer = gst_buffer_make_writable (buffer);
    gst_buffer_add_video_overlay_composition_meta (buffer, compo);
    gst_video_overlay_composition_unref (compo);
  } else {
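    /* Downstream does not support the overlay composition meta: map the
     * frame and blend the rectangles directly into the pixels. */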
    GstVideoFrame frame;

    buffer = gst_buffer_make_writable (buffer);
    if (!gst_video_frame_map (&frame, &self->info, buffer, GST_MAP_READWRITE)) {
      gst_video_overlay_composition_unref (compo);
      goto map_failed;
    }

    gst_video_overlay_composition_blend (compo, &frame);

    gst_video_frame_unmap (&frame);
    gst_video_overlay_composition_unref (compo);
  }

  return gst_pad_push (self->srcpad, buffer);

map_failed:
  {
    GST_ERROR_OBJECT (self->sinkpad, "Failed to map buffer");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
}