Example #1
/* For the clock skew we use a windowed low point averaging algorithm as can be
 * found in Fober, Orlarey and Letz, 2005, "Real Time Clock Skew Estimation
 * over Network Delays":
 * http://www.grame.fr/Ressources/pub/TR-050601.pdf
 * http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.102.1546
 *
 * The idea is that the jitter is composed of:
 *
 *  J = N + n
 *
 *   N   : a constant network delay.
 *   n   : random added noise. The noise is concentrated around 0
 *
 * In the receiver we can track the elapsed time at the sender with:
 *
 *  send_diff(i) = (Tsi - Ts0);
 *
 *   Tsi : The time at the sender at packet i
 *   Ts0 : The time at the sender at the first packet
 *
 * This is the difference between the RTP timestamp in the first received packet
 * and the current packet.
 *
 * At the receiver we have to deal with the jitter introduced by the network.
 *
 *  recv_diff(i) = (Tri - Tr0)
 *
 *   Tri : The time at the receiver at packet i
 *   Tr0 : The time at the receiver at the first packet
 *
 * Both of these values contain the jitter Ji introduced for packet i, so we
 * can write:
 *
 *  recv_diff(i) = (Cri + D + ni) - (Cr0 + D + n0)
 *
 *    Cri    : The time of the clock at the receiver for packet i
 *    D + ni : The jitter when receiving packet i
 *
 * We see that the network delay is irrelevant here as we can eliminate D:
 *
 *  recv_diff(i) = (Cri + ni) - (Cr0 + n0)
 *
 * The drift is now expressed as:
 *
 *  Drift(i) = recv_diff(i) - send_diff(i);
 *
 * We now keep the W latest values of Drift and find the minimum (this is the
 * one with the lowest network jitter and thus the one which is least affected
 * by it). We average this lowest value to smooth out the resulting network skew.
 *
 * Both the window and the weighting used for averaging influence the accuracy
 * of the drift estimation. Finding the correct parameters turns out to be a
 * compromise between accuracy and inertia.
 *
 * We use a 2 second window or up to 512 data points, which is statistically big
 * enough to catch spikes (FIXME, detect spikes).
 * We also use a rather large weighting factor (125) to adapt smoothly. During
 * startup, when filling the window, we use a parabolic weighting factor: the
 * more the window is filled, the faster we move to the detected possible skew.
 *
 * Returns: @time adjusted with the clock skew.
 */
static GstClockTime
calculate_skew (RTPJitterBuffer * jbuf, guint32 rtptime, GstClockTime time,
    guint32 clock_rate)
{
  guint64 ext_rtptime;
  guint64 send_diff, recv_diff;
  gint64 delta;
  gint64 old;
  gint pos, i;
  GstClockTime gstrtptime, out_time;
  guint64 slope;

  ext_rtptime = gst_rtp_buffer_ext_timestamp (&jbuf->ext_rtptime, rtptime);

  gstrtptime = gst_util_uint64_scale_int (ext_rtptime, GST_SECOND, clock_rate);

  /* keep track of the last extended rtptime */
  jbuf->last_rtptime = ext_rtptime;

  if (jbuf->clock_rate != clock_rate) {
    if (jbuf->clock_rate == -1) {
      GST_DEBUG ("Clock rate changed from %" G_GUINT32_FORMAT " to %"
          G_GUINT32_FORMAT, jbuf->clock_rate, clock_rate);
    } else {
      GST_WARNING ("Clock rate changed from %" G_GUINT32_FORMAT " to %"
          G_GUINT32_FORMAT, jbuf->clock_rate, clock_rate);
    }
    jbuf->base_time = -1;
    jbuf->base_rtptime = -1;
    jbuf->clock_rate = clock_rate;
    jbuf->prev_out_time = -1;
    jbuf->prev_send_diff = -1;
  }

  /* first time, lock on to time and gstrtptime */
  if (G_UNLIKELY (jbuf->base_time == -1)) {
    jbuf->base_time = time;
    jbuf->prev_out_time = -1;
    GST_DEBUG ("Taking new base time %" GST_TIME_FORMAT, GST_TIME_ARGS (time));
  }
  if (G_UNLIKELY (jbuf->base_rtptime == -1)) {
    jbuf->base_rtptime = gstrtptime;
    jbuf->base_extrtp = ext_rtptime;
    jbuf->prev_send_diff = -1;
    GST_DEBUG ("Taking new base rtptime %" GST_TIME_FORMAT,
        GST_TIME_ARGS (gstrtptime));
  }

  if (G_LIKELY (gstrtptime >= jbuf->base_rtptime))
    send_diff = gstrtptime - jbuf->base_rtptime;
  else if (time != -1) {
    /* elapsed time at sender, timestamps can go backwards and thus be smaller
     * than our base time, take a new base time in that case. */
    GST_WARNING ("backward timestamps at server, taking new base time");
    rtp_jitter_buffer_resync (jbuf, time, gstrtptime, ext_rtptime, FALSE);
    send_diff = 0;
  } else {
    GST_WARNING ("backward timestamps at server but no timestamps");
    send_diff = 0;
    /* at least try to get a new timestamp.. */
    jbuf->base_time = -1;
  }

  GST_DEBUG ("extrtp %" G_GUINT64_FORMAT ", gstrtp %" GST_TIME_FORMAT ", base %"
      GST_TIME_FORMAT ", send_diff %" GST_TIME_FORMAT, ext_rtptime,
      GST_TIME_ARGS (gstrtptime), GST_TIME_ARGS (jbuf->base_rtptime),
      GST_TIME_ARGS (send_diff));

  /* we don't have an arrival timestamp so we can't do skew detection. we
   * should still apply a timestamp based on RTP timestamp and base_time */
  if (time == -1 || jbuf->base_time == -1)
    goto no_skew;

  /* elapsed time at receiver, includes the jitter */
  recv_diff = time - jbuf->base_time;

  /* measure the diff */
  delta = ((gint64) recv_diff) - ((gint64) send_diff);

  /* measure the slope, this gives a rough estimate of the ratio between the
   * sender speed and the receiver speed. This should be approximately 8;
   * higher values indicate a burst (especially when the connection starts) */
  if (recv_diff > 0)
    slope = (send_diff * 8) / recv_diff;
  else
    slope = 8;

  GST_DEBUG ("time %" GST_TIME_FORMAT ", base %" GST_TIME_FORMAT ", recv_diff %"
      GST_TIME_FORMAT ", slope %" G_GUINT64_FORMAT, GST_TIME_ARGS (time),
      GST_TIME_ARGS (jbuf->base_time), GST_TIME_ARGS (recv_diff), slope);

  /* if the difference between the sender timeline and the receiver timeline
   * changed too quickly we have to resync because the server likely restarted
   * its timestamps. */
  if (ABS (delta - jbuf->skew) > GST_SECOND) {
    GST_WARNING ("delta - skew: %" GST_TIME_FORMAT " too big, reset skew",
        GST_TIME_ARGS (delta - jbuf->skew));
    rtp_jitter_buffer_resync (jbuf, time, gstrtptime, ext_rtptime, TRUE);
    send_diff = 0;
    delta = 0;
  }

  pos = jbuf->window_pos;

  if (G_UNLIKELY (jbuf->window_filling)) {
    /* we are filling the window */
    GST_DEBUG ("filling %d, delta %" G_GINT64_FORMAT, pos, delta);
    jbuf->window[pos++] = delta;
    /* calc the min delta we observed */
    if (G_UNLIKELY (pos == 1 || delta < jbuf->window_min))
      jbuf->window_min = delta;

    if (G_UNLIKELY (send_diff >= MAX_TIME || pos >= MAX_WINDOW)) {
      jbuf->window_size = pos;

      /* window filled */
      GST_DEBUG ("min %" G_GINT64_FORMAT, jbuf->window_min);

      /* the skew is now the min */
      jbuf->skew = jbuf->window_min;
      jbuf->window_filling = FALSE;
    } else {
      gint perc_time, perc_window, perc;

      /* figure out how much we filled the window, this depends on the amount of
       * time we have or the max number of points we keep. */
      perc_time = send_diff * 100 / MAX_TIME;
      perc_window = pos * 100 / MAX_WINDOW;
      perc = MAX (perc_time, perc_window);

      /* make a parabolic function, the closer we get to the MAX, the more value
       * we give to the scaling factor of the new value */
      perc = perc * perc;

      /* quickly go to the min value when we are filling up, slowly when we are
       * just starting because we're not sure it's a good value yet. */
      jbuf->skew =
          (perc * jbuf->window_min + ((10000 - perc) * jbuf->skew)) / 10000;
      jbuf->window_size = pos + 1;
    }
  } else {
    /* pick old value and store new value. We keep the previous value in order
     * to quickly check if the min of the window changed */
    old = jbuf->window[pos];
    jbuf->window[pos++] = delta;

    if (G_UNLIKELY (delta <= jbuf->window_min)) {
      /* if the new value we inserted is smaller or equal to the current min,
       * it becomes the new min */
      jbuf->window_min = delta;
    } else if (G_UNLIKELY (old == jbuf->window_min)) {
      gint64 min = G_MAXINT64;

      /* if we removed the old min, we have to find a new min */
      for (i = 0; i < jbuf->window_size; i++) {
        /* we found another value equal to the old min, we can stop searching now */
        if (jbuf->window[i] == old) {
          min = old;
          break;
        }
        if (jbuf->window[i] < min)
          min = jbuf->window[i];
      }
      jbuf->window_min = min;
    }
    /* average the min values */
    jbuf->skew = (jbuf->window_min + (124 * jbuf->skew)) / 125;
    GST_DEBUG ("delta %" G_GINT64_FORMAT ", new min: %" G_GINT64_FORMAT,
        delta, jbuf->window_min);
  }
  /* wrap around in the window */
  if (G_UNLIKELY (pos >= jbuf->window_size))
    pos = 0;
  jbuf->window_pos = pos;

no_skew:
  /* the output time is defined as the base timestamp plus the RTP time
   * adjusted for the clock skew. */
  if (jbuf->base_time != -1) {
    out_time = jbuf->base_time + send_diff;
    /* skew can be negative and we don't want to make invalid timestamps */
    if (jbuf->skew < 0 && out_time < -jbuf->skew) {
      out_time = 0;
    } else {
      out_time += jbuf->skew;
    }
    /* check if timestamps are not going backwards, we can only check this if we
     * have a previous out time and a previous send_diff */
    if (G_LIKELY (jbuf->prev_out_time != -1 && jbuf->prev_send_diff != -1)) {
      /* now check for backwards timestamps */
      if (G_UNLIKELY (
              /* if the server timestamps went up and the out_time backwards */
              (send_diff > jbuf->prev_send_diff
                  && out_time < jbuf->prev_out_time) ||
              /* if the server timestamps went backwards and the out_time forwards */
              (send_diff < jbuf->prev_send_diff
                  && out_time > jbuf->prev_out_time) ||
              /* if the server timestamps did not change */
              send_diff == jbuf->prev_send_diff)) {
        GST_DEBUG ("backwards timestamps, using previous time");
        out_time = jbuf->prev_out_time;
      }
    }
    if (time != -1 && out_time + jbuf->delay < time) {
      /* if we are going to produce a timestamp that is later than the input
       * timestamp, we need to reset the jitterbuffer. Likely the server paused
       * temporarily */
      GST_DEBUG ("out %" GST_TIME_FORMAT " + %" G_GUINT64_FORMAT " < time %"
          GST_TIME_FORMAT ", reset jitterbuffer", GST_TIME_ARGS (out_time),
          jbuf->delay, GST_TIME_ARGS (time));
      rtp_jitter_buffer_resync (jbuf, time, gstrtptime, ext_rtptime, TRUE);
      out_time = time;
      send_diff = 0;
    }
  } else
    out_time = -1;

  jbuf->prev_out_time = out_time;
  jbuf->prev_send_diff = send_diff;

  GST_DEBUG ("skew %" G_GINT64_FORMAT ", out %" GST_TIME_FORMAT,
      jbuf->skew, GST_TIME_ARGS (out_time));

  return out_time;
}
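The windowed low-point averaging described in the comment above can be shown in isolation. Below is a minimal sketch, not part of the original jitterbuffer: the names, the zero-initialized statics and the fixed 512-entry window are illustrative, and the parabolic startup weighting is omitted.

#include <glib.h>

/* Illustrative sketch only: keep the last W drift deltas, track their
 * minimum, and low-pass filter that minimum with the same 1/125 weight. */
#define SKETCH_WINDOW 512

static gint64 drift_window[SKETCH_WINDOW];
static gint drift_len = 0, drift_pos = 0;
static gint64 drift_min = 0, drift_skew = 0;

static gint64
sketch_update_skew (gint64 delta)
{
  gint64 old = drift_window[drift_pos];
  gint i;

  drift_window[drift_pos] = delta;
  drift_pos = (drift_pos + 1) % SKETCH_WINDOW;
  if (drift_len < SKETCH_WINDOW)
    drift_len++;

  if (drift_len == 1 || delta <= drift_min) {
    /* the new value is the new minimum */
    drift_min = delta;
  } else if (old == drift_min) {
    /* the old minimum dropped out of the window, rescan for a new one */
    drift_min = drift_window[0];
    for (i = 1; i < drift_len; i++)
      if (drift_window[i] < drift_min)
        drift_min = drift_window[i];
  }

  /* smooth the observed low point, weighting factor 125 as above */
  drift_skew = (drift_min + 124 * drift_skew) / 125;
  return drift_skew;
}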
Example #2
GstFlowReturn
gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame)
{
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstBuffer *src_buffer;

  GST_DEBUG ("finish frame");

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG ("finish frame sync=%d pts=%" GST_TIME_FORMAT, frame->is_sync_point,
      GST_TIME_ARGS (frame->presentation_timestamp));

  if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) {
    if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) {
      GST_DEBUG ("sync timestamp %" GST_TIME_FORMAT " diff %" GST_TIME_FORMAT,
          GST_TIME_ARGS (frame->presentation_timestamp),
          GST_TIME_ARGS (frame->presentation_timestamp -
              base_video_decoder->segment.start));
      base_video_decoder->timestamp_offset = frame->presentation_timestamp;
      base_video_decoder->field_index = 0;
    } else {
      /* This case is for one initial timestamp and no others, e.g.,
       * filesrc ! decoder ! xvimagesink */
      GST_WARNING ("sync timestamp didn't change, ignoring");
      frame->presentation_timestamp = GST_CLOCK_TIME_NONE;
    }
  } else {
    if (frame->is_sync_point) {
      GST_WARNING ("sync point doesn't have timestamp");
      if (!GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) {
        GST_WARNING
            ("No base timestamp.  Assuming frames start at segment start");
        base_video_decoder->timestamp_offset =
            base_video_decoder->segment.start;
        base_video_decoder->field_index = 0;
      }
    }
  }
  frame->field_index = base_video_decoder->field_index;
  base_video_decoder->field_index += frame->n_fields;

  if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) {
    frame->presentation_timestamp =
        gst_base_video_decoder_get_field_timestamp (base_video_decoder,
        frame->field_index);
    frame->presentation_duration = GST_CLOCK_TIME_NONE;
    frame->decode_timestamp =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->decode_frame_number);
  }
  if (frame->presentation_duration == GST_CLOCK_TIME_NONE) {
    frame->presentation_duration =
        gst_base_video_decoder_get_field_duration (base_video_decoder,
        frame->n_fields);
  }

  if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_timestamp)) {
    if (frame->presentation_timestamp < base_video_decoder->last_timestamp) {
      GST_WARNING ("decreasing timestamp (%" GST_TIME_FORMAT " < %"
          GST_TIME_FORMAT ")", GST_TIME_ARGS (frame->presentation_timestamp),
          GST_TIME_ARGS (base_video_decoder->last_timestamp));
    }
  }
  base_video_decoder->last_timestamp = frame->presentation_timestamp;

  GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if (base_video_decoder->state.interlaced) {
#ifndef GST_VIDEO_BUFFER_TFF
#define GST_VIDEO_BUFFER_TFF (GST_MINI_OBJECT_FLAG_LAST << 5)
#endif
#ifndef GST_VIDEO_BUFFER_RFF
#define GST_VIDEO_BUFFER_RFF (GST_MINI_OBJECT_FLAG_LAST << 6)
#endif
#ifndef GST_VIDEO_BUFFER_ONEFIELD
#define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7)
#endif
    int tff = base_video_decoder->state.top_field_first;

    if (frame->field_index & 1) {
      tff ^= 1;
    }
    if (tff) {
      GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_TFF);
    } else {
      GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_TFF);
    }
    GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_RFF);
    GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    if (frame->n_fields == 3) {
      GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_RFF);
    } else if (frame->n_fields == 1) {
      /* single-field content: the ONEFIELD flag must be set, not cleared */
      GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    }
  }
  if (base_video_decoder->discont) {
    /* first buffer after a discontinuity: flag it as DISCONT */
    GST_BUFFER_FLAG_SET (frame->src_buffer, GST_BUFFER_FLAG_DISCONT);
    base_video_decoder->discont = FALSE;
  }

  GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
  GST_BUFFER_OFFSET (frame->src_buffer) = GST_BUFFER_OFFSET_NONE;
  GST_BUFFER_OFFSET_END (frame->src_buffer) = GST_BUFFER_OFFSET_NONE;

  GST_DEBUG ("pushing frame %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->presentation_timestamp));

  base_video_decoder->frames =
      g_list_remove (base_video_decoder->frames, frame);

  gst_base_video_decoder_set_src_caps (base_video_decoder);

  src_buffer = frame->src_buffer;
  frame->src_buffer = NULL;

  gst_base_video_decoder_free_frame (frame);

  if (base_video_decoder->sink_clipping) {
    gint64 start = GST_BUFFER_TIMESTAMP (src_buffer);
    gint64 stop = GST_BUFFER_TIMESTAMP (src_buffer) +
        GST_BUFFER_DURATION (src_buffer);

    if (gst_segment_clip (&base_video_decoder->segment, GST_FORMAT_TIME,
            start, stop, &start, &stop)) {
      GST_BUFFER_TIMESTAMP (src_buffer) = start;
      GST_BUFFER_DURATION (src_buffer) = stop - start;
      GST_DEBUG ("accepting buffer inside segment: %" GST_TIME_FORMAT
          " %" GST_TIME_FORMAT
          " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
          " time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer) +
              GST_BUFFER_DURATION (src_buffer)),
          GST_TIME_ARGS (base_video_decoder->segment.start),
          GST_TIME_ARGS (base_video_decoder->segment.stop),
          GST_TIME_ARGS (base_video_decoder->segment.time));
    } else {
      GST_DEBUG ("dropping buffer outside segment: %" GST_TIME_FORMAT
          " %" GST_TIME_FORMAT
          " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
          " time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer) +
              GST_BUFFER_DURATION (src_buffer)),
          GST_TIME_ARGS (base_video_decoder->segment.start),
          GST_TIME_ARGS (base_video_decoder->segment.stop),
          GST_TIME_ARGS (base_video_decoder->segment.time));
      gst_buffer_unref (src_buffer);
      return GST_FLOW_OK;
    }
  }

  return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
      src_buffer);
}
static gboolean
setup_recoder_pipeline (GstSmartEncoder * smart_encoder)
{
  GstPad *tmppad;
  GstCaps *caps;

  /* Fast path */
  if (G_UNLIKELY (smart_encoder->encoder))
    return TRUE;

  GST_DEBUG ("Creating internal decoder and encoder");

  /* Create decoder/encoder */
  caps = gst_pad_get_current_caps (smart_encoder->sinkpad);
  smart_encoder->decoder = get_decoder (caps);
  if (G_UNLIKELY (smart_encoder->decoder == NULL))
    goto no_decoder;
  gst_caps_unref (caps);
  gst_element_set_bus (smart_encoder->decoder, GST_ELEMENT_BUS (smart_encoder));

  caps = gst_pad_get_current_caps (smart_encoder->sinkpad);
  smart_encoder->encoder = get_encoder (caps);
  if (G_UNLIKELY (smart_encoder->encoder == NULL))
    goto no_encoder;
  gst_caps_unref (caps);
  gst_element_set_bus (smart_encoder->encoder, GST_ELEMENT_BUS (smart_encoder));

  GST_DEBUG ("Creating internal pads");

  /* Create internal pads */

  /* Source pad which we'll use to feed data to the decoder */
  smart_encoder->internal_srcpad = gst_pad_new ("internal_src", GST_PAD_SRC);
  g_object_set_qdata ((GObject *) smart_encoder->internal_srcpad,
      INTERNAL_ELEMENT, smart_encoder);
  gst_pad_set_active (smart_encoder->internal_srcpad, TRUE);

  /* Sink pad which will get the buffers from the encoder.
   * Note: We don't need an event function since we'll be discarding all
   * of them. */
  smart_encoder->internal_sinkpad = gst_pad_new ("internal_sink", GST_PAD_SINK);
  g_object_set_qdata ((GObject *) smart_encoder->internal_sinkpad,
      INTERNAL_ELEMENT, smart_encoder);
  gst_pad_set_chain_function (smart_encoder->internal_sinkpad, internal_chain);
  gst_pad_set_active (smart_encoder->internal_sinkpad, TRUE);

  GST_DEBUG ("Linking pads to elements");

  /* Link everything */
  tmppad = gst_element_get_static_pad (smart_encoder->encoder, "src");
  if (GST_PAD_LINK_FAILED (gst_pad_link (tmppad,
              smart_encoder->internal_sinkpad)))
    goto sinkpad_link_fail;
  gst_object_unref (tmppad);

  if (!gst_element_link (smart_encoder->decoder, smart_encoder->encoder))
    goto encoder_decoder_link_fail;

  tmppad = gst_element_get_static_pad (smart_encoder->decoder, "sink");
  if (GST_PAD_LINK_FAILED (gst_pad_link (smart_encoder->internal_srcpad,
              tmppad)))
    goto srcpad_link_fail;
  gst_object_unref (tmppad);

  GST_DEBUG ("Done creating internal elements/pads");

  return TRUE;

no_decoder:
  {
    GST_WARNING ("Couldn't find a decoder for %" GST_PTR_FORMAT, caps);
    gst_caps_unref (caps);
    return FALSE;
  }

no_encoder:
  {
    GST_WARNING ("Couldn't find an encoder for %" GST_PTR_FORMAT, caps);
    gst_caps_unref (caps);
    return FALSE;
  }

srcpad_link_fail:
  {
    gst_object_unref (tmppad);
    GST_WARNING ("Couldn't link internal srcpad to decoder");
    return FALSE;
  }

sinkpad_link_fail:
  {
    gst_object_unref (tmppad);
    GST_WARNING ("Couldn't link encoder to internal sinkpad");
    return FALSE;
  }

encoder_decoder_link_fail:
  {
    GST_WARNING ("Couldn't link decoder to encoder");
    return FALSE;
  }
}
Example #4
//
// Expected synchronisation from caller. This method is not thread-safe!
//
bool DiscretixSession::dxdrmProcessKey(Uint8Array* key, RefPtr<Uint8Array>& nextMessage, unsigned short& errorCode, unsigned long& systemCode)
{
    GST_MEMDUMP("response received :", key->data(), key->byteLength());

    bool isAckRequired;
    HDxResponseResult responseResult = nullptr;
    EDxDrmStatus status = DX_ERROR_CONTENT_NOT_RECOGNIZED;

    errorCode = 0;
    if (m_state == PHASE_INITIAL) {
        // Server replied to our license request
        status = DxDrmStream_ProcessLicenseResponse(m_DxDrmStream, key->data(), key->byteLength(), &responseResult, &isAckRequired);

        if (status == DX_SUCCESS) {
            // Create a deep copy of the key.
            m_key = key->buffer();
            m_state = (isAckRequired ? PHASE_ACKNOWLEDGE : PHASE_PROVISIONED);
            GST_DEBUG("Acknowledgement required: %s", isAckRequired ? "yes" : "no");
        }

    } else if (m_state == PHASE_ACKNOWLEDGE) {

        // Server replied to our license response acknowledge
        status = DxDrmClient_ProcessServerResponse(key->data(), key->byteLength(), DX_RESPONSE_LICENSE_ACK, &responseResult, &isAckRequired);

        if (status == DX_SUCCESS) {
            // Create a deep copy of the key.
            m_key = key->buffer();
            m_state = (isAckRequired ? PHASE_ACKNOWLEDGE : PHASE_PROVISIONED);

            if (m_state == PHASE_ACKNOWLEDGE)
                GST_WARNING("Acknowledging an Ack. Strange situation.");
        }
    } else
        GST_WARNING("Unexpected call. We are already provisioned");

    if (status != DX_SUCCESS) {
        GST_ERROR("failed processing license response (status: %d)", status);
        errorCode = MediaKeyError::MEDIA_KEYERR_CLIENT;
    } else if (m_state == PHASE_PROVISIONED) {
        status = DxDrmStream_SetIntent(m_DxDrmStream, DX_INTENT_AUTO_PLAY, DX_AUTO_NO_UI);
        if (status != DX_SUCCESS)
            GST_ERROR("opening stream failed because there are no rights (license) to play the content (status: %d)", status);
        else {
            GST_INFO("playback rights found");

            /* starting consumption of the file - notifying the drm that the file is being used */
            status = DxDrmFile_HandleConsumptionEvent(m_DxDrmStream, DX_EVENT_START);
            if (status != DX_SUCCESS)
                GST_ERROR("Content consumption failed");
            else {
                GST_INFO("Stream was opened and is ready for playback");
                m_ready = true;
            }
        }

    } else if (m_state == PHASE_ACKNOWLEDGE) {
        uint32_t challengeLength = MAX_CHALLENGE_LEN;
        unsigned char* challenge = static_cast<unsigned char*>(g_malloc0(challengeLength));

        status = DxDrmClient_GetLicenseAcq_GenerateAck(&responseResult, challenge, &challengeLength);
        if (status != DX_SUCCESS)
            GST_ERROR("failed generating license ack challenge (status: %d, response result %p)", status, responseResult);

        GST_MEMDUMP("generated license ack request :", challenge, challengeLength);

        nextMessage = Uint8Array::create(challenge, challengeLength);
        g_free(challenge);
    }

    systemCode = status;

    return (status == DX_SUCCESS);
}
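For reference, the three m_state phases the function above cycles through can be summarized as an enum. This definition is an assumption reconstructed from the checks in the code, not taken from the Discretix headers.

/* Assumed definition, reconstructed from the m_state checks above. */
typedef enum {
  PHASE_INITIAL,     /* license request sent, waiting for the license response */
  PHASE_ACKNOWLEDGE, /* license installed, server still expects an acknowledge */
  PHASE_PROVISIONED  /* rights available, stream can be opened for playback */
} SessionPhase;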
Example #5
static gboolean
gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  gboolean ret = FALSE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_EOS:
    {
      if (!base_video_decoder->packetized) {
        GstFlowReturn flow_ret;

        do {
          flow_ret =
              base_video_decoder_class->parse_data (base_video_decoder, TRUE);
        } while (flow_ret == GST_FLOW_OK);
      }

      if (base_video_decoder_class->finish) {
        base_video_decoder_class->finish (base_video_decoder);
      }

      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
          event);
    }
      break;
    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      double rate;
      double applied_rate;
      GstFormat format;
      gint64 start;
      gint64 stop;
      gint64 position;
      GstSegment *segment = &base_video_decoder->segment;

      gst_event_parse_new_segment_full (event, &update, &rate,
          &applied_rate, &format, &start, &stop, &position);

      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;

      if (!update) {
        gst_base_video_decoder_reset (base_video_decoder);
      }

      base_video_decoder->timestamp_offset = start;

      gst_segment_set_newsegment_full (segment,
          update, rate, applied_rate, format, start, stop, position);
      base_video_decoder->have_segment = TRUE;

      GST_WARNING ("new segment: format %d rate %g start %" GST_TIME_FORMAT
          " stop %" GST_TIME_FORMAT
          " position %" GST_TIME_FORMAT
          " update %d",
          format, rate,
          GST_TIME_ARGS (segment->start),
          GST_TIME_ARGS (segment->stop), GST_TIME_ARGS (segment->time), update);

      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
          event);
    }
      break;
    case GST_EVENT_FLUSH_STOP:{
      GST_OBJECT_LOCK (base_video_decoder);
      base_video_decoder->earliest_time = GST_CLOCK_TIME_NONE;
      base_video_decoder->proportion = 0.5;
      GST_OBJECT_UNLOCK (base_video_decoder);
    }
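      /* no break: fall through so the event is also pushed downstream
       * by the default case below */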
    default:
      /* FIXME this changes the order of events */
      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
          event);
      break;
  }

done:
  gst_object_unref (base_video_decoder);
  return ret;

newseg_wrong_format:
  {
    GST_DEBUG_OBJECT (base_video_decoder, "received non TIME newsegment");
    gst_event_unref (event);
    goto done;
  }
}
Example #6
/* returns static descriptions and dynamic ones (such as video/x-raw-yuv),
 * or NULL if caps aren't known at all */
static gchar *
format_info_get_desc (const FormatInfo * info, const GstCaps * caps)
{
  const GstStructure *s;

  g_assert (info != NULL);

  if (info->desc != NULL)
    return g_strdup (_(info->desc));

  s = gst_caps_get_structure (caps, 0);

  if (strcmp (info->type, "video/x-raw-yuv") == 0) {
    const gchar *ret = NULL;
    guint32 fourcc = 0;

    gst_structure_get_fourcc (s, "format", &fourcc);
    switch (fourcc) {
      case GST_MAKE_FOURCC ('I', '4', '2', '0'):
        ret = _("Uncompressed planar YUV 4:2:0");
        break;
      case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
        ret = _("Uncompressed planar YVU 4:2:0");
        break;
      case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
        ret = _("Uncompressed packed YUV 4:2:2");
        break;
      case GST_MAKE_FOURCC ('Y', 'U', 'V', '9'):
        ret = _("Uncompressed packed YUV 4:1:0");
        break;
      case GST_MAKE_FOURCC ('Y', 'V', 'U', '9'):
        ret = _("Uncompressed packed YVU 4:1:0");
        break;
      case GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'):
      case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
        ret = _("Uncompressed packed YUV 4:2:2");
        break;
      case GST_MAKE_FOURCC ('Y', '4', '1', 'P'):
        ret = _("Uncompressed packed YUV 4:1:1");
        break;
      case GST_MAKE_FOURCC ('I', 'Y', 'U', '2'):
        ret = _("Uncompressed packed YUV 4:4:4");
        break;
      case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
        ret = _("Uncompressed planar YUV 4:2:2");
        break;
      case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
        ret = _("Uncompressed planar YUV 4:1:1");
        break;
      case GST_MAKE_FOURCC ('Y', '8', '0', '0'):
        ret = _("Uncompressed black and white Y-plane");
        break;
      default:
        ret = _("Uncompressed YUV");
        break;
    }
    return g_strdup (ret);
  } else if (strcmp (info->type, "video/x-raw-rgb") == 0) {
    const gchar *rgb_str;
    gint depth = 0;

    gst_structure_get_int (s, "depth", &depth);
    rgb_str = gst_structure_has_field (s, "alpha_mask") ? "RGBA" : "RGB";
    if (gst_structure_has_field (s, "paletted_data")) {
      return g_strdup_printf (_("Uncompressed palettized %d-bit %s"), depth,
          rgb_str);
    } else {
      return g_strdup_printf ("Uncompressed %d-bit %s", depth, rgb_str);
    }
  } else if (strcmp (info->type, "video/x-h263") == 0) {
    const gchar *variant, *ret;

    variant = gst_structure_get_string (s, "variant");
    if (variant == NULL)
      ret = "H.263";
    else if (strcmp (variant, "itu") == 0)
      ret = "ITU H.26n";        /* why not ITU H.263? (tpm) */
    else if (strcmp (variant, "lead") == 0)
      ret = "Lead H.263";
    else if (strcmp (variant, "microsoft") == 0)
      ret = "Microsoft H.263";
    else if (strcmp (variant, "vdolive") == 0)
      ret = "VDOLive";
    else if (strcmp (variant, "vivo") == 0)
      ret = "Vivo H.263";
    else if (strcmp (variant, "xirlink") == 0)
      ret = "Xirlink H.263";
    else {
      GST_WARNING ("Unknown H263 variant '%s'", variant);
      ret = "H.263";
    }
    return g_strdup (ret);
  } else if (strcmp (info->type, "video/x-h264") == 0) {
    const gchar *variant, *ret;

    variant = gst_structure_get_string (s, "variant");
    if (variant == NULL)
      ret = "H.264";
    else if (strcmp (variant, "itu") == 0)
      ret = "ITU H.264";
    else if (strcmp (variant, "videosoft") == 0)
      ret = "Videosoft H.264";
    else if (strcmp (variant, "lead") == 0)
      ret = "Lead H.264";
    else {
      GST_WARNING ("Unknown H264 variant '%s'", variant);
      ret = "H.264";
    }
    return g_strdup (ret);
  } else if (strcmp (info->type, "video/x-divx") == 0) {
    gint ver = 0;

    if (!gst_structure_get_int (s, "divxversion", &ver) || ver <= 2) {
      GST_WARNING ("Unexpected DivX version in %" GST_PTR_FORMAT, caps);
      return g_strdup ("DivX MPEG-4");
    }
    return g_strdup_printf (_("DivX MPEG-4 Version %d"), ver);
  } else if (strcmp (info->type, "video/x-msmpeg") == 0) {
    gint ver = 0;

    if (!gst_structure_get_int (s, "msmpegversion", &ver) ||
        ver < 40 || ver > 49) {
      GST_WARNING ("Unexpected msmpegversion in %" GST_PTR_FORMAT, caps);
      return g_strdup ("Microsoft MPEG-4 4.x");
    }
    return g_strdup_printf ("Microsoft MPEG-4 4.%d", ver % 10);
  } else if (strcmp (info->type, "video/x-truemotion") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "trueversion", &ver);
    switch (ver) {
      case 1:
        return g_strdup_printf ("Duck TrueMotion 1");
      case 2:
        return g_strdup_printf ("TrueMotion 2.0");
      default:
        GST_WARNING ("Unexpected trueversion in %" GST_PTR_FORMAT, caps);
        break;
    }
    return g_strdup_printf ("TrueMotion");
  } else if (strcmp (info->type, "video/x-xan") == 0) {
    gint ver = 0;

    if (!gst_structure_get_int (s, "wcversion", &ver) || ver < 1) {
      GST_WARNING ("Unexpected wcversion in %" GST_PTR_FORMAT, caps);
      return g_strdup ("Xan Wing Commander");
    }
    return g_strdup_printf ("Xan Wing Commander %u", ver);
  } else if (strcmp (info->type, "video/x-indeo") == 0) {
    gint ver = 0;

    if (!gst_structure_get_int (s, "indeoversion", &ver) || ver < 2) {
      GST_WARNING ("Unexpected indeoversion in %" GST_PTR_FORMAT, caps);
      return g_strdup ("Intel Indeo");
    }
    return g_strdup_printf ("Intel Indeo %u", ver);
  } else if (strcmp (info->type, "audio/x-wma") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "wmaversion", &ver);
    switch (ver) {
      case 1:
      case 2:
      case 3:
        return g_strdup_printf ("Windows Media Audio %d", ver + 6);
      default:
        break;
    }
    GST_WARNING ("Unexpected wmaversion in %" GST_PTR_FORMAT, caps);
    return g_strdup ("Windows Media Audio");
  } else if (strcmp (info->type, "video/x-wmv") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "wmvversion", &ver);
    switch (ver) {
      case 1:
      case 2:
      case 3:
        return g_strdup_printf ("Windows Media Video %d", ver + 6);
      default:
        break;
    }
    GST_WARNING ("Unexpected wmvversion in %" GST_PTR_FORMAT, caps);
    return g_strdup ("Windows Media Video");
  } else if (strcmp (info->type, "audio/x-mace") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "maceversion", &ver);
    if (ver == 3 || ver == 6) {
      return g_strdup_printf ("MACE-%d", ver);
    } else {
      GST_WARNING ("Unexpected maceversion in %" GST_PTR_FORMAT, caps);
      return g_strdup ("MACE");
    }
  } else if (strcmp (info->type, "video/x-svq") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "svqversion", &ver);
    if (ver == 1 || ver == 3) {
      return g_strdup_printf ("Sorensen Video %d", ver);
    } else {
      GST_WARNING ("Unexpected svqversion in %" GST_PTR_FORMAT, caps);
      return g_strdup ("Sorensen Video");
    }
  } else if (strcmp (info->type, "video/x-asus") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "asusversion", &ver);
    if (ver == 1 || ver == 2) {
      return g_strdup_printf ("Asus Video %d", ver);
    } else {
      GST_WARNING ("Unexpected asusversion in %" GST_PTR_FORMAT, caps);
      return g_strdup ("Asus Video");
    }
  } else if (strcmp (info->type, "video/x-ati-vcr") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "vcrversion", &ver);
    if (ver == 1 || ver == 2) {
      return g_strdup_printf ("ATI VCR %d", ver);
    } else {
      GST_WARNING ("Unexpected acrversion in %" GST_PTR_FORMAT, caps);
      return g_strdup ("ATI VCR");
    }
  } else if (strcmp (info->type, "audio/x-adpcm") == 0) {
    const GValue *layout_val;

    layout_val = gst_structure_get_value (s, "layout");
    if (layout_val != NULL && G_VALUE_HOLDS_STRING (layout_val)) {
      const gchar *layout;

      if ((layout = g_value_get_string (layout_val))) {
        gchar *layout_upper, *ret;

        if (strcmp (layout, "swf") == 0)
          return g_strdup ("Shockwave ADPCM");
        if (strcmp (layout, "microsoft") == 0)
          return g_strdup ("Microsoft ADPCM");
        if (strcmp (layout, "quicktime") == 0)
          return g_strdup ("Quicktime ADPCM");
        if (strcmp (layout, "westwood") == 0)
          return g_strdup ("Westwood ADPCM");
        if (strcmp (layout, "yamaha") == 0)
          return g_strdup ("Yamaha ADPCM");
        /* FIXME: other layouts: sbpro2, sbpro3, sbpro4, ct, g726, ea,
         * adx, xa, 4xm, smjpeg, dk4, dk3, dvi */
        layout_upper = g_ascii_strup (layout, -1);
        ret = g_strdup_printf ("%s ADPCM", layout_upper);
        g_free (layout_upper);
        return ret;
      }
    }
    return g_strdup ("ADPCM");
  } else if (strcmp (info->type, "audio/mpeg") == 0) {
    gint ver = 0, layer = 0;

    gst_structure_get_int (s, "mpegversion", &ver);

    switch (ver) {
      case 1:
        gst_structure_get_int (s, "layer", &layer);
        switch (layer) {
          case 1:
          case 2:
          case 3:
            return g_strdup_printf ("MPEG-1 Layer %d (MP%d)", layer, layer);
          default:
            break;
        }
        GST_WARNING ("Unexpected MPEG-1 layer in %" GST_PTR_FORMAT, caps);
        return g_strdup ("MPEG-1 Audio");
      case 4:
        return g_strdup ("MPEG-4 AAC");
      default:
        break;
    }
    GST_WARNING ("Unexpected audio mpegversion in %" GST_PTR_FORMAT, caps);
    return g_strdup ("MPEG Audio");
  } else if (strcmp (info->type, "audio/x-pn-realaudio") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "raversion", &ver);
    switch (ver) {
      case 1:
        return g_strdup ("RealAudio 14k4bps");
      case 2:
        return g_strdup ("RealAudio 28k8bps");
      case 8:
        return g_strdup ("RealAudio G2 (Cook)");
      default:
        break;
    }
    GST_WARNING ("Unexpected raversion in %" GST_PTR_FORMAT, caps);
    return g_strdup ("RealAudio");
  } else if (strcmp (info->type, "video/x-pn-realvideo") == 0) {
    gint ver = 0;

    gst_structure_get_int (s, "rmversion", &ver);
    switch (ver) {
      case 1:
        return g_strdup ("RealVideo 1.0");
      case 2:
        return g_strdup ("RealVideo 2.0");
      case 3:
        return g_strdup ("RealVideo 3.0");
      case 4:
        return g_strdup ("RealVideo 4.0");
      default:
        break;
    }
    GST_WARNING ("Unexpected rmversion in %" GST_PTR_FORMAT, caps);
    return g_strdup ("RealVideo");
  } else if (strcmp (info->type, "video/mpeg") == 0) {
    gboolean sysstream;
    gint ver = 0;

    if (!gst_structure_get_boolean (s, "systemstream", &sysstream) ||
        !gst_structure_get_int (s, "mpegversion", &ver) || ver < 1 || ver > 4) {
      GST_WARNING ("Missing fields in mpeg video caps %" GST_PTR_FORMAT, caps);
    } else {
      if (sysstream) {
        return g_strdup_printf ("MPEG-%d System Stream", ver);
      } else {
        return g_strdup_printf ("MPEG-%d Video", ver);
      }
    }
    return g_strdup ("MPEG Video");
  } else if (strcmp (info->type, "audio/x-raw-int") == 0) {
    gint bitdepth = 0;

    /* 8-bit pcm might not have depth field (?) */
    if (!gst_structure_get_int (s, "depth", &bitdepth))
      gst_structure_get_int (s, "width", &bitdepth);
    if (bitdepth != 0)
      return g_strdup_printf (_("Raw %d-bit PCM audio"), bitdepth);
    else
      return g_strdup (_("Raw PCM audio"));
  } else if (strcmp (info->type, "audio/x-raw-float") == 0) {
    gint bitdepth = 0;

    gst_structure_get_int (s, "width", &bitdepth);
    if (bitdepth != 0)
      return g_strdup_printf (_("Raw %d-bit floating-point audio"), bitdepth);
    else
      return g_strdup (_("Raw floating-point audio"));
  }

  return NULL;
}
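A hypothetical usage sketch follows. The FormatInfo initializer assumes only the .type and .desc fields that the function above actually reads; everything else about the struct is unknown here, and the helper name is ours.

/* Hypothetical usage of format_info_get_desc(); only the .type and .desc
 * fields referenced in the function above are assumed to exist. */
static void
print_h263_description (void)
{
  FormatInfo info = { .type = "video/x-h263", .desc = NULL };
  GstCaps *caps;
  gchar *desc;

  caps = gst_caps_new_simple ("video/x-h263",
      "variant", G_TYPE_STRING, "microsoft", NULL);

  desc = format_info_get_desc (&info, caps);    /* -> "Microsoft H.263" */
  GST_INFO ("description: %s", desc);

  g_free (desc);
  gst_caps_unref (caps);
}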
Example #7
static gpointer
init_devices (gpointer data)
{
  IDeckLinkIterator *iterator;
  IDeckLink *decklink = NULL;
  HRESULT ret;
  int i;

#ifdef _MSC_VER
  // Start COM thread for Windows

  g_mutex_lock (&com_init_lock);

  /* create the COM initialization thread */
  g_thread_create ((GThreadFunc) gst_decklink_com_thread, NULL, FALSE, NULL);

  /* wait until the COM thread signals that COM has been initialized */
  g_cond_wait (&com_init_cond, &com_init_lock);
  g_mutex_unlock (&com_init_lock);
#endif /* _MSC_VER */

  iterator = CreateDeckLinkIteratorInstance ();
  if (iterator == NULL) {
    GST_ERROR ("no driver");
    return NULL;
  }

  i = 0;
  ret = iterator->Next (&decklink);
  while (ret == S_OK) {
    ret = decklink->QueryInterface (IID_IDeckLinkInput,
        (void **) &devices[i].input.input);
    if (ret != S_OK) {
      GST_WARNING ("selected device does not have input interface");
    } else {
      devices[i].input.device = decklink;
      devices[i].input.clock = gst_decklink_clock_new ("GstDecklinkInputClock");
      GST_DECKLINK_CLOCK_CAST (devices[i].input.clock)->input =
          &devices[i].input;
      devices[i].input.input->SetCallback (
          new GStreamerDecklinkInputCallback (&devices[i].input));
    }

    ret = decklink->QueryInterface (IID_IDeckLinkOutput,
        (void **) &devices[i].output.output);
    if (ret != S_OK) {
      GST_WARNING ("selected device does not have output interface");
    } else {
      devices[i].output.device = decklink;
      devices[i].output.clock =
          gst_decklink_clock_new ("GstDecklinkOutputClock");
      GST_DECKLINK_CLOCK_CAST (devices[i].output.clock)->output =
          &devices[i].output;
    }

    ret = decklink->QueryInterface (IID_IDeckLinkConfiguration,
        (void **) &devices[i].input.config);
    if (ret != S_OK) {
      GST_WARNING ("selected device does not have config interface");
    }

    ret = decklink->QueryInterface (IID_IDeckLinkAttributes,
        (void **) &devices[i].input.attributes);
    if (ret != S_OK) {
      GST_WARNING ("selected device does not have attributes interface");
    }

    ret = iterator->Next (&decklink);
    i++;

    if (i == 10) {
      GST_WARNING ("this hardware has more then 10 devices");
      break;
    }
  }

  n_devices = i;

  iterator->Release ();

  return NULL;
}
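The gst_decklink_com_thread referenced in the Windows path is not shown above. A minimal sketch of what such a thread must do, initialize COM and wake up the creator blocked in g_cond_wait(), could look like this; the body is an assumption and any further COM housekeeping (e.g. keeping the thread alive until deinit) is omitted.

#ifdef _MSC_VER
/* Sketch (assumed, not from the original source) of the COM initialization
 * thread that init_devices() above waits on. */
static gpointer
gst_decklink_com_thread (gpointer data)
{
  HRESULT res;

  g_mutex_lock (&com_init_lock);

  res = CoInitializeEx (0, COINIT_MULTITHREADED);
  if (res == S_FALSE)
    GST_WARNING ("COM was already initialized on this thread");
  else if (res != S_OK)
    GST_ERROR ("Couldn't initialize COM: 0x%08lx", (gulong) res);

  /* wake up init_devices(), which is blocked in g_cond_wait() */
  g_cond_signal (&com_init_cond);
  g_mutex_unlock (&com_init_lock);

  return NULL;
}
#endif /* _MSC_VER */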
Example #8
/* This function checks that it is actually really possible to create an image
   using XShm */
gboolean
gst_ximagesink_check_xshm_calls (GstXImageSink * ximagesink,
    GstXContext * xcontext)
{
  XImage *ximage;
  XShmSegmentInfo SHMInfo;
  size_t size;
  int (*handler) (Display *, XErrorEvent *);
  gboolean result = FALSE;
  gboolean did_attach = FALSE;

  g_return_val_if_fail (xcontext != NULL, FALSE);

  /* Sync to ensure any older errors are already processed */
  XSync (xcontext->disp, FALSE);

  /* Set defaults so we don't free these later unnecessarily */
  SHMInfo.shmaddr = ((void *) -1);
  SHMInfo.shmid = -1;

  /* Setting an error handler to catch failure */
  error_caught = FALSE;
  handler = XSetErrorHandler (gst_ximagesink_handle_xerror);

  /* Trying to create a 1x1 ximage */
  GST_DEBUG ("XShmCreateImage of 1x1");

  ximage = XShmCreateImage (xcontext->disp, xcontext->visual,
      xcontext->depth, ZPixmap, NULL, &SHMInfo, 1, 1);

  /* Might cause an error, sync to ensure it is noticed */
  XSync (xcontext->disp, FALSE);
  if (!ximage || error_caught) {
    GST_WARNING ("could not XShmCreateImage a 1x1 image");
    goto beach;
  }
  size = ximage->height * ximage->bytes_per_line;

  SHMInfo.shmid = shmget (IPC_PRIVATE, size, IPC_CREAT | 0777);
  if (SHMInfo.shmid == -1) {
    GST_WARNING ("could not get shared memory of %" G_GSIZE_FORMAT " bytes",
        size);
    goto beach;
  }

  SHMInfo.shmaddr = shmat (SHMInfo.shmid, NULL, 0);
  if (SHMInfo.shmaddr == ((void *) -1)) {
    GST_WARNING ("Failed to shmat: %s", g_strerror (errno));
    /* Clean up the shared memory segment */
    shmctl (SHMInfo.shmid, IPC_RMID, NULL);
    goto beach;
  }

  ximage->data = SHMInfo.shmaddr;
  SHMInfo.readOnly = FALSE;

  if (XShmAttach (xcontext->disp, &SHMInfo) == 0) {
    GST_WARNING ("Failed to XShmAttach");
    /* Clean up the shared memory segment */
    shmctl (SHMInfo.shmid, IPC_RMID, NULL);
    goto beach;
  }

  /* Sync to ensure we see any errors we caused */
  XSync (xcontext->disp, FALSE);

  /* Delete the shared memory segment as soon as everyone is attached.
   * This way, it will be deleted as soon as we detach later, and not
   * leaked if we crash. */
  shmctl (SHMInfo.shmid, IPC_RMID, NULL);

  if (!error_caught) {
    GST_DEBUG ("XServer ShmAttached to 0x%x, id 0x%lx", SHMInfo.shmid,
        SHMInfo.shmseg);

    did_attach = TRUE;
    /* store whether we succeeded in result */
    result = TRUE;
  } else {
    GST_WARNING ("MIT-SHM extension check failed at XShmAttach. "
        "Not using shared memory.");
  }

beach:
  /* Sync to ensure we swallow any errors we caused and reset error_caught */
  XSync (xcontext->disp, FALSE);

  error_caught = FALSE;
  XSetErrorHandler (handler);

  if (did_attach) {
    GST_DEBUG ("XServer ShmDetaching from 0x%x id 0x%lx",
        SHMInfo.shmid, SHMInfo.shmseg);
    XShmDetach (xcontext->disp, &SHMInfo);
    XSync (xcontext->disp, FALSE);
  }
  if (SHMInfo.shmaddr != ((void *) -1))
    shmdt (SHMInfo.shmaddr);
  if (ximage)
    XDestroyImage (ximage);
  return result;
}
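The error_caught flag and gst_ximagesink_handle_xerror used above are not defined in this snippet; they follow the standard pattern of trapping asynchronous X errors around XSync() calls. A sketch under that assumption:

/* Assumed definitions: a global flag plus a handler installed with
 * XSetErrorHandler() that records, rather than aborts on, any X error
 * raised between the XSync() calls in the probe above. */
static gboolean error_caught = FALSE;

static int
gst_ximagesink_handle_xerror (Display * display, XErrorEvent * xevent)
{
  char error_msg[1024];

  XGetErrorText (display, xevent->error_code, error_msg, sizeof (error_msg));
  GST_DEBUG ("ximagesink caught an XError: %s", error_msg);
  error_caught = TRUE;

  /* returning 0 keeps Xlib from terminating the process */
  return 0;
}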
Example #9
static GstFlowReturn
gst_vdp_mpeg4_dec_handle_configuration (GstVdpMpeg4Dec * mpeg4_dec,
    GstMpeg4Frame * mpeg4_frame)
{
  Mpeg4VisualObjectSequence vos;
  Mpeg4VisualObject vo;
  Mpeg4VideoObjectLayer vol;

  GstVideoState state;
  guint8 profile_indication;
  VdpDecoderProfile profile;

  GstFlowReturn ret;

  if (mpeg4_dec->is_configured)
    return GST_FLOW_OK;

  if (!mpeg4_frame->vos_buf || !mpeg4_frame->vo_buf || !mpeg4_frame->vol_buf)
    goto skip_frame;

  if (!mpeg4_util_parse_VOS (mpeg4_frame->vos_buf, &vos))
    goto skip_frame;

  if (!mpeg4_util_parse_VO (mpeg4_frame->vo_buf, &vo))
    goto skip_frame;

  if (!mpeg4_util_parse_VOL (mpeg4_frame->vol_buf, &vo, &vol))
    goto skip_frame;

  state = gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER (mpeg4_dec));

  state.width = vol.width;
  state.height = vol.height;

  if (vol.fixed_vop_rate) {
    state.fps_n = vol.vop_time_increment_resolution;
    state.fps_d = vol.fixed_vop_time_increment;
  }

  state.par_n = vol.par_n;
  state.par_d = vol.par_d;

  gst_base_video_decoder_set_state (GST_BASE_VIDEO_DECODER (mpeg4_dec), state);

  profile_indication = vos.profile_and_level_indication >> 4;
  switch (profile_indication) {
    case 0x0:
      profile = VDP_DECODER_PROFILE_MPEG4_PART2_SP;
      break;

    case 0xf:
      profile = VDP_DECODER_PROFILE_MPEG4_PART2_ASP;
      break;

    default:
      goto unsupported_profile;
  }
  ret = gst_vdp_decoder_init_decoder (GST_VDP_DECODER (mpeg4_dec), profile, 2);
  if (ret != GST_FLOW_OK)
    return ret;

  mpeg4_dec->vol = vol;
  mpeg4_dec->is_configured = TRUE;

  return GST_FLOW_OK;

skip_frame:
  GST_WARNING ("Skipping frame since we're not configured yet");
  gst_base_video_decoder_skip_frame (GST_BASE_VIDEO_DECODER (mpeg4_dec),
      GST_VIDEO_FRAME (mpeg4_frame));
  return GST_FLOW_CUSTOM_ERROR;

unsupported_profile:
  GST_ELEMENT_ERROR (mpeg4_dec, STREAM, WRONG_TYPE,
      ("vdpaumpeg4dec doesn't support this streams profile"),
      ("profile_and_level_indication: %d", vos.profile_and_level_indication));
  return GST_FLOW_ERROR;
}
Example #10
static GstVaapiDecoderStatus
decode_buffer(GstVaapiDecoderJpeg *decoder, GstBuffer *buffer)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
    GstJpegMarkerSegment seg;
    GstJpegScanSegment scan_seg;
    GstClockTime pts;
    guchar *buf;
    guint buf_size, ofs;
    gboolean append_ecs;

    buf      = GST_BUFFER_DATA(buffer);
    buf_size = GST_BUFFER_SIZE(buffer);
    if (!buf || buf_size == 0)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;

    memset(&scan_seg, 0, sizeof(scan_seg));

    pts = GST_BUFFER_TIMESTAMP(buffer);
    ofs = 0;
    while (gst_jpeg_parse(&seg, buf, buf_size, ofs)) {
        if (seg.size < 0) {
            GST_DEBUG("buffer to short for parsing");
            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
        }
        ofs += seg.size;

        /* Decode scan, if complete */
        if (seg.marker == GST_JPEG_MARKER_EOI && scan_seg.header_size > 0) {
            scan_seg.data_size = seg.offset - scan_seg.data_offset;
            scan_seg.is_valid  = TRUE;
        }
        if (scan_seg.is_valid) {
            status = decode_scan(
                decoder,
                buf + scan_seg.header_offset,
                scan_seg.header_size,
                buf + scan_seg.data_offset,
                scan_seg.data_size
            );
            if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
                break;
            memset(&scan_seg, 0, sizeof(scan_seg));
        }

        append_ecs = TRUE;
        switch (seg.marker) {
        case GST_JPEG_MARKER_SOI:
            priv->has_quant_table = FALSE;
            priv->has_huf_table   = FALSE;
            priv->mcu_restart     = 0;
            status = GST_VAAPI_DECODER_STATUS_SUCCESS;
            break;
        case GST_JPEG_MARKER_EOI:
            if (decode_current_picture(decoder)) {
                /* Get out of the loop, trailing data is not needed */
                status = GST_VAAPI_DECODER_STATUS_SUCCESS;
                goto end;
            }
            status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
            break;
        case GST_JPEG_MARKER_DHT:
            status = decode_huffman_table(decoder, buf + seg.offset, seg.size);
            break;
        case GST_JPEG_MARKER_DQT:
            status = decode_quant_table(decoder, buf + seg.offset, seg.size);
            break;
        case GST_JPEG_MARKER_DRI:
            status = decode_restart_interval(decoder, buf + seg.offset, seg.size);
            break;
        case GST_JPEG_MARKER_DAC:
            GST_ERROR("unsupported arithmetic coding mode");
            status = GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
            break;
        case GST_JPEG_MARKER_SOS:
            scan_seg.header_offset = seg.offset;
            scan_seg.header_size   = seg.size;
            scan_seg.data_offset   = seg.offset + seg.size;
            scan_seg.data_size     = 0;
            append_ecs             = FALSE;
            break;
        default:
            /* Restart marker */
            if (seg.marker >= GST_JPEG_MARKER_RST_MIN &&
                seg.marker <= GST_JPEG_MARKER_RST_MAX) {
                append_ecs = FALSE;
                break;
            }

            /* Frame header */
            if (seg.marker >= GST_JPEG_MARKER_SOF_MIN &&
                seg.marker <= GST_JPEG_MARKER_SOF_MAX) {
                status = decode_picture(
                    decoder,
                    seg.marker,
                    buf + seg.offset, seg.size,
                    pts
                );
                break;
            }

            /* Application segments */
            if (seg.marker >= GST_JPEG_MARKER_APP_MIN &&
                seg.marker <= GST_JPEG_MARKER_APP_MAX) {
                status = GST_VAAPI_DECODER_STATUS_SUCCESS;
                break;
            }

            GST_WARNING("unsupported marker (0x%02x)", seg.marker);
            status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
            break;
        }

        /* Append entropy coded segments */
        if (append_ecs)
            scan_seg.data_size = seg.offset - scan_seg.data_offset;

        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
            break;
    }
end:
    return status;
}
void
gst_aiur_stream_cache_add_buffer (GstAiurStreamCache * cache,
    GstBuffer * buffer)
{
  guint64 size;
  gint trycnt = 0;
  if ((cache == NULL) || (buffer == NULL))
    goto bail;

  g_mutex_lock (cache->mutex);

  size = GST_BUFFER_SIZE (buffer);

  if ((cache->seeking) || (size == 0)) {
    g_mutex_unlock (cache->mutex);
    goto bail;
  }

  if (cache->ignore_size) {
    /* drop part or total buffer */
    if (cache->ignore_size >= size) {
      cache->ignore_size -= size;
      g_mutex_unlock (cache->mutex);
      goto bail;
    } else {
      GST_BUFFER_DATA (buffer) += (cache->ignore_size);
      GST_BUFFER_SIZE (buffer) -= (cache->ignore_size);
      size = GST_BUFFER_SIZE (buffer);
      cache->ignore_size = 0;
    }
    //g_print("cache offset %lld\n", cache->offset);
  }

  gst_adapter_push (cache->adapter, buffer);
  g_cond_signal (cache->produce_cond);

  buffer = NULL;

  if (cache->threshold_max) {
#if 0
    if (cache->threshold_max < size + cache->threshold_pre) {
      cache->threshold_max = size + cache->threshold_pre;
    }
#endif

    while ((gst_adapter_available (cache->adapter) > cache->threshold_max)
        && (cache->closed == FALSE)) {
      if (((++trycnt) & 0x1f) == 0x0) {
        GST_WARNING ("wait push try %d SIZE %d %lld", trycnt,
            gst_adapter_available (cache->adapter), cache->threshold_max);
      }
      WAIT_COND_TIMEOUT (cache->consume_cond, cache->mutex, 1000000);
    }

    if (cache->seeking) {
      g_mutex_unlock (cache->mutex);
      goto bail;
    }
  }


  g_mutex_unlock (cache->mutex);

  return;

bail:
  if (buffer) {
    gst_buffer_unref (buffer);
  }
}
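WAIT_COND_TIMEOUT is used above but not defined in this snippet; with the GLib API of the same era (pre-2.32 GCond) it is presumably a g_cond_timed_wait() wrapper along these lines:

/* Assumed definition of WAIT_COND_TIMEOUT: wait on @cond for at most
 * @timeout_us microseconds while @mutex is held (GLib pre-2.32 API). */
#define WAIT_COND_TIMEOUT(cond, mutex, timeout_us)        \
  do {                                                    \
    GTimeVal abstime;                                     \
    g_get_current_time (&abstime);                        \
    g_time_val_add (&abstime, (glong) (timeout_us));      \
    g_cond_timed_wait ((cond), (mutex), &abstime);        \
  } while (0)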
Example #12
static gboolean
get_video_recv_info (KmsRembLocal * rl,
    guint64 * bitrate, guint * fraction_lost, guint64 * packets_rcv_interval)
{
  GValueArray *arr = NULL;
  GValue *val;
  guint i;
  gboolean ret = FALSE;

  if (!KMS_REMB_BASE (rl)->rtpsess) {
    GST_WARNING ("Session object does not exist");
    return ret;
  }

  g_object_get (KMS_REMB_BASE (rl)->rtpsess, "sources", &arr, NULL);

  if (arr == NULL) {
    GST_WARNING ("Sources array not found");
    return ret;
  }

  for (i = 0; i < arr->n_values; i++) {
    GObject *source;
    guint ssrc;
    GstStructure *s;

    val = g_value_array_get_nth (arr, i);
    source = g_value_get_object (val);
    g_object_get (source, "ssrc", &ssrc, "stats", &s, NULL);

    GST_TRACE_OBJECT (source, "source ssrc: %u", ssrc);
    GST_TRACE_OBJECT (KMS_REMB_BASE (rl)->rtpsess, "stats: %" GST_PTR_FORMAT,
        s);

    if (ssrc == rl->remote_ssrc) {
      GstClockTime current_time;
      guint64 octets_received, packets_received;

      if (!gst_structure_get_uint64 (s, "bitrate", bitrate)) {
        break;
      }
      if (!gst_structure_get_uint64 (s, "octets-received", &octets_received)) {
        break;
      }
      if (!gst_structure_get_uint (s, "sent-rb-fractionlost", fraction_lost)) {
        break;
      }
      if (!gst_structure_get_uint64 (s, "packets-received", &packets_received)) {
        break;
      }

      current_time = kms_utils_get_time_nsecs ();

      if (rl->last_time != 0) {
        GstClockTime elapsed = current_time - rl->last_time;
        guint64 bytes_handled = octets_received - rl->last_octets_received;

        *bitrate =
            gst_util_uint64_scale (bytes_handled, 8 * GST_SECOND, elapsed);
        GST_TRACE_OBJECT (KMS_REMB_BASE (rl)->rtpsess,
            "Elapsed %" G_GUINT64_FORMAT " bytes %" G_GUINT64_FORMAT ", rate %"
            G_GUINT64_FORMAT, elapsed, bytes_handled, *bitrate);
      }

      rl->last_time = current_time;
      rl->last_octets_received = octets_received;

      *packets_rcv_interval = packets_received - rl->last_packets_received;
      rl->last_packets_received = packets_received;

      ret = TRUE;
    }

    gst_structure_free (s);

    if (ret) {
      break;
    }
  }

  g_value_array_free (arr);

  return ret;
}
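The bitrate computation above relies on gst_util_uint64_scale() so that bytes * 8 * GST_SECOND cannot overflow 64 bits before the division. Isolated, the same arithmetic reads (the helper name is ours):

/* Isolated form of the rate computation above: bits transferred per
 * elapsed nanoseconds, scaled to bits per second without overflowing. */
static guint64
bitrate_from_interval (guint64 bytes_handled, GstClockTime elapsed_ns)
{
  if (elapsed_ns == 0)
    return 0;
  /* (bytes_handled * 8 * GST_SECOND) / elapsed_ns, overflow-safe */
  return gst_util_uint64_scale (bytes_handled, 8 * GST_SECOND, elapsed_ns);
}

For example, 125000 bytes handled over a half-second interval (5 * 10^8 ns) yields 2000000 bps.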
Example #13
static void
on_sending_rtcp (GObject * sess, GstBuffer * buffer, gboolean is_early,
    gboolean * do_not_supress)
{
  KmsRembLocal *rl;
  KmsRTCPPSFBAFBREMBPacket remb_packet;
  GstRTCPBuffer rtcp = { NULL, };
  GstRTCPPacket packet;
  guint packet_ssrc;

  rl = g_object_get_data (sess, KMS_REMB_LOCAL);

  if (!rl) {
    GST_WARNING ("Invalid RembLocal");
    return;
  }

  if (is_early) {
    return;
  }

  if (!gst_rtcp_buffer_map (buffer, GST_MAP_READWRITE, &rtcp)) {
    GST_WARNING_OBJECT (sess, "Cannot map buffer to RTCP");
    return;
  }

  if (!gst_rtcp_buffer_add_packet (&rtcp, GST_RTCP_TYPE_PSFB, &packet)) {
    GST_WARNING_OBJECT (sess, "Cannot add RTCP packet");
    goto end;
  }

  if (!kms_remb_local_update (rl)) {
    goto end;
  }

  remb_packet.bitrate = rl->remb;
  if (rl->event_manager != NULL) {
    guint remb_local_max;

    remb_local_max = kms_utils_remb_event_manager_get_min (rl->event_manager);
    if (remb_local_max > 0) {
      GST_TRACE_OBJECT (sess, "REMB local max: %" G_GUINT32_FORMAT,
          remb_local_max);
      remb_packet.bitrate = MIN (remb_local_max, rl->remb);
    }
  }

  if (rl->min_bw > 0) {
    remb_packet.bitrate = MAX (remb_packet.bitrate, rl->min_bw * 1000);
  } else {
    remb_packet.bitrate = MAX (remb_packet.bitrate, REMB_MIN);
  }

  remb_packet.n_ssrcs = 1;
  remb_packet.ssrcs[0] = rl->remote_ssrc;
  g_object_get (sess, "internal-ssrc", &packet_ssrc, NULL);
  if (!kms_rtcp_psfb_afb_remb_marshall_packet (&packet, &remb_packet,
          packet_ssrc)) {
    gst_rtcp_packet_remove (&packet);
  }

  GST_TRACE_OBJECT (sess, "Sending REMB (bitrate: %" G_GUINT32_FORMAT
      ", ssrc: %" G_GUINT32_FORMAT ")", remb_packet.bitrate, rl->remote_ssrc);

  kms_remb_base_update_stats (KMS_REMB_BASE (rl), rl->remote_ssrc,
      remb_packet.bitrate);

end:
  gst_rtcp_buffer_unmap (&rtcp);
}
Example #14
/**
 * rtp_jitter_buffer_insert:
 * @jbuf: an #RTPJitterBuffer
 * @buf: a buffer
 * @time: a running_time when this buffer was received in nanoseconds
 * @clock_rate: the clock-rate of the payload of @buf
 * @tail: TRUE when the tail element changed.
 * @percent: the buffering percent after insertion, or -1 when not in
 *     buffering mode.
 *
 * Inserts @buf into the packet queue of @jbuf. The sequence number of the
 * packet will be used to sort the packets. This function takes ownership of
 * @buf when the function returns %TRUE.
 * @buf should have writable metadata when calling this function.
 *
 * Returns: %FALSE if a packet with the same number already existed.
 */
gboolean
rtp_jitter_buffer_insert (RTPJitterBuffer * jbuf, GstBuffer * buf,
    GstClockTime time, guint32 clock_rate, gboolean * tail, gint * percent)
{
  GList *list;
  guint32 rtptime;
  guint16 seqnum;
  GstRTPBuffer rtp = {NULL};

  g_return_val_if_fail (jbuf != NULL, FALSE);
  g_return_val_if_fail (buf != NULL, FALSE);

  gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);

  seqnum = gst_rtp_buffer_get_seq (&rtp);

  /* loop the list to skip strictly smaller seqnum buffers */
  for (list = jbuf->packets->head; list; list = g_list_next (list)) {
    guint16 qseq;
    gint gap;
    GstRTPBuffer rtpb = {NULL};

    gst_rtp_buffer_map (GST_BUFFER_CAST (list->data), GST_MAP_READ, &rtpb);
    qseq = gst_rtp_buffer_get_seq (&rtpb);
    gst_rtp_buffer_unmap (&rtpb);

    /* compare the new seqnum to the one in the buffer */
    gap = gst_rtp_buffer_compare_seqnum (seqnum, qseq);

    /* we hit a packet with the same seqnum, notify a duplicate */
    if (G_UNLIKELY (gap == 0))
      goto duplicate;

    /* seqnum > qseq, we can stop looking */
    if (G_LIKELY (gap < 0))
      break;
  }

  rtptime = gst_rtp_buffer_get_timestamp (&rtp);
  /* RTP time jumps are checked for during skew calculation, but bypassed
   * in the other modes, so watch for them here and reset the jitterbuffer
   * if needed. Only reset on a valid input time, which is likely for UDP
   * input, where we expect this might happen due to async thread effects
   * (in seek and state change cycles), but not so much for TCP input */
  if (GST_CLOCK_TIME_IS_VALID (time) &&
      jbuf->mode != RTP_JITTER_BUFFER_MODE_SLAVE &&
      jbuf->base_time != -1 && jbuf->last_rtptime != -1) {
    GstClockTime ext_rtptime = jbuf->ext_rtptime;

    ext_rtptime = gst_rtp_buffer_ext_timestamp (&ext_rtptime, rtptime);
    if (ext_rtptime > jbuf->last_rtptime + 3 * clock_rate ||
        ext_rtptime + 3 * clock_rate < jbuf->last_rtptime) {
      /* the RTP delta is too large for the skew estimation to stay
       * meaningful; reset it rather than producing possibly very bogus
       * output timestamps */
      GST_WARNING ("rtp delta too big, reset skew");
      rtp_jitter_buffer_reset_skew (jbuf);
    }
  }

  switch (jbuf->mode) {
    case RTP_JITTER_BUFFER_MODE_NONE:
    case RTP_JITTER_BUFFER_MODE_BUFFER:
      /* send 0 as the first timestamp and -1 for the other ones. This will
       * interpolate them from the RTP timestamps with a 0 origin. In buffering
       * mode we will adjust the outgoing timestamps according to the amount of
       * time we spent buffering. */
      if (jbuf->base_time == -1)
        time = 0;
      else
        time = -1;
      break;
    case RTP_JITTER_BUFFER_MODE_SLAVE:
    default:
      break;
  }
  /* do skew calculation by measuring the difference between rtptime and the
   * receive time, this function will retimestamp @buf with the skew corrected
   * running time. */
  time = calculate_skew (jbuf, rtptime, time, clock_rate);
  GST_BUFFER_TIMESTAMP (buf) = time;

  /* It's more likely that the packet was inserted in the front of the buffer */
  if (G_LIKELY (list))
    g_queue_insert_before (jbuf->packets, list, buf);
  else
    g_queue_push_tail (jbuf->packets, buf);

  /* buffering mode, update buffer stats */
  if (jbuf->mode == RTP_JITTER_BUFFER_MODE_BUFFER)
    update_buffer_level (jbuf, percent);
  else
    *percent = -1;

  /* tail was changed when we did not find a previous packet, we set the return
   * flag when requested. */
  if (G_LIKELY (tail))
    *tail = (list == NULL);

  gst_rtp_buffer_unmap (&rtp);

  return TRUE;

  /* ERRORS */
duplicate:
  {
    gst_rtp_buffer_unmap (&rtp);
    GST_WARNING ("duplicate packet %d found", (gint) seqnum);
    return FALSE;
  }
}
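A caller-side sketch of the ownership contract described above: when the insert fails because of a duplicate sequence number, the buffer is still owned by the caller and has to be released (jbuf, buf, running_time and clock_rate are assumed to exist in the surrounding code):

gboolean tail;
gint percent;

if (!rtp_jitter_buffer_insert (jbuf, buf, running_time, clock_rate,
        &tail, &percent)) {
  /* insert did not take ownership: duplicate seqnum, drop the packet */
  gst_buffer_unref (buf);
}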
Example #15
static gboolean
gst_amlvdec_sink_event (GstVideoDecoder * dec, GstEvent * event)
{
  gboolean ret;
  GstAmlVdec *amlvdec = GST_AMLVDEC (dec);

  GST_DEBUG_OBJECT (amlvdec, "Got %s event on sink pad",
      GST_EVENT_TYPE_NAME (event));

  /* A 0.10-style GST_EVENT_NEWSEGMENT case used to live here: it parsed the
   * new segment, forwarded it with amlvdec_forward_process() and pushed the
   * event downstream. It is disabled in this version of the code. */

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_START:
      if (amlvdec->codec_init_ok) {
        set_black_policy (0);
      }
      break;

    case GST_EVENT_FLUSH_STOP:
    {
      stop_eos_task (amlvdec);
      if (amlvdec->codec_init_ok) {
        gint res = codec_reset (amlvdec->pcodec);

        if (res < 0) {
          GST_ERROR ("reset vcodec failed, res=%x", res);
          return FALSE;
        }
        amlvdec->is_headerfeed = FALSE;
      }
      GST_WARNING ("vformat:%d", amlvdec->pcodec->video_type);
      break;
    }

    case GST_EVENT_EOS:
      GST_WARNING ("got GST_EVENT_EOS, check for video end");
      if (amlvdec->codec_init_ok) {
        start_eos_task (amlvdec);
        amlvdec->is_eos = TRUE;
      }
      break;

    default:
      break;
  }

  /* chain up so the base class sees every event too */
  ret = GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (dec, event);

  return ret;
}
Example #16
static GstFlowReturn
gst_vdp_mpeg4_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
    GstBuffer * buf, gboolean at_eos, GstVideoFrame * frame)
{
  GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buf);
  guint8 start_code;
  GstMpeg4Frame *mpeg4_frame;

  GstFlowReturn ret = GST_FLOW_OK;

  /* start code prefix */
  SKIP (&reader, 24);

  /* start_code */
  READ_UINT8 (&reader, start_code, 8);

  mpeg4_frame = GST_MPEG4_FRAME_CAST (frame);

  /* collect packages */
  if (start_code == MPEG4_PACKET_VOS) {
    if (mpeg4_frame->vop_buf)
      ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
          (GstVideoFrame **) & mpeg4_frame);

    gst_buffer_replace (&mpeg4_frame->vos_buf, buf);
  }

  else if (start_code == MPEG4_PACKET_EVOS) {
    if (mpeg4_frame->vop_buf)
      ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
          (GstVideoFrame **) & mpeg4_frame);
  }

  else if (start_code == MPEG4_PACKET_VO)
    gst_buffer_replace (&mpeg4_frame->vo_buf, buf);

  else if (start_code >= MPEG4_PACKET_VOL_MIN &&
      start_code <= MPEG4_PACKET_VOL_MAX)
    gst_buffer_replace (&mpeg4_frame->vol_buf, buf);

  else if (start_code == MPEG4_PACKET_GOV) {
    if (mpeg4_frame->vop_buf)
      ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
          (GstVideoFrame **) & mpeg4_frame);

    gst_buffer_replace (&mpeg4_frame->gov_buf, buf);
  }

  else if (start_code == MPEG4_PACKET_VOP) {
    if (mpeg4_frame->vop_buf)
      ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
          (GstVideoFrame **) & mpeg4_frame);

    mpeg4_frame->vop_buf = buf;
  }

  else
    gst_buffer_unref (buf);


  if (at_eos && mpeg4_frame->vop_buf)
    ret = gst_base_video_decoder_have_frame (base_video_decoder, TRUE,
        (GstVideoFrame **) & mpeg4_frame);

  return ret;

error:
  gst_buffer_unref (buf);
  GST_WARNING ("error parsing packet");
  return GST_FLOW_OK;
}
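The SKIP and READ_UINT8 helpers are not part of this excerpt; plausible definitions on top of GstBitReader that match the goto error usage above (an assumption, not necessarily the decoder's actual header):

#define SKIP(reader, nbits) G_STMT_START { \
  if (!gst_bit_reader_skip ((reader), (nbits))) \
    goto error; \
} G_STMT_END

#define READ_UINT8(reader, val, nbits) G_STMT_START { \
  if (!gst_bit_reader_get_bits_uint8 ((reader), &(val), (nbits))) \
    goto error; \
} G_STMT_END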
Example #17
/**
 * ges_layer_add_clip:
 * @layer: a #GESLayer
 * @clip: (transfer full): the #GESClip to add.
 *
 * Adds the given clip to the layer. Sets the clip's parent, and thus
 * takes ownership of the clip.
 *
 * A clip can only be added to one layer.
 *
 * Calling this method will construct and properly set all the media-related
 * elements on @clip. If you need to know when those objects (actually
 * #GESTrackElement) are constructed, you should connect to the
 * container::child-added signal, which is emitted right after those elements
 * are ready to be used.
 *
 * Returns: TRUE if the clip was properly added to the layer, or FALSE
 * if the @layer refuses to add the clip.
 */
gboolean
ges_layer_add_clip (GESLayer * layer, GESClip * clip)
{
  GESAsset *asset;
  GESLayerPrivate *priv;
  GESLayer *current_layer;

  g_return_val_if_fail (GES_IS_LAYER (layer), FALSE);
  g_return_val_if_fail (GES_IS_CLIP (clip), FALSE);

  GST_DEBUG_OBJECT (layer, "adding clip:%p", clip);

  priv = layer->priv;
  current_layer = ges_clip_get_layer (clip);
  if (G_UNLIKELY (current_layer)) {
    GST_WARNING ("Clip %p already belongs to another layer", clip);
    gst_object_unref (current_layer);

    return FALSE;
  }

  asset = ges_extractable_get_asset (GES_EXTRACTABLE (clip));
  if (asset == NULL) {
    gchar *id;
    NewAssetUData *mudata = g_slice_new (NewAssetUData);

    mudata->clip = clip;
    mudata->layer = layer;

    GST_DEBUG_OBJECT (layer, "%" GST_PTR_FORMAT " has no reference to any "
        "asset, creating an asset... trying sync", clip);

    id = ges_extractable_get_id (GES_EXTRACTABLE (clip));
    asset = ges_asset_request (G_OBJECT_TYPE (clip), id, NULL);
    if (asset == NULL) {
      GESProject *project = layer->timeline ?
          GES_PROJECT (ges_extractable_get_asset (GES_EXTRACTABLE
              (layer->timeline))) : NULL;

      ges_asset_request_async (G_OBJECT_TYPE (clip),
          id, NULL, (GAsyncReadyCallback) new_asset_cb, mudata);

      if (project)
        ges_project_add_loading_asset (project, G_OBJECT_TYPE (clip), id);
      g_free (id);

      GST_LOG_OBJECT (layer, "Object added async");
      return TRUE;
    }
    g_free (id);

    ges_extractable_set_asset (GES_EXTRACTABLE (clip), asset);

    g_slice_free (NewAssetUData, mudata);
  }


  gst_object_ref_sink (clip);

  /* Take a reference to the clip and store it sorted by start/priority */
  priv->clips_start = g_list_insert_sorted (priv->clips_start, clip,
      (GCompareFunc) element_start_compare);

  /* Inform the clip it's now in this layer */
  ges_clip_set_layer (clip, layer);

  GST_DEBUG ("current clip priority : %d, Height: %d", _PRIORITY (clip),
      LAYER_HEIGHT);

  /* Set the priority. */
  if (_PRIORITY (clip) > LAYER_HEIGHT) {
    GST_WARNING_OBJECT (layer,
        "%p is out of the layer space, setting its priority to "
        "%d, setting it to the maximum priority of the layer: %d", clip,
        _PRIORITY (clip), LAYER_HEIGHT - 1);
    _set_priority0 (GES_TIMELINE_ELEMENT (clip), LAYER_HEIGHT - 1);
  }

  /* If the clip has an acceptable priority, we just leave it with its current
   * priority */
  ges_layer_resync_priorities (layer);
  ges_timeline_element_set_timeline (GES_TIMELINE_ELEMENT (clip),
      layer->timeline);

  /* emit 'clip-added' */
  g_signal_emit (layer, ges_layer_signals[OBJECT_ADDED], 0, clip);

  return TRUE;
}
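A minimal caller-side sketch, assuming an existing GESTimeline and a URI clip (the media URI is purely illustrative):

GESLayer *layer = ges_timeline_append_layer (timeline);
GESClip *clip = GES_CLIP (ges_uri_clip_new ("file:///tmp/media.ogv"));

if (!ges_layer_add_clip (layer, clip))
  GST_WARNING ("layer refused the clip");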
Example #18
int
ModuleManager::loadModule (std::string modulePath)
{
  const kurento::FactoryRegistrar *registrar;
  void *registrarFactory, *getVersion = NULL, *getName = NULL,
                           *getDescriptor = NULL, *getGenerationTime = NULL;
  std::string moduleFileName;
  std::string moduleName;
  std::string moduleVersion;
  std::string generationTime;
  const char *moduleDescriptor = NULL;

  boost::filesystem::path path (modulePath);

  moduleFileName = path.filename().string();

  if (loadedModules.find (moduleFileName) != loadedModules.end() ) {
    GST_WARNING ("Module named %s already loaded", moduleFileName.c_str() );
    return -1;
  }

  Glib::Module module (modulePath);

  if (!module) {
    GST_WARNING ("Module %s cannot be loaded: %s", modulePath.c_str(),
                 Glib::Module::get_last_error().c_str() );
    return -1;
  }

  if (!module.get_symbol ("getFactoryRegistrar", registrarFactory) ) {
    GST_WARNING ("Symbol 'getFactoryRegistrar' not found in library %s",
                 moduleFileName.c_str() );
    return -1;
  }

  registrar = ( (RegistrarFactoryFunc) registrarFactory) ();
  const std::map <std::string, std::shared_ptr <kurento::Factory > > &factories =
    registrar->getFactories();

  for (auto it : factories) {
    if (loadedFactories.find (it.first) != loadedFactories.end() ) {
      GST_WARNING ("Factory %s is already registered, skiping module %s",
                   it.first.c_str(), module.get_name().c_str() );
      return -1;
    }
  }

  module.make_resident();

  loadedFactories.insert (factories.begin(), factories.end() );

  GST_DEBUG ("Module loaded from %s", module.get_name().c_str() );

  if (!module.get_symbol ("getModuleVersion", getVersion) ) {
    GST_WARNING ("Cannot get module version");
  } else {
    moduleVersion = ( (GetVersionFunc) getVersion) ();
  }

  if (!module.get_symbol ("getModuleName", getName) ) {
    GST_WARNING ("Cannot get module name");
  } else {
    std::string finalModuleName;

    moduleName = ( (GetNameFunc) getName) ();

    // Factories are also registered using the module name as a prefix
    // Modules core, elements and filters use kurento as prefix
    if (moduleName == "core" || moduleName == "elements"
        || moduleName == "filters")  {
      finalModuleName = "kurento";
    } else {
      finalModuleName = moduleName;
    }

    for (auto it : factories) {
      loadedFactories [finalModuleName + "." + it.first] = it.second;
    }
  }

  if (!module.get_symbol ("getModuleDescriptor", getDescriptor) ) {
    GST_WARNING ("Cannot get module descriptor");
  } else {
    moduleDescriptor = ( (GetDescFunc) getDescriptor) ();
  }

  if (!module.get_symbol ("getGenerationTime", getGenerationTime) ) {
    GST_WARNING ("Cannot get module generationTime");
  } else {
    generationTime = ( (GetGenerationTimeFunc) getGenerationTime) ();
  }

  loadedModules[moduleFileName] = std::shared_ptr<ModuleData> (new ModuleData (
                                    moduleName, moduleVersion, generationTime,
                                    moduleDescriptor, factories) );

  GST_INFO ("Loaded %s version %s generated at %s", moduleName.c_str() ,
            moduleVersion.c_str(), generationTime.c_str() );

  return 0;
}
Example #19
static GstFlowReturn
gst_deinterleave_process (GstDeinterleave * self, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;

  guint channels = GST_AUDIO_INFO_CHANNELS (&self->audio_info);

  guint pads_pushed = 0, buffers_allocated = 0;

  guint nframes =
      gst_buffer_get_size (buf) / channels /
      (GST_AUDIO_INFO_WIDTH (&self->audio_info) / 8);

  guint bufsize = nframes * (GST_AUDIO_INFO_WIDTH (&self->audio_info) / 8);

  guint i;

  GList *srcs;

  GstBuffer **buffers_out = g_new0 (GstBuffer *, channels);

  guint8 *in, *out;

  GstMapInfo read_info;
  gst_buffer_map (buf, &read_info, GST_MAP_READ);

  /* Send any pending events to all src pads */
  GST_OBJECT_LOCK (self);
  if (self->pending_events) {
    GList *events;

    GstEvent *event;

    GST_DEBUG_OBJECT (self, "Sending pending events to all src pads");

    for (events = self->pending_events; events != NULL; events = events->next) {
      event = GST_EVENT (events->data);

      for (srcs = self->srcpads; srcs != NULL; srcs = srcs->next)
        gst_pad_push_event (GST_PAD (srcs->data), gst_event_ref (event));
      gst_event_unref (event);
    }

    g_list_free (self->pending_events);
    self->pending_events = NULL;
  }
  GST_OBJECT_UNLOCK (self);

  /* Allocate buffers */
  for (srcs = self->srcpads, i = 0; srcs; srcs = srcs->next, i++) {
    buffers_out[i] = gst_buffer_new_allocate (NULL, bufsize, NULL);

    /* Make sure we got a correct buffer. The only other case we allow
     * here is an unlinked pad */
    if (!buffers_out[i])
      goto alloc_buffer_failed;
    else if (buffers_out[i] && gst_buffer_get_size (buffers_out[i]) != bufsize)
      goto alloc_buffer_bad_size;

    if (buffers_out[i]) {
      gst_buffer_copy_into (buffers_out[i], buf, GST_BUFFER_COPY_METADATA, 0,
          -1);
      buffers_allocated++;
    }
  }

  /* Return NOT_LINKED if no pad was linked */
  if (!buffers_allocated) {
    GST_WARNING_OBJECT (self,
        "Couldn't allocate any buffers because no pad was linked");
    ret = GST_FLOW_NOT_LINKED;
    goto done;
  }

  /* deinterleave */
  for (srcs = self->srcpads, i = 0; srcs; srcs = srcs->next, i++) {
    GstPad *pad = (GstPad *) srcs->data;
    GstMapInfo write_info;


    in = (guint8 *) read_info.data;
    in += i * (GST_AUDIO_INFO_WIDTH (&self->audio_info) / 8);
    if (buffers_out[i]) {
      gst_buffer_map (buffers_out[i], &write_info, GST_MAP_WRITE);

      out = (guint8 *) write_info.data;

      self->func (out, in, channels, nframes);

      gst_buffer_unmap (buffers_out[i], &write_info);

      ret = gst_pad_push (pad, buffers_out[i]);
      buffers_out[i] = NULL;
      if (ret == GST_FLOW_OK)
        pads_pushed++;
      else if (ret == GST_FLOW_NOT_LINKED)
        ret = GST_FLOW_OK;
      else
        goto push_failed;
    }
  }

  /* Return NOT_LINKED if no pad was linked */
  if (!pads_pushed)
    ret = GST_FLOW_NOT_LINKED;

done:
  gst_buffer_unmap (buf, &read_info);
  gst_buffer_unref (buf);
  g_free (buffers_out);
  return ret;

alloc_buffer_failed:
  {
    GST_WARNING ("failed to allocate an output buffer");
    ret = GST_FLOW_ERROR;
    goto clean_buffers;
  }
alloc_buffer_bad_size:
  {
    GST_WARNING ("called alloc_buffer(), but didn't get requested bytes");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto clean_buffers;
  }
push_failed:
  {
    GST_DEBUG ("push() failed, flow = %s", gst_flow_get_name (ret));
    goto clean_buffers;
  }
clean_buffers:
  {
    gst_buffer_unmap (buf, &read_info);
    for (i = 0; i < channels; i++) {
      if (buffers_out[i])
        gst_buffer_unref (buffers_out[i]);
    }
    gst_buffer_unref (buf);
    g_free (buffers_out);
    return ret;
  }
}
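The src_%u pads pushed to here are sometimes pads, created once the channel count is known, so applications typically hook them up from a "pad-added" handler; a sketch (handler name is illustrative):

static void
on_deinterleave_pad_added (GstElement * deinterleave, GstPad * pad,
    gpointer user_data)
{
  gchar *name = gst_pad_get_name (pad);

  /* link this per-channel pad into its own downstream branch here */
  GST_INFO ("channel pad %s appeared", name);
  g_free (name);
}

and, after creating the element:

g_signal_connect (deinterleave, "pad-added",
    G_CALLBACK (on_deinterleave_pad_added), NULL);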
Example #20
static gchar *
tmplayer_parse_line (ParserState * state, const gchar * line, guint line_num)
{
  GstClockTime ts = GST_CLOCK_TIME_NONE;
  const gchar *text_start = NULL;
  gboolean multiline = FALSE;
  gchar *ret = NULL;
  gchar divc = '\0';
  guint h, m, s, l = 1;

  if (sscanf (line, "%u:%02u:%02u,%u%c", &h, &m, &s, &l, &divc) == 5 &&
      (divc == '=')) {
    GST_LOG ("multiline format %u %u %u %u", h, m, s, l);
    ts = GST_SECOND * ((((h * 60) + m) * 60) + s);
    text_start = strchr (line, '=');
    multiline = TRUE;
  } else if (sscanf (line, "%u:%02u:%02u%c", &h, &m, &s, &divc) == 4 &&
      (divc == '=' || divc == ':')) {
    GST_LOG ("single line format %u %u %u %u %c", h, m, s, l, divc);
    ts = GST_SECOND * ((((h * 60) + m) * 60) + s);
    text_start = strchr (line + 6, divc);
  } else if (line[0] == '\0' && state->buf->len > 0 &&
      GST_CLOCK_TIME_IS_VALID (state->start_time)) {
    /* if we get an empty line (could be the end of the file, but doesn't have
     * to be), just push whatever is still in the buffer without a duration */
    GST_LOG ("empty line, and there's still text in the buffer");
    ret = tmplayer_process_buffer (state);
    state->duration = GST_CLOCK_TIME_NONE;
    return ret;
  } else {
    GST_WARNING ("failed to parse line: '%s'", line);
    return NULL;
  }

  /* if this is a line without text, or the first line in a multiline file,
   * process and return the data in the buffer, which is the previous line(s) */
  if (text_start == NULL || text_start[1] == '\0' ||
      (l == 1 && state->buf->len > 0)) {

    if (GST_CLOCK_TIME_IS_VALID (state->start_time) &&
        state->start_time < ts && line_num > 0) {
      ret = tmplayer_process_buffer (state);
      state->duration = ts - state->start_time;
      /* ..and append current line's text (if there is any) for the next round.
       * We don't have to store ts as pending_start_time, since we deduce the
       * durations from the start times anyway, so as long as the parser just
       * forwards state->start_time by duration after it pushes the line we
       * are about to return it will all be good. */
      g_string_append (state->buf, text_start + 1);
    } else if (line_num > 0) {
      GST_WARNING ("end of subtitle unit but no valid start time?!");
    }
  } else {
    if (l > 1)
      g_string_append_c (state->buf, '\n');
    g_string_append (state->buf, text_start + 1);
    state->start_time = ts;
  }

  GST_LOG ("returning: '%s'", GST_STR_NULL (ret));
  return ret;
}
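For reference, the two timestamp styles this parser's sscanf patterns accept look like this (the subtitle text is made up):

00:00:50:A single-line subtitle
00:01:02,1=first line of a multiline subtitle
00:01:02,2=second line of the same subtitle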
Example #21
static GstPad *
kms_element_request_new_pad (GstElement * element,
    GstPadTemplate * templ, const gchar * name, const GstCaps * caps)
{
  GstPad *ret_pad = NULL;
  gchar *pad_name;
  gboolean added;

  KMS_ELEMENT_LOCK (element);
  if (templ ==
      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (G_OBJECT_GET_CLASS
              (element)), "audio_src_%u")) {
    pad_name = g_strdup_printf ("audio_src_%d",
        KMS_ELEMENT (element)->priv->audio_pad_count++);

    ret_pad = kms_element_generate_src_pad (KMS_ELEMENT (element), pad_name,
        KMS_ELEMENT (element)->priv->audio_agnosticbin, templ);
    if (ret_pad == NULL)
      KMS_ELEMENT (element)->priv->audio_pad_count--;

    g_free (pad_name);

  } else if (templ ==
      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (G_OBJECT_GET_CLASS
              (element)), "video_src_%u")) {
    pad_name = g_strdup_printf ("video_src_%d",
        KMS_ELEMENT (element)->priv->video_pad_count++);

    ret_pad = kms_element_generate_src_pad (KMS_ELEMENT (element), pad_name,
        KMS_ELEMENT (element)->priv->video_agnosticbin, templ);
    if (ret_pad == NULL)
      KMS_ELEMENT (element)->priv->video_pad_count--;

    g_free (pad_name);

  } else if (templ ==
      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (G_OBJECT_GET_CLASS
              (element)), AUDIO_SINK_PAD)) {
    ret_pad =
        kms_element_generate_sink_pad (KMS_ELEMENT (element), AUDIO_SINK_PAD,
        &KMS_ELEMENT (element)->priv->audio_valve, templ);

    gst_pad_add_probe (ret_pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
        accept_eos_probe, element, NULL);

    g_signal_connect (G_OBJECT (ret_pad), "unlinked",
        G_CALLBACK (send_flush_on_unlink), NULL);
  } else if (templ ==
      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (G_OBJECT_GET_CLASS
              (element)), VIDEO_SINK_PAD)) {
    ret_pad =
        kms_element_generate_sink_pad (KMS_ELEMENT (element), VIDEO_SINK_PAD,
        &KMS_ELEMENT (element)->priv->video_valve, templ);

    gst_pad_add_probe (ret_pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
        accept_eos_probe, element, NULL);

    g_signal_connect (G_OBJECT (ret_pad), "unlinked",
        G_CALLBACK (send_flush_on_unlink), NULL);
  }

  if (ret_pad == NULL) {
    KMS_ELEMENT_UNLOCK (element);
    GST_WARNING ("No pad created");
    return NULL;
  }

  if (GST_STATE (element) >= GST_STATE_PAUSED
      || GST_STATE_PENDING (element) >= GST_STATE_PAUSED
      || GST_STATE_TARGET (element) >= GST_STATE_PAUSED)
    gst_pad_set_active (ret_pad, TRUE);

  added = gst_element_add_pad (element, ret_pad);
  KMS_ELEMENT_UNLOCK (element);

  if (added)
    return ret_pad;

  if (gst_pad_get_direction (ret_pad) == GST_PAD_SRC) {
    GstPad *target = gst_ghost_pad_get_target (GST_GHOST_PAD (ret_pad));

    if (target != NULL) {
      GstElement *agnostic = gst_pad_get_parent_element (target);

      gst_element_release_request_pad (agnostic, target);
      g_object_unref (target);
      g_object_unref (agnostic);
    }
  }

  g_object_unref (ret_pad);
  return NULL;
}
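Requesting one of these pads from application code uses the template names handled above (a sketch; element is the KmsElement instance, and gst_element_get_request_pad() is the pre-1.20 spelling of gst_element_request_pad_simple()):

GstPad *audio_src = gst_element_get_request_pad (element, "audio_src_%u");

if (audio_src == NULL)
  GST_WARNING ("element refused to create an audio src pad");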
Example #22
/**
 * gst_tag_image_data_to_image_buffer:
 * @image_data: the (encoded) image
 * @image_data_len: the length of the encoded image data at @image_data
 * @image_type: type of the image, or #GST_TAG_IMAGE_TYPE_UNDEFINED. Pass
 *     #GST_TAG_IMAGE_TYPE_NONE if no image type should be set at all (e.g.
 *     for preview images)
 *
 * Helper function for tag-reading plugins to create a #GstBuffer suitable to
 * add to a #GstTagList as an image tag (such as #GST_TAG_IMAGE or
 * #GST_TAG_PREVIEW_IMAGE) from the encoded image data and an (optional) image
 * type.
 *
 * Background: cover art and other images in tags are usually stored as a
 * blob of binary image data, often accompanied by a MIME type or some other
 * content type string (e.g. 'png', 'jpeg', 'jpg'). Sometimes there is also an
 * 'image type' to indicate what kind of image this is (e.g. front cover,
 * back cover, artist, etc.). The image data may also be a URI to the image
 * rather than the image itself.
 *
 * In GStreamer, image tags are #GstBuffer<!-- -->s containing the raw image
 * data, with the buffer caps describing the content type of the image
 * (e.g. image/jpeg, image/png, text/uri-list). The buffer caps may contain
 * an additional 'image-type' field of #GST_TYPE_TAG_IMAGE_TYPE to describe
 * the type of image (front cover, back cover etc.). #GST_TAG_PREVIEW_IMAGE
 * tags should not carry an image type, their type is already indicated via
 * the special tag name.
 *
 * This function will do various checks and typefind the encoded image
 * data (we can't trust the declared mime type).
 *
 * Returns: a newly-allocated image buffer for use in tag lists, or NULL
 *
 * Since: 0.10.20
 */
GstBuffer *
gst_tag_image_data_to_image_buffer (const guint8 * image_data,
    guint image_data_len, GstTagImageType image_type)
{
  const gchar *name;

  GstBuffer *image;

  GstCaps *caps;

  g_return_val_if_fail (image_data != NULL, NULL);
  g_return_val_if_fail (image_data_len > 0, NULL);
  g_return_val_if_fail (gst_tag_image_type_is_valid (image_type), NULL);

  GST_DEBUG ("image data len: %u bytes", image_data_len);

  /* allocate space for a NUL terminator for a URI too */
  image = gst_buffer_try_new_and_alloc (image_data_len + 1);
  if (image == NULL) {
    GST_WARNING ("failed to allocate buffer of %d for image", image_data_len);
    return NULL;
  }

  memcpy (GST_BUFFER_DATA (image), image_data, image_data_len);
  GST_BUFFER_DATA (image)[image_data_len] = '\0';

  /* Find GStreamer media type, can't trust declared type */
  caps = gst_type_find_helper_for_buffer (NULL, image, NULL);

  if (caps == NULL)
    goto no_type;

  GST_DEBUG ("Found GStreamer media type: %" GST_PTR_FORMAT, caps);

  /* sanity check: make sure typefound/declared caps are either URI or image */
  name = gst_structure_get_name (gst_caps_get_structure (caps, 0));

  if (!g_str_has_prefix (name, "image/") &&
      !g_str_has_prefix (name, "video/") &&
      !g_str_equal (name, "text/uri-list")) {
    GST_DEBUG ("Unexpected image type '%s', ignoring image frame", name);
    goto error;
  }

  /* If this is not a URI list, shrink the buffer back by the byte we
   * added for the NUL terminator, restoring the original image size */
  if (!g_str_equal (name, "text/uri-list"))
    GST_BUFFER_SIZE (image) = image_data_len;

  if (image_type != GST_TAG_IMAGE_TYPE_NONE) {
    GST_LOG ("Setting image type: %d", image_type);
    caps = gst_caps_make_writable (caps);
    gst_caps_set_simple (caps, "image-type", GST_TYPE_TAG_IMAGE_TYPE,
        image_type, NULL);
  }

  gst_buffer_set_caps (image, caps);
  gst_caps_unref (caps);
  return image;

/* ERRORS */
no_type:
  {
    GST_DEBUG ("Could not determine GStreamer media type, ignoring image");
    /* fall through */
  }
error:
  {
    if (image)
      gst_buffer_unref (image);
    if (caps)
      gst_caps_unref (caps);
    return NULL;
  }
}
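Typical caller-side use in a 0.10-era tag reader (a sketch; tags, data and len are assumed to come from the surrounding demuxer code):

GstBuffer *image = gst_tag_image_data_to_image_buffer (data, len,
    GST_TAG_IMAGE_TYPE_FRONT_COVER);

if (image != NULL) {
  gst_tag_list_add (tags, GST_TAG_MERGE_APPEND, GST_TAG_IMAGE, image, NULL);
  gst_buffer_unref (image);
}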
Example #23
GstFlowReturn
gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame)
{
  GstBaseVideoDecoderClass *base_video_decoder_class;

  GST_DEBUG ("finish frame");

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG ("finish frame sync=%d pts=%" GST_TIME_FORMAT, frame->is_sync_point,
      GST_TIME_ARGS (frame->presentation_timestamp));

  if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) {
    if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) {
      GST_DEBUG ("sync timestamp %" GST_TIME_FORMAT " diff %" GST_TIME_FORMAT,
          GST_TIME_ARGS (frame->presentation_timestamp),
          GST_TIME_ARGS (frame->presentation_timestamp -
              base_video_decoder->segment.start));
      base_video_decoder->timestamp_offset = frame->presentation_timestamp;
      base_video_decoder->field_index = 0;
    } else {
      /* This case is for one initial timestamp and no others, e.g.,
       * filesrc ! decoder ! xvimagesink */
      GST_WARNING ("sync timestamp didn't change, ignoring");
      frame->presentation_timestamp = GST_CLOCK_TIME_NONE;
    }
  } else {
    if (frame->is_sync_point) {
      GST_WARNING ("sync point doesn't have timestamp");
      if (!GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) {
        GST_WARNING
            ("No base timestamp.  Assuming frames start at segment start");
        base_video_decoder->timestamp_offset =
            base_video_decoder->segment.start;
        base_video_decoder->field_index = 0;
      }
    }
  }
  frame->field_index = base_video_decoder->field_index;
  base_video_decoder->field_index += frame->n_fields;

  if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) {
    frame->presentation_timestamp =
        gst_base_video_decoder_get_field_timestamp (base_video_decoder,
        frame->field_index);
    frame->presentation_duration = GST_CLOCK_TIME_NONE;
    frame->decode_timestamp =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->decode_frame_number);
  }
  if (frame->presentation_duration == GST_CLOCK_TIME_NONE) {
    frame->presentation_duration =
        gst_base_video_decoder_get_field_duration (base_video_decoder,
        frame->n_fields);
  }

  base_video_decoder->last_timestamp = frame->presentation_timestamp;

  GST_DEBUG ("skipping frame %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->presentation_timestamp));

  base_video_decoder->frames =
      g_list_remove (base_video_decoder->frames, frame);

  gst_base_video_decoder_free_frame (frame);

  return GST_FLOW_OK;
}
Example #24

/**
 * gst_amr_parse_handle_frame:
 * @parse: #GstBaseParse.
 * @frame: #GstBaseParseFrame holding the buffer to be checked.
 * @skipsize: Output variable which tells how much data needs to be skipped
 *            until a frame header is found.
 *
 * Implementation of the "handle_frame" vmethod in #GstBaseParse class.
 *
 * Returns: a #GstFlowReturn; GST_FLOW_OK when the data contained a valid
 * frame that could be handled.
 */
static GstFlowReturn
gst_amr_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize)
{
  GstBuffer *buffer;
  GstMapInfo map;
  gint fsize = 0, mode, dsize;
  GstAmrParse *amrparse;
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean found = FALSE;

  amrparse = GST_AMR_PARSE (parse);
  buffer = frame->buffer;

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  dsize = map.size;

  GST_LOG ("buffer: %d bytes", dsize);

  if (amrparse->need_header) {
    if (dsize >= AMR_MIME_HEADER_SIZE &&
        gst_amr_parse_parse_header (amrparse, map.data, skipsize)) {
      amrparse->need_header = FALSE;
      gst_base_parse_set_frame_rate (GST_BASE_PARSE (amrparse), 50, 1, 2, 2);
    } else {
      GST_WARNING ("media doesn't look like a AMR format");
    }
    /* We return FALSE, so this frame won't get pushed forward. Instead,
       the "skip" value is set, so next time we will receive a valid frame. */
    goto done;
  }

  *skipsize = 1;
  /* Does this look like a possible frame header candidate? */
  if ((map.data[0] & 0x83) == 0) {
    /* Yep. Retrieve the frame size */
    mode = (map.data[0] >> 3) & 0x0F;
    fsize = amrparse->block_size[mode] + 1;     /* +1 for the header byte */

    /* We recognize this data as a valid frame when:
     *     - We are in sync. There is no need for extra checks then
     *     - We are in EOS. There might not be enough data to check next frame
     *     - Sync is lost, but the following data after this frame seem
     *       to contain a valid header as well (and there is enough data to
     *       perform this check)
     */
    if (fsize) {
      *skipsize = 0;
      /* in sync, no further check */
      if (!GST_BASE_PARSE_LOST_SYNC (parse)) {
        found = TRUE;
      } else if (dsize > fsize) {
        /* enough data, check for next sync */
        if ((map.data[fsize] & 0x83) == 0)
          found = TRUE;
      } else if (GST_BASE_PARSE_DRAINING (parse)) {
        /* not enough, but draining, so ok */
        found = TRUE;
      }
    }
  }

done:
  gst_buffer_unmap (buffer, &map);

  /* the excerpt was cut short here; a minimal completion so that the
   * "goto done" above has a target: push the frame out once a valid
   * header was found */
  if (found)
    ret = gst_base_parse_finish_frame (parse, frame, fsize);

  return ret;
}
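As a worked example of the header test above: in AMR storage format the TOC byte is laid out as P (follow flag), FT (4-bit frame type), Q (quality bit) and two padding bits, so (map.data[0] & 0x83) == 0 checks that the follow flag and both padding bits are zero. For AMR-NB mode 7 (12.2 kbit/s) the payload is 31 bytes, so fsize = block_size[7] + 1 = 32 bytes including the TOC byte.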
Example #25
static GstFlowReturn
gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *klass;
  GstFlowReturn ret;

  GST_DEBUG ("chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

#if 0
  /* requiring the pad to be negotiated makes it impossible to use
   * oggdemux or filesrc ! decoder */
  if (!gst_pad_is_negotiated (pad)) {
    GST_DEBUG ("not negotiated");
    return GST_FLOW_NOT_NEGOTIATED;
  }
#endif

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  klass = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG_OBJECT (base_video_decoder, "chain");

  if (!base_video_decoder->have_segment) {
    GstEvent *event;
    gboolean ret;

    GST_WARNING
        ("Received buffer without a new-segment. Assuming timestamps start from 0.");

    gst_segment_set_newsegment_full (&base_video_decoder->segment,
        FALSE, 1.0, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0);
    base_video_decoder->have_segment = TRUE;

    event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
        GST_CLOCK_TIME_NONE, 0);

    ret =
        gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
        event);
    if (!ret) {
      GST_ERROR ("new segment event ret=%d", ret);
      return GST_FLOW_ERROR;
    }
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer");
    gst_base_video_decoder_reset (base_video_decoder);
  }

  if (!base_video_decoder->started) {
    klass->start (base_video_decoder);
    base_video_decoder->started = TRUE;
  }

  if (base_video_decoder->current_frame == NULL) {
    base_video_decoder->current_frame =
        gst_base_video_decoder_new_frame (base_video_decoder);
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    gst_base_video_decoder_add_timestamp (base_video_decoder, buf);
  }
  base_video_decoder->input_offset += GST_BUFFER_SIZE (buf);

#if 0
  if (base_video_decoder->timestamp_offset == GST_CLOCK_TIME_NONE &&
      GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
    GST_DEBUG ("got new offset %" GST_TIME_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
    base_video_decoder->timestamp_offset = GST_BUFFER_TIMESTAMP (buf);
  }
#endif

  if (base_video_decoder->packetized) {
    base_video_decoder->current_frame->sink_buffer = buf;

    ret = gst_base_video_decoder_have_frame_2 (base_video_decoder);
  } else {

    gst_adapter_push (base_video_decoder->input_adapter, buf);

    if (!base_video_decoder->have_sync) {
      int n, m;

      GST_DEBUG ("no sync, scanning");

      n = gst_adapter_available (base_video_decoder->input_adapter);
      m = klass->scan_for_sync (base_video_decoder, FALSE, 0, n);
      if (m == -1) {
        gst_object_unref (base_video_decoder);
        return GST_FLOW_OK;
      }

      if (m < 0) {
        g_warning ("subclass returned negative scan %d", m);
      }

      if (m >= n) {
        GST_ERROR ("subclass scanned past end %d >= %d", m, n);
      }

      gst_adapter_flush (base_video_decoder->input_adapter, m);

      if (m < n) {
        GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n);

        /* this is only "maybe" sync */
        base_video_decoder->have_sync = TRUE;
      }

      if (!base_video_decoder->have_sync) {
        gst_object_unref (base_video_decoder);
        return GST_FLOW_OK;
      }
    }

    do {
      ret = klass->parse_data (base_video_decoder, FALSE);
    } while (ret == GST_FLOW_OK);

    if (ret == GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA) {
      gst_object_unref (base_video_decoder);
      return GST_FLOW_OK;
    }
  }

  gst_object_unref (base_video_decoder);
  return ret;
}
Example #26
/*
 * gst_debug_bin_to_dot_file:
 * @bin: the top-level pipeline that should be analyzed
 * @file_name: output base filename (e.g. "myplayer")
 *
 * To aid debugging applications one can use this method to write out the whole
 * network of GStreamer elements that form the pipeline into a dot file.
 * This file can be processed with graphviz to get an image.
 * <informalexample><programlisting>
 *  dot -Tpng -oimage.png graph_lowlevel.dot
 * </programlisting></informalexample>
 */
void
gst_debug_bin_to_dot_file (GstBin * bin, GstDebugGraphDetails details,
    const gchar * file_name)
{
  gchar *full_file_name = NULL;
  FILE *out;

  g_return_if_fail (GST_IS_BIN (bin));

  if (G_LIKELY (priv_gst_dump_dot_dir == NULL))
    return;

  if (!file_name) {
    file_name = g_get_application_name ();
    if (!file_name)
      file_name = "unnamed";
  }

  full_file_name = g_strdup_printf ("%s" G_DIR_SEPARATOR_S "%s.dot",
      priv_gst_dump_dot_dir, file_name);

  if ((out = fopen (full_file_name, "wb"))) {
    gchar *state_name = NULL;
    gchar *param_name = NULL;

    if (details & GST_DEBUG_GRAPH_SHOW_STATES) {
      state_name = debug_dump_get_element_state (GST_ELEMENT (bin));
    }
    if (details & GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS) {
      param_name = debug_dump_get_element_params (GST_ELEMENT (bin));
    }

    /* write header */
    fprintf (out,
        "digraph pipeline {\n"
        "  rankdir=LR;\n"
        "  fontname=\"sans\";\n"
        "  fontsize=\"10\";\n"
        "  labelloc=t;\n"
        "  nodesep=.1;\n"
        "  ranksep=.2;\n"
        "  label=\"<%s>\\n%s%s%s\";\n"
        "  node [style=filled, shape=box, fontsize=\"9\", fontname=\"sans\", margin=\"0.0,0.0\"];\n"
        "  edge [labelfontsize=\"6\", fontsize=\"9\", fontname=\"monospace\"];\n"
        "\n", G_OBJECT_TYPE_NAME (bin), GST_OBJECT_NAME (bin),
        (state_name ? state_name : ""), (param_name ? param_name : "")
        );
    if (state_name)
      g_free (state_name);
    if (param_name)
      g_free (param_name);

    debug_dump_element (bin, details, out, 1);

    /* write footer */
    fprintf (out, "}\n");
    fclose (out);
    GST_INFO ("wrote bin graph to : '%s'", full_file_name);
  } else {
    GST_WARNING ("Failed to open file '%s' for writing: %s", full_file_name,
        g_strerror (errno));
  }
  g_free (full_file_name);
}
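Applications normally go through the convenience macro instead of calling this directly, and dumps only happen when GST_DEBUG_DUMP_DOT_DIR is set in the environment:

/* e.g. after the pipeline reached PLAYING */
GST_DEBUG_BIN_TO_DOT_FILE (GST_BIN (pipeline),
    GST_DEBUG_GRAPH_SHOW_ALL, "myplayer");

The resulting myplayer.dot can then be rendered with, for example, dot -Tpng -o myplayer.png myplayer.dot.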
Example #27
GstFlowReturn
gst_euresys_fill (GstPushSrc * src, GstBuffer * buf)
{
  GstEuresys *euresys = GST_EURESYS (src);
  MCSTATUS status = 0;
  MCSIGNALINFO siginfo;
  MCHANDLE hSurface;
  int *pImage;
  INT32 timeCode;
  INT64 timeStamp;
  int newsize;
  int dropped_frame_count;
  GstMapInfo minfo;

  /* Start acquisition */
  if (!euresys->acq_started) {
    status =
        McSetParamInt (euresys->hChannel, MC_ChannelState,
        MC_ChannelState_ACTIVE);
    if (status != MC_OK) {
      GST_ELEMENT_ERROR (euresys, RESOURCE, FAILED,
          (("Failed to set channel state to ACTIVE.")), (NULL));
      return GST_FLOW_ERROR;
    }
    euresys->acq_started = TRUE;
  }

  /* Wait for next surface (frame) */
  while (TRUE) {
    /* Wait up to 5000 msecs for a signal */
    status = McWaitSignal (euresys->hChannel, MC_SIG_ANY, 5000, &siginfo);
    if (status == MC_TIMEOUT) {
      GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
          (("Timeout waiting for signal.")), (("Timeout waiting for signal.")));
      return GST_FLOW_ERROR;
    } else if (siginfo.Signal == MC_SIG_ACQUISITION_FAILURE) {
      GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
          (("Acquisition failure due to timeout.")), (NULL));
      return GST_FLOW_ERROR;
    } else if (siginfo.Signal == MC_SIG_SURFACE_PROCESSING) {
      break;
    } else {
      continue;
    }
  }

  /* Get pointer to image data and other info */
  hSurface = (MCHANDLE) siginfo.SignalInfo;
  /* "number of bytes actually written into the surface" */
  status = McGetParamInt (hSurface, MC_FillCount, &newsize);
  /* "Internal numbering of surface during acquisition sequence" (zero-based) */
  status |= McGetParamInt (hSurface, MC_TimeCode, &timeCode);
  /* "number of microseconds elapsed since midnight (00:00:00), 
   * January 1, 1970, coordinated universal time (UTC), according
   * to the system clock when the surface is filled" */
  status |= McGetParamInt64 (hSurface, MC_TimeStamp_us, &timeStamp);
  status |= McGetParamPtr (hSurface, MC_SurfaceAddr, (PVOID *) & pImage);
  if (G_UNLIKELY (status != MC_OK)) {
    GST_ELEMENT_ERROR (euresys, RESOURCE, FAILED,
        (("Failed to read surface parameter.")), (NULL));
    return GST_FLOW_ERROR;
  }

  GST_INFO ("Got surface #%05d", timeCode);

  dropped_frame_count = timeCode - (euresys->last_time_code + 1);
  if (dropped_frame_count != 0) {
    euresys->dropped_frame_count += dropped_frame_count;
    GST_WARNING ("Dropped %d frames (%d total)", dropped_frame_count,
        euresys->dropped_frame_count);
    /* TODO: emit message here about dropped frames */
  }
  euresys->last_time_code = timeCode;

  /* Copy image to buffer from surface */
  gst_buffer_map (buf, &minfo, GST_MAP_WRITE);
  /* TODO: fix strides? */
  g_assert (minfo.size == newsize);
  memcpy (minfo.data, pImage, newsize);
  gst_buffer_unmap (buf, &minfo);

  /* TODO: set buffer timestamp based on MC_TimeStamp_us */
  GST_BUFFER_TIMESTAMP (buf) =
      gst_clock_get_time (GST_ELEMENT_CLOCK (src)) -
      GST_ELEMENT_CAST (src)->base_time;

  /* Done processing surface, release control */
  McSetParamInt (hSurface, MC_SurfaceState, MC_SurfaceState_FREE);

  return GST_FLOW_OK;
}
Example #28

/* This function checks that it is actually really possible to create an image
   using XShm */
gboolean
ximageutil_check_xshm_calls (GstXContext * xcontext)
{
  XImage *ximage;
  XShmSegmentInfo SHMInfo;
  size_t size;
  int (*handler) (Display *, XErrorEvent *);
  gboolean result = FALSE;
  gboolean did_attach = FALSE;

  g_return_val_if_fail (xcontext != NULL, FALSE);

  /* Sync to ensure any older errors are already processed */
  XSync (xcontext->disp, FALSE);

  /* Set defaults so we don't free these later unnecessarily */
  SHMInfo.shmaddr = ((void *) -1);
  SHMInfo.shmid = -1;

  /* Setting an error handler to catch failure */
  error_caught = FALSE;
  handler = XSetErrorHandler (ximageutil_handle_xerror);

  /* Trying to create a 1x1 ximage */
  GST_DEBUG ("XShmCreateImage of 1x1");

  ximage = XShmCreateImage (xcontext->disp, xcontext->visual,
      xcontext->depth, ZPixmap, NULL, &SHMInfo, 1, 1);

  /* Might cause an error, sync to ensure it is noticed */
  XSync (xcontext->disp, FALSE);
  if (!ximage || error_caught) {
    GST_WARNING ("could not XShmCreateImage a 1x1 image");
    goto beach;
  }
  size = ximage->height * ximage->bytes_per_line;

  SHMInfo.shmid = shmget (IPC_PRIVATE, size, IPC_CREAT | 0777);
  if (SHMInfo.shmid == -1) {
    GST_WARNING ("could not get shared memory of %" G_GSIZE_FORMAT " bytes",
        size);
    goto beach;
  }

  SHMInfo.shmaddr = shmat (SHMInfo.shmid, 0, 0);
  if (SHMInfo.shmaddr == ((void *) -1)) {
    GST_WARNING ("Failed to shmat: %s", g_strerror (errno));
    goto beach;
  }

  /* Delete the SHM segment. It will actually go away automatically
   * when we detach now */
  shmctl (SHMInfo.shmid, IPC_RMID, 0);

  ximage->data = SHMInfo.shmaddr;
  SHMInfo.readOnly = FALSE;

  if (XShmAttach (xcontext->disp, &SHMInfo) == 0) {
    GST_WARNING ("Failed to XShmAttach");
    goto beach;
  }

  /* Sync to ensure we see any errors we caused */
  XSync (xcontext->disp, FALSE);

  if (!error_caught) {
    did_attach = TRUE;
    /* store whether we succeeded in result */
    result = TRUE;
  }
beach:
  /* Sync to ensure we swallow any errors we caused and reset error_caught */
  XSync (xcontext->disp, FALSE);
  error_caught = FALSE;
  XSetErrorHandler (handler);

  if (did_attach) {
    XShmDetach (xcontext->disp, &SHMInfo);
    XSync (xcontext->disp, FALSE);
  }
  if (SHMInfo.shmaddr != ((void *) -1))
    shmdt (SHMInfo.shmaddr);
  if (ximage)
    XDestroyImage (ximage);
  return result;
}
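error_caught and ximageutil_handle_xerror are defined elsewhere in this file; a plausible sketch of that handler, kept to the minimum the code above relies on (an assumption):

static gboolean error_caught = FALSE;

static int
ximageutil_handle_xerror (Display * display, XErrorEvent * xevent)
{
  /* only record that an X error happened; callers inspect the flag */
  error_caught = TRUE;
  return 0;
}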
Example #29
static gboolean
rfb_decoder_state_wait_for_security (RfbDecoder * decoder)
{
  guint8 *buffer = NULL;

  /*
   * Version 3.3: the server decides the security type and sends a single word.
   *
   * The security-type may only take the value 0, 1 or 2. A value of 0 means that the
   * connection has failed and is followed by a string giving the reason, as described
   * above.
   */
  if (IS_VERSION_3_3 (decoder)) {
    buffer = rfb_decoder_read (decoder, 4);

    decoder->security_type = RFB_GET_UINT32 (buffer);
    GST_DEBUG ("security = %d", decoder->security_type);

    g_return_val_if_fail (decoder->security_type < 3, FALSE);
    g_return_val_if_fail (decoder->security_type != SECURITY_FAIL,
        rfb_decoder_state_reason (decoder));
    g_free (buffer);
    buffer = NULL;
  } else {
    /* \TODO Add behavior for the RFB 3.7 and 3.8 servers */
    GST_WARNING ("Other versions are not yet supported");
  }

  switch (decoder->security_type) {
    case SECURITY_NONE:
      GST_DEBUG ("Security type is None");
      if (IS_VERSION_3_8 (decoder)) {
        decoder->state = rfb_decoder_state_security_result;
      } else {
        decoder->state = rfb_decoder_state_send_client_initialisation;
      }
      break;
    case SECURITY_VNC:
      /*
       * VNC authentication is to be used and protocol data is to be sent unencrypted. The
       * server sends a random 16-byte challenge
       */
      GST_DEBUG ("Security type is VNC Authentication");
      /* VNC Authentication can't be used if the password is not set */
      if (!decoder->password) {
        GST_WARNING
            ("VNC Authentication can't be used if the password is not set");
        return FALSE;
      }

      buffer = rfb_decoder_read (decoder, 16);
      vncEncryptBytes ((unsigned char *) buffer, decoder->password);
      rfb_decoder_send (decoder, buffer, 16);
      g_free (buffer);

      GST_DEBUG ("Encrypted challenge send to server");

      decoder->state = rfb_decoder_state_security_result;
      break;
    default:
      GST_WARNING ("Security type is not known");
      return FALSE;
  }
  return TRUE;
}
Example #30
/**
 * rdt_jitter_buffer_insert:
 * @jbuf: an #RDTJitterBuffer
 * @buf: a buffer
 * @time: a running_time when this buffer was received in nanoseconds
 * @clock_rate: the clock-rate of the payload of @buf
 * @tail: TRUE when the tail element changed.
 *
 * Inserts @buf into the packet queue of @jbuf. The sequence number of the
 * packet will be used to sort the packets. This function takes ownership of
 * @buf when the function returns %TRUE.
 * @buf should have writable metadata when calling this function.
 *
 * Returns: %FALSE if a packet with the same number already existed.
 */
gboolean
rdt_jitter_buffer_insert (RDTJitterBuffer * jbuf, GstBuffer * buf,
    GstClockTime time, guint32 clock_rate, gboolean * tail)
{
  GList *list;
  guint32 rtptime;
  guint16 seqnum;
  GstRDTPacket packet;
  gboolean more;

  g_return_val_if_fail (jbuf != NULL, FALSE);
  g_return_val_if_fail (buf != NULL, FALSE);

  more = gst_rdt_buffer_get_first_packet (buf, &packet);
  /* programmer error */
  g_return_val_if_fail (more == TRUE, FALSE);

  seqnum = gst_rdt_packet_data_get_seq (&packet);
  rtptime = gst_rdt_packet_data_get_timestamp (&packet);

  /* loop the list to skip strictly smaller seqnum buffers */
  for (list = jbuf->packets->head; list; list = g_list_next (list)) {
    guint16 qseq;
    gint gap;

    more =
        gst_rdt_buffer_get_first_packet (GST_BUFFER_CAST (list->data), &packet);
    /* programmer error */
    g_return_val_if_fail (more == TRUE, FALSE);

    qseq = gst_rdt_packet_data_get_seq (&packet);

    /* compare the new seqnum to the one in the buffer */
    gap = gst_rdt_buffer_compare_seqnum (seqnum, qseq);

    /* we hit a packet with the same seqnum, notify a duplicate */
    if (G_UNLIKELY (gap == 0))
      goto duplicate;

    /* seqnum > qseq, we can stop looking */
    if (G_LIKELY (gap < 0))
      break;
  }


  /* do skew calculation by measuring the difference between rtptime and the
   * receive time; this will retimestamp @buf with the skew-corrected
   * running time */
  if (clock_rate) {
    time = calculate_skew (jbuf, rtptime, time, clock_rate);
    GST_BUFFER_TIMESTAMP (buf) = time;
  }

  if (list)
    g_queue_insert_before (jbuf->packets, list, buf);
  else
    g_queue_push_tail (jbuf->packets, buf);

  /* tail was changed when we did not find a previous packet, we set the return
   * flag when requested. */
  if (tail)
    *tail = (list == NULL);

  return TRUE;

  /* ERRORS */
duplicate:
  {
    GST_WARNING ("duplicate packet %d found", (gint) seqnum);
    return FALSE;
  }
}