Example No. 1
static gboolean
gst_hls_sink_create_elements (GstHlsSink * sink)
{
  GstPad *pad = NULL;

  GST_DEBUG_OBJECT (sink, "Creating internal elements");

  if (sink->elements_created)
    return TRUE;

  sink->multifilesink = gst_element_factory_make ("multifilesink", NULL);
  if (sink->multifilesink == NULL)
    goto missing_element;

  g_object_set (sink->multifilesink, "location", sink->location,
      "next-file", 3, "post-messages", TRUE, "max-files", sink->max_files,
      NULL);

  gst_bin_add (GST_BIN_CAST (sink), sink->multifilesink);

  pad = gst_element_get_static_pad (sink->multifilesink, "sink");
  gst_ghost_pad_set_target (GST_GHOST_PAD (sink->ghostpad), pad);
  gst_object_unref (pad);

  sink->elements_created = TRUE;
  return TRUE;

missing_element:
  gst_element_post_message (GST_ELEMENT_CAST (sink),
      gst_missing_element_message_new (GST_ELEMENT_CAST (sink),
          "multifilesink"));
  GST_ELEMENT_ERROR (sink, CORE, MISSING_PLUGIN,
      (("Missing element '%s' - check your GStreamer installation."),
          "multifilesink"), (NULL));
  return FALSE;
}
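
The missing-element message posted above can be picked up by an application-side bus watch. A minimal sketch, assuming a watch is installed on the enclosing pipeline's bus and that the pbutils helpers from gst-plugins-base are available; the callback name is illustrative:

#include <gst/pbutils/missing-plugins.h>

static gboolean
on_bus_message (GstBus * bus, GstMessage * msg, gpointer user_data)
{
  if (gst_is_missing_plugin_message (msg)) {
    /* turn the element message into a human-readable description */
    gchar *desc = gst_missing_plugin_message_get_description (msg);

    g_printerr ("Missing plugin: %s\n", desc);
    g_free (desc);
  }
  return TRUE;                  /* keep the bus watch alive */
}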
Example No. 2
static void
gst_compare_meta (GstCompare * comp, GstBuffer * buf1, GstCaps * caps1,
    GstBuffer * buf2, GstCaps * caps2)
{
  gint flags = 0;

  if (comp->meta & GST_BUFFER_COPY_FLAGS) {
    if (GST_BUFFER_FLAGS (buf1) != GST_BUFFER_FLAGS (buf2)) {
      flags |= GST_BUFFER_COPY_FLAGS;
      GST_DEBUG_OBJECT (comp, "flags %d != flags %d", GST_BUFFER_FLAGS (buf1),
          GST_BUFFER_FLAGS (buf2));
    }
  }
  if (comp->meta & GST_BUFFER_COPY_TIMESTAMPS) {
    if (GST_BUFFER_TIMESTAMP (buf1) != GST_BUFFER_TIMESTAMP (buf2)) {
      flags |= GST_BUFFER_COPY_TIMESTAMPS;
      GST_DEBUG_OBJECT (comp,
          "ts %" GST_TIME_FORMAT " != ts %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf1)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf2)));
    }
    if (GST_BUFFER_DURATION (buf1) != GST_BUFFER_DURATION (buf2)) {
      flags |= GST_BUFFER_COPY_TIMESTAMPS;
      GST_DEBUG_OBJECT (comp,
          "dur %" GST_TIME_FORMAT " != dur %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_DURATION (buf1)),
          GST_TIME_ARGS (GST_BUFFER_DURATION (buf2)));
    }
    if (comp->offset_ts) {
      if (GST_BUFFER_OFFSET (buf1) != GST_BUFFER_OFFSET (buf2)) {
        flags |= GST_BUFFER_COPY_TIMESTAMPS;
        GST_DEBUG_OBJECT (comp,
            "offset %" G_GINT64_FORMAT " != offset %" G_GINT64_FORMAT,
            GST_BUFFER_OFFSET (buf1), GST_BUFFER_OFFSET (buf2));
      }
      if (GST_BUFFER_OFFSET_END (buf1) != GST_BUFFER_OFFSET_END (buf2)) {
        flags |= GST_BUFFER_COPY_TIMESTAMPS;
        GST_DEBUG_OBJECT (comp,
            "offset_end %" G_GINT64_FORMAT " != offset_end %" G_GINT64_FORMAT,
            GST_BUFFER_OFFSET_END (buf1), GST_BUFFER_OFFSET_END (buf2));
      }
    }
  }
#if 0
  /* FIXME ?? */
  if (comp->meta & GST_BUFFER_COPY_CAPS) {
    if (!gst_caps_is_equal (caps1, caps2)) {
      flags |= GST_BUFFER_COPY_CAPS;
      GST_DEBUG_OBJECT (comp,
          "caps %" GST_PTR_FORMAT " != caps %" GST_PTR_FORMAT, caps1, caps2);
    }
  }
#endif

  /* signal mismatch by debug and message */
  if (flags) {
    GST_WARNING_OBJECT (comp, "buffers %p and %p failed metadata match %d",
        buf1, buf2, flags);

    gst_element_post_message (GST_ELEMENT (comp),
        gst_message_new_element (GST_OBJECT (comp),
            gst_structure_new ("delta", "meta", G_TYPE_INT, flags, NULL)));
  }
}
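
A minimal sketch of the receiving side, assuming a bus watch on the enclosing pipeline; the structure name "delta" and the integer field "meta" are taken from the gst_structure_new() call above, everything else is illustrative:

static gboolean
on_bus_message (GstBus * bus, GstMessage * msg, gpointer user_data)
{
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (msg);

    if (s != NULL && gst_structure_has_name (s, "delta")) {
      gint flags = 0;

      /* "meta" carries the GstBufferCopyFlags that failed to match */
      gst_structure_get_int (s, "meta", &flags);
      g_print ("buffer metadata mismatch, flags 0x%x\n", flags);
    }
  }
  return TRUE;
}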
Example No. 3
/* This function is used to perform seeks on the element in
 * pull mode.
 *
 * It also works when event is NULL, in which case it will just
 * start from the last configured segment. This technique is
 * used when activating the element and to perform the seek in
 * READY.
 */
static gboolean
gst_aiff_parse_perform_seek (GstAiffParse * aiff, GstEvent * event)
{
  gboolean res;
  gdouble rate;
  GstFormat format, bformat;
  GstSeekFlags flags;
  GstSeekType cur_type = GST_SEEK_TYPE_NONE, stop_type;
  gint64 cur, stop, upstream_size;
  gboolean flush;
  gboolean update;
  GstSegment seeksegment = { 0, };
  gint64 last_stop;

  if (event) {
    GST_DEBUG_OBJECT (aiff, "doing seek with event");

    gst_event_parse_seek (event, &rate, &format, &flags,
        &cur_type, &cur, &stop_type, &stop);

    /* no negative rates yet */
    if (rate < 0.0)
      goto negative_rate;

    if (format != aiff->segment.format) {
      GST_INFO_OBJECT (aiff, "converting seek-event from %s to %s",
          gst_format_get_name (format),
          gst_format_get_name (aiff->segment.format));
      res = TRUE;
      if (cur_type != GST_SEEK_TYPE_NONE)
        res =
            gst_pad_query_convert (aiff->srcpad, format, cur,
            &aiff->segment.format, &cur);
      if (res && stop_type != GST_SEEK_TYPE_NONE)
        res =
            gst_pad_query_convert (aiff->srcpad, format, stop,
            &aiff->segment.format, &stop);
      if (!res)
        goto no_format;

      format = aiff->segment.format;
    }
  } else {
    GST_DEBUG_OBJECT (aiff, "doing seek without event");
    flags = 0;
    rate = 1.0;
    cur_type = GST_SEEK_TYPE_SET;
    stop_type = GST_SEEK_TYPE_SET;
  }

  /* get flush flag */
  flush = flags & GST_SEEK_FLAG_FLUSH;

  /* now we need to make sure the streaming thread is stopped. For a flushing
   * seek we send a FLUSH_START event downstream, which will cause the
   * streaming thread to stop with a WRONG_STATE.
   * For a non-flushing seek we simply pause the task, which will happen as soon
   * as it completes one iteration (and thus might block when the sink is
   * blocking in preroll). */
  if (flush) {
    GST_DEBUG_OBJECT (aiff, "sending flush start");
    gst_pad_push_event (aiff->srcpad, gst_event_new_flush_start ());
  } else {
    gst_pad_pause_task (aiff->sinkpad);
  }

  /* we should now be able to grab the streaming thread because we stopped it
   * with the above flush/pause code */
  GST_PAD_STREAM_LOCK (aiff->sinkpad);

  /* save current position */
  last_stop = aiff->segment.last_stop;

  GST_DEBUG_OBJECT (aiff, "stopped streaming at %" G_GINT64_FORMAT, last_stop);

  /* copy segment, we need this because we still need the old
   * segment when we close the current segment. */
  memcpy (&seeksegment, &aiff->segment, sizeof (GstSegment));

  /* configure the seek parameters in the seeksegment. We will then have the
   * right values in the segment to perform the seek */
  if (event) {
    GST_DEBUG_OBJECT (aiff, "configuring seek");
    gst_segment_set_seek (&seeksegment, rate, format, flags,
        cur_type, cur, stop_type, stop, &update);
  }

  /* figure out the last position we need to play. If it's configured (stop !=
   * -1), use that, else we play until the total duration of the file */
  if ((stop = seeksegment.stop) == -1)
    stop = seeksegment.duration;

  GST_DEBUG_OBJECT (aiff, "cur_type =%d", cur_type);
  if ((cur_type != GST_SEEK_TYPE_NONE)) {
    /* bring offset to bytes, if the bps is 0, we have the segment in BYTES and
     * we can just copy the last_stop. If not, we use the bps to convert TIME to
     * bytes. */
    if (aiff->bps > 0)
      aiff->offset =
          uint64_ceiling_scale (seeksegment.last_stop, (guint64) aiff->bps,
          GST_SECOND);
    else
      aiff->offset = seeksegment.last_stop;
    GST_LOG_OBJECT (aiff, "offset=%" G_GUINT64_FORMAT, aiff->offset);
    aiff->offset -= (aiff->offset % aiff->bytes_per_sample);
    GST_LOG_OBJECT (aiff, "offset=%" G_GUINT64_FORMAT, aiff->offset);
    aiff->offset += aiff->datastart;
    GST_LOG_OBJECT (aiff, "offset=%" G_GUINT64_FORMAT, aiff->offset);
  } else {
    GST_LOG_OBJECT (aiff, "continue from offset=%" G_GUINT64_FORMAT,
        aiff->offset);
  }

  if (stop_type != GST_SEEK_TYPE_NONE) {
    if (aiff->bps > 0)
      aiff->end_offset =
          uint64_ceiling_scale (stop, (guint64) aiff->bps, GST_SECOND);
    else
      aiff->end_offset = stop;
    GST_LOG_OBJECT (aiff, "end_offset=%" G_GUINT64_FORMAT, aiff->end_offset);
    aiff->end_offset -= (aiff->end_offset % aiff->bytes_per_sample);
    GST_LOG_OBJECT (aiff, "end_offset=%" G_GUINT64_FORMAT, aiff->end_offset);
    aiff->end_offset += aiff->datastart;
    GST_LOG_OBJECT (aiff, "end_offset=%" G_GUINT64_FORMAT, aiff->end_offset);
  } else {
    GST_LOG_OBJECT (aiff, "continue to end_offset=%" G_GUINT64_FORMAT,
        aiff->end_offset);
  }

  /* make sure filesize is not exceeded due to rounding errors or so,
   * same precaution as in _stream_headers */
  bformat = GST_FORMAT_BYTES;
  if (gst_pad_query_peer_duration (aiff->sinkpad, &bformat, &upstream_size))
    aiff->end_offset = MIN (aiff->end_offset, upstream_size);

  /* this is the range of bytes we will use for playback */
  aiff->offset = MIN (aiff->offset, aiff->end_offset);
  aiff->dataleft = aiff->end_offset - aiff->offset;

  GST_DEBUG_OBJECT (aiff,
      "seek: rate %lf, offset %" G_GUINT64_FORMAT ", end %" G_GUINT64_FORMAT
      ", segment %" GST_TIME_FORMAT " -- %" GST_TIME_FORMAT, rate, aiff->offset,
      aiff->end_offset, GST_TIME_ARGS (seeksegment.start),
      GST_TIME_ARGS (stop));

  /* prepare for streaming again */
  if (flush) {
    /* if we sent a FLUSH_START, we now send a FLUSH_STOP */
    GST_DEBUG_OBJECT (aiff, "sending flush stop");
    gst_pad_push_event (aiff->srcpad, gst_event_new_flush_stop ());
  } else if (aiff->segment_running) {
    /* we are running the current segment and doing a non-flushing seek,
     * close the segment first based on the previous last_stop. */
    GST_DEBUG_OBJECT (aiff, "closing running segment %" G_GINT64_FORMAT
        " to %" G_GINT64_FORMAT, aiff->segment.accum, aiff->segment.last_stop);

    /* queue the segment for sending in the stream thread */
    if (aiff->close_segment)
      gst_event_unref (aiff->close_segment);
    aiff->close_segment = gst_event_new_new_segment (TRUE,
        aiff->segment.rate, aiff->segment.format,
        aiff->segment.accum, aiff->segment.last_stop, aiff->segment.accum);

    /* keep track of our last_stop */
    seeksegment.accum = aiff->segment.last_stop;
  }

  /* now we did the seek and can activate the new segment values */
  memcpy (&aiff->segment, &seeksegment, sizeof (GstSegment));

  /* if we're doing a segment seek, post a SEGMENT_START message */
  if (aiff->segment.flags & GST_SEEK_FLAG_SEGMENT) {
    gst_element_post_message (GST_ELEMENT_CAST (aiff),
        gst_message_new_segment_start (GST_OBJECT_CAST (aiff),
            aiff->segment.format, aiff->segment.last_stop));
  }

  /* now create the newsegment */
  GST_DEBUG_OBJECT (aiff, "Creating newsegment from %" G_GINT64_FORMAT
      " to %" G_GINT64_FORMAT, aiff->segment.last_stop, stop);

  /* store the newsegment event so it can be sent from the streaming thread. */
  if (aiff->start_segment)
    gst_event_unref (aiff->start_segment);
  aiff->start_segment =
      gst_event_new_new_segment (FALSE, aiff->segment.rate,
      aiff->segment.format, aiff->segment.last_stop, stop,
      aiff->segment.last_stop);

  /* mark discont if we are going to stream from another position. */
  if (last_stop != aiff->segment.last_stop) {
    GST_DEBUG_OBJECT (aiff, "mark DISCONT, we did a seek to another position");
    aiff->discont = TRUE;
  }

  /* and start the streaming task again */
  aiff->segment_running = TRUE;
  if (!aiff->streaming) {
    gst_pad_start_task (aiff->sinkpad, (GstTaskFunction) gst_aiff_parse_loop,
        aiff->sinkpad);
  }

  GST_PAD_STREAM_UNLOCK (aiff->sinkpad);

  return TRUE;

  /* ERRORS */
negative_rate:
  {
    GST_DEBUG_OBJECT (aiff, "negative playback rates are not supported yet.");
    return FALSE;
  }
no_format:
  {
    GST_DEBUG_OBJECT (aiff, "unsupported format given, seek aborted.");
    return FALSE;
  }
}
Example No. 4
static void
gst_musepackdec_loop (GstPad * sinkpad)
{
  GstMusepackDec *musepackdec;
  GstFlowReturn flow;
  GstBuffer *out;
  GstMapInfo info;
  mpc_frame_info frame;
  mpc_status err;
  gint num_samples, samplerate, bitspersample;

  musepackdec = GST_MUSEPACK_DEC (GST_PAD_PARENT (sinkpad));

  samplerate = g_atomic_int_get (&musepackdec->rate);

  if (samplerate == 0) {
    if (!gst_musepack_stream_init (musepackdec))
      goto pause_task;

    gst_musepackdec_send_newsegment (musepackdec);
    samplerate = g_atomic_int_get (&musepackdec->rate);
  }

  bitspersample = g_atomic_int_get (&musepackdec->bps);

  out = gst_buffer_new_allocate (NULL, MPC_DECODER_BUFFER_LENGTH * 4, NULL);

  gst_buffer_map (out, &info, GST_MAP_READWRITE);
  frame.buffer = (MPC_SAMPLE_FORMAT *) info.data;
  err = mpc_demux_decode (musepackdec->d, &frame);
  gst_buffer_unmap (out, &info);

  if (err != MPC_STATUS_OK) {
    GST_ERROR_OBJECT (musepackdec, "Failed to decode sample");
    GST_ELEMENT_ERROR (musepackdec, STREAM, DECODE, (NULL), (NULL));
    goto pause_task;
  } else if (frame.bits == -1) {
    goto eos_and_pause;
  }

  num_samples = frame.samples;

  gst_buffer_set_size (out, num_samples * bitspersample);

  GST_BUFFER_OFFSET (out) = musepackdec->segment.position;
  GST_BUFFER_PTS (out) =
      gst_util_uint64_scale_int (musepackdec->segment.position,
      GST_SECOND, samplerate);
  GST_BUFFER_DURATION (out) =
      gst_util_uint64_scale_int (num_samples, GST_SECOND, samplerate);

  musepackdec->segment.position += num_samples;

  GST_LOG_OBJECT (musepackdec, "Pushing buffer, timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out)));

  flow = gst_pad_push (musepackdec->srcpad, out);
  if (flow != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (musepackdec, "Flow: %s", gst_flow_get_name (flow));
    goto pause_task;
  }

  /* check if we're at the end of a configured segment */
  if (musepackdec->segment.stop != -1 &&
      musepackdec->segment.position >= musepackdec->segment.stop) {
    gint64 stop_time;

    GST_DEBUG_OBJECT (musepackdec, "Reached end of configured segment");

    if ((musepackdec->segment.flags & GST_SEEK_FLAG_SEGMENT) == 0)
      goto eos_and_pause;

    GST_DEBUG_OBJECT (musepackdec, "Posting SEGMENT_DONE message");

    stop_time = gst_util_uint64_scale_int (musepackdec->segment.stop,
        GST_SECOND, samplerate);

    gst_element_post_message (GST_ELEMENT (musepackdec),
        gst_message_new_segment_done (GST_OBJECT (musepackdec),
            GST_FORMAT_TIME, stop_time));
    gst_pad_push_event (musepackdec->srcpad,
        gst_event_new_segment_done (GST_FORMAT_TIME, stop_time));

    goto pause_task;
  }

  return;

eos_and_pause:
  {
    GST_DEBUG_OBJECT (musepackdec, "sending EOS event");
    gst_pad_push_event (musepackdec->srcpad, gst_event_new_eos ());
    /* fall through to pause */
  }

pause_task:
  {
    GST_DEBUG_OBJECT (musepackdec, "Pausing task");
    gst_pad_pause_task (sinkpad);
    return;
  }
}
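
When the SEGMENT_DONE message above reaches the application, a common reaction is to issue another segment seek for gapless looping. A minimal sketch, assuming the callback is connected to the bus "message::segment-done" signal and that pipeline points at the enclosing pipeline; names are illustrative:

static void
on_segment_done (GstBus * bus, GstMessage * msg, GstElement * pipeline)
{
  GstFormat format;
  gint64 position;

  gst_message_parse_segment_done (msg, &format, &position);
  g_print ("segment done at %" G_GINT64_FORMAT "\n", position);

  /* restart the segment without flushing for gapless looping */
  gst_element_seek (pipeline, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_SEGMENT,
      GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
}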
Example No. 5
/* chain function
 * this function does the actual processing
 */
static GstFlowReturn
gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
{

  GstMotioncells *filter;

  filter = gst_motion_cells (GST_OBJECT_PARENT (pad));
  if (filter->calculate_motion) {
    double sensitivity;
    int framerate, gridx, gridy, motionmaskcells_count, motionmaskcoord_count,
        motioncells_count, i;
    int thickness, success, motioncellsidxcnt, numberOfCells,
        motioncellsnumber, cellsOfInterestNumber;
    int mincellsOfInterestNumber, motiondetect;
    char *datafile;
    bool display, changed_datafile, useAlpha;
    gint64 starttime;
    motionmaskcoordrect *motionmaskcoords;
    motioncellidx *motionmaskcellsidx;
    cellscolor motioncellscolor;
    motioncellidx *motioncellsidx;
    g_mutex_lock (filter->propset_mutex);
    buf = gst_buffer_make_writable (buf);
    filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf);
    if (filter->firstframe) {
      setPrevFrame (filter->cvImage, filter->id);
      filter->firstframe = FALSE;
    }

    sensitivity = filter->sensitivity;
    framerate = filter->framerate;
    gridx = filter->gridx;
    gridy = filter->gridy;
    display = filter->display;
    motionmaskcoord_count = filter->motionmaskcoord_count;
    motionmaskcoords =
        g_new0 (motionmaskcoordrect, filter->motionmaskcoord_count);
    for (i = 0; i < filter->motionmaskcoord_count; i++) {       //divide by 2 because a Gaussian pyramid is used on the C++ side
      motionmaskcoords[i].upper_left_x =
          filter->motionmaskcoords[i].upper_left_x / 2;
      motionmaskcoords[i].upper_left_y =
          filter->motionmaskcoords[i].upper_left_y / 2;
      motionmaskcoords[i].lower_right_x =
          filter->motionmaskcoords[i].lower_right_x / 2;
      motionmaskcoords[i].lower_right_y =
          filter->motionmaskcoords[i].lower_right_y / 2;
    }

    motioncellscolor.R_channel_value =
        filter->motioncellscolor->R_channel_value;
    motioncellscolor.G_channel_value =
        filter->motioncellscolor->G_channel_value;
    motioncellscolor.B_channel_value =
        filter->motioncellscolor->B_channel_value;

    if ((filter->changed_gridx || filter->changed_gridy
            || filter->changed_startime)) {
      if ((g_strcmp0 (filter->cur_datafile, NULL) != 0)) {
        GFREE (filter->cur_datafile);
        filter->datafileidx++;
        filter->cur_datafile =
            g_strdup_printf ("%s-%d.%s", filter->basename_datafile,
            filter->datafileidx, filter->datafile_extension);
        filter->changed_datafile = TRUE;
        motion_cells_free_resources (filter->id);
      }
      if (filter->motioncells_count > 0)
        gst_motioncells_update_motion_cells (filter);
      if (filter->motionmaskcells_count > 0)
        gst_motioncells_update_motion_masks (filter);
      filter->changed_gridx = FALSE;
      filter->changed_gridy = FALSE;
      filter->changed_startime = FALSE;
    }
    datafile = g_strdup (filter->cur_datafile);
    filter->cur_buff_timestamp = (GST_BUFFER_TIMESTAMP (buf) / GST_MSECOND);
    filter->starttime +=
        (filter->cur_buff_timestamp - filter->prev_buff_timestamp);
    starttime = filter->starttime;
    if (filter->changed_datafile || filter->diff_timestamp < 0)
      filter->diff_timestamp =
          (gint64) (GST_BUFFER_TIMESTAMP (buf) / GST_MSECOND);
    changed_datafile = filter->changed_datafile;
    motionmaskcells_count = filter->motionmaskcells_count;
    motionmaskcellsidx = g_new0 (motioncellidx, filter->motionmaskcells_count);
    for (i = 0; i < filter->motionmaskcells_count; i++) {
      motionmaskcellsidx[i].lineidx = filter->motionmaskcellsidx[i].lineidx;
      motionmaskcellsidx[i].columnidx = filter->motionmaskcellsidx[i].columnidx;
    }
    motioncells_count = filter->motioncells_count;
    motioncellsidx = g_new0 (motioncellidx, filter->motioncells_count);
    for (i = 0; i < filter->motioncells_count; i++) {
      motioncellsidx[i].lineidx = filter->motioncellsidx[i].lineidx;
      motioncellsidx[i].columnidx = filter->motioncellsidx[i].columnidx;
    }
    useAlpha = filter->usealpha;
    thickness = filter->thickness;
    success =
        perform_detection_motion_cells (filter->cvImage, sensitivity, framerate,
        gridx, gridy,
        (gint64) (GST_BUFFER_TIMESTAMP (buf) / GST_MSECOND) -
        filter->diff_timestamp, display, useAlpha, motionmaskcoord_count,
        motionmaskcoords, motionmaskcells_count, motionmaskcellsidx,
        motioncellscolor, motioncells_count, motioncellsidx, starttime,
        datafile, changed_datafile, thickness, filter->id);
    if ((success == 1) && (filter->sent_init_error_msg == false)) {
      char *initfailedreason;
      int initerrorcode;
      GstStructure *s;
      GstMessage *m;
      initfailedreason = getInitDataFileFailed (filter->id);
      initerrorcode = getInitErrorCode (filter->id);
      s = gst_structure_new ("motion", "init_error_code", G_TYPE_INT,
          initerrorcode, "details", G_TYPE_STRING, initfailedreason, NULL);
      m = gst_message_new_element (GST_OBJECT (filter), s);
      gst_element_post_message (GST_ELEMENT (filter), m);
      filter->sent_init_error_msg = TRUE;
    }
    if ((success == -1) && (filter->sent_save_error_msg == false)) {
      char *savefailedreason;
      int saveerrorcode;
      GstStructure *s;
      GstMessage *m;
      savefailedreason = getSaveDataFileFailed (filter->id);
      saveerrorcode = getSaveErrorCode (filter->id);
      s = gst_structure_new ("motion", "save_error_code", G_TYPE_INT,
          saveerrorcode, "details", G_TYPE_STRING, savefailedreason, NULL);
      m = gst_message_new_element (GST_OBJECT (filter), s);
      gst_element_post_message (GST_ELEMENT (filter), m);
      filter->sent_save_error_msg = TRUE;
    }
    if (success == -2) {        //frame dropped
      filter->prev_buff_timestamp = filter->cur_buff_timestamp;
      //free
      GFREE (datafile);
      GFREE (motionmaskcoords);
      GFREE (motionmaskcellsidx);
      GFREE (motioncellsidx);
      g_mutex_unlock (filter->propset_mutex);
      return gst_pad_push (filter->srcpad, buf);
    }
    filter->changed_datafile = getChangedDataFile (filter->id);
    motioncellsidxcnt = getMotionCellsIdxCnt (filter->id);
    numberOfCells = filter->gridx * filter->gridy;
    motioncellsnumber = motioncellsidxcnt / MSGLEN;
    cellsOfInterestNumber = (filter->motioncells_count > 0) ?   //how many cells are of interest to us
        (filter->motioncells_count) : (numberOfCells);
    mincellsOfInterestNumber =
        floor ((double) cellsOfInterestNumber * filter->threshold);
    motiondetect = (motioncellsnumber >= mincellsOfInterestNumber) ? 1 : 0;
    if ((motioncellsidxcnt > 0) && (motiondetect == 1)) {
      char *detectedmotioncells;
      filter->last_motion_timestamp = GST_BUFFER_TIMESTAMP (buf);
      detectedmotioncells = getMotionCellsIdx (filter->id);
      if (detectedmotioncells) {
        filter->consecutive_motion++;
        if ((filter->previous_motion == false)
            && (filter->consecutive_motion >= filter->minimum_motion_frames)) {
          GstStructure *s;
          GstMessage *m;
          filter->previous_motion = true;
          filter->motion_begin_timestamp = GST_BUFFER_TIMESTAMP (buf);
          s = gst_structure_new ("motion", "motion_cells_indices",
              G_TYPE_STRING, detectedmotioncells, "motion_begin", G_TYPE_UINT64,
              filter->motion_begin_timestamp, NULL);
          m = gst_message_new_element (GST_OBJECT (filter), s);
          gst_element_post_message (GST_ELEMENT (filter), m);
        } else if (filter->postallmotion) {
          GstStructure *s;
          GstMessage *m;
          filter->motion_timestamp = GST_BUFFER_TIMESTAMP (buf);
          s = gst_structure_new ("motion", "motion_cells_indices",
              G_TYPE_STRING, detectedmotioncells, "motion", G_TYPE_UINT64,
              filter->motion_timestamp, NULL);
          m = gst_message_new_element (GST_OBJECT (filter), s);
          gst_element_post_message (GST_ELEMENT (filter), m);
        }
      } else {
        GstStructure *s;
        GstMessage *m;
        s = gst_structure_new ("motion", "motion_cells_indices", G_TYPE_STRING,
            "error", NULL);
        m = gst_message_new_element (GST_OBJECT (filter), s);
        gst_element_post_message (GST_ELEMENT (filter), m);
      }
    } else {
      filter->consecutive_motion = 0;
      if ((((GST_BUFFER_TIMESTAMP (buf) -
                      filter->last_motion_timestamp) / 1000000000l) >=
              filter->gap)
          && (filter->last_motion_timestamp > 0)) {
        GST_DEBUG ("POST MOTION FINISHED MSG\n");
        if (filter->previous_motion) {
          GstStructure *s;
          GstMessage *m;
          filter->previous_motion = false;
          s = gst_structure_new ("motion", "motion_finished", G_TYPE_UINT64,
              filter->last_motion_timestamp, NULL);
          m = gst_message_new_element (GST_OBJECT (filter), s);
          gst_element_post_message (GST_ELEMENT (filter), m);
        }
      }
    }
    if (filter->postnomotion > 0) {
      guint64 last_buf_timestamp = GST_BUFFER_TIMESTAMP (buf) / 1000000000l;
      if ((last_buf_timestamp -
              (filter->last_motion_timestamp / 1000000000l)) >=
          filter->postnomotion) {
        GST_DEBUG ("POST NO MOTION MSG\n");
        if ((last_buf_timestamp -
                (filter->last_nomotion_notified / 1000000000l)) >=
            filter->postnomotion) {
          GstStructure *s;
          GstMessage *m;
          filter->last_nomotion_notified = GST_BUFFER_TIMESTAMP (buf);
          s = gst_structure_new ("motion", "no_motion", G_TYPE_UINT64,
              filter->last_motion_timestamp, NULL);
          m = gst_message_new_element (GST_OBJECT (filter), s);
          gst_element_post_message (GST_ELEMENT (filter), m);
        }
      }
    }
    filter->prev_buff_timestamp = filter->cur_buff_timestamp;
    //free
    GFREE (datafile);
    GFREE (motionmaskcoords);
    GFREE (motionmaskcellsidx);
    GFREE (motioncellsidx);

    g_mutex_unlock (filter->propset_mutex);
  }

  return gst_pad_push (filter->srcpad, buf);
}
Example No. 6
/* This function is called when new metadata is discovered in the stream */
static void tags_cb (GstElement *playbin, gint stream, CustomData *data) {
  /* We are possibly in a GStreamer working thread, so we notify the main
   * thread of this event through a message in the bus */
  gst_element_post_message (playbin,
      gst_message_new_application (GST_OBJECT (playbin),
          gst_structure_new ("tags-changed", (gchar *) NULL)));
}
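
The main-thread counterpart of this pattern typically installs a signal watch on the bus and reacts to the application message by name. A minimal sketch, assuming gst_bus_add_signal_watch() has been called and this handler is connected to "message::application"; the body is illustrative:

static void
application_cb (GstBus * bus, GstMessage * msg, CustomData * data)
{
  if (gst_message_has_name (msg, "tags-changed")) {
    /* we are back in the main thread, so it is safe to update the UI or
     * re-read the stream metadata from playbin here */
    g_print ("tags changed, re-reading stream info\n");
  }
}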
Example No. 7
static GstFlowReturn
gst_app_src_create (GstBaseSrc * bsrc, guint64 offset, guint size,
    GstBuffer ** buf)
{
  GstAppSrc *appsrc = GST_APP_SRC_CAST (bsrc);
  GstAppSrcPrivate *priv = appsrc->priv;
  GstFlowReturn ret;

  GST_OBJECT_LOCK (appsrc);
  if (G_UNLIKELY (priv->size != bsrc->segment.duration &&
          bsrc->segment.format == GST_FORMAT_BYTES)) {
    GST_DEBUG_OBJECT (appsrc,
        "Size changed from %" G_GINT64_FORMAT " to %" G_GINT64_FORMAT,
        bsrc->segment.duration, priv->size);
    bsrc->segment.duration = priv->size;
    GST_OBJECT_UNLOCK (appsrc);

    gst_element_post_message (GST_ELEMENT (appsrc),
        gst_message_new_duration_changed (GST_OBJECT (appsrc)));
  } else {
    GST_OBJECT_UNLOCK (appsrc);
  }

  g_mutex_lock (&priv->mutex);
  /* check flushing first */
  if (G_UNLIKELY (priv->flushing))
    goto flushing;

  if (priv->stream_type == GST_APP_STREAM_TYPE_RANDOM_ACCESS) {
    /* if we are dealing with a random-access stream, issue a seek if the offset
     * changed. */
    if (G_UNLIKELY (priv->offset != offset)) {
      gboolean res;

      /* do the seek */
      res = gst_app_src_emit_seek (appsrc, offset);

      if (G_UNLIKELY (!res))
        /* failing to seek is fatal */
        goto seek_error;

      priv->offset = offset;
      priv->is_eos = FALSE;
    }
  }

  while (TRUE) {
    /* return data as long as we have some */
    if (!g_queue_is_empty (priv->queue)) {
      guint buf_size;

      if (priv->new_caps) {
        gst_app_src_do_negotiate (bsrc);
        priv->new_caps = FALSE;
      }

      *buf = g_queue_pop_head (priv->queue);
      buf_size = gst_buffer_get_size (*buf);

      GST_DEBUG_OBJECT (appsrc, "we have buffer %p of size %u", *buf, buf_size);

      priv->queued_bytes -= buf_size;

      /* only update the offset when in random_access mode */
      if (priv->stream_type == GST_APP_STREAM_TYPE_RANDOM_ACCESS)
        priv->offset += buf_size;

      /* signal that we removed an item */
      g_cond_broadcast (&priv->cond);

      /* see if we go lower than the empty-percent */
      if (priv->min_percent && priv->max_bytes) {
        if (priv->queued_bytes * 100 / priv->max_bytes <= priv->min_percent)
          /* ignore flushing state, we got a buffer and we will return it now.
           * Errors will be handled in the next round */
          gst_app_src_emit_need_data (appsrc, size);
      }
      ret = GST_FLOW_OK;
      break;
    } else {
      gst_app_src_emit_need_data (appsrc, size);

      /* we can be flushing now because we released the lock above */
      if (G_UNLIKELY (priv->flushing))
        goto flushing;

      /* if we have a buffer now, continue the loop and try to return it. In
       * random-access mode (where a buffer is normally pushed in the above
       * signal) we can still be empty because the pushed buffer got flushed or
       * when the application pushes the requested buffer later, we support both
       * possibilities. */
      if (!g_queue_is_empty (priv->queue))
        continue;

      /* no buffer yet, maybe we are EOS, if not, block for more data. */
    }

    /* check EOS */
    if (G_UNLIKELY (priv->is_eos))
      goto eos;

    /* nothing to return, wait a while for new data or flushing. */
    g_cond_wait (&priv->cond, &priv->mutex);
  }
  g_mutex_unlock (&priv->mutex);
  return ret;

  /* ERRORS */
flushing:
  {
    GST_DEBUG_OBJECT (appsrc, "we are flushing");
    g_mutex_unlock (&priv->mutex);
    return GST_FLOW_FLUSHING;
  }
eos:
  {
    GST_DEBUG_OBJECT (appsrc, "we are EOS");
    g_mutex_unlock (&priv->mutex);
    return GST_FLOW_EOS;
  }
seek_error:
  {
    g_mutex_unlock (&priv->mutex);
    GST_ELEMENT_ERROR (appsrc, RESOURCE, READ, ("failed to seek"),
        GST_ERROR_SYSTEM);
    return GST_FLOW_ERROR;
  }
}
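
On the application side, the need-data signal emitted by gst_app_src_emit_need_data() above is usually answered by pushing a buffer back through the "push-buffer" action signal. A minimal sketch, assuming the handler was attached with g_signal_connect (appsrc, "need-data", ...); the buffer contents are placeholders:

static void
on_need_data (GstElement * appsrc, guint length, gpointer user_data)
{
  GstBuffer *buf = gst_buffer_new_allocate (NULL, 4096, NULL);
  GstFlowReturn ret;

  /* fill the buffer with real data here; the "push-buffer" action signal
   * does not take ownership, so we still have to drop our own reference */
  g_signal_emit_by_name (appsrc, "push-buffer", buf, &ret);
  gst_buffer_unref (buf);

  if (ret != GST_FLOW_OK)
    g_printerr ("push-buffer: %s\n", gst_flow_get_name (ret));
}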
Example No. 8
static void
gst_soup_http_src_got_headers_cb (SoupMessage * msg, GstSoupHTTPSrc * src)
{
  const char *value;
  GstTagList *tag_list;
  GstBaseSrc *basesrc;
  guint64 newsize;
  GHashTable *params = NULL;

  GST_DEBUG_OBJECT (src, "got headers:");
  soup_message_headers_foreach (msg->response_headers,
      gst_soup_http_src_headers_foreach, src);

  if (msg->status_code == 407 && src->proxy_id && src->proxy_pw)
    return;

  if (src->automatic_redirect && SOUP_STATUS_IS_REDIRECTION (msg->status_code)) {
    GST_DEBUG_OBJECT (src, "%u redirect to \"%s\"", msg->status_code,
        soup_message_headers_get_one (msg->response_headers, "Location"));
    return;
  }

  if (msg->status_code == SOUP_STATUS_UNAUTHORIZED)
    return;

  src->session_io_status = GST_SOUP_HTTP_SRC_SESSION_IO_STATUS_RUNNING;

  /* Parse Content-Length. */
  if (soup_message_headers_get_encoding (msg->response_headers) ==
      SOUP_ENCODING_CONTENT_LENGTH) {
    newsize = src->request_position +
        soup_message_headers_get_content_length (msg->response_headers);
    if (!src->have_size || (src->content_size != newsize)) {
      src->content_size = newsize;
      src->have_size = TRUE;
      src->seekable = TRUE;
      GST_DEBUG_OBJECT (src, "size = %" G_GUINT64_FORMAT, src->content_size);

      basesrc = GST_BASE_SRC_CAST (src);
      gst_segment_set_duration (&basesrc->segment, GST_FORMAT_BYTES,
          src->content_size);
      gst_element_post_message (GST_ELEMENT (src),
          gst_message_new_duration (GST_OBJECT (src), GST_FORMAT_BYTES,
              src->content_size));
    }
  }

  /* Icecast stuff */
  tag_list = gst_tag_list_new ();

  if ((value =
          soup_message_headers_get_one (msg->response_headers,
              "icy-metaint")) != NULL) {
    gint icy_metaint = atoi (value);

    GST_DEBUG_OBJECT (src, "icy-metaint: %s (parsed: %d)", value, icy_metaint);
    if (icy_metaint > 0) {
      if (src->src_caps)
        gst_caps_unref (src->src_caps);

      src->src_caps = gst_caps_new_simple ("application/x-icy",
          "metadata-interval", G_TYPE_INT, icy_metaint, NULL);
    }
  }
  if ((value =
          soup_message_headers_get_content_type (msg->response_headers,
              &params)) != NULL) {
    GST_DEBUG_OBJECT (src, "Content-Type: %s", value);
    if (g_ascii_strcasecmp (value, "audio/L16") == 0) {
      gint channels = 2;
      gint rate = 44100;
      char *param;

      if (src->src_caps)
        gst_caps_unref (src->src_caps);

      param = g_hash_table_lookup (params, "channels");
      if (param != NULL)
        channels = atol (param);

      param = g_hash_table_lookup (params, "rate");
      if (param != NULL)
        rate = atol (param);

      src->src_caps = gst_caps_new_simple ("audio/x-raw-int",
          "channels", G_TYPE_INT, channels,
          "rate", G_TYPE_INT, rate,
          "width", G_TYPE_INT, 16,
          "depth", G_TYPE_INT, 16,
          "signed", G_TYPE_BOOLEAN, TRUE,
          "endianness", G_TYPE_INT, G_BIG_ENDIAN, NULL);
    } else {
      /* Set the Content-Type field on the caps */
      if (src->src_caps)
        gst_caps_set_simple (src->src_caps, "content-type", G_TYPE_STRING,
            value, NULL);
    }
  }

  if (params != NULL)
    g_hash_table_destroy (params);

  if ((value =
          soup_message_headers_get_one (msg->response_headers,
              "icy-name")) != NULL) {
    g_free (src->iradio_name);
    src->iradio_name = gst_soup_http_src_unicodify (value);
    if (src->iradio_name) {
      g_object_notify (G_OBJECT (src), "iradio-name");
      gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE, GST_TAG_ORGANIZATION,
          src->iradio_name, NULL);
    }
  }
  if ((value =
          soup_message_headers_get_one (msg->response_headers,
              "icy-genre")) != NULL) {
    g_free (src->iradio_genre);
    src->iradio_genre = gst_soup_http_src_unicodify (value);
    if (src->iradio_genre) {
      g_object_notify (G_OBJECT (src), "iradio-genre");
      gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE, GST_TAG_GENRE,
          src->iradio_genre, NULL);
    }
  }
  if ((value = soup_message_headers_get_one (msg->response_headers, "icy-url"))
      != NULL) {
    g_free (src->iradio_url);
    src->iradio_url = gst_soup_http_src_unicodify (value);
    if (src->iradio_url) {
      g_object_notify (G_OBJECT (src), "iradio-url");
      gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE, GST_TAG_LOCATION,
          src->iradio_url, NULL);
    }
  }
  if (!gst_tag_list_is_empty (tag_list)) {
    GST_DEBUG_OBJECT (src,
        "calling gst_element_found_tags with %" GST_PTR_FORMAT, tag_list);
    gst_element_found_tags (GST_ELEMENT_CAST (src), tag_list);
  } else {
    gst_tag_list_free (tag_list);
  }

  /* Handle HTTP errors. */
  gst_soup_http_src_parse_status (msg, src);

  /* Check if Range header was respected. */
  if (src->ret == GST_FLOW_CUSTOM_ERROR &&
      src->read_position && msg->status_code != SOUP_STATUS_PARTIAL_CONTENT) {
    src->seekable = FALSE;
    GST_ELEMENT_ERROR (src, RESOURCE, SEEK,
        (_("Server does not support seeking.")),
        ("Server does not accept Range HTTP header, URL: %s", src->location));
    src->ret = GST_FLOW_ERROR;
  }
}
Example No. 9
/* MT safe */
static GstStateChangeReturn
gst_pipeline_change_state (GstElement * element, GstStateChange transition)
{
  GstStateChangeReturn result = GST_STATE_CHANGE_SUCCESS;
  GstPipeline *pipeline = GST_PIPELINE_CAST (element);
  GstClock *clock;

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      GST_OBJECT_LOCK (element);
      if (element->bus)
        gst_bus_set_flushing (element->bus, FALSE);
      GST_OBJECT_UNLOCK (element);
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      GST_OBJECT_LOCK (element);
      pipeline->priv->update_clock = TRUE;
      GST_OBJECT_UNLOCK (element);

      /* READY to PAUSED starts running_time from 0 */
      reset_start_time (pipeline, 0);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
    {
      GstClockTime now, start_time, last_start_time, delay;
      gboolean update_clock;
      GstClock *cur_clock;

      GST_DEBUG_OBJECT (element, "selecting clock and base_time");

      GST_OBJECT_LOCK (element);
      cur_clock = element->clock;
      if (cur_clock)
        gst_object_ref (cur_clock);
      /* get the desired running_time of the first buffer aka the start_time */
      start_time = GST_ELEMENT_START_TIME (pipeline);
      last_start_time = pipeline->priv->last_start_time;
      pipeline->priv->last_start_time = start_time;
      /* see if we need to update the clock */
      update_clock = pipeline->priv->update_clock;
      pipeline->priv->update_clock = FALSE;
      delay = pipeline->delay;
      GST_OBJECT_UNLOCK (element);

      /* running time changed, either with a PAUSED or a flush, we need to check
       * if there is a new clock & update the base time */
      /* only do this for top-level, however */
      if (GST_OBJECT_PARENT (element) == NULL &&
          (update_clock || last_start_time != start_time)) {
        GST_DEBUG_OBJECT (pipeline, "Need to update start_time");

        /* when going to PLAYING, select a clock when needed. If we just got
         * flushed, we don't reselect the clock. */
        if (update_clock) {
          GST_DEBUG_OBJECT (pipeline, "Need to update clock.");
          clock = gst_element_provide_clock (element);
        } else {
          GST_DEBUG_OBJECT (pipeline,
              "Don't need to update clock, using old clock.");
          /* only try to ref if cur_clock is not NULL */
          if (cur_clock)
            gst_object_ref (cur_clock);
          clock = cur_clock;
        }

        if (clock) {
          now = gst_clock_get_time (clock);
        } else {
          GST_DEBUG_OBJECT (pipeline, "no clock, using base time of NONE");
          now = GST_CLOCK_TIME_NONE;
        }

        if (clock != cur_clock) {
          /* now distribute the clock (which could be NULL). If some
           * element refuses the clock, this will return FALSE and
           * we effectively fail the state change. */
          if (!gst_element_set_clock (element, clock))
            goto invalid_clock;

          /* if we selected and distributed a new clock, let the app
           * know about it */
          gst_element_post_message (element,
              gst_message_new_new_clock (GST_OBJECT_CAST (element), clock));
        }

        if (clock)
          gst_object_unref (clock);

        if (start_time != GST_CLOCK_TIME_NONE && now != GST_CLOCK_TIME_NONE) {
          GstClockTime new_base_time = now - start_time + delay;
          GST_DEBUG_OBJECT (element,
              "start_time=%" GST_TIME_FORMAT ", now=%" GST_TIME_FORMAT
              ", base_time %" GST_TIME_FORMAT,
              GST_TIME_ARGS (start_time), GST_TIME_ARGS (now),
              GST_TIME_ARGS (new_base_time));

          gst_element_set_base_time (element, new_base_time);
        } else {
          GST_DEBUG_OBJECT (pipeline,
              "NOT adjusting base_time because start_time is NONE");
        }
      } else {
        GST_DEBUG_OBJECT (pipeline,
            "NOT adjusting base_time because we selected one before");
      }

      if (cur_clock)
        gst_object_unref (cur_clock);
      break;
    }
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
    {
      /* we take a start_time snapshot before calling the children state changes
       * so that they know about when the pipeline PAUSED. */
      pipeline_update_start_time (element);
      break;
    }
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      reset_start_time (pipeline, 0);
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
  }

  result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
    {
      /* Take a new snapshot of the start_time after calling the state change on
       * all children. This will be the running_time of the pipeline when we go
       * back to PLAYING */
      pipeline_update_start_time (element);
      break;
    }
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
    {
      GstBus *bus;
      gboolean auto_flush;

      /* grab some stuff before we release the lock to flush out the bus */
      GST_OBJECT_LOCK (element);
      if ((bus = element->bus))
        gst_object_ref (bus);
      auto_flush = pipeline->priv->auto_flush_bus;
      GST_OBJECT_UNLOCK (element);

      if (bus) {
        if (auto_flush) {
          gst_bus_set_flushing (bus, TRUE);
        } else {
          GST_INFO_OBJECT (element, "not flushing bus, auto-flushing disabled");
        }
        gst_object_unref (bus);
      }
      break;
    }
  }
  return result;

  /* ERRORS */
invalid_clock:
  {
    /* we generate this error when the selected clock was not
     * accepted by some element */
    GST_ELEMENT_ERROR (pipeline, CORE, CLOCK,
        (_("Selected clock cannot be used in pipeline.")),
        ("Pipeline cannot operate with selected clock"));
    GST_DEBUG_OBJECT (pipeline,
        "Pipeline cannot operate with selected clock %p", clock);
    if (clock)
      gst_object_unref (clock);
    return GST_STATE_CHANGE_FAILURE;
  }
}
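
The base_time arithmetic above (new_base_time = now - start_time + delay) is what keeps the pipeline's running_time continuous across PAUSED/PLAYING transitions. A minimal sketch of the inverse calculation an application might do to read the current running_time of a PLAYING pipeline; the helper name is illustrative:

static GstClockTime
query_running_time (GstElement * pipeline)
{
  GstClock *clock = gst_element_get_clock (pipeline);
  GstClockTime now, base_time;

  if (clock == NULL)
    return GST_CLOCK_TIME_NONE; /* no clock selected yet */

  now = gst_clock_get_time (clock);
  base_time = gst_element_get_base_time (pipeline);
  gst_object_unref (clock);

  /* running_time = absolute clock time - base_time */
  return (now >= base_time) ? now - base_time : 0;
}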
Example No. 10
static GstBuffer *
read_device (int fd, int adapter_number, int frontend_number, int size,
             GstDvbSrc * object)
{
    int count = 0;
    struct pollfd pfd[1];
    int ret_val = 0;
    guint attempts = 0;
    const int TIMEOUT = 100;

    GstBuffer *buf = gst_buffer_new_and_alloc (size);

    g_return_val_if_fail (GST_IS_BUFFER (buf), NULL);

    if (fd < 0) {
        return NULL;
    }

    pfd[0].fd = fd;
    pfd[0].events = POLLIN;

    while (count < size) {
        ret_val = poll (pfd, 1, TIMEOUT);
        if (ret_val > 0) {
            if (pfd[0].revents & POLLIN) {
                int tmp = 0;

                tmp = read (fd, GST_BUFFER_DATA (buf) + count, size - count);
                if (tmp < 0) {
                    GST_WARNING
                    ("Unable to read from device: /dev/dvb/adapter%d/dvr%d (%d)",
                     adapter_number, frontend_number, errno);
                    attempts += 1;
                    if (attempts % 10 == 0) {
                        GST_WARNING
                        ("Unable to read from device after %u attempts: /dev/dvb/adapter%d/dvr%d",
                         attempts, adapter_number, frontend_number);
                    }

                } else
                    count = count + tmp;
            } else {
                GST_LOG ("revents = %d\n", pfd[0].revents);
            }
        } else if (ret_val == 0) {  // poll timeout
            attempts += 1;
            GST_INFO ("Reading from device /dev/dvb/adapter%d/dvr%d timedout (%d)",
                      adapter_number, frontend_number, attempts);

            if (attempts % 10 == 0) {
                GST_WARNING
                ("Unable to read after %u attempts from device: /dev/dvb/adapter%d/dvr%d (%d)",
                 attempts, adapter_number, frontend_number, errno);
                gst_element_post_message (GST_ELEMENT_CAST (object),
                                          gst_message_new_element (GST_OBJECT (object),
                                                  gst_structure_empty_new ("dvb-read-failure")));

            }
        } else if (errno == EINTR) {        // poll interrupted
            if (attempts % 50 == 0) {
                gst_buffer_unref (buf);
                return NULL;
            }
        }

    }

    GST_BUFFER_SIZE (buf) = count;
    GST_BUFFER_TIMESTAMP (buf) = GST_CLOCK_TIME_NONE;
    return buf;
}
Example No. 11
static gboolean
gst_dvbsrc_open_frontend (GstDvbSrc * object)
{
    struct dvb_frontend_info fe_info;
    char *adapter_desc = NULL;
    gchar *frontend_dev;
    GstStructure *adapter_structure;
    char *adapter_name = NULL;

    frontend_dev = g_strdup_printf ("/dev/dvb/adapter%d/frontend%d",
                                    object->adapter_number, object->frontend_number);
    GST_INFO_OBJECT (object, "Using frontend device: %s", frontend_dev);

    /* open frontend */
    if ((object->fd_frontend = open (frontend_dev, O_RDWR)) < 0) {
        switch (errno) {
        case ENOENT:
            GST_ELEMENT_ERROR (object, RESOURCE, NOT_FOUND,
                               (_("Device \"%s\" does not exist."), frontend_dev), (NULL));
            break;
        default:
            GST_ELEMENT_ERROR (object, RESOURCE, OPEN_READ_WRITE,
                               (_("Could not open frontend device \"%s\"."), frontend_dev),
                               GST_ERROR_SYSTEM);
            break;
        }

        close (object->fd_frontend);
        g_free (frontend_dev);
        return FALSE;
    }

    if (ioctl (object->fd_frontend, FE_GET_INFO, &fe_info) < 0) {
        GST_ELEMENT_ERROR (object, RESOURCE, SETTINGS,
                           (_("Could not get settings from frontend device \"%s\"."),
                            frontend_dev), GST_ERROR_SYSTEM);

        close (object->fd_frontend);
        g_free (frontend_dev);
        return FALSE;
    }

    adapter_name = g_strdup (fe_info.name);

    object->adapter_type = fe_info.type;
    switch (object->adapter_type) {
    case FE_QPSK:
        adapter_desc = "DVB-S";
        adapter_structure = gst_structure_new ("dvb-adapter",
                                               "type", G_TYPE_STRING, adapter_desc,
                                               "name", G_TYPE_STRING, adapter_name,
                                               "auto-fec", G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_FEC_AUTO, NULL);
        break;
    case FE_QAM:
        adapter_desc = "DVB-C";
        adapter_structure = gst_structure_new ("dvb-adapter",
                                               "type", G_TYPE_STRING, adapter_desc,
                                               "name", G_TYPE_STRING, adapter_name,
                                               "auto-inversion", G_TYPE_BOOLEAN,
                                               fe_info.caps & FE_CAN_INVERSION_AUTO, "auto-qam", G_TYPE_BOOLEAN,
                                               fe_info.caps & FE_CAN_QAM_AUTO, "auto-fec", G_TYPE_BOOLEAN,
                                               fe_info.caps & FE_CAN_FEC_AUTO, NULL);
        break;
    case FE_OFDM:
        adapter_desc = "DVB-T";
        adapter_structure = gst_structure_new ("dvb-adapter",
                                               "type", G_TYPE_STRING, adapter_desc,
                                               "name", G_TYPE_STRING, adapter_name,
                                               "auto-inversion", G_TYPE_BOOLEAN,
                                               fe_info.caps & FE_CAN_INVERSION_AUTO, "auto-qam", G_TYPE_BOOLEAN,
                                               fe_info.caps & FE_CAN_QAM_AUTO, "auto-transmission-mode",
                                               G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_TRANSMISSION_MODE_AUTO,
                                               "auto-guard-interval", G_TYPE_BOOLEAN,
                                               fe_info.caps & FE_CAN_GUARD_INTERVAL_AUTO, "auto-hierarchy",
                                               G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_HIERARCHY_AUTO, "auto-fec",
                                               G_TYPE_BOOLEAN, fe_info.caps & FE_CAN_FEC_AUTO, NULL);
        break;
    case FE_ATSC:
        adapter_desc = "ATSC";
        adapter_structure = gst_structure_new ("dvb-adapter",
                                               "type", G_TYPE_STRING, adapter_desc,
                                               "name", G_TYPE_STRING, adapter_name, NULL);
        break;
    default:
        g_error ("Unknown frontend type: %d", object->adapter_type);
        adapter_structure = gst_structure_new ("dvb-adapter",
                                               "type", G_TYPE_STRING, "unknown", NULL);
    }

    GST_INFO_OBJECT (object, "DVB card: %s ", adapter_name);
    gst_element_post_message (GST_ELEMENT_CAST (object), gst_message_new_element
                              (GST_OBJECT (object), adapter_structure));
    g_free (frontend_dev);
    g_free (adapter_name);
    return TRUE;
}
Example No. 12
GstFlowReturn
gst_kate_util_decoder_base_chain_kate_packet (GstKateDecoderBase * decoder,
    GstElement * element, GstPad * pad, GstBuffer * buf, GstPad * srcpad,
    GstPad * tagpad, GstCaps ** src_caps, const kate_event ** ev)
{
  kate_packet kp;
  int ret;
  GstFlowReturn rflow = GST_FLOW_OK;
  gboolean is_header;
  guint8 *data;
  gsize size;
  guint8 header[1];

  size = gst_buffer_extract (buf, 0, header, 1);

  GST_DEBUG_OBJECT (element, "got kate packet, %u bytes, type %02x",
      gst_buffer_get_size (buf), size == 0 ? -1 : header[0]);

  is_header = size > 0 && (header[0] & 0x80);

  if (!is_header && decoder->tags) {
    /* after we've processed headers, send any tags before processing the data packet */
    GST_DEBUG_OBJECT (element, "Not a header, sending tags for pad %s:%s",
        GST_DEBUG_PAD_NAME (tagpad));
    gst_element_found_tags_for_pad (element, tagpad, decoder->tags);
    decoder->tags = NULL;
  }

  data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
  kate_packet_wrap (&kp, size, data);
  ret = kate_high_decode_packetin (&decoder->k, &kp, ev);
  gst_buffer_unmap (buf, data, size);

  if (G_UNLIKELY (ret < 0)) {
    GST_ELEMENT_ERROR (element, STREAM, DECODE, (NULL),
        ("Failed to decode Kate packet: %s",
            gst_kate_util_get_error_message (ret)));
    return GST_FLOW_ERROR;
  }

  if (G_UNLIKELY (ret > 0)) {
    GST_DEBUG_OBJECT (element,
        "kate_high_decode_packetin has received EOS packet");
  }

  /* headers may be interesting to retrieve information from */
  if (G_UNLIKELY (is_header)) {
    switch (header[0]) {
      case 0x80:               /* ID header */
        GST_INFO_OBJECT (element, "Parsed ID header: language %s, category %s",
            decoder->k.ki->language, decoder->k.ki->category);
        if (src_caps) {
          if (*src_caps) {
            gst_caps_unref (*src_caps);
            *src_caps = NULL;
          }
          if (strcmp (decoder->k.ki->category, "K-SPU") == 0 ||
              strcmp (decoder->k.ki->category, "spu-subtitles") == 0) {
            *src_caps = gst_caps_new_empty_simple ("video/x-dvd-subpicture");
          } else if (decoder->k.ki->text_markup_type == kate_markup_none) {
            *src_caps = gst_caps_new_empty_simple ("text/plain");
          } else {
            *src_caps = gst_caps_new_empty_simple ("text/x-pango-markup");
          }
          GST_INFO_OBJECT (srcpad, "Setting caps: %" GST_PTR_FORMAT, *src_caps);
          if (!gst_pad_set_caps (srcpad, *src_caps)) {
            GST_ERROR_OBJECT (srcpad, "Failed to set caps %" GST_PTR_FORMAT,
                *src_caps);
          }
        }
        if (decoder->k.ki->language && *decoder->k.ki->language) {
          GstTagList *old = decoder->tags, *tags = gst_tag_list_new_empty ();
          if (tags) {
            gchar *lang_code;

            /* en_GB -> en */
            lang_code = g_ascii_strdown (decoder->k.ki->language, -1);
            g_strdelimit (lang_code, NULL, '\0');
            gst_tag_list_add (tags, GST_TAG_MERGE_APPEND, GST_TAG_LANGUAGE_CODE,
                lang_code, NULL);
            g_free (lang_code);
            /* TODO: category - where should it go ? */
            decoder->tags =
                gst_tag_list_merge (decoder->tags, tags, GST_TAG_MERGE_REPLACE);
            gst_tag_list_free (tags);
            if (old)
              gst_tag_list_free (old);
          }
        }

        /* update properties */
        if (decoder->language)
          g_free (decoder->language);
        decoder->language = g_strdup (decoder->k.ki->language);
        if (decoder->category)
          g_free (decoder->category);
        decoder->category = g_strdup (decoder->k.ki->category);
        decoder->original_canvas_width = decoder->k.ki->original_canvas_width;
        decoder->original_canvas_height = decoder->k.ki->original_canvas_height;

        /* we can now send away any event we've delayed, as the src pad now has caps */
        gst_kate_util_decoder_base_drain_event_queue (decoder);

        break;

      case 0x81:               /* Vorbis comments header */
        GST_INFO_OBJECT (element, "Parsed comments header");
        {
          gchar *encoder = NULL;
          GstTagList *old = decoder->tags, *list =
              gst_tag_list_from_vorbiscomment_buffer (buf,
              (const guint8 *) "\201kate\0\0\0\0", 9, &encoder);
          if (list) {
            decoder->tags =
                gst_tag_list_merge (decoder->tags, list, GST_TAG_MERGE_REPLACE);
            gst_tag_list_free (list);
          }

          if (!decoder->tags) {
            GST_ERROR_OBJECT (element, "failed to decode comment header");
            decoder->tags = gst_tag_list_new_empty ();
          }
          if (encoder) {
            gst_tag_list_add (decoder->tags, GST_TAG_MERGE_REPLACE,
                GST_TAG_ENCODER, encoder, NULL);
            g_free (encoder);
          }
          gst_tag_list_add (decoder->tags, GST_TAG_MERGE_REPLACE,
              GST_TAG_SUBTITLE_CODEC, "Kate", NULL);
          gst_tag_list_add (decoder->tags, GST_TAG_MERGE_REPLACE,
              GST_TAG_ENCODER_VERSION, decoder->k.ki->bitstream_version_major,
              NULL);

          if (old)
            gst_tag_list_free (old);

          if (decoder->initialized) {
            gst_element_found_tags_for_pad (element, tagpad, decoder->tags);
            decoder->tags = NULL;
          } else {
            /* Only push them as messages for the time being. *
             * They will be pushed on the pad once the decoder is initialized */
            gst_element_post_message (element,
                gst_message_new_tag (GST_OBJECT (element),
                    gst_tag_list_copy (decoder->tags)));
          }
        }
        break;

      default:
        break;
    }
  }
#if ((KATE_VERSION_MAJOR<<16)|(KATE_VERSION_MINOR<<8)|KATE_VERSION_PATCH) >= 0x000400
  else if (*ev && (*ev)->meta) {
    int count = kate_meta_query_count ((*ev)->meta);
    if (count > 0) {
      GstTagList *evtags = gst_tag_list_new_empty ();
      int idx;
      GST_DEBUG_OBJECT (decoder, "Kate event has %d attached metadata", count);
      for (idx = 0; idx < count; ++idx) {
        const char *tag, *value;
        size_t len;
        if (kate_meta_query ((*ev)->meta, idx, &tag, &value, &len) < 0) {
          GST_WARNING_OBJECT (decoder, "Failed to retrieve metadata %d", idx);
        } else {
          if (gst_kate_util_is_utf8_string (value, len)) {
            gchar *compound = g_strdup_printf ("%s=%s", tag, value);
            GST_DEBUG_OBJECT (decoder, "Metadata %d: %s=%s (%zu bytes)", idx,
                tag, value, len);
            gst_tag_list_add (evtags, GST_TAG_MERGE_APPEND,
                GST_TAG_EXTENDED_COMMENT, compound, NULL);
            g_free (compound);
          } else {
            GST_INFO_OBJECT (decoder,
                "Metadata %d, (%s, %zu bytes) is binary, ignored", idx, tag,
                len);
          }
        }
      }
      if (gst_tag_list_is_empty (evtags))
        gst_tag_list_free (evtags);
      else
        gst_element_found_tags_for_pad (element, tagpad, evtags);
    }
  }
#endif

  return rflow;
}
Example No. 13
static GstFlowReturn
gst_cutter_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstCutter *filter;
  GstMapInfo map;
  gint16 *in_data;
  gint bpf, rate;
  gsize in_size;
  guint num_samples;
  gdouble NCS = 0.0;            /* Normalized Cumulative Square of buffer */
  gdouble RMS = 0.0;            /* RMS of signal in buffer */
  gdouble NMS = 0.0;            /* Normalized Mean Square of buffer */
  GstBuffer *prebuf;            /* pointer to a prebuffer element */
  GstClockTime duration;

  filter = GST_CUTTER (parent);

  if (GST_AUDIO_INFO_FORMAT (&filter->info) == GST_AUDIO_FORMAT_UNKNOWN)
    goto not_negotiated;

  bpf = GST_AUDIO_INFO_BPF (&filter->info);
  rate = GST_AUDIO_INFO_RATE (&filter->info);

  gst_buffer_map (buf, &map, GST_MAP_READ);
  in_data = (gint16 *) map.data;
  in_size = map.size;

  GST_LOG_OBJECT (filter, "length of prerec buffer: %" GST_TIME_FORMAT,
      GST_TIME_ARGS (filter->pre_run_length));

  /* calculate mean square value on buffer */
  switch (GST_AUDIO_INFO_FORMAT (&filter->info)) {
    case GST_AUDIO_FORMAT_S16:
      num_samples = in_size / 2;
      gst_cutter_calculate_gint16 (in_data, num_samples, &NCS);
      NMS = NCS / num_samples;
      break;
    case GST_AUDIO_FORMAT_S8:
      num_samples = in_size;
      gst_cutter_calculate_gint8 ((gint8 *) in_data, num_samples, &NCS);
      NMS = NCS / num_samples;
      break;
    default:
      /* this shouldn't happen */
      g_warning ("no mean square function for format");
      break;
  }

  gst_buffer_unmap (buf, &map);

  filter->silent_prev = filter->silent;

  duration = gst_util_uint64_scale (in_size / bpf, GST_SECOND, rate);

  RMS = sqrt (NMS);
  /* if RMS below threshold, add buffer length to silent run length count
   * if not, reset
   */
  GST_LOG_OBJECT (filter, "buffer stats: NMS %f, RMS %f, audio length %f", NMS,
      RMS, gst_guint64_to_gdouble (duration));

  if (RMS < filter->threshold_level)
    filter->silent_run_length += gst_guint64_to_gdouble (duration);
  else {
    filter->silent_run_length = 0 * GST_SECOND;
    filter->silent = FALSE;
  }

  if (filter->silent_run_length > filter->threshold_length)
    /* it has been silent long enough, flag it */
    filter->silent = TRUE;

  /* has the silent status changed ? if so, send right signal
   * and, if from silent -> not silent, flush pre_record buffer
   */
  if (filter->silent != filter->silent_prev) {
    if (filter->silent) {
      GstMessage *m =
          gst_cutter_message_new (filter, FALSE, GST_BUFFER_TIMESTAMP (buf));
      GST_DEBUG_OBJECT (filter, "signaling CUT_STOP");
      gst_element_post_message (GST_ELEMENT (filter), m);
    } else {
      gint count = 0;
      GstMessage *m =
          gst_cutter_message_new (filter, TRUE, GST_BUFFER_TIMESTAMP (buf));

      GST_DEBUG_OBJECT (filter, "signaling CUT_START");
      gst_element_post_message (GST_ELEMENT (filter), m);
      /* first of all, flush current buffer */
      GST_DEBUG_OBJECT (filter, "flushing buffer of length %" GST_TIME_FORMAT,
          GST_TIME_ARGS (filter->pre_run_length));

      while (filter->pre_buffer) {
        prebuf = (g_list_first (filter->pre_buffer))->data;
        filter->pre_buffer = g_list_remove (filter->pre_buffer, prebuf);
        gst_pad_push (filter->srcpad, prebuf);
        ++count;
      }
      GST_DEBUG_OBJECT (filter, "flushed %d buffers", count);
      filter->pre_run_length = 0 * GST_SECOND;
    }
  }
  /* now check if we have to send the new buffer to the internal buffer cache
   * or to the srcpad */
  if (filter->silent) {
    filter->pre_buffer = g_list_append (filter->pre_buffer, buf);
    filter->pre_run_length += gst_guint64_to_gdouble (duration);

    while (filter->pre_run_length > filter->pre_length) {
      GstClockTime pduration;
      gsize psize;

      prebuf = (g_list_first (filter->pre_buffer))->data;
      g_assert (GST_IS_BUFFER (prebuf));

      psize = gst_buffer_get_size (prebuf);
      pduration = gst_util_uint64_scale (psize / bpf, GST_SECOND, rate);

      filter->pre_buffer = g_list_remove (filter->pre_buffer, prebuf);
      filter->pre_run_length -= gst_guint64_to_gdouble (pduration);

      /* only pass buffers if we don't leak */
      if (!filter->leaky)
        ret = gst_pad_push (filter->srcpad, prebuf);
      else
        gst_buffer_unref (prebuf);
    }
  } else
    ret = gst_pad_push (filter->srcpad, buf);

  return ret;

  /* ERRORS */
not_negotiated:
  {
    /* we still own the input buffer here; drop it so it does not leak */
    gst_buffer_unref (buf);
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
Exemplo n.º 14
0
static GstFlowReturn
gst_dtmf_detect_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  GstDtmfDetect *self = GST_DTMF_DETECT (trans);
  gint dtmf_count;
  gchar dtmfbuf[MAX_DTMF_DIGITS] = "";
  gint i;

  if (GST_BUFFER_IS_DISCONT (buf))
    zap_dtmf_detect_init (&self->dtmf_state);
  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP))
    return GST_FLOW_OK;

  zap_dtmf_detect (&self->dtmf_state, (int16_t *) GST_BUFFER_DATA (buf),
      GST_BUFFER_SIZE (buf) / 2, FALSE);

  dtmf_count = zap_dtmf_get (&self->dtmf_state, dtmfbuf, MAX_DTMF_DIGITS);

  if (dtmf_count)
    GST_DEBUG_OBJECT (self, "Got %d DTMF events: %s", dtmf_count, dtmfbuf);
  else
    GST_LOG_OBJECT (self, "Got no DTMF events");

  for (i = 0; i < dtmf_count; i++) {
    GstMessage *dtmf_message = NULL;
    GstStructure *structure;
    gint dtmf_payload_event;

    GST_DEBUG_OBJECT (self, "Got DTMF event %c", dtmfbuf[i]);

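    /* Map the detected character to its telephone-event number (RFC 4733):
     * '0'-'9' -> 0-9, '*' -> 10, '#' -> 11, 'A'-'D' -> 12-15 */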
    switch (dtmfbuf[i]) {
      case '0':
        dtmf_payload_event = 0;
        break;
      case '1':
        dtmf_payload_event = 1;
        break;
      case '2':
        dtmf_payload_event = 2;
        break;
      case '3':
        dtmf_payload_event = 3;
        break;
      case '4':
        dtmf_payload_event = 4;
        break;
      case '5':
        dtmf_payload_event = 5;
        break;
      case '6':
        dtmf_payload_event = 6;
        break;
      case '7':
        dtmf_payload_event = 7;
        break;
      case '8':
        dtmf_payload_event = 8;
        break;
      case '9':
        dtmf_payload_event = 9;
        break;
      case '*':
        dtmf_payload_event = 10;
        break;
      case '#':
        dtmf_payload_event = 11;
        break;
      case 'A':
        dtmf_payload_event = 12;
        break;
      case 'B':
        dtmf_payload_event = 13;
        break;
      case 'C':
        dtmf_payload_event = 14;
        break;
      case 'D':
        dtmf_payload_event = 15;
        break;
      default:
        continue;
    }

    structure = gst_structure_new ("dtmf-event",
        "type", G_TYPE_INT, 1,
        "number", G_TYPE_INT, dtmf_payload_event,
        "method", G_TYPE_INT, 2, NULL);
    dtmf_message = gst_message_new_element (GST_OBJECT (self), structure);
    gst_element_post_message (GST_ELEMENT (self), dtmf_message);
  }

  return GST_FLOW_OK;
}
Exemplo n.º 15
0
static GstFlowReturn
gst_v4l2src_create (GstPushSrc * src, GstBuffer ** buf)
{
  GstV4l2Src *v4l2src = GST_V4L2SRC (src);
  GstV4l2Object *obj = v4l2src->v4l2object;
  GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL_CAST (obj->pool);
  GstFlowReturn ret;
  GstClock *clock;
  GstClockTime abs_time, base_time, timestamp, duration;
  GstClockTime delay;
  GstMessage *qos_msg;

  do {
    ret = GST_BASE_SRC_CLASS (parent_class)->alloc (GST_BASE_SRC (src), 0,
        obj->info.size, buf);

    if (G_UNLIKELY (ret != GST_FLOW_OK))
      goto alloc_failed;

    ret = gst_v4l2_buffer_pool_process (pool, buf);

  } while (ret == GST_V4L2_FLOW_CORRUPTED_BUFFER);

  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto error;

  timestamp = GST_BUFFER_TIMESTAMP (*buf);
  duration = obj->duration;

  /* timestamps, LOCK to get clock and base time. */
  /* FIXME: element clock and base_time is rarely changing */
  GST_OBJECT_LOCK (v4l2src);
  if ((clock = GST_ELEMENT_CLOCK (v4l2src))) {
    /* we have a clock, get base time and ref clock */
    base_time = GST_ELEMENT (v4l2src)->base_time;
    gst_object_ref (clock);
  } else {
    /* no clock, can't set timestamps */
    base_time = GST_CLOCK_TIME_NONE;
  }
  GST_OBJECT_UNLOCK (v4l2src);

  /* sample pipeline clock */
  if (clock) {
    abs_time = gst_clock_get_time (clock);
    gst_object_unref (clock);
  } else {
    abs_time = GST_CLOCK_TIME_NONE;
  }

retry:
  if (!v4l2src->has_bad_timestamp && timestamp != GST_CLOCK_TIME_NONE) {
    struct timespec now;
    GstClockTime gstnow;

    /* v4l2 specs say to use the system time although many drivers switched to
     * the more desirable monotonic time. We first try to use the monotonic time
     * and see how that goes */
    clock_gettime (CLOCK_MONOTONIC, &now);
    gstnow = GST_TIMESPEC_TO_TIME (now);

    if (timestamp > gstnow || (gstnow - timestamp) > (10 * GST_SECOND)) {
      GTimeVal now;

      /* very large diff, fall back to system time */
      g_get_current_time (&now);
      gstnow = GST_TIMEVAL_TO_TIME (now);
    }

    /* Detect buggy drivers here, and stop using their timestamps. Hitting any
     * of these conditions would imply a very buggy driver:
     *   - Timestamp is in the future
     *   - Timestamp is going backward compared to the last seen timestamp
     *   - Timestamp is jumping forward by less than a frame duration
     *   - Delay is bigger than the actual timestamp
     */
    if (timestamp > gstnow) {
      GST_WARNING_OBJECT (v4l2src,
          "Timestamp in the future detected, ignoring driver timestamps");
      v4l2src->has_bad_timestamp = TRUE;
      goto retry;
    }

    if (v4l2src->last_timestamp > timestamp) {
      GST_WARNING_OBJECT (v4l2src,
          "Timestamp going backward, ignoring driver timestamps");
      v4l2src->has_bad_timestamp = TRUE;
      goto retry;
    }

    delay = gstnow - timestamp;

    if (delay > timestamp) {
      GST_WARNING_OBJECT (v4l2src,
          "Timestamp does not correlate with any clock, ignoring driver timestamps");
      v4l2src->has_bad_timestamp = TRUE;
      goto retry;
    }

    /* Save last timestamp for sanity checks */
    v4l2src->last_timestamp = timestamp;

    GST_DEBUG_OBJECT (v4l2src, "ts: %" GST_TIME_FORMAT " now %" GST_TIME_FORMAT
        " delay %" GST_TIME_FORMAT, GST_TIME_ARGS (timestamp),
        GST_TIME_ARGS (gstnow), GST_TIME_ARGS (delay));
  } else {
    /* we assume 1 frame latency otherwise */
    if (GST_CLOCK_TIME_IS_VALID (duration))
      delay = duration;
    else
      delay = 0;
  }

  /* set buffer metadata */

  if (G_LIKELY (abs_time != GST_CLOCK_TIME_NONE)) {
    /* the time now is the time of the clock minus the base time */
    timestamp = abs_time - base_time;

    /* adjust for delay in the device */
    if (timestamp > delay)
      timestamp -= delay;
    else
      timestamp = 0;
  } else {
    timestamp = GST_CLOCK_TIME_NONE;
  }

  /* activate settings for next frame */
  if (GST_CLOCK_TIME_IS_VALID (duration)) {
    v4l2src->ctrl_time += duration;
  } else {
    /* this is not very good (as it should be the next timestamp),
     * still good enough for linear fades (as long as it is not -1)
     */
    v4l2src->ctrl_time = timestamp;
  }
  gst_object_sync_values (GST_OBJECT (src), v4l2src->ctrl_time);

  GST_INFO_OBJECT (src, "sync to %" GST_TIME_FORMAT " out ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (v4l2src->ctrl_time), GST_TIME_ARGS (timestamp));

  /* use generated offset values only if there are not already valid ones
   * set by the v4l2 device */
  if (!GST_BUFFER_OFFSET_IS_VALID (*buf) || !GST_BUFFER_OFFSET_END_IS_VALID (*buf)) {
    GST_BUFFER_OFFSET (*buf) = v4l2src->offset++;
    GST_BUFFER_OFFSET_END (*buf) = v4l2src->offset;
  } else {
    /* adjust raw v4l2 device sequence, will restart at zero in case of renegotiation
     * (streamoff/streamon) */
    GST_BUFFER_OFFSET (*buf) += v4l2src->renegotiation_adjust;
    GST_BUFFER_OFFSET_END (*buf) += v4l2src->renegotiation_adjust;
    /* check for frame loss with given (from v4l2 device) buffer offset */
    if ((v4l2src->offset != 0) && (GST_BUFFER_OFFSET (*buf) != (v4l2src->offset + 1))) {
      guint64 lost_frame_count = GST_BUFFER_OFFSET (*buf) - v4l2src->offset - 1;
      GST_WARNING_OBJECT (v4l2src,
          "lost frames detected: count = %" G_GUINT64_FORMAT " - ts: %" GST_TIME_FORMAT,
              lost_frame_count, GST_TIME_ARGS (timestamp));

      qos_msg = gst_message_new_qos (GST_OBJECT_CAST (v4l2src), TRUE,
          GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, timestamp,
          GST_CLOCK_TIME_IS_VALID (duration) ? lost_frame_count * duration : GST_CLOCK_TIME_NONE);
      gst_element_post_message (GST_ELEMENT_CAST (v4l2src), qos_msg);

    }
    v4l2src->offset = GST_BUFFER_OFFSET (*buf);
  }

  GST_BUFFER_TIMESTAMP (*buf) = timestamp;
  GST_BUFFER_DURATION (*buf) = duration;

  return ret;

  /* ERROR */
alloc_failed:
  {
    if (ret != GST_FLOW_FLUSHING)
      GST_ELEMENT_ERROR (src, RESOURCE, NO_SPACE_LEFT,
          ("Failed to allocate a buffer"), (NULL));
    return ret;
  }
error:
  {
    if (ret == GST_V4L2_FLOW_LAST_BUFFER) {
      GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
          ("Driver returned a buffer with no payload, this most likely "
              "indicate a bug in the driver."), (NULL));
      ret = GST_FLOW_ERROR;
    } else {
      GST_DEBUG_OBJECT (src, "error processing buffer %d (%s)", ret,
          gst_flow_get_name (ret));
    }
    return ret;
  }
}
Exemplo n.º 16
0
static GstFlowReturn
gst_rtp_dtmf_src_create (GstBaseSrc * basesrc, guint64 offset,
    guint length, GstBuffer ** buffer)
{
  GstRTPDTMFSrcEvent *event;
  GstRTPDTMFSrc *dtmfsrc;
  GstClock *clock;
  GstClockID *clockid;
  GstClockReturn clockret;
  GstMessage *message;
  GQueue messages = G_QUEUE_INIT;

  dtmfsrc = GST_RTP_DTMF_SRC (basesrc);

  do {

    if (dtmfsrc->payload == NULL) {
      GST_DEBUG_OBJECT (dtmfsrc, "popping");
      event = g_async_queue_pop (dtmfsrc->event_queue);

      GST_DEBUG_OBJECT (dtmfsrc, "popped %d", event->event_type);

      switch (event->event_type) {
        case RTP_DTMF_EVENT_TYPE_STOP:
          GST_WARNING_OBJECT (dtmfsrc,
              "Received a DTMF stop event when already stopped");
          gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
          break;

        case RTP_DTMF_EVENT_TYPE_START:
          dtmfsrc->first_packet = TRUE;
          dtmfsrc->last_packet = FALSE;
          /* Set the redundancy on the first packet */
          dtmfsrc->redundancy_count = dtmfsrc->packet_redundancy;
          if (!gst_rtp_dtmf_prepare_timestamps (dtmfsrc))
            goto no_clock;

          g_queue_push_tail (&messages,
              gst_dtmf_src_prepare_message (dtmfsrc, "dtmf-event-processed",
                  event));
          dtmfsrc->payload = event->payload;
          dtmfsrc->payload->duration =
              dtmfsrc->ptime * dtmfsrc->clock_rate / 1000;
          event->payload = NULL;
          break;

        case RTP_DTMF_EVENT_TYPE_PAUSE_TASK:
          /*
           * We're pushing it back because it has to stay in there until
           * the task is really paused (and the queue will then be flushed)
           */
          GST_OBJECT_LOCK (dtmfsrc);
          if (dtmfsrc->paused) {
            g_async_queue_push (dtmfsrc->event_queue, event);
            goto paused_locked;
          }
          GST_OBJECT_UNLOCK (dtmfsrc);
          break;
      }

      gst_rtp_dtmf_src_event_free (event);
    } else if (!dtmfsrc->first_packet && !dtmfsrc->last_packet &&
        (dtmfsrc->timestamp - dtmfsrc->start_timestamp) / GST_MSECOND >=
        MIN_PULSE_DURATION) {
      GST_DEBUG_OBJECT (dtmfsrc, "try popping");
      event = g_async_queue_try_pop (dtmfsrc->event_queue);


      if (event != NULL) {
        GST_DEBUG_OBJECT (dtmfsrc, "try popped %d", event->event_type);

        switch (event->event_type) {
          case RTP_DTMF_EVENT_TYPE_START:
            GST_WARNING_OBJECT (dtmfsrc,
                "Received two consecutive DTMF start events");
            gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
            break;

          case RTP_DTMF_EVENT_TYPE_STOP:
            dtmfsrc->first_packet = FALSE;
            dtmfsrc->last_packet = TRUE;
            /* Set the redundancy on the last packet */
            dtmfsrc->redundancy_count = dtmfsrc->packet_redundancy;
            g_queue_push_tail (&messages,
                gst_dtmf_src_prepare_message (dtmfsrc, "dtmf-event-processed",
                    event));
            break;

          case RTP_DTMF_EVENT_TYPE_PAUSE_TASK:
            /*
             * We're pushing it back because it has to stay in there until
             * the task is really paused (and the queue will then be flushed)
             */
            GST_DEBUG_OBJECT (dtmfsrc, "pushing pause_task...");
            GST_OBJECT_LOCK (dtmfsrc);
            if (dtmfsrc->paused) {
              g_async_queue_push (dtmfsrc->event_queue, event);
              goto paused_locked;
            }
            GST_OBJECT_UNLOCK (dtmfsrc);
            break;
        }
        gst_rtp_dtmf_src_event_free (event);
      }
    }
  } while (dtmfsrc->payload == NULL);


  GST_DEBUG_OBJECT (dtmfsrc, "Processed events, now lets wait on the clock");

  clock = gst_element_get_clock (GST_ELEMENT (basesrc));
  if (!clock)
    goto no_clock;
  clockid = gst_clock_new_single_shot_id (clock, dtmfsrc->timestamp +
      gst_element_get_base_time (GST_ELEMENT (dtmfsrc)));
  gst_object_unref (clock);

  GST_OBJECT_LOCK (dtmfsrc);
  if (!dtmfsrc->paused) {
    dtmfsrc->clockid = clockid;
    GST_OBJECT_UNLOCK (dtmfsrc);

    clockret = gst_clock_id_wait (clockid, NULL);

    GST_OBJECT_LOCK (dtmfsrc);
    if (dtmfsrc->paused)
      clockret = GST_CLOCK_UNSCHEDULED;
  } else {
    clockret = GST_CLOCK_UNSCHEDULED;
  }
  gst_clock_id_unref (clockid);
  dtmfsrc->clockid = NULL;
  GST_OBJECT_UNLOCK (dtmfsrc);

  while ((message = g_queue_pop_head (&messages)) != NULL)
    gst_element_post_message (GST_ELEMENT (dtmfsrc), message);

  if (clockret == GST_CLOCK_UNSCHEDULED) {
    goto paused;
  }

send_last:

  if (dtmfsrc->dirty)
    if (!gst_rtp_dtmf_src_negotiate (basesrc))
      return GST_FLOW_NOT_NEGOTIATED;

  /* create buffer to hold the payload */
  *buffer = gst_rtp_dtmf_src_create_next_rtp_packet (dtmfsrc);

  if (dtmfsrc->redundancy_count)
    dtmfsrc->redundancy_count--;

  /* Only the very first one has a marker */
  dtmfsrc->first_packet = FALSE;

  /* This is the end of the event */
  if (dtmfsrc->last_packet == TRUE && dtmfsrc->redundancy_count == 0) {

    g_slice_free (GstRTPDTMFPayload, dtmfsrc->payload);
    dtmfsrc->payload = NULL;

    dtmfsrc->last_packet = FALSE;
  }

  return GST_FLOW_OK;

paused_locked:

  GST_OBJECT_UNLOCK (dtmfsrc);

paused:

  if (dtmfsrc->payload) {
    dtmfsrc->first_packet = FALSE;
    dtmfsrc->last_packet = TRUE;
    /* Set the redundancy on the last packet */
    dtmfsrc->redundancy_count = dtmfsrc->packet_redundancy;
    goto send_last;
  } else {
    return GST_FLOW_FLUSHING;
  }

no_clock:
  GST_ELEMENT_ERROR (dtmfsrc, STREAM, MUX, ("No available clock"),
      ("No available clock"));
  gst_pad_pause_task (GST_BASE_SRC_PAD (dtmfsrc));
  return GST_FLOW_ERROR;
}
Exemplo n.º 17
0
/* chain function
 * this function does the actual processing
 */
static GstFlowReturn
gst_template_match_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
    GstTemplateMatch *filter;
    CvPoint best_pos;
    double best_res;
    GstMapInfo info;
    GstMessage *m = NULL;

    filter = GST_TEMPLATE_MATCH (parent);

    if ((!filter) || (!buf)) {
        return GST_FLOW_OK;
    }
    GST_LOG_OBJECT (filter, "Buffer size %u", (guint) gst_buffer_get_size (buf));

    buf = gst_buffer_make_writable (buf);
    gst_buffer_map (buf, &info, (GstMapFlags) (GST_MAP_READWRITE));
    filter->cvImage->imageData = (char *) info.data;

    GST_OBJECT_LOCK (filter);
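    /* lazily create the result image; for OpenCV template matching it must be
     * (imageW - templateW + 1) x (imageH - templateH + 1), single-channel
     * 32-bit float */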
    if (filter->cvTemplateImage && !filter->cvDistImage) {
        if (filter->cvTemplateImage->width > filter->cvImage->width) {
            GST_WARNING ("Template Image is wider than input image");
        } else if (filter->cvTemplateImage->height > filter->cvImage->height) {
            GST_WARNING ("Template Image is taller than input image");
        } else {

            GST_DEBUG_OBJECT (filter, "cvCreateImage (Size(%d-%d+1,%d) %d, %d)",
                              filter->cvImage->width, filter->cvTemplateImage->width,
                              filter->cvImage->height - filter->cvTemplateImage->height + 1,
                              IPL_DEPTH_32F, 1);
            filter->cvDistImage =
                cvCreateImage (cvSize (filter->cvImage->width -
                                       filter->cvTemplateImage->width + 1,
                                       filter->cvImage->height - filter->cvTemplateImage->height + 1),
                               IPL_DEPTH_32F, 1);
            if (!filter->cvDistImage) {
                GST_WARNING ("Couldn't create dist image.");
            }
        }
    }
    if (filter->cvTemplateImage && filter->cvDistImage) {
        GstStructure *s;

        gst_template_match_match (filter->cvImage, filter->cvTemplateImage,
                                  filter->cvDistImage, &best_res, &best_pos, filter->method);

        s = gst_structure_new ("template_match",
                               "x", G_TYPE_UINT, best_pos.x,
                               "y", G_TYPE_UINT, best_pos.y,
                               "width", G_TYPE_UINT, filter->cvTemplateImage->width,
                               "height", G_TYPE_UINT, filter->cvTemplateImage->height,
                               "result", G_TYPE_DOUBLE, best_res, NULL);

        m = gst_message_new_element (GST_OBJECT (filter), s);

        if (filter->display) {
            CvPoint corner = best_pos;
            CvScalar color;
            if (filter->method == CV_TM_SQDIFF_NORMED
                    || filter->method == CV_TM_CCORR_NORMED
                    || filter->method == CV_TM_CCOEFF_NORMED) {
                /* Yellow growing redder as match certainty approaches 1.0.  This can
                   only be applied with method == *_NORMED as the other match methods
                   aren't normalized to be in range 0.0 - 1.0 */
                color = CV_RGB (255, 255 - pow (255, best_res), 32);
            } else {
                color = CV_RGB (255, 32, 32);
            }

            buf = gst_buffer_make_writable (buf);

            corner.x += filter->cvTemplateImage->width;
            corner.y += filter->cvTemplateImage->height;
            cvRectangle (filter->cvImage, best_pos, corner, color, 3, 8, 0);
        }

    }
    GST_OBJECT_UNLOCK (filter);

    if (m) {
        gst_element_post_message (GST_ELEMENT (filter), m);
    }
    return gst_pad_push (filter->srcpad, buf);
}
Exemplo n.º 18
0
static GstFlowReturn
gst_decklink_video_src_create (GstPushSrc * bsrc, GstBuffer ** buffer)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  GstFlowReturn flow_ret = GST_FLOW_OK;
  const guint8 *data;
  gsize data_size;
  VideoFrame *vf;
  CaptureFrame *f;
  GstCaps *caps;
  gboolean caps_changed = FALSE;

  g_mutex_lock (&self->lock);
  while (g_queue_is_empty (&self->current_frames) && !self->flushing) {
    g_cond_wait (&self->cond, &self->lock);
  }

  f = (CaptureFrame *) g_queue_pop_head (&self->current_frames);
  g_mutex_unlock (&self->lock);

  if (self->flushing) {
    if (f)
      capture_frame_free (f);
    GST_DEBUG_OBJECT (self, "Flushing");
    return GST_FLOW_FLUSHING;
  }
  // If we're not flushing, we should have a valid frame from the queue
  g_assert (f != NULL);

  g_mutex_lock (&self->lock);
  if (self->caps_mode != f->mode) {
    if (self->mode == GST_DECKLINK_MODE_AUTO) {
      GST_DEBUG_OBJECT (self, "Mode changed from %d to %d", self->caps_mode,
          f->mode);
      caps_changed = TRUE;
      self->caps_mode = f->mode;
    } else {
      g_mutex_unlock (&self->lock);
      GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
          ("Invalid mode in captured frame"),
          ("Mode set to %d but captured %d", self->caps_mode, f->mode));
      capture_frame_free (f);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }
  if (self->caps_format != f->format) {
    if (self->video_format == GST_DECKLINK_VIDEO_FORMAT_AUTO) {
      GST_DEBUG_OBJECT (self, "Format changed from %d to %d", self->caps_format,
          f->format);
      caps_changed = TRUE;
      self->caps_format = f->format;
    } else {
      g_mutex_unlock (&self->lock);
      GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
          ("Invalid pixel format in captured frame"),
          ("Format set to %d but captured %d", self->caps_format, f->format));
      capture_frame_free (f);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

  g_mutex_unlock (&self->lock);
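  /* mode or format changed while in AUTO mode: push the new caps downstream
   * and post a latency message so the pipeline reconfigures */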
  if (caps_changed) {
    caps = gst_decklink_mode_get_caps (f->mode, f->format);
    gst_video_info_from_caps (&self->info, caps);
    gst_base_src_set_caps (GST_BASE_SRC_CAST (bsrc), caps);
    gst_element_post_message (GST_ELEMENT_CAST (self),
        gst_message_new_latency (GST_OBJECT_CAST (self)));
    gst_caps_unref (caps);

  }

  f->frame->GetBytes ((gpointer *) & data);
  data_size = self->info.size;

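  /* wrap the DeckLink frame memory without copying; video_frame_free drops
   * the frame and input references once the GstBuffer is freed */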
  vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

  *buffer =
      gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,
      (gpointer) data, data_size, 0, data_size, vf,
      (GDestroyNotify) video_frame_free);

  vf->frame = f->frame;
  f->frame->AddRef ();
  vf->input = self->input->input;
  vf->input->AddRef ();

  GST_BUFFER_TIMESTAMP (*buffer) = f->capture_time;
  GST_BUFFER_DURATION (*buffer) = f->capture_duration;
  gst_buffer_add_video_time_code_meta (*buffer, f->tc);

  GST_DEBUG_OBJECT (self,
      "Outputting buffer %p with timestamp %" GST_TIME_FORMAT " and duration %"
      GST_TIME_FORMAT, *buffer, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));

  capture_frame_free (f);

  return flow_ret;
}
Exemplo n.º 19
0
static GstBuffer *
gst_rtp_dtmf_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{

  GstRtpDTMFDepay *rtpdtmfdepay = NULL;
  GstBuffer *outbuf = NULL;
  gint payload_len;
  guint8 *payload = NULL;
  guint32 timestamp;
  GstRTPDTMFPayload dtmf_payload;
  gboolean marker;
  GstStructure *structure = NULL;
  GstMessage *dtmf_message = NULL;

  rtpdtmfdepay = GST_RTP_DTMF_DEPAY (depayload);

  if (!gst_rtp_buffer_validate (buf))
    goto bad_packet;

  payload_len = gst_rtp_buffer_get_payload_len (buf);
  payload = gst_rtp_buffer_get_payload (buf);

  if (payload_len != sizeof (GstRTPDTMFPayload))
    goto bad_packet;

  memcpy (&dtmf_payload, payload, sizeof (GstRTPDTMFPayload));

  if (dtmf_payload.event > MAX_EVENT)
    goto bad_packet;


  marker = gst_rtp_buffer_get_marker (buf);

  timestamp = gst_rtp_buffer_get_timestamp (buf);

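  /* the telephone-event duration field is big-endian and expressed in
   * clock-rate units */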
  dtmf_payload.duration = g_ntohs (dtmf_payload.duration);

  /* clip to whole units of unit_time */
  if (rtpdtmfdepay->unit_time) {
    guint unit_time_clock =
        (rtpdtmfdepay->unit_time * depayload->clock_rate) / 1000;
    if (dtmf_payload.duration % unit_time_clock) {
      /* Make sure we don't overflow the duration */
      if (dtmf_payload.duration < G_MAXUINT16 - unit_time_clock)
        dtmf_payload.duration += unit_time_clock -
            (dtmf_payload.duration % unit_time_clock);
      else
        dtmf_payload.duration -= dtmf_payload.duration % unit_time_clock;
    }
  }

  /* clip to max duration */
  if (rtpdtmfdepay->max_duration) {
    guint max_duration_clock =
        (rtpdtmfdepay->max_duration * depayload->clock_rate) / 1000;

    if (max_duration_clock < G_MAXUINT16 &&
        dtmf_payload.duration > max_duration_clock)
      dtmf_payload.duration = max_duration_clock;
  }

  GST_DEBUG_OBJECT (depayload, "Received new RTP DTMF packet : "
      "marker=%d - timestamp=%u - event=%d - duration=%d",
      marker, timestamp, dtmf_payload.event, dtmf_payload.duration);

  GST_DEBUG_OBJECT (depayload,
      "Previous information : timestamp=%u - duration=%d",
      rtpdtmfdepay->previous_ts, rtpdtmfdepay->previous_duration);

  /* First packet */
  if (marker || rtpdtmfdepay->previous_ts != timestamp) {
    rtpdtmfdepay->sample = 0;
    rtpdtmfdepay->previous_ts = timestamp;
    rtpdtmfdepay->previous_duration = dtmf_payload.duration;
    rtpdtmfdepay->first_gst_ts = GST_BUFFER_TIMESTAMP (buf);

    structure = gst_structure_new ("dtmf-event",
        "number", G_TYPE_INT, dtmf_payload.event,
        "volume", G_TYPE_INT, dtmf_payload.volume,
        "type", G_TYPE_INT, 1, "method", G_TYPE_INT, 1, NULL);
    if (structure) {
      dtmf_message =
          gst_message_new_element (GST_OBJECT (depayload), structure);
      if (dtmf_message) {
        if (!gst_element_post_message (GST_ELEMENT (depayload), dtmf_message)) {
          GST_ERROR_OBJECT (depayload,
              "Unable to send dtmf-event message to bus");
        }
      } else {
        GST_ERROR_OBJECT (depayload, "Unable to create dtmf-event message");
      }
    } else {
      GST_ERROR_OBJECT (depayload, "Unable to create dtmf-event structure");
    }
  } else {
    guint16 duration = dtmf_payload.duration;
    dtmf_payload.duration -= rtpdtmfdepay->previous_duration;
    /* If late buffer, ignore */
    if (duration > rtpdtmfdepay->previous_duration)
      rtpdtmfdepay->previous_duration = duration;
  }

  GST_DEBUG_OBJECT (depayload, "new previous duration : %d - new duration : %d"
      " - diff  : %d - clock rate : %d - timestamp : %llu",
      rtpdtmfdepay->previous_duration, dtmf_payload.duration,
      (rtpdtmfdepay->previous_duration - dtmf_payload.duration),
      depayload->clock_rate, GST_BUFFER_TIMESTAMP (buf));

  /* If late or duplicate packet (like the redundant end packet). Ignore */
  if (dtmf_payload.duration > 0) {
    outbuf = gst_buffer_new ();
    gst_dtmf_src_generate_tone (rtpdtmfdepay, dtmf_payload, outbuf);


    GST_BUFFER_TIMESTAMP (outbuf) = rtpdtmfdepay->first_gst_ts +
        (rtpdtmfdepay->previous_duration - dtmf_payload.duration) *
        GST_SECOND / depayload->clock_rate;
    GST_BUFFER_OFFSET (outbuf) =
        (rtpdtmfdepay->previous_duration - dtmf_payload.duration) *
        GST_SECOND / depayload->clock_rate;
    GST_BUFFER_OFFSET_END (outbuf) = rtpdtmfdepay->previous_duration *
        GST_SECOND / depayload->clock_rate;

    GST_DEBUG_OBJECT (depayload, "timestamp : %llu - time %" GST_TIME_FORMAT,
        GST_BUFFER_TIMESTAMP (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  }

  return outbuf;


bad_packet:
  GST_ELEMENT_WARNING (rtpdtmfdepay, STREAM, DECODE,
      ("Packet did not validate"), (NULL));
  return NULL;
}
Exemplo n.º 20
0
static void
gst_timidity_loop (GstPad * sinkpad)
{
  GstTimidity *timidity = GST_TIMIDITY (GST_PAD_PARENT (sinkpad));
  GstBuffer *out;
  GstFlowReturn ret;

  if (timidity->mididata_size == 0) {
    if (!gst_timidity_get_upstream_size (timidity, &timidity->mididata_size)) {
      GST_ELEMENT_ERROR (timidity, STREAM, DECODE, (NULL),
          ("Unable to get song length"));
      goto paused;
    }

    if (timidity->mididata)
      g_free (timidity->mididata);

    timidity->mididata = g_malloc (timidity->mididata_size);
    timidity->mididata_offset = 0;
    return;
  }

  if (timidity->mididata_offset < timidity->mididata_size) {
    GstBuffer *buffer;
    gint64 size;

    GST_DEBUG_OBJECT (timidity, "loading song");

    ret =
        gst_pad_pull_range (timidity->sinkpad, timidity->mididata_offset,
        -1, &buffer);
    if (ret != GST_FLOW_OK) {
      GST_ELEMENT_ERROR (timidity, STREAM, DECODE, (NULL),
          ("Unable to load song"));
      goto paused;
    }

    size = timidity->mididata_size - timidity->mididata_offset;
    if (GST_BUFFER_SIZE (buffer) < size)
      size = GST_BUFFER_SIZE (buffer);

    memmove (timidity->mididata + timidity->mididata_offset,
        GST_BUFFER_DATA (buffer), size);
    gst_buffer_unref (buffer);

    timidity->mididata_offset += size;
    GST_DEBUG_OBJECT (timidity, "Song loaded");
    return;
  }

  if (!timidity->song) {
    MidIStream *stream;
    GstTagList *tags = NULL;
    gchar *text;

    GST_DEBUG_OBJECT (timidity, "Parsing song");

    stream =
        mid_istream_open_mem (timidity->mididata, timidity->mididata_size, 0);

    timidity->song = mid_song_load (stream, timidity->song_options);
    mid_istream_close (stream);

    if (!timidity->song) {
      GST_ELEMENT_ERROR (timidity, STREAM, DECODE, (NULL),
          ("Unable to parse midi"));
      goto paused;
    }

    mid_song_start (timidity->song);
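    /* total length in output frames: song length in ms, scaled to ns via
     * GST_MSECOND, divided by the duration of one output frame */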
    timidity->o_len = (GST_MSECOND *
        (GstClockTime) mid_song_get_total_time (timidity->song)) /
        timidity->time_per_frame;
    gst_segment_set_newsegment (timidity->o_segment, FALSE, 1.0,
        GST_FORMAT_DEFAULT, 0, GST_CLOCK_TIME_NONE, 0);


    gst_pad_push_event (timidity->srcpad,
        gst_timidity_get_new_segment_event (timidity, GST_FORMAT_TIME, FALSE));

    /* extract tags */
    text = mid_song_get_meta (timidity->song, MID_SONG_TEXT);
    if (text) {
      tags = gst_tag_list_new ();
      gst_tag_list_add (tags, GST_TAG_MERGE_APPEND, GST_TAG_TITLE, text, NULL);

      //g_free (text);
    }

    text = mid_song_get_meta (timidity->song, MID_SONG_COPYRIGHT);
    if (text) {
      if (tags == NULL)
        tags = gst_tag_list_new ();
      gst_tag_list_add (tags, GST_TAG_MERGE_APPEND,
          GST_TAG_COPYRIGHT, text, NULL);

      //g_free (text);
    }

    if (tags) {
      gst_element_found_tags (GST_ELEMENT (timidity), tags);
    }

    GST_DEBUG_OBJECT (timidity, "Parsing song done");
    return;
  }

  if (timidity->o_segment_changed) {
    GstSegment *segment = gst_timidity_get_segment (timidity, GST_FORMAT_TIME,
        !timidity->o_new_segment);

    GST_LOG_OBJECT (timidity,
        "sending newsegment from %" GST_TIME_FORMAT "-%" GST_TIME_FORMAT
        ", pos=%" GST_TIME_FORMAT, GST_TIME_ARGS ((guint64) segment->start),
        GST_TIME_ARGS ((guint64) segment->stop),
        GST_TIME_ARGS ((guint64) segment->time));

    if (timidity->o_segment->flags & GST_SEEK_FLAG_SEGMENT) {
      gst_element_post_message (GST_ELEMENT (timidity),
          gst_message_new_segment_start (GST_OBJECT (timidity),
              segment->format, segment->start));
    }

    gst_segment_free (segment);
    timidity->o_segment_changed = FALSE;
    return;
  }

  if (timidity->o_seek) {
    /* perform a seek internally */
    timidity->o_segment->last_stop = timidity->o_segment->time;
    mid_song_seek (timidity->song,
        (timidity->o_segment->last_stop * timidity->time_per_frame) /
        GST_MSECOND);
  }

  out = gst_timidity_get_buffer (timidity);
  if (!out) {
    GST_LOG_OBJECT (timidity, "Song ended, generating eos");
    gst_pad_push_event (timidity->srcpad, gst_event_new_eos ());
    timidity->o_seek = FALSE;
    goto paused;
  }

  if (timidity->o_seek) {
    GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DISCONT);
    timidity->o_seek = FALSE;
  }

  gst_buffer_set_caps (out, timidity->out_caps);
  ret = gst_pad_push (timidity->srcpad, out);

  if (GST_FLOW_IS_FATAL (ret) || ret == GST_FLOW_NOT_LINKED)
    goto error;

  return;

paused:
  {
    GST_DEBUG_OBJECT (timidity, "pausing task");
    gst_pad_pause_task (timidity->sinkpad);
    return;
  }
error:
  {
    GST_ELEMENT_ERROR (timidity, STREAM, FAILED,
        ("Internal data stream error"),
        ("Streaming stopped, reason %s", gst_flow_get_name (ret)));
    gst_pad_push_event (timidity->srcpad, gst_event_new_eos ());
    goto paused;
  }
}
Exemplo n.º 21
0
static void
demux_pad_added (GstElement * element, GstPad * pad, RsnDvdBin * dvdbin)
{
  gboolean skip_mq = FALSE;
  GstPad *mq_pad = NULL;
  GstPad *dest_pad = NULL;
  GstCaps *caps;
  GstStructure *s;

  GST_DEBUG_OBJECT (dvdbin, "New pad: %" GST_PTR_FORMAT, pad);

  caps = gst_pad_get_caps (pad);
  if (caps == NULL) {
    GST_WARNING_OBJECT (dvdbin, "NULL caps from pad %" GST_PTR_FORMAT, pad);
    return;
  }
  if (!gst_caps_is_fixed (caps)) {
    GST_WARNING_OBJECT (dvdbin, "Unfixed caps %" GST_PTR_FORMAT
        " on pad %" GST_PTR_FORMAT, caps, pad);
    gst_caps_unref (caps);
    return;
  }

  GST_DEBUG_OBJECT (dvdbin,
      "Pad %" GST_PTR_FORMAT " has caps: %" GST_PTR_FORMAT, pad, caps);

  s = gst_caps_get_structure (caps, 0);
  g_return_if_fail (s != NULL);

  if (can_sink_caps (dvdbin->pieces[DVD_ELEM_VIDDEC], caps)) {
    dest_pad =
        gst_element_get_static_pad (dvdbin->pieces[DVD_ELEM_VIDDEC], "sink");
  } else if (g_str_equal (gst_structure_get_name (s), "video/x-dvd-subpicture")) {
    dest_pad =
        gst_element_get_request_pad (dvdbin->pieces[DVD_ELEM_SPU_SELECT],
        "sink%d");
    skip_mq = TRUE;
  } else if (can_sink_caps (dvdbin->pieces[DVD_ELEM_AUDDEC], caps)) {
    GST_LOG_OBJECT (dvdbin, "Found audio pad w/ caps %" GST_PTR_FORMAT, caps);
    dest_pad =
        gst_element_get_request_pad (dvdbin->pieces[DVD_ELEM_AUD_SELECT],
        "sink%d");
  } else {
    GstStructure *s;

    GST_DEBUG_OBJECT (dvdbin, "Ignoring unusable pad w/ caps %" GST_PTR_FORMAT,
        caps);
    gst_element_post_message (GST_ELEMENT_CAST (dvdbin),
        gst_missing_decoder_message_new (GST_ELEMENT_CAST (dvdbin), caps));

    s = gst_caps_get_structure (caps, 0);
    if (g_str_has_prefix ("video/", gst_structure_get_name (s))) {
      GST_ELEMENT_ERROR (dvdbin, STREAM, CODEC_NOT_FOUND, (NULL),
          ("No MPEG video decoder found"));
    } else {
      GST_ELEMENT_WARNING (dvdbin, STREAM, CODEC_NOT_FOUND, (NULL),
          ("No decoder found for this stream"));
    }
  }

  gst_caps_unref (caps);

  if (dest_pad == NULL) {
    GST_DEBUG_OBJECT (dvdbin, "Don't know how to handle pad. Ignoring");
    return;
  }

  if (skip_mq) {
    mq_pad = gst_object_ref (pad);
  } else {
    mq_pad = connect_thru_mq (dvdbin, pad);
    if (mq_pad == NULL)
      goto failed;
    GST_DEBUG_OBJECT (dvdbin, "Linking new pad %" GST_PTR_FORMAT
        " through multiqueue to %" GST_PTR_FORMAT, pad, dest_pad);
  }

  gst_pad_link (mq_pad, dest_pad);

  gst_object_unref (mq_pad);
  gst_object_unref (dest_pad);

  return;
failed:
  GST_ELEMENT_ERROR (dvdbin, CORE, FAILED, (NULL),
      ("Failed to handle new demuxer pad %s", GST_PAD_NAME (pad)));
  if (mq_pad)
    gst_object_unref (mq_pad);
  if (dest_pad)
    gst_object_unref (dest_pad);
  return;
}
Exemplo n.º 22
0
static GstFlowReturn
gst_video_rate_transform_ip (GstBaseTransform * trans, GstBuffer * buffer)
{
  GstVideoRate *videorate;
  GstFlowReturn res = GST_BASE_TRANSFORM_FLOW_DROPPED;
  GstClockTime intime, in_ts, in_dur;
  GstClockTime avg_period;
  gboolean skip = FALSE;

  videorate = GST_VIDEO_RATE (trans);

  /* make sure the denominators are not 0 */
  if (videorate->from_rate_denominator == 0 ||
      videorate->to_rate_denominator == 0)
    goto not_negotiated;

  GST_OBJECT_LOCK (videorate);
  avg_period = videorate->average_period_set;
  GST_OBJECT_UNLOCK (videorate);

  /* MT-safe switching between modes */
  if (G_UNLIKELY (avg_period != videorate->average_period)) {
    gboolean switch_mode = (avg_period == 0 || videorate->average_period == 0);
    videorate->average_period = avg_period;
    videorate->last_ts = GST_CLOCK_TIME_NONE;

    if (switch_mode) {
      if (avg_period) {
        /* enabling average mode */
        videorate->average = 0;
        /* make sure no cached buffers from regular mode are left */
        gst_video_rate_swap_prev (videorate, NULL, 0);
      } else {
        /* enable regular mode */
        videorate->next_ts = GST_CLOCK_TIME_NONE;
        skip = TRUE;
      }

      /* max averaging mode has no latency, normal mode does */
      gst_element_post_message (GST_ELEMENT (videorate),
          gst_message_new_latency (GST_OBJECT (videorate)));
    }
  }

  if (videorate->average_period > 0)
    return gst_video_rate_trans_ip_max_avg (videorate, buffer);

  in_ts = GST_BUFFER_TIMESTAMP (buffer);
  in_dur = GST_BUFFER_DURATION (buffer);

  if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE)) {
    in_ts = videorate->last_ts;
    if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE))
      goto invalid_buffer;
  }

  /* get the time of the next expected buffer timestamp, we use this when the
   * next buffer has -1 as a timestamp */
  videorate->last_ts = in_ts;
  if (in_dur != GST_CLOCK_TIME_NONE)
    videorate->last_ts += in_dur;

  GST_DEBUG_OBJECT (videorate, "got buffer with timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (in_ts));

  /* the input time is the time in the segment + all previously accumulated
   * segments */
  intime = in_ts + videorate->segment.base;

  /* we need to have two buffers to compare */
  if (videorate->prevbuf == NULL) {
    gst_video_rate_swap_prev (videorate, buffer, intime);
    videorate->in++;
    if (!GST_CLOCK_TIME_IS_VALID (videorate->next_ts)) {
      /* new buffer, we expect to output a buffer that matches the first
       * timestamp in the segment */
      if (videorate->skip_to_first || skip) {
        videorate->next_ts = intime;
        videorate->base_ts = in_ts - videorate->segment.start;
        videorate->out_frame_count = 0;
      } else {
        videorate->next_ts = videorate->segment.start + videorate->segment.base;
      }
    }
  } else {
    GstClockTime prevtime;
    gint count = 0;
    gint64 diff1, diff2;

    prevtime = videorate->prev_ts;

    GST_LOG_OBJECT (videorate,
        "BEGINNING prev buf %" GST_TIME_FORMAT " new buf %" GST_TIME_FORMAT
        " outgoing ts %" GST_TIME_FORMAT, GST_TIME_ARGS (prevtime),
        GST_TIME_ARGS (intime), GST_TIME_ARGS (videorate->next_ts));

    videorate->in++;

    /* drop new buffer if it's before previous one */
    if (intime < prevtime) {
      GST_DEBUG_OBJECT (videorate,
          "The new buffer (%" GST_TIME_FORMAT
          ") is before the previous buffer (%"
          GST_TIME_FORMAT "). Dropping new buffer.",
          GST_TIME_ARGS (intime), GST_TIME_ARGS (prevtime));
      videorate->drop++;
      if (!videorate->silent)
        gst_video_rate_notify_drop (videorate);
      goto done;
    }

    /* got 2 buffers, see which one is the best */
    do {

      diff1 = prevtime - videorate->next_ts;
      diff2 = intime - videorate->next_ts;

      /* take absolute values, beware: abs and ABS don't work for gint64 */
      if (diff1 < 0)
        diff1 = -diff1;
      if (diff2 < 0)
        diff2 = -diff2;

      GST_LOG_OBJECT (videorate,
          "diff with prev %" GST_TIME_FORMAT " diff with new %"
          GST_TIME_FORMAT " outgoing ts %" GST_TIME_FORMAT,
          GST_TIME_ARGS (diff1), GST_TIME_ARGS (diff2),
          GST_TIME_ARGS (videorate->next_ts));

      /* output first one when its the best */
      if (diff1 <= diff2) {
        GstFlowReturn r;
        count++;

        /* on error the _flush function posted a warning already */
        if ((r = gst_video_rate_flush_prev (videorate,
                    count > 1)) != GST_FLOW_OK) {
          res = r;
          goto done;
        }
      }

      /* Do not produce any dups. We can exit loop now */
      if (videorate->drop_only)
        break;
      /* continue while the first one was the best, if they were equal avoid
       * going into an infinite loop */
    }
    while (diff1 < diff2);

    /* if we output the first buffer more than once, we have dups */
    if (count > 1) {
      videorate->dup += count - 1;
      if (!videorate->silent)
        gst_video_rate_notify_duplicate (videorate);
    }
    /* if we didn't output the first buffer, we have a drop */
    else if (count == 0) {
      videorate->drop++;

      if (!videorate->silent)
        gst_video_rate_notify_drop (videorate);

      GST_LOG_OBJECT (videorate,
          "new is best, old never used, drop, outgoing ts %"
          GST_TIME_FORMAT, GST_TIME_ARGS (videorate->next_ts));
    }
    GST_LOG_OBJECT (videorate,
        "END, putting new in old, diff1 %" GST_TIME_FORMAT
        ", diff2 %" GST_TIME_FORMAT ", next_ts %" GST_TIME_FORMAT
        ", in %" G_GUINT64_FORMAT ", out %" G_GUINT64_FORMAT ", drop %"
        G_GUINT64_FORMAT ", dup %" G_GUINT64_FORMAT, GST_TIME_ARGS (diff1),
        GST_TIME_ARGS (diff2), GST_TIME_ARGS (videorate->next_ts),
        videorate->in, videorate->out, videorate->drop, videorate->dup);

    /* swap in new one when it's the best */
    gst_video_rate_swap_prev (videorate, buffer, intime);
  }
done:
  return res;

  /* ERRORS */
not_negotiated:
  {
    GST_WARNING_OBJECT (videorate, "no framerate negotiated");
    res = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

invalid_buffer:
  {
    GST_WARNING_OBJECT (videorate,
        "Got buffer with GST_CLOCK_TIME_NONE timestamp, discarding it");
    res = GST_BASE_TRANSFORM_FLOW_DROPPED;
    goto done;
  }
}
Exemplo n.º 23
0
static gboolean
gst_hls_demux_cache_fragments (GstHLSDemux * demux)
{
  gint i;

  /* Start parsing the main playlist */
  gst_m3u8_client_set_current (demux->client, demux->client->main);

  if (gst_m3u8_client_is_live (demux->client)) {
    if (!gst_hls_demux_update_playlist (demux, FALSE)) {
      GST_ERROR_OBJECT (demux, "Could not fetch the main playlist %s",
          demux->client->main->uri);
      return FALSE;
    }
  }

  /* If this playlist is a variant playlist, select the first one
   * and update it */
  if (gst_m3u8_client_has_variant_playlist (demux->client)) {
    GstM3U8 *child = demux->client->main->current_variant->data;
    gst_m3u8_client_set_current (demux->client, child);
    if (!gst_hls_demux_update_playlist (demux, FALSE)) {
      GST_ERROR_OBJECT (demux, "Could not fetch the child playlist %s",
          child->uri);
      return FALSE;
    }
  }

  /* If it's a live source, set the sequence number to the end of the list
   * and subtract 'fragments_cache' so we start that many fragments back */
  if (gst_m3u8_client_is_live (demux->client)) {
    demux->client->sequence += g_list_length (demux->client->current->files);
    if (demux->client->sequence >= demux->fragments_cache)
      demux->client->sequence -= demux->fragments_cache;
    else
      demux->client->sequence = 0;
  }

  /* Cache the first fragments */
  for (i = 0; i < demux->fragments_cache; i++) {
    gst_element_post_message (GST_ELEMENT (demux),
        gst_message_new_buffering (GST_OBJECT (demux),
            100 * i / demux->fragments_cache));
    g_get_current_time (&demux->next_update);
    g_time_val_add (&demux->next_update,
        demux->client->current->targetduration * 1000000);
    if (!gst_hls_demux_get_next_fragment (demux, FALSE)) {
      if (!demux->cancelled)
        GST_ERROR_OBJECT (demux, "Error caching the first fragments");
      return FALSE;
    }
    /* make sure we stop caching fragments if something cancelled it */
    if (demux->cancelled)
      return FALSE;
    gst_hls_demux_switch_playlist (demux);
  }
  gst_element_post_message (GST_ELEMENT (demux),
      gst_message_new_buffering (GST_OBJECT (demux), 100));

  g_get_current_time (&demux->next_update);

  demux->need_cache = FALSE;
  return TRUE;
}
Exemplo n.º 24
0
static void
_substream_codec_changed (FsRtpSubStream *substream,
    FsRtpStream *stream)
{
  GList *substream_item = NULL;
  GList *codeclist = NULL;
  FsRtpSession *session = fs_rtp_stream_get_session (stream, NULL);

  if (!session)
    return;

  FS_RTP_SESSION_LOCK (session);

  if (!substream->codec)
  {
    FS_RTP_SESSION_UNLOCK (session);
    g_object_unref (session);
    return;
  }

  codeclist = g_list_prepend (NULL, fs_codec_copy (substream->codec));

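  /* collect the distinct codecs currently being received on this stream,
   * starting with the codec of the substream that just changed */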
  for (substream_item = stream->substreams;
       substream_item;
       substream_item = g_list_next (substream_item))
  {
    FsRtpSubStream *othersubstream = substream_item->data;

    if (othersubstream != substream)
    {
      if (othersubstream->codec)
      {
        if (fs_codec_are_equal (substream->codec, othersubstream->codec))
          break;

        if (!_codec_list_has_codec (codeclist, othersubstream->codec))
          codeclist = g_list_append (codeclist,
              fs_codec_copy (othersubstream->codec));
      }
    }
  }

  FS_RTP_SESSION_UNLOCK (session);

  if (substream_item == NULL)
  {
    GstElement *conf = NULL;

    g_object_notify (G_OBJECT (stream), "current-recv-codecs");

    g_object_get (session, "conference", &conf, NULL);

    gst_element_post_message (conf,
        gst_message_new_element (GST_OBJECT (conf),
            gst_structure_new ("farstream-recv-codecs-changed",
                "stream", FS_TYPE_STREAM, stream,
                "codecs", FS_TYPE_CODEC_LIST, codeclist,
                NULL)));

    gst_object_unref (conf);
  }

  fs_codec_list_destroy (codeclist);
  g_object_unref (session);
}
Exemplo n.º 25
0
static gboolean
gst_musepackdec_handle_seek_event (GstMusepackDec * dec, GstEvent * event)
{
  GstSeekType start_type, stop_type;
  GstSeekFlags flags;
  GstSegment segment;
  GstFormat format;
  gboolean flush;
  gdouble rate;
  gint64 start, stop;
  gint samplerate;

  gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
      &stop_type, &stop);

  if (format != GST_FORMAT_TIME && format != GST_FORMAT_DEFAULT) {
    GST_DEBUG_OBJECT (dec, "seek failed: only TIME or DEFAULT format allowed");
    return FALSE;
  }

  samplerate = g_atomic_int_get (&dec->rate);

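  /* convert TIME seek positions to sample offsets (DEFAULT format), since
   * musepack seeks by sample (mpc_demux_seek_sample) */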
  if (format == GST_FORMAT_TIME) {
    if (start_type != GST_SEEK_TYPE_NONE)
      start = gst_util_uint64_scale_int (start, samplerate, GST_SECOND);
    if (stop_type != GST_SEEK_TYPE_NONE)
      stop = gst_util_uint64_scale_int (stop, samplerate, GST_SECOND);
  }

  flush = ((flags & GST_SEEK_FLAG_FLUSH) == GST_SEEK_FLAG_FLUSH);

  if (flush)
    gst_pad_push_event (dec->srcpad, gst_event_new_flush_start ());
  else
    gst_pad_pause_task (dec->sinkpad);  /* not _stop_task()? */

  GST_PAD_STREAM_LOCK (dec->sinkpad);

  /* operate on segment copy until we know the seek worked */
  segment = dec->segment;

  gst_segment_do_seek (&segment, rate, GST_FORMAT_DEFAULT,
      flags, start_type, start, stop_type, stop, NULL);

  gst_pad_push_event (dec->sinkpad, gst_event_new_flush_stop (TRUE));

  GST_DEBUG_OBJECT (dec, "segment: [%" G_GINT64_FORMAT "-%" G_GINT64_FORMAT
      "] = [%" GST_TIME_FORMAT "-%" GST_TIME_FORMAT "]",
      segment.start, segment.stop,
      GST_TIME_ARGS (segment.start * GST_SECOND / dec->rate),
      GST_TIME_ARGS (segment.stop * GST_SECOND / dec->rate));

  GST_DEBUG_OBJECT (dec, "performing seek to sample %" G_GINT64_FORMAT,
      segment.start);

  if (segment.start >= segment.duration) {
    GST_WARNING_OBJECT (dec, "seek out of bounds");
    goto failed;
  }
  if (mpc_demux_seek_sample (dec->d, segment.start) != MPC_STATUS_OK)
    goto failed;

  if ((flags & GST_SEEK_FLAG_SEGMENT) == GST_SEEK_FLAG_SEGMENT) {
    GST_DEBUG_OBJECT (dec, "posting SEGMENT_START message");

    gst_element_post_message (GST_ELEMENT (dec),
        gst_message_new_segment_start (GST_OBJECT (dec), GST_FORMAT_TIME,
            gst_util_uint64_scale_int (segment.start, GST_SECOND, dec->rate)));
  }

  if (flush) {
    gst_pad_push_event (dec->srcpad, gst_event_new_flush_stop (TRUE));
  }

  segment.position = segment.start;
  dec->segment = segment;
  gst_musepackdec_send_newsegment (dec);

  GST_DEBUG_OBJECT (dec, "seek successful");

  gst_pad_start_task (dec->sinkpad,
      (GstTaskFunction) gst_musepackdec_loop, dec->sinkpad, NULL);

  GST_PAD_STREAM_UNLOCK (dec->sinkpad);

  return TRUE;

failed:
  {
    GST_WARNING_OBJECT (dec, "seek failed");
    GST_PAD_STREAM_UNLOCK (dec->sinkpad);
    return FALSE;
  }
}
static void
gst_type_find_element_loop (GstPad * pad)
{
  GstTypeFindElement *typefind;
  GstFlowReturn ret = GST_FLOW_OK;

  typefind = GST_TYPE_FIND_ELEMENT (GST_PAD_PARENT (pad));

  if (typefind->need_stream_start) {
    gchar *stream_id;
    GstEvent *event;

    stream_id = gst_pad_create_stream_id (typefind->src,
        GST_ELEMENT_CAST (typefind), NULL);

    GST_DEBUG_OBJECT (typefind, "Pushing STREAM_START");
    event = gst_event_new_stream_start (stream_id);
    gst_event_set_group_id (event, gst_util_group_id_next ());
    gst_pad_push_event (typefind->src, event);

    typefind->need_stream_start = FALSE;
    g_free (stream_id);
  }

  if (typefind->mode == MODE_TYPEFIND) {
    GstPad *peer = NULL;
    GstCaps *found_caps = NULL;
    GstTypeFindProbability probability = GST_TYPE_FIND_NONE;

    GST_DEBUG_OBJECT (typefind, "find type in pull mode");

    GST_OBJECT_LOCK (typefind);
    if (typefind->force_caps) {
      found_caps = gst_caps_ref (typefind->force_caps);
      probability = GST_TYPE_FIND_MAXIMUM;
    }
    GST_OBJECT_UNLOCK (typefind);

    if (!found_caps) {
      peer = gst_pad_get_peer (pad);
      if (peer) {
        gint64 size;
        gchar *ext;

        if (!gst_pad_query_duration (peer, GST_FORMAT_BYTES, &size)) {
          GST_WARNING_OBJECT (typefind, "Could not query upstream length!");
          gst_object_unref (peer);

          ret = GST_FLOW_ERROR;
          goto pause;
        }

        /* if the size is 0, we cannot continue */
        if (size == 0) {
          /* keep message in sync with message in sink event handler */
          GST_ELEMENT_ERROR (typefind, STREAM, TYPE_NOT_FOUND,
              (_("Stream contains no data.")), ("Can't typefind empty stream"));
          gst_object_unref (peer);
          ret = GST_FLOW_ERROR;
          goto pause;
        }
        ext = gst_type_find_get_extension (typefind, pad);

        found_caps =
            gst_type_find_helper_get_range (GST_OBJECT_CAST (peer),
            GST_OBJECT_PARENT (peer),
            (GstTypeFindHelperGetRangeFunction) (GST_PAD_GETRANGEFUNC (peer)),
            (guint64) size, ext, &probability);
        g_free (ext);

        GST_DEBUG ("Found caps %" GST_PTR_FORMAT, found_caps);

        gst_object_unref (peer);
      }
    }

    if (!found_caps || probability < typefind->min_probability) {
      GST_DEBUG ("Trying to guess using extension");
      gst_caps_replace (&found_caps, NULL);
      found_caps =
          gst_type_find_guess_by_extension (typefind, pad, &probability);
    }

    if (!found_caps || probability < typefind->min_probability) {
      GST_ELEMENT_ERROR (typefind, STREAM, TYPE_NOT_FOUND, (NULL), (NULL));
      gst_caps_replace (&found_caps, NULL);
      ret = GST_FLOW_ERROR;
      goto pause;
    }

    GST_DEBUG ("Emiting found caps %" GST_PTR_FORMAT, found_caps);
    gst_type_find_element_emit_have_type (typefind, probability, found_caps);
    typefind->mode = MODE_NORMAL;
    gst_caps_unref (found_caps);
  } else if (typefind->mode == MODE_NORMAL) {
    GstBuffer *outbuf = NULL;

    if (typefind->need_segment) {
      typefind->need_segment = FALSE;
      gst_pad_push_event (typefind->src,
          gst_event_new_segment (&typefind->segment));
    }

    /* Pull 4k blocks and send downstream */
    ret = gst_pad_pull_range (typefind->sink, typefind->offset, 4096, &outbuf);
    if (ret != GST_FLOW_OK)
      goto pause;

    typefind->offset += gst_buffer_get_size (outbuf);

    ret = gst_pad_push (typefind->src, outbuf);
    if (ret != GST_FLOW_OK)
      goto pause;
  } else {
    /* Error out */
    ret = GST_FLOW_ERROR;
    goto pause;
  }

  return;

pause:
  {
    const gchar *reason = gst_flow_get_name (ret);
    gboolean push_eos = FALSE;

    GST_LOG_OBJECT (typefind, "pausing task, reason %s", reason);
    gst_pad_pause_task (typefind->sink);

    if (ret == GST_FLOW_EOS) {
      /* perform EOS logic */

      if (typefind->segment.flags & GST_SEGMENT_FLAG_SEGMENT) {
        gint64 stop;

        /* for segment playback we need to post when (in stream time)
         * we stopped, this is either stop (when set) or the duration. */
        if ((stop = typefind->segment.stop) == -1)
          stop = typefind->offset;

        GST_LOG_OBJECT (typefind, "Sending segment done, at end of segment");
        gst_element_post_message (GST_ELEMENT (typefind),
            gst_message_new_segment_done (GST_OBJECT (typefind),
                GST_FORMAT_BYTES, stop));
        gst_pad_push_event (typefind->src,
            gst_event_new_segment_done (GST_FORMAT_BYTES, stop));
      } else {
        push_eos = TRUE;
      }
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
      /* for fatal errors we post an error message */
      GST_ELEMENT_ERROR (typefind, STREAM, FAILED, (NULL),
          ("stream stopped, reason %s", reason));
      push_eos = TRUE;
    }
    if (push_eos) {
      /* send EOS, and prevent hanging if no streams yet */
      GST_LOG_OBJECT (typefind, "Sending EOS, at end of stream");
      gst_pad_push_event (typefind->src, gst_event_new_eos ());
    }
    return;
  }
}
Exemplo n.º 27
0
static void
gst_aiff_parse_loop (GstPad * pad)
{
  GstFlowReturn ret;
  GstAiffParse *aiff = GST_AIFF_PARSE (GST_PAD_PARENT (pad));

  GST_LOG_OBJECT (aiff, "process data");

  switch (aiff->state) {
    case AIFF_PARSE_START:
      GST_INFO_OBJECT (aiff, "AIFF_PARSE_START");
      if ((ret = gst_aiff_parse_stream_init (aiff)) != GST_FLOW_OK)
        goto pause;

      aiff->state = AIFF_PARSE_HEADER;
      /* fall-through */

    case AIFF_PARSE_HEADER:
      GST_INFO_OBJECT (aiff, "AIFF_PARSE_HEADER");
      if ((ret = gst_aiff_parse_stream_headers (aiff)) != GST_FLOW_OK)
        goto pause;

      aiff->state = AIFF_PARSE_DATA;
      GST_INFO_OBJECT (aiff, "AIFF_PARSE_DATA");
      /* fall-through */

    case AIFF_PARSE_DATA:
      if ((ret = gst_aiff_parse_stream_data (aiff)) != GST_FLOW_OK)
        goto pause;
      break;
    default:
      g_assert_not_reached ();
  }
  return;

  /* ERRORS */
pause:
  {
    const gchar *reason = gst_flow_get_name (ret);

    GST_DEBUG_OBJECT (aiff, "pausing task, reason %s", reason);
    aiff->segment_running = FALSE;
    gst_pad_pause_task (pad);

    if (ret == GST_FLOW_UNEXPECTED) {
      /* perform EOS logic */
      if (aiff->segment.flags & GST_SEEK_FLAG_SEGMENT) {
        GstClockTime stop;

        if ((stop = aiff->segment.stop) == -1)
          stop = aiff->segment.duration;

        gst_element_post_message (GST_ELEMENT_CAST (aiff),
            gst_message_new_segment_done (GST_OBJECT_CAST (aiff),
                aiff->segment.format, stop));
      } else {
        gst_pad_push_event (aiff->srcpad, gst_event_new_eos ());
      }
    } else if (ret < GST_FLOW_UNEXPECTED || ret == GST_FLOW_NOT_LINKED) {
      /* for fatal errors we post an error message, post the error
       * first so the app knows about the error first. */
      GST_ELEMENT_ERROR (aiff, STREAM, FAILED,
          (_("Internal data flow error.")),
          ("streaming task paused, reason %s (%d)", reason, ret));
      gst_pad_push_event (aiff->srcpad, gst_event_new_eos ());
    }
    return;
  }
}
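/* Handles a seek on the typefind element while it runs its own pull-mode
 * streaming task. Only GST_FORMAT_BYTES is accepted: the request is applied
 * to a copy of the segment, streaming is stopped (flush events for flushing
 * seeks, otherwise a plain task pause), the new segment is committed under
 * the stream lock, a segment-start message is posted for segment seeks, and
 * the streaming task is restarted. */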
static gboolean
gst_type_find_element_seek (GstTypeFindElement * typefind, GstEvent * event)
{
  GstSeekFlags flags;
  GstSeekType start_type, stop_type;
  GstFormat format;
  gboolean flush;
  gdouble rate;
  gint64 start, stop;
  GstSegment seeksegment = { 0, };

  gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
      &stop_type, &stop);

  /* we can only seek on bytes */
  if (format != GST_FORMAT_BYTES) {
    GST_DEBUG_OBJECT (typefind, "Can only seek on BYTES");
    return FALSE;
  }

  /* copy segment, we need this because we still need the old
   * segment when we close the current segment. */
  memcpy (&seeksegment, &typefind->segment, sizeof (GstSegment));

  GST_DEBUG_OBJECT (typefind, "configuring seek");
  gst_segment_do_seek (&seeksegment, rate, format, flags,
      start_type, start, stop_type, stop, NULL);

  flush = ! !(flags & GST_SEEK_FLAG_FLUSH);

  GST_DEBUG_OBJECT (typefind, "New segment %" GST_SEGMENT_FORMAT, &seeksegment);

  if (flush) {
    GST_DEBUG_OBJECT (typefind, "Starting flush");
    gst_pad_push_event (typefind->sink, gst_event_new_flush_start ());
    gst_pad_push_event (typefind->src, gst_event_new_flush_start ());
  } else {
    GST_DEBUG_OBJECT (typefind, "Non-flushing seek, pausing task");
    gst_pad_pause_task (typefind->sink);
  }

  /* now grab the stream lock so that streaming cannot continue, for
   * non flushing seeks when the element is in PAUSED this could block
   * forever. */
  GST_DEBUG_OBJECT (typefind, "Waiting for streaming to stop");
  GST_PAD_STREAM_LOCK (typefind->sink);

  if (flush) {
    GST_DEBUG_OBJECT (typefind, "Stopping flush");
    gst_pad_push_event (typefind->sink, gst_event_new_flush_stop (TRUE));
    gst_pad_push_event (typefind->src, gst_event_new_flush_stop (TRUE));
  }

  /* now update the real segment info */
  GST_DEBUG_OBJECT (typefind, "Committing new seek segment");
  memcpy (&typefind->segment, &seeksegment, sizeof (GstSegment));
  typefind->offset = typefind->segment.start;

  /* notify start of new segment */
  if (typefind->segment.flags & GST_SEGMENT_FLAG_SEGMENT) {
    GstMessage *msg;

    msg = gst_message_new_segment_start (GST_OBJECT (typefind),
        GST_FORMAT_BYTES, typefind->segment.start);
    gst_element_post_message (GST_ELEMENT (typefind), msg);
  }

  typefind->need_segment = TRUE;

  /* restart our task since it might have been stopped when we did the
   * flush. */
  gst_pad_start_task (typefind->sink,
      (GstTaskFunction) gst_type_find_element_loop, typefind->sink, NULL);

  /* streaming can continue now */
  GST_PAD_STREAM_UNLOCK (typefind->sink);

  return TRUE;
}
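For reference, a byte-format segment seek like the one handled above could be issued from application code roughly as follows. This is a minimal sketch, not part of the original example; the `demux_or_typefind` element handle and the byte range are assumptions for illustration:

/* Illustrative only: request bytes 0..4096 as a flushing segment seek.
 * gst_element_send_event() takes ownership of the event. */
GstEvent *seek = gst_event_new_seek (1.0, GST_FORMAT_BYTES,
    GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_SEGMENT,
    GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, 4096);
if (!gst_element_send_event (demux_or_typefind, seek))
  GST_WARNING ("seek was not handled");

/* Installs a new FIR kernel on the filter. The element takes ownership of
 * the kernel array, drains buffered history when required, recomputes the
 * frequency response, re-selects the processing function for the current
 * format and channel count, and posts a latency message when the reported
 * latency changes. */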
void
gst_audio_fx_base_fir_filter_set_kernel (GstAudioFXBaseFIRFilter * self,
    gdouble * kernel, guint kernel_length, guint64 latency,
    const GstAudioInfo * info)
{
  gboolean latency_changed;
  GstAudioFormat format;
  gint channels;

  g_return_if_fail (kernel != NULL);
  g_return_if_fail (self != NULL);

  g_mutex_lock (&self->lock);
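  /* Everything below runs with the filter's lock held. The latency counts
   * as changed when the configured value differs or when the kernel length
   * crosses FFT_THRESHOLD in either direction, i.e. when the filter would
   * switch between time-domain and FFT-based convolution. */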

  latency_changed = (self->latency != latency
      || (!self->low_latency && self->kernel_length < FFT_THRESHOLD
          && kernel_length >= FFT_THRESHOLD)
      || (!self->low_latency && self->kernel_length >= FFT_THRESHOLD
          && kernel_length < FFT_THRESHOLD));

  /* FIXME: If the latency changes, the buffer size changes too and we
   * have to drain in any case until this is fixed in the future */
  if (self->buffer && (!self->drain_on_changes || latency_changed)) {
    gst_audio_fx_base_fir_filter_push_residue (self);
    self->start_ts = GST_CLOCK_TIME_NONE;
    self->start_off = GST_BUFFER_OFFSET_NONE;
    self->nsamples_out = 0;
    self->nsamples_in = 0;
    self->buffer_fill = 0;
  }

  g_free (self->kernel);
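  /* The filter takes ownership of the new kernel (installed below); the
   * history buffer is dropped as well unless drain-on-changes is set and
   * the latency is unchanged. */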
  if (!self->drain_on_changes || latency_changed) {
    g_free (self->buffer);
    self->buffer = NULL;
    self->buffer_fill = 0;
    self->buffer_length = 0;
  }

  self->kernel = kernel;
  self->kernel_length = kernel_length;

  if (info) {
    format = GST_AUDIO_INFO_FORMAT (info);
    channels = GST_AUDIO_INFO_CHANNELS (info);
  } else {
    format = GST_AUDIO_FILTER_FORMAT (self);
    channels = GST_AUDIO_FILTER_CHANNELS (self);
  }

  gst_audio_fx_base_fir_filter_calculate_frequency_response (self);
  gst_audio_fx_base_fir_filter_select_process_function (self, format, channels);

  if (latency_changed) {
    self->latency = latency;
    gst_element_post_message (GST_ELEMENT (self),
        gst_message_new_latency (GST_OBJECT (self)));
  }

  g_mutex_unlock (&self->lock);
}
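A hedged usage sketch for the function above: build a simple 16-tap moving-average kernel and install it. The `filter` instance and the chosen latency are assumptions for illustration; the filter takes ownership of the allocated array:

/* Illustrative sketch only, not part of the original example. */
guint i, len = 16;
gdouble *taps = g_new (gdouble, len);

for (i = 0; i < len; i++)
  taps[i] = 1.0 / len;          /* simple moving-average FIR */

/* NULL info means "use the currently configured format/channels",
 * matching the branch on `info` in the function above. */
gst_audio_fx_base_fir_filter_set_kernel (filter, taps, len, len / 2, NULL);

/* State change handler for the DeckLink video sink. On the way to PAUSED it
 * resets the internal clock bookkeeping and advertises the hardware clock to
 * the pipeline; on the way to PLAYING it slaves that clock to the selected
 * pipeline clock (and fails if there is none); leaving PAUSED it posts
 * clock-lost, unslaves and resets the clock, and stops the output. */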
static GstStateChangeReturn
gst_decklink_video_sink_change_state (GstElement * element,
    GstStateChange transition)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (element);
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
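      /* Reset the internal clock bookkeeping and advertise the hardware
       * output clock to the pipeline/application. */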
      g_mutex_lock (&self->output->lock);
      self->output->clock_start_time = GST_CLOCK_TIME_NONE;
      self->output->clock_epoch += self->output->clock_last_time;
      self->output->clock_last_time = 0;
      self->output->clock_offset = 0;
      g_mutex_unlock (&self->output->lock);
      gst_element_post_message (element,
          gst_message_new_clock_provide (GST_OBJECT_CAST (element),
              self->output->clock, TRUE));
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:{
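      /* Slave the DeckLink output clock to the selected pipeline clock,
       * unless the pipeline clock is already one of the clocks provided by
       * this output. */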
      GstClock *clock, *audio_clock;

      clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
      if (clock) {
        audio_clock = gst_decklink_output_get_audio_clock (self->output);
        if (clock != self->output->clock && clock != audio_clock) {
          gst_clock_set_master (self->output->clock, clock);
        }
        gst_object_unref (clock);
        if (audio_clock)
          gst_object_unref (audio_clock);
      } else {
        GST_ELEMENT_ERROR (self, STREAM, FAILED,
            (NULL), ("Need a clock to go to PLAYING"));
        ret = GST_STATE_CHANGE_FAILURE;
      }

      break;
    }
    default:
      break;
  }

  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;
  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
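      /* The hardware clock becomes unusable: announce clock-lost, unslave
       * and reset it, and shut the output down. */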
      gst_element_post_message (element,
          gst_message_new_clock_lost (GST_OBJECT_CAST (element),
              self->output->clock));
      gst_clock_set_master (self->output->clock, NULL);
      // Reset calibration to make the clock reusable next time we use it
      gst_clock_set_calibration (self->output->clock, 0, 0, 1, 1);
      g_mutex_lock (&self->output->lock);
      self->output->clock_start_time = GST_CLOCK_TIME_NONE;
      self->output->clock_epoch += self->output->clock_last_time;
      self->output->clock_last_time = 0;
      self->output->clock_offset = 0;
      g_mutex_unlock (&self->output->lock);
      gst_decklink_video_sink_stop (self);
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:{
      if (gst_decklink_video_sink_stop_scheduled_playback (self) ==
          GST_STATE_CHANGE_FAILURE)
        ret = GST_STATE_CHANGE_FAILURE;
      break;
    }
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:{
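      /* The base class is now PLAYING: kick off scheduled playback on the
       * DeckLink output, if the callback has been set. */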
      g_mutex_lock (&self->output->lock);
      if (self->output->start_scheduled_playback)
        self->output->start_scheduled_playback (self->output->videosink);
      g_mutex_unlock (&self->output->lock);
      break;
    }
    default:
      break;
  }

  return ret;
}