Example #1
/*****************************************************************************
 * OpenDecoder: probe the decoder and return score
 *****************************************************************************/
static int OpenDecoder( vlc_object_t *p_this )
{
    decoder_t *p_dec = ( decoder_t* )p_this;
    decoder_sys_t *p_sys;
    GstStateChangeReturn i_ret;
    gboolean b_ret;
    sink_src_caps_t caps = { NULL, NULL };
    GstStructure *p_str;
    GstAppSrcCallbacks cb;
    int i_rval = VLC_SUCCESS;
    GList *p_list;
    bool dbin;

#define VLC_GST_CHECK( r, v, s, t ) \
    { if( r == v ){ msg_Err( p_dec, s ); i_rval = t; goto fail; } }

    if( !vlc_gst_init( ))
    {
        msg_Err( p_dec, "failed to register vlcvideosink" );
        return VLC_EGENERIC;
    }

    p_str = vlc_to_gst_fmt( &p_dec->fmt_in );
    if( !p_str )
        return VLC_EGENERIC;

    /* Allocate the memory needed to store the decoder's structure */
    p_sys = p_dec->p_sys = calloc( 1, sizeof( *p_sys ) );
    if( p_sys == NULL )
    {
        gst_structure_free( p_str );
        return VLC_ENOMEM;
    }

    dbin = var_CreateGetBool( p_dec, "use-decodebin" );
    msg_Dbg( p_dec, "Using decodebin? %s", dbin ? "yes" : "no" );

    caps.p_sinkcaps = gst_caps_new_empty( );
    gst_caps_append_structure( caps.p_sinkcaps, p_str );
    /* Currently supports only system memory raw output format */
    caps.p_srccaps = gst_caps_new_empty_simple( "video/x-raw" );

    /* Get the list of all the available gstreamer decoders */
    p_list = gst_element_factory_list_get_elements(
            GST_ELEMENT_FACTORY_TYPE_DECODER, GST_RANK_MARGINAL );
    VLC_GST_CHECK( p_list, NULL, "no decoder list found", VLC_ENOMOD );
    if( !dbin )
    {
        GList *p_l;
        /* Sort them as per ranks */
        p_list = g_list_sort( p_list, gst_plugin_feature_rank_compare_func );
        VLC_GST_CHECK( p_list, NULL, "failed to sort decoders list",
                VLC_ENOMOD );
        p_l = g_list_find_custom( p_list, &caps, find_decoder_func );
        VLC_GST_CHECK( p_l, NULL, "no suitable decoder found",
                VLC_ENOMOD );
        /* create the decoder with highest rank */
        p_sys->p_decode_in = gst_element_factory_create(
                ( GstElementFactory* )p_l->data, NULL );
        VLC_GST_CHECK( p_sys->p_decode_in, NULL,
                "failed to create decoder", VLC_ENOMOD );
    }
    else
    {
        GList *p_l;
        /* Just check if any suitable decoder exists, rest will be
         * handled by decodebin */
        p_l = g_list_find_custom( p_list, &caps, find_decoder_func );
        VLC_GST_CHECK( p_l, NULL, "no suitable decoder found",
                VLC_ENOMOD );
    }
    gst_plugin_feature_list_free( p_list );
    p_list = NULL;
    gst_caps_unref( caps.p_srccaps );
    caps.p_srccaps = NULL;

    p_sys->b_prerolled = false;
    p_sys->b_running = false;

    /* Queue: GStreamer thread will dump buffers into this queue,
     * DecodeBlock() will pop out the buffers from the queue */
    p_sys->p_que = gst_atomic_queue_new( 0 );
    VLC_GST_CHECK( p_sys->p_que, NULL, "failed to create queue",
            VLC_ENOMEM );

    p_sys->p_decode_src = gst_element_factory_make( "appsrc", NULL );
    VLC_GST_CHECK( p_sys->p_decode_src, NULL, "appsrc not found",
            VLC_ENOMOD );
    g_object_set( G_OBJECT( p_sys->p_decode_src ), "caps", caps.p_sinkcaps,
            "emit-signals", TRUE, "format", GST_FORMAT_BYTES,
            "stream-type", GST_APP_STREAM_TYPE_SEEKABLE,
            /* Make DecodeBlock() block on appsrc with a max queue size of
             * 1 byte. This couples push_buffer() tightly to the buffer flow
             * from appsrc -> decoder: push_buffer() only returns once the
             * buffer it just fed to appsrc has also been handed to the
             * decoder element */
            "block", TRUE, "max-bytes", ( guint64 )1, NULL );
    gst_caps_unref( caps.p_sinkcaps );
    caps.p_sinkcaps = NULL;
    cb.enough_data = NULL;
    cb.need_data = NULL;
    cb.seek_data = seek_data_cb;
    gst_app_src_set_callbacks( GST_APP_SRC( p_sys->p_decode_src ),
            &cb, p_dec, NULL );

    if( dbin )
    {
        p_sys->p_decode_in = gst_element_factory_make( "decodebin", NULL );
        VLC_GST_CHECK( p_sys->p_decode_in, NULL, "decodebin not found",
                VLC_ENOMOD );
        //g_object_set( G_OBJECT( p_sys->p_decode_in ),
        //        "max-size-buffers", 2, NULL );
        //g_signal_connect( G_OBJECT( p_sys->p_decode_in ), "no-more-pads",
        //        G_CALLBACK( no_more_pads_cb ), p_dec );
        g_signal_connect( G_OBJECT( p_sys->p_decode_in ), "pad-added",
                G_CALLBACK( pad_added_cb ), p_dec );

    }

    /* videosink: will emit signal for every available buffer */
    p_sys->p_decode_out = gst_element_factory_make( "vlcvideosink", NULL );
    VLC_GST_CHECK( p_sys->p_decode_out, NULL, "vlcvideosink not found",
            VLC_ENOMOD );
    p_sys->p_allocator = gst_vlc_picture_plane_allocator_new(
            (gpointer) p_dec );
    g_object_set( G_OBJECT( p_sys->p_decode_out ), "sync", FALSE, "allocator",
            p_sys->p_allocator, "id", (gpointer) p_dec, NULL );
    g_signal_connect( G_OBJECT( p_sys->p_decode_out ), "new-buffer",
            G_CALLBACK( frame_handoff_cb ), p_dec );

    //FIXME: caps_signal
#if 0
    g_signal_connect( G_OBJECT( p_sys->p_decode_out ), "new-caps",
            G_CALLBACK( caps_handoff_cb ), p_dec );
#else
    GST_VLC_VIDEO_SINK( p_sys->p_decode_out )->new_caps = caps_handoff_cb;
#endif

    p_sys->p_decoder = GST_ELEMENT( gst_bin_new( "decoder" ) );
    VLC_GST_CHECK( p_sys->p_decoder, NULL, "bin not found", VLC_ENOMOD );
    p_sys->p_bus = gst_bus_new( );
    VLC_GST_CHECK( p_sys->p_bus, NULL, "failed to create bus",
            VLC_ENOMOD );
    gst_element_set_bus( p_sys->p_decoder, p_sys->p_bus );

    gst_bin_add_many( GST_BIN( p_sys->p_decoder ),
            p_sys->p_decode_src, p_sys->p_decode_in,
            p_sys->p_decode_out, NULL );
    gst_object_ref( p_sys->p_decode_src );
    gst_object_ref( p_sys->p_decode_in );
    gst_object_ref( p_sys->p_decode_out );

    b_ret = gst_element_link( p_sys->p_decode_src, p_sys->p_decode_in );
    VLC_GST_CHECK( b_ret, FALSE, "failed to link src <-> in",
            VLC_EGENERIC );

    if( !dbin )
    {
        b_ret = gst_element_link( p_sys->p_decode_in, p_sys->p_decode_out );
        VLC_GST_CHECK( b_ret, FALSE, "failed to link in <-> out",
                VLC_EGENERIC );
    }

    p_dec->fmt_out.i_cat = p_dec->fmt_in.i_cat;

    /* set the pipeline to playing */
    i_ret = gst_element_set_state( p_sys->p_decoder, GST_STATE_PLAYING );
    VLC_GST_CHECK( i_ret, GST_STATE_CHANGE_FAILURE,
            "set state failure", VLC_EGENERIC );
    p_sys->b_running = true;

    /* Set callbacks */
    p_dec->pf_decode_video = DecodeBlock;
    p_dec->pf_flush        = Flush;

    return VLC_SUCCESS;

fail:
    if( caps.p_sinkcaps )
        gst_caps_unref( caps.p_sinkcaps );
    if( caps.p_srccaps )
        gst_caps_unref( caps.p_srccaps );
    if( p_list )
        gst_plugin_feature_list_free( p_list );
    CloseDecoder( ( vlc_object_t* )p_dec );
    return i_rval;
}
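
A note on the "block"/"max-bytes" pair configured above: with a 1-byte cap, a blocking push into the appsrc cannot return until the previously queued buffer has been taken by the downstream decoder element. DecodeBlock() itself is not part of this example, so the helper below is only a sketch of that coupling (the function name is hypothetical):

/* Hypothetical feed helper: with "block"=TRUE and "max-bytes"=1 on the
 * appsrc, this call blocks until the buffer pushed before it has been
 * handed to the decoder element. gst_app_src_push_buffer() takes
 * ownership of p_buf. */
static GstFlowReturn FeedOneBuffer( decoder_sys_t *p_sys, GstBuffer *p_buf )
{
    return gst_app_src_push_buffer( GST_APP_SRC( p_sys->p_decode_src ), p_buf );
}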
Example #2
bool CvCapture_GStreamer::open( int type, const char* filename )
{
    close();
    CV_FUNCNAME("cvCaptureFromCAM_GStreamer");

    __BEGIN__;

    gst_initializer::init();

//    if(!isInited) {
//        printf("gst_init\n");
//        gst_init (NULL, NULL);

//        gst_debug_set_active(TRUE);
//        gst_debug_set_colored(TRUE);
//        gst_debug_set_default_threshold(GST_LEVEL_WARNING);

//        isInited = true;
//    }
    bool stream = false;
    bool manualpipeline = false;
    char *uri = NULL;
    uridecodebin = NULL;
    if(type != CV_CAP_GSTREAMER_FILE) {
        close();
        return false;
    }

    if(!gst_uri_is_valid(filename)) {
        uri = realpath(filename, NULL);
        stream=false;
        if(uri) {
            uri = g_filename_to_uri(uri, NULL, NULL);
            if(!uri) {
                CV_WARN("GStreamer: Error opening file\n");
                close();
                return false;
            }
        } else {
            GError *err = NULL;
            //uridecodebin = gst_parse_bin_from_description(filename, FALSE, &err);
            uridecodebin = gst_parse_launch(filename, &err);
            if(!uridecodebin) {
                CV_WARN("GStreamer: Error opening bin\n");
                close();
                return false;
            }
            stream = true;
            manualpipeline = true;
        }
    } else {
        stream = true;
        uri = g_strdup(filename);
    }

    if(!uridecodebin) {
        uridecodebin = gst_element_factory_make("uridecodebin", NULL);
        if(!uridecodebin) {
            CV_WARN("GStreamer: Failed to create uridecodebin\n");
            close();
            return false;
        }
        /* check before setting: calling g_object_set() on NULL is invalid */
        g_object_set(G_OBJECT(uridecodebin), "uri", uri, NULL);
    }

    if(manualpipeline) {
        GstIterator *it = gst_bin_iterate_sinks(GST_BIN(uridecodebin));
        if(gst_iterator_next(it, (gpointer *)&sink) != GST_ITERATOR_OK) {
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }

        pipeline = uridecodebin;
    } else {
        pipeline = gst_pipeline_new (NULL);

        color = gst_element_factory_make("ffmpegcolorspace", NULL);
        sink = gst_element_factory_make("appsink", NULL);

        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);
        g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);

        if(!gst_element_link(color, sink)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
            gst_object_unref(pipeline);
            return false;
        }
    }

    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
    caps = gst_caps_new_simple("video/x-raw-rgb",
                               "red_mask",   G_TYPE_INT, 0x0000FF,
                               "green_mask", G_TYPE_INT, 0x00FF00,
                               "blue_mask",  G_TYPE_INT, 0xFF0000,
                               NULL);
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
    gst_caps_unref(caps);

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) ==
       GST_STATE_CHANGE_FAILURE) {
        CV_WARN("GStreamer: unable to set pipeline to ready\n");
        gst_object_unref(pipeline);
        return false;
    }

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
       GST_STATE_CHANGE_FAILURE) {
        gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
        CV_WARN("GStreamer: unable to set pipeline to playing\n");
        gst_object_unref(pipeline);
        return false;
    }

    handleMessage();

    __END__;

    return true;
}
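
Once open() reaches PLAYING, frames are read back from the appsink configured above (max-buffers 1, RGB masks chosen so OpenCV receives BGR-ordered data). A minimal grab sketch in the same GStreamer 0.10 API this code targets (helper name hypothetical):

/* Blocks until a decoded buffer is available or the stream ends;
 * returns NULL on EOS. Pixel data lives at GST_BUFFER_DATA(buf) and is
 * GST_BUFFER_SIZE(buf) bytes, laid out per the caps set in open(). */
static GstBuffer* grabRawFrame(GstElement *appsink)
{
    return gst_app_sink_pull_buffer(GST_APP_SINK(appsink));
}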
Example #3
bool CvCapture_GStreamer::setProperty( int propId, double value )
{
    GstFormat format;
    GstSeekFlags flags;

    if(!pipeline) {
        CV_WARN("GStreamer: no pipeline");
        return false;
    }

    switch(propId) {
    case CV_CAP_PROP_POS_MSEC:
        format = GST_FORMAT_TIME;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                        flags, (gint64) (value * GST_MSECOND))) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_POS_FRAMES:
        format = GST_FORMAT_DEFAULT;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                        flags, (gint64) value)) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_POS_AVI_RATIO:
        format = GST_FORMAT_PERCENT;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                        flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_FRAME_WIDTH:
        if(value > 0)
            setFilter("width", G_TYPE_INT, (int) value, 0);
        else
            removeFilter("width");
        break;
    case CV_CAP_PROP_FRAME_HEIGHT:
        if(value > 0)
            setFilter("height", G_TYPE_INT, (int) value, 0);
        else
            removeFilter("height");
        break;
    case CV_CAP_PROP_FPS:
        if(value > 0) {
            int num, denom;
            num = (int) value;
            if(value != num) { // FIXME this supports only fractions x/1 and x/2
                num = (int) (value * 2);
                denom = 2;
            } else
                denom = 1;

            setFilter("framerate", GST_TYPE_FRACTION, num, denom);
        } else
            removeFilter("framerate");
        break;
    case CV_CAP_PROP_FOURCC:
    case CV_CAP_PROP_FRAME_COUNT:
    case CV_CAP_PROP_FORMAT:
    case CV_CAP_PROP_MODE:
    case CV_CAP_PROP_BRIGHTNESS:
    case CV_CAP_PROP_CONTRAST:
    case CV_CAP_PROP_SATURATION:
    case CV_CAP_PROP_HUE:
    case CV_CAP_PROP_GAIN:
    case CV_CAP_PROP_CONVERT_RGB:
        break;
    case CV_CAP_GSTREAMER_QUEUE_LENGTH:
        if(!sink)
            break;
        gst_app_sink_set_max_buffers(GST_APP_SINK(sink), (guint) value);
        break;
    default:
        CV_WARN("GStreamer: unhandled property");
    }
    return false;
}
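
All three position branches reduce to a flushing, accurate gst_element_seek_simple() call, differing only in the GstFormat and how the value is scaled. Isolated for the time-based case (standalone sketch, helper name hypothetical):

static gboolean seek_to_msec(GstElement *pipeline, double msec)
{
    /* FLUSH discards queued data so the seek takes effect immediately;
     * ACCURATE requests sample-exact rather than keyframe positioning */
    return gst_element_seek_simple(pipeline, GST_FORMAT_TIME,
            (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE),
            (gint64)(msec * GST_MSECOND));
}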
Example #4
/* Return the possible output caps based on inputs and downstream prefs */
static GstCaps *
_update_caps (GstVideoAggregator * vagg, GstCaps * caps, GstCaps * filter)
{
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
  GList *l;
  gint best_width = -1, best_height = -1;
  gdouble best_fps = -1, cur_fps;
  gint best_fps_n = 0, best_fps_d = 1;
  GstVideoInfo *mix_info;
  GstCaps *blend_caps, *tmp_caps;
  GstCaps *out_caps;

  GST_OBJECT_LOCK (vagg);

  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *pad = l->data;
    GstVideoInfo tmp = pad->info;
    gint this_width, this_height;
    gint fps_n, fps_d;

    if (!pad->info.finfo)
      continue;

    /* This can happen if we release a pad and another pad hasn't been negotiated yet */
    if (GST_VIDEO_INFO_FORMAT (&pad->info) == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    /* Convert to per-view width/height for unpacked forms */
    gst_video_multiview_video_info_change_mode (&tmp,
        GST_VIDEO_MULTIVIEW_MODE_SEPARATED, GST_VIDEO_MULTIVIEW_FLAGS_NONE);

    this_width = GST_VIDEO_INFO_WIDTH (&tmp);
    this_height = GST_VIDEO_INFO_HEIGHT (&tmp);
    fps_n = GST_VIDEO_INFO_FPS_N (&tmp);
    fps_d = GST_VIDEO_INFO_FPS_D (&tmp);

    GST_INFO_OBJECT (vagg, "Input pad %" GST_PTR_FORMAT
        " w %u h %u", pad, this_width, this_height);

    if (this_width == 0 || this_height == 0)
      continue;

    if (best_width < this_width)
      best_width = this_width;
    if (best_height < this_height)
      best_height = this_height;

    if (fps_d == 0)
      cur_fps = 0.0;
    else
      gst_util_fraction_to_double (fps_n, fps_d, &cur_fps);

    if (best_fps < cur_fps) {
      best_fps = cur_fps;
      best_fps_n = fps_n;
      best_fps_d = fps_d;
    }

    /* FIXME: Preserve PAR for at least one input when different sized inputs */
  }
  GST_OBJECT_UNLOCK (vagg);

  mix_info = &mix->mix_info;
  gst_video_info_set_format (mix_info, GST_VIDEO_FORMAT_RGBA, best_width,
      best_height);

  GST_VIDEO_INFO_FPS_N (mix_info) = best_fps_n;
  GST_VIDEO_INFO_FPS_D (mix_info) = best_fps_d;

  GST_VIDEO_INFO_MULTIVIEW_MODE (mix_info) = GST_VIDEO_MULTIVIEW_MODE_SEPARATED;
  GST_VIDEO_INFO_VIEWS (mix_info) = 2;

  /* FIXME: If input is marked as flipped or flopped, preserve those flags */
  GST_VIDEO_INFO_MULTIVIEW_FLAGS (mix_info) = GST_VIDEO_MULTIVIEW_FLAGS_NONE;

  /* Choose our output format based on downstream preferences */
  blend_caps = gst_video_info_to_caps (mix_info);

  gst_caps_set_features (blend_caps, 0,
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));

  tmp_caps = get_converted_caps (GST_GL_STEREO_MIX (vagg), blend_caps);
  gst_caps_unref (blend_caps);

  out_caps = gst_caps_intersect (caps, tmp_caps);
  gst_caps_unref (tmp_caps);

  GST_DEBUG_OBJECT (vagg, "Possible output caps %" GST_PTR_FORMAT, out_caps);

  return out_caps;
}
Example #5
static GstFlowReturn
gst_opus_dec_parse_header (GstOpusDec * dec, GstBuffer * buf)
{
  const guint8 *data;
  GstAudioChannelPosition pos[64];
  const GstAudioChannelPosition *posn = NULL;
  GstMapInfo map;

  if (!gst_opus_header_is_id_header (buf)) {
    GST_ERROR_OBJECT (dec, "Header is not an Opus ID header");
    return GST_FLOW_ERROR;
  }

  gst_buffer_map (buf, &map, GST_MAP_READ);
  data = map.data;

  if (!(dec->n_channels == 0 || dec->n_channels == data[9])) {
    gst_buffer_unmap (buf, &map);
    GST_ERROR_OBJECT (dec, "Opus ID header has invalid channels");
    return GST_FLOW_ERROR;
  }

  dec->n_channels = data[9];
  dec->sample_rate = GST_READ_UINT32_LE (data + 12);
  dec->pre_skip = GST_READ_UINT16_LE (data + 10);
  dec->r128_gain = GST_READ_UINT16_LE (data + 16);
  dec->r128_gain_volume = gst_opus_dec_get_r128_volume (dec->r128_gain);
  GST_INFO_OBJECT (dec,
      "Found pre-skip of %u samples, R128 gain %d (volume %f)",
      dec->pre_skip, dec->r128_gain, dec->r128_gain_volume);

  dec->channel_mapping_family = data[18];
  if (dec->channel_mapping_family == 0) {
    /* implicit mapping */
    GST_INFO_OBJECT (dec, "Channel mapping family 0, implicit mapping");
    dec->n_streams = dec->n_stereo_streams = 1;
    dec->channel_mapping[0] = 0;
    dec->channel_mapping[1] = 1;
  } else {
    dec->n_streams = data[19];
    dec->n_stereo_streams = data[20];
    memcpy (dec->channel_mapping, data + 21, dec->n_channels);

    if (dec->channel_mapping_family == 1) {
      GST_INFO_OBJECT (dec, "Channel mapping family 1, Vorbis mapping");
      switch (dec->n_channels) {
        case 1:
        case 2:
          /* nothing */
          break;
        case 3:
        case 4:
        case 5:
        case 6:
        case 7:
        case 8:
          posn = gst_opus_channel_positions[dec->n_channels - 1];
          break;
        default:{
          gint i;

          GST_ELEMENT_WARNING (GST_ELEMENT (dec), STREAM, DECODE,
              (NULL), ("Using NONE channel layout for more than 8 channels"));

          for (i = 0; i < dec->n_channels; i++)
            pos[i] = GST_AUDIO_CHANNEL_POSITION_NONE;

          posn = pos;
        }
      }
    } else {
      GST_INFO_OBJECT (dec, "Channel mapping family %d",
          dec->channel_mapping_family);
    }
  }

  gst_opus_dec_negotiate (dec, posn);

  gst_buffer_unmap (buf, &map);

  return GST_FLOW_OK;
}
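
For reference (not part of the original source), the byte offsets read above follow the OpusHead layout from RFC 7845:

/* OpusHead layout, little-endian fields:
 *   bytes  0..7   magic "OpusHead"
 *   byte   8      version
 *   byte   9      channel count          -> dec->n_channels
 *   bytes 10..11  pre-skip               -> dec->pre_skip
 *   bytes 12..15  input sample rate      -> dec->sample_rate
 *   bytes 16..17  output gain (Q7.8 dB)  -> dec->r128_gain
 *   byte  18      mapping family         -> dec->channel_mapping_family
 *   byte  19      stream count           -> dec->n_streams         (family != 0)
 *   byte  20      coupled stream count   -> dec->n_stereo_streams  (family != 0)
 *   bytes 21..    channel mapping table  -> dec->channel_mapping   (family != 0)
 */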
Example #6
GstElement* webkitVideoSinkNew(WebCore::GStreamerGWorld* gstGWorld)
{
    GstElement* element = GST_ELEMENT(g_object_new(WEBKIT_TYPE_VIDEO_SINK, NULL));
    WEBKIT_VIDEO_SINK(element)->priv->gstGWorld = gstGWorld;
    return element;
}
Example #7
static void
gst_curl_smtp_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstCurlSmtpSink *sink;
  GstState cur_state;

  g_return_if_fail (GST_IS_CURL_SMTP_SINK (object));
  sink = GST_CURL_SMTP_SINK (object);

  gst_element_get_state (GST_ELEMENT (sink), &cur_state, NULL, 0);
  if (cur_state != GST_STATE_PLAYING && cur_state != GST_STATE_PAUSED) {
    GST_OBJECT_LOCK (sink);

    switch (prop_id) {
      case PROP_MAIL_RCPT:
        g_free (sink->mail_rcpt);
        sink->mail_rcpt = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "mail-rcpt set to %s", sink->mail_rcpt);
        break;
      case PROP_MAIL_FROM:
        g_free (sink->mail_from);
        sink->mail_from = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "mail-from set to %s", sink->mail_from);
        break;
      case PROP_SUBJECT:
        g_free (sink->subject);
        sink->subject = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "subject set to %s", sink->subject);
        break;
      case PROP_MESSAGE_BODY:
        g_free (sink->message_body);
        sink->message_body = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "message-body set to %s", sink->message_body);
        break;
      case PROP_CONTENT_TYPE:
        g_free (sink->content_type);
        sink->content_type = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "content-type set to %s", sink->content_type);
        break;
      case PROP_USE_SSL:
        sink->use_ssl = g_value_get_boolean (value);
        GST_DEBUG_OBJECT (sink, "use-ssl set to %d", sink->use_ssl);
        break;
      case PROP_NBR_ATTACHMENTS:
        sink->nbr_attachments = g_value_get_int (value);
        sink->nbr_attachments_left = sink->nbr_attachments;
        GST_DEBUG_OBJECT (sink, "nbr-attachments set to %d",
            sink->nbr_attachments);
        break;
      case PROP_POP_USER_NAME:
        g_free (sink->pop_user);
        sink->pop_user = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "pop-user set to %s", sink->pop_user);
        break;
      case PROP_POP_USER_PASSWD:
        g_free (sink->pop_passwd);
        sink->pop_passwd = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "pop-passwd set to %s", sink->pop_passwd);
        break;
      case PROP_POP_LOCATION:
        g_free (sink->pop_location);
        sink->pop_location = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "pop-location set to %s", sink->pop_location);
        break;

      default:
        GST_DEBUG_OBJECT (sink, "invalid property id %d", prop_id);
        break;
    }

    GST_OBJECT_UNLOCK (sink);

    return;
  }

  /* in PLAYING or PAUSED state */
  GST_OBJECT_LOCK (sink);

  switch (prop_id) {
    case PROP_CONTENT_TYPE:
      g_free (sink->content_type);
      sink->content_type = g_value_dup_string (value);
      GST_DEBUG_OBJECT (sink, "content type set to %s", sink->content_type);
      break;
    default:
      GST_WARNING_OBJECT (sink, "cannot set property when PLAYING");
      break;
  }

  GST_OBJECT_UNLOCK (sink);
}
Example #8
static void
gst_curl_http_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstCurlHttpSink *sink;
  GstState cur_state;

  g_return_if_fail (GST_IS_CURL_HTTP_SINK (object));
  sink = GST_CURL_HTTP_SINK (object);

  gst_element_get_state (GST_ELEMENT (sink), &cur_state, NULL, 0);
  if (cur_state != GST_STATE_PLAYING && cur_state != GST_STATE_PAUSED) {
    GST_OBJECT_LOCK (sink);

    switch (prop_id) {
      case PROP_PROXY:
        g_free (sink->proxy);
        sink->proxy = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "proxy set to %s", sink->proxy);
        break;
      case PROP_PROXY_PORT:
        sink->proxy_port = g_value_get_int (value);
        GST_DEBUG_OBJECT (sink, "proxy port set to %d", sink->proxy_port);
        break;
      case PROP_PROXY_USER_NAME:
        g_free (sink->proxy_user);
        sink->proxy_user = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "proxy user set to %s", sink->proxy_user);
        break;
      case PROP_PROXY_USER_PASSWD:
        g_free (sink->proxy_passwd);
        sink->proxy_passwd = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "proxy password set to %s", sink->proxy_passwd);
        break;
      case PROP_USE_CONTENT_LENGTH:
        sink->use_content_length = g_value_get_boolean (value);
        GST_DEBUG_OBJECT (sink, "use_content_length set to %d",
            sink->use_content_length);
        break;
      case PROP_CONTENT_TYPE:
        g_free (sink->content_type);
        sink->content_type = g_value_dup_string (value);
        GST_DEBUG_OBJECT (sink, "content type set to %s", sink->content_type);
        break;
      default:
        GST_DEBUG_OBJECT (sink, "invalid property id %d", prop_id);
        break;
    }

    GST_OBJECT_UNLOCK (sink);

    return;
  }

  /* in PLAYING or PAUSED state */
  GST_OBJECT_LOCK (sink);

  switch (prop_id) {
    case PROP_CONTENT_TYPE:
      g_free (sink->content_type);
      sink->content_type = g_value_dup_string (value);
      GST_DEBUG_OBJECT (sink, "content type set to %s", sink->content_type);
      break;
    default:
      GST_WARNING_OBJECT (sink, "cannot set property when PLAYING");
      break;
  }

  GST_OBJECT_UNLOCK (sink);
}
Example #9
static GstFlowReturn
gst_opus_dec_parse_header (GstOpusDec * dec, GstBuffer * buf)
{
  const guint8 *data = GST_BUFFER_DATA (buf);
  GstCaps *caps;
  const GstAudioChannelPosition *pos = NULL;

  if (!gst_opus_header_is_id_header (buf)) {
    GST_ERROR_OBJECT (dec, "Header is not an Opus ID header");
    return GST_FLOW_ERROR;
  }
  if (!(dec->n_channels == 0 || dec->n_channels == data[9])) {
    GST_ERROR_OBJECT (dec, "Opus ID header has invalid channels");
    return GST_FLOW_ERROR;
  }

  dec->n_channels = data[9];
  dec->pre_skip = GST_READ_UINT16_LE (data + 10);
  dec->r128_gain = GST_READ_UINT16_LE (data + 16);
  dec->r128_gain_volume = gst_opus_dec_get_r128_volume (dec->r128_gain);
  GST_INFO_OBJECT (dec,
      "Found pre-skip of %u samples, R128 gain %d (volume %f)",
      dec->pre_skip, dec->r128_gain, dec->r128_gain_volume);

  dec->channel_mapping_family = data[18];
  if (dec->channel_mapping_family == 0) {
    /* implicit mapping */
    GST_INFO_OBJECT (dec, "Channel mapping family 0, implicit mapping");
    dec->n_streams = dec->n_stereo_streams = 1;
    dec->channel_mapping[0] = 0;
    dec->channel_mapping[1] = 1;
  } else {
    dec->n_streams = data[19];
    dec->n_stereo_streams = data[20];
    memcpy (dec->channel_mapping, data + 21, dec->n_channels);

    if (dec->channel_mapping_family == 1) {
      GST_INFO_OBJECT (dec, "Channel mapping family 1, Vorbis mapping");
      switch (dec->n_channels) {
        case 1:
        case 2:
          /* nothing */
          break;
        case 3:
        case 4:
        case 5:
        case 6:
        case 7:
        case 8:
          pos = gst_opus_channel_positions[dec->n_channels - 1];
          break;
        default:{
          gint i;
          GstAudioChannelPosition *posn =
              g_new (GstAudioChannelPosition, dec->n_channels);

          GST_ELEMENT_WARNING (GST_ELEMENT (dec), STREAM, DECODE,
              (NULL), ("Using NONE channel layout for more than 8 channels"));

          for (i = 0; i < dec->n_channels; i++)
            posn[i] = GST_AUDIO_CHANNEL_POSITION_NONE;

          pos = posn;
        }
      }
    } else {
      GST_INFO_OBJECT (dec, "Channel mapping family %d",
          dec->channel_mapping_family);
    }
  }

  caps = gst_opus_dec_negotiate (dec);

  if (pos) {
    GST_DEBUG_OBJECT (dec, "Setting channel positions on caps");
    gst_audio_set_channel_positions (gst_caps_get_structure (caps, 0), pos);
  }

  if (dec->n_channels > 8) {
    g_free ((GstAudioChannelPosition *) pos);
  }

  GST_INFO_OBJECT (dec, "Setting src caps to %" GST_PTR_FORMAT, caps);
  gst_pad_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec), caps);
  gst_caps_unref (caps);

  return GST_FLOW_OK;
}
Example #10
int GStreamer_init(const char *mplayer)
{
	GError* error = NULL;
	GstBus *bus;
	int err;

	if (g_initialized)
		g_error("GStreamer: already initialized, call destroy first!\n");

	g_state_callback = NULL;
	g_duration = 0;
	g_position = 0;

	/* pthread synchronization */
	pthread_mutex_init(&g_mutex, NULL);
	err = pthread_cond_init(&g_main_cond, NULL);
	if (err) {
		g_error("GStreamer: failed to initialize main condition %s\n",
				strerror(errno));
		return -1;
	}

	/* init gstreamer library */
	if (!gst_init_check(NULL, NULL, &error)) {
		g_error("GStreamer: failed to initialize gstreamer library: [%d] %s\n",
				error->code, error->message);
		g_error_free(error);
		return -1;
	}

	/* create pipeline */
	g_pipeline = gst_pipeline_new("pipeline");
	g_pipeline_name = gst_element_get_name(GST_ELEMENT(g_pipeline));

	/* register callback */
	bus = gst_pipeline_get_bus(GST_PIPELINE(g_pipeline));
	gst_bus_add_watch(bus, my_bus_callback, NULL);
	gst_object_unref(bus);

#if 0
	/* TODO unlinked when removed from pipeline */

	/* hardcode audio/video sink */
	g_videosink = create_video_sink();
	g_audiosink = create_audio_sink();

	if (!g_videosink || !g_audiosink) {
		/* TODO memory leak */
		g_error("GStreamer: failed to create sink elements\n");
		return -1;
	}
#endif

	/* prepare http/file src */
	g_filesrc = gst_element_factory_make ("filesrc", "filesrc");
	g_httpsrc = gst_element_factory_make ("souphttpsrc", "httpsrc");

	if (!g_filesrc || !g_httpsrc) {
		/* TODO memory leak */
		g_error("GStreamer: failed to create src elements %p %p\n",
				(void *)g_filesrc, (void *)g_httpsrc);
		return -1;
	}

	g_object_ref(g_filesrc);
	g_object_ref(g_httpsrc);

	/* initialize pipeline */
	/* TODO do for audio/video pipe separately */

	if (gst_element_set_state(g_pipeline, GST_STATE_READY) ==
		GST_STATE_CHANGE_FAILURE) {
	  g_error("GStreamer: could not set pipeline to ready\n");
	}

	/* start main loop */
	g_main_loop = g_main_loop_new(NULL, FALSE);

	err = pthread_create(&g_reader_thread, NULL, main_thread_proc, NULL);
	if (err) {
		g_error("GStreamer: failed to launch gstreamer main thread %s\n",
				strerror(errno));
		goto err_pthread;
	}

	g_print("GStreamer: SUCCESSFULLY INITIALIZED\n");
	g_initialized = 1;

	return 0;

err_pthread:
	pthread_cond_destroy(&g_main_cond);
	pthread_mutex_destroy(&g_mutex);
	
	return err;
}
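
main_thread_proc() is not shown in this example; a plausible minimal body, given that g_main_loop is created just before the thread is spawned (sketch, not the original code):

static void *main_thread_proc(void *arg)
{
	(void)arg;
	/* Run the GLib main loop so my_bus_callback() gets dispatched;
	 * another thread would call g_main_loop_quit(g_main_loop) to stop it */
	g_main_loop_run(g_main_loop);
	return NULL;
}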
Example #11
static GstElement *
gst_auto_video_sink_find_best (GstAutoVideoSink * sink)
{
  GList *list, *item;
  GstElement *choice = NULL;
  GstMessage *message = NULL;
  GSList *errors = NULL;
  GstBus *bus = gst_bus_new ();
  GstPad *el_pad = NULL;
  GstCaps *el_caps = NULL;
  gboolean no_match = TRUE;

  list = gst_registry_feature_filter (gst_registry_get (),
      (GstPluginFeatureFilter) gst_auto_video_sink_factory_filter, FALSE, sink);
  list = g_list_sort (list, (GCompareFunc) gst_auto_video_sink_compare_ranks);

  GST_LOG_OBJECT (sink, "Trying to find usable video devices ...");

  for (item = list; item != NULL; item = item->next) {
    GstElementFactory *f = GST_ELEMENT_FACTORY (item->data);
    GstElement *el;

    if ((el = gst_auto_video_sink_create_element_with_pretty_name (sink, f))) {
      GstStateChangeReturn ret;

      GST_DEBUG_OBJECT (sink, "Testing %s", GST_OBJECT_NAME (f));

      /* If autovideosink has been provided with filter caps,
       * accept only sinks that match with the filter caps */
      if (sink->filter_caps) {
        el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "sink");
        el_caps = gst_pad_query_caps (el_pad, NULL);
        gst_object_unref (el_pad);
        GST_DEBUG_OBJECT (sink,
            "Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
            sink->filter_caps, el_caps);
        no_match = !gst_caps_can_intersect (sink->filter_caps, el_caps);
        gst_caps_unref (el_caps);

        if (no_match) {
          GST_DEBUG_OBJECT (sink, "Incompatible caps");
          gst_object_unref (el);
          continue;
        } else {
          GST_DEBUG_OBJECT (sink, "Found compatible caps");
        }
      }

      gst_element_set_bus (el, bus);
      ret = gst_element_set_state (el, GST_STATE_READY);
      if (ret == GST_STATE_CHANGE_SUCCESS) {
        GST_DEBUG_OBJECT (sink, "This worked!");
        choice = el;
        break;
      }

      /* collect all error messages */
      while ((message = gst_bus_pop_filtered (bus, GST_MESSAGE_ERROR))) {
        GST_DEBUG_OBJECT (sink, "error message %" GST_PTR_FORMAT, message);
        errors = g_slist_append (errors, message);
      }

      gst_element_set_state (el, GST_STATE_NULL);
      gst_object_unref (el);
    }
  }

  GST_DEBUG_OBJECT (sink, "done trying");
  if (!choice) {
    if (errors) {
      /* FIXME: we forward the first error for now; but later on it might make
       * sense to actually analyse them */
      gst_message_ref (GST_MESSAGE (errors->data));
      GST_DEBUG_OBJECT (sink, "reposting message %p", errors->data);
      gst_element_post_message (GST_ELEMENT (sink), GST_MESSAGE (errors->data));
    } else {
      /* send warning message to application and use a fakesink */
      GST_ELEMENT_WARNING (sink, RESOURCE, NOT_FOUND, (NULL),
          ("Failed to find a usable video sink"));
      choice = gst_element_factory_make ("fakesink", "fake-video-sink");
      if (g_object_class_find_property (G_OBJECT_GET_CLASS (choice), "sync"))
        g_object_set (choice, "sync", TRUE, NULL);
      gst_element_set_state (choice, GST_STATE_READY);
    }
  }
  gst_object_unref (bus);
  gst_plugin_feature_list_free (list);
  g_slist_foreach (errors, (GFunc) gst_mini_object_unref, NULL);
  g_slist_free (errors);

  return choice;
}
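
The filter_caps consulted in the loop come from autovideosink's "filter-caps" property; a typical way an application would constrain the sink choice (usage sketch):

static GstElement *
make_filtered_video_sink (void)
{
  GstElement *vsink = gst_element_factory_make ("autovideosink", NULL);
  GstCaps *filter = gst_caps_from_string ("video/x-raw, format=RGBA");

  /* only candidate sinks whose pad caps intersect these are considered */
  g_object_set (vsink, "filter-caps", filter, NULL);
  gst_caps_unref (filter);
  return vsink;
}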
Example #12
GstElement *create_video_sink()
{
	GstElement *bin, *decoder = NULL;
	GstIterator *iter;
	GstIteratorResult res;
	GError *error = NULL;
	GstPad *pad;
	gpointer element = NULL;
	const char* decoder_name;

#ifndef DESKTOP 
	/* create pipeline */                                                                                 
	decoder_name = "tividdec20";
	bin = gst_parse_launch_full("TIViddec2 genTimeStamps=FALSE \
			    engineName=decode \
			    codecName=h264dec numFrames=-1 \
			! videoscale method=0 \
			! video/x-raw-yuv, format=(fourcc)I420, width=320, height=240 \
			! ffmpegcolorspace \
			! video/x-raw-rgb, bpp=16 \
			! TIDmaiVideoSink displayStd=fbdev displayDevice=/dev/fb0 videoStd=QVGA \
			    videoOutput=LCD resizer=FALSE accelFrameCopy=TRUE",
			NULL, 0, &error);                                      
#else
	decoder_name = "decodebin";
	bin = gst_parse_launch_full("decodebin \
			! videoscale method=0 \
			! video/x-raw-yuv, format=(fourcc)I420, width=320, height=240 \
			! xvimagesink",
			NULL, 0, &error);                                      
#endif

	if (!bin) {
		g_error("GStreamer: failed to parse video sink pipeline\n");
		return NULL;
	}              

	gst_object_set_name(GST_OBJECT(bin), "video-sink");

	iter = gst_bin_iterate_elements(GST_BIN(bin));
	res = gst_iterator_next (iter, &element);
	while (res == GST_ITERATOR_OK) {
		gchar *name;

		name = gst_object_get_name(GST_OBJECT (element));
		if (name) {
			if (!strncmp(name, decoder_name, strlen(decoder_name))) {
				decoder = GST_ELEMENT(element); 
			}
			g_printf("GS: video sink element: %s \n", name);
			g_free (name);
		}

		gst_object_unref (element);
		element = NULL;

		res = gst_iterator_next (iter, &element);
	}
	gst_iterator_free (iter);

	if (!decoder) {
		g_printf("decoder element not found\n");
		gst_object_unref(GST_OBJECT(bin)); /* don't leak the parsed bin */
		return NULL;
	}

	/* add ghostpad */
	pad = gst_element_get_static_pad (decoder, "sink");
	gst_element_add_pad(bin, gst_ghost_pad_new("sink", pad));
	gst_object_unref(GST_OBJECT(pad));

	return bin;
}
Example #13
/* http://<xxx>/manual/html/section-bus-message-types.html */
static gboolean my_bus_callback(GstBus *bus, GstMessage *msg,
	gpointer user_data)
{
	GstMessageType msgType;
	GstObject *msgSrc;
	gchar *msgSrcName;

	/* used in switch */
	/* error message */
	gchar *debug;
	GError *err;
	GstState oldstate, newstate, pending;

	msgType = GST_MESSAGE_TYPE(msg);
	msgSrc = GST_MESSAGE_SRC(msg);
	msgSrcName = GST_OBJECT_NAME(msgSrc);

	switch (msgType) {
	case GST_MESSAGE_EOS:
		g_print("GStreamer: end-of-stream\n");
		pthread_mutex_lock(&g_mutex);

		gst_element_set_state(GST_ELEMENT(g_pipeline), GST_STATE_NULL);
		trigger_callback(GST_STATE_NULL);

		pthread_mutex_unlock(&g_mutex);
		break;

	case GST_MESSAGE_ERROR:
		gst_message_parse_error(msg, &err, &debug);
		g_free (debug);

		g_error("GStreamer: error: [%d] %s\n", err->code, err->message);
		g_error_free(err);

		/* TODO no sleep in callback */
		pthread_mutex_lock(&g_mutex);

		/* setting state to null flushes pipeline */
		gst_element_set_state(GST_ELEMENT(g_pipeline), GST_STATE_NULL);
		trigger_callback(GST_STATE_NULL);

		pthread_mutex_unlock(&g_mutex);
		break;

	case GST_MESSAGE_STATE_CHANGED:
		gst_message_parse_state_changed(msg, &oldstate, &newstate, &pending);
#if 0   /* noisy */
		g_print("GStreamer: %s: State change: OLD: '%s', NEW: '%s', PENDING: '%s'\n",
				msgSrcName,
				gststate_get_name(oldstate),
				gststate_get_name(newstate),
				gststate_get_name(pending));
#endif
		if (!strcmp(msgSrcName, g_pipeline_name))
			trigger_callback(newstate); /* TODO GstState != GStreamer_state */

		break;

	case GST_MESSAGE_WARNING:
	case GST_MESSAGE_INFO:
		/* TODO */
		break;
	case GST_MESSAGE_APPLICATION:  /* marshal information into the main thread */
	case GST_MESSAGE_ASYNC_START:
	case GST_MESSAGE_ASYNC_DONE:
	case GST_MESSAGE_BUFFERING: /* caching of network streams */
	case GST_MESSAGE_CLOCK_LOST:
	case GST_MESSAGE_CLOCK_PROVIDE:
	case GST_MESSAGE_ELEMENT:  /* custom message, e.g. qtdemux redirect */
	case GST_MESSAGE_LATENCY:
	case GST_MESSAGE_NEW_CLOCK:
	case GST_MESSAGE_REQUEST_STATE:
	case GST_MESSAGE_SEGMENT_DONE:
	case GST_MESSAGE_SEGMENT_START:
	case GST_MESSAGE_STATE_DIRTY:
	case GST_MESSAGE_STEP_DONE:
	case GST_MESSAGE_STRUCTURE_CHANGE:
	case GST_MESSAGE_TAG: /* meta data: artist, title */
		/* ignore */
		break;
	case GST_MESSAGE_DURATION:
	default:
		g_print("GStreamer: BUS_CALL %s %d\n",
				gst_message_type_get_name(GST_MESSAGE_TYPE(msg)),
				GST_MESSAGE_TYPE(msg));
		break;
	}

	return TRUE;
}
Example #14
int
main (int argc, char *argv[])
{

  GThread *input_thread;
  gint numbuffers = -1;
  gchar device[128] = { '\0' };
  gchar input[128] = { '\0' };
  gulong frequency = 0;
  GstBus *bus;

  /* see for input option */

  int c;

  while (1) {
    static char long_options_desc[][64] = {
      {"Number of buffers to output before sending EOS"},
      {"Device location. Common in /dev/video0"},
      {"input/output (channel) to switch to"},
      {"frequency to tune to (in Hz)"},
      {0, 0, 0, 0}
    };
    static struct option long_options[] = {
      {"numbuffers", 1, 0, 'n'},
      {"device", 1, 0, 'd'},
      {"input", 1, 0, 'i'},
      {"frequency", 1, 0, 'f'},
      {0, 0, 0, 0}
    };
    /* getopt_long stores the option index here. */
    int option_index = 0;

    c = getopt_long (argc, argv, "n:d:i:f:h", long_options, &option_index);

    /* Detect the end of the options. */
    if (c == -1) {
      printf ("tip: use -h to see help message.\n");
      break;
    }

    switch (c) {
      case 0:
        /* If this option set a flag, do nothing else now. */
        if (long_options[option_index].flag != 0)
          break;
        printf ("option %s", long_options[option_index].name);
        if (optarg)
          printf (" with arg %s", optarg);
        printf ("\n");
        break;

      case 'n':
        numbuffers = atoi (optarg);
        break;

      case 'd':
        strncpy (device, optarg, sizeof (device) / sizeof (device[0]));
        break;

      case 'i':
        strncpy (input, optarg, sizeof (input) / sizeof (input[0]));
        break;

      case 'f':
        frequency = atol (optarg);
        break;

      case 'h':
        printf ("Usage: v4l2src-test [OPTION]...\n");
        for (c = 0; long_options[c].name; ++c) {
          printf ("-%c, --%s\r\t\t\t\t%s\n", long_options[c].val,
              long_options[c].name, long_options_desc[c]);
        }
        exit (0);
        break;

      case '?':
        /* getopt_long already printed an error message. */
        printf ("Use -h to see help message.\n");
        break;

      default:
        abort ();
    }
  }

  /* Print any remaining command line arguments (not options). */
  if (optind < argc) {
    printf ("Use -h to see help message.\n" "non-option ARGV-elements: ");
    while (optind < argc)
      printf ("%s ", argv[optind++]);
    putchar ('\n');
  }

  /* init */
  gst_init (&argc, &argv);

  /* create elements */
  if (!(pipeline = gst_pipeline_new ("my_pipeline"))) {
    fprintf (stderr, "error: gst_pipeline_new returned NULL");
    return -1;
  }

  if (!(source = gst_element_factory_make ("v4l2src", NULL))) {
    fprintf (stderr,
        "error: gst_element_factory_make (\"v4l2src\", NULL) returned NULL");
    return -1;
  }

  if (!(sink = gst_element_factory_make ("xvimagesink", NULL))) {
    fprintf (stderr,
        "error: gst_element_factory_make (\"xvimagesink\", NULL) returned NULL");
    return -1;
  }

  if (numbuffers > -1) {
    g_object_set (source, "num-buffers", numbuffers, NULL);
  }
  if (device[0]) {
    g_object_set (source, "device", device, NULL);
  }
  if (input[0]) {
    g_object_set (source, "input", input, NULL);
  }
  if (frequency) {
    g_object_set (source, "frequency", frequency, NULL);
  }

  /* you would normally check that the elements were created properly */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, my_bus_callback, NULL);

  /* put together a pipeline */
  gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);
  gst_element_link_pads (source, "src", sink, "sink");

  /* start the pipeline */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
  loop = g_main_loop_new (NULL, FALSE);

  input_thread = g_thread_try_new ("v4l2src-test", read_user, source, NULL);

  if (input_thread == NULL) {
    fprintf (stderr, "error: g_thread_try_new() failed");
    return -1;
  }

  g_main_loop_run (loop);
  g_thread_join (input_thread);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);

  gst_object_unref (bus);
  gst_object_unref (pipeline);

  gst_deinit ();

  return 0;

}
Example #15
static gboolean
gst_hls_demux_cache_fragments (GstHLSDemux * demux)
{
  gint i;

  /* If this playlist is a variant playlist, select the first one
   * and update it */
  if (gst_m3u8_client_has_variant_playlist (demux->client)) {
    GstM3U8 *child = NULL;

    if (demux->connection_speed == 0) {

      GST_M3U8_CLIENT_LOCK (demux->client);
      child = demux->client->main->current_variant->data;
      GST_M3U8_CLIENT_UNLOCK (demux->client);
    } else {
      GList *tmp = gst_m3u8_client_get_playlist_for_bitrate (demux->client,
          demux->connection_speed);

      child = GST_M3U8 (tmp->data);
    }

    gst_m3u8_client_set_current (demux->client, child);
    if (!gst_hls_demux_update_playlist (demux, FALSE)) {
      GST_ERROR_OBJECT (demux, "Could not fetch the child playlist %s",
          child->uri);
      return FALSE;
    }
  }

  if (!gst_m3u8_client_is_live (demux->client)) {
    GstClockTime duration = gst_m3u8_client_get_duration (demux->client);

    GST_DEBUG_OBJECT (demux, "Sending duration message : %" GST_TIME_FORMAT,
        GST_TIME_ARGS (duration));
    if (duration != GST_CLOCK_TIME_NONE)
      gst_element_post_message (GST_ELEMENT (demux),
          gst_message_new_duration (GST_OBJECT (demux),
              GST_FORMAT_TIME, duration));
  }

  /* Cache the first fragments */
  for (i = 0; i < demux->fragments_cache; i++) {
    gst_element_post_message (GST_ELEMENT (demux),
        gst_message_new_buffering (GST_OBJECT (demux),
            100 * i / demux->fragments_cache));
    g_get_current_time (&demux->next_update);
    if (!gst_hls_demux_get_next_fragment (demux, TRUE)) {
      if (demux->end_of_playlist)
        break;
      if (!demux->cancelled)
        GST_ERROR_OBJECT (demux, "Error caching the first fragments");
      return FALSE;
    }
    /* make sure we stop caching fragments if something cancelled it */
    if (demux->cancelled)
      return FALSE;
    gst_hls_demux_switch_playlist (demux);
  }
  gst_element_post_message (GST_ELEMENT (demux),
      gst_message_new_buffering (GST_OBJECT (demux), 100));

  g_get_current_time (&demux->next_update);

  demux->need_cache = FALSE;
  return TRUE;

}
Example #16
static GstFlowReturn
opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buffer)
{
  GstFlowReturn res = GST_FLOW_OK;
  gint size;
  guint8 *data;
  GstBuffer *outbuf;
  gint16 *out_data;
  int n, err;
  int samples;
  unsigned int packet_size;
  GstBuffer *buf;

  if (dec->state == NULL) {
    /* If we did not get any headers, default to 2 channels */
    if (dec->n_channels == 0) {
      GstCaps *caps;
      GST_INFO_OBJECT (dec, "No header, assuming single stream");
      dec->n_channels = 2;
      dec->sample_rate = 48000;
      caps = gst_opus_dec_negotiate (dec);
      GST_INFO_OBJECT (dec, "Setting src caps to %" GST_PTR_FORMAT, caps);
      gst_pad_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec), caps);
      gst_caps_unref (caps);
      /* default stereo mapping */
      dec->channel_mapping_family = 0;
      dec->channel_mapping[0] = 0;
      dec->channel_mapping[1] = 1;
      dec->n_streams = 1;
      dec->n_stereo_streams = 1;
    }

    GST_DEBUG_OBJECT (dec, "Creating decoder with %d channels, %d Hz",
        dec->n_channels, dec->sample_rate);
#ifndef GST_DISABLE_DEBUG
    gst_opus_common_log_channel_mapping_table (GST_ELEMENT (dec), opusdec_debug,
        "Mapping table", dec->n_channels, dec->channel_mapping);
#endif

    GST_DEBUG_OBJECT (dec, "%d streams, %d stereo", dec->n_streams,
        dec->n_stereo_streams);
    dec->state =
        opus_multistream_decoder_create (dec->sample_rate, dec->n_channels,
        dec->n_streams, dec->n_stereo_streams, dec->channel_mapping, &err);
    if (!dec->state || err != OPUS_OK)
      goto creation_failed;
  }

  if (buffer) {
    GST_DEBUG_OBJECT (dec, "Received buffer of size %u",
        GST_BUFFER_SIZE (buffer));
  } else {
    GST_DEBUG_OBJECT (dec, "Received missing buffer");
  }

  /* if using in-band FEC, we introduce one extra frame's delay as we need
     to potentially wait for the next buffer to decode a missing buffer */
  if (dec->use_inband_fec && !dec->primed) {
    GST_DEBUG_OBJECT (dec, "First buffer received in FEC mode, early out");
    gst_buffer_replace (&dec->last_buffer, buffer);
    dec->primed = TRUE;
    goto done;
  }

  /* That's the buffer we'll be sending to the opus decoder. */
  buf = (dec->use_inband_fec
      && GST_BUFFER_SIZE (dec->last_buffer) > 0) ? dec->last_buffer : buffer;

  if (buf && GST_BUFFER_SIZE (buf) > 0) {
    data = GST_BUFFER_DATA (buf);
    size = GST_BUFFER_SIZE (buf);
    GST_DEBUG_OBJECT (dec, "Using buffer of size %u", size);
  } else {
    /* concealment data, pass NULL as the bits parameters */
    GST_DEBUG_OBJECT (dec, "Using NULL buffer");
    data = NULL;
    size = 0;
  }

  /* use maximum size (120 ms) as the number of returned samples is
     not constant over the stream. */
  samples = 120 * dec->sample_rate / 1000;
  packet_size = samples * dec->n_channels * 2;

  res = gst_pad_alloc_buffer_and_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec),
      GST_BUFFER_OFFSET_NONE, packet_size,
      GST_PAD_CAPS (GST_AUDIO_DECODER_SRC_PAD (dec)), &outbuf);

  if (res != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (dec, "buf alloc flow: %s", gst_flow_get_name (res));
    return res;
  }

  out_data = (gint16 *) GST_BUFFER_DATA (outbuf);

  if (dec->use_inband_fec) {
    if (dec->last_buffer) {
      /* normal delayed decode */
      GST_LOG_OBJECT (dec, "FEC enabled, decoding last delayed buffer");
      n = opus_multistream_decode (dec->state, data, size, out_data, samples,
          0);
    } else {
      /* FEC reconstruction decode */
      GST_LOG_OBJECT (dec, "FEC enabled, reconstructing last buffer");
      n = opus_multistream_decode (dec->state, data, size, out_data, samples,
          1);
    }
  } else {
    /* normal decode */
    GST_LOG_OBJECT (dec, "FEC disabled, decoding buffer");
    n = opus_multistream_decode (dec->state, data, size, out_data, samples, 0);
  }

  if (n < 0) {
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, ("Decoding error: %d", n), (NULL));
    gst_buffer_unref (outbuf);
    return GST_FLOW_ERROR;
  }
  GST_DEBUG_OBJECT (dec, "decoded %d samples", n);
  GST_BUFFER_SIZE (outbuf) = n * 2 * dec->n_channels;

  /* Skip any samples that need skipping */
  if (dec->pre_skip > 0) {
    guint scaled_pre_skip = dec->pre_skip * dec->sample_rate / 48000;
    guint skip = scaled_pre_skip > n ? n : scaled_pre_skip;
    guint scaled_skip = skip * 48000 / dec->sample_rate;
    GST_BUFFER_SIZE (outbuf) -= skip * 2 * dec->n_channels;
    GST_BUFFER_DATA (outbuf) += skip * 2 * dec->n_channels;
    dec->pre_skip -= scaled_skip;
    GST_INFO_OBJECT (dec,
        "Skipping %u samples (%u at 48000 Hz, %u left to skip)", skip,
        scaled_skip, dec->pre_skip);
  }

  if (GST_BUFFER_SIZE (outbuf) == 0) {
    gst_buffer_unref (outbuf);
    outbuf = NULL;
  }

  /* Apply gain */
  /* Would be better off leaving this to a volume element, as this is
     a naive conversion that does too many int/float conversions.
     However, we don't have control over the pipeline...
     So make it optional if the user program wants to use a volume,
     but do it by default so the correct volume goes out by default */
  if (dec->apply_gain && outbuf && dec->r128_gain) {
    unsigned int i, nsamples = GST_BUFFER_SIZE (outbuf) / 2;
    double volume = dec->r128_gain_volume;
    gint16 *samples = (gint16 *) GST_BUFFER_DATA (outbuf);
    GST_DEBUG_OBJECT (dec, "Applying gain: volume %f", volume);
    for (i = 0; i < nsamples; ++i) {
      int sample = (int) (samples[i] * volume + 0.5);
      samples[i] = sample < -32768 ? -32768 : sample > 32767 ? 32767 : sample;
    }
  }

  if (dec->use_inband_fec) {
    gst_buffer_replace (&dec->last_buffer, buffer);
  }

  res = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), outbuf, 1);

  if (res != GST_FLOW_OK)
    GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res));

done:
  return res;

creation_failed:
  GST_ERROR_OBJECT (dec, "Failed to create Opus decoder: %d", err);
  return GST_FLOW_ERROR;
}
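
The pre-skip bookkeeping above runs in two clock domains: dec->pre_skip is kept in 48 kHz samples (as signalled in the OpusHead header), while the skipping itself happens in output samples at dec->sample_rate. A worked instance with hypothetical values:

/* sample_rate = 24000, dec->pre_skip = 312 (48 kHz units):
 *   scaled_pre_skip = 312 * 24000 / 48000 = 156 output samples
 *   if this frame decoded n = 120 samples: skip = 120 (all of them),
 *   scaled_skip = 120 * 48000 / 24000 = 240, dec->pre_skip -> 312 - 240 = 72 */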
Example #17
/**
 * gst_aggregator_iterate_sinkpads:
 * @self: The #GstAggregator
 * @func: The function to call.
 * @user_data: The data to pass to @func.
 *
 * Iterate the sinkpads of aggregator to call a function on them.
 *
 * This method guarantees that @func will be called only once for each
 * sink pad.
 */
gboolean
gst_aggregator_iterate_sinkpads (GstAggregator * self,
    GstAggregatorPadForeachFunc func, gpointer user_data)
{
  gboolean result = FALSE;
  GstIterator *iter;
  gboolean done = FALSE;
  GValue item = { 0, };
  GList *seen_pads = NULL;

  iter = gst_element_iterate_sink_pads (GST_ELEMENT (self));

  if (!iter)
    goto no_iter;

  while (!done) {
    switch (gst_iterator_next (iter, &item)) {
      case GST_ITERATOR_OK:
      {
        GstPad *pad;

        pad = g_value_get_object (&item);

        /* if already pushed, skip. FIXME, find something faster to tag pads */
        if (pad == NULL || g_list_find (seen_pads, pad)) {
          g_value_reset (&item);
          break;
        }

        GST_LOG_OBJECT (self, "calling function on pad %s:%s",
            GST_DEBUG_PAD_NAME (pad));
        result = func (self, pad, user_data);

        done = !result;

        seen_pads = g_list_prepend (seen_pads, pad);

        g_value_reset (&item);
        break;
      }
      case GST_ITERATOR_RESYNC:
        gst_iterator_resync (iter);
        break;
      case GST_ITERATOR_ERROR:
        GST_ERROR_OBJECT (self,
            "Could not iterate over internally linked pads");
        done = TRUE;
        break;
      case GST_ITERATOR_DONE:
        done = TRUE;
        break;
    }
  }
  g_value_unset (&item);
  gst_iterator_free (iter);

  if (seen_pads == NULL) {
    GST_DEBUG_OBJECT (self, "No pad seen");
    return FALSE;
  }

  g_list_free (seen_pads);

no_iter:
  return result;
}
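
A hypothetical caller, to show the contract: @func returns TRUE to continue with the next sink pad and FALSE to stop early, which is why result doubles as the loop condition above:

static gboolean
count_pad (GstAggregator * agg, GstAggregatorPad * pad, gpointer user_data)
{
  guint *count = user_data;

  (*count)++;
  return TRUE;                  /* keep iterating */
}

static guint
count_sink_pads (GstAggregator * agg)
{
  guint n = 0;

  gst_aggregator_iterate_sinkpads (agg, count_pad, &n);
  return n;
}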
Example #18
static GstFlowReturn
theora_handle_data_packet (GstTheoraDec * dec, ogg_packet * packet,
    GstClockTime outtime, GstClockTime outdur)
{
  /* normal data packet */
  th_ycbcr_buffer buf;
  GstBuffer *out;
  gboolean keyframe;
  GstFlowReturn result;
  ogg_int64_t gp;

  if (G_UNLIKELY (!dec->have_header))
    goto not_initialized;

  /* get timestamp and durations */
  if (outtime == -1)
    outtime = dec->last_timestamp;
  if (outdur == -1)
    outdur = gst_util_uint64_scale_int (GST_SECOND, dec->info.fps_denominator,
        dec->info.fps_numerator);

  /* calculate expected next timestamp */
  if (outtime != -1 && outdur != -1)
    dec->last_timestamp = outtime + outdur;

  /* the second most significant bit of the first data byte is cleared 
   * for keyframes. We can only check it if it's not a zero-length packet. */
  keyframe = packet->bytes && ((packet->packet[0] & 0x40) == 0);
  if (G_UNLIKELY (keyframe)) {
    GST_DEBUG_OBJECT (dec, "we have a keyframe");
    dec->need_keyframe = FALSE;
  } else if (G_UNLIKELY (dec->need_keyframe)) {
    goto dropping;
  }

  GST_DEBUG_OBJECT (dec, "parsing data packet");

  /* this does the decoding */
  if (G_UNLIKELY (th_decode_packetin (dec->decoder, packet, &gp) < 0))
    goto decode_error;

  if (outtime != -1) {
    gboolean need_skip;
    GstClockTime running_time;
    GstClockTime earliest_time;
    gdouble proportion;

    /* qos needs to be done on running time */
    running_time = gst_segment_to_running_time (&dec->segment, GST_FORMAT_TIME,
        outtime);

    GST_OBJECT_LOCK (dec);
    proportion = dec->proportion;
    earliest_time = dec->earliest_time;
    /* check for QoS, don't perform the last steps of getting and
     * pushing the buffers that are known to be late. */
    need_skip = earliest_time != -1 && running_time <= earliest_time;
    GST_OBJECT_UNLOCK (dec);

    if (need_skip) {
      GstMessage *qos_msg;
      guint64 stream_time;
      gint64 jitter;

      GST_DEBUG_OBJECT (dec, "skipping decoding: qostime %"
          GST_TIME_FORMAT " <= %" GST_TIME_FORMAT,
          GST_TIME_ARGS (running_time), GST_TIME_ARGS (earliest_time));

      dec->dropped++;

      stream_time =
          gst_segment_to_stream_time (&dec->segment, GST_FORMAT_TIME, outtime);
      jitter = GST_CLOCK_DIFF (running_time, earliest_time);

      qos_msg =
          gst_message_new_qos (GST_OBJECT_CAST (dec), FALSE, running_time,
          stream_time, outtime, outdur);
      gst_message_set_qos_values (qos_msg, jitter, proportion, 1000000);
      gst_message_set_qos_stats (qos_msg, GST_FORMAT_BUFFERS,
          dec->processed, dec->dropped);
      gst_element_post_message (GST_ELEMENT_CAST (dec), qos_msg);

      goto dropping_qos;
    }
  }

  /* this does postprocessing and set up the decoded frame
   * pointers in our yuv variable */
  if (G_UNLIKELY (th_decode_ycbcr_out (dec->decoder, buf) < 0))
    goto no_yuv;

  if (G_UNLIKELY ((buf[0].width != dec->info.frame_width)
          || (buf[0].height != dec->info.frame_height)))
    goto wrong_dimensions;

  result = theora_handle_image (dec, buf, &out);
  if (result != GST_FLOW_OK)
    return result;

  GST_BUFFER_OFFSET (out) = dec->frame_nr;
  if (dec->frame_nr != -1)
    dec->frame_nr++;
  GST_BUFFER_OFFSET_END (out) = dec->frame_nr;

  GST_BUFFER_TIMESTAMP (out) = outtime;
  GST_BUFFER_DURATION (out) = outdur;

  dec->processed++;

  if (dec->segment.rate >= 0.0)
    result = theora_dec_push_forward (dec, out);
  else
    result = theora_dec_push_reverse (dec, out);

  return result;

  /* ERRORS */
not_initialized:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
        (NULL), ("no header sent yet"));
    return GST_FLOW_ERROR;
  }
dropping:
  {
    GST_WARNING_OBJECT (dec, "dropping frame because we need a keyframe");
    dec->discont = TRUE;
    return GST_FLOW_OK;
  }
dropping_qos:
  {
    if (dec->frame_nr != -1)
      dec->frame_nr++;
    dec->discont = TRUE;
    GST_WARNING_OBJECT (dec, "dropping frame because of QoS");
    return GST_FLOW_OK;
  }
decode_error:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
        (NULL), ("theora decoder did not decode data packet"));
    return GST_FLOW_ERROR;
  }
no_yuv:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
        (NULL), ("couldn't read out YUV image"));
    return GST_FLOW_ERROR;
  }
wrong_dimensions:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, FORMAT,
        (NULL), ("dimensions of image do not match header"));
    return GST_FLOW_ERROR;
  }
}
Example #19
GstElement* webkitVideoSinkNew()
{
    return GST_ELEMENT(g_object_new(WEBKIT_TYPE_VIDEO_SINK, NULL));
}
Example #20
static void
gst_wrapper_camera_bin_reset_video_src_caps (GstWrapperCameraBinSrc * self,
    GstCaps * caps)
{
  GstClock *clock;
  gint64 base_time;

  GST_DEBUG_OBJECT (self, "Resetting src caps to %" GST_PTR_FORMAT, caps);
  if (self->src_vid_src) {
    GstCaps *old_caps;

    g_object_get (G_OBJECT (self->src_filter), "caps", &old_caps, NULL);
    if (gst_caps_is_equal (caps, old_caps)) {
      GST_DEBUG_OBJECT (self, "old and new caps are same, do not reset it");
      if (old_caps)
        gst_caps_unref (old_caps);
      return;
    }
    if (old_caps)
      gst_caps_unref (old_caps);

    clock = gst_element_get_clock (self->src_vid_src);
    base_time = gst_element_get_base_time (self->src_vid_src);

    /* Ideally, we should only need to get the source to READY here,
     * but it seems v4l2src isn't happy with this. Putting to NULL makes
     * it work.
     *
     * TODO fix this in v4l2src
     */
    gst_element_set_state (self->src_vid_src, GST_STATE_NULL);
    set_capsfilter_caps (self, caps);

    self->drop_newseg = TRUE;

    GST_DEBUG_OBJECT (self, "Bringing source up");
    if (!gst_element_sync_state_with_parent (self->src_vid_src)) {
      GST_WARNING_OBJECT (self, "Failed to reset source caps");
      gst_element_set_state (self->src_vid_src, GST_STATE_NULL);
    }

    if (clock) {
      gst_element_set_clock (self->src_vid_src, clock);
      gst_element_set_base_time (self->src_vid_src, base_time);

      if (GST_IS_BIN (self->src_vid_src)) {
        GstIterator *it =
            gst_bin_iterate_elements (GST_BIN (self->src_vid_src));
        gpointer item = NULL;
        gboolean done = FALSE;
        while (!done) {
          switch (gst_iterator_next (it, &item)) {
            case GST_ITERATOR_OK:
              gst_element_set_base_time (GST_ELEMENT (item), base_time);
              gst_object_unref (item);
              break;
            case GST_ITERATOR_RESYNC:
              gst_iterator_resync (it);
              break;
            case GST_ITERATOR_ERROR:
              done = TRUE;
              break;
            case GST_ITERATOR_DONE:
              done = TRUE;
              break;
          }
        }
        gst_iterator_free (it);
      }

      gst_object_unref (clock);
    }
  }
}
Example #21
static void
gst_gnome_vfs_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstGnomeVFSSink *sink;
  GstState cur_state;

  sink = GST_GNOME_VFS_SINK (object);

  gst_element_get_state (GST_ELEMENT (sink), &cur_state, NULL, 0);

  if (cur_state == GST_STATE_PLAYING || cur_state == GST_STATE_PAUSED) {
    GST_WARNING_OBJECT (sink, "cannot set property when PAUSED or PLAYING");
    return;
  }

  GST_OBJECT_LOCK (sink);

  switch (prop_id) {
    case ARG_LOCATION:{
      const gchar *new_location;

      if (sink->uri) {
        gnome_vfs_uri_unref (sink->uri);
        sink->uri = NULL;
      }
      if (sink->uri_name) {
        g_free (sink->uri_name);
        sink->uri_name = NULL;
      }

      new_location = g_value_get_string (value);
      if (new_location) {
        sink->uri_name = gst_gnome_vfs_location_to_uri_string (new_location);
        sink->uri = gnome_vfs_uri_new (sink->uri_name);
      }
      break;
    }
    case ARG_URI:{
      if (sink->uri) {
        gnome_vfs_uri_unref (sink->uri);
        sink->uri = NULL;
      }
      if (sink->uri_name) {
        g_free (sink->uri_name);
        sink->uri_name = NULL;
      }
      if (g_value_get_boxed (value)) {
        sink->uri = (GnomeVFSURI *) g_value_dup_boxed (value);
        sink->uri_name = gnome_vfs_uri_to_string (sink->uri, 0);
      }
      break;
    }
    case ARG_HANDLE:{
      if (sink->uri) {
        gnome_vfs_uri_unref (sink->uri);
        sink->uri = NULL;
      }
      if (sink->uri_name) {
        g_free (sink->uri_name);
        sink->uri_name = NULL;
      }
      sink->handle = g_value_get_boxed (value);
      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }

  GST_OBJECT_UNLOCK (sink);
}
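For reference, the application side of a setter guarded like this: since property changes are rejected once the sink is PAUSED or PLAYING, they have to be made while the element is still in NULL or READY. A minimal sketch using the gnomevfssink element this setter belongs to:

#include <gst/gst.h>

static GstElement *
make_vfs_sink (const gchar * location)
{
  GstElement *sink = gst_element_factory_make ("gnomevfssink", "sink");

  if (sink == NULL)
    return NULL;
  /* still in GST_STATE_NULL here, so the setter accepts the change */
  g_object_set (G_OBJECT (sink), "location", location, NULL);
  return sink;
}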
Example #22
static GstElement *find_color_balance_element() {
	GstIterator *iterator = gst_bin_iterate_all_by_interface(
		GST_BIN(pipeline), GST_TYPE_COLOR_BALANCE);

	GstElement *color_balance_element = NULL;
	gboolean done = FALSE, hardware = FALSE;
#if GST_CHECK_VERSION(1, 0, 0)
	GValue item = G_VALUE_INIT;
#else
	gpointer item;
#endif
	while (!done) {
		switch (gst_iterator_next(iterator, &item)) {
		case GST_ITERATOR_OK: {
#if GST_CHECK_VERSION(1, 0, 0)
			GstElement *element = g_value_get_object(&item);
#else
			GstElement *element = GST_ELEMENT(item);
#endif
			if (is_valid_color_balance_element(element)) {
				if (!color_balance_element) {
					color_balance_element =
						GST_ELEMENT_CAST(gst_object_ref(element));
					hardware =
						(gst_color_balance_get_balance_type(GST_COLOR_BALANCE
						(element)) == GST_COLOR_BALANCE_HARDWARE);
				} else if (!hardware) {
					gboolean tmp =
						(gst_color_balance_get_balance_type(GST_COLOR_BALANCE
						(element)) == GST_COLOR_BALANCE_HARDWARE);

					if (tmp) {
						if (color_balance_element)
							gst_object_unref(color_balance_element);
						color_balance_element =
							GST_ELEMENT_CAST(gst_object_ref(element));
						hardware = TRUE;
					}
				}
			}
#if GST_CHECK_VERSION(1, 0, 0)
			g_value_reset(&item);
#endif
			if (hardware && color_balance_element)
				done = TRUE;
			break;
		}
		case GST_ITERATOR_RESYNC:
			gst_iterator_resync(iterator);
			done = FALSE;
			hardware = FALSE;
			if (color_balance_element)
				gst_object_unref(color_balance_element);
			color_balance_element = NULL;
			break;
		case GST_ITERATOR_DONE:
		case GST_ITERATOR_ERROR:
		default:
			done = TRUE;
		}
	}
#if GST_CHECK_VERSION(1, 0, 0)
	g_value_unset(&item);
#endif
	gst_iterator_free(iterator);
	return color_balance_element;
}
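Once a balance element has been found, driving the GstColorBalance interface is simple: list the channels and write values inside each channel's advertised range. A short sketch built on the find_color_balance_element() above, resetting every channel to its midpoint:

#include <gst/video/colorbalance.h>

static void
reset_color_balance (void)
{
	GstElement *element = find_color_balance_element ();
	const GList *l;

	if (element == NULL)
		return;

	for (l = gst_color_balance_list_channels (GST_COLOR_BALANCE (element));
			l != NULL; l = l->next) {
		GstColorBalanceChannel *channel = l->data;
		gint mid = (channel->min_value + channel->max_value) / 2;

		/* channel->label is "BRIGHTNESS", "CONTRAST", ... */
		gst_color_balance_set_value (GST_COLOR_BALANCE (element),
			channel, mid);
	}
	gst_object_unref (element);
}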
Example #23
/* called with the object lock held */
static gboolean
gst_gl_stereo_mix_process_frames (GstGLStereoMix * mixer)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mixer);
  GstBuffer *converted_buffer, *inbuf;
  GstVideoInfo *out_info = &vagg->info;
#ifndef G_DISABLE_ASSERT
  gint n;
#endif
  gint v, views;
  gint valid_views = 0;
  GList *walk;

  inbuf = gst_buffer_new ();
  walk = GST_ELEMENT (mixer)->sinkpads;
  while (walk) {
    GstGLStereoMixPad *pad = walk->data;
    GstMemory *in_mem;

    GST_LOG_OBJECT (mixer, "Handling frame %d", valid_views);

    if (!pad || !pad->current_buffer) {
      GST_DEBUG ("skipping texture, null frame");
      walk = g_list_next (walk);
      continue;
    }

    in_mem = gst_buffer_get_memory (pad->current_buffer, 0);

    GST_LOG_OBJECT (mixer,
        "Appending memory %" GST_PTR_FORMAT " to intermediate buffer", in_mem);
    /* Appending the memory to a 2nd buffer locks it
     * exclusively a 2nd time, which will mark it for
     * copy-on-write. The ref will keep the memory
     * alive, but we also add a parent_buffer_meta to
     * prevent the input buffer from returning to any
     * buffer pool it might belong to
     */
    gst_buffer_append_memory (inbuf, in_mem);
    /* Use parent buffer meta to keep input buffer alive */
    gst_buffer_add_parent_buffer_meta (inbuf, pad->current_buffer);

    valid_views++;
    walk = g_list_next (walk);
  }

  if (mixer->mix_info.views != valid_views) {
    GST_WARNING_OBJECT (mixer, "Not enough input views to process");
    /* drop the partially assembled intermediate buffer */
    gst_buffer_unref (inbuf);
    return FALSE;
  }

  if (GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) ==
      GST_VIDEO_MULTIVIEW_MODE_SEPARATED)
    views = out_info->views;
  else
    views = 1;

  if (gst_gl_view_convert_submit_input_buffer (mixer->viewconvert,
          FALSE, inbuf) != GST_FLOW_OK)
    return FALSE;

  /* Clear any existing buffers, just in case */
  gst_buffer_replace (&mixer->primary_out, NULL);
  gst_buffer_replace (&mixer->auxilliary_out, NULL);

  if (gst_gl_view_convert_get_output (mixer->viewconvert,
          &mixer->primary_out) != GST_FLOW_OK)
    return FALSE;

  if (GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) ==
      GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME) {
    if (gst_gl_view_convert_get_output (mixer->viewconvert,
            &mixer->auxilliary_out) != GST_FLOW_OK)
      return FALSE;
  }

  if (mixer->primary_out == NULL)
    return FALSE;

  converted_buffer = mixer->primary_out;

#ifndef G_DISABLE_ASSERT
  n = gst_buffer_n_memory (converted_buffer);
  g_assert (n == GST_VIDEO_INFO_N_PLANES (out_info) * views);
#endif

  for (v = 0; v < views; v++) {
    gst_buffer_add_video_meta_full (converted_buffer, v,
        GST_VIDEO_INFO_FORMAT (out_info),
        GST_VIDEO_INFO_WIDTH (out_info),
        GST_VIDEO_INFO_HEIGHT (out_info),
        GST_VIDEO_INFO_N_PLANES (out_info), out_info->offset, out_info->stride);
    if (mixer->auxilliary_out) {
      gst_buffer_add_video_meta_full (mixer->auxilliary_out, v,
          GST_VIDEO_INFO_FORMAT (out_info),
          GST_VIDEO_INFO_WIDTH (out_info),
          GST_VIDEO_INFO_HEIGHT (out_info),
          GST_VIDEO_INFO_N_PLANES (out_info), out_info->offset,
          out_info->stride);
    }
  }

  return TRUE;
}
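The comment about the intermediate buffer is the load-bearing trick in this function: appending a GstMemory to a second buffer takes another exclusive lock (forcing copy-on-write), and GstParentBufferMeta keeps the originating buffer out of its pool while the wrapper lives. The same idea in isolation, as a sketch:

#include <gst/gst.h>

/* Wrap the first memory of @src in a new buffer without copying data,
 * keeping @src alive (and out of its buffer pool) until the wrapper
 * is destroyed. */
static GstBuffer *
wrap_first_memory (GstBuffer * src)
{
  GstBuffer *wrapper = gst_buffer_new ();
  GstMemory *mem = gst_buffer_get_memory (src, 0);

  gst_buffer_append_memory (wrapper, mem);      /* consumes the mem ref */
  gst_buffer_add_parent_buffer_meta (wrapper, src);
  return wrapper;
}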
Example #24
static void
gst_nle_source_pad_added_cb (GstElement * element, GstPad * pad,
    GstNleSource * nlesrc)
{
  GstCaps *caps;
  const GstStructure *s;
  const gchar *mime;
  GstElement *appsink = NULL;
  GstPad *sink_pad;
  GstAppSinkCallbacks appsink_cbs;
  GstNleSrcItem *item;

  item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);

  caps = gst_pad_get_caps_reffed (pad);
  s = gst_caps_get_structure (caps, 0);
  mime = gst_structure_get_name (s);
  GST_DEBUG_OBJECT (nlesrc, "Found mime type: %s", mime);

  if (g_strrstr (mime, "video") && !nlesrc->video_linked) {
    appsink = gst_element_factory_make ("appsink", NULL);
    memset (&appsink_cbs, 0, sizeof (appsink_cbs));
    appsink_cbs.eos = gst_nle_source_on_video_eos;
    appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
    appsink_cbs.new_buffer = gst_nle_source_on_video_buffer;
    nlesrc->video_linked = TRUE;
    if (!nlesrc->video_srcpad_added) {
      gst_pad_set_active (nlesrc->video_srcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (nlesrc),
          gst_object_ref (nlesrc->video_srcpad));
      nlesrc->video_srcpad_added = TRUE;
    }
    gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
        (GCallback) gst_nle_source_video_pad_probe_cb, nlesrc);
    nlesrc->video_eos = FALSE;
  } else if (g_strrstr (mime, "audio") && nlesrc->with_audio
      && !nlesrc->audio_linked && (item ? item->rate == 1.0 : TRUE)) {
    appsink = gst_element_factory_make ("appsink", NULL);
    memset (&appsink_cbs, 0, sizeof (appsink_cbs));
    appsink_cbs.eos = gst_nle_source_on_audio_eos;
    appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
    appsink_cbs.new_buffer = gst_nle_source_on_audio_buffer;
    nlesrc->audio_linked = TRUE;
    if (!nlesrc->audio_srcpad_added) {
      gst_pad_set_active (nlesrc->audio_srcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (nlesrc),
          gst_object_ref (nlesrc->audio_srcpad));
      nlesrc->audio_srcpad_added = TRUE;
    }
    gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
        (GCallback) gst_nle_source_audio_pad_probe_cb, nlesrc);
    nlesrc->audio_eos = FALSE;
  }
  /* mime points into caps' structure, so only drop our ref after its last use */
  gst_caps_unref (caps);
  if (appsink != NULL) {
    g_object_set (appsink, "sync", FALSE, NULL);
    gst_app_sink_set_callbacks (GST_APP_SINK (appsink), &appsink_cbs, nlesrc,
        NULL);
    gst_bin_add (GST_BIN (nlesrc->decoder), appsink);
    sink_pad = gst_element_get_static_pad (appsink, "sink");
    gst_pad_link (pad, sink_pad);
    gst_element_sync_state_with_parent (appsink);
    gst_object_unref (sink_pad);
  }
}
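Note that this callback relies on GStreamer 0.10 appsink API (new_buffer, gst_pad_add_event_probe, GST_BASE_SINK_PAD). Under GStreamer 1.x the same wiring uses new_sample and GstSample; a rough sketch, with a hypothetical on_new_sample handler:

#include <gst/app/gstappsink.h>

static GstFlowReturn
on_new_sample (GstAppSink * appsink, gpointer user_data)
{
  GstSample *sample = gst_app_sink_pull_sample (appsink);
  GstBuffer *buffer = gst_sample_get_buffer (sample);

  /* ... consume buffer; it stays valid while the sample is held ... */
  (void) buffer;
  gst_sample_unref (sample);
  return GST_FLOW_OK;
}

static void
attach_appsink_callbacks (GstElement * appsink, gpointer user_data)
{
  GstAppSinkCallbacks cbs = { 0, };

  cbs.new_sample = on_new_sample;
  gst_app_sink_set_callbacks (GST_APP_SINK (appsink), &cbs, user_data, NULL);
}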
Example #25
static GstFlowReturn
opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buffer)
{
  GstFlowReturn res = GST_FLOW_OK;
  gsize size;
  guint8 *data;
  GstBuffer *outbuf;
  gint16 *out_data;
  int n, err;
  int samples;
  unsigned int packet_size;
  GstBuffer *buf;
  GstMapInfo map, omap;

  if (dec->state == NULL) {
    /* If we did not get any headers, default to 2 channels */
    if (dec->n_channels == 0) {
      GST_INFO_OBJECT (dec, "No header, assuming single stream");
      dec->n_channels = 2;
      dec->sample_rate = 48000;
      /* default stereo mapping */
      dec->channel_mapping_family = 0;
      dec->channel_mapping[0] = 0;
      dec->channel_mapping[1] = 1;
      dec->n_streams = 1;
      dec->n_stereo_streams = 1;

      gst_opus_dec_negotiate (dec, NULL);
    }

    GST_DEBUG_OBJECT (dec, "Creating decoder with %d channels, %d Hz",
        dec->n_channels, dec->sample_rate);
#ifndef GST_DISABLE_GST_DEBUG
    gst_opus_common_log_channel_mapping_table (GST_ELEMENT (dec), opusdec_debug,
        "Mapping table", dec->n_channels, dec->channel_mapping);
#endif

    GST_DEBUG_OBJECT (dec, "%d streams, %d stereo", dec->n_streams,
        dec->n_stereo_streams);
    dec->state =
        opus_multistream_decoder_create (dec->sample_rate, dec->n_channels,
        dec->n_streams, dec->n_stereo_streams, dec->channel_mapping, &err);
    if (!dec->state || err != OPUS_OK)
      goto creation_failed;
  }

  if (buffer) {
    GST_DEBUG_OBJECT (dec, "Received buffer of size %" G_GSIZE_FORMAT,
        gst_buffer_get_size (buffer));
  } else {
    GST_DEBUG_OBJECT (dec, "Received missing buffer");
  }

  /* if using in-band FEC, we introduce one extra frame's delay as we need
     to potentially wait for the next buffer to decode a missing buffer */
  if (dec->use_inband_fec && !dec->primed) {
    GST_DEBUG_OBJECT (dec, "First buffer received in FEC mode, early out");
    gst_buffer_replace (&dec->last_buffer, buffer);
    dec->primed = TRUE;
    goto done;
  }

  /* That's the buffer we'll be sending to the opus decoder. */
  buf = (dec->use_inband_fec
      && gst_buffer_get_size (dec->last_buffer) >
      0) ? dec->last_buffer : buffer;

  if (buf && gst_buffer_get_size (buf) > 0) {
    gst_buffer_map (buf, &map, GST_MAP_READ);
    data = map.data;
    size = map.size;
    GST_DEBUG_OBJECT (dec, "Using buffer of size %" G_GSIZE_FORMAT, size);
  } else {
    /* concealment data, pass NULL as the bits parameters */
    GST_DEBUG_OBJECT (dec, "Using NULL buffer");
    data = NULL;
    size = 0;
  }

  if (gst_buffer_get_size (buffer) == 0) {
    GstClockTime const opus_plc_alignment = 2500 * GST_USECOND;
    GstClockTime aligned_missing_duration;
    GstClockTime missing_duration = GST_BUFFER_DURATION (buffer);

    GST_DEBUG_OBJECT (dec,
        "missing buffer, doing PLC duration %" GST_TIME_FORMAT
        " plus leftover %" GST_TIME_FORMAT, GST_TIME_ARGS (missing_duration),
        GST_TIME_ARGS (dec->leftover_plc_duration));

    /* add the leftover PLC duration to that of the buffer */
    missing_duration += dec->leftover_plc_duration;

    /* align the combined buffer and leftover PLC duration to multiples
     * of 2.5ms, always rounding down, and store excess duration for later */
    aligned_missing_duration =
        (missing_duration / opus_plc_alignment) * opus_plc_alignment;
    dec->leftover_plc_duration = missing_duration - aligned_missing_duration;

    /* Opus' PLC cannot operate with less than 2.5ms; skip PLC
     * and accumulate the missing duration in the leftover_plc_duration
     * for the next PLC attempt */
    if (aligned_missing_duration < opus_plc_alignment) {
      GST_DEBUG_OBJECT (dec,
          "current duration %" GST_TIME_FORMAT
          " of missing data not enough for PLC (minimum needed: %"
          GST_TIME_FORMAT ") - skipping", GST_TIME_ARGS (missing_duration),
          GST_TIME_ARGS (opus_plc_alignment));
      goto done;
    }

    /* convert the duration (in nanoseconds) to sample count */
    samples =
        gst_util_uint64_scale_int (aligned_missing_duration, dec->sample_rate,
        GST_SECOND);

    GST_DEBUG_OBJECT (dec,
        "calculated PLC frame length: %" GST_TIME_FORMAT
        " num frame samples: %d new leftover: %" GST_TIME_FORMAT,
        GST_TIME_ARGS (aligned_missing_duration), samples,
        GST_TIME_ARGS (dec->leftover_plc_duration));
  } else {
    /* use maximum size (120 ms) as the number of returned samples is
       not constant over the stream. */
    samples = 120 * dec->sample_rate / 1000;
  }

  packet_size = samples * dec->n_channels * 2;

  outbuf =
      gst_audio_decoder_allocate_output_buffer (GST_AUDIO_DECODER (dec),
      packet_size);
  if (!outbuf) {
    goto buffer_failed;
  }

  gst_buffer_map (outbuf, &omap, GST_MAP_WRITE);
  out_data = (gint16 *) omap.data;

  if (dec->use_inband_fec) {
    if (gst_buffer_get_size (dec->last_buffer) > 0) {
      /* normal delayed decode */
      GST_LOG_OBJECT (dec, "FEC enabled, decoding last delayed buffer");
      n = opus_multistream_decode (dec->state, data, size, out_data, samples,
          0);
    } else {
      /* FEC reconstruction decode */
      GST_LOG_OBJECT (dec, "FEC enabled, reconstructing last buffer");
      n = opus_multistream_decode (dec->state, data, size, out_data, samples,
          1);
    }
  } else {
    /* normal decode */
    GST_LOG_OBJECT (dec, "FEC disabled, decoding buffer");
    n = opus_multistream_decode (dec->state, data, size, out_data, samples, 0);
  }
  gst_buffer_unmap (outbuf, &omap);
  if (data != NULL)
    gst_buffer_unmap (buf, &map);

  if (n < 0) {
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, ("Decoding error: %d", n), (NULL));
    gst_buffer_unref (outbuf);
    return GST_FLOW_ERROR;
  }
  GST_DEBUG_OBJECT (dec, "decoded %d samples", n);
  gst_buffer_set_size (outbuf, n * 2 * dec->n_channels);

  /* Skip any samples that need skipping */
  if (dec->pre_skip > 0) {
    guint scaled_pre_skip = dec->pre_skip * dec->sample_rate / 48000;
    guint skip = scaled_pre_skip > n ? n : scaled_pre_skip;
    guint scaled_skip = skip * 48000 / dec->sample_rate;

    gst_buffer_resize (outbuf, skip * 2 * dec->n_channels, -1);
    dec->pre_skip -= scaled_skip;
    GST_INFO_OBJECT (dec,
        "Skipping %u samples (%u at 48000 Hz, %u left to skip)", skip,
        scaled_skip, dec->pre_skip);
  }

  if (gst_buffer_get_size (outbuf) == 0) {
    gst_buffer_unref (outbuf);
    outbuf = NULL;
  } else if (dec->opus_pos[0] != GST_AUDIO_CHANNEL_POSITION_INVALID) {
    gst_audio_buffer_reorder_channels (outbuf, GST_AUDIO_FORMAT_S16,
        dec->n_channels, dec->opus_pos, dec->info.position);
  }

  /* Apply gain */
  /* This would be better left to a volume element, as this is a naive
     conversion that does too many int/float conversions. However, we
     don't have control over the pipeline, so make it optional for user
     programs that prefer to insert their own volume element, and apply
     it by default so the correct volume goes out otherwise */
  if (dec->apply_gain && outbuf && dec->r128_gain) {
    gsize rsize;
    unsigned int i, nsamples;
    double volume = dec->r128_gain_volume;
    gint16 *samples;

    gst_buffer_map (outbuf, &omap, GST_MAP_READWRITE);
    samples = (gint16 *) omap.data;
    rsize = omap.size;
    GST_DEBUG_OBJECT (dec, "Applying gain: volume %f", volume);
    nsamples = rsize / 2;
    for (i = 0; i < nsamples; ++i) {
      int sample = (int) (samples[i] * volume + 0.5);
      samples[i] = sample < -32768 ? -32768 : sample > 32767 ? 32767 : sample;
    }
    gst_buffer_unmap (outbuf, &omap);
  }

  if (dec->use_inband_fec) {
    gst_buffer_replace (&dec->last_buffer, buffer);
  }

  res = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), outbuf, 1);

  if (res != GST_FLOW_OK)
    GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res));

done:
  return res;

creation_failed:
  GST_ERROR_OBJECT (dec, "Failed to create Opus decoder: %d", err);
  return GST_FLOW_ERROR;

buffer_failed:
  GST_ERROR_OBJECT (dec, "Failed to create %u byte buffer", packet_size);
  return GST_FLOW_ERROR;
}
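Most of the PLC branch above is bookkeeping around a single constraint: Opus conceals loss in 2.5 ms steps, so the missing duration is rounded down to a 2.5 ms multiple (the remainder carried over to the next attempt) before being converted to a sample count. The core arithmetic as a standalone sketch:

#include <gst/gst.h>

/* Round @missing down to a 2.5 ms multiple and convert to samples.
 * E.g. missing = 12 ms at 48000 Hz: aligned = 10 ms -> 480 samples. */
static int
plc_samples (GstClockTime missing, gint sample_rate)
{
  const GstClockTime align = 2500 * GST_USECOND;
  GstClockTime aligned = (missing / align) * align;

  if (aligned < align)
    return 0;                   /* below the 2.5 ms PLC minimum */
  return (int) gst_util_uint64_scale_int (aligned, sample_rate, GST_SECOND);
}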
Example #26
static void
gst_nle_source_next (GstNleSource * nlesrc)
{
  GstNleSrcItem *item;
  GstStateChangeReturn ret;
  GstElement *uridecodebin;
  GstBus *bus;
  GstState state;

  nlesrc->index++;

  if (nlesrc->index >= g_list_length (nlesrc->queue)) {
    gst_nle_source_push_eos (nlesrc);
    return;
  }

  if (nlesrc->source != NULL) {
    gst_object_unref (nlesrc->source);
    nlesrc->source = NULL;
  }

  if (nlesrc->decoder != NULL) {
    gst_element_set_state (GST_ELEMENT (nlesrc->decoder), GST_STATE_NULL);
    gst_element_get_state (GST_ELEMENT (nlesrc->decoder), NULL, NULL, 0);
    gst_object_unref (nlesrc->decoder);
  }

  nlesrc->decoder = gst_pipeline_new ("decoder");
  uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
  /* Connect signal to recover source element for queries in bytes */
  g_signal_connect (uridecodebin, "source-setup",
      G_CALLBACK (gst_nle_source_on_source_setup), nlesrc); 

  gst_bin_add (GST_BIN (nlesrc->decoder), uridecodebin);

  g_signal_connect (uridecodebin, "autoplug-select",
      G_CALLBACK (lgm_filter_video_decoders), nlesrc);
  g_signal_connect (uridecodebin, "pad-added",
      G_CALLBACK (gst_nle_source_pad_added_cb), nlesrc);
  g_signal_connect (uridecodebin, "no-more-pads",
      G_CALLBACK (gst_nle_source_no_more_pads), nlesrc);

  bus = GST_ELEMENT_BUS (nlesrc->decoder);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (gst_nle_source_bus_message),
      nlesrc);
  item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);

  GST_INFO_OBJECT (nlesrc, "Starting next item with uri:%s", item->file_path);
  GST_INFO_OBJECT (nlesrc, "start:%" GST_TIME_FORMAT " stop:%"
      GST_TIME_FORMAT " rate:%f", GST_TIME_ARGS (item->start),
      GST_TIME_ARGS (item->stop), item->rate);

  g_object_set (uridecodebin, "uri", item->file_path, NULL);

  nlesrc->seek_done = FALSE;
  if (GST_CLOCK_TIME_IS_VALID (item->stop)) {
    nlesrc->video_seek_done = FALSE;
    nlesrc->audio_seek_done = FALSE;
  } else {
    nlesrc->video_seek_done = TRUE;
    nlesrc->audio_seek_done = TRUE;
  }
  nlesrc->audio_eos = TRUE;
  nlesrc->video_eos = TRUE;
  nlesrc->audio_ts = 0;
  nlesrc->video_ts = 0;
  nlesrc->start_ts = nlesrc->accu_time;
  nlesrc->video_linked = FALSE;
  nlesrc->audio_linked = FALSE;
  nlesrc->item_setup = FALSE;
  nlesrc->cached_duration = 0;

  GST_DEBUG_OBJECT (nlesrc, "Start ts:%" GST_TIME_FORMAT,
      GST_TIME_ARGS (nlesrc->start_ts));
  gst_element_set_state (nlesrc->decoder, GST_STATE_PLAYING);
  ret = gst_element_get_state (nlesrc->decoder, &state, NULL, 5 * GST_SECOND);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    GST_WARNING_OBJECT (nlesrc, "Error changing state, selecting next item.");
    gst_nle_source_check_eos (nlesrc);
    return;
  }

  nlesrc->seek_done = TRUE;
  if (!item->still_picture && GST_CLOCK_TIME_IS_VALID (item->stop)) {
    GST_DEBUG_OBJECT (nlesrc, "Sending seek event");
    gst_element_seek (nlesrc->decoder, 1, GST_FORMAT_TIME,
        GST_SEEK_FLAG_ACCURATE,
        GST_SEEK_TYPE_SET, item->start, GST_SEEK_TYPE_SET, item->stop);
  }
}
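The closing seek selects the [start, stop) range with frame accuracy; no flush is needed there because the pipeline has only just prerolled. On an already-running pipeline the usual form adds GST_SEEK_FLAG_FLUSH so queued data is discarded, roughly:

#include <gst/gst.h>

/* Restrict playback of a running pipeline to [start, stop). */
static gboolean
seek_segment (GstElement * pipeline, GstClockTime start, GstClockTime stop)
{
  return gst_element_seek (pipeline, 1.0, GST_FORMAT_TIME,
      GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
      GST_SEEK_TYPE_SET, start, GST_SEEK_TYPE_SET, stop);
}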
Example #27
bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
        double fps, CvSize frameSize, bool is_color )
{
    CV_FUNCNAME("CvVideoWriter_GStreamer::open");

    __BEGIN__;
    // this backend doesn't actually support the fourcc parameter and encodes AVI with jpegenc;
    // we need a common API between backends to support fourcc for AVI,
    // but also a common way to choose codec and container format (ogg, dirac, matroska)
    // check arguments

    assert (filename);
    assert (fps > 0);
    assert (frameSize.width > 0  &&  frameSize.height > 0);
    std::map<int,char*>::iterator encit;
    encit=encs.find(fourcc);
    if (encit==encs.end())
        CV_ERROR( CV_StsUnsupportedFormat, "The GStreamer OpenCV backend doesn't currently support this codec.");
//    if(!isInited) {
//        gst_init (NULL, NULL);
//        isInited = true;
//    }
    gst_initializer::init();
    close();
    source=gst_element_factory_make("appsrc",NULL);
    file=gst_element_factory_make("filesink", NULL);
    enc=gst_element_factory_make(encit->second, NULL);
    mux=gst_element_factory_make("avimux", NULL);
    color = gst_element_factory_make("ffmpegcolorspace", NULL);
    if (!enc)
        CV_ERROR( CV_StsUnsupportedFormat, "Your version of GStreamer doesn't support this codec, or a needed plugin is missing.");
    g_object_set(G_OBJECT(file), "location", filename, NULL);
    pipeline = gst_pipeline_new (NULL);
    GstCaps* caps;
    if (is_color) {
        input_pix_fmt=1;
        caps= gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,
                                        frameSize.width,
                                        frameSize.height,
                                        (int) (fps * 1000),
                                        1000,
                                        1,
                                        1);
    }
    else  {
        input_pix_fmt=0;
        caps= gst_caps_new_simple("video/x-raw-gray",
                                  "width", G_TYPE_INT, frameSize.width,
                                  "height", G_TYPE_INT, frameSize.height,
                                  "framerate", GST_TYPE_FRACTION, int(fps),1,
                                  "bpp",G_TYPE_INT,8,
                                  "depth",G_TYPE_INT,8,
                                  NULL);
    }
    gst_app_src_set_caps(GST_APP_SRC(source), caps);
    if (fourcc==CV_FOURCC_DEFAULT) {
        // enc is not added to the bin in this branch, so it must not be linked
        gst_bin_add_many(GST_BIN(pipeline), source, color, mux, file, NULL);
        if(!gst_element_link_many(source, color, mux, file, NULL)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
        }
    }
    else {
        gst_bin_add_many(GST_BIN(pipeline), source, color,enc,mux, file, NULL);
        if(!gst_element_link_many(source,color,enc,mux,file,NULL)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
        }
    }


    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
        GST_STATE_CHANGE_FAILURE) {
            CV_ERROR(CV_StsError, "GStreamer: cannot put pipeline to play\n");
    }
    __END__;
    return true;
}
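gst_video_format_new_caps and the video/x-raw-gray media type are GStreamer 0.10 API. If this writer were ported to 1.x, the BGR caps would be built roughly like this (a sketch, not the actual OpenCV port; grayscale input would use format GRAY8 instead):

#include <gst/gst.h>

static GstCaps *
make_bgr_caps (int width, int height, int fps)
{
  return gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, "BGR",
      "width", G_TYPE_INT, width,
      "height", G_TYPE_INT, height,
      "framerate", GST_TYPE_FRACTION, fps, 1,
      NULL);
}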
Example #28
int
main (int argc, char *argv[])
{
  GstElement *filesrc, *osssink, *queue, *parse, *decode;
  GstElement *bin;
  GstElement *thread;

  gst_init (&argc, &argv);

  if (argc != 2) {
    g_print ("usage: %s <filename>\n", argv[0]);
    exit (-1);
  }

  /* create a new thread to hold the elements */
  thread = gst_thread_new ("thread");
  g_assert (thread != NULL);

  /* create a new bin to hold the elements */
  bin = gst_bin_new ("bin");
  g_assert (bin != NULL);

  /* create a disk reader */
  filesrc = gst_element_factory_make ("filesrc", "disk_source");
  g_assert (filesrc != NULL);
  g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);
  g_signal_connect (G_OBJECT (filesrc), "eos", G_CALLBACK (eos), thread);

  queue = gst_element_factory_make ("queue", "queue");

  /* and an audio sink */
  osssink = gst_element_factory_make ("osssink", "play_audio");
  g_assert (osssink != NULL);

  parse = gst_element_factory_make ("mp3parse", "parse");
  decode = gst_element_factory_make ("mpg123", "decode");

  /* add objects to the main bin */
  gst_bin_add (GST_BIN (bin), filesrc);
  gst_bin_add (GST_BIN (bin), queue);

  gst_bin_add (GST_BIN (thread), parse);
  gst_bin_add (GST_BIN (thread), decode);
  gst_bin_add (GST_BIN (thread), osssink);

  gst_element_link_many (filesrc, queue, parse, decode, osssink, NULL);

  /* make it ready */
  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_READY);
  /* start playing */
  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_PLAYING);

  playing = TRUE;

  while (playing) {
    gst_bin_iterate (GST_BIN (bin));
  }

  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_NULL);

  exit (0);
}
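gst_thread_new and gst_bin_iterate are GStreamer 0.x-era scheduling, where the application drove iteration by hand; neither survives in 1.x. The modern equivalent of this whole program collapses to a playbin plus a blocking bus wait, sketched here:

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *playbin;
  GstBus *bus;
  GstMessage *msg;
  gchar *uri;

  gst_init (&argc, &argv);
  if (argc != 2) {
    g_print ("usage: %s <filename>\n", argv[0]);
    return -1;
  }

  uri = gst_filename_to_uri (argv[1], NULL);
  playbin = gst_element_factory_make ("playbin", NULL);
  g_object_set (playbin, "uri", uri, NULL);
  g_free (uri);

  gst_element_set_state (playbin, GST_STATE_PLAYING);

  /* block until EOS or an error instead of iterating the bin */
  bus = gst_element_get_bus (playbin);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_EOS | GST_MESSAGE_ERROR);
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);

  gst_element_set_state (playbin, GST_STATE_NULL);
  gst_object_unref (playbin);
  return 0;
}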
Example #29
static GstFlowReturn
daala_handle_data_packet (GstDaalaDec * dec, ogg_packet * packet,
    GstVideoCodecFrame * frame)
{
  /* normal data packet */
  od_img img;
  gboolean keyframe;
  GstFlowReturn result;

  if (G_UNLIKELY (!dec->have_header))
    goto not_initialized;

  /* keyframes are flagged in the second most significant bit of the
   * first data byte. We can only check it if it's not a zero-length packet. */
  keyframe = packet->bytes && ((packet->packet[0] & 0x40));
  if (G_UNLIKELY (keyframe)) {
    GST_DEBUG_OBJECT (dec, "we have a keyframe");
    dec->need_keyframe = FALSE;
  } else if (G_UNLIKELY (dec->need_keyframe)) {
    goto dropping;
  }

  GST_DEBUG_OBJECT (dec, "parsing data packet");

  /* this does the decoding */
  if (G_UNLIKELY (daala_decode_packet_in (dec->decoder, &img, packet) < 0))
    goto decode_error;

  if (frame &&
      (gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (dec),
              frame) < 0))
    goto dropping_qos;

  if (G_UNLIKELY ((img.width != dec->info.pic_width
              || img.height != dec->info.pic_height)))
    goto wrong_dimensions;

  result = daala_handle_image (dec, &img, frame);

  return result;

  /* ERRORS */
not_initialized:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
        (NULL), ("no header sent yet"));
    return GST_FLOW_ERROR;
  }
dropping:
  {
    GST_WARNING_OBJECT (dec, "dropping frame because we need a keyframe");
    return GST_CUSTOM_FLOW_DROP;
  }
dropping_qos:
  {
    GST_WARNING_OBJECT (dec, "dropping frame because of QoS");
    return GST_CUSTOM_FLOW_DROP;
  }
decode_error:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
        (NULL), ("daala decoder did not decode data packet"));
    return GST_FLOW_ERROR;
  }
wrong_dimensions:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, FORMAT,
        (NULL), ("dimensions of image do not match header"));
    return GST_FLOW_ERROR;
  }
}
Example #30
bool VideoSender::enableSending(bool enable)
{
  GstElement *sink;
#define USE_TEE 0
#if USE_TEE
  GstElement *ob;
#endif
  GError *error = NULL;

  qDebug() << "In" << __FUNCTION__ << ", Enable:" << enable;

  // Disable video sending
  if (enable == false) {
    qDebug() << "Stopping video encoding";
    if (pipeline) {
      gst_element_set_state(pipeline, GST_STATE_NULL);
    }

    qDebug() << "Deleting pipeline";
    if (pipeline) {
      gst_object_unref(GST_OBJECT(pipeline));
      pipeline = NULL;
    }
    encoder = NULL;

    ODdata[OB_VIDEO_PARAM_CONTINUE] = 0;
    if (ODprocess) {
      ODprocess->write((const char *)ODdata, sizeof(ODdata));
    }

    return true;
  }

  if (pipeline) {
    // Do nothing as the pipeline has already been created and is
    // probably running
    qCritical("Pipeline exists already, doing nothing");
    return true;
  }

  // Initialisation. We don't pass command line arguments here
  if (!gst_init_check(NULL, NULL, NULL)) {
    qCritical("Failed to init GST");
    return false;
  }

  if (!hardware) {
    qCritical("No hardware plugin");
    return false;
  }

  QString pipelineString = "";
  pipelineString.append(videoSource + " name=source");
  pipelineString.append(" ! ");
  pipelineString.append("capsfilter caps=\"video/x-raw,format=(string)I420,framerate=(fraction)30/1,");
  switch(quality) {
  default:
  case 0:
    pipelineString.append("width=(int)320,height=(int)240");
    break;
  case 1:
    pipelineString.append("width=(int)640,height=(int)480");
    break;
  case 2:
    pipelineString.append("width=(int)800,height=(int)600");
    break;
  }

  pipelineString.append("\"");

#if USE_TEE
  pipelineString.append(" ! ");
  pipelineString.append("tee name=scripttee");
  // FIXME: does this cause latency?
  pipelineString.append(" ! ");
  pipelineString.append("queue");
#endif
  pipelineString.append(" ! ");
  pipelineString.append(hardware->getEncodingPipeline());
  pipelineString.append(" ! ");
  pipelineString.append("rtph264pay name=rtppay config-interval=1 mtu=500");
  pipelineString.append(" ! ");
  pipelineString.append("appsink name=sink sync=false max-buffers=1 drop=true");
#if USE_TEE
  // Tee (branch) frames for external components
  pipelineString.append(" scripttee. ");
  // TODO: downscale to 320x240?
  pipelineString.append(" ! ");
  pipelineString.append("appsink name=ob sync=false max-buffers=1 drop=true");
#endif
  qDebug() << "Using pipeline:" << pipelineString;

  // Create encoding video pipeline
  pipeline = gst_parse_launch(pipelineString.toUtf8(), &error);
  if (!pipeline) {
    qCritical("Failed to parse pipeline: %s", error->message);
    g_error_free(error);
    return false;
  }

  encoder = gst_bin_get_by_name(GST_BIN(pipeline), "encoder");
  if (!encoder) {
    qCritical("Failed to get encoder");
    return false;
  }

  // Assuming here that X86 uses x264enc
  if (hardware->getHardwareName() == "generic_x86") {
    g_object_set(G_OBJECT(encoder), "speed-preset", 1, NULL); // ultrafast
    g_object_set(G_OBJECT(encoder), "tune", 0x00000004, NULL); // zerolatency
  }

  if (hardware->getHardwareName() == "tegrak1" ||
      hardware->getHardwareName() == "tegrax1") {
    //g_object_set(G_OBJECT(encoder), "input-buffers", 2, NULL); // not valid on 1.0
    //g_object_set(G_OBJECT(encoder), "output-buffers", 2, NULL); // not valid on 1.0
    //g_object_set(G_OBJECT(encoder), "quality-level", 0, NULL);
    //g_object_set(G_OBJECT(encoder), "rc-mode", 0, NULL);
  }

  if (hardware->getHardwareName() == "tegrax2") {
    g_object_set(G_OBJECT(encoder), "preset-level", 0, NULL); // 0 == UltraFastPreset for high perf
  }

  setBitrate(bitrate);

  {
    GstElement *source;
    source = gst_bin_get_by_name(GST_BIN(pipeline), "source");
    if (!source) {
      qCritical("Failed to get source");
      return false;
    }

    g_object_set(G_OBJECT(source), "do-timestamp", true, NULL);

    if (videoSource == "videotestsrc") {
      g_object_set(G_OBJECT(source), "is-live", true, NULL);
    } else if (videoSource == "v4l2src") {
      //g_object_set(G_OBJECT(source), "always-copy", false, NULL);

      const char *camera = "/dev/video0";
      QByteArray env_camera = qgetenv("PLECO_SLAVE_CAMERA");
      if (!env_camera.isNull()) {
        camera = env_camera.data();
      }
      g_object_set(G_OBJECT(source), "device", camera, NULL);
    }

    if (hardware->getHardwareName() == "tegrak1" ||
        hardware->getHardwareName() == "tegrax1") {
      g_object_set(G_OBJECT(source), "io-mode", 1, NULL);
    }
  }


  sink = gst_bin_get_by_name(GST_BIN(pipeline), "sink");
  if (!sink) {
    qCritical("Failed to get sink");
    return false;
  }

  // Set appsink callbacks (zero-initialize so unused callback fields stay NULL)
  GstAppSinkCallbacks appSinkCallbacks = { };
  appSinkCallbacks.eos             = NULL;
  appSinkCallbacks.new_preroll     = NULL;
  appSinkCallbacks.new_sample      = &newBufferCB;

  gst_app_sink_set_callbacks(GST_APP_SINK(sink), &appSinkCallbacks, this, NULL);
#if USE_TEE
  // Callbacks for the OB process appsink
  ob = gst_bin_get_by_name(GST_BIN(pipeline), "ob");
  if (!ob) {
    qCritical("Failed to get ob appsink");
    return false;
  }

  // Set appsink callbacks (zero-initialize so unused callback fields stay NULL)
  GstAppSinkCallbacks obCallbacks = { };
  obCallbacks.eos             = NULL;
  obCallbacks.new_preroll     = NULL;
  obCallbacks.new_sample      = &newBufferOBCB;

  gst_app_sink_set_callbacks(GST_APP_SINK(ob), &obCallbacks, this, NULL);
#endif
  // Start running 
  gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);

  launchObjectDetection();

  return true;
}