Example #1
static gint accept_socket (HTTPServer *http_server)
{
    struct epoll_event ee;
    gint accepted_sock, ret;
    struct sockaddr in_addr;
    socklen_t in_len;
    RequestData **request_data_pointer;
    RequestData *request_data;
    gint request_data_queue_len;

    in_len = sizeof (in_addr);
    for (;;) {
        /* repeat accept until -1 returned */
        accepted_sock = accept (http_server->listen_sock, &in_addr, &in_len);
        if (accepted_sock == -1) {
            if (( errno == EAGAIN) || (errno == EWOULDBLOCK)) {
                /* We have processed all incoming connections. */
                break;

            } else {
                GST_ERROR ("accept error  %s", g_strerror (errno));
                break;
            }
        }
        g_mutex_lock (&(http_server->request_data_queue_mutex));
        request_data_queue_len = g_queue_get_length (http_server->request_data_queue);
        g_mutex_unlock (&(http_server->request_data_queue_mutex));
        if (request_data_queue_len == 0) {
            GST_ERROR ("event queue empty");
            (void) close (accepted_sock);
            //close_socket_gracefully (accepted_sock);
            continue;
        }
        GST_INFO ("request from %s:%d, accepted_sock %d", get_address (in_addr), get_port (in_addr), accepted_sock);
        http_server->total_click += 1;

        int on = 1;
        setsockopt (accepted_sock, SOL_TCP, TCP_CORK, &on, sizeof (on));
        set_nonblock (accepted_sock);
        g_mutex_lock (&(http_server->request_data_queue_mutex));
        request_data_pointer = g_queue_pop_tail (http_server->request_data_queue);
        g_mutex_unlock (&(http_server->request_data_queue_mutex));
        if (request_data_pointer == NULL) {
            GST_WARNING ("No NONE request, refuse this request.");
            (void) close (accepted_sock);
            //close_socket_gracefully (accepted_sock);
            continue;
        }
        request_data = *request_data_pointer;
        GST_DEBUG ("pop up request data, id %d, sock %d, events %d", request_data->id, accepted_sock, request_data->events);
        /* clear events, there may be events from last request. */
        request_data->events = 0;
        request_data->client_addr = in_addr;
        request_data->sock = accepted_sock;
        request_data->birth_time = gst_clock_get_time (http_server->system_clock);
        request_data->status = HTTP_CONNECTED;
        request_data->request_length = 0;
        ee.events = EPOLLIN | EPOLLOUT | EPOLLET;
        ee.data.ptr = request_data_pointer;
        ret = epoll_ctl (http_server->epollfd, EPOLL_CTL_ADD, accepted_sock, &ee);
        if (ret == -1) {
            GST_ERROR ("epoll_ctl add error %s sock %d", g_strerror (errno), accepted_sock);
            request_data_release (http_server, request_data_pointer);
            return -1;

        } else {
            GST_DEBUG ("pop request data, sock %d", request_data->sock);
        }
    }

    return 0;
}
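The set_nonblock () helper used above is not shown in this listing; a minimal sketch of how such a helper is commonly implemented with fcntl () (an assumption, not necessarily this project's actual code) could be:

#include <fcntl.h>

/* Sketch: put an accepted socket into non-blocking mode so the
 * edge-triggered (EPOLLET) epoll registration above behaves as expected. */
static gint set_nonblock (gint sock)
{
    gint flags;

    flags = fcntl (sock, F_GETFL, 0);
    if (flags == -1) {
        return -1;
    }
    return fcntl (sock, F_SETFL, flags | O_NONBLOCK);
}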
Example #2
/**
 * gst_date_time_new_from_iso8601_string:
 * @string: ISO 8601-formatted datetime string.
 *
 * Tries to parse common variants of ISO-8601 datetime strings into a
 * #GstDateTime.
 *
 * Free-function: gst_date_time_unref
 *
 * Returns: (transfer full): a newly created #GstDateTime, or NULL on error
 */
GstDateTime *
gst_date_time_new_from_iso8601_string (const gchar * string)
{
  gint year = -1, month = -1, day = -1, hour = -1, minute = -1;
  gdouble second = -1.0;
  gfloat tzoffset = 0.0;
  guint64 usecs;
  gint len, ret;

  g_return_val_if_fail (string != NULL, NULL);

  GST_DEBUG ("Parsing '%s' into a datetime", string);

  len = strlen (string);

  if (len < 4 || !g_ascii_isdigit (string[0]) || !g_ascii_isdigit (string[1])
      || !g_ascii_isdigit (string[2]) || !g_ascii_isdigit (string[3]))
    return NULL;

  ret = sscanf (string, "%04d-%02d-%02d", &year, &month, &day);

  if (ret == 0)
    return NULL;

  if (ret == 3 && day <= 0) {
    ret = 2;
    day = -1;
  }

  if (ret >= 2 && month <= 0) {
    ret = 1;
    month = day = -1;
  }

  if (ret >= 1 && year <= 0)
    return NULL;

  else if (ret >= 1 && len < 16)
    /* YMD is 10 chars. YMD + HM will be 16 chars. If it is less,
     * it makes no sense to continue. We will stay with YMD. */
    goto ymd;

  string += 10;
  /* Exit if there is no expected value at this stage */
  if (!(*string == 'T' || *string == '-' || *string == ' '))
    goto ymd;

  /* if hour or minute fails, then we will use only ymd. */
  hour = g_ascii_strtoull (string + 1, (gchar **) & string, 10);
  if (hour > 24 || *string != ':')
    goto ymd;

  /* minute */
  minute = g_ascii_strtoull (string + 1, (gchar **) & string, 10);
  if (minute > 59)
    goto ymd;

  /* second */
  if (*string == ':') {
    second = g_ascii_strtoull (string + 1, (gchar **) & string, 10);
    /* if we fail here, we still can reuse hour and minute. We
     * will still attempt to parse any timezone information */
    if (second > 59) {
      second = -1.0;
    } else {
      /* microseconds */
      if (*string == '.' || *string == ',') {
        const gchar *usec_start = string + 1;
        guint digits;

        usecs = g_ascii_strtoull (string + 1, (gchar **) & string, 10);
        if (usecs != G_MAXUINT64 && string > usec_start) {
          digits = (guint) (string - usec_start);
          second += (gdouble) usecs / pow (10.0, digits);
        }
      }
    }
  }

  if (*string == 'Z')
    goto ymd_hms;
  else {
    /* reuse some code from gst-plugins-base/gst-libs/gst/tag/gstxmptag.c */
    gint gmt_offset_hour = -1, gmt_offset_min = -1, gmt_offset = -1;
    gchar *plus_pos = NULL;
    gchar *neg_pos = NULL;
    gchar *pos = NULL;

    GST_LOG ("Checking for timezone information");

    /* check if there is timezone info */
    plus_pos = strrchr (string, '+');
    neg_pos = strrchr (string, '-');
    if (plus_pos)
      pos = plus_pos + 1;
    else if (neg_pos)
      pos = neg_pos + 1;

    if (pos) {
      gint ret_tz;
      if (pos[2] == ':')
        ret_tz = sscanf (pos, "%d:%d", &gmt_offset_hour, &gmt_offset_min);
      else
        ret_tz = sscanf (pos, "%02d%02d", &gmt_offset_hour, &gmt_offset_min);

      GST_DEBUG ("Parsing timezone: %s", pos);

      if (ret_tz == 2) {
        gmt_offset = gmt_offset_hour * 60 + gmt_offset_min;
        if (neg_pos != NULL && neg_pos + 1 == pos)
          gmt_offset *= -1;

        tzoffset = gmt_offset / 60.0;

        GST_LOG ("Timezone offset: %f (%d minutes)", tzoffset, gmt_offset);
      } else
        GST_WARNING ("Failed to parse timezone information");
    }
  }

ymd_hms:
  return gst_date_time_new (tzoffset, year, month, day, hour, minute, second);
ymd:
  return gst_date_time_new_ymd (year, month, day);
}
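As a usage illustration (a sketch, not taken from the file above), parsing a full ISO-8601 timestamp and reading some fields back through the public GstDateTime API might look like this:

/* Usage sketch: parse "2012-03-04T12:30:00Z"-style input and log the result.
 * Assumes the string carries at least a complete date. */
static void
log_iso8601 (const gchar * s)
{
  GstDateTime *dt = gst_date_time_new_from_iso8601_string (s);

  if (dt == NULL) {
    GST_WARNING ("could not parse '%s'", s);
    return;
  }

  GST_INFO ("parsed %04d-%02d-%02d", gst_date_time_get_year (dt),
      gst_date_time_get_month (dt), gst_date_time_get_day (dt));
  if (gst_date_time_has_time (dt))
    GST_INFO ("time of day %02d:%02d", gst_date_time_get_hour (dt),
        gst_date_time_get_minute (dt));

  gst_date_time_unref (dt);
}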
static void
flush_data (GstRtpQDM2Depay * depay)
{
  guint i;
  guint avail;

  if ((avail = gst_adapter_available (depay->adapter)))
    gst_adapter_flush (depay->adapter, avail);

  GST_DEBUG ("Flushing %d packets", depay->nbpackets);

  for (i = 0; depay->packets[i]; i++) {
    QDM2Packet *pack = depay->packets[i];
    guint32 crc = 0;
    int i = 0;
    GstBuffer *buf;
    guint8 *data;

    /* CRC is the sum of everything (including first bytes) */

    data = pack->data;

    if (G_UNLIKELY (data == NULL))
      continue;

    /* If the packet size is bigger than 0xff, we need 2 bytes to store the size */
    if (depay->packetsize > 0xff) {
      /* Expanded size 0x02 | 0x80 */
      data[0] = 0x82;
      GST_WRITE_UINT16_BE (data + 1, depay->packetsize - 3);
    } else {
      data[0] = 0x2;
      data[1] = depay->packetsize - 2;
    }

    /* Calculate CRC */
    for (; i < depay->packetsize; i++)
      crc += data[i];

    GST_DEBUG ("CRC is 0x%x", crc);

    /* Write CRC */
    if (depay->packetsize > 0xff)
      GST_WRITE_UINT16_BE (data + 3, crc);
    else
      GST_WRITE_UINT16_BE (data + 2, crc);

    GST_MEMDUMP ("Extracted packet", data, depay->packetsize);

    buf = gst_buffer_new ();
    gst_buffer_append_memory (buf,
        gst_memory_new_wrapped (0, data, depay->packetsize, 0,
            depay->packetsize, data, g_free));

    gst_adapter_push (depay->adapter, buf);

    if (pack->data) {
      pack->data = NULL;
    }
  }
}
static void
gst_mpg123_audio_dec_class_init (GstMpg123AudioDecClass * klass)
{
  GstAudioDecoderClass *base_class;
  GstElementClass *element_class;
  GstPadTemplate *src_template, *sink_template;
  int error;

  GST_DEBUG_CATEGORY_INIT (mpg123_debug, "mpg123", 0, "mpg123 mp3 decoder");

  base_class = GST_AUDIO_DECODER_CLASS (klass);
  element_class = GST_ELEMENT_CLASS (klass);

  gst_element_class_set_static_metadata (element_class,
      "mpg123 mp3 decoder",
      "Codec/Decoder/Audio",
      "Decodes mp3 streams using the mpg123 library",
      "Carlos Rafael Giani <*****@*****.**>");

  /* Not using static pad template for srccaps, since the comma-separated list
   * of formats needs to be created depending on whatever mpg123 supports */
  {
    const int *format_list;
    const long *rates_list;
    size_t num, i;
    GString *s;
    GstCaps *src_template_caps;

    s = g_string_new ("audio/x-raw, ");

    mpg123_encodings (&format_list, &num);
    g_string_append (s, "format = { ");
    for (i = 0; i < num; ++i) {
      switch (format_list[i]) {
        case MPG123_ENC_SIGNED_16:
          g_string_append (s, (i > 0) ? ", " : "");
          g_string_append (s, GST_AUDIO_NE (S16));
          break;
        case MPG123_ENC_UNSIGNED_16:
          g_string_append (s, (i > 0) ? ", " : "");
          g_string_append (s, GST_AUDIO_NE (U16));
          break;
        case MPG123_ENC_SIGNED_24:
          g_string_append (s, (i > 0) ? ", " : "");
          g_string_append (s, GST_AUDIO_NE (S24));
          break;
        case MPG123_ENC_UNSIGNED_24:
          g_string_append (s, (i > 0) ? ", " : "");
          g_string_append (s, GST_AUDIO_NE (U24));
          break;
        case MPG123_ENC_SIGNED_32:
          g_string_append (s, (i > 0) ? ", " : "");
          g_string_append (s, GST_AUDIO_NE (S32));
          break;
        case MPG123_ENC_UNSIGNED_32:
          g_string_append (s, (i > 0) ? ", " : "");
          g_string_append (s, GST_AUDIO_NE (U32));
          break;
        case MPG123_ENC_FLOAT_32:
          g_string_append (s, (i > 0) ? ", " : "");
          g_string_append (s, GST_AUDIO_NE (F32));
          break;
        default:
          GST_DEBUG ("Ignoring mpg123 format %d", format_list[i]);
          break;
      }
    }
    g_string_append (s, " }, ");

    mpg123_rates (&rates_list, &num);
    g_string_append (s, "rate = (int) { ");
    for (i = 0; i < num; ++i) {
      g_string_append_printf (s, "%s%lu", (i > 0) ? ", " : "", rates_list[i]);
    }
    g_string_append (s, "}, ");

    g_string_append (s, "channels = (int) [ 1, 2 ], ");
    g_string_append (s, "layout = (string) interleaved");

    src_template_caps = gst_caps_from_string (s->str);
    src_template = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
        src_template_caps);

    g_string_free (s, TRUE);
  }

  sink_template = gst_static_pad_template_get (&static_sink_template);

  gst_element_class_add_pad_template (element_class, sink_template);
  gst_element_class_add_pad_template (element_class, src_template);

  base_class->start = GST_DEBUG_FUNCPTR (gst_mpg123_audio_dec_start);
  base_class->stop = GST_DEBUG_FUNCPTR (gst_mpg123_audio_dec_stop);
  base_class->handle_frame =
      GST_DEBUG_FUNCPTR (gst_mpg123_audio_dec_handle_frame);
  base_class->set_format = GST_DEBUG_FUNCPTR (gst_mpg123_audio_dec_set_format);
  base_class->flush = GST_DEBUG_FUNCPTR (gst_mpg123_audio_dec_flush);

  error = mpg123_init ();
  if (G_UNLIKELY (error != MPG123_OK))
    GST_ERROR ("Could not initialize mpg123 library: %s",
        mpg123_plain_strerror (error));
  else
    GST_INFO ("mpg123 library initialized");
}
Example #5
gboolean
cam_device_open (CamDevice * device, const char *filename)
{
  ca_caps_t ca_caps;
  int ret;
  int i;
  int count = 10;

  g_return_val_if_fail (device != NULL, FALSE);
  g_return_val_if_fail (device->state == CAM_DEVICE_STATE_CLOSED, FALSE);
  g_return_val_if_fail (filename != NULL, FALSE);

  GST_INFO ("opening ca device %s", filename);

  ret = open (filename, O_RDWR);
  if (ret == -1) {
    GST_ERROR ("can't open ca device: %s", strerror (errno));
    return FALSE;
  }

  GST_DEBUG ("Successfully opened device %s", filename);

  device->fd = ret;

  ret = ioctl (device->fd, CA_RESET);

  g_usleep (G_USEC_PER_SEC / 10);

  while (TRUE) {
    /* get the capabilities of the CA */
    ret = ioctl (device->fd, CA_GET_CAP, &ca_caps);
    if (ret == -1) {
      GST_ERROR ("CA_GET_CAP ioctl failed: %s", strerror (errno));
      reset_state (device);
      return FALSE;
    }
    if (ca_caps.slot_num > 0)
      break;
    if (!count) {
      GST_ERROR ("CA_GET_CAP succeeded but not slots");
      reset_state (device);
      return FALSE;
    }
    count--;
    g_usleep (G_USEC_PER_SEC / 5);
  }

  device->tl = cam_tl_new (device->fd);
  device->sl = cam_sl_new (device->tl);
  device->al = cam_al_new (device->sl);

  device->mgr = cam_resource_manager_new ();
  cam_al_install (device->al, CAM_AL_APPLICATION (device->mgr));

  device->info = cam_application_info_new ();
  cam_al_install (device->al, CAM_AL_APPLICATION (device->info));

  device->cas = cam_conditional_access_new ();
  cam_al_install (device->al, CAM_AL_APPLICATION (device->cas));

  /* open a connection to each slot */
  for (i = 0; i < ca_caps.slot_num; ++i) {
    CamTLConnection *connection;

    ret = cam_tl_create_connection (device->tl, i, &connection);
    if (CAM_FAILED (ret)) {
      /* just ignore the slot, error out later only if no connection has been
       * established */
      GST_WARNING ("connection to slot %d failed, error: %d", i, ret);
      continue;
    }
  }

  if (g_hash_table_size (device->tl->connections) == 0) {
    GST_ERROR ("couldn't connect to any slot");

    reset_state (device);
    return FALSE;
  }

  device->state = CAM_DEVICE_STATE_OPEN;
  device->filename = g_strdup (filename);

  /* poll each connection to initiate the protocol */
  cam_tl_read_all (device->tl, TRUE);

  return TRUE;
}
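A hypothetical caller of cam_device_open () might look like the sketch below; the cam_device_new () / cam_device_close () pair and the device path are assumptions made only to illustrate the calling sequence:

/* Sketch: probe the first CA device; constructor and close call are assumed. */
static gboolean
probe_first_ca_device (void)
{
  CamDevice *device = cam_device_new ();        /* assumed constructor */

  if (!cam_device_open (device, "/dev/dvb/adapter0/ca0"))
    return FALSE;

  /* ... drive the CAM session here ... */

  cam_device_close (device);                    /* assumed counterpart */
  return TRUE;
}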
Example #6
/*
 * Read a new buffer from src->reqoffset, takes care of events
 * and seeking and such.
 */
static GstFlowReturn
gst_rtmp_src_create (GstPushSrc * pushsrc, GstBuffer ** buffer)
{
  GstRTMPSrc *src;
  GstBuffer *buf;
  GstMapInfo map;
  guint8 *data;
  guint todo;
  gsize bsize;
  int read;
  int size;

  src = GST_RTMP_SRC (pushsrc);

  g_return_val_if_fail (src->rtmp != NULL, GST_FLOW_ERROR);

  size = GST_BASE_SRC_CAST (pushsrc)->blocksize;

  GST_DEBUG ("reading from %" G_GUINT64_FORMAT
      ", size %u", src->cur_offset, size);

  buf = gst_buffer_new_allocate (NULL, size, NULL);
  if (G_UNLIKELY (buf == NULL)) {
    GST_ERROR_OBJECT (src, "Failed to allocate %u bytes", size);
    return GST_FLOW_ERROR;
  }

  bsize = todo = size;
  gst_buffer_map (buf, &map, GST_MAP_WRITE);
  data = map.data;
  read = bsize = 0;

  while (todo > 0) {
    read = RTMP_Read (src->rtmp, (char *) data, todo);

    if (G_UNLIKELY (read == 0 && todo == size)) {
      goto eos;
    } else if (G_UNLIKELY (read == 0)) {
      todo = 0;
      break;
    }

    if (G_UNLIKELY (read < 0))
      goto read_failed;

    if (read < todo) {
      data += read;
      todo -= read;
      bsize += read;
    } else {
      bsize += todo;
      todo = 0;
    }
    GST_LOG ("  got size %d", read);
  }
  gst_buffer_unmap (buf, &map);
  gst_buffer_resize (buf, 0, bsize);

  if (src->discont) {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    src->discont = FALSE;
  }

  GST_BUFFER_TIMESTAMP (buf) = src->last_timestamp;
  GST_BUFFER_OFFSET (buf) = src->cur_offset;

  src->cur_offset += size;
  if (src->last_timestamp == GST_CLOCK_TIME_NONE)
    src->last_timestamp = src->rtmp->m_mediaStamp * GST_MSECOND;
  else
    src->last_timestamp =
        MAX (src->last_timestamp, src->rtmp->m_mediaStamp * GST_MSECOND);

  GST_LOG_OBJECT (src, "Created buffer of size %u at %" G_GINT64_FORMAT
      " with timestamp %" GST_TIME_FORMAT, size, GST_BUFFER_OFFSET (buf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));


  /* we're done, return the buffer */
  *buffer = buf;

  return GST_FLOW_OK;

read_failed:
  {
    gst_buffer_unref (buf);
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL), ("Failed to read data"));
    return GST_FLOW_ERROR;
  }
eos:
  {
    gst_buffer_unref (buf);
    if (src->cur_offset == 0) {
      GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
          ("Failed to read any data from stream, check your URL"));
      return GST_FLOW_ERROR;
    } else {
      GST_DEBUG_OBJECT (src, "Reading data gave EOS");
      return GST_FLOW_EOS;
    }
  }
}
Example #7
gboolean
gst_ffmpegvidenc_register (GstPlugin * plugin)
{
  GTypeInfo typeinfo = {
    sizeof (GstFFMpegVidEncClass),
    (GBaseInitFunc) gst_ffmpegvidenc_base_init,
    NULL,
    (GClassInitFunc) gst_ffmpegvidenc_class_init,
    NULL,
    NULL,
    sizeof (GstFFMpegVidEnc),
    0,
    (GInstanceInitFunc) gst_ffmpegvidenc_init,
  };
  GType type;
  AVCodec *in_plugin;


  GST_LOG ("Registering encoders");

  /* build global ffmpeg param/property info */
  gst_ffmpeg_cfg_init ();

  in_plugin = av_codec_next (NULL);
  while (in_plugin) {
    gchar *type_name;

    /* Skip non-AV codecs */
    if (in_plugin->type != AVMEDIA_TYPE_VIDEO)
      goto next;

    /* no quasi codecs, please */
    if (in_plugin->id == AV_CODEC_ID_RAWVIDEO ||
        in_plugin->id == AV_CODEC_ID_V210 ||
        in_plugin->id == AV_CODEC_ID_V210X ||
        in_plugin->id == AV_CODEC_ID_R210
        || in_plugin->id == AV_CODEC_ID_ZLIB) {
      goto next;
    }

    /* No encoders depending on external libraries (we don't build them, but
     * people who build against an external ffmpeg might have them).
     * We have native gstreamer plugins for all of those libraries anyway. */
    if (!strncmp (in_plugin->name, "lib", 3)) {
      GST_DEBUG
          ("Not using external library encoder %s. Use the gstreamer-native ones instead.",
          in_plugin->name);
      goto next;
    }

    /* only video encoders */
    if (!av_codec_is_encoder (in_plugin)
        || in_plugin->type != AVMEDIA_TYPE_VIDEO)
      goto next;

    /* FIXME : We should have a method to know cheaply whether we have a mapping
     * for the given plugin or not */

    GST_DEBUG ("Trying plugin %s [%s]", in_plugin->name, in_plugin->long_name);

    /* no codecs for which we're GUARANTEED to have better alternatives */
    if (!strcmp (in_plugin->name, "gif")) {
      GST_LOG ("Ignoring encoder %s", in_plugin->name);
      goto next;
    }

    /* construct the type */
    type_name = g_strdup_printf ("avenc_%s", in_plugin->name);

    type = g_type_from_name (type_name);

    if (!type) {

      /* create the glib type now */
      type =
          g_type_register_static (GST_TYPE_VIDEO_ENCODER, type_name, &typeinfo,
          0);
      g_type_set_qdata (type, GST_FFENC_PARAMS_QDATA, (gpointer) in_plugin);

      {
        static const GInterfaceInfo preset_info = {
          NULL,
          NULL,
          NULL
        };
        g_type_add_interface_static (type, GST_TYPE_PRESET, &preset_info);
      }
    }

    if (!gst_element_register (plugin, type_name, GST_RANK_SECONDARY, type)) {
      g_free (type_name);
      return FALSE;
    }

    g_free (type_name);

  next:
    in_plugin = av_codec_next (in_plugin);
  }

  GST_LOG ("Finished registering encoders");

  return TRUE;
}
Example #8
/* returns the pointer in @str to the #version declaration */
const gchar *
_gst_glsl_shader_string_find_version (const gchar * str)
{
  gboolean sl_comment = FALSE;
  gboolean ml_comment = FALSE;
  gboolean newline = TRUE;
  gint i = 0;

  _init_debug ();

  /* search for #version while allowing for preceding comments/whitespace as
   * permitted by the GLSL specification */
  while (str && str[i] != '\0' && i < 1024) {
    if (str[i] == '\n' || str[i] == '\r') {
      newline = TRUE;
      sl_comment = FALSE;
      i++;
      continue;
    }

    if (g_ascii_isspace (str[i]))
      goto next;

    if (sl_comment)
      goto next;

    if (ml_comment) {
      if (g_strstr_len (&str[i], 2, "*/")) {
        ml_comment = FALSE;
        i++;
      }
      goto next;
    }

    if (g_strstr_len (&str[i], 2, "//")) {
      sl_comment = TRUE;
      i++;
      goto next;
    }

    if (g_strstr_len (&str[i], 2, "/*")) {
      ml_comment = TRUE;
      i++;
      goto next;
    }

    if (str[i] == '#') {
      if (newline && _check_valid_version_preprocessor_string (&str[i])) {
        GST_DEBUG ("found #version declaration at index %i", i);
        return &str[i];
      }
      break;
    }

  next:
    newline = FALSE;
    i++;
  }

  GST_DEBUG ("no #version declaration found in the first 1K");

  return NULL;
}
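For illustration, feeding the function above a shader source that starts with a comment skips the comment and returns a pointer at the '#' of the #version line; a small sketch:

/* Usage sketch: the leading block comment and blank line are skipped. */
static void
find_version_example (void)
{
  const gchar *src =
      "/* fragment shader */\n"
      "\n"
      "#version 100\n"
      "void main () { }\n";
  const gchar *version = _gst_glsl_shader_string_find_version (src);

  if (version != NULL)
    GST_DEBUG ("found version declaration: %.12s", version);
}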
Example #9
static GstFlowReturn
gst_goom_chain (GstPad * pad, GstBuffer * buffer)
{
  GstGoom *goom;
  GstFlowReturn ret;
  GstBuffer *outbuf = NULL;

  goom = GST_GOOM (gst_pad_get_parent (pad));
    
  /* If we don't have an output format yet, preallocate a buffer to try and
   * set one */
  if (GST_PAD_CAPS (goom->srcpad) == NULL) {
    ret = get_buffer (goom, &outbuf);
    if (ret != GST_FLOW_OK) {
      gst_buffer_unref (buffer);
      goto beach;
    }
  }
  
  /* don't try to combine samples from discont buffer */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (goom->adapter);
    goom->next_ts = -1;
  }

  /* Match timestamps from the incoming audio */
  if (GST_BUFFER_TIMESTAMP (buffer) != GST_CLOCK_TIME_NONE)
    goom->next_ts = GST_BUFFER_TIMESTAMP (buffer);

  GST_DEBUG_OBJECT (goom,
      "Input buffer has %d samples, time=%" G_GUINT64_FORMAT,
      GST_BUFFER_SIZE (buffer) / goom->bps, GST_BUFFER_TIMESTAMP (buffer));

  /* Collect samples until we have enough for an output frame */
  gst_adapter_push (goom->adapter, buffer);

  ret = GST_FLOW_OK;

  while (TRUE) {
    const guint16 *data;
    gboolean need_skip;
    guchar *out_frame;
    gint i, c;
    guint avail, to_flush;
	
    Message m;

    avail = gst_adapter_available (goom->adapter);
    GST_DEBUG_OBJECT (goom, "avail now %u", avail);

    /* we need GOOM_SAMPLES to get a meaningful result from goom. */
    if (avail < (GOOM_SAMPLES * goom->bps))
      break;

    /* we also need enough samples to produce one frame at least */
    if (avail < goom->bpf)
      break;

    GST_DEBUG_OBJECT (goom, "processing buffer");

    if (goom->next_ts != -1) {
      gint64 qostime;

      qostime = gst_segment_to_running_time (&goom->segment, GST_FORMAT_TIME,
          goom->next_ts);

      GST_OBJECT_LOCK (goom);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip = goom->earliest_time != -1 && qostime <= goom->earliest_time;
      GST_OBJECT_UNLOCK (goom);

      if (need_skip) {
        GST_WARNING_OBJECT (goom,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (goom->earliest_time));
        goto skip;
      }
    }

    /* get next GOOM_SAMPLES, we have at least this amount of samples */
    data =
        (const guint16 *) gst_adapter_peek (goom->adapter,
        GOOM_SAMPLES * goom->bps);

    if (goom->channels == 2) {
      for (i = 0; i < GOOM_SAMPLES; i++) {
        goom->datain[0][i] = *data++;
        goom->datain[1][i] = *data++;
      }
    } else {
      for (i = 0; i < GOOM_SAMPLES; i++) {
        goom->datain[0][i] = *data;
        goom->datain[1][i] = *data++;
      }
    }

    /* alloc a buffer if we don't have one yet, this happens
     * when we pushed a buffer in this while loop before */
    if (outbuf == NULL) {
      ret = get_buffer (goom, &outbuf);
      if (ret != GST_FLOW_OK) {
        goto beach;
      }
    }

    GST_BUFFER_TIMESTAMP (outbuf) = goom->next_ts;
    GST_BUFFER_DURATION (outbuf) = goom->duration;
    GST_BUFFER_SIZE (outbuf) = goom->outsize;

	//gst_spectrum_transform_ip(gstSpectrum,buffer);

	//print_spectrum_message(gstSpectrum);
	//readMessage(&m);

	//c = m.magnitude[0] + m.magnitude[1] + m.magnitude[2] + m.magnitude[3] + m.magnitude[4];
	//if (c != 0) {
		/*printf("\n\n [");
		for (i = 0; i < 5; i++)
			printf("%f $ ", m.magnitude[i]);
		printf("] \n\n");*/
		
		/*g_print ("Goom: %" GST_TIME_FORMAT ", message received: %" GST_TIME_FORMAT "\n",
			GST_TIME_ARGS (goom->next_ts), GST_TIME_ARGS (m.timestamp));*/
	//}

    out_frame = (guchar *) goom_update (goom->plugin, goom->datain, 0, 0);
    memcpy (GST_BUFFER_DATA (outbuf), out_frame, goom->outsize);

    GST_DEBUG ("Pushing frame with time=%" GST_TIME_FORMAT ", duration=%"
        GST_TIME_FORMAT, GST_TIME_ARGS (goom->next_ts),
        GST_TIME_ARGS (goom->duration));

    ret = gst_pad_push (goom->srcpad, outbuf);
    outbuf = NULL;

  skip:
    /* interpolate next timestamp */
    if (goom->next_ts != -1)
      goom->next_ts += goom->duration;

    /* Now flush the samples we needed for this frame, which might be more than
     * the samples we used (GOOM_SAMPLES). */
    to_flush = goom->bpf;

    GST_DEBUG_OBJECT (goom, "finished frame, flushing %u bytes from input",
        to_flush);
    gst_adapter_flush (goom->adapter, to_flush);

    if (ret != GST_FLOW_OK)
      break;
  }

  if (outbuf != NULL)
    gst_buffer_unref (outbuf);

beach:
  gst_object_unref (goom);

  return ret;
}
Example #10
/* Process pending events. Call with ->lock held */
static void
gst_sdlv_process_events (GstSDLVideoSink * sdlvideosink)
{
  SDL_Event event;
  int numevents;
  char *keysym = NULL;

  do {
    SDL_PumpEvents ();
    numevents = SDL_PeepEvents (&event, 1, SDL_GETEVENT,
        SDL_KEYDOWNMASK | SDL_KEYUPMASK |
        SDL_MOUSEMOTIONMASK | SDL_MOUSEBUTTONDOWNMASK |
        SDL_MOUSEBUTTONUPMASK | SDL_QUITMASK | SDL_VIDEORESIZEMASK);

    if (numevents > 0 && (event.type == SDL_KEYUP || event.type == SDL_KEYDOWN)) {
      keysym = SDL_GetKeyName (event.key.keysym.sym);
    }

    if (numevents > 0) {
      g_mutex_unlock (sdlvideosink->lock);
      switch (event.type) {
        case SDL_MOUSEMOTION:
          gst_navigation_send_mouse_event (GST_NAVIGATION (sdlvideosink),
              "mouse-move", 0, event.motion.x, event.motion.y);
          break;
        case SDL_MOUSEBUTTONDOWN:
          gst_navigation_send_mouse_event (GST_NAVIGATION (sdlvideosink),
              "mouse-button-press",
              event.button.button, event.button.x, event.button.y);
          break;
        case SDL_MOUSEBUTTONUP:
          gst_navigation_send_mouse_event (GST_NAVIGATION (sdlvideosink),
              "mouse-button-release",
              event.button.button, event.button.x, event.button.y);
          break;
        case SDL_KEYUP:
          GST_DEBUG ("key press event %s !",
              SDL_GetKeyName (event.key.keysym.sym));
          gst_navigation_send_key_event (GST_NAVIGATION (sdlvideosink),
              "key-release", keysym);
          break;
        case SDL_KEYDOWN:
          if (SDLK_ESCAPE != event.key.keysym.sym) {
            GST_DEBUG ("key press event %s !",
                SDL_GetKeyName (event.key.keysym.sym));
            gst_navigation_send_key_event (GST_NAVIGATION (sdlvideosink),
                "key-press", keysym);
            break;
          } else {
            /* fall through */
          }
        case SDL_QUIT:
          sdlvideosink->running = FALSE;
          GST_ELEMENT_ERROR (sdlvideosink, RESOURCE, OPEN_WRITE,
              ("Video output device is gone."),
              ("We were running fullscreen and user "
                  "pressed the ESC key, stopping playback."));
          break;
        case SDL_VIDEORESIZE:
          /* create a SDL window of the size requested by the user */
          g_mutex_lock (sdlvideosink->lock);
          GST_VIDEO_SINK_WIDTH (sdlvideosink) = event.resize.w;
          GST_VIDEO_SINK_HEIGHT (sdlvideosink) = event.resize.h;
          gst_sdlvideosink_create (sdlvideosink);
          g_mutex_unlock (sdlvideosink->lock);
          break;
      }
      g_mutex_lock (sdlvideosink->lock);
    }
  } while (numevents > 0);
}
Example #11
/* Must be called with the sdl lock held */
static gboolean
gst_sdlvideosink_create (GstSDLVideoSink * sdlvideosink)
{
  if (GST_VIDEO_SINK_HEIGHT (sdlvideosink) <= 0)
    GST_VIDEO_SINK_HEIGHT (sdlvideosink) = sdlvideosink->height;
  if (GST_VIDEO_SINK_WIDTH (sdlvideosink) <= 0)
    GST_VIDEO_SINK_WIDTH (sdlvideosink) = sdlvideosink->width;

  gst_sdlvideosink_destroy (sdlvideosink);

  if (sdlvideosink->is_xwindows && !sdlvideosink->xwindow_id) {
    g_mutex_unlock (sdlvideosink->lock);
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (sdlvideosink));
    g_mutex_lock (sdlvideosink->lock);
  }

  /* create a SDL window of the size requested by the user */
  if (sdlvideosink->full_screen) {
    sdlvideosink->screen =
        SDL_SetVideoMode (GST_VIDEO_SINK_WIDTH (sdlvideosink),
        GST_VIDEO_SINK_HEIGHT (sdlvideosink), 0,
        SDL_SWSURFACE | SDL_FULLSCREEN);
  } else {
    sdlvideosink->screen =
        SDL_SetVideoMode (GST_VIDEO_SINK_WIDTH (sdlvideosink),
        GST_VIDEO_SINK_HEIGHT (sdlvideosink), 0, SDL_HWSURFACE | SDL_RESIZABLE);
  }
  if (sdlvideosink->screen == NULL)
    goto no_screen;

  /* create a new YUV overlay */
  sdlvideosink->overlay = SDL_CreateYUVOverlay (sdlvideosink->width,
      sdlvideosink->height, sdlvideosink->format, sdlvideosink->screen);
  if (sdlvideosink->overlay == NULL)
    goto no_overlay;


  GST_DEBUG ("Using a %dx%d %dbpp SDL screen with a %dx%d \'%"
      GST_FOURCC_FORMAT "\' YUV overlay", GST_VIDEO_SINK_WIDTH (sdlvideosink),
      GST_VIDEO_SINK_HEIGHT (sdlvideosink),
      sdlvideosink->screen->format->BitsPerPixel, sdlvideosink->width,
      sdlvideosink->height, GST_FOURCC_ARGS (sdlvideosink->format));

  sdlvideosink->rect.x = 0;
  sdlvideosink->rect.y = 0;
  sdlvideosink->rect.w = GST_VIDEO_SINK_WIDTH (sdlvideosink);
  sdlvideosink->rect.h = GST_VIDEO_SINK_HEIGHT (sdlvideosink);

  /*SDL_DisplayYUVOverlay (sdlvideosink->overlay, &(sdlvideosink->rect)); */

  GST_DEBUG ("sdlvideosink: setting %08x (%" GST_FOURCC_FORMAT ")",
      sdlvideosink->format, GST_FOURCC_ARGS (sdlvideosink->format));

  return TRUE;

  /* ERRORS */
no_screen:
  {
    GST_ELEMENT_ERROR (sdlvideosink, LIBRARY, TOO_LAZY, (NULL),
        ("SDL: Couldn't set %dx%d: %s", GST_VIDEO_SINK_WIDTH (sdlvideosink),
            GST_VIDEO_SINK_HEIGHT (sdlvideosink), SDL_GetError ()));
    return FALSE;
  }
no_overlay:
  {
    GST_ELEMENT_ERROR (sdlvideosink, LIBRARY, TOO_LAZY, (NULL),
        ("SDL: Couldn't create SDL YUV overlay (%dx%d \'%" GST_FOURCC_FORMAT
            "\'): %s", sdlvideosink->width, sdlvideosink->height,
            GST_FOURCC_ARGS (sdlvideosink->format), SDL_GetError ()));
    return FALSE;
  }
}
Example #12
static int
gst_dv1394src_iso_receive (raw1394handle_t handle, int channel, size_t len,
    quadlet_t * data)
{
  GstDV1394Src *dv1394src = gst_dv1394src_from_raw1394handle (handle);

  if (len > 16) {
    /*
       the following code taken from kino-0.51 (Dan Dennedy/Charles Yates)
       Kindly relicensed under the LGPL. See the commit log for version 1.6 of
       this file in CVS.
     */
    unsigned char *p = (unsigned char *) &data[3];

    int section_type = p[0] >> 5;       /* section type is in bits 5 - 7 */
    int dif_sequence = p[1] >> 4;       /* dif sequence number is in bits 4 - 7 */
    int dif_block = p[2];

    /* if we are at the beginning of a frame, 
       we set buf=frame, and alloc a new buffer for frame
     */
    if (section_type == 0 && dif_sequence == 0) {       // dif header
      if (!GST_PAD_CAPS (GST_BASE_SRC_PAD (dv1394src))) {
        GstCaps *caps;

        // figure format (NTSC/PAL)
        if (p[3] & 0x80) {
          // PAL
          dv1394src->frame_size = PAL_FRAMESIZE;
          dv1394src->frame_rate = PAL_FRAMERATE;
          GST_DEBUG ("PAL data");
          caps = gst_caps_new_simple ("video/x-dv",
              "format", G_TYPE_STRING, "PAL",
              "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
        } else {
          // NTSC (untested)
          dv1394src->frame_size = NTSC_FRAMESIZE;
          dv1394src->frame_rate = NTSC_FRAMERATE;
          GST_DEBUG
              ("NTSC data [untested] - please report success/failure to <*****@*****.**>");
          caps = gst_caps_new_simple ("video/x-dv",
              "format", G_TYPE_STRING, "NTSC",
              "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
        }
        gst_pad_set_caps (GST_BASE_SRC_PAD (dv1394src), caps);
        gst_caps_unref (caps);
      }
      // drop last frame when not complete
      if (!dv1394src->drop_incomplete
          || dv1394src->bytes_in_frame == dv1394src->frame_size) {
        dv1394src->buf = dv1394src->frame;
      } else {
        GST_INFO_OBJECT (GST_ELEMENT (dv1394src), "incomplete frame dropped");
        g_signal_emit (G_OBJECT (dv1394src),
            gst_dv1394src_signals[SIGNAL_FRAME_DROPPED], 0);
        if (dv1394src->frame) {
          gst_buffer_unref (dv1394src->frame);
        }
      }
      if ((dv1394src->frame_sequence + 1) % (dv1394src->skip +
              dv1394src->consecutive) < dv1394src->consecutive) {
        GstBuffer *buf;
        gint64 i64;

        buf = gst_buffer_new_and_alloc (dv1394src->frame_size);

        /* fill in offset, duration, timestamp */
        GST_BUFFER_OFFSET (buf) = dv1394src->frame_sequence;
        dv1394src->frame = buf;
      }
      dv1394src->frame_sequence++;
      dv1394src->bytes_in_frame = 0;
    }

    if (dv1394src->frame != NULL) {
      guint8 *data = GST_BUFFER_DATA (dv1394src->frame);

      switch (section_type) {
        case 0:                /* 1 Header block */
          /* p[3] |= 0x80; // hack to force PAL data */
          memcpy (data + dif_sequence * 150 * 80, p, 480);
          break;

        case 1:                /* 2 Subcode blocks */
          memcpy (data + dif_sequence * 150 * 80 + (1 + dif_block) * 80, p,
              480);
          break;

        case 2:                /* 3 VAUX blocks */
          memcpy (data + dif_sequence * 150 * 80 + (3 + dif_block) * 80, p,
              480);
          break;

        case 3:                /* 9 Audio blocks interleaved with video */
          memcpy (data + dif_sequence * 150 * 80 + (6 + dif_block * 16) * 80, p,
              480);
          break;

        case 4:                /* 135 Video blocks interleaved with audio */
          memcpy (data + dif_sequence * 150 * 80 + (7 + (dif_block / 15) +
                  dif_block) * 80, p, 480);
          break;

        default:               /* we can't handle any other data */
          break;
      }
      dv1394src->bytes_in_frame += 480;
    }
  }

  return 0;
}
Example #13
static gboolean
register_plugin (GstPlugin * plugin, const gchar * vendor,
    const gchar * filename)
{
  GModule *module;
  GstFrei0rPluginRegisterReturn ret = GST_FREI0R_PLUGIN_REGISTER_RETURN_FAILED;
  GstFrei0rFuncTable ftable = { NULL, };
  gint i;
  f0r_plugin_info_t info = { NULL, };
  f0r_instance_t *instance = NULL;

  GST_DEBUG ("Registering plugin '%s'", filename);

  module = g_module_open (filename, G_MODULE_BIND_LAZY | G_MODULE_BIND_LOCAL);
  if (!module) {
    GST_WARNING ("Failed to load plugin");
    return FALSE;
  }

  if (!g_module_symbol (module, "f0r_init", (gpointer *) & ftable.init)) {
    GST_INFO ("No frei0r plugin");
    g_module_close (module);
    return FALSE;
  }

  if (!g_module_symbol (module, "f0r_deinit", (gpointer *) & ftable.deinit) ||
      !g_module_symbol (module, "f0r_construct",
          (gpointer *) & ftable.construct)
      || !g_module_symbol (module, "f0r_destruct",
          (gpointer *) & ftable.destruct)
      || !g_module_symbol (module, "f0r_get_plugin_info",
          (gpointer *) & ftable.get_plugin_info)
      || !g_module_symbol (module, "f0r_get_param_info",
          (gpointer *) & ftable.get_param_info)
      || !g_module_symbol (module, "f0r_set_param_value",
          (gpointer *) & ftable.set_param_value)
      || !g_module_symbol (module, "f0r_get_param_value",
          (gpointer *) & ftable.get_param_value))
    goto invalid_frei0r_plugin;

  /* One of these must exist */
  g_module_symbol (module, "f0r_update", (gpointer *) & ftable.update);
  g_module_symbol (module, "f0r_update2", (gpointer *) & ftable.update2);

  if (!ftable.init ()) {
    GST_WARNING ("Failed to initialize plugin");
    g_module_close (module);
    return FALSE;
  }

  if (!ftable.update && !ftable.update2)
    goto invalid_frei0r_plugin;

  ftable.get_plugin_info (&info);

  if (info.frei0r_version > 1) {
    GST_WARNING ("Unsupported frei0r version %d", info.frei0r_version);
    ftable.deinit ();
    g_module_close (module);
    return FALSE;
  }

  if (info.color_model > F0R_COLOR_MODEL_PACKED32) {
    GST_WARNING ("Unsupported color model %d", info.color_model);
    ftable.deinit ();
    g_module_close (module);
    return FALSE;
  }

  for (i = 0; i < info.num_params; i++) {
    f0r_param_info_t pinfo = { NULL, };

    ftable.get_param_info (&pinfo, i);
    if (pinfo.type > F0R_PARAM_STRING) {
      GST_WARNING ("Unsupported parameter type %d", pinfo.type);
      ftable.deinit ();
      g_module_close (module);
      return FALSE;
    }
  }

  instance = ftable.construct (640, 480);
  if (!instance) {
    GST_WARNING ("Failed to instanciate plugin '%s'", info.name);
    ftable.deinit ();
    g_module_close (module);
    return FALSE;
  }
  ftable.destruct (instance);

  switch (info.plugin_type) {
    case F0R_PLUGIN_TYPE_FILTER:
      ret = gst_frei0r_filter_register (plugin, vendor, &info, &ftable);
      break;
    case F0R_PLUGIN_TYPE_SOURCE:
      ret = gst_frei0r_src_register (plugin, vendor, &info, &ftable);
      break;
    case F0R_PLUGIN_TYPE_MIXER2:
    case F0R_PLUGIN_TYPE_MIXER3:
      ret = gst_frei0r_mixer_register (plugin, vendor, &info, &ftable);
      break;
    default:
      break;
  }

  switch (ret) {
    case GST_FREI0R_PLUGIN_REGISTER_RETURN_OK:
      return TRUE;
    case GST_FREI0R_PLUGIN_REGISTER_RETURN_FAILED:
      GST_ERROR ("Failed to register frei0r plugin");
      ftable.deinit ();
      g_module_close (module);
      return FALSE;
    case GST_FREI0R_PLUGIN_REGISTER_RETURN_ALREADY_REGISTERED:
      GST_DEBUG ("frei0r plugin already registered");
      ftable.deinit ();
      g_module_close (module);
      return TRUE;
    default:
      g_return_val_if_reached (FALSE);
  }

  g_return_val_if_reached (FALSE);

invalid_frei0r_plugin:
  GST_ERROR ("Invalid frei0r plugin");
  ftable.deinit ();
  g_module_close (module);

  return FALSE;
}
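A caller of register_plugin () would typically walk a directory of shared objects and try each candidate; below is a minimal sketch using plain GLib (the directory-scanning logic is an assumption, not the plugin's actual registration loop):

/* Sketch: try to register every module found in @dirname. */
static void
register_all_in_dir (GstPlugin * plugin, const gchar * vendor,
    const gchar * dirname)
{
  GDir *dir = g_dir_open (dirname, 0, NULL);
  const gchar *entry;

  if (dir == NULL)
    return;

  while ((entry = g_dir_read_name (dir))) {
    if (g_str_has_suffix (entry, "." G_MODULE_SUFFIX)) {
      gchar *path = g_build_filename (dirname, entry, NULL);

      /* failures are logged inside register_plugin () and simply skipped */
      register_plugin (plugin, vendor, path);
      g_free (path);
    }
  }

  g_dir_close (dir);
}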
Example #14
static void thread_pool_func (gpointer data, gpointer user_data)
{
    HTTPServer *http_server = (HTTPServer *)user_data;
    RequestData **request_data_pointer = data;
    RequestData *request_data = *request_data_pointer;
    gint ret;
    GstClockTime cb_ret;

    GST_DEBUG ("EVENT %d, status %d, sock %d", request_data->events, request_data->status, request_data->sock);
    g_mutex_lock (&(request_data->events_mutex));
    if (request_data->events & (EPOLLHUP | EPOLLERR)) {
        request_data->status = HTTP_FINISH;
        request_data->events = 0;

    } else if (request_data->events & EPOLLOUT) {
        if ((request_data->status == HTTP_IDLE) || (request_data->status == HTTP_BLOCK)) {
            request_data->status = HTTP_CONTINUE;
        }
        request_data->events ^= EPOLLOUT;

    } else if (request_data->events & EPOLLIN) {
        if ((request_data->status == HTTP_IDLE) || (request_data->status == HTTP_BLOCK)) {
            /* in normal play status */
            ret = read_request (request_data);
            if (ret < 0) {
                request_data->status = HTTP_FINISH;

            } else {
                GST_DEBUG ("Unexpected request arrived, ignore.");
                request_data->status = HTTP_CONTINUE;
            }
        } 
        /* HTTP_REQUEST status */
        request_data->events ^= EPOLLIN;

    } else if ((request_data->status == HTTP_IDLE) || (request_data->status == HTTP_BLOCK)) {
        /* no event, popup from idle queue or block queue */
        request_data->status = HTTP_CONTINUE;

    } else {
        GST_WARNING ("warning!!! unprocessed event, sock %d status %d events %d", request_data->sock, request_data->status, request_data->events);
    }
    g_mutex_unlock (&(request_data->events_mutex));

    if (request_data->status == HTTP_REQUEST) {
        ret = read_request (request_data);
        if (ret < 0) {
            request_data_release (http_server, request_data_pointer);
            return;
        } 

        ret = parse_request (request_data);
        if (ret == 0) {
            /* parse complete, call back user function */
            request_data->events ^= EPOLLIN;
            invoke_user_callback (http_server, request_data_pointer);

        } else if (ret == 1) {
            /* need read more data */
            g_mutex_lock (&(http_server->block_queue_mutex));
            g_queue_push_head (http_server->block_queue, request_data_pointer);
            g_mutex_unlock (&(http_server->block_queue_mutex));
            return;

        } else if (ret == 2) {
            /* Not Implemented */
            GST_WARNING ("Not Implemented, return is %d, sock is %d", ret, request_data->sock);
            gchar *buf = g_strdup_printf (http_501, PACKAGE_NAME, PACKAGE_VERSION);
            if (httpserver_write (request_data->sock, buf, strlen (buf)) != strlen (buf)) {
                GST_ERROR ("write sock %d error.", request_data->sock);
            }
            g_free (buf);
            request_data_release (http_server, request_data_pointer);

        } else {
            /* Bad Request */
            GST_WARNING ("Bad request, return is %d, sock is %d", ret, request_data->sock);
            gchar *buf = g_strdup_printf (http_400, PACKAGE_NAME, PACKAGE_VERSION);
            if (httpserver_write (request_data->sock, buf, strlen (buf)) != strlen (buf)) {
                GST_ERROR ("write sock %d error.", request_data->sock);
            }
            g_free (buf);
            request_data_release (http_server, request_data_pointer);
        }

    } else if (request_data->status == HTTP_CONTINUE) {
        invoke_user_callback (http_server, request_data_pointer);

    } else if (request_data->status == HTTP_FINISH) { // FIXME: what about a pending continue request in the idle queue?
        cb_ret = http_server->user_callback (request_data, http_server->user_data);
        GST_DEBUG ("request finish %d callback return %lu, send %lu", request_data->sock, cb_ret, request_data->bytes_send);
        if (cb_ret == 0) {
            g_mutex_lock (&(http_server->idle_queue_mutex));
            g_tree_remove (http_server->idle_queue, &(request_data->wakeup_time));
            g_mutex_unlock (&(http_server->idle_queue_mutex));
            request_data_release (http_server, request_data_pointer);
        }
    }
}
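thread_pool_func () has the GFunc signature, so it is presumably dispatched by a GThreadPool owned by the HTTP server; a hedged sketch of how such a pool could be created (the thread_pool field and the thread count are assumptions):

/* Sketch: start a worker pool that runs thread_pool_func (); request pointers
 * would later be dispatched to it, e.g. from the epoll loop, with
 * g_thread_pool_push (http_server->thread_pool, request_data_pointer, NULL). */
static gboolean start_thread_pool (HTTPServer *http_server, gint max_threads)
{
    GError *err = NULL;

    /* http_server->thread_pool is an assumed field */
    http_server->thread_pool = g_thread_pool_new (thread_pool_func, http_server,
        max_threads, FALSE, &err);
    if (http_server->thread_pool == NULL) {
        GST_ERROR ("g_thread_pool_new error: %s", err->message);
        g_error_free (err);
        return FALSE;
    }

    return TRUE;
}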
static GstFlowReturn
gst_udpsrc_create (GstPushSrc * psrc, GstBuffer ** buf)
{
  GstUDPSrc *udpsrc;
  GstBuffer *outbuf = NULL;
  GSocketAddress *saddr = NULL;
  gint flags = G_SOCKET_MSG_NONE;
  gboolean try_again;
  GError *err = NULL;
  gssize res;
  gsize offset;

  udpsrc = GST_UDPSRC_CAST (psrc);

  if (!gst_udpsrc_ensure_mem (udpsrc))
    goto memory_alloc_error;

retry:

  do {
    gint64 timeout;

    try_again = FALSE;

    if (udpsrc->timeout)
      timeout = udpsrc->timeout / 1000;
    else
      timeout = -1;

    GST_LOG_OBJECT (udpsrc, "doing select, timeout %" G_GINT64_FORMAT, timeout);

    if (!g_socket_condition_timed_wait (udpsrc->used_socket, G_IO_IN | G_IO_PRI,
            timeout, udpsrc->cancellable, &err)) {
      if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_BUSY)
          || g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
        goto stopped;
      } else if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_TIMED_OUT)) {
        g_clear_error (&err);
        /* timeout, post element message */
        gst_element_post_message (GST_ELEMENT_CAST (udpsrc),
            gst_message_new_element (GST_OBJECT_CAST (udpsrc),
                gst_structure_new ("GstUDPSrcTimeout",
                    "timeout", G_TYPE_UINT64, udpsrc->timeout, NULL)));
      } else {
        goto select_error;
      }

      try_again = TRUE;
    }
  } while (G_UNLIKELY (try_again));

  if (saddr != NULL) {
    g_object_unref (saddr);
    saddr = NULL;
  }

  res =
      g_socket_receive_message (udpsrc->used_socket, &saddr, udpsrc->vec, 2,
      NULL, NULL, &flags, udpsrc->cancellable, &err);

  if (G_UNLIKELY (res < 0)) {
    /* EHOSTUNREACH for a UDP socket means that a packet sent with udpsink
     * generated a "port unreachable" ICMP response. We ignore that and try
     * again. */
    if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_HOST_UNREACHABLE)) {
      g_clear_error (&err);
      goto retry;
    }
    goto receive_error;
  }

  /* remember maximum packet size */
  if (res > udpsrc->max_size)
    udpsrc->max_size = res;

  outbuf = gst_buffer_new ();

  /* append first memory chunk to buffer */
  gst_buffer_append_memory (outbuf, udpsrc->mem);

  /* if the packet didn't fit into the first chunk, add second one as well */
  if (res > udpsrc->map.size) {
    gst_buffer_append_memory (outbuf, udpsrc->mem_max);
    gst_memory_unmap (udpsrc->mem_max, &udpsrc->map_max);
    udpsrc->vec[1].buffer = NULL;
    udpsrc->vec[1].size = 0;
    udpsrc->mem_max = NULL;
  }

  /* make sure we allocate a new chunk next time (we do this only here because
   * we look at map.size to see if the second memory chunk is needed above) */
  gst_memory_unmap (udpsrc->mem, &udpsrc->map);
  udpsrc->vec[0].buffer = NULL;
  udpsrc->vec[0].size = 0;
  udpsrc->mem = NULL;

  offset = udpsrc->skip_first_bytes;

  if (G_UNLIKELY (offset > 0 && res < offset))
    goto skip_error;

  gst_buffer_resize (outbuf, offset, res - offset);

  /* use buffer metadata so receivers can also track the address */
  if (saddr) {
    gst_buffer_add_net_address_meta (outbuf, saddr);
    g_object_unref (saddr);
    saddr = NULL;
  }

  GST_LOG_OBJECT (udpsrc, "read packet of %d bytes", (int) res);

  *buf = GST_BUFFER_CAST (outbuf);

  return GST_FLOW_OK;

  /* ERRORS */
memory_alloc_error:
  {
    GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
        ("Failed to allocate or map memory"));
    return GST_FLOW_ERROR;
  }
select_error:
  {
    GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
        ("select error: %s", err->message));
    g_clear_error (&err);
    return GST_FLOW_ERROR;
  }
stopped:
  {
    GST_DEBUG ("stop called");
    g_clear_error (&err);
    return GST_FLOW_FLUSHING;
  }
receive_error:
  {
    if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_BUSY) ||
        g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
      g_clear_error (&err);
      return GST_FLOW_FLUSHING;
    } else {
      GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
          ("receive error %" G_GSSIZE_FORMAT ": %s", res, err->message));
      g_clear_error (&err);
      return GST_FLOW_ERROR;
    }
  }
skip_error:
  {
    gst_buffer_unref (outbuf);

    GST_ELEMENT_ERROR (udpsrc, STREAM, DECODE, (NULL),
        ("UDP buffer to small to skip header"));
    return GST_FLOW_ERROR;
  }
}
static void
window_resize (GstGLWindowDispmanxEGL * window_egl, guint width, guint height,
    gboolean visible)
{
  GST_DEBUG ("resizing %s window from %ux%u to %ux%u",
      visible ? "visible" : "invisible", window_egl->native.width,
      window_egl->native.height, width, height);

  if (window_egl->display) {
    VC_RECT_T dst_rect;
    VC_RECT_T src_rect;
    GstVideoRectangle src, dst, res;
    DISPMANX_UPDATE_HANDLE_T dispman_update;
    uint32_t opacity = visible ? 255 : 0;
    VC_DISPMANX_ALPHA_T alpha =
        { DISPMANX_FLAGS_ALPHA_FIXED_ALL_PIXELS, opacity, 0 };

    /* Center width*height frame inside dp_width*dp_height */
    src.w = width;
    src.h = height;
    src.x = src.y = 0;
    dst.w = window_egl->dp_width;
    dst.h = window_egl->dp_height;
    dst.x = dst.y = 0;
    gst_video_sink_center_rect (src, dst, &res, FALSE);

    dst_rect.x = res.x;
    dst_rect.y = res.y;
    dst_rect.width = res.w;
    dst_rect.height = res.h;

    src_rect.x = 0;
    src_rect.y = 0;
    src_rect.width = width << 16;
    src_rect.height = height << 16;

    dispman_update = vc_dispmanx_update_start (0);

    if (window_egl->native.element) {
      uint32_t change_flags =
          ELEMENT_CHANGE_OPACITY | ELEMENT_CHANGE_DEST_RECT |
          ELEMENT_CHANGE_SRC_RECT;
      vc_dispmanx_element_change_attributes (dispman_update,
          window_egl->native.element, change_flags, 0, opacity, &dst_rect,
          &src_rect, 0, 0);
    } else {
      window_egl->native.element = vc_dispmanx_element_add (dispman_update,
          window_egl->display, 0, &dst_rect, 0, &src_rect,
          DISPMANX_PROTECTION_NONE, &alpha, 0, 0);
    }

    vc_dispmanx_update_submit_sync (dispman_update);

    if (GST_GL_WINDOW (window_egl)->resize)
      GST_GL_WINDOW (window_egl)->resize (GST_GL_WINDOW (window_egl)->
          resize_data, width, height);
  }

  window_egl->native.width = width;
  window_egl->native.height = height;
}
static void
gst_udpsrc_set_property (GObject * object, guint prop_id, const GValue * value,
    GParamSpec * pspec)
{
  GstUDPSrc *udpsrc = GST_UDPSRC (object);

  switch (prop_id) {
    case PROP_BUFFER_SIZE:
      udpsrc->buffer_size = g_value_get_int (value);
      break;
    case PROP_PORT:
      udpsrc->port = g_value_get_int (value);
      g_free (udpsrc->uri);
      udpsrc->uri =
          g_strdup_printf ("udp://%s:%u", udpsrc->address, udpsrc->port);
      break;
    case PROP_MULTICAST_GROUP:
    case PROP_ADDRESS:
    {
      const gchar *group;

      g_free (udpsrc->address);
      if ((group = g_value_get_string (value)))
        udpsrc->address = g_strdup (group);
      else
        udpsrc->address = g_strdup (UDP_DEFAULT_MULTICAST_GROUP);

      g_free (udpsrc->uri);
      udpsrc->uri =
          g_strdup_printf ("udp://%s:%u", udpsrc->address, udpsrc->port);
      break;
    }
    case PROP_MULTICAST_IFACE:
      g_free (udpsrc->multi_iface);

      if (g_value_get_string (value) == NULL)
        udpsrc->multi_iface = g_strdup (UDP_DEFAULT_MULTICAST_IFACE);
      else
        udpsrc->multi_iface = g_value_dup_string (value);
      break;
    case PROP_URI:
      gst_udpsrc_set_uri (udpsrc, g_value_get_string (value), NULL);
      break;
    case PROP_CAPS:
    {
      const GstCaps *new_caps_val = gst_value_get_caps (value);
      GstCaps *new_caps;
      GstCaps *old_caps;

      if (new_caps_val == NULL) {
        new_caps = gst_caps_new_any ();
      } else {
        new_caps = gst_caps_copy (new_caps_val);
      }

      GST_OBJECT_LOCK (udpsrc);
      old_caps = udpsrc->caps;
      udpsrc->caps = new_caps;
      GST_OBJECT_UNLOCK (udpsrc);
      if (old_caps)
        gst_caps_unref (old_caps);

      gst_pad_mark_reconfigure (GST_BASE_SRC_PAD (udpsrc));
      break;
    }
    case PROP_SOCKET:
      if (udpsrc->socket != NULL && udpsrc->socket != udpsrc->used_socket &&
          udpsrc->close_socket) {
        GError *err = NULL;

        if (!g_socket_close (udpsrc->socket, &err)) {
          GST_ERROR ("failed to close socket %p: %s", udpsrc->socket,
              err->message);
          g_clear_error (&err);
        }
      }
      if (udpsrc->socket)
        g_object_unref (udpsrc->socket);
      udpsrc->socket = g_value_dup_object (value);
      GST_DEBUG ("setting socket to %p", udpsrc->socket);
      break;
    case PROP_TIMEOUT:
      udpsrc->timeout = g_value_get_uint64 (value);
      break;
    case PROP_SKIP_FIRST_BYTES:
      udpsrc->skip_first_bytes = g_value_get_int (value);
      break;
    case PROP_CLOSE_SOCKET:
      udpsrc->close_socket = g_value_get_boolean (value);
      break;
    case PROP_AUTO_MULTICAST:
      udpsrc->auto_multicast = g_value_get_boolean (value);
      break;
    case PROP_REUSE:
      udpsrc->reuse = g_value_get_boolean (value);
      break;
    default:
      break;
  }
}
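From application code, the properties handled above are set through the normal GObject machinery; a short usage sketch (the configuration values are arbitrary):

/* Usage sketch: create and configure a udpsrc through its properties. */
static GstElement *
make_configured_udpsrc (void)
{
  GstElement *src = gst_element_factory_make ("udpsrc", NULL);
  GstCaps *caps = gst_caps_from_string ("application/x-rtp, media=(string)video");

  g_object_set (src,
      "address", "239.1.2.3",
      "port", 5004,
      "caps", caps,
      "timeout", (guint64) (3 * GST_SECOND), NULL);
  gst_caps_unref (caps);

  return src;
}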
Example #18
static gboolean
bus_watch (GstBus *bus, GstMessage *message, gpointer user_data)
{
  struct SimpleMsnConference *dat = user_data;

  switch (GST_MESSAGE_TYPE (message))
  {
    case GST_MESSAGE_ELEMENT:
      {
        const GstStructure *s = gst_message_get_structure (message);
        ts_fail_if (s==NULL, "NULL structure in element message");
        if (gst_structure_has_name (s, "farstream-error"))
        {
          const GValue *value;
          FsError errorno;
          const gchar *error;

          ts_fail_unless (
              gst_implements_interface_check (GST_MESSAGE_SRC (message),
                  FS_TYPE_CONFERENCE),
              "Received farstream-error from non-farstream element");

          ts_fail_unless (
              gst_structure_has_field_typed (s, "src-object", G_TYPE_OBJECT),
              "farstream-error structure has no src-object field");
          ts_fail_unless (
              gst_structure_has_field_typed (s, "error-no", FS_TYPE_ERROR),
              "farstream-error structure has no error-no field");
          ts_fail_unless (
              gst_structure_has_field_typed (s, "error-msg", G_TYPE_STRING),
              "farstream-error structure has no error-msg field");

          value = gst_structure_get_value (s, "error-no");
          errorno = g_value_get_enum (value);
          error = gst_structure_get_string (s, "error-msg");

          ts_fail ("Error on BUS (%d) %s", errorno, error);
        }
        else if (gst_structure_has_name (s, "farstream-new-local-candidate"))
        {
          FsStream *stream;
          FsCandidate *candidate;
          const GValue *value;

          ts_fail_unless (
              gst_implements_interface_check (GST_MESSAGE_SRC (message),
                  FS_TYPE_CONFERENCE),
              "Received farstream-error from non-farstream element");

          ts_fail_unless (
              gst_structure_has_field_typed (s, "stream", FS_TYPE_STREAM),
              "farstream-new-local-candidate structure has no stream field");
          ts_fail_unless (
              gst_structure_has_field_typed (s, "candidate", FS_TYPE_CANDIDATE),
              "farstream-new-local-candidate structure has no candidate field");

          value = gst_structure_get_value (s, "stream");
          stream = g_value_get_object (value);

          value = gst_structure_get_value (s, "candidate");
          candidate = g_value_get_boxed (value);

          ts_fail_unless (stream && candidate, "new-local-candidate with NULL"
              " stream(%p) or candidate(%p)", stream, candidate);

          if (dat->target)
          {
            GError *error = NULL;
            GList *list = g_list_append (NULL, candidate);
            gboolean add_remote_candidates_res;

            GST_DEBUG ("Setting candidate: %s %d",
                candidate->ip, candidate->port);
            add_remote_candidates_res = fs_stream_add_remote_candidates (
                dat->target->stream, list, &error);
            ts_fail_unless (add_remote_candidates_res,
                "Could not set remote candidate: %s",
                error ? error->message : "No GError");
            ts_fail_unless (error == NULL);
            g_list_free (list);
          }
        }
      }
      break;
    case GST_MESSAGE_ERROR:
      {
        GError *error = NULL;
        gchar *debug = NULL;
        gst_message_parse_error (message, &error, &debug);

        ts_fail ("Got an error on the BUS (%d): %s (%s)", error->code,
            error->message, debug);
        g_error_free (error);
        g_free (debug);
      }
      break;
    case GST_MESSAGE_WARNING:
      {
        GError *error = NULL;
        gchar *debug = NULL;
        gst_message_parse_warning (message, &error, &debug);

        GST_DEBUG ("%d: Got a warning on the BUS: %s (%s)",
            error->code,
            error->message, debug);
        g_error_free (error);
        g_free (debug);
      }
      break;
    default:
      break;
  }

  return TRUE;
}
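To make the callback above fire, it would be attached to the conference pipeline's bus with gst_bus_add_watch (); a sketch follows (the pipeline field on SimpleMsnConference is an assumption):

/* Sketch: hook bus_watch () up to the test pipeline's bus. */
static guint
attach_bus_watch (struct SimpleMsnConference *dat)
{
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (dat->pipeline));
  guint watch_id = gst_bus_add_watch (bus, bus_watch, dat);

  gst_object_unref (bus);

  return watch_id;
}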
Example #19
static gboolean
gst_ffmpegvidenc_set_format (GstVideoEncoder * encoder,
    GstVideoCodecState * state)
{
  GstCaps *other_caps;
  GstCaps *allowed_caps;
  GstCaps *icaps;
  GstVideoCodecState *output_format;
  enum PixelFormat pix_fmt;
  GstFFMpegVidEnc *ffmpegenc = (GstFFMpegVidEnc *) encoder;
  GstFFMpegVidEncClass *oclass =
      (GstFFMpegVidEncClass *) G_OBJECT_GET_CLASS (ffmpegenc);

  /* close old session */
  if (ffmpegenc->opened) {
    gst_ffmpeg_avcodec_close (ffmpegenc->context);
    ffmpegenc->opened = FALSE;
    if (avcodec_get_context_defaults3 (ffmpegenc->context,
            oclass->in_plugin) < 0) {
      GST_DEBUG_OBJECT (ffmpegenc, "Failed to set context defaults");
      return FALSE;
    }
  }

  /* if we set it in _getcaps we should set it also in _link */
  ffmpegenc->context->strict_std_compliance = -1;

  /* user defined properties */
  ffmpegenc->context->bit_rate = ffmpegenc->bitrate;
  ffmpegenc->context->bit_rate_tolerance = ffmpegenc->bitrate;
  ffmpegenc->context->gop_size = ffmpegenc->gop_size;
  ffmpegenc->context->me_method = ffmpegenc->me_method;
  GST_DEBUG_OBJECT (ffmpegenc, "Setting avcontext to bitrate %d, gop_size %d",
      ffmpegenc->bitrate, ffmpegenc->gop_size);

  /* RTP payload used for GOB production (for Asterisk) */
  if (ffmpegenc->rtp_payload_size) {
    ffmpegenc->context->rtp_payload_size = ffmpegenc->rtp_payload_size;
  }

  /* additional avcodec settings */
  /* first fill in the majority by copying over */
  gst_ffmpeg_cfg_fill_context (ffmpegenc, ffmpegenc->context);

  /* then handle some special cases */
  ffmpegenc->context->lmin = (ffmpegenc->lmin * FF_QP2LAMBDA + 0.5);
  ffmpegenc->context->lmax = (ffmpegenc->lmax * FF_QP2LAMBDA + 0.5);

  if (ffmpegenc->interlaced) {
    ffmpegenc->context->flags |=
        CODEC_FLAG_INTERLACED_DCT | CODEC_FLAG_INTERLACED_ME;
    ffmpegenc->picture->interlaced_frame = TRUE;
    /* if this is not the case, a filter element should be used to swap fields */
    ffmpegenc->picture->top_field_first = TRUE;
  }

  /* some other defaults */
  ffmpegenc->context->rc_strategy = 2;
  ffmpegenc->context->b_frame_strategy = 0;
  ffmpegenc->context->coder_type = 0;
  ffmpegenc->context->context_model = 0;
  ffmpegenc->context->scenechange_threshold = 0;
  ffmpegenc->context->inter_threshold = 0;

  /* and last but not least the pass; CBR, 2-pass, etc */
  ffmpegenc->context->flags |= ffmpegenc->pass;
  switch (ffmpegenc->pass) {
      /* some additional action depends on type of pass */
    case CODEC_FLAG_QSCALE:
      ffmpegenc->context->global_quality
          = ffmpegenc->picture->quality = FF_QP2LAMBDA * ffmpegenc->quantizer;
      break;
    case CODEC_FLAG_PASS1:     /* need to prepare a stats file */
      /* we don't close when changing caps, fingers crossed */
      if (!ffmpegenc->file)
        ffmpegenc->file = g_fopen (ffmpegenc->filename, "w");
      if (!ffmpegenc->file)
        goto open_file_err;
      break;
    case CODEC_FLAG_PASS2:
    {                           /* need to read the whole stats file ! */
      gsize size;

      if (!g_file_get_contents (ffmpegenc->filename,
              &ffmpegenc->context->stats_in, &size, NULL))
        goto file_read_err;

      break;
    }
    default:
      break;
  }

  GST_DEBUG_OBJECT (ffmpegenc, "Extracting common video information");
  /* fetch pix_fmt, fps, par, width, height... */
  gst_ffmpeg_videoinfo_to_context (&state->info, ffmpegenc->context);

  if ((oclass->in_plugin->id == AV_CODEC_ID_MPEG4)
      && (ffmpegenc->context->time_base.den > 65535)) {
    /* MPEG4 Standards do not support time_base denominator greater than
     * (1<<16) - 1 . We therefore scale them down.
     * Agreed, it will not be the exact framerate... but the difference
     * shouldn't be that noticeable */
    ffmpegenc->context->time_base.num =
        (gint) gst_util_uint64_scale_int (ffmpegenc->context->time_base.num,
        65535, ffmpegenc->context->time_base.den);
    ffmpegenc->context->time_base.den = 65535;
    GST_LOG_OBJECT (ffmpegenc, "MPEG4 : scaled down framerate to %d / %d",
        ffmpegenc->context->time_base.den, ffmpegenc->context->time_base.num);
  }

  pix_fmt = ffmpegenc->context->pix_fmt;

  /* max-key-interval may need the framerate set above */
  if (ffmpegenc->max_key_interval) {
    AVCodecContext *ctx;

    /* override gop-size */
    ctx = ffmpegenc->context;
    ctx->gop_size = (ffmpegenc->max_key_interval < 0) ?
        (-ffmpegenc->max_key_interval
        * (ctx->time_base.den * ctx->ticks_per_frame / ctx->time_base.num))
        : ffmpegenc->max_key_interval;
  }

  /* open codec */
  if (gst_ffmpeg_avcodec_open (ffmpegenc->context, oclass->in_plugin) < 0)
    goto open_codec_fail;

  /* second pass stats buffer no longer needed */
  if (ffmpegenc->context->stats_in)
    g_free (ffmpegenc->context->stats_in);

  /* is the colourspace correct? */
  if (pix_fmt != ffmpegenc->context->pix_fmt)
    goto pix_fmt_err;

  /* we may have failed to map the caps to a pixfmt, and quite a few codecs
   * do not make up their own mind about that; in any case, _NONE can never
   * work out later on */
  if (pix_fmt == PIX_FMT_NONE)
    goto bad_input_fmt;

  /* some codecs support more than one format, first auto-choose one */
  GST_DEBUG_OBJECT (ffmpegenc, "picking an output format ...");
  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
  if (!allowed_caps) {
    GST_DEBUG_OBJECT (ffmpegenc, "... but no peer, using template caps");
    /* we need to copy because get_allowed_caps returns a ref, and
     * get_pad_template_caps doesn't */
    allowed_caps =
        gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
  }
  GST_DEBUG_OBJECT (ffmpegenc, "chose caps %" GST_PTR_FORMAT, allowed_caps);
  gst_ffmpeg_caps_with_codecid (oclass->in_plugin->id,
      oclass->in_plugin->type, allowed_caps, ffmpegenc->context);

  /* try to set this caps on the other side */
  other_caps = gst_ffmpeg_codecid_to_caps (oclass->in_plugin->id,
      ffmpegenc->context, TRUE);

  if (!other_caps) {
    gst_caps_unref (allowed_caps);
    goto unsupported_codec;
  }

  icaps = gst_caps_intersect (allowed_caps, other_caps);
  gst_caps_unref (allowed_caps);
  gst_caps_unref (other_caps);
  if (gst_caps_is_empty (icaps)) {
    gst_caps_unref (icaps);
    return FALSE;
  }
  icaps = gst_caps_truncate (icaps);

  /* Store input state and set output state */
  if (ffmpegenc->input_state)
    gst_video_codec_state_unref (ffmpegenc->input_state);
  ffmpegenc->input_state = gst_video_codec_state_ref (state);

  output_format = gst_video_encoder_set_output_state (encoder, icaps, state);
  gst_video_codec_state_unref (output_format);

  /* success! */
  ffmpegenc->opened = TRUE;

  return TRUE;

  /* ERRORS */
open_file_err:
  {
    GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, OPEN_WRITE,
        (("Could not open file \"%s\" for writing."), ffmpegenc->filename),
        GST_ERROR_SYSTEM);
    return FALSE;
  }
file_read_err:
  {
    GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, READ,
        (("Could not get contents of file \"%s\"."), ffmpegenc->filename),
        GST_ERROR_SYSTEM);
    return FALSE;
  }

open_codec_fail:
  {
    gst_ffmpeg_avcodec_close (ffmpegenc->context);
    if (avcodec_get_context_defaults3 (ffmpegenc->context,
            oclass->in_plugin) < 0)
      GST_DEBUG_OBJECT (ffmpegenc, "Failed to set context defaults");
    if (ffmpegenc->context->stats_in)
      g_free (ffmpegenc->context->stats_in);
    GST_DEBUG_OBJECT (ffmpegenc, "avenc_%s: Failed to open libav codec",
        oclass->in_plugin->name);
    return FALSE;
  }

pix_fmt_err:
  {
    gst_ffmpeg_avcodec_close (ffmpegenc->context);
    if (avcodec_get_context_defaults3 (ffmpegenc->context,
            oclass->in_plugin) < 0)
      GST_DEBUG_OBJECT (ffmpegenc, "Failed to set context defaults");
    GST_DEBUG_OBJECT (ffmpegenc,
        "avenc_%s: AV wants different colourspace (%d given, %d wanted)",
        oclass->in_plugin->name, pix_fmt, ffmpegenc->context->pix_fmt);
    return FALSE;
  }

bad_input_fmt:
  {
    GST_DEBUG_OBJECT (ffmpegenc, "avenc_%s: Failed to determine input format",
        oclass->in_plugin->name);
    return FALSE;
  }

unsupported_codec:
  {
    gst_ffmpeg_avcodec_close (ffmpegenc->context);
    if (avcodec_get_context_defaults3 (ffmpegenc->context,
            oclass->in_plugin) < 0)
      GST_DEBUG_OBJECT (ffmpegenc, "Failed to set context defaults");
    GST_DEBUG ("Unsupported codec - no caps found");
    return FALSE;
  }
}
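The last part of the set_format above (intersecting the downstream caps with what the codec can produce, truncating to the first structure and handing the result to the base class) is a common GstVideoEncoder pattern. A sketch of just that step, with a hypothetical helper name and no codec-specific handling:

#include <gst/video/gstvideoencoder.h>

static gboolean
negotiate_output_caps (GstVideoEncoder *encoder, GstCaps *codec_caps,
    GstVideoCodecState *in_state)
{
  GstCaps *allowed, *icaps;
  GstVideoCodecState *out_state;

  allowed = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
  if (!allowed)         /* no peer yet: fall back to the template caps */
    allowed = gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));

  icaps = gst_caps_intersect (allowed, codec_caps);
  gst_caps_unref (allowed);
  if (gst_caps_is_empty (icaps)) {
    gst_caps_unref (icaps);
    return FALSE;
  }

  icaps = gst_caps_truncate (icaps);    /* keep only the first structure */
  out_state = gst_video_encoder_set_output_state (encoder, icaps, in_state);
  gst_video_codec_state_unref (out_state);
  return TRUE;
}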
Example #20
0
/**
 * tsmux_stream_get_es_descrs:
 * @stream: a #TsMuxStream
 * @pmt_stream: the #GstMpegtsPMTStream whose descriptor array is filled in
 *
 * Builds the Elementary Stream descriptors for @stream, based on its stream
 * type, and appends them to the descriptors of @pmt_stream (i.e. the PMT
 * ES_info field).
 */
void
tsmux_stream_get_es_descrs (TsMuxStream * stream,
                            GstMpegtsPMTStream * pmt_stream)
{
    GstMpegtsDescriptor *descriptor;

    g_return_if_fail (stream != NULL);
    g_return_if_fail (pmt_stream != NULL);

    /* Based on the stream type, write out any descriptors to go in the
     * PMT ES_info field */
    /* tag (registration_descriptor), length, format_identifier */
    switch (stream->stream_type) {
    case TSMUX_ST_AUDIO_AAC:
        /* FIXME */
        break;
    case TSMUX_ST_VIDEO_MPEG4:
        /* FIXME */
        break;
    case TSMUX_ST_VIDEO_H264:
    {
        /* FIXME : Not sure about this additional_identification_info */
        guint8 add_info[] = { 0xFF, 0x1B, 0x44, 0x3F };

        descriptor = gst_mpegts_descriptor_from_registration ("HDMV",
                     add_info, 4);

        g_ptr_array_add (pmt_stream->descriptors, descriptor);
        break;
    }
    case TSMUX_ST_VIDEO_DIRAC:
        descriptor = gst_mpegts_descriptor_from_registration ("drac", NULL, 0);
        g_ptr_array_add (pmt_stream->descriptors, descriptor);
        break;
    case TSMUX_ST_PS_AUDIO_AC3:
    {
        guint8 add_info[6];
        guint8 *pos;

        pos = add_info;

        /* audio_stream_descriptor () | ATSC A/52-2001 Annex A
         *
         * descriptor_tag       8 uimsbf
         * descriptor_length    8 uimsbf
         * sample_rate_code     3 bslbf
         * bsid                 5 bslbf
         * bit_rate_code        6 bslbf
         * surround_mode        2 bslbf
         * bsmod                3 bslbf
         * num_channels         4 bslbf
         * full_svc             1 bslbf
         * langcod              8 bslbf
         * [...]
         */
        *pos++ = 0x81;
        *pos++ = 0x04;

        /* 3 bits sample_rate_code, 5 bits hardcoded bsid (default ver 8) */
        switch (stream->audio_sampling) {
        case 48000:
            *pos++ = 0x08;
            break;
        case 44100:
            *pos++ = 0x28;
            break;
        case 32000:
            *pos++ = 0x48;
            break;
        default:
            *pos++ = 0xE8;
            break;                /* 48, 44.1 or 32 kHz */
        }

        /* 1 bit bit_rate_limit, 5 bits bit_rate_code, 2 bits surround_mode */
        switch (stream->audio_bitrate) {
        case 32:
            *pos++ = 0x00 << 2;
            break;
        case 40:
            *pos++ = 0x01 << 2;
            break;
        case 48:
            *pos++ = 0x02 << 2;
            break;
        case 56:
            *pos++ = 0x03 << 2;
            break;
        case 64:
            *pos++ = 0x04 << 2;
            break;
        case 80:
            *pos++ = 0x05 << 2;
            break;
        case 96:
            *pos++ = 0x06 << 2;
            break;
        case 112:
            *pos++ = 0x07 << 2;
            break;
        case 128:
            *pos++ = 0x08 << 2;
            break;
        case 160:
            *pos++ = 0x09 << 2;
            break;
        case 192:
            *pos++ = 0x0A << 2;
            break;
        case 224:
            *pos++ = 0x0B << 2;
            break;
        case 256:
            *pos++ = 0x0C << 2;
            break;
        case 320:
            *pos++ = 0x0D << 2;
            break;
        case 384:
            *pos++ = 0x0E << 2;
            break;
        case 448:
            *pos++ = 0x0F << 2;
            break;
        case 512:
            *pos++ = 0x10 << 2;
            break;
        case 576:
            *pos++ = 0x11 << 2;
            break;
        case 640:
            *pos++ = 0x12 << 2;
            break;
        default:
            *pos++ = 0x32 << 2;
            break;                /* 640 Kb/s upper limit */
        }

        /* 3 bits bsmod, 4 bits num_channels, 1 bit full_svc */
        switch (stream->audio_channels) {
        case 1:
            *pos++ = 0x01 << 1;
            break;                /* 1/0 */
        case 2:
            *pos++ = 0x02 << 1;
            break;                /* 2/0 */
        case 3:
            *pos++ = 0x0A << 1;
            break;                /* <= 3 */
        case 4:
            *pos++ = 0x0B << 1;
            break;                /* <= 4 */
        case 5:
            *pos++ = 0x0C << 1;
            break;                /* <= 5 */
        case 6:
        default:
            *pos++ = 0x0D << 1;
            break;                /* <= 6 */
        }

        *pos++ = 0x00;

        descriptor = gst_mpegts_descriptor_from_registration ("AC-3",
                     add_info, 6);

        g_ptr_array_add (pmt_stream->descriptors, descriptor);

        break;
    }
    case TSMUX_ST_PS_AUDIO_DTS:
        /* FIXME */
        break;
    case TSMUX_ST_PS_AUDIO_LPCM:
        /* FIXME */
        break;
    case TSMUX_ST_PS_TELETEXT:
        /* FIXME empty descriptor for now;
         * should be provided by upstream in event or so ? */
        descriptor =
            gst_mpegts_descriptor_from_custom (GST_MTS_DESC_DVB_TELETEXT, 0, 1);

        g_ptr_array_add (pmt_stream->descriptors, descriptor);
        break;
    case TSMUX_ST_PS_DVB_SUBPICTURE:
    /* fallthrough ...
     * this should never happen anyway, as
     * DVB subtitles are private data */
    case TSMUX_ST_PRIVATE_DATA:
        if (stream->is_dvb_sub) {
            GST_DEBUG ("Stream language %s", stream->language);
            /* Simple DVB subtitles, with no critical monitor aspect ratio
               (FIXME: how do we make this settable?) */
            /* Default composition page ID */
            /* Default ancillary_page_id */
            descriptor =
                gst_mpegts_descriptor_from_dvb_subtitling (stream->language, 0x10,
                        0x0001, 0x0152);

            g_ptr_array_add (pmt_stream->descriptors, descriptor);
            break;
        }
    default:
        break;
    }
}
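The registration descriptors above come from the GStreamer MPEG-TS library in gst-plugins-bad. As a small sketch, under the assumption that the caller builds the PMT stream entry itself, here is the H.264 branch in isolation:

#include <gst/mpegts/mpegts.h>

static GstMpegtsPMTStream *
new_h264_pmt_stream (void)
{
  guint8 add_info[] = { 0xFF, 0x1B, 0x44, 0x3F };       /* as in the code above */
  GstMpegtsPMTStream *pmt_stream;
  GstMpegtsDescriptor *descriptor;

  gst_mpegts_initialize ();     /* registers the MPEG-TS descriptor/section types */

  pmt_stream = gst_mpegts_pmt_stream_new ();
  pmt_stream->stream_type = GST_MPEGTS_STREAM_TYPE_VIDEO_H264;

  descriptor = gst_mpegts_descriptor_from_registration ("HDMV", add_info, 4);
  g_ptr_array_add (pmt_stream->descriptors, descriptor);

  return pmt_stream;
}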
Example #21
0
static gboolean
resample_set_state_from_caps (ResampleState * state, GstCaps * incaps,
    GstCaps * outcaps, gint * channels, gint * inrate, gint * outrate)
{
  GstStructure *structure;
  gboolean ret;
  gint myinrate, myoutrate;
  int mychannels;
  gint width, depth;
  ResampleFormat format;

  GST_DEBUG ("incaps %" GST_PTR_FORMAT ", outcaps %"
      GST_PTR_FORMAT, incaps, outcaps);

  structure = gst_caps_get_structure (incaps, 0);

  /* get width */
  ret = gst_structure_get_int (structure, "width", &width);
  if (!ret)
    goto no_width;

  /* figure out the format */
  if (g_str_equal (gst_structure_get_name (structure), "audio/x-raw-float")) {
    if (width == 32)
      format = RESAMPLE_FORMAT_F32;
    else if (width == 64)
      format = RESAMPLE_FORMAT_F64;
    else
      goto wrong_depth;
  } else {
    /* for int, depth and width must be the same */
    ret = gst_structure_get_int (structure, "depth", &depth);
    if (!ret || width != depth)
      goto not_equal;

    if (width == 16)
      format = RESAMPLE_FORMAT_S16;
    else if (width == 32)
      format = RESAMPLE_FORMAT_S32;
    else
      goto wrong_depth;
  }
  ret = gst_structure_get_int (structure, "rate", &myinrate);
  ret &= gst_structure_get_int (structure, "channels", &mychannels);
  if (!ret)
    goto no_in_rate_channels;

  structure = gst_caps_get_structure (outcaps, 0);
  ret = gst_structure_get_int (structure, "rate", &myoutrate);
  if (!ret)
    goto no_out_rate;

  if (channels)
    *channels = mychannels;
  if (inrate)
    *inrate = myinrate;
  if (outrate)
    *outrate = myoutrate;

  resample_set_format (state, format);
  resample_set_n_channels (state, mychannels);
  resample_set_input_rate (state, myinrate);
  resample_set_output_rate (state, myoutrate);

  return TRUE;

  /* ERRORS */
no_width:
  {
    GST_DEBUG ("failed to get width from caps");
    return FALSE;
  }
not_equal:
  {
    GST_DEBUG ("width %d and depth %d must be the same", width, depth);
    return FALSE;
  }
wrong_depth:
  {
    GST_DEBUG ("unknown depth %d found", depth);
    return FALSE;
  }
no_in_rate_channels:
  {
    GST_DEBUG ("could not get input rate and channels");
    return FALSE;
  }
no_out_rate:
  {
    GST_DEBUG ("could not get output rate");
    return FALSE;
  }
}
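The function above targets 0.10-style raw-audio caps, where the sample layout is described by explicit width/depth fields. A self-contained sketch of the same field-extraction pattern against hand-built caps:

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstCaps *caps;
  GstStructure *s;
  gint width = 0, depth = 0, rate = 0, channels = 0;

  gst_init (&argc, &argv);

  caps = gst_caps_new_simple ("audio/x-raw-int",
      "width", G_TYPE_INT, 16, "depth", G_TYPE_INT, 16,
      "rate", G_TYPE_INT, 44100, "channels", G_TYPE_INT, 2, NULL);

  s = gst_caps_get_structure (caps, 0);
  if (gst_structure_get_int (s, "width", &width) &&
      gst_structure_get_int (s, "depth", &depth) &&
      gst_structure_get_int (s, "rate", &rate) &&
      gst_structure_get_int (s, "channels", &channels))
    g_print ("S%d, %d Hz, %d channels\n", width, rate, channels);

  gst_caps_unref (caps);
  return 0;
}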
Example #22
0
/*
 * helper_find_peek:
 * @data: helper data struct
 * @off: stream offset
 * @size: block size
 *
 * Get data pointer within a stream. Keeps a cache of read buffers (partly
 * for performance reasons, but mostly because pointers returned by us need
 * to stay valid until typefinding has finished)
 *
 * Returns: address of the data or %NULL if buffer does not cover the
 * requested range.
 */
static const guint8 *
helper_find_peek (gpointer data, gint64 offset, guint size)
{
  GstTypeFindHelper *helper;
  GstBuffer *buffer;
  GstFlowReturn ret;
  GSList *insert_pos = NULL;
  gsize buf_size;
  guint64 buf_offset;
  GstMappedBuffer *bmap;
#if 0
  GstCaps *caps;
#endif

  helper = (GstTypeFindHelper *) data;

  GST_LOG_OBJECT (helper->obj, "'%s' called peek (%" G_GINT64_FORMAT
      ", %u)", GST_OBJECT_NAME (helper->factory), offset, size);

  if (size == 0)
    return NULL;

  if (offset < 0) {
    if (helper->size == -1 || helper->size < -offset)
      return NULL;

    offset += helper->size;
  }

  /* see if we have a matching buffer already in our list */
  if (size > 0 && offset <= helper->last_offset) {
    GSList *walk;

    for (walk = helper->buffers; walk; walk = walk->next) {
      GstMappedBuffer *bmp = (GstMappedBuffer *) walk->data;
      GstBuffer *buf = GST_BUFFER_CAST (bmp->buffer);
      guint64 buf_offset = GST_BUFFER_OFFSET (buf);
      guint buf_size = gst_buffer_get_size (buf);

      /* buffers are kept sorted by end offset (highest first) in the list, so
       * at this point we save the current position and stop searching if 
       * we're after the searched end offset */
      if (buf_offset <= offset) {
        if ((offset + size) < (buf_offset + buf_size)) {
          /* must already have been mapped before */
          return (guint8 *) bmp->map.data + (offset - buf_offset);
        }
      } else if (offset + size >= buf_offset + buf_size) {
        insert_pos = walk;
        break;
      }
    }
  }

  buffer = NULL;
  /* some typefinders go in 1 byte steps over 1k of data and request
   * small buffers. It is really inefficient to pull each time, and pulling
   * a larger chunk is almost free. Trying to pull a larger chunk at the end
   * of the file is also not a problem here, we'll just get a truncated buffer
   * in that case (and we'll have to double-check the size we actually get
   * anyway, see below) */
  ret =
      helper->func (helper->obj, helper->parent, offset, MAX (size, 4096),
      &buffer);

  if (ret != GST_FLOW_OK)
    goto error;

#if 0
  caps = GST_BUFFER_CAPS (buffer);

  if (caps && !gst_caps_is_empty (caps) && !gst_caps_is_any (caps)) {
    GST_DEBUG ("buffer has caps %" GST_PTR_FORMAT ", suggest max probability",
        caps);

    gst_caps_replace (&helper->caps, caps);
    helper->best_probability = GST_TYPE_FIND_MAXIMUM;

    gst_buffer_unref (buffer);
    return NULL;
  }
#endif

  /* getrange might silently return shortened buffers at the end of a file,
   * we must, however, always return either the full requested data or NULL */
  buf_offset = GST_BUFFER_OFFSET (buffer);
  buf_size = gst_buffer_get_size (buffer);

  if ((buf_offset != -1 && buf_offset != offset) || buf_size < size) {
    GST_DEBUG ("dropping short buffer: %" G_GUINT64_FORMAT "-%" G_GUINT64_FORMAT
        " instead of %" G_GUINT64_FORMAT "-%" G_GUINT64_FORMAT,
        buf_offset, buf_offset + buf_size - 1, offset, offset + size - 1);
    gst_buffer_unref (buffer);
    return NULL;
  }

  bmap = g_slice_new0 (GstMappedBuffer);
  bmap->buffer = buffer;
  if (insert_pos) {
    helper->buffers = g_slist_insert_before (helper->buffers, insert_pos, bmap);
  } else {
    /* if insert_pos is not set, our offset is bigger than the largest offset
     * we have so far; since we keep the list sorted with highest offsets
     * first, we need to prepend the buffer to the list */
    helper->last_offset = GST_BUFFER_OFFSET (buffer) + buf_size;
    helper->buffers = g_slist_prepend (helper->buffers, bmap);
  }

  gst_buffer_map (buffer, &bmap->map, GST_MAP_READ);

  return bmap->map.data;

error:
  {
    GST_INFO ("typefind function returned: %s", gst_flow_get_name (ret));
    return NULL;
  }
}
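This peek callback is the backend of the public typefind helpers; element code normally does not call it directly but goes through the helper API. A usage sketch (the wrapper name is illustrative):

#include <gst/base/gsttypefindhelper.h>

static GstCaps *
typefind_upstream (GstPad *src_pad, guint64 size)
{
  /* runs all registered typefind functions against data pulled via src_pad,
   * which ends up in a peek callback like the one above */
  return gst_type_find_helper (src_pad, size);
}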
Example #23
0
void
kms_agnostic_bin2_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  KmsAgnosticBin2 *self = KMS_AGNOSTIC_BIN2 (object);

  switch (property_id) {
    case PROP_MIN_BITRATE:{
      gint v;

      v = g_value_get_int (value);
      KMS_AGNOSTIC_BIN2_LOCK (self);
      if (v > self->priv->max_bitrate) {
        v = self->priv->max_bitrate;

        GST_WARNING_OBJECT (self,
            "Setting min-bitrate bigger than max-bitrate");
      }

      self->priv->min_bitrate = v;
      GST_DEBUG_OBJECT (self, "min_bitrate configured %d",
          self->priv->min_bitrate);
      kms_agnostic_bin_set_encoders_bitrate (self);
      KMS_AGNOSTIC_BIN2_UNLOCK (self);
      break;
    }
    case PROP_MAX_BITRATE:{
      gint v;

      self->priv->bitrate_unlimited = FALSE;
      v = g_value_get_int (value);
      KMS_AGNOSTIC_BIN2_LOCK (self);
      if (v == 0) {
        self->priv->bitrate_unlimited = TRUE;
        v = MAX_BITRATE_DEFAULT;
      }
      if (v < self->priv->min_bitrate) {
        v = self->priv->min_bitrate;

        GST_WARNING_OBJECT (self, "Setting max-bitrate less than min-bitrate");
      }
      self->priv->max_bitrate = v;
      GST_DEBUG ("max_bitrate configured %d", self->priv->max_bitrate);
      kms_agnostic_bin_set_encoders_bitrate (self);
      KMS_AGNOSTIC_BIN2_UNLOCK (self);
      break;
    }
    case PROP_CODEC_CONFIG:
      KMS_AGNOSTIC_BIN2_LOCK (self);
      if (self->priv->codec_config) {
        gst_structure_free (self->priv->codec_config);
        self->priv->codec_config = NULL;
      }
      self->priv->codec_config = g_value_dup_boxed (value);
      KMS_AGNOSTIC_BIN2_UNLOCK (self);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}
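From application code the two bitrate properties are simply set with g_object_set(); the setter above takes care of clamping min against max and of treating a max-bitrate of 0 as unlimited. A usage sketch, assuming an element instance that exposes the properties handled above:

#include <gst/gst.h>

static void
configure_bitrate (GstElement *agnosticbin)
{
  g_object_set (agnosticbin,
      "min-bitrate", 100000,    /* bit/s */
      "max-bitrate", 500000,    /* 0 would mean "unlimited" per the setter above */
      NULL);
}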
Example #24
0
/* Initialize the tracing system */
void
_priv_gst_tracing_init (void)
{
  gint i = 0;
  const gchar *env = g_getenv ("GST_TRACERS");

  /* We initialize the tracer sub system even if the end
   * user did not activate it through the env variable
   * so that external tools can use it anyway */
  GST_DEBUG ("Initializing GstTracer");
  _priv_tracers = g_hash_table_new (NULL, NULL);

  if (G_N_ELEMENTS (_quark_strings) != GST_TRACER_QUARK_MAX)
    g_warning ("the quark table is not consistent! %d != %d",
        (gint) G_N_ELEMENTS (_quark_strings), GST_TRACER_QUARK_MAX);

  for (i = 0; i < GST_TRACER_QUARK_MAX; i++) {
    _priv_gst_tracer_quark_table[i] =
        g_quark_from_static_string (_quark_strings[i]);
  }

  if (env != NULL && *env != '\0') {
    GstRegistry *registry = gst_registry_get ();
    GstPluginFeature *feature;
    GstTracerFactory *factory;
    gchar **t = g_strsplit_set (env, ";", 0);
    gchar *params;

    GST_INFO ("enabling tracers: '%s'", env);
    i = 0;
    while (t[i]) {
      // check t[i] for params
      if ((params = strchr (t[i], '('))) {
        gchar *end = strchr (&params[1], ')');
        *params = '\0';
        params++;
        if (end)
          *end = '\0';
      } else {
        params = NULL;
      }

      GST_INFO ("checking tracer: '%s'", t[i]);

      if ((feature = gst_registry_lookup_feature (registry, t[i]))) {
        factory = GST_TRACER_FACTORY (gst_plugin_feature_load (feature));
        if (factory) {
          GstTracer *tracer;

          GST_INFO_OBJECT (factory, "creating tracer: type-id=%u",
              (guint) factory->type);

          tracer = g_object_new (factory->type, "params", params, NULL);

          /* Clear floating flag */
          gst_object_ref_sink (tracer);

          /* tracers register them self to the hooks */
          gst_object_unref (tracer);
        } else {
          GST_WARNING_OBJECT (feature,
              "loading plugin containing feature %s failed!", t[i]);
        }
      } else {
        GST_WARNING ("no tracer named '%s'", t[i]);
      }
      i++;
    }
    g_strfreev (t);
  }
}
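The parser above accepts a semicolon-separated list in which each tracer name may carry parameters in parentheses, e.g. an environment such as GST_TRACERS="latency;stats" (with GST_DEBUG="GST_TRACER:7" to see the output). A minimal sketch of just the name/parameter splitting, as a standalone helper with an illustrative name:

#include <string.h>

static char *
split_params (char *token)          /* e.g. "stats(flags=all)" */
{
  char *params = strchr (token, '(');

  if (params) {
    char *end = strchr (&params[1], ')');

    *params = '\0';                 /* terminate the tracer name */
    params++;
    if (end)
      *end = '\0';                  /* terminate the parameter string */
  }
  return params;                    /* NULL if the token had no parameters */
}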
Example #25
0
int
main (int argc, char *argv[])
{
  App *app = &s_app;
  GError *error = NULL;
  GstBus *bus;

  gst_init (&argc, &argv);

  GST_DEBUG_CATEGORY_INIT (appsrc_playbin_debug, "appsrc-playbin", 0,
      "appsrc playbin example");

  if (argc < 2) {
    g_print ("usage: %s <filename>\n", argv[0]);
    return -1;
  }

  /* try to open the file as an mmapped file */
  app->file = g_mapped_file_new (argv[1], FALSE, &error);
  if (error) {
    g_print ("failed to open file: %s\n", error->message);
    g_error_free (error);
    return -2;
  }
  /* get some vitals, this will be used to read data from the mmapped file and
   * feed it to appsrc. */
  app->length = g_mapped_file_get_length (app->file);
  app->data = (guint8 *) g_mapped_file_get_contents (app->file);
  app->offset = 0;

  /* create a mainloop to get messages */
  app->loop = g_main_loop_new (NULL, TRUE);

  app->playbin = gst_element_factory_make ("playbin2", NULL);
  g_assert (app->playbin);

  bus = gst_pipeline_get_bus (GST_PIPELINE (app->playbin));

  /* add watch for messages */
  gst_bus_add_watch (bus, (GstBusFunc) bus_message, app);

  /* set to read from appsrc */
  g_object_set (app->playbin, "uri", "appsrc://", NULL);

  /* get notification when the source is created so that we get a handle to it
   * and can configure it */
  g_signal_connect (app->playbin, "deep-notify::source",
      (GCallback) found_source, app);

  /* go to playing and wait in a mainloop. */
  gst_element_set_state (app->playbin, GST_STATE_PLAYING);

  /* this mainloop is stopped when we receive an error or EOS */
  g_main_loop_run (app->loop);

  GST_DEBUG ("stopping");

  gst_element_set_state (app->playbin, GST_STATE_NULL);

  /* free the file */
  g_mapped_file_free (app->file);

  gst_object_unref (bus);
  g_main_loop_unref (app->loop);

  return 0;
}
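main() above relies on a found_source callback that is not shown in this example. A hedged sketch of what such a callback typically does with appsrc; the appsrc field on App and the start_feed/stop_feed helpers are assumptions about the surrounding code, not part of the listing above:

static void
found_source (GObject *object, GObject *orig, GParamSpec *pspec, App *app)
{
  /* playbin just created its source element; grab a reference to it */
  g_object_get (orig, pspec->name, &app->appsrc, NULL);

  /* tell appsrc how much data the mmapped file will provide */
  g_object_set (app->appsrc, "size", (gint64) app->length, NULL);

  /* feed data on demand */
  g_signal_connect (app->appsrc, "need-data", G_CALLBACK (start_feed), app);
  g_signal_connect (app->appsrc, "enough-data", G_CALLBACK (stop_feed), app);
}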
Example #26
0
static int
gst_ffmpegdata_write (URLContext * h, unsigned char *buf, int size)
{
  GstProtocolInfo *info;
  GstBuffer *outbuf;

  GST_DEBUG ("Writing %d bytes", size);
  info = (GstProtocolInfo *) h->priv_data;

  g_return_val_if_fail (h->flags != URL_RDONLY, -EIO);

  /*
   * WHISPERCAST BEGIN: set the "streamheader" value on the source pad caps,
   * so we are able to use ffmpegmux with fdsink/multifdsink/etc...
  */
  if (info->set_streamheader) {
    GstCaps *caps;
    GstStructure *structure;
  
    GValue array = { 0 };
    GstBuffer *copy;
    GValue value = { 0 };
  
    caps = gst_pad_get_caps(info->pad);
    caps = gst_caps_make_writable (caps);
  
    structure = gst_caps_get_structure (caps, 0);
  
    /* put buffers in a fixed list */
    g_value_init (&array, GST_TYPE_ARRAY);
  
    g_value_init (&value, GST_TYPE_BUFFER);
    copy = gst_buffer_new_and_alloc (size);
    memcpy (GST_BUFFER_DATA (copy), buf, size);
    gst_value_set_buffer (&value, copy);
    gst_buffer_unref (copy);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
  
    gst_structure_set_value (structure, "streamheader", &array);
    g_value_unset (&array);
  
    gst_pad_set_caps(info->pad, caps);
    gst_caps_unref(caps);
  }
  /* WHISPERCAST END */
  
  /* create buffer and push data further */
  if (gst_pad_alloc_buffer_and_set_caps (info->pad,
          info->offset, size, GST_PAD_CAPS (info->pad), &outbuf) != GST_FLOW_OK)
    return 0;

  /*
   * WHISPERCAST BEGIN: mark the buffer as added to the source pad's caps.
  */
  if (info->set_streamheader) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_IN_CAPS);
    info->set_streamheader = 0;
  }
  /* WHISPERCAST END */
  
  memcpy (GST_BUFFER_DATA (outbuf), buf, size);

  if (gst_pad_push (info->pad, outbuf) != GST_FLOW_OK)
    return 0;

  info->offset += size;
  return size;
}
Example #27
0
GdkPixbuf *
xplayer_gst_playbin_get_frame (GstElement *play)
{
  GstStructure *s;
  GstSample *sample = NULL;
  GdkPixbuf *pixbuf = NULL;
  GstCaps *to_caps, *sample_caps;
  gint outwidth = 0;
  gint outheight = 0;
  GstMemory *memory;
  GstMapInfo info;
  GdkPixbufRotation rotation = GDK_PIXBUF_ROTATE_NONE;

  g_return_val_if_fail (play != NULL, NULL);
  g_return_val_if_fail (GST_IS_ELEMENT (play), NULL);

  /* our desired output format (RGB24) */
  to_caps = gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, "RGB",
      /* Note: we don't ask for a specific width/height here, so that
       * videoscale can adjust dimensions from a non-1/1 pixel aspect
       * ratio to a 1/1 pixel-aspect-ratio. We also don't ask for a
       * specific framerate, because the input framerate won't
       * necessarily match the output framerate if there's a deinterlacer
       * in the pipeline. */
      "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
      NULL);

  /* get frame */
  g_signal_emit_by_name (play, "convert-sample", to_caps, &sample);
  gst_caps_unref (to_caps);

  if (!sample) {
    GST_DEBUG ("Could not take screenshot: %s",
        "failed to retrieve or convert video frame");
    g_warning ("Could not take screenshot: %s",
        "failed to retrieve or convert video frame");
    return NULL;
  }

  sample_caps = gst_sample_get_caps (sample);
  if (!sample_caps) {
    GST_DEBUG ("Could not take screenshot: %s", "no caps on output buffer");
    g_warning ("Could not take screenshot: %s", "no caps on output buffer");
    gst_sample_unref (sample);
    return NULL;
  }

  GST_DEBUG ("frame caps: %" GST_PTR_FORMAT, sample_caps);

  s = gst_caps_get_structure (sample_caps, 0);
  gst_structure_get_int (s, "width", &outwidth);
  gst_structure_get_int (s, "height", &outheight);
  if (outwidth <= 0 || outheight <= 0)
    goto done;

  memory = gst_buffer_get_memory (gst_sample_get_buffer (sample), 0);
  gst_memory_map (memory, &info, GST_MAP_READ);

  /* create pixbuf from that - use our own destroy function */
  pixbuf = gdk_pixbuf_new_from_data (info.data,
      GDK_COLORSPACE_RGB, FALSE, 8, outwidth, outheight,
      GST_ROUND_UP_4 (outwidth * 3), destroy_pixbuf, sample);

  gst_memory_unmap (memory, &info);

done:
  if (!pixbuf) {
    GST_DEBUG ("Could not take screenshot: %s", "could not create pixbuf");
    g_warning ("Could not take screenshot: %s", "could not create pixbuf");
    gst_sample_unref (sample);
    return NULL;
  }

  /* Did we check whether we need to rotate the video? */
  if (g_object_get_data (G_OBJECT (play), "orientation-checked") == NULL) {
    GstTagList *tags = NULL;

    g_signal_emit_by_name (G_OBJECT (play), "get-video-tags", 0, &tags);
    if (tags) {
      char *orientation_str;
      gboolean ret;

      ret = gst_tag_list_get_string_index (tags, GST_TAG_IMAGE_ORIENTATION, 0, &orientation_str);
      if (!ret || !orientation_str)
        rotation = GDK_PIXBUF_ROTATE_NONE;
      else if (g_str_equal (orientation_str, "rotate-90"))
        rotation = GDK_PIXBUF_ROTATE_CLOCKWISE;
      else if (g_str_equal (orientation_str, "rotate-180"))
        rotation = GDK_PIXBUF_ROTATE_UPSIDEDOWN;
      else if (g_str_equal (orientation_str, "rotate-270"))
        rotation = GDK_PIXBUF_ROTATE_COUNTERCLOCKWISE;

      gst_tag_list_unref (tags);
    }

    g_object_set_data (G_OBJECT (play), "orientation-checked", GINT_TO_POINTER(1));
    g_object_set_data (G_OBJECT (play), "orientation", GINT_TO_POINTER(rotation));
  }

  rotation = GPOINTER_TO_INT (g_object_get_data (G_OBJECT (play), "orientation"));
  if (rotation != GDK_PIXBUF_ROTATE_NONE) {
    GdkPixbuf *rotated;

    rotated = gdk_pixbuf_rotate_simple (pixbuf, rotation);
    if (rotated) {
      g_object_unref (pixbuf);
      pixbuf = rotated;
    }
  }

  return pixbuf;
}
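A usage sketch for the function above: with the playbin paused or playing, grab a frame and write it out as a PNG (error handling kept short):

#include <gdk-pixbuf/gdk-pixbuf.h>

static void
save_screenshot (GstElement *play, const char *filename)
{
  GError *error = NULL;
  GdkPixbuf *pixbuf = xplayer_gst_playbin_get_frame (play);

  if (pixbuf == NULL)
    return;

  if (!gdk_pixbuf_save (pixbuf, filename, "png", &error, NULL)) {
    g_warning ("saving screenshot failed: %s", error->message);
    g_error_free (error);
  }
  g_object_unref (pixbuf);
}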
Example #28
0
static offset_t
gst_ffmpegdata_seek (URLContext * h, offset_t pos, int whence)
{
  GstProtocolInfo *info;
  guint64 newpos;

  GST_DEBUG ("Seeking to %" G_GINT64_FORMAT ", whence=%d", pos, whence);

  info = (GstProtocolInfo *) h->priv_data;

  /* TODO : if we are push-based, we need to return sensible info */

  switch (h->flags) {
    case URL_RDONLY:
    {
      /* sinkpad */
      switch (whence) {
        case SEEK_SET:
          info->offset = (guint64) pos;
          break;
        case SEEK_CUR:
          info->offset += pos;
          break;
        case SEEK_END:
          /* ffmpeg wants to know the current end position in bytes ! */
        {
          GstFormat format = GST_FORMAT_BYTES;
          gint64 duration;

          if (gst_pad_is_linked (info->pad))
            if (gst_pad_query_duration (GST_PAD_PEER (info->pad), &format,
                    &duration))
              info->offset = ((guint64) duration) + pos;
        }
          break;
        default:
          break;
      }
      /* FIXME : implement case for push-based behaviour */
      newpos = info->offset;
    }
      break;
    case URL_WRONLY:
    {
      /* srcpad */
      switch (whence) {
        case SEEK_SET:
          info->offset = (guint64) pos;
          gst_pad_push_event (info->pad, gst_event_new_new_segment
              (TRUE, 1.0, GST_FORMAT_BYTES, info->offset,
                  GST_CLOCK_TIME_NONE, info->offset));
          break;
        case SEEK_CUR:
          info->offset += pos;
          gst_pad_push_event (info->pad, gst_event_new_new_segment
              (TRUE, 1.0, GST_FORMAT_BYTES, info->offset,
                  GST_CLOCK_TIME_NONE, info->offset));
          break;
        default:
          break;
      }
      newpos = info->offset;
    }
      break;
    default:
      g_assert (0);
      break;
  }

  GST_DEBUG ("Now at offset %lld", info->offset);
  return newpos;
}
Example #29
0
static GstBuffer *
gst_rtp_qdm2_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
  GstRtpQDM2Depay *rtpqdm2depay;
  GstBuffer *outbuf = NULL;
  guint16 seq;
  GstRTPBuffer rtp = { NULL };

  rtpqdm2depay = GST_RTP_QDM2_DEPAY (depayload);

  {
    gint payload_len;
    guint8 *payload;
    guint avail;
    guint pos = 0;

    gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
    payload_len = gst_rtp_buffer_get_payload_len (&rtp);
    if (payload_len < 3)
      goto bad_packet;

    payload = gst_rtp_buffer_get_payload (&rtp);
    seq = gst_rtp_buffer_get_seq (&rtp);
    if (G_UNLIKELY (seq != rtpqdm2depay->nextseq)) {
      GST_DEBUG ("GAP in sequence number, Resetting data !");
      /* Flush previous data */
      flush_data (rtpqdm2depay);
      /* And store new timestamp */
      rtpqdm2depay->ptimestamp = rtpqdm2depay->timestamp;
      rtpqdm2depay->timestamp = GST_BUFFER_TIMESTAMP (buf);
      /* And that previous data will be pushed at the bottom */
    }
    rtpqdm2depay->nextseq = seq + 1;

    GST_DEBUG ("Payload size %d 0x%x sequence:%d", payload_len, payload_len,
        seq);

    GST_MEMDUMP ("Incoming payload", payload, payload_len);

    while (pos < payload_len) {
      switch (payload[pos]) {
        case 0x80:{
          GST_DEBUG ("Unrecognized 0x80 marker, skipping 12 bytes");
          pos += 12;
        }
          break;
        case 0xff:
          /* HEADERS */
          GST_DEBUG ("Headers");
          /* Store the incoming timestamp */
          rtpqdm2depay->ptimestamp = rtpqdm2depay->timestamp;
          rtpqdm2depay->timestamp = GST_BUFFER_TIMESTAMP (buf);
          /* flush the internal data if needed */
          flush_data (rtpqdm2depay);
          if (G_UNLIKELY (!rtpqdm2depay->configured)) {
            guint8 *ourdata;
            GstBuffer *codecdata;
            GstMapInfo cmap;
            GstCaps *caps;

            /* First bytes are unknown */
            GST_MEMDUMP ("Header", payload + pos, 32);
            ourdata = payload + pos + 10;
            pos += 10;
            rtpqdm2depay->channs = GST_READ_UINT32_BE (payload + pos + 4);
            rtpqdm2depay->samplerate = GST_READ_UINT32_BE (payload + pos + 8);
            rtpqdm2depay->bitrate = GST_READ_UINT32_BE (payload + pos + 12);
            rtpqdm2depay->blocksize = GST_READ_UINT32_BE (payload + pos + 16);
            rtpqdm2depay->framesize = GST_READ_UINT32_BE (payload + pos + 20);
            rtpqdm2depay->packetsize = GST_READ_UINT32_BE (payload + pos + 24);
            /* 16 bit empty block (0x02 0x00) */
            pos += 30;
            GST_DEBUG
                ("channs:%d, samplerate:%d, bitrate:%d, blocksize:%d, framesize:%d, packetsize:%d",
                rtpqdm2depay->channs, rtpqdm2depay->samplerate,
                rtpqdm2depay->bitrate, rtpqdm2depay->blocksize,
                rtpqdm2depay->framesize, rtpqdm2depay->packetsize);

            /* Caps */
            codecdata = gst_buffer_new_and_alloc (48);
            gst_buffer_map (codecdata, &cmap, GST_MAP_WRITE);
            memcpy (cmap.data, headheader, 20);
            memcpy (cmap.data + 20, ourdata, 28);
            gst_buffer_unmap (codecdata, &cmap);

            caps = gst_caps_new_simple ("audio/x-qdm2",
                "samplesize", G_TYPE_INT, 16,
                "rate", G_TYPE_INT, rtpqdm2depay->samplerate,
                "channels", G_TYPE_INT, rtpqdm2depay->channs,
                "codec_data", GST_TYPE_BUFFER, codecdata, NULL);
            gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), caps);
            gst_caps_unref (caps);
            rtpqdm2depay->configured = TRUE;
          } else {
            GST_DEBUG ("Already configured, skipping headers");
            pos += 40;
          }
          break;
        default:{
          /* Shuffled packet contents */
          guint packetid = payload[pos++];
          guint packettype = payload[pos++];
          guint packlen = payload[pos++];
          guint hsize = 2;

          GST_DEBUG ("Packet id:%d, type:0x%x, len:%d",
              packetid, packettype, packlen);

          /* Packets bigger than 0xff bytes have a type with the high bit set */
          if (G_UNLIKELY (packettype & 0x80)) {
            packettype &= 0x7f;
            packlen <<= 8;
            packlen |= payload[pos++];
            hsize = 3;
            GST_DEBUG ("Packet id:%d, type:0x%x, len:%d",
                packetid, packettype, packlen);
          }

          if (packettype > 0x7f) {
            GST_ERROR ("HOUSTON WE HAVE A PROBLEM !!!!");
          }
          add_packet (rtpqdm2depay, packetid, packlen + hsize,
              payload + pos - hsize);
          pos += packlen;
        }
      }
    }

    GST_DEBUG ("final pos %d", pos);

    avail = gst_adapter_available (rtpqdm2depay->adapter);
    if (G_UNLIKELY (avail)) {
      GST_DEBUG ("Pushing out %d bytes of collected data", avail);
      outbuf = gst_adapter_take_buffer (rtpqdm2depay->adapter, avail);
      GST_BUFFER_TIMESTAMP (outbuf) = rtpqdm2depay->ptimestamp;
      GST_DEBUG ("Outgoing buffer timestamp %" GST_TIME_FORMAT,
          GST_TIME_ARGS (rtpqdm2depay->ptimestamp));
    }
  }

  gst_rtp_buffer_unmap (&rtp);
  return outbuf;

  /* ERRORS */
bad_packet:
  {
    GST_ELEMENT_WARNING (rtpqdm2depay, STREAM, DECODE,
        (NULL), ("Packet was too short"));
    gst_rtp_buffer_unmap (&rtp);
    return NULL;
  }
}
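The shuffled-packet header handled in the default branch above is 1 byte of packet id, 1 byte of type and 1 byte of length, with lengths of 0x100 or more signalled by the high bit of the type byte plus one extra length byte. A standalone restatement of that parsing (illustrative helper name):

#include <glib.h>

static guint
parse_packet_header (const guint8 *p, guint *id, guint *type,
    guint *len, guint *hsize)
{
  guint pos = 0;

  *id = p[pos++];
  *type = p[pos++];
  *len = p[pos++];
  *hsize = 2;                   /* type + length bytes, as passed to add_packet () */

  if (*type & 0x80) {           /* length >= 0x100: one extra length byte */
    *type &= 0x7f;
    *len = (*len << 8) | p[pos++];
    *hsize = 3;
  }
  return pos;                   /* total bytes consumed from the payload */
}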
Example #30
0
static GObject *
gstbt_direct_control_binding_constructor (GType type, guint n_construct_params,
    GObjectConstructParam * construct_params)
{
  GstBtDirectControlBinding *self;

  self =
      GSTBT_DIRECT_CONTROL_BINDING (G_OBJECT_CLASS (parent_class)->constructor
      (type, n_construct_params, construct_params));

  if (GST_CONTROL_BINDING_PSPEC (self)) {
    GType type, base;

    base = type = G_PARAM_SPEC_VALUE_TYPE (GST_CONTROL_BINDING_PSPEC (self));
    g_value_init (&self->cur_value, type);
    while ((type = g_type_parent (type)))
      base = type;

    GST_DEBUG ("  using type %s", g_type_name (base));

    /* select mapping function */

#define SET_CONVERT_FUNCTION(type) \
    if (self->ABI.abi.want_absolute) { \
        self->convert_g_value = abs_convert_g_value_to_##type; \
        self->convert_value = abs_convert_value_to_##type; \
    } \
    else { \
        self->convert_g_value = convert_g_value_to_##type; \
        self->convert_value = convert_value_to_##type; \
    } \
    self->byte_size = sizeof (g##type);


    switch (base) {
      case G_TYPE_INT:
        SET_CONVERT_FUNCTION (int);
        break;
      case G_TYPE_UINT:
        SET_CONVERT_FUNCTION (uint);
        break;
      case G_TYPE_LONG:
        SET_CONVERT_FUNCTION (long);
        break;
      case G_TYPE_ULONG:
        SET_CONVERT_FUNCTION (ulong);
        break;
      case G_TYPE_INT64:
        SET_CONVERT_FUNCTION (int64);
        break;
      case G_TYPE_UINT64:
        SET_CONVERT_FUNCTION (uint64);
        break;
      case G_TYPE_FLOAT:
        SET_CONVERT_FUNCTION (float);
        break;
      case G_TYPE_DOUBLE:
        SET_CONVERT_FUNCTION (double);
        break;
      case G_TYPE_BOOLEAN:
        self->convert_g_value = convert_g_value_to_boolean;
        self->convert_value = convert_value_to_boolean;
        self->byte_size = sizeof (gboolean);
        break;
      case G_TYPE_ENUM:
        self->convert_g_value = convert_g_value_to_enum;
        self->convert_value = convert_value_to_enum;
        self->byte_size = sizeof (gint);
        break;
      default:
        GST_WARNING ("incomplete implementation for paramspec type '%s'",
            G_PARAM_SPEC_TYPE_NAME (GST_CONTROL_BINDING_PSPEC (self)));
        GST_CONTROL_BINDING_PSPEC (self) = NULL;
        break;
    }
  }
  return (GObject *) self;
}
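The base-type walk at the top of the constructor reduces the pspec's value type to its fundamental GType before the switch selects a conversion function. An isolated sketch of that walk (GLib's G_TYPE_FUNDAMENTAL macro achieves the same thing directly):

#include <glib-object.h>

static GType
pspec_base_type (GParamSpec *pspec)
{
  GType type, base;

  base = type = G_PARAM_SPEC_VALUE_TYPE (pspec);
  while ((type = g_type_parent (type)))
    base = type;

  return base;                  /* e.g. G_TYPE_ENUM for any concrete enum type */
}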