Code example #1
static void
gst_decklink_video_src_got_frame (GstElement * element,
    IDeckLinkVideoInputFrame * frame, GstDecklinkModeEnum mode,
    GstClockTime capture_time, GstClockTime capture_duration)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (element);

  GST_LOG_OBJECT (self, "Got video frame at %" GST_TIME_FORMAT,
      GST_TIME_ARGS (capture_time));

  gst_decklink_video_src_convert_to_external_clock (self, &capture_time,
      &capture_duration);

  GST_LOG_OBJECT (self, "Actual timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (capture_time));

  g_mutex_lock (&self->lock);
  if (!self->flushing) {
    CaptureFrame *f;

    /* The queue is bounded: once it holds buffer-size frames, drop the
     * oldest before queuing the new one. */
    while (g_queue_get_length (&self->current_frames) >= self->buffer_size) {
      f = (CaptureFrame *) g_queue_pop_head (&self->current_frames);
      GST_WARNING_OBJECT (self, "Dropping old frame at %" GST_TIME_FORMAT,
          GST_TIME_ARGS (f->capture_time));
      capture_frame_free (f);
    }

    f = (CaptureFrame *) g_malloc0 (sizeof (CaptureFrame));
    f->frame = frame;
    f->capture_time = capture_time;
    f->capture_duration = capture_duration;
    f->mode = mode;
    frame->AddRef ();
    g_queue_push_tail (&self->current_frames, f);
    g_cond_signal (&self->cond);
  }
  g_mutex_unlock (&self->lock);
}
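
capture_frame_free itself is not part of this excerpt. For reference, here is a minimal sketch consistent with the fields used above: the IDeckLinkVideoInputFrame reference taken with AddRef () has to be dropped again. Treat the body as an assumption; the actual implementation in the plugin may differ.

/* Hypothetical sketch: drop the DeckLink frame reference taken in
 * gst_decklink_video_src_got_frame () and free the wrapper struct. */
static void
capture_frame_free (CaptureFrame * frame)
{
  frame->frame->Release ();
  g_free (frame);
}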
Code example #2
static GstFlowReturn
gst_decklink_video_src_create (GstPushSrc * bsrc, GstBuffer ** buffer)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  GstFlowReturn flow_ret = GST_FLOW_OK;
  const guint8 *data;
  gsize data_size;
  VideoFrame *vf;
  CaptureFrame *f;
  GstCaps *caps;
  gboolean caps_changed = FALSE;

  g_mutex_lock (&self->lock);
  /* Block until the capture callback has queued a frame or we start
   * flushing */
  while (g_queue_is_empty (&self->current_frames) && !self->flushing) {
    g_cond_wait (&self->cond, &self->lock);
  }

  f = (CaptureFrame *) g_queue_pop_head (&self->current_frames);
  g_mutex_unlock (&self->lock);

  if (self->flushing) {
    if (f)
      capture_frame_free (f);
    GST_DEBUG_OBJECT (self, "Flushing");
    return GST_FLOW_FLUSHING;
  }
  // If we're not flushing, we should have a valid frame from the queue
  g_assert (f != NULL);

  g_mutex_lock (&self->lock);
  if (self->caps_mode != f->mode) {
    if (self->mode == GST_DECKLINK_MODE_AUTO) {
      GST_DEBUG_OBJECT (self, "Mode changed from %d to %d", self->caps_mode,
          f->mode);
      caps_changed = TRUE;
      self->caps_mode = f->mode;
    } else {
      g_mutex_unlock (&self->lock);
      GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
          ("Invalid mode in captured frame"),
          ("Mode set to %d but captured %d", self->caps_mode, f->mode));
      capture_frame_free (f);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }
  if (self->caps_format != f->format) {
    if (self->video_format == GST_DECKLINK_VIDEO_FORMAT_AUTO) {
      GST_DEBUG_OBJECT (self, "Format changed from %d to %d", self->caps_format,
          f->format);
      caps_changed = TRUE;
      self->caps_format = f->format;
    } else {
      g_mutex_unlock (&self->lock);
      GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
          ("Invalid pixel format in captured frame"),
          ("Format set to %d but captured %d", self->caps_format, f->format));
      capture_frame_free (f);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

  g_mutex_unlock (&self->lock);
  if (caps_changed) {
    caps = gst_decklink_mode_get_caps (f->mode, f->format);
    gst_video_info_from_caps (&self->info, caps);
    gst_base_src_set_caps (GST_BASE_SRC_CAST (bsrc), caps);
    gst_element_post_message (GST_ELEMENT_CAST (self),
        gst_message_new_latency (GST_OBJECT_CAST (self)));
    gst_caps_unref (caps);
  }

  f->frame->GetBytes ((gpointer *) & data);
  data_size = self->info.size;

  vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

  *buffer =
      gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,
      (gpointer) data, data_size, 0, data_size, vf,
      (GDestroyNotify) video_frame_free);

  vf->frame = f->frame;
  f->frame->AddRef ();
  vf->input = self->input->input;
  vf->input->AddRef ();

  GST_BUFFER_TIMESTAMP (*buffer) = f->capture_time;
  GST_BUFFER_DURATION (*buffer) = f->capture_duration;
  gst_buffer_add_video_time_code_meta (*buffer, f->tc);

  GST_DEBUG_OBJECT (self,
      "Outputting buffer %p with timestamp %" GST_TIME_FORMAT " and duration %"
      GST_TIME_FORMAT, *buffer, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));

  capture_frame_free (f);

  return flow_ret;
}
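
video_frame_free is installed as the GDestroyNotify for the wrapped buffer memory, so it runs once downstream releases the GstBuffer. A plausible sketch matching the references taken above (the function is not shown in this excerpt, so the body is an assumption):

/* Hypothetical sketch: release the frame and input references taken in
 * gst_decklink_video_src_create () when the GstBuffer is destroyed. */
static void
video_frame_free (VideoFrame * frame)
{
  frame->frame->Release ();
  frame->input->Release ();
  g_free (frame);
}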
Code example #3
static void
gst_decklink_video_src_got_frame (GstElement * element,
    IDeckLinkVideoInputFrame * frame, GstDecklinkModeEnum mode,
    GstClockTime capture_time, GstClockTime capture_duration, guint hours,
    guint minutes, guint seconds, guint frames, BMDTimecodeFlags bflags)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (element);

  GST_LOG_OBJECT (self, "Got video frame at %" GST_TIME_FORMAT,
      GST_TIME_ARGS (capture_time));

  gst_decklink_video_src_convert_to_external_clock (self, &capture_time,
      &capture_duration);

  GST_LOG_OBJECT (self, "Actual timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (capture_time));

  g_mutex_lock (&self->lock);
  if (!self->flushing) {
    CaptureFrame *f;
    const GstDecklinkMode *bmode;
    GstVideoTimeCodeFlags flags = GST_VIDEO_TIME_CODE_FLAGS_NONE;
    guint field_count = 0;

    while (g_queue_get_length (&self->current_frames) >= self->buffer_size) {
      f = (CaptureFrame *) g_queue_pop_head (&self->current_frames);
      GST_WARNING_OBJECT (self, "Dropping old frame at %" GST_TIME_FORMAT,
          GST_TIME_ARGS (f->capture_time));
      capture_frame_free (f);
    }

    f = (CaptureFrame *) g_malloc0 (sizeof (CaptureFrame));
    f->frame = frame;
    f->capture_time = capture_time;
    f->capture_duration = capture_duration;
    f->mode = mode;
    f->format = frame->GetPixelFormat ();
    /* Translate the DeckLink timecode flags into GstVideoTimeCodeFlags */
    bmode = gst_decklink_get_mode (mode);
    if (bmode->interlaced) {
      flags =
          (GstVideoTimeCodeFlags) (flags |
          GST_VIDEO_TIME_CODE_FLAGS_INTERLACED);
      if (bflags & bmdTimecodeFieldMark)
        field_count = 2;
      else
        field_count = 1;
    }
    if (bflags & bmdTimecodeIsDropFrame)
      flags =
          (GstVideoTimeCodeFlags) (flags |
          GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME);
    f->tc =
        gst_video_time_code_new (bmode->fps_n, bmode->fps_d, NULL, flags, hours,
        minutes, seconds, frames, field_count);

    frame->AddRef ();
    g_queue_push_tail (&self->current_frames, f);
    g_cond_signal (&self->cond);
  }
  g_mutex_unlock (&self->lock);
}
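
Compared to example #1, CaptureFrame now also owns a GstVideoTimeCode (f->tc), so the matching capture_frame_free has to free the timecode as well. Again a sketch under that assumption, not the verbatim plugin code:

/* Hypothetical sketch: besides the DeckLink frame reference, free the
 * timecode allocated with gst_video_time_code_new (). */
static void
capture_frame_free (CaptureFrame * frame)
{
  if (frame->tc)
    gst_video_time_code_free (frame->tc);
  frame->frame->Release ();
  g_free (frame);
}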
Code example #4
static GstFlowReturn
gst_decklink_video_src_create (GstPushSrc * bsrc, GstBuffer ** buffer)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  GstFlowReturn flow_ret = GST_FLOW_OK;
  const guint8 *data;
  gsize data_size;
  VideoFrame *vf;
  CaptureFrame *f;
  GstCaps *caps;

  g_mutex_lock (&self->lock);
  while (g_queue_is_empty (&self->current_frames) && !self->flushing) {
    g_cond_wait (&self->cond, &self->lock);
  }

  f = (CaptureFrame *) g_queue_pop_head (&self->current_frames);
  g_mutex_unlock (&self->lock);

  if (self->flushing) {
    if (f)
      capture_frame_free (f);
    GST_DEBUG_OBJECT (self, "Flushing");
    return GST_FLOW_FLUSHING;
  }

  g_mutex_lock (&self->lock);
  if (self->mode == GST_DECKLINK_MODE_AUTO && self->caps_mode != f->mode) {
    GST_DEBUG_OBJECT (self, "Mode changed from %d to %d", self->caps_mode,
        f->mode);
    self->caps_mode = f->mode;
    g_mutex_unlock (&self->lock);
    caps = gst_decklink_mode_get_caps (f->mode);
    gst_video_info_from_caps (&self->info, caps);
    gst_base_src_set_caps (GST_BASE_SRC_CAST (bsrc), caps);
    gst_element_post_message (GST_ELEMENT_CAST (self),
        gst_message_new_latency (GST_OBJECT_CAST (self)));
    gst_caps_unref (caps);
  } else {
    g_mutex_unlock (&self->lock);
  }

  f->frame->GetBytes ((gpointer *) & data);
  data_size = self->info.size;

  vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

  *buffer =
      gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,
      (gpointer) data, data_size, 0, data_size, vf,
      (GDestroyNotify) video_frame_free);

  vf->frame = f->frame;
  f->frame->AddRef ();
  vf->input = self->input->input;
  vf->input->AddRef ();

  GST_BUFFER_TIMESTAMP (*buffer) = f->capture_time;
  GST_BUFFER_DURATION (*buffer) = f->capture_duration;

  GST_DEBUG_OBJECT (self,
      "Outputting buffer %p with timestamp %" GST_TIME_FORMAT " and duration %"
      GST_TIME_FORMAT, *buffer, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));

  capture_frame_free (f);

  return flow_ret;
}
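
Since gst_decklink_video_src_create is the element's GstPushSrc create vfunc, both it and the capture callback above are exercised simply by running the element in a pipeline. A typical invocation, assuming a DeckLink card and gst-plugins-bad are installed (property names may vary between plugin versions):

gst-launch-1.0 decklinkvideosrc mode=auto ! videoconvert ! autovideosink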
Code example #5
File: HttpHandler.cpp  Project: lgpnk/fast_cpp
void HttpHandler::handle_sobel(const char *method, const char *path, const http_options *options, int fd)
{
  media_stream * stream;
  ssize_t        ret_value = 1;
  const char *   my_tmp_option;
  char           my_media_props[100];
  int            skipframes = 0;
  int            sleeptime  = 0;

  /* handle test stuff */
  my_tmp_option = net_http_option(options, "skipframes");
  if (my_tmp_option) {
    skipframes = atoi(my_tmp_option);
    LOG("%s/%s() skipframes=%d\n",
           __FILE__, __FUNCTION__, skipframes);
  }
  
  my_tmp_option = net_http_option(options, "sleeptime");
  if (my_tmp_option) {
    sleeptime = atoi(my_tmp_option);
    LOG("%s/%s() sleeptime=%d\n",
           __FILE__, __FUNCTION__, sleeptime);
  }

  /* handle fps */
  my_tmp_option = net_http_option(options, "fps");
  if (!my_tmp_option) {
    my_tmp_option = "10";
  }
  
  snprintf(my_media_props,
           sizeof(my_media_props),
           "fps=%s",
           my_tmp_option);

  /* Handle resolution. Note that strncat()'s size argument must be the
   * remaining space in the destination, not the total buffer size. */
  my_tmp_option = net_http_option(options, "resolution");
  if (my_tmp_option) {
    strncat(my_media_props,
            "&resolution=",
            sizeof(my_media_props) - strlen(my_media_props) - 1);
    strncat(my_media_props,
            my_tmp_option,
            sizeof(my_media_props) - strlen(my_media_props) - 1);
  } else {
    strncat(my_media_props,
            "&resolution=352x288",
            sizeof(my_media_props) - strlen(my_media_props) - 1);
  }
  /* put in the format */
  strncat(my_media_props,
          "&sdk_format=Y800",
          sizeof(my_media_props) - strlen(my_media_props) - 1);
  /* media_props completed */
  LOG("%s media_props=\"%s\"\n",
         __FUNCTION__,
         my_media_props);

  stream = capture_open_stream(IMAGE_UNCOMPRESSED, my_media_props);

  LOG("%s opening stream=%p\n",
         __FUNCTION__,
         stream);

  if (stream) {
    media_frame *frame = NULL;

    ret_value = net_http_send_headers(fd,
                                      HTTP_TIMEOUT,
                                      txt_HTTP_HEADER_200,
                                      "Content-Type: image/x-portable-graymap\r\n",
                                      txt_CRLF,
                                      NULL);
    if (ret_value < 0) {
      capture_close_stream(stream);
      goto closefd;
    }
    /* Read a new buffer */
    frame = capture_get_frame(stream);

//     /* Skip frames, just to stress the system */
//     while (skipframes && frame) {
//       capture_frame_free(frame);
//       LOG("%s skipframe\n",
//              __FUNCTION__);
// 
//       sleep(sleeptime);
//       frame = capture_get_frame(stream);
//       skipframes--;
//     }

    /* If buf == NULL: nothing could be read */

    /* This could happen if we are using non-blocking read,
     * or in case of an error. As we are not using non-blocking,
     * treat it as an error */
    if (!frame) {
      ret_value = -1;
      LOG("%s/%s frame = NULL\n",
          __FILE__, __FUNCTION__);
      capture_close_stream(stream);
      goto closefd;
    }
    {
      /* set up an image header first */
      int            image_height        = capture_frame_height(frame);
      int            image_width         = capture_frame_width(frame);
      int            image_stride        = capture_frame_stride(frame);
      uint8_t       *data                = (uint8_t *)capture_frame_data(frame);
      uint8_t       *sobel_data          = NULL;

      const size_t   image_header_length = 100;
      char *         my_image_header     = NULL;
      /* worst case four characters ("255;") per serialized pixel */
      size_t         content_length      = image_stride * image_height * 4;
      int            pos                 = 0;

      my_image_header = (char *) malloc(image_header_length + content_length);
      sobel_data = (uint8_t *) malloc(image_stride * image_height);
      if (!my_image_header || !sobel_data) {
        free(my_image_header);
        free(sobel_data);
        capture_frame_free(frame);
        capture_close_stream(stream);
        goto closefd;
      }
//       ret_value = snprintf(my_image_header, image_header_length,
//                            "P5\n"
//                            "# CREATOR: Axis Communications AB\n"
//                            "%d %d\n"
//                            "%d\n"
// 			   "EOH",
//                            image_width, image_height, 255);

      {
        int i;
        int j;

        sobel(data, image_width, image_height, image_stride, sobel_data);

        /* serialize the filtered image as "<value>;" text, one entry per
         * pixel */
        for (i = 0; i < image_height; i++) {
          for (j = 0; j < image_width; j++) {
            pos += sprintf(my_image_header + pos, "%d;",
                           sobel_data[i * image_stride + j]);
          }
        }
        (void)net_http_send_string_utf8(fd, HTTP_TIMEOUT, my_image_header);
      }
      
      free(my_image_header);
      free(sobel_data);
      capture_frame_free(frame);
    }
    LOG("%s closing stream=%p",
        __FUNCTION__,
        stream);

    capture_close_stream(stream);
  } else {
    net_http_send_headers(fd,
                          HTTP_TIMEOUT,
                          txt_HTTP_RESPONSE_500,
                          txt_CRLF,
                          NULL);
  }
  
closefd:
  close(fd);
}
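
The sobel() helper called above is project code from lgpnk/fast_cpp and is not shown on this page. Below is a minimal, self-contained sketch with the same call signature, assuming 8-bit grayscale input (the Y800 format requested above) and a stride-aware 3x3 Sobel magnitude; the actual project implementation may differ.

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical sketch of sobel(): 3x3 Sobel gradient magnitude on an
 * 8-bit grayscale image, honoring the row stride. Border pixels are
 * zeroed; interior pixels get |Gx| + |Gy| clamped to 255. */
static void sobel(const uint8_t *in, int width, int height, int stride,
                  uint8_t *out)
{
  memset(out, 0, (size_t)stride * height);
  for (int y = 1; y < height - 1; y++) {
    for (int x = 1; x < width - 1; x++) {
      const uint8_t *p = in + (size_t)y * stride + x;
      int gx = -p[-stride - 1] - 2 * p[-1]      - p[stride - 1]
               + p[-stride + 1] + 2 * p[1]      + p[stride + 1];
      int gy = -p[-stride - 1] - 2 * p[-stride] - p[-stride + 1]
               + p[stride - 1] + 2 * p[stride]  + p[stride + 1];
      int mag = abs(gx) + abs(gy);  /* cheap stand-in for sqrt(gx^2 + gy^2) */
      out[y * stride + x] = (uint8_t)(mag > 255 ? 255 : mag);
    }
  }
}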