/* CAPS query handler: answers with the peer's preferred caps followed by our
 * own supported caps (probed from the v4l2 device, or the pad template as a
 * fallback).  All other query types are delegated to the parent class. */
static gboolean
gst_v4l2_transform_query (GstBaseTransform * trans, GstPadDirection direction,
    GstQuery * query)
{
  GstV4l2Transform *self = GST_V4L2_TRANSFORM (trans);
  gboolean ret = TRUE;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:{
      GstCaps *filter, *caps = NULL, *result = NULL;
      GstPad *pad, *otherpad;

      gst_query_parse_caps (query, &filter);

      /* pick our own caps for the queried pad; prefer the caps probed from
       * the device over the static template caps */
      if (direction == GST_PAD_SRC) {
        pad = GST_BASE_TRANSFORM_SRC_PAD (trans);
        otherpad = GST_BASE_TRANSFORM_SINK_PAD (trans);
        if (self->probed_srccaps)
          caps = gst_caps_ref (self->probed_srccaps);
      } else {
        pad = GST_BASE_TRANSFORM_SINK_PAD (trans);
        otherpad = GST_BASE_TRANSFORM_SRC_PAD (trans);
        if (self->probed_sinkcaps)
          caps = gst_caps_ref (self->probed_sinkcaps);
      }

      if (!caps)
        caps = gst_pad_get_pad_template_caps (pad);

      /* restrict to the caller-provided filter, keeping the filter's
       * preference order first */
      if (filter) {
        GstCaps *tmp = caps;
        caps = gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
        gst_caps_unref (tmp);
      }

      /* the peer's answer comes first in the result; our own caps are
       * appended as fallback (gst_caps_append takes ownership of caps, so
       * no unref of caps is needed here) */
      result = gst_pad_peer_query_caps (otherpad, caps);
      result = gst_caps_make_writable (result);
      gst_caps_append (result, caps);

      GST_DEBUG_OBJECT (self, "Returning %s caps %" GST_PTR_FORMAT,
          GST_PAD_NAME (pad), result);

      gst_query_set_caps_result (query, result);
      gst_caps_unref (result);
      break;
    }

    default:
      ret = GST_BASE_TRANSFORM_CLASS (parent_class)->query (trans, direction,
          query);
      break;
  }

  return ret;
}
/* Example #2 (0) */
/* accept-caps handler: a caps set is acceptable when it intersects with the
 * configured filter caps AND the peer of the opposite pad accepts it too. */
static gboolean
gst_capsfilter_accept_caps (GstBaseTransform * base,
    GstPadDirection direction, GstCaps * caps)
{
  GstCapsFilter *capsfilter = GST_CAPSFILTER (base);
  GstPad *otherpad;
  GstCaps *our_caps;
  gboolean res;

  /* take a ref under the object lock so a concurrent property change
   * cannot free the caps under us */
  GST_OBJECT_LOCK (capsfilter);
  our_caps = gst_caps_ref (capsfilter->filter_caps);
  GST_OBJECT_UNLOCK (capsfilter);

  res = gst_caps_can_intersect (caps, our_caps);
  GST_DEBUG_OBJECT (capsfilter, "can intersect: %d", res);

  if (res) {
    /* if we can intersect, see if the other end also accepts */
    otherpad = (direction == GST_PAD_SRC) ?
        GST_BASE_TRANSFORM_SINK_PAD (base) : GST_BASE_TRANSFORM_SRC_PAD (base);
    res = gst_pad_peer_query_accept_caps (otherpad, caps);
    GST_DEBUG_OBJECT (capsfilter, "peer accept: %d", res);
  }

  gst_caps_unref (our_caps);

  return res;
}
/* Example #3 (0) */
/* Instance init: clear the display handle and install the custom
 * buffer-allocation function on the sink pad. */
static void
gst_vdp_video_yuv_init (GstVdpVideoYUV * video_yuv, GstVdpVideoYUVClass * klass)
{
  GstPad *sinkpad = GST_BASE_TRANSFORM_SINK_PAD (video_yuv);

  video_yuv->display = NULL;

  gst_pad_set_bufferalloc_function (sinkpad, gst_vdp_video_yuv_buffer_alloc);
}
/* Instance init: build the property cache from the class description and
 * pin both pads to fixed caps. */
static void
gst_frei0r_filter_init (GstFrei0rFilter * self, GstFrei0rFilterClass * klass)
{
  self->property_cache = gst_frei0r_property_cache_init (klass->properties,
      klass->n_properties);

  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (self));
  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (self));
}
/* Example #5 (0) */
/* Instance init: start at the last plane of the ring buffer and use fixed
 * caps on both pads. */
static void
gst_quarktv_init (GstQuarkTV * filter, GstQuarkTVClass * klass)
{
  /* begin with the most recent plane slot */
  filter->planes = PLANES;
  filter->current_plane = PLANES - 1;

  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (filter));
  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (filter));
}
/* Example #6 (0) */
/* Instance init: clear the effect buffers and use fixed caps on both pads. */
static void
gst_shagadelictv_init (GstShagadelicTV * filter, GstShagadelicTVClass * klass)
{
  /* effect tables are allocated lazily once caps are known */
  filter->spiral = NULL;
  filter->ripple = NULL;

  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (filter));
  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (filter));
}
/* Example #7 (0) */
/* Instance init: reset the dice map state and use fixed caps on both pads. */
static void
gst_dicetv_init (GstDiceTV * filter, GstDiceTVClass * klass)
{
  /* map is (re)built when the video size becomes known */
  filter->dicemap = NULL;
  filter->g_map_width = 0;
  filter->g_map_height = 0;
  filter->g_cube_size = 0;
  filter->g_cube_bits = DEFAULT_CUBE_BITS;

  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (filter));
  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (filter));
}
/* Example #8 (0) */
/* Instance init: configure the base transform for in-place, non-passthrough,
 * non-gap-aware operation and set up the key store. */
static void
gst_cenc_decrypt_init (GstCencDecrypt * self)
{
  GstBaseTransform *base = GST_BASE_TRANSFORM (self);

  GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_TRANSFORM_SINK_PAD (self));

  /* decryption happens in place on the input buffer */
  gst_base_transform_set_in_place (base, TRUE);
  gst_base_transform_set_passthrough (base, FALSE);
  /* use the cached cast consistently instead of re-casting self */
  gst_base_transform_set_gap_aware (base, FALSE);

  /* key pairs are owned by the array and released together with it */
  self->keys = g_ptr_array_new_with_free_func (gst_cenc_keypair_destroy);
  self->drm_type = GST_DRM_UNKNOWN;
}
/* Instance init: set default Sobel parameters, hook the sink event handler
 * and disable in-place processing. */
static void
gst_cv_sobel_init (GstCvSobel * filter)
{
  /* defaults for the edge-detection parameters */
  filter->x_order = DEFAULT_X_ORDER;
  filter->y_order = DEFAULT_Y_ORDER;
  filter->aperture_size = DEFAULT_APERTURE_SIZE;
  filter->mask = DEFAULT_MASK;

  gst_pad_set_event_function (GST_BASE_TRANSFORM_SINK_PAD (filter),
      GST_DEBUG_FUNCPTR (gst_cv_sobel_handle_sink_event));

  gst_opencv_video_filter_set_in_place (GST_OPENCV_VIDEO_FILTER_CAST (filter),
      FALSE);
}
/* Example #10 (0) */
/* Read the negotiated sink-pad caps and cache the image size, bayer pattern
 * and expected buffer size on the element.  Returns FALSE when no caps are
 * negotiated or they do not describe a known 8-bit bayer format. */
static gboolean extract_resolution (GstTcamWhitebalance* self)
{
    GstPad* pad  = GST_BASE_TRANSFORM_SINK_PAD(self);
    GstCaps* caps = gst_pad_get_current_caps(pad);
    GstStructure* structure;
    /* initialize: the "format" field may be absent or not a string, in which
     * case fourcc must not be read uninitialized */
    guint fourcc = 0;

    if (caps == NULL)
    {
        GST_ERROR("No caps negotiated on sink pad.");
        return FALSE;
    }

    structure = gst_caps_get_structure (caps, 0);

    /* do not wrap these in g_return_val_if_fail: the macro is compiled out
     * with G_DISABLE_CHECKS and the calls have required side effects */
    if (!gst_structure_get_int(structure, "width", &self->image_size.width) ||
        !gst_structure_get_int(structure, "height", &self->image_size.height))
    {
        GST_ERROR("Caps do not contain width/height.");
        gst_caps_unref(caps);
        return FALSE;
    }

    if (gst_structure_get_field_type(structure, "format") == G_TYPE_STRING)
    {
        const char *string;
        string = gst_structure_get_string (structure, "format");
        fourcc = GST_STR_FOURCC (string);
    }

    /* caps (and the structure borrowed from them) are no longer needed;
     * the original leaked this reference */
    gst_caps_unref(caps);

    if (fourcc == MAKE_FOURCC ('g','r','b','g'))
    {
        self->pattern = GR;
    }
    else if (fourcc == MAKE_FOURCC ('r', 'g', 'g', 'b'))
    {
        self->pattern = RG;
    }
    else if (fourcc == MAKE_FOURCC ('g', 'b', 'r', 'g'))
    {
        self->pattern = GB;
    }
    else if (fourcc == MAKE_FOURCC ('b', 'g', 'g', 'r'))
    {
        self->pattern = BG;
    }
    else
    {
        GST_ERROR("Unable to determine bayer pattern.");
        return FALSE;
    }

    // we only handle bayer 8 bit -> 1 byte
    int bytes_per_pixel = 1;
    self->expected_buffer_size = self->image_size.height * self->image_size.width * bytes_per_pixel;

    self->res = find_source(GST_ELEMENT(self));

    return TRUE;
}
/* accept-caps handler: proxy the decision to the peer of the opposite pad. */
static gboolean
gst_identity_accept_caps (GstBaseTransform * base,
    GstPadDirection direction, GstCaps * caps)
{
  GstPad *otherpad;

  /* Proxy accept-caps */
  otherpad = (direction == GST_PAD_SRC) ?
      GST_BASE_TRANSFORM_SINK_PAD (base) : GST_BASE_TRANSFORM_SRC_PAD (base);

  return gst_pad_peer_query_accept_caps (otherpad, caps);
}
/* Example #12 (0) */
/* Check whether the peer of the pad in the given direction could negotiate
 * the filter caps with dmabuf memory features attached. */
static gboolean
pad_can_dmabuf (GstMsdkVPP * thiz, GstPadDirection direction, GstCaps * filter)
{
  GstBaseTransform *trans = GST_BASE_TRANSFORM (thiz);
  GstCaps *dma_caps = NULL;
  GstCaps *peer_caps = NULL;
  GstPad *pad;
  gboolean ret = FALSE;

  pad = (direction == GST_PAD_SRC) ?
      GST_BASE_TRANSFORM_SRC_PAD (trans) : GST_BASE_TRANSFORM_SINK_PAD (trans);

  /* make a copy of filter caps since we need to alter the structure
   * by adding dmabuf-capsfeatures */
  dma_caps = gst_caps_copy (filter);
  gst_caps_set_features (dma_caps, 0,
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_DMABUF));

  peer_caps = gst_pad_peer_query_caps (pad, dma_caps);
  if (!peer_caps)
    goto done;

  /* nothing to learn when the peer is unconstrained, rejected everything,
   * or just echoed our own caps back */
  if (gst_caps_is_any (peer_caps) || gst_caps_is_empty (peer_caps)
      || peer_caps == dma_caps)
    goto done;

  if (_gst_caps_has_feature (peer_caps, GST_CAPS_FEATURE_MEMORY_DMABUF))
    ret = TRUE;

done:
  if (dma_caps)
    gst_caps_unref (dma_caps);
  if (peer_caps)
    gst_caps_unref (peer_caps);
  return ret;
}
/* Per-frame entry point of the calibration element: while CAPTURING, detect
 * the calibration pattern in the frame, accumulate sample points, run the
 * calibration once enough frames were gathered, and overlay a status text.
 * The frame is modified in place (corner overlay, blink, text). */
void camera_calibrate_run(GstCameraCalibrate *calib, IplImage *img)
{
  /* wrap the IplImage without copying pixel data */
  cv::Mat view = cv::cvarrToMat(img);

  // For camera only take new samples after delay time
  if (calib->mode == CAPTURING) {
    // get_input
    cv::Size imageSize = view.size();

    /* find_pattern
     * FIXME find ways to reduce CPU usage
     * don't do it on all frames ? will it help ? corner display will be affected.
     * in a separate frame?
     * in a separate element that gets composited back into the main stream
     * (video is tee-d into it and can then be decimated, scaled, etc..) */

    std::vector<cv::Point2f> pointBuf;
    bool found;
    int chessBoardFlags = cv::CALIB_CB_ADAPTIVE_THRESH | cv::CALIB_CB_NORMALIZE_IMAGE;

    if (!calib->useFisheye) {
      /* fast check erroneously fails with high distortions like fisheye */
      chessBoardFlags |= cv::CALIB_CB_FAST_CHECK;
    }

    /* Find feature points on the input format */
    switch(calib->calibrationPattern) {
      case GST_CAMERA_CALIBRATION_PATTERN_CHESSBOARD:
        found = cv::findChessboardCorners(view, calib->boardSize, pointBuf, chessBoardFlags);
        break;
      case GST_CAMERA_CALIBRATION_PATTERN_CIRCLES_GRID:
        found = cv::findCirclesGrid(view, calib->boardSize, pointBuf);
        break;
      case GST_CAMERA_CALIBRATION_PATTERN_ASYMMETRIC_CIRCLES_GRID:
        found = cv::findCirclesGrid(view, calib->boardSize, pointBuf, cv::CALIB_CB_ASYMMETRIC_GRID );
        break;
      default:
        found = FALSE;
        break;
    }

    /* when true, the frame is inverted below as visual feedback that a
     * sample was just taken */
    bool blinkOutput = FALSE;
    if (found) {
      /* improve the found corners' coordinate accuracy for chessboard */
      if (calib->calibrationPattern == GST_CAMERA_CALIBRATION_PATTERN_CHESSBOARD && calib->cornerSubPix) {
        /* FIXME findChessboardCorners and alike do a cv::COLOR_BGR2GRAY (and a histogram balance)
         * the color convert should be done once (if needed) and shared
         * FIXME keep viewGray around to avoid reallocating it each time... */
        cv::Mat viewGray;
        cv::cvtColor(view, viewGray, cv::COLOR_BGR2GRAY);
        cv::cornerSubPix(viewGray, pointBuf, cv::Size(11, 11), cv::Size(-1, -1),
            cv::TermCriteria(cv::TermCriteria::EPS + cv::TermCriteria::COUNT, 30, 0.1));
      }

      /* take new samples after delay time */
      if ((calib->mode == CAPTURING) && ((clock() - calib->prevTimestamp) > calib->delay * 1e-3 * CLOCKS_PER_SEC)) {
        calib->imagePoints.push_back(pointBuf);
        calib->prevTimestamp = clock();
        blinkOutput = true;
      }

      /* draw the corners */
      if (calib->showCorners) {
        cv::drawChessboardCorners(view, calib->boardSize, cv::Mat(pointBuf), found);
      }
    }

    /* if got enough frames then stop calibration and show result */
    if (calib->mode == CAPTURING && calib->imagePoints.size() >= (size_t)calib->nrFrames) {

      if (camera_calibrate_calibrate(calib, imageSize, calib->cameraMatrix, calib->distCoeffs, calib->imagePoints)) {
        calib->mode = CALIBRATED;

        GstPad *sink_pad = GST_BASE_TRANSFORM_SINK_PAD (calib);
        GstPad *src_pad = GST_BASE_TRANSFORM_SRC_PAD (calib);
        GstEvent *sink_event;
        GstEvent *src_event;

        /* set settings property */
        g_free (calib->settings);
        calib->settings = camera_serialize_undistort_settings(calib->cameraMatrix, calib->distCoeffs);

        /* create calibrated event and send upstream and downstream */
        sink_event = gst_camera_event_new_calibrated (calib->settings);
        GST_LOG_OBJECT (sink_pad, "Sending upstream event %s.", GST_EVENT_TYPE_NAME (sink_event));
        if (!gst_pad_push_event (sink_pad, sink_event)) {
          GST_WARNING_OBJECT (sink_pad, "Sending upstream event %p (%s) failed.",
              sink_event, GST_EVENT_TYPE_NAME (sink_event));
        }

        src_event = gst_camera_event_new_calibrated (calib->settings);
        GST_LOG_OBJECT (src_pad, "Sending downstream event %s.", GST_EVENT_TYPE_NAME (src_event));
        if (!gst_pad_push_event (src_pad, src_event)) {
          GST_WARNING_OBJECT (src_pad, "Sending downstream event %p (%s) failed.",
              src_event, GST_EVENT_TYPE_NAME (src_event));
        }
      } else {
        /* failed to calibrate, go back to detection mode */
        calib->mode = DETECTION;
      }
    }

    if (calib->mode == CAPTURING && blinkOutput) {
      bitwise_not(view, view);
    }

  }

  /* output text */
  /* FIXME ll additional rendering (text, corners, ...) should be done with
   * cairo or another gst framework.
   * this will relax the conditions on the input format (RBG only at the moment).
   * the calibration itself accepts more formats... */

  std::string msg = (calib->mode == CAPTURING) ? "100/100" :
      (calib->mode == CALIBRATED) ? "Calibrated" : "Waiting...";
  int baseLine = 0;
  /* "100/100" above is only a placeholder used to size the text box; the
   * real counter text is formatted below when CAPTURING */
  cv::Size textSize = cv::getTextSize(msg, 1, 1, 1, &baseLine);
  cv::Point textOrigin(view.cols - 2 * textSize.width - 10, view.rows - 2 * baseLine - 10);

  if (calib->mode == CAPTURING) {
    msg = cv::format("%d/%d", (int)calib->imagePoints.size(), calib->nrFrames);
  }

  const cv::Scalar RED(0,0,255);
  const cv::Scalar GREEN(0,255,0);

  cv::putText(view, msg, textOrigin, 1, 1, calib->mode == CALIBRATED ?  GREEN : RED);
}
/* Example #14 (0) */
/* set_caps handler: validate that input and output caps match (this element
 * is format/size preserving), then allocate all the OpenCV images, matrices
 * and grabcut state for that size, and optionally download and prepare the
 * "ghost" mask image. Returns FALSE when the caps do not match. */
static gboolean gst_gcs_set_caps(GstBaseTransform * btrans, GstCaps * incaps, GstCaps * outcaps) 
{
  GstGcs *gcs = GST_GCS (btrans);
  gint in_width, in_height;
  gint out_width, out_height;
  
  GST_GCS_LOCK (gcs);
  
  /* NOTE(review): the return values of gst_video_format_parse_caps are not
   * checked, and the warning text says "Failed to parse caps" while the
   * condition actually rejects mismatched in/out format or size — presumably
   * this transform is pass-through-only; confirm intent */
  gst_video_format_parse_caps(incaps, &gcs->in_format, &in_width, &in_height);
  gst_video_format_parse_caps(outcaps, &gcs->out_format, &out_width, &out_height);
  if (!(gcs->in_format == gcs->out_format) || 
      !(in_width == out_width && in_height == out_height)) {
    GST_WARNING("Failed to parse caps %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT, incaps, outcaps);
    GST_GCS_UNLOCK (gcs);
    return FALSE;
  }
  
  gcs->width  = in_width;
  gcs->height = in_height;
  
  GST_INFO("Initialising Gcs...");
  gst_pad_set_event_function(GST_BASE_TRANSFORM_SINK_PAD(gcs),  gst_gcs_sink_event);

  const CvSize size = cvSize(gcs->width, gcs->height);
  GST_WARNING (" width %d, height %d", gcs->width, gcs->height);

  //////////////////////////////////////////////////////////////////////////////
  // allocate image structs in all spaces///////////////////////////////////////
  /* RGBA is a header only: its pixel data will point at the incoming frame */
  gcs->pImageRGBA    = cvCreateImageHeader(size, IPL_DEPTH_8U, 4);

  gcs->pImgRGB       = cvCreateImage(size, IPL_DEPTH_8U, 3);
  gcs->pImgScratch   = cvCreateImage(size, IPL_DEPTH_8U, 3);

  gcs->pImgGRAY      = cvCreateImage(size, IPL_DEPTH_8U, 1);
  gcs->pImgGRAY_copy = cvCreateImage(size, IPL_DEPTH_8U, 1);
  gcs->pImgGRAY_diff = cvCreateImage(size, IPL_DEPTH_8U, 1);
  gcs->pImgGRAY_1    = cvCreateImage(size, IPL_DEPTH_8U, 1);
  gcs->pImgGRAY_1copy= cvCreateImage(size, IPL_DEPTH_8U, 1);
  /* previous-frame grayscale buffers start out black */
  cvZero( gcs->pImgGRAY_1 );
  cvZero( gcs->pImgGRAY_1copy );

  /* single-channel headers/planes used when splitting the RGBA input */
  gcs->pImgChA       = cvCreateImageHeader(size, IPL_DEPTH_8U, 1);
  gcs->pImgCh1       = cvCreateImage(size, IPL_DEPTH_8U, 1);
  gcs->pImgCh2       = cvCreateImage(size, IPL_DEPTH_8U, 1);
  gcs->pImgCh3       = cvCreateImage(size, IPL_DEPTH_8U, 1);
  gcs->pImgChX       = cvCreateImage(size, IPL_DEPTH_8U, 1);

  gcs->pImg_skin     = cvCreateImage(size, IPL_DEPTH_8U, 1);

  /* grabcut segmentation state; bbox_prev seeds the initial face box */
  gcs->grabcut_mask   = cvCreateMat( size.height, size.width, CV_8UC1);
  cvZero(gcs->grabcut_mask);
  initialise_grabcut( &(gcs->GC), gcs->pImgRGB, gcs->grabcut_mask );
  gcs->bbox_prev      = cvRect( 60,70, 210, 170 );

  //////////////////////////////////////////////////////////////////////////////
#ifdef KMEANS
  // k-means allocation ////////////////////////////////////////////////////////
  /* one 5-dimensional sample (x, y + colour) per pixel */
  gcs->pImgRGB_kmeans  = cvCreateImage(size, IPL_DEPTH_8U, 3);
  gcs->num_samples     = size.height * size.width;
  gcs->kmeans_points   = cvCreateMat( gcs->num_samples, 5, CV_32FC1);
  gcs->kmeans_clusters = cvCreateMat( gcs->num_samples, 1, CV_32SC1);
#endif //KMEANS

  //////////////////////////////////////////////////////////////////////////////
  // Init ghost file ///////////////////////////////////////////////////////////
  curlMemoryStructGCS  chunk;
  //gchar url[]="file:///home/mcasassa/imco2/mods/gstreamer/cyclops/shaders/mask8.png";
  //gchar url[]="file:///apps/devnfs/mcasassa/mask_320x240.png";
  char curlErrBuf[255];
  
  if( gcs->ghostfilename){
    /* download the ghost PNG; failures are logged but processing continues
     * (NOTE(review): chunk/raw_image are then used unchecked — confirm) */
    if(FALSE == curl_download(gcs->ghostfilename, "", &chunk, curlErrBuf)) {
      GST_ERROR("download failed, err: %s", curlErrBuf);
    }
    
    char errBuf[255];
    if( FALSE == read_png(&chunk, &(gcs->raw_image), &(gcs->info), errBuf)){
      GST_ERROR("png load failed, err: %s", errBuf);
    }

    /* wrap the decoded PNG pixels without copying */
    const CvSize sizegh = cvSize(gcs->info.width, gcs->info.height);
    gcs->cvGhost = cvCreateImageHeader(sizegh, IPL_DEPTH_8U, gcs->info.channels);
    gcs->cvGhost->imageData = (char*)gcs->raw_image;

    /* reduce the ghost to one grayscale channel */
    gcs->cvGhostBw = cvCreateImage(sizegh, IPL_DEPTH_8U, 1);
    if( gcs->info.channels > 1){
      cvCvtColor( gcs->cvGhost, gcs->cvGhostBw, CV_RGB2GRAY );
    }
    else{
      cvCopy(gcs->cvGhost, gcs->cvGhostBw, NULL);
    }

    /* scale the ghost mask to the negotiated frame size */
    gcs->cvGhostBwResized = cvCreateImage(size, IPL_DEPTH_8U, 1);
    cvResize( gcs->cvGhostBw, gcs->cvGhostBwResized, CV_INTER_LINEAR);

    gcs->cvGhostBwAffined = cvCreateImage(size, IPL_DEPTH_8U, 1);
  }

  GST_INFO(" Collected caps, image in size (%dx%d), ghost size (%dx%d) %dch",gcs->width, gcs->height,
            gcs->info.width, gcs->info.height, gcs->info.channels );

  // 3 points of the face bbox associated to the ghost.
  gcs->srcTri[0].x = 145;
  gcs->srcTri[0].y = 74;
  gcs->srcTri[1].x = 145;
  gcs->srcTri[1].y = 74+39;
  gcs->srcTri[2].x = 145+34;
  gcs->srcTri[2].y = 74+39;

  /* 2x3 affine transform used to warp the ghost onto detected faces */
  gcs->warp_mat = cvCreateMat(2,3,CV_32FC1);


  gcs->numframes = 0;

  GST_INFO("Gcs initialized.");
  
  GST_GCS_UNLOCK (gcs);
  
  return TRUE;
}
/* Example #15 (0) */
/*
  Given the pad in this direction and the given caps, what caps are allowed on
  the other pad in this element ?

  Sink direction: strip the cenc protection fields and restore the original
  media type.  Src direction: wrap each structure in application/x-cenc once
  per supported protection system.  The optional filter caps are intersected
  into the result at the end.
*/
static GstCaps *
gst_cenc_decrypt_transform_caps (GstBaseTransform * base,
    GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
  GstCaps *res = NULL;
  /* NOTE(review): gint i compared against guint gst_caps_get_size() below —
   * harmless for realistic caps sizes but mixes signedness */
  gint i, j;

  g_return_val_if_fail (direction != GST_PAD_UNKNOWN, NULL);

  GST_DEBUG_OBJECT (base, "direction: %s   caps: %" GST_PTR_FORMAT "   filter:"
      " %" GST_PTR_FORMAT, (direction == GST_PAD_SRC) ? "Src" : "Sink",
      caps, filter);

  /* ANY on the src side maps to everything our sink template accepts */
  if(direction == GST_PAD_SRC && gst_caps_is_any (caps)){
    res = gst_pad_get_pad_template_caps (GST_BASE_TRANSFORM_SINK_PAD (base));
    goto filter;
  }
  
  res = gst_caps_new_empty ();

  for (i = 0; i < gst_caps_get_size (caps); ++i) {
    GstStructure *in = gst_caps_get_structure (caps, i);
    GstStructure *out = NULL;

    if (direction == GST_PAD_SINK) {
      gint n_fields;

      /* structures without original-media-type are not cenc-protected;
       * nothing we can decrypt, skip them */
      if (!gst_structure_has_field (in, "original-media-type"))
        continue;

      out = gst_structure_copy (in);
      n_fields = gst_structure_n_fields (in);

      /* restore the pre-encryption media type */
      gst_structure_set_name (out,
          gst_structure_get_string (out, "original-media-type"));

      /* filter out the DRM related fields from the down-stream caps */
      /* iterate backwards so removals don't shift the indices still to
       * be visited */
      for(j=n_fields-1; j>=0; --j){
          const gchar *field_name;

          field_name = gst_structure_nth_field_name (in, j);

          if( g_str_has_prefix(field_name, "protection-system") ||
              g_str_has_prefix(field_name, "original-media-type") ){
              gst_structure_remove_field (out, field_name);
          }
      }
      gst_cenc_decrypt_append_if_not_duplicate(res, out);
    } else {                    /* GST_PAD_SRC */
      gint n_fields;
      GstStructure *tmp = NULL;
      guint p;
      tmp = gst_structure_copy (in);
      gst_cenc_remove_codec_fields (tmp);
      /* emit one encrypted variant per protection system we support */
      for(p=0; gst_cenc_decrypt_protection_ids[p]; ++p){
        /* filter out the audio/video related fields from the down-stream 
           caps, because they are not relevant to the input caps of this 
           element and they can cause caps negotiation failures with 
           adaptive bitrate streams */
        out = gst_structure_copy (tmp);
        gst_structure_set (out,
                           "protection-system", G_TYPE_STRING, gst_cenc_decrypt_protection_ids[p],
                           "original-media-type", G_TYPE_STRING, gst_structure_get_name (in),
                           NULL);
        gst_structure_set_name (out, "application/x-cenc");
        gst_cenc_decrypt_append_if_not_duplicate(res, out);
      }
      gst_structure_free (tmp);
    }
  }
  /* nothing matched upstream: advertise ANY so negotiation can proceed */
  if(direction == GST_PAD_SINK && gst_caps_get_size (res)==0){
    gst_caps_unref (res);
    res = gst_caps_new_any ();
  }
 filter:
  if (filter) {
    GstCaps *intersection;

    GST_DEBUG_OBJECT (base, "Using filter caps %" GST_PTR_FORMAT, filter);
    intersection =
      gst_caps_intersect_full (res, filter, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (res);
    res = intersection;
  }

  GST_DEBUG_OBJECT (base, "returning %" GST_PTR_FORMAT, res);
  return res;
}
/* Example #16 (0) */
/* Query handler: answers LATENCY queries by adding the (up to) one frame of
 * latency this element introduces to the peer's reported latency; all other
 * queries — and LATENCY cases it cannot answer — go to the parent class. */
static gboolean
gst_video_rate_query (GstBaseTransform * trans, GstPadDirection direction,
    GstQuery * query)
{
  GstVideoRate *videorate = GST_VIDEO_RATE (trans);
  gboolean res = FALSE;
  GstPad *otherpad;

  otherpad = (direction == GST_PAD_SRC) ?
      GST_BASE_TRANSFORM_SINK_PAD (trans) : GST_BASE_TRANSFORM_SRC_PAD (trans);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:
    {
      GstClockTime min, max;
      gboolean live;
      guint64 latency;
      guint64 avg_period;
      GstPad *peer;

      GST_OBJECT_LOCK (videorate);
      avg_period = videorate->average_period_set;
      GST_OBJECT_UNLOCK (videorate);

      if (avg_period == 0 && (peer = gst_pad_get_peer (otherpad))) {
        if ((res = gst_pad_query (peer, query))) {
          gst_query_parse_latency (query, &live, &min, &max);

          GST_DEBUG_OBJECT (videorate, "Peer latency: min %"
              GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
              GST_TIME_ARGS (min), GST_TIME_ARGS (max));

          if (videorate->from_rate_numerator != 0) {
            /* add latency. We don't really know since we hold on to the frames
             * until we get a next frame, which can be anything. We assume
             * however that this will take from_rate time. */
            latency = gst_util_uint64_scale (GST_SECOND,
                videorate->from_rate_denominator,
                videorate->from_rate_numerator);
          } else {
            /* no input framerate, we don't know */
            latency = 0;
          }

          GST_DEBUG_OBJECT (videorate, "Our latency: %"
              GST_TIME_FORMAT, GST_TIME_ARGS (latency));

          min += latency;
          /* use the GstClockTime validity idiom rather than the magic -1:
           * an unset maximum latency must stay unset */
          if (GST_CLOCK_TIME_IS_VALID (max))
            max += latency;

          GST_DEBUG_OBJECT (videorate, "Calculated total latency : min %"
              GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
              GST_TIME_ARGS (min), GST_TIME_ARGS (max));

          gst_query_set_latency (query, live, min, max);
        }
        gst_object_unref (peer);
        break;
      }
      /* Simple fallthrough if we don't have a latency or not a peer that we
       * can't ask about its latency yet.. */
    }
    default:
      res =
          GST_BASE_TRANSFORM_CLASS (parent_class)->query (trans, direction,
          query);
      break;
  }

  return res;
}
/* Example #17 (0) */
/* Property setter (GStreamer 0.10 era): installs new filter caps and, when a
 * format was already negotiated, computes a renegotiation suggestion for the
 * base transform by merging the new caps into the negotiated ones. */
static void
gst_capsfilter_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstCapsFilter *capsfilter = GST_CAPSFILTER (object);

  switch (prop_id) {
    case PROP_FILTER_CAPS:{
      GstCaps *new_caps;
      GstCaps *old_caps, *suggest, *nego;
      const GstCaps *new_caps_val = gst_value_get_caps (value);

      /* a NULL property value means "no restriction" */
      if (new_caps_val == NULL) {
        new_caps = gst_caps_new_any ();
      } else {
        new_caps = (GstCaps *) new_caps_val;
        gst_caps_ref (new_caps);
      }

      /* swap in the new caps under the object lock; unref the old ones
       * outside the lock */
      GST_OBJECT_LOCK (capsfilter);
      old_caps = capsfilter->filter_caps;
      capsfilter->filter_caps = new_caps;
      GST_OBJECT_UNLOCK (capsfilter);

      gst_caps_unref (old_caps);

      GST_DEBUG_OBJECT (capsfilter, "set new caps %" GST_PTR_FORMAT, new_caps);

      /* filter the currently negotiated format against the new caps */
      /* pad lock held while peeking at the pad's current caps (0.10 API) */
      GST_OBJECT_LOCK (GST_BASE_TRANSFORM_SINK_PAD (object));
      nego = GST_PAD_CAPS (GST_BASE_TRANSFORM_SINK_PAD (object));
      if (nego) {
        GST_DEBUG_OBJECT (capsfilter, "we had negotiated caps %" GST_PTR_FORMAT,
            nego);

        if (G_UNLIKELY (gst_caps_is_any (new_caps))) {
          GST_DEBUG_OBJECT (capsfilter, "not settings any suggestion");

          suggest = NULL;
        } else {
          GstStructure *s1, *s2;

          /* first check if the name is the same */
          s1 = gst_caps_get_structure (nego, 0);
          s2 = gst_caps_get_structure (new_caps, 0);

          if (gst_structure_get_name_id (s1) == gst_structure_get_name_id (s2)) {
            /* same name, copy all fields from the new caps into the previously
             * negotiated caps */
            suggest = gst_caps_copy (nego);
            s1 = gst_caps_get_structure (suggest, 0);
            gst_structure_foreach (s2, (GstStructureForeachFunc) copy_func, s1);
            GST_DEBUG_OBJECT (capsfilter, "copied structure fields");
          } else {
            GST_DEBUG_OBJECT (capsfilter, "different structure names");
            /* different names, we can only suggest the complete caps */
            suggest = gst_caps_copy (new_caps);
          }
        }
      } else {
        GST_DEBUG_OBJECT (capsfilter, "no negotiated caps");
        /* no previous caps, the getcaps function will be used to find suitable
         * caps */
        suggest = NULL;
      }
      GST_OBJECT_UNLOCK (GST_BASE_TRANSFORM_SINK_PAD (object));

      GST_DEBUG_OBJECT (capsfilter, "suggesting new caps %" GST_PTR_FORMAT,
          suggest);
      /* a NULL suggestion is valid and clears any previous suggestion */
      gst_base_transform_suggest (GST_BASE_TRANSFORM (object), suggest, 0);
      if (suggest)
        gst_caps_unref (suggest);

      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}