Example No. 1
static inline GstBuffer *
gst_y4m_encode_get_stream_header (GstY4mEncode * filter)
{
  gpointer header;
  GstBuffer *buf;
  gchar interlaced;

  interlaced = 'p';

  if (filter->interlaced && filter->top_field_first)
    interlaced = 't';
  else if (filter->interlaced)
    interlaced = 'b';

  header = g_strdup_printf ("YUV4MPEG2 C%s W%d H%d I%c F%d:%d A%d:%d\n",
      filter->colorspace, filter->width, filter->height, interlaced,
      filter->fps_num, filter->fps_den, filter->par_num, filter->par_den);

  buf = gst_buffer_new ();
  gst_buffer_set_data (buf, header, strlen (header));
  /* so it gets free'd when needed */
  GST_BUFFER_MALLOCDATA (buf) = header;

  return buf;
}
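Examples 1 and 7 rely on the GStreamer 0.10 ownership idiom: gst_buffer_set_data() only points GST_BUFFER_DATA/GST_BUFFER_SIZE at the bytes, while assigning GST_BUFFER_MALLOCDATA additionally hands the allocation to the buffer so it is g_free()'d when the buffer is finalized. A minimal sketch of that idiom (the helper name is ours, not from the source):

static GstBuffer *
wrap_malloced_data (gpointer data, guint size)
{
  GstBuffer *buf = gst_buffer_new ();

  /* point the buffer at the bytes... */
  gst_buffer_set_data (buf, data, size);
  /* ...and transfer ownership: finalizing the buffer g_free()s them */
  GST_BUFFER_MALLOCDATA (buf) = data;
  return buf;
}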
Example No. 2
/* chain function
 * this function does the actual processing
 */
static GstFlowReturn
gst_edgedetect_chain (GstPad * pad, GstBuffer * buf)
{
  Gstedgedetect *filter;

  filter = GST_EDGEDETECT (GST_OBJECT_PARENT (pad));

  filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf);

  cvCvtColor (filter->cvImage, filter->cvGray, CV_RGB2GRAY);
  cvSmooth (filter->cvGray, filter->cvEdge, CV_BLUR, 3, 3, 0, 0);
  cvNot (filter->cvGray, filter->cvEdge);
  cvCanny (filter->cvGray, filter->cvEdge, filter->threshold1,
      filter->threshold2, filter->aperture);

  cvZero (filter->cvCEdge);
  if (filter->mask) {
    cvCopy (filter->cvImage, filter->cvCEdge, filter->cvEdge);
  } else {
    cvCvtColor (filter->cvEdge, filter->cvCEdge, CV_GRAY2RGB);
  }
  gst_buffer_set_data (buf, filter->cvCEdge->imageData,
      filter->cvCEdge->imageSize);

  return gst_pad_push (filter->srcpad, buf);
}
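Note that gst_buffer_set_data() here merely repoints the incoming buffer at cvCEdge->imageData; no ownership changes hands, so the filter's IplImage must outlive the pushed buffer. Also, as written, the cvSmooth() and cvNot() results are discarded: each call writes cvEdge from cvGray, and cvCanny() overwrites cvEdge last.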
Example No. 3
gboolean airplayRendererFeedData(GstElement *appsrc, guint size, gpointer object) {
	AirplayRenderer *self = AIRPLAY_RENDERER(object);
	AirplayRendererPrivate *priv = AIRPLAY_RENDERER_GET_PRIVATE(self);

g_print("AirplayMediaPlayer: feed data %d\n", priv->seq);

	AudioPkg *pkg = g_async_queue_try_pop(priv->bufferList);
	while(pkg == NULL) {
		if(!priv->isRunning) return FALSE;
		
		pkg = g_hash_table_lookup(priv->resendTable, &priv->seq);
g_print("Look up: %d %d\n", priv->seq, pkg == NULL);
		if(pkg) {
			priv->seq = (priv->seq + 1) % 65536;
			break;
		}
		
		usleep(50000);
		sched_yield();
		pkg = g_async_queue_try_pop(priv->bufferList);
g_print("Sleep: %d\n", priv->seq);
	}
	
	GstBuffer *buffer = gst_buffer_new();
	gst_buffer_set_data(buffer, pkg->data, pkg->length);
	GST_BUFFER_SIZE(buffer) = pkg->length;
	GST_BUFFER_MALLOCDATA(buffer) = pkg->data;
	GST_BUFFER_DATA(buffer) = GST_BUFFER_MALLOCDATA(buffer);

	gst_app_src_push_buffer((GstAppSrc *)appsrc, buffer);
	g_free(pkg);
	// gst_buffer_unref(buffer);
		
	return TRUE;
}
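Ownership is split three ways here: gst_buffer_set_data() already fills GST_BUFFER_DATA/GST_BUFFER_SIZE (the two assignments repeating this are redundant), GST_BUFFER_MALLOCDATA makes the buffer g_free() pkg->data on finalize, gst_app_src_push_buffer() takes ownership of the buffer itself (hence the commented-out unref), and only the AudioPkg wrapper is freed explicitly. One caveat: in the resend-table branch the packet presumably still belongs to priv->resendTable, so freeing it here is safe only if the table has no free function of its own.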
Example No. 4
/*****************************************************************************
 * gst_tiprepencbuf_prepare_output_buffer
 *    Function is used to allocate output buffer
 *****************************************************************************/
static GstFlowReturn
gst_tiprepencbuf_prepare_output_buffer(GstBaseTransform * trans,
    GstBuffer * inBuf, gint size, GstCaps * caps, GstBuffer ** outBuf)
{
    GstTIPrepEncBuf *prepencbuf = GST_TIPREPENCBUF(trans);
    Buffer_Handle    hOutBuf;

    GST_LOG("begin prepare output buffer\n");

    /* Get free buffer from buftab */
    if (!(hOutBuf = gst_tidmaibuftab_get_buf(prepencbuf->hOutBufTab))) {
        GST_ELEMENT_ERROR(prepencbuf, RESOURCE, READ,
            ("failed to get free buffer\n"), (NULL));
        return GST_FLOW_ERROR;
    }

    /* Create a DMAI transport buffer object to carry a DMAI buffer to
     * the source pad.  The transport buffer knows how to release the
     * buffer for re-use in this element when the source pad calls
     * gst_buffer_unref().
     */
    GST_LOG("creating dmai transport buffer\n");
    *outBuf = gst_tidmaibuffertransport_new(hOutBuf, prepencbuf->hOutBufTab, NULL, NULL);
    gst_buffer_set_data(*outBuf, (guint8 *) Buffer_getUserPtr(hOutBuf),
        Buffer_getSize(hOutBuf));
    gst_buffer_set_caps(*outBuf, GST_PAD_CAPS(trans->srcpad));

    GST_LOG("end prepare output buffer\n");

    return GST_FLOW_OK;
}
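The transport buffer returned by gst_tidmaibuffertransport_new() starts out empty, so the gst_buffer_set_data() call is what actually exposes the DMAI memory (Buffer_getUserPtr()/Buffer_getSize()) to downstream elements; unref'ing the pushed buffer then returns the DMAI buffer to the buftab, as the comment above describes.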
Example No. 5
static void
write_metadata (GstWavEnc * wavenc)
{
  GString *info_str;
  GList *props;
  int total = 4;
  gboolean need_to_write = FALSE;

  info_str = g_string_new ("LIST    INFO");

  for (props = wavenc->metadata->properties->properties; props;
      props = props->next) {
    GstPropsEntry *entry = props->data;
    const char *name;
    guint32 id;

    name = gst_props_entry_get_name (entry);
    id = get_id_from_name (name);
    if (id != 0) {
      const char *text;
      char *tmp;
      int len, req, i;

      need_to_write = TRUE;     /* We've got at least one entry */

      gst_props_entry_get_string (entry, &text);
      len = strlen (text) + 1;  /* The length in the file includes the \0 */

      tmp = g_strdup_printf ("%" GST_FOURCC_FORMAT "%d%s", GST_FOURCC_ARGS (id),
          GUINT32_TO_LE (len), text);
      g_string_append (info_str, tmp);
      g_free (tmp);

      /* Check that we end on an even boundary */
      req = ((len + 8) + 1) & ~1;
      for (i = 0; i < req - len; i++) {
        g_string_append_printf (info_str, "%c", 0);
      }

      total += req;
    }
  }

  if (need_to_write) {
    GstBuffer *buf;

    /* Now we've got all the strings together, we can write our length in */
    info_str->str[4] = GUINT32_TO_LE (total);

    buf = gst_buffer_new ();
    gst_buffer_set_data (buf, info_str->str, info_str->len);

    gst_pad_push (wavenc->srcpad, GST_DATA (buf));
    g_string_free (info_str, FALSE);
  }
}
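A caveat that applies equally to Examples 6 and 16: assigning GUINT32_TO_LE (total) to info_str->str[4] stores a single char, so only the low byte of the chunk length survives; the code works only for lengths below 256 on little-endian hosts. (Example 6 additionally appends point_string->str with g_string_append(), which stops at the first embedded NUL; g_string_append_len() would be needed.) A safer sketch (the helper name is ours; memcpy() is from <string.h>):

static void
put_le32 (GString * s, gsize offset, guint32 value)
{
  guint32 le = GUINT32_TO_LE (value);

  memcpy (s->str + offset, &le, sizeof (le));
}

With this, the length write above becomes put_le32 (info_str, 4, total);.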
Example No. 6
static void
write_cues (GstWavEnc * wavenc)
{
  GString *cue_string, *point_string;
  GstBuffer *buf;
  GList *cue_list, *c;
  int num_cues, total = 4;

  if (gst_props_get (wavenc->metadata->properties,
          "cues", &cue_list, NULL) == FALSE) {
    /* No cues, move along please, nothing to see here */
    return;
  }

  /* Space for 'cue ', chunk size and number of cuepoints */
  cue_string = g_string_new ("cue         ");
#define CUEPOINT_SIZE 24
  point_string = g_string_sized_new (CUEPOINT_SIZE);

  for (c = cue_list, num_cues = 0; c; c = c->next, num_cues++) {
    GstCaps *cue_caps = c->data;
    guint32 pos;

    gst_props_get (cue_caps->properties, "position", &pos, NULL);

    point_string->str[0] = GUINT32_TO_LE (num_cues + 1);
    point_string->str[4] = GUINT32_TO_LE (0);
    /* Fixme: There is probably a macro for this */
    point_string->str[8] = 'd';
    point_string->str[9] = 'a';
    point_string->str[10] = 't';
    point_string->str[11] = 'a';
    point_string->str[12] = GUINT32_TO_LE (0);
    point_string->str[16] = GUINT32_TO_LE (0);
    point_string->str[20] = GUINT32_TO_LE (pos);

    total += CUEPOINT_SIZE;
  }

  /* Set the length and chunk size */
  cue_string->str[4] = GUINT32_TO_LE (total);
  cue_string->str[8] = GUINT32_TO_LE (num_cues);
  /* Stick the cue points on the end */
  g_string_append (cue_string, point_string->str);
  g_string_free (point_string, TRUE);

  buf = gst_buffer_new ();
  gst_buffer_set_data (buf, cue_string->str, cue_string->len);

  gst_pad_push (wavenc->srcpad, GST_DATA (buf));
  g_string_free (cue_string, FALSE);
}
Example No. 7
static inline GstBuffer *
gst_y4m_encode_get_frame_header (GstY4mEncode * filter)
{
  gpointer header;
  GstBuffer *buf;

  header = g_strdup_printf ("FRAME\n");

  buf = gst_buffer_new ();
  gst_buffer_set_data (buf, header, strlen (header));
  /* so it gets free'd when needed */
  GST_BUFFER_MALLOCDATA (buf) = header;

  return buf;
}
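In the YUV4MPEG2 format the stream header of Example 1 is written once, and each frame is then preceded by this "FRAME\n" marker; the buffer uses the same GST_BUFFER_MALLOCDATA ownership idiom sketched after Example 1.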
Example No. 8
static GList *
_make_buffers_out (GList * buffer_out, guint8 * test_data, gsize test_data_size)
{
  GstBuffer *buffer;
  GstCaps *caps;

  buffer = gst_buffer_new ();
  gst_buffer_set_data (buffer, test_data, test_data_size);

  caps = gst_caps_new_simple ("image/jpeg", "parsed", G_TYPE_BOOLEAN, TRUE,
      "framerate", GST_TYPE_FRACTION, 1, 1, NULL);
  gst_buffer_set_caps (buffer, caps);
  gst_caps_unref (caps);

  buffer_out = g_list_append (buffer_out, buffer);
  return buffer_out;
}
Example No. 9
static GList *
_make_buffers_in (GList * buffer_in, guint8 * test_data, gsize test_data_size)
{
  GstBuffer *buffer;
  GstCaps *caps;
  gsize i;

  for (i = 0; i < test_data_size; i++) {
    buffer = gst_buffer_new ();
    gst_buffer_set_data (buffer, test_data + i, 1);
    caps = gst_caps_new_simple ("image/jpeg", "parsed", G_TYPE_BOOLEAN, FALSE,
        NULL);
    gst_buffer_set_caps (buffer, caps);
    gst_caps_unref (caps);
    buffer_in = g_list_append (buffer_in, buffer);
  }
  return buffer_in;
}
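These two helpers read like fixtures for a gst-check unit test: the input is fed one byte per buffer (parsed=false) and the expected output is the whole blob (parsed=true). A hypothetical harness, assuming the element under test is a JPEG parser registered as "jpegparse":

GList *buffer_in = NULL, *buffer_out = NULL;

buffer_in = _make_buffers_in (buffer_in, test_data, test_data_size);
buffer_out = _make_buffers_out (buffer_out, test_data, test_data_size);
/* gst-check helper from <gst/check/gstcheck.h> */
gst_check_element_push_buffer_list ("jpegparse", buffer_in, buffer_out,
    GST_FLOW_OK);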
Example No. 10
static void
cb_need_data (GstElement *appsrc,
			  guint       unused_size,
			  gpointer    user_data)
{
	static gboolean white = FALSE;
	static GstClockTime timestamp = 0;
	GstBuffer *buffer;
	guint size;
	GstFlowReturn ret;
	int width;
	int height;
	if (NULL == getenv("VIT_WIDTH")) {
		width = IMWIDTH;
	}
	else {
		width = atoi(getenv("VIT_WIDTH"));
	}

	if (NULL == getenv("VIT_HEIGHT")) {
		height = IMHEIGHT;
	}
	else {
		height = atoi(getenv("VIT_HEIGHT"));
	}

	size = width * height * 1;

	buffer = gst_buffer_new();
	gst_buffer_set_data(buffer, _T_framebuffer, size);

	GST_BUFFER_TIMESTAMP(buffer) = timestamp;
	GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 25);

	timestamp += GST_BUFFER_DURATION(buffer);

	g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
	gst_buffer_unref(buffer);

	if (ret != GST_FLOW_OK) {
		/* something wrong, stop pushing */
		g_main_loop_quit(loop);
	}
}
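A hedged wiring sketch for a callback like this (the appsrc handle and the property values are assumptions, not from the source): the callback is attached to appsrc's "need-data" signal, and the source is switched to time format so the timestamps set above are honored.

g_object_set (G_OBJECT (appsrc),
    "stream-type", 0 /* GST_APP_STREAM_TYPE_STREAM */,
    "format", GST_FORMAT_TIME, NULL);
g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data), NULL);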
Example No. 11
/* ============================================================================
 * @Function: 	 cb_need_data
 * @Description: This callback feeds the GST pipeline with video frames.
 * ============================================================================
 */
static void cb_need_data (GstElement *appsrc, guint unused_size, gpointer user_data)
{
	static GstClockTime timestamp = 0;
	GstFlowReturn ret;

	sem_wait(&gdata_ready);
	gst_buffer_set_data(databuffer, inputdatabuffer, datasize);

	GST_BUFFER_TIMESTAMP (databuffer) = timestamp;
	GST_BUFFER_DURATION (databuffer) = gst_util_uint64_scale_int (1, GST_SECOND, arguments.gfps);

	timestamp += GST_BUFFER_DURATION (databuffer);

	g_signal_emit_by_name (appsrc, "push-buffer", databuffer, &ret);

	if (ret != GST_FLOW_OK) {
		/* something wrong, stop pushing */
		g_main_loop_quit (loop);
	}
	sem_post(&gdata_wait);
}
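Because the single global databuffer is reused on every callback, the gdata_ready/gdata_wait semaphore pair has to guarantee that the producer does not overwrite inputdatabuffer while the previous push is still in flight; gst_buffer_set_data() copies nothing, it only repoints the buffer at the shared memory.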
Example No. 12
static GstFlowReturn
gst_nuv_demux_read_bytes (GstNuvDemux * nuv, guint64 size, gboolean move,
    GstBuffer ** buffer)
{
  GstFlowReturn ret = GST_FLOW_OK;

  if (size == 0) {
    *buffer = gst_buffer_new ();
    return ret;
  }

  if (nuv->mode == 0) {
    ret = gst_pad_pull_range (nuv->sinkpad, nuv->offset, size, buffer);
    if (ret == GST_FLOW_OK) {
      if (move) {
        nuv->offset += size;
      }
    } else if (ret == GST_FLOW_UNEXPECTED) {
      /* got eos */
      gst_nuv_demux_send_eos (nuv);
      return GST_FLOW_WRONG_STATE;
    }
  } else {
    if (gst_adapter_available (nuv->adapter) < size)
      return GST_FLOW_ERROR_NO_DATA;

    if (move) {
      *buffer = gst_adapter_take_buffer (nuv->adapter, size);
    } else {
      guint8 *data = NULL;

      data = (guint8 *) gst_adapter_peek (nuv->adapter, size);
      *buffer = gst_buffer_new ();
      gst_buffer_set_data (*buffer, data, size);
    }
  }
  return ret;
}
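Two modes are handled: in pull mode (mode == 0) the buffer comes from gst_pad_pull_range(), while in push mode the data sits in a GstAdapter. There, gst_adapter_take_buffer() hands out an owned buffer when the offset moves, whereas the gst_adapter_peek() plus gst_buffer_set_data() path wraps adapter-internal memory that stays valid only until the adapter is flushed, which is presumably why it is reserved for the non-moving case.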
Example No. 13
static GstFlowReturn
gst_objectsinteraction_chain(GstPad *pad, GstBuffer *buf)
{
    GstObjectsInteraction *filter;

    // sanity checks
    g_return_val_if_fail(pad != NULL, GST_FLOW_ERROR);
    g_return_val_if_fail(buf != NULL, GST_FLOW_ERROR);

    filter = GST_OBJECTSINTERACTION(GST_OBJECT_PARENT(pad));
    filter->image->imageData = (char*) GST_BUFFER_DATA(buf);

    // Process all objects
    if ((filter->object_in_array != NULL) && (filter->object_in_array->len > 0)) {
        // Find pairs of intersecting rects
        guint i, j;

        for (i = 0; i < filter->object_in_array->len; ++i) {
            for (j = i + 1; j < filter->object_in_array->len; ++j) {
                InstanceObjectIn obj_a, obj_b;
                gint             interception;

                obj_a = g_array_index(filter->object_in_array, InstanceObjectIn, i);
                obj_b = g_array_index(filter->object_in_array, InstanceObjectIn, j);
                interception = 100 * MIN(rectIntercept(&obj_a.rect, &obj_b.rect), rectIntercept(&obj_b.rect, &obj_a.rect));

                if (interception) {
                    GstEvent     *event;
                    GstMessage   *message;
                    GstStructure *structure;
                    CvRect        rect;

                    // Interception percentage
                    rect = rectIntersection(&obj_a.rect, &obj_b.rect);

                    if (filter->verbose)
                        GST_INFO_OBJECT(filter, "INTERCEPTION %i%%: rect_a(%i, %i, %i, %i), rect_b(%i, %i, %i, %i), rect_intercept(%i, %i, %i, %i)\n",
                                        interception,
                                        obj_a.rect.x, obj_a.rect.y, obj_a.rect.width, obj_a.rect.height,
                                        obj_b.rect.x, obj_b.rect.y, obj_b.rect.width, obj_b.rect.height,
                                        rect.x, rect.y, rect.width, rect.height);

                    // Draw intercept rect and label
                    if (filter->display) {
                        char *label;
                        float font_scaling;

                        cvRectangle(filter->image,
                                    cvPoint(rect.x, rect.y),
                                    cvPoint(rect.x + rect.width, rect.y + rect.height),
                                    PRINT_COLOR, -1, 8, 0);
                        font_scaling = ((filter->image->width * filter->image->height) > (320 * 240)) ? 0.5f : 0.3f;
                        label = g_strdup_printf("%i+%i (%i%%)", obj_a.id, obj_b.id, interception);
                        printText(filter->image, cvPoint(rect.x + (rect.width / 2), rect.y + (rect.height / 2)), label, PRINT_COLOR, font_scaling, 1);
                        g_free(label);
                    }

                    // Send downstream event and bus message with the rect info
                    structure = gst_structure_new("object-interaction",
                                                  "id_a",       G_TYPE_UINT,   obj_a.id,
                                                  "id_b",       G_TYPE_UINT,   obj_b.id,
                                                  "percentage", G_TYPE_UINT,   interception,
                                                  "x",          G_TYPE_UINT,   rect.x,
                                                  "y",          G_TYPE_UINT,   rect.y,
                                                  "width",      G_TYPE_UINT,   rect.width,
                                                  "height",     G_TYPE_UINT,   rect.height,
                                                  "timestamp",  G_TYPE_UINT64, GST_BUFFER_TIMESTAMP(buf),
                                                  NULL);
                    message = gst_message_new_element(GST_OBJECT(filter), gst_structure_copy(structure));
                    gst_element_post_message(GST_ELEMENT(filter), message);
                    event = gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM, structure);
                    gst_pad_push_event(filter->srcpad, event);

                }
            }
        }

    }

    // Clean objects
    g_array_free(filter->object_in_array, TRUE);
    filter->object_in_array = g_array_sized_new(FALSE, FALSE, sizeof(InstanceObjectIn), 1);

    gst_buffer_set_data(buf, (guint8*) filter->image->imageData, (guint) filter->image->imageSize);
    return gst_pad_push(filter->srcpad, buf);
}
Example No. 14
// chain function; this function does the actual processing
static GstFlowReturn
gst_haar_adjust_chain(GstPad *pad, GstBuffer *buf)
{
    GstHaarAdjust *filter;

    // sanity checks
    g_return_val_if_fail(pad != NULL, GST_FLOW_ERROR);
    g_return_val_if_fail(buf != NULL, GST_FLOW_ERROR);

    filter = GST_HAARADJUST(GST_OBJECT_PARENT(pad));

    filter->image->imageData = (char*) GST_BUFFER_DATA(buf);

    if ((filter->rect_timestamp == GST_BUFFER_TIMESTAMP(buf)) &&
        (filter->rect_array != NULL) &&
        (filter->rect_array->len > 0)) {
        guint i;

        for (i = 0; i < filter->rect_array->len; ++i) {
            CvRect        rect;
            GstEvent     *event;
            GstMessage   *message;
            GstStructure *structure;
            gint          complement_height_top_bg, complement_height_bottom_bg,
                          complement_height_top_projected, complement_height_bottom_projected;

            rect = g_array_index(filter->rect_array, CvRect, i);

            complement_height_top_bg = complement_height_bottom_bg = -1;

            // Compute the 'height' complement of the projected value
            complement_height_bottom_projected = complement_height_top_projected = (rect.height * filter->height_adjustment) - rect.height;

            // Compute the 'height' complement of the haar rect and the bg rect
            if ((filter->rect_bg_timestamp == GST_BUFFER_TIMESTAMP(buf)) &&
                    (filter->rect_bg_array != NULL) &&
                    (filter->rect_bg_array->len > 0)) {

                guint i;

                for (i = 0; i < filter->rect_bg_array->len; ++i) {
                    CvRect rect_bg_temp;
                    rect_bg_temp = g_array_index(filter->rect_bg_array, CvRect, i);

                    if (rectIntercept(&rect, &rect_bg_temp) == 1) {
                        complement_height_bottom_bg = rect_bg_temp.height - rect.height - (rect.y - rect_bg_temp.y);
                        complement_height_top_bg = rect_bg_temp.height - rect.height - ((rect_bg_temp.y + rect_bg_temp.height)-(rect.y + rect.height));
                        break;
                    }
                }
            }

            // adjust ROIs
            if (g_strcasecmp(filter->object_type, OBJECT_TYPE_UPPER_BODY) == 0) {

                if (CV_IABS(complement_height_bottom_projected - complement_height_bottom_bg) < (rect.height * MAX_PERC_DESVIATION_TO_FOLLOW_BG))
                    rect.height += complement_height_bottom_bg;
                else
                    rect.height += complement_height_bottom_projected;

            } else if (g_strcasecmp(filter->object_type, OBJECT_TYPE_LOWER_BODY) == 0) {

                if (CV_IABS(complement_height_bottom_projected - complement_height_bottom_bg) < (rect.height * MAX_PERC_DESVIATION_TO_FOLLOW_BG)) {
                    rect.height += complement_height_bottom_bg;
                    rect.y -= complement_height_bottom_bg;
                } else {
                    rect.height += complement_height_bottom_projected;
                    rect.y -= complement_height_bottom_projected;
                }

            } else {
                GST_ERROR("invalid object type: '%s'", filter->object_type);
                break;
            }

            // if greater than the image margins, set new limits
            //if (rect.x < 0) rect.x = 0;
            //if (rect.y < 0) rect.y = 0;
            //if (rect.x + rect.width  > filter->image->width ) rect.width  = filter->image->width  - rect.x;
            //if (rect.y + rect.height > filter->image->height) rect.height = filter->image->height - rect.y;

            if (filter->verbose)
                GST_INFO("[rect] x: %d, y: %d, width: %d, height: %d",
                         rect.x, rect.y, rect.width, rect.height);

            if (filter->display) {
                cvRectangle(filter->image,
                            cvPoint(rect.x, rect.y),
                            cvPoint(rect.x + rect.width, rect.y + rect.height),
                            CV_RGB(255, 0, 255), 1, 8, 0);
            }

            // send downstream event and bus message with the rect info
            structure = gst_structure_new("haar-adjust-roi",
                                          "x",         G_TYPE_UINT,   rect.x,
                                          "y",         G_TYPE_UINT,   rect.y,
                                          "width",     G_TYPE_UINT,   rect.width,
                                          "height",    G_TYPE_UINT,   rect.height,
                                          "timestamp", G_TYPE_UINT64, GST_BUFFER_TIMESTAMP(buf),
                                          NULL);

            message = gst_message_new_element(GST_OBJECT(filter), gst_structure_copy(structure));
            gst_element_post_message(GST_ELEMENT(filter), message);

            event   = gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM, structure);
            gst_pad_push_event(filter->srcpad, event);
        }
    }

    gst_buffer_set_data(buf, (guint8*) filter->image->imageData, (guint) filter->image->imageSize);
    return gst_pad_push(filter->srcpad, buf);
}
Example No. 15
// chain function - this function does the actual processing
static GstFlowReturn
gst_bgfg_acmmm2003_chain(GstPad *pad, GstBuffer *buf)
{
    GstBgFgACMMM2003 *filter;

    // sanity checks
    g_return_val_if_fail(pad != NULL, GST_FLOW_ERROR);
    g_return_val_if_fail(buf != NULL, GST_FLOW_ERROR);

    filter = GST_BGFG_ACMMM2003(GST_OBJECT_PARENT(pad));

    filter->image->imageData = (gchar*) GST_BUFFER_DATA(buf);

    // the bg model must be initialized with a valid image; thus we delay its
    // creation until the chain function
    if (filter->model == NULL) {
        filter->model = cvCreateFGDStatModel(filter->image, NULL);

        ((CvFGDStatModel*)filter->model)->params.minArea           = filter->min_area;
        ((CvFGDStatModel*)filter->model)->params.erode_iterations  = filter->n_erode_iterations;
        ((CvFGDStatModel*)filter->model)->params.dilate_iterations = filter->n_dilate_iterations;

        return gst_pad_push(filter->srcpad, buf);
    }

    cvUpdateBGStatModel(filter->image, filter->model, -1);

    // send mask event, if requested
    if (filter->send_mask_events) {
        GstStructure *structure;
        GstEvent     *event;
        GArray       *data_array;
        IplImage     *mask;

        // prepare and send custom event with the mask surface
        mask = filter->model->foreground;
        data_array = g_array_sized_new(FALSE, FALSE, sizeof(mask->imageData[0]), mask->imageSize);
        g_array_append_vals(data_array, mask->imageData, mask->imageSize);

        structure = gst_structure_new("bgfg-mask",
                                      "data",      G_TYPE_POINTER, data_array,
                                      "width",     G_TYPE_UINT,    mask->width,
                                      "height",    G_TYPE_UINT,    mask->height,
                                      "depth",     G_TYPE_UINT,    mask->depth,
                                      "channels",  G_TYPE_UINT,    mask->nChannels,
                                      "timestamp", G_TYPE_UINT64,  GST_BUFFER_TIMESTAMP(buf),
                                      NULL);

        event = gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM, structure);
        gst_pad_push_event(filter->srcpad, event);
        g_array_unref(data_array);

        if (filter->display) {
            // shade the regions not selected by the acmmm2003 algorithm
            cvXorS(mask,          CV_RGB(255, 255, 255), mask,          NULL);
            cvSubS(filter->image, CV_RGB(191, 191, 191), filter->image, mask);
            cvXorS(mask,          CV_RGB(255, 255, 255), mask,          NULL);
        }
    }

    if (filter->send_roi_events) {
        CvSeq        *contour;
        CvRect       *bounding_rects;
        guint         i, j, n_rects;

        // count # of contours, allocate array to store the bounding rectangles
        for (contour = filter->model->foreground_regions, n_rects = 0;
             contour != NULL;
             contour = contour->h_next, ++n_rects);

        bounding_rects = g_new(CvRect, n_rects);

        for (contour = filter->model->foreground_regions, i = 0; contour != NULL; contour = contour->h_next, ++i)
            bounding_rects[i] = cvBoundingRect(contour, 0);

        for (i = 0; i < n_rects; ++i) {
            // skip collapsed rectangles
            if ((bounding_rects[i].width == 0) || (bounding_rects[i].height == 0)) continue;

            for (j = (i + 1); j < n_rects; ++j) {
                // skip collapsed rectangles
                if ((bounding_rects[j].width == 0) || (bounding_rects[j].height == 0)) continue;

                if (rect_overlap(bounding_rects[i], bounding_rects[j])) {
                    bounding_rects[i] = rect_collapse(bounding_rects[i], bounding_rects[j]);
                    bounding_rects[j] = NULL_RECT;
                }
            }
        }

        for (i = 0; i < n_rects; ++i) {
            GstEvent     *event;
            GstStructure *structure;
            CvRect        r;

            // skip collapsed rectangles
            r = bounding_rects[i];
            if ((r.width == 0) || (r.height == 0)) continue;

            structure = gst_structure_new("bgfg-roi",
                                          "x",         G_TYPE_UINT,   r.x,
                                          "y",         G_TYPE_UINT,   r.y,
                                          "width",     G_TYPE_UINT,   r.width,
                                          "height",    G_TYPE_UINT,   r.height,
                                          "timestamp", G_TYPE_UINT64, GST_BUFFER_TIMESTAMP(buf),
                                          NULL);

            event = gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM, structure);
            gst_pad_send_event(filter->sinkpad, event);

            if (filter->verbose)
                GST_INFO("[roi] x: %d, y: %d, width: %d, height: %d\n",
                         r.x, r.y, r.width, r.height);

            if (filter->display)
                cvRectangle(filter->image, cvPoint(r.x, r.y), cvPoint(r.x + r.width, r.y + r.height),
                            CV_RGB(0, 0, 255), 1, 0, 0);
        }

        g_free(bounding_rects);
    }

    if (filter->display)
        gst_buffer_set_data(buf, (guchar*) filter->image->imageData, filter->image->imageSize);

    return gst_pad_push(filter->srcpad, buf);
}
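The mask is handed downstream as a bare G_TYPE_POINTER to a GArray. Since gst_pad_push_event() in GStreamer 0.10 runs the downstream handlers synchronously, the g_array_unref() right after the push is safe, provided any consumer that wants to keep the data takes its own g_array_ref() while handling the event.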
Example No. 16
static void
write_labels (GstWavEnc * wavenc)
{
  GstBuffer *buf;
  GString *info_str;
  int total = 4;
  GList *caps;

  info_str = g_string_new ("LIST    adtl");
  if (gst_props_get (wavenc->metadata->properties, "ltxts", &caps, NULL)) {
    GList *p;
    int i;

    for (p = caps, i = 1; p; p = p->next, i++) {
      GstCaps *ltxt_caps = p->data;
      GString *ltxt;
      char *label = NULL;
      int len, req, j;

      gst_props_get (ltxt_caps->properties, "name", &label, NULL);
      len = strlen (label);

#define LTXT_SIZE 28
      ltxt = g_string_new ("ltxt                        ");
      ltxt->str[8] = GUINT32_TO_LE (i); /* Identifier */
      ltxt->str[12] = GUINT32_TO_LE (0);        /* Sample Length */
      ltxt->str[16] = GUINT32_TO_LE (0);        /* FIXME: Don't save the purpose yet */
      ltxt->str[20] = GUINT16_TO_LE (0);        /* Country */
      ltxt->str[22] = GUINT16_TO_LE (0);        /* Language */
      ltxt->str[24] = GUINT16_TO_LE (0);        /* Dialect */
      ltxt->str[26] = GUINT16_TO_LE (0);        /* Code Page */
      g_string_append (ltxt, label);
      g_free (label);

      len += LTXT_SIZE;

      ltxt->str[4] = GUINT32_TO_LE (len);

      /* Check that we end on an even boundary */
      req = ((len + 8) + 1) & ~1;
      for (j = 0; j < req - len; j++) {
        g_string_append_printf (ltxt, "%c", 0);
      }

      total += req;

      g_string_append (info_str, ltxt->str);
      g_string_free (ltxt, TRUE);
    }
  }

  if (gst_props_get (wavenc->metadata->properties, "labels", &caps, NULL)) {
    GList *p;
    int i;

    for (p = caps, i = 1; p; p = p->next, i++) {
      GstCaps *labl_caps = p->data;
      GString *labl;
      char *label = NULL;
      int len, req, j;

      gst_props_get (labl_caps->properties, "name", &label, NULL);
      len = strlen (label);

#define LABL_SIZE 4
      labl = g_string_new ("labl        ");
      labl->str[8] = GUINT32_TO_LE (i);
      g_string_append (labl, label);
      g_free (label);

      len += LABL_SIZE;

      labl->str[4] = GUINT32_TO_LE (len);

      /* Check our size */
      req = ((len + 8) + 1) & ~1;
      for (j = 0; j < req - len; j++) {
        g_string_append_printf (labl, "%c", 0);
      }

      total += req;

      g_string_append (info_str, labl->str);
      g_string_free (labl, TRUE);
    }
  }

  if (gst_props_get (wavenc->metadata->properties, "notes", &caps, NULL)) {
    GList *p;
    int i;

    for (p = caps, i = 1; p; p = p->next, i++) {
      GstCaps *note_caps = p->data;
      GString *note;
      char *label = NULL;
      int len, req, j;

      gst_props_get (note_caps->properties, "name", &label, NULL);
      len = strlen (label);

#define NOTE_SIZE 4
      note = g_string_new ("note        ");
      note->str[8] = GUINT32_TO_LE (i);
      g_string_append (note, label);
      g_free (label);

      len += NOTE_SIZE;

      note->str[4] = GUINT32_TO_LE (len);

      /* Size check */
      req = ((len + 8) + 1) & ~1;
      for (j = 0; j < req - len; j++) {
        g_string_append_printf (note, "%c", 0);
      }

      total += req;

      g_string_append (info_str, note->str);
      g_string_free (note, TRUE);
    }
  }

  info_str->str[4] = GUINT32_TO_LE (total);

  buf = gst_buffer_new ();
  gst_buffer_set_data (buf, info_str->str, info_str->len);

  gst_pad_push (wavenc->srcpad, GST_DATA (buf));
  g_string_free (info_str, FALSE);
}
Example No. 17
static GstFlowReturn
gst_pnmenc_chain (GstPad * pad, GstBuffer * buf)
{
  GstPnmenc *s = GST_PNMENC (gst_pad_get_parent (pad));
  GstFlowReturn r;
  gchar *header;
  GstBuffer *out;

  /* Assumption: one buffer, one image; that is, always write the header first. */
  header = g_strdup_printf ("P%i\n%i %i\n%i\n",
      s->info.type + 3 * (1 - s->info.encoding), s->info.width, s->info.height,
      s->info.max);
  out = gst_buffer_new ();
  gst_buffer_set_data (out, (guchar *) header, strlen (header));
  gst_buffer_set_caps (out, GST_PAD_CAPS (s->src));
  if ((r = gst_pad_push (s->src, out)) != GST_FLOW_OK)
    goto out;

  /* Need to convert from GStreamer rowstride to PNM rowstride */
  if (s->info.width % 4 != 0) {
    guint i_rowstride;
    guint o_rowstride;
    GstBuffer *obuf;
    guint i;

    if (s->info.type == GST_PNM_TYPE_PIXMAP) {
      o_rowstride = 3 * s->info.width;
      i_rowstride = GST_ROUND_UP_4 (o_rowstride);
    } else {
      o_rowstride = s->info.width;
      i_rowstride = GST_ROUND_UP_4 (o_rowstride);
    }

    obuf = gst_buffer_new_and_alloc (o_rowstride * s->info.height);
    for (i = 0; i < s->info.height; i++)
      memcpy (GST_BUFFER_DATA (obuf) + o_rowstride * i,
          GST_BUFFER_DATA (buf) + i_rowstride * i, o_rowstride);
    gst_buffer_unref (buf);
    buf = obuf;
  } else {
    /* Pass through the data. */
    buf = gst_buffer_make_metadata_writable (buf);
  }

  /* We might need to convert to ASCII... */
  if (s->info.encoding == GST_PNM_ENCODING_ASCII) {
    GstBuffer *obuf;
    guint i, o;

    obuf = gst_buffer_new_and_alloc (GST_BUFFER_SIZE (buf) * (4 + 1 / 20.));
    for (i = o = 0; i < GST_BUFFER_SIZE (buf); i++) {
      g_snprintf ((char *) GST_BUFFER_DATA (obuf) + o, 4, "%3i",
          GST_BUFFER_DATA (buf)[i]);
      o += 3;
      GST_BUFFER_DATA (obuf)[o++] = ' ';
      if (!((i + 1) % 20))
        GST_BUFFER_DATA (obuf)[o++] = '\n';
    }
    gst_buffer_unref (buf);
    buf = obuf;
  }

  gst_buffer_set_caps (buf, GST_PAD_CAPS (s->src));
  r = gst_pad_push (s->src, buf);

out:
  gst_object_unref (s);

  return r;
}
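The sizing arithmetic in the ASCII branch: each sample is printed as three digits plus a separator (4 chars), and a newline is added after every 20th sample, hence the allocation of GST_BUFFER_SIZE (buf) * (4 + 1 / 20.) bytes.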
Example No. 18
/*****************************************************************************
 * gst_tidmaiaccel_prepare_output_buffer
 *    Function is used to allocate output buffer
 *****************************************************************************/
static GstFlowReturn gst_tidmaiaccel_prepare_output_buffer (GstBaseTransform
    *trans, GstBuffer *inBuf, gint size, GstCaps *caps, GstBuffer **outBuf)
{
    GstTIDmaiaccel *dmaiaccel = GST_TIDMAIACCEL(trans);
    Buffer_Handle   hOutBuf;
    Bool isContiguous = FALSE;
    UInt32 phys = 0;

    /* Always check if the buffer is contiguous */
    phys = Memory_getBufferPhysicalAddress(
                    GST_BUFFER_DATA(inBuf),
                    GST_BUFFER_SIZE(inBuf),
                    &isContiguous);

    if (isContiguous && dmaiaccel->width){
        GST_DEBUG("Is contiguous video buffer");

        Memory_registerContigBuf((UInt32)GST_BUFFER_DATA(inBuf),
            GST_BUFFER_SIZE(inBuf),phys);
        /* This is a contiguous buffer, create a dmai buffer transport */
        BufferGfx_Attrs gfxAttrs    = BufferGfx_Attrs_DEFAULT;

        gfxAttrs.bAttrs.reference   = TRUE;
        gfxAttrs.dim.width          = dmaiaccel->width;
        gfxAttrs.dim.height         = dmaiaccel->height;
        gfxAttrs.colorSpace         = dmaiaccel->colorSpace;
        gfxAttrs.dim.lineLength     = dmaiaccel->lineLength;

        hOutBuf = Buffer_create(GST_BUFFER_SIZE(inBuf), &gfxAttrs.bAttrs);
        BufferGfx_setDimensions(hOutBuf,&gfxAttrs.dim);
        BufferGfx_setColorSpace(hOutBuf,gfxAttrs.colorSpace);
        Buffer_setUserPtr(hOutBuf, (Int8*)GST_BUFFER_DATA(inBuf));
        Buffer_setNumBytesUsed(hOutBuf, GST_BUFFER_SIZE(inBuf));
        *outBuf = gst_tidmaibuffertransport_new(hOutBuf, NULL, NULL, FALSE);
        gst_buffer_set_data(*outBuf, (guint8*) Buffer_getUserPtr(hOutBuf),
            Buffer_getSize(hOutBuf));
        gst_buffer_copy_metadata(*outBuf,inBuf,GST_BUFFER_COPY_ALL);
        gst_buffer_set_caps(*outBuf, GST_PAD_CAPS(trans->srcpad));

        /* We need to grab a reference to the input buffer since we keep
         * a pointer to its data */
        gst_buffer_ref(inBuf);

        gst_tidmaibuffertransport_set_release_callback(
            (GstTIDmaiBufferTransport *)*outBuf,
            dmaiaccel_release_cb,inBuf);

        return GST_FLOW_OK;
    } else {
        GST_DEBUG("Copying into contiguous video buffer");
        /* The input buffer is not contiguous: copy it into a contiguous DMAI buffer */
        if (!dmaiaccel->bufTabAllocated){
            /* Initialize our buffer tab */
            BufferGfx_Attrs gfxAttrs    = BufferGfx_Attrs_DEFAULT;

            gfxAttrs.dim.width          = dmaiaccel->width;
            gfxAttrs.dim.height         = dmaiaccel->height;
            gfxAttrs.colorSpace         = dmaiaccel->colorSpace;
            gfxAttrs.dim.lineLength     = dmaiaccel->lineLength;

            dmaiaccel->hOutBufTab =
                        BufTab_create(2, GST_BUFFER_SIZE(inBuf),
                            BufferGfx_getBufferAttrs(&gfxAttrs));
            pthread_mutex_init(&dmaiaccel->bufTabMutex, NULL);
            pthread_cond_init(&dmaiaccel->bufTabCond, NULL);
            if (dmaiaccel->hOutBufTab == NULL) {
                GST_ELEMENT_ERROR(dmaiaccel,RESOURCE,NO_SPACE_LEFT,(NULL),
                    ("failed to create output buffer tab"));
                return GST_FLOW_ERROR;
            }
            dmaiaccel->bufTabAllocated = TRUE;
        }

        pthread_mutex_lock(&dmaiaccel->bufTabMutex);
        hOutBuf = BufTab_getFreeBuf(dmaiaccel->hOutBufTab);
        if (hOutBuf == NULL) {
            GST_INFO("Failed to get free buffer, waiting on bufTab\n");
            pthread_cond_wait(&dmaiaccel->bufTabCond, &dmaiaccel->bufTabMutex);

            hOutBuf = BufTab_getFreeBuf(dmaiaccel->hOutBufTab);

            if (hOutBuf == NULL) {
                GST_ELEMENT_ERROR(dmaiaccel,RESOURCE,NO_SPACE_LEFT,(NULL),
                    ("failed to get a free contiguous buffer from BufTab"));
                pthread_mutex_unlock(&dmaiaccel->bufTabMutex);
                return GST_FLOW_ERROR;
            }
        }
        pthread_mutex_unlock(&dmaiaccel->bufTabMutex);

        memcpy(Buffer_getUserPtr(hOutBuf),GST_BUFFER_DATA(inBuf),
            GST_BUFFER_SIZE(inBuf));
        Buffer_setNumBytesUsed(hOutBuf, GST_BUFFER_SIZE(inBuf));
        *outBuf = gst_tidmaibuffertransport_new(hOutBuf, &dmaiaccel->bufTabMutex,
            &dmaiaccel->bufTabCond, FALSE);
        gst_buffer_set_data(*outBuf, (guint8*) Buffer_getUserPtr(hOutBuf),
            Buffer_getSize(hOutBuf));
        gst_buffer_copy_metadata(*outBuf,inBuf,GST_BUFFER_COPY_ALL);
        gst_buffer_set_caps(*outBuf, GST_PAD_CAPS(trans->srcpad));

        return GST_FLOW_OK;
    }
}
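To summarize the two paths: physically contiguous input is wrapped zero-copy, with the extra gst_buffer_ref() on inBuf keeping the data alive until the transport buffer's release callback (dmaiaccel_release_cb) runs; non-contiguous input is memcpy'd into a buffer from a lazily created two-slot BufTab, with the mutex/condition pair used to wait for a slot to be released.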
Example No. 19
// chain function; this function does the actual processing
static GstFlowReturn
gst_surf_tracker_chain(GstPad *pad, GstBuffer *buf) {
    GstSURFTracker *filter;
    GstClockTime    timestamp;

    // sanity checks
    g_return_val_if_fail(pad != NULL, GST_FLOW_ERROR);
    g_return_val_if_fail(buf != NULL, GST_FLOW_ERROR);

    filter = GST_SURF_TRACKER(GST_OBJECT_PARENT(pad));
    filter->image->imageData = (char*) GST_BUFFER_DATA(buf);

    // Create the gray image for the surf 'features' search process
    cvCvtColor(filter->image, filter->gray, CV_BGR2GRAY);
    ++filter->frames_processed;
    timestamp = GST_BUFFER_TIMESTAMP(buf);

    // If stored_objects exist: search for matches, update, clean up
    if ((filter->stored_objects != NULL) && (filter->stored_objects->len > 0)) {
        CvMemStorage *surf_image_mem_storage;
        CvSeq        *surf_image_keypoints, *surf_image_descriptors;
        guint         i;
        gint          j;

        // Update the match set 'features' for each object
        surf_image_mem_storage = cvCreateMemStorage(0);

        // Search 'features' in full image
        surf_image_keypoints = surf_image_descriptors = NULL;
        cvExtractSURF(filter->gray, NULL, &surf_image_keypoints, &surf_image_descriptors,
                      surf_image_mem_storage, filter->params, 0);

        for (i = 0; i < filter->stored_objects->len; ++i) {
            InstanceObject *object;
            GArray         *pairs;

            object = &g_array_index(filter->stored_objects, InstanceObject, i);
            pairs  = g_array_new(FALSE, FALSE, sizeof(IntPair));

            findPairs(object->surf_object_keypoints, object->surf_object_descriptors,
                      surf_image_keypoints, surf_image_descriptors, pairs);

            // if match, update object
            if (pairs->len && (float) pairs->len / object->surf_object_descriptors->total >= MIN_MATCH_OBJECT) {
                object->range_viewed++;
                object->last_frame_viewed = filter->frames_processed;
                object->timestamp         = timestamp;

                if (object->surf_object_keypoints_last_match != NULL)
                    cvClearSeq(object->surf_object_keypoints_last_match);
                object->surf_object_keypoints_last_match = getMatchPoints(surf_image_keypoints, pairs, 1, object->mem_storage);

                if (object->surf_object_descriptors_last_match != NULL)
                    cvClearSeq(object->surf_object_descriptors_last_match);
                object->surf_object_descriptors_last_match = getMatchPoints(surf_image_descriptors, pairs, 1, object->mem_storage);

                // Estimate the rect of the localized objects
                object->rect_estimated = rectDisplacement(object->surf_object_keypoints, surf_image_keypoints, pairs, object->rect, PAIRS_PERC_CONSIDERATE);
            }

            g_array_free(pairs, TRUE);
        }

        if (surf_image_keypoints != NULL) cvClearSeq(surf_image_keypoints);
        if (surf_image_descriptors != NULL) cvClearSeq(surf_image_descriptors);
        cvReleaseMemStorage(&surf_image_mem_storage);

        // Clean old objects
        for (j = filter->stored_objects->len - 1; j >= 0; --j) {
            InstanceObject *object;

            object = &g_array_index(filter->stored_objects, InstanceObject, j);
            if ((filter->frames_processed - object->last_frame_viewed > DELOBJ_NFRAMES_IS_OLD) ||
                (filter->frames_processed != object->last_frame_viewed && object->range_viewed < DELOBJ_COMBOFRAMES_IS_IRRELEVANT)) {
                if (object->surf_object_keypoints != NULL) cvClearSeq(object->surf_object_keypoints);
                if (object->surf_object_descriptors != NULL) cvClearSeq(object->surf_object_descriptors);
                if (object->surf_object_keypoints_last_match != NULL) cvClearSeq(object->surf_object_keypoints_last_match);
                if (object->surf_object_descriptors_last_match != NULL) cvClearSeq(object->surf_object_descriptors_last_match);
                cvReleaseMemStorage(&object->mem_storage);
                g_array_remove_index_fast(filter->stored_objects, j);
            }
        }

    } // if any object exist

    // Process all haar rects
    if ((filter->rect_array != NULL) && (filter->rect_array->len > 0)) {
        guint i, j;

        for (i = 0; i < filter->rect_array->len; ++i) {
            CvRect rect = g_array_index(filter->rect_array, CvRect, i);

            // If it already exists in 'stored_objects', update its features;
            // else save it as new.
            for (j = 0; j < filter->stored_objects->len; ++j) {
                InstanceObject *object;

                object = &g_array_index(filter->stored_objects, InstanceObject, j);

                // Considered the same object if the centroid of the matched
                // features lies inside the haar rect AND the area deviation is
                // at most PERC_RECT_TO_SAME_OBJECT
                if (pointIntoRect(rect, (object->surf_object_keypoints_last_match != NULL) ? surfCentroid(object->surf_object_keypoints_last_match, cvPoint(0, 0)) : surfCentroid(object->surf_object_keypoints, cvPoint(0, 0))) &&
                    ((float) MIN((object->rect.width * object->rect.height), (rect.width * rect.height)) / (float) MAX((object->rect.width * object->rect.height), (rect.width * rect.height)) >= PERC_RECT_TO_SAME_OBJECT)) {

                    // Update the object features according to the new body rect
                    cvSetImageROI(filter->gray, rect);
                    cvExtractSURF(filter->gray, NULL, &object->surf_object_keypoints, &object->surf_object_descriptors,
                                  object->mem_storage, filter->params, 0);
                    cvResetImageROI(filter->gray);
                    object->rect = object->rect_estimated = rect;
                    object->last_body_identify_timestamp = timestamp;

                    break;
                }
            }

            // If new, create object and append in stored_objects
            if (j >= filter->stored_objects->len) {
                InstanceObject object;

                object.surf_object_keypoints   = 0;
                object.surf_object_descriptors = 0;
                object.mem_storage             = cvCreateMemStorage(0);

                cvSetImageROI(filter->gray, rect);
                cvExtractSURF(filter->gray, NULL, &object.surf_object_keypoints, &object.surf_object_descriptors,
                              object.mem_storage, filter->params, 0);
                cvResetImageROI(filter->gray);

                if (object.surf_object_descriptors && object.surf_object_descriptors->total > 0) {
                    object.id                                 = filter->static_count_objects++;
                    object.last_frame_viewed                  = filter->frames_processed;
                    object.range_viewed                       = 1;
                    object.rect                               = object.rect_estimated               = rect;
                    object.timestamp                          = object.last_body_identify_timestamp = timestamp;
                    object.surf_object_keypoints_last_match   = NULL;
                    object.surf_object_descriptors_last_match = NULL;

                    g_array_append_val(filter->stored_objects, object);
                }
            } // new
        }
    }

    // Push the objects found in this frame to the gstreamer pad
    if ((filter->stored_objects != NULL) && (filter->stored_objects->len > 0)) {
        guint i;

        for (i = 0; i < filter->stored_objects->len; ++i) {
            InstanceObject object = g_array_index(filter->stored_objects, InstanceObject, i);

            // Skip the object if it was not seen in this frame
            if (object.timestamp == timestamp) {
                TrackedObject *tracked_object;
                GstEvent      *event;
                CvRect         rect;

                rect = ((object.last_body_identify_timestamp == timestamp) ? object.rect : object.rect_estimated);

                if (filter->verbose) {
                    GST_INFO("[object #%d rect] x: %d, y: %d, width: %d, height: %d\n", object.id, rect.x, rect.y, rect.width, rect.height);
                    // drawSurfPoints(object.surf_object_keypoints, cvPoint(object.rect.x, object.rect.y), filter->image, PRINT_COLOR, 0);
                    // drawSurfPoints(object.surf_object_keypoints_last_match, cvPoint(object.rect.x, object.rect.y), filter->image, PRINT_COLOR, 1);
                }

                if (filter->display_features) {
                    drawSurfPoints(object.surf_object_keypoints_last_match, cvPoint(0, 0), filter->image, PRINT_COLOR, 1);
                }

                if (filter->display) {
                    char *label;
                    float font_scaling;

                    font_scaling = ((filter->image->width * filter->image->height) > (320 * 240)) ? 0.5f : 0.3f;

                    cvRectangle(filter->image, cvPoint(rect.x, rect.y), cvPoint(rect.x + rect.width, rect.y + rect.height),
                                PRINT_COLOR, ((object.last_body_identify_timestamp == timestamp) ? 2 : 1), 8, 0);
                    label = g_strdup_printf("OBJ#%i", object.id);
                    printText(filter->image, cvPoint(rect.x + (rect.width / 2), rect.y + (rect.height / 2)), label, PRINT_COLOR, font_scaling, 1);
                    g_free(label);
                }

                // allocate and initialize 'TrackedObject' structure
                tracked_object = tracked_object_new();
                tracked_object->id        = g_strdup_printf("PERSON#%d", object.id);
                tracked_object->type      = TRACKED_OBJECT_DYNAMIC;
                tracked_object->height    = rect.height;
                tracked_object->timestamp = timestamp;

                // add the points that define the lower part of the object (i.e.,
                // the lower horizontal segment of the rectangle) as the object's perimeter
                tracked_object_add_point(tracked_object, rect.x, rect.y + rect.height);
                tracked_object_add_point(tracked_object, rect.x + rect.width, rect.y + rect.height);

                // send downstream event
                event = gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM,
                                             tracked_object_to_structure(tracked_object, "tracked-object"));
                gst_pad_push_event(filter->srcpad, event);
            }
        }
    }

    // Clean body rects
    g_array_free(filter->rect_array, TRUE);
    filter->rect_array = g_array_sized_new(FALSE, FALSE, sizeof(CvRect), 1);

    // Draw number of objects stored
    if (filter->display) {
        char *label = g_strdup_printf("N_STORED_OBJS: %3i", filter->stored_objects->len);
        printText(filter->image, cvPoint(0, 0), label, PRINT_COLOR, .5, 1);
        g_free(label);
    }

    gst_buffer_set_data(buf, (guint8*) filter->image->imageData, (guint) filter->image->imageSize);
    return gst_pad_push(filter->srcpad, buf);
}
Example No. 20
static GstFlowReturn
create(GstPushSrc *base, GstBuffer **buf)
{
    GstTICaptureSrc *src = (GstTICaptureSrc *)base;
    Buffer_Handle       hDstBuf;
    GstBuffer   *outBuf;
    gint    ret = GST_FLOW_OK;
    BufferGfx_Attrs  gfxAttrs = BufferGfx_Attrs_DEFAULT;
    Int32   width, height;

    GST_LOG("create begin");

    /* create capture device */
    if (src->hCapture == NULL) {

        /* set framerate based on video standard */
        switch(dmai_video_std(src->video_standard)) {
            case VideoStd_D1_NTSC:
                    gst_value_set_fraction(&src->framerate,30000,1001);
                break;
            case VideoStd_D1_PAL:
                    gst_value_set_fraction(&src->framerate,25,1);
                break;
            default:
                    gst_value_set_fraction(&src->framerate,30,1);
                break;
        }

        /* set width & height based on video standard */

        src->cAttrs.videoStd = dmai_video_std(src->video_standard);

        VideoStd_getResolution(src->cAttrs.videoStd, &width, &height);
        /* Override the detected resolution with a fixed D1 PAL size */
        width = 720;
        height = 576;
        GST_WARNING("forcing video size to %dx%d", width, height);

        src->width = width;
        src->height = height;
        
        gfxAttrs.dim.height = src->height;
        gfxAttrs.dim.width = src->width;
        src->cAttrs.captureDimension = &gfxAttrs.dim;

        if (!capture_create(src))
            return GST_FLOW_ERROR;
    }

    /* Get buffer from driver */
    if (Capture_get(src->hCapture, &hDstBuf)) {
        GST_ELEMENT_ERROR(src, RESOURCE, FAILED,
        ("Failed to allocate buffer\n"), (NULL));
        return GST_FLOW_ERROR;
    }

    /* Create a DMAI transport buffer object to carry a DMAI buffer to
     * the source pad.  The transport buffer knows how to release the
     * buffer for re-use in this element when the source pad calls
     * gst_buffer_unref().
     */
    outBuf = gst_tidmaibuffertransport_new(hDstBuf, src->hBufTab, capture_buffer_finalize, (void*)src);
    gst_buffer_set_data(outBuf, GST_BUFFER_DATA(outBuf), Buffer_getSize(hDstBuf));

    *buf = outBuf;

    /* set buffer metadata */
    if (G_LIKELY (ret == GST_FLOW_OK && *buf)) {
        GstClock *clock;
        GstClockTime timestamp;

        GST_BUFFER_OFFSET (*buf) = src->offset++;
        GST_BUFFER_OFFSET_END (*buf) = src->offset;

        /* timestamps, LOCK to get clock and base time. */
        GST_OBJECT_LOCK (src);
        if ((clock = GST_ELEMENT_CLOCK (src))) {
            /* we have a clock, get base time and ref clock */
            timestamp = GST_ELEMENT (src)->base_time;
            gst_object_ref (clock);
        } else {
            /* no clock, can't set timestamps */
            timestamp = GST_CLOCK_TIME_NONE;
        }
        GST_OBJECT_UNLOCK (src);

        if (G_LIKELY (clock)) {
            /* the time now is the time of the clock minus the base time */
            timestamp = gst_clock_get_time (clock) - timestamp;
            gst_object_unref (clock);

            /* if we have a framerate adjust timestamp for frame latency */
            if (GST_CLOCK_TIME_IS_VALID (src->duration)) {
                if (timestamp > src->duration)
                    timestamp -= src->duration;
                else
                    timestamp = 0;
            }

        }

        /* FIXME: use the timestamp from the buffer itself! */
        GST_BUFFER_TIMESTAMP (*buf) = timestamp;
        GST_BUFFER_DURATION (*buf) = src->duration;
    }

    /* Create caps for buffer */
    GstCaps *mycaps;
    GstStructure        *structure;

    mycaps = gst_caps_new_empty();

    if (src->cAttrs.colorSpace == ColorSpace_UYVY) {
        structure = gst_structure_new( "video/x-raw-yuv",
            "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('U', 'Y', 'V', 'Y'),
            "framerate", GST_TYPE_FRACTION,
                gst_value_get_fraction_numerator(&src->framerate),
                gst_value_get_fraction_denominator(&src->framerate),
            "width", G_TYPE_INT,    src->width,
            "height", G_TYPE_INT,   src->height,
            (gchar*) NULL);

    }
    else if(src->cAttrs.colorSpace == ColorSpace_YUV420PSEMI) {
        structure = gst_structure_new( "video/x-raw-yuv",
            "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('N', 'V', '1', '2'),
            "framerate", GST_TYPE_FRACTION,
                gst_value_get_fraction_numerator(&src->framerate),
                gst_value_get_fraction_denominator(&src->framerate),
            "width", G_TYPE_INT,    src->width,
            "height", G_TYPE_INT,   src->height,
            (gchar*) NULL);
    }
    else {
        GST_ERROR("unsupported fourcc\n");
        gst_caps_unref(mycaps);
        return GST_FLOW_ERROR;
    }

    gst_caps_append_structure(mycaps, gst_structure_copy (structure));
    gst_structure_free(structure);
    gst_buffer_set_caps(*buf, mycaps);
    gst_caps_unref(mycaps);

    {
        static int fn;
        fn++;
        GST_INFO("capture frame %d", fn);
    }

    GST_LOG("create end");
    return GST_FLOW_OK;
}
Example No. 21
/******************************************************************************
 * gst_tiaudenc1_encode_thread
 *     Call the audio codec to process a full input buffer
 ******************************************************************************/
static void* gst_tiaudenc1_encode_thread(void *arg)
{
    GstTIAudenc1   *audenc1    = GST_TIAUDENC1(gst_object_ref(arg));
    void          *threadRet = GstTIThreadSuccess;
    Buffer_Handle  hDstBuf;
    Int32          encDataConsumed;
    GstBuffer     *encDataWindow = NULL;
    GstClockTime   encDataTime;
    Buffer_Handle  hEncDataWindow;
    GstBuffer     *outBuf;
    GstClockTime   sampleDuration;
    guint          sampleRate;
    guint          numSamples;
    Int            bufIdx;
    Int            ret;

    GST_LOG("starting audenc encode thread\n");

    /* Initialize codec engine */
    ret = gst_tiaudenc1_codec_start(audenc1);

    /* Notify main thread that it is ok to continue initialization */
    Rendezvous_meet(audenc1->waitOnEncodeThread);
    Rendezvous_reset(audenc1->waitOnEncodeThread);

    if (ret == FALSE) {
        GST_ELEMENT_ERROR(audenc1, RESOURCE, FAILED,
        ("Failed to start codec\n"), (NULL));
        goto thread_exit;
    }

    while (TRUE) {

        /* Obtain a raw data frame */
        encDataWindow  = gst_ticircbuffer_get_data(audenc1->circBuf);
        encDataTime    = GST_BUFFER_TIMESTAMP(encDataWindow);
        hEncDataWindow = GST_TIDMAIBUFFERTRANSPORT_DMAIBUF(encDataWindow);

        /* Check if there is enough encoded data to be sent to the codec.
         * The last frame of data may not be sufficient to meet the codec
         * requirements for the amount of input data.  If so just throw
         * away the last bit of data rather than filling with bogus
         * data.
         */
        if (GST_BUFFER_SIZE(encDataWindow) <
            Aenc1_getInBufSize(audenc1->hAe)) {
            GST_LOG("Not enough audio data remains\n");
            if (!audenc1->drainingEOS) {
                goto thread_failure;
            }
            goto thread_exit;
        }

        /* Obtain a free output buffer for the encoded data */
        if (!(hDstBuf = gst_tidmaibuftab_get_buf(audenc1->hOutBufTab))) {
            GST_ELEMENT_ERROR(audenc1, RESOURCE, READ,
                ("Failed to get a free contiguous buffer from BufTab\n"),
                (NULL));
            goto thread_exit;
        }

        /* Invoke the audio encoder */
        GST_LOG("Invoking the audio encoder at 0x%08lx with %u bytes\n",
            (unsigned long)Buffer_getUserPtr(hEncDataWindow),
            GST_BUFFER_SIZE(encDataWindow));
        ret             = Aenc1_process(audenc1->hAe, hEncDataWindow, hDstBuf);
        encDataConsumed = Buffer_getNumBytesUsed(hEncDataWindow);

        if (ret < 0) {
            GST_ELEMENT_ERROR(audenc1, STREAM, ENCODE,
            ("Failed to encode audio buffer\n"), (NULL));
            goto thread_failure;
        }

        /* If no encoded data was used we cannot find the next frame */
        if (ret == Dmai_EBITERROR && encDataConsumed == 0) {
            GST_ELEMENT_ERROR(audenc1, STREAM, ENCODE,
            ("Fatal bit error\n"), (NULL));
            goto thread_failure;
        }

        if (ret > 0) {
            GST_LOG("Aenc1_process returned success code %d\n", ret); 
        }

        sampleRate     = audenc1->samplefreq;
        numSamples     = encDataConsumed / (2 * audenc1->channels) ;
        sampleDuration = GST_FRAMES_TO_CLOCK_TIME(numSamples, sampleRate);

        /* Release the reference buffer, and tell the circular buffer how much
         * data was consumed.
         */
        ret = gst_ticircbuffer_data_consumed(audenc1->circBuf, encDataWindow,
                  encDataConsumed);
        encDataWindow = NULL;

        if (!ret) {
            goto thread_failure;
        }

        /* Set the source pad capabilities based on the encoded frame
         * properties.
         */
        gst_tiaudenc1_set_source_caps(audenc1);

        /* Create a DMAI transport buffer object to carry a DMAI buffer to
         * the source pad.  The transport buffer knows how to release the
         * buffer for re-use in this element when the source pad calls
         * gst_buffer_unref().
         */
        outBuf = gst_tidmaibuffertransport_new(hDstBuf, audenc1->hOutBufTab, NULL, NULL);
        gst_buffer_set_data(outBuf, GST_BUFFER_DATA(outBuf),
            Buffer_getNumBytesUsed(hDstBuf));
        gst_buffer_set_caps(outBuf, GST_PAD_CAPS(audenc1->srcpad));

        /* Set timestamp on output buffer */
        if (audenc1->genTimeStamps) {
            GST_BUFFER_DURATION(outBuf)     = sampleDuration;
            GST_BUFFER_TIMESTAMP(outBuf)    = encDataTime;
        }
        else {
            GST_BUFFER_TIMESTAMP(outBuf)    = GST_CLOCK_TIME_NONE;
        }

        /* Tell circular buffer how much time we consumed */
        gst_ticircbuffer_time_consumed(audenc1->circBuf, sampleDuration);

        /* Push the transport buffer to the source pad */
        GST_LOG("pushing buffer to source pad with timestamp : %"
                GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT,
                GST_TIME_ARGS (GST_BUFFER_TIMESTAMP(outBuf)),
                GST_TIME_ARGS (GST_BUFFER_DURATION(outBuf)));

        if (gst_pad_push(audenc1->srcpad, outBuf) != GST_FLOW_OK) {
            GST_DEBUG("push to source pad failed\n");
            goto thread_failure;
        }

        /* Release buffers no longer in use by the codec */
        Buffer_freeUseMask(hDstBuf, gst_tidmaibuffer_CODEC_FREE);
    }

thread_failure:

    gst_tithread_set_status(audenc1, TIThread_CODEC_ABORTED);
    gst_ticircbuffer_consumer_aborted(audenc1->circBuf);
    threadRet = GstTIThreadFailure;

thread_exit:

    /* Re-claim any buffers owned by the codec */
    bufIdx = BufTab_getNumBufs(GST_TIDMAIBUFTAB_BUFTAB(audenc1->hOutBufTab));

    while (bufIdx-- > 0) {
        Buffer_Handle hBuf = BufTab_getBuf(
            GST_TIDMAIBUFTAB_BUFTAB(audenc1->hOutBufTab), bufIdx);
        Buffer_freeUseMask(hBuf, gst_tidmaibuffer_CODEC_FREE);
    }

    /* Release the last buffer we retrieved from the circular buffer */
    if (encDataWindow) {
        gst_ticircbuffer_data_consumed(audenc1->circBuf, encDataWindow, 0);
    }

    /* We have to wait to shut down this thread until we can guarantee that
     * no more input buffers will be queued into the circular buffer
     * (we're about to delete it).  
     */
    Rendezvous_meet(audenc1->waitOnEncodeThread);
    Rendezvous_reset(audenc1->waitOnEncodeThread);

    /* Notify main thread that we are done draining before we shutdown the
     * codec, or we will hang.  We proceed in this order so the EOS event gets
     * propagated downstream before we attempt to shut down the codec.  The
     * codec-shutdown process will block until all BufTab buffers have been
     * released, and downstream-elements may hang on to buffers until
     * they get the EOS.
     */
    Rendezvous_force(audenc1->waitOnEncodeDrain);

    /* Shut down the codec engine */
    if (gst_tiaudenc1_codec_stop(audenc1) < 0) {
        GST_ERROR("failed to stop codec\n");
        GST_ELEMENT_ERROR(audenc1, RESOURCE, FAILED,
        ("Failed to stop codec\n"), (NULL));
    }

    gst_object_unref(audenc1);

    GST_LOG("exit audio encode_thread (%d)\n", (int)threadRet);
    return threadRet;
}