Example #1
static gboolean gst_imx_ipu_blitter_allocate_internal_fill_frame(GstImxIpuBlitter *ipu_blitter)
{
	GstImxPhysMemory *phys_mem;

	/* Not using the DMA buffer pool for this, since that pool is configured
	 * for input frame sizes. Plus, the pool wouldn't yield any benefits here.
	 * fill_frame_width, fill_frame_height and format are file-scope
	 * constants that this snippet does not show. */
	ipu_blitter->fill_frame = gst_buffer_new_allocate(
		ipu_blitter->allocator,
		fill_frame_width * fill_frame_height * gst_imx_ipu_video_bpp(format),
		NULL
	);

	if (ipu_blitter->fill_frame == NULL)
	{
		GST_ERROR_OBJECT(ipu_blitter, "could not allocate internal fill frame");
		return FALSE;
	}

	phys_mem = (GstImxPhysMemory *)gst_buffer_peek_memory(ipu_blitter->fill_frame, 0);

	memset(&(ipu_blitter->priv->fill_task), 0, sizeof(struct ipu_task));
	ipu_blitter->priv->fill_task.input.crop.pos.x = 0;
	ipu_blitter->priv->fill_task.input.crop.pos.y = 0;
	ipu_blitter->priv->fill_task.input.crop.w = fill_frame_width;
	ipu_blitter->priv->fill_task.input.crop.h = fill_frame_height;
	ipu_blitter->priv->fill_task.input.width = fill_frame_width;
	ipu_blitter->priv->fill_task.input.height = fill_frame_height;
	ipu_blitter->priv->fill_task.input.paddr = (dma_addr_t)(phys_mem->phys_addr);
	ipu_blitter->priv->fill_task.input.format = gst_imx_ipu_blitter_get_v4l_format(format);

	return TRUE;
}
Example #2
static GstFlowReturn
gst_mpeg2dec_alloc_sized_buf (GstMpeg2dec * mpeg2dec, guint size,
    GstVideoCodecFrame * frame, GstBuffer ** buffer)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoCodecState *state;

  state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (mpeg2dec));

  if (!mpeg2dec->need_cropping || mpeg2dec->has_cropping) {
    /* we need parsed input, but that expectation might be slightly bogus,
     * so rather than giving up altogether, just flag it as a decoding error */
    if (frame->output_buffer) {
      gst_buffer_replace (&frame->output_buffer, NULL);
      GST_VIDEO_DECODER_ERROR (mpeg2dec, 1, STREAM, DECODE,
          ("decoding error"), ("Input not correctly parsed"), ret);
    }
    ret =
        gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (mpeg2dec),
        frame);
    *buffer = frame->output_buffer;
  } else {
    GstAllocationParams params = { 0, 15, 0, 0 };

    *buffer = gst_buffer_new_allocate (NULL, size, &params);
    gst_video_codec_frame_set_user_data (frame, *buffer,
        (GDestroyNotify) frame_user_data_destroy_notify);
  }

  gst_video_codec_state_unref (state);

  return ret;
}
Example #3
static GstBuffer *
gst_kate_enc_create_buffer (GstKateEnc * ke, kate_packet * kp,
    kate_int64_t granpos, GstClockTime timestamp, GstClockTime duration,
    gboolean header)
{
  GstBuffer *buffer;

  g_return_val_if_fail (kp != NULL, NULL);
  g_return_val_if_fail (kp->data != NULL, NULL);

  buffer = gst_buffer_new_allocate (NULL, kp->nbytes, NULL);
  if (G_UNLIKELY (!buffer)) {
    GST_WARNING_OBJECT (ke, "Failed to allocate buffer for %u bytes",
        (guint) kp->nbytes);
    return NULL;
  }

  gst_buffer_fill (buffer, 0, kp->data, kp->nbytes);

  /* same system as other Ogg codecs, as per ext/ogg/README:
     OFFSET_END is the granulepos
     OFFSET is its time representation
   */
  GST_BUFFER_OFFSET_END (buffer) = granpos;
  GST_BUFFER_OFFSET (buffer) = timestamp;
  GST_BUFFER_TIMESTAMP (buffer) = timestamp;
  GST_BUFFER_DURATION (buffer) = duration;

  return buffer;
}
Example #4
static GstBuffer*
gst_cenc_decrypt_key_id_from_content_id(GstCencDecrypt * self, const gchar *content_id)
{
  GstBuffer *kid;
  GstMapInfo map;
  gboolean failed=FALSE;
  guint i,pos;
  /*gchar *id_string;*/

  if(!g_str_has_prefix (content_id, "urn:marlin:kid:")){
    return NULL;
  }
  kid = gst_buffer_new_allocate (NULL, KID_LENGTH, NULL);
  gst_buffer_map (kid, &map, GST_MAP_READWRITE);
  for(i=0, pos=strlen("urn:marlin:kid:"); i<KID_LENGTH; ++i){
    guint b;
    if(!sscanf(&content_id[pos], "%02x", &b)){
      failed=TRUE;
      break;
    }
    map.data[i] = b;
    pos += 2;
  }
  /*id_string = gst_cenc_create_uuid_string (map.data);
  GST_DEBUG_OBJECT (self, "content_id=%s  key=%s", content_id, id_string);
  g_free (id_string);*/
  gst_buffer_unmap (kid, &map);
  if(failed){
    gst_buffer_unref (kid);
    kid=NULL;
  }
  return kid;
}
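A hypothetical call illustrating the input this parser expects (assuming KID_LENGTH is 16, i.e. 32 hex digits after the prefix):

  GstBuffer *kid = gst_cenc_decrypt_key_id_from_content_id (self,
      "urn:marlin:kid:0123456789abcdef0123456789abcdef");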
Example #5
File: main.c Project: joojler/jrepo
static void
cb_need_data (GstElement *appsrc,
	      guint       unused_size,
	      gpointer    user_data)
{
  static gboolean white = FALSE;
  static GstClockTime timestamp = 0;
  GstBuffer *buffer;
  guint size;
  GstFlowReturn ret;

  //g_print ("cb_need_data called!\n");
//  sleep (1);
  size = 385 * 288 * 2;

  buffer = gst_buffer_new_allocate (NULL, size, NULL);

  /* this makes the image black/white */
  gst_buffer_memset (buffer, 0, white ? 0xff : 0x0, size);
  
  white = !white;

//  GST_BUFFER_PTS (buffer) = timestamp;
//  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 2);

// timestamp += GST_BUFFER_DURATION (buffer);

  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);

  if (ret != GST_FLOW_OK) {
    /* something wrong, stop pushing */
    g_main_loop_quit (loop);
  }
}
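A minimal sketch (not from the original project) of how a callback like cb_need_data is typically wired to appsrc; the GRAY16_LE format is an assumption chosen to match the 2-bytes-per-pixel size computed above:

static void
setup_appsrc (GstElement * appsrc)
{
  /* appsrc needs to know the layout of the buffers it will receive */
  g_object_set (G_OBJECT (appsrc), "caps",
      gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, "GRAY16_LE",
          "width", G_TYPE_INT, 385,
          "height", G_TYPE_INT, 288,
          "framerate", GST_TYPE_FRACTION, 2, 1, NULL), NULL);
  gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");

  /* cb_need_data runs whenever appsrc wants another buffer */
  g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data), NULL);
}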
Example #6
/***********************************************************************************
 * Push functions
 ***********************************************************************************/
static inline GstBuffer* packet_to_buffer(AVPacket *packet)
{
    GstBuffer* result = gst_buffer_new_allocate(NULL, packet->size, NULL);
    if (result != NULL)
        gst_buffer_fill(result, 0, packet->data, packet->size);
    return result;
}
Example #7
File: main.c Project: joojler/jrepo
static gboolean push_data (CustomData *data) {
  static gboolean white = FALSE;
  static GstClockTime timestamp = 0;
  GstBuffer *buffer;
  guint size;
  GstFlowReturn ret;

//  g_print ("cb_need_data called!\n");

#if 1
  size = 385 * 288 * 2;

  buffer = gst_buffer_new_allocate (NULL, size, NULL);

  /* this makes the image black/white */
  gst_buffer_memset (buffer, 0, white ? 0xff : 0x0, size);
  
  white = !white;

  GST_BUFFER_PTS (buffer) = timestamp;
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 2);

  /* if DURATION were left unset, this would accumulate GST_CLOCK_TIME_NONE */
  timestamp += GST_BUFFER_DURATION (buffer);
  g_print("uint64 timestamp: %" PRIu64 "\n", timestamp);
  g_signal_emit_by_name (data->appsrc, "push-buffer", buffer, &ret);

  if (ret != GST_FLOW_OK) {
    /* something wrong, stop pushing */
    g_main_loop_quit (loop);
  }
#endif
  return TRUE;
}
Example #8
static GstFlowReturn
play_push_func (GstMidiParse * midiparse, GstMidiTrack * track,
    guint8 event, guint8 * data, guint length, gpointer user_data)
{
  GstBuffer *outbuf;
  GstMapInfo info;
  GstClockTime position;

  outbuf = gst_buffer_new_allocate (NULL, length + 1, NULL);

  gst_buffer_map (outbuf, &info, GST_MAP_WRITE);
  info.data[0] = event;
  if (length)
    memcpy (&info.data[1], data, length);
  gst_buffer_unmap (outbuf, &info);

  position = midiparse->segment.position;
  GST_BUFFER_PTS (outbuf) = position;
  GST_BUFFER_DTS (outbuf) = position;

  GST_DEBUG_OBJECT (midiparse, "pushing %" GST_TIME_FORMAT,
      GST_TIME_ARGS (position));

  if (midiparse->discont) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    midiparse->discont = FALSE;
  }

  return gst_pad_push (midiparse->srcpad, outbuf);
}
Example #9
static GstFlowReturn
gst_dashdemux_http_src_create (GstTestHTTPSrc * src,
    guint64 offset,
    guint length, GstBuffer ** retbuf, gpointer context, gpointer user_data)
{
  /*  const GstDashDemuxTestInputData *input =
     (const GstDashDemuxTestInputData *) user_data; */
  const GstDashDemuxTestInputData *input =
      (const GstDashDemuxTestInputData *) context;
  GstBuffer *buf;

  buf = gst_buffer_new_allocate (NULL, length, NULL);
  fail_if (buf == NULL, "Not enough memory to allocate buffer");

  if (input->payload) {
    gst_buffer_fill (buf, 0, input->payload + offset, length);
  } else {
    GstMapInfo info;
    guint pattern;

    /* fill with 32-bit little-endian words whose value equals their own
     * byte offset in the stream */
    pattern = offset - offset % sizeof (pattern);

    gst_buffer_map (buf, &info, GST_MAP_WRITE);
    for (guint64 i = 0; i < length; ++i) {
      gchar pattern_byte_to_write = (offset + i) % sizeof (pattern);
      if (pattern_byte_to_write == 0) {
        pattern = offset + i;
      }
      info.data[i] = (pattern >> (pattern_byte_to_write * 8)) & 0xFF;
    }
    gst_buffer_unmap (buf, &info);
  }
  *retbuf = buf;
  return GST_FLOW_OK;
}
Example #10
static gboolean
gst_validate_ssim_convert (GstValidateSsim * self, SSimConverterInfo * info,
    GstVideoFrame * frame, GstVideoFrame * converted_frame)
{
  gboolean res = TRUE;
  GstBuffer *outbuf = NULL;

  g_return_val_if_fail (info != NULL, FALSE);

  outbuf = gst_buffer_new_allocate (NULL, info->out_info.size, NULL);
  if (!gst_video_frame_map (converted_frame, &info->out_info, outbuf,
          GST_MAP_WRITE)) {
    GST_VALIDATE_REPORT (self, GENERAL_INPUT_ERROR,
        "Could not map output converted_frame");
    goto fail;
  }

  gst_video_converter_frame (info->converter, frame, converted_frame);

done:
  if (outbuf)
    gst_buffer_unref (outbuf);

  return res;

fail:
  res = FALSE;
  goto done;
}
Example #11
static GstFlowReturn
video_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstVideoBufferPool *vpool = GST_VIDEO_BUFFER_POOL_CAST (pool);
  GstVideoBufferPoolPrivate *priv = vpool->priv;
  GstVideoInfo *info;

  info = &priv->info;

  GST_DEBUG_OBJECT (pool, "alloc %" G_GSIZE_FORMAT, info->size);

  *buffer =
      gst_buffer_new_allocate (priv->allocator, info->size, &priv->params);
  if (*buffer == NULL)
    goto no_memory;

  if (priv->add_videometa) {
    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");

    gst_buffer_add_video_meta_full (*buffer, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_INFO_FORMAT (info),
        GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info),
        GST_VIDEO_INFO_N_PLANES (info), info->offset, info->stride);
  }

  return GST_FLOW_OK;

  /* ERROR */
no_memory:
  {
    GST_WARNING_OBJECT (pool, "can't create memory");
    return GST_FLOW_ERROR;
  }
}
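For context, a minimal sketch (an assumption, not from this file) of how a pool whose alloc vfunc looks like the above is configured and activated before buffers can be acquired:

static gboolean
configure_pool (GstBufferPool * pool, GstCaps * caps, guint size)
{
  GstStructure *config = gst_buffer_pool_get_config (pool);

  /* placeholder limits: preallocate 2 buffers, no maximum */
  gst_buffer_pool_config_set_params (config, caps, size, 2, 0);
  if (!gst_buffer_pool_set_config (pool, config))
    return FALSE;

  /* activation preallocates buffers through the alloc vfunc above */
  return gst_buffer_pool_set_active (pool, TRUE);
}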
Example #12
static void gst_imx_ipu_blitter_init_dummy_black_buffer(GstImxIpuBlitter *ipu_blitter)
{
	GstVideoInfo video_info;

	gst_video_info_init(&video_info);
	gst_video_info_set_format(&video_info, GST_VIDEO_FORMAT_RGBx, 64, 64);

	ipu_blitter->dummy_black_buffer = gst_buffer_new_allocate(ipu_blitter->allocator, GST_VIDEO_INFO_SIZE(&video_info), NULL);
	gst_buffer_memset(ipu_blitter->dummy_black_buffer, 0, 0, GST_VIDEO_INFO_SIZE(&video_info));

	gst_buffer_add_video_meta_full(
		ipu_blitter->dummy_black_buffer,
		GST_VIDEO_FRAME_FLAG_NONE,
		GST_VIDEO_INFO_FORMAT(&video_info),
		GST_VIDEO_INFO_WIDTH(&video_info),
		GST_VIDEO_INFO_HEIGHT(&video_info),
		GST_VIDEO_INFO_N_PLANES(&video_info),
		&(GST_VIDEO_INFO_PLANE_OFFSET(&video_info, 0)),
		&(GST_VIDEO_INFO_PLANE_STRIDE(&video_info, 0))
	);

	{
		GstImxPhysMemory *imx_phys_mem_mem = (GstImxPhysMemory *)gst_buffer_peek_memory(ipu_blitter->dummy_black_buffer, 0);
		GstImxPhysMemMeta *phys_mem_meta = (GstImxPhysMemMeta *)GST_IMX_PHYS_MEM_META_ADD(ipu_blitter->dummy_black_buffer);

		phys_mem_meta->phys_addr = imx_phys_mem_mem->phys_addr;
	}
}
Example #13
static gboolean
theora_enc_read_multipass_cache (GstTheoraEnc * enc)
{
    GstBuffer *cache_buf;
    const guint8 *cache_data;
    gsize bytes_read = 0;
    gssize bytes_consumed = 0;
    GIOStatus stat = G_IO_STATUS_NORMAL;
    gboolean done = FALSE;

    while (!done) {
        if (gst_adapter_available (enc->multipass_cache_adapter) == 0) {
            GstMapInfo minfo;

            cache_buf = gst_buffer_new_allocate (NULL, 512, NULL);

            gst_buffer_map (cache_buf, &minfo, GST_MAP_WRITE);

            stat = g_io_channel_read_chars (enc->multipass_cache_fd,
                                            (gchar *) minfo.data, minfo.size, &bytes_read, NULL);

            if (bytes_read <= 0) {
                gst_buffer_unmap (cache_buf, &minfo);
                gst_buffer_unref (cache_buf);
                break;
            } else {
                gst_buffer_unmap (cache_buf, &minfo);
                gst_buffer_resize (cache_buf, 0, bytes_read);

                gst_adapter_push (enc->multipass_cache_adapter, cache_buf);
            }
        }
        if (gst_adapter_available (enc->multipass_cache_adapter) == 0)
            break;

        bytes_read =
            MIN (gst_adapter_available (enc->multipass_cache_adapter), 512);

        cache_data = gst_adapter_map (enc->multipass_cache_adapter, bytes_read);

        bytes_consumed =
            th_encode_ctl (enc->encoder, TH_ENCCTL_2PASS_IN, (guint8 *) cache_data,
                           bytes_read);
        gst_adapter_unmap (enc->multipass_cache_adapter);

        done = bytes_consumed <= 0;
        if (bytes_consumed > 0)
            gst_adapter_flush (enc->multipass_cache_adapter, bytes_consumed);
    }

    if (stat == G_IO_STATUS_ERROR || (stat == G_IO_STATUS_EOF && bytes_read == 0)
            || bytes_consumed < 0) {
        GST_ELEMENT_ERROR (enc, RESOURCE, READ, (NULL),
                           ("Failed to read multipass cache file"));
        return FALSE;
    }
    return TRUE;
}
Example #14
static void* gst_imx_vpu_encoder_base_acquire_output_buffer(void *context, size_t size, void **acquired_handle)
{
	GstImxVpuEncoderBase *vpu_encoder_base = (GstImxVpuEncoderBase *)(context);
	GstBuffer *buffer = gst_buffer_new_allocate(NULL, size, NULL);
	vpu_encoder_base->output_buffer = buffer;
	gst_buffer_map(buffer, &(vpu_encoder_base->output_buffer_map_info), GST_MAP_WRITE);
	GST_LOG_OBJECT(vpu_encoder_base, "acquired output buffer %p with %zu bytes", (gpointer)buffer, size);
	*acquired_handle = buffer;
	return vpu_encoder_base->output_buffer_map_info.data;
}
Example #15
static void
gst_fake_src_alloc_parent (GstFakeSrc * src)
{
  GstBuffer *buf;

  buf = gst_buffer_new_allocate (NULL, src->parentsize, NULL);

  src->parent = buf;
  src->parentoffset = 0;
}
Example #16
void ofxGstRTPServer::newFrameDepth(ofShortPixels & pixels, GstClockTime timestamp, float pixel_size, float distance){
	//unsigned long long time = ofGetElapsedTimeMicros();

	// here we push new depth frames in the pipeline, it's important
	// to timestamp them properly so gstreamer can sync them with the
	// audio.

	if(!appSrcDepth) return;

	GstClockTime now = timestamp;
	if(!depthAutoTimestamp){
		if(now==GST_CLOCK_TIME_NONE){
			now = getTimeStamp();
		}

		if(firstDepthFrame){
			prevTimestampDepth = now;
			firstDepthFrame = false;
			return;
		}
	}

	ofxDepthCompressedFrame frame = depthCompressor.newFrame(pixels,pixel_size,distance);
	GstBuffer * buffer = gst_buffer_new_allocate(NULL,frame.compressedData().size()*sizeof(short),NULL);
	GstMapInfo mapinfo;
	gst_buffer_map(buffer,&mapinfo,GST_MAP_WRITE);
	memcpy(mapinfo.data,&frame.compressedData()[0],frame.compressedData().size()*sizeof(short));
	gst_buffer_unmap(buffer,&mapinfo);
	// timestamp the buffer, right now we are using:
	// timestamp = current pipeline time - base time
	// duration = timestamp - previousTimeStamp
	// the duration is actually the duration of the previous frame
	// but should be accurate enough

	if(!depthAutoTimestamp){
		GST_BUFFER_OFFSET(buffer) = numFrameDepth++;
		GST_BUFFER_OFFSET_END(buffer) = numFrameDepth;
		GST_BUFFER_DTS (buffer) = now;
		GST_BUFFER_PTS (buffer) = now;
		GST_BUFFER_DURATION(buffer) = now-prevTimestampDepth;
		prevTimestampDepth = now;
	}

	if(sendDepthKeyFrame){
		//emitDepthKeyFrame();
	}

	// finally push the buffer into the pipeline through the appsrc element
	GstFlowReturn flow_return = gst_app_src_push_buffer((GstAppSrc*)appSrcDepth, buffer);
	if (flow_return != GST_FLOW_OK) {
		ofLogError() << "error pushing depth buffer: flow_return was " << flow_return;
	}
	//cout << "sending depth buffer with " << pixels.getWidth() << "," << pixels.getHeight() << " csize: " << frame.compressedData().size() << endl;
	//cout << ofGetElapsedTimeMicros() - time << endl;
}
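The getTimeStamp() helper is not shown; a plausible sketch, and only an assumption, of the "current pipeline time - base time" computation described in the comment above:

static GstClockTime
get_running_time (GstElement * pipeline)
{
  GstClock *clock = gst_element_get_clock (pipeline);
  GstClockTime now;

  if (clock == NULL)
    return GST_CLOCK_TIME_NONE;

  /* running time = absolute clock time minus the pipeline's base time */
  now = gst_clock_get_time (clock) - gst_element_get_base_time (pipeline);
  gst_object_unref (clock);
  return now;
}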
Example #17
static GstFlowReturn
default_alloc_buffer (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstBufferPoolPrivate *priv = pool->priv;

  *buffer =
      gst_buffer_new_allocate (priv->allocator, priv->size, &priv->params);

  return GST_FLOW_OK;
}
Example #18
static GstBufferList *
create_buffer_list (guint * data_size)
{
  GstBufferList *list;
  GstBuffer *rtp_buffer;
  GstBuffer *data_buffer;

  list = gst_buffer_list_new ();

  /*** First group, i.e. first packet. **/

  /* Create the RTP header buffer */
  rtp_buffer = gst_buffer_new_allocate (NULL, RTP_HEADER_SIZE, NULL);
  gst_buffer_memset (rtp_buffer, 0, 0, RTP_HEADER_SIZE);

  /* Create the buffer that holds the payload */
  data_buffer = gst_buffer_new_allocate (NULL, RTP_PAYLOAD_SIZE, NULL);
  gst_buffer_memset (data_buffer, 0, 0, RTP_PAYLOAD_SIZE);

  /* Create a new group to hold the rtp header and the payload */
  gst_buffer_list_add (list, gst_buffer_append (rtp_buffer, data_buffer));

  /***  Second group, i.e. second packet. ***/

  /* Create the RTP header buffer */
  rtp_buffer = gst_buffer_new_allocate (NULL, RTP_HEADER_SIZE, NULL);
  gst_buffer_memset (rtp_buffer, 0, 0, RTP_HEADER_SIZE);

  /* Create the buffer that holds the payload */
  data_buffer = gst_buffer_new_allocate (NULL, RTP_PAYLOAD_SIZE, NULL);
  gst_buffer_memset (data_buffer, 0, 0, RTP_PAYLOAD_SIZE);

  /* Create a new group to hold the rtp header and the payload */
  gst_buffer_list_add (list, gst_buffer_append (rtp_buffer, data_buffer));

  /* Calculate the size of the data */
  *data_size = 2 * RTP_HEADER_SIZE + 2 * RTP_PAYLOAD_SIZE;

  return list;
}
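A short usage sketch (srcpad is a placeholder) showing how a list built by create_buffer_list() is pushed downstream in a single call:

static GstFlowReturn
push_packets (GstPad * srcpad)
{
  guint data_size;
  GstBufferList *list = create_buffer_list (&data_size);

  /* gst_pad_push_list() takes ownership of the list */
  return gst_pad_push_list (srcpad, list);
}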
Example #19
static GstBuffer *
gst_x265_enc_get_header_buffer (GstX265Enc * encoder)
{
  x265_nal *nal;
  guint32 i_nal, i, offset;
  gint32 vps_idx, sps_idx, pps_idx;
  int header_return;
  GstBuffer *buf;

  header_return = x265_encoder_headers (encoder->x265enc, &nal, &i_nal);
  if (header_return < 0) {
    GST_ELEMENT_ERROR (encoder, STREAM, ENCODE, ("Encode x265 header failed."),
        ("x265_encoder_headers return code=%d", header_return));
    return NULL;
  }

  GST_DEBUG_OBJECT (encoder, "%d nal units in header", i_nal);

  /* x265_encoder_headers() also returns non-header NAL units. The useful
   * headers are sequential (VPS, SPS and PPS), so we look for those NAL
   * units and copy only these three as the header */

  vps_idx = sps_idx = pps_idx = -1;
  for (i = 0; i < i_nal; i++) {
    if (nal[i].type == 32) {        /* HEVC NAL unit type 32: VPS */
      vps_idx = i;
    } else if (nal[i].type == 33) { /* 33: SPS */
      sps_idx = i;
    } else if (nal[i].type == 34) { /* 34: PPS */
      pps_idx = i;
    }
  }

  if (vps_idx == -1 || sps_idx == -1 || pps_idx == -1) {
    GST_ELEMENT_ERROR (encoder, STREAM, ENCODE, ("Encode x265 header failed."),
        ("x265_encoder_headers did not return VPS, SPS and PPS"));
    return NULL;
  }

  offset = 0;
  buf =
      gst_buffer_new_allocate (NULL,
      nal[vps_idx].sizeBytes + nal[sps_idx].sizeBytes + nal[pps_idx].sizeBytes,
      NULL);
  gst_buffer_fill (buf, offset, nal[vps_idx].payload, nal[vps_idx].sizeBytes);
  offset += nal[vps_idx].sizeBytes;
  gst_buffer_fill (buf, offset, nal[sps_idx].payload, nal[sps_idx].sizeBytes);
  offset += nal[sps_idx].sizeBytes;
  gst_buffer_fill (buf, offset, nal[pps_idx].payload, nal[pps_idx].sizeBytes);

  return buf;
}
Example #20
static GstBuffer *
gst_dtmf_src_generate_tone (GstDTMFSrcEvent * event, DTMF_KEY key,
    float duration, gint sample_rate)
{
  GstBuffer *buffer;
  GstMapInfo map;
  gint16 *p;
  gint tone_size;
  double i = 0;
  double amplitude, f1, f2;
  double volume_factor;
  static GstAllocationParams params = { 0, 1, 0, 0, };

  /* Create a buffer for the tone */
  tone_size = ((duration / 1000) * sample_rate * SAMPLE_SIZE * CHANNELS) / 8;

  buffer = gst_buffer_new_allocate (NULL, tone_size, &params);

  gst_buffer_map (buffer, &map, GST_MAP_READWRITE);
  p = (gint16 *) map.data;

  volume_factor = pow (10, (-event->volume) / 20);

  /*
   * For each sample point we calculate 'x' as the
   * the amplitude value.
   */
  for (i = 0; i < (tone_size / (SAMPLE_SIZE / 8)); i++) {
    /*
     * We add the fundamental frequencies together.
     */
    f1 = sin (2 * M_PI * key.low_frequency * (event->sample / sample_rate));
    f2 = sin (2 * M_PI * key.high_frequency * (event->sample / sample_rate));

    amplitude = (f1 + f2) / 2;

    /* Adjust the volume */
    amplitude *= volume_factor;

    /* Make the [-1:1] interval into a [-32767:32767] interval */
    amplitude *= 32767;

    /* Store it in the data buffer */
    *(p++) = (gint16) amplitude;

    (event->sample)++;
  }

  gst_buffer_unmap (buffer, &map);

  return buffer;
}
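As a worked check of the size computation above, assuming SAMPLE_SIZE is 16 (bits) and CHANNELS is 1: a 250 ms tone at an 8000 Hz sample rate gives tone_size = (250 / 1000) * 8000 * 16 * 1 / 8 = 4000 bytes, i.e. 2000 16-bit samples.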
Example #21
static GstBuffer *
gst_audio_aggregator_create_output_buffer (GstAudioAggregator * aagg,
    guint num_frames)
{
  GstBuffer *outbuf = gst_buffer_new_allocate (NULL, num_frames *
      GST_AUDIO_INFO_BPF (&aagg->info), NULL);
  GstMapInfo outmap;

  gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
  gst_audio_format_fill_silence (aagg->info.finfo, outmap.data, outmap.size);
  gst_buffer_unmap (outbuf, &outmap);

  return outbuf;
}
Example #22
static GstBuffer* get_codec_extradata(AVCodecContext *codec)
{
    GstBuffer *codec_data = NULL;
    if (codec->extradata)
    {
        codec_data = gst_buffer_new_allocate(NULL, codec->extradata_size, NULL);
        if (codec_data != NULL)
        {
            gst_buffer_fill(codec_data, 0, codec->extradata, codec->extradata_size);
        }
    }

    return codec_data;
}
Example #23
static void
check_correct_buffer (guint8 * src_data, guint src_size, guint8 * dst_data,
    guint dst_size)
{
  GstBuffer *buffer = gst_buffer_new_allocate (NULL, src_size, NULL);
  GstBuffer *newBuffer;
  GstElement *avisubtitle = setup_avisubtitle ();
  GstEvent *event;

  fail_unless (g_list_length (buffers) == 0, "Buffers list needs to be empty");
  gst_buffer_fill (buffer, 0, src_data, src_size);
  fail_unless (gst_element_set_state (avisubtitle,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
      "could not set to playing");
  ASSERT_BUFFER_REFCOUNT (buffer, "inbuffer", 1);
  event = gst_event_new_seek (1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
      GST_SEEK_TYPE_SET, 2 * GST_SECOND, GST_SEEK_TYPE_SET, 5 * GST_SECOND);
  fail_unless (gst_element_send_event (avisubtitle, event) == FALSE,
      "Seeking is not possible when there is no buffer yet");
  fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK,
      "not accepted a correct buffer");
  /* we gave away our reference to the buffer, don't assume anything */
  buffer = NULL;
  /* a new buffer is created in the list */
  fail_unless (g_list_length (buffers) == 1,
      "No new buffer in the buffers list");
  event = gst_event_new_seek (1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
      GST_SEEK_TYPE_SET, 2 * GST_SECOND, GST_SEEK_TYPE_SET, 5 * GST_SECOND);
  fail_unless (gst_element_send_event (avisubtitle, event) == TRUE,
      "seeking should be working now");
  fail_unless (g_list_length (buffers) == 2,
      "After seeking we need another buffer in the buffers");
  newBuffer = GST_BUFFER (buffers->data);
  buffers = g_list_remove (buffers, newBuffer);
  fail_unless (g_list_length (buffers) == 1,
      "Buffers list should have one buffer left");
  fail_unless (gst_buffer_get_size (newBuffer) == dst_size,
      "size of the new buffer is wrong ( %" G_GSIZE_FORMAT " != %u)",
      gst_buffer_get_size (newBuffer), dst_size);
  fail_unless (gst_buffer_memcmp (newBuffer, 0, dst_data, dst_size) == 0,
      "data of the buffer is not correct");
  gst_buffer_unref (newBuffer);
  /* free the buffer from seeking */
  gst_buffer_unref (GST_BUFFER (buffers->data));
  buffers = g_list_remove (buffers, buffers->data);
  fail_unless (gst_element_set_state (avisubtitle,
          GST_STATE_NULL) == GST_STATE_CHANGE_SUCCESS, "could not set to null");
  cleanup_avisubtitle (avisubtitle);
}
Example #24
static GstFlowReturn
gst_hlsdemux_test_src_create (GstTestHTTPSrc * src,
    guint64 offset,
    guint length, GstBuffer ** retbuf, gpointer context, gpointer user_data)
{
  GstBuffer *buf;
  /*  const GstHlsDemuxTestCase *test_case = (const GstHlsDemuxTestCase *) user_data; */
  GstHlsDemuxTestInputData *input = (GstHlsDemuxTestInputData *) context;

  buf = gst_buffer_new_allocate (NULL, length, NULL);
  fail_if (buf == NULL, "Not enough memory to allocate buffer");
  fail_if (input->payload == NULL);
  gst_buffer_fill (buf, 0, input->payload + offset, length);
  *retbuf = buf;
  return GST_FLOW_OK;
}
Example #25
/* Update the buffer used to draw black borders. When we have viewporter
 * support, this is a scaled-up 1x1 image; without it, we need a black image
 * the size of the rendering area. */
static void
gst_wl_window_update_borders (GstWlWindow * window)
{
  GstVideoFormat format;
  GstVideoInfo info;
  gint width, height;
  GstBuffer *buf;
  struct wl_buffer *wlbuf;
  GstWlBuffer *gwlbuf;
  GstAllocator *alloc;

  if (window->no_border_update)
    return;

  if (window->display->viewporter) {
    width = height = 1;
    window->no_border_update = TRUE;
  } else {
    width = window->render_rectangle.w;
    height = window->render_rectangle.h;
  }

  /* we want WL_SHM_FORMAT_XRGB8888 */
#if G_BYTE_ORDER == G_BIG_ENDIAN
  format = GST_VIDEO_FORMAT_xRGB;
#else
  format = GST_VIDEO_FORMAT_BGRx;
#endif

  /* draw the area_subsurface */
  gst_video_info_set_format (&info, format, width, height);

  alloc = gst_wl_shm_allocator_get ();

  buf = gst_buffer_new_allocate (alloc, info.size, NULL);
  gst_buffer_memset (buf, 0, 0, info.size);
  wlbuf =
      gst_wl_shm_memory_construct_wl_buffer (gst_buffer_peek_memory (buf, 0),
      window->display, &info);
  gwlbuf = gst_buffer_add_wl_buffer (buf, wlbuf, window->display);
  gst_wl_buffer_attach (gwlbuf, window->area_surface_wrapper);

  /* at this point, the GstWlBuffer keeps the buffer
   * alive and will free it on wl_buffer::release */
  gst_buffer_unref (buf);
  g_object_unref (alloc);
}
Example #26
static GstBuffer *
gst_core_media_buffer_new_from_buffer (GstBuffer * buf, GstVideoInfo * info)
{
  gboolean ret;
  GstBuffer *copy_buf;
  GstVideoFrame dest, src;
  GstAllocator *allocator;

  allocator = gst_allocator_find (GST_ALLOCATOR_SYSMEM);
  if (!allocator) {
    GST_ERROR ("Could not find SYSMEM allocator");
    return NULL;
  }

  copy_buf = gst_buffer_new_allocate (allocator, info->size, NULL);

  gst_object_unref (allocator);

  if (!gst_video_frame_map (&dest, info, copy_buf, GST_MAP_WRITE)) {
    GST_ERROR ("Could not map destination frame");
    goto error;
  }

  if (!gst_video_frame_map (&src, info, buf, GST_MAP_READ)) {
    GST_ERROR ("Could not map source frame");
    gst_video_frame_unmap (&dest);
    goto error;
  }

  ret = gst_video_frame_copy (&dest, &src);

  gst_video_frame_unmap (&dest);
  gst_video_frame_unmap (&src);

  if (!ret) {
    GST_ERROR ("Could not copy frame");
    goto error;
  }

  return copy_buf;

error:
  if (copy_buf) {
    gst_buffer_unref (copy_buf);
  }
  return NULL;
}
Example #27
void GStreamerReader::CopyIntoImageBuffer(GstBuffer* aBuffer,
                                          GstBuffer** aOutBuffer,
                                          nsRefPtr<PlanarYCbCrImage> &image)
{
  *aOutBuffer = gst_buffer_new_allocate(mAllocator, gst_buffer_get_size(aBuffer), nullptr);
  GstMemory *mem = gst_buffer_peek_memory(*aOutBuffer, 0);
  GstMapInfo map_info;
  gst_memory_map(mem, &map_info, GST_MAP_WRITE);
  gst_buffer_extract(aBuffer, 0, map_info.data, gst_buffer_get_size(aBuffer));
  gst_memory_unmap(mem, &map_info);

  /* create a new gst buffer with the newly created memory and copy the
   * metadata over from the incoming buffer */
  gst_buffer_copy_into(*aOutBuffer, aBuffer,
      (GstBufferCopyFlags)(GST_BUFFER_COPY_METADATA), 0, -1);
  image = GetImageFromBuffer(*aOutBuffer);
}
Example #28
/* called when a new media pipeline is constructed. We can query the
 * pipeline and configure our appsrc */
void ReStream::media_configure_feature (GstRTSPMediaFactory * factory, GstRTSPMedia * media, gpointer user_data)
{
	try{
		GstElement *element, *appsrc;
		struct MyContext *ctx  = (struct MyContext *)user_data;

		element = gst_rtsp_media_get_element (media);

		appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "mysrc");

		gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");
		g_object_set (G_OBJECT (appsrc), "caps",
				gst_caps_new_simple ("video/x-raw",
						"format", G_TYPE_STRING, "BGR",
						"width", G_TYPE_INT, ctx->width,
						"height", G_TYPE_INT,ctx->height,
						"framerate", GST_TYPE_FRACTION, FPS, 1, NULL),NULL );


		//ctx->white = FALSE;
		ctx->timestamp = 0;
		ctx->buffer =  gst_buffer_new_allocate (NULL, ctx->height * ctx->width*3 , NULL);
		gst_buffer_map (ctx->buffer, &ctx->map, GST_MAP_WRITE);

		/* make sure the data is freed when the media is gone */
		//g_object_set_data_full (G_OBJECT (media), "my-extra-data", ctx, (GDestroyNotify) g_free);

		g_signal_connect (appsrc, "need-data", (GCallback)need_data_feature, ctx);
		g_signal_connect (media, "unprepared", (GCallback)unprepared_feature, ctx);
		if ( ISDEBUG )
			cout<<"media prepared_feature\n";
		gst_object_unref (appsrc);
		gst_object_unref (element);
	}
	catch(Exception &e){
		CommonClass localcommclass;
		localcommclass.PrintException("ReStream","CV::media_configure_feature",e);
	}
	catch(exception &e){
		CommonClass localcommclass;
		localcommclass.PrintException("ReStream","STD::media_configure_feature",e);
	}
}
Example #29
static GstFlowReturn
produce_samples (GstFluidDec * fluiddec, GstClockTime pts, guint64 sample)
{
  GstClockTime duration, timestamp;
  guint64 samples, offset;
  GstMapInfo info;
  GstBuffer *outbuf;

  samples = sample - fluiddec->last_sample;
  duration = pts - fluiddec->last_pts;
  offset = fluiddec->last_sample;
  timestamp = fluiddec->last_pts;

  fluiddec->last_pts = pts;
  fluiddec->last_sample = sample;

  if (samples == 0)
    return GST_FLOW_OK;

  GST_DEBUG_OBJECT (fluiddec, "duration %" GST_TIME_FORMAT
      ", samples %" G_GUINT64_FORMAT, GST_TIME_ARGS (duration), samples);

  outbuf = gst_buffer_new_allocate (NULL, samples * FLUID_DEC_BPS, NULL);

  gst_buffer_map (outbuf, &info, GST_MAP_WRITE);
  /* write interleaved stereo into the same mapped buffer: left samples at
   * offset 0, right samples at offset 1, both with a stride of 2 */
  fluid_synth_write_float (fluiddec->synth, samples, info.data, 0, 2,
      info.data, 1, 2);
  gst_buffer_unmap (outbuf, &info);

  GST_BUFFER_DTS (outbuf) = timestamp;
  GST_BUFFER_PTS (outbuf) = timestamp;
  GST_BUFFER_DURATION (outbuf) = duration;
  GST_BUFFER_OFFSET (outbuf) = offset;
  GST_BUFFER_OFFSET_END (outbuf) = offset + samples;

  if (fluiddec->discont) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    fluiddec->discont = FALSE;
  }

  return gst_pad_push (fluiddec->srcpad, outbuf);
}
Example #30
static GstFlowReturn
gst_mpg123_audio_dec_push_decoded_bytes (GstMpg123AudioDec * mpg123_decoder,
    unsigned char const *decoded_bytes, size_t const num_decoded_bytes)
{
  GstBuffer *output_buffer;
  GstFlowReturn alloc_error;
  GstAudioDecoder *dec;

  output_buffer = NULL;
  dec = GST_AUDIO_DECODER (mpg123_decoder);

  if ((num_decoded_bytes == 0) || (decoded_bytes == NULL)) {
    /* This occurs in the first few frames, which do not carry data; once MPG123_AUDIO_DEC_NEW_FORMAT is received, the empty frames stop occurring */
    GST_TRACE_OBJECT (mpg123_decoder,
        "Nothing was decoded -> no output buffer to push");
    return GST_FLOW_OK;
  }

  output_buffer = gst_buffer_new_allocate (NULL, num_decoded_bytes, NULL);
  alloc_error = (output_buffer == NULL) ? GST_FLOW_ERROR : GST_FLOW_OK;

  if (alloc_error != GST_FLOW_OK) {
    /* This is necessary to advance playback in time, even when nothing was decoded. */
    return gst_audio_decoder_finish_frame (dec, NULL, 1);
  } else {
    GstMapInfo info;

    if (gst_buffer_map (output_buffer, &info, GST_MAP_WRITE)) {
      if (info.size != num_decoded_bytes)
        GST_ERROR_OBJECT (mpg123_decoder,
            "Mapped memory region has size %" G_GSIZE_FORMAT
            " instead of expected size %" G_GSIZE_FORMAT,
            info.size, (gsize) num_decoded_bytes);
      else
        memcpy (info.data, decoded_bytes, num_decoded_bytes);

      gst_buffer_unmap (output_buffer, &info);
    } else
      GST_ERROR_OBJECT (mpg123_decoder, "Could not map buffer");

    return gst_audio_decoder_finish_frame (dec, output_buffer, 1);
  }
}