Code example #1
static GstMemory *
gst_shm_sink_allocator_alloc (GstAllocator * allocator, gsize size,
    GstAllocationParams * params)
{
  GstShmSinkAllocator *self = GST_SHM_SINK_ALLOCATOR (allocator);
  GstMemory *memory = NULL;

  GST_OBJECT_LOCK (self->sink);
  memory = gst_shm_sink_allocator_alloc_locked (self, size, params);
  GST_OBJECT_UNLOCK (self->sink);

  if (!memory) {
    memory = gst_allocator_alloc (NULL, size, params);
    GST_LOG_OBJECT (self,
        "Not enough shared memory for GstMemory of %" G_GSIZE_FORMAT
        "bytes, allocating using standard allocator", size);
  }

  return memory;
}
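
For reference, here is a minimal sketch (not taken from any of the projects on this page) of the usual lifecycle of memory obtained from gst_allocator_alloc(): passing NULL selects the default allocator, which is the same fallback used in example #1, and the returned GstMemory must be mapped before its data can be touched. The helper name alloc_and_zero is hypothetical.

#include <string.h>
#include <gst/gst.h>

/* Hypothetical helper: allocate `size` bytes from the default allocator,
 * zero them, and release the memory again. */
static gboolean
alloc_and_zero (gsize size)
{
  GstMemory *mem;
  GstMapInfo map;

  mem = gst_allocator_alloc (NULL, size, NULL); /* NULL = default allocator */
  if (mem == NULL)
    return FALSE;

  if (!gst_memory_map (mem, &map, GST_MAP_WRITE)) {
    gst_memory_unref (mem);
    return FALSE;
  }
  memset (map.data, 0, map.size); /* use the mapped region */
  gst_memory_unmap (mem, &map);

  gst_memory_unref (mem); /* drop our reference */
  return TRUE;
}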
Code example #2
File: framebuffers.c  Project: Radbug/gst-fsl
static gboolean gst_fsl_vpu_framebuffers_configure(GstFslVpuFramebuffers *framebuffers, GstFslVpuFramebufferParams *params, GstAllocator *allocator)
{
	int alignment;
	unsigned char *phys_ptr, *virt_ptr;
	guint i;

	g_assert(GST_IS_FSL_PHYS_MEM_ALLOCATOR(allocator));

	framebuffers->num_reserve_framebuffers = params->min_framebuffer_count;
	framebuffers->num_framebuffers = MAX((guint)(params->min_framebuffer_count), (guint)10) + framebuffers->num_reserve_framebuffers;
	framebuffers->num_available_framebuffers = framebuffers->num_framebuffers - framebuffers->num_reserve_framebuffers;
	framebuffers->framebuffers = (VpuFrameBuffer *)g_slice_alloc(sizeof(VpuFrameBuffer) * framebuffers->num_framebuffers);

	framebuffers->allocator = allocator;

	framebuffers->y_stride = ALIGN_VAL_TO(params->pic_width, FRAME_ALIGN);
	if (params->interlace)
		framebuffers->y_size = framebuffers->y_stride * ALIGN_VAL_TO(params->pic_height, (2 * FRAME_ALIGN));
	else
		framebuffers->y_size = framebuffers->y_stride * ALIGN_VAL_TO(params->pic_height, FRAME_ALIGN);

	switch (params->mjpeg_source_format)
	{
		case 0: /* I420 (4:2:0) */
			framebuffers->uv_stride = framebuffers->y_stride / 2;
			framebuffers->u_size = framebuffers->v_size = framebuffers->mv_size = framebuffers->y_size / 4;
			break;
		case 1: /* Y42B (4:2:2 horizontal) */
			framebuffers->uv_stride = framebuffers->y_stride / 2;
			framebuffers->u_size = framebuffers->v_size = framebuffers->mv_size = framebuffers->y_size / 2;
			break;
		case 3: /* Y444 (4:4:4) */
			framebuffers->uv_stride = framebuffers->y_stride;
			framebuffers->u_size = framebuffers->v_size = framebuffers->mv_size = framebuffers->y_size;
			break;
		default:
			g_assert_not_reached();
	}

	alignment = params->address_alignment;
	if (alignment > 1)
	{
		framebuffers->y_size = ALIGN_VAL_TO(framebuffers->y_size, alignment);
		framebuffers->u_size = ALIGN_VAL_TO(framebuffers->u_size, alignment);
		framebuffers->v_size = ALIGN_VAL_TO(framebuffers->v_size, alignment);
		framebuffers->mv_size = ALIGN_VAL_TO(framebuffers->mv_size, alignment);
	}

	framebuffers->pic_width = params->pic_width;
	framebuffers->pic_height = params->pic_height;

	framebuffers->total_size = framebuffers->y_size + framebuffers->u_size + framebuffers->v_size + framebuffers->mv_size + alignment;
	GST_DEBUG_OBJECT(framebuffers, "num framebuffers:  total: %u  reserved: %u  available: %d", framebuffers->num_framebuffers, framebuffers->num_reserve_framebuffers, framebuffers->num_available_framebuffers);
	GST_DEBUG_OBJECT(framebuffers, "framebuffer memory block size:  total: %d  Y: %d  U: %d  V: %d  Mv:  %d  alignment: %d", framebuffers->total_size, framebuffers->y_size, framebuffers->u_size, framebuffers->v_size, framebuffers->mv_size, alignment);

	for (i = 0; i < framebuffers->num_framebuffers; ++i)
	{
		GstFslPhysMemory *memory;
		VpuFrameBuffer *framebuffer;

		framebuffer = &(framebuffers->framebuffers[i]);

		memory = (GstFslPhysMemory *)gst_allocator_alloc(allocator, framebuffers->total_size, NULL);
		if (memory == NULL)
			return FALSE;
		gst_fsl_vpu_append_phys_mem_block(memory, &(framebuffers->fb_mem_blocks));

		phys_ptr = (unsigned char*)(memory->phys_addr);
		virt_ptr = (unsigned char*)(memory->mapped_virt_addr); /* TODO */

		if (alignment > 1)
		{
			phys_ptr = (unsigned char*)ALIGN_VAL_TO(phys_ptr, alignment);
			virt_ptr = (unsigned char*)ALIGN_VAL_TO(virt_ptr, alignment);
		}

		framebuffer->nStrideY = framebuffers->y_stride;
		framebuffer->nStrideC = framebuffers->uv_stride;

		/* fill phys addr */
		framebuffer->pbufY     = phys_ptr;
		framebuffer->pbufCb    = phys_ptr + framebuffers->y_size;
		framebuffer->pbufCr    = phys_ptr + framebuffers->y_size + framebuffers->u_size;
		framebuffer->pbufMvCol = phys_ptr + framebuffers->y_size + framebuffers->u_size + framebuffers->v_size;

		/* fill virt addr */
		framebuffer->pbufVirtY     = virt_ptr;
		framebuffer->pbufVirtCb    = virt_ptr + framebuffers->y_size;
		framebuffer->pbufVirtCr    = virt_ptr + framebuffers->y_size + framebuffers->u_size;
		framebuffer->pbufVirtMvCol = virt_ptr + framebuffers->y_size + framebuffers->u_size + framebuffers->v_size;

		framebuffer->pbufY_tilebot = 0;
		framebuffer->pbufCb_tilebot = 0;
		framebuffer->pbufVirtY_tilebot = 0;
		framebuffer->pbufVirtCb_tilebot = 0;
	}

	return TRUE;
}
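
ALIGN_VAL_TO and FRAME_ALIGN above are project-local to gst-fsl and not shown in this excerpt. A typical round-up-to-multiple definition, given here only as an assumption about what the macro does (the example applies it to both sizes and raw pointers, hence the guintptr cast), would be:

/* Hypothetical definition; the actual gst-fsl macro may differ. Rounds
 * `val` up to the nearest multiple of `align`. */
#define ALIGN_VAL_TO(val, align) \
	((((guintptr)(val) + (align) - 1) / (align)) * (align))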
Code example #3
File: rojvar.c  Project: dsp-box/gROJ
GstMemory *
groj_config_new(void)
{
  GstMemory *cmem = gst_allocator_alloc (NULL, sizeof(RojConfig), NULL);
  return cmem;
}
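
A hedged sketch of how a caller might use the returned memory; RojConfig's fields are not shown in the gROJ source above, so the zero-initialization here is only an illustration:

GstMemory *cmem = groj_config_new ();
GstMapInfo map;

if (gst_memory_map (cmem, &map, GST_MAP_WRITE)) {
  RojConfig *cfg = (RojConfig *) map.data;

  memset (cfg, 0, sizeof (RojConfig)); /* hypothetical initialization */
  gst_memory_unmap (cmem, &map);
}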
Code example #4
static GstFlowReturn
gst_openjpeg_enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (encoder);
  GstFlowReturn ret = GST_FLOW_OK;
#ifdef HAVE_OPENJPEG_1
  opj_cinfo_t *enc;
  GstMapInfo map;
  guint length;
  opj_cio_t *io;
#else
  opj_codec_t *enc;
  opj_stream_t *stream;
  MemStream mstream;
#endif
  opj_image_t *image;
  GstVideoFrame vframe;

  GST_DEBUG_OBJECT (self, "Handling frame");

  enc = opj_create_compress (self->codec_format);
  if (!enc)
    goto initialization_error;

#ifdef HAVE_OPENJPEG_1
  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    opj_event_mgr_t callbacks;

    callbacks.error_handler = gst_openjpeg_enc_opj_error;
    callbacks.warning_handler = gst_openjpeg_enc_opj_warning;
    callbacks.info_handler = gst_openjpeg_enc_opj_info;
    opj_set_event_mgr ((opj_common_ptr) enc, &callbacks, self);
  } else {
    opj_set_event_mgr ((opj_common_ptr) enc, NULL, NULL);
  }
#else
  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    opj_set_info_handler (enc, gst_openjpeg_enc_opj_info, self);
    opj_set_warning_handler (enc, gst_openjpeg_enc_opj_warning, self);
    opj_set_error_handler (enc, gst_openjpeg_enc_opj_error, self);
  } else {
    opj_set_info_handler (enc, NULL, NULL);
    opj_set_warning_handler (enc, NULL, NULL);
    opj_set_error_handler (enc, NULL, NULL);
  }
#endif

  if (!gst_video_frame_map (&vframe, &self->input_state->info,
          frame->input_buffer, GST_MAP_READ))
    goto map_read_error;

  image = gst_openjpeg_enc_fill_image (self, &vframe);
  if (!image)
    goto fill_image_error;
  gst_video_frame_unmap (&vframe);

  opj_setup_encoder (enc, &self->params, image);

#ifdef HAVE_OPENJPEG_1
  io = opj_cio_open ((opj_common_ptr) enc, NULL, 0);
  if (!io)
    goto open_error;

  if (!opj_encode (enc, io, image, NULL))
    goto encode_error;

  opj_image_destroy (image);

  length = cio_tell (io);

  ret =
      gst_video_encoder_allocate_output_frame (encoder, frame,
      length + (self->is_jp2c ? 8 : 0));
  if (ret != GST_FLOW_OK)
    goto allocate_error;

  gst_buffer_fill (frame->output_buffer, self->is_jp2c ? 8 : 0, io->buffer,
      length);
  if (self->is_jp2c) {
    gst_buffer_map (frame->output_buffer, &map, GST_MAP_WRITE);
    GST_WRITE_UINT32_BE (map.data, length + 8);
    GST_WRITE_UINT32_BE (map.data + 4, GST_MAKE_FOURCC ('j', 'p', '2', 'c'));
    gst_buffer_unmap (frame->output_buffer, &map);
  }

  opj_cio_close (io);
  opj_destroy_compress (enc);
#else
  stream = opj_stream_create (4096, OPJ_FALSE);
  if (!stream)
    goto open_error;

  mstream.allocsize = 4096;
  mstream.data = g_malloc (mstream.allocsize);
  mstream.offset = 0;
  mstream.size = 0;

  opj_stream_set_read_function (stream, read_fn);
  opj_stream_set_write_function (stream, write_fn);
  opj_stream_set_skip_function (stream, skip_fn);
  opj_stream_set_seek_function (stream, seek_fn);
  opj_stream_set_user_data (stream, &mstream);
  opj_stream_set_user_data_length (stream, mstream.size);

  if (!opj_start_compress (enc, image, stream))
    goto encode_error;

  if (!opj_encode (enc, stream))
    goto encode_error;

  if (!opj_end_compress (enc, stream))
    goto encode_error;

  opj_image_destroy (image);
  opj_stream_destroy (stream);
  opj_destroy_codec (enc);

  frame->output_buffer = gst_buffer_new ();

  if (self->is_jp2c) {
    GstMapInfo map;
    GstMemory *mem;

    mem = gst_allocator_alloc (NULL, 8, NULL);
    gst_memory_map (mem, &map, GST_MAP_WRITE);
    GST_WRITE_UINT32_BE (map.data, mstream.size + 8);
    GST_WRITE_UINT32_BE (map.data + 4, GST_MAKE_FOURCC ('j', 'p', '2', 'c'));
    gst_memory_unmap (mem, &map);
    gst_buffer_append_memory (frame->output_buffer, mem);
  }

  gst_buffer_append_memory (frame->output_buffer,
      gst_memory_new_wrapped (0, mstream.data, mstream.allocsize, 0,
          mstream.size, NULL, (GDestroyNotify) g_free));
#endif

  ret = gst_video_encoder_finish_frame (encoder, frame);

  return ret;

initialization_error:
  {
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to initialize OpenJPEG encoder"), (NULL));
    return GST_FLOW_ERROR;
  }
map_read_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to map input buffer"), (NULL));
    return GST_FLOW_ERROR;
  }
fill_image_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_frame_unmap (&vframe);
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to fill OpenJPEG image"), (NULL));
    return GST_FLOW_ERROR;
  }
open_error:
  {
    opj_image_destroy (image);
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to open OpenJPEG data"), (NULL));
    return GST_FLOW_ERROR;
  }
encode_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_cio_close (io);
    opj_image_destroy (image);
    opj_destroy_compress (enc);
#else
    opj_stream_destroy (stream);
    g_free (mstream.data);
    opj_image_destroy (image);
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, STREAM, ENCODE,
        ("Failed to encode OpenJPEG stream"), (NULL));
    return GST_FLOW_ERROR;
  }
#ifdef HAVE_OPENJPEG_1
allocate_error:
  {
    opj_cio_close (io);
    opj_destroy_compress (enc);
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to allocate output buffer"), (NULL));
    return ret;
  }
#endif
}
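
The OpenJPEG 2.x path above wires custom callbacks (read_fn, write_fn, skip_fn, seek_fn) into the opj_stream_t so that the encoder writes into the growable MemStream instead of a file. As a rough sketch, assuming the MemStream fields used above (data, allocsize, offset, size), write_fn could look like this:

/* Sketch of a write callback: grow the backing buffer as needed, copy the
 * compressed bytes OpenJPEG hands us, and track the high-water mark. */
static OPJ_SIZE_T
write_fn (void *p_buffer, OPJ_SIZE_T p_nb_bytes, void *p_user_data)
{
  MemStream *mstream = p_user_data;

  while (mstream->offset + p_nb_bytes > mstream->allocsize) {
    mstream->allocsize *= 2;
    mstream->data = g_realloc (mstream->data, mstream->allocsize);
  }
  memcpy (mstream->data + mstream->offset, p_buffer, p_nb_bytes);
  mstream->offset += p_nb_bytes;
  if (mstream->offset > mstream->size)
    mstream->size = mstream->offset;

  return p_nb_bytes;
}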
Code example #5
File: base_enc.c  Project: Radbug/gst-fsl
static GstFlowReturn gst_fsl_vpu_base_enc_handle_frame(GstVideoEncoder *encoder, GstVideoCodecFrame *frame)
{
	VpuEncRetCode enc_ret;
	VpuEncEncParam enc_enc_param;
	GstFslPhysMemMeta *phys_mem_meta;
	GstFslVpuBaseEncClass *klass;
	GstFslVpuBaseEnc *vpu_base_enc;
	VpuFrameBuffer input_framebuf;
	GstBuffer *input_buffer;

	vpu_base_enc = GST_FSL_VPU_BASE_ENC(encoder);
	klass = GST_FSL_VPU_BASE_ENC_CLASS(G_OBJECT_GET_CLASS(vpu_base_enc));

	g_assert(klass->set_frame_enc_params != NULL);

	memset(&enc_enc_param, 0, sizeof(enc_enc_param));
	memset(&input_framebuf, 0, sizeof(input_framebuf));

	phys_mem_meta = GST_FSL_PHYS_MEM_META_GET(frame->input_buffer);

	if (phys_mem_meta == NULL)
	{
		GstVideoFrame temp_input_video_frame, temp_incoming_video_frame;

		if (vpu_base_enc->internal_input_buffer == NULL)
		{
			/* The internal input buffer is the temp input frame's DMA memory.
			 * If it does not exist yet, it needs to be created here. The temp input
			 * frame is then mapped. */

			GstFlowReturn flow_ret;

			if (vpu_base_enc->internal_bufferpool == NULL)
			{
				/* Internal bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input buffer */

				GstStructure *config;
				GstCaps *caps;
				GstAllocator *allocator;

				caps = gst_video_info_to_caps(&(vpu_base_enc->video_info));
				vpu_base_enc->internal_bufferpool = gst_fsl_phys_mem_buffer_pool_new(FALSE);
				allocator = gst_fsl_vpu_enc_allocator_obtain();

				config = gst_buffer_pool_get_config(vpu_base_enc->internal_bufferpool);
				gst_buffer_pool_config_set_params(config, caps, vpu_base_enc->video_info.size, 2, 0);
				gst_buffer_pool_config_set_allocator(config, allocator, NULL);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_FSL_PHYS_MEM);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
				gst_buffer_pool_set_config(vpu_base_enc->internal_bufferpool, config);

				gst_caps_unref(caps);

				if (vpu_base_enc->internal_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(vpu_base_enc, "failed to create internal bufferpool");
					return GST_FLOW_ERROR;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(vpu_base_enc->internal_bufferpool))
				gst_buffer_pool_set_active(vpu_base_enc->internal_bufferpool, TRUE);

			/* Create the internal input buffer */
			flow_ret = gst_buffer_pool_acquire_buffer(vpu_base_enc->internal_bufferpool, &(vpu_base_enc->internal_input_buffer), NULL);
			if (flow_ret != GST_FLOW_OK)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "error acquiring input frame buffer: %s", gst_pad_mode_get_name(flow_ret));
				return FALSE;
			}
		}

		gst_video_frame_map(&temp_incoming_video_frame, &(vpu_base_enc->video_info), frame->input_buffer, GST_MAP_READ);
		gst_video_frame_map(&temp_input_video_frame, &(vpu_base_enc->video_info), vpu_base_enc->internal_input_buffer, GST_MAP_WRITE);

		gst_video_frame_copy(&temp_input_video_frame, &temp_incoming_video_frame);

		gst_video_frame_unmap(&temp_incoming_video_frame);
		gst_video_frame_unmap(&temp_input_video_frame);

		input_buffer = vpu_base_enc->internal_input_buffer;
		phys_mem_meta = GST_FSL_PHYS_MEM_META_GET(vpu_base_enc->internal_input_buffer);
	}
	else
		input_buffer = frame->input_buffer;

	{
		gsize *plane_offsets;
		gint *plane_strides;
		GstVideoMeta *video_meta;
		unsigned char *phys_ptr;

		video_meta = gst_buffer_get_video_meta(input_buffer);
		if (video_meta != NULL)
		{
			plane_offsets = video_meta->offset;
			plane_strides = video_meta->stride;
		}
		else
		{
			plane_offsets = vpu_base_enc->video_info.offset;
			plane_strides = vpu_base_enc->video_info.stride;
		}

		phys_ptr = (unsigned char*)(phys_mem_meta->phys_addr);

		input_framebuf.pbufY = phys_ptr;
		input_framebuf.pbufCb = phys_ptr + plane_offsets[1];
		input_framebuf.pbufCr = phys_ptr + plane_offsets[2];
		input_framebuf.pbufMvCol = NULL;
		input_framebuf.nStrideY = plane_strides[0];
		input_framebuf.nStrideC = plane_strides[1];

		GST_TRACE_OBJECT(vpu_base_enc, "width: %d   height: %d   stride 0: %d   stride 1: %d   offset 0: %d   offset 1: %d   offset 2: %d", GST_VIDEO_INFO_WIDTH(&(vpu_base_enc->video_info)), GST_VIDEO_INFO_HEIGHT(&(vpu_base_enc->video_info)), plane_strides[0], plane_strides[1], plane_offsets[0], plane_offsets[1], plane_offsets[2]);

		if (vpu_base_enc->framebuffers == NULL)
		{
			GstFslVpuFramebufferParams fbparams;
			gst_fsl_vpu_framebuffers_enc_init_info_to_params(&(vpu_base_enc->init_info), &fbparams);
			fbparams.pic_width = vpu_base_enc->open_param.nPicWidth;
			fbparams.pic_height = vpu_base_enc->open_param.nPicHeight;
			vpu_base_enc->framebuffers = gst_fsl_vpu_framebuffers_new(&fbparams, gst_fsl_vpu_enc_allocator_obtain());
			gst_fsl_vpu_framebuffers_register_with_encoder(vpu_base_enc->framebuffers, vpu_base_enc->handle, plane_strides[0]);
		}

		if (vpu_base_enc->output_phys_buffer == NULL)
		{
			vpu_base_enc->output_phys_buffer = (GstFslPhysMemory *)gst_allocator_alloc(gst_fsl_vpu_enc_allocator_obtain(), vpu_base_enc->framebuffers->total_size, NULL);

			if (vpu_base_enc->output_phys_buffer == NULL)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "could not allocate physical buffer for output data");
				return GST_FLOW_ERROR;
			}
		}
	}

	enc_enc_param.nInVirtOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->mapped_virt_addr); /* TODO */
	enc_enc_param.nInPhyOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->phys_addr);
	enc_enc_param.nInOutputBufLen = vpu_base_enc->output_phys_buffer->mem.size;
	enc_enc_param.nPicWidth = vpu_base_enc->framebuffers->pic_width;
	enc_enc_param.nPicHeight = vpu_base_enc->framebuffers->pic_height;
	enc_enc_param.nFrameRate = vpu_base_enc->open_param.nFrameRate;
	enc_enc_param.pInFrame = &input_framebuf;

	if (!klass->set_frame_enc_params(vpu_base_enc, &enc_enc_param, &(vpu_base_enc->open_param)))
	{
		GST_ERROR_OBJECT(vpu_base_enc, "derived class could not frame enc params");
		return GST_FLOW_ERROR;
	}

	enc_ret = VPU_EncEncodeFrame(vpu_base_enc->handle, &enc_enc_param);
	if (enc_ret != VPU_ENC_RET_SUCCESS)
	{
		GST_ERROR_OBJECT(vpu_base_enc, "failed to encode frame: %s", gst_fsl_vpu_strerror(enc_ret));
		VPU_EncReset(vpu_base_enc->handle);
		return GST_FLOW_ERROR;
	}

	GST_LOG_OBJECT(vpu_base_enc, "out ret code: 0x%x  out size: %u", enc_enc_param.eOutRetCode, enc_enc_param.nOutOutputSize);

	if ((enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_DIS) || (enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_SEQHEADER))
	{
		gst_video_encoder_allocate_output_frame(encoder, frame, enc_enc_param.nOutOutputSize);
		gst_buffer_fill(frame->output_buffer, 0, vpu_base_enc->output_phys_buffer->mapped_virt_addr, enc_enc_param.nOutOutputSize);
		gst_video_encoder_finish_frame(encoder, frame);
	}

	return GST_FLOW_OK;
}
Code example #6
/* Copy the fixed header and extension. Replace the current ssrc with ssrc1,
 * strip the OSN from the payload and replace the current seq num with the OSN.
 * Memory is copied to avoid manually copying each RTP buffer field.
 */
static GstBuffer *
_gst_rtp_buffer_new_from_rtx (GstRTPBuffer * rtp, guint32 ssrc1,
    guint16 orign_seqnum, guint8 origin_payload_type)
{
  GstMemory *mem = NULL;
  GstRTPBuffer new_rtp = GST_RTP_BUFFER_INIT;
  GstBuffer *new_buffer = gst_buffer_new ();
  GstMapInfo map;
  guint payload_len = 0;

  /* copy fixed header */
  mem = gst_memory_copy (rtp->map[0].memory,
      (guint8 *) rtp->data[0] - rtp->map[0].data, rtp->size[0]);
  gst_buffer_append_memory (new_buffer, mem);

  /* copy extension if any */
  if (rtp->size[1]) {
    mem = gst_memory_copy (rtp->map[1].memory,
        (guint8 *) rtp->data[1] - rtp->map[1].data, rtp->size[1]);
    gst_buffer_append_memory (new_buffer, mem);
  }

  /* copy payload and remove OSN */
  payload_len = rtp->size[2] - 2;
  mem = gst_allocator_alloc (NULL, payload_len, NULL);

  gst_memory_map (mem, &map, GST_MAP_WRITE);
  if (rtp->size[2])
    memcpy (map.data, (guint8 *) rtp->data[2] + 2, payload_len);
  gst_memory_unmap (mem, &map);
  gst_buffer_append_memory (new_buffer, mem);

  /* the sender always constructs rtx packets without padding,
   * But the receiver can still receive rtx packets with padding.
   * So just copy it.
   */
  if (rtp->size[3]) {
    guint pad_len = rtp->size[3];

    mem = gst_allocator_alloc (NULL, pad_len, NULL);

    gst_memory_map (mem, &map, GST_MAP_WRITE);
    map.data[pad_len - 1] = pad_len;
    gst_memory_unmap (mem, &map);

    gst_buffer_append_memory (new_buffer, mem);
  }

  /* set ssrc and seq num */
  gst_rtp_buffer_map (new_buffer, GST_MAP_WRITE, &new_rtp);
  gst_rtp_buffer_set_ssrc (&new_rtp, ssrc1);
  gst_rtp_buffer_set_seq (&new_rtp, orign_seqnum);
  gst_rtp_buffer_set_payload_type (&new_rtp, origin_payload_type);
  gst_rtp_buffer_unmap (&new_rtp);

  gst_buffer_copy_into (new_buffer, rtp->buffer,
      GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
  GST_BUFFER_FLAG_SET (new_buffer, GST_RTP_BUFFER_FLAG_RETRANSMISSION);

  return new_buffer;
}
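
For context, a hedged sketch of how such a helper could be invoked: the retransmission payload carries the original sequence number (OSN) big-endian in its first two bytes, which is why the helper above strips two bytes from the payload. The names recover_from_rtx, rtx_buffer, orig_ssrc and orig_pt are placeholders.

static GstBuffer *
recover_from_rtx (GstBuffer * rtx_buffer, guint32 orig_ssrc, guint8 orig_pt)
{
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
  GstBuffer *orig = NULL;

  if (gst_rtp_buffer_map (rtx_buffer, GST_MAP_READ, &rtp)) {
    /* OSN = first two payload bytes, big-endian */
    guint16 osn = GST_READ_UINT16_BE (gst_rtp_buffer_get_payload (&rtp));

    orig = _gst_rtp_buffer_new_from_rtx (&rtp, orig_ssrc, osn, orig_pt);
    gst_rtp_buffer_unmap (&rtp);
  }
  return orig;
}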
Code example #7
File: base_enc.c  Project: chamois94/gstreamer-imx
static GstFlowReturn gst_imx_vpu_base_enc_handle_frame(GstVideoEncoder *encoder, GstVideoCodecFrame *frame)
{
	VpuEncRetCode enc_ret;
	VpuEncEncParam enc_enc_param;
	GstImxPhysMemMeta *phys_mem_meta;
	GstImxVpuBaseEncClass *klass;
	GstImxVpuBaseEnc *vpu_base_enc;
	VpuFrameBuffer input_framebuf;
	GstBuffer *input_buffer;
	gint src_stride;

	vpu_base_enc = GST_IMX_VPU_BASE_ENC(encoder);
	klass = GST_IMX_VPU_BASE_ENC_CLASS(G_OBJECT_GET_CLASS(vpu_base_enc));

	g_assert(klass->set_frame_enc_params != NULL);

	memset(&enc_enc_param, 0, sizeof(enc_enc_param));
	memset(&input_framebuf, 0, sizeof(input_framebuf));

	phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(frame->input_buffer);

	/* If the incoming frame's buffer is not using physically contiguous memory,
	 * it needs to be copied to the internal input buffer, otherwise the VPU
	 * encoder cannot read the frame */
	if (phys_mem_meta == NULL)
	{
		/* No physical memory metadata found -> buffer is not physically contiguous */

		GstVideoFrame temp_input_video_frame, temp_incoming_video_frame;

		GST_LOG_OBJECT(vpu_base_enc, "input buffer not physically contiguous - frame copy is necessary");

		if (vpu_base_enc->internal_input_buffer == NULL)
		{
			/* The internal input buffer is the temp input frame's DMA memory.
			 * If it does not exist yet, it needs to be created here. The temp input
			 * frame is then mapped. */

			GstFlowReturn flow_ret;

			if (vpu_base_enc->internal_bufferpool == NULL)
			{
				/* Internal bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input buffer */

				GstStructure *config;
				GstCaps *caps;
				GstAllocator *allocator;

				GST_DEBUG_OBJECT(vpu_base_enc, "creating internal bufferpool");

				caps = gst_video_info_to_caps(&(vpu_base_enc->video_info));
				vpu_base_enc->internal_bufferpool = gst_imx_phys_mem_buffer_pool_new(FALSE);
				allocator = gst_imx_vpu_enc_allocator_obtain();

				config = gst_buffer_pool_get_config(vpu_base_enc->internal_bufferpool);
				gst_buffer_pool_config_set_params(config, caps, vpu_base_enc->video_info.size, 2, 0);
				gst_buffer_pool_config_set_allocator(config, allocator, NULL);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_IMX_PHYS_MEM);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
				gst_buffer_pool_set_config(vpu_base_enc->internal_bufferpool, config);

				gst_caps_unref(caps);

				if (vpu_base_enc->internal_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(vpu_base_enc, "failed to create internal bufferpool");
					return GST_FLOW_ERROR;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(vpu_base_enc->internal_bufferpool))
				gst_buffer_pool_set_active(vpu_base_enc->internal_bufferpool, TRUE);

			/* Create the internal input buffer */
			flow_ret = gst_buffer_pool_acquire_buffer(vpu_base_enc->internal_bufferpool, &(vpu_base_enc->internal_input_buffer), NULL);
			if (flow_ret != GST_FLOW_OK)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "error acquiring input frame buffer: %s", gst_pad_mode_get_name(flow_ret));
				return flow_ret;
			}
		}

		/* The internal input buffer exists at this point. Since the incoming frame
		 * is not stored in physical memory, copy its pixels to the internal
		 * input buffer, so the encoder can read them. */

		gst_video_frame_map(&temp_incoming_video_frame, &(vpu_base_enc->video_info), frame->input_buffer, GST_MAP_READ);
		gst_video_frame_map(&temp_input_video_frame, &(vpu_base_enc->video_info), vpu_base_enc->internal_input_buffer, GST_MAP_WRITE);

		gst_video_frame_copy(&temp_input_video_frame, &temp_incoming_video_frame);

		gst_video_frame_unmap(&temp_incoming_video_frame);
		gst_video_frame_unmap(&temp_input_video_frame);

		/* Set the internal input buffer as the encoder's input */
		input_buffer = vpu_base_enc->internal_input_buffer;
		/* And use the internal input buffer's physical memory metadata */
		phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(vpu_base_enc->internal_input_buffer);
	}
	else
	{
		/* Physical memory metadata found -> buffer is physically contiguous
		 * It can be used directly as input for the VPU encoder */
		input_buffer = frame->input_buffer;
	}

	/* Set up physical addresses for the input framebuffer */
	{
		gsize *plane_offsets;
		gint *plane_strides;
		GstVideoMeta *video_meta;
		unsigned char *phys_ptr;

		/* Try to use plane offset and stride information from the video
		 * metadata if present, since these can be more accurate than
		 * the information from the video info */
		video_meta = gst_buffer_get_video_meta(input_buffer);
		if (video_meta != NULL)
		{
			plane_offsets = video_meta->offset;
			plane_strides = video_meta->stride;
		}
		else
		{
			plane_offsets = vpu_base_enc->video_info.offset;
			plane_strides = vpu_base_enc->video_info.stride;
		}

		phys_ptr = (unsigned char*)(phys_mem_meta->phys_addr);

		input_framebuf.pbufY = phys_ptr;
		input_framebuf.pbufCb = phys_ptr + plane_offsets[1];
		input_framebuf.pbufCr = phys_ptr + plane_offsets[2];
		input_framebuf.pbufMvCol = NULL; /* not used by the VPU encoder */
		input_framebuf.nStrideY = plane_strides[0];
		input_framebuf.nStrideC = plane_strides[1];

		/* this is needed for framebuffers registration below */
		src_stride = plane_strides[0];

		GST_TRACE_OBJECT(vpu_base_enc, "width: %d   height: %d   stride 0: %d   stride 1: %d   offset 0: %d   offset 1: %d   offset 2: %d", GST_VIDEO_INFO_WIDTH(&(vpu_base_enc->video_info)), GST_VIDEO_INFO_HEIGHT(&(vpu_base_enc->video_info)), plane_strides[0], plane_strides[1], plane_offsets[0], plane_offsets[1], plane_offsets[2]);
	}

	/* Create framebuffers structure (if not already present) */
	if (vpu_base_enc->framebuffers == NULL)
	{
		GstImxVpuFramebufferParams fbparams;
		gst_imx_vpu_framebuffers_enc_init_info_to_params(&(vpu_base_enc->init_info), &fbparams);
		fbparams.pic_width = vpu_base_enc->open_param.nPicWidth;
		fbparams.pic_height = vpu_base_enc->open_param.nPicHeight;

		vpu_base_enc->framebuffers = gst_imx_vpu_framebuffers_new(&fbparams, gst_imx_vpu_enc_allocator_obtain());
		if (vpu_base_enc->framebuffers == NULL)
		{
			GST_ELEMENT_ERROR(vpu_base_enc, RESOURCE, NO_SPACE_LEFT, ("could not create framebuffers structure"), (NULL));
			return GST_FLOW_ERROR;
		}

		gst_imx_vpu_framebuffers_register_with_encoder(vpu_base_enc->framebuffers, vpu_base_enc->handle, src_stride);
	}

	/* Allocate physical buffer for output data (if not already present) */
	if (vpu_base_enc->output_phys_buffer == NULL)
	{
		vpu_base_enc->output_phys_buffer = (GstImxPhysMemory *)gst_allocator_alloc(gst_imx_vpu_enc_allocator_obtain(), vpu_base_enc->framebuffers->total_size, NULL);

		if (vpu_base_enc->output_phys_buffer == NULL)
		{
			GST_ERROR_OBJECT(vpu_base_enc, "could not allocate physical buffer for output data");
			return GST_FLOW_ERROR;
		}
	}

	/* Set up encoding parameters */
	enc_enc_param.nInVirtOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->mapped_virt_addr); /* TODO */
	enc_enc_param.nInPhyOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->phys_addr);
	enc_enc_param.nInOutputBufLen = vpu_base_enc->output_phys_buffer->mem.size;
	enc_enc_param.nPicWidth = vpu_base_enc->framebuffers->pic_width;
	enc_enc_param.nPicHeight = vpu_base_enc->framebuffers->pic_height;
	enc_enc_param.nFrameRate = vpu_base_enc->open_param.nFrameRate;
	enc_enc_param.pInFrame = &input_framebuf;
	enc_enc_param.nForceIPicture = 0;

	/* Force I-frame if either IS_FORCE_KEYFRAME or IS_FORCE_KEYFRAME_HEADERS is set for the current frame. */
	if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME(frame) || GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME_HEADERS(frame))
	{
		enc_enc_param.nForceIPicture = 1;
		GST_LOG_OBJECT(vpu_base_enc, "got request to make this a keyframe - forcing I frame");
		GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT(frame);
	}

	/* Give the derived class a chance to set up encoding parameters too */
	if (!klass->set_frame_enc_params(vpu_base_enc, &enc_enc_param, &(vpu_base_enc->open_param)))
	{
		GST_ERROR_OBJECT(vpu_base_enc, "derived class could not frame enc params");
		return GST_FLOW_ERROR;
	}

	/* Main encoding block */
	{
		GstBuffer *output_buffer = NULL;
		gsize output_buffer_offset = 0;
		gboolean frame_finished = FALSE;

		frame->output_buffer = NULL;

		/* Run in a loop until the VPU reports the input as used */
		do
		{
			/* Feed input data */
			enc_ret = VPU_EncEncodeFrame(vpu_base_enc->handle, &enc_enc_param);
			if (enc_ret != VPU_ENC_RET_SUCCESS)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "failed to encode frame: %s", gst_imx_vpu_strerror(enc_ret));
				VPU_EncReset(vpu_base_enc->handle);
				return GST_FLOW_ERROR;
			}

			if (frame_finished)
			{
				GST_WARNING_OBJECT(vpu_base_enc, "frame was already finished for the current input, but input not yet marked as used");
				continue;
			}

			if (enc_enc_param.eOutRetCode & (VPU_ENC_OUTPUT_DIS | VPU_ENC_OUTPUT_SEQHEADER))
			{
				/* Create an output buffer on demand */
				if (output_buffer == NULL)
				{
					output_buffer = gst_video_encoder_allocate_output_buffer(
						encoder,
						vpu_base_enc->output_phys_buffer->mem.size
					);
					frame->output_buffer = output_buffer;
				}

				GST_LOG_OBJECT(vpu_base_enc, "processing output data: %u bytes, output buffer offset %u", enc_enc_param.nOutOutputSize, output_buffer_offset);

				if (klass->fill_output_buffer != NULL)
				{
					/* Derived class fills data on its own */

					gsize cur_offset = output_buffer_offset;
					output_buffer_offset += klass->fill_output_buffer(
						vpu_base_enc,
						frame,
						cur_offset,
						vpu_base_enc->output_phys_buffer->mapped_virt_addr,
						enc_enc_param.nOutOutputSize,
						enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_SEQHEADER
					);
				}
				else
				{
					/* Use default data filling (= copy input to output) */

					gst_buffer_fill(
						output_buffer,
						output_buffer_offset,
						vpu_base_enc->output_phys_buffer->mapped_virt_addr,
						enc_enc_param.nOutOutputSize
					);
					output_buffer_offset += enc_enc_param.nOutOutputSize;
				}

				if (enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_DIS)
				{
					g_assert(output_buffer != NULL);

					/* Set the output buffer's size to the actual number of bytes
					 * filled by the derived class */
					gst_buffer_set_size(output_buffer, output_buffer_offset);

					/* Set the frame DTS */
					frame->dts = frame->pts;

					/* And finish the frame, handing the output data over to the base class */
					gst_video_encoder_finish_frame(encoder, frame);

					output_buffer = NULL;
					frame_finished = TRUE;

					if (!(enc_enc_param.eOutRetCode & VPU_ENC_INPUT_USED))
						GST_WARNING_OBJECT(vpu_base_enc, "frame finished, but VPU did not report the input as used");

					break;
				}
			}
		}
		while (!(enc_enc_param.eOutRetCode & VPU_ENC_INPUT_USED)); /* VPU_ENC_INPUT_NOT_USED has value 0x0 - cannot use it for flag checks */

		/* If output_buffer is NULL at this point, it means VPU_ENC_OUTPUT_DIS was never communicated
		 * by the VPU, and the buffer is unfinished. -> Drop it. */
		if (output_buffer != NULL)
		{
			GST_WARNING_OBJECT(vpu_base_enc, "frame unfinished ; dropping");
			gst_buffer_unref(output_buffer);
			frame->output_buffer = NULL; /* necessary to make finish_frame() drop the frame */
			gst_video_encoder_finish_frame(encoder, frame);
		}
	}

	return GST_FLOW_OK;
}
Code example #8
static GstFlowReturn
gst_inter_audio_src_create (GstBaseSrc * src, guint64 offset, guint size,
    GstBuffer ** buf)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);
  GstCaps *caps;
  GstBuffer *buffer;
  guint n, bpf;
  guint64 period_time;
  guint64 period_samples;

  GST_DEBUG_OBJECT (interaudiosrc, "create");

  buffer = NULL;
  caps = NULL;

  g_mutex_lock (&interaudiosrc->surface->mutex);
  if (interaudiosrc->surface->audio_info.finfo) {
    if (!gst_audio_info_is_equal (&interaudiosrc->surface->audio_info,
            &interaudiosrc->info)) {
      caps = gst_audio_info_to_caps (&interaudiosrc->surface->audio_info);
      interaudiosrc->timestamp_offset +=
          gst_util_uint64_scale (interaudiosrc->n_samples, GST_SECOND,
          interaudiosrc->info.rate);
      interaudiosrc->n_samples = 0;
    }
  }

  bpf = interaudiosrc->surface->audio_info.bpf;
  period_time = interaudiosrc->surface->audio_period_time;
  period_samples =
      gst_util_uint64_scale (period_time, interaudiosrc->info.rate, GST_SECOND);

  if (bpf > 0)
    n = gst_adapter_available (interaudiosrc->surface->audio_adapter) / bpf;
  else
    n = 0;

  if (n > period_samples)
    n = period_samples;
  if (n > 0) {
    buffer = gst_adapter_take_buffer (interaudiosrc->surface->audio_adapter,
        n * bpf);
  } else {
    buffer = gst_buffer_new ();
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_GAP);
  }
  g_mutex_unlock (&interaudiosrc->surface->mutex);

  if (caps) {
    gboolean ret = gst_base_src_set_caps (src, caps);

    /* Log before unreffing so the caps are still valid for the message */
    if (!ret)
      GST_ERROR_OBJECT (src, "Failed to set caps %" GST_PTR_FORMAT, caps);
    gst_caps_unref (caps);
    if (!ret) {
      if (buffer)
        gst_buffer_unref (buffer);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

  buffer = gst_buffer_make_writable (buffer);

  bpf = interaudiosrc->info.bpf;
  if (n < period_samples) {
    GstMapInfo map;
    GstMemory *mem;

    GST_DEBUG_OBJECT (interaudiosrc,
        "creating %" G_GUINT64_FORMAT " samples of silence",
        period_samples - n);
    mem = gst_allocator_alloc (NULL, (period_samples - n) * bpf, NULL);
    if (gst_memory_map (mem, &map, GST_MAP_WRITE)) {
      gst_audio_format_fill_silence (interaudiosrc->info.finfo, map.data,
          map.size);
      gst_memory_unmap (mem, &map);
    }
    gst_buffer_prepend_memory (buffer, mem);
  }
  n = period_samples;

  GST_BUFFER_OFFSET (buffer) = interaudiosrc->n_samples;
  GST_BUFFER_OFFSET_END (buffer) = interaudiosrc->n_samples + n;
  GST_BUFFER_TIMESTAMP (buffer) = interaudiosrc->timestamp_offset +
      gst_util_uint64_scale (interaudiosrc->n_samples, GST_SECOND,
      interaudiosrc->info.rate);
  GST_DEBUG_OBJECT (interaudiosrc, "create ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
  GST_BUFFER_DURATION (buffer) = interaudiosrc->timestamp_offset +
      gst_util_uint64_scale (interaudiosrc->n_samples + n, GST_SECOND,
      interaudiosrc->info.rate) - GST_BUFFER_TIMESTAMP (buffer);
  GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
  if (interaudiosrc->n_samples == 0) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
  }
  interaudiosrc->n_samples += n;

  *buf = buffer;

  return GST_FLOW_OK;
}