gboolean atoms_recov_write_trak_samples (FILE * f, AtomTRAK * trak, guint32 nsamples, guint32 delta, guint32 size, guint64 chunk_offset, gboolean sync, gboolean do_pts, gint64 pts_offset) { guint8 data[TRAK_BUFFER_ENTRY_INFO_SIZE]; /* * We have to write a TrakBufferEntryInfo */ GST_WRITE_UINT32_BE (data + 0, trak->tkhd.track_ID); GST_WRITE_UINT32_BE (data + 4, nsamples); GST_WRITE_UINT32_BE (data + 8, delta); GST_WRITE_UINT32_BE (data + 12, size); GST_WRITE_UINT64_BE (data + 16, chunk_offset); if (sync) GST_WRITE_UINT8 (data + 24, 1); else GST_WRITE_UINT8 (data + 24, 0); if (do_pts) { GST_WRITE_UINT8 (data + 25, 1); GST_WRITE_UINT64_BE (data + 26, pts_offset); } else { GST_WRITE_UINT8 (data + 25, 0); GST_WRITE_UINT64_BE (data + 26, 0); } return fwrite (data, 1, TRAK_BUFFER_ENTRY_INFO_SIZE, f) == TRAK_BUFFER_ENTRY_INFO_SIZE; }
/* Build the 48-byte QuickTime "magic cookie" for AMR ('samr') audio from the
 * stream's codec_data.  Returns a newly g_malloc0()'d buffer owned by the
 * caller; *len is set to its size (48 == 0x30).
 *
 * NOTE(review): reads 17 bytes starting at offset 4 of codec_data, i.e.
 * assumes codec_data holds at least 21 bytes — not validated here; confirm
 * at the caller.
 * NOTE(review): the memcpy covers res[26..42], so its last three bytes
 * (40..42) are immediately overwritten by the terminator write at res+40;
 * this appears intentional given the fixed 48-byte layout, but verify
 * against the QT sound sample description spec. */
static gpointer
make_samr_magic_cookie (GstBuffer * codec_data, gsize * len)
{
  guint8 *res;

  *len = 48;
  res = g_malloc0 (0x30);       /* 0x30 == 48, matches *len */

  /* 12 first bytes are 'frma' (format) atom with 'samr' value.
   * Atom sizes are written big-endian; the fourccs below are written LE
   * because QT_MAKE_FOURCC_BE already produced the bytes in stream order. */
  GST_WRITE_UINT32_BE (res, 0xc);
  GST_WRITE_UINT32_LE (res + 4, QT_MAKE_FOURCC_BE ('f', 'r', 'm', 'a'));
  GST_WRITE_UINT32_LE (res + 8, QT_MAKE_FOURCC_BE ('s', 'a', 'm', 'r'));

  /* 10 bytes for 'enda' atom with 0 (payload left zeroed by g_malloc0) */
  GST_WRITE_UINT32_BE (res + 12, 10);
  GST_WRITE_UINT32_LE (res + 16, QT_MAKE_FOURCC_BE ('e', 'n', 'd', 'a'));

  /* 17(+1) bytes for the codec_data contents */
  GST_WRITE_UINT32_BE (res + 22, 18);
  memcpy (res + 26, GST_BUFFER_DATA (codec_data) + 4, 17);
  /* yes... we need to replace 'damr' by 'samr' ('d' is the first copied
   * byte, at res+26).  Blame Apple ! */
  GST_WRITE_UINT8 (res + 26, 's');

  /* Terminator atom */
  GST_WRITE_UINT32_BE (res + 40, 8);

#if DEBUG_DUMP
  gst_util_dump_mem (res, 48);
#endif

  return res;
}
/* Write a single-byte EXIF tag (EXIF_TYPE_BYTE, count 1).
 *
 * The one-byte value is stored inline in the tag's 4-byte offset field
 * rather than pointing at external data: GST_WRITE_UINT8 places @value in
 * the lowest-address byte of @offset, the remaining three bytes stay 0.
 * NOTE(review): the numeric value of @offset therefore differs between
 * little- and big-endian hosts — presumably
 * gst_exif_writer_write_tag_header() emits the field as raw bytes in the
 * chosen byte order; confirm against that helper. */
static void
gst_exif_writer_write_byte_tag (GstExifWriter * writer, guint16 tag,
    guint8 value)
{
  guint32 offset = 0;

  GST_WRITE_UINT8 ((guint8 *) & offset, value);
  gst_exif_writer_write_tag_header (writer, tag, EXIF_TYPE_BYTE,
      1, offset, TRUE);
}
/* Create a copy of @buffer_in carrying an RTP header extension (id 0xABAC,
 * 3 words): a 64-bit NTP timestamp, a flag byte (C/E/D), a CSeq byte, and
 * four zero padding bytes.  Returns the new buffer; caller owns it. */
static GstBuffer *
create_extension_buffer (GstBuffer * buffer_in, gboolean clean_point,
    gboolean end_contiguous, gboolean discont, guint64 ntp_offset, guint8 cseq)
{
  GstBuffer *out;
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
  guint8 *ext;
  guint8 bits;

  out = gst_buffer_copy (buffer_in);
  fail_unless (gst_rtp_buffer_map (out, GST_MAP_READWRITE, &rtp));

  /* attach the extension area and get a pointer to its payload */
  fail_unless (gst_rtp_buffer_set_extension_data (&rtp, 0xABAC, 3));
  fail_unless (gst_rtp_buffer_get_extension (&rtp));
  fail_unless (gst_rtp_buffer_get_extension_data (&rtp, NULL,
          (gpointer) & ext, NULL));

  /* NTP timestamp derived from the source buffer's PTS */
  GST_WRITE_UINT64_BE (ext,
      convert_to_ntp (GST_BUFFER_PTS (buffer_in) + ntp_offset));

  /* flag byte: C (bit 7), E (bit 6), D (bit 5), rest must be zero */
  bits = (clean_point ? (1 << 7) : 0)
      | (end_contiguous ? (1 << 6) : 0)
      | (discont ? (1 << 5) : 0);
  GST_WRITE_UINT8 (ext + 8, bits);

  /* CSeq */
  GST_WRITE_UINT8 (ext + 9, cseq);

  memset (ext + 10, 0, 4);

  gst_rtp_buffer_unmap (&rtp);

  return out;
}
/* Build the src caps for the encoded VP8 stream, including the Ogg/VP8
 * "streamheader": a 26-byte stream-info header buffer, optionally followed
 * by a vorbiscomment buffer built from the element's tag setter tags.
 *
 * Fix: dropped the unused local `tags` (always NULL) and the redundant
 * ternary `(iface_tags) ? iface_tags : tags` — it sat inside
 * `if (iface_tags)`, so the fallback branch was dead code; pass
 * `iface_tags` directly, matching gst_vp8_enc_set_stream_info(). */
static GstCaps *
gst_vp8_enc_get_caps (GstBaseVideoEncoder * base_video_encoder)
{
  GstCaps *caps;
  const GstVideoState *state;
  const GstTagList *iface_tags;
  GstBuffer *stream_hdr, *vorbiscomment;
  guint8 *data;
  GstStructure *s;
  GValue array = { 0 };
  GValue value = { 0 };

  state = gst_base_video_encoder_get_state (base_video_encoder);

  caps = gst_caps_new_simple ("video/x-vp8",
      "width", G_TYPE_INT, state->width,
      "height", G_TYPE_INT, state->height,
      "framerate", GST_TYPE_FRACTION, state->fps_n, state->fps_d,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
      state->par_d, NULL);
  s = gst_caps_get_structure (caps, 0);

  /* put buffers in a fixed list */
  g_value_init (&array, GST_TYPE_ARRAY);
  g_value_init (&value, GST_TYPE_BUFFER);

  /* Create Ogg stream-info */
  stream_hdr = gst_buffer_new_and_alloc (26);
  data = GST_BUFFER_DATA (stream_hdr);
  GST_WRITE_UINT8 (data, 0x4F);
  GST_WRITE_UINT32_BE (data + 1, 0x56503830);   /* "VP80" */
  GST_WRITE_UINT8 (data + 5, 0x01);     /* stream info header */
  GST_WRITE_UINT8 (data + 6, 1);        /* Major version 1 */
  GST_WRITE_UINT8 (data + 7, 0);        /* Minor version 0 */
  GST_WRITE_UINT16_BE (data + 8, state->width);
  GST_WRITE_UINT16_BE (data + 10, state->height);
  GST_WRITE_UINT24_BE (data + 12, state->par_n);
  GST_WRITE_UINT24_BE (data + 15, state->par_d);
  GST_WRITE_UINT32_BE (data + 18, state->fps_n);
  GST_WRITE_UINT32_BE (data + 22, state->fps_d);
  GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_IN_CAPS);
  gst_value_set_buffer (&value, stream_hdr);
  gst_value_array_append_value (&array, &value);
  g_value_unset (&value);
  gst_buffer_unref (stream_hdr);

  iface_tags =
      gst_tag_setter_get_tag_list (GST_TAG_SETTER (base_video_encoder));
  if (iface_tags) {
    vorbiscomment =
        gst_tag_list_to_vorbiscomment_buffer (iface_tags,
        (const guint8 *) "OVP80\2 ", 7,
        "Encoded with GStreamer vp8enc " PACKAGE_VERSION);

    GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_IN_CAPS);

    g_value_init (&value, GST_TYPE_BUFFER);
    gst_value_set_buffer (&value, vorbiscomment);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
    gst_buffer_unref (vorbiscomment);
  }

  gst_structure_set_value (s, "streamheader", &array);
  g_value_unset (&array);

  return caps;
}
/* Attach the Ogg/VP8 "streamheader" to @caps: a 26-byte stream-info header
 * built from @info, optionally followed by a vorbiscomment buffer built
 * from the encoder's tag-setter tags. */
static void
gst_vp8_enc_set_stream_info (GstVPXEnc * enc, GstCaps * caps,
    GstVideoInfo * info)
{
  GstStructure *structure;
  GstVideoEncoder *video_enc;
  GstBuffer *stream_info_buf, *comment_buf;
  const GstTagList *tags;
  GValue header_list = { 0, };
  GValue buf_val = { 0, };
  guint8 *hdr = NULL;
  GstMapInfo map;

  video_enc = GST_VIDEO_ENCODER (enc);
  structure = gst_caps_get_structure (caps, 0);

  /* streamheader is a fixed list of header buffers */
  g_value_init (&header_list, GST_TYPE_ARRAY);
  g_value_init (&buf_val, GST_TYPE_BUFFER);

  /* 26-byte Ogg stream-info header */
  stream_info_buf = gst_buffer_new_and_alloc (26);
  gst_buffer_map (stream_info_buf, &map, GST_MAP_WRITE);
  hdr = map.data;
  GST_WRITE_UINT8 (hdr, 0x4F);
  GST_WRITE_UINT32_BE (hdr + 1, 0x56503830);    /* "VP80" */
  GST_WRITE_UINT8 (hdr + 5, 0x01);      /* stream info header */
  GST_WRITE_UINT8 (hdr + 6, 1);         /* Major version 1 */
  GST_WRITE_UINT8 (hdr + 7, 0);         /* Minor version 0 */
  GST_WRITE_UINT16_BE (hdr + 8, GST_VIDEO_INFO_WIDTH (info));
  GST_WRITE_UINT16_BE (hdr + 10, GST_VIDEO_INFO_HEIGHT (info));
  GST_WRITE_UINT24_BE (hdr + 12, GST_VIDEO_INFO_PAR_N (info));
  GST_WRITE_UINT24_BE (hdr + 15, GST_VIDEO_INFO_PAR_D (info));
  GST_WRITE_UINT32_BE (hdr + 18, GST_VIDEO_INFO_FPS_N (info));
  GST_WRITE_UINT32_BE (hdr + 22, GST_VIDEO_INFO_FPS_D (info));
  gst_buffer_unmap (stream_info_buf, &map);

  GST_BUFFER_FLAG_SET (stream_info_buf, GST_BUFFER_FLAG_HEADER);
  gst_value_set_buffer (&buf_val, stream_info_buf);
  gst_value_array_append_value (&header_list, &buf_val);
  g_value_unset (&buf_val);
  gst_buffer_unref (stream_info_buf);

  /* optional vorbiscomment header from the element's tags */
  tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (video_enc));
  if (tags) {
    comment_buf =
        gst_tag_list_to_vorbiscomment_buffer (tags,
        (const guint8 *) "OVP80\2 ", 7,
        "Encoded with GStreamer vp8enc " PACKAGE_VERSION);
    GST_BUFFER_FLAG_SET (comment_buf, GST_BUFFER_FLAG_HEADER);
    g_value_init (&buf_val, GST_TYPE_BUFFER);
    gst_value_set_buffer (&buf_val, comment_buf);
    gst_value_array_append_value (&header_list, &buf_val);
    g_value_unset (&buf_val);
    gst_buffer_unref (comment_buf);
  }

  gst_structure_set_value (structure, "streamheader", &header_list);
  g_value_unset (&header_list);
}
/* Negotiate the output format: configure and initialize the libvpx VP8
 * encoder from @state and the element's properties, pre-compute the cached
 * vpx_image_t layout, and set the src caps (including Ogg streamheaders).
 * Returns FALSE on renegotiation or any libvpx/caps failure. */
static gboolean
gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
    GstVideoState * state)
{
  GstVP8Enc *encoder;
  vpx_codec_enc_cfg_t cfg;
  vpx_codec_err_t status;
  vpx_image_t *image;
  guint8 *data = NULL;
  GstCaps *caps;
  gboolean ret;

  encoder = GST_VP8_ENC (base_video_encoder);
  GST_DEBUG_OBJECT (base_video_encoder, "set_format");

  /* renegotiation of an already-initialized encoder is not supported */
  if (encoder->inited) {
    GST_DEBUG_OBJECT (base_video_encoder, "refusing renegotiation");
    return FALSE;
  }

  status = vpx_codec_enc_config_default (&vpx_codec_vp8_cx_algo, &cfg, 0);
  if (status != VPX_CODEC_OK) {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT,
        ("Failed to get default encoder configuration"), ("%s",
            gst_vpx_error_name (status)));
    return FALSE;
  }

  /* Scale default bitrate to our size */
  cfg.rc_target_bitrate = gst_util_uint64_scale (cfg.rc_target_bitrate,
      state->width * state->height, cfg.g_w * cfg.g_h);

  cfg.g_w = state->width;
  cfg.g_h = state->height;
  /* libvpx timebase is the inverse of the framerate */
  cfg.g_timebase.num = state->fps_d;
  cfg.g_timebase.den = state->fps_n;

  cfg.g_error_resilient = encoder->error_resilient;
  cfg.g_lag_in_frames = encoder->max_latency;
  cfg.g_threads = encoder->threads;
  cfg.rc_end_usage = encoder->mode;
  cfg.rc_2pass_vbr_minsection_pct = encoder->minsection_pct;
  cfg.rc_2pass_vbr_maxsection_pct = encoder->maxsection_pct;
  /* Standalone qp-min do not make any sence, with bitrate=0 and qp-min=1
   * encoder will use only default qp-max=63. Also this will make
   * worst possbile quality.
   */
  if (encoder->bitrate != DEFAULT_BITRATE ||
      encoder->max_quantizer != DEFAULT_MAX_QUANTIZER) {
    /* explicit bitrate/quantizer properties win (bitrate is in bps,
     * libvpx wants kbps) */
    cfg.rc_target_bitrate = encoder->bitrate / 1000;
    cfg.rc_min_quantizer = encoder->min_quantizer;
    cfg.rc_max_quantizer = encoder->max_quantizer;
  } else {
    /* otherwise derive a fixed quantizer from the quality property */
    cfg.rc_min_quantizer = (gint) (63 - encoder->quality * 6.2);
    cfg.rc_max_quantizer = (gint) (63 - encoder->quality * 6.2);
  }
  cfg.rc_dropframe_thresh = encoder->drop_frame;
  cfg.rc_resize_allowed = encoder->resize_allowed;

  cfg.kf_mode = VPX_KF_AUTO;
  cfg.kf_min_dist = 0;
  cfg.kf_max_dist = encoder->max_keyframe_distance;

  /* multipass: first pass collects stats into a byte array, last pass
   * feeds a previously written stats file back to libvpx */
  cfg.g_pass = encoder->multipass_mode;
  if (encoder->multipass_mode == VPX_RC_FIRST_PASS) {
    encoder->first_pass_cache_content = g_byte_array_sized_new (4096);
  } else if (encoder->multipass_mode == VPX_RC_LAST_PASS) {
    GError *err = NULL;

    if (!encoder->multipass_cache_file) {
      GST_ELEMENT_ERROR (encoder, RESOURCE, OPEN_READ,
          ("No multipass cache file provided"), (NULL));
      return FALSE;
    }

    if (!g_file_get_contents (encoder->multipass_cache_file,
            (gchar **) & encoder->last_pass_cache_content.buf,
            &encoder->last_pass_cache_content.sz, &err)) {
      GST_ELEMENT_ERROR (encoder, RESOURCE, OPEN_READ,
          ("Failed to read multipass cache file provided"), ("%s",
              err->message));
      g_error_free (err);
      return FALSE;
    }
    cfg.rc_twopass_stats_in = encoder->last_pass_cache_content;
  }

  status = vpx_codec_enc_init (&encoder->encoder, &vpx_codec_vp8_cx_algo,
      &cfg, 0);
  if (status != VPX_CODEC_OK) {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT,
        ("Failed to initialize encoder"), ("%s", gst_vpx_error_name (status)));
    return FALSE;
  }

  /* FIXME move this to a set_speed() function */
  /* NOTE(review): the warning text below always says "to 0" even though the
   * value set is (speed - 1) for speed > 0 */
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_CPUUSED,
      (encoder->speed == 0) ? 0 : (encoder->speed - 1));
  if (status != VPX_CODEC_OK) {
    GST_WARNING_OBJECT (encoder,
        "Failed to set VP8E_SET_CPUUSED to 0: %s",
        gst_vpx_error_name (status));
  }

  /* NOTE(review): the statuses of the following controls are overwritten
   * without being checked; only the last (ENABLEAUTOALTREF) is inspected */
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_NOISE_SENSITIVITY,
      encoder->noise_sensitivity);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_SHARPNESS,
      encoder->sharpness);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_STATIC_THRESHOLD,
      encoder->static_threshold);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_TOKEN_PARTITIONS,
      encoder->partitions);
#if 0
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_MAXFRAMES,
      encoder->arnr_maxframes);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_STRENGTH,
      encoder->arnr_strength);
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_TYPE,
      encoder->arnr_type);
#endif
#ifdef HAVE_VP8ENC_TUNING
  status = vpx_codec_control (&encoder->encoder, VP8E_SET_TUNING,
      encoder->tuning);
#endif

  status = vpx_codec_control (&encoder->encoder, VP8E_SET_ENABLEAUTOALTREF,
      (encoder->auto_alt_ref_frames ? 1 : 0));
  if (status != VPX_CODEC_OK) {
    GST_WARNING_OBJECT (encoder,
        "Failed to set VP8E_ENABLEAUTOALTREF to %d: %s",
        (encoder->auto_alt_ref_frames ? 1 : 0), gst_vpx_error_name (status));
  }

  /* NOTE(review): dead store — cfg was already consumed by
   * vpx_codec_enc_init() above, so this assignment has no effect on the
   * running encoder; confirm whether it should precede init */
  cfg.g_lag_in_frames = encoder->lag_in_frames;

  gst_base_video_encoder_set_latency (base_video_encoder, 0,
      gst_util_uint64_scale (encoder->max_latency,
          state->fps_d * GST_SECOND, state->fps_n));
  encoder->inited = TRUE;

  /* prepare cached image buffer setup */
  image = &encoder->image;
  memset (image, 0, sizeof (*image));

  image->fmt = VPX_IMG_FMT_I420;
  image->bps = 12;
  image->x_chroma_shift = image->y_chroma_shift = 1;
  image->w = image->d_w = state->width;
  image->h = image->d_h = state->height;

  image->stride[VPX_PLANE_Y] =
      gst_video_format_get_row_stride (state->format, 0, state->width);
  image->stride[VPX_PLANE_U] =
      gst_video_format_get_row_stride (state->format, 1, state->width);
  image->stride[VPX_PLANE_V] =
      gst_video_format_get_row_stride (state->format, 2, state->width);
  /* NOTE(review): data is NULL here, so the plane pointers effectively hold
   * the per-plane byte offsets; presumably they are rebased onto each input
   * frame's mapped data before encoding — confirm in the frame handler */
  image->planes[VPX_PLANE_Y] =
      data + gst_video_format_get_component_offset (state->format, 0,
      state->width, state->height);
  image->planes[VPX_PLANE_U] =
      data + gst_video_format_get_component_offset (state->format, 1,
      state->width, state->height);
  image->planes[VPX_PLANE_V] =
      data + gst_video_format_get_component_offset (state->format, 2,
      state->width, state->height);

  caps = gst_caps_new_simple ("video/x-vp8",
      "width", G_TYPE_INT, state->width,
      "height", G_TYPE_INT, state->height,
      "framerate", GST_TYPE_FRACTION, state->fps_n,
      state->fps_d,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
      state->par_d, NULL);
  {
    GstStructure *s;
    GstBuffer *stream_hdr, *vorbiscomment;
    const GstTagList *iface_tags;
    GValue array = { 0, };
    GValue value = { 0, };

    s = gst_caps_get_structure (caps, 0);

    /* put buffers in a fixed list */
    g_value_init (&array, GST_TYPE_ARRAY);
    g_value_init (&value, GST_TYPE_BUFFER);

    /* Create Ogg stream-info */
    stream_hdr = gst_buffer_new_and_alloc (26);
    data = GST_BUFFER_DATA (stream_hdr);
    GST_WRITE_UINT8 (data, 0x4F);
    GST_WRITE_UINT32_BE (data + 1, 0x56503830); /* "VP80" */
    GST_WRITE_UINT8 (data + 5, 0x01);   /* stream info header */
    GST_WRITE_UINT8 (data + 6, 1);      /* Major version 1 */
    GST_WRITE_UINT8 (data + 7, 0);      /* Minor version 0 */
    GST_WRITE_UINT16_BE (data + 8, state->width);
    GST_WRITE_UINT16_BE (data + 10, state->height);
    GST_WRITE_UINT24_BE (data + 12, state->par_n);
    GST_WRITE_UINT24_BE (data + 15, state->par_d);
    GST_WRITE_UINT32_BE (data + 18, state->fps_n);
    GST_WRITE_UINT32_BE (data + 22, state->fps_d);
    GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_IN_CAPS);
    gst_value_set_buffer (&value, stream_hdr);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
    gst_buffer_unref (stream_hdr);

    iface_tags =
        gst_tag_setter_get_tag_list (GST_TAG_SETTER (base_video_encoder));
    if (iface_tags) {
      vorbiscomment =
          gst_tag_list_to_vorbiscomment_buffer (iface_tags,
          (const guint8 *) "OVP80\2 ", 7,
          "Encoded with GStreamer vp8enc " PACKAGE_VERSION);
      GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_IN_CAPS);
      g_value_init (&value, GST_TYPE_BUFFER);
      gst_value_set_buffer (&value, vorbiscomment);
      gst_value_array_append_value (&array, &value);
      g_value_unset (&value);
      gst_buffer_unref (vorbiscomment);
    }

    gst_structure_set_value (s, "streamheader", &array);
    g_value_unset (&array);
  }
  ret = gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (encoder), caps);
  gst_caps_unref (caps);

  return ret;
}