static void
convert_line_v210 (GstVideoVBIParser * parser, const guint8 * data)
{
  guint i;
  guint16 *y = (guint16 *) parser->work_data;
  guint16 *uv = y + parser->info.width;
  guint32 a, b, c, d;

  /* Convert the line */
  for (i = 0; i < parser->info.width - 5; i += 6) {
    a = GST_READ_UINT32_LE (data + (i / 6) * 16 + 0);
    b = GST_READ_UINT32_LE (data + (i / 6) * 16 + 4);
    c = GST_READ_UINT32_LE (data + (i / 6) * 16 + 8);
    d = GST_READ_UINT32_LE (data + (i / 6) * 16 + 12);

    *uv++ = (a >> 0) & 0x3ff;
    *y++ = (a >> 10) & 0x3ff;
    *uv++ = (a >> 20) & 0x3ff;
    *y++ = (b >> 0) & 0x3ff;

    *uv++ = (b >> 10) & 0x3ff;
    *y++ = (b >> 20) & 0x3ff;
    *uv++ = (c >> 0) & 0x3ff;
    *y++ = (c >> 10) & 0x3ff;

    *uv++ = (c >> 20) & 0x3ff;
    *y++ = (d >> 0) & 0x3ff;
    *uv++ = (d >> 10) & 0x3ff;
    *y++ = (d >> 20) & 0x3ff;
  }

  if (0) {
    guint off = 0;
    gsize length = parser->info.width * 2;

    GST_TRACE ("--------"
        "-------------------------------------------------------------------");
    while (off < length) {
      gchar buf[128];

      /* gst_info_dump_mem_line will process 16 bytes (8 16bit chunks) at most */
      gst_info_dump_mem16_line (buf, sizeof (buf),
          (guint16 *) parser->work_data, off, length - off);
      GST_TRACE ("%s", buf);
      off += 8;
    }
    GST_TRACE ("--------"
        "-------------------------------------------------------------------");
  }
}
static GstFlowReturn
gst_glimage_sink_show_frame (GstVideoSink * vsink, GstBuffer * buf)
{
  GstGLImageSink *glimage_sink;
  GstBuffer *stored_buffer;

  GST_TRACE ("rendering buffer:%p", buf);

  glimage_sink = GST_GLIMAGE_SINK (vsink);

  GST_TRACE ("redisplay texture:%u of size:%ux%u, window size:%ux%u",
      glimage_sink->next_tex, GST_VIDEO_INFO_WIDTH (&glimage_sink->info),
      GST_VIDEO_INFO_HEIGHT (&glimage_sink->info),
      GST_VIDEO_SINK_WIDTH (glimage_sink),
      GST_VIDEO_SINK_HEIGHT (glimage_sink));

  /* Avoid releasing the texture while drawing */
  GST_GLIMAGE_SINK_LOCK (glimage_sink);
  glimage_sink->redisplay_texture = glimage_sink->next_tex;
  stored_buffer = glimage_sink->stored_buffer;
  glimage_sink->stored_buffer = gst_buffer_ref (buf);
  GST_GLIMAGE_SINK_UNLOCK (glimage_sink);

  if (stored_buffer)
    gst_buffer_unref (stored_buffer);

  /* Ask the underlying window to redraw its content */
  if (!gst_glimage_sink_redisplay (glimage_sink))
    goto redisplay_failed;

  GST_TRACE ("post redisplay");

  if (g_atomic_int_get (&glimage_sink->to_quit) != 0) {
    GST_ELEMENT_ERROR (glimage_sink, RESOURCE, NOT_FOUND,
        ("%s", gst_gl_context_get_error ()), (NULL));
    gst_gl_upload_release_buffer (glimage_sink->upload);
    return GST_FLOW_ERROR;
  }

  return GST_FLOW_OK;

  /* ERRORS */
redisplay_failed:
  {
    gst_gl_upload_release_buffer (glimage_sink->upload);
    GST_ELEMENT_ERROR (glimage_sink, RESOURCE, NOT_FOUND,
        ("%s", gst_gl_context_get_error ()), (NULL));
    return GST_FLOW_ERROR;
  }
}
static void
bus_msg_cb (GstBus * bus, GstMessage * msg, gpointer pipeline)
{
  switch (msg->type) {
    case GST_MESSAGE_ERROR:{
      GST_ERROR ("%s bus error: %" GST_PTR_FORMAT,
          GST_ELEMENT_NAME (pipeline), msg);
      GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
          GST_DEBUG_GRAPH_SHOW_ALL, "bus_error");
      fail ("Error received on %s bus", GST_ELEMENT_NAME (pipeline));
      break;
    }
    case GST_MESSAGE_WARNING:{
      GST_WARNING ("%s bus: %" GST_PTR_FORMAT, GST_ELEMENT_NAME (pipeline),
          msg);
      GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
          GST_DEBUG_GRAPH_SHOW_ALL, "warning");
      break;
    }
    case GST_MESSAGE_STATE_CHANGED:{
      GST_TRACE ("%s bus event: %" GST_PTR_FORMAT,
          GST_ELEMENT_NAME (pipeline), msg);
      break;
    }
    default:
      break;
  }
}
guint64
gst_gl_query_result (GstGLQuery * query)
{
  const GstGLFuncs *gl;
  guint64 ret;

  g_return_val_if_fail (query != NULL, 0);
  g_return_val_if_fail (!query->start_called, 0);

  if (!query->supported)
    return 0;

  gl = query->context->gl_vtable;

  if (gl->GetQueryObjectui64v) {
    gl->GetQueryObjectui64v (query->query_id, GL_QUERY_RESULT, &ret);
  } else {
    guint tmp;

    gl->GetQueryObjectuiv (query->query_id, GL_QUERY_RESULT, &tmp);
    ret = tmp;
  }

  GST_TRACE ("%p get result %" G_GUINT64_FORMAT " type \'%s\' id %u", query,
      ret, _query_type_to_string (query->query_type), query->query_id);

  return ret;
}
static void
remb_event_manager_calc_min (RembEventManager * manager)
{
  guint remb_min = 0;
  GstClockTime time = kms_utils_get_time_nsecs ();
  GstClockTime oldest_time = GST_CLOCK_TIME_NONE;
  GHashTableIter iter;
  gpointer key, v;

  g_hash_table_iter_init (&iter, manager->remb_hash);
  while (g_hash_table_iter_next (&iter, &key, &v)) {
    guint br = ((RembHashValue *) v)->bitrate;
    GstClockTime ts = ((RembHashValue *) v)->ts;

    if (time - ts > REMB_HASH_CLEAR_INTERVAL) {
      GST_TRACE ("Remove entry %" G_GUINT32_FORMAT, GPOINTER_TO_UINT (key));
      g_hash_table_iter_remove (&iter);
      continue;
    }

    if (remb_min == 0) {
      remb_min = br;
    } else {
      remb_min = MIN (remb_min, br);
    }

    oldest_time = MIN (oldest_time, ts);
  }

  manager->oldest_remb_value = oldest_time;
  remb_event_manager_set_min (manager, remb_min);
}
static void
bus_msg (GstBus * bus, GstMessage * msg, gpointer pipe)
{
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR:{
      GError *err = NULL;
      gchar *dbg_info = NULL;
      gchar *err_str;

      GST_ERROR ("Error: %" GST_PTR_FORMAT, msg);
      GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipe),
          GST_DEBUG_GRAPH_SHOW_ALL, "bus_error");
      gst_message_parse_error (msg, &err, &dbg_info);

      err_str = g_strdup_printf ("Error received on bus: %s: %s",
          err->message, dbg_info);
      g_error_free (err);
      g_free (dbg_info);

      fail (err_str);
      g_free (err_str);
      break;
    }
    case GST_MESSAGE_STATE_CHANGED:{
      GST_TRACE ("Event: %" GST_PTR_FORMAT, msg);
      break;
    }
    default:
      break;
  }
}
static gboolean
_init_download (GstGLDownload * download)
{
  GstVideoFormat v_format;
  guint out_width, out_height;
  GstVideoInfo in_info;

  v_format = GST_VIDEO_INFO_FORMAT (&download->info);
  out_width = GST_VIDEO_INFO_WIDTH (&download->info);
  out_height = GST_VIDEO_INFO_HEIGHT (&download->info);

  if (download->initted)
    return TRUE;

  GST_TRACE ("initializing texture download for format %s",
      gst_video_format_to_string (v_format));

  if (USING_GLES2 (download->context) && !USING_GLES3 (download->context)) {
    /* GL_RGBA is the only officially supported texture format in GLES2 */
    if (v_format == GST_VIDEO_FORMAT_RGB || v_format == GST_VIDEO_FORMAT_BGR) {
      gst_gl_context_set_error (download->context, "Cannot download RGB "
          "textures in GLES2");
      return FALSE;
    }
  }

  gst_video_info_set_format (&in_info, GST_VIDEO_FORMAT_RGBA, out_width,
      out_height);

  gst_gl_color_convert_set_format (download->convert, &in_info,
      &download->info);

  return TRUE;
}
static void
gst_tracer_record_build_format (GstTracerRecord * self)
{
  GstStructure *structure = self->spec;
  GString *s;
  gchar *name = (gchar *) g_quark_to_string (structure->name);
  gchar *p;

  g_return_if_fail (g_str_has_suffix (name, ".class"));

  /* announce the format */
  GST_TRACE ("%" GST_PTR_FORMAT, structure);

  /* cut off '.class' suffix */
  name = g_strdup (name);
  p = strrchr (name, '.');
  g_assert (p != NULL);
  *p = '\0';

  s = g_string_sized_new (STRUCTURE_ESTIMATED_STRING_LEN (structure));
  g_string_append (s, name);
  gst_structure_foreach (structure, build_field_template, s);
  g_string_append_c (s, ';');

  self->format = g_string_free (s, FALSE);
  GST_DEBUG ("new format string: %s", self->format);
  g_free (name);
}
static void
load_modules_from_dir (std::string &location,
    std::map<std::string, KurentoModule *> &modules)
{
  DIR *dir;
  struct dirent *ent;
  std::string name;

  GST_TRACE ("Looking for modules in %s", location.c_str() );
  dir = opendir (location.c_str() );

  if (dir == NULL) {
    GST_ERROR ("Unable to load modules from: %s", location.c_str() );
    return;
  }

  /* iterate over all the files and directories within the directory */
  while ( (ent = readdir (dir) ) != NULL) {
    name = ent->d_name;

    if (ent->d_type == DT_REG) {
      std::string name = location + "/" + ent->d_name;
      load_module (name, modules);
    } else if (ent->d_type == DT_DIR && "." != name && ".." != name) {
      std::string dirName = location + "/" + ent->d_name;
      load_modules_from_dir (dirName, modules);
    } else if (ent->d_type == DT_LNK) {
      // TODO: Follow sym link and try to load plugins
    }
  }

  closedir (dir);
}
static void
msg_cb (GstGLWindowWin32 * window_win32, MSG * msg, gpointer user_data)
{
  GST_TRACE ("handle message");
  TranslateMessage (msg);
  DispatchMessage (msg);
}
static GstFlowReturn
kms_rtcp_demux_chain (GstPad * chain, GstObject * parent, GstBuffer * buffer)
{
  KmsRtcpDemux *self = KMS_RTCP_DEMUX (parent);
  GstMapInfo map;
  guint8 pt;

  if (!gst_buffer_map (buffer, &map, GST_MAP_READ)) {
    gst_buffer_unref (buffer);
    GST_ERROR_OBJECT (parent, "Buffer cannot be mapped");
    return GST_FLOW_ERROR;
  }

  pt = map.data[1];
  gst_buffer_unmap (buffer, &map);

  /* 200-204 is the range of valid values for an RTCP pt according to RFC 3550 */
  if (pt >= 200 && pt <= 204) {
    GST_TRACE ("Buffer is rtcp: %d", pt);
    gst_pad_push (self->priv->rtcp_src, buffer);
  } else {
    gst_pad_push (self->priv->rtp_src, buffer);
  }

  return GST_FLOW_OK;
}
static void
_gst_gl_context_thread_run_generic (RunGenericData * data)
{
  GST_TRACE ("running function:%p data:%p", data->func, data->data);

  data->func (data->context, data->data);
}
static void
gst_gl_window_win32_run (GstGLWindow * window)
{
  gint bRet;
  MSG msg;

  GST_INFO ("begin message loop");

  while (TRUE) {
    bRet = GetMessage (&msg, NULL, 0, 0);

    if (bRet == 0)
      break;
    else if (bRet == -1) {
      GST_WARNING ("Failed to get message 0x%x",
          (unsigned int) GetLastError ());
      break;
    } else {
      GST_TRACE ("handle message");
      TranslateMessage (&msg);
      DispatchMessage (&msg);
    }
  }

  GST_INFO ("end message loop");
}
static void
_gen_texture (GstGLContext * context, GenTexture * data)
{
  const GstGLFuncs *gl = context->gl_vtable;
  GLenum internal_format;

  GST_TRACE ("Generating texture format:%u dimensions:%ux%u", data->format,
      data->width, data->height);

  gl->GenTextures (1, &data->result);
  gl->BindTexture (GL_TEXTURE_2D, data->result);

  internal_format =
      gst_gl_sized_gl_format_from_gl_format_type (context, GL_RGBA,
      GL_UNSIGNED_BYTE);
  if (data->width > 0 && data->height > 0)
    gl->TexImage2D (GL_TEXTURE_2D, 0, internal_format, data->width,
        data->height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);

  gl->TexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  gl->TexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  gl->TexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  gl->TexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  GST_LOG ("generated texture id:%d", data->result);
}
void
kms_webrtc_transport_nice_agent_recv_cb (NiceAgent * agent, guint stream_id,
    guint component_id, guint len, gchar * buf, gpointer user_data)
{
  /* Nothing to do, this callback is only for negotiation */
  GST_TRACE ("ICE data received on stream_id: '%" G_GUINT32_FORMAT
      "' component_id: '%" G_GUINT32_FORMAT "'", stream_id, component_id);
}
void
gst_toc_dump (GstToc * toc)
{
#ifndef GST_DISABLE_GST_DEBUG
  GST_TRACE (" Toc %p, scope: %s, tags: %" GST_PTR_FORMAT, toc,
      (toc->scope == GST_TOC_SCOPE_GLOBAL) ? "global" : "current", toc->tags);
  gst_toc_dump_entries (toc->entries, 2);
#endif
}
static gboolean
gst_net_client_clock_socket_cb (GSocket * socket, GIOCondition condition,
    gpointer user_data)
{
  GIOCondition *p_cond = user_data;

  GST_TRACE ("socket %p I/O condition: 0x%02x", socket, condition);

  *p_cond = condition;

  return TRUE;
}
/**
 * gst_video_codec_frame_ref:
 * @frame: a #GstVideoCodecFrame
 *
 * Increases the refcount of the given frame by one.
 *
 * Returns: @frame
 */
GstVideoCodecFrame *
gst_video_codec_frame_ref (GstVideoCodecFrame * frame)
{
  g_return_val_if_fail (frame != NULL, NULL);

  GST_TRACE ("%p ref %d->%d", frame, frame->ref_count, frame->ref_count + 1);

  g_atomic_int_inc (&frame->ref_count);

  return frame;
}
/**
 * gst_video_codec_state_ref:
 * @state: a #GstVideoCodecState
 *
 * Increases the refcount of the given state by one.
 *
 * Returns: @state
 */
GstVideoCodecState *
gst_video_codec_state_ref (GstVideoCodecState * state)
{
  g_return_val_if_fail (state != NULL, NULL);

  GST_TRACE ("%p ref %d->%d", state, state->ref_count, state->ref_count + 1);

  g_atomic_int_inc (&state->ref_count);

  return state;
}
/**
 * gst_video_codec_frame_unref:
 * @frame: a #GstVideoCodecFrame
 *
 * Decreases the refcount of the frame. If the refcount reaches 0, the frame
 * will be freed.
 */
void
gst_video_codec_frame_unref (GstVideoCodecFrame * frame)
{
  g_return_if_fail (frame != NULL);
  g_return_if_fail (frame->ref_count > 0);

  GST_TRACE ("%p unref %d->%d", frame, frame->ref_count,
      frame->ref_count - 1);

  if (g_atomic_int_dec_and_test (&frame->ref_count)) {
    _gst_video_codec_frame_free (frame);
  }
}
/**
 * gst_video_codec_state_unref:
 * @state: a #GstVideoCodecState
 *
 * Decreases the refcount of the state. If the refcount reaches 0, the state
 * will be freed.
 */
void
gst_video_codec_state_unref (GstVideoCodecState * state)
{
  g_return_if_fail (state != NULL);
  g_return_if_fail (state->ref_count > 0);

  GST_TRACE ("%p unref %d->%d", state, state->ref_count,
      state->ref_count - 1);

  if (g_atomic_int_dec_and_test (&state->ref_count)) {
    _gst_video_codec_state_free (state);
  }
}
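/* A minimal usage sketch, not taken from the library itself: a hypothetical
 * decoder keeps a GstVideoCodecFrame alive in a pending queue while it is
 * decoded asynchronously, using the ref/unref helpers above. The queue and
 * function names are illustrative assumptions. */
#include <gst/video/video.h>

static void
my_decoder_queue_frame (GQueue * pending, GstVideoCodecFrame * frame)
{
  /* take an extra reference so the frame survives until the decode callback */
  g_queue_push_tail (pending, gst_video_codec_frame_ref (frame));
}

static void
my_decoder_frame_done (GQueue * pending)
{
  GstVideoCodecFrame *frame = g_queue_pop_head (pending);

  if (frame != NULL)
    gst_video_codec_frame_unref (frame);        /* drop our extra reference */
}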
static gboolean
buffer_list_calc_size (GstBuffer ** buf, guint idx, gpointer data)
{
  guint *p_size = data;
  gsize buf_size;

  buf_size = gst_buffer_get_size (*buf);
  GST_TRACE ("buffer %u has size %" G_GSIZE_FORMAT, idx, buf_size);
  *p_size += buf_size;

  return TRUE;
}
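/* A minimal sketch showing how buffer_list_calc_size() above can be used as a
 * GstBufferListFunc to total the size of a GstBufferList; the wrapper name is
 * an illustrative assumption. */
#include <gst/gst.h>

static guint
buffer_list_total_size (GstBufferList * list)
{
  guint total = 0;

  /* visits every buffer in the list and accumulates its size into 'total' */
  gst_buffer_list_foreach (list, buffer_list_calc_size, &total);

  return total;
}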
static GstVdpVideoMemory *
_vdp_video_mem_new (GstAllocator * allocator, GstMemory * parent,
    GstVdpDevice * device, GstVideoInfo * info)
{
  VdpStatus status;
  GstVdpVideoMemory *mem;
  VdpVideoSurface surface;

  mem = g_slice_new0 (GstVdpVideoMemory);
  _vdp_video_mem_init (mem, allocator, parent, device, info);

  GST_TRACE
      ("Calling VdpVideoSurfaceCreate(chroma_type:%d, width:%d, height:%d)",
      mem->chroma_type, mem->info->width, mem->info->height);

  status =
      device->vdp_video_surface_create (device->device, mem->chroma_type,
      mem->info->width, mem->info->height, &surface);
  if (status != VDP_STATUS_OK)
    goto create_error;

  /* device->vdp_video_surface_get_parameters (device->device, &chroma_type, */
  /*     &width, &height); */

  GST_TRACE ("created surface %u", surface);

  mem->surface = surface;

  return mem;

  /* ERRORS */
create_error:
  {
    GST_ERROR ("Failed to create video surface: %s",
        device->vdp_get_error_string (status));
    g_slice_free (GstVdpVideoMemory, mem);
    return NULL;
  }
}
static gboolean
gst_gl_mixer_sink_query (GstAggregator * agg, GstAggregatorPad * bpad,
    GstQuery * query)
{
  gboolean ret = FALSE;
  GstGLMixer *mix = GST_GL_MIXER (agg);

  GST_TRACE ("QUERY %" GST_PTR_FORMAT, query);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_ALLOCATION:
    {
      GstQuery *decide_query = NULL;
      gboolean negotiated;

      GST_OBJECT_LOCK (mix);
      if (G_UNLIKELY (!(negotiated = mix->priv->negotiated))) {
        GST_DEBUG_OBJECT (mix,
            "not negotiated yet, can't answer ALLOCATION query");
        GST_OBJECT_UNLOCK (mix);
        return FALSE;
      }
      if ((decide_query = mix->priv->query))
        gst_query_ref (decide_query);
      GST_OBJECT_UNLOCK (mix);

      GST_DEBUG_OBJECT (mix,
          "calling propose allocation with query %" GST_PTR_FORMAT,
          decide_query);

      /* pass the query to the propose_allocation vmethod if any */
      ret = gst_gl_mixer_propose_allocation (mix, decide_query, query);

      if (decide_query)
        gst_query_unref (decide_query);

      GST_DEBUG_OBJECT (mix, "ALLOCATION ret %d, %" GST_PTR_FORMAT, ret,
          query);
      break;
    }
    case GST_QUERY_CONTEXT:
    {
      ret = gst_gl_handle_context_query ((GstElement *) mix, query,
          &mix->display);
      break;
    }
    default:
      ret = GST_AGGREGATOR_CLASS (parent_class)->sink_query (agg, bpad, query);
      break;
  }

  return ret;
}
void
gst_vulkan_trash_free (GstVulkanTrash * trash)
{
  if (!trash)
    return;

  GST_TRACE ("Freeing trash object %p with fence %" GST_PTR_FORMAT, trash,
      trash->fence);

  gst_vulkan_fence_unref (trash->fence);

  g_free (trash);
}
static void
link_new_agnosticbin (gchar * key, GstElement * adder,
    GstElement * agnosticbin)
{
  GstPad *srcpad = NULL, *sinkpad = NULL;
  char *padname;

  padname = g_object_get_data (G_OBJECT (agnosticbin), KEY_SINK_PAD_NAME);
  if (padname == NULL) {
    GST_ERROR ("No pad associated with %" GST_PTR_FORMAT, agnosticbin);
    goto end;
  }

  if (g_str_equal (key, padname)) {
    /* Do not connect the origin audio input */
    GST_TRACE ("Do not connect echo audio input %" GST_PTR_FORMAT,
        agnosticbin);
    goto end;
  }

  srcpad = gst_element_get_request_pad (agnosticbin, "src_%u");
  if (srcpad == NULL) {
    GST_ERROR ("Could not get src pad in %" GST_PTR_FORMAT, agnosticbin);
    goto end;
  }

  sinkpad = gst_element_get_request_pad (adder, "sink_%u");
  if (sinkpad == NULL) {
    GST_ERROR ("Could not get sink pad in %" GST_PTR_FORMAT, adder);
    gst_element_release_request_pad (agnosticbin, srcpad);
    goto end;
  }

  GST_DEBUG ("Linking %" GST_PTR_FORMAT " to %" GST_PTR_FORMAT, srcpad,
      sinkpad);
  if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
    GST_ERROR ("Could not link %" GST_PTR_FORMAT " to %" GST_PTR_FORMAT,
        srcpad, sinkpad);
    gst_element_release_request_pad (agnosticbin, srcpad);
    gst_element_release_request_pad (adder, sinkpad);
  }

end:
  if (srcpad != NULL) {
    g_object_unref (srcpad);
  }

  if (sinkpad != NULL) {
    g_object_unref (sinkpad);
  }
}
/**
 * gst_caps_features_free:
 * @features: (in) (transfer full): the #GstCapsFeatures to free
 *
 * Frees a #GstCapsFeatures and all its values. The caps features must not
 * have a parent when this function is called.
 *
 * Since: 1.2
 */
void
gst_caps_features_free (GstCapsFeatures * features)
{
  g_return_if_fail (features != NULL);
  g_return_if_fail (features->parent_refcount == NULL);

  g_array_free (features->array, TRUE);
#ifdef USE_POISONING
  memset (features, 0xff, sizeof (GstCapsFeatures));
#endif
  GST_TRACE ("free caps features %p", features);

  g_slice_free (GstCapsFeatures, features);
}
/* Filter out the audio and video related fields from the up-stream caps,
   because they are not relevant to the input caps of this element and
   can cause caps negotiation failures with adaptive bitrate streams */
static void
gst_cenc_remove_codec_fields (GstStructure * gs)
{
  gint j, n_fields = gst_structure_n_fields (gs);

  for (j = n_fields - 1; j >= 0; --j) {
    const gchar *field_name;

    field_name = gst_structure_nth_field_name (gs, j);
    GST_TRACE ("Check field \"%s\" for removal", field_name);

    if (g_strcmp0 (field_name, "base-profile") == 0 ||
        g_strcmp0 (field_name, "codec_data") == 0 ||
        g_strcmp0 (field_name, "height") == 0 ||
        g_strcmp0 (field_name, "framerate") == 0 ||
        g_strcmp0 (field_name, "level") == 0 ||
        g_strcmp0 (field_name, "pixel-aspect-ratio") == 0 ||
        g_strcmp0 (field_name, "profile") == 0 ||
        g_strcmp0 (field_name, "rate") == 0 ||
        g_strcmp0 (field_name, "width") == 0) {
      gst_structure_remove_field (gs, field_name);
      GST_TRACE ("Removing field %s", field_name);
    }
  }
}
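/* A minimal sketch, not taken from the element itself: applying
 * gst_cenc_remove_codec_fields() above to every structure of a writable caps
 * before using it for negotiation. The wrapper name is an illustrative
 * assumption. */
static void
gst_cenc_remove_codec_fields_from_caps (GstCaps * caps)
{
  guint i, n = gst_caps_get_size (caps);

  /* caps must be writable, since the helper removes fields in place */
  for (i = 0; i < n; i++)
    gst_cenc_remove_codec_fields (gst_caps_get_structure (caps, i));
}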
/**
 * gst_caps_features_new_empty:
 *
 * Creates a new, empty #GstCapsFeatures.
 *
 * Free-function: gst_caps_features_free
 *
 * Returns: (transfer full): a new, empty #GstCapsFeatures
 *
 * Since: 1.2
 */
GstCapsFeatures *
gst_caps_features_new_empty (void)
{
  GstCapsFeatures *features;

  features = g_slice_new (GstCapsFeatures);
  features->type = _gst_caps_features_type;
  features->parent_refcount = NULL;
  features->array = g_array_new (FALSE, FALSE, sizeof (GQuark));
  features->is_any = FALSE;

  GST_TRACE ("created caps features %p", features);

  return features;
}
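/* A minimal sketch pairing gst_caps_features_new_empty() with
 * gst_caps_features_add() and gst_caps_features_free() from above; the
 * feature string and function name used here are only illustrative. */
static void
caps_features_example (void)
{
  GstCapsFeatures *features = gst_caps_features_new_empty ();

  gst_caps_features_add (features, "memory:GLMemory");

  /* ... inspect or copy the features; since they were never attached to any
   * caps (no parent), they are freed explicitly ... */
  gst_caps_features_free (features);
}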
static gboolean
_upload_memory (GstGLUpload * upload)
{
  guint in_width, in_height;
  guint in_texture[GST_VIDEO_MAX_PLANES];
  GstBuffer *inbuf;
  GstVideoFrame out_frame;
  GstVideoInfo out_info;
  gint i;

  in_width = GST_VIDEO_INFO_WIDTH (&upload->in_info);
  in_height = GST_VIDEO_INFO_HEIGHT (&upload->in_info);

  if (!upload->initted) {
    if (!_init_upload (upload)) {
      return FALSE;
    }
  }

  inbuf = gst_buffer_new ();
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&upload->in_info); i++) {
    in_texture[i] = upload->in_tex[i]->tex_id;
    gst_buffer_append_memory (inbuf,
        gst_memory_ref ((GstMemory *) upload->in_tex[i]));
  }

  GST_TRACE ("uploading with textures:%u,%u,%u dimensions:%ux%u",
      in_texture[0], in_texture[1], in_texture[2], in_width, in_height);

  upload->priv->outbuf = gst_gl_color_convert_perform (upload->convert, inbuf);
  gst_buffer_unref (inbuf);

  gst_video_info_set_format (&out_info, GST_VIDEO_FORMAT_RGBA, in_width,
      in_height);

  if (!gst_video_frame_map (&out_frame, &out_info, upload->priv->outbuf,
          GST_MAP_READ | GST_MAP_GL)) {
    gst_buffer_unref (upload->priv->outbuf);
    upload->priv->outbuf = NULL;
    return FALSE;
  }

  upload->out_tex->tex_id = *(guint *) out_frame.data[0];

  gst_video_frame_unmap (&out_frame);

  upload->priv->released = FALSE;

  return TRUE;
}