/* GstBaseSrc::set_caps vmethod: negotiate the requested caps with the Aravis
 * camera — configure region, binning, pixel format, frame rate, gain and
 * exposure — then (re)create the stream, pre-queue buffers and restart
 * acquisition.
 *
 * Returns TRUE (the function currently has no failure path it reports).
 */
static gboolean
gst_aravis_set_caps (GstBaseSrc * src, GstCaps * caps)
{
  GstAravis *gst_aravis = GST_ARAVIS (src);
  GstStructure *structure;
  ArvPixelFormat pixel_format;
  int height, width;
  int bpp, depth;
  const GValue *frame_rate;
  const char *caps_string;
  unsigned int i;
  guint32 fourcc;

  GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

  /* The camera must not be acquiring while it is reconfigured. */
  arv_camera_stop_acquisition (gst_aravis->camera);

  if (gst_aravis->stream != NULL)
    g_object_unref (gst_aravis->stream);

  structure = gst_caps_get_structure (caps, 0);

  gst_structure_get_int (structure, "width", &width);
  gst_structure_get_int (structure, "height", &height);
  frame_rate = gst_structure_get_value (structure, "framerate");
  gst_structure_get_fourcc (structure, "format", &fourcc);
  gst_structure_get_int (structure, "bpp", &bpp);
  gst_structure_get_int (structure, "depth", &depth);

  pixel_format =
      arv_pixel_format_from_gst_caps (gst_structure_get_name (structure),
      bpp, depth, fourcc);

  arv_camera_set_region (gst_aravis->camera, gst_aravis->offset_x,
      gst_aravis->offset_y, width, height);
  arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning,
      gst_aravis->v_binning);
  arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

  if (frame_rate != NULL) {
    double dbl_frame_rate;

    dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
        (double) gst_value_get_fraction_denominator (frame_rate);

    GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
    arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

    /* Allow about three frame periods before timing out on a buffer,
     * but never less than the default timeout. */
    if (dbl_frame_rate > 0.0)
      gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
          3e6 / dbl_frame_rate);
    else
      gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
  } else
    gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

  /* FIX: "%Ld" is not a valid GLib printf directive for a 64-bit value;
   * use G_GUINT64_FORMAT so the timeout is printed correctly on all
   * platforms. */
  GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %" G_GUINT64_FORMAT " µs",
      gst_aravis->buffer_timeout_us);

  GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz",
      arv_camera_get_frame_rate (gst_aravis->camera));

  if (gst_aravis->gain_auto) {
    arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
    /* FIX: removed stray gain_auto argument — the format string has no
     * directive for it. */
    GST_DEBUG_OBJECT (gst_aravis, "Auto Gain = continuous");
  } else {
    if (gst_aravis->gain >= 0) {
      GST_DEBUG_OBJECT (gst_aravis, "Gain = %d", gst_aravis->gain);
      arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_OFF);
      arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
    }
    GST_DEBUG_OBJECT (gst_aravis, "Actual gain = %d",
        arv_camera_get_gain (gst_aravis->camera));
  }

  if (gst_aravis->exposure_auto) {
    arv_camera_set_exposure_time_auto (gst_aravis->camera,
        ARV_AUTO_CONTINUOUS);
    /* FIX: "contiuous" typo and stray exposure_auto argument. */
    GST_DEBUG_OBJECT (gst_aravis, "Auto Exposure = continuous");
  } else {
    if (gst_aravis->exposure_time_us > 0.0) {
      GST_DEBUG_OBJECT (gst_aravis, "Exposure = %g µs",
          gst_aravis->exposure_time_us);
      arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_OFF);
      arv_camera_set_exposure_time (gst_aravis->camera,
          gst_aravis->exposure_time_us);
    }
    GST_DEBUG_OBJECT (gst_aravis, "Actual exposure = %g µs",
        arv_camera_get_exposure_time (gst_aravis->camera));
  }

  if (gst_aravis->fixed_caps != NULL)
    gst_caps_unref (gst_aravis->fixed_caps);

  /* Build the fixed caps advertised from now on from the negotiated
   * pixel format. */
  caps_string = arv_pixel_format_to_gst_caps_string (pixel_format);
  if (caps_string != NULL) {
    GstStructure *structure;
    GstCaps *caps;

    caps = gst_caps_new_empty ();
    structure = gst_structure_from_string (caps_string, NULL);
    gst_structure_set (structure,
        "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);
    if (frame_rate != NULL)
      gst_structure_set_value (structure, "framerate", frame_rate);
    gst_caps_append_structure (caps, structure);
    gst_aravis->fixed_caps = caps;
  } else
    gst_aravis->fixed_caps = NULL;

  gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
  gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera,
      NULL, NULL);

  /* Pre-queue empty buffers sized for one frame payload each. */
  for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
    arv_stream_push_buffer (gst_aravis->stream,
        arv_buffer_new (gst_aravis->payload, NULL));

  GST_LOG_OBJECT (gst_aravis, "Start acquisition");
  arv_camera_start_acquisition (gst_aravis->camera);

  gst_aravis->timestamp_offset = 0;
  gst_aravis->last_timestamp = 0;

  return TRUE;
}
/* Translate the element's properties into a freshly allocated WavpackConfig
 * for the encoder library, and create the optional correction (wvc) source
 * pad when hybrid/lossy mode is enabled. */
static void
gst_wavpack_enc_set_wp_config (GstWavpackEnc * enc)
{
  WavpackConfig *cfg;

  enc->wp_config = g_new0 (WavpackConfig, 1);
  cfg = enc->wp_config;

  /* General stream description. */
  cfg->bytes_per_sample = GST_ROUND_UP_8 (enc->depth) / 8;
  cfg->bits_per_sample = enc->depth;
  cfg->num_channels = enc->channels;
  cfg->channel_mask = enc->channel_mask;
  cfg->sample_rate = enc->samplerate;

  /* Encoding speed/quality mode. */
  switch (enc->mode) {
#if 0
    case GST_WAVPACK_ENC_MODE_VERY_FAST:
      cfg->flags |= CONFIG_VERY_FAST_FLAG;
      cfg->flags |= CONFIG_FAST_FLAG;
      break;
#endif
    case GST_WAVPACK_ENC_MODE_FAST:
      cfg->flags |= CONFIG_FAST_FLAG;
      break;
    case GST_WAVPACK_ENC_MODE_DEFAULT:
      break;
    case GST_WAVPACK_ENC_MODE_HIGH:
      cfg->flags |= CONFIG_HIGH_FLAG;
      break;
#ifndef WAVPACK_OLD_API
    case GST_WAVPACK_ENC_MODE_VERY_HIGH:
      cfg->flags |= (CONFIG_HIGH_FLAG | CONFIG_VERY_HIGH_FLAG);
      break;
#endif
  }

  /* A bitrate or bits-per-sample target switches on hybrid (lossy) mode. */
  if (enc->bitrate) {
    cfg->flags |= (CONFIG_HYBRID_FLAG | CONFIG_BITRATE_KBPS);
    cfg->bitrate = enc->bitrate / 1000.0;
  } else if (enc->bps) {
    cfg->flags |= CONFIG_HYBRID_FLAG;
    cfg->bitrate = enc->bps;
  }

  /* The correction stream only makes sense in lossy (hybrid) mode. */
  if (!(cfg->flags & CONFIG_HYBRID_FLAG)) {
    if (enc->correction_mode > GST_WAVPACK_CORRECTION_MODE_OFF) {
      enc->correction_mode = 0;
      GST_WARNING_OBJECT (enc, "setting correction mode only has "
          "any effect if a bitrate is provided.");
    }
  } else if (enc->correction_mode > GST_WAVPACK_CORRECTION_MODE_OFF) {
    GstCaps *wvc_caps = gst_caps_new_simple ("audio/x-wavpack-correction",
        "framed", G_TYPE_BOOLEAN, TRUE, NULL);

    enc->wvcsrcpad =
        gst_pad_new_from_static_template (&wvcsrc_factory, "wvcsrc");

    /* Try to add the correction src pad; leave correction off on failure. */
    GST_DEBUG_OBJECT (enc, "Adding correction pad with caps %"
        GST_PTR_FORMAT, wvc_caps);
    if (!gst_pad_set_caps (enc->wvcsrcpad, wvc_caps)) {
      enc->correction_mode = 0;
      GST_WARNING_OBJECT (enc, "setting correction caps failed");
    } else {
      gst_pad_use_fixed_caps (enc->wvcsrcpad);
      gst_pad_set_active (enc->wvcsrcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (enc), enc->wvcsrcpad);
      cfg->flags |= CONFIG_CREATE_WVC;
      if (enc->correction_mode == GST_WAVPACK_CORRECTION_MODE_OPTIMIZED)
        cfg->flags |= CONFIG_OPTIMIZE_WVC;
    }
    gst_caps_unref (wvc_caps);
  }

  gst_element_no_more_pads (GST_ELEMENT (enc));

  /* Optional MD5 checksum over the raw samples. */
  if (enc->md5 && !enc->md5_context) {
    cfg->flags |= CONFIG_MD5_CHECKSUM;
    enc->md5_context = g_new0 (MD5_CTX, 1);
    MD5Init (enc->md5_context);
  }

  /* Extra encode processing level. */
  if (enc->extra_processing) {
    cfg->flags |= CONFIG_EXTRA_MODE;
    cfg->xmode = enc->extra_processing;
  }

  /* Joint stereo handling. */
  switch (enc->joint_stereo_mode) {
    case GST_WAVPACK_JS_MODE_AUTO:
      break;
    case GST_WAVPACK_JS_MODE_LEFT_RIGHT:
      cfg->flags |= CONFIG_JOINT_OVERRIDE;
      cfg->flags &= ~CONFIG_JOINT_STEREO;
      break;
    case GST_WAVPACK_JS_MODE_MID_SIDE:
      cfg->flags |= (CONFIG_JOINT_OVERRIDE | CONFIG_JOINT_STEREO);
      break;
  }
}
/* Chain function: encode one raw video buffer to PNG and push the encoded
 * image downstream.  In snapshot mode an EOS is sent after the first frame.
 *
 * Takes ownership of @buf.  Returns GST_FLOW_OK on success, an error flow
 * otherwise (element errors are posted for the failure cases).
 */
static GstFlowReturn
gst_pngenc_chain (GstPad * pad, GstBuffer * buf)
{
  GstPngEnc *pngenc;
  gint row_index;
  /* volatile: assigned between setjmp() and a potential longjmp() from
   * libpng, so it must keep a defined value in the error path
   * (C99 7.13.2.1). */
  png_byte **volatile row_pointers = NULL;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *encoded_buf = NULL;

  pngenc = GST_PNGENC (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (pngenc, "BEGINNING");

  /* Caps must have been negotiated before we can encode anything. */
  if (G_UNLIKELY (pngenc->width <= 0 || pngenc->height <= 0)) {
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

  if (G_UNLIKELY (GST_BUFFER_SIZE (buf) < pngenc->height * pngenc->stride)) {
    gst_buffer_unref (buf);
    GST_ELEMENT_ERROR (pngenc, STREAM, FORMAT, (NULL),
        ("Provided input buffer is too small, caps problem?"));
    ret = GST_FLOW_ERROR;
    goto done;
  }

  /* initialize png struct stuff */
  pngenc->png_struct_ptr = png_create_write_struct (PNG_LIBPNG_VER_STRING,
      (png_voidp) NULL, user_error_fn, user_warning_fn);
  if (pngenc->png_struct_ptr == NULL) {
    gst_buffer_unref (buf);
    GST_ELEMENT_ERROR (pngenc, LIBRARY, INIT, (NULL),
        ("Failed to initialize png structure"));
    ret = GST_FLOW_ERROR;
    goto done;
  }

  pngenc->png_info_ptr = png_create_info_struct (pngenc->png_struct_ptr);
  if (!pngenc->png_info_ptr) {
    gst_buffer_unref (buf);
    png_destroy_write_struct (&(pngenc->png_struct_ptr), (png_infopp) NULL);
    GST_ELEMENT_ERROR (pngenc, LIBRARY, INIT, (NULL),
        ("Failed to initialize the png info structure"));
    ret = GST_FLOW_ERROR;
    goto done;
  }

  /* non-0 return is from a longjmp inside of libpng */
  if (setjmp (png_jmpbuf (pngenc->png_struct_ptr)) != 0) {
    gst_buffer_unref (buf);
    png_destroy_write_struct (&pngenc->png_struct_ptr, &pngenc->png_info_ptr);
    /* FIX: row_pointers used to leak when libpng longjmp'd while writing
     * the image; it is NULL until allocated, and g_free (NULL) is a no-op. */
    g_free (row_pointers);
    GST_ELEMENT_ERROR (pngenc, LIBRARY, FAILED, (NULL),
        ("returning from longjmp"));
    ret = GST_FLOW_ERROR;
    goto done;
  }

  png_set_filter (pngenc->png_struct_ptr, 0,
      PNG_FILTER_NONE | PNG_FILTER_VALUE_NONE);
  png_set_compression_level (pngenc->png_struct_ptr,
      pngenc->compression_level);

  png_set_IHDR (pngenc->png_struct_ptr, pngenc->png_info_ptr,
      pngenc->width, pngenc->height, 8, pngenc->png_color_type,
      PNG_INTERLACE_NONE, PNG_COMPRESSION_TYPE_DEFAULT,
      PNG_FILTER_TYPE_DEFAULT);

  /* user_write_data() appends the compressed bytes into pngenc->buffer_out
   * and advances pngenc->written. */
  png_set_write_fn (pngenc->png_struct_ptr, pngenc,
      (png_rw_ptr) user_write_data, user_flush_data);

  /* One row pointer per scanline into the input frame. */
  row_pointers = g_new (png_byte *, pngenc->height);
  for (row_index = 0; row_index < pngenc->height; row_index++) {
    row_pointers[row_index] =
        GST_BUFFER_DATA (buf) + (row_index * pngenc->stride);
  }

  /* allocate the output buffer */
  pngenc->buffer_out =
      gst_buffer_new_and_alloc (pngenc->height * pngenc->stride);
  pngenc->written = 0;

  png_write_info (pngenc->png_struct_ptr, pngenc->png_info_ptr);
  png_write_image (pngenc->png_struct_ptr, row_pointers);
  png_write_end (pngenc->png_struct_ptr, NULL);

  g_free (row_pointers);
  row_pointers = NULL;

  /* Wrap only the bytes actually written by libpng. */
  encoded_buf = gst_buffer_create_sub (pngenc->buffer_out, 0,
      pngenc->written);

  png_destroy_info_struct (pngenc->png_struct_ptr, &pngenc->png_info_ptr);
  png_destroy_write_struct (&pngenc->png_struct_ptr, (png_infopp) NULL);
  gst_buffer_copy_metadata (encoded_buf, buf, GST_BUFFER_COPY_TIMESTAMPS);
  gst_buffer_unref (buf);
  gst_buffer_set_caps (encoded_buf, GST_PAD_CAPS (pngenc->srcpad));

  if ((ret = gst_pad_push (pngenc->srcpad, encoded_buf)) != GST_FLOW_OK)
    goto done;

  if (pngenc->snapshot) {
    GstEvent *event;

    GST_DEBUG_OBJECT (pngenc, "snapshot mode, sending EOS");
    /* send EOS event, since a frame has been pushed out */
    event = gst_event_new_eos ();
    gst_pad_push_event (pngenc->srcpad, event);
    ret = GST_FLOW_UNEXPECTED;
  }

done:
  GST_DEBUG_OBJECT (pngenc, "END, ret:%d", ret);

  if (pngenc->buffer_out != NULL) {
    gst_buffer_unref (pngenc->buffer_out);
    pngenc->buffer_out = NULL;
  }

  gst_object_unref (pngenc);
  return ret;
}
/* GstAudioSink::prepare vmethod: configure the ALSA device for the
 * negotiated ring buffer spec (reopening in IEC958 pass-through mode when
 * requested), apply hw/sw params, and fill in the ring buffer segment
 * size/count.
 *
 * Returns TRUE on success, FALSE after posting an element error.
 *
 * NOTE(review): `err` is assigned only inside the CHECK() macro; the
 * hw/sw_params error messages rely on that — confirm against the macro
 * definition. */
static gboolean
gst_alsasink_prepare (GstAudioSink * asink, GstRingBufferSpec * spec)
{
  GstAlsaSink *alsa;
  gint err;

  alsa = GST_ALSA_SINK (asink);

  if (spec->format == GST_IEC958) {
    /* IEC958/SPDIF pass-through: the device must be reopened in
     * non-audio mode. */
    snd_pcm_close (alsa->handle);
    alsa->handle = gst_alsa_open_iec958_pcm (GST_OBJECT (alsa));
    if (G_UNLIKELY (!alsa->handle)) {
      goto no_iec958;
    }
  }

  if (!alsasink_parse_spec (alsa, spec))
    goto spec_parse;

  CHECK (set_hwparams (alsa), hw_params_failed);
  CHECK (set_swparams (alsa), sw_params_failed);

  alsa->bytes_per_sample = spec->bytes_per_sample;
  /* Ring buffer geometry derived from the negotiated ALSA period/buffer. */
  spec->segsize = alsa->period_size * spec->bytes_per_sample;
  spec->segtotal = alsa->buffer_size / alsa->period_size;

  {
    /* Dump the resulting hw/sw setup at debug level.  The string returned
     * by snd_output_buffer_string() is owned by out_buf and released by
     * snd_output_close(). */
    snd_output_t *out_buf = NULL;
    char *msg = NULL;

    snd_output_buffer_open (&out_buf);
    snd_pcm_dump_hw_setup (alsa->handle, out_buf);
    snd_output_buffer_string (out_buf, &msg);
    GST_DEBUG_OBJECT (alsa, "Hardware setup: \n%s", msg);
    snd_output_close (out_buf);
    snd_output_buffer_open (&out_buf);
    snd_pcm_dump_sw_setup (alsa->handle, out_buf);
    snd_output_buffer_string (out_buf, &msg);
    GST_DEBUG_OBJECT (alsa, "Software setup: \n%s", msg);
    snd_output_close (out_buf);
  }

  return TRUE;

  /* ERRORS */
no_iec958:
  {
    GST_ELEMENT_ERROR (alsa, RESOURCE, OPEN_WRITE, (NULL),
        ("Could not open IEC958 (SPDIF) device for playback"));
    return FALSE;
  }
spec_parse:
  {
    GST_ELEMENT_ERROR (alsa, RESOURCE, SETTINGS, (NULL),
        ("Error parsing spec"));
    return FALSE;
  }
hw_params_failed:
  {
    GST_ELEMENT_ERROR (alsa, RESOURCE, SETTINGS, (NULL),
        ("Setting of hwparams failed: %s", snd_strerror (err)));
    return FALSE;
  }
sw_params_failed:
  {
    GST_ELEMENT_ERROR (alsa, RESOURCE, SETTINGS, (NULL),
        ("Setting of swparams failed: %s", snd_strerror (err)));
    return FALSE;
  }
}
/* GstBaseTransform::sink_event handler: records a printable description of
 * every event in last_message (unless silent), implements single-segment
 * mode (swallowing SEGMENT events after emitting one open-ended segment),
 * synchronizes on GAP events, and resets perfect-stream tracking state on
 * each SEGMENT.
 *
 * Returns the result of the parent class handler, or TRUE for segments
 * eaten in single-segment mode. */
static gboolean
gst_identity_sink_event (GstBaseTransform * trans, GstEvent * event)
{
  GstIdentity *identity;
  gboolean ret = TRUE;

  identity = GST_IDENTITY (trans);

  if (!identity->silent) {
    const GstStructure *s;
    const gchar *tstr;
    gchar *sstr;

    /* Build the human-readable event description under the object lock. */
    GST_OBJECT_LOCK (identity);
    g_free (identity->last_message);

    tstr = gst_event_type_get_name (GST_EVENT_TYPE (event));
    if ((s = gst_event_get_structure (event)))
      sstr = gst_structure_to_string (s);
    else
      sstr = g_strdup ("");

    identity->last_message =
        g_strdup_printf ("event ******* (%s:%s) E (type: %s (%d), %s) %p",
        GST_DEBUG_PAD_NAME (trans->sinkpad), tstr,
        GST_EVENT_TYPE (event), sstr, event);
    g_free (sstr);
    GST_OBJECT_UNLOCK (identity);

    gst_identity_notify_last_message (identity);
  }

  if (identity->single_segment
      && (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT)) {
    if (!trans->have_segment) {
      GstEvent *news;
      GstSegment segment;

      gst_event_copy_segment (event, &segment);
      gst_event_copy_segment (event, &trans->segment);
      trans->have_segment = TRUE;

      /* This is the first segment, send out a (0, -1) segment */
      gst_segment_init (&segment, segment.format);
      news = gst_event_new_segment (&segment);

      gst_pad_event_default (trans->sinkpad, GST_OBJECT_CAST (trans), news);
    } else {
      /* need to track segment for proper running time */
      gst_event_copy_segment (event, &trans->segment);
    }
  }

  if (GST_EVENT_TYPE (event) == GST_EVENT_GAP &&
      trans->have_segment && trans->segment.format == GST_FORMAT_TIME) {
    GstClockTime start, dur;

    gst_event_parse_gap (event, &start, &dur);
    if (GST_CLOCK_TIME_IS_VALID (start)) {
      start = gst_segment_to_running_time (&trans->segment,
          GST_FORMAT_TIME, start);

      gst_identity_do_sync (identity, start);

      /* also transform GAP timestamp similar to buffer timestamps */
      if (identity->single_segment) {
        /* replaces the event; the original reference is dropped here */
        gst_event_unref (event);
        event = gst_event_new_gap (start, dur);
      }
    }
  }

  /* Reset previous timestamp, duration and offsets on SEGMENT
   * to prevent false warnings when checking for perfect streams */
  if (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
    identity->prev_timestamp = identity->prev_duration = GST_CLOCK_TIME_NONE;
    identity->prev_offset = identity->prev_offset_end =
        GST_BUFFER_OFFSET_NONE;
  }

  if (identity->single_segment
      && GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
    /* eat up segments */
    gst_event_unref (event);
    ret = TRUE;
  } else {
    if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_START) {
      /* abort any pending clock wait so flushing cannot block */
      GST_OBJECT_LOCK (identity);
      if (identity->clock_id) {
        GST_DEBUG_OBJECT (identity, "unlock clock wait");
        gst_clock_id_unschedule (identity->clock_id);
      }
      GST_OBJECT_UNLOCK (identity);
    }

    ret = GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
  }

  return ret;
}
/* GstBaseParse::handle_frame: locate an H.263 picture start code (PSC),
 * find the frame boundary via the next PSC, parse the picture header on the
 * first frame to set the src caps, flag delta units, and finish the frame.
 *
 * Sets *skipsize to the number of bytes to skip when more data is needed.
 */
static GstFlowReturn
gst_h263_parse_handle_frame (GstBaseParse * parse, GstBaseParseFrame * frame,
    gint * skipsize)
{
  GstH263Parse *h263parse;
  GstBuffer *buffer;
  guint psc_pos, next_psc_pos;
  gsize size;
  H263Params params = { 0, };
  GstFlowReturn res = GST_FLOW_OK;

  h263parse = GST_H263_PARSE (parse);
  buffer = frame->buffer;
  size = gst_buffer_get_size (buffer);

  /* A PSC is 3 bytes; nothing to do with less. */
  if (size < 3) {
    *skipsize = 1;
    return GST_FLOW_OK;
  }

  psc_pos = find_psc (buffer, 0);

  if (psc_pos == -1) {
    /* PSC not found, need more data; keep the last 2 bytes in case the
     * start code straddles the boundary */
    if (size > 3)
      psc_pos = size - 3;
    else
      psc_pos = 0;
    goto more;
  }

  /* need to skip the bytes before the PSC */
  if (psc_pos > 0)
    goto more;

  /* Found the start of the frame, now try to find the end */
  next_psc_pos = psc_pos + 3;
  next_psc_pos = find_psc (buffer, next_psc_pos);

  if (next_psc_pos == -1) {
    if (GST_BASE_PARSE_DRAINING (parse))
      /* FLUSH/EOS, it's okay if we can't find the next frame */
      next_psc_pos = size;
    else
      goto more;
  }

  /* We should now have a complete frame */

  /* If this is the first frame, parse and set srcpad caps */
  if (h263parse->state == PARSING) {
    res = gst_h263_parse_get_params (&params, buffer, FALSE,
        &h263parse->state);
    if (res != GST_FLOW_OK || h263parse->state != GOT_HEADER) {
      GST_WARNING ("Couldn't parse header - setting passthrough mode");
      gst_base_parse_set_passthrough (parse, TRUE);
    } else {
      /* Set srcpad caps since we now have sufficient information to do so */
      gst_h263_parse_set_src_caps (h263parse, &params);
      gst_base_parse_set_passthrough (parse, FALSE);
    }
    memset (&params, 0, sizeof (params));
  }

  /* XXX: After getting a keyframe, should we adjust min_frame_size to
   * something smaller so we don't end up collecting too many
   * non-keyframes? */

  GST_DEBUG_OBJECT (h263parse, "found a frame of size %u at pos %u",
      next_psc_pos, psc_pos);

  res = gst_h263_parse_get_params (&params, buffer, TRUE, &h263parse->state);
  if (res != GST_FLOW_OK)
    goto more;

  if (h263parse->state == PASSTHROUGH || h263parse->state == PARSING) {
    /* There's a feature we don't support, or we didn't have enough data to
     * parse the header, which should not be possible. Either way, go into
     * passthrough mode and let downstream handle it if it can. */
    GST_WARNING ("Couldn't parse header - setting passthrough mode");
    gst_base_parse_set_passthrough (parse, TRUE);
    goto more;
  }

  /* FIX: the flag logic was inverted — a delta unit (non-keyframe) must
   * have GST_BUFFER_FLAG_DELTA_UNIT SET, a keyframe must have it UNSET,
   * otherwise downstream seeks to non-keyframes and drops keyframes. */
  if (gst_h263_parse_is_delta_unit (&params))
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

  return gst_base_parse_finish_frame (parse, frame, next_psc_pos);

more:
  *skipsize = psc_pos;

  return res;
}
/* Negotiate and install the ALSA hardware parameters (access, sample
 * format, channel count, rate, buffer/period times) on alsa->handle.
 * Falls back to device defaults for buffer/period time when the requested
 * values cannot be set (retry loop), and uses fixed sizes for IEC958.
 *
 * Returns 0 on success or a negative ALSA error code after posting an
 * element error.  `err` is assigned inside the CHECK() macro. */
static int
set_hwparams (GstAlsaSink * alsa)
{
  guint rrate;
  gint err, dir;
  snd_pcm_hw_params_t *params;
  guint period_time, buffer_time;

  snd_pcm_hw_params_malloc (&params);

  GST_DEBUG_OBJECT (alsa, "Negotiating to %d channels @ %d Hz (format = %s) "
      "SPDIF (%d)", alsa->channels, alsa->rate,
      snd_pcm_format_name (alsa->format), alsa->iec958);

  /* start with requested values, if we cannot configure alsa for those
   * values, we set these values to -1, which will leave the default alsa
   * values */
  buffer_time = alsa->buffer_time;
  period_time = alsa->period_time;

retry:
  /* choose all parameters */
  CHECK (snd_pcm_hw_params_any (alsa->handle, params), no_config);
  /* set the interleaved read/write format */
  CHECK (snd_pcm_hw_params_set_access (alsa->handle, params, alsa->access),
      wrong_access);
  /* set the sample format */
  if (alsa->iec958) {
    /* Try to use big endian first else fallback to le and swap bytes */
    if (snd_pcm_hw_params_set_format (alsa->handle, params,
            alsa->format) < 0) {
      alsa->format = SND_PCM_FORMAT_S16_LE;
      alsa->need_swap = TRUE;
      GST_DEBUG_OBJECT (alsa, "falling back to little endian with swapping");
    } else {
      alsa->need_swap = FALSE;
    }
  }
  CHECK (snd_pcm_hw_params_set_format (alsa->handle, params, alsa->format),
      no_sample_format);
  /* set the count of channels */
  CHECK (snd_pcm_hw_params_set_channels (alsa->handle, params,
          alsa->channels), no_channels);
  /* set the stream rate */
  rrate = alsa->rate;
  CHECK (snd_pcm_hw_params_set_rate_near (alsa->handle, params, &rrate,
          NULL), no_rate);
  if (rrate != alsa->rate)
    goto rate_match;

  /* get and dump some limits */
  {
    guint min, max;

    snd_pcm_hw_params_get_buffer_time_min (params, &min, &dir);
    snd_pcm_hw_params_get_buffer_time_max (params, &max, &dir);

    GST_DEBUG_OBJECT (alsa, "buffer time %u, min %u, max %u",
        alsa->buffer_time, min, max);

    snd_pcm_hw_params_get_period_time_min (params, &min, &dir);
    snd_pcm_hw_params_get_period_time_max (params, &max, &dir);

    GST_DEBUG_OBJECT (alsa, "period time %u, min %u, max %u",
        alsa->period_time, min, max);

    snd_pcm_hw_params_get_periods_min (params, &min, &dir);
    snd_pcm_hw_params_get_periods_max (params, &max, &dir);

    GST_DEBUG_OBJECT (alsa, "periods min %u, max %u", min, max);
  }

  /* now try to configure the buffer time and period time, if one
   * of those fail, we fall back to the defaults and emit a warning. */
  if (buffer_time != -1 && !alsa->iec958) {
    /* set the buffer time */
    if ((err = snd_pcm_hw_params_set_buffer_time_near (alsa->handle, params,
                &buffer_time, &dir)) < 0) {
      GST_ELEMENT_WARNING (alsa, RESOURCE, SETTINGS, (NULL),
          ("Unable to set buffer time %i for playback: %s",
              buffer_time, snd_strerror (err)));
      /* disable buffer_time the next round */
      buffer_time = -1;
      goto retry;
    }
    GST_DEBUG_OBJECT (alsa, "buffer time %u", buffer_time);
  }
  if (period_time != -1 && !alsa->iec958) {
    /* set the period time */
    if ((err = snd_pcm_hw_params_set_period_time_near (alsa->handle, params,
                &period_time, &dir)) < 0) {
      GST_ELEMENT_WARNING (alsa, RESOURCE, SETTINGS, (NULL),
          ("Unable to set period time %i for playback: %s",
              period_time, snd_strerror (err)));
      /* disable period_time the next round */
      period_time = -1;
      goto retry;
    }
    GST_DEBUG_OBJECT (alsa, "period time %u", period_time);
  }

  /* Set buffer size and period size manually for SPDIF */
  if (G_UNLIKELY (alsa->iec958)) {
    snd_pcm_uframes_t buffer_size = SPDIF_BUFFER_SIZE;
    snd_pcm_uframes_t period_size = SPDIF_PERIOD_SIZE;

    CHECK (snd_pcm_hw_params_set_buffer_size_near (alsa->handle, params,
            &buffer_size), buffer_size);
    CHECK (snd_pcm_hw_params_set_period_size_near (alsa->handle, params,
            &period_size, NULL), period_size);
  }

  /* write the parameters to device */
  CHECK (snd_pcm_hw_params (alsa->handle, params), set_hw_params);

  /* now get the configured values */
  CHECK (snd_pcm_hw_params_get_buffer_size (params, &alsa->buffer_size),
      buffer_size);
  CHECK (snd_pcm_hw_params_get_period_size (params, &alsa->period_size,
          &dir), period_size);

  GST_DEBUG_OBJECT (alsa, "buffer size %lu, period size %lu",
      alsa->buffer_size, alsa->period_size);

  snd_pcm_hw_params_free (params);
  return 0;

  /* ERRORS */
no_config:
  {
    GST_ELEMENT_ERROR (alsa, RESOURCE, SETTINGS, (NULL),
        ("Broken configuration for playback: no configurations available: %s",
            snd_strerror (err)));
    snd_pcm_hw_params_free (params);
    return err;
  }
wrong_access:
  {
    GST_ELEMENT_ERROR (alsa, RESOURCE, SETTINGS, (NULL),
        ("Access type not available for playback: %s", snd_strerror (err)));
    snd_pcm_hw_params_free (params);
    return err;
  }
no_sample_format:
  {
    GST_ELEMENT_ERROR (alsa, RESOURCE, SETTINGS, (NULL),
        ("Sample format not available for playback: %s", snd_strerror (err)));
    snd_pcm_hw_params_free (params);
    return err;
  }
no_channels:
  {
    gchar *msg = NULL;

    if ((alsa->channels) == 1)
      msg = g_strdup (_("Could not open device for playback in mono mode."));
    if ((alsa->channels) == 2)
      msg =
          g_strdup (_("Could not open device for playback in stereo mode."));
    if ((alsa->channels) > 2)
      msg =
          g_strdup_printf (_
          ("Could not open device for playback in %d-channel mode."),
          alsa->channels);
    GST_ELEMENT_ERROR (alsa, RESOURCE, SETTINGS, (msg), (snd_strerror (err)));
    g_free (msg);
    snd_pcm_hw_params_free (params);
    return err;
  }
no_rate:
  {
    GST_ELEMENT_ERROR (alsa, RESOURCE, SETTINGS, (NULL),
        ("Rate %iHz not available for playback: %s",
            alsa->rate, snd_strerror (err)));
    /* FIX: params was leaked on this error path */
    snd_pcm_hw_params_free (params);
    return err;
  }
rate_match:
  {
    /* FIX: the message printed `err` (an error code, not even assigned on
     * this path) as the obtained rate; print the rate ALSA actually
     * chose. */
    GST_ELEMENT_ERROR (alsa, RESOURCE, SETTINGS, (NULL),
        ("Rate doesn't match (requested %iHz, get %iHz)", alsa->rate, rrate));
    snd_pcm_hw_params_free (params);
    return -EINVAL;
  }
buffer_size:
  {
    GST_ELEMENT_ERROR (alsa, RESOURCE, SETTINGS, (NULL),
        ("Unable to get buffer size for playback: %s", snd_strerror (err)));
    snd_pcm_hw_params_free (params);
    return err;
  }
period_size:
  {
    GST_ELEMENT_ERROR (alsa, RESOURCE, SETTINGS, (NULL),
        ("Unable to get period size for playback: %s", snd_strerror (err)));
    snd_pcm_hw_params_free (params);
    return err;
  }
set_hw_params:
  {
    GST_ELEMENT_ERROR (alsa, RESOURCE, SETTINGS, (NULL),
        ("Unable to set hw params for playback: %s", snd_strerror (err)));
    snd_pcm_hw_params_free (params);
    return err;
  }
}
/* Chain function: send the incoming text buffer to the festival server as a
 * (tts_textall ...) command, escaping quotes and backslashes, and wait for
 * the server's responses.
 *
 * Takes ownership of @buf.  Returns GST_FLOW_OK, or GST_FLOW_ERROR after
 * posting an element error. */
static GstFlowReturn
gst_festival_chain (GstPad * pad, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstFestival *festival;
  guint8 *p, *ep;
  gint f;
  FILE *fd;

  festival = GST_FESTIVAL (GST_PAD_PARENT (pad));

  GST_LOG_OBJECT (festival, "Got text buffer, %u bytes",
      GST_BUFFER_SIZE (buf));

  /* dup() the server fd so the fclose() below does not close the
   * element's own socket descriptor */
  f = dup (festival->info->server_fd);
  if (f < 0)
    goto fail_open;

  fd = fdopen (f, "wb");
  if (fd == NULL) {
    close (f);
    goto fail_open;
  }

  /* Copy text over to server, escaping any quotes */
  fprintf (fd, "(Parameter.set 'Audio_Required_Rate 16000)\n");
  fflush (fd);
  GST_DEBUG_OBJECT (festival, "issued Parameter.set command");
  if (read_response (festival) == FALSE) {
    fclose (fd);
    goto fail_read;
  }

  /* Text is sent as a quoted Scheme string; stop at the first NUL. */
  fprintf (fd, "(tts_textall \"");
  p = GST_BUFFER_DATA (buf);
  ep = p + GST_BUFFER_SIZE (buf);
  for (; p < ep && (*p != '\0'); p++) {
    if ((*p == '"') || (*p == '\\')) {
      putc ('\\', fd);
    }
    putc (*p, fd);
  }
  fprintf (fd, "\" \"%s\")\n", festival->info->text_mode);
  /* fclose() flushes the command and closes only the dup'ed fd */
  fclose (fd);

  GST_DEBUG_OBJECT (festival, "issued tts_textall command");

  /* Read back info from server */
  if (read_response (festival) == FALSE)
    goto fail_read;

out:
  gst_buffer_unref (buf);
  return ret;

  /* ERRORS */
fail_open:
  {
    GST_ELEMENT_ERROR (festival, RESOURCE, OPEN_WRITE, (NULL), (NULL));
    ret = GST_FLOW_ERROR;
    goto out;
  }
fail_read:
  {
    GST_ELEMENT_ERROR (festival, RESOURCE, READ, (NULL), (NULL));
    ret = GST_FLOW_ERROR;
    goto out;
  }
}
/* Allocate (or re-allocate) the double frame buffer for the MVE video
 * stream from a "create video buffer" chunk.  The buffer holds two frames
 * because MVE decoding may reference the previous two frames.
 *
 * @version selects the chunk layout: v0 has only width/height, v1 adds a
 * frame count, v2+ adds a true-color flag.
 *
 * Returns GST_FLOW_OK, or GST_FLOW_ERROR on an uninitialized stream or a
 * short chunk. */
static GstFlowReturn
gst_mve_video_create_buffer (GstMveDemux * mve, guint8 version,
    const guint8 * data, guint16 len)
{
  GstBuffer *buf;
  guint16 w, h, n, true_color, bpp;
  guint required, size;

  GST_DEBUG_OBJECT (mve, "create video buffer");

  if (mve->video_stream == NULL) {
    GST_ELEMENT_ERROR (mve, STREAM, DECODE, (NULL),
        ("trying to create video buffer for uninitialized stream"));
    return GST_FLOW_ERROR;
  }

  /* need 4 to 8 more bytes: width+height always (4), frame count for
   * version > 0 (+2), true-color flag for version > 1 (+2).
   * FIX: the previous computation `(version > 1) ? 8 : (version * 2)`
   * required only 0/2 bytes for versions 0/1, never accounting for the
   * mandatory 4 width/height bytes and allowing reads past the end of
   * short chunks. */
  required = (version > 1) ? 8 : ((version > 0) ? 6 : 4);
  if (len < required)
    return gst_mve_stream_error (mve, required, len);

  /* dimensions are stored in units of 8 pixels */
  w = GST_READ_UINT16_LE (data) << 3;
  h = GST_READ_UINT16_LE (data + 2) << 3;
  if (version > 0)
    n = GST_READ_UINT16_LE (data + 4);
  else
    n = 1;
  if (version > 1)
    true_color = GST_READ_UINT16_LE (data + 6);
  else
    true_color = 0;

  bpp = (true_color ? 2 : 1);
  size = w * h * bpp;

  if (mve->video_stream->buffer != NULL) {
    GST_DEBUG_OBJECT (mve, "video buffer already created");

    /* keep the existing buffer if the geometry is unchanged */
    if (GST_BUFFER_SIZE (mve->video_stream->buffer) == size * 2)
      return GST_FLOW_OK;

    GST_DEBUG_OBJECT (mve, "video buffer size has changed");
    gst_buffer_unref (mve->video_stream->buffer);
  }

  GST_DEBUG_OBJECT (mve,
      "allocating video buffer, w:%u, h:%u, n:%u, true_color:%u", w, h, n,
      true_color);

  /* we need a buffer to keep the last 2 frames, since those may be
     needed for decoding the next one */
  buf = gst_buffer_new_and_alloc (size * 2);

  mve->video_stream->bpp = bpp;
  mve->video_stream->width = w;
  mve->video_stream->height = h;
  mve->video_stream->buffer = buf;
  mve->video_stream->back_buf1 = GST_BUFFER_DATA (buf);
  mve->video_stream->back_buf2 = mve->video_stream->back_buf1 + size;
  mve->video_stream->max_block_offset = (h - 7) * w - 8;
  memset (mve->video_stream->back_buf1, 0, size * 2);

  return GST_FLOW_OK;
}
/* Pad query handler: answers LATENCY by adding one input frame period (our
 * worst-case hold time) to the upstream peer's reported latency; all other
 * queries go to the default handler. */
static gboolean
gst_video_rate_query (GstPad * pad, GstQuery * query)
{
  GstVideoRate *videorate = GST_VIDEO_RATE (gst_pad_get_parent (pad));
  gboolean res = FALSE;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:
    {
      GstPad *upstream;

      upstream = gst_pad_get_peer (videorate->sinkpad);
      if (upstream) {
        res = gst_pad_query (upstream, query);
        if (res) {
          gboolean is_live;
          GstClockTime min_latency, max_latency;
          guint64 own_latency;

          gst_query_parse_latency (query, &is_live, &min_latency,
              &max_latency);

          GST_DEBUG_OBJECT (videorate, "Peer latency: min %"
              GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
              GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));

          if (videorate->from_rate_numerator != 0) {
            /* add latency. We don't really know since we hold on to the
             * frames until we get a next frame, which can be anything. We
             * assume however that this will take from_rate time. */
            own_latency = gst_util_uint64_scale (GST_SECOND,
                videorate->from_rate_denominator,
                videorate->from_rate_numerator);
          } else {
            /* no input framerate, we don't know */
            own_latency = 0;
          }

          GST_DEBUG_OBJECT (videorate, "Our latency: %" GST_TIME_FORMAT,
              GST_TIME_ARGS (own_latency));

          min_latency += own_latency;
          if (max_latency != -1)
            max_latency += own_latency;

          GST_DEBUG_OBJECT (videorate, "Calculated total latency : min %"
              GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
              GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));

          gst_query_set_latency (query, is_live, min_latency, max_latency);
        }
        gst_object_unref (upstream);
      }
      break;
    }
    default:
      res = gst_pad_query_default (pad, query);
      break;
  }
  gst_object_unref (videorate);

  return res;
}
/* Chain function: adjust the frame rate by comparing each incoming buffer's
 * time against the previously stored one and the next expected output
 * timestamp, duplicating or dropping frames as needed.  The previous buffer
 * is kept (via gst_video_rate_swap_prev) until the next one arrives.
 *
 * Takes ownership of @buffer.  Returns GST_FLOW_OK, the result of flushing
 * the previous buffer, or GST_FLOW_NOT_NEGOTIATED without framerates. */
static GstFlowReturn
gst_video_rate_chain (GstPad * pad, GstBuffer * buffer)
{
  GstVideoRate *videorate;
  GstFlowReturn res = GST_FLOW_OK;
  GstClockTime intime, in_ts, in_dur;

  videorate = GST_VIDEO_RATE (GST_PAD_PARENT (pad));

  /* make sure the denominators are not 0 */
  if (videorate->from_rate_denominator == 0 ||
      videorate->to_rate_denominator == 0)
    goto not_negotiated;

  in_ts = GST_BUFFER_TIMESTAMP (buffer);
  in_dur = GST_BUFFER_DURATION (buffer);

  /* untimestamped buffers inherit the projected timestamp of the previous
   * buffer; with no previous timestamp either, the buffer is unusable */
  if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE)) {
    in_ts = videorate->last_ts;
    if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE))
      goto invalid_buffer;
  }

  /* get the time of the next expected buffer timestamp, we use this when
   * the next buffer has -1 as a timestamp */
  videorate->last_ts = in_ts;
  if (in_dur != GST_CLOCK_TIME_NONE)
    videorate->last_ts += in_dur;

  GST_DEBUG_OBJECT (videorate, "got buffer with timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (in_ts));

  /* the input time is the time in the segment + all previously accumulated
   * segments */
  intime = in_ts + videorate->segment.accum;

  /* we need to have two buffers to compare */
  if (videorate->prevbuf == NULL) {
    gst_video_rate_swap_prev (videorate, buffer, intime);
    videorate->in++;
    if (!GST_CLOCK_TIME_IS_VALID (videorate->next_ts)) {
      /* new buffer, we expect to output a buffer that matches the first
       * timestamp in the segment */
      if (videorate->skip_to_first) {
        videorate->next_ts = intime;
        videorate->base_ts = in_ts - videorate->segment.start;
        videorate->out_frame_count = 0;
      } else {
        videorate->next_ts =
            videorate->segment.start + videorate->segment.accum;
      }
    }
  } else {
    GstClockTime prevtime;
    gint count = 0;
    gint64 diff1, diff2;

    prevtime = videorate->prev_ts;

    GST_LOG_OBJECT (videorate,
        "BEGINNING prev buf %" GST_TIME_FORMAT " new buf %" GST_TIME_FORMAT
        " outgoing ts %" GST_TIME_FORMAT, GST_TIME_ARGS (prevtime),
        GST_TIME_ARGS (intime), GST_TIME_ARGS (videorate->next_ts));

    videorate->in++;

    /* drop new buffer if it's before previous one */
    if (intime < prevtime) {
      GST_DEBUG_OBJECT (videorate,
          "The new buffer (%" GST_TIME_FORMAT
          ") is before the previous buffer (%"
          GST_TIME_FORMAT "). Dropping new buffer.",
          GST_TIME_ARGS (intime), GST_TIME_ARGS (prevtime));
      videorate->drop++;
      if (!videorate->silent)
        gst_video_rate_notify_drop (videorate);
      gst_buffer_unref (buffer);
      goto done;
    }

    /* got 2 buffers, see which one is the best */
    do {
      /* distance of each buffer's time from the next output slot */
      diff1 = prevtime - videorate->next_ts;
      diff2 = intime - videorate->next_ts;

      /* take absolute values, beware: abs and ABS don't work for gint64 */
      if (diff1 < 0)
        diff1 = -diff1;
      if (diff2 < 0)
        diff2 = -diff2;

      GST_LOG_OBJECT (videorate,
          "diff with prev %" GST_TIME_FORMAT " diff with new %"
          GST_TIME_FORMAT " outgoing ts %" GST_TIME_FORMAT,
          GST_TIME_ARGS (diff1), GST_TIME_ARGS (diff2),
          GST_TIME_ARGS (videorate->next_ts));

      /* output first one when its the best */
      if (diff1 <= diff2) {
        count++;

        /* on error the _flush function posted a warning already */
        if ((res = gst_video_rate_flush_prev (videorate,
                    count > 1)) != GST_FLOW_OK) {
          gst_buffer_unref (buffer);
          goto done;
        }
      }
      /* continue while the first one was the best, if they were equal avoid
       * going into an infinite loop */
    } while (diff1 < diff2);

    /* if we outputed the first buffer more then once, we have dups */
    if (count > 1) {
      videorate->dup += count - 1;
      if (!videorate->silent)
        gst_video_rate_notify_duplicate (videorate);
    }
    /* if we didn't output the first buffer, we have a drop */
    else if (count == 0) {
      videorate->drop++;

      if (!videorate->silent)
        gst_video_rate_notify_drop (videorate);

      GST_LOG_OBJECT (videorate,
          "new is best, old never used, drop, outgoing ts %"
          GST_TIME_FORMAT, GST_TIME_ARGS (videorate->next_ts));
    }

    GST_LOG_OBJECT (videorate,
        "END, putting new in old, diff1 %" GST_TIME_FORMAT
        ", diff2 %" GST_TIME_FORMAT ", next_ts %" GST_TIME_FORMAT
        ", in %" G_GUINT64_FORMAT ", out %" G_GUINT64_FORMAT ", drop %"
        G_GUINT64_FORMAT ", dup %" G_GUINT64_FORMAT, GST_TIME_ARGS (diff1),
        GST_TIME_ARGS (diff2), GST_TIME_ARGS (videorate->next_ts),
        videorate->in, videorate->out, videorate->drop, videorate->dup);

    /* swap in new one when it's the best */
    gst_video_rate_swap_prev (videorate, buffer, intime);
  }
done:
  return res;

  /* ERRORS */
not_negotiated:
  {
    GST_WARNING_OBJECT (videorate, "no framerate negotiated");
    gst_buffer_unref (buffer);
    res = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

invalid_buffer:
  {
    GST_WARNING_OBJECT (videorate,
        "Got buffer with GST_CLOCK_TIME_NONE timestamp, discarding it");
    gst_buffer_unref (buffer);
    goto done;
  }
}
/* Sink-pad event handler.
 *
 * NEWSEGMENT: closes out the previous segment by flushing/duplicating the
 * stored frame up to the old segment stop, then resets timestamp tracking
 * and accumulates the new segment.
 * EOS: fills the segment up to its stop time (or flushes at least one frame,
 * honouring the stored buffer's duration when valid).
 * FLUSH_STOP: resets all state.
 * The event is always pushed downstream afterwards; returns the result of
 * that push, or FALSE for a non-TIME newsegment. */
static gboolean
gst_video_rate_event (GstPad * pad, GstEvent * event)
{
  GstVideoRate *videorate;
  gboolean ret;

  videorate = GST_VIDEO_RATE (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NEWSEGMENT:
    {
      gint64 start, stop, time;
      gdouble rate, arate;
      gboolean update;
      GstFormat format;

      gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
          &start, &stop, &time);

      /* we only operate on time segments; anything else is an error */
      if (format != GST_FORMAT_TIME)
        goto format_error;

      GST_DEBUG_OBJECT (videorate, "handle NEWSEGMENT");

      /* close up the previous segment, if appropriate */
      if (!update && videorate->prevbuf) {
        gint count = 0;
        GstFlowReturn res;

        res = GST_FLOW_OK;
        /* fill up to the end of current segment,
         * or only send out the stored buffer if there is no specific stop.
         * regardless, prevent going loopy in strange cases */
        while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
            ((GST_CLOCK_TIME_IS_VALID (videorate->segment.stop) &&
                    videorate->next_ts - videorate->segment.accum <
                    videorate->segment.stop)
                || count < 1)) {
          /* duplicate flag is set for every repeat after the first push */
          res = gst_video_rate_flush_prev (videorate, count > 0);
          count++;
        }
        /* update dup/drop statistics and notify if requested */
        if (count > 1) {
          videorate->dup += count - 1;
          if (!videorate->silent)
            gst_video_rate_notify_duplicate (videorate);
        } else if (count == 0) {
          videorate->drop++;
          if (!videorate->silent)
            gst_video_rate_notify_drop (videorate);
        }
        /* clean up for the new one; _chain will resume from the new start */
        videorate->base_ts = 0;
        videorate->out_frame_count = 0;
        gst_video_rate_swap_prev (videorate, NULL, 0);
        videorate->next_ts = GST_CLOCK_TIME_NONE;
      }

      /* We just want to update the accumulated stream_time  */
      gst_segment_set_newsegment_full (&videorate->segment, update, rate, arate,
          format, start, stop, time);

      GST_DEBUG_OBJECT (videorate, "updated segment: %" GST_SEGMENT_FORMAT,
          &videorate->segment);
      break;
    }
    case GST_EVENT_EOS:{
      gint count = 0;
      GstFlowReturn res = GST_FLOW_OK;

      GST_DEBUG_OBJECT (videorate, "Got EOS");

      /* If the segment has a stop position, fill the segment */
      if (GST_CLOCK_TIME_IS_VALID (videorate->segment.stop)) {
        /* fill up to the end of current segment,
         * or only send out the stored buffer if there is no specific stop.
         * regardless, prevent going loopy in strange cases */
        while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
            ((videorate->next_ts - videorate->segment.accum <
                    videorate->segment.stop)
                || count < 1)) {
          res = gst_video_rate_flush_prev (videorate, count > 0);
          count++;
        }
      } else if (videorate->prevbuf) {
        /* Output at least one frame but if the buffer duration is valid, output
         * enough frames to use the complete buffer duration */
        if (GST_BUFFER_DURATION_IS_VALID (videorate->prevbuf)) {
          GstClockTime end_ts =
              videorate->next_ts + GST_BUFFER_DURATION (videorate->prevbuf);

          while (res == GST_FLOW_OK && count <= MAGIC_LIMIT &&
              ((videorate->next_ts - videorate->segment.accum < end_ts)
                  || count < 1)) {
            res = gst_video_rate_flush_prev (videorate, count > 0);
            count++;
          }
        } else {
          /* no duration known: just flush the single stored frame */
          res = gst_video_rate_flush_prev (videorate, FALSE);
          count = 1;
        }
      }

      /* update dup/drop statistics, mirroring the NEWSEGMENT path */
      if (count > 1) {
        videorate->dup += count - 1;
        if (!videorate->silent)
          gst_video_rate_notify_duplicate (videorate);
      } else if (count == 0) {
        videorate->drop++;
        if (!videorate->silent)
          gst_video_rate_notify_drop (videorate);
      }

      break;
    }
    case GST_EVENT_FLUSH_STOP:
      /* also resets the segment */
      GST_DEBUG_OBJECT (videorate, "Got FLUSH_STOP");
      gst_video_rate_reset (videorate);
      break;
    default:
      break;
  }

  /* forward the (possibly handled) event downstream */
  ret = gst_pad_push_event (videorate->srcpad, event);

done:
  gst_object_unref (videorate);

  return ret;

  /* ERRORS */
format_error:
  {
    GST_WARNING_OBJECT (videorate,
        "Got segment but doesn't have GST_FORMAT_TIME value");
    /* event is consumed here instead of being pushed downstream */
    gst_event_unref (event);
    ret = FALSE;
    goto done;
  }
}
/* Caps negotiation for either pad.
 *
 * Records the negotiated framerate on the side the caps arrived on
 * (adjusting base_ts/out_frame_count when the output rate changes), then
 * tries to set matching caps on the other pad: first as-is, otherwise by
 * transforming, intersecting with the peer's caps and fixating the result.
 * In all exit paths the stored previous buffer is discarded, since its
 * dimensions may no longer match.
 *
 * NOTE(review): the no_framerate path leaves ret at its initial TRUE, so
 * caps without a framerate are reported as accepted — looks intentional
 * (upstream behaves the same) but verify against callers. */
static gboolean
gst_video_rate_setcaps (GstPad * pad, GstCaps * caps)
{
  GstVideoRate *videorate;
  GstStructure *structure;
  gboolean ret = TRUE;
  GstPad *otherpad, *opeer;
  gint rate_numerator, rate_denominator;

  videorate = GST_VIDEO_RATE (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (pad, "setcaps called %" GST_PTR_FORMAT, caps);

  structure = gst_caps_get_structure (caps, 0);
  if (!gst_structure_get_fraction (structure, "framerate",
          &rate_numerator, &rate_denominator))
    goto no_framerate;

  if (pad == videorate->srcpad) {
    /* out_frame_count is scaled by the frame rate caps when calculating next_ts.
     * when the frame rate caps change, we must update base_ts and reset
     * out_frame_count */
    if (videorate->to_rate_numerator) {
      videorate->base_ts +=
          gst_util_uint64_scale (videorate->out_frame_count,
          videorate->to_rate_denominator * GST_SECOND,
          videorate->to_rate_numerator);
    }
    videorate->out_frame_count = 0;
    videorate->to_rate_numerator = rate_numerator;
    videorate->to_rate_denominator = rate_denominator;
    otherpad = videorate->sinkpad;
  } else {
    videorate->from_rate_numerator = rate_numerator;
    videorate->from_rate_denominator = rate_denominator;
    otherpad = videorate->srcpad;
  }

  /* now try to find something for the peer */
  opeer = gst_pad_get_peer (otherpad);
  if (opeer) {
    if (gst_pad_accept_caps (opeer, caps)) {
      /* the peer accepts the caps as they are */
      gst_pad_set_caps (otherpad, caps);

      ret = TRUE;
    } else {
      GstCaps *peercaps;
      GstCaps *transform = NULL;

      ret = FALSE;

      /* see how we can transform the input caps */
      if (!gst_video_rate_transformcaps (pad, caps, otherpad, &transform))
        goto no_transform;

      /* see what the peer can do */
      peercaps = gst_pad_get_caps (opeer);

      GST_DEBUG_OBJECT (opeer, "icaps %" GST_PTR_FORMAT, peercaps);
      GST_DEBUG_OBJECT (videorate, "transform %" GST_PTR_FORMAT, transform);

      /* filter against our possibilities; note 'caps' now points at a new
       * caps object we own and must unref */
      caps = gst_caps_intersect (peercaps, transform);
      gst_caps_unref (peercaps);
      gst_caps_unref (transform);

      GST_DEBUG_OBJECT (videorate, "intersect %" GST_PTR_FORMAT, caps);

      /* could turn up empty, due to e.g. colorspace etc */
      if (gst_caps_get_size (caps) == 0) {
        gst_caps_unref (caps);
        goto no_transform;
      }

      /* take first possibility */
      gst_caps_truncate (caps);
      structure = gst_caps_get_structure (caps, 0);

      /* and fixate */
      gst_structure_fixate_field_nearest_fraction (structure, "framerate",
          rate_numerator, rate_denominator);

      gst_structure_get_fraction (structure, "framerate",
          &rate_numerator, &rate_denominator);

      /* store the fixated rate on whichever side the other pad is */
      if (otherpad == videorate->srcpad) {
        videorate->to_rate_numerator = rate_numerator;
        videorate->to_rate_denominator = rate_denominator;
      } else {
        videorate->from_rate_numerator = rate_numerator;
        videorate->from_rate_denominator = rate_denominator;
      }

      /* fixate remaining optional fields to sane defaults */
      if (gst_structure_has_field (structure, "interlaced"))
        gst_structure_fixate_field_boolean (structure, "interlaced", FALSE);
      if (gst_structure_has_field (structure, "color-matrix"))
        gst_structure_fixate_field_string (structure, "color-matrix", "sdtv");
      if (gst_structure_has_field (structure, "chroma-site"))
        gst_structure_fixate_field_string (structure, "chroma-site", "mpeg2");
      if (gst_structure_has_field (structure, "pixel-aspect-ratio"))
        gst_structure_fixate_field_nearest_fraction (structure,
            "pixel-aspect-ratio", 1, 1);

      gst_pad_set_caps (otherpad, caps);
      gst_caps_unref (caps);
      ret = TRUE;
    }
    gst_object_unref (opeer);
  }
done:
  /* After a setcaps, our caps may have changed. In that case, we can't use
   * the old buffer, if there was one (it might have different dimensions) */
  GST_DEBUG_OBJECT (videorate, "swapping old buffers");
  gst_video_rate_swap_prev (videorate, NULL, GST_CLOCK_TIME_NONE);

  gst_object_unref (videorate);
  return ret;

no_framerate:
  {
    GST_DEBUG_OBJECT (videorate, "no framerate specified");
    goto done;
  }
no_transform:
  {
    GST_DEBUG_OBJECT (videorate, "no framerate transform possible");
    ret = FALSE;
    goto done;
  }
}
/* Synchronous bus handler for the camerabin test pipeline.
 *
 * Handles ERROR (dumps a dot graph of camera_bin and quits the main loop),
 * logs STATE_CHANGED for bins, quits on EOS, binds the video overlay to the
 * global 'window' on "prepare-xwindow-id" (dropping that message), and
 * writes a "preview-image" buffer to "test_vga.rgb". Everything else is
 * passed on unchanged.
 *
 * Fixes vs. previous revision:
 *  - use the already-fetched 'st' instead of message->structure (consistency)
 *  - open the raw image file in binary mode ("wb"); "w" corrupts data on
 *    platforms that translate line endings
 *  - corrected "errro" typo in the error message
 */
static GstBusSyncReply
bus_callback (GstBus * bus, GstMessage * message, gpointer data)
{
  const GstStructure *st;
  const GValue *image;
  GstBuffer *buf = NULL;
  guint8 *data_buf = NULL;
  gchar *caps_string;
  guint size = 0;
  gchar *preview_filename = NULL;
  FILE *f = NULL;
  size_t written;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:{
      GError *err;
      gchar *debug;

      gst_message_parse_error (message, &err, &debug);
      g_print ("Error: %s\n", err->message);
      g_error_free (err);
      g_free (debug);

      /* Write debug graph to file */
      GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (camera_bin),
          GST_DEBUG_GRAPH_SHOW_ALL, "camerabin.error");

      g_main_loop_quit (loop);
      break;
    }
    case GST_MESSAGE_STATE_CHANGED:
      if (GST_IS_BIN (GST_MESSAGE_SRC (message))) {
        GstState oldstate, newstate;

        gst_message_parse_state_changed (message, &oldstate, &newstate, NULL);
        GST_DEBUG_OBJECT (GST_MESSAGE_SRC (message), "state-changed: %s -> %s",
            gst_element_state_get_name (oldstate),
            gst_element_state_get_name (newstate));
      }
      break;
    case GST_MESSAGE_EOS:
      /* end-of-stream */
      GST_INFO ("got eos() - should not happen");
      g_main_loop_quit (loop);
      break;
    default:
      st = gst_message_get_structure (message);
      if (st) {
        if (gst_structure_has_name (st, "prepare-xwindow-id")) {
          if (window) {
            /* hand our X window to the video sink and swallow the message */
            gst_x_overlay_set_xwindow_id (GST_X_OVERLAY (GST_MESSAGE_SRC
                    (message)), window);
            gst_message_unref (message);
            message = NULL;
            return GST_BUS_DROP;
          }
        } else if (gst_structure_has_name (st, "image-captured")) {
          GST_DEBUG ("image-captured");
        } else if (gst_structure_has_name (st, "preview-image")) {
          GST_DEBUG ("preview-image");
          /* extract preview-image from msg */
          image = gst_structure_get_value (st, "buffer");
          if (image) {
            buf = gst_value_get_buffer (image);
            data_buf = GST_BUFFER_DATA (buf);
            size = GST_BUFFER_SIZE (buf);
            preview_filename = g_strdup_printf ("test_vga.rgb");
            caps_string = gst_caps_to_string (GST_BUFFER_CAPS (buf));
            g_print ("writing buffer to %s, buffer caps: %s\n",
                preview_filename, caps_string);
            g_free (caps_string);
            /* raw image data -> binary mode */
            f = g_fopen (preview_filename, "wb");
            if (f) {
              written = fwrite (data_buf, size, 1, f);
              if (!written) {
                g_print ("error writing file\n");
              }
              fclose (f);
            } else {
              g_print ("error opening file for raw image writing\n");
            }
            g_free (preview_filename);
          }
        }
      }
      /* unhandled message */
      break;
  }
  return GST_BUS_PASS;
}
/**
 * gst_ghost_pad_set_target:
 * @gpad: the #GstGhostPad
 * @newtarget: the new pad target, or NULL to clear the target
 *
 * Set the new target of the ghostpad @gpad. Any existing target
 * is unlinked and links to the new target are established. If @newtarget is
 * NULL the target will be cleared.
 *
 * Returns: TRUE if the new target could be set. This function can return FALSE
 * when the internal pads could not be linked.
 */
gboolean
gst_ghost_pad_set_target (GstGhostPad * gpad, GstPad * newtarget)
{
  GstPad *internal;
  GstPad *oldtarget;
  gboolean result;
  GstPadLinkReturn lret;

  g_return_val_if_fail (GST_IS_GHOST_PAD (gpad), FALSE);

  /* everything below happens under the proxy lock so target/internal
   * stay consistent while we relink */
  GST_PROXY_LOCK (gpad);
  internal = GST_PROXY_PAD_INTERNAL (gpad);

  if (newtarget)
    GST_DEBUG_OBJECT (gpad, "set target %s:%s", GST_DEBUG_PAD_NAME (newtarget));
  else
    GST_DEBUG_OBJECT (gpad, "clearing target");

  /* clear old target */
  if ((oldtarget = GST_PROXY_PAD_TARGET (gpad))) {
    /* stop listening for caps changes on the old source target */
    if (GST_PAD_IS_SRC (oldtarget)) {
      g_signal_handlers_disconnect_by_func (oldtarget,
          on_src_target_notify, gpad);
    }

    /* if we have an internal pad, unlink */
    if (internal) {
      if (GST_PAD_IS_SRC (internal))
        gst_pad_unlink (internal, oldtarget);
      else
        gst_pad_unlink (oldtarget, internal);
    }
  }

  result = gst_proxy_pad_set_target_unlocked (GST_PAD_CAST (gpad), newtarget);

  if (result && newtarget) {
    /* track caps changes on the new source target */
    if (GST_PAD_IS_SRC (newtarget)) {
      g_signal_connect (newtarget, "notify::caps",
          G_CALLBACK (on_src_target_notify), gpad);
    }

    /* and link to internal pad */
    GST_DEBUG_OBJECT (gpad, "connecting internal pad to target");

    if (GST_PAD_IS_SRC (internal))
      lret = gst_pad_link (internal, newtarget);
    else
      lret = gst_pad_link (newtarget, internal);

    if (lret != GST_PAD_LINK_OK)
      goto link_failed;
  }
  GST_PROXY_UNLOCK (gpad);

  return result;

  /* ERRORS */
link_failed:
  {
    GST_WARNING_OBJECT (gpad, "could not link internal and target, reason:%d",
        lret);
    /* and unset target again */
    gst_proxy_pad_set_target_unlocked (GST_PAD_CAST (gpad), NULL);
    GST_PROXY_UNLOCK (gpad);
    return FALSE;
  }
}
/* Handle an MVE palette segment.
 *
 * Reads a (start, count) range of 6-bit VGA RGB triplets from the segment
 * payload and stores them as packed 0x00RRGGBB words in the video stream's
 * palette buffer, allocating (zeroed) or copy-on-writing that buffer as
 * needed. Returns GST_FLOW_ERROR on malformed input or if no video stream
 * has been set up yet. */
static GstFlowReturn
gst_mve_video_palette (GstMveDemux * mve, const guint8 * data, guint16 len)
{
  GstBuffer *buf;
  guint16 start, count;
  const guint8 *pal;
  guint32 *pal_ptr;
  gint i;

  GST_DEBUG_OBJECT (mve, "video palette");

  if (mve->video_stream == NULL) {
    GST_ELEMENT_ERROR (mve, STREAM, DECODE, (NULL),
        ("found palette before video stream was initialized"));
    return GST_FLOW_ERROR;
  }

  /* need 4 more bytes now, more later */
  if (len < 4)
    return gst_mve_stream_error (mve, 4, len);

  len -= 4;

  start = GST_READ_UINT16_LE (data);
  count = GST_READ_UINT16_LE (data + 2);
  GST_DEBUG_OBJECT (mve, "found palette start:%u, count:%u", start, count);

  /* need more bytes: 3 payload bytes (R,G,B) per palette entry */
  if (len < count * 3)
    return gst_mve_stream_error (mve, count * 3, len);

  /* make sure we don't exceed the buffer */
  if (start + count > MVE_PALETTE_COUNT) {
    GST_ELEMENT_ERROR (mve, STREAM, DECODE, (NULL),
        ("palette too large for buffer"));
    return GST_FLOW_ERROR;
  }

  if (mve->video_stream->palette != NULL) {
    /* older buffers floating around might still use the old palette,
       so make sure we can update it (copy-on-write) */
    buf = gst_buffer_make_writable (mve->video_stream->palette);
  } else {
    /* first palette: allocate a full-size, zero-initialized table */
    buf = gst_buffer_new_and_alloc (MVE_PALETTE_COUNT * 4);
    memset (GST_BUFFER_DATA (buf), 0, GST_BUFFER_SIZE (buf));
  }

  mve->video_stream->palette = buf;

  pal = data + 4;
  pal_ptr = ((guint32 *) GST_BUFFER_DATA (buf)) + start;
  for (i = 0; i < count; ++i) {
    /* convert from 6-bit VGA to 8-bit palette */
    guint8 r, g, b;

    r = (*pal) << 2;
    ++pal;
    g = (*pal) << 2;
    ++pal;
    b = (*pal) << 2;
    ++pal;
    *pal_ptr = (r << 16) | (g << 8) | (b);
    ++pal_ptr;
  }
  return GST_FLOW_OK;
}
/* Build and set the source pad caps for the parsed H.263 stream.
 *
 * Starts from the current sink caps (or a fresh video/x-h263 structure),
 * marks the stream as parsed, and fills in framerate, dimensions and
 * pixel-aspect-ratio — preferring values negotiated on the sink pad over
 * those decoded from the picture headers. When full headers were parsed,
 * additionally publishes the annex flags, profile and level.
 *
 * Fix vs. previous revision: the reference returned by the second
 * gst_pad_get_current_caps() call (sink_caps) was never released, leaking
 * a caps ref on every call. It is now unreffed after the last use of the
 * structure 'st' borrowed from it. */
static void
gst_h263_parse_set_src_caps (GstH263Parse * h263parse,
    const H263Params * params)
{
  GstStructure *st = NULL;
  GstCaps *caps, *sink_caps;
  gint fr_num, fr_denom, par_num, par_denom;

  g_assert (h263parse->state == PASSTHROUGH || h263parse->state == GOT_HEADER);

  caps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (h263parse));
  if (caps) {
    caps = gst_caps_make_writable (caps);
  } else {
    caps = gst_caps_new_simple ("video/x-h263",
        "variant", G_TYPE_STRING, "itu", NULL);
  }
  gst_caps_set_simple (caps, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);

  sink_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (h263parse));

  if (sink_caps && (st = gst_caps_get_structure (sink_caps, 0)) &&
      gst_structure_get_fraction (st, "framerate", &fr_num, &fr_denom)) {
    /* Got it in caps - nothing more to do */
    GST_DEBUG_OBJECT (h263parse, "sink caps override framerate from headers");
  } else {
    /* Caps didn't have the framerate - get it from params */
    gst_h263_parse_get_framerate (params, &fr_num, &fr_denom);
  }
  gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION, fr_num, fr_denom,
      NULL);

  if (params->width && params->height)
    gst_caps_set_simple (caps, "width", G_TYPE_INT, params->width,
        "height", G_TYPE_INT, params->height, NULL);

  if (st != NULL &&
      gst_structure_get_fraction (st, "pixel-aspect-ratio", &par_num,
          &par_denom)) {
    /* Got it in caps - nothing more to do */
    GST_DEBUG_OBJECT (h263parse, "sink caps override PAR");
  } else {
    /* Caps didn't have the PAR - get it from params */
    gst_h263_parse_get_par (params, &par_num, &par_denom);
  }
  gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
      par_num, par_denom, NULL);

  /* 'st' borrows from sink_caps; now that we're done with it we can drop
   * the reference (this was previously leaked) */
  if (sink_caps) {
    gst_caps_unref (sink_caps);
    st = NULL;
  }

  if (h263parse->state == GOT_HEADER) {
    gst_caps_set_simple (caps,
        "annex-d", G_TYPE_BOOLEAN, (params->features & H263_OPTION_UMV_MODE),
        "annex-e", G_TYPE_BOOLEAN, (params->features & H263_OPTION_SAC_MODE),
        "annex-f", G_TYPE_BOOLEAN, (params->features & H263_OPTION_AP_MODE),
        "annex-g", G_TYPE_BOOLEAN, (params->features & H263_OPTION_PB_MODE),
        "annex-i", G_TYPE_BOOLEAN, (params->features & H263_OPTION_AIC_MODE),
        "annex-j", G_TYPE_BOOLEAN, (params->features & H263_OPTION_DF_MODE),
        "annex-k", G_TYPE_BOOLEAN, (params->features & H263_OPTION_SS_MODE),
        "annex-m", G_TYPE_BOOLEAN, (params->type == PICTURE_IMPROVED_PB),
        "annex-n", G_TYPE_BOOLEAN, (params->features & H263_OPTION_RPS_MODE),
        "annex-q", G_TYPE_BOOLEAN, (params->features & H263_OPTION_RRU_MODE),
        "annex-r", G_TYPE_BOOLEAN, (params->features & H263_OPTION_ISD_MODE),
        "annex-s", G_TYPE_BOOLEAN, (params->features & H263_OPTION_AIV_MODE),
        "annex-t", G_TYPE_BOOLEAN, (params->features & H263_OPTION_MQ_MODE),
        "annex-u", G_TYPE_BOOLEAN, (params->features & H263_OPTION_ERPS_MODE),
        "annex-v", G_TYPE_BOOLEAN, (params->features & H263_OPTION_DPS_MODE),
        NULL);

    h263parse->profile = gst_h263_parse_get_profile (params);
    if (h263parse->profile != -1) {
      gchar *profile_str;

      profile_str = g_strdup_printf ("%u", h263parse->profile);
      gst_caps_set_simple (caps, "profile", G_TYPE_STRING, profile_str, NULL);
      g_free (profile_str);
    }

    h263parse->level = gst_h263_parse_get_level (params, h263parse->profile,
        h263parse->bitrate, fr_num, fr_denom);
    if (h263parse->level != -1) {
      gchar *level_str;

      level_str = g_strdup_printf ("%u", h263parse->level);
      gst_caps_set_simple (caps, "level", G_TYPE_STRING, level_str, NULL);
      g_free (level_str);
    }
  }

  gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (GST_BASE_PARSE (h263parse)), caps);
  gst_caps_unref (caps);
}
/* Decode one MVE video-data segment into a new buffer.
 *
 * Parses the 14-byte segment header, swaps the back buffers for delta
 * frames, decodes into back_buf1 (8- or 16-bit path depending on bpp) and
 * copies the result into a freshly allocated pad buffer. For 8-bit video
 * the current palette is attached to the buffer caps. On success *output
 * holds the new buffer (caller takes ownership). */
static GstFlowReturn
gst_mve_video_data (GstMveDemux * mve, const guint8 * data, guint16 len,
    GstBuffer ** output)
{
  GstFlowReturn ret = GST_FLOW_OK;
  gint16 cur_frame, last_frame;
  gint16 x_offset, y_offset;
  gint16 x_size, y_size;
  guint16 flags;
  gint dec;
  GstBuffer *buf = NULL;
  GstMveDemuxStream *s = mve->video_stream;

  GST_LOG_OBJECT (mve, "video data");

  if (s == NULL) {
    GST_ELEMENT_ERROR (mve, STREAM, DECODE, (NULL),
        ("trying to decode video data before stream was initialized"));
    return GST_FLOW_ERROR;
  }

  /* advance the stream clock by one frame; first frame starts at 0 */
  if (GST_CLOCK_TIME_IS_VALID (mve->frame_duration)) {
    if (GST_CLOCK_TIME_IS_VALID (s->last_ts))
      s->last_ts += mve->frame_duration;
    else
      s->last_ts = 0;
  }

  if (!s->code_map_avail) {
    GST_ELEMENT_ERROR (mve, STREAM, DECODE, (NULL),
        ("no code map available for decoding"));
    return GST_FLOW_ERROR;
  }

  /* need at least 14 more bytes */
  if (len < 14)
    return gst_mve_stream_error (mve, 14, len);

  len -= 14;

  cur_frame = GST_READ_UINT16_LE (data);
  last_frame = GST_READ_UINT16_LE (data + 2);
  x_offset = GST_READ_UINT16_LE (data + 4);
  y_offset = GST_READ_UINT16_LE (data + 6);
  x_size = GST_READ_UINT16_LE (data + 8);
  y_size = GST_READ_UINT16_LE (data + 10);
  flags = GST_READ_UINT16_LE (data + 12);
  data += 14;

  GST_DEBUG_OBJECT (mve,
      "video data hot:%d, cold:%d, xoff:%d, yoff:%d, w:%d, h:%d, flags:%x",
      cur_frame, last_frame, x_offset, y_offset, x_size, y_size, flags);

  /* delta frames are decoded on top of the previous frame: swap the two
     back buffers so back_buf1 becomes the decode target */
  if (flags & MVE_VIDEO_DELTA_FRAME) {
    guint8 *temp = s->back_buf1;

    s->back_buf1 = s->back_buf2;
    s->back_buf2 = temp;
  }

  ret = gst_mve_buffer_alloc_for_pad (s, s->width * s->height * s->bpp, &buf);
  if (ret != GST_FLOW_OK)
    return ret;

  if (s->bpp == 2) {
    dec = ipvideo_decode_frame16 (s, data, len);
  } else {
    /* 8-bit video requires a palette to have been seen first */
    if (s->palette == NULL) {
      GST_ELEMENT_ERROR (mve, STREAM, DECODE, (NULL), ("no palette available"));
      goto error;
    }

    dec = ipvideo_decode_frame8 (s, data, len);
  }
  if (dec != 0)
    goto error;

  memcpy (GST_BUFFER_DATA (buf), s->back_buf1, GST_BUFFER_SIZE (buf));
  GST_BUFFER_DURATION (buf) = mve->frame_duration;
  GST_BUFFER_OFFSET_END (buf) = ++s->offset;

  if (s->bpp == 1) {
    GstCaps *caps;

    /* set the palette on the outgoing buffer */
    caps = gst_caps_copy (s->caps);
    gst_caps_set_simple (caps,
        "palette_data", GST_TYPE_BUFFER, s->palette, NULL);
    gst_buffer_set_caps (buf, caps);
    gst_caps_unref (caps);
  }

  *output = buf;
  return GST_FLOW_OK;

error:
  /* buf is always allocated by the time we can get here */
  gst_buffer_unref (buf);
  return GST_FLOW_ERROR;
}
/* GObject property setter.
 *
 * For the data/audio/video boolean properties, turning a property on the
 * first time lazily creates the corresponding source element (appsrc,
 * audiotestsrc or videotestsrc), links it into the bin and syncs its state
 * with the parent. Elements are never torn down when a property is turned
 * off again. All work happens under the element lock. */
static void
kms_dummy_src_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  KmsDummySrc *self = KMS_DUMMY_SRC (object);

  KMS_ELEMENT_LOCK (KMS_ELEMENT (self));
  switch (property_id) {
    case PROP_DATA:
      self->priv->data = g_value_get_boolean (value);

      /* create the data appsrc once, on first activation */
      if (self->priv->data && self->priv->dataappsrc == NULL) {
        GstElement *tee;
        GstCaps *caps;

        GST_DEBUG_OBJECT (self, "Creating data stream");
        tee = kms_element_get_data_tee (KMS_ELEMENT (self));
        caps = gst_caps_from_string (KMS_AGNOSTIC_DATA_CAPS);
        self->priv->dataappsrc = gst_element_factory_make ("appsrc", NULL);
        g_object_set (G_OBJECT (self->priv->dataappsrc), "is-live", TRUE,
            "caps", caps, "emit-signals", TRUE, "stream-type", 0,
            "format", GST_FORMAT_TIME, NULL);
        gst_caps_unref (caps);
        /* feed buffers on demand from the data channel callback */
        g_signal_connect (self->priv->dataappsrc, "need-data",
            G_CALLBACK (kms_dummy_src_feed_data_channel), self);
        gst_bin_add (GST_BIN (self), self->priv->dataappsrc);
        gst_element_link_pads (self->priv->dataappsrc, "src", tee, "sink");
        gst_element_sync_state_with_parent (self->priv->dataappsrc);
      }
      break;
    case PROP_AUDIO:
      self->priv->audio = g_value_get_boolean (value);

      /* create the audio test source once, on first activation */
      if (self->priv->audio && self->priv->audioappsrc == NULL) {
        GstElement *agnosticbin;

        GST_DEBUG_OBJECT (self, "Creating audio stream");
        agnosticbin = kms_element_get_audio_agnosticbin (KMS_ELEMENT (self));
        self->priv->audioappsrc =
            gst_element_factory_make ("audiotestsrc", NULL);
        g_object_set (G_OBJECT (self->priv->audioappsrc), "is-live", TRUE,
            NULL);
        gst_bin_add (GST_BIN (self), self->priv->audioappsrc);
        gst_element_link_pads (self->priv->audioappsrc, "src", agnosticbin,
            "sink");
        gst_element_sync_state_with_parent (self->priv->audioappsrc);
      }
      break;
    case PROP_VIDEO:
      self->priv->video = g_value_get_boolean (value);

      /* create the video test source once, on first activation */
      if (self->priv->video && self->priv->videoappsrc == NULL) {
        GstElement *agnosticbin;

        GST_DEBUG_OBJECT (self, "Creating video stream");
        agnosticbin = kms_element_get_video_agnosticbin (KMS_ELEMENT (self));
        self->priv->videoappsrc =
            gst_element_factory_make ("videotestsrc", NULL);
        g_object_set (G_OBJECT (self->priv->videoappsrc), "is-live", TRUE,
            NULL);
        gst_bin_add (GST_BIN (self), self->priv->videoappsrc);
        gst_element_link_pads (self->priv->videoappsrc, "src", agnosticbin,
            "sink");
        gst_element_sync_state_with_parent (self->priv->videoappsrc);
      }
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
  KMS_ELEMENT_UNLOCK (KMS_ELEMENT (self));
}
static GstFlowReturn gst_mve_audio_init (GstMveDemux * mve, guint8 version, const guint8 * data, guint16 len) { GstMveDemuxStream *stream; guint16 flags; guint32 requested_buffer; GstTagList *list; gchar *name; GST_DEBUG_OBJECT (mve, "init audio"); /* need 8 more bytes */ if (len < 8) return gst_mve_stream_error (mve, 8, len); if (mve->audio_stream == NULL) { stream = g_new0 (GstMveDemuxStream, 1); stream->offset = 0; stream->last_ts = 0; stream->last_flow = GST_FLOW_OK; mve->audio_stream = stream; } else { stream = mve->audio_stream; gst_caps_unref (stream->caps); } flags = GST_READ_UINT16_LE (data + 2); stream->sample_rate = GST_READ_UINT16_LE (data + 4); requested_buffer = GST_READ_UINT32_LE (data + 6); /* bit 0: 0 = mono, 1 = stereo */ stream->n_channels = (flags & MVE_AUDIO_STEREO) + 1; /* bit 1: 0 = 8 bit, 1 = 16 bit */ stream->sample_size = (((flags & MVE_AUDIO_16BIT) >> 1) + 1) * 8; /* bit 2: 0 = uncompressed, 1 = compressed */ stream->compression = ((version > 0) && (flags & MVE_AUDIO_COMPRESSED)) ? TRUE : FALSE; GST_DEBUG_OBJECT (mve, "audio init, sample_rate:%d, channels:%d, " "bits_per_sample:%d, compression:%d, buffer:%u", stream->sample_rate, stream->n_channels, stream->sample_size, stream->compression, requested_buffer); stream->caps = gst_caps_from_string ("audio/x-raw-int"); if (stream->caps == NULL) return GST_FLOW_ERROR; gst_caps_set_simple (stream->caps, "signed", G_TYPE_BOOLEAN, (stream->sample_size == 8) ? FALSE : TRUE, "depth", G_TYPE_INT, stream->sample_size, "width", G_TYPE_INT, stream->sample_size, "channels", G_TYPE_INT, stream->n_channels, "rate", G_TYPE_INT, stream->sample_rate, NULL); if (stream->sample_size > 8) { /* for uncompressed audio we can simply copy the incoming buffer which is always in little endian format */ gst_caps_set_simple (stream->caps, "endianness", G_TYPE_INT, (stream->compression ? 
G_BYTE_ORDER : G_LITTLE_ENDIAN), NULL); } else if (stream->compression) { GST_WARNING_OBJECT (mve, "compression is only supported for 16-bit samples"); stream->compression = FALSE; } list = gst_tag_list_new (); name = g_strdup_printf ("Raw %d-bit PCM audio", stream->sample_size); gst_tag_list_add (list, GST_TAG_MERGE_REPLACE, GST_TAG_AUDIO_CODEC, name, NULL); g_free (name); if (gst_mve_add_stream (mve, stream, list)) return gst_pad_push_event (mve->audio_stream->pad, gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0)) ? GST_FLOW_OK : GST_FLOW_ERROR; else return GST_FLOW_OK; }
/* Translate the negotiated ring-buffer spec into ALSA parameters.
 *
 * Fills alsa->format, ->rate, ->channels, ->buffer_time, ->period_time and
 * ->access, and flags IEC958 (SPDIF) passthrough. Returns FALSE when the
 * spec describes a format we cannot map; in that case no scalar fields are
 * touched. */
static gboolean
alsasink_parse_spec (GstAlsaSink * alsa, GstRingBufferSpec * spec)
{
  /* assume no SPDIF passthrough until proven otherwise */
  alsa->iec958 = FALSE;

  switch (spec->type) {
    case GST_BUFTYPE_LINEAR:
      GST_DEBUG_OBJECT (alsa,
          "Linear format : depth=%d, width=%d, sign=%d, bigend=%d",
          spec->depth, spec->width, spec->sign, spec->bigend);

      alsa->format = snd_pcm_build_linear_format (spec->depth, spec->width,
          spec->sign ? 0 : 1, spec->bigend ? 1 : 0);
      break;
    case GST_BUFTYPE_FLOAT:
      /* map the concrete float layout; anything else is unsupported */
      if (spec->format == GST_FLOAT32_LE)
        alsa->format = SND_PCM_FORMAT_FLOAT_LE;
      else if (spec->format == GST_FLOAT32_BE)
        alsa->format = SND_PCM_FORMAT_FLOAT_BE;
      else if (spec->format == GST_FLOAT64_LE)
        alsa->format = SND_PCM_FORMAT_FLOAT64_LE;
      else if (spec->format == GST_FLOAT64_BE)
        alsa->format = SND_PCM_FORMAT_FLOAT64_BE;
      else
        goto unsupported;
      break;
    case GST_BUFTYPE_A_LAW:
      alsa->format = SND_PCM_FORMAT_A_LAW;
      break;
    case GST_BUFTYPE_MU_LAW:
      alsa->format = SND_PCM_FORMAT_MU_LAW;
      break;
    case GST_BUFTYPE_IEC958:
      /* SPDIF passthrough: payload is big-endian 16-bit frames */
      alsa->format = SND_PCM_FORMAT_S16_BE;
      alsa->iec958 = TRUE;
      break;
    default:
      goto unsupported;
  }

  alsa->rate = spec->rate;
  alsa->channels = spec->channels;
  alsa->buffer_time = spec->buffer_time;
  alsa->period_time = spec->latency_time;
  alsa->access = SND_PCM_ACCESS_RW_INTERLEAVED;

  return TRUE;

  /* ERRORS */
unsupported:
  {
    return FALSE;
  }
}
static GstFlowReturn gst_mve_audio_data (GstMveDemux * mve, guint8 type, const guint8 * data, guint16 len, GstBuffer ** output) { GstFlowReturn ret; GstMveDemuxStream *s = mve->audio_stream; GstBuffer *buf = NULL; guint16 stream_mask; guint16 size; GST_LOG_OBJECT (mve, "audio data"); if (s == NULL) { GST_ELEMENT_ERROR (mve, STREAM, DECODE, (NULL), ("trying to queue samples with no audio stream")); return GST_FLOW_ERROR; } /* need at least 6 more bytes */ if (len < 6) return gst_mve_stream_error (mve, 6, len); len -= 6; stream_mask = GST_READ_UINT16_LE (data + 2); size = GST_READ_UINT16_LE (data + 4); data += 6; if (stream_mask & MVE_DEFAULT_AUDIO_STREAM) { guint16 n_samples = size / s->n_channels / (s->sample_size / 8); GstClockTime duration = (GST_SECOND / s->sample_rate) * n_samples; if (type == MVE_OC_AUDIO_DATA) { guint16 required = (s->compression ? size / 2 + s->n_channels : size); if (len < required) return gst_mve_stream_error (mve, required, len); ret = gst_mve_buffer_alloc_for_pad (s, size, &buf); if (ret != GST_FLOW_OK) return ret; if (s->compression) ipaudio_uncompress ((gint16 *) GST_BUFFER_DATA (buf), size, data, s->n_channels); else memcpy (GST_BUFFER_DATA (buf), data, size); GST_DEBUG_OBJECT (mve, "created audio buffer, size:%u, stream_mask:%x", size, stream_mask); } else { /* silence - create a minimal buffer with no sound */ size = s->n_channels * (s->sample_size / 8); ret = gst_mve_buffer_alloc_for_pad (s, size, &buf); memset (GST_BUFFER_DATA (buf), 0, size); } GST_BUFFER_DURATION (buf) = duration; GST_BUFFER_OFFSET_END (buf) = s->offset + n_samples; *output = buf; s->offset += n_samples; s->last_ts += duration; } else { /* alternate audio streams not supported. are there any movies which use them? */ if (type == MVE_OC_AUDIO_DATA) GST_WARNING_OBJECT (mve, "found non-empty alternate audio stream"); } return GST_FLOW_OK; }
/* Upstream event handler.
 *
 * For navigation events, maps the pointer coordinates from the flipped
 * output geometry back into the input geometry according to the active
 * flip method, rewriting "pointer_x"/"pointer_y" in place. All events are
 * then handed to the base transform's default handler. */
static gboolean
gst_video_flip_src_event (GstBaseTransform * trans, GstEvent * event)
{
  GstVideoFlip *videoflip = GST_VIDEO_FLIP (trans);

  GST_DEBUG_OBJECT (videoflip, "handling %s event",
      GST_EVENT_TYPE_NAME (event));

  if (GST_EVENT_TYPE (event) == GST_EVENT_NAVIGATION) {
    GstStructure *s;
    gdouble px, py;

    /* we rewrite the event's structure, so make sure we own it */
    event =
        GST_EVENT (gst_mini_object_make_writable (GST_MINI_OBJECT (event)));
    s = (GstStructure *) gst_event_get_structure (event);

    if (gst_structure_get_double (s, "pointer_x", &px) &&
        gst_structure_get_double (s, "pointer_y", &py)) {
      /* default (identity and unknown methods): coordinates unchanged */
      gdouble out_x = px;
      gdouble out_y = py;

      GST_DEBUG_OBJECT (videoflip, "converting %fx%f", px, py);

      switch (videoflip->method) {
        case GST_VIDEO_FLIP_METHOD_90R:
          out_x = py;
          out_y = videoflip->to_width - px;
          break;
        case GST_VIDEO_FLIP_METHOD_90L:
          out_x = videoflip->to_height - py;
          out_y = px;
          break;
        case GST_VIDEO_FLIP_METHOD_OTHER:
          out_x = videoflip->to_height - py;
          out_y = videoflip->to_width - px;
          break;
        case GST_VIDEO_FLIP_METHOD_TRANS:
          out_x = py;
          out_y = px;
          break;
        case GST_VIDEO_FLIP_METHOD_180:
          out_x = videoflip->to_width - px;
          out_y = videoflip->to_height - py;
          break;
        case GST_VIDEO_FLIP_METHOD_HORIZ:
          out_x = videoflip->to_width - px;
          out_y = py;
          break;
        case GST_VIDEO_FLIP_METHOD_VERT:
          out_x = px;
          out_y = videoflip->to_height - py;
          break;
        default:
          break;
      }

      GST_DEBUG_OBJECT (videoflip, "to %fx%f", out_x, out_y);
      gst_structure_set (s, "pointer_x", G_TYPE_DOUBLE, out_x,
          "pointer_y", G_TYPE_DOUBLE, out_y, NULL);
    }
  }

  /* chain up for default handling and forwarding */
  return GST_BASE_TRANSFORM_CLASS (parent_class)->src_event (trans, event);
}
/* Handle an MVE create-timer segment.
 *
 * Derives the global frame duration from timer rate * subdivision (in
 * microseconds), then finalizes the video stream: builds video/x-raw-rgb
 * caps from the previously created buffers, allocates the code map, and —
 * on first setup — registers the stream with a codec tag and pushes the
 * initial newsegment event. Requires the create-video-buffer segment to
 * have run first. */
static GstFlowReturn
gst_mve_timer_create (GstMveDemux * mve, const guint8 * data, guint16 len,
    GstBuffer ** buf)
{
  guint32 t_rate;
  guint16 t_subdiv;
  GstMveDemuxStream *s;
  GstTagList *list;
  gint rate_nom, rate_den;

  g_return_val_if_fail (mve->video_stream != NULL, GST_FLOW_ERROR);

  /* need 6 more bytes */
  if (len < 6)
    return gst_mve_stream_error (mve, 6, len);

  t_rate = GST_READ_UINT32_LE (data);
  t_subdiv = GST_READ_UINT16_LE (data + 4);

  GST_DEBUG_OBJECT (mve, "found timer:%ux%u", t_rate, t_subdiv);
  /* NOTE(review): t_rate * t_subdiv is evaluated in 32-bit arithmetic
   * before the widening multiply with GST_USECOND — could wrap for
   * pathological values; confirm acceptable for real MVE files */
  mve->frame_duration = t_rate * t_subdiv * GST_USECOND;

  /* now really start rolling... */
  s = mve->video_stream;

  if ((s->buffer == NULL) || (s->width == 0) || (s->height == 0)) {
    GST_ELEMENT_ERROR (mve, STREAM, DECODE, (NULL),
        ("missing or invalid create-video-buffer segment (%dx%d)",
            s->width, s->height));
    return GST_FLOW_ERROR;
  }

  if (s->pad != NULL) {
    /* stream was already set up before: drop old caps and code map,
       and don't re-tag */
    if (s->caps != NULL) {
      gst_caps_unref (s->caps);
      s->caps = NULL;
    }
    if (s->code_map != NULL) {
      g_free (s->code_map);
      s->code_map = NULL;
    }
    list = NULL;
  } else {
    /* first time: prepare a codec tag for the new stream */
    list = gst_tag_list_new ();
    gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
        GST_TAG_VIDEO_CODEC, "Raw RGB video", NULL);
  }

  s->caps = gst_caps_from_string ("video/x-raw-rgb");
  if (s->caps == NULL)
    return GST_FLOW_ERROR;

  /* express the frame duration as a fraction in microsecond units */
  rate_nom = GST_SECOND / GST_USECOND;
  rate_den = mve->frame_duration / GST_USECOND;

  gst_caps_set_simple (s->caps,
      "bpp", G_TYPE_INT, s->bpp * 8,
      "depth", G_TYPE_INT, (s->bpp == 1) ? 8 : 15,
      "width", G_TYPE_INT, s->width,
      "height", G_TYPE_INT, s->height,
      "framerate", GST_TYPE_FRACTION, rate_nom, rate_den,
      "endianness", G_TYPE_INT, G_BYTE_ORDER, NULL);
  if (s->bpp > 1) {
    /* 15-bit RGB555 masks */
    gst_caps_set_simple (s->caps, "red_mask", G_TYPE_INT, 0x7C00,       /* 31744 */
        "green_mask", G_TYPE_INT, 0x03E0,       /* 992 */
        "blue_mask", G_TYPE_INT, 0x001F,        /* 31 */
        NULL);
  }

  /* one code-map nibble per 8x8 block pair */
  s->code_map = g_malloc ((s->width * s->height) / (8 * 8 * 2));

  if (gst_mve_add_stream (mve, s, list))
    return gst_pad_push_event (s->pad,
        gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME,
            0, GST_CLOCK_TIME_NONE, 0)) ? GST_FLOW_OK : GST_FLOW_ERROR;
  else
    return GST_FLOW_OK;
}
/* GstElement::change_state implementation for identity.
 * Manages the "blocked" flag around PAUSED transitions, wakes up any
 * thread waiting on blocked_cond, and reports NO_PREROLL when running
 * in sync mode (a live-like source behavior). */
static GstStateChangeReturn
gst_identity_change_state (GstElement * element, GstStateChange transition)
{
  GstIdentity *self = GST_IDENTITY (element);
  GstStateChangeReturn result;
  gboolean needs_no_preroll = FALSE;

  /* work to do before chaining up to the parent class */
  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      /* block the streaming thread until we reach PLAYING */
      GST_OBJECT_LOCK (self);
      self->blocked = TRUE;
      GST_OBJECT_UNLOCK (self);
      if (self->sync)
        needs_no_preroll = TRUE;
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      /* unblock and wake up anyone waiting on the condition */
      GST_OBJECT_LOCK (self);
      self->blocked = FALSE;
      g_cond_broadcast (&self->blocked_cond);
      GST_OBJECT_UNLOCK (self);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      /* abort any pending clock wait and release blocked waiters */
      GST_OBJECT_LOCK (self);
      if (self->clock_id) {
        GST_DEBUG_OBJECT (self, "unlock clock wait");
        gst_clock_id_unschedule (self->clock_id);
      }
      self->blocked = FALSE;
      g_cond_broadcast (&self->blocked_cond);
      GST_OBJECT_UNLOCK (self);
      break;
    case GST_STATE_CHANGE_NULL_TO_READY:
    default:
      break;
  }

  result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  /* work to do after the parent class has handled the transition */
  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      GST_OBJECT_LOCK (self);
      self->upstream_latency = 0;
      self->blocked = TRUE;
      GST_OBJECT_UNLOCK (self);
      if (self->sync)
        needs_no_preroll = TRUE;
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
    case GST_STATE_CHANGE_READY_TO_NULL:
    default:
      break;
  }

  if (needs_no_preroll && result == GST_STATE_CHANGE_SUCCESS)
    result = GST_STATE_CHANGE_NO_PREROLL;

  return result;
}
/* parse segment */
/* Dispatch one MVE segment (already fully buffered in the adapter) to the
 * matching handler. For data-producing opcodes, *stream and *send are set
 * so the caller can push the resulting buffer on the right pad.
 * Always flushes mve->needed_bytes from the adapter before returning. */
static GstFlowReturn
gst_mve_parse_segment (GstMveDemux * mve, GstMveDemuxStream ** stream,
    GstBuffer ** send)
{
  GstFlowReturn ret = GST_FLOW_OK;
  const guint8 *buffer, *data;
  guint8 type, version;
  guint16 len;

  buffer = gst_adapter_peek (mve->adapter, mve->needed_bytes);

  type = GST_MVE_SEGMENT_TYPE (buffer);

  /* check whether to handle the segment */
  if (type < 32) {
    version = GST_MVE_SEGMENT_VERSION (buffer);
    len = GST_MVE_SEGMENT_SIZE (buffer);
    /* payload starts after the 4-byte segment header */
    data = buffer + 4;

    switch (type) {
      case MVE_OC_END_OF_CHUNK:
        gst_mve_end_chunk (mve);
        break;
      case MVE_OC_CREATE_TIMER:
        ret = gst_mve_timer_create (mve, data, len, send);
        *stream = mve->audio_stream;
        break;
      case MVE_OC_AUDIO_BUFFERS:
        ret = gst_mve_audio_init (mve, version, data, len);
        break;
      case MVE_OC_VIDEO_BUFFERS:
        ret = gst_mve_video_create_buffer (mve, version, data, len);
        break;
      case MVE_OC_AUDIO_DATA:
      case MVE_OC_AUDIO_SILENCE:
        /* may produce an output buffer in *send for the audio pad */
        ret = gst_mve_audio_data (mve, type, data, len, send);
        *stream = mve->audio_stream;
        break;
      case MVE_OC_VIDEO_MODE:
        ret = gst_mve_video_init (mve, data);
        break;
      case MVE_OC_PALETTE:
        ret = gst_mve_video_palette (mve, data, len);
        break;
      case MVE_OC_PALETTE_COMPRESSED:
        ret = gst_mve_video_palette_compressed (mve, data, len);
        break;
      case MVE_OC_CODE_MAP:
        ret = gst_mve_video_code_map (mve, data, len);
        break;
      case MVE_OC_VIDEO_DATA:
        /* may produce an output buffer in *send for the video pad */
        ret = gst_mve_video_data (mve, data, len, send);
        *stream = mve->video_stream;
        break;
      case MVE_OC_END_OF_STREAM:
      case MVE_OC_PLAY_AUDIO:
      case MVE_OC_PLAY_VIDEO:
        /* these are chunks we don't need to handle */
        GST_LOG_OBJECT (mve, "ignored segment type:0x%02x, version:0x%02x",
            type, version);
        break;
      case 0x13:               /* ??? */
      case 0x14:               /* ??? */
      case 0x15:               /* ??? */
        /* these are chunks we know exist but we don't care about */
        GST_DEBUG_OBJECT (mve,
            "known but unhandled segment type:0x%02x, version:0x%02x",
            type, version);
        break;
      default:
        GST_WARNING_OBJECT (mve,
            "unhandled segment type:0x%02x, version:0x%02x", type, version);
        break;
    }
  }
  /* consume the whole segment regardless of whether it was handled */
  gst_adapter_flush (mve->adapter, mve->needed_bytes);
  return ret;
}
/* WavpackPackSamples write callback: wraps one chunk of encoder output in a
 * GstBuffer and pushes it on the normal or correction source pad.
 *
 * id:    GstWavpackEncWriteID identifying the element and whether this is
 *        correction-stream output
 * data:  encoded bytes from libwavpack
 * count: number of bytes in data
 *
 * Returns TRUE on success, FALSE when allocation or pushing failed (the
 * flow return is stored per-pad so upstream can see why). Wavpack blocks
 * belonging to the same block_index are accumulated in enc->pending_buffer
 * until FINAL_BLOCK, so only complete blocks are pushed downstream. */
static int
gst_wavpack_enc_push_block (void *id, void *data, int32_t count)
{
  GstWavpackEncWriteID *wid = (GstWavpackEncWriteID *) id;
  GstWavpackEnc *enc = GST_WAVPACK_ENC (wid->wavpack_enc);
  GstFlowReturn *flow;
  GstBuffer *buffer;
  GstPad *pad;
  guchar *block = (guchar *) data;

  /* route to the correction pad or the main pad, and track its last flow */
  pad = (wid->correction) ? enc->wvcsrcpad : enc->srcpad;
  flow =
      (wid->correction) ? &enc->wvcsrcpad_last_return : &enc->
      srcpad_last_return;

  *flow = gst_pad_alloc_buffer_and_set_caps (pad, GST_BUFFER_OFFSET_NONE,
      count, GST_PAD_CAPS (pad), &buffer);

  if (*flow != GST_FLOW_OK) {
    GST_WARNING_OBJECT (enc, "flow on %s:%s = %s",
        GST_DEBUG_PAD_NAME (pad), gst_flow_get_name (*flow));
    return FALSE;
  }

  g_memmove (GST_BUFFER_DATA (buffer), block, count);

  /* "wvpk" magic marks a real Wavpack block with a parsable header */
  if (count > sizeof (WavpackHeader) && memcmp (block, "wvpk", 4) == 0) {
    /* if it's a Wavpack block set buffer timestamp and duration, etc */
    WavpackHeader wph;

    GST_LOG_OBJECT (enc, "got %d bytes of encoded wavpack %sdata",
        count, (wid->correction) ? "correction " : "");

    gst_wavpack_read_header (&wph, block);

    /* Only set when pushing the first buffer again, in that case
     * we don't want to delay the buffer or push newsegment events */
    if (!wid->passthrough) {
      /* Only push complete blocks */
      if (enc->pending_buffer == NULL) {
        /* start accumulating a new block; ownership moves to enc */
        enc->pending_buffer = buffer;
        enc->pending_offset = wph.block_index;
      } else if (enc->pending_offset == wph.block_index) {
        /* continuation of the same block: join (consumes both refs) */
        enc->pending_buffer = gst_buffer_join (enc->pending_buffer, buffer);
      } else {
        /* block index jumped: previous block never completed, drop it */
        GST_ERROR ("Got incomplete block, dropping");
        gst_buffer_unref (enc->pending_buffer);
        enc->pending_buffer = buffer;
        enc->pending_offset = wph.block_index;
      }

      /* keep accumulating until the final sub-block arrives */
      if (!(wph.flags & FINAL_BLOCK))
        return TRUE;

      /* take back the completed block for pushing below */
      buffer = enc->pending_buffer;
      enc->pending_buffer = NULL;
      enc->pending_offset = 0;

      /* if it's the first wavpack block, send a NEW_SEGMENT event */
      if (wph.block_index == 0) {
        gst_pad_push_event (pad,
            gst_event_new_new_segment (FALSE,
                1.0, GST_FORMAT_TIME, 0, GST_BUFFER_OFFSET_NONE, 0));

        /* save header for later reference, so we can re-send it later on
         * EOS with fixed up values for total sample count etc. */
        if (enc->first_block == NULL && !wid->correction) {
          enc->first_block =
              g_memdup (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
          enc->first_block_size = GST_BUFFER_SIZE (buffer);
        }
      }
    }

    /* set buffer timestamp, duration, offset, offset_end from
     * the wavpack header */
    GST_BUFFER_TIMESTAMP (buffer) =
        gst_util_uint64_scale_int (GST_SECOND, wph.block_index,
        enc->samplerate);
    GST_BUFFER_DURATION (buffer) =
        gst_util_uint64_scale_int (GST_SECOND, wph.block_samples,
        enc->samplerate);
    GST_BUFFER_OFFSET (buffer) = wph.block_index;
    GST_BUFFER_OFFSET_END (buffer) = wph.block_index + wph.block_samples;
  } else {
    /* if it's something else set no timestamp and duration on the buffer */
    GST_DEBUG_OBJECT (enc, "got %d bytes of unknown data", count);

    GST_BUFFER_TIMESTAMP (buffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_DURATION (buffer) = GST_CLOCK_TIME_NONE;
  }

  /* push the buffer and forward errors */
  GST_DEBUG_OBJECT (enc, "pushing buffer with %d bytes",
      GST_BUFFER_SIZE (buffer));
  *flow = gst_pad_push (pad, buffer);

  if (*flow != GST_FLOW_OK) {
    GST_WARNING_OBJECT (enc, "flow on %s:%s = %s",
        GST_DEBUG_PAD_NAME (pad), gst_flow_get_name (*flow));
    return FALSE;
  }

  return TRUE;
}
/* Sink pad chain function: feeds incoming data into the adapter and drives
 * the demuxer state machine (INITIAL -> NEXT_CHUNK -> MOVIE/SKIP) as long
 * as mve->needed_bytes are available. Decoded buffers are pushed on the
 * owning stream's pad; NOT_LINKED from one stream is tolerated as long as
 * the other stream is still linked. */
static GstFlowReturn
gst_mve_demux_chain (GstPad * sinkpad, GstBuffer * inbuf)
{
  GstMveDemux *mve = GST_MVE_DEMUX (GST_PAD_PARENT (sinkpad));
  GstFlowReturn ret = GST_FLOW_OK;

  gst_adapter_push (mve->adapter, inbuf);

  GST_DEBUG_OBJECT (mve, "queuing buffer, needed:%d, available:%u",
      mve->needed_bytes, gst_adapter_available (mve->adapter));

  while ((gst_adapter_available (mve->adapter) >= mve->needed_bytes) &&
      (ret == GST_FLOW_OK)) {
    GstMveDemuxStream *stream = NULL;
    GstBuffer *outbuf = NULL;

    switch (mve->state) {
      case MVEDEMUX_STATE_INITIAL:
        /* skip the file preamble, then start reading 4-byte headers */
        gst_adapter_flush (mve->adapter, mve->needed_bytes);
        mve->chunk_offset += mve->needed_bytes;
        mve->needed_bytes = 4;
        mve->state = MVEDEMUX_STATE_NEXT_CHUNK;
        break;

      case MVEDEMUX_STATE_NEXT_CHUNK:{
        const guint8 *data;
        guint16 size;

        data = gst_adapter_peek (mve->adapter, mve->needed_bytes);
        size = GST_MVE_SEGMENT_SIZE (data);

        if (mve->chunk_offset >= mve->chunk_size) {
          /* new chunk, flush buffer and proceed with next segment */
          guint16 chunk_type = GST_READ_UINT16_LE (data + 2);

          gst_adapter_flush (mve->adapter, mve->needed_bytes);
          mve->chunk_size = size;
          mve->chunk_offset = 0;

          if (chunk_type > MVE_CHUNK_END) {
            GST_WARNING_OBJECT (mve,
                "skipping unknown chunk type 0x%02x of size:%u", chunk_type,
                size);
            /* request header + payload so SKIP can flush it all at once */
            mve->needed_bytes += size;
            mve->state = MVEDEMUX_STATE_SKIP;
          } else {
            GST_DEBUG_OBJECT (mve, "found new chunk type 0x%02x of size:%u",
                chunk_type, size);
          }
        } else if (mve->chunk_offset <= mve->chunk_size) {
          /* new segment */
          /* NOTE(review): this condition is always true after the >= test
           * above failed; looks like it was meant to be a plain else —
           * confirm against upstream before simplifying */
          GST_DEBUG_OBJECT (mve, "found segment type 0x%02x of size:%u",
              GST_MVE_SEGMENT_TYPE (data), size);
          mve->needed_bytes += size;
          mve->state = MVEDEMUX_STATE_MOVIE;
        }
      }
        break;

      case MVEDEMUX_STATE_MOVIE:
        ret = gst_mve_parse_segment (mve, &stream, &outbuf);

        if ((ret == GST_FLOW_OK) && (outbuf != NULL)) {
          /* send buffer */
          GST_DEBUG_OBJECT (mve,
              "pushing buffer with time %" GST_TIME_FORMAT
              " (%u bytes) on pad %s",
              GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
              GST_BUFFER_SIZE (outbuf), GST_PAD_NAME (stream->pad));

          ret = gst_pad_push (stream->pad, outbuf);
          stream->last_flow = ret;
        }

        /* one unlinked pad is fine while the other stream still flows */
        if (ret == GST_FLOW_NOT_LINKED) {
          if (mve->audio_stream
              && mve->audio_stream->last_flow != GST_FLOW_NOT_LINKED)
            ret = GST_FLOW_OK;
          if (mve->video_stream
              && mve->video_stream->last_flow != GST_FLOW_NOT_LINKED)
            ret = GST_FLOW_OK;
        }

        /* update current offset */
        mve->chunk_offset += mve->needed_bytes;

        mve->state = MVEDEMUX_STATE_NEXT_CHUNK;
        mve->needed_bytes = 4;
        break;

      case MVEDEMUX_STATE_SKIP:
        /* discard an unknown chunk's header and payload in one flush */
        mve->chunk_offset += mve->needed_bytes;
        gst_adapter_flush (mve->adapter, mve->needed_bytes);
        mve->state = MVEDEMUX_STATE_NEXT_CHUNK;
        mve->needed_bytes = 4;
        break;

      default:
        GST_ERROR_OBJECT (mve, "invalid state: %d", mve->state);
        break;
    }
  }

  return ret;
}
/* GstVideoDecoder::set_format implementation.
 * Stores a reference to the new input state, and if caps carry codec_data
 * (a sequence of 16-bit big-endian length-prefixed Theora header packets)
 * feeds each header straight into the decoder. Decode errors on headers
 * are deliberately ignored. Always accepts the caps and returns TRUE. */
static gboolean
theora_dec_set_format (GstVideoDecoder * bdec, GstVideoCodecState * state)
{
  GstTheoraDec *dec = GST_THEORA_DEC (bdec);

  /* Keep a copy of the input state */
  if (dec->input_state)
    gst_video_codec_state_unref (dec->input_state);
  dec->input_state = gst_video_codec_state_ref (state);

  /* FIXME : Interesting, we always accept any kind of caps ? */
  if (state->codec_data) {
    GstBuffer *codec_buf = state->codec_data;
    GstMapInfo map;
    guint pos = 0;
    guint remaining;

    gst_buffer_map (codec_buf, &map, GST_MAP_READ);
    remaining = map.size;

    /* walk the packed headers: 2-byte length prefix, then packet bytes */
    while (remaining > 2) {
      const guint8 *p = (const guint8 *) map.data + pos;
      guint hdr_len = (p[0] << 8) | p[1];
      GstBuffer *hdr;

      /* consume the length prefix */
      pos += 2;
      remaining -= 2;

      /* clamp so a bogus length can't run past the end of codec_data */
      if (hdr_len > remaining)
        hdr_len = remaining;

      hdr = gst_buffer_copy_region (codec_buf, GST_BUFFER_COPY_ALL,
          pos, hdr_len);

      /* the very first header packet is flagged as a discont */
      if (pos == 2)
        GST_BUFFER_FLAG_SET (hdr, GST_BUFFER_FLAG_DISCONT);

      /* feed it to the decoder; errors on headers are ignored here */
      theora_dec_decode_buffer (dec, hdr, NULL);
      gst_buffer_unref (hdr);

      /* advance past this packet's payload */
      pos += hdr_len;
      remaining -= hdr_len;
    }

    gst_buffer_unmap (codec_buf, &map);
  }

  GST_DEBUG_OBJECT (dec, "Done");

  return TRUE;
}
/* GstBaseTransform::transform_ip for videorate: the core rate-adaptation
 * loop. Keeps the previous buffer cached and, for each incoming buffer,
 * decides how many times to output the cached one (duplicating) or whether
 * to drop it, so that output timestamps land on the configured rate grid.
 * Also handles the MT-safe switch into/out of "average period" mode.
 * Returns GST_BASE_TRANSFORM_FLOW_DROPPED for consumed-but-not-forwarded
 * input, or an error/flush flow return. */
static GstFlowReturn
gst_video_rate_transform_ip (GstBaseTransform * trans, GstBuffer * buffer)
{
  GstVideoRate *videorate;
  GstFlowReturn res = GST_BASE_TRANSFORM_FLOW_DROPPED;
  GstClockTime intime, in_ts, in_dur;
  GstClockTime avg_period;
  gboolean skip = FALSE;

  videorate = GST_VIDEO_RATE (trans);

  /* make sure the denominators are not 0 */
  if (videorate->from_rate_denominator == 0 ||
      videorate->to_rate_denominator == 0)
    goto not_negotiated;

  GST_OBJECT_LOCK (videorate);
  avg_period = videorate->average_period_set;
  GST_OBJECT_UNLOCK (videorate);

  /* MT-safe switching between modes */
  if (G_UNLIKELY (avg_period != videorate->average_period)) {
    gboolean switch_mode = (avg_period == 0 || videorate->average_period == 0);
    videorate->average_period = avg_period;
    videorate->last_ts = GST_CLOCK_TIME_NONE;

    if (switch_mode) {
      if (avg_period) {
        /* enabling average mode */
        videorate->average = 0;
        /* make sure no cached buffers from regular mode are left */
        gst_video_rate_swap_prev (videorate, NULL, 0);
      } else {
        /* enable regular mode */
        videorate->next_ts = GST_CLOCK_TIME_NONE;
        skip = TRUE;
      }

      /* max averaging mode has a no latency, normal mode does */
      gst_element_post_message (GST_ELEMENT (videorate),
          gst_message_new_latency (GST_OBJECT (videorate)));
    }
  }

  /* average mode short-circuits the grid-based logic below */
  if (videorate->average_period > 0)
    return gst_video_rate_trans_ip_max_avg (videorate, buffer);

  in_ts = GST_BUFFER_TIMESTAMP (buffer);
  in_dur = GST_BUFFER_DURATION (buffer);

  /* untimestamped buffer: fall back to the predicted timestamp */
  if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE)) {
    in_ts = videorate->last_ts;
    if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE))
      goto invalid_buffer;
  }

  /* get the time of the next expected buffer timestamp, we use this when the
   * next buffer has -1 as a timestamp */
  videorate->last_ts = in_ts;
  if (in_dur != GST_CLOCK_TIME_NONE)
    videorate->last_ts += in_dur;

  GST_DEBUG_OBJECT (videorate, "got buffer with timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (in_ts));

  /* the input time is the time in the segment + all previously accumulated
   * segments */
  intime = in_ts + videorate->segment.base;

  /* we need to have two buffers to compare */
  if (videorate->prevbuf == NULL) {
    /* first buffer after (re)start: just cache it */
    gst_video_rate_swap_prev (videorate, buffer, intime);
    videorate->in++;
    if (!GST_CLOCK_TIME_IS_VALID (videorate->next_ts)) {
      /* new buffer, we expect to output a buffer that matches the first
       * timestamp in the segment */
      if (videorate->skip_to_first || skip) {
        videorate->next_ts = intime;
        videorate->base_ts = in_ts - videorate->segment.start;
        videorate->out_frame_count = 0;
      } else {
        videorate->next_ts =
            videorate->segment.start + videorate->segment.base;
      }
    }
  } else {
    GstClockTime prevtime;
    gint count = 0;
    gint64 diff1, diff2;

    prevtime = videorate->prev_ts;

    GST_LOG_OBJECT (videorate,
        "BEGINNING prev buf %" GST_TIME_FORMAT " new buf %" GST_TIME_FORMAT
        " outgoing ts %" GST_TIME_FORMAT, GST_TIME_ARGS (prevtime),
        GST_TIME_ARGS (intime), GST_TIME_ARGS (videorate->next_ts));

    videorate->in++;

    /* drop new buffer if it's before previous one */
    if (intime < prevtime) {
      GST_DEBUG_OBJECT (videorate,
          "The new buffer (%" GST_TIME_FORMAT
          ") is before the previous buffer (%"
          GST_TIME_FORMAT "). Dropping new buffer.",
          GST_TIME_ARGS (intime), GST_TIME_ARGS (prevtime));
      videorate->drop++;
      if (!videorate->silent)
        gst_video_rate_notify_drop (videorate);
      goto done;
    }

    /* got 2 buffers, see which one is the best */
    do {
      /* distance of each buffer from the next output slot on the grid */
      diff1 = prevtime - videorate->next_ts;
      diff2 = intime - videorate->next_ts;

      /* take absolute values, beware: abs and ABS don't work for gint64 */
      if (diff1 < 0)
        diff1 = -diff1;
      if (diff2 < 0)
        diff2 = -diff2;

      GST_LOG_OBJECT (videorate,
          "diff with prev %" GST_TIME_FORMAT " diff with new %"
          GST_TIME_FORMAT " outgoing ts %" GST_TIME_FORMAT,
          GST_TIME_ARGS (diff1), GST_TIME_ARGS (diff2),
          GST_TIME_ARGS (videorate->next_ts));

      /* output first one when its the best */
      if (diff1 <= diff2) {
        GstFlowReturn r;
        count++;

        /* on error the _flush function posted a warning already */
        if ((r = gst_video_rate_flush_prev (videorate,
                    count > 1)) != GST_FLOW_OK) {
          res = r;
          goto done;
        }
      }

      /* Do not produce any dups. We can exit loop now */
      if (videorate->drop_only)
        break;
      /* continue while the first one was the best, if they were equal avoid
       * going into an infinite loop */
    } while (diff1 < diff2);

    /* if we outputed the first buffer more then once, we have dups */
    if (count > 1) {
      videorate->dup += count - 1;
      if (!videorate->silent)
        gst_video_rate_notify_duplicate (videorate);
    }
    /* if we didn't output the first buffer, we have a drop */
    else if (count == 0) {
      videorate->drop++;

      if (!videorate->silent)
        gst_video_rate_notify_drop (videorate);

      GST_LOG_OBJECT (videorate,
          "new is best, old never used, drop, outgoing ts %"
          GST_TIME_FORMAT, GST_TIME_ARGS (videorate->next_ts));
    }

    GST_LOG_OBJECT (videorate,
        "END, putting new in old, diff1 %" GST_TIME_FORMAT
        ", diff2 %" GST_TIME_FORMAT ", next_ts %" GST_TIME_FORMAT
        ", in %" G_GUINT64_FORMAT ", out %" G_GUINT64_FORMAT ", drop %"
        G_GUINT64_FORMAT ", dup %" G_GUINT64_FORMAT, GST_TIME_ARGS (diff1),
        GST_TIME_ARGS (diff2), GST_TIME_ARGS (videorate->next_ts),
        videorate->in, videorate->out, videorate->drop, videorate->dup);

    /* swap in new one when it's the best */
    gst_video_rate_swap_prev (videorate, buffer, intime);
  }
done:
  return res;

  /* ERRORS */
not_negotiated:
  {
    GST_WARNING_OBJECT (videorate, "no framerate negotiated");
    res = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

invalid_buffer:
  {
    GST_WARNING_OBJECT (videorate,
        "Got buffer with GST_CLOCK_TIME_NONE timestamp, discarding it");
    res = GST_BASE_TRANSFORM_FLOW_DROPPED;
    goto done;
  }
}