/* Run one iteration of the camera-calibration state machine on a single frame.
 *
 * While in CAPTURING mode this looks for the configured calibration pattern
 * in the frame, accumulates detected point sets (rate-limited by
 * calib->delay), and once calib->nrFrames samples are collected it runs the
 * calibration and broadcasts the result as custom events both upstream and
 * downstream.  In every mode it overlays a status string onto the frame.
 *
 * @param calib  the calibration element holding all state (mode, settings,
 *               accumulated image points, ...)
 * @param img    the input frame; modified in place (corner overlay, blink
 *               inversion, status text).  NOTE(review): the text rendering
 *               below assumes a BGR-ordered 8-bit image — confirm against
 *               the element's caps negotiation.
 */
void camera_calibrate_run(GstCameraCalibrate *calib, IplImage *img)
{
  /* wrap the IplImage without copying pixel data */
  cv::Mat view = cv::cvarrToMat(img);

  // For camera only take new samples after delay time
  if (calib->mode == CAPTURING) {
    // get_input
    cv::Size imageSize = view.size();

    /* find_pattern
     * FIXME find ways to reduce CPU usage
     * don't do it on all frames ? will it help ? corner display will be affected.
     * in a separate frame?
     * in a separate element that gets composited back into the main stream
     * (video is tee-d into it and can then be decimated, scaled, etc..)
     */
    std::vector<cv::Point2f> pointBuf;
    bool found;
    int chessBoardFlags =
        cv::CALIB_CB_ADAPTIVE_THRESH | cv::CALIB_CB_NORMALIZE_IMAGE;
    if (!calib->useFisheye) {
      /* fast check erroneously fails with high distortions like fisheye */
      chessBoardFlags |= cv::CALIB_CB_FAST_CHECK;
    }

    /* Find feature points on the input format */
    switch (calib->calibrationPattern) {
      case GST_CAMERA_CALIBRATION_PATTERN_CHESSBOARD:
        found = cv::findChessboardCorners(view, calib->boardSize, pointBuf,
            chessBoardFlags);
        break;
      case GST_CAMERA_CALIBRATION_PATTERN_CIRCLES_GRID:
        found = cv::findCirclesGrid(view, calib->boardSize, pointBuf);
        break;
      case GST_CAMERA_CALIBRATION_PATTERN_ASYMMETRIC_CIRCLES_GRID:
        found = cv::findCirclesGrid(view, calib->boardSize, pointBuf,
            cv::CALIB_CB_ASYMMETRIC_GRID);
        break;
      default:
        /* unknown pattern setting: treat as "nothing detected" */
        found = FALSE;
        break;
    }

    /* set to true only when a new sample is actually accepted, so the
     * frame can be inverted as visual feedback */
    bool blinkOutput = FALSE;
    if (found) {
      /* improve the found corners' coordinate accuracy for chessboard */
      if (calib->calibrationPattern == GST_CAMERA_CALIBRATION_PATTERN_CHESSBOARD
          && calib->cornerSubPix) {
        /* FIXME findChessboardCorners and alike do a cv::COLOR_BGR2GRAY (and a
         * histogram balance) the color convert should be done once (if needed)
         * and shared
         * FIXME keep viewGray around to avoid reallocating it each time... */
        cv::Mat viewGray;
        cv::cvtColor(view, viewGray, cv::COLOR_BGR2GRAY);
        cv::cornerSubPix(viewGray, pointBuf, cv::Size(11, 11),
            cv::Size(-1, -1),
            cv::TermCriteria(cv::TermCriteria::EPS + cv::TermCriteria::COUNT,
                30, 0.1));
      }

      /* take new samples after delay time (calib->delay is in milliseconds,
       * measured against the process clock) */
      if ((calib->mode == CAPTURING) &&
          ((clock() - calib->prevTimestamp) >
              calib->delay * 1e-3 * CLOCKS_PER_SEC)) {
        calib->imagePoints.push_back(pointBuf);
        calib->prevTimestamp = clock();
        blinkOutput = true;
      }

      /* draw the corners */
      if (calib->showCorners) {
        cv::drawChessboardCorners(view, calib->boardSize, cv::Mat(pointBuf),
            found);
      }
    }

    /* if got enough frames then stop calibration and show result */
    if (calib->mode == CAPTURING &&
        calib->imagePoints.size() >= (size_t) calib->nrFrames) {
      if (camera_calibrate_calibrate(calib, imageSize, calib->cameraMatrix,
              calib->distCoeffs, calib->imagePoints)) {
        calib->mode = CALIBRATED;

        GstPad *sink_pad = GST_BASE_TRANSFORM_SINK_PAD (calib);
        GstPad *src_pad = GST_BASE_TRANSFORM_SRC_PAD (calib);
        GstEvent *sink_event;
        GstEvent *src_event;

        /* set settings property */
        g_free (calib->settings);
        calib->settings =
            camera_serialize_undistort_settings(calib->cameraMatrix,
                calib->distCoeffs);

        /* create calibrated event and send upstream and downstream */
        sink_event = gst_camera_event_new_calibrated (calib->settings);
        GST_LOG_OBJECT (sink_pad, "Sending upstream event %s.",
            GST_EVENT_TYPE_NAME (sink_event));
        if (!gst_pad_push_event (sink_pad, sink_event)) {
          GST_WARNING_OBJECT (sink_pad, "Sending upstream event %p (%s) failed.",
              sink_event, GST_EVENT_TYPE_NAME (sink_event));
        }

        src_event = gst_camera_event_new_calibrated (calib->settings);
        GST_LOG_OBJECT (src_pad, "Sending downstream event %s.",
            GST_EVENT_TYPE_NAME (src_event));
        if (!gst_pad_push_event (src_pad, src_event)) {
          GST_WARNING_OBJECT (src_pad,
              "Sending downstream event %p (%s) failed.", src_event,
              GST_EVENT_TYPE_NAME (src_event));
        }
      } else {
        /* failed to calibrate, go back to detection mode */
        calib->mode = DETECTION;
      }
    }

    /* invert the frame for one iteration as "sample taken" feedback */
    if (calib->mode == CAPTURING && blinkOutput) {
      bitwise_not(view, view);
    }
  }

  /* output text */
  /* FIXME ll additional rendering (text, corners, ...) should be done with
   * cairo or another gst framework.
   * this will relax the conditions on the input format (RBG only at the moment).
   * the calibration itself accepts more formats... */
  /* "100/100" is only a sizing placeholder; the real capture count is
   * formatted below after the text box has been measured */
  std::string msg = (calib->mode == CAPTURING) ? "100/100" :
      (calib->mode == CALIBRATED) ? "Calibrated" : "Waiting...";
  int baseLine = 0;
  cv::Size textSize = cv::getTextSize(msg, 1, 1, 1, &baseLine);
  cv::Point textOrigin(view.cols - 2 * textSize.width - 10,
      view.rows - 2 * baseLine - 10);

  if (calib->mode == CAPTURING) {
    msg = cv::format("%d/%d", (int)calib->imagePoints.size(), calib->nrFrames);
  }

  const cv::Scalar RED(0,0,255);
  const cv::Scalar GREEN(0,255,0);
  cv::putText(view, msg, textOrigin, 1, 1,
      calib->mode == CALIBRATED ? GREEN : RED);
}
static gboolean gst_raw_parse_sink_event (GstPad * pad, GstEvent * event) { GstRawParse *rp = GST_RAW_PARSE (gst_pad_get_parent (pad)); gboolean ret; switch (GST_EVENT_TYPE (event)) { case GST_EVENT_EOS: case GST_EVENT_FLUSH_STOP: /* Only happens in push mode */ gst_raw_parse_reset (rp); ret = gst_pad_push_event (rp->srcpad, event); break; case GST_EVENT_NEWSEGMENT: { GstClockTimeDiff start, stop, time; gdouble rate, arate; gboolean update; GstFormat format; /* Only happens in push mode */ gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format, &start, &stop, &time); if (format == GST_FORMAT_TIME) { gst_segment_set_newsegment_full (&rp->segment, update, rate, arate, GST_FORMAT_TIME, start, stop, time); ret = gst_pad_push_event (rp->srcpad, event); } else { gst_event_unref (event); ret = gst_raw_parse_convert (rp, format, start, GST_FORMAT_TIME, &start); ret &= gst_raw_parse_convert (rp, format, time, GST_FORMAT_TIME, &time); ret &= gst_raw_parse_convert (rp, format, stop, GST_FORMAT_TIME, &stop); if (!ret) { GST_ERROR_OBJECT (rp, "Failed converting to GST_FORMAT_TIME format (%d)", format); break; } gst_segment_set_newsegment_full (&rp->segment, update, rate, arate, GST_FORMAT_TIME, start, stop, time); /* create new segment with the fields converted to time */ event = gst_event_new_new_segment_full (update, rate, arate, GST_FORMAT_TIME, start, stop, time); ret = gst_pad_push_event (rp->srcpad, event); } break; } default: ret = gst_pad_event_default (rp->sinkpad, event); break; } gst_object_unref (rp); return ret; }
/* Negotiate and apply caps for an RTP mux sink pad.
 *
 * Rejects non-fixed caps.  If the downstream peer advertises an "ssrc" in
 * caps compatible with this pad's template, that SSRC is adopted.  The pad's
 * "timestamp-offset" (if present) is recorded in the pad's private data.
 * Finally a copy of the caps, augmented with the mux's timestamp-offset,
 * seqnum-offset and ssrc, is set on the source pad (preceded by a
 * stream-start event the first time).
 *
 * Fix: removed a stray empty statement ("GstCaps *tcaps, *othercaps;;").
 *
 * @param pad      the sink pad whose caps are being set
 * @param rtp_mux  the muxer element
 * @param caps     proposed (fixed) caps; not consumed by this function
 * @return TRUE if the caps were accepted and set on the source pad
 */
static gboolean
gst_rtp_mux_setcaps (GstPad * pad, GstRTPMux * rtp_mux, GstCaps * caps)
{
  GstStructure *structure;
  gboolean ret = FALSE;
  GstRTPMuxPadPrivate *padpriv;
  GstCaps *peercaps;

  if (!gst_caps_is_fixed (caps))
    return FALSE;

  peercaps = gst_pad_peer_query_caps (rtp_mux->srcpad, NULL);
  if (peercaps) {
    GstCaps *tcaps, *othercaps;

    tcaps = gst_pad_get_pad_template_caps (pad);
    othercaps = gst_caps_intersect_full (peercaps, tcaps,
        GST_CAPS_INTERSECT_FIRST);

    if (gst_caps_get_size (othercaps) > 0) {
      structure = gst_caps_get_structure (othercaps, 0);
      GST_OBJECT_LOCK (rtp_mux);
      if (gst_structure_get_uint (structure, "ssrc", &rtp_mux->current_ssrc)) {
        GST_DEBUG_OBJECT (pad, "Use downstream ssrc: %x",
            rtp_mux->current_ssrc);
        rtp_mux->have_ssrc = TRUE;
      }
      GST_OBJECT_UNLOCK (rtp_mux);
    }

    gst_caps_unref (othercaps);
    gst_caps_unref (peercaps);
    gst_caps_unref (tcaps);
  }

  structure = gst_caps_get_structure (caps, 0);
  if (!structure)
    return FALSE;

  GST_OBJECT_LOCK (rtp_mux);
  padpriv = gst_pad_get_element_private (pad);
  if (padpriv &&
      gst_structure_get_uint (structure, "timestamp-offset",
          &padpriv->timestamp_offset)) {
    padpriv->have_timestamp_offset = TRUE;
  }

  /* work on a copy so the caller's caps stay untouched */
  caps = gst_caps_copy (caps);

  /* if we don't have a specified ssrc, first try to take one from the caps,
     and if that fails, generate one */
  if (!rtp_mux->have_ssrc) {
    if (rtp_mux->ssrc_random) {
      if (!gst_structure_get_uint (structure, "ssrc", &rtp_mux->current_ssrc))
        rtp_mux->current_ssrc = g_random_int ();
      rtp_mux->have_ssrc = TRUE;
    }
  }

  gst_caps_set_simple (caps,
      "timestamp-offset", G_TYPE_UINT, rtp_mux->ts_base,
      "seqnum-offset", G_TYPE_UINT, rtp_mux->seqnum_base,
      "ssrc", G_TYPE_UINT, rtp_mux->current_ssrc, NULL);

  GST_OBJECT_UNLOCK (rtp_mux);

  if (rtp_mux->send_stream_start) {
    gchar s_id[32];

    /* stream-start (FIXME: create id based on input ids) */
    g_snprintf (s_id, sizeof (s_id), "interleave-%08x", g_random_int ());
    gst_pad_push_event (rtp_mux->srcpad, gst_event_new_stream_start (s_id));

    rtp_mux->send_stream_start = FALSE;
  }

  GST_DEBUG_OBJECT (rtp_mux,
      "setting caps %" GST_PTR_FORMAT " on src pad..", caps);
  ret = gst_pad_set_caps (rtp_mux->srcpad, caps);

  gst_caps_unref (caps);

  return ret;
}
/* Handle a seek event on the musepack decoder.
 *
 * Accepts TIME or DEFAULT (sample) format seeks; TIME positions are first
 * converted to samples.  For flushing seeks a flush-start is pushed
 * downstream before taking the stream lock; otherwise the sink-pad task is
 * paused.  The seek is applied to a copy of the segment, the libmpc decoder
 * is repositioned, and on success the segment is committed, a newsegment is
 * sent and the streaming task restarted.
 *
 * Returns TRUE on success, FALSE if the format is unsupported or the seek
 * target is out of bounds / rejected by libmpc.
 */
static gboolean
gst_musepackdec_handle_seek_event (GstMusepackDec * dec, GstEvent * event)
{
  GstSeekType start_type, stop_type;
  GstSeekFlags flags;
  GstSegment segment;
  GstFormat format;
  gboolean flush;
  gdouble rate;
  gint64 start, stop;
  gint samplerate;

  gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
      &stop_type, &stop);

  if (format != GST_FORMAT_TIME && format != GST_FORMAT_DEFAULT) {
    GST_DEBUG_OBJECT (dec, "seek failed: only TIME or DEFAULT format allowed");
    return FALSE;
  }

  samplerate = g_atomic_int_get (&dec->rate);

  /* convert TIME targets to sample offsets */
  if (format == GST_FORMAT_TIME) {
    if (start_type != GST_SEEK_TYPE_NONE)
      start = gst_util_uint64_scale_int (start, samplerate, GST_SECOND);
    if (stop_type != GST_SEEK_TYPE_NONE)
      stop = gst_util_uint64_scale_int (stop, samplerate, GST_SECOND);
  }

  flush = ((flags & GST_SEEK_FLAG_FLUSH) == GST_SEEK_FLAG_FLUSH);

  if (flush)
    gst_pad_push_event (dec->srcpad, gst_event_new_flush_start ());
  else
    gst_pad_pause_task (dec->sinkpad);  /* not _stop_task()? */

  GST_PAD_STREAM_LOCK (dec->sinkpad);

  /* operate on segment copy until we know the seek worked */
  segment = dec->segment;

  gst_segment_set_seek (&segment, rate, GST_FORMAT_DEFAULT,
      flags, start_type, start, stop_type, stop, NULL);

  /* NOTE(review): flush-stop is pushed on the *sink* pad (upstream) here,
   * unconditionally — confirm this matches the intended flushing protocol */
  gst_pad_push_event (dec->sinkpad, gst_event_new_flush_stop ());

  GST_DEBUG_OBJECT (dec, "segment: [%" G_GINT64_FORMAT "-%" G_GINT64_FORMAT
      "] = [%" GST_TIME_FORMAT "-%" GST_TIME_FORMAT "]",
      segment.start, segment.stop,
      GST_TIME_ARGS (segment.start * GST_SECOND / dec->rate),
      GST_TIME_ARGS (segment.stop * GST_SECOND / dec->rate));

  GST_DEBUG_OBJECT (dec, "performing seek to sample %" G_GINT64_FORMAT,
      segment.start);

  if (segment.start < 0 || segment.start >= segment.duration) {
    GST_WARNING_OBJECT (dec, "seek out of bounds");
    goto failed;
  }

  /* reposition the libmpc decoder (API differs between versions) */
#ifdef MPC_IS_OLD_API
  if (!mpc_decoder_seek_sample (dec->d, segment.start))
    goto failed;
#else
  if (mpc_demux_seek_sample (dec->d, segment.start) != MPC_STATUS_OK)
    goto failed;
#endif

  if ((flags & GST_SEEK_FLAG_SEGMENT) == GST_SEEK_FLAG_SEGMENT) {
    GST_DEBUG_OBJECT (dec, "posting SEGMENT_START message");
    gst_element_post_message (GST_ELEMENT (dec),
        gst_message_new_segment_start (GST_OBJECT (dec), GST_FORMAT_TIME,
            gst_util_uint64_scale_int (segment.start, GST_SECOND, dec->rate)));
  }

  if (flush) {
    gst_pad_push_event (dec->srcpad, gst_event_new_flush_stop ());
  }

  /* commit the segment only now that the decoder seek succeeded */
  gst_segment_set_last_stop (&segment, GST_FORMAT_DEFAULT, segment.start);
  dec->segment = segment;
  gst_musepackdec_send_newsegment (dec);

  GST_DEBUG_OBJECT (dec, "seek successful");

  gst_pad_start_task (dec->sinkpad,
      (GstTaskFunction) gst_musepackdec_loop, dec->sinkpad, NULL);

  GST_PAD_STREAM_UNLOCK (dec->sinkpad);

  return TRUE;

failed:
  {
    GST_WARNING_OBJECT (dec, "seek failed");
    GST_PAD_STREAM_UNLOCK (dec->sinkpad);
    return FALSE;
  }
}
/* Sink-pad event handler for the DVD audio munger.
 *
 * FLUSH_STOP resets internal state and is forwarded.  NEWSEGMENT (TIME only)
 * is stored in munge->sink_segment; if no audio has been seen yet and the
 * accumulated gap exceeds AUDIO_FILL_THRESHOLD (or we are in a still frame),
 * a short silence buffer is generated so downstream can preroll.  DVD custom
 * events are inspected before forwarding.  Everything else is forwarded
 * downstream unchanged.
 *
 * Fix: gst_pad_get_parent() returns a reference to the element; previously
 * it was only released on the wrong-format error path, leaking one ref per
 * event on every normal return.  The reference is now released on all paths.
 *
 * Returns the result of forwarding the event (FALSE for rejected segments).
 */
static gboolean
rsn_audiomunge_sink_event (GstPad * pad, GstEvent * event)
{
  gboolean ret = FALSE;
  RsnAudioMunge *munge = RSN_AUDIOMUNGE (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_STOP:
      rsn_audiomunge_reset (munge);
      ret = gst_pad_push_event (munge->srcpad, event);
      break;
    case GST_EVENT_NEWSEGMENT:
    {
      GstSegment *segment;
      gboolean update;
      GstFormat format;
      gdouble rate, arate;
      gint64 start, stop, time;

      gst_event_parse_new_segment_full (event, &update, &rate, &arate,
          &format, &start, &stop, &time);

      /* we need TIME format */
      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;

      /* now configure the values */
      segment = &munge->sink_segment;

      gst_segment_set_newsegment_full (segment, update,
          rate, arate, format, start, stop, time);

      /* once real audio has been seen, just pass segments through */
      if (munge->have_audio) {
        ret = gst_pad_push_event (munge->srcpad, event);
        break;
      }

      /*
       * FIXME:
       * If the accum >= threshold or we're in a still frame and there's been
       * no audio received, then we need to generate some audio data.
       * If caused by a segment start update (time advancing in a gap) adjust
       * the new-segment and send the buffer.
       *
       * Otherwise, send the buffer before the newsegment, so that it appears
       * in the closing segment.
       */
      if (!update) {
        GST_DEBUG_OBJECT (munge,
            "Sending newsegment: start %" GST_TIME_FORMAT
            " stop %" GST_TIME_FORMAT " accum now %" GST_TIME_FORMAT,
            GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
            GST_TIME_ARGS (segment->accum));
        ret = gst_pad_push_event (munge->srcpad, event);
      }

      if (segment->accum >= AUDIO_FILL_THRESHOLD || munge->in_still) {
        g_print ("*********** Sending audio fill: accum = %" GST_TIME_FORMAT
            " still-state=%d\n", GST_TIME_ARGS (segment->accum),
            munge->in_still);

        /* Just generate a 100ms silence buffer for now. FIXME: Fill the gap */
        if (rsn_audiomunge_make_audio (munge, segment->start,
                GST_SECOND / 10) == GST_FLOW_OK)
          munge->have_audio = TRUE;
      } else {
        GST_LOG_OBJECT (munge, "Not sending audio fill buffer: "
            "segment accum below thresh: accum = %" GST_TIME_FORMAT,
            GST_TIME_ARGS (segment->accum));
      }

      if (update) {
        GST_DEBUG_OBJECT (munge,
            "Sending newsegment: start %" GST_TIME_FORMAT
            " stop %" GST_TIME_FORMAT " accum now %" GST_TIME_FORMAT,
            GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
            GST_TIME_ARGS (segment->accum));
        ret = gst_pad_push_event (munge->srcpad, event);
      }
      break;
    }
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    {
      const GstStructure *s = gst_event_get_structure (event);

      if (s && gst_structure_has_name (s, "application/x-gst-dvd"))
        rsn_audiomunge_handle_dvd_event (munge, event);

      ret = gst_pad_push_event (munge->srcpad, event);
      break;
    }
    default:
      ret = gst_pad_push_event (munge->srcpad, event);
      break;
  }

  /* release the reference taken by gst_pad_get_parent() above;
   * previously leaked on every non-error return */
  gst_object_unref (munge);

  return ret;

newseg_wrong_format:
  GST_DEBUG_OBJECT (munge, "received non TIME newsegment");
  gst_event_unref (event);
  gst_object_unref (munge);
  return FALSE;
}
/* Chain function of the FLAC tag element: a state machine driven by an
 * adapter that accumulates incoming data.
 *
 * States: INIT (verify "fLaC" magic) -> METADATA_BLOCKS (parse a 4-byte
 * block header) -> VC_METADATA_BLOCK / WRITING_METADATA_BLOCK (capture a
 * vorbiscomment block or forward any other block) -> METADATA_NEXT_BLOCK
 * (emit parsed tags, decide whether more blocks follow) ->
 * ADD_VORBIS_COMMENT (build and push a new, final vorbiscomment block
 * merging stream and user tags) -> AUDIO_DATA.
 *
 * NOTE(review): this view is truncated — the "cleanup", "no_buffer",
 * "no_comment" and "comment_too_long" labels targeted by gotos below are
 * outside the visible span.
 */
static GstFlowReturn
gst_flac_tag_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstFlacTag *tag;
  GstFlowReturn ret;
  GstMapInfo map;
  gsize size;

  ret = GST_FLOW_OK;
  tag = GST_FLAC_TAG (parent);
  /* the adapter takes ownership of the incoming buffer */
  gst_adapter_push (tag->adapter, buffer);

  /* Initial state, we don't even know if we are dealing with a flac file */
  if (tag->state == GST_FLAC_TAG_STATE_INIT) {
    GstBuffer *id_buffer;

    /* NOTE(review): waits for sizeof(FLAC_MAGIC) bytes (includes the NUL
     * terminator) but then only takes FLAC_MAGIC_SIZE — confirm intentional */
    if (gst_adapter_available (tag->adapter) < sizeof (FLAC_MAGIC))
      goto cleanup;

    id_buffer = gst_adapter_take_buffer (tag->adapter, FLAC_MAGIC_SIZE);
    GST_DEBUG_OBJECT (tag, "looking for " FLAC_MAGIC " identifier");
    if (gst_buffer_memcmp (id_buffer, 0, FLAC_MAGIC, FLAC_MAGIC_SIZE) == 0) {
      GST_DEBUG_OBJECT (tag, "pushing " FLAC_MAGIC " identifier buffer");
      ret = gst_pad_push (tag->srcpad, id_buffer);
      if (ret != GST_FLOW_OK)
        goto cleanup;

      tag->state = GST_FLAC_TAG_STATE_METADATA_BLOCKS;
    } else {
      /* FIXME: does that work well with FLAC files containing ID3v2 tags ? */
      gst_buffer_unref (id_buffer);
      GST_ELEMENT_ERROR (tag, STREAM, WRONG_TYPE, (NULL), (NULL));
      ret = GST_FLOW_ERROR;
    }
  }

  /* The fLaC magic string has been skipped, try to detect the beginning
   * of a metadata block */
  if (tag->state == GST_FLAC_TAG_STATE_METADATA_BLOCKS) {
    guint type;
    gboolean is_last;
    const guint8 *block_header;

    g_assert (tag->metadata_block_size == 0);
    g_assert (tag->metadata_last_block == FALSE);

    /* The header of a flac metadata block is 4 bytes long:
     * 1st bit: indicates whether this is the last metadata info block
     * 7 next bits: 4 if vorbis comment block
     * 24 next bits: size of the metadata to follow (big endian) */
    if (gst_adapter_available (tag->adapter) < 4)
      goto cleanup;

    block_header = gst_adapter_map (tag->adapter, 4);

    is_last = ((block_header[0] & 0x80) == 0x80);
    type = block_header[0] & 0x7F;
    size = (block_header[1] << 16) | (block_header[2] << 8) | block_header[3];
    gst_adapter_unmap (tag->adapter);

    /* The 4 bytes long header isn't included in the metadata size */
    tag->metadata_block_size = size + 4;
    tag->metadata_last_block = is_last;

    GST_DEBUG_OBJECT (tag,
        "got metadata block: %" G_GSIZE_FORMAT " bytes, type %d, "
        "is vorbiscomment: %d, is last: %d",
        size, type, (type == 0x04), is_last);

    /* Metadata blocks of type 4 are vorbis comment blocks */
    if (type == 0x04) {
      tag->state = GST_FLAC_TAG_STATE_VC_METADATA_BLOCK;
    } else {
      tag->state = GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK;
    }
  }

  /* Reads a metadata block */
  if ((tag->state == GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK) ||
      (tag->state == GST_FLAC_TAG_STATE_VC_METADATA_BLOCK)) {
    GstBuffer *metadata_buffer;

    if (gst_adapter_available (tag->adapter) < tag->metadata_block_size)
      goto cleanup;

    metadata_buffer = gst_adapter_take_buffer (tag->adapter,
        tag->metadata_block_size);
    /* clear the is-last flag, as the last metadata block will
     * be the vorbis comment block which we will build ourselves. */
    gst_buffer_map (metadata_buffer, &map, GST_MAP_READWRITE);
    map.data[0] &= (~0x80);
    gst_buffer_unmap (metadata_buffer, &map);

    if (tag->state == GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK) {
      GST_DEBUG_OBJECT (tag, "pushing metadata block buffer");
      ret = gst_pad_push (tag->srcpad, metadata_buffer);
      if (ret != GST_FLOW_OK)
        goto cleanup;
    } else {
      /* vorbiscomment block: keep it for tag extraction below */
      tag->vorbiscomment = metadata_buffer;
    }
    tag->metadata_block_size = 0;
    tag->state = GST_FLAC_TAG_STATE_METADATA_NEXT_BLOCK;
  }

  /* This state is mainly used to be able to stop as soon as we read
   * a vorbiscomment block from the flac file if we are in an only output
   * tags mode */
  if (tag->state == GST_FLAC_TAG_STATE_METADATA_NEXT_BLOCK) {
    /* Check if in the previous iteration we read a vorbis comment metadata
     * block, and stop now if the user only wants to read tags */
    if (tag->vorbiscomment != NULL) {
      guint8 id_data[4];
      /* We found some tags, try to parse them and notify the other elements
       * that we encountered some tags */
      GST_DEBUG_OBJECT (tag, "emitting vorbiscomment tags");
      gst_buffer_extract (tag->vorbiscomment, 0, id_data, 4);
      tag->tags = gst_tag_list_from_vorbiscomment_buffer
          (tag->vorbiscomment, id_data, 4, NULL);
      if (tag->tags != NULL) {
        gst_pad_push_event (tag->srcpad,
            gst_event_new_tag (gst_tag_list_copy (tag->tags)));
      }

      gst_buffer_unref (tag->vorbiscomment);
      tag->vorbiscomment = NULL;
    }

    /* Skip to next state */
    if (tag->metadata_last_block == FALSE) {
      tag->state = GST_FLAC_TAG_STATE_METADATA_BLOCKS;
    } else {
      tag->state = GST_FLAC_TAG_STATE_ADD_VORBIS_COMMENT;
    }
  }

  /* Creates a vorbis comment block from the metadata which was set
   * on the gstreamer element, and add it to the flac stream */
  if (tag->state == GST_FLAC_TAG_STATE_ADD_VORBIS_COMMENT) {
    GstBuffer *buffer;
    const GstTagList *user_tags;
    GstTagList *merged_tags;

    /* merge the tag lists */
    user_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (tag));
    if (user_tags != NULL) {
      merged_tags = gst_tag_list_merge (user_tags, tag->tags,
          gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (tag)));
    } else {
      merged_tags = gst_tag_list_copy (tag->tags);
    }

    if (merged_tags == NULL) {
      /* If we get a NULL list of tags, we must generate a padding block
       * which is marked as the last metadata block, otherwise we'll
       * end up with a corrupted flac file. */
      GST_WARNING_OBJECT (tag, "No tags found");
      buffer = gst_buffer_new_and_alloc (12);
      if (buffer == NULL)
        goto no_buffer;

      gst_buffer_map (buffer, &map, GST_MAP_WRITE);
      memset (map.data, 0, map.size);
      map.data[0] = 0x81;       /* 0x80 = Last metadata block,
                                 * 0x01 = padding block */
      gst_buffer_unmap (buffer, &map);
    } else {
      guchar header[4];
      guint8 fbit[1];

      memset (header, 0, sizeof (header));
      header[0] = 0x84;         /* 0x80 = Last metadata block,
                                 * 0x04 = vorbiscomment block */
      buffer = gst_tag_list_to_vorbiscomment_buffer (merged_tags, header,
          sizeof (header), NULL);
      GST_DEBUG_OBJECT (tag, "Writing tags %" GST_PTR_FORMAT, merged_tags);
      gst_tag_list_free (merged_tags);
      if (buffer == NULL)
        goto no_comment;

      size = gst_buffer_get_size (buffer);
      if ((size < 4) || ((size - 4) > 0xFFFFFF))
        goto comment_too_long;

      fbit[0] = 1;
      /* Get rid of the framing bit at the end of the vorbiscomment buffer
       * if it exists since libFLAC seems to lose sync because of this
       * bit in gstflacdec */
      if (gst_buffer_memcmp (buffer, size - 1, fbit, 1) == 0) {
        buffer = gst_buffer_make_writable (buffer);
        gst_buffer_resize (buffer, 0, size - 1);
      }
    }

    /* The 4 byte metadata block header isn't accounted for in the total
     * size of the metadata block */
    gst_buffer_map (buffer, &map, GST_MAP_WRITE);
    map.data[1] = (((map.size - 4) & 0xFF0000) >> 16);
    map.data[2] = (((map.size - 4) & 0x00FF00) >> 8);
    map.data[3] = ((map.size - 4) & 0x0000FF);
    gst_buffer_unmap (buffer, &map);
    /* NOTE(review): map.size is read here after unmap — relies on the
     * GstMapInfo struct still holding the size; confirm acceptable */
    GST_DEBUG_OBJECT (tag, "pushing %" G_GSIZE_FORMAT " byte vorbiscomment "
        "buffer", map.size);

    ret = gst_pad_push (tag->srcpad, buffer);
    if (ret != GST_FLOW_OK) {
      goto cleanup;
    }
    tag->state = GST_FLAC_TAG_STATE_AUDIO_DATA;
  }
/* Perform a BYTES-format seek for the typefind element (pull/loop mode).
 *
 * Applies the seek to a scratch copy of the segment, runs the standard
 * flush (or pause) protocol, commits the new segment under the stream lock,
 * posts a segment-start message if requested, and restarts the loop task.
 *
 * Returns TRUE on success, FALSE for non-BYTES seek formats.
 */
static gboolean
gst_type_find_element_seek (GstTypeFindElement * typefind, GstEvent * event)
{
  GstSeekFlags flags;
  GstSeekType start_type, stop_type;
  GstFormat format;
  gboolean flush;
  gdouble rate;
  gint64 start, stop;
  GstSegment seeksegment = { 0, };

  gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
      &stop_type, &stop);

  /* we can only seek on bytes */
  if (format != GST_FORMAT_BYTES) {
    GST_DEBUG_OBJECT (typefind, "Can only seek on BYTES");
    return FALSE;
  }

  /* copy segment, we need this because we still need the old
   * segment when we close the current segment. */
  memcpy (&seeksegment, &typefind->segment, sizeof (GstSegment));

  GST_DEBUG_OBJECT (typefind, "configuring seek");
  gst_segment_do_seek (&seeksegment, rate, format, flags,
      start_type, start, stop_type, stop, NULL);

  /* normalize the flag bit to a gboolean */
  flush = ! !(flags & GST_SEEK_FLAG_FLUSH);

  GST_DEBUG_OBJECT (typefind, "New segment %" GST_SEGMENT_FORMAT,
      &seeksegment);

  if (flush) {
    GST_DEBUG_OBJECT (typefind, "Starting flush");
    gst_pad_push_event (typefind->sink, gst_event_new_flush_start ());
    gst_pad_push_event (typefind->src, gst_event_new_flush_start ());
  } else {
    GST_DEBUG_OBJECT (typefind, "Non-flushing seek, pausing task");
    gst_pad_pause_task (typefind->sink);
  }

  /* now grab the stream lock so that streaming cannot continue, for
   * non flushing seeks when the element is in PAUSED this could block
   * forever. */
  GST_DEBUG_OBJECT (typefind, "Waiting for streaming to stop");
  GST_PAD_STREAM_LOCK (typefind->sink);

  if (flush) {
    GST_DEBUG_OBJECT (typefind, "Stopping flush");
    gst_pad_push_event (typefind->sink, gst_event_new_flush_stop (TRUE));
    gst_pad_push_event (typefind->src, gst_event_new_flush_stop (TRUE));
  }

  /* now update the real segment info */
  GST_DEBUG_OBJECT (typefind, "Committing new seek segment");
  memcpy (&typefind->segment, &seeksegment, sizeof (GstSegment));
  typefind->offset = typefind->segment.start;

  /* notify start of new segment */
  if (typefind->segment.flags & GST_SEGMENT_FLAG_SEGMENT) {
    GstMessage *msg;

    msg = gst_message_new_segment_start (GST_OBJECT (typefind),
        GST_FORMAT_BYTES, typefind->segment.start);
    gst_element_post_message (GST_ELEMENT (typefind), msg);
  }

  /* the loop task will emit a new segment event before the next data */
  typefind->need_segment = TRUE;

  /* restart our task since it might have been stopped when we did the
   * flush. */
  gst_pad_start_task (typefind->sink,
      (GstTaskFunction) gst_type_find_element_loop, typefind->sink, NULL);

  /* streaming can continue now */
  GST_PAD_STREAM_UNLOCK (typefind->sink);

  return TRUE;
}
/* sinkpad functions */
/* Sink-pad event handler of the stream synchronizer.
 *
 * Keeps a group of streams aligned at stream changes:
 *  - STREAM_START: marks this stream as changed and, once every non-sparse
 *    stream of the (group-id or seqnum-matched) group has changed, advances
 *    the shared group start time and unblocks all waiting streams.
 *  - SEGMENT: waits until the group is unblocked, then shifts the segment
 *    base by the group start time and forwards a rewritten segment event.
 *  - FLUSH_START / FLUSH_STOP: mark the stream flushing / reset its segment
 *    and recompute the group start time from the remaining streams.
 *  - custom playsink flush events: unblock a stream waiting in EOS.
 *  - EOS: record per-stream EOS; when all streams are EOS the event is
 *    forwarded, otherwise the stream waits (a GAP is scheduled) so the
 *    others can finish.
 * All state is protected by the stream-synchronizer lock, which is dropped
 * around blocking pushes.
 */
static gboolean
gst_stream_synchronizer_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstStreamSynchronizer *self = GST_STREAM_SYNCHRONIZER (parent);
  gboolean ret = FALSE;

  GST_LOG_OBJECT (pad, "Handling event %s: %" GST_PTR_FORMAT,
      GST_EVENT_TYPE_NAME (event), event);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_STREAM_START:
    {
      GstSyncStream *stream, *ostream;
      guint32 seqnum = gst_event_get_seqnum (event);
      guint group_id;
      gboolean have_group_id;
      GList *l;
      gboolean all_wait = TRUE;
      gboolean new_stream = TRUE;

      have_group_id = gst_event_parse_group_id (event, &group_id);

      GST_STREAM_SYNCHRONIZER_LOCK (self);
      /* once any stream lacks a group id, stop using group ids entirely */
      self->have_group_id &= have_group_id;
      have_group_id = self->have_group_id;

      stream = gst_pad_get_element_private (pad);
      if (!stream) {
        GST_DEBUG_OBJECT (self, "No stream or STREAM_START from same source");
        GST_STREAM_SYNCHRONIZER_UNLOCK (self);
        break;
      }

      gst_event_parse_stream_flags (event, &stream->flags);

      /* only react when this is genuinely a new stream start (new group id,
       * or new seqnum when group ids are unavailable) */
      if ((have_group_id && stream->group_id != group_id) || (!have_group_id
              && stream->stream_start_seqnum != seqnum)) {
        stream->is_eos = FALSE;
        stream->eos_sent = FALSE;
        stream->flushing = FALSE;
        stream->stream_start_seqnum = seqnum;
        stream->group_id = group_id;

        if (!have_group_id) {
          /* Check if this belongs to a stream that is already there,
           * e.g. we got the visualizations for an audio stream */
          for (l = self->streams; l; l = l->next) {
            ostream = l->data;

            if (ostream != stream && ostream->stream_start_seqnum == seqnum
                && !ostream->wait) {
              new_stream = FALSE;
              break;
            }
          }

          if (!new_stream) {
            GST_DEBUG_OBJECT (pad,
                "Stream %d belongs to running stream %d, no waiting",
                stream->stream_number, ostream->stream_number);
            stream->wait = FALSE;
            GST_STREAM_SYNCHRONIZER_UNLOCK (self);
            break;
          }
        } else if (group_id == self->group_id) {
          GST_DEBUG_OBJECT (pad, "Stream %d belongs to running group %d, "
              "no waiting", stream->stream_number, group_id);
          GST_STREAM_SYNCHRONIZER_UNLOCK (self);
          break;
        }

        GST_DEBUG_OBJECT (pad, "Stream %d changed", stream->stream_number);

        stream->wait = TRUE;

        /* check whether every relevant stream is now waiting (sparse
         * streams are exempt) */
        for (l = self->streams; l; l = l->next) {
          GstSyncStream *ostream = l->data;

          all_wait = all_wait && ((ostream->flags & GST_STREAM_FLAG_SPARSE)
              || (ostream->wait && (!have_group_id
                      || ostream->group_id == group_id)));
          if (!all_wait)
            break;
        }

        if (all_wait) {
          gint64 position = 0;

          if (have_group_id)
            GST_DEBUG_OBJECT (self,
                "All streams have changed to group id %u -- unblocking",
                group_id);
          else
            GST_DEBUG_OBJECT (self, "All streams have changed -- unblocking");

          self->group_id = group_id;

          /* compute how far the old group played (max running time over
           * all TIME streams) to offset the new group's base */
          for (l = self->streams; l; l = l->next) {
            GstSyncStream *ostream = l->data;
            gint64 stop_running_time;
            gint64 position_running_time;

            ostream->wait = FALSE;

            if (ostream->segment.format == GST_FORMAT_TIME) {
              if (ostream->segment.rate > 0)
                stop_running_time =
                    gst_segment_to_running_time (&ostream->segment,
                    GST_FORMAT_TIME, ostream->segment.stop);
              else
                stop_running_time =
                    gst_segment_to_running_time (&ostream->segment,
                    GST_FORMAT_TIME, ostream->segment.start);

              position_running_time =
                  gst_segment_to_running_time (&ostream->segment,
                  GST_FORMAT_TIME, ostream->segment.position);

              position_running_time =
                  MAX (position_running_time, stop_running_time);

              if (ostream->segment.rate > 0)
                position_running_time -=
                    gst_segment_to_running_time (&ostream->segment,
                    GST_FORMAT_TIME, ostream->segment.start);
              else
                position_running_time -=
                    gst_segment_to_running_time (&ostream->segment,
                    GST_FORMAT_TIME, ostream->segment.stop);

              position_running_time = MAX (0, position_running_time);

              position = MAX (position, position_running_time);
            }
          }

          self->group_start_time += position;

          GST_DEBUG_OBJECT (self, "New group start time: %" GST_TIME_FORMAT,
              GST_TIME_ARGS (self->group_start_time));

          /* wake every stream blocked in gst_stream_synchronizer_wait() */
          for (l = self->streams; l; l = l->next) {
            GstSyncStream *ostream = l->data;
            ostream->wait = FALSE;
            g_cond_broadcast (&ostream->stream_finish_cond);
          }
        }
      }

      GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      break;
    }
    case GST_EVENT_SEGMENT:{
      GstSyncStream *stream;
      GstSegment segment;

      gst_event_copy_segment (event, &segment);

      GST_STREAM_SYNCHRONIZER_LOCK (self);

      /* block until the whole group is ready (see STREAM_START above) */
      gst_stream_synchronizer_wait (self, pad);

      if (self->shutdown) {
        GST_STREAM_SYNCHRONIZER_UNLOCK (self);
        gst_event_unref (event);
        goto done;
      }

      stream = gst_pad_get_element_private (pad);
      if (stream && segment.format == GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (pad,
            "New stream, updating base from %" GST_TIME_FORMAT " to %"
            GST_TIME_FORMAT, GST_TIME_ARGS (segment.base),
            GST_TIME_ARGS (segment.base + self->group_start_time));
        segment.base += self->group_start_time;

        GST_DEBUG_OBJECT (pad, "Segment was: %" GST_SEGMENT_FORMAT,
            &stream->segment);
        gst_segment_copy_into (&segment, &stream->segment);
        GST_DEBUG_OBJECT (pad, "Segment now is: %" GST_SEGMENT_FORMAT,
            &stream->segment);
        stream->segment_seqnum = gst_event_get_seqnum (event);

        GST_DEBUG_OBJECT (pad, "Stream start running time: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (stream->segment.base));
        {
          /* replace the event with one carrying the shifted segment,
           * keeping the original seqnum */
          GstEvent *tmpev;

          tmpev = gst_event_new_segment (&stream->segment);
          gst_event_set_seqnum (tmpev, stream->segment_seqnum);
          gst_event_unref (event);
          event = tmpev;
        }
      } else if (stream) {
        GST_WARNING_OBJECT (pad, "Non-TIME segment: %s",
            gst_format_get_name (segment.format));
        gst_segment_init (&stream->segment, GST_FORMAT_UNDEFINED);
      }
      GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      break;
    }
    case GST_EVENT_FLUSH_START:{
      GstSyncStream *stream;

      GST_STREAM_SYNCHRONIZER_LOCK (self);
      stream = gst_pad_get_element_private (pad);
      self->eos = FALSE;
      if (stream) {
        GST_DEBUG_OBJECT (pad, "Flushing streams");
        stream->flushing = TRUE;
        g_cond_broadcast (&stream->stream_finish_cond);
      }
      GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      break;
    }
    case GST_EVENT_FLUSH_STOP:{
      GstSyncStream *stream;
      GList *l;
      GstClockTime new_group_start_time = 0;

      GST_STREAM_SYNCHRONIZER_LOCK (self);
      stream = gst_pad_get_element_private (pad);
      if (stream) {
        GST_DEBUG_OBJECT (pad, "Resetting segment for stream %d",
            stream->stream_number);
        gst_segment_init (&stream->segment, GST_FORMAT_UNDEFINED);

        stream->is_eos = FALSE;
        stream->eos_sent = FALSE;
        stream->flushing = FALSE;
        stream->wait = FALSE;
        g_cond_broadcast (&stream->stream_finish_cond);
      }

      /* recompute the group start time from the other, non-flushing
       * streams' current start running times */
      for (l = self->streams; l; l = l->next) {
        GstSyncStream *ostream = l->data;
        GstClockTime start_running_time;

        if (ostream == stream || ostream->flushing)
          continue;

        if (ostream->segment.format == GST_FORMAT_TIME) {
          if (ostream->segment.rate > 0)
            start_running_time =
                gst_segment_to_running_time (&ostream->segment,
                GST_FORMAT_TIME, ostream->segment.start);
          else
            start_running_time =
                gst_segment_to_running_time (&ostream->segment,
                GST_FORMAT_TIME, ostream->segment.stop);

          new_group_start_time = MAX (new_group_start_time,
              start_running_time);
        }
      }

      GST_DEBUG_OBJECT (pad,
          "Updating group start time from %" GST_TIME_FORMAT " to %"
          GST_TIME_FORMAT, GST_TIME_ARGS (self->group_start_time),
          GST_TIME_ARGS (new_group_start_time));
      self->group_start_time = new_group_start_time;
      GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      break;
    }
      /* unblocking EOS wait when track switch. */
    case GST_EVENT_CUSTOM_DOWNSTREAM_OOB:{
      if (gst_event_has_name (event, "playsink-custom-video-flush")
          || gst_event_has_name (event, "playsink-custom-audio-flush")
          || gst_event_has_name (event, "playsink-custom-subtitle-flush")) {
        GstSyncStream *stream;

        GST_STREAM_SYNCHRONIZER_LOCK (self);
        stream = gst_pad_get_element_private (pad);
        if (stream) {
          stream->is_eos = FALSE;
          stream->eos_sent = FALSE;
          stream->wait = FALSE;
          g_cond_broadcast (&stream->stream_finish_cond);
        }
        GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      }
      break;
    }
    case GST_EVENT_EOS:{
      GstSyncStream *stream;
      GList *l;
      gboolean all_eos = TRUE;
      gboolean seen_data;
      GSList *pads = NULL;
      GstPad *srcpad;
      GstClockTime timestamp;

      GST_STREAM_SYNCHRONIZER_LOCK (self);
      stream = gst_pad_get_element_private (pad);
      if (!stream) {
        GST_STREAM_SYNCHRONIZER_UNLOCK (self);
        GST_WARNING_OBJECT (pad, "EOS for unknown stream");
        break;
      }

      GST_DEBUG_OBJECT (pad, "Have EOS for stream %d", stream->stream_number);
      stream->is_eos = TRUE;

      seen_data = stream->seen_data;
      srcpad = gst_object_ref (stream->srcpad);

      /* pick the best final position for this stream */
      if (seen_data && stream->segment.position != -1)
        timestamp = stream->segment.position;
      else if (stream->segment.rate < 0.0 || stream->segment.stop == -1)
        timestamp = stream->segment.start;
      else
        timestamp = stream->segment.stop;

      stream->segment.position = timestamp;

      for (l = self->streams; l; l = l->next) {
        GstSyncStream *ostream = l->data;

        all_eos = all_eos && ostream->is_eos;
        if (!all_eos)
          break;
      }

      if (all_eos) {
        GST_DEBUG_OBJECT (self, "All streams are EOS -- forwarding");
        self->eos = TRUE;
        for (l = self->streams; l; l = l->next) {
          GstSyncStream *ostream = l->data;
          /* local snapshot of current pads */
          gst_object_ref (ostream->srcpad);
          pads = g_slist_prepend (pads, ostream->srcpad);
        }
      }

      if (pads) {
        /* all streams done: wake up everything waiting on EOS */
        GstPad *pad;
        GSList *epad;
        GstSyncStream *ostream;

        ret = TRUE;
        epad = pads;
        while (epad) {
          pad = epad->data;
          ostream = gst_pad_get_element_private (pad);
          if (ostream) {
            g_cond_broadcast (&ostream->stream_finish_cond);
          }
          gst_object_unref (pad);
          epad = g_slist_next (epad);
        }
        g_slist_free (pads);
      } else {
        if (seen_data) {
          /* other streams still running: schedule a GAP and block here */
          self->send_gap_event = TRUE;
          stream->gap_duration = GST_CLOCK_TIME_NONE;
          stream->wait = TRUE;
          ret = gst_stream_synchronizer_wait (self, srcpad);
        }
      }

      /* send eos if haven't seen data. seen_data will be true if data buffer
       * of the track have received in anytime. sink is ready if seen_data is
       * true, so can send GAP event. Will send EOS if sink isn't ready. The
       * scenario for the case is one track haven't any media data and then
       * send EOS. Or no any valid media data in one track, so decoder can't
       * get valid CAPS for the track. sink can't ready without received CAPS.*/
      if (!seen_data || self->eos) {
        GST_DEBUG_OBJECT (pad, "send EOS event");
        /* drop lock when sending eos, which may block in e.g. preroll */
        GST_STREAM_SYNCHRONIZER_UNLOCK (self);
        ret = gst_pad_push_event (srcpad, gst_event_new_eos ());
        GST_STREAM_SYNCHRONIZER_LOCK (self);
        /* re-fetch: the stream may have gone away while unlocked */
        stream = gst_pad_get_element_private (pad);
        if (stream) {
          stream->eos_sent = TRUE;
        }
      }
      gst_object_unref (srcpad);
      gst_event_unref (event);
      GST_STREAM_SYNCHRONIZER_UNLOCK (self);
      goto done;
    }
    default:
      break;
  }

  ret = gst_pad_event_default (pad, parent, event);

done:
  return ret;
}
/* Chain function for the GDP depayloader sink pad.
 *
 * Accumulates incoming buffers in an adapter and runs a state machine over
 * the bytes: HEADER -> PAYLOAD -> (BUFFER | CAPS | EVENT) -> HEADER ...
 * Deserialized buffers/caps/events are pushed on this->srcpad.
 *
 * pad:    the sink pad the data arrived on (a ref on its parent is taken
 *         and released at 'done').
 * buffer: incoming data; ownership passes to the adapter.
 *
 * Returns a GstFlowReturn: GST_FLOW_OK while parsing succeeds, an error
 * flow on validation failures, or whatever gst_pad_push() returned.
 */
static GstFlowReturn
gst_gdp_depay_chain (GstPad * pad, GstBuffer * buffer)
{
  GstGDPDepay *this;
  GstFlowReturn ret = GST_FLOW_OK;
  GstCaps *caps;
  GstBuffer *buf;
  GstEvent *event;
  guint available;

  /* takes a ref on the parent element; dropped at 'done' */
  this = GST_GDP_DEPAY (gst_pad_get_parent (pad));

  /* On DISCONT, get rid of accumulated data. We assume a buffer after the
   * DISCONT contains (part of) a new valid header, if not we error because we
   * lost sync */
  if (GST_BUFFER_IS_DISCONT (buffer)) {
    gst_adapter_clear (this->adapter);
    this->state = GST_GDP_DEPAY_STATE_HEADER;
  }
  /* adapter takes ownership of the incoming buffer */
  gst_adapter_push (this->adapter, buffer);

  /* loop until we run out of complete packets ('goto done' exits) */
  while (TRUE) {
    switch (this->state) {
      case GST_GDP_DEPAY_STATE_HEADER:
      {
        guint8 *header;

        /* collect a complete header, validate and store the header. Figure out
         * the payload length and switch to the PAYLOAD state */
        available = gst_adapter_available (this->adapter);
        if (available < GST_DP_HEADER_LENGTH)
          goto done;

        GST_LOG_OBJECT (this, "reading GDP header from adapter");
        /* gst_adapter_take returns malloc'ed memory we own */
        header = gst_adapter_take (this->adapter, GST_DP_HEADER_LENGTH);
        if (!gst_dp_validate_header (GST_DP_HEADER_LENGTH, header)) {
          g_free (header);
          goto header_validate_error;
        }

        /* store types and payload length. Also store the header, which we need
         * to make the payload. */
        this->payload_length = gst_dp_header_payload_length (header);
        this->payload_type = gst_dp_header_payload_type (header);

        /* free previous header and store new one. */
        g_free (this->header);
        this->header = header;

        GST_LOG_OBJECT (this,
            "read GDP header, payload size %d, payload type %d, switching to state PAYLOAD",
            this->payload_length, this->payload_type);
        this->state = GST_GDP_DEPAY_STATE_PAYLOAD;
        break;
      }
      case GST_GDP_DEPAY_STATE_PAYLOAD:
      {
        /* in this state we wait for all the payload data to be available in the
         * adapter. Then we switch to the state where we actually process the
         * payload. */
        available = gst_adapter_available (this->adapter);
        if (available < this->payload_length)
          goto done;

        /* change state based on type */
        if (this->payload_type == GST_DP_PAYLOAD_BUFFER) {
          GST_LOG_OBJECT (this, "switching to state BUFFER");
          this->state = GST_GDP_DEPAY_STATE_BUFFER;
        } else if (this->payload_type == GST_DP_PAYLOAD_CAPS) {
          GST_LOG_OBJECT (this, "switching to state CAPS");
          this->state = GST_GDP_DEPAY_STATE_CAPS;
        } else if (this->payload_type >= GST_DP_PAYLOAD_EVENT_NONE) {
          GST_LOG_OBJECT (this, "switching to state EVENT");
          this->state = GST_GDP_DEPAY_STATE_EVENT;
        } else {
          goto wrong_type;
        }

        /* CRC-check the payload (peek only; bytes stay in the adapter
         * for the next state to consume) */
        if (this->payload_length
            && (!gst_dp_validate_payload (GST_DP_HEADER_LENGTH, this->header,
                    gst_adapter_peek (this->adapter, this->payload_length)))) {
          goto payload_validate_error;
        }
        break;
      }
      case GST_GDP_DEPAY_STATE_BUFFER:
      {
        /* if we receive a buffer without caps first, we error out */
        if (!this->caps)
          goto no_caps;

        GST_LOG_OBJECT (this, "reading GDP buffer from adapter");
        /* allocates a buffer with metadata taken from the stored header */
        buf = gst_dp_buffer_from_header (GST_DP_HEADER_LENGTH, this->header);
        if (!buf)
          goto buffer_failed;

        /* now take the payload if there is any */
        if (this->payload_length > 0) {
          guint8 *payload;

          payload = gst_adapter_take (this->adapter, this->payload_length);
          memcpy (GST_BUFFER_DATA (buf), payload, this->payload_length);
          g_free (payload);
        }

        /* set caps and push */
        gst_buffer_set_caps (buf, this->caps);
        GST_LOG_OBJECT (this, "deserialized buffer %p, pushing, timestamp %"
            GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT ", offset %"
            G_GINT64_FORMAT ", offset_end %" G_GINT64_FORMAT
            ", size %d, flags 0x%x",
            buf,
            GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
            GST_TIME_ARGS (GST_BUFFER_DURATION (buf)),
            GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf),
            GST_BUFFER_SIZE (buf), GST_BUFFER_FLAGS (buf));
        ret = gst_pad_push (this->srcpad, buf);
        if (ret != GST_FLOW_OK)
          goto push_error;

        GST_LOG_OBJECT (this, "switching to state HEADER");
        this->state = GST_GDP_DEPAY_STATE_HEADER;
        break;
      }
      case GST_GDP_DEPAY_STATE_CAPS:
      {
        guint8 *payload;

        /* take the payload of the caps */
        GST_LOG_OBJECT (this, "reading GDP caps from adapter");
        payload = gst_adapter_take (this->adapter, this->payload_length);
        caps = gst_dp_caps_from_packet (GST_DP_HEADER_LENGTH, this->header,
            payload);
        g_free (payload);
        if (!caps)
          goto caps_failed;

        GST_DEBUG_OBJECT (this, "deserialized caps %" GST_PTR_FORMAT, caps);
        /* remember the caps so later buffers can be stamped with them */
        gst_caps_replace (&(this->caps), caps);
        gst_pad_set_caps (this->srcpad, caps);
        /* drop the creation ref we still have */
        gst_caps_unref (caps);

        GST_LOG_OBJECT (this, "switching to state HEADER");
        this->state = GST_GDP_DEPAY_STATE_HEADER;
        break;
      }
      case GST_GDP_DEPAY_STATE_EVENT:
      {
        guint8 *payload;

        GST_LOG_OBJECT (this, "reading GDP event from adapter");

        /* adapter doesn't like 0 length payload */
        if (this->payload_length > 0)
          payload = gst_adapter_take (this->adapter, this->payload_length);
        else
          payload = NULL;
        event = gst_dp_event_from_packet (GST_DP_HEADER_LENGTH, this->header,
            payload);
        g_free (payload);
        if (!event)
          goto event_failed;

        GST_DEBUG_OBJECT (this, "deserialized event %p of type %s, pushing",
            event, gst_event_type_get_name (event->type));
        /* push_event consumes the event; its return is deliberately ignored */
        gst_pad_push_event (this->srcpad, event);

        GST_LOG_OBJECT (this, "switching to state HEADER");
        this->state = GST_GDP_DEPAY_STATE_HEADER;
        break;
      }
    }
  }

done:
  /* release the parent ref taken at entry */
  gst_object_unref (this);
  return ret;

  /* ERRORS */
header_validate_error:
  {
    GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL),
        ("GDP packet header does not validate"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
payload_validate_error:
  {
    GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL),
        ("GDP packet payload does not validate"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
wrong_type:
  {
    GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL),
        ("GDP packet header is of wrong type"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
no_caps:
  {
    GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL),
        ("Received a buffer without first receiving caps"));
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
buffer_failed:
  {
    GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL),
        ("could not create buffer from GDP packet"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
push_error:
  {
    /* not an element error: downstream simply refused the buffer */
    GST_WARNING_OBJECT (this, "pushing depayloaded buffer returned %d", ret);
    goto done;
  }
caps_failed:
  {
    GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL),
        ("could not create caps from GDP packet"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
event_failed:
  {
    GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL),
        ("could not create event from GDP packet"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
}
/* Event handler for the subtitle (text) sink pad.
 *
 * Only TIME-format segments are accepted; flush/EOS/GAP/CAPS events are
 * consumed locally (subtitle caps in particular must not leak downstream),
 * and anything unrecognized is forwarded on the source pad.
 *
 * Returns TRUE if the event was handled (or successfully forwarded).
 */
static gboolean
gst_dvbsub_overlay_event_text (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstDVBSubOverlay *render = GST_DVBSUB_OVERLAY (parent);
  GstEventType etype = GST_EVENT_TYPE (event);
  gboolean ret = FALSE;

  GST_DEBUG_OBJECT (pad, "received text event %s", GST_EVENT_TYPE_NAME (event));

  if (etype == GST_EVENT_SEGMENT) {
    GstSegment seg;

    GST_DEBUG_OBJECT (render, "received new segment");
    gst_event_copy_segment (event, &seg);

    if (seg.format == GST_FORMAT_TIME) {
      /* adopt the new subtitle segment */
      GST_DEBUG_OBJECT (render, "SUBTITLE SEGMENT now: %" GST_SEGMENT_FORMAT,
          &render->subtitle_segment);
      render->subtitle_segment = seg;
      GST_DEBUG_OBJECT (render, "SUBTITLE SEGMENT after: %" GST_SEGMENT_FORMAT,
          &render->subtitle_segment);
      ret = TRUE;
    } else {
      GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
          ("received non-TIME newsegment event on subtitle sinkpad"));
      ret = FALSE;
    }
    gst_event_unref (event);
  } else if (etype == GST_EVENT_FLUSH_STOP) {
    /* drop queued subtitles and start over with a clean TIME segment */
    GST_DEBUG_OBJECT (render, "stop flushing");
    gst_dvbsub_overlay_flush_subtitles (render);
    gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
    gst_event_unref (event);
    ret = TRUE;
  } else if (etype == GST_EVENT_FLUSH_START) {
    GST_DEBUG_OBJECT (render, "begin flushing");
    gst_event_unref (event);
    ret = TRUE;
  } else if (etype == GST_EVENT_EOS) {
    GST_INFO_OBJECT (render, "text EOS");
    gst_event_unref (event);
    ret = TRUE;
  } else if (etype == GST_EVENT_GAP) {
    gst_event_unref (event);
    ret = TRUE;
  } else if (etype == GST_EVENT_CAPS) {
    /* don't want to forward the subtitle caps */
    gst_event_unref (event);
    ret = TRUE;
  } else {
    /* everything else goes out on the source pad */
    ret = gst_pad_push_event (render->srcpad, event);
  }

  return ret;
}
/* Sink-pad event handler for the OMX base filter (GStreamer 0.10 era,
 * cf. GST_FLOW_WRONG_STATE / GST_EVENT_NEWSEGMENT).
 *
 * EOS is special-cased: a buffer carrying OMX_BUFFERFLAG_EOS is handed to
 * the OMX input port and we block until the component signals completion
 * before forwarding the EOS downstream.  Flush events start/stop the OMX
 * core flush and pause/restart the output task.
 */
static gboolean
pad_event (GstPad *pad, GstEvent *event)
{
  GstOmxBaseFilter *self;
  GOmxCore *gomx;
  GOmxPort *in_port;
  GOmxPort *out_port;
  gboolean ret;

  self = GST_OMX_BASE_FILTER (GST_OBJECT_PARENT (pad));
  gomx = self->gomx;
  in_port = self->in_port;
  /* NOTE(review): out_port is fetched but never used in this function */
  out_port = self->out_port;

  GST_LOG_OBJECT (self, "begin");
  GST_INFO_OBJECT (self, "event: %s", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_EOS:
      /* only drain the component if it was ever set up */
      if (self->initialized) {
        /* send buffer with eos flag */
        /** @todo move to util */
        {
          OMX_BUFFERHEADERTYPE *omx_buffer;

          GST_LOG_OBJECT (self, "request buffer");
          omx_buffer = g_omx_port_request_buffer (in_port);

          if (G_LIKELY (omx_buffer)) {
            omx_buffer->nFlags |= OMX_BUFFERFLAG_EOS;

            GST_LOG_OBJECT (self, "release_buffer");
            /* foo_buffer_untaint (omx_buffer); */
            g_omx_port_release_buffer (in_port, omx_buffer);
          } else {
            /* no buffer available: mark the core done directly */
            g_omx_core_set_done (gomx);
          }
        }

        /* Wait for the output port to get the EOS. */
        g_omx_core_wait_for_done (gomx);
      }

      /* forward EOS downstream (consumes the event) */
      ret = gst_pad_push_event (self->srcpad, event);
      break;

    case GST_EVENT_FLUSH_START:
      /* forward first, then abort any in-flight processing */
      gst_pad_push_event (self->srcpad, event);
      self->last_pad_push_return = GST_FLOW_WRONG_STATE;

      g_omx_core_flush_start (gomx);

      gst_pad_pause_task (self->srcpad);

      ret = TRUE;
      break;

    case GST_EVENT_FLUSH_STOP:
      /* forward, reset flow state, then restart the output loop */
      gst_pad_push_event (self->srcpad, event);
      self->last_pad_push_return = GST_FLOW_OK;

      g_omx_core_flush_stop (gomx);

      gst_pad_start_task (self->srcpad, output_loop, self->srcpad);

      ret = TRUE;
      break;

    case GST_EVENT_NEWSEGMENT:
      ret = gst_pad_push_event (self->srcpad, event);
      break;

    default:
      /* pass everything else through untouched */
      ret = gst_pad_push_event (self->srcpad, event);
      break;
  }

  GST_LOG_OBJECT (self, "end");

  return ret;
}
/* Shared rtpmux/rtpdtmfmux test driver.
 *
 * elem_name: element factory name to instantiate (e.g. "rtpmux").
 * sink2:     name of the second request pad ("sink_2" or a priority pad).
 * count:     number of buffers to push through src1.
 * cb:        per-iteration callback, invoked with src2 and the iteration
 *            index after each push on src1.
 *
 * Sets up two request sink pads plus a check sink pad, negotiates caps,
 * pushes `count` RTP buffers and verifies the output timestamps, then
 * tears everything down.  Uses the global `buffers` list maintained by
 * gstcheck's sink pad.
 */
static void
test_basic (const gchar * elem_name, const gchar * sink2, int count,
    check_cb cb)
{
  GstElement *rtpmux = NULL;
  GstPad *reqpad1 = NULL;
  GstPad *reqpad2 = NULL;
  GstPad *src1 = NULL;
  GstPad *src2 = NULL;
  GstPad *sink = NULL;
  GstBuffer *inbuf = NULL;
  GstCaps *src1caps = NULL;
  GstCaps *src2caps = NULL;
  GstCaps *sinkcaps = NULL;
  GstCaps *caps;
  GstSegment segment;
  int i;

  /* element + pads wiring */
  rtpmux = gst_check_setup_element (elem_name);
  reqpad1 = gst_element_get_request_pad (rtpmux, "sink_1");
  fail_unless (reqpad1 != NULL);
  reqpad2 = gst_element_get_request_pad (rtpmux, sink2);
  fail_unless (reqpad2 != NULL);
  sink = gst_check_setup_sink_pad_by_name (rtpmux, &sinktemplate, "src");

  src1 = gst_pad_new_from_static_template (&srctemplate, "src");
  src2 = gst_pad_new_from_static_template (&srctemplate, "src");
  fail_unless (gst_pad_link (src1, reqpad1) == GST_PAD_LINK_OK);
  fail_unless (gst_pad_link (src2, reqpad2) == GST_PAD_LINK_OK);
  gst_pad_set_query_function (src1, query_func);
  gst_pad_set_query_function (src2, query_func);
  gst_pad_set_query_function (sink, query_func);
  gst_pad_set_event_function (sink, event_func);
  /* query_func looks up each pad's caps via this object data pointer */
  g_object_set_data (G_OBJECT (src1), "caps", &src1caps);
  g_object_set_data (G_OBJECT (src2), "caps", &src2caps);
  g_object_set_data (G_OBJECT (sink), "caps", &sinkcaps);

  /* deliberately conflicting clock-rates to start with */
  src1caps = gst_caps_new_simple ("application/x-rtp",
      "clock-rate", G_TYPE_INT, 1, "ssrc", G_TYPE_UINT, 11, NULL);
  src2caps = gst_caps_new_simple ("application/x-rtp",
      "clock-rate", G_TYPE_INT, 2, "ssrc", G_TYPE_UINT, 12, NULL);
  sinkcaps = gst_caps_new_simple ("application/x-rtp",
      "clock-rate", G_TYPE_INT, 3, "ssrc", G_TYPE_UINT, 13, NULL);

  /* clock-rates 1/2/3 are mutually exclusive -> negotiation must fail */
  caps = gst_pad_peer_query_caps (src1, NULL);
  fail_unless (gst_caps_is_empty (caps));
  gst_caps_unref (caps);

  /* align src2 with the sink clock-rate and re-query */
  gst_caps_set_simple (src2caps, "clock-rate", G_TYPE_INT, 3, NULL);
  caps = gst_pad_peer_query_caps (src1, NULL);
  gst_caps_unref (caps);

  g_object_set (rtpmux, "seqnum-offset", 100, "timestamp-offset", 1000,
      "ssrc", 55, NULL);

  fail_unless (gst_element_set_state (rtpmux,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS);
  gst_pad_set_active (sink, TRUE);
  gst_pad_set_active (src1, TRUE);
  gst_pad_set_active (src2, TRUE);

  fail_unless (gst_pad_push_event (src1,
          gst_event_new_stream_start ("stream1")));
  fail_unless (gst_pad_push_event (src2,
          gst_event_new_stream_start ("stream2")));

  /* fix the downstream caps and set concrete caps on src1 */
  gst_caps_set_simple (sinkcaps,
      "payload", G_TYPE_INT, 98, "seqnum-offset", G_TYPE_UINT, 100,
      "timestamp-offset", G_TYPE_UINT, 1000, "ssrc", G_TYPE_UINT, 66, NULL);
  caps = gst_caps_new_simple ("application/x-rtp",
      "payload", G_TYPE_INT, 98, "clock-rate", G_TYPE_INT, 3,
      "seqnum-offset", G_TYPE_UINT, 56, "timestamp-offset", G_TYPE_UINT, 57,
      "ssrc", G_TYPE_UINT, 66, NULL);
  fail_unless (gst_pad_set_caps (src1, caps));
  gst_caps_unref (caps);

  caps = gst_pad_peer_query_caps (sink, NULL);
  fail_if (gst_caps_is_empty (caps));
  /* note: this caps ref is released during teardown below */

  /* src1 runs in a segment starting at 100000, src2 at 0 */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  segment.start = 100000;
  fail_unless (gst_pad_push_event (src1, gst_event_new_segment (&segment)));
  segment.start = 0;
  fail_unless (gst_pad_push_event (src2, gst_event_new_segment (&segment)));

  for (i = 0; i < count; i++) {
    GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;

    inbuf = gst_rtp_buffer_new_allocate (10, 0, 0);
    GST_BUFFER_PTS (inbuf) = i * 1000 + 100000;
    GST_BUFFER_DURATION (inbuf) = 1000;

    gst_rtp_buffer_map (inbuf, GST_MAP_WRITE, &rtpbuffer);
    gst_rtp_buffer_set_version (&rtpbuffer, 2);
    gst_rtp_buffer_set_payload_type (&rtpbuffer, 98);
    gst_rtp_buffer_set_ssrc (&rtpbuffer, 44);
    gst_rtp_buffer_set_timestamp (&rtpbuffer, 200 + i);
    gst_rtp_buffer_set_seq (&rtpbuffer, 2000 + i);
    gst_rtp_buffer_unmap (&rtpbuffer);

    fail_unless (gst_pad_push (src1, inbuf) == GST_FLOW_OK);

    /* output PTS should be the input PTS mapped into running time
     * (segment start 100000 subtracted) */
    if (buffers)
      fail_unless (GST_BUFFER_PTS (buffers->data) == i * 1000, "%lld",
          GST_BUFFER_PTS (buffers->data));

    cb (src2, i);

    /* drop whatever the check sink collected this iteration */
    g_list_foreach (buffers, (GFunc) gst_buffer_unref, NULL);
    g_list_free (buffers);
    buffers = NULL;
  }

  /* teardown */
  gst_pad_set_active (sink, FALSE);
  gst_pad_set_active (src1, FALSE);
  gst_pad_set_active (src2, FALSE);
  fail_unless (gst_element_set_state (rtpmux,
          GST_STATE_NULL) == GST_STATE_CHANGE_SUCCESS);
  gst_check_teardown_pad_by_name (rtpmux, "src");
  gst_object_unref (reqpad1);
  gst_object_unref (reqpad2);
  gst_check_teardown_pad_by_name (rtpmux, "sink_1");
  gst_check_teardown_pad_by_name (rtpmux, sink2);
  gst_element_release_request_pad (rtpmux, reqpad1);
  gst_element_release_request_pad (rtpmux, reqpad2);
  gst_caps_unref (caps);
  gst_caps_replace (&src1caps, NULL);
  gst_caps_replace (&src2caps, NULL);
  gst_caps_replace (&sinkcaps, NULL);
  gst_check_teardown_element (rtpmux);
}
/* Sink-pad task function (pull mode) for the timidity MIDI decoder.
 *
 * Runs repeatedly as a streaming task; each invocation performs exactly one
 * phase and returns, so the phases execute in order across iterations:
 *   1. determine upstream size and allocate the MIDI data buffer,
 *   2. pull the MIDI file into memory chunk by chunk,
 *   3. parse the song, send the initial newsegment and tags,
 *   4. steady state: emit segment updates / perform pending seeks / push
 *      one decoded audio buffer.
 * On unrecoverable conditions the task is paused via the 'paused' label.
 */
static void
gst_timidity_loop (GstPad * sinkpad)
{
  GstTimidity *timidity = GST_TIMIDITY (GST_PAD_PARENT (sinkpad));
  GstBuffer *out;
  GstFlowReturn ret;

  /* Phase 1: learn the file size and allocate storage for it */
  if (timidity->mididata_size == 0) {
    if (!gst_timidity_get_upstream_size (timidity, &timidity->mididata_size)) {
      GST_ELEMENT_ERROR (timidity, STREAM, DECODE, (NULL),
          ("Unable to get song length"));
      goto paused;
    }

    if (timidity->mididata)
      g_free (timidity->mididata);

    timidity->mididata = g_malloc (timidity->mididata_size);
    timidity->mididata_offset = 0;
    return;
  }

  /* Phase 2: pull the whole MIDI file into mididata */
  if (timidity->mididata_offset < timidity->mididata_size) {
    GstBuffer *buffer;
    gint64 size;

    GST_DEBUG_OBJECT (timidity, "loading song");

    ret = gst_pad_pull_range (timidity->sinkpad, timidity->mididata_offset,
        -1, &buffer);
    if (ret != GST_FLOW_OK) {
      GST_ELEMENT_ERROR (timidity, STREAM, DECODE, (NULL),
          ("Unable to load song"));
      goto paused;
    }

    /* clamp to the remaining space so a too-large pull can't overflow */
    size = timidity->mididata_size - timidity->mididata_offset;
    if (GST_BUFFER_SIZE (buffer) < size)
      size = GST_BUFFER_SIZE (buffer);

    memmove (timidity->mididata + timidity->mididata_offset,
        GST_BUFFER_DATA (buffer), size);
    gst_buffer_unref (buffer);

    timidity->mididata_offset += size;
    GST_DEBUG_OBJECT (timidity, "Song loaded");
    return;
  }

  /* Phase 3: parse the song and announce segment + tags */
  if (!timidity->song) {
    MidIStream *stream;
    GstTagList *tags = NULL;
    gchar *text;

    GST_DEBUG_OBJECT (timidity, "Parsing song");

    stream = mid_istream_open_mem (timidity->mididata,
        timidity->mididata_size, 0);

    timidity->song = mid_song_load (stream, timidity->song_options);
    mid_istream_close (stream);

    if (!timidity->song) {
      GST_ELEMENT_ERROR (timidity, STREAM, DECODE, (NULL),
          ("Unable to parse midi"));
      goto paused;
    }

    mid_song_start (timidity->song);
    /* total length in frames: total ms scaled by the per-frame duration */
    timidity->o_len = (GST_MSECOND *
        (GstClockTime) mid_song_get_total_time (timidity->song)) /
        timidity->time_per_frame;
    gst_segment_set_newsegment (timidity->o_segment, FALSE, 1.0,
        GST_FORMAT_DEFAULT, 0, GST_CLOCK_TIME_NONE, 0);

    gst_pad_push_event (timidity->srcpad,
        gst_timidity_get_new_segment_event (timidity, GST_FORMAT_TIME, FALSE));

    /* extract tags */
    text = mid_song_get_meta (timidity->song, MID_SONG_TEXT);
    if (text) {
      tags = gst_tag_list_new ();
      gst_tag_list_add (tags, GST_TAG_MERGE_APPEND, GST_TAG_TITLE, text, NULL);
      /* NOTE(review): free intentionally left out — presumably the meta
       * string is owned by libtimidity; confirm before re-enabling */
      //g_free (text);
    }

    text = mid_song_get_meta (timidity->song, MID_SONG_COPYRIGHT);
    if (text) {
      if (tags == NULL)
        tags = gst_tag_list_new ();
      gst_tag_list_add (tags, GST_TAG_MERGE_APPEND,
          GST_TAG_COPYRIGHT, text, NULL);
      //g_free (text);
    }

    if (tags) {
      gst_element_found_tags (GST_ELEMENT (timidity), tags);
    }

    GST_DEBUG_OBJECT (timidity, "Parsing song done");
    return;
  }

  /* Phase 4a: a seek or segment change requires a fresh newsegment */
  if (timidity->o_segment_changed) {
    GstSegment *segment = gst_timidity_get_segment (timidity, GST_FORMAT_TIME,
        !timidity->o_new_segment);

    GST_LOG_OBJECT (timidity,
        "sending newsegment from %" GST_TIME_FORMAT "-%" GST_TIME_FORMAT
        ", pos=%" GST_TIME_FORMAT,
        GST_TIME_ARGS ((guint64) segment->start),
        GST_TIME_ARGS ((guint64) segment->stop),
        GST_TIME_ARGS ((guint64) segment->time));

    if (timidity->o_segment->flags & GST_SEEK_FLAG_SEGMENT) {
      gst_element_post_message (GST_ELEMENT (timidity),
          gst_message_new_segment_start (GST_OBJECT (timidity),
              segment->format, segment->start));
    }

    gst_segment_free (segment);
    timidity->o_segment_changed = FALSE;
    return;
  }

  /* Phase 4b: position the synthesizer for a pending seek */
  if (timidity->o_seek) {
    /* perform a seek internally */
    timidity->o_segment->last_stop = timidity->o_segment->time;
    mid_song_seek (timidity->song,
        (timidity->o_segment->last_stop * timidity->time_per_frame) /
        GST_MSECOND);
  }

  /* Phase 4c: render and push one buffer of audio */
  out = gst_timidity_get_buffer (timidity);
  if (!out) {
    GST_LOG_OBJECT (timidity, "Song ended, generating eos");
    gst_pad_push_event (timidity->srcpad, gst_event_new_eos ());
    timidity->o_seek = FALSE;
    goto paused;
  }

  /* first buffer after a seek is a discontinuity */
  if (timidity->o_seek) {
    GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DISCONT);
    timidity->o_seek = FALSE;
  }

  gst_buffer_set_caps (out, timidity->out_caps);
  ret = gst_pad_push (timidity->srcpad, out);

  /* UNEXPECTED (EOS) is normal termination; fatal flows go to 'error' */
  if (ret == GST_FLOW_UNEXPECTED)
    goto eos;
  else if (ret < GST_FLOW_UNEXPECTED || ret == GST_FLOW_NOT_LINKED)
    goto error;

  return;

paused:
  {
    GST_DEBUG_OBJECT (timidity, "pausing task");
    gst_pad_pause_task (timidity->sinkpad);
    return;
  }
eos:
  {
    gst_pad_push_event (timidity->srcpad, gst_event_new_eos ());
    goto paused;
  }
error:
  {
    GST_ELEMENT_ERROR (timidity, STREAM, FAILED,
        ("Internal data stream error"),
        ("Streaming stopped, reason %s", gst_flow_get_name (ret)));
    gst_pad_push_event (timidity->srcpad, gst_event_new_eos ());
    goto paused;
  }
}
/* Source-pad event handler (GStreamer 0.10 API) for the timidity element.
 *
 * Only SEEK events are handled: the seek position is converted to the
 * element's DEFAULT format, the streaming task is flushed or stopped,
 * the output segment is updated under the sinkpad stream lock, and the
 * task is restarted.  All other events are dropped.
 *
 * Returns TRUE if a seek was processed, FALSE otherwise.
 *
 * Fix: the original never released the event.  An upstream event handler
 * owns the event it receives; since this function only ever pushes newly
 * created events (flush/newsegment) downstream and never forwards the
 * original, it must unref it on every path.  The unref after the switch
 * covers the handled case, the early `break` when no song is loaded, and
 * the default (dropped) case alike.
 */
static gboolean
gst_timidity_src_event (GstPad * pad, GstEvent * event)
{
  gboolean res = FALSE;
  GstTimidity *timidity = GST_TIMIDITY (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (pad, "%s event received", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:
    {
      gdouble rate;
      GstFormat src_format, dst_format;
      GstSeekFlags flags;
      GstSeekType start_type, stop_type;
      gint64 orig_start, start = 0, stop = 0;
      gboolean flush, update;

      /* can't seek before the song has been parsed */
      if (!timidity->song)
        break;

      gst_event_parse_seek (event, &rate, &src_format, &flags,
          &start_type, &orig_start, &stop_type, &stop);

      /* convert the requested positions to the element's native format */
      dst_format = GST_FORMAT_DEFAULT;
      gst_timidity_src_convert (timidity, src_format, orig_start,
          &dst_format, &start);
      gst_timidity_src_convert (timidity, src_format, stop, &dst_format, &stop);

      flush = ((flags & GST_SEEK_FLAG_FLUSH) == GST_SEEK_FLAG_FLUSH);

      if (flush) {
        GST_DEBUG ("performing flush");
        gst_pad_push_event (timidity->srcpad, gst_event_new_flush_start ());
      } else {
        /* non-flushing seek: stop the task so we can take the stream lock */
        gst_pad_stop_task (timidity->sinkpad);
      }

      /* serialize against the streaming thread */
      GST_PAD_STREAM_LOCK (timidity->sinkpad);

      if (flush) {
        gst_pad_push_event (timidity->srcpad, gst_event_new_flush_stop ());
      }

      gst_segment_set_seek (timidity->o_segment, rate, dst_format, flags,
          start_type, start, stop_type, stop, &update);

      if (flags & GST_SEEK_FLAG_SEGMENT) {
        GST_DEBUG_OBJECT (timidity, "received segment seek %d, %d",
            (gint) start_type, (gint) stop_type);
      } else {
        GST_DEBUG_OBJECT (timidity, "received normal seek %d",
            (gint) start_type);
        update = FALSE;
      }

      gst_pad_push_event (timidity->srcpad,
          gst_timidity_get_new_segment_event (timidity, GST_FORMAT_TIME,
              update));

      /* tell the loop to reposition the synthesizer */
      timidity->o_seek = TRUE;

      gst_pad_start_task (timidity->sinkpad,
          (GstTaskFunction) gst_timidity_loop, timidity->sinkpad);

      GST_PAD_STREAM_UNLOCK (timidity->sinkpad);
      GST_DEBUG ("seek done");
    }
      res = TRUE;
      break;
    default:
      /* unhandled events are simply dropped */
      break;
  }

  /* we own the event and never forward it; release it on every path
   * (the original code leaked it) */
  gst_event_unref (event);

  g_object_unref (timidity);
  return res;
}
/* Chain function for the multipart demuxer sink pad.
 *
 * Accumulates data in an adapter and repeatedly extracts one multipart
 * "part" per loop iteration: parse the part header (once), locate the next
 * boundary, then either skip the part (empty / no MIME type) or push its
 * payload on the per-MIME source pad (creating the pad on first use).
 *
 * Returns the combined flow of all source pads, or an ERROR/EOS flow when
 * the boundary parser signals MULTIPART_DATA_ERROR / MULTIPART_DATA_EOS.
 */
static GstFlowReturn
gst_multipart_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstMultipartDemux *multipart;
  GstAdapter *adapter;
  /* 'size' doubles as the parser status at 'nodata' (negative = error/EOS) */
  gint size = 1;
  GstFlowReturn res;

  multipart = GST_MULTIPART_DEMUX (parent);
  adapter = multipart->adapter;

  res = GST_FLOW_OK;

  /* a discontinuity invalidates buffered data; mark every source pad so
   * the next buffer it gets carries the DISCONT flag */
  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
    GSList *l;

    for (l = multipart->srcpads; l != NULL; l = l->next) {
      GstMultipartPad *srcpad = l->data;

      srcpad->discont = TRUE;
    }
    gst_adapter_clear (adapter);
  }
  /* adapter takes ownership of buf */
  gst_adapter_push (adapter, buf);

  while (gst_adapter_available (adapter) > 0) {
    GstMultipartPad *srcpad;
    GstBuffer *outbuf;
    gboolean created;
    gint datalen;

    /* step 1: consume the part header (only once per part) */
    if (G_UNLIKELY (!multipart->header_completed)) {
      if ((size = multipart_parse_header (multipart)) < 0) {
        goto nodata;
      } else {
        gst_adapter_flush (adapter, size);
        multipart->header_completed = TRUE;
      }
    }
    /* step 2: find the boundary; 'size' spans up to and including it,
     * 'datalen' is the payload length before it */
    if ((size = multipart_find_boundary (multipart, &datalen)) < 0) {
      goto nodata;
    }

    /* Invalidate header info */
    multipart->header_completed = FALSE;
    multipart->content_length = -1;

    if (G_UNLIKELY (datalen <= 0)) {
      GST_DEBUG_OBJECT (multipart, "skipping empty content.");
      gst_adapter_flush (adapter, size - datalen);
    } else if (G_UNLIKELY (!multipart->mime_type)) {
      GST_DEBUG_OBJECT (multipart, "content has no MIME type.");
      gst_adapter_flush (adapter, size - datalen);
    } else {
      GstClockTime ts;

      /* step 3: route the payload to the pad for this MIME type */
      srcpad =
          gst_multipart_find_pad_by_mime (multipart,
          multipart->mime_type, &created);

      ts = gst_adapter_prev_pts (adapter, NULL);
      outbuf = gst_adapter_take_buffer (adapter, datalen);
      /* drop the boundary bytes that follow the payload */
      gst_adapter_flush (adapter, size - datalen);

      if (created) {
        GstTagList *tags;
        GstSegment segment;

        gst_segment_init (&segment, GST_FORMAT_TIME);

        /* Push new segment, first buffer has 0 timestamp */
        gst_pad_push_event (srcpad->pad, gst_event_new_segment (&segment));

        tags = gst_tag_list_new (GST_TAG_CONTAINER_FORMAT, "Multipart", NULL);
        gst_tag_list_set_scope (tags, GST_TAG_SCOPE_GLOBAL);
        gst_pad_push_event (srcpad->pad, gst_event_new_tag (tags));
      }

      outbuf = gst_buffer_make_writable (outbuf);
      /* only stamp a timestamp when it differs from the last one pushed
       * on this pad, so multiple parts from one input buffer don't all
       * claim the same time */
      if (srcpad->last_ts == GST_CLOCK_TIME_NONE || srcpad->last_ts != ts) {
        GST_BUFFER_TIMESTAMP (outbuf) = ts;
        srcpad->last_ts = ts;
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = GST_CLOCK_TIME_NONE;
      }

      if (srcpad->discont) {
        GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
        srcpad->discont = FALSE;
      } else {
        GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DISCONT);
      }

      GST_DEBUG_OBJECT (multipart,
          "pushing buffer with timestamp %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
      res = gst_pad_push (srcpad->pad, outbuf);
      /* combine with the other pads' flows (e.g. ignore NOT_LINKED
       * while another pad is still linked) */
      res = gst_multipart_combine_flows (multipart, srcpad, res);
      if (res != GST_FLOW_OK)
        break;
    }
  }

nodata:
  if (G_UNLIKELY (size == MULTIPART_DATA_ERROR))
    return GST_FLOW_ERROR;
  if (G_UNLIKELY (size == MULTIPART_DATA_EOS))
    return GST_FLOW_EOS;

  return res;
}
/* Chain function for the RSVG decoder sink pad (GStreamer 0.10 API).
 *
 * Collects data in an adapter until a complete SVG document (terminated by
 * "</svg>") is available, decodes it into a video buffer, stamps the
 * timestamp/duration, pushes any pending newsegment/events/tags first,
 * then pushes the frame.  Multiple complete documents in the adapter are
 * decoded in one call.
 *
 * Fixes relative to the original:
 *  - `ret` was declared gboolean but holds GstFlowReturn values; use the
 *    proper type.
 *  - `completed` was only initialized once outside the loop, so after one
 *    successful decode a later iteration without a "</svg>" terminator
 *    still saw completed == TRUE and decoded an incomplete document; it
 *    is now reset on every iteration.
 *  - the function always returned GST_FLOW_OK, swallowing downstream flow
 *    errors (including FLUSHING/EOS) so upstream kept pushing; it now
 *    returns the last flow result.
 */
static GstFlowReturn
gst_rsvg_dec_chain (GstPad * pad, GstBuffer * buffer)
{
  GstRsvgDec *rsvg = GST_RSVG_DEC (GST_PAD_PARENT (pad));
  const guint8 *data;
  guint size;
  GstFlowReturn ret = GST_FLOW_OK;

  /* first_timestamp is used slightly differently where a framerate is given
     or not. If there is a frame rate, it will be used as a base. If there is
     not, it will be used to keep track of the timestamp of the first buffer,
     to be used as the timestamp of the output buffer. When a buffer is
     output, first timestamp will resync to the next buffer's timestamp. */
  if (rsvg->first_timestamp == GST_CLOCK_TIME_NONE) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
      rsvg->first_timestamp = GST_BUFFER_TIMESTAMP (buffer);
    else if (rsvg->fps_n != 0)
      rsvg->first_timestamp = 0;
  }

  gst_adapter_push (rsvg->adapter, buffer);

  size = gst_adapter_available (rsvg->adapter);
  /* "<svg></svg>" — the smallest conceivable document is 11 bytes */
  while (size >= 5 + 6 && ret == GST_FLOW_OK) {
    guint i;
    /* must be reset每 iteration: a stale TRUE from a previous decode would
     * make us decode an incomplete document (bug in the original code) */
    gboolean completed = FALSE;

    data = gst_adapter_peek (rsvg->adapter, size);

    /* scan backwards for the last closing tag; shrink 'size' to end there */
    for (i = size - 6; i >= 5; i--) {
      if (memcmp (data + i, "</svg>", 6) == 0) {
        completed = TRUE;
        size = i + 6;
        break;
      }
    }

    if (completed) {
      GstBuffer *outbuf = NULL;

      GST_LOG_OBJECT (rsvg, "have complete svg of %u bytes", size);

      data = gst_adapter_peek (rsvg->adapter, size);
      ret = gst_rsvg_decode_image (rsvg, data, size, &outbuf);
      if (ret != GST_FLOW_OK)
        break;

      /* stamp timestamp/duration; behavior depends on whether a framerate
       * was negotiated (fps_n != 0) and whether input carried timestamps */
      if (rsvg->first_timestamp != GST_CLOCK_TIME_NONE) {
        GST_BUFFER_TIMESTAMP (outbuf) = rsvg->first_timestamp;
        GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;

        if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
          GstClockTime end = GST_BUFFER_TIMESTAMP_IS_VALID (buffer) ?
              GST_BUFFER_TIMESTAMP (buffer) : rsvg->first_timestamp;
          end += GST_BUFFER_DURATION (buffer);
          GST_BUFFER_DURATION (outbuf) = end - GST_BUFFER_TIMESTAMP (outbuf);
        }

        if (rsvg->fps_n == 0) {
          /* resync to the next input buffer's timestamp */
          rsvg->first_timestamp = GST_CLOCK_TIME_NONE;
        } else {
          /* NOTE(review): scaling frame_count by fps_d/(fps_n*GST_SECOND)
           * looks dimensionally wrong (yields near-zero durations) — a
           * single frame duration would be GST_SECOND*fps_d/fps_n; left
           * unchanged pending confirmation against upstream */
          GST_BUFFER_DURATION (outbuf) =
              gst_util_uint64_scale (rsvg->frame_count, rsvg->fps_d,
              rsvg->fps_n * GST_SECOND);
        }
      } else if (rsvg->fps_n != 0) {
        GST_BUFFER_TIMESTAMP (outbuf) =
            rsvg->first_timestamp + gst_util_uint64_scale (rsvg->frame_count,
            rsvg->fps_d, rsvg->fps_n * GST_SECOND);
        GST_BUFFER_DURATION (outbuf) =
            gst_util_uint64_scale (rsvg->frame_count, rsvg->fps_d,
            rsvg->fps_n * GST_SECOND);
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = rsvg->first_timestamp;
        GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
      }
      rsvg->frame_count++;

      /* serialized items that must precede the first buffer */
      if (rsvg->need_newsegment) {
        gst_pad_push_event (rsvg->srcpad,
            gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));
        rsvg->need_newsegment = FALSE;
      }

      if (rsvg->pending_events) {
        GList *l;

        for (l = rsvg->pending_events; l; l = l->next)
          gst_pad_push_event (rsvg->srcpad, l->data);
        g_list_free (rsvg->pending_events);
        rsvg->pending_events = NULL;
      }

      if (rsvg->pending_tags) {
        gst_element_found_tags (GST_ELEMENT_CAST (rsvg), rsvg->pending_tags);
        rsvg->pending_tags = NULL;
      }

      GST_LOG_OBJECT (rsvg, "image rendered okay");

      ret = gst_pad_push (rsvg->srcpad, outbuf);
      if (ret != GST_FLOW_OK)
        break;

      gst_adapter_flush (rsvg->adapter, size);
      size = gst_adapter_available (rsvg->adapter);
      continue;
    } else {
      /* no complete document yet; wait for more data */
      break;
    }
  }

  /* propagate the last flow result so upstream reacts to errors/flushing
   * (the original unconditionally returned GST_FLOW_OK) */
  return ret;
}
/* Collect-pads callback: the main muxing function for mpegpsmux.
 *
 * On the first invocation the streams are created from the collect pads
 * and the source pad is prepared (stream-start/newsegment).  Each call
 * then picks the best (earliest) queued buffer, hands it to libpsmux and
 * drains libpsmux's output to the source pad.  When no stream has data
 * left (all EOS), the pending GOP list is flushed, the program end code
 * is written and EOS is pushed.
 */
static GstFlowReturn
mpegpsmux_collected (GstCollectPads * pads, MpegPsMux * mux)
{
  /* main muxing function */
  GstFlowReturn ret = GST_FLOW_OK;
  MpegPsPadData *best = NULL;
  gboolean keyunit;

  GST_DEBUG_OBJECT (mux, "Pads collected");

  if (mux->first) {             /* process block for the first mux */
    /* iterate through the collect pads and add streams to @mux */
    ret = mpegpsmux_create_streams (mux);
    /* Assumption : all pads are already added at this time */
    if (G_UNLIKELY (ret != GST_FLOW_OK))
      return ret;

    best = mpegpsmux_choose_best_stream (mux);

    /* prepare the src pad (output), return if failed */
    if (!mpegpsdemux_prepare_srcpad (mux)) {
      GST_DEBUG_OBJECT (mux, "Failed to send new segment");
      goto new_seg_fail;
    }

    mux->first = FALSE;
  } else {
    best = mpegpsmux_choose_best_stream (mux);
  }

  if (best != NULL) {
    /* a stream has a queued buffer: mux it */
    GstBuffer *buf = best->queued.buf;
    gint64 pts, dts;

    g_assert (buf != NULL);

    GST_LOG_OBJECT (mux,
        "Chose stream from pad %" GST_PTR_FORMAT " for output (PID: 0x%04x): "
        "adjusted pts: %" GST_TIME_FORMAT ", dts: %" GST_TIME_FORMAT,
        best->collect.pad, best->stream_id,
        GST_TIME_ARGS (best->queued.pts), GST_TIME_ARGS (best->queued.dts));

    /* and convert to mpeg time stamps */
    pts = GSTTIME_TO_MPEGTIME (best->queued.pts);
    dts = GSTTIME_TO_MPEGTIME (best->queued.dts);

    /* start of new GOP? */
    keyunit = !GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);

    /* a video keyframe closes the previous GOP: flush it first */
    if (keyunit && best->stream_id == mux->video_stream_id
        && mux->gop_list != NULL) {
      ret = mpegpsmux_push_gop_list (mux);
      if (ret != GST_FLOW_OK)
        goto done;
    }

    /* give the buffer to libpsmux for processing */
    psmux_stream_add_data (best->stream, buf, pts, dts, keyunit);

    /* ownership transferred to libpsmux; drop our queued pointer */
    best->queued.buf = NULL;

    /* write the data from libpsmux to stream */
    while (psmux_stream_bytes_in_buffer (best->stream) > 0) {
      GST_LOG_OBJECT (mux, "Before @psmux_write_stream_packet");

      if (!psmux_write_stream_packet (mux->psmux, best->stream)) {
        GST_DEBUG_OBJECT (mux, "Failed to write data packet");
        goto write_fail;
      }
    }
    mux->last_ts = best->last_ts;
  } else {
    /* FIXME: Drain all remaining streams */
    /* At EOS */
    if (mux->gop_list != NULL)
      mpegpsmux_push_gop_list (mux);

    if (!psmux_write_end_code (mux->psmux)) {
      GST_WARNING_OBJECT (mux, "Writing MPEG PS Program end code failed.");
    }
    gst_pad_push_event (mux->srcpad, gst_event_new_eos ());

    ret = GST_FLOW_EOS;
  }

done:
  return ret;

new_seg_fail:
  return GST_FLOW_ERROR;

write_fail:
  /* FIXME: Failed writing data for some reason. Should set appropriate error */
  return mux->last_flow_ret;
}
/* Chain function for the Xing MP3 encoder sink pad.
 *
 * Events can arrive through the chain function here (the GST_IS_EVENT
 * check on the buffer) — an old-GStreamer pattern; only EOS is latched,
 * other events take the default path.  For data: on the first buffer a
 * VBR (Xing) header is emitted unless CBR is configured, then samples are
 * copied into an internal staging buffer and fed to the encoder one frame
 * at a time, pushing each encoded frame downstream.
 *
 * NOTE(review): `encoder` comes from gst_pad_get_parent(), which in
 * GStreamer >= 0.10 returns a new reference that is never released on any
 * return path here.  Whether this leaks depends on the targeted GStreamer
 * version (the event-in-buffer pattern suggests 0.8 semantics, where no
 * unref was required) — confirm before changing.
 */
static GstFlowReturn xing_mp3_encoder_chain(GstPad *pad, GstBuffer *buf)
{
    XingMp3Encoder *encoder;
    GstFlowReturn ret = GST_FLOW_OK;

    g_return_val_if_fail(pad != NULL, GST_FLOW_ERROR);
    g_return_val_if_fail(GST_IS_PAD(pad), GST_FLOW_ERROR);
    g_return_val_if_fail(buf != NULL, GST_FLOW_ERROR);

    encoder = XING_MP3_ENCODER(gst_pad_get_parent(pad));

    if(GST_IS_EVENT(buf)) {
        GstEvent *event = GST_EVENT(buf);

        switch(GST_EVENT_TYPE(event)) {
            case GST_EVENT_EOS:
                /* remember EOS; the actual EOS event is pushed downstream
                 * at the bottom of this function */
                encoder->at_end_of_stream = TRUE;
                gst_event_unref(event);
                break;
            default:
                gst_pad_event_default(pad, event);
                return ret;
        }
    } else {
        guchar *data;
        gulong size;
        gulong i, j;        /* NOTE(review): i, j, size, buffer are unused */
        float **buffer;
        gint buf_size;
        gint buf_pos;

        if(!encoder->is_initialized) {
            gst_buffer_unref(buf);
            GST_ELEMENT_ERROR(encoder, CORE, NEGOTIATION, (NULL), ("Encoder not initialized"));
            return GST_FLOW_UNEXPECTED;
        }

        /* emit the VBR (Xing) stream header once, before any audio frames */
        if(!encoder->header_sent) {
            E_CONTROL control;
            MPEG_HEAD head;
            guchar output_buffer[OUTPUT_BUFFER_SIZE];
            gint buf_len;

            if(!encoder->use_cbr) {
                GstFlowReturn push_ret;

                hx_mp3enc_l3_info_ec(encoder->xing_encoder, &control);
                hx_mp3enc_l3_info_head(encoder->xing_encoder, &head);

                buf_len = XingHeader(control.samprate, head.mode, control.cr_bit,
                                     control.original,
                                     VBR_SCALE_FLAG /* FRAMES_FLAG | BYTES_FLAG | TOC_FLAG */,
                                     0, 0, control.vbr_flag ? control.vbr_mnr : -1,
                                     NULL, output_buffer, 0, 0, 0);

                if((push_ret = xing_mp3_encoder_push_buffer(encoder, output_buffer, buf_len)) != GST_FLOW_OK) {
                    gst_buffer_unref(buf);
                    return push_ret;
                }
            }

            encoder->header_sent = TRUE;
        }

        data = (guchar *)GST_BUFFER_DATA(buf);
        buf_size = GST_BUFFER_SIZE(buf);
        buf_pos = 0;

        /* Transfer incoming data to internal buffer.
         * TODO: Use a ring buffer, avoid memmove () */
        while(buf_pos < buf_size) {
            /* copy as much as fits into the staging buffer */
            gint gulp = MIN(buf_size - buf_pos, INPUT_BUFFER_SIZE - encoder->input_buffer_pos);

            memcpy(encoder->input_buffer + encoder->input_buffer_pos, data + buf_pos, gulp);
            /* sample count assumes 16-bit samples: 2 bytes per channel */
            encoder->samples_in += gulp / (2 * encoder->channels);
            encoder->input_buffer_pos += gulp;
            buf_pos += gulp;

            /* Pass data on to encoder */
            while(encoder->input_buffer_pos >= encoder->bytes_per_frame) {
                guchar output_buffer[OUTPUT_BUFFER_SIZE];
                IN_OUT x;

                x = hx_mp3enc_mp3_encode_frame(encoder->xing_encoder, encoder->input_buffer, output_buffer);
                /* encoder made no progress: avoid spinning forever */
                if(x.in_bytes == 0 && x.out_bytes == 0) {
                    break;
                }

                /* shift unconsumed bytes to the front of the staging buffer */
                memmove(encoder->input_buffer, encoder->input_buffer + x.in_bytes,
                        encoder->input_buffer_pos - x.in_bytes);
                encoder->input_buffer_pos -= x.in_bytes;

                /* Accept output from encoder and pass it on.
                 * TODO: Do this less often and save CPU */
                if(x.out_bytes > 0) {
                    GstFlowReturn push_ret;

                    if((push_ret = xing_mp3_encoder_push_buffer(encoder, output_buffer, x.out_bytes)) != GST_FLOW_OK) {
                        gst_buffer_unref(buf);
                        return push_ret;
                    }
                }
            }
        }

        gst_buffer_unref(buf);
    }

    /* EOS latched above (or previously): forward it downstream */
    if(encoder->at_end_of_stream) {
        gst_pad_push_event(encoder->srcpad, gst_event_new_eos());
    }

    return ret;
}
/* Pull-mode task function for the typefind element.
 *
 * First pushes a STREAM_START event if needed.  In MODE_TYPEFIND it
 * determines the stream's caps (forced caps, then typefind-by-content over
 * the whole upstream range, then guess-by-extension), emits have-type and
 * switches to MODE_NORMAL.  In MODE_NORMAL it pulls 4k blocks from the sink
 * pad and pushes them downstream.  Any failure jumps to `pause`, which stops
 * the task and performs EOS / error posting as appropriate.
 */
static void
gst_type_find_element_loop (GstPad * pad)
{
  GstTypeFindElement *typefind;
  GstFlowReturn ret = GST_FLOW_OK;

  typefind = GST_TYPE_FIND_ELEMENT (GST_PAD_PARENT (pad));

  if (typefind->need_stream_start) {
    gchar *stream_id;
    GstEvent *event;

    stream_id = gst_pad_create_stream_id (typefind->src,
        GST_ELEMENT_CAST (typefind), NULL);

    GST_DEBUG_OBJECT (typefind, "Pushing STREAM_START");
    event = gst_event_new_stream_start (stream_id);
    gst_event_set_group_id (event, gst_util_group_id_next ());
    gst_pad_push_event (typefind->src, event);

    typefind->need_stream_start = FALSE;
    g_free (stream_id);
  }

  if (typefind->mode == MODE_TYPEFIND) {
    GstPad *peer = NULL;
    GstCaps *found_caps = NULL;
    GstTypeFindProbability probability = GST_TYPE_FIND_NONE;

    GST_DEBUG_OBJECT (typefind, "find type in pull mode");

    /* force-caps short-circuits typefinding entirely */
    GST_OBJECT_LOCK (typefind);
    if (typefind->force_caps) {
      found_caps = gst_caps_ref (typefind->force_caps);
      probability = GST_TYPE_FIND_MAXIMUM;
    }
    GST_OBJECT_UNLOCK (typefind);

    if (!found_caps) {
      peer = gst_pad_get_peer (pad);
      if (peer) {
        gint64 size;
        gchar *ext;

        if (!gst_pad_query_duration (peer, GST_FORMAT_BYTES, &size)) {
          GST_WARNING_OBJECT (typefind, "Could not query upstream length!");
          gst_object_unref (peer);
          ret = GST_FLOW_ERROR;
          goto pause;
        }

        /* if the size is 0, we cannot continue */
        if (size == 0) {
          /* keep message in sync with message in sink event handler */
          GST_ELEMENT_ERROR (typefind, STREAM, TYPE_NOT_FOUND,
              (_("Stream contains no data.")),
              ("Can't typefind empty stream"));
          gst_object_unref (peer);
          ret = GST_FLOW_ERROR;
          goto pause;
        }
        ext = gst_type_find_get_extension (typefind, pad);

        /* run the registered typefind functions against the peer's
         * getrange function, bounded by the queried size */
        found_caps =
            gst_type_find_helper_get_range (GST_OBJECT_CAST (peer),
            GST_OBJECT_PARENT (peer),
            (GstTypeFindHelperGetRangeFunction) (GST_PAD_GETRANGEFUNC (peer)),
            (guint64) size, ext, &probability);
        g_free (ext);

        GST_DEBUG ("Found caps %" GST_PTR_FORMAT, found_caps);

        gst_object_unref (peer);
      }
    }

    /* fallback: guess from the file extension if content typefinding was
     * inconclusive */
    if (!found_caps || probability < typefind->min_probability) {
      GST_DEBUG ("Trying to guess using extension");
      gst_caps_replace (&found_caps, NULL);
      found_caps =
          gst_type_find_guess_by_extension (typefind, pad, &probability);
    }

    if (!found_caps || probability < typefind->min_probability) {
      GST_ELEMENT_ERROR (typefind, STREAM, TYPE_NOT_FOUND, (NULL), (NULL));
      gst_caps_replace (&found_caps, NULL);
      ret = GST_FLOW_ERROR;
      goto pause;
    }

    GST_DEBUG ("Emiting found caps %" GST_PTR_FORMAT, found_caps);
    gst_type_find_element_emit_have_type (typefind, probability, found_caps);
    typefind->mode = MODE_NORMAL;
    gst_caps_unref (found_caps);
  } else if (typefind->mode == MODE_NORMAL) {
    GstBuffer *outbuf = NULL;

    if (typefind->need_segment) {
      typefind->need_segment = FALSE;
      gst_pad_push_event (typefind->src,
          gst_event_new_segment (&typefind->segment));
    }

    /* Pull 4k blocks and send downstream */
    ret = gst_pad_pull_range (typefind->sink, typefind->offset, 4096, &outbuf);
    if (ret != GST_FLOW_OK)
      goto pause;

    typefind->offset += gst_buffer_get_size (outbuf);

    ret = gst_pad_push (typefind->src, outbuf);
    if (ret != GST_FLOW_OK)
      goto pause;
  } else {
    /* Error out */
    ret = GST_FLOW_ERROR;
    goto pause;
  }

  return;

pause:
  {
    const gchar *reason = gst_flow_get_name (ret);
    gboolean push_eos = FALSE;

    GST_LOG_OBJECT (typefind, "pausing task, reason %s", reason);
    gst_pad_pause_task (typefind->sink);

    if (ret == GST_FLOW_EOS) {
      /* perform EOS logic */

      if (typefind->segment.flags & GST_SEGMENT_FLAG_SEGMENT) {
        gint64 stop;

        /* for segment playback we need to post when (in stream time)
         * we stopped, this is either stop (when set) or the duration. */
        if ((stop = typefind->segment.stop) == -1)
          stop = typefind->offset;

        GST_LOG_OBJECT (typefind, "Sending segment done, at end of segment");
        gst_element_post_message (GST_ELEMENT (typefind),
            gst_message_new_segment_done (GST_OBJECT (typefind),
                GST_FORMAT_BYTES, stop));
        gst_pad_push_event (typefind->src,
            gst_event_new_segment_done (GST_FORMAT_BYTES, stop));
      } else {
        push_eos = TRUE;
      }
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
      /* for fatal errors we post an error message */
      GST_ELEMENT_FLOW_ERROR (typefind, ret);
      push_eos = TRUE;
    }
    if (push_eos) {
      /* send EOS, and prevent hanging if no streams yet */
      GST_LOG_OBJECT (typefind, "Sending EOS, at end of stream");
      gst_pad_push_event (typefind->src, gst_event_new_eos ());
    }
    return;
  }
}
/* Handle a seek event on the wildmidi element.
 *
 * Converts the seek start/stop to samples (GST_FORMAT_DEFAULT), pauses or
 * flushes the streaming task, updates the output segment under the sink
 * pad's STREAM_LOCK, seeks the WildMidi song under the object lock, sends a
 * new segment downstream and restarts the task.  Returns FALSE if no song
 * is loaded or the seek format cannot be converted, TRUE otherwise.
 */
static gboolean
gst_wildmidi_do_seek (GstWildmidi * wildmidi, GstEvent * event)
{
  gdouble rate;
  GstFormat src_format, dst_format;
  GstSeekFlags flags;
  GstSeekType start_type, stop_type;
  gint64 start, stop;
  gboolean flush, update;
#ifdef HAVE_WILDMIDI_0_2_2
  gboolean accurate;
#endif
  gboolean res;
  unsigned long int sample;
  GstSegment *segment;

  if (!wildmidi->song)
    return FALSE;

  gst_event_parse_seek (event, &rate, &src_format, &flags,
      &start_type, &start, &stop_type, &stop);

  /* convert the input format to samples */
  dst_format = GST_FORMAT_DEFAULT;
  res = TRUE;
  if (start_type != GST_SEEK_TYPE_NONE) {
    res =
        gst_wildmidi_src_convert (wildmidi, src_format, start, &dst_format,
        &start);
  }
  if (res && stop_type != GST_SEEK_TYPE_NONE) {
    res =
        gst_wildmidi_src_convert (wildmidi, src_format, stop, &dst_format,
        &stop);
  }
  /* unsupported format */
  if (!res)
    return res;

  flush = ((flags & GST_SEEK_FLAG_FLUSH) == GST_SEEK_FLAG_FLUSH);
#ifdef HAVE_WILDMIDI_0_2_2
  accurate = ((flags & GST_SEEK_FLAG_ACCURATE) == GST_SEEK_FLAG_ACCURATE);
#endif

  /* flushing makes the streaming thread return quickly; otherwise stop the
   * task so we can take the stream lock below */
  if (flush) {
    GST_DEBUG ("performing flush");
    gst_pad_push_event (wildmidi->srcpad, gst_event_new_flush_start ());
  } else {
    gst_pad_stop_task (wildmidi->sinkpad);
  }

  segment = wildmidi->o_segment;

  GST_PAD_STREAM_LOCK (wildmidi->sinkpad);

  if (flush) {
    gst_pad_push_event (wildmidi->srcpad, gst_event_new_flush_stop (TRUE));
  }

  /* update the segment now */
  gst_segment_do_seek (segment, rate, dst_format, flags,
      start_type, start, stop_type, stop, &update);

  /* we need to seek to position in the segment now, sample will be updated */
  sample = segment->position;

  GST_OBJECT_LOCK (wildmidi);
#ifdef HAVE_WILDMIDI_0_2_2
  /* accurate (sample-exact) seek is only available in WildMidi >= 0.2.2 */
  if (accurate) {
    WildMidi_SampledSeek (wildmidi->song, &sample);
  } else {
    WildMidi_FastSeek (wildmidi->song, &sample);
  }
#else
  WildMidi_FastSeek (wildmidi->song, &sample);
#endif
  GST_OBJECT_UNLOCK (wildmidi);

  /* WildMidi may have clamped/adjusted the sample position; make the
   * segment reflect where playback will actually resume */
  segment->start = segment->time = segment->position = sample;

  gst_pad_push_event (wildmidi->srcpad,
      gst_wildmidi_get_new_segment_event (wildmidi, GST_FORMAT_TIME));

  gst_pad_start_task (wildmidi->sinkpad,
      (GstTaskFunction) gst_wildmidi_loop, wildmidi->sinkpad, NULL);

  wildmidi->discont = TRUE;

  GST_PAD_STREAM_UNLOCK (wildmidi->sinkpad);
  GST_DEBUG ("seek done");

  return TRUE;
}
/* Sink pad event handler for the typefind element.
 *
 * In MODE_TYPEFIND, events are handled specially: CAPS short-circuits
 * typefinding, GAP is dropped, EOS forces a best-effort typefind, flushes
 * clear cached events/adapter data, and other events are either forwarded
 * immediately (if they sort before CAPS) or cached to be sent after the
 * caps are known.  In MODE_NORMAL everything is forwarded; in MODE_ERROR
 * events are swallowed.  Ownership note: each branch either pushes,
 * unrefs, or stores `event` — exactly one of the three.
 */
static gboolean
gst_type_find_element_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  gboolean res = FALSE;
  GstTypeFindElement *typefind = GST_TYPE_FIND_ELEMENT (parent);

  GST_DEBUG_OBJECT (typefind, "got %s event in mode %d",
      GST_EVENT_TYPE_NAME (event), typefind->mode);

  switch (typefind->mode) {
    case MODE_TYPEFIND:
      switch (GST_EVENT_TYPE (event)) {
        case GST_EVENT_CAPS:
        {
          GstCaps *caps;

          /* Parse and push out our caps and data */
          gst_event_parse_caps (event, &caps);
          res = gst_type_find_element_setcaps (typefind, caps);

          gst_event_unref (event);
          break;
        }
        case GST_EVENT_GAP:
        {
          GST_FIXME_OBJECT (typefind,
              "GAP events during typefinding not handled properly");

          /* FIXME: These would need to be inserted in the stream at
           * the right position between buffers, but we combine all
           * buffers with a GstAdapter. Drop the GAP event for now,
           * which will only cause an implicit GAP between buffers. */
          gst_event_unref (event);
          res = TRUE;
          break;
        }
        case GST_EVENT_EOS:
        {
          /* force typefinding with whatever data we have buffered */
          GST_INFO_OBJECT (typefind, "Got EOS and no type found yet");
          gst_type_find_element_chain_do_typefinding (typefind, FALSE, TRUE);

          res = gst_pad_push_event (typefind->src, event);
          break;
        }
        case GST_EVENT_FLUSH_STOP:{
          GList *l;

          GST_OBJECT_LOCK (typefind);

          /* preserve sticky events (except SEGMENT/EOS) on the src pad,
           * then drop the whole cache and the buffered data */
          for (l = typefind->cached_events; l; l = l->next) {
            if (GST_EVENT_IS_STICKY (l->data) &&
                GST_EVENT_TYPE (l->data) != GST_EVENT_SEGMENT &&
                GST_EVENT_TYPE (l->data) != GST_EVENT_EOS) {
              gst_pad_store_sticky_event (typefind->src, l->data);
            }
            gst_event_unref (l->data);
          }

          g_list_free (typefind->cached_events);
          typefind->cached_events = NULL;
          gst_adapter_clear (typefind->adapter);

          GST_OBJECT_UNLOCK (typefind);

          /* fall through */
        }
        case GST_EVENT_FLUSH_START:
          res = gst_pad_push_event (typefind->src, event);
          break;
        default:
          /* Forward events that would happen before the caps event
           * directly instead of storing them. There's no reason not
           * to send them directly and we should only store events
           * for later sending that would need to come after the caps
           * event */
          if (GST_EVENT_TYPE (event) < GST_EVENT_CAPS) {
            res = gst_pad_push_event (typefind->src, event);
          } else {
            GST_DEBUG_OBJECT (typefind, "Saving %s event to send later",
                GST_EVENT_TYPE_NAME (event));
            GST_OBJECT_LOCK (typefind);
            typefind->cached_events =
                g_list_append (typefind->cached_events, event);
            GST_OBJECT_UNLOCK (typefind);
            res = TRUE;
          }
          break;
      }
      break;
    case MODE_NORMAL:
      res = gst_pad_push_event (typefind->src, event);
      break;
    case MODE_ERROR:
      break;
    default:
      g_assert_not_reached ();
  }
  return res;
}
static GstFlowReturn gst_wildmidi_parse_song (GstWildmidi * wildmidi) { struct _WM_Info *info; GstCaps *outcaps; guint8 *data; guint size; GST_DEBUG_OBJECT (wildmidi, "Parsing song"); size = gst_adapter_available (wildmidi->adapter); data = gst_adapter_take (wildmidi->adapter, size); /* this method takes our memory block */ GST_OBJECT_LOCK (wildmidi); wildmidi->song = WildMidi_OpenBuffer (data, size); if (!wildmidi->song) goto open_failed; #ifdef HAVE_WILDMIDI_0_2_2 WildMidi_LoadSamples (wildmidi->song); #endif #ifdef HAVE_WILDMIDI_0_2_2 WildMidi_SetOption (wildmidi->song, WM_MO_LINEAR_VOLUME, wildmidi->linear_volume); WildMidi_SetOption (wildmidi->song, WM_MO_EXPENSIVE_INTERPOLATION, wildmidi->high_quality); #else WildMidi_SetOption (wildmidi->song, WM_MO_LOG_VOLUME, !wildmidi->linear_volume); WildMidi_SetOption (wildmidi->song, WM_MO_ENHANCED_RESAMPLING, wildmidi->high_quality); #endif info = WildMidi_GetInfo (wildmidi->song); GST_OBJECT_UNLOCK (wildmidi); wildmidi->o_len = info->approx_total_samples; outcaps = gst_caps_copy (gst_pad_get_pad_template_caps (wildmidi->srcpad)); gst_pad_set_caps (wildmidi->srcpad, outcaps); gst_caps_unref (outcaps); /* we keep an internal segment in samples */ gst_segment_init (wildmidi->o_segment, GST_FORMAT_DEFAULT); gst_pad_push_event (wildmidi->srcpad, gst_wildmidi_get_new_segment_event (wildmidi, GST_FORMAT_TIME)); GST_DEBUG_OBJECT (wildmidi, "Parsing song done"); return GST_FLOW_OK; /* ERRORS */ open_failed: { GST_OBJECT_UNLOCK (wildmidi); GST_ELEMENT_ERROR (wildmidi, STREAM, DECODE, (NULL), ("Unable to parse midi data")); return GST_FLOW_ERROR; } }
/* Streaming task for the Musepack decoder (GStreamer 0.10 pull mode).
 *
 * Lazily initializes the stream on the first iteration, allocates an output
 * buffer, decodes one frame (old or new libmpcdec API, selected at compile
 * time), timestamps it against the configured segment and pushes it
 * downstream.  Decode errors, allocation failures and downstream errors
 * pause the task; end-of-stream or end of a non-SEGMENT-flagged segment
 * sends EOS first.
 */
static void
gst_musepackdec_loop (GstPad * sinkpad)
{
  GstMusepackDec *musepackdec;
  GstFlowReturn flow;
  GstBuffer *out;

#ifdef MPC_IS_OLD_API
  guint32 update_acc, update_bits;
#else
  mpc_frame_info frame;
  mpc_status err;
#endif
  gint num_samples, samplerate, bitspersample;

  musepackdec = GST_MUSEPACK_DEC (GST_PAD_PARENT (sinkpad));

  samplerate = g_atomic_int_get (&musepackdec->rate);

  /* rate == 0 means the stream header has not been read yet */
  if (samplerate == 0) {
    if (!gst_musepack_stream_init (musepackdec))
      goto pause_task;

    gst_musepackdec_send_newsegment (musepackdec);
    samplerate = g_atomic_int_get (&musepackdec->rate);
  }

  bitspersample = g_atomic_int_get (&musepackdec->bps);

  flow = gst_pad_alloc_buffer_and_set_caps (musepackdec->srcpad, -1,
      MPC_DECODER_BUFFER_LENGTH * 4, GST_PAD_CAPS (musepackdec->srcpad), &out);

  if (flow != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (musepackdec, "Flow: %s", gst_flow_get_name (flow));
    goto pause_task;
  }

#ifdef MPC_IS_OLD_API
  num_samples = mpc_decoder_decode (musepackdec->d,
      (MPC_SAMPLE_FORMAT *) GST_BUFFER_DATA (out), &update_acc, &update_bits);

  /* negative = decode error, zero = end of stream */
  if (num_samples < 0) {
    GST_ERROR_OBJECT (musepackdec, "Failed to decode sample");
    GST_ELEMENT_ERROR (musepackdec, STREAM, DECODE, (NULL), (NULL));
    goto pause_task;
  } else if (num_samples == 0) {
    goto eos_and_pause;
  }
#else
  frame.buffer = (MPC_SAMPLE_FORMAT *) GST_BUFFER_DATA (out);
  err = mpc_demux_decode (musepackdec->d, &frame);

  /* frame.bits == -1 signals end of stream in the new API */
  if (err != MPC_STATUS_OK) {
    GST_ERROR_OBJECT (musepackdec, "Failed to decode sample");
    GST_ELEMENT_ERROR (musepackdec, STREAM, DECODE, (NULL), (NULL));
    goto pause_task;
  } else if (frame.bits == -1) {
    goto eos_and_pause;
  }

  num_samples = frame.samples;
#endif

  GST_BUFFER_SIZE (out) = num_samples * bitspersample;

  /* offset/timestamps are derived from the running sample count */
  GST_BUFFER_OFFSET (out) = musepackdec->segment.last_stop;
  GST_BUFFER_TIMESTAMP (out) =
      gst_util_uint64_scale_int (musepackdec->segment.last_stop,
      GST_SECOND, samplerate);
  GST_BUFFER_DURATION (out) =
      gst_util_uint64_scale_int (num_samples, GST_SECOND, samplerate);

  musepackdec->segment.last_stop += num_samples;

  GST_LOG_OBJECT (musepackdec, "Pushing buffer, timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out)));

  flow = gst_pad_push (musepackdec->srcpad, out);
  if (flow != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (musepackdec, "Flow: %s", gst_flow_get_name (flow));
    goto pause_task;
  }

  /* check if we're at the end of a configured segment */
  if (musepackdec->segment.stop != -1 &&
      musepackdec->segment.last_stop >= musepackdec->segment.stop) {
    gint64 stop_time;

    GST_DEBUG_OBJECT (musepackdec, "Reached end of configured segment");

    /* not a segment seek: plain EOS */
    if ((musepackdec->segment.flags & GST_SEEK_FLAG_SEGMENT) == 0)
      goto eos_and_pause;

    GST_DEBUG_OBJECT (musepackdec, "Posting SEGMENT_DONE message");

    stop_time = gst_util_uint64_scale_int (musepackdec->segment.stop,
        GST_SECOND, samplerate);

    gst_element_post_message (GST_ELEMENT (musepackdec),
        gst_message_new_segment_done (GST_OBJECT (musepackdec),
            GST_FORMAT_TIME, stop_time));

    goto pause_task;
  }

  return;

eos_and_pause:
  {
    GST_DEBUG_OBJECT (musepackdec, "sending EOS event");
    gst_pad_push_event (musepackdec->srcpad, gst_event_new_eos ());
    /* fall through to pause */
  }

pause_task:
  {
    GST_DEBUG_OBJECT (musepackdec, "Pausing task");
    gst_pad_pause_task (sinkpad);
    return;
  }
}
static void gst_wildmidi_loop (GstPad * sinkpad) { GstWildmidi *wildmidi = GST_WILDMIDI (GST_PAD_PARENT (sinkpad)); GstFlowReturn ret; switch (wildmidi->state) { case GST_WILDMIDI_STATE_LOAD: { GstBuffer *buffer = NULL; GST_DEBUG_OBJECT (wildmidi, "loading song"); ret = gst_pad_pull_range (wildmidi->sinkpad, wildmidi->offset, -1, &buffer); if (ret == GST_FLOW_EOS) { GST_DEBUG_OBJECT (wildmidi, "Song loaded"); wildmidi->state = GST_WILDMIDI_STATE_PARSE; } else if (ret != GST_FLOW_OK) { GST_ELEMENT_ERROR (wildmidi, STREAM, DECODE, (NULL), ("Unable to read song")); goto pause; } else { GST_DEBUG_OBJECT (wildmidi, "pushing buffer"); gst_adapter_push (wildmidi->adapter, buffer); wildmidi->offset += gst_buffer_get_size (buffer); } break; } case GST_WILDMIDI_STATE_PARSE: ret = gst_wildmidi_parse_song (wildmidi); if (ret != GST_FLOW_OK) goto pause; wildmidi->state = GST_WILDMIDI_STATE_PLAY; break; case GST_WILDMIDI_STATE_PLAY: ret = gst_wildmidi_do_play (wildmidi); if (ret != GST_FLOW_OK) goto pause; break; default: break; } return; pause: { const gchar *reason = gst_flow_get_name (ret); GstEvent *event; GST_DEBUG_OBJECT (wildmidi, "pausing task, reason %s", reason); gst_pad_pause_task (sinkpad); if (ret == GST_FLOW_EOS) { /* perform EOS logic */ event = gst_event_new_eos (); gst_pad_push_event (wildmidi->srcpad, event); } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) { event = gst_event_new_eos (); /* for fatal errors we post an error message, post the error * first so the app knows about the error first. */ GST_ELEMENT_ERROR (wildmidi, STREAM, FAILED, ("Internal data flow error."), ("streaming task paused, reason %s (%d)", reason, ret)); gst_pad_push_event (wildmidi->srcpad, event); } } }
/* Pull-mode streaming task for the raw parser (GStreamer 0.10).
 *
 * Sends any pending close/start segment events, computes a pull size (a
 * multiple of the frame size, batched up to ~4k when the subclass allows
 * multiple frames per buffer), clamps it against the upstream length
 * (forward playback) or walks backwards (reverse playback), pulls the data
 * and pushes it via gst_raw_parse_push_buffer().  All failures funnel into
 * `pause`, which handles SEGMENT_DONE / EOS / error posting.
 */
static void
gst_raw_parse_loop (GstElement * element)
{
  GstRawParse *rp = GST_RAW_PARSE (element);
  GstRawParseClass *rp_class = GST_RAW_PARSE_GET_CLASS (rp);
  GstFlowReturn ret;
  GstBuffer *buffer;
  gint size;

  if (!gst_raw_parse_set_src_caps (rp))
    goto no_caps;

  /* segment events queued by the seek handler are sent from this thread */
  if (rp->close_segment) {
    GST_DEBUG_OBJECT (rp, "sending close segment");
    gst_pad_push_event (rp->srcpad, rp->close_segment);
    rp->close_segment = NULL;
  }
  if (rp->start_segment) {
    GST_DEBUG_OBJECT (rp, "sending start segment");
    gst_pad_push_event (rp->srcpad, rp->start_segment);
    rp->start_segment = NULL;
  }

  /* batch small frames into ~4k pulls when the subclass allows it */
  if (rp_class->multiple_frames_per_buffer && rp->framesize < 4096)
    size = 4096 - (4096 % rp->framesize);
  else
    size = rp->framesize;

  if (rp->segment.rate >= 0) {
    /* forward: refresh the upstream length when we would read past the
     * cached value, and clamp to whole frames near the end */
    if (rp->offset + size > rp->upstream_length) {
      GstFormat fmt = GST_FORMAT_BYTES;

      if (!gst_pad_query_peer_duration (rp->sinkpad, &fmt,
              &rp->upstream_length)) {
        GST_WARNING_OBJECT (rp,
            "Could not get upstream duration, trying to pull frame by frame");
        size = rp->framesize;
      } else if (rp->upstream_length < rp->offset + rp->framesize) {
        ret = GST_FLOW_UNEXPECTED;
        goto pause;
      } else if (rp->offset + size > rp->upstream_length) {
        size = rp->upstream_length - rp->offset;
        size -= size % rp->framesize;
      }
    }
  } else {
    /* reverse playback: step the offset backwards; offset 0 means we
     * already delivered the first frame */
    if (rp->offset == 0) {
      ret = GST_FLOW_UNEXPECTED;
      goto pause;
    } else if (rp->offset < size) {
      size -= rp->offset;
    }
    rp->offset -= size;
  }

  ret = gst_pad_pull_range (rp->sinkpad, rp->offset, size, &buffer);

  if (ret != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (rp, "pull_range (%" G_GINT64_FORMAT ", %u) "
        "failed, flow: %s", rp->offset, size, gst_flow_get_name (ret));
    buffer = NULL;
    goto pause;
  }

  if (GST_BUFFER_SIZE (buffer) < size) {
    GST_DEBUG_OBJECT (rp, "Short read at offset %" G_GINT64_FORMAT
        ", got only %u of %u bytes", rp->offset, GST_BUFFER_SIZE (buffer),
        size);

    /* trim a short multi-frame read to whole frames; a short single-frame
     * read means end of stream */
    if (size > rp->framesize) {
      GST_BUFFER_SIZE (buffer) -= GST_BUFFER_SIZE (buffer) % rp->framesize;
    } else {
      gst_buffer_unref (buffer);
      buffer = NULL;
      ret = GST_FLOW_UNEXPECTED;
      goto pause;
    }
  }

  ret = gst_raw_parse_push_buffer (rp, buffer);
  if (ret != GST_FLOW_OK)
    goto pause;

  return;

  /* ERRORS */
no_caps:
  {
    GST_ERROR_OBJECT (rp, "could not negotiate caps");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto pause;
  }
pause:
  {
    const gchar *reason = gst_flow_get_name (ret);

    GST_LOG_OBJECT (rp, "pausing task, reason %s", reason);
    gst_pad_pause_task (rp->sinkpad);

    if (ret == GST_FLOW_UNEXPECTED) {
      /* segment seeks get SEGMENT_DONE; plain playback gets EOS */
      if (rp->segment.flags & GST_SEEK_FLAG_SEGMENT) {
        GstClockTime stop;

        GST_LOG_OBJECT (rp, "Sending segment done");

        if ((stop = rp->segment.stop) == -1)
          stop = rp->segment.duration;

        gst_element_post_message (GST_ELEMENT_CAST (rp),
            gst_message_new_segment_done (GST_OBJECT_CAST (rp),
                rp->segment.format, stop));
      } else {
        GST_LOG_OBJECT (rp, "Sending EOS, at end of stream");
        gst_pad_push_event (rp->srcpad, gst_event_new_eos ());
      }
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_UNEXPECTED) {
      GST_ELEMENT_ERROR (rp, STREAM, FAILED,
          ("Internal data stream error."),
          ("stream stopped, reason %s", reason));
      gst_pad_push_event (rp->srcpad, gst_event_new_eos ());
    }
    return;
  }
}
/* Output-side task for the Android MediaCodec (AMC) audio decoder.
 *
 * Dequeues one output buffer from the codec (retrying on timeout and on
 * output-format changes), copies the decoded PCM into a GstBuffer —
 * optionally reordering 16-bit channels via self->reorder_map — and
 * finishes it downstream, either directly or via an adapter when the codec
 * has a fixed samples-per-frame (self->spf != -1).  EOS and drain handling
 * is coordinated with the input side through drain_lock/drain_cond.  Every
 * error label pushes EOS, pauses the task, records GST_FLOW_ERROR (or
 * FLUSHING) in downstream_flow_ret and wakes any waiting drain.
 */
static void
gst_amc_audio_dec_loop (GstAmcAudioDec * self)
{
  GstFlowReturn flow_ret = GST_FLOW_OK;
  gboolean is_eos;
  GstAmcBuffer *buf;
  GstAmcBufferInfo buffer_info;
  gint idx;
  GError *err = NULL;

  GST_AUDIO_DECODER_STREAM_LOCK (self);

retry:
  /*if (self->input_caps_changed) { idx = INFO_OUTPUT_FORMAT_CHANGED; } else { */
  GST_DEBUG_OBJECT (self, "Waiting for available output buffer");
  GST_AUDIO_DECODER_STREAM_UNLOCK (self);
  /* Wait at most 100ms here, some codecs don't fail dequeueing if
   * the codec is flushing, causing deadlocks during shutdown */
  idx =
      gst_amc_codec_dequeue_output_buffer (self->codec, &buffer_info, 100000,
      &err);
  GST_AUDIO_DECODER_STREAM_LOCK (self);
  /*} */

  if (idx < 0) {
    if (self->flushing) {
      g_clear_error (&err);
      goto flushing;
    }

    /* negative indices are status codes, not buffer indices */
    switch (idx) {
      case INFO_OUTPUT_BUFFERS_CHANGED:
        /* Handled internally */
        g_assert_not_reached ();
        break;
      case INFO_OUTPUT_FORMAT_CHANGED:{
        GstAmcFormat *format;
        gchar *format_string;

        GST_DEBUG_OBJECT (self, "Output format has changed");

        format = gst_amc_codec_get_output_format (self->codec, &err);
        if (!format)
          goto format_error;

        format_string = gst_amc_format_to_string (format, &err);
        if (err) {
          gst_amc_format_free (format);
          goto format_error;
        }
        GST_DEBUG_OBJECT (self, "Got new output format: %s", format_string);
        g_free (format_string);

        if (!gst_amc_audio_dec_set_src_caps (self, format)) {
          gst_amc_format_free (format);
          goto format_error;
        }
        gst_amc_format_free (format);

        goto retry;
      }
      case INFO_TRY_AGAIN_LATER:
        GST_DEBUG_OBJECT (self, "Dequeueing output buffer timed out");
        goto retry;
      case G_MININT:
        GST_ERROR_OBJECT (self, "Failure dequeueing output buffer");
        goto dequeue_error;
      default:
        g_assert_not_reached ();
        break;
    }

    goto retry;
  }

  GST_DEBUG_OBJECT (self,
      "Got output buffer at index %d: offset %d size %d time %" G_GINT64_FORMAT
      " flags 0x%08x", idx, buffer_info.offset, buffer_info.size,
      buffer_info.presentation_time_us, buffer_info.flags);

  is_eos = ! !(buffer_info.flags & BUFFER_FLAG_END_OF_STREAM);

  buf = gst_amc_codec_get_output_buffer (self->codec, idx, &err);
  if (err)
    goto failed_to_get_output_buffer;
  else if (!buf)
    goto got_null_output_buffer;

  if (buffer_info.size > 0) {
    GstBuffer *outbuf;
    GstMapInfo minfo;

    /* This sometimes happens at EOS or if the input is not properly framed,
     * let's handle it gracefully by allocating a new buffer for the current
     * caps and filling it */
    if (buffer_info.size % self->info.bpf != 0)
      goto invalid_buffer_size;

    outbuf =
        gst_audio_decoder_allocate_output_buffer (GST_AUDIO_DECODER (self),
        buffer_info.size);
    if (!outbuf)
      goto failed_allocate;

    gst_buffer_map (outbuf, &minfo, GST_MAP_WRITE);
    if (self->needs_reorder) {
      /* interleaved 16-bit samples: permute channels per reorder_map */
      gint i, n_samples, c, n_channels;
      gint *reorder_map = self->reorder_map;
      gint16 *dest, *source;

      dest = (gint16 *) minfo.data;
      source = (gint16 *) (buf->data + buffer_info.offset);
      n_samples = buffer_info.size / self->info.bpf;
      n_channels = self->info.channels;

      for (i = 0; i < n_samples; i++) {
        for (c = 0; c < n_channels; c++) {
          dest[i * n_channels + reorder_map[c]] = source[i * n_channels + c];
        }
      }
    } else {
      orc_memcpy (minfo.data, buf->data + buffer_info.offset,
          buffer_info.size);
    }
    gst_buffer_unmap (outbuf, &minfo);

    /* spf != -1: aggregate in the adapter so we can finish whole codec
     * frames; otherwise finish this buffer immediately */
    if (self->spf != -1) {
      gst_adapter_push (self->output_adapter, outbuf);
    } else {
      flow_ret =
          gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (self), outbuf, 1);
    }
  }

  gst_amc_buffer_free (buf);
  buf = NULL;

  if (self->spf != -1) {
    GstBuffer *outbuf;
    guint avail = gst_adapter_available (self->output_adapter);
    guint nframes;

    /* On EOS we take the complete adapter content, no matter
     * if it is a multiple of the codec frame size or not.
     * Otherwise we take a multiple of codec frames and push
     * them downstream */
    avail /= self->info.bpf;
    if (!is_eos) {
      nframes = avail / self->spf;
      avail = nframes * self->spf;
    } else {
      nframes = (avail + self->spf - 1) / self->spf;
    }
    avail *= self->info.bpf;

    if (avail > 0) {
      outbuf = gst_adapter_take_buffer (self->output_adapter, avail);
      flow_ret =
          gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (self), outbuf,
          nframes);
    }
  }

  if (!gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err)) {
    if (self->flushing) {
      g_clear_error (&err);
      goto flushing;
    }
    goto failed_release;
  }

  if (is_eos || flow_ret == GST_FLOW_EOS) {
    /* wake the drain waiter outside the stream lock to avoid deadlock */
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    if (self->draining) {
      GST_DEBUG_OBJECT (self, "Drained");
      self->draining = FALSE;
      g_cond_broadcast (&self->drain_cond);
    } else if (flow_ret == GST_FLOW_OK) {
      GST_DEBUG_OBJECT (self, "Component signalled EOS");
      flow_ret = GST_FLOW_EOS;
    }
    g_mutex_unlock (&self->drain_lock);
    GST_AUDIO_DECODER_STREAM_LOCK (self);
  } else {
    GST_DEBUG_OBJECT (self, "Finished frame: %s",
        gst_flow_get_name (flow_ret));
  }

  self->downstream_flow_ret = flow_ret;

  if (flow_ret != GST_FLOW_OK)
    goto flow_error;

  GST_AUDIO_DECODER_STREAM_UNLOCK (self);

  return;

dequeue_error:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
        gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

format_error:
  {
    if (err)
      GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    else
      GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
          ("Failed to handle format"));
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
        gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

failed_release:
  {
    GST_AUDIO_DECODER_ERROR_FROM_ERROR (self, err);
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
        gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

flushing:
  {
    GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_FLUSHING;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    return;
  }

flow_error:
  {
    if (flow_ret == GST_FLOW_EOS) {
      GST_DEBUG_OBJECT (self, "EOS");
      gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
          gst_event_new_eos ());
      gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    } else if (flow_ret < GST_FLOW_EOS) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED,
          ("Internal data stream error."), ("stream stopped, reason %s",
              gst_flow_get_name (flow_ret)));
      gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
          gst_event_new_eos ());
      gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    } else if (flow_ret == GST_FLOW_FLUSHING) {
      GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
      gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    }
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

failed_to_get_output_buffer:
  {
    GST_AUDIO_DECODER_ERROR_FROM_ERROR (self, err);
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
        gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

got_null_output_buffer:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
        ("Got no output buffer"));
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
        gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

invalid_buffer_size:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
        ("Invalid buffer size %u (bfp %d)", buffer_info.size,
            self->info.bpf));
    gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err);
    if (err && !self->flushing)
      GST_ELEMENT_WARNING_FROM_ERROR (self, err);
    g_clear_error (&err);
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
        gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

failed_allocate:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
        ("Failed to allocate output buffer"));
    gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err);
    if (err && !self->flushing)
      GST_ELEMENT_WARNING_FROM_ERROR (self, err);
    g_clear_error (&err);
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
        gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }
}
/* Handle a pull-mode seek on the raw parser (GStreamer 0.10).
 *
 * Converts the seek positions to time, stops/flushes the streaming task,
 * computes the new byte offset (aligned to a frame boundary) under the
 * STREAM_LOCK, queues close/start new-segment events for the streaming
 * thread and restarts the task.  `event` may be NULL for an internal
 * restart (no reconfiguration, just re-arm the task).  Returns TRUE when
 * the seek position could be converted to bytes.
 */
static gboolean
gst_raw_parse_handle_seek_pull (GstRawParse * rp, GstEvent * event)
{
  gdouble rate;
  GstFormat format;
  GstSeekFlags flags;
  GstSeekType start_type, stop_type;
  gint64 start, stop;
  gint64 last_stop;
  gboolean ret = FALSE;
  gboolean flush;
  GstSegment seeksegment;

  if (event) {
    gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
        &stop_type, &stop);

    /* convert input offsets to time */
    ret = gst_raw_parse_convert (rp, format, start, GST_FORMAT_TIME, &start);
    ret &= gst_raw_parse_convert (rp, format, stop, GST_FORMAT_TIME, &stop);
    if (!ret)
      goto convert_failed;

    GST_DEBUG_OBJECT (rp, "converted start - stop to time");

    format = GST_FORMAT_TIME;

    gst_event_unref (event);
  } else {
    format = GST_FORMAT_TIME;
    flags = 0;
  }

  flush = ((flags & GST_SEEK_FLAG_FLUSH) != 0);

  /* start flushing up and downstream so that the loop function pauses and we
   * can acquire the STREAM_LOCK. */
  if (flush) {
    GST_LOG_OBJECT (rp, "flushing");
    gst_pad_push_event (rp->sinkpad, gst_event_new_flush_start ());
    gst_pad_push_event (rp->srcpad, gst_event_new_flush_start ());
  } else {
    GST_LOG_OBJECT (rp, "pause task");
    gst_pad_pause_task (rp->sinkpad);
  }

  GST_PAD_STREAM_LOCK (rp->sinkpad);

  /* work on a copy so a failed seek leaves rp->segment untouched */
  memcpy (&seeksegment, &rp->segment, sizeof (GstSegment));

  if (event) {
    /* configure the seek values */
    gst_segment_set_seek (&seeksegment, rate, format, flags,
        start_type, start, stop_type, stop, NULL);
  }

  /* get the desired position */
  last_stop = seeksegment.last_stop;

  GST_LOG_OBJECT (rp, "seeking to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (last_stop));

  /* convert the desired position to bytes */
  ret =
      gst_raw_parse_convert (rp, format, last_stop, GST_FORMAT_BYTES,
      &last_stop);

  /* prepare for streaming */
  if (flush) {
    GST_LOG_OBJECT (rp, "stop flush");
    gst_pad_push_event (rp->sinkpad, gst_event_new_flush_stop ());
    gst_pad_push_event (rp->srcpad, gst_event_new_flush_stop ());
  } else if (ret && rp->running) {
    /* we are running the current segment and doing a non-flushing seek,
     * close the segment first based on the last_stop. */
    GST_DEBUG_OBJECT (rp, "prepare close segment %" G_GINT64_FORMAT
        " to %" G_GINT64_FORMAT, rp->segment.start, rp->segment.last_stop);

    /* queue the segment for sending in the stream thread */
    if (rp->close_segment)
      gst_event_unref (rp->close_segment);
    rp->close_segment =
        gst_event_new_new_segment_full (TRUE,
        rp->segment.rate, rp->segment.applied_rate, rp->segment.format,
        rp->segment.start, rp->segment.last_stop, rp->segment.time);
  }

  if (ret) {
    /* seek done */

    /* Seek on a frame boundary */
    last_stop -= last_stop % rp->framesize;
    rp->offset = last_stop;
    rp->n_frames = last_stop / rp->framesize;

    GST_LOG_OBJECT (rp, "seeking to bytes %" G_GINT64_FORMAT, last_stop);

    memcpy (&rp->segment, &seeksegment, sizeof (GstSegment));

    if (rp->segment.flags & GST_SEEK_FLAG_SEGMENT) {
      gst_element_post_message (GST_ELEMENT_CAST (rp),
          gst_message_new_segment_start (GST_OBJECT_CAST (rp),
              rp->segment.format, rp->segment.last_stop));
    }

    /* for deriving a stop position for the playback segment from the seek
     * segment, we must take the duration when the stop is not set */
    if ((stop = rp->segment.stop) == -1)
      stop = rp->segment.duration;

    GST_DEBUG_OBJECT (rp, "preparing newsegment from %" G_GINT64_FORMAT
        " to %" G_GINT64_FORMAT, rp->segment.start, stop);

    /* now replace the old segment so that we send it in the stream thread the
     * next time it is scheduled. */
    if (rp->start_segment)
      gst_event_unref (rp->start_segment);
    if (rp->segment.rate >= 0.0) {
      /* forward, we send data from last_stop to stop */
      rp->start_segment =
          gst_event_new_new_segment_full (FALSE,
          rp->segment.rate, rp->segment.applied_rate, rp->segment.format,
          rp->segment.last_stop, stop, rp->segment.time);
    } else {
      /* reverse, we send data from last_stop to start */
      rp->start_segment =
          gst_event_new_new_segment_full (FALSE,
          rp->segment.rate, rp->segment.applied_rate, rp->segment.format,
          rp->segment.start, rp->segment.last_stop, rp->segment.time);
    }
  }
  rp->discont = TRUE;

  GST_LOG_OBJECT (rp, "start streaming");
  rp->running = TRUE;
  gst_pad_start_task (rp->sinkpad, (GstTaskFunction) gst_raw_parse_loop, rp);

  GST_PAD_STREAM_UNLOCK (rp->sinkpad);

  return ret;

  /* ERRORS */
convert_failed:
  {
    GST_DEBUG_OBJECT (rp, "Seek failed: couldn't convert to byte positions");
    return FALSE;
  }
}
/* chain function
 * this function does the actual processing
 *
 * Parses an IVF ("DKIF") container carrying VP8: first reads the 32-byte
 * file header (validating magic, version, header size and fourcc, and
 * configuring src caps + framerate), then extracts 12-byte frame headers
 * followed by frame payloads, timestamping each frame from its PTS and
 * pushing it downstream.
 *
 * Fix: `res` was declared `gboolean` but carries GstFlowReturn values
 * (including negative error codes from gst_pad_push and the function's own
 * return) — declare it with the correct type.
 */
static GstFlowReturn
gst_ivf_parse_chain (GstPad * pad, GstBuffer * buf)
{
  GstIvfParse *ivf = GST_IVF_PARSE (GST_OBJECT_PARENT (pad));
  GstFlowReturn res;

  /* lazy creation of the adapter */
  if (G_UNLIKELY (ivf->adapter == NULL)) {
    ivf->adapter = gst_adapter_new ();
  }

  GST_LOG_OBJECT (ivf, "Pushing buffer of size %u to adapter",
      GST_BUFFER_SIZE (buf));

  gst_adapter_push (ivf->adapter, buf); /* adapter takes ownership of buf */

  res = GST_FLOW_OK;

  switch (ivf->state) {
    case GST_IVF_PARSE_START:
      if (gst_adapter_available (ivf->adapter) >= 32) {
        GstCaps *caps;

        /* 32-byte IVF file header, all fields little-endian */
        const guint8 *data = gst_adapter_peek (ivf->adapter, 32);
        guint32 magic = GST_READ_UINT32_LE (data);
        guint16 version = GST_READ_UINT16_LE (data + 4);
        guint16 header_size = GST_READ_UINT16_LE (data + 6);
        guint32 fourcc = GST_READ_UINT32_LE (data + 8);
        guint16 width = GST_READ_UINT16_LE (data + 12);
        guint16 height = GST_READ_UINT16_LE (data + 14);
        guint32 rate_num = GST_READ_UINT32_LE (data + 16);
        guint32 rate_den = GST_READ_UINT32_LE (data + 20);
#ifndef GST_DISABLE_GST_DEBUG
        guint32 num_frames = GST_READ_UINT32_LE (data + 24);
#endif
        /* last 4 bytes unused */
        gst_adapter_flush (ivf->adapter, 32);

        if (magic != GST_MAKE_FOURCC ('D', 'K', 'I', 'F') ||
            version != 0 || header_size != 32 ||
            fourcc != GST_MAKE_FOURCC ('V', 'P', '8', '0')) {
          GST_ELEMENT_ERROR (ivf, STREAM, WRONG_TYPE, (NULL), (NULL));
          return GST_FLOW_ERROR;
        }

        /* create src pad caps */
        caps = gst_caps_new_simple ("video/x-vp8",
            "width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
            "framerate", GST_TYPE_FRACTION, rate_num, rate_den, NULL);
        GST_INFO_OBJECT (ivf, "Found stream: %" GST_PTR_FORMAT, caps);
        GST_LOG_OBJECT (ivf, "Stream has %d frames", num_frames);
        gst_pad_set_caps (ivf->srcpad, caps);
        gst_caps_unref (caps);

        /* keep framerate in instance for convenience */
        ivf->rate_num = rate_num;
        ivf->rate_den = rate_den;

        gst_pad_push_event (ivf->srcpad,
            gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));

        /* move along */
        ivf->state = GST_IVF_PARSE_DATA;
      } else {
        GST_LOG_OBJECT (ivf, "Header data not yet available.");
        break;
      }

      /* fall through */

    case GST_IVF_PARSE_DATA:
      /* each frame: 12-byte header (size, 64-bit pts) + payload */
      while (gst_adapter_available (ivf->adapter) > 12) {
        const guint8 *data = gst_adapter_peek (ivf->adapter, 12);
        guint32 frame_size = GST_READ_UINT32_LE (data);
        guint64 frame_pts = GST_READ_UINT64_LE (data + 4);

        GST_LOG_OBJECT (ivf,
            "Read frame header: size %u, pts %" G_GUINT64_FORMAT,
            frame_size, frame_pts);

        if (gst_adapter_available (ivf->adapter) >= 12 + frame_size) {
          GstBuffer *frame;

          gst_adapter_flush (ivf->adapter, 12);

          frame = gst_adapter_take_buffer (ivf->adapter, frame_size);
          gst_buffer_set_caps (frame, GST_PAD_CAPS (ivf->srcpad));
          /* pts is in framerate units; scale to nanoseconds */
          GST_BUFFER_TIMESTAMP (frame) =
              gst_util_uint64_scale_int (GST_SECOND * frame_pts,
              ivf->rate_den, ivf->rate_num);
          GST_BUFFER_DURATION (frame) =
              gst_util_uint64_scale_int (GST_SECOND, ivf->rate_den,
              ivf->rate_num);

          GST_DEBUG_OBJECT (ivf,
              "Pushing frame of size %u, ts %" GST_TIME_FORMAT ", dur %"
              GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT ", off_end %"
              G_GUINT64_FORMAT, GST_BUFFER_SIZE (frame),
              GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (frame)),
              GST_TIME_ARGS (GST_BUFFER_DURATION (frame)),
              GST_BUFFER_OFFSET (frame), GST_BUFFER_OFFSET_END (frame));

          res = gst_pad_push (ivf->srcpad, frame);
          if (res != GST_FLOW_OK)
            break;
        } else {
          GST_LOG_OBJECT (ivf, "Frame data not yet available.");
          break;
        }
      }
      break;

    default:
      g_return_val_if_reached (GST_FLOW_ERROR);
  }

  return res;
}
/* Source pad event handler.
 *
 * Seek events in arbitrary formats are converted to GST_FORMAT_TIME via
 * gst_base_video_encoded_video_convert() and a rewritten seek is pushed
 * upstream on the sink pad; all other events are forwarded upstream
 * unchanged.  Returns TRUE if the event was handled.
 *
 * The QoS case is compiled out (#if 0) but kept for reference.
 */
static gboolean
gst_base_video_codec_src_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoCodec *base_video_codec;
  gboolean res = FALSE;

  base_video_codec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:
    {
      GstFormat format, tformat;
      gdouble rate;
      GstEvent *real_seek;
      GstSeekFlags flags;
      GstSeekType cur_type, stop_type;
      gint64 cur, stop;
      gint64 tcur, tstop;

      gst_event_parse_seek (event, &rate, &format, &flags, &cur_type,
          &cur, &stop_type, &stop);
      /* we consume the original event and push a converted replacement */
      gst_event_unref (event);

      tformat = GST_FORMAT_TIME;
      res = gst_base_video_encoded_video_convert (&base_video_codec->state,
          format, cur, &tformat, &tcur);
      if (!res)
        goto convert_error;
      res = gst_base_video_encoded_video_convert (&base_video_codec->state,
          format, stop, &tformat, &tstop);
      if (!res)
        goto convert_error;

      real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME,
          flags, cur_type, tcur, stop_type, tstop);

      res = gst_pad_push_event (base_video_codec->sinkpad, real_seek);
      break;
    }
#if 0
    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;

      /* FIX: '&timestamp' had been mangled into '×tamp' by an HTML
       * entity ('&times;') during an earlier conversion — this would not
       * compile if the block were re-enabled. */
      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      GST_OBJECT_LOCK (base_video_codec);
      base_video_codec->proportion = proportion;
      base_video_codec->earliest_time = timestamp + diff;
      GST_OBJECT_UNLOCK (base_video_codec);

      GST_DEBUG_OBJECT (base_video_codec,
          "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT,
          GST_TIME_ARGS (timestamp), diff);

      res = gst_pad_push_event (base_video_codec->sinkpad, event);
      break;
    }
#endif
    default:
      /* forward everything else upstream untouched */
      res = gst_pad_push_event (base_video_codec->sinkpad, event);
      break;
  }

done:
  gst_object_unref (base_video_codec);
  return res;

convert_error:
  GST_DEBUG_OBJECT (base_video_codec, "could not convert format");
  goto done;
}
/* Source pad task: pulls encoded buffers out of the OpenMAX output port
 * and pushes them downstream.
 *
 * One iteration: acquire an output buffer; handle port reconfiguration
 * (renegotiate caps, reallocate port buffers) if signalled; then either
 * attach codec-config data to the src caps or wrap the payload in a
 * GstBuffer and hand it to gst_audio_encoder_finish_frame().  All error
 * paths funnel through the goto labels at the bottom, which pause the
 * task and record the flow state.
 *
 * Fixes vs. previous revision:
 *  - gst_pad_get_current_caps() returns a new reference; it was passed
 *    straight to gst_caps_copy() and leaked on every codec-config buffer.
 *  - codec_data was never unreffed after gst_caps_set_simple() (which
 *    takes its own ref), leaking one buffer per codec-config frame.
 *  - typo in the release_error message ("relase" -> "release").
 */
static void
gst_omx_audio_enc_loop (GstOMXAudioEnc * self)
{
  GstOMXAudioEncClass *klass;
  GstOMXPort *port = self->enc_out_port;
  GstOMXBuffer *buf = NULL;
  GstFlowReturn flow_ret = GST_FLOW_OK;
  GstOMXAcquireBufferReturn acq_return;
  OMX_ERRORTYPE err;

  klass = GST_OMX_AUDIO_ENC_GET_CLASS (self);

  acq_return = gst_omx_port_acquire_buffer (port, &buf);
  if (acq_return == GST_OMX_ACQUIRE_BUFFER_ERROR) {
    goto component_error;
  } else if (acq_return == GST_OMX_ACQUIRE_BUFFER_FLUSHING) {
    goto flushing;
  } else if (acq_return == GST_OMX_ACQUIRE_BUFFER_EOS) {
    goto eos;
  }

  if (!gst_pad_has_current_caps (GST_AUDIO_ENCODER_SRC_PAD (self))
      || acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURE) {
    GstAudioInfo *info =
        gst_audio_encoder_get_audio_info (GST_AUDIO_ENCODER (self));
    GstCaps *caps;

    GST_DEBUG_OBJECT (self, "Port settings have changed, updating caps");

    /* Reallocate all buffers */
    if (acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURE) {
      err = gst_omx_port_set_enabled (port, FALSE);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_wait_buffers_released (port, 5 * GST_SECOND);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_deallocate_buffers (port);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_wait_enabled (port, 1 * GST_SECOND);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;
    }

    GST_AUDIO_ENCODER_STREAM_LOCK (self);

    caps = klass->get_caps (self, self->enc_out_port, info);
    if (!caps) {
      if (buf)
        gst_omx_port_release_buffer (self->enc_out_port, buf);
      GST_AUDIO_ENCODER_STREAM_UNLOCK (self);
      goto caps_failed;
    }

    GST_DEBUG_OBJECT (self, "Setting output caps: %" GST_PTR_FORMAT, caps);

    if (!gst_pad_set_caps (GST_AUDIO_ENCODER_SRC_PAD (self), caps)) {
      gst_caps_unref (caps);
      if (buf)
        gst_omx_port_release_buffer (self->enc_out_port, buf);
      GST_AUDIO_ENCODER_STREAM_UNLOCK (self);
      goto caps_failed;
    }
    gst_caps_unref (caps);

    GST_AUDIO_ENCODER_STREAM_UNLOCK (self);

    if (acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURE) {
      err = gst_omx_port_set_enabled (port, TRUE);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_allocate_buffers (port);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_wait_enabled (port, 5 * GST_SECOND);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_populate (port);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;

      err = gst_omx_port_mark_reconfigured (port);
      if (err != OMX_ErrorNone)
        goto reconfigure_error;
    }

    /* Now get a buffer */
    if (acq_return != GST_OMX_ACQUIRE_BUFFER_OK) {
      return;
    }
  }

  g_assert (acq_return == GST_OMX_ACQUIRE_BUFFER_OK);

  if (!buf) {
    /* a NULL buffer with OK return is only legal for components that
     * cannot produce an empty EOS buffer */
    g_assert ((klass->cdata.hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER));
    GST_AUDIO_ENCODER_STREAM_LOCK (self);
    goto eos;
  }

  GST_DEBUG_OBJECT (self, "Handling buffer: 0x%08x %" G_GUINT64_FORMAT,
      (guint) buf->omx_buf->nFlags, (guint64) buf->omx_buf->nTimeStamp);

  /* This prevents a deadlock between the srcpad stream
   * lock and the videocodec stream lock, if ::reset()
   * is called at the wrong time
   */
  if (gst_omx_port_is_flushing (self->enc_out_port)) {
    GST_DEBUG_OBJECT (self, "Flushing");
    gst_omx_port_release_buffer (self->enc_out_port, buf);
    goto flushing;
  }

  GST_AUDIO_ENCODER_STREAM_LOCK (self);

  if ((buf->omx_buf->nFlags & OMX_BUFFERFLAG_CODECCONFIG)
      && buf->omx_buf->nFilledLen > 0) {
    GstCaps *current_caps;
    GstCaps *caps;
    GstBuffer *codec_data;
    GstMapInfo map = GST_MAP_INFO_INIT;

    GST_DEBUG_OBJECT (self, "Handling codec data");
    /* get_current_caps returns a new reference: copy it into a writable
     * caps and drop the original (previously leaked) */
    current_caps = gst_pad_get_current_caps (GST_AUDIO_ENCODER_SRC_PAD (self));
    caps = gst_caps_copy (current_caps);
    gst_caps_unref (current_caps);

    codec_data = gst_buffer_new_and_alloc (buf->omx_buf->nFilledLen);

    gst_buffer_map (codec_data, &map, GST_MAP_WRITE);
    memcpy (map.data, buf->omx_buf->pBuffer + buf->omx_buf->nOffset,
        buf->omx_buf->nFilledLen);
    gst_buffer_unmap (codec_data, &map);

    gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, codec_data,
        NULL);
    /* the caps hold their own ref now; drop ours (previously leaked) */
    gst_buffer_unref (codec_data);

    if (!gst_pad_set_caps (GST_AUDIO_ENCODER_SRC_PAD (self), caps)) {
      gst_caps_unref (caps);
      if (buf)
        gst_omx_port_release_buffer (self->enc_out_port, buf);
      GST_AUDIO_ENCODER_STREAM_UNLOCK (self);
      goto caps_failed;
    }
    gst_caps_unref (caps);
    flow_ret = GST_FLOW_OK;
  } else if (buf->omx_buf->nFilledLen > 0) {
    GstBuffer *outbuf;
    guint n_samples;

    GST_DEBUG_OBJECT (self, "Handling output data");

    n_samples = klass->get_num_samples (self, self->enc_out_port,
        gst_audio_encoder_get_audio_info (GST_AUDIO_ENCODER (self)), buf);

    if (buf->omx_buf->nFilledLen > 0) {
      GstMapInfo map = GST_MAP_INFO_INIT;
      outbuf = gst_buffer_new_and_alloc (buf->omx_buf->nFilledLen);

      gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
      memcpy (map.data, buf->omx_buf->pBuffer + buf->omx_buf->nOffset,
          buf->omx_buf->nFilledLen);
      gst_buffer_unmap (outbuf, &map);
    } else {
      outbuf = gst_buffer_new ();
    }

    /* OMX timestamps/ticks -> GStreamer nanoseconds */
    GST_BUFFER_TIMESTAMP (outbuf) =
        gst_util_uint64_scale (buf->omx_buf->nTimeStamp, GST_SECOND,
        OMX_TICKS_PER_SECOND);
    if (buf->omx_buf->nTickCount != 0)
      GST_BUFFER_DURATION (outbuf) =
          gst_util_uint64_scale (buf->omx_buf->nTickCount, GST_SECOND,
          OMX_TICKS_PER_SECOND);

    flow_ret =
        gst_audio_encoder_finish_frame (GST_AUDIO_ENCODER (self), outbuf,
        n_samples);
  }

  GST_DEBUG_OBJECT (self, "Handled output data");

  GST_DEBUG_OBJECT (self, "Finished frame: %s", gst_flow_get_name (flow_ret));

  err = gst_omx_port_release_buffer (port, buf);
  if (err != OMX_ErrorNone)
    goto release_error;

  self->downstream_flow_ret = flow_ret;

  if (flow_ret != GST_FLOW_OK)
    goto flow_error;

  GST_AUDIO_ENCODER_STREAM_UNLOCK (self);

  return;

component_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
        ("OpenMAX component in error state %s (0x%08x)",
            gst_omx_component_get_last_error_string (self->enc),
            gst_omx_component_get_last_error (self->enc)));
    gst_pad_push_event (GST_AUDIO_ENCODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    self->started = FALSE;
    return;
  }

flushing:
  {
    GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
    gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_FLUSHING;
    self->started = FALSE;
    return;
  }

eos:
  {
    g_mutex_lock (&self->drain_lock);
    if (self->draining) {
      /* a drain was pending: wake up the waiter instead of signalling EOS */
      GST_DEBUG_OBJECT (self, "Drained");
      self->draining = FALSE;
      g_cond_broadcast (&self->drain_cond);
      flow_ret = GST_FLOW_OK;
      gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self));
    } else {
      GST_DEBUG_OBJECT (self, "Component signalled EOS");
      flow_ret = GST_FLOW_EOS;
    }
    g_mutex_unlock (&self->drain_lock);

    GST_AUDIO_ENCODER_STREAM_LOCK (self);
    self->downstream_flow_ret = flow_ret;

    /* Here we fallback and pause the task for the EOS case */
    if (flow_ret != GST_FLOW_OK)
      goto flow_error;

    GST_AUDIO_ENCODER_STREAM_UNLOCK (self);

    return;
  }

flow_error:
  {
    if (flow_ret == GST_FLOW_EOS) {
      GST_DEBUG_OBJECT (self, "EOS");

      gst_pad_push_event (GST_AUDIO_ENCODER_SRC_PAD (self),
          gst_event_new_eos ());
      gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self));
    } else if (flow_ret == GST_FLOW_NOT_LINKED || flow_ret < GST_FLOW_EOS) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED, ("Internal data stream error."),
          ("stream stopped, reason %s", gst_flow_get_name (flow_ret)));

      gst_pad_push_event (GST_AUDIO_ENCODER_SRC_PAD (self),
          gst_event_new_eos ());
      gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self));
    }
    self->started = FALSE;
    GST_AUDIO_ENCODER_STREAM_UNLOCK (self);
    return;
  }

reconfigure_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
        ("Unable to reconfigure output port"));
    gst_pad_push_event (GST_AUDIO_ENCODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
    self->started = FALSE;
    return;
  }

caps_failed:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL), ("Failed to set caps"));
    gst_pad_push_event (GST_AUDIO_ENCODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
    self->started = FALSE;
    return;
  }

release_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
        ("Failed to release output buffer to component: %s (0x%08x)",
            gst_omx_error_to_string (err), err));
    gst_pad_push_event (GST_AUDIO_ENCODER_SRC_PAD (self), gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_ENCODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    self->started = FALSE;
    GST_AUDIO_ENCODER_STREAM_UNLOCK (self);
    return;
  }
}