// Applies a resolution preset to the camerabin pipeline and repositions
// the viewfinder overlay. Unknown presets are rejected with a critical log.
void Pipeline::setResolution(Resolution value)
{
    int width;
    int height;

    // Translate the preset into concrete pixel dimensions.
    switch (value) {
    case Low: // QVGA
        width = LOW_RES_WIDTH;
        height = LOW_RES_HEIGHT;
        break;
    case Medium: // VGA
        width = MID_RES_WIDTH;
        height = MID_RES_HEIGHT;
        break;
    case High: // WVGA
        width = HIGH_RES_WIDTH;
        height = HIGH_RES_HEIGHT;
        break;
    default:
        qCritical() << "Unsupported resolution value " << value;
        return;
    }

    // Ask camerabin to reconfigure capture resolution and frame rate.
    g_signal_emit_by_name(camerabin, "set-video-resolution-fps",
                          width, height, VIDEO_FRN, VIDEO_FRD, NULL);

    // set new rendering position to the viewfinder
    gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(viewfinder),
                                       32, 30, 560, 420);
}
bool QGstreamerGLTextureRenderer::processSyncMessage(const QGstreamerMessage &message) { GstMessage* gm = message.rawMessage(); if ((GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) && gst_structure_has_name(gm->structure, "prepare-xwindow-id") && m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) { #ifdef GL_TEXTURE_SINK_DEBUG qDebug() << Q_FUNC_INFO; #endif GstXOverlay *overlay = GST_X_OVERLAY(m_videoSink); gst_x_overlay_set_xwindow_id(overlay, m_winId); if (!m_displayRect.isEmpty()) { gst_x_overlay_set_render_rectangle(overlay, m_displayRect.x(), m_displayRect.y(), m_displayRect.width(), m_displayRect.height()); } GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink"); m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this); return true; } return false; }
// Stores the new display rectangle and, when the installed GStreamer
// version supports render rectangles (micro >= 29), pushes it to the
// overlay. An empty rect resets the overlay to its full window area
// (the (-1,-1,-1,-1) convention).
void QGstreamerVideoWindow::setDisplayRect(const QRect &rect)
{
    m_displayRect = rect;

    if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) {
#if GST_VERSION_MICRO >= 29
        GstXOverlay * const overlay = GST_X_OVERLAY(m_videoSink);

        if (m_displayRect.isEmpty()) {
            gst_x_overlay_set_render_rectangle(overlay, -1, -1, -1, -1);
        } else {
            gst_x_overlay_set_render_rectangle(overlay,
                                               m_displayRect.x(),
                                               m_displayRect.y(),
                                               m_displayRect.width(),
                                               m_displayRect.height());
        }
        repaint();
#endif
    }
}
void QGstreamerGLTextureRenderer::setOverlayGeometry(const QRect &geometry) { if (m_displayRect != geometry) { #ifdef GL_TEXTURE_SINK_DEBUG qDebug() << Q_FUNC_INFO << geometry; #endif m_displayRect = geometry; if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) { if (m_displayRect.isEmpty()) gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(m_videoSink), -1, -1, -1, -1); else gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(m_videoSink), m_displayRect.x(), m_displayRect.y(), m_displayRect.width(), m_displayRect.height()); repaintOverlay(); } } }
void QGstreamerGLTextureRenderer::precessNewStream() { if (m_videoSink && GST_IS_X_OVERLAY(m_videoSink)) { GstXOverlay *overlay = GST_X_OVERLAY(m_videoSink); gst_x_overlay_set_xwindow_id(overlay, m_winId); if (!m_displayRect.isEmpty()) { gst_x_overlay_set_render_rectangle(overlay, m_displayRect.x(), m_displayRect.y(), m_displayRect.width(), m_displayRect.height()); } GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink"); m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this); } }
// Constructs the video editor model: registers QML role names, builds the
// GES timeline/pipeline pair, hooks up the bus watch and preview sink, and
// leaves the pipeline paused, ready for preview.
QDeclarativeVideoEditor::QDeclarativeVideoEditor(QObject *parent) :
    QAbstractListModel(parent), m_position(0), m_positionTimer(this),
    m_rendering(false), m_size(0), m_width(0), m_height(0), m_fpsn(0), m_fpsd(0)
{
    // Role ids exposed to QML delegates (33..36 are arbitrary ids above
    // Qt::UserRole-reserved values — assumed chosen to avoid clashes; TODO confirm).
    QHash<int, QByteArray> roles;
    roles.insert( 33 , "uri" );
    roles.insert( 34 , "fileName" );
    roles.insert( 35 , "inPoint" );
    roles.insert( 36 , "duration" );
    setRoleNames(roles);

    // Drive position updates via a periodic timer.
    connect(&m_positionTimer, SIGNAL(timeout()), SLOT(updatePosition()));

    // Build the GES timeline with audio+video tracks and one simple layer.
    m_timeline = ges_timeline_new_audio_video();
    m_timelineLayer = (GESTimelineLayer*) ges_simple_timeline_layer_new();
    ges_timeline_add_layer(m_timeline, m_timelineLayer);
    m_pipeline = ges_timeline_pipeline_new();

    // Watch the pipeline bus; the watch holds its own reference, so the
    // local bus reference is released immediately.
    GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (m_pipeline));
    gst_bus_add_watch (bus, bus_call, this);
    gst_object_unref (bus);

    /*
     * gst-dsp encoders seems to not proxy downstream caps correctly, this can make
     * GES fail to render some projects. We override the default getcaps on our own
     */
    g_signal_connect(m_pipeline, "element-added", (GCallback) gstcapstricks_pipeline_element_added, NULL);

    ges_timeline_pipeline_add_timeline (m_pipeline, m_timeline);

    // Hardware video sink for preview, with a fixed on-screen rectangle
    // (171,0 512x288 — presumably matched to the app's UI layout; verify).
    m_vsink = gst_element_factory_make ("omapxvsink", "previewvsink");
    ges_timeline_pipeline_preview_set_video_sink (m_pipeline, m_vsink);
    gst_x_overlay_set_render_rectangle (GST_X_OVERLAY (m_vsink), 171, 0, 512, 288);

    // Start in preview mode, paused until playback is requested.
    ges_timeline_pipeline_set_mode (m_pipeline, TIMELINE_MODE_PREVIEW);
    gst_element_set_state ((GstElement*) m_pipeline, GST_STATE_PAUSED);

    // Duration is unknown until the pipeline reports it.
    m_duration = GST_CLOCK_TIME_NONE;
    m_progress = 0.0;
}
/* Timer callback: while the animation is running, sweep the overlay's
 * render rectangle along a Lissajous-style path (cos drives x, sin drives y)
 * at half the widget size, then request a redraw. Always returns TRUE so
 * the GLib timeout source stays installed. */
static gboolean
animate_render_rect (gpointer user_data)
{
  if (anim_state.running) {
    GstVideoRectangle *rect = &anim_state.rect;
    const gdouble sv = sin (3.0 * anim_state.a);
    const gdouble cv = cos (2.0 * anim_state.a);

    /* Advance the phase and wrap it into [0, 2*pi]. */
    anim_state.a += anim_state.p;
    if (anim_state.a > (G_PI + G_PI))
      anim_state.a -= (G_PI + G_PI);

    rect->w = anim_state.w / 2;
    rect->x = (rect->w - (rect->w / 2)) + cv * (rect->w / 2);
    rect->h = anim_state.h / 2;
    rect->y = (rect->h - (rect->h / 2)) + sv * (rect->h / 2);

    gst_x_overlay_set_render_rectangle (anim_state.overlay,
        rect->x, rect->y, rect->w, rect->h);
    gtk_widget_queue_draw (anim_state.widget);
  }
  return TRUE;
}
void Eyrie::record() { QVariant ret; if(recbin != NULL) { qDebug() << "Ending recording"; gst_element_set_state(recbin, GST_STATE_NULL); recbin = NULL; QMetaObject::invokeMethod(parent(), "reset", Q_RETURN_ARG(QVariant, ret)); return; } qDebug() << "Starting recording"; QMetaObject::invokeMethod(parent(), "setStatus", Q_RETURN_ARG(QVariant, ret), Q_ARG(QVariant, "")); recbin = gst_pipeline_new("pipeline"); GError *err = NULL; recbin = gst_parse_launch("autoaudiosrc ! level ! tee name=t t. ! queue ! audioconvert ! audioresample ! appsink name=asink caps=audio/x-raw-float,channels=1,rate=11025,width=32,endianness=1234 t. ! queue ! audioconvert ! monoscope ! videobalance saturation=0 ! videoflip method=6 ! ffmpegcolorspace ! xvimagesink name=overlay", &err); sink = gst_bin_get_by_name(GST_BIN(recbin), "asink"); GstAppSinkCallbacks appsink_cbs = { NULL, NULL, on_buffer, NULL }; gst_app_sink_set_callbacks(GST_APP_SINK(sink), &appsink_cbs, this, NULL); overlay = gst_bin_get_by_name(GST_BIN(recbin), "overlay"); gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(overlay), view->effectiveWinId()); gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(overlay), 655, 140, 100, 200); gst_element_set_state(recbin, GST_STATE_PLAYING); attempts = 0; timer->start(10000); }
/* Resizes the video overlay and, if present, the X11 subwindow used for
 * rendering; optionally applies a clipping shape from the supplied RDP
 * rectangles. Returns 0 on success, -3 if the decoder is not video. */
int tsmf_window_resize(TSMFGstreamerDecoder *decoder, int x, int y, int width,
	int height, int nr_rects, RDP_RECT *rects)
{
	/* Fix: assert before the first dereference — previously decoder (and
	 * hdl) were dereferenced before being asserted. */
	assert(decoder);

	if (decoder->media_type != TSMF_MAJOR_TYPE_VIDEO)
		return -3;
	else
	{
#if GST_VERSION_MAJOR > 0
		GstVideoOverlay *overlay = GST_VIDEO_OVERLAY(decoder->outsink);
#else
		GstXOverlay *overlay = GST_X_OVERLAY(decoder->outsink);
#endif
		struct X11Handle *hdl = (struct X11Handle *)decoder->platform;
		assert(hdl);

		DEBUG_TSMF("resize: x=%d, y=%d, w=%d, h=%d", x, y, width, height);

		/* Tell the sink to render into the full (width x height) area. */
#if GST_VERSION_MAJOR > 0
		if (!gst_video_overlay_set_render_rectangle(overlay, 0, 0, width, height))
		{
			DEBUG_WARN("Could not resize overlay!");
		}
		gst_video_overlay_expose(overlay);
#else
		if (!gst_x_overlay_set_render_rectangle(overlay, 0, 0, width, height))
		{
			DEBUG_WARN("Could not resize overlay!");
		}
		gst_x_overlay_expose(overlay);
#endif

		if (hdl->subwin)
		{
			XMoveResizeWindow(hdl->disp, hdl->subwin, x, y, width, height);
#if defined(WITH_XEXT)
			if (hdl->has_shape)
			{
				int i;
				XRectangle *xrects = calloc(nr_rects, sizeof(XRectangle));

				/* Fix: guard against allocation failure before writing
				 * into the rectangle array. */
				if (xrects)
				{
					/* Shape rectangles are relative to the subwindow origin. */
					for (i = 0; i < nr_rects; i++)
					{
						xrects[i].x = rects[i].x - x;
						xrects[i].y = rects[i].y - y;
						xrects[i].width = rects[i].width;
						xrects[i].height = rects[i].height;
					}

					XShapeCombineRectangles(hdl->disp, hdl->subwin, ShapeBounding,
						x, y, xrects, nr_rects, ShapeSet, 0);
					free(xrects);
				}
			}
#endif
			XSync(hdl->disp, FALSE);
		}

		return 0;
	}
}
// Synchronous bus handler for the playbin pipeline: reacts to EOS, errors,
// info, tag updates and state changes; always drops the message.
GstBusSyncReply Gst_bus_call(GstBus * bus, GstMessage *msg, gpointer user_data)
{
	gchar * sourceName;

	// source
	GstObject * source;
	source = GST_MESSAGE_SRC(msg);

	if (!GST_IS_OBJECT(source))
		return GST_BUS_DROP;

	// gst_object_get_name() returns a newly-allocated copy owned by us.
	sourceName = gst_object_get_name(source);

	switch (GST_MESSAGE_TYPE(msg))
	{
	case GST_MESSAGE_EOS:
	{
		g_message("End-of-stream");
		end_eof = 1;
		break;
	}
	case GST_MESSAGE_ERROR:
	{
		gchar * debug;
		GError *err;
		gst_message_parse_error(msg, &err, &debug);
		g_free (debug);
		lt_info_c( "%s:%s - GST_MESSAGE_ERROR: %s (%i) from %s\n", FILENAME, __FUNCTION__, err->message, err->code, sourceName );
		if ( err->domain == GST_STREAM_ERROR )
		{
			if ( err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND )
			{
				if ( g_strrstr(sourceName, "videosink") )
					lt_info_c( "%s:%s - GST_MESSAGE_ERROR: videosink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
				else if ( g_strrstr(sourceName, "audiosink") )
					lt_info_c( "%s:%s - GST_MESSAGE_ERROR: audioSink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
			}
		}
		g_error_free(err);
		end_eof = 1; // NOTE: just to exit
		break;
	}
	case GST_MESSAGE_INFO:
	{
		gchar *debug;
		GError *inf;
		gst_message_parse_info (msg, &inf, &debug);
		g_free (debug);
		if ( inf->domain == GST_STREAM_ERROR && inf->code == GST_STREAM_ERROR_DECODE )
		{
			if ( g_strrstr(sourceName, "videosink") )
				lt_info_c( "%s:%s - GST_MESSAGE_INFO: videosink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
		}
		g_error_free(inf);
		break;
	}
	case GST_MESSAGE_TAG:
	{
		// Merge new tags into the accumulated tag list; REPLACE prefers
		// the freshly received values.
		GstTagList *tags, *result;
		gst_message_parse_tag(msg, &tags);

		result = gst_tag_list_merge(m_stream_tags, tags, GST_TAG_MERGE_REPLACE);
		if (result)
		{
			if (m_stream_tags)
				gst_tag_list_free(m_stream_tags);
			m_stream_tags = result;
		}

		// If cover art is embedded, dump it to a well-known temp file.
		const GValue *gv_image = gst_tag_list_get_value_index(tags, GST_TAG_IMAGE, 0);
		if ( gv_image )
		{
			GstBuffer *buf_image;
			buf_image = gst_value_get_buffer (gv_image);
			int fd = open("/tmp/.id3coverart", O_CREAT|O_WRONLY|O_TRUNC, 0644);
			if(fd >= 0)
			{
				int ret = write(fd, GST_BUFFER_DATA(buf_image), GST_BUFFER_SIZE(buf_image));
				close(fd);
				lt_info_c( "%s:%s - GST_MESSAGE_INFO: cPlayback::state /tmp/.id3coverart %d bytes written\n", FILENAME, __FUNCTION__ , ret);
			}
			//FIXME: how shall playback handle this event???
		}
		gst_tag_list_free(tags);
		lt_info_c( "%s:%s - GST_MESSAGE_INFO: update info tags\n", FILENAME, __FUNCTION__); //FIXME: how shall playback handle this event???
		break;
	}
	case GST_MESSAGE_STATE_CHANGED:
	{
		// Only track state changes of the playbin itself, not its children.
		if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
			break;

		GstState old_state, new_state;
		gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
		if(old_state == new_state)
			break;

		lt_info_c( "%s:%s - GST_MESSAGE_STATE_CHANGED: state transition %s -> %s\n", FILENAME, __FUNCTION__, gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));

		GstStateChange transition = (GstStateChange)GST_STATE_TRANSITION(old_state, new_state);
		switch(transition)
		{
		case GST_STATE_CHANGE_NULL_TO_READY:
		{
		}	break;
		case GST_STATE_CHANGE_READY_TO_PAUSED:
		{
			// Re-resolve the concrete DVB sinks from the (possibly new)
			// element hierarchy, dropping any previously held references.
			GstIterator *children;
			if (audioSink)
			{
				gst_object_unref(GST_OBJECT(audioSink));
				audioSink = NULL;
			}
			if (videoSink)
			{
				gst_object_unref(GST_OBJECT(videoSink));
				videoSink = NULL;
			}
			children = gst_bin_iterate_recurse(GST_BIN(m_gst_playbin));
			audioSink = GST_ELEMENT_CAST(gst_iterator_find_custom(children, (GCompareFunc)match_sinktype, (gpointer)"GstDVBAudioSink"));
			videoSink = GST_ELEMENT_CAST(gst_iterator_find_custom(children, (GCompareFunc)match_sinktype, (gpointer)"GstDVBVideoSink"));
			gst_iterator_free(children);
		}	break;
		case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
		{
		}	break;
		case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
		{
		}	break;
		case GST_STATE_CHANGE_PAUSED_TO_READY:
		{
			// Going back to READY: release the sink references.
			if (audioSink)
			{
				gst_object_unref(GST_OBJECT(audioSink));
				audioSink = NULL;
			}
			if (videoSink)
			{
				gst_object_unref(GST_OBJECT(videoSink));
				videoSink = NULL;
			}
		}	break;
		case GST_STATE_CHANGE_READY_TO_NULL:
		{
		}	break;
		default:
			break;
		}
		break;
	}
#if 0
	case GST_MESSAGE_ELEMENT:
	{
		if(gst_structure_has_name(gst_message_get_structure(msg), "prepare-xwindow-id"))
		{
			// set window id
			gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)), glfb->getWindowID());
			// reshape window
			gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)), 0, 0, glfb->getOSDWidth(), glfb->getOSDHeight());
			// sync frames
			gst_x_overlay_expose(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)));
		}
		break;
	}
#endif
	default:
		break;
	}

	// Fix: gst_object_get_name() returns a copy the caller must g_free();
	// it was previously leaked on every bus message.
	g_free(sourceName);

	return GST_BUS_DROP;
}
/* Compatibility shim: exposes the GStreamer 1.x GstVideoOverlay-style name
 * on top of the legacy 0.10 GstXOverlay render-rectangle API. */
static inline gboolean
gst_video_overlay_set_render_rectangle (GstXOverlay * overlay,
    gint x, gint y, gint width, gint height)
{
  return gst_x_overlay_set_render_rectangle (overlay, x, y, width, height);
}