/*
 *  PsychGSCreateMovie() -- Create a movie object.
 *
 *  This function tries to open a moviefile (with or without audio/video tracks)
 *  and create an associated movie object for it.
 *
 *  win         = Pointer to window record of associated onscreen window.
 *  moviename   = char* with the name of the moviefile.
 *  preloadSecs = How many seconds of the movie should be preloaded/prefetched
 *                into RAM at movie open time? (> 0 or == -1 prerolls the
 *                pipeline to PAUSED; anything else only readies it.)
 *  moviehandle = handle to the new movie. On failure set to -1, or -2 if the
 *                maximum number of simultaneously open movies is exceeded.
 *                Passing in a value of -1000 suppresses all error output
 *                (used when running on a non-interpreter thread).
 */
void PsychGSCreateMovie(PsychWindowRecordType *win, const char* moviename, double preloadSecs, int* moviehandle)
{
    GstCaps *colorcaps;
    GstElement *theMovie = NULL;
    GMainLoop *MovieContext = NULL;
    GstBus *bus = NULL;
    GstFormat fmt;
    GstElement *videosink;
    gint64 length_format;
    GstPad *pad, *peerpad;
    const GstCaps *caps;
    GstStructure *str;
    gint width,height;
    gint rate1, rate2;
    int i, slotid;
    GError *error = NULL;
    char movieLocation[FILENAME_MAX];
    psych_bool trueValue = TRUE;   // NOTE(review): unused in this function.
    char msgerr[10000];            // NOTE(review): unused in this function.
    char errdesc[1000];            // NOTE(review): unused in this function.
    psych_bool printErrors;

    // Suppress output of error-messages if moviehandle == -1000. That means we
    // run in our own Posix-Thread, not in the Matlab-Thread. Printing via Matlabs
    // printing facilities would likely cause a terrible crash.
    printErrors = (*moviehandle == -1000) ? FALSE : TRUE;

    // Set movie handle to "failed" initially:
    *moviehandle = -1;

    // We start GStreamer only on first invocation.
    if (firsttime) {
        // Initialize GStreamer: The routine is defined in PsychVideoCaptureSupportGStreamer.c
        PsychGSCheckInit("movie playback");
        firsttime = FALSE;
    }

    // A NULL win is tolerated here; it is only rejected further below if the
    // movie actually contains video tracks that would need an onscreen window.
    if (win && !PsychIsOnscreenWindow(win)) {
        if (printErrors) PsychErrorExitMsg(PsychError_user, "Provided windowPtr is not an onscreen window."); else return;
    }

    if (NULL == moviename) {
        if (printErrors) PsychErrorExitMsg(PsychError_internal, "NULL-Ptr instead of moviename passed!"); else return;
    }

    if (numMovieRecords >= PSYCH_MAX_MOVIES) {
        *moviehandle = -2;
        if (printErrors) PsychErrorExitMsg(PsychError_user, "Allowed maximum number of simultaneously open movies exceeded!"); else return;
    }

    // Search first free slot in movieRecordBANK:
    for (i=0; (i < PSYCH_MAX_MOVIES) && (movieRecordBANK[i].theMovie); i++);
    if (i>=PSYCH_MAX_MOVIES) {
        *moviehandle = -2;
        if (printErrors) PsychErrorExitMsg(PsychError_user, "Allowed maximum number of simultaneously open movies exceeded!"); else return;
    }

    // Slot slotid will contain the movie record for our new movie object:
    slotid=i;

    // Zero-out new record in moviebank:
    memset(&movieRecordBANK[slotid], 0, sizeof(PsychMovieRecordType));

    // Create name-string for moviename: If an URI qualifier is at the beginning,
    // we're fine and just pass the URI as-is. Otherwise we add the file:// URI prefix.
    // The second clause also accepts "v4l..." device specs that contain "//".
    if (strstr(moviename, "://") || ((strstr(moviename, "v4l") == moviename) && strstr(moviename, "//"))) {
        snprintf(movieLocation, sizeof(movieLocation)-1, "%s", moviename);
    }
    else {
        snprintf(movieLocation, sizeof(movieLocation)-1, "file:///%s", moviename);
    }

    // NOTE(review): strncpy() with a count of FILENAME_MAX does not guarantee
    // null-termination if the source is >= FILENAME_MAX chars — confirm the
    // record fields are sized FILENAME_MAX+1 or that inputs are shorter.
    strncpy(movieRecordBANK[slotid].movieLocation, movieLocation, FILENAME_MAX);
    strncpy(movieRecordBANK[slotid].movieName, moviename, FILENAME_MAX);

    // Create movie playback pipeline: playbin2 auto-plugs demuxers/decoders:
    theMovie = gst_element_factory_make ("playbin2", "ptbmovieplaybackpipeline");

    // Assign name of movie to play:
    g_object_set(G_OBJECT(theMovie), "uri", movieLocation, NULL);

    // Connect callback to about-to-finish signal: Signal is emitted as soon as
    // end of current playback iteration is approaching. The callback checks if
    // looped playback is requested. If so, it schedules a new playback iteration.
    g_signal_connect(G_OBJECT(theMovie), "about-to-finish", G_CALLBACK(PsychMovieAboutToFinishCB), &(movieRecordBANK[slotid]));

    // Assign message context, message bus and message callback for
    // the pipeline to report events and state changes, errors etc.:
    MovieContext = g_main_loop_new (NULL, FALSE);
    movieRecordBANK[slotid].MovieContext = MovieContext;
    bus = gst_pipeline_get_bus(GST_PIPELINE(theMovie));
    // Didn't work: g_signal_connect (G_OBJECT(bus), "message::error", G_CALLBACK(PsychMessageErrorCB), NULL);
    //              g_signal_connect (G_OBJECT(bus), "message::warning", G_CALLBACK(PsychMessageErrorCB), NULL);
    gst_bus_add_watch(bus, PsychMovieBusCallback, &(movieRecordBANK[slotid]));
    gst_object_unref(bus);

    // Assign a fakesink named "ptbsink0" as destination video-sink for
    // all video content. This allows us to get hold of the video frame buffers for
    // converting them into PTB OpenGL textures:
    videosink = gst_element_factory_make ("appsink", "ptbsink0");
    if (!videosink) {
        printf("PTB-ERROR: Failed to create video-sink appsink ptbsink!\n");
        PsychGSProcessMovieContext(movieRecordBANK[slotid].MovieContext, TRUE);
        PsychErrorExitMsg(PsychError_system, "Opening the movie failed. Reason hopefully given above.");
    };

    movieRecordBANK[slotid].videosink = videosink;

    // Our OpenGL texture creation routine needs GL_BGRA8 data in G_UNSIGNED_8_8_8_8_REV
    // format, but the pipeline usually delivers YUV data in planar format. Therefore
    // need to perform colorspace/colorformat conversion. We build a little videobin
    // which consists of a ffmpegcolorspace converter plugin connected to our appsink
    // plugin which will deliver video data to us for conversion into textures.
    // The "sink" pad of the converter plugin is connected as the "sink" pad of our
    // videobin, and the videobin is connected to the video-sink output of the pipeline,
    // thereby receiving decoded video data. We place a videocaps filter inbetween the
    // converter and the appsink to enforce a color format conversion to the "colorcaps"
    // we need. colorcaps define the needed data format for efficient conversion into
    // a RGBA8 texture:
    colorcaps = gst_caps_new_simple ( "video/x-raw-rgb",
                                      "bpp", G_TYPE_INT, 32,
                                      "depth", G_TYPE_INT, 32,
                                      "alpha_mask", G_TYPE_INT, 0x000000FF,
                                      "red_mask", G_TYPE_INT,   0x0000FF00,
                                      "green_mask", G_TYPE_INT, 0x00FF0000,
                                      "blue_mask", G_TYPE_INT,  0xFF000000,
                                      NULL);

    /*
    // Old style method: Only left here for documentation to show how one can create
    // video sub-pipelines via bin's and connect them to each other via ghostpads:
    GstElement *videobin = gst_bin_new ("video_output_bin");
    GstElement *videocon = gst_element_factory_make ("ffmpegcolorspace", "color_converter");
    gst_bin_add_many(GST_BIN(videobin), videocon, videosink, NULL);
    GstPad *ghostpad = gst_ghost_pad_new("Video_Ghostsink", gst_element_get_pad(videocon, "sink"));
    gst_element_add_pad(videobin, ghostpad);
    gst_element_link_filtered(videocon, videosink, colorcaps);
    // Assign our special videobin as video-sink of the pipeline:
    g_object_set(G_OBJECT(theMovie), "video-sink", videobin, NULL);
    */

    // New style method: Leaves the freedom of choice of color converter (if any)
    // to the auto-plugger.
    // Assign 'colorcaps' as caps to our videosink. This marks the videosink so
    // that it can only receive video image data in the format defined by colorcaps,
    // i.e., a format that is easy to consume for OpenGL's texture creation on std.
    // gpu's. It is the job of the video pipeline's autoplugger to plug in proper
    // color & format conversion plugins to satisfy videosink's needs.
    gst_app_sink_set_caps(GST_APP_SINK(videosink), colorcaps);

    // Assign our special appsink 'videosink' as video-sink of the pipeline:
    g_object_set(G_OBJECT(theMovie), "video-sink", videosink, NULL);
    gst_caps_unref(colorcaps);

    // Get the pad from the final sink for probing width x height of movie frames and nominal framerate of movie:
    pad = gst_element_get_pad(videosink, "sink");

    PsychGSProcessMovieContext(movieRecordBANK[slotid].MovieContext, FALSE);

    // Should we preroll / preload?
    if ((preloadSecs > 0) || (preloadSecs == -1)) {
        // Preload / Preroll the pipeline:
        if (!PsychMoviePipelineSetState(theMovie, GST_STATE_PAUSED, 30.0)) {
            PsychGSProcessMovieContext(movieRecordBANK[slotid].MovieContext, TRUE);
            PsychErrorExitMsg(PsychError_user, "In OpenMovie: Opening the movie failed. Reason given above.");
        }
    } else {
        // Ready the pipeline:
        if (!PsychMoviePipelineSetState(theMovie, GST_STATE_READY, 30.0)) {
            PsychGSProcessMovieContext(movieRecordBANK[slotid].MovieContext, TRUE);
            PsychErrorExitMsg(PsychError_user, "In OpenMovie: Opening the movie failed. Reason given above.");
        }
    }

    // Query number of available video and audio tracks in movie:
    g_object_get (G_OBJECT(theMovie),
                  "n-video", &movieRecordBANK[slotid].nrVideoTracks,
                  "n-audio", &movieRecordBANK[slotid].nrAudioTracks,
                  NULL);

    // We need a valid onscreen window handle for real video playback:
    if ((NULL == win) && (movieRecordBANK[slotid].nrVideoTracks > 0)) {
        if (printErrors) PsychErrorExitMsg(PsychError_user, "No windowPtr to an onscreen window provided. Must do so for movies with videotrack!"); else return;
    }

    PsychGSProcessMovieContext(movieRecordBANK[slotid].MovieContext, FALSE);

    PsychInitMutex(&movieRecordBANK[slotid].mutex);
    PsychInitCondition(&movieRecordBANK[slotid].condition, NULL);

    if (oldstyle) {
        // Install the probe callback for reception of video frames from engine at the sink-pad itself:
        gst_pad_add_buffer_probe(pad, G_CALLBACK(PsychHaveVideoDataCallback), &(movieRecordBANK[slotid]));
    } else {
        // Install callbacks used by the videosink (appsink) to announce various events:
        gst_app_sink_set_callbacks(GST_APP_SINK(videosink), &videosinkCallbacks, &(movieRecordBANK[slotid]), PsychDestroyNotifyCallback);
    }

    // Drop frames if callback can't pull buffers fast enough:
    // This together with the max queue lengths of 1 allows to
    // maintain audio-video sync by framedropping if needed.
    gst_app_sink_set_drop(GST_APP_SINK(videosink), TRUE);

    // Only allow one queued buffer before dropping:
    gst_app_sink_set_max_buffers(GST_APP_SINK(videosink), 1);

    // Assign harmless initial settings for fps and frame size:
    rate1 = 0;
    rate2 = 1;
    width = height = 0;

    // Videotrack available?
    if (movieRecordBANK[slotid].nrVideoTracks > 0) {
        // Yes: Query size and framerate of movie from the negotiated caps of
        // the pad feeding our appsink. Only available after successful preroll.
        // NOTE(review): neither 'peerpad' nor 'caps' is unref'ed here although
        // gst_pad_get_peer() and gst_pad_get_negotiated_caps() return owned
        // references — looks like a small per-open leak; confirm against the
        // GStreamer 0.10 refcounting rules.
        peerpad = gst_pad_get_peer(pad);
        caps=gst_pad_get_negotiated_caps(peerpad);
        if (caps) {
            str=gst_caps_get_structure(caps,0);

            /* Get some data about the frame */
            rate1 = 1;
            rate2 = 1;
            gst_structure_get_fraction(str, "pixel-aspect-ratio", &rate1, &rate2);
            movieRecordBANK[slotid].aspectRatio = (double) rate1 / (double) rate2;
            gst_structure_get_int(str,"width",&width);
            gst_structure_get_int(str,"height",&height);
            rate1 = 0;
            rate2 = 1;
            gst_structure_get_fraction(str, "framerate", &rate1, &rate2);
        } else {
            printf("PTB-DEBUG: No frame info available after preroll.\n");
        }
    }

    if (strstr(moviename, "v4l2:")) {
        // Special case: The "movie" is actually a video4linux2 live source.
        // Need to make parameters up for now, so it to work as "movie":
        rate1 = 30;
        width = 640;
        height = 480;
        movieRecordBANK[slotid].nrVideoTracks = 1;

        // Uglyness at its best ;-)
        if (strstr(moviename, "320")) { width = 320; height = 240; };
    }

    // Release the pad:
    gst_object_unref(pad);

    // Assign new record in moviebank:
    movieRecordBANK[slotid].theMovie = theMovie;
    movieRecordBANK[slotid].loopflag = 0;
    movieRecordBANK[slotid].frameAvail = 0;
    movieRecordBANK[slotid].imageBuffer = NULL;

    *moviehandle = slotid;

    // Increase counter:
    numMovieRecords++;

    // Compute basic movie properties - Duration and fps as well as image size:

    // Retrieve duration in seconds:
    fmt = GST_FORMAT_TIME;
    if (gst_element_query_duration(theMovie, &fmt, &length_format)) {
        // This returns nsecs, so convert to seconds:
        movieRecordBANK[slotid].movieduration = (double) length_format / (double) 1e9;
        //printf("PTB-DEBUG: Duration of movie %i [%s] is %lf seconds.\n", slotid, moviename, movieRecordBANK[slotid].movieduration);
    } else {
        movieRecordBANK[slotid].movieduration = DBL_MAX;
        printf("PTB-WARNING: Could not query duration of movie %i [%s] in seconds. Returning infinity.\n", slotid, moviename);
    }

    // Assign expected framerate, assuming a linear spacing between frames:
    movieRecordBANK[slotid].fps = (double) rate1 / (double) rate2;
    //printf("PTB-DEBUG: Framerate fps of movie %i [%s] is %lf fps.\n", slotid, moviename, movieRecordBANK[slotid].fps);

    // Compute framecount from fps and duration:
    movieRecordBANK[slotid].nrframes = (int)(movieRecordBANK[slotid].fps * movieRecordBANK[slotid].movieduration + 0.5);
    //printf("PTB-DEBUG: Number of frames in movie %i [%s] is %i.\n", slotid, moviename, movieRecordBANK[slotid].nrframes);

    // Define size of images in movie:
    movieRecordBANK[slotid].width = width;
    movieRecordBANK[slotid].height = height;

    // Ready to rock!
    return;
}
/*
 *  PsychCreateNewMovieFile() -- Create a new movie file for video (and
 *  optionally audio) recording and set up a GStreamer encoding pipeline for it.
 *
 *  moviefile    = Target filename of the movie file.
 *  width/height = Size of the video frames that will be fed into the encoder.
 *  framerate    = Nominal framerate of the written movie.
 *  movieoptions = Option string: may contain a full "gst-launch ..." pipeline
 *                 spec, or keyword parameters like :CodecType=, :CodecSettings=,
 *                 CodecFOURCCId=, CodecFOURCC=, EncodingQuality=, AddAudioTrack.
 *                 NOTE: this string is parsed *and mutated in place* below.
 *
 *  Returns the new moviehandle on success, -1 on failure.
 */
int PsychCreateNewMovieFile(char* moviefile, int width, int height, double framerate, char* movieoptions)
{
    PsychMovieWriterRecordType* pwriterRec = NULL;
    int moviehandle = 0;
    GError *myErr = NULL;
    char* poption;
    char codecString[1000];
    char launchString[10000];
    int dummyInt;
    float dummyFloat;
    char myfourcc[5];
    psych_bool doAudio = FALSE;

    // Still capacity left?
    if (moviewritercount >= PSYCH_MAX_MOVIEWRITERDEVICES) PsychErrorExitMsg(PsychError_user, "Maximum number of movie writers exceeded. Please close some first!");

    // Find first free (i.e., NULL) slot and assign moviehandle:
    while ((pwriterRec = PsychGetMovieWriter(moviehandle, TRUE)) && pwriterRec->Movie) moviehandle++;

    if (firsttime) {
        // Make sure GStreamer is ready:
        PsychGSCheckInit("movie writing");
        firsttime = FALSE;
    }

    // Store movie filename:
    // NOTE(review): unbounded strcpy() — assumes pwriterRec->File is large
    // enough for any caller-supplied path; confirm against the record's
    // declared field size.
    strcpy(pwriterRec->File, moviefile);

    // Store width, height:
    pwriterRec->height = height;
    pwriterRec->width  = width;
    pwriterRec->eos    = FALSE;

    // If no movieoptions specified, create default string for default
    // codec selection and configuration:
    if (strlen(movieoptions) == 0) {
        // No options provided. Select default encoder with default settings:
        // NOTE(review): this strdup()'ed string is never free()'d — small
        // one-shot leak per call on this path.
        movieoptions = strdup("DEFAULTenc");
    }
    else if ((poption = strstr(movieoptions, ":CodecSettings="))) {
        // Replace ':' with a zero in movieoptions, so it gets null-terminated:
        movieoptions = poption;
        *movieoptions = 0;

        // Move after null-terminator:
        movieoptions++;

        // Replace the ':CodecSettings=' with the special keyword 'DEFAULTenc', so
        // so the default video codec is chosen, but the given settings override its
        // default parameters. (In-place overwrite of the keyword prefix; the copy
        // is intentionally not null-terminated since the tail of the string is kept.)
        strncpy(movieoptions, "DEFAULTenc ", strlen("DEFAULTenc "));

        if (strlen(movieoptions) == 0) PsychErrorExitMsg(PsychError_user, "Invalid (empty) :CodecSettings= parameter specified. Aborted.");
    }
    else if ((poption = strstr(movieoptions, ":CodecType="))) {
        // Replace ':' with a zero in movieoptions, so it gets null-terminated
        // and only points to the actual movie filename:
        movieoptions = poption;
        *movieoptions = 0;

        // Advance movieoptions to point to the actual codec spec string
        // (11 == strlen(":CodecType=") - 1, skipping past "CodecType="):
        movieoptions+= 11;
        if (strlen(movieoptions) == 0) PsychErrorExitMsg(PsychError_user, "Invalid (empty) :CodecType= parameter specified. Aborted.");
    }

    // Assign numeric 32-bit FOURCC equivalent code to select codec:
    // This is optional. We default to kH264CodecType:
    if ((poption = strstr(movieoptions, "CodecFOURCCId="))) {
        if (sscanf(poption, "CodecFOURCCId=%i", &dummyInt) == 1) {
            pwriterRec->CodecType = dummyInt;
            if (PsychPrefStateGet_Verbosity() > 3) printf("PTB-INFO: Codec with FOURCC numeric id %i [%" GST_FOURCC_FORMAT "] requested for encoding of movie %i [%s].\n", dummyInt, GST_FOURCC_ARGS(dummyInt), moviehandle, moviefile);
            if (PsychPrefStateGet_Verbosity() > 1) printf("PTB-WARNING: Codec selection by FOURCC not yet supported. FOURCC code ignored!\n");
        }
        else PsychErrorExitMsg(PsychError_user, "Invalid CodecFOURCCId= parameter provided in movieoptions parameter. Parse error!");
    }

    // Assign 4 character string FOURCC code to select codec:
    if ((poption = strstr(movieoptions, "CodecFOURCC="))) {
        if (sscanf(poption, "CodecFOURCC=%c%c%c%c", &myfourcc[0], &myfourcc[1], &myfourcc[2], &myfourcc[3]) == 4) {
            myfourcc[4] = 0;
            dummyInt = (int) GST_STR_FOURCC (myfourcc);
            pwriterRec->CodecType = dummyInt;
            if (PsychPrefStateGet_Verbosity() > 3) printf("PTB-INFO: Codec with FOURCC numeric id %i [%" GST_FOURCC_FORMAT "] requested for encoding of movie %i [%s].\n", dummyInt, GST_FOURCC_ARGS(dummyInt), moviehandle, moviefile);
            if (PsychPrefStateGet_Verbosity() > 1) printf("PTB-WARNING: Codec selection by FOURCC not yet supported. FOURCC code ignored!\n");
        }
        else PsychErrorExitMsg(PsychError_user, "Invalid CodecFOURCC= parameter provided in movieoptions parameter. Must be exactly 4 characters! Parse error!");
    }

    // Assign numeric encoding quality level:
    // This is optional. We default to "normal quality":
    if ((poption = strstr(movieoptions, "EncodingQuality="))) {
        if ((sscanf(poption, "EncodingQuality=%f", &dummyFloat) == 1) && (dummyFloat >= 0) && (dummyFloat <= 1)) {
            // Map floating point quality level between 0.0 and 1.0 to 10 discrete levels:
            if (PsychPrefStateGet_Verbosity() > 3) printf("PTB-INFO: Encoding quality level %f selected for encoding of movie %i [%s].\n", dummyFloat, moviehandle, moviefile);

            // Rewrite "EncodingQuality=" string into "VideoQuality=" string, with proper
            // padding: "EncodingQuality="
            // This way EncodingQuality in Quicktime lingo corresponds to
            // VideoQuality in GStreamer lingo (again an in-place, deliberately
            // non-terminated overwrite of the keyword prefix):
            strncpy(poption, " Videoquality=", strlen(" Videoquality="));
        }
        else PsychErrorExitMsg(PsychError_user, "Invalid EncodingQuality= parameter provided in movieoptions parameter. Parse error or out of valid 0 - 1 range!");
    }

    // Check for valid parameters. Also warn if some parameters are borderline for certain codecs:
    if ((framerate < 1) && (PsychPrefStateGet_Verbosity() > 1)) printf("PTB-WARNING:In CreateMovie: Negative or zero 'framerate' %f units for moviehandle %i provided! Sounds like trouble ahead.\n", (float) framerate, moviehandle);
    if (width < 1) PsychErrorExitMsg(PsychError_user, "In CreateMovie: Invalid zero or negative 'width' for video frame size provided!");
    if ((width < 4) && (PsychPrefStateGet_Verbosity() > 1)) printf("PTB-WARNING:In CreateMovie: 'width' of %i pixels for moviehandle %i provided! Some video codecs may malfunction with such a small width.\n", width, moviehandle);
    if ((width % 4 != 0) && (PsychPrefStateGet_Verbosity() > 1)) printf("PTB-WARNING:In CreateMovie: 'width' of %i pixels for moviehandle %i provided! Some video codecs may malfunction with a width which is not a multiple of 4 or 16.\n", width, moviehandle);
    if (height < 1) PsychErrorExitMsg(PsychError_user, "In CreateMovie: Invalid zero or negative 'height' for video frame size provided!");
    if ((height < 4) && (PsychPrefStateGet_Verbosity() > 1)) printf("PTB-WARNING:In CreateMovie: 'height' of %i pixels for moviehandle %i provided! Some video codecs may malfunction with such a small height.\n", height, moviehandle);

    // Full GStreamer launch line a la gst-launch command provided?
    if (strstr(movieoptions, "gst-launch")) {
        // Yes: We use movieoptions directly as launch line:
        movieoptions = strstr(movieoptions, "gst-launch");

        // Move string pointer behind the "gst-launch" word (plus a blank):
        movieoptions+= strlen("gst-launch ");

        // Can directly use this:
        sprintf(launchString, "%s", movieoptions);

        // With audio track?
        if (strstr(movieoptions, "name=ptbaudioappsrc")) doAudio = TRUE;
    }
    else {
        // No: Do our own parsing and setup:

        // Find the gst-launch style string for codecs and muxers:
        if (!PsychGetCodecLaunchLineFromString(movieoptions, &(codecString[0]))) {
            // No config for this format possible:
            if (PsychPrefStateGet_Verbosity() > 0) printf("PTB-ERROR:In CreateMovie: Creating movie file with handle %i [%s] failed: Could not find matching codec setup.\n", moviehandle, moviefile);
            goto bail;
        }

        // With audio track?
        if (strstr(movieoptions, "AddAudioTrack")) doAudio = TRUE;

        // Build final launch string:
        // NOTE(review): both branches currently emit the identical video-only
        // pipeline — the doAudio branch does not add any audio elements here;
        // audio presence is only checked later via the ptbaudioappsrc lookup.
        if (doAudio) {
            // Video and audio:
            sprintf(launchString, "appsrc name=ptbvideoappsrc do-timestamp=0 stream-type=0 max-bytes=0 block=1 is-live=0 emit-signals=0 ! capsfilter caps=\"video/x-raw-rgb, bpp=(int)32, depth=(int)32, endianess=(int)4321, red_mask=(int)16711680, green_mask=(int)65280, blue_mask=(int)255, width=(int)%i, height=(int)%i, framerate=%i/1 \" ! videorate ! ffmpegcolorspace ! %s ! filesink name=ptbfilesink async=0 location=%s ", width, height, ((int) (framerate + 0.5)), codecString, moviefile);
        }
        else {
            // Video only:
            sprintf(launchString, "appsrc name=ptbvideoappsrc do-timestamp=0 stream-type=0 max-bytes=0 block=1 is-live=0 emit-signals=0 ! capsfilter caps=\"video/x-raw-rgb, bpp=(int)32, depth=(int)32, endianess=(int)4321, red_mask=(int)16711680, green_mask=(int)65280, blue_mask=(int)255, width=(int)%i, height=(int)%i, framerate=%i/1 \" ! videorate ! ffmpegcolorspace ! %s ! filesink name=ptbfilesink async=0 location=%s ", width, height, ((int) (framerate + 0.5)), codecString, moviefile);
        }
    }

    // Create a movie file for the destination movie:
    if (PsychPrefStateGet_Verbosity() > 3) {
        printf("PTB-INFO: Movie writing pipeline gst-launch line (without the -e option required on the command line!) is:\n");
        printf("gst-launch %s\n", launchString);
    }

    // Build pipeline from launch string:
    pwriterRec->Movie = gst_parse_launch((const gchar*) launchString, &myErr);
    if ((NULL == pwriterRec->Movie) || myErr) {
        if (PsychPrefStateGet_Verbosity() > 0) {
            printf("PTB-ERROR: In CreateMovie: Creating movie file with handle %i [%s] failed: Could not build pipeline.\n", moviehandle, moviefile);
            printf("PTB-ERROR: Parameters were: %s\n", movieoptions);
            printf("PTB-ERROR: Launch string was: %s\n", launchString);
            // NOTE(review): if Movie is NULL but myErr is also NULL, the two
            // myErr->message dereferences below would crash — confirm
            // gst_parse_launch() always sets the GError when it returns NULL.
            printf("PTB-ERROR: GStreamer error message was: %s\n", (char*) myErr->message);

            // Special tips for the challenged:
            if (strstr(myErr->message, "property")) {
                // Bailed due to unsupported x264enc parameter "speed-preset" or "profile". Can be solved by upgrading
                // GStreamer or the OS or the VideoCodec= override:
                printf("PTB-TIP: The reason this failed is because your GStreamer codec installation is too outdated.\n");
                printf("PTB-TIP: Either upgrade your GStreamer (plugin) installation to a more recent version,\n");
                printf("PTB-TIP: or upgrade your operating system (e.g., Ubuntu 10.10 'Maverick Meercat' and later are fine).\n");
                printf("PTB-TIP: A recent GStreamer installation is required to use all features and get optimal performance.\n");
                printf("PTB-TIP: As a workaround, you can manually specify all codec settings, leaving out the unsupported\n");
                printf("PTB-TIP: option. See 'help VideoRecording' on how to do that.\n\n");
            }
        }

        goto bail;
    }

    // Get handle to ptbvideoappsrc:
    pwriterRec->ptbvideoappsrc = gst_bin_get_by_name(GST_BIN(pwriterRec->Movie), (const gchar *) "ptbvideoappsrc");
    if (NULL == pwriterRec->ptbvideoappsrc) {
        if (PsychPrefStateGet_Verbosity() > 0) printf("PTB-ERROR: In CreateMovie: Creating movie file with handle %i [%s] failed: Could not find ptbvideoappsrc pipeline element.\n", moviehandle, moviefile);
        goto bail;
    }

    // Get handle to ptbaudioappsrc (only required to exist if doAudio):
    pwriterRec->ptbaudioappsrc = gst_bin_get_by_name(GST_BIN(pwriterRec->Movie), (const gchar *) "ptbaudioappsrc");
    if (doAudio && (NULL == pwriterRec->ptbaudioappsrc)) {
        if (PsychPrefStateGet_Verbosity() > 0) printf("PTB-ERROR: In CreateMovie: Creating movie file with handle %i [%s] failed: Could not find ptbaudioappsrc pipeline element.\n", moviehandle, moviefile);
        goto bail;
    }

    // Create message context and hook up the shared bus callback:
    pwriterRec->Context = g_main_loop_new (NULL, FALSE);
    pwriterRec->bus = gst_pipeline_get_bus (GST_PIPELINE(pwriterRec->Movie));
    gst_bus_add_watch(pwriterRec->bus, (GstBusFunc) PsychMovieBusCallback, pwriterRec);
    gst_object_unref(pwriterRec->bus);

    // Start the pipeline:
    if (!PsychMoviePipelineSetState(pwriterRec->Movie, GST_STATE_PLAYING, 10)) {
        if (PsychPrefStateGet_Verbosity() > 0) printf("PTB-ERROR: In CreateMovie: Creating movie file with handle %i [%s] failed: Failed to start movie encoding pipeline!\n", moviehandle, moviefile);
        goto bail;
    }

    PsychGSProcessMovieContext(pwriterRec->Context, FALSE);

    // Increment count of open movie writers:
    moviewritercount++;

    if (PsychPrefStateGet_Verbosity() > 3) printf("PTB-INFO: Moviehandle %i successfully opened for movie writing into file '%s'.\n", moviehandle, moviefile);

    // Should we dump the whole encoding pipeline graph to a file for visualization
    // with GraphViz? This can be controlled via PsychTweak('GStreamerDumpFilterGraph' dirname);
    if (getenv("GST_DEBUG_DUMP_DOT_DIR")) {
        // Dump complete encoding filter graph to a .dot file for later visualization with GraphViz:
        printf("PTB-DEBUG: Dumping movie encoder graph for movie %s to directory %s.\n", moviefile, getenv("GST_DEBUG_DUMP_DOT_DIR"));
        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pwriterRec->Movie), GST_DEBUG_GRAPH_SHOW_ALL, "PsychMovieWritingGraph");
    }

    // Return new handle:
    return(moviehandle);

bail:
    // Failure path: release whatever pipeline pieces were already created and
    // reset the writer record so the slot can be reused:
    if (pwriterRec->ptbvideoappsrc) gst_object_unref(GST_OBJECT(pwriterRec->ptbvideoappsrc));
    pwriterRec->ptbvideoappsrc = NULL;

    if (pwriterRec->ptbaudioappsrc) gst_object_unref(GST_OBJECT(pwriterRec->ptbaudioappsrc));
    pwriterRec->ptbaudioappsrc = NULL;

    if (pwriterRec->Movie) gst_object_unref(GST_OBJECT(pwriterRec->Movie));
    pwriterRec->Movie = NULL;

    if (pwriterRec->Context) g_main_loop_unref(pwriterRec->Context);
    pwriterRec->Context = NULL;

    // Return failure:
    return(-1);
}