void GStreamerBaseFrameSourceImpl::FinalizeGstPipeLine()
{
    if (pipeline)
    {
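        // drain any pending bus messages before and after the state change
        // so that errors posted during shutdown are still reported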
        handleGStreamerMessages();

        gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
        handleGStreamerMessages();

        gst_object_unref(GST_OBJECT(bus));
        bus = NULL;

        gst_object_unref(GST_OBJECT(pipeline));
        pipeline = NULL;
    }
}
void GStreamerBaseFrameSourceImpl::close()
{
    handleGStreamerMessages();
    FinalizeGstPipeLine();

    if (devMem != NULL)
    {
        cudaFree(devMem);
        devMem = NULL;
    }

    if (scaledImage)
    {
        vxReleaseImage(&scaledImage);
        scaledImage = NULL;
    }
}
bool GStreamerCameraFrameSourceImpl::InitializeGstPipeLine()
{
    GstStateChangeReturn status;
    end = true;

    pipeline = GST_PIPELINE(gst_pipeline_new(NULL));
    if (pipeline == NULL)
    {
        printf("Cannot create Gstreamer pipeline\n");
        return false;
    }

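    // hold a reference to the pipeline's bus; FinalizeGstPipeLine() drops it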
    bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));

    // create v4l2src
    GstElement * v4l2src = gst_element_factory_make("v4l2src", NULL);
    if (v4l2src == NULL)
    {
        printf("Cannot create v4l2src\n");
        FinalizeGstPipeLine();

        return false;
    }

    std::ostringstream cameraDev;
    cameraDev << "/dev/video" << cameraIdx;
    g_object_set(G_OBJECT(v4l2src), "device", cameraDev.str().c_str(), NULL);

    gst_bin_add(GST_BIN(pipeline), v4l2src);

    // create color convert element
    GstElement * color = gst_element_factory_make(COLOR_ELEM, NULL);
    if (color == NULL)
    {
        printf("Cannot create %s element\n", COLOR_ELEM);
        FinalizeGstPipeLine();

        return false;
    }

    gst_bin_add(GST_BIN(pipeline), color);

    // create appsink element
    sink = gst_element_factory_make("appsink", NULL);
    if (sink == NULL)
    {
        printf("Cannot create appsink element\n");
        FinalizeGstPipeLine();

        return false;
    }

    gst_bin_add(GST_BIN(pipeline), sink);

    // if initial values for FrameSource::Parameters were not specified,
    // fall back to sane defaults so we do not request overly large frames
    if (configuration.frameWidth == (vx_uint32)-1)
        configuration.frameWidth = 1920;
    if (configuration.frameHeight == (vx_uint32)-1)
        configuration.frameHeight = 1080;
    if (configuration.fps == (vx_uint32)-1)
        configuration.fps = 30;

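    // note: GStreamer 0.10 and 1.x spell raw-video caps differently: 0.10
    // uses media types such as "video/x-raw-rgb", while 1.x unified them
    // into "video/x-raw" with an explicit "format" field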
#if GST_VERSION_MAJOR == 0
    GstCaps* caps_v4l2src = gst_caps_new_simple ("video/x-raw-rgb",
                 "width", GST_TYPE_INT_RANGE, 1, (int)configuration.frameWidth,
                 "height", GST_TYPE_INT_RANGE, 1, (int)configuration.frameHeight,
                 "framerate", GST_TYPE_FRACTION, (int)configuration.fps, 1,
                 NULL);
#else
    std::ostringstream stream;
    stream << "video/x-raw, format=(string){RGB, GRAY8}, width=[1," << configuration.frameWidth <<
              "], height=[1," << configuration.frameHeight << "], framerate=" << configuration.fps << "/1;";

    GstCaps* caps_v4l2src = gst_caps_from_string(stream.str().c_str());
#endif

    if (caps_v4l2src == NULL)
    {
        printf("Failed to create caps\n");
        FinalizeGstPipeLine();

        return false;
    }

    // link v4l2src -> color, constrained by the caps
    if (!gst_element_link_filtered(v4l2src, color, caps_v4l2src))
    {
        printf("GStreamer: cannot link v4l2src -> color using caps\n");
        FinalizeGstPipeLine();
        gst_caps_unref(caps_v4l2src);

        return false;
    }
    gst_caps_unref(caps_v4l2src);

    // link color -> appsink
    if (!gst_element_link(color, sink))
    {
        printf("GStreamer: cannot link color -> appsink\n");
        FinalizeGstPipeLine();

        return false;
    }
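
    // the assembled pipeline is roughly equivalent to the following
    // gst-launch-1.0 command (assuming camera index 0, the default
    // 1920x1080 @ 30 FPS limits, and COLOR_ELEM resolving to videoconvert
    // on this platform):
    //
    //   gst-launch-1.0 v4l2src device=/dev/video0 \
    //       ! "video/x-raw, format=(string){RGB, GRAY8}, width=[1,1920],
    //          height=[1,1080], framerate=30/1" \
    //       ! videoconvert ! appsink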

    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), true);
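    // with drop enabled and max-buffers == 1, appsink always hands out the
    // most recent frame, so a slow consumer never stalls the pipeline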

    // do not emit signals: all calls will be synchronous and blocking
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);

#if GST_VERSION_MAJOR == 0
    GstCaps* caps_appsink = gst_caps_new_simple("video/x-raw-rgb",
                                                "bpp",        G_TYPE_INT, 24,
                                                "red_mask",   G_TYPE_INT, 0xFF0000,
                                                "green_mask", G_TYPE_INT, 0x00FF00,
                                                "blue_mask",  G_TYPE_INT, 0x0000FF,
                                                NULL);
#else
    // support 1 and 3 channel 8 bit data
    GstCaps* caps_appsink = gst_caps_from_string("video/x-raw, format=(string){RGB, GRAY8};");
#endif
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps_appsink);
    gst_caps_unref(caps_appsink);

    // Force pipeline to play video as fast as possible, ignoring system clock
    gst_pipeline_use_clock(pipeline, NULL);

    status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    handleGStreamerMessages();

    if (status == GST_STATE_CHANGE_ASYNC)
    {
        // wait for status update
        status = gst_element_get_state(GST_ELEMENT(pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
    }
    if (status == GST_STATE_CHANGE_FAILURE)
    {
        printf("GStreamer: unable to start playback\n");
        FinalizeGstPipeLine();

        return false;
    }

    std::unique_ptr<GstPad, GStreamerObjectDeleter> pad(gst_element_get_static_pad(color, "src"));
#if GST_VERSION_MAJOR == 0
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> bufferCaps(gst_pad_get_caps(pad.get()));
#else
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> bufferCaps(gst_pad_get_current_caps(pad.get()));
#endif
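
    // note: gst_pad_get_current_caps() returns NULL until negotiation has
    // finished; the pipeline has already reached PLAYING above, so the caps
    // are expected to be fixed at this point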

    const GstStructure *structure = gst_caps_get_structure(bufferCaps.get(), 0);

    // fall back to the configured size if the caps cannot be queried below
    int width = (int)configuration.frameWidth;
    int height = (int)configuration.frameHeight;
    if (!gst_structure_get_int(structure, "width", &width))
    {
        handleGStreamerMessages();
        printf("Cannot query video width\n");
    }

    if (!gst_structure_get_int(structure, "height", &height))
    {
        handleGStreamerMessages();
        printf("Cannot query video height\n");
    }

    configuration.frameWidth = static_cast<vx_uint32>(width);
    configuration.frameHeight = static_cast<vx_uint32>(height);

    gint num = 0, denom = 1;
    if (!gst_structure_get_fraction(structure, "framerate", &num, &denom))
    {
        handleGStreamerMessages();
        printf("Cannot query video fps\n");
    }

    configuration.fps = static_cast<vx_uint32>(static_cast<float>(num) / denom);
    end = false;

    return true;
}
FrameSource::FrameStatus GStreamerBaseFrameSourceImpl::fetch(vx_image image, vx_uint32 /*timeout*/)
{
    if (end)
    {
        close();
        return FrameSource::CLOSED;
    }

    handleGStreamerMessages();

    if (gst_app_sink_is_eos(GST_APP_SINK(sink)))
    {
        close();
        return FrameSource::CLOSED;
    }

    if ((lastFrameTimestamp.toc()/1000.0) > Application::get().getSourceDefaultTimeout())
    {
        close();
        return FrameSource::CLOSED;
    }

    lastFrameTimestamp.tic();

#if GST_VERSION_MAJOR == 0
    std::unique_ptr<GstBuffer, GStreamerObjectDeleter> bufferHolder(
        gst_app_sink_pull_buffer(GST_APP_SINK(sink)));
    GstBuffer* buffer = bufferHolder.get();
#else
    std::unique_ptr<GstSample, GStreamerObjectDeleter> sample(gst_app_sink_pull_sample(GST_APP_SINK(sink)));

    if (!sample)
    {
        close();
        return FrameSource::CLOSED;
    }

    GstBuffer* buffer = gst_sample_get_buffer(sample.get());
#endif

    gint          width;
    gint          height;

#if GST_VERSION_MAJOR == 0
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> bufferCapsHolder(gst_buffer_get_caps(buffer));
    GstCaps* bufferCaps = bufferCapsHolder.get();
#else
    GstCaps* bufferCaps = gst_sample_get_caps(sample.get());
#endif
    // bail out if there are no caps
    assert(gst_caps_get_size(bufferCaps) == 1);
    GstStructure* structure = gst_caps_get_structure(bufferCaps, 0);

    // bail out if width or height cannot be queried
    if (!gst_structure_get_int(structure, "width", &width) ||
            !gst_structure_get_int(structure, "height", &height))
    {
        close();
        return FrameSource::CLOSED;
    }

    int depth = 3;
#if GST_VERSION_MAJOR > 0
    depth = 0;
    const gchar* name = gst_structure_get_name(structure);
    const gchar* format = gst_structure_get_string(structure, "format");

    if (!name || !format)
    {
        close();
        return FrameSource::CLOSED;
    }

    // we support 2 types of data:
    //     video/x-raw, format=RGB   -> 8 bit, 3 channels
    //     video/x-raw, format=GRAY8 -> 8 bit, 1 channel
    if (strcasecmp(name, "video/x-raw") == 0)
    {
        if (strcasecmp(format, "RGB") == 0)
        {
            depth = 3;
        }
        else if(strcasecmp(format, "GRAY8") == 0)
        {
            depth = 1;
        }
    }
#endif
    if (depth == 0)
    {
        close();
        return FrameSource::CLOSED;
    }

    vx_imagepatch_addressing_t decodedImageAddr;
    decodedImageAddr.dim_x = width;
    decodedImageAddr.dim_y = height;
    decodedImageAddr.stride_x = depth;
    // GStreamer uses the row width rounded up to the nearest multiple of 4 bytes as the stride
    decodedImageAddr.stride_y = ((width*depth+3)/4)*4;
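    // e.g. a 641-pixel-wide RGB row occupies 641 * 3 = 1923 bytes and is
    // padded to a 1924-byte stride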
    decodedImageAddr.scale_x = 1;
    decodedImageAddr.scale_y = 1;
    vx_image decodedImage = NULL;
    vx_df_image_e vx_type_map[5] = { VX_DF_IMAGE_VIRT, VX_DF_IMAGE_U8,
                                     VX_DF_IMAGE_VIRT, VX_DF_IMAGE_RGB, VX_DF_IMAGE_RGBX };
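    // vx_type_map is indexed by channel count: 1 -> VX_DF_IMAGE_U8,
    // 3 -> VX_DF_IMAGE_RGB, 4 -> VX_DF_IMAGE_RGBX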

    // fetch the destination image's width, height and format
    vx_uint32 actual_width, actual_height;
    vx_df_image_e actual_format;
    NVXIO_SAFE_CALL( vxQueryImage(image, VX_IMAGE_ATTRIBUTE_WIDTH, (void *)&actual_width, sizeof(actual_width)) );
    NVXIO_SAFE_CALL( vxQueryImage(image, VX_IMAGE_ATTRIBUTE_HEIGHT, (void *)&actual_height, sizeof(actual_height)) );
    NVXIO_SAFE_CALL( vxQueryImage(image, VX_IMAGE_ATTRIBUTE_FORMAT, (void *)&actual_format, sizeof(actual_format)) );
    bool needScale = width != (int)configuration.frameWidth || height != (int)configuration.frameHeight;

    // configured and actual image sizes must be the same!
    if ((actual_height != configuration.frameHeight) ||
            (actual_width != configuration.frameWidth) ||
            (actual_format != configuration.format))
    {
        close();

        NVXIO_THROW_EXCEPTION("Actual image [ " << actual_width << " x " << actual_height <<
                              " ] does not equal configuration one [ " << configuration.frameWidth
                              << " x " << configuration.frameHeight << " ]");
    }

    // we assume that the decoded image has no more than 3 channels per pixel
    if (!devMem)
    {
        NVXIO_ASSERT( cudaSuccess == cudaMallocPitch(&devMem, &devMemPitch, width * 3, height) );
    }

    // check if decoded image format has changed
    if (scaledImage)
    {
        vx_df_image_e scaled_format;
        NVXIO_SAFE_CALL( vxQueryImage(scaledImage, VX_IMAGE_ATTRIBUTE_FORMAT, (void *)&scaled_format, sizeof(scaled_format)) );

        if (scaled_format != vx_type_map[depth])
        {
            vxReleaseImage(&scaledImage);
            scaledImage = NULL;
        }
    }

    if (needScale && !scaledImage)
    {
        scaledImage = vxCreateImage(vxContext, configuration.frameWidth,
                                    configuration.frameHeight, vx_type_map[depth]);
        NVXIO_CHECK_REFERENCE( scaledImage );
    }

#if GST_VERSION_MAJOR == 0
    bool needConvert = configuration.format != VX_DF_IMAGE_RGB;
    void * decodedPtr = GST_BUFFER_DATA(buffer);
#else
    GstMapInfo info;

    gboolean success = gst_buffer_map(buffer, &info, (GstMapFlags)GST_MAP_READ);
    if (!success)
    {
        printf("GStreamer: unable to map buffer\n");
        close();
        return FrameSource::CLOSED;
    }

    bool needConvert = configuration.format != vx_type_map[depth];
    void * decodedPtr = info.data;
#endif

    if (!needConvert && !needScale)
    {
        decodedImage = vxCreateImageFromHandle(vxContext, vx_type_map[depth], &decodedImageAddr,
                                               &decodedPtr, VX_IMPORT_TYPE_HOST);
        NVXIO_CHECK_REFERENCE( decodedImage );
        NVXIO_SAFE_CALL( nvxuCopyImage(vxContext, decodedImage, image) );
    }
    else
    {
        // 1. upload the decoded image to the CUDA buffer
        NVXIO_ASSERT( cudaSuccess == cudaMemcpy2D(devMem, devMemPitch,
                                                  decodedPtr, decodedImageAddr.stride_y,
                                                  decodedImageAddr.dim_x * depth, decodedImageAddr.dim_y,
                                                  cudaMemcpyHostToDevice) );
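        // cudaMemcpy2D copies row by row, so the 4-byte-aligned host stride
        // and the device pitch are allowed to differ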

        // 2. create vx_image wrapper for decoded buffer
        decodedImageAddr.stride_y = static_cast<vx_int32>(devMemPitch);
        decodedImage = vxCreateImageFromHandle(vxContext, vx_type_map[depth], &decodedImageAddr,
                                               &devMem, NVX_IMPORT_TYPE_CUDA);
        NVXIO_CHECK_REFERENCE( decodedImage );

        if (needScale)
        {
            // 3. scale image
            NVXIO_SAFE_CALL( vxuScaleImage(vxContext, decodedImage, scaledImage, VX_INTERPOLATION_TYPE_BILINEAR) );

            // 4. convert to dst image
            NVXIO_SAFE_CALL( vxuColorConvert(vxContext, scaledImage, image) );
        }
        else
        {
            // 3. convert to dst image
            NVXIO_SAFE_CALL( vxuColorConvert(vxContext, decodedImage, image) );
        }
    }

#if GST_VERSION_MAJOR != 0
    gst_buffer_unmap(buffer, &info);
#endif

    NVXIO_SAFE_CALL( vxReleaseImage(&decodedImage) );

    return FrameSource::OK;
}
bool GStreamerNvCameraFrameSourceImpl::InitializeGstPipeLine()
{
    // select the config with the max FPS value as the default
    NvCameraConfigs nvcameraconfig = configs[2];

    // use user specified camera config
    if ( (configuration.frameWidth != (vx_uint32)-1) &&
         (configuration.frameHeight != (vx_uint32)-1) )
    {
        nvcameraconfig.frameWidth = configuration.frameWidth;
        nvcameraconfig.frameHeight = configuration.frameHeight;
        nvcameraconfig.fps = 30;

        // select FPS default for the specified config
        for (vx_size i = 0; i < dimOf(configs); ++i)
        {
            if ((nvcameraconfig.frameWidth == configs[i].frameWidth) &&
                (nvcameraconfig.frameHeight == configs[i].frameHeight))
            {
                nvcameraconfig.fps = configs[i].fps;
                break;
            }
        }
    }

    if (configuration.fps == (vx_uint32)-1)
        configuration.fps = nvcameraconfig.fps;

    end = true;

    pipeline = GST_PIPELINE(gst_pipeline_new(NULL));
    if (pipeline == NULL)
    {
        NVXIO_PRINT("Cannot create Gstreamer pipeline");
        return false;
    }

    bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));

    // create nvcamerasrc
    GstElement * nvcamerasrc = gst_element_factory_make("nvcamerasrc", NULL);
    if (nvcamerasrc == NULL)
    {
        NVXIO_PRINT("Cannot create nvcamerasrc");
        NVXIO_PRINT("\"nvcamerasrc\" element is not available on this platform");
        FinalizeGstPipeLine();

        return false;
    }

    std::ostringstream stream;
    stream << configuration.fps << " " << configuration.fps;
    std::string fpsRange = stream.str();
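    // nvcamerasrc's fpsRange property takes "<low> <high>"; e.g. "30 30"
    // pins both bounds of the frame-rate range to 30 FPS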

    g_object_set(G_OBJECT(nvcamerasrc), "sensor-id", cameraIdx, NULL);
    g_object_set(G_OBJECT(nvcamerasrc), "fpsRange", fpsRange.c_str(), NULL);

    gst_bin_add(GST_BIN(pipeline), nvcamerasrc);

    // create nvvideosink element
    GstElement * nvvideosink = gst_element_factory_make("nvvideosink", NULL);
    if (nvvideosink == NULL)
    {
        NVXIO_PRINT("Cannot create nvvideosink element");
        FinalizeGstPipeLine();

        return false;
    }

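    // tune nvvideosink for minimal latency: never drop buffers for being
    // late, no throttling or render delay, and no clock synchronization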
    g_object_set(G_OBJECT(nvvideosink), "display", context.display, NULL);
    g_object_set(G_OBJECT(nvvideosink), "stream", context.stream, NULL);
    g_object_set(G_OBJECT(nvvideosink), "fifo", fifoMode, NULL);
    g_object_set(G_OBJECT(nvvideosink), "max-lateness", -1, NULL);
    g_object_set(G_OBJECT(nvvideosink), "throttle-time", 0, NULL);
    g_object_set(G_OBJECT(nvvideosink), "render-delay", 0, NULL);
    g_object_set(G_OBJECT(nvvideosink), "qos", FALSE, NULL);
    g_object_set(G_OBJECT(nvvideosink), "sync", FALSE, NULL);
    g_object_set(G_OBJECT(nvvideosink), "async", TRUE, NULL);

    gst_bin_add(GST_BIN(pipeline), nvvideosink);

    // build caps for the nvcamerasrc -> nvvideosink link
    stream.str(std::string());
    stream << "video/x-raw(memory:NVMM), width=(int)" << nvcameraconfig.frameWidth << ", "
              "height=(int)" << nvcameraconfig.frameHeight << ", format=(string){I420}, "
              "framerate=(fraction)" << nvcameraconfig.fps << "/1;";

    std::unique_ptr<GstCaps, GStreamerObjectDeleter> caps_nvcamerasrc(
        gst_caps_from_string(stream.str().c_str()));

    if (!caps_nvcamerasrc)
    {
        NVXIO_PRINT("Failed to create caps");
        FinalizeGstPipeLine();

        return false;
    }

    if (!gst_element_link_filtered(nvcamerasrc, nvvideosink, caps_nvcamerasrc.get()))
    {
        NVXIO_PRINT("GStreamer: cannot link nvcamerasrc -> nvvideosink using caps");
        FinalizeGstPipeLine();

        return false;
    }
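
    // the resulting pipeline is roughly equivalent to (assuming sensor 0
    // and a 1920x1080 @ 30 FPS camera config):
    //
    //   gst-launch-1.0 nvcamerasrc sensor-id=0 fpsRange="30 30" \
    //       ! "video/x-raw(memory:NVMM), width=(int)1920, height=(int)1080,
    //          format=(string)I420, framerate=(fraction)30/1" \
    //       ! nvvideosink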

    // Force pipeline to play video as fast as possible, ignoring system clock
    gst_pipeline_use_clock(pipeline, NULL);

    GstStateChangeReturn status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    handleGStreamerMessages();

    if (status == GST_STATE_CHANGE_ASYNC)
    {
        // wait for status update
        status = gst_element_get_state(GST_ELEMENT(pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
    }
    if (status == GST_STATE_CHANGE_FAILURE)
    {
        NVXIO_PRINT("GStreamer: unable to start playback");
        FinalizeGstPipeLine();

        return false;
    }

    vx_uint32 initialFPS = configuration.fps;

    if (!updateConfiguration(nvcamerasrc, configuration))
    {
        FinalizeGstPipeLine();
        return false;
    }

    // if the user specified an FPS explicitly, prefer it: the value
    // retrieved via updateConfiguration corresponds to the camera
    // config FPS, not to the user's request
    if (initialFPS != (vx_uint32)-1)
        configuration.fps = initialFPS;

    end = false;

    return true;
}
bool GStreamerOpenMAXFrameSourceImpl::InitializeGstPipeLine()
{
    GstStateChangeReturn status;
    end = true;

    std::string uri;
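    // uridecodebin accepts URIs only, so a plain file path has to be
    // resolved and converted to its file:// form first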
    if (!gst_uri_is_valid(fileName.c_str()))
    {
        char* real = realpath(fileName.c_str(), NULL);

        if (!real)
        {
            NVXIO_PRINT("Can't resolve path \"%s\": %s", fileName.c_str(), strerror(errno));
            return false;
        }

        std::unique_ptr<char[], GlibDeleter> pUri(g_filename_to_uri(real, NULL, NULL));
        free(real);
        uri = pUri.get();
    }
    else
    {
        uri = fileName;
    }

    pipeline = GST_PIPELINE(gst_pipeline_new(NULL));
    if (pipeline == NULL)
    {
        NVXIO_PRINT("Cannot create Gstreamer pipeline");
        return false;
    }

    bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));

    // create uridecodebin
    GstBin * uriDecodeBin = GST_BIN(gst_element_factory_make("uridecodebin", NULL));
    if (uriDecodeBin == NULL)
    {
        NVXIO_PRINT("Cannot create uridecodebin");
        FinalizeGstPipeLine();

        return false;
    }

    g_object_set(G_OBJECT(uriDecodeBin), "uri", uri.c_str(), NULL);
    g_object_set(G_OBJECT(uriDecodeBin), "message-forward", TRUE, NULL);

    gst_bin_add(GST_BIN(pipeline), GST_ELEMENT(uriDecodeBin));

    // create nvvidconv
    GstElement * nvvidconv = gst_element_factory_make("nvvidconv", NULL);
    if (nvvidconv == NULL)
    {
        NVXIO_PRINT("Cannot create nvvidconv");
        FinalizeGstPipeLine();

        return false;
    }

    gst_bin_add(GST_BIN(pipeline), nvvidconv);

    // create nvvideosink element
    GstElement * nvvideosink = gst_element_factory_make("nvvideosink", NULL);
    if (nvvideosink == NULL)
    {
        NVXIO_PRINT("Cannot create nvvideosink element");
        FinalizeGstPipeLine();

        return false;
    }

    g_object_set(G_OBJECT(nvvideosink), "display", context.display, NULL);
    g_object_set(G_OBJECT(nvvideosink), "stream", context.stream, NULL);
    g_object_set(G_OBJECT(nvvideosink), "fifo", fifoMode, NULL);

    gst_bin_add(GST_BIN(pipeline), nvvideosink);

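    // "autoplug-select" lets autoPlugSelect() steer decoder selection
    // (presumably towards the OpenMAX/omx decoders, given this class),
    // while "pad-added" links uridecodebin's dynamically created source
    // pad to nvvidconv once the stream type is known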
    g_signal_connect(uriDecodeBin, "autoplug-select", G_CALLBACK(GStreamerOpenMAXFrameSourceImpl::autoPlugSelect), NULL);
    g_signal_connect(uriDecodeBin, "pad-added", G_CALLBACK(GStreamerBaseFrameSourceImpl::newGstreamerPad), nvvidconv);

    std::unique_ptr<GstCaps, GStreamerObjectDeleter> caps_nvvidconv(
        gst_caps_from_string("video/x-raw(memory:NVMM), format=(string){I420}"));

    // link nvvidconv -> nvvideosink using caps
    if (!gst_element_link_filtered(nvvidconv, nvvideosink, caps_nvvidconv.get()))
    {
        NVXIO_PRINT("GStreamer: cannot link nvvidconv -> nvvideosink");
        FinalizeGstPipeLine();

        return false;
    }
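
    // the assembled pipeline is roughly:
    //
    //   gst-launch-1.0 uridecodebin uri=file:///path/to/video \
    //       ! nvvidconv ! "video/x-raw(memory:NVMM), format=(string){I420}" \
    //       ! nvvideosink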

    // Force pipeline to play video as fast as possible, ignoring system clock
    gst_pipeline_use_clock(pipeline, NULL);

    status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);

    handleGStreamerMessages();
    if (status == GST_STATE_CHANGE_ASYNC)
    {
        // wait for status update
        status = gst_element_get_state(GST_ELEMENT(pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
    }
    if (status == GST_STATE_CHANGE_FAILURE)
    {
        NVXIO_PRINT("GStreamer: unable to start playback");
        FinalizeGstPipeLine();

        return false;
    }

    // GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "gst_pipeline");

    if (!updateConfiguration(nvvidconv, configuration))
    {
        FinalizeGstPipeLine();
        return false;
    }

    end = false;

    return true;
}