Example #1
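This example implements the hysteresis (edge-tracing) step of a Canny-style detector with the OpenVX 1.0 image-patch API: pixels above the upper threshold become definite edges (YES), pixels at or below the lower threshold are rejected (NO), and in-between pixels (MAYBE) survive only if they can be reached from a YES pixel. The function references a neighbour-offset table and an element-count macro defined elsewhere in its source file; the definitions below are a minimal sketch reconstructed from how the snippet uses them (the names offset_t, dir_offsets and dimof appear in the snippet, but these exact bodies are assumptions):

#include <stdlib.h>
#include <VX/vx.h>

/* Assumed helpers, reconstructed from their usage in vxEdgeTrace(). */
#define dimof(arr) (sizeof(arr) / sizeof((arr)[0]))

struct offset_t { vx_int32 x, y; };

/* The 8-connected neighbourhood around a pixel. */
static const struct offset_t dir_offsets[8] = {
    { -1, -1 }, {  0, -1 }, { +1, -1 },
    { -1,  0 },             { +1,  0 },
    { -1, +1 }, {  0, +1 }, { +1, +1 },
};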
static vx_status vxEdgeTrace(vx_image norm, vx_threshold threshold, vx_image output)
{
    vx_rectangle_t rect;
    vx_imagepatch_addressing_t norm_addr, output_addr;
    void *norm_base = NULL, *output_base = NULL;
    vx_uint32 y = 0, x = 0;
    vx_int32 lower = 0, upper = 0;
    vx_status status = VX_SUCCESS;
    vxQueryThreshold(threshold, VX_THRESHOLD_ATTRIBUTE_THRESHOLD_LOWER, &lower, sizeof(lower));
    vxQueryThreshold(threshold, VX_THRESHOLD_ATTRIBUTE_THRESHOLD_UPPER, &upper, sizeof(upper));
    vxGetValidRegionImage(norm, &rect);

    status |= vxAccessImagePatch(norm, &rect, 0, &norm_addr, &norm_base, VX_READ_ONLY);
    status |= vxAccessImagePatch(output, &rect, 0, &output_addr, &output_base, VX_WRITE_ONLY);
    if (status == VX_SUCCESS) {
        const vx_uint8 NO = 0, MAYBE = 127, YES = 255;

        /* Initially we add all YES pixels to the stack. Later we only add MAYBE
           pixels to it, and we reset their state to YES afterwards; so we can never
           add the same pixel more than once. That means that the stack size is bounded
           by the image size. */
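        /* Note: the sample does not check the malloc() result; production code
           should handle allocation failure before dereferencing stack_top. */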
        vx_uint32 (*tracing_stack)[2] = malloc(output_addr.dim_y * output_addr.dim_x * sizeof *tracing_stack);
        vx_uint32 (*stack_top)[2] = tracing_stack;

        /* First pass: classify every pixel against the double threshold and
           push confirmed (YES) pixels onto the tracing stack. */
        for (y = 0; y < norm_addr.dim_y; y++)
            for (x = 0; x < norm_addr.dim_x; x++)
            {
                vx_uint16 *norm_ptr = vxFormatImagePatchAddress2d(norm_base, x, y, &norm_addr);
                vx_uint8 *output_ptr = vxFormatImagePatchAddress2d(output_base, x, y, &output_addr);

                if (*norm_ptr > upper)
                {
                    *output_ptr = YES;
                    (*stack_top)[0] = x;
                    (*stack_top)[1] = y;
                    ++stack_top;
                }
                else if (*norm_ptr <= lower)
                {
                    *output_ptr = NO;
                }
                else
                {
                    *output_ptr = MAYBE;
                }
            }

        /* Second pass: pop confirmed edge pixels and promote their MAYBE
           neighbours to YES, pushing each promoted pixel so that its own
           neighbours are examined in turn. */
        while (stack_top != tracing_stack) {
            int i;
            --stack_top;
            x = (*stack_top)[0];
            y = (*stack_top)[1];

            for (i = 0; i < dimof(dir_offsets); ++i) {
                const struct offset_t offset = dir_offsets[i];
                vx_uint32 new_x, new_y;
                vx_uint8 *output_ptr;

                if (x == 0 && offset.x < 0) continue;
                if (x == output_addr.dim_x - 1 && offset.x > 0) continue;
                if (y == 0 && offset.y < 0) continue;
                if (y == output_addr.dim_y - 1 && offset.y > 0) continue;

                new_x = x + offset.x;
                new_y = y + offset.y;

                output_ptr = vxFormatImagePatchAddress2d(output_base, new_x, new_y, &output_addr);
                if (*output_ptr != MAYBE) continue;

                *output_ptr = YES;

                (*stack_top)[0] = new_x;
                (*stack_top)[1] = new_y;
                ++stack_top;
            }
        }

        free(tracing_stack);

        /* Final pass: pixels still marked MAYBE were never reached from a
           strong edge, so suppress them. */
        for (y = 0; y < output_addr.dim_y; y++)
            for (x = 0; x < output_addr.dim_x; x++)
            {
                vx_uint8 *output_ptr = vxFormatImagePatchAddress2d(output_base, x, y, &output_addr);
                if (*output_ptr == MAYBE) *output_ptr = NO;
            }

        /* Committing the read-only patch with a NULL rectangle releases it
           without writing pixels back; the output patch is committed in full. */
        status |= vxCommitImagePatch(norm, 0, 0, &norm_addr, norm_base);
        status |= vxCommitImagePatch(output, &rect, 0, &output_addr, output_base);
    }
    return status;
}
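A minimal sketch of how vxEdgeTrace() might be driven. The OpenVX calls are standard 1.0 API, but the surrounding setup ('context', a VX_DF_IMAGE_U16 'norm' gradient-magnitude image and a VX_DF_IMAGE_U8 'output' image) is assumed to exist already:

/* Hypothetical caller: create a RANGE threshold and run the edge trace. */
vx_threshold hyst = vxCreateThreshold(context, VX_THRESHOLD_TYPE_RANGE, VX_TYPE_INT32);
vx_int32 lower = 50, upper = 100;
vxSetThresholdAttribute(hyst, VX_THRESHOLD_ATTRIBUTE_THRESHOLD_LOWER, &lower, sizeof(lower));
vxSetThresholdAttribute(hyst, VX_THRESHOLD_ATTRIBUTE_THRESHOLD_UPPER, &upper, sizeof(upper));
vx_status status = vxEdgeTrace(norm, hyst, output);
vxReleaseThreshold(&hyst);

Example #2

This example implements a camera frame source on top of GStreamer's nvcamerasrc element (NVIDIA Tegra platforms), pushing captured frames into an EGLStream via nvvideosink. Note that dimOf() below is nvxio's own array-element-count helper, analogous to dimof() in Example #1.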
namespace nvxio
{

struct NvCameraConfigs
{
    vx_uint32 frameWidth, frameHeight, fps;
};

static const NvCameraConfigs configs[4] =
{
    { vx_uint32(2592), vx_uint32(1944), vx_uint32(30)  }, // 0
    { vx_uint32(2592), vx_uint32(1458), vx_uint32(30)  }, // 1
    { vx_uint32(1280), vx_uint32(720) , vx_uint32(120) }, // 2
    { vx_uint32(2592), vx_uint32(1944), vx_uint32(24)  }  // 3
};
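// Note: entries 0 and 3 share the 2592x1944 resolution; the lookup in
// InitializeGstPipeLine() below matches on resolution only, so for that
// size it always selects entry 0 (30 FPS).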

GStreamerNvCameraFrameSourceImpl::GStreamerNvCameraFrameSourceImpl(vx_context vxcontext, uint cameraIdx_) :
    GStreamerEGLStreamSinkFrameSourceImpl(vxcontext, FrameSource::CAMERA_SOURCE, "GStreamerNvCameraFrameSource", false),
    cameraIdx(cameraIdx_)
{
}

bool GStreamerNvCameraFrameSourceImpl::setConfiguration(const FrameSource::Parameters& params)
{
    bool result = true;

    if (end)
    {
        // The pipeline is not running, so simply record the requested parameters.
        configuration.frameHeight = params.frameHeight;
        configuration.frameWidth = params.frameWidth;
        configuration.fps = params.fps;
    }
    else
    {
        // The pipeline is already running: (vx_uint32)-1 means "unspecified",
        // and any explicitly requested value must match the active configuration.
        if ((params.frameWidth != (vx_uint32)-1) && (params.frameWidth != configuration.frameWidth))
            result = false;
        if ((params.frameHeight != (vx_uint32)-1) && (params.frameHeight != configuration.frameHeight))
            result = false;
        if ((params.fps != (vx_uint32)-1) && (params.fps != configuration.fps))
            result = false;
    }

    configuration.format = params.format;

    return result;
}
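For reference, a hypothetical caller might configure the source as follows. The Parameters fields shown are exactly the ones setConfiguration() reads; the instance 'source' and any other field defaults are assumptions:

// Usage sketch, not part of nvxio itself.
FrameSource::Parameters params;
params.frameWidth  = 1280;           // (vx_uint32)-1 would mean "unspecified"
params.frameHeight = 720;
params.fps         = (vx_uint32)-1;  // let InitializeGstPipeLine() pick the mode default (120 FPS here)
source->setConfiguration(params);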

bool GStreamerNvCameraFrameSourceImpl::InitializeGstPipeLine()
{
    // Select the config with the maximum FPS value (1280x720 @ 120) as the default
    NvCameraConfigs nvcameraconfig = configs[2];

    // Use the user-specified camera config, if one was provided
    if ( (configuration.frameWidth != (vx_uint32)-1) &&
         (configuration.frameHeight != (vx_uint32)-1) )
    {
        nvcameraconfig.frameWidth = configuration.frameWidth;
        nvcameraconfig.frameHeight = configuration.frameHeight;
        nvcameraconfig.fps = 30;

        // Select the default FPS for the specified resolution
        for (vx_size i = 0; i < dimOf(configs); ++i)
        {
            if ((nvcameraconfig.frameWidth == configs[i].frameWidth) &&
                (nvcameraconfig.frameHeight == configs[i].frameHeight))
            {
                nvcameraconfig.fps = configs[i].fps;
                break;
            }
        }
    }

    if (configuration.fps == (vx_uint32)-1)
        configuration.fps = nvcameraconfig.fps;

    end = true; // mark the source as not streaming while the pipeline is (re)built

    pipeline = GST_PIPELINE(gst_pipeline_new(NULL));
    if (pipeline == NULL)
    {
        NVXIO_PRINT("Cannot create Gstreamer pipeline");
        return false;
    }

    bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));

    // create nvcamerasrc
    GstElement * nvcamerasrc = gst_element_factory_make("nvcamerasrc", NULL);
    if (nvcamerasrc == NULL)
    {
        NVXIO_PRINT("Cannot create nvcamerasrc");
        NVXIO_PRINT("\"nvcamerasrc\" element is not available on this platform");
        FinalizeGstPipeLine();

        return false;
    }

    std::ostringstream stream;
    stream << configuration.fps << " " << configuration.fps;
    std::string fpsRange = stream.str();
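    // nvcamerasrc's "fpsRange" property takes a "<min> <max>" string; using the
    // same value for both bounds pins the capture rate (e.g. "30 30").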

    g_object_set(G_OBJECT(nvcamerasrc), "sensor-id", cameraIdx, NULL);
    g_object_set(G_OBJECT(nvcamerasrc), "fpsRange", fpsRange.c_str(), NULL);

    gst_bin_add(GST_BIN(pipeline), nvcamerasrc);

    // create nvvideosink element
    GstElement * nvvideosink = gst_element_factory_make("nvvideosink", NULL);
    if (nvvideosink == NULL)
    {
        NVXIO_PRINT("Cannot create nvvideosink element");
        FinalizeGstPipeLine();

        return false;
    }

    g_object_set(G_OBJECT(nvvideosink), "display", context.display, NULL);
    g_object_set(G_OBJECT(nvvideosink), "stream", context.stream, NULL);
    g_object_set(G_OBJECT(nvvideosink), "fifo", fifoMode, NULL);
    g_object_set(G_OBJECT(nvvideosink), "max-lateness", -1, NULL);
    g_object_set(G_OBJECT(nvvideosink), "throttle-time", 0, NULL);
    g_object_set(G_OBJECT(nvvideosink), "render-delay", 0, NULL);
    g_object_set(G_OBJECT(nvvideosink), "qos", FALSE, NULL);
    g_object_set(G_OBJECT(nvvideosink), "sync", FALSE, NULL);
    g_object_set(G_OBJECT(nvvideosink), "async", TRUE, NULL);

    gst_bin_add(GST_BIN(pipeline), nvvideosink);

    // link elements
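    // Build the caps string for the camera -> sink link; with the default
    // 1280x720 config it comes out as:
    //   video/x-raw(memory:NVMM), width=(int)1280, height=(int)720,
    //   format=(string){I420}, framerate=(fraction)120/1;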
    stream.str(std::string());
    stream << "video/x-raw(memory:NVMM), width=(int)" << nvcameraconfig.frameWidth << ", "
              "height=(int)" << nvcameraconfig.frameHeight << ", format=(string){I420}, "
              "framerate=(fraction)" << nvcameraconfig.fps << "/1;";

    std::unique_ptr<GstCaps, GStreamerObjectDeleter> caps_nvvidconv(
        gst_caps_from_string(stream.str().c_str()));

    if (!caps_nvvidconv)
    {
        NVXIO_PRINT("Failed to create caps");
        FinalizeGstPipeLine();

        return false;
    }

    if (!gst_element_link_filtered(nvcamerasrc, nvvideosink, caps_nvvidconv.get()))
    {
        NVXIO_PRINT("GStreamer: cannot link nvcamerasrc -> nvvideosink using caps");
        FinalizeGstPipeLine();

        return false;
    }

    // Force pipeline to play video as fast as possible, ignoring system clock
    gst_pipeline_use_clock(pipeline, NULL);

    GstStateChangeReturn status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    handleGStreamerMessages();

    if (status == GST_STATE_CHANGE_ASYNC)
    {
        // wait for status update
        status = gst_element_get_state(GST_ELEMENT(pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
    }
    if (status == GST_STATE_CHANGE_FAILURE)
    {
        NVXIO_PRINT("GStreamer: unable to start playback");
        FinalizeGstPipeLine();

        return false;
    }

    vx_uint32 initialFPS = configuration.fps;

    if (!updateConfiguration(nvcamerasrc, configuration))
    {
        FinalizeGstPipeLine();
        return false;
    }

    // If the user specified an FPS explicitly, restore it: the value retrieved
    // by updateConfiguration() reflects the camera config's FPS, not the request.
    if (initialFPS != (vx_uint32)-1)
        configuration.fps = initialFPS;

    end = false; // the pipeline is now streaming

    return true;
}

} // namespace nvxio