void RealSenseDevice::setDepthBuffer() {
    // Copies the current depth and color frames from the device into the
    // double-buffered depth storage and the single RGB buffer, then flips
    // the active depth buffer index and flags that a new depth frame arrived.
    //
    // NOTE(review): the rs_error out-parameter `e` is never inspected, so a
    // failed rs_get_frame_data() would hand memcpy an invalid pointer —
    // consider checking `e` (or the returned pointer) before copying.

    rs_error * e = 0;

    // Index of the buffer that becomes active once this frame is stored
    // (simple 2-slot ping-pong).
    int next_buffer = (depth_index + 1) % 2;

    // Depth frame: assumed 640x480 16-bit depth — TODO confirm stream config.
    const uint16_t* depthPtr = (const uint16_t*)rs_get_frame_data(dev, depthStream, &e);
    const size_t depthBufferSize = 640*480;
    memcpy(depth_buffer[depth_index], depthPtr, depthBufferSize * sizeof(uint16_t));

    // Color frame: assumed 640x480, 3 bytes per pixel (RGB8) — TODO confirm.
    const unsigned char* rgbPtr = (const unsigned char*)rs_get_frame_data(dev, colorStream, &e);
    memcpy(rgb_buffer, rgbPtr, 640*480*3);

    depth_index = next_buffer;

    gotDepth = true;
}
/* Example #2 */
/* Returns a pointer to the most recent depth frame of device devID.
   The pointer is only valid until the next rs_wait_for_frames() on that device.
   The explicit cast is required: rs_get_frame_data() returns const void *,
   and returning it as char * without a cast discards the qualifier.
   NOTE(review): the shared error object `e` is not checked here — TODO confirm
   callers tolerate a NULL/invalid pointer on failure. */
char * getRealsenseDepthPixels(int devID)    { return (char *) rs_get_frame_data(device[devID].dev, device[devID].depthStreamToUse , &e); }
/* Example #3 */
/* Returns a pointer to the most recent color frame of device devID.
   The pointer is only valid until the next rs_wait_for_frames() on that device.
   Explicit cast added: rs_get_frame_data() returns const void *, so returning
   it as char * without a cast discards the qualifier.
   NOTE(review): the shared error object `e` is not checked here. */
char * getRealsenseColorPixels(int devID)    {  return (char *) rs_get_frame_data(device[devID].dev, device[devID].colorStreamToUse, &e); }
/* Tutorial-style point-cloud viewer: streams depth+color from the first
   connected RealSense device and renders the depth image as a colored
   3D point cloud in a GLFW window (drag to rotate via pitch/yaw). */
int main()
{
    /* Turn on logging. We can separately enable logging to console or to file, and use different severity filters for each. */
    rs_log_to_console(RS_LOG_SEVERITY_WARN, &e);
    check_error();
    /*rs_log_to_file(RS_LOG_SEVERITY_DEBUG, "librealsense.log", &e);
    check_error();*/

    /* Create a context object. This object owns the handles to all connected realsense devices. */
    rs_context * ctx = rs_create_context(RS_API_VERSION, &e);
    check_error();

    /* Query the device count once and reuse it (previously queried twice). */
    int device_count = rs_get_device_count(ctx, &e);
    check_error();
    printf("There are %d connected RealSense devices.\n", device_count);
    if(device_count == 0)
    {
        rs_delete_context(ctx, NULL);
        return EXIT_FAILURE;
    }

    /* This tutorial will access only a single device, but it is trivial to extend to multiple devices */
    rs_device * dev = rs_get_device(ctx, 0, &e);
    check_error();
    printf("\nUsing device 0, an %s\n", rs_get_device_name(dev, &e));
    check_error();
    printf("    Serial number: %s\n", rs_get_device_serial(dev, &e));
    check_error();
    printf("    Firmware version: %s\n", rs_get_device_firmware_version(dev, &e));
    check_error();

    /* Configure depth and color to run with the device's preferred settings */
    rs_enable_stream_preset(dev, RS_STREAM_DEPTH, RS_PRESET_BEST_QUALITY, &e);
    check_error();
    rs_enable_stream_preset(dev, RS_STREAM_COLOR, RS_PRESET_BEST_QUALITY, &e);
    check_error();
    rs_start_device(dev, RS_SOURCE_VIDEO, &e);
    check_error();

    /* Open a GLFW window to display our output. Bail out if window creation
       fails (e.g. no display available) instead of dereferencing NULL. */
    glfwInit();
    GLFWwindow * win = glfwCreateWindow(1280, 960, "librealsense tutorial #3", NULL, NULL);
    if(!win)
    {
        glfwTerminate();
        rs_delete_context(ctx, NULL);
        return EXIT_FAILURE;
    }
    glfwSetCursorPosCallback(win, on_cursor_pos);
    glfwSetMouseButtonCallback(win, on_mouse_button);
    glfwMakeContextCurrent(win);
    while(!glfwWindowShouldClose(win))
    {
        /* Wait for new frame data */
        glfwPollEvents();
        rs_wait_for_frames(dev, &e);
        check_error();

        /* Retrieve our images; pointers stay valid until the next rs_wait_for_frames() */
        const uint16_t * depth_image = (const uint16_t *)rs_get_frame_data(dev, RS_STREAM_DEPTH, &e);
        check_error();
        const uint8_t * color_image = (const uint8_t *)rs_get_frame_data(dev, RS_STREAM_COLOR, &e);
        check_error();

        /* Retrieve camera parameters for mapping between depth and color */
        rs_intrinsics depth_intrin, color_intrin;
        rs_extrinsics depth_to_color;
        rs_get_stream_intrinsics(dev, RS_STREAM_DEPTH, &depth_intrin, &e);
        check_error();
        rs_get_device_extrinsics(dev, RS_STREAM_DEPTH, RS_STREAM_COLOR, &depth_to_color, &e);
        check_error();
        rs_get_stream_intrinsics(dev, RS_STREAM_COLOR, &color_intrin, &e);
        check_error();
        float scale = rs_get_device_depth_scale(dev, &e);
        check_error();

        /* Set up a perspective transform in a space that we can rotate by clicking and dragging the mouse */
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        glMatrixMode(GL_PROJECTION);
        glLoadIdentity();
        gluPerspective(60, (float)1280/960, 0.01f, 20.0f);
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();
        gluLookAt(0,0,0, 0,0,1, 0,-1,0);
        /* Rotate about a pivot 0.5 m in front of the camera so the cloud spins in place */
        glTranslatef(0,0,+0.5f);
        glRotated(pitch, 1, 0, 0);
        glRotated(yaw, 0, 1, 0);
        glTranslatef(0,0,-0.5f);

        /* We will render our depth data as a set of points in 3D space */
        glPointSize(2);
        glEnable(GL_DEPTH_TEST);
        glBegin(GL_POINTS);

        int dx, dy;
        for(dy=0; dy<depth_intrin.height; ++dy)
        {
            for(dx=0; dx<depth_intrin.width; ++dx)
            {
                /* Retrieve the 16-bit depth value and map it into a depth in meters */
                uint16_t depth_value = depth_image[dy * depth_intrin.width + dx];
                float depth_in_meters = depth_value * scale;

                /* Skip over pixels with a depth value of zero, which is used to indicate no data */
                if(depth_value == 0) continue;

                /* Map from pixel coordinates in the depth image to pixel coordinates in the color image */
                float depth_pixel[2] = {(float)dx, (float)dy};
                float depth_point[3], color_point[3], color_pixel[2];
                rs_deproject_pixel_to_point(depth_point, &depth_intrin, depth_pixel, depth_in_meters);
                rs_transform_point_to_point(color_point, &depth_to_color, depth_point);
                rs_project_point_to_pixel(color_pixel, &color_intrin, color_point);

                /* Use the color from the nearest color pixel, or pure white if this point falls outside the color image */
                const int cx = (int)roundf(color_pixel[0]), cy = (int)roundf(color_pixel[1]);
                if(cx < 0 || cy < 0 || cx >= color_intrin.width || cy >= color_intrin.height)
                {
                    glColor3ub(255, 255, 255);
                }
                else
                {
                    glColor3ubv(color_image + (cy * color_intrin.width + cx) * 3);
                }

                /* Emit a vertex at the 3D location of this depth pixel */
                glVertex3f(depth_point[0], depth_point[1], depth_point[2]);
            }
        }
        glEnd();

        glfwSwapBuffers(win);
    }

    /* Release the window and the device context before exiting. */
    glfwDestroyWindow(win);
    glfwTerminate();
    rs_delete_context(ctx, NULL);
    return EXIT_SUCCESS;
}
/* Example #5 */
/* Render one 640x480 Z16 depth frame into `out` as text: the image is split
   into 10x20-pixel cells and each cell becomes one character whose "density"
   approximates how many of its pixels lie within `one_meter` raw units.
   `out` must hold at least (640/10+1)*(480/20)+1 bytes; the result is
   NUL-terminated with one '\n' per 20-row band. */
static void render_depth_ascii(const uint16_t * depth_frame, uint16_t one_meter, char * out)
{
    int coverage[64] = {0}, x, y, i;
    for(y=0; y<480; ++y)
    {
        for(x=0; x<640; ++x)
        {
            int depth = *depth_frame++;
            if(depth > 0 && depth < one_meter) ++coverage[x/10];
        }

        /* Every 20 rows, emit one text line and reset the per-column counters.
           Max count per cell is 10*20=200, so coverage[i]/25 stays within the
           9-character ramp below. */
        if(y%20 == 19)
        {
            for(i=0; i<64; ++i)
            {
                *out++ = " .:nhBXWW"[coverage[i]/25];
                coverage[i] = 0;
            }
            *out++ = '\n';
        }
    }
    *out = '\0';
}

/* Streams depth from the first connected RealSense device and prints a
   text-mode "camera image" of everything closer than one meter, forever. */
int main()
{
    /* Create a context object. This object owns the handles to all connected realsense devices. */
    rs_context * ctx = rs_create_context(RS_API_VERSION, &e);
    check_error();

    /* Query the device count once and reuse it (previously queried twice). */
    int device_count = rs_get_device_count(ctx, &e);
    check_error();
    printf("There are %d connected RealSense devices.\n", device_count);
    if(device_count == 0) return EXIT_FAILURE;

    /* This tutorial will access only a single device, but it is trivial to extend to multiple devices */
    rs_device * dev = rs_get_device(ctx, 0, &e);
    check_error();
    printf("\nUsing device 0, an %s\n", rs_get_device_name(dev, &e));
    check_error();
    printf("    Serial number: %s\n", rs_get_device_serial(dev, &e));
    check_error();
    printf("    Firmware version: %s\n", rs_get_device_firmware_version(dev, &e));
    check_error();

    /* Configure depth to run at VGA resolution at 30 frames per second */
    rs_enable_stream(dev, RS_STREAM_DEPTH, 640, 480, RS_FORMAT_Z16, 30, &e);
    check_error();
    rs_start_device(dev, &e);
    check_error();

    /* Determine depth value corresponding to one meter */
    const uint16_t one_meter = (uint16_t)(1.0f / rs_get_device_depth_scale(dev, &e));
    check_error();

    while(1)
    {
        /* This call waits until a new coherent set of frames is available on a device
           Calls to get_frame_data(...) and get_frame_timestamp(...) on a device will return stable values until wait_for_frames(...) is called */
        rs_wait_for_frames(dev, &e);
        check_error();

        /* Retrieve depth data, which was previously configured as a 640 x 480 image of 16-bit depth values */
        const uint16_t * depth_frame = (const uint16_t *)(rs_get_frame_data(dev, RS_STREAM_DEPTH, &e));
        check_error();

        /* Print a simple text-based representation of the image */
        char buffer[(640/10+1)*(480/20)+1];
        render_depth_ascii(depth_frame, one_meter, buffer);
        printf("\n%s", buffer);
    }

    /* Unreachable: the streaming loop above never exits. */
    return EXIT_SUCCESS;
}
// Records `framesToRecord` synchronized depth/color/IR frames from the
// device, averages them, estimates a per-pixel confidence ("belief") map for
// the depth channel from variance and missing-data counts, and returns the
// result bundled in a Frame.
//
// Fix in this revision: the per-pixel depth average previously divided by
// (stack size - zero count), which is zero when a pixel has no data in ANY
// frame, writing NaN/inf into the depth map. That case now yields 0.0f
// (librealsense's "no data" convention).
Frame CameraWrapper::record()
{
    // Discard the currently buffered frame set so the stack starts fresh.
    rs_wait_for_frames(dev, &e);
    check_error();

    std::vector<cv::Mat> depthstack;
    std::vector<cv::Mat> colorstack;
    std::vector<cv::Mat> irstack;

    // record a stack of frames
    for(int frame = 0; frame < framesToRecord; frame++)
    {
        // wait for the next coherent frame set
        rs_wait_for_frames(dev, &e);
        check_error();

        // raw frame pointers stay valid only until the next rs_wait_for_frames()
        const uint16_t * depth_image = (const uint16_t *)rs_get_frame_data(dev, RS_STREAM_DEPTH, &e);
        check_error();
        const uint8_t * color_image = (const uint8_t *)rs_get_frame_data(dev, RS_STREAM_COLOR, &e);
        check_error();
        const uint8_t * ir_image = (const uint8_t *)rs_get_frame_data(dev, RS_STREAM_INFRARED, &e);
        check_error();

        // convert depth to meters and float
        cv::Mat depthframe = cv::Mat(depth_intrin.height, depth_intrin.width, CV_32F);
        int dx, dy;
        for(dy=0; dy<depth_intrin.height; ++dy)
        {
            for(dx=0; dx<depth_intrin.width; ++dx)
            {
                /* Retrieve the 16-bit depth value and map it into a depth in meters */
                uint16_t depth_value = depth_image[dy * depth_intrin.width + dx];
                float depth_in_meters = depth_value * depthScale;
                depthframe.at<float>(dy, dx) = depth_in_meters;
            }
        }
        depthstack.push_back(depthframe);

        // color
        cv::Mat colorframeRGB = cv::Mat(color_intrin.height, color_intrin.width, CV_8UC3, cv::Scalar(0));
        memcpy(colorframeRGB.ptr(),color_image,color_intrin.height*color_intrin.width*3);
        // f200 gives RGB image, openCV uses BGR - so convert
        cv::Mat colorframeBGR;
        cv::cvtColor(colorframeRGB, colorframeBGR, CV_RGB2BGR);
        colorstack.push_back(colorframeBGR);

        // ir (8-bit single channel)
        cv::Mat irframe = cv::Mat(ir_intrin.height, ir_intrin.width, CV_8UC1, cv::Scalar(0));
        memcpy(irframe.ptr(),ir_image,ir_intrin.height*ir_intrin.width);
        irstack.push_back(irframe);
    }

    // create Frame object and fill in the camera models for all three streams
    Frame frame1;
    convertIntrinsicToOpenCV(depth_intrin, frame1.cameraMatrix_depth, frame1.coefficients_depth);
    convertIntrinsicToOpenCV(color_intrin, frame1.cameraMatrix_color, frame1.coefficients_color);
    convertIntrinsicToOpenCV(ir_intrin, frame1.cameraMatrix_ir, frame1.coefficients_ir);

    // Build the depth->color rigid transform as translation * rotation.
    // NOTE(review): assumes depth_to_color.rotation's memory layout matches
    // Eigen's default (column-major) ordering — TODO confirm against the
    // librealsense rs_extrinsics documentation.
    Eigen::Matrix3f R(depth_to_color.rotation);
    Eigen::Vector3f T(depth_to_color.translation);
    Eigen::Matrix4f m1 = Eigen::Affine3f(Eigen::Translation3f(T)).matrix();
    Eigen::Matrix4f m2 = Eigen::Affine3f(R).matrix();
    Eigen::Matrix4f dtc = m1*m2;
    Eigen::Affine3f mpose (dtc);

    frame1.depth_to_color_transform = mpose;

    // call averager
    // DESIGN add interface

    // Average the RGB images, limited to at most 4 frames.
    int framesToRecordRGB;
    if (framesToRecord > 4)
        framesToRecordRGB = 4;
    else
        framesToRecordRGB = framesToRecord;
    frame1.processedImageRGB = cv::Mat::zeros(color_intrin.height, color_intrin.width, CV_8UC3);
    for (int frame = 0; frame < framesToRecordRGB; frame++)
    {
        frame1.processedImageRGB = frame1.processedImageRGB + (1.0/framesToRecordRGB)*colorstack[frame];
    }

    // Average the IR images over the same (capped) frame count.
    frame1.processedImageIR = cv::Mat::zeros(ir_intrin.height, ir_intrin.width, CV_8UC1);
    for (int frame = 0; frame < framesToRecordRGB; frame++)
    {
        frame1.processedImageIR = frame1.processedImageIR + (1.0/framesToRecordRGB)*irstack[frame];
    }

    // Average depth per pixel over all frames, and derive a belief value from
    // the sample variance and the number of frames with missing (zero) data.
    cv::Mat belief(depth_intrin.height, depth_intrin.width, CV_8U, cv::Scalar::all(0));
    cv::Mat processedImageDepth(depth_intrin.height, depth_intrin.width, CV_32F, cv::Scalar::all(0));

    for (int y = 0; y < depth_intrin.height; y++)
    {
        for (int x = 0; x < depth_intrin.width; x++)
        {
            double tempresult_depth = 0;
            int depth_zeroes = 0;
            std::vector<float> var_values;
            for(size_t frame = 0; frame < depthstack.size(); frame++)
            {
                if(depthstack[frame].at<float>(y,x) == 0.0)
                {
                    // zero means "no data" for this pixel in this frame
                    depth_zeroes++;
                }else
                {
                    tempresult_depth += (float)depthstack[frame].at<float>(y,x);
                    // scale meters up so the variance lands in a usable range
                    var_values.push_back(depthstack[frame].at<float>(y,x)*10000);
                }
            }

            double var = calculateVariance(var_values);

            // limit variance for display reasons (belief is an 8-bit image)
            if (var > 255.0)
                var = 255.0;

            // reduce belief if pixel has zero value i.e. no data
            int zeroPenaltyFactor = 255/depthstack.size();
            int beliefPx = 255-((int)var + (depth_zeroes * zeroPenaltyFactor));
            if (beliefPx < 0)
                beliefPx = 0;

            // Average over the frames that actually had data; if none did,
            // keep the librealsense "no data" value of 0 instead of
            // dividing by zero (previous behavior produced NaN/inf here).
            const size_t validSamples = depthstack.size() - depth_zeroes;
            processedImageDepth.at<float>(y,x) =
                validSamples > 0 ? (float)(tempresult_depth / validSamples) : 0.0f;
            belief.at<uchar>(y,x) = beliefPx;
        }
    }
    frame1.belief = belief;
    frame1.processedImageDepth = processedImageDepth;

    return frame1;
}
/* Streams depth, color and both infrared images from the first connected
   RealSense device and shows all four in the quadrants of a GLFW window. */
int main()
{
    /* Create a context object. This object owns the handles to all connected realsense devices. */
    rs_context * ctx = rs_create_context(RS_API_VERSION, &e);
    check_error();

    /* Query the device count once and reuse it (previously queried twice). */
    int device_count = rs_get_device_count(ctx, &e);
    check_error();
    printf("There are %d connected RealSense devices.\n", device_count);
    if(device_count == 0)
    {
        rs_delete_context(ctx, NULL);
        return EXIT_FAILURE;
    }

    /* This tutorial will access only a single device, but it is trivial to extend to multiple devices */
    rs_device * dev = rs_get_device(ctx, 0, &e);
    check_error();
    printf("\nUsing device 0, an %s\n", rs_get_device_name(dev, &e));
    check_error();
    printf("    Serial number: %s\n", rs_get_device_serial(dev, &e));
    check_error();
    printf("    Firmware version: %s\n", rs_get_device_firmware_version(dev, &e));
    check_error();

    /* Configure all streams to run at VGA resolution at 60 frames per second */
    rs_enable_stream(dev, RS_STREAM_DEPTH, 640, 480, RS_FORMAT_Z16, 60, &e);
    check_error();
    rs_enable_stream(dev, RS_STREAM_COLOR, 640, 480, RS_FORMAT_RGB8, 60, &e);
    check_error();
    rs_enable_stream(dev, RS_STREAM_INFRARED, 640, 480, RS_FORMAT_Y8, 60, &e);
    check_error();
    rs_enable_stream(dev, RS_STREAM_INFRARED2, 640, 480, RS_FORMAT_Y8, 60, NULL); /* Pass NULL to ignore errors; not all devices have a second IR stream */
    rs_start_device(dev, &e);
    check_error();

    /* Open a GLFW window to display our output. Bail out if window creation
       fails (e.g. no display available) instead of dereferencing NULL. */
    glfwInit();
    GLFWwindow * win = glfwCreateWindow(1280, 960, "librealsense tutorial #2", NULL, NULL);
    if(!win)
    {
        glfwTerminate();
        rs_delete_context(ctx, NULL);
        return EXIT_FAILURE;
    }
    glfwMakeContextCurrent(win);
    while(!glfwWindowShouldClose(win))
    {
        /* Wait for new frame data */
        glfwPollEvents();
        rs_wait_for_frames(dev, &e);
        check_error();

        glClear(GL_COLOR_BUFFER_BIT);
        glPixelZoom(1, -1);   /* image rows are top-to-bottom; GL draws bottom-up */

        /* Display depth data by linearly mapping depth between 0 and 2 meters to the red channel */
        glRasterPos2f(-1, 1);
        glPixelTransferf(GL_RED_SCALE, 0xFFFF * rs_get_device_depth_scale(dev, &e) / 2.0f);
        check_error();
        glDrawPixels(640, 480, GL_RED, GL_UNSIGNED_SHORT, rs_get_frame_data(dev, RS_STREAM_DEPTH, &e));
        check_error();
        glPixelTransferf(GL_RED_SCALE, 1.0f);

        /* Display color image as RGB triples */
        glRasterPos2f(0, 1);
        glDrawPixels(640, 480, GL_RGB, GL_UNSIGNED_BYTE, rs_get_frame_data(dev, RS_STREAM_COLOR, &e));
        check_error();

        /* Display infrared image by mapping IR intensity to visible luminance */
        glRasterPos2f(-1, 0);
        glDrawPixels(640, 480, GL_LUMINANCE, GL_UNSIGNED_BYTE, rs_get_frame_data(dev, RS_STREAM_INFRARED, &e));
        check_error();

        /* Display second infrared image by mapping IR intensity to visible luminance */
        if(rs_is_stream_enabled(dev, RS_STREAM_INFRARED2, NULL))
        {
            glRasterPos2f(0, 0);
            glDrawPixels(640, 480, GL_LUMINANCE, GL_UNSIGNED_BYTE, rs_get_frame_data(dev, RS_STREAM_INFRARED2, &e));
        }

        glfwSwapBuffers(win);
    }

    /* Release the window and the device context before exiting. */
    glfwDestroyWindow(win);
    glfwTerminate();
    rs_delete_context(ctx, NULL);
    return EXIT_SUCCESS;
}