コード例 #1
0
 void software_sensor::close()
 {
     // close() is legal only on a sensor that is opened and not streaming.
     if (_is_streaming)
         throw wrong_api_call_sequence_exception("close() failed. Software device is streaming!");
     if (!_is_opened)
         throw wrong_api_call_sequence_exception("close() failed. Software device was not opened!");

     // Mark the sensor closed and drop the active stream set.
     _is_opened = false;
     set_active_streams({});
 }
コード例 #2
0
 void software_sensor::open(const stream_profiles& requests)
 {
     // A sensor may be opened only once, and never while streaming.
     if (_is_streaming)
         throw wrong_api_call_sequence_exception("open(...) failed. Software device is streaming!");
     if (_is_opened)
         throw wrong_api_call_sequence_exception("open(...) failed. Software device is already opened!");

     // Remember that we are open and record the requested profiles
     // as the active stream set.
     _is_opened = true;
     set_active_streams(requests);
 }
コード例 #3
0
 void software_sensor::start(frame_callback_ptr callback)
 {
     // Streaming may begin only once, and only after a successful open().
     if (_is_streaming)
         throw wrong_api_call_sequence_exception("start_streaming(...) failed. Software device is already streaming!");
     if (!_is_opened)
         throw wrong_api_call_sequence_exception("start_streaming(...) failed. Software device was not opened!");

     // Prepare the frame source before any frame can arrive:
     // zero the published-size counter, wire up metadata parsing,
     // attach this sensor and hand over the user callback.
     _source.get_published_size_option()->set(0);
     _source.init(_metadata_parsers);
     _source.set_sensor(this->shared_from_this());
     _source.set_callback(callback);

     // Flip the streaming flag, then notify listeners of the change.
     _is_streaming = true;
     raise_on_before_streaming_changes(true);
 }
コード例 #4
0
 void start(device_changed_callback callback) override
 {
     std::lock_guard<std::mutex> guard(_m);

     // Calling start() on a watcher that is already running is a usage error.
     const bool already_running = !_data._stopped;
     if (already_running)
         throw wrong_api_call_sequence_exception("Cannot start a running device_watcher");

     _data._stopped = false;
     _data._callback = std::move(callback);

     // Launch the worker thread that executes run().
     _thread = std::thread([this]() { run(); });
 }
コード例 #5
0
    // Stop streaming on this software sensor.
    // Throws wrong_api_call_sequence_exception if the sensor is not streaming.
    // NOTE: the teardown order below is deliberate — clear the streaming flag,
    // notify listeners, then flush and reset the frame source.
    void software_sensor::stop()
    {
        if (!_is_streaming)
            throw wrong_api_call_sequence_exception("stop_streaming() failed. Software device is not streaming!");

        _is_streaming = false;
        raise_on_before_streaming_changes(false);
        _source.flush();
        _source.reset();
    }
コード例 #6
0
    // Allocate a points frame sized for the given stream profile.
    // Returns nullptr if the profile is not a video profile (width/height
    // are needed to size the buffer); throws when the frame pool is empty.
    frame_interface* synthetic_source::allocate_points(std::shared_ptr<stream_profile_interface> stream, frame_interface* original)
    {
        auto video_profile = dynamic_cast<video_stream_profile_interface*>(stream.get());
        if (!video_profile)
            return nullptr;

        // The new frame inherits the timing identity of the original frame.
        frame_additional_data data{};
        data.frame_number = original->get_frame_number();
        data.timestamp = original->get_frame_timestamp();
        data.timestamp_domain = original->get_frame_timestamp_domain();
        data.metadata_size = 0;
        data.system_time = _actual_source.get_time();

        // 5 floats per pixel — presumably XYZ vertex + UV texture
        // coordinates of the points layout; TODO confirm against
        // the points frame definition.
        const auto payload_size = video_profile->get_width() * video_profile->get_height() * sizeof(float) * 5;
        auto res = _actual_source.alloc_frame(RS2_EXTENSION_POINTS, payload_size, data, true);
        if (!res)
            throw wrong_api_call_sequence_exception("Out of frame resources!");

        res->set_sensor(original->get_sensor());
        res->set_stream(stream);
        return res;
    }
コード例 #7
0
    // Allocate a new video frame of the requested extension type.
    // Passing 0 for new_bpp, new_height, or for both new_width and new_stride
    // means "inherit that attribute from 'original'", which then must itself
    // be a video frame. new_bpp appears to be in bytes (it is converted to
    // bits below) — matches the public rs2 allocation API; TODO confirm.
    // Throws std::runtime_error when inheritance is requested from a
    // non-video original, and wrong_api_call_sequence_exception when the
    // frame pool is exhausted.
    frame_interface* synthetic_source::allocate_video_frame(std::shared_ptr<stream_profile_interface> stream,
                                                            frame_interface* original,
                                                            int new_bpp,
                                                            int new_width,
                                                            int new_height,
                                                            int new_stride,
                                                            rs2_extension frame_type)
    {
        video_frame* vf = nullptr;

        if (new_bpp == 0 || (new_width == 0 && new_stride == 0) || new_height == 0)
        {
            // If the user wants to delegate width, height and etc to original frame, it must be a video frame
            if (!rs2_is_frame_extendable_to((rs2_frame*)original, RS2_EXTENSION_VIDEO_FRAME, nullptr))
            {
                throw std::runtime_error("If original frame is not video frame, you must specify new bpp, width/stride and height!");
            }
            vf = static_cast<video_frame*>(original);
        }

        // The new frame inherits the timing identity of the original frame.
        frame_additional_data data{};
        data.frame_number = original->get_frame_number();
        data.timestamp = original->get_frame_timestamp();
        data.timestamp_domain = original->get_frame_timestamp_domain();
        data.metadata_size = 0;
        data.system_time = _actual_source.get_time();

        auto width = new_width;
        auto height = new_height;
        auto bpp = new_bpp * 8; // bytes -> bits per pixel
        auto stride = new_stride;

        // Fill in every attribute the caller delegated. vf is non-null on
        // each of these paths: any zero value here implies the guard above
        // already validated and assigned it.
        if (bpp == 0)
        {
            bpp = vf->get_bpp();
        }

        if (width == 0 && stride == 0)
        {
            width = vf->get_width();
            stride = width * bpp / 8;
        }
        else if (width == 0)
        {
            width = stride * 8 / bpp;
        }
        else if (stride == 0)
        {
            stride = width * bpp / 8;
        }

        if (height == 0)
        {
            height = vf->get_height();
        }

        auto res = _actual_source.alloc_frame(frame_type, stride * height, data, true);
        if (!res) throw wrong_api_call_sequence_exception("Out of frame resources!");
        auto new_vf = static_cast<video_frame*>(res);
        new_vf->assign(width, height, stride, bpp);
        new_vf->set_sensor(original->get_sensor());
        res->set_stream(stream);

        // Depth frames keep a reference to the frame they were derived from.
        if (frame_type == RS2_EXTENSION_DEPTH_FRAME)
        {
            // Guard the downcast: dereferencing a failed dynamic_cast is UB,
            // and acquiring before the cast would leak 'original' on failure.
            if (auto depth = dynamic_cast<depth_frame*>(res))
            {
                original->acquire();
                depth->set_original(original);
            }
        }

        return res;
    }