// Send a hardware-monitor command over the UVC extension unit (XU) and, when
// requested, read back the device's response buffer.
// Throws invalid_value_exception on oversized requests or failed XU transfers.
std::vector<uint8_t> librealsense::command_transfer_over_xu::send_receive(const std::vector<uint8_t>& data, int, bool require_response)
{
    return _uvc.invoke_powered([this, &data, require_response]
        (platform::uvc_device& dev)
        {
            std::vector<uint8_t> result;
            // Serialize access to the device for the whole send/receive round-trip.
            std::lock_guard<platform::uvc_device> lock(dev);

            if (data.size() > HW_MONITOR_BUFFER_SIZE)
            {
                LOG_ERROR("XU command size is invalid");
                throw invalid_value_exception(to_string() << "Requested XU command size " << std::dec << data.size() << " exceeds permitted limit " << HW_MONITOR_BUFFER_SIZE);
            }

            // The XU transfer always carries a full fixed-size buffer; pad with zeros.
            std::vector<uint8_t> transmit_buf(HW_MONITOR_BUFFER_SIZE, 0);
            std::copy(data.begin(), data.end(), transmit_buf.begin());

            if (!dev.set_xu(_xu, _ctrl, transmit_buf.data(), static_cast<int>(transmit_buf.size())))
                throw invalid_value_exception(to_string() << "set_xu(ctrl=" << unsigned(_ctrl) << ") failed!" << " Last Error: " << strerror(errno));

            if (require_response)
            {
                result.resize(HW_MONITOR_BUFFER_SIZE);
                if (!dev.get_xu(_xu, _ctrl, result.data(), static_cast<int>(result.size())))
                    throw invalid_value_exception(to_string() << "get_xu(ctrl=" << unsigned(_ctrl) << ") failed!" << " Last Error: " << strerror(errno));

                // Returned data size located in the last 4 bytes
                auto data_size = *(reinterpret_cast<uint32_t*>(result.data() + HW_MONITOR_DATA_SIZE_OFFSET)) + SIZE_OF_HW_MONITOR_HEADER;
                // NOTE(review): data_size comes straight from the device and is not
                // validated against HW_MONITOR_BUFFER_SIZE; a corrupt reply could
                // trigger a very large (zero-filled) resize — confirm upstream checks.
                result.resize(data_size);
            }
            return result;
        });
}
// Depth <-> disparity conversion block (generic_processing_block variant).
// The single boolean option selects the transformation direction; flipping it
// at runtime reconfigures the block via on_set_mode().
disparity_transform::disparity_transform(bool transform_to_disparity):
    generic_processing_block(transform_to_disparity ? "Depth to Disparity" : "Disparity to Depth"),
    _transform_to_disparity(transform_to_disparity),
    _update_target(false), _stereoscopic_depth(false),
    _focal_lenght_mm(0.f), _stereo_baseline_meter(0.f), _depth_units(0.f), _d2d_convert_factor(0.f),
    _width(0), _height(0), _bpp(0)
{
    auto transform_opt = std::make_shared<ptr_option<bool>>(
        false, true, true, true,
        &_transform_to_disparity,
        "Stereoscopic Transformation Mode");
    transform_opt->set_description(false, "Disparity to Depth");
    transform_opt->set_description(true, "Depth to Disparity");
    transform_opt->on_set([this, transform_opt](float val)
    {
        std::lock_guard<std::mutex> lock(_mutex);
        if (!transform_opt->is_valid(val))
            // Fix: added the missing space so the message reads
            // "Unsupported transformation mode <N> is out of range." instead of "mode<N>".
            throw invalid_value_exception(to_string() << "Unsupported transformation mode " << (int)val << " is out of range.");
        on_set_mode(static_cast<bool>(!!int(val)));
    });

    // The frame queue size option is meaningless for this block.
    unregister_option(RS2_OPTION_FRAMES_QUEUE_SIZE);

    // Apply the initial direction immediately.
    on_set_mode(_transform_to_disparity);
}
// Hole-filling post-processing filter: fills invalid depth pixels according to
// the selected strategy. The constructor only wires up the mode option.
hole_filling_filter::hole_filling_filter() :
    _width(0), _height(0), _stride(0), _bpp(0),
    _extension_type(RS2_EXTENSION_DEPTH_FRAME),
    _current_frm_size_pixels(0),
    _hole_filling_mode(hole_fill_def)
{
    // Only Z16 depth streams are processed by this filter.
    _stream_filter.stream = RS2_STREAM_DEPTH;
    _stream_filter.format = RS2_FORMAT_Z16;

    auto mode_opt = std::make_shared<ptr_option<uint8_t>>(
        hole_fill_min, hole_fill_max, hole_fill_step, hole_fill_def,
        &_hole_filling_mode,
        "Hole Filling mode");

    mode_opt->set_description(hf_fill_from_left, "Fill from Left");
    mode_opt->set_description(hf_farest_from_around, "Farest from around");
    mode_opt->set_description(hf_nearest_from_around, "Nearest from around");

    mode_opt->on_set([this, mode_opt](float requested)
    {
        std::lock_guard<std::mutex> lock(_mutex);
        if (!mode_opt->is_valid(requested))
            throw invalid_value_exception(to_string() << "Unsupported mode for hole filling selected: value " << requested << " is out of range.");
        _hole_filling_mode = static_cast<uint8_t>(requested);
    });

    register_option(RS2_OPTION_HOLES_FILL, mode_opt);
}
// Deliver a recorded frame to the user callback on the per-stream dispatcher.
// In real-time mode the call blocks until the frame has been dispatched.
void playback_sensor::handle_frame(frame_holder frame, bool is_real_time)
{
    if(frame == nullptr)
    {
        throw invalid_value_exception("null frame passed to handle_frame");
    }
    if(m_is_started)
    {
        // Re-bind the frame to this playback sensor and to the playback stream
        // profile matching its (type, index) pair so consumers see consistent metadata.
        frame->get_owner()->set_sensor(shared_from_this());
        auto type = frame->get_stream()->get_stream_type();
        auto index = static_cast<uint32_t>(frame->get_stream()->get_stream_index());
        frame->set_stream(m_streams[std::make_pair(type, index)]);
        frame->set_sensor(shared_from_this());
        auto stream_id = frame.frame->get_stream()->get_unique_id();
        //TODO: Ziv, remove usage of shared_ptr when frame_holder is copyable
        auto pf = std::make_shared<frame_holder>(std::move(frame));
        m_dispatchers.at(stream_id)->invoke([this, pf](dispatcher::cancellable_timer t)
        {
            frame_interface* pframe = nullptr;
            // Move the raw frame pointer out of the holder; ownership passes to
            // the user callback via the rs2_frame handle.
            std::swap((*pf).frame, pframe);
            // NOTE(review): m_user_callback is dereferenced without a null check —
            // presumably start() guarantees it is set; confirm.
            m_user_callback->on_frame((rs2_frame*)pframe);
        });
        if(is_real_time)
        {
            // Block until the dispatcher has drained, to keep real-time pacing.
            m_dispatchers.at(stream_id)->flush();
        }
    }
}
// Recreate the recorded sensor's options as read-only (const-value) options.
// Options that fail to register are logged and skipped, not fatal.
void playback_sensor::register_sensor_options(const device_serializer::sensor_snapshot& sensor_snapshot)
{
    auto options_snapshot = sensor_snapshot.get_sensor_extensions_snapshots().find(RS2_EXTENSION_OPTIONS);
    if (options_snapshot == nullptr)
    {
        LOG_WARNING("Recorded file does not contain sensor options");
        return;
    }
    auto options_api = As<options_interface>(options_snapshot);
    if (options_api == nullptr)
    {
        throw invalid_value_exception("Failed to get options interface from sensor snapshots");
    }
    for (int i = 0; i < static_cast<int>(RS2_OPTION_COUNT); i++)
    {
        auto option_id = static_cast<rs2_option>(i);
        try
        {
            if (options_api->supports_option(option_id))
            {
                auto&& option = options_api->get_option(option_id);
                // Fix: query once and reuse the result — the original left this
                // local unused and called option.query() two more times instead.
                const float value = option.query();
                register_option(option_id, std::make_shared<const_value_option>(option.get_description(), value));
                LOG_DEBUG("Registered " << rs2_option_to_string(option_id) << " for sensor " << m_sensor_id << " with value: " << value);
            }
        }
        catch (std::exception& e)
        {
            LOG_WARNING("Failed to register option " << option_id << ". Exception: " << e.what());
        }
    }
}
// Build a librealsense context over the requested backend flavor:
// live hardware, live-with-recording, or recorded-file playback.
context::context(backend_type type, const char* filename, const char* section, rs2_recording_mode mode)
    : _devices_changed_callback(nullptr, [](rs2_devices_changed_callback*) {})
{
    LOG_DEBUG("Librealsense " << std::string(std::begin(rs2_api_version), std::end(rs2_api_version)));

    switch (type)
    {
    case backend_type::standard:
        // Plain live-device backend.
        _backend = platform::create_backend();
        break;

    case backend_type::record:
        // Live backend wrapped with a recorder writing to 'filename'.
        _backend = std::make_shared<platform::record_backend>(platform::create_backend(), filename, section, mode);
        break;

    case backend_type::playback:
        // No hardware involved: replay a previously recorded session.
        _backend = std::make_shared<platform::playback_backend>(filename, section);
        break;

    default:
        throw invalid_value_exception(to_string() << "Undefined backend type " << static_cast<int>(type));
    }

    environment::get_instance().set_time_service(_backend->create_time_service());
    _device_watcher = _backend->create_device_watcher();
}
// Expose the camera-info strings captured in the recording on this playback sensor.
void playback_sensor::register_sensor_infos(const device_serializer::sensor_snapshot& sensor_snapshot)
{
    auto info_snapshot = sensor_snapshot.get_sensor_extensions_snapshots().find(RS2_EXTENSION_INFO);
    if (info_snapshot == nullptr)
    {
        LOG_WARNING("Recorded file does not contain sensor information");
        return;
    }

    auto info_api = As<info_interface>(info_snapshot);
    if (info_api == nullptr)
    {
        throw invalid_value_exception("Failed to get info interface from sensor snapshots");
    }

    for (int idx = 0; idx < RS2_CAMERA_INFO_COUNT; ++idx)
    {
        const auto field = static_cast<rs2_camera_info>(idx);
        if (!info_api->supports_info(field))
            continue;

        const std::string& recorded_value = info_api->get_info(field);
        register_info(field, recorded_value);
        LOG_DEBUG("Registered " << field << " for sensor " << m_sensor_id << " with value: " << recorded_value);
    }
}
// Pointcloud processing block: constructor wires up the occlusion-removal option
// that drives the internal occlusion_filter.
pointcloud::pointcloud()
    : stream_filter_processing_block("Pointcloud")
{
    _occlusion_filter = std::make_shared<occlusion_filter>();

    auto occlusion_invalidation = std::make_shared<ptr_option<uint8_t>>(
        occlusion_none,
        occlusion_max - 1, 1,
        occlusion_none,
        (uint8_t*)&_occlusion_filter->_occlusion_filter,
        "Occlusion removal");

    occlusion_invalidation->on_set([this, occlusion_invalidation](float val)
    {
        if (!occlusion_invalidation->is_valid(val))
            // Fix: typo "requiested" -> "requested" in the user-facing error message.
            throw invalid_value_exception(to_string()
                << "Unsupported occlusion filtering requested " << val << " is out of range.");

        // NOTE(review): unlike sibling on_set handlers, no mutex is taken here —
        // confirm occlusion_filter::set_mode is safe to call concurrently.
        _occlusion_filter->set_mode(static_cast<uint8_t>(val));
    });

    occlusion_invalidation->set_description(0.f, "Off");
    occlusion_invalidation->set_description(1.f, "Heuristic");
    occlusion_invalidation->set_description(2.f, "Exhaustive");

    register_option(RS2_OPTION_FILTER_MAGNITUDE, occlusion_invalidation);
}
// Return the UVC device descriptor whose USB interface number matches 'mi'.
// Throws invalid_value_exception when no interface matches.
platform::uvc_device_info get_mi(const std::vector<platform::uvc_device_info>& devices, uint32_t mi)
{
    for (const auto& candidate : devices)
        if (candidate.mi == mi)
            return candidate;

    throw invalid_value_exception("Interface not found!");
}
// Validate that a vector contains only finite values; throws
// invalid_value_exception naming the first NaN or infinity found.
void check_vector_for_valid_entries(VectorType const & vec, std::string message = "Location not specified")
{
  // Scan every entry and bail out on the first non-finite value.
  for (std::size_t idx = 0; idx < vec.size(); ++idx)
  {
    if ( viennashe::util::is_NaN(vec[idx]) )
    {
      std::stringstream err;
      err << message << ": Found NaN in vector at index " << idx << std::endl;
      throw invalid_value_exception(err.str());
    }

    if ( viennashe::util::is_Inf(vec[idx]) )
    {
      std::stringstream err;
      err << message << ": Found infinity in vector at index " << idx << std::endl;
      throw invalid_value_exception(err.str());
    }
  }
}
void librealsense::uvc_pu_option::set(float value) { _ep.invoke_powered( [this, value](platform::uvc_device& dev) { if (!dev.set_pu(_id, static_cast<int32_t>(value))) throw invalid_value_exception(to_string() << "set_pu(id=" << std::to_string(_id) << ") failed!" << " Last Error: " << strerror(errno)); _record(*this); }); }
// Check whether 'value' lies inside [min, max] and on the step grid of this
// option's range (i.e. value == min + k*step for some integer k).
bool option_base::is_valid(float value) const
{
    // A zero, denormal, NaN or infinite step would make the grid test below meaningless.
    if (!std::isnormal(_opt_range.step))
        throw invalid_value_exception(to_string() << "is_valid(...) failed! step is not properly defined. (" << _opt_range.step << ")");

    if ((value < _opt_range.min) || (value > _opt_range.max))
        return false;

    // Accept only values an integral number of steps above min.
    // NOTE(review): the tolerance is numeric_limits<float>::min() (smallest normal
    // float), so in practice only exact multiples pass — confirm this strictness
    // is intended rather than an epsilon mix-up.
    auto n = (value - _opt_range.min) / _opt_range.step;
    return (fabs(fmod(n, 1)) < std::numeric_limits<float>::min());
}
float librealsense::uvc_pu_option::query() const { return static_cast<float>(_ep.invoke_powered( [this](platform::uvc_device& dev) { int32_t value = 0; if (!dev.get_pu(_id, value)) throw invalid_value_exception(to_string() << "get_pu(id=" << std::to_string(_id) << ") failed!" << " Last Error: " << strerror(errno)); return static_cast<float>(value); })); }
void hw_monitor::execute_usb_command(uint8_t *out, size_t outSize, uint32_t & op, uint8_t * in, size_t & inSize) const { std::vector<uint8_t> out_vec(out, out + outSize); auto res = _locked_transfer->send_receive(out_vec); // read if (in && inSize) { if (res.size() < static_cast<int>(sizeof(uint32_t))) throw invalid_value_exception("Incomplete bulk usb transfer!"); if (res.size() > IVCAM_MONITOR_MAX_BUFFER_SIZE) throw invalid_value_exception("Out buffer is greater than max buffer size!"); op = *reinterpret_cast<uint32_t *>(res.data()); if (res.size() > static_cast<int>(inSize)) throw invalid_value_exception("bulk transfer failed - user buffer too small"); inSize = res.size(); librealsense::copy(in, res.data(), inSize); } }
// Split a raw hw-monitor reply into its 4-byte opcode and the remaining payload.
void hw_monitor::update_cmd_details(hwmon_cmd_details& details, size_t receivedCmdLen, unsigned char* outputBuffer)
{
    details.receivedCommandDataLength = receivedCmdLen;

    // Fire-and-forget commands carry no reply to parse.
    if (details.oneDirection)
        return;

    if (details.receivedCommandDataLength < 4)
        throw invalid_value_exception("received incomplete response to usb command");

    // Strip the leading 4-byte opcode, then copy any remaining payload bytes.
    details.receivedCommandDataLength -= 4;
    librealsense::copy(details.receivedOpcode.data(), outputBuffer, 4);

    if (details.receivedCommandDataLength > 0)
        librealsense::copy(details.receivedCommandData.data(), outputBuffer + 4, details.receivedCommandDataLength);
}
// Enable/disable the polling-error handler: 0 stops polling, any positive
// value starts it. Negative values are rejected.
void librealsense::polling_errors_disable::set(float value)
{
    if (value < 0)
        throw invalid_value_exception("Invalid polling errors disable request " + std::to_string(value));

    const bool disable = (value == 0);
    if (disable)
    {
        _polling_error_handler->stop();
        _value = 0;
    }
    else
    {
        _polling_error_handler->start();
        _value = 1;
    }

    // Mirror the state change into the recording subsystem.
    _recording_function(*this);
}
// Synchronously execute a single hw-monitor command and return its payload
// (empty for one-direction commands). Throws if the device echoes back a
// different opcode than the one sent.
std::vector<uint8_t> hw_monitor::send(command cmd) const
{
    hwmon_cmd newCommand(cmd);
    auto opCodeXmit = static_cast<uint32_t>(newCommand.cmd);

    hwmon_cmd_details details;
    details.oneDirection = newCommand.oneDirection;
    details.timeOut = newCommand.timeOut;

    // Serialize opcode + 4 params + payload into the raw USB transmit buffer.
    fill_usb_buffer(opCodeXmit, newCommand.param1, newCommand.param2, newCommand.param3, newCommand.param4, newCommand.data, newCommand.sizeOfSendCommandData, details.sendCommandData.data(), details.sizeOfSendCommandData);

    send_hw_monitor_command(details);

    // Error/exit conditions
    if (newCommand.oneDirection)
        return std::vector<uint8_t>();

    // Copy the parsed opcode and payload back into the command object.
    librealsense::copy(newCommand.receivedOpcode, details.receivedOpcode.data(), 4);
    librealsense::copy(newCommand.receivedCommandData, details.receivedCommandData.data(), details.receivedCommandDataLength);
    newCommand.receivedCommandDataLength = details.receivedCommandDataLength;

    // endian? -- the opcode is reassembled byte-by-byte from the wire order;
    // the device must echo the transmitted opcode or the reply is rejected.
    auto opCodeAsUint32 = pack(details.receivedOpcode[3], details.receivedOpcode[2], details.receivedOpcode[1], details.receivedOpcode[0]);
    if (opCodeAsUint32 != opCodeXmit)
    {
        throw invalid_value_exception(to_string() << "OpCodes do not match! Sent " << opCodeXmit << " but received " << static_cast<int>(opCodeAsUint32) << "!");
    }

    return std::vector<uint8_t>(newCommand.receivedCommandData, newCommand.receivedCommandData + newCommand.receivedCommandDataLength);
}
// Validate and store a new value for this float option.
void librealsense::float_option::set(float value)
{
    const bool accepted = is_valid(value);
    if (!accepted)
        throw invalid_value_exception(to_string() << "set(...) failed! " << value << " is not a valid value");

    _value = value;
}
// Alternative disparity_transform constructor: wires the depth <-> disparity
// conversion directly through a raw frame-processor callback instead of the
// generic_processing_block base used by the other variant.
disparity_transform::disparity_transform(bool transform_to_disparity):
    _transform_to_disparity(transform_to_disparity),
    _update_target(false), _stereoscopic_depth(false),
    _focal_lenght_mm(0.f), _stereo_baseline(0.f), _depth_units(0.f), _d2d_convert_factor(0.f),
    _width(0), _height(0), _bpp(0)
{
    auto transform_opt = std::make_shared<ptr_option<bool>>(
        false,true,true,true,
        &_transform_to_disparity,
        "Stereoscopic Transformation Mode");
    transform_opt->set_description(false, "Disparity to Depth");
    transform_opt->set_description(true, "Depth to Disparity");
    transform_opt->on_set([this, transform_opt](float val)
    {
        std::lock_guard<std::mutex> lock(_mutex);
        if (!transform_opt->is_valid(val))
            throw invalid_value_exception(to_string() << "Unsupported transformation mode" << (int)val << " is out of range.");
        on_set_mode(static_cast<bool>(!!int(val)));
    });
    // The frame queue size option is meaningless for this block.
    unregister_option(RS2_OPTION_FRAMES_QUEUE_SIZE);

    auto on_frame = [this](rs2::frame f, const rs2::frame_source& source)
    {
        std::lock_guard<std::mutex> lock(_mutex);

        rs2::frame out = f;
        rs2::frame tgt, depth_data;

        // For framesets, operate on the embedded depth frame; otherwise on 'f' itself.
        bool composite = f.is<rs2::frameset>();
        tgt = depth_data = (composite) ? f.as<rs2::frameset>().first_or_default(RS2_STREAM_DEPTH) : f;

        // Verify that the input depth format is aligned with the block's configuration
        if (depth_data && (f.is<rs2::disparity_frame>() != _transform_to_disparity))
        {
            update_transformation_profile(depth_data);

            if (_stereoscopic_depth && (tgt = prepare_target_frame(depth_data, source)))
            {
                auto src = depth_data.as<rs2::video_frame>();

                // Depth->disparity converts Z16 to float; the reverse narrows back to Z16.
                if (_transform_to_disparity)
                    convert<uint16_t, float>(src.get_data(), const_cast<void*>(tgt.get_data()));
                else
                    convert<float, uint16_t>(src.get_data(), const_cast<void*>(tgt.get_data()));
            }
        }

        // Re-wrap into a composite frame when the input was a frameset.
        out = composite ? source.allocate_composite_frame({ tgt }) : tgt;
        source.frame_ready(out);
    };
    auto callback = new rs2::frame_processor_callback<decltype(on_frame)>(on_frame);
    processing_block::set_processing_callback(std::shared_ptr<rs2_frame_processor_callback>(callback));

    // Apply the initial direction immediately.
    on_set_mode(_transform_to_disparity);
}