int main(int argc, char** argv) { if(argc < 2) { print_usage(argv[0]); return 1; } char* calib_filename = argv[1]; Viewer(640, 480, calib_filename).run(&argc, argv); return 0; }
// Opens `pathname` in an internal FileViewer and runs it modally, blocking
// until the viewer frame is dismissed.
//
//   pathname      - file to view (multibyte; converted to wide for the API)
//   DisableHistory- nonzero to keep the file out of the view history
//   DisableEdit   - nonzero to suppress switching to the editor
//   scroll_to_end - if true, jump to the end of the file before showing
//   autoclose     - if true, the viewer closes itself automatically
static void ModalViewFileInternal(const std::string &pathname, int DisableHistory,
	int DisableEdit, bool scroll_to_end, bool autoclose)
{
	// Convert the multibyte path to wide characters for the viewer API.
	FileViewer Viewer(StrMB2Wide(pathname).c_str(), FALSE, DisableHistory, DisableEdit);
	Viewer.SetDynamicallyBorn(false);
	if (scroll_to_end)
		Viewer.ProcessKey(KEY_END); // simulate END so the view starts at EOF
	if (autoclose)
		Viewer.SetAutoClose(true);
	// Enter the modal event scope, pump the modal loop until the viewer
	// frame closes, then leave the modal scope. Order is load-bearing.
	FrameManager->EnterModalEV();
	FrameManager->ExecuteModal();
	FrameManager->ExitModalEV();
	// NOTE(review): the return value is discarded and the function is void —
	// presumably called for side effects, if any; confirm whether the exit
	// code should instead be propagated to the caller.
	Viewer.GetExitCode();
}
// Routes a remote function call (RPC) for `Function` invoked on `Actor` to
// the appropriate net connection(s).
//
// On the server, a NetMulticast function is fanned out to every client
// connection (with a per-connection relevancy check for unreliable calls)
// and the function returns early. All other cases — including everything on
// the client — send through the actor's single owning connection, if any.
void UIpNetDriver::ProcessRemoteFunction(class AActor* Actor, UFunction* Function, void* Parameters, FFrame* Stack, class UObject * SubObject )
{
	bool bIsServer = IsServer();

	UNetConnection* Connection = NULL;
	if (bIsServer)
	{
		if ((Function->FunctionFlags & FUNC_NetMulticast))
		{
			// Multicast functions go to every client
			TArray<UNetConnection*> UniqueRealConnections;
			for (int32 i=0; i<ClientConnections.Num(); ++i)
			{
				Connection = ClientConnections[i];
				if (Connection)
				{
					// Do relevancy check if unreliable.
					// Reliables will always go out. This is odd behavior. On one hand we wish to guarantee "reliables always get there". On the other
					// hand, replicating a reliable to something on the other side of the map that is non relevant seems weird.
					//
					// Multicast reliables should probably never be used in gameplay code for actors that have relevancy checks. If they are, the
					// rpc will go through and the channel will be closed soon after due to relevancy failing.
					bool IsRelevant = true;
					if ((Function->FunctionFlags & FUNC_NetReliable) == 0)
					{
						if (Connection->Viewer)
						{
							FNetViewer Viewer(Connection, 0.f);
							IsRelevant = Actor->IsNetRelevantFor(Viewer.InViewer, Viewer.Viewer, Viewer.ViewLocation);
						}
						else
						{
							// No viewer for this connection(?), just let it go through.
							UE_LOG(LogNet, Log, TEXT("Multicast function called on connection with no Viewer"));
						}
					}

					if (IsRelevant)
					{
						// Child (split-screen) connections route through their parent.
						if (Connection->GetUChildConnection() != NULL)
						{
							Connection = ((UChildConnection*)Connection)->Parent;
						}
						InternalProcessRemoteFunction( Actor, SubObject, Connection, Function, Parameters, Stack, bIsServer );
					}
				}
			}

			// Return here so we don't call InternalProcessRemoteFunction again at the bottom of this function
			return;
		}
	}

	// Send function data to remote.
	Connection = Actor->GetNetConnection();
	if (Connection)
	{
		InternalProcessRemoteFunction( Actor, SubObject, Connection, Function, Parameters, Stack, bIsServer );
	}
}
/// Builds a DisplayConfig from the client context's "/display" descriptor:
/// one viewer at HEAD_PATH, one or two eyes (mono/stereo), per-eye radial
/// distortion and optical-axis offset, and the display-input geometry.
/// Returns an empty DisplayConfigPtr on any failure (never throws).
DisplayConfigPtr DisplayConfigFactory::create(OSVR_ClientContext ctx) {
    DisplayConfigPtr cfg(new DisplayConfig);
    try {
        auto const descriptorString = ctx->getStringParameter("/display");
        auto desc = display_schema_1::DisplayDescriptor(descriptorString);
        cfg->m_viewers.container().emplace_back(Viewer(ctx, HEAD_PATH));
        auto &viewer = cfg->m_viewers.container().front();
        auto eyesDesc = desc.getEyes();

        /// Set up stereo vs mono: eye offsets are +/- half the IPD along X
        /// for stereo, zero for mono.
        std::vector<uint8_t> eyeIndices;
        Eigen::Vector3d offset;
        if (eyesDesc.size() == 2) {
            // stereo
            offset = desc.getIPDMeters() / 2. * Eigen::Vector3d::UnitX();
            eyeIndices = {0, 1};
        } else {
            // if (eyesDesc.size() == 1)
            // mono
            offset = Eigen::Vector3d::Zero();
            eyeIndices = {0};
        }

        /// Handle radial distortion parameters (only if any k1 is nonzero).
        boost::optional<OSVR_RadialDistortionParameters> distort;
        auto k1 = desc.getDistortion();
        if (k1.k1_red != 0 || k1.k1_green != 0 || k1.k1_blue != 0) {
            OSVR_RadialDistortionParameters params;
            params.k1.data[0] = k1.k1_red;
            params.k1.data[1] = k1.k1_green;
            params.k1.data[2] = k1.k1_blue;
            distort = params;
        }

        /// Compute angular offset about Y of the optical (view) axis,
        /// derived from the descriptor's overlap percentage (<100% overlap
        /// means the two view axes are canted outward).
        util::Angle axisOffset = 0. * util::radians;
        {
            auto overlapPct = desc.getOverlapPercent();
            if (overlapPct < 1.) {
                const auto hfov = desc.getHorizontalFOV();
                const auto angularOverlap = hfov * overlapPct;
                axisOffset = (hfov - angularOverlap) / 2.;
            }
        }

        /// Infer the number of display inputs and their association with
        /// eyes (actually surfaces) based on the descriptor.
        std::vector<OSVR_DisplayInputCount> displayInputIndices;
        if (eyesDesc.size() == 2 &&
            display_schema_1::DisplayDescriptor::FULL_SCREEN ==
                desc.getDisplayMode()) {
            // two eyes, full screen - that means two screens.
            displayInputIndices = {0, 1};
            cfg->m_displayInputs.push_back(DisplayInput(
                desc.getDisplayWidth(), desc.getDisplayHeight()));
            cfg->m_displayInputs.push_back(DisplayInput(
                desc.getDisplayWidth(), desc.getDisplayHeight()));
        } else {
            // everything else, assume 1 screen.
            // Note that it's OK that displayInputIndices.size() >=
            // eyesDesc.size(), we'll just not end up using the second
            // entry.
            displayInputIndices = {0, 0};
            cfg->m_displayInputs.push_back(DisplayInput(
                desc.getDisplayWidth(), desc.getDisplayHeight()));
        }
        BOOST_ASSERT_MSG(displayInputIndices.size() >= eyesDesc.size(),
                         "Must have at least as many indices as eyes");

        /// Create the actual eye (with implied surface) objects
        for (auto eye : eyeIndices) {
            // This little computation turns 0 into -1 and 1 into 1, used as
            // a coefficient to make the two eyes do opposite things.
            // Doesn't affect mono, which has a zero offset vector.
            double offsetFactor = (2. * eye) - 1.;
            // Set up per-eye distortion parameters, if needed
            boost::optional<OSVR_RadialDistortionParameters> distortEye(
                distort);
            if (distortEye) {
                distortEye->centerOfProjection.data[0] =
                    eyesDesc[eye].m_CenterProjX;
                distortEye->centerOfProjection.data[1] =
                    eyesDesc[eye].m_CenterProjY;
            }
            // precompute translation offset for this eye
            auto xlateOffset = (offsetFactor * offset).eval();
            // precompute the optical axis rotation for this eye
            // here, the left eye should get a positive offset since it's a
            // positive rotation about y, hence the -1 factor.
            auto eyeAxisOffset = axisOffset * -1. * offsetFactor;
            // Look up the display index for this eye.
            auto displayInputIdx = displayInputIndices[eye];
            /// Create the ViewerEye[Surface] and add it to the container.
            viewer.container().emplace_back(ViewerEye(
                ctx, xlateOffset, HEAD_PATH, computeViewport(eye, desc),
                computeRect(desc), eyesDesc[eye].m_rotate180,
                desc.getPitchTilt().value(), distortEye, displayInputIdx,
                eyeAxisOffset));
        }

        OSVR_DEV_VERBOSE("Display: " << desc.getHumanReadableDescription());
        return cfg;
    } catch (std::exception const &e) {
        // Descriptor parse/lookup failure: log and report via empty pointer.
        OSVR_DEV_VERBOSE(
            "Couldn't create a display config internally! Exception: "
            << e.what());
        return DisplayConfigPtr{};
    } catch (...) {
        OSVR_DEV_VERBOSE("Couldn't create a display config internally! "
                         "Unknown exception!");
        return DisplayConfigPtr{};
    }
}
int main(int argc, char** argv) { openni::Status rc = openni::STATUS_OK; openni::Device device; openni::VideoStream depth, color; const char* deviceURI = openni::ANY_DEVICE; if (argc > 1) { deviceURI = argv[1]; } rc = openni::OpenNI::initialize(); printf("After initialization:\n%s\n", openni::OpenNI::getExtendedError()); rc = device.open(deviceURI); if (rc != openni::STATUS_OK) { printf("SimpleViewer: Device open failed:\n%s\n", openni::OpenNI::getExtendedError()); openni::OpenNI::shutdown(); return 1; } rc = depth.create(device, openni::SENSOR_DEPTH); if (rc == openni::STATUS_OK) { rc = depth.start(); if (rc != openni::STATUS_OK) { printf("SimpleViewer: Couldn't start depth stream:\n%s\n", openni::OpenNI::getExtendedError()); depth.destroy(); } } else { printf("SimpleViewer: Couldn't find depth stream:\n%s\n", openni::OpenNI::getExtendedError()); } rc = color.create(device, openni::SENSOR_COLOR); if (rc == openni::STATUS_OK) { rc = color.start(); if (rc != openni::STATUS_OK) { printf("SimpleViewer: Couldn't start color stream:\n%s\n", openni::OpenNI::getExtendedError()); color.destroy(); } } else { printf("SimpleViewer: Couldn't find color stream:\n%s\n", openni::OpenNI::getExtendedError()); } if (!depth.isValid() || !color.isValid()) { printf("SimpleViewer: No valid streams. Exiting\n"); openni::OpenNI::shutdown(); return 2; } /*Viewer class initialization. */ Viewer Viewer("Simple Viewer", device, depth, color); rc = Viewer.init(); if (rc != openni::STATUS_OK) { openni::OpenNI::shutdown(); return 3; } rc = Viewer.initOpenCv(); if (rc != openni::STATUS_OK) { openni::OpenNI::shutdown(); return 4; } /*Hand processing initialization. */ init_recording(&cvctx); //init_windows(); init_ctx(&cvctx); /*loop program */ while(1) { Viewer.run(); } }