//! Goto specified cut out position void TTCutOutFrame::onGotoCutOut( int pos, TTMpeg2VideoStream* pMpeg2Stream ) { if ( pMpeg2Stream != 0 ) initVideoStream( pMpeg2Stream ); if ( mpeg2Stream == 0 ) return; currentPosition = mpeg2Stream->moveToIndexPos(pos); mpegWindow->showFrameAt( currentPosition ); updateCurrentPosition(); }
bool Capture::init() { if (mVideoDevice.empty()) { sgct::MessageHandler::instance()->print(sgct::MessageHandler::NOTIFY_ERROR, "No video device specified!\n"); cleanup(); return false; } initFFmpeg(); setupOptions(); // -------------------------------------------------- //check out https://ffmpeg.org/ffmpeg-devices.html // -------------------------------------------------- AVInputFormat * iformat; std::string inputName; #ifdef __WIN32__ iformat = av_find_input_format("dshow"); inputName = "video=" + mVideoDevice; #elif defined __APPLE__ iformat = av_find_input_format("avfoundation"); inputName = mVideoDevice; #else //linux NOT-Tested iformat = av_find_input_format("video4linux2"); inputName = mVideoDevice; #endif if (avformat_open_input(&mFMTContext, inputName.c_str(), iformat, &mOptions) < 0) { sgct::MessageHandler::instance()->print(sgct::MessageHandler::NOTIFY_ERROR, "Could not open capture input!\n"); cleanup(); return false; } /* retrieve stream information */ if (avformat_find_stream_info(mFMTContext, nullptr) < 0) { sgct::MessageHandler::instance()->print(sgct::MessageHandler::NOTIFY_ERROR, "Could not find stream information!\n"); cleanup(); return false; } if (!initVideoStream()) { cleanup(); return false; } //dump format info to console av_dump_format(mFMTContext, 0, inputName.c_str(), 0); if (mVideoCodecContext) { if (!allocateVideoDecoderData(mVideoCodecContext->pix_fmt)) { cleanup(); return false; } } /* initialize packet, set data to nullptr, let the demuxer fill it */ av_init_packet(&mPkt); mPkt.data = nullptr; mPkt.size = 0; //success mInited = true; return true; }
int main(int argc, char* argv[]) { GLFWwindow* window; MarkerTracker markerTracker; /* Initialize the library */ if (!glfwInit()) return -1; // initialize the window system /* Create a windowed mode window and its OpenGL context */ window = glfwCreateWindow(camera_width, camera_height, "Combine", NULL, NULL); if (!window) { glfwTerminate(); return -1; } glfwMakeContextCurrent(window); glfwSwapInterval(1); int window_width, window_height; glfwGetFramebufferSize(window, &window_width, &window_height); glViewport(0, 0, window_width, window_height); // initialize the GL library initGL(argc, argv); // setup OpenCV cv::Mat img_bgr; cv::VideoCapture cap; cap.set(CV_CAP_PROP_FRAME_WIDTH, camera_width); cap.set(CV_CAP_PROP_FRAME_HEIGHT, camera_height); initVideoStream(cap); /* Loop until the user closes the window */ while (!glfwWindowShouldClose(window)) { cap >> img_bgr; if (img_bgr.empty()){ std::cout << "Could not query frame. Trying to reinitialize." << std::endl; initVideoStream(cap); cv::waitKey(1000); // Wait for one sec. continue; } // Track markers markerTracker.find(img_bgr); /* Render here */ display(window, img_bgr); /* Swap front and back buffers */ glfwSwapBuffers(window); /* Poll for and process events */ glfwPollEvents(); } glfwTerminate(); return 0; }
bool UBWindowsMediaFile::init(const QString& videoFileName, const QString& profileData, int pFramesPerSecond , int pixelWidth, int pixelHeight, int bitsPerPixel) { mFramesPerSecond = pFramesPerSecond; mVideoFileName = videoFileName; CoInitialize(0); if (FAILED(WMCreateProfileManager(&mWMProfileManager))) { setLastErrorMessage("Unable to create a WMProfileManager"); close(); return false; } IWMProfileManager2 *wmProfileManager = 0; if (FAILED(mWMProfileManager->QueryInterface(IID_IWMProfileManager2, (void**) &wmProfileManager))) { setLastErrorMessage("Unable to query the WMProfileManager for interface WMProfileManager2"); close(); return false; } HRESULT hr = wmProfileManager->SetSystemProfileVersion(WMT_VER_9_0); wmProfileManager->Release(); if (FAILED(hr)) { setLastErrorMessage("Unable to set WMProfileManager SystemProfileVersion"); close(); return false; } if (FAILED(mWMProfileManager->LoadProfileByData((LPCTSTR) profileData.utf16(), &mWMProfile))) { setLastErrorMessage("Unable to load WMProfileManager custom profile"); close(); return false; } DWORD streamCount = -1; if (FAILED(mWMProfile->GetStreamCount(&streamCount))) { setLastErrorMessage("Unable to read mWMProfile stream count"); close(); return false; } if (FAILED(WMCreateWriter(NULL, &mWMWriter))) { setLastErrorMessage("Unable to create WMMediaWriter Object"); close(); return false; } if (FAILED(mWMWriter->SetProfile(mWMProfile))) { setLastErrorMessage("Unable to set WMWriter system profile"); close(); return false; } DWORD mediaInputCount = 0; if (FAILED(mWMWriter->GetInputCount(&mediaInputCount))) { setLastErrorMessage("Unable to get input count for profile"); close(); return false; } for (DWORD i = 0; i < mediaInputCount; i++) { IWMInputMediaProps* wmInoutMediaProps = 0; if (FAILED(mWMWriter->GetInputProps(i, &wmInoutMediaProps))) { setLastErrorMessage("Unable to get WMWriter input properties"); close(); return false; } GUID guidInputType; if (FAILED(wmInoutMediaProps->GetType(&guidInputType))) { 
setLastErrorMessage("Unable to get WMWriter input property type"); close(); return false; } if (guidInputType == WMMEDIATYPE_Video) { mWMInputVideoProps = wmInoutMediaProps; mVideoInputIndex = i; } else if (guidInputType == WMMEDIATYPE_Audio) { mWMInputAudioProps = wmInoutMediaProps; mAudioInputIndex = i; } else { wmInoutMediaProps->Release(); wmInoutMediaProps = 0; } } if (mWMInputVideoProps == 0) { setLastErrorMessage("Profile does not accept video input"); close(); return false; } if (mWMInputAudioProps == 0) { setLastErrorMessage("Profile does not accept audio input"); close(); return false; } if (FAILED(mWMWriter->SetOutputFilename((LPCTSTR) videoFileName.utf16()))) { setLastErrorMessage("Unable to set the output filename"); close(); return false; } if(!initVideoStream(pixelWidth, pixelHeight, bitsPerPixel)) { close(); return false; } if (FAILED(mWMWriter->BeginWriting())) { setLastErrorMessage("Unable to initialize video frame writing"); return false; } return true; }
/// RealSense camera ROS node entry point.
/// Reads all parameters from the private node handle, locates the RealSense
/// video devices, initializes the mmap-based RGB and depth captures, loads
/// optional calibration files, advertises the point-cloud/image topics and
/// runs the capture loop until ROS shuts down.
int main(int argc, char* argv[])
{
    ros::init(argc, argv, "realsense_camera_node");
    ros::NodeHandle n;
    image_transport::ImageTransport image_transport(n);
    ros::NodeHandle private_node_handle_("~");

    // --- parameters (note: default camera type string ends in "Camer",
    // apparently truncated upstream — it is matched verbatim below) ---
    private_node_handle_.param("realsense_camera_type", realsense_camera_type, std::string("Intel(R) RealSense(TM) 3D Camer"));
    private_node_handle_.param("rgb_frame_id", rgb_frame_id, std::string("_rgb_optical_frame"));
    private_node_handle_.param("depth_frame_id", depth_frame_id, std::string("_depth_optical_frame"));
    private_node_handle_.param("rgb_frame_w", rgb_frame_w, 1280);
    private_node_handle_.param("rgb_frame_h", rgb_frame_h, 720);

    // Depth conversion factors are read as double then narrowed into the
    // globals used by the processing code.
    double depth_unit_d, depth_scale_d;
    private_node_handle_.param("depth_unit", depth_unit_d, 31.25);
    private_node_handle_.param("depth_scale", depth_scale_d, 0.001);
    depth_unit = depth_unit_d;
    depth_scale = depth_scale_d;

    // Depth intrinsics; the inverses are cached for per-pixel reprojection.
    double depth_fx, depth_fy;
    private_node_handle_.param("depth_fx", depth_fx, 463.888885);
    private_node_handle_.param("depth_fy", depth_fy, 463.888885);
    depth_fxinv = 1.0f / depth_fx;
    depth_fyinv = 1.0f / depth_fy;

    double depth_cx_d, depth_cy_d;
    private_node_handle_.param("depth_cx", depth_cx_d, 320.0);
    private_node_handle_.param("depth_cy", depth_cy_d, 240.0);
    depth_cx = depth_cx_d;
    depth_cy = depth_cy_d;

    private_node_handle_.param("depth_uv_enable_min", depth_uv_enable_min, 0);
    private_node_handle_.param("depth_uv_enable_max", depth_uv_enable_max, 2047);

    // Topic names.
    private_node_handle_.param("topic_depth_points_id", topic_depth_points_id, std::string("/depth/points"));
    private_node_handle_.param("topic_depth_registered_points_id", topic_depth_registered_points_id, std::string("/depth_registered/points"));
    private_node_handle_.param("topic_image_rgb_raw_id", topic_image_rgb_raw_id, std::string("/rgb/image_raw"));
    private_node_handle_.param("topic_image_depth_raw_id", topic_image_depth_raw_id, std::string("/depth/image_raw"));
    private_node_handle_.param("topic_image_infrared_raw_id", topic_image_infrared_raw_id, std::string("/ir/image_raw"));
    private_node_handle_.param("debug_depth_unit", debug_depth_unit, false);

    // Optional calibration file URLs (empty means "use defaults").
    std::string rgb_info_url;
    private_node_handle_.param("rgb_camera_info_url", rgb_info_url, std::string());
    std::string ir_camera_info_url;
    private_node_handle_.param("ir_camera_info_url", ir_camera_info_url, std::string());

    // Echo the effective configuration to the console.
    printf("\n\n===================\n"
           "realsense_camera_type = %s\n"
           "rgb_frame_id = %s\n"
           "depth_frame_id = %s\n"
           "depth_unit = %f\n"
           "depth_scale = %f\n"
           "depth_fxinv = %f\n"
           "depth_fyinv = %f\n"
           "depth_cx = %f\n"
           "depth_cy = %f\n"
           "depth_uv_enable_min = %d\n"
           "depth_uv_enable_max = %d\n"
           "topic_depth_points_id = %s\n"
           "topic_depth_registered_points_id = %s\n"
           "topic_image_rgb_raw_id = %s\n"
           "topic_image_depth_raw_id = %s\n"
           "topic_image_infrared_raw_id = %s\n"
           "debug_depth_unit = %d\n"
           "rgb_camera_info_url = %s\n"
           "ir_camera_info_url = %s\n"
           "=======================\n\n",
           realsense_camera_type.c_str(),
           rgb_frame_id.c_str(),
           depth_frame_id.c_str(),
           depth_unit,
           depth_scale,
           depth_fxinv,
           depth_fyinv,
           depth_cx,
           depth_cy,
           depth_uv_enable_min,
           depth_uv_enable_max,
           topic_depth_points_id.c_str(),
           topic_depth_registered_points_id.c_str(),
           topic_image_rgb_raw_id.c_str(),
           topic_image_depth_raw_id.c_str(),
           topic_image_infrared_raw_id.c_str(),
           debug_depth_unit,
           rgb_info_url.c_str(),
           ir_camera_info_url.c_str()
           );

    // IR streaming needs a kernel with the INZI pixel format patch.
#ifdef V4L2_PIX_FMT_INZI
    printf("\ndepthWithIRStream - YEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEES\n");
#else
    printf("\ndepthWithIRStream - NOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO\n");
    printf("if you want IR stream, please visit\n"
           "http://solsticlipse.com/2015/03/31/intel-real-sense-3d-on-linux-macos.html\n"
           "https://github.com/teknotus/depthview/tree/kernelpatchfmt\n");
#endif

    // find realsense video device
    std::vector<VIDEO_DEVICE> video_lists;
    list_devices(realsense_camera_type, video_lists);

    if(video_lists.empty())
    {
        printf("\n\n""can not find Intel(R) RealSense(TM) 3D Camera video device!!!!!!!!! - %s\n\n", realsense_camera_type.c_str());
        ROS_ERROR("can not find Intel(R) RealSense(TM) 3D Camera video device!!!!!!!!! - %s", realsense_camera_type.c_str());
        ros::shutdown();
        return 0;
    }

    // Dump the detected camera list.
    if(1)
    {
        printf("\n===========================================\n");
        printf("Intel(R) RealSense(TM) 3D Camera lists\n");
        for(int i=0; i<video_lists.size(); ++i)
        {
            printf("\nPCI: %s\n", video_lists[i].card_name.c_str());
            printf("Serial: %s\n", video_lists[i].serial_number.c_str());
            for(int j=0; j<video_lists[i].video_names.size(); ++j)
            {
                printf("\t%s\n", video_lists[i].video_names[j].c_str());
            }
        }
        printf("===========================================\n\n");
    }

    //return 0;

    // A RealSense exposes (at least) two video nodes: RGB and depth.
    if(video_lists[0].video_names.size() < 2)
    {
        printf("Intel(R) RealSense(TM) 3D Camera video device count error!!!!!!!!!!!\n");
        ros::shutdown();
        return 0;
    }
    else
    {
        // Only the first detected camera is used.
        useDeviceSerialNum = video_lists[0].serial_number;
        printf("use camera %s\n", useDeviceSerialNum.c_str());
    }

    initDepthToRGBUVMap();

    initVideoStream();
    // NOTE(review): strncpy with count == source length does NOT write a
    // terminating '\0'; this relies on videoName being pre-zeroed (e.g. by
    // initVideoStream above) — confirm, otherwise the printf below can
    // read past the buffer.
    strncpy(rgb_stream.videoName, video_lists[0].video_names[0].c_str(), video_lists[0].video_names[0].length());
    strncpy(depth_stream.videoName, video_lists[0].video_names[1].c_str(), video_lists[0].video_names[1].length());
    printf("video rgb name is %s\n", rgb_stream.videoName);
    printf("video depth name is %s\n", depth_stream.videoName);

    // Open both streams via mmap; non-zero return means failure.
    if(capturer_mmap_init(&rgb_stream))
    {
        printf("open %s error!!!!!!!!\n", rgb_stream.videoName);
        ros::shutdown();
        return 0;
    }
    else
    {
        printf("video rgb w,h - %d, %d\n", rgb_stream.width, rgb_stream.height);
    }

    if(capturer_mmap_init(&depth_stream))
    {
        printf("open %s error!!!!!!!!\n", depth_stream.videoName);
        ros::shutdown();
        return 0;
    }
    else
    {
        printf("video depth w,h - %d, %d\n", depth_stream.width, depth_stream.height);
    }

    // RGB calibration: load from URL if given and calibrated; fall back to
    // a bare CameraInfo carrying only the frame size.
    if (!rgb_info_url.empty())
    {
        std::string camera_name_rgb = "realsense_camera_rgb_" + useDeviceSerialNum;
        camera_info_manager::CameraInfoManager rgb_info_manager(n, camera_name_rgb, rgb_info_url);
        if (rgb_info_manager.isCalibrated())
        {
            rgb_camera_info = boost::make_shared<sensor_msgs::CameraInfo>(rgb_info_manager.getCameraInfo());
            if (rgb_camera_info->width != rgb_frame_w || rgb_camera_info->height != rgb_frame_h)
            {
                ROS_WARN("RGB image resolution does not match calibration file");
                rgb_camera_info.reset(new sensor_msgs::CameraInfo());
                rgb_camera_info->width = rgb_frame_w;
                rgb_camera_info->height = rgb_frame_h;
            }
        }
    }
    if (!rgb_camera_info)
    {
        rgb_camera_info.reset(new sensor_msgs::CameraInfo());
        rgb_camera_info->width = rgb_frame_w;
        rgb_camera_info->height = rgb_frame_h;
    }

    // IR calibration: same pattern, sized against the depth stream.
    if (!ir_camera_info_url.empty())
    {
        std::string camera_name_ir = "realsense_camera_ir_" + useDeviceSerialNum;
        camera_info_manager::CameraInfoManager ir_camera_info_manager(n, camera_name_ir, ir_camera_info_url);
        if (ir_camera_info_manager.isCalibrated())
        {
            ir_camera_info = boost::make_shared<sensor_msgs::CameraInfo>(ir_camera_info_manager.getCameraInfo());
            if (ir_camera_info->width != depth_stream.width || ir_camera_info->height != depth_stream.height)
            {
                ROS_WARN("IR image resolution does not match calibration file");
                ir_camera_info.reset(new sensor_msgs::CameraInfo());
                ir_camera_info->width = depth_stream.width;
                ir_camera_info->height = depth_stream.height;
            }
        }
    }
    if (!ir_camera_info)
    {
        ir_camera_info.reset(new sensor_msgs::CameraInfo());
        ir_camera_info->width = depth_stream.width;
        ir_camera_info->height = depth_stream.height;
    }

    // Optional raw USB access for depth-unit debugging (exact camera-type
    // string match — see the truncated default above).
    if(debug_depth_unit &&
       realsense_camera_type == "Intel(R) RealSense(TM) 3D Camer")
    {
        getRealsenseUSBHandle(usbContext, usbHandle, useDeviceSerialNum);
        if(usbContext && usbHandle)
        {
            printf("getRealsenseUSBHandle OK!\n");
        }
    }

    printf("RealSense Camera is running!\n");

    // Frame buffers: 3 bytes/px for BGR24, else 2 (packed YUV);
    // depth is 1 byte/px here.
#if USE_BGR24
    rgb_frame_buffer = new unsigned char[rgb_stream.width * rgb_stream.height * 3];
#else
    rgb_frame_buffer = new unsigned char[rgb_stream.width * rgb_stream.height * 2];
#endif

    depth_frame_buffer = new unsigned char[depth_stream.width * depth_stream.height];

#ifdef V4L2_PIX_FMT_INZI
    ir_frame_buffer = new unsigned char[depth_stream.width * depth_stream.height];
#endif

    // Publishers / services.
    realsense_points_pub = n.advertise<sensor_msgs::PointCloud2> (topic_depth_points_id, 1);
    realsense_reg_points_pub = n.advertise<sensor_msgs::PointCloud2>(topic_depth_registered_points_id, 1);
    realsense_rgb_image_pub = image_transport.advertiseCamera(topic_image_rgb_raw_id, 1);
    realsense_depth_image_pub = image_transport.advertiseCamera(topic_image_depth_raw_id, 1);
    //pub_depth_info = n.advertise<sensor_msgs::CameraInfo>("depth/camera_info", 1);
    //pub_rgb_info = n.advertise<sensor_msgs::CameraInfo>("rgb/camera_info", 1);

#ifdef V4L2_PIX_FMT_INZI
    realsense_infrared_image_pub = image_transport.advertiseCamera(topic_image_infrared_raw_id, 1);
#endif

    ros::Subscriber config_sub = n.subscribe("/realsense_camera_config", 1, realsenseConfigCallback);
    getRGBUVService = n.advertiseService("get_rgb_uv", getRGBUV);

    capturer_mmap_init_v4l2_controls();
    dynamic_reconfigure::Server<realsense_camera::RealsenseCameraConfig> dynamic_reconfigure_server;
    dynamic_reconfigure_server.setCallback(boost::bind(&dynamicReconfigCallback, _1, _2));

    ros::Rate loop_rate(60);
    ros::Rate idle_rate(1);

    // Main loop: idle at 1 Hz while nobody is subscribed, otherwise grab
    // and publish frames at up to 60 Hz.
    while(ros::ok())
    {
        while ((getNumRGBSubscribers() + getNumDepthSubscribers()) == 0 && ros::ok())
        {
            ros::spinOnce();
            idle_rate.sleep();
        }
        processRGBD();
#if SHOW_RGBD_FRAME
        cv::waitKey(10);
#endif
        ros::spinOnce();
        loop_rate.sleep();
    }

    // Shutdown: release captures, buffers and (optionally) the USB handle.
    capturer_mmap_exit(&rgb_stream);
    capturer_mmap_exit(&depth_stream);

    delete[] rgb_frame_buffer;
    delete[] depth_frame_buffer;
#ifdef V4L2_PIX_FMT_INZI
    delete[] ir_frame_buffer;
#endif

    if(debug_depth_unit)
    {
        libusb_close(usbHandle);
        libusb_exit(usbContext);
    }

    printf("RealSense Camera is shutdown!\n");

    return 0;
}
/// ADTF pin-event callback: dispatches received media samples by source pin.
/// - maneuverPin: on MANEUVER_STRAIGHT, activates driving and transmits the
///   configured drive speed.
/// - wheelTicksPin: lazily connects to the TCP server, updates the current
///   tick count and, once a stop line is known, drives toward it.
/// - xtionPin: first frame initializes the video stream; later frames are
///   processed for stop-line detection while driving is active.
/// Uses ADTF's RETURN_* macros, so several paths return early.
tResult Test::OnPinEvent(IPin *source, tInt eventCore, tInt param1, tInt param2, IMediaSample *mediaSample)
{
    RETURN_IF_POINTER_NULL(source);
    RETURN_IF_POINTER_NULL(mediaSample);

    if (eventCore == IPinEventSink::PE_MediaSampleReceived)
    {
        if (source == &this->maneuverPin)
        {
            // static: buffer reused across calls; filled by getManeuver.
            static tUInt16 tmpManeuver;
            getManeuver(mediaSample, maneuverPin, this->coderDescriptionManeuver, tmpManeuver);
            if (tmpManeuver == MANEUVER_STRAIGHT)
            {
                this->isDriveActive = true;
                RETURN_IF_FAILED_AND_LOG_ERROR_STR(transmitAcceleration(this->driveSpeed), "Cant transmit drive");
            }
        }
        else if (source == &this->wheelTicksPin)
        {
            // Lazily establish the TCP connection on the first tick sample;
            // on failure we give up for this sample (early RETURN_NOERROR).
            if (!this->isConnectedToServer)
            {
                if (!tcpClient->connectToServer(this->ip, 5555))
                {
                    LOG_INFO(cString::Format("Cant connect to server with ip: %s:5555", this->ip.c_str()));
                    RETURN_NOERROR;
                }
                else
                {
                    this->isConnectedToServer = true;
                    LOG_INFO("Could connect to server :)");
                }
            }

            static tFloat32 tmpWheelTicks;
            RETURN_IF_FAILED_AND_LOG_ERROR_STR(getWheelTicks(mediaSample, tmpWheelTicks), "cant get wheel ticks");
            this->currentWheelTicks = static_cast<int>(tmpWheelTicks);
            if (this->isStopLineFound)
            {
                driveToStopLine();
            }
        }
        else if(source == &this->xtionPin)
        {
            if (this->isFirstFrame)
            {
                // One-time stream setup; the first sample itself is not processed.
                RETURN_IF_FAILED_AND_LOG_ERROR_STR(initVideoStream(), "Cant init video stream");
                this->isFirstFrame = false;
            }
            else
            {
                const tVoid *buffer; // only valid between Lock/Unlock below
                if (this->isDriveActive && IS_OK(mediaSample->Lock(&buffer)))
                {
                    //Receive the image: wrap the locked sample buffer without
                    // copying, then clone so we can unlock before processing.
                    Mat image(Size(this->videoInputInfo.nWidth, this->videoInputInfo.nHeight), CV_8UC3, (char*) buffer);
                    Mat result = image.clone();
                    RETURN_IF_FAILED_AND_LOG_ERROR_STR(mediaSample->Unlock(buffer), "Cant unlock image");
                    this->driveAlgorithm->prepareImage(result);
                    if (!this->isStopLineFound)
                    {
                        this->isStopLineFound = this->crossroadDetector->searchStopLine(result);
                        // NOTE: ticksToDrive is recomputed on every frame until the
                        // stop line is found (including the frame where it is found).
                        this->ticksToDrive = this->ticksToStopLine + this->currentWheelTicks;
                    }
                }
            }
        }
    }

    RETURN_NOERROR;
}