Example #1
SerialCom::SerialCom()
{
  // Command subscribers, velocity publisher and the odometry log file.
  sub_cmd_vel_ = serial_node_handle_.subscribe("cmd_velocity", 1000, &SerialCom::comCallBack, this);
  sub_cmd_twist_ = serial_node_handle_.subscribe("cmd_twist", 1000, &SerialCom::velSend, this);
  vel_pub_ = serial_node_handle_.advertise<dlut_move_base::Twist>("robot_velocity", 1);
  odom_data_.open("odometry.txt");

  // Serial port settings from the parameter server, falling back to the defaults.
  serial_node_handle_.param("Port", port_, DEFAULT_PORT);
  serial_node_handle_.param("BaudRate", baudrate_, DEFAULT_BAUDRATE);

  twist_.angular.z = 0.0;
  twist_.linear.x = 0.0;

  comInit();

  // Start the odometry estimate at the origin with zero velocity.
  x_ = 0.0;
  y_ = 0.0;
  th_ = 0.0;

  vx_ = 0.0;
  vy_ = 0.0;
  vth_ = 0.0;

  current_time_ = ros::Time::now();
  last_time_ = ros::Time::now();

  odom_pub_ = serial_node_handle_.advertise<nav_msgs::Odometry>("odom", 50);
}
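The constructor above does all of the wiring (subscribers, publishers, parameters, serial port), so a typical entry point only needs to construct the object and spin. A minimal sketch, assuming the class is declared in a serial_com.h header and that the node does nothing else in its main loop (both assumptions, not shown above):

#include <ros/ros.h>
#include "serial_com.h"  // hypothetical header declaring SerialCom

int main(int argc, char **argv)
{
  // ros::init must run before the NodeHandle inside SerialCom is created.
  ros::init(argc, argv, "serial_com_node");

  SerialCom serial_com;  // subscribes, opens the serial port, advertises odom

  ros::spin();  // hand control to ROS so the callbacks above get invoked
  return 0;
}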
int videoInputCamera::getDeviceCount() {

	ICreateDevEnum *pDevEnum = NULL;
	IEnumMoniker *pEnum = NULL;
	int deviceCount = 0;

	comInit();

	HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
		CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
		reinterpret_cast<void**>(&pDevEnum));

	if (SUCCEEDED(hr))
	{
		// Create an enumerator for the video capture category.
		hr = pDevEnum->CreateClassEnumerator(
			CLSID_VideoInputDeviceCategory,
			&pEnum, 0);

		if (hr == S_OK) {
			IMoniker *pMoniker = NULL;

			// Each moniker corresponds to one attached video capture device.
			while (pEnum->Next(1, &pMoniker, NULL) == S_OK) {
				pMoniker->Release();
				pMoniker = NULL;

				deviceCount++;
			}

			pEnum->Release();
			pEnum = NULL;
		}

		// Release the enumerator even when no capture devices were found (hr == S_FALSE).
		pDevEnum->Release();
		pDevEnum = NULL;
	}

	comUnInit();
	return deviceCount;
}
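A caller usually only needs the count to decide whether any capture device is attached. A minimal sketch; getDeviceCount() is invoked without an object inside getCameraConfigs() below, so it is assumed here to be a static member (adjust the call if it is not), and the header name is hypothetical:

#include <cstdio>
#include "videoInputCamera.h"  // hypothetical header for the class shown above

static bool anyCameraAttached()
{
	int count = videoInputCamera::getDeviceCount();
	if (count == 0)
		printf("no video capture devices found\n");
	return count > 0;
}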
Example #3
// Open the given COM port with 8 data bits, 1 stop bit, no parity and no flow control.
int serial_init(int port, int baudrate) {
	comInit();
	return comOpen(port, baudrate, DATABITS_8, STOPBITS_1, PARITY_NONE, FLOWCONTROL_NONE, COM_READWRITE, __SERIAL_BUFFER_SIZE);
}
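serial_init() simply forwards comOpen()'s return value, so error handling is left to the caller. A minimal sketch of that caller; the port index, the baud rate and the negative-on-failure convention are assumptions, since the comOpen() contract is not shown here:

#include <cstdio>

int serial_init(int port, int baudrate);  // defined in the example above

int open_default_port(void)
{
	// Port 0 and 115200 baud are placeholder values, not taken from the example.
	int handle = serial_init(0, 115200);
	if (handle < 0)
		fprintf(stderr, "failed to open serial port\n");
	return handle;
}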
std::vector<CameraConfig> videoInputCamera::getCameraConfigs(int dev_id) {

	std::vector<CameraConfig> cfg_list;

	int count = getDeviceCount();
	if (count==0) return cfg_list;

	comInit();

	HRESULT hr;
	ICaptureGraphBuilder2 *lpCaptureGraphBuilder;
	IGraphBuilder *lpGraphBuilder;
	IBaseFilter *lpInputFilter;
	IAMStreamConfig *lpStreamConfig;

	char 	nDeviceName[255];
	WCHAR 	wDeviceName[255];

	for (int cam_id=0;cam_id<count;cam_id++) {
		if ((dev_id>=0) && (dev_id!=cam_id)) continue;
		hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void **)&lpCaptureGraphBuilder);
		if (FAILED(hr))	// FAILED is a macro that tests the return value
		{
			printf("ERROR - Could not create the Filter Graph Manager\n");
			comUnInit();
			return cfg_list;
		}

		// Create the Filter Graph Manager.
		hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void**)&lpGraphBuilder);
		if (FAILED(hr))
		{
			printf("ERROR - Could not add the graph builder!\n");
			lpCaptureGraphBuilder->Release();
			comUnInit();
			return cfg_list;
		}

		hr = lpCaptureGraphBuilder->SetFiltergraph(lpGraphBuilder);
		if (FAILED(hr))
		{
			printf("ERROR - Could not set filtergraph\n");
			lpGraphBuilder->Release();
			lpCaptureGraphBuilder->Release();
			comUnInit();
			return cfg_list;
		}

		memset(wDeviceName, 0, sizeof(WCHAR) * 255);
		memset(nDeviceName, 0, sizeof(char) * 255);
		hr = getDevice(&lpInputFilter, cam_id, wDeviceName, nDeviceName);

		if (SUCCEEDED(hr)){
			hr = lpGraphBuilder->AddFilter(lpInputFilter, wDeviceName);
		}else{
			printf("ERROR - Could not find specified video device\n");
			lpGraphBuilder->Release();
			lpCaptureGraphBuilder->Release();
			comUnInit();
			return cfg_list;
		}

		hr = lpCaptureGraphBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, lpInputFilter, IID_IAMStreamConfig, (void **)&lpStreamConfig);
		if(FAILED(hr)){
			printf("ERROR: Couldn't config the stream!\n");
			lpInputFilter->Release();
			lpGraphBuilder->Release();
			lpCaptureGraphBuilder->Release();
			comUnInit();
			return cfg_list;
		}

		CameraConfig cam_cfg;
		CameraTool::initCameraConfig(&cam_cfg);

		cam_cfg.driver = DRIVER_DEFAULT;
		cam_cfg.device = cam_id;
		sprintf(cam_cfg.name, "%s", nDeviceName);

		int iCount = 0;
		int iSize = 0;
		hr = lpStreamConfig->GetNumberOfCapabilities(&iCount, &iSize);
		std::vector<CameraConfig> fmt_list;

		if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
		{
			GUID lastFormat = MEDIASUBTYPE_None;
			for (int iFormat = 0; iFormat < iCount; iFormat+=2)
			{
				VIDEO_STREAM_CONFIG_CAPS scc;
				AM_MEDIA_TYPE *pmtConfig;
				hr =  lpStreamConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
				if (SUCCEEDED(hr)){

					if ( pmtConfig->subtype != lastFormat) {

						if (fmt_list.size()>0) {
							std::sort(fmt_list.begin(), fmt_list.end());
							cfg_list.insert( cfg_list.end(), fmt_list.begin(), fmt_list.end() );
							fmt_list.clear();
						}
						cam_cfg.cam_format = getMediaSubtype(pmtConfig->subtype);
						lastFormat = pmtConfig->subtype;
					}

					int stepX = scc.OutputGranularityX;
					int stepY = scc.OutputGranularityY;
					if (stepX < 1 || stepY < 1) {
						// Skip degenerate granularities, releasing the media type first.
						deleteMediaType(pmtConfig);
						continue;
					}

					if ((stepX==1) && (stepY==1)) {

						cam_cfg.cam_width = scc.InputSize.cx;
						cam_cfg.cam_height = scc.InputSize.cy;

						int maxFrameInterval = scc.MaxFrameInterval;
						if (maxFrameInterval==0) maxFrameInterval = 10000000;
						float last_fps=-1;
						VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
						// Probe frame rates from MinFrameInterval up to MaxFrameInterval,
						// doubling the frame interval each step.
						for (int iv=scc.MinFrameInterval;iv<=maxFrameInterval;iv=iv*2) {
							pVih->AvgTimePerFrame = iv;
							hr = lpStreamConfig->SetFormat(pmtConfig);
							if (hr==S_OK) {
								hr = lpStreamConfig->GetFormat(&pmtConfig);
								float fps = ((int)floor(100000000.0f/(float)pVih->AvgTimePerFrame + 0.5f))/10.0f;
								if (fps!=last_fps) {
									cam_cfg.cam_fps = fps;
									fmt_list.push_back(cam_cfg);
									last_fps=fps;
								}
							}
						}

					} else {
						int x,y;
						// Walk the supported resolutions from MinOutputSize to MaxOutputSize in granularity steps.
						for (x=scc.MinOutputSize.cx,y=scc.MinOutputSize.cy;x<=scc.MaxOutputSize.cx && y<=scc.MaxOutputSize.cy;x+=stepX,y+=stepY) {

							cam_cfg.cam_width = x;
							cam_cfg.cam_height = y;

							int maxFrameInterval = scc.MaxFrameInterval;
							if (maxFrameInterval==0) maxFrameInterval = 10000000;
							float last_fps=-1;
							VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
							for (int iv=scc.MinFrameInterval;iv<=maxFrameInterval;iv=iv*2) {
								pVih->AvgTimePerFrame = iv;
								hr = lpStreamConfig->SetFormat(pmtConfig);
								if (hr==S_OK) {
									hr = lpStreamConfig->GetFormat(&pmtConfig);
									float fps = ((int)floor(100000000.0f/(float)pVih->AvgTimePerFrame + 0.5f))/10.0f;
									if (fps!=last_fps) {
										cam_cfg.cam_fps = fps;
										fmt_list.push_back(cam_cfg);
										last_fps=fps;
									}
								}
							}

						}
					}

					deleteMediaType(pmtConfig);
				}
			}
		}

		if (fmt_list.size()>0) {
			std::sort(fmt_list.begin(), fmt_list.end());
			cfg_list.insert( cfg_list.end(), fmt_list.begin(), fmt_list.end() );
			fmt_list.clear();
		}

		lpStreamConfig->Release();
		lpInputFilter->Release();
		lpGraphBuilder->Release();
		lpCaptureGraphBuilder->Release();
	}

	comUnInit();
	return cfg_list;
}
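A caller of getCameraConfigs() typically just walks the returned vector and reads the fields that the loop above fills in. A minimal sketch, assuming the method can be called without an instance and that the CameraConfig members used below are public, exactly as they appear above; the header name is hypothetical:

#include <cstdio>
#include <vector>
#include "videoInputCamera.h"  // hypothetical header for videoInputCamera / CameraConfig

static void listCameraModes()
{
	// A negative dev_id makes the enumeration loop above visit every detected device.
	std::vector<CameraConfig> configs = videoInputCamera::getCameraConfigs(-1);
	for (size_t i = 0; i < configs.size(); i++) {
		const CameraConfig &c = configs[i];
		// cam_format is printed as an int; its real type is not shown above.
		printf("device %d (%s): %dx%d @ %.1f fps, format %d\n",
			c.device, c.name, c.cam_width, c.cam_height, c.cam_fps, (int)c.cam_format);
	}
}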
HRESULT videoInputCamera::setupDevice() {

	comInit();
	GUID CAPTURE_MODE   = PIN_CATEGORY_CAPTURE; //Don't worry - it ends up being preview (which is faster)
	//printf("SETUP: Setting up device %i\n",deviceID);

	// CREATE THE GRAPH BUILDER //
	// Create the filter graph manager and query for interfaces.
	HRESULT hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void **)&pCaptureGraphBuilder);
	if (FAILED(hr))	// FAILED is a macro that tests the return value
	{
		printf("ERROR - Could not create the Filter Graph Manager\n");
		return hr;
	}

	//FILTER GRAPH MANAGER//
	// Create the Filter Graph Manager.
	hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void**)&pGraphBuilder);
	if (FAILED(hr))
	{
		printf("ERROR - Could not add the graph builder!\n");
		stopDevice();
		return hr;
	}

	//SET THE FILTERGRAPH//
	hr = pCaptureGraphBuilder->SetFiltergraph(pGraphBuilder);
	if (FAILED(hr))
	{
		printf("ERROR - Could not set filtergraph\n");
		stopDevice();
		return hr;
	}

	//MEDIA CONTROL (START/STOPS STREAM)//
	// Using QueryInterface on the graph builder,
	// Get the Media Control object.
	hr = pGraphBuilder->QueryInterface(IID_IMediaControl, (void **)&pMediaControl);
	if (FAILED(hr))
	{
		printf("ERROR - Could not create the Media Control object\n");
		stopDevice();
		return hr;
	}

	char 	nDeviceName[255];
	WCHAR 	wDeviceName[255];
	memset(wDeviceName, 0, sizeof(WCHAR) * 255);
	memset(nDeviceName, 0, sizeof(char) * 255);

	//FIND VIDEO DEVICE AND ADD TO GRAPH//
	//gets the device specified by the second argument.
	hr = getDevice(&pInputFilter, cfg->device, wDeviceName, nDeviceName);

	if (SUCCEEDED(hr)){
		sprintf(cfg->name, "%s", nDeviceName);
		//printf("SETUP: %s\n", nDeviceName);
		hr = pGraphBuilder->AddFilter(pInputFilter, wDeviceName);
	}else{
		printf("ERROR - Could not find specified video device\n");
		stopDevice();
		return hr;
	}

	//LOOK FOR PREVIEW PIN IF THERE IS NONE THEN WE USE CAPTURE PIN AND THEN SMART TEE TO PREVIEW
	IAMStreamConfig *streamConfTest = NULL;
	hr = pCaptureGraphBuilder->FindInterface(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, pInputFilter, IID_IAMStreamConfig, (void **)&streamConfTest);
	if(FAILED(hr)){
		//printf("SETUP: Couldn't find preview pin using SmartTee\n");
	}else{
		CAPTURE_MODE = PIN_CATEGORY_PREVIEW;
		streamConfTest->Release();
		streamConfTest = NULL;
	}

	//CROSSBAR (SELECT PHYSICAL INPUT TYPE)//
	//my own function that checks to see if the device can support a crossbar and if so it routes it.
	//webcams tend not to have a crossbar so this function will also detect a webcams and not apply the crossbar
	/*if(useCrossbar)
	{
	//printf("SETUP: Checking crossbar\n");
	routeCrossbar(pCaptureGraphBuilder, pInputFilter, connection, CAPTURE_MODE);
	}*/

	//we do this because webcams don't have a preview mode
	hr = pCaptureGraphBuilder->FindInterface(&CAPTURE_MODE, &MEDIATYPE_Video, pInputFilter, IID_IAMStreamConfig, (void **)&pStreamConfig);
	if(FAILED(hr)){
		printf("ERROR: Couldn't config the stream!\n");
		stopDevice();
		return hr;
	}

	//NOW LETS DEAL WITH GETTING THE RIGHT SIZE
	hr = pStreamConfig->GetFormat(&pAmMediaType);
	if(FAILED(hr)){
		printf("ERROR: Couldn't getFormat for pAmMediaType!\n");
		stopDevice();
		return hr;
	}

	if (!setSizeAndSubtype()) return E_FAIL;	// propagate a failing HRESULT if the format could not be applied

	VIDEOINFOHEADER *pVih =  reinterpret_cast<VIDEOINFOHEADER*>(pAmMediaType->pbFormat);
	cfg->cam_width	=  HEADER(pVih)->biWidth;
	cfg->cam_height	=  HEADER(pVih)->biHeight;
	cfg->cam_fps = ((int)floor(100000000.0f/(float)pVih->AvgTimePerFrame + 0.5f))/10.0f;

	long bufferSize = cfg->cam_width*cfg->cam_height*3;
	sgCallback->setupBuffer(bufferSize);

	// Create the Sample Grabber.
	hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,IID_IBaseFilter, (void**)&pGrabberFilter);
	if (FAILED(hr)){
		printf("Could not Create Sample Grabber - CoCreateInstance()\n");
		stopDevice();
		return hr;
	}

	hr = pGraphBuilder->AddFilter(pGrabberFilter, L"Sample Grabber");
	if (FAILED(hr)){
		printf("Could not add Sample Grabber - AddFilter()\n");
		stopDevice();
		return hr;
	}

	hr = pGrabberFilter->QueryInterface(IID_ISampleGrabber, (void**)&pSampleGrabber);
	if (FAILED(hr)){
		printf("ERROR: Could not query SampleGrabber\n");
		stopDevice();
		return hr;
	}

	//Get video properties from the stream's mediatype and apply to the grabber (otherwise we don't get an RGB image)
	AM_MEDIA_TYPE mt;
	ZeroMemory(&mt,sizeof(AM_MEDIA_TYPE));
	mt.majortype 	= MEDIATYPE_Video;
	mt.subtype 		= MEDIASUBTYPE_RGB24;
	//mt.subtype 		= MEDIASUBTYPE_YUY2;

	mt.formattype 	= FORMAT_VideoInfo;

	//Set Params - One Shot should be false unless you want to capture just one buffer
	hr = pSampleGrabber->SetMediaType(&mt);
	hr = pSampleGrabber->SetOneShot(FALSE);
	hr = pSampleGrabber->SetBufferSamples(FALSE);

	//Tell the grabber to use our callback function - 0 is for SampleCB and 1 for BufferCB
	//We use SampleCB
	hr = pSampleGrabber->SetCallback(sgCallback, 0);
	if (FAILED(hr)) {
		printf("ERROR: problem setting callback\n");
		stopDevice();
		return hr;
	} /*else {
		printf("SETUP: Capture callback set\n");
	  }*/


	//lets try freeing our stream conf here too
	//this will fail if the device is already running
	/* if(pStreamConfig) {
		pStreamConfig->Release();
		pStreamConfig = NULL;
	} else {
		printf("ERROR: connecting device - prehaps it is already being used?\n");
		stopDevice();
		return S_FALSE;
	}*/

	//used to give the video stream somewhere to go to.
	hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)(&pDestFilter));
	if (FAILED(hr)){
		printf("ERROR: Could not create filter - NullRenderer\n");
		stopDevice();
		return hr;
	}

	hr = pGraphBuilder->AddFilter(pDestFilter, L"NullRenderer");
	if (FAILED(hr)){
		printf("ERROR: Could not add filter - NullRenderer\n");
		stopDevice();
		return hr;
	}

	//This is where the stream gets put together.
	hr = pCaptureGraphBuilder->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, pInputFilter, pGrabberFilter, pDestFilter);

	if (FAILED(hr)){
		printf("ERROR: Could not connect pins - RenderStream()\n");
		stopDevice();
		return hr;
	}


	//EXP - lets try setting the sync source to null - and make it run as fast as possible
	{
		IMediaFilter *pMediaFilter = 0;
		hr = pGraphBuilder->QueryInterface(IID_IMediaFilter, (void**)&pMediaFilter);
		if (FAILED(hr)){
			printf("ERROR: Could not get IID_IMediaFilter interface\n");
		}else{
			pMediaFilter->SetSyncSource(NULL);
			pMediaFilter->Release();
		}
	}

	//printf("SETUP: Device is setup and ready to capture.\n\n");

	//if we release this then we don't have access to the settings
	//we release our video input filter but then reconnect with it
	//each time we need to use it
	//pInputFilter->Release();
	//pInputFilter = NULL;

	pGrabberFilter->Release();
	pGrabberFilter = NULL;

	pDestFilter->Release();
	pDestFilter = NULL;

	return S_OK;
}
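setupDevice() reports problems through its HRESULT, so callers are expected to test it with FAILED() before starting the graph. A minimal sketch; how the videoInputCamera instance is created and that cfg->device was filled in beforehand are assumptions, and the header name is hypothetical:

#include <cstdio>
#include <windows.h>
#include "videoInputCamera.h"  // hypothetical header for the class shown above

static bool startCamera(videoInputCamera *cam)
{
	HRESULT hr = cam->setupDevice();  // builds the capture graph for cfg->device
	if (FAILED(hr)) {
		printf("camera setup failed, hr=0x%08lx\n", static_cast<unsigned long>(hr));
		return false;
	}
	return true;
}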