Example no. 1
static void changeMode(unsigned int newMode) {
    printf("Previous device mode: ");
    printMode();

    cfgWriteU8(cfgMode, newMode);

    printf("New device mode: ");
    printMode();
}
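This snippet assumes a printMode() helper next to the cfgWriteU8/cfgMode config API, but the helper itself is not shown. A minimal, self-contained sketch of what it could look like; the mode labels and the stored-value stand-in below are illustrative, not from the original code:

#include <stdio.h>

/* Stand-in for reading the stored mode back (the snippet only shows the
 * cfgWriteU8 write side); illustrative only. */
static unsigned int storedMode = 0;

static void printMode(void) {
    static const char *names[] = { "idle", "run", "error" };
    if (storedMode < sizeof(names) / sizeof(names[0]))
        printf("%s\n", names[storedMode]);
    else
        printf("unknown (%u)\n", storedMode);
}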
Example no. 2
/***********************************************************
* 
* updateLCD
* 
***********************************************************/
void TTUI::updateLCD()
{
	
	// Only update the LCD every 300 ms.
	int now = millis() / 100;  // time in 100 ms ticks
	int elapsed = now - activeRefreshTime;

	if(elapsed > 3)  // more than 300 ms since the last refresh
	{
		activeRefreshTime = now;

		if(trapActive_)
		{
			if(!batteryPower()) // USB connected
			{
				clear();
				printMode(0);

				char printBuffer[9];
				triggers[currentTrigger]->getActiveMessage(printBuffer);
				setCursor(0,1);
				print(printBuffer);
					
			}
		}
		else // trap not active: waiting for UI input
		{
			if(activeMenu == UP_MENU || activeMenu == DOWN_MENU)
			{
				clear();
				printSelect(0);
				printInc(1,0);
			}
			else if(activeMenu == MODE_MENU || activeMenu == OPTION_MENU)
			{
				clear();
			 	printMode(0);
			 	printSelect(1);
			}
			else if(activeMenu == START_MESSAGE)
			{
				clear();
				print("TrigTrap");
				setCursor(0,1);
				print("v");
				print(FIRMWARE_VERSION);
			}
		}
	}
	
}
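updateLCD() rate-limits refreshes by differencing millis() ticks. The same idiom in isolation, assuming only a monotonic millis() source:

#include <stdint.h>

/* Rate-limit idiom from updateLCD(): act at most once per interval.
 * millis() is assumed to return a monotonic millisecond count. */
extern uint32_t millis(void);

static int shouldRefresh(uint32_t interval_ms) {
    static uint32_t last = 0;
    uint32_t now = millis();
    if (now - last >= interval_ms) {  /* unsigned math tolerates wraparound */
        last = now;
        return 1;
    }
    return 0;
}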
  openni::VideoMode KinectInterfacePrimesense::findMaxResYFPSMode(
    const openni::SensorInfo& sensor, const int required_format) const {
      const openni::Array<openni::VideoMode>& modes = 
        sensor.getSupportedVideoModes();
      int ibest = -1;
#if defined(DEBUG) || defined(_DEBUG)
      std::cout << "Supported Modes:" << std::endl;
#endif
      for (int i = 0; i < modes.getSize(); i++) {
        const openni::VideoMode& mode = modes[i];
#if defined(DEBUG) || defined(_DEBUG)
        printMode(mode);
#endif
        PixelFormat format = mode.getPixelFormat();
        int res_y = mode.getResolutionY();
        int fps = mode.getFps();
        if (format == required_format) {
          if (ibest == -1 || res_y > modes[ibest].getResolutionY()) {
            ibest = i;
          } else if (res_y == modes[ibest].getResolutionY() && 
            fps > modes[ibest].getFps()){
              ibest = i;
          }
        }
      }
      if (ibest == -1) {
        shutdownOpenNIStatic();
        throw std::runtime_error("KinectInterfacePrimesense::findMaxResYFPSMode() - "
          "ERROR: Couldn't find a good format!");
      }
      return modes[ibest];
  }
  openni::VideoMode KinectInterfacePrimesense::findMatchingMode(
    const openni::SensorInfo& sensor, const jtil::math::Int2& dim, 
    const int fps, const int format) const {
      const openni::Array<openni::VideoMode>& modes = 
        sensor.getSupportedVideoModes();
      int ibest = -1;
#if defined(DEBUG) || defined(_DEBUG)
      std::cout << "Supported Modes:" << std::endl;
#endif
      for (int i = 0; i < modes.getSize(); i++) {
        const openni::VideoMode& mode = modes[i];
#if defined(DEBUG) || defined(_DEBUG)
        printMode(mode);
#endif
        PixelFormat cur_format = mode.getPixelFormat();
        int res_x = mode.getResolutionX();
        int res_y = mode.getResolutionY();
        int cur_fps = mode.getFps();
        if (cur_format == format && res_x == dim[0] && res_y == dim[1] &&
          cur_fps == fps) {
            ibest = i;
        }
      }
      if (ibest == -1) {
        shutdownOpenNIStatic();
        throw std::runtime_error("KinectInterfacePrimesense::findMatchingMode() - "
          "ERROR: Couldn't find a matching format!");
      }
      return modes[ibest];
  }
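findMaxResYFPSMode() prefers the tallest resolution and breaks ties on frame rate. The same preference as a standalone predicate, written against a minimal stand-in type rather than openni::VideoMode:

// "Max resolution, then max fps" preference from findMaxResYFPSMode().
struct ModeDesc { int res_y; int fps; };

static bool betterMode(const ModeDesc& a, const ModeDesc& b) {
    if (a.res_y != b.res_y) return a.res_y > b.res_y;  // taller wins
    return a.fps > b.fps;                              // tie-break on fps
}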
Example no. 5
void displayBuffer::setMode(displayMode_t m)
{
  bool print = (m != mode);
  mode = m;
  if(print)
  {
    printMode();
  }
}
Example no. 6
uint8_t SetCoProcMode(uint8_t pin, uint8_t mode) {
	if (GetChannelMode(pin) == mode)
		return true;
	println_E("Setting Mode: ");print_E(" on: ");p_int_E(pin);printMode(mode,ERROR_PRINT);
	//getBcsIoDataTable(pin)->PIN.currentChannelMode = mode;
	SetChannelModeDataTable(pin,mode);
	down[pin].changeMode = true;
	return false;
}
void printModes(){
	int i;
	println_I("Modes");
	for(i=0;i<GetNumberOfIOChannels();i++){
		println_I("\t# ");p_int_I(i);
		print_I("\tCurrent ");printMode(getBcsIoDataTable()[i].PIN.currentChannelMode,INFO_PRINT);
		//print_I("\tPrevious ");printMode(getBcsIoDataTable()[i].PIN.previousChannelMode,INFO_PRINT);
	}
}
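Several of these snippets call a two-argument printMode(mode, printLevel). A plausible self-contained shape for it; only the signature and the INFO_PRINT/ERROR_PRINT levels are taken from the call sites, the mode numbering and names below are placeholders:

#include <stdint.h>
#include <stdio.h>

enum { INFO_PRINT, ERROR_PRINT };  /* print levels seen at the call sites */

/* Placeholder mode table; the real channel-mode values are not shown in
 * these results. */
static void printMode(uint8_t mode, int level) {
    static const char *names[] = { "HIGH_IMPEDANCE", "IS_DI", "IS_DO",
                                   "IS_ANALOG_IN", "IS_PWM", "IS_SERVO" };
    const char *name = (mode < 6) ? names[mode] : "UNKNOWN";
    fprintf(level == ERROR_PRINT ? stderr : stdout, "%s", name);
}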
Example no. 8
/***********************************************************
* 
* bttnMode
* 
***********************************************************/
  void TTUI::bttnMode()  
  {

	if(activeMenu == MODE_MENU || activeMenu == OPTION_MENU) // only advance when it's the second+ press of this button
	{
    	currentTrigger+=1; // mode button pressed: advance to the next mode option
		currentTrigger = currentTrigger % (NUM_OF_SENSORS+1); // plus 1 for the system menu
	}
	
	activeMenu = MODE_MENU;
	
	clear();
	printMode(0);
	printSelect(1);

  }
  void KinectInterfacePrimesense::initIR(const bool start) {
    bool has_ir = start && device_->hasSensor(openni::SENSOR_IR);
    if (!has_ir) {
      streams_[IR_STREAM] = NULL;
      frames_[IR_STREAM] = NULL;
      return;
    }
    streams_[IR_STREAM] = new openni::VideoStream();
    checkOpenNIRC(streams_[IR_STREAM]->create(*device_, openni::SENSOR_IR),
      "Failed to connect to device IR channel");

    // Find a resolution mode to match the depth mode
    const openni::SensorInfo& ir_sensor_info = 
      streams_[IR_STREAM]->getSensorInfo(); 
#if defined(DEBUG) || defined(_DEBUG)
    std::cout << "IR_STREAM ";
#endif
    openni::VideoMode mode = findMatchingMode(ir_sensor_info, depth_dim_,
      depth_fps_setting_, PixelFormat::PIXEL_FORMAT_GRAY16);
    std::cout << "Setting IR_STREAM mode: ";
    printMode(mode);
    checkOpenNIRC(streams_[IR_STREAM]->setVideoMode(mode), 
      "Failed to set ir video mode");
    ir_fps_setting_ = mode.getFps();

    // Now retrieve the current mode to make sure everything went OK.
    openni::VideoMode ir_mode = streams_[IR_STREAM]->getVideoMode();
    if (ir_mode.getPixelFormat() != PixelFormat::PIXEL_FORMAT_GRAY16) {
      throw std::runtime_error("KinectInterfacePrimesense::init() - ERROR: "
        "IR_STREAM stream is not a 24 bit rgb format!");
    }
    ir_dim_.set(ir_mode.getResolutionX(), ir_mode.getResolutionY());

    if (start) {
      checkOpenNIRC(streams_[IR_STREAM]->start(), "Failed to start IR stream");
      if (!streams_[IR_STREAM]->isValid()) {
        shutdownOpenNIStatic();
        throw std::runtime_error("KinectInterfacePrimesense::init() - ERROR: "
          "RGB_IR_STREAM stream is not valid");
      }
    }

    frames_[IR_STREAM] = new openni::VideoFrameRef(); 
  }
  void KinectInterfacePrimesense::initDepth() {
    streams_[DEPTH_STREAM] = new openni::VideoStream();
    checkOpenNIRC(streams_[DEPTH_STREAM]->create(*device_, openni::SENSOR_DEPTH),
      "Failed to connect to device depth channel");

    // Find the supported mode with the highest depth resolution.  If more
    // than one format has the max resolution, choose the highest fps mode
    const openni::SensorInfo& depth_sensor_info = 
      streams_[DEPTH_STREAM]->getSensorInfo();
#if defined(DEBUG) || defined(_DEBUG)
    std::cout << "Depth ";
#endif
    openni::VideoMode mode = findMaxResYFPSMode(depth_sensor_info,
      PixelFormat::PIXEL_FORMAT_DEPTH_1_MM);
    std::cout << "Setting Depth mode: ";
    printMode(mode);
    checkOpenNIRC(streams_[DEPTH_STREAM]->setVideoMode(mode), 
      "Failed to set depth video mode");
    depth_fps_setting_ = mode.getFps();

    streams_[DEPTH_STREAM]->setProperty(XN_STREAM_PROPERTY_CLOSE_RANGE, true);

    // Now retrieve the current mode to make sure everything went OK.
    openni::VideoMode depth_mode = streams_[DEPTH_STREAM]->getVideoMode();
    if (depth_mode.getPixelFormat() != PixelFormat::PIXEL_FORMAT_DEPTH_1_MM) {
      throw std::runtime_error("KinectInterfacePrimesense::init() - ERROR: "
        "Depth stream is not a 16 bit grayscale format!");
    }
    depth_format_100um_ = false;
    depth_dim_.set(depth_mode.getResolutionX(), depth_mode.getResolutionY());

    checkOpenNIRC(streams_[DEPTH_STREAM]->start(), "Failed to start depth stream");
    if (!streams_[DEPTH_STREAM]->isValid()) {
      shutdownOpenNIStatic();
      throw std::runtime_error("KinectInterfacePrimesense::init() - ERROR: "
        "Depth stream is not valid");
    }

    frames_[DEPTH_STREAM] = new openni::VideoFrameRef();

    if (depth_format_100um_) {
      depth_1mm_ = new uint16_t[depth_dim_[0] * depth_dim_[1]];
    }
  }
  void KinectInterfacePrimesense::initRGB(const bool start) {
    streams_[RGB_STREAM] = new openni::VideoStream();
    checkOpenNIRC(streams_[RGB_STREAM]->create(*device_, openni::SENSOR_COLOR),
      "Failed to connect to device rgb channel");

    // Find a resolution mode to match the depth mode
    const openni::SensorInfo& rgb_sensor_info = 
      streams_[RGB_STREAM]->getSensorInfo();
#if defined(DEBUG) || defined(_DEBUG)
    std::cout << "RGB_STREAM ";
#endif
    openni::VideoMode mode = findMatchingMode(rgb_sensor_info, depth_dim_,
      depth_fps_setting_, PixelFormat::PIXEL_FORMAT_RGB888);
    std::cout << "Setting RGB_STREAM mode: ";
    printMode(mode);
    checkOpenNIRC(streams_[RGB_STREAM]->setVideoMode(mode), 
      "Failed to set rgb video mode");
    rgb_fps_setting_ = mode.getFps();

    // Now retrieve the current mode to make sure everything went OK.
    openni::VideoMode rgb_mode = streams_[RGB_STREAM]->getVideoMode();
    if (rgb_mode.getPixelFormat() != PixelFormat::PIXEL_FORMAT_RGB888) {
      throw std::runtime_error("KinectInterfacePrimesense::init() - ERROR: "
        "RGB_STREAM stream is not a 24 bit rgb format!");
    }
    rgb_dim_.set(rgb_mode.getResolutionX(), rgb_mode.getResolutionY());

    if (start) {
      checkOpenNIRC(streams_[RGB_STREAM]->start(), "Failed to start rgb stream");
      if (!streams_[RGB_STREAM]->isValid()) {
        shutdownOpenNIStatic();
        throw std::runtime_error("KinectInterfacePrimesense::init() - ERROR: "
          "RGB_IR_STREAM stream is not valid");
      }
    }

    frames_[RGB_STREAM] = new openni::VideoFrameRef(); 
  }
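initIR/initDepth/initRGB funnel every OpenNI call through checkOpenNIRC(), which is not shown. A minimal sketch of such a guard; openni::Status, openni::STATUS_OK, and openni::OpenNI::getExtendedError() are real OpenNI2 API, the rest is assumption:

#include <stdexcept>
#include <string>
#include <OpenNI.h>

// Sketch: turn a non-OK OpenNI status into an exception with context.
static void checkOpenNIRC(openni::Status rc, const char* msg) {
    if (rc != openni::STATUS_OK) {
        throw std::runtime_error(std::string(msg) + ": " +
                                 openni::OpenNI::getExtendedError());
    }
}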
Example no. 12
/***********************************************************
* 
* bttnOption
* 
***********************************************************/
  void TTUI::bttnOption()
  {
	if(activeMenu == MODE_MENU || activeMenu == OPTION_MENU) // only advance when it's the second+ press of this button
	{	
		if(currentTrigger < NUM_OF_SENSORS)
		{
			triggers[currentTrigger]->incSelect(); //set sensor to next select mode
		}
		else //system Menu
		{
			incSystemOption++;
			incSystemOption = incSystemOption % 3;
		}
	}
	
	activeMenu = OPTION_MENU;
	
	clear();
	printMode(0);
	printSelect(1);
	
  }
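bttnMode() and bttnOption() both cycle through their options with the modulo operator. The wrap-around idiom in isolation:

// Wrap-around idiom from bttnMode()/bttnOption(): advance an index and
// fold it back into [0, count) so repeated presses cycle forever.
static unsigned advanceWrapped(unsigned idx, unsigned count) {
    return (idx + 1) % count;  // count is NUM_OF_SENSORS + 1 in the snippets
}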
Example no. 13
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    ARdouble p[16];
    ARdouble m[16];

#ifdef ARDOUBLE_IS_FLOAT
    GLdouble p0[16];
    GLdouble m0[16];
#endif
    int                 i, j, k;
    GLfloat             w, bw, bh, vertices[6][2];
    GLubyte             pixels[300];
    char                text[256];
    GLdouble            winX, winY, winZ;
    int                 showMErr[CHECK_ID_MULTIMARKERS_MAX];
    GLdouble            MX[CHECK_ID_MULTIMARKERS_MAX];
    GLdouble            MY[CHECK_ID_MULTIMARKERS_MAX];
    int                 pattDetectMode;
    AR_MATRIX_CODE_TYPE matrixCodeType;


    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);     // Clear the buffers for new frame.

    arglPixelBufferDataUpload(gArglSettings, gARTImage);
    arglDispImage(gArglSettings);

    if (gMultiConfigCount)
    {
        arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
        glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(p);
#else
        glLoadMatrixd(p);
#endif
        glMatrixMode(GL_MODELVIEW);
        glEnable(GL_DEPTH_TEST);

        // If we have multi-configs, show their origin onscreen.
        for (k = 0; k < gMultiConfigCount; k++)
        {
            showMErr[k] = FALSE;
            if (gMultiConfigs[k]->prevF != 0)
            {
                arglCameraViewRH((const ARdouble (*)[4])gMultiConfigs[k]->trans, m, 1.0);
#ifdef ARDOUBLE_IS_FLOAT
                glLoadMatrixf(m);
#else
                glLoadMatrixd(m);
#endif
                drawAxes();
#ifdef ARDOUBLE_IS_FLOAT
                for (i = 0; i < 16; i++)
                    m0[i] = (GLdouble)m[i];

                for (i = 0; i < 16; i++)
                    p0[i] = (GLdouble)p[i];

                if (gluProject(0, 0, 0, m0, p0, gViewport, &winX, &winY, &winZ) == GL_TRUE)
#else
                if (gluProject(0, 0, 0, m, p, gViewport, &winX, &winY, &winZ) == GL_TRUE)
#endif
                {
                    showMErr[k] = TRUE;
                    MX[k]       = winX; MY[k] = winY;
                }
            }
        } // for k
    }

    // Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    arGetPatternDetectionMode(gARHandle, &pattDetectMode);
    arGetMatrixCodeType(gARHandle, &matrixCodeType);

    // For all markers, draw onscreen position.
    // Colour based on cutoffPhase.
    glLoadIdentity();
    glVertexPointer(2, GL_FLOAT, 0, vertices);
    glEnableClientState(GL_VERTEX_ARRAY);
    glLineWidth(2.0f);

    for (j = 0; j < gARHandle->marker_num; j++)
    {
        glColor3ubv(cutoffPhaseColours[gARHandle->markerInfo[j].cutoffPhase].colour);

        for (i = 0; i < 5; i++)
        {
            int dir = gARHandle->markerInfo[j].dir;
            vertices[i][0] = (float)gARHandle->markerInfo[j].vertex[(i + 4 - dir) % 4][0] * (float)windowWidth / (float)gARHandle->xsize;
            vertices[i][1] = ((float)gARHandle->ysize - (float)gARHandle->markerInfo[j].vertex[(i + 4 - dir) % 4][1]) * (float)windowHeight / (float)gARHandle->ysize;
        }

        vertices[i][0] = (float)gARHandle->markerInfo[j].pos[0] * (float)windowWidth / (float)gARHandle->xsize;
        vertices[i][1] = ((float)gARHandle->ysize - (float)gARHandle->markerInfo[j].pos[1]) * (float)windowHeight / (float)gARHandle->ysize;
        glDrawArrays(GL_LINE_STRIP, 0, 6);
        // For markers that have been identified, draw the ID number.
        if (gARHandle->markerInfo[j].id >= 0)
        {
            glColor3ub(255, 0, 0);
            if (matrixCodeType == AR_MATRIX_CODE_GLOBAL_ID && (pattDetectMode == AR_MATRIX_CODE_DETECTION || pattDetectMode == AR_TEMPLATE_MATCHING_COLOR_AND_MATRIX || pattDetectMode == AR_TEMPLATE_MATCHING_MONO_AND_MATRIX))
                snprintf(text, sizeof(text), "%llu (err=%d)", gARHandle->markerInfo[j].globalID, gARHandle->markerInfo[j].errorCorrected);
            else
                snprintf(text, sizeof(text), "%d", gARHandle->markerInfo[j].id);

            print(text, (float)gARHandle->markerInfo[j].pos[0] * (float)windowWidth / (float)gARHandle->xsize, ((float)gARHandle->ysize - (float)gARHandle->markerInfo[j].pos[1]) * (float)windowHeight / (float)gARHandle->ysize, 0, 0);
        }
    }

    glDisableClientState(GL_VERTEX_ARRAY);

    // For matrix mode, draw the pattern image of the largest marker.
    if (pattDetectMode == AR_MATRIX_CODE_DETECTION || pattDetectMode == AR_TEMPLATE_MATCHING_COLOR_AND_MATRIX || pattDetectMode == AR_TEMPLATE_MATCHING_MONO_AND_MATRIX)
    {
        int area = 0, biggestMarker = -1;

        for (j = 0; j < gARHandle->marker_num; j++)
            if (gARHandle->markerInfo[j].area > area)
            {
                area          = gARHandle->markerInfo[j].area;
                biggestMarker = j;
            }

        if (area >= AR_AREA_MIN)
        {
            int      imageProcMode;
            ARdouble pattRatio;
            ARUint8  ext_patt[AR_PATT_SIZE2_MAX * AR_PATT_SIZE2_MAX * 3]; // Holds unwarped pattern extracted from image.
            int      size;
            int      zoom = 4;
            ARdouble vertexUpright[4][2];

            // Reorder vertices based on dir.
            for (i = 0; i < 4; i++)
            {
                int dir = gARHandle->markerInfo[biggestMarker].dir;
                vertexUpright[i][0] = gARHandle->markerInfo[biggestMarker].vertex[(i + 4 - dir) % 4][0];
                vertexUpright[i][1] = gARHandle->markerInfo[biggestMarker].vertex[(i + 4 - dir) % 4][1];
            }

            arGetImageProcMode(gARHandle, &imageProcMode);
            arGetPattRatio(gARHandle, &pattRatio);
            if (matrixCodeType == AR_MATRIX_CODE_GLOBAL_ID)
            {
                size = 14;
                arPattGetImage2(imageProcMode, AR_MATRIX_CODE_DETECTION, size, size * AR_PATT_SAMPLE_FACTOR2,
                                gARTImage, gARHandle->xsize, gARHandle->ysize, gARHandle->arPixelFormat, &gCparamLT->paramLTf, vertexUpright, (ARdouble)14 / (ARdouble)(14 + 2), ext_patt);
            }
            else
            {
                size = matrixCodeType & AR_MATRIX_CODE_TYPE_SIZE_MASK;
                arPattGetImage2(imageProcMode, AR_MATRIX_CODE_DETECTION, size, size * AR_PATT_SAMPLE_FACTOR2,
                                gARTImage, gARHandle->xsize, gARHandle->ysize, gARHandle->arPixelFormat, &gCparamLT->paramLTf, vertexUpright, pattRatio, ext_patt);
            }

            glRasterPos2f((float)(windowWidth - size * zoom) - 4.0f, (float)(size * zoom) + 4.0f);
            glPixelZoom((float)zoom, (float)-zoom);
            glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
            glDrawPixels(size, size, GL_LUMINANCE, GL_UNSIGNED_BYTE, ext_patt);
            glPixelZoom(1.0f, 1.0f);
        }
    }


    // Draw error value for multimarker pose.
    for (k = 0; k < gMultiConfigCount; k++)
    {
        if (showMErr[k])
        {
            snprintf(text, sizeof(text), "err=%0.3f", gMultiErrs[k]);
            print(text, MX[k], MY[k], 0, 0);
        }
    }

    //
    // Draw help text and mode.
    //
    glLoadIdentity();
    if (gShowMode)
    {
        printMode();
    }

    if (gShowHelp)
    {
        if (gShowHelp == 1)
        {
            printHelpKeys();
        }
        else if (gShowHelp == 2)
        {
            bw = 0.0f;

            for (i = 0; i < AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT; i++)
            {
                w = (float)glutBitmapLength(GLUT_BITMAP_HELVETICA_10, (unsigned char*)arMarkerInfoCutoffPhaseDescriptions[cutoffPhaseColours[i].cutoffPhase]);
                if (w > bw)
                    bw = w;
            }

            bw += 12.0f; // Space for color block.
            bh  = AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT * 10.0f /* character height */ + (AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT - 1) * 2.0f /* line spacing */;
            drawBackground(bw, bh, 2.0f, 2.0f);

            // Draw the colour block and text, line by line.
            for (i = 0; i < AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT; i++)
            {
                for (j = 0; j < 300; j += 3)
                {
                    pixels[j]     = cutoffPhaseColours[i].colour[0];
                    pixels[j + 1] = cutoffPhaseColours[i].colour[1];
                    pixels[j + 2] = cutoffPhaseColours[i].colour[2];
                }

                glRasterPos2f(2.0f, (AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT - 1 - i) * 12.0f + 2.0f);
                glPixelZoom(1.0f, 1.0f);
                glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
                glDrawPixels(10, 10, GL_RGB, GL_UNSIGNED_BYTE, pixels);
                print(arMarkerInfoCutoffPhaseDescriptions[cutoffPhaseColours[i].cutoffPhase], 14.0f, (AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT - 1 - i) * 12.0f + 2.0f, 0, 0);
            }
        }
    }

    glutSwapBuffers();
}
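The (i + 4 - dir) % 4 indexing above rotates the detected corner list so that index 0 always refers to the same physical corner regardless of marker orientation. A tiny illustration of the mapping:

#include <cstdio>

// With dir = 1, the reordering below yields source indices {3, 0, 1, 2}:
// the corner list is rotated by the detected orientation.
int main() {
    const int dir = 1;  // example orientation value from the detector
    for (int i = 0; i < 4; ++i)
        std::printf("vertexUpright[%d] = vertex[%d]\n", i, (i + 4 - dir) % 4);
    return 0;
}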
Example no. 14
int main(int argc, char* argv[])
{
	std::string url = "http://127.0.0.1:8080";
	std::string username;
	std::string password;
	int c=0;
	while ((c = getopt (argc, argv, "hu:p:")) != -1)
	{
		switch (c)
		{
			case 'u':	username = optarg; break;
			case 'p':	password = optarg; break;
			case 'h':
				std::cout << argv[0] << " [-u username] [-p password] url" << std::endl;
				exit(0);
			break;
		}
	}
	if (optind<argc)
	{
		url = argv[optind];
	}		
	
	std::cout << "Connecting to " << url << std::endl;

	
	// create connection to devicemgmt.wsdl server
	DeviceBindingProxy deviceProxy(url.c_str());
	
	// call Device::GetDeviceInformation
	std::cout << "=>Device::GetDeviceInformation" << std::endl;	
	_tds__GetDeviceInformation         tds__GetDeviceInformation;
	_tds__GetDeviceInformationResponse tds__GetDeviceInformationResponse;
	addSecurity(deviceProxy.soap, username, password);	
	if (deviceProxy.GetDeviceInformation(&tds__GetDeviceInformation, &tds__GetDeviceInformationResponse) == SOAP_OK)
	{
		std::cout << "\tManufacturer:" << tds__GetDeviceInformationResponse.Manufacturer << std::endl;
	}
	else
	{
		deviceProxy.soap_stream_fault(std::cerr);
	}

	// call Device::GetHostname
	std::cout << "=>Device::GetHostname" << std::endl;	
	_tds__GetHostname         tds__GetHostname;
	_tds__GetHostnameResponse tds__GetHostnameResponse;
	addSecurity(deviceProxy.soap, username, password);	
	if (deviceProxy.GetHostname(&tds__GetHostname, &tds__GetHostnameResponse) == SOAP_OK)
	{
		std::cout << "\tHostname:" << tds__GetHostnameResponse.HostnameInformation->Name->c_str() << std::endl;
	}
	else
	{
		deviceProxy.soap_stream_fault(std::cerr);
	}

	// call Device::GetNetworkInterfaces
	std::cout << "=>Device::GetNetworkInterfaces" << std::endl;	
	_tds__GetNetworkInterfaces         tds__GetNetworkInterfaces;
	_tds__GetNetworkInterfacesResponse tds__GetNetworkInterfacesResponse;
	addSecurity(deviceProxy.soap, username, password);	
	if (deviceProxy.GetNetworkInterfaces(&tds__GetNetworkInterfaces, &tds__GetNetworkInterfacesResponse) == SOAP_OK)
	{
		for (auto iface : tds__GetNetworkInterfacesResponse.NetworkInterfaces)
		{
			if (iface->Info != NULL)
			{
				std::cout << "\t" << iface->Info->Name->c_str() <<  " " << iface->Info->HwAddress << std::endl;			
			}
			if ( (iface->IPv4 != NULL) && (iface->IPv4->Config != NULL) )
			{
				for (auto addr : iface->IPv4->Config->Manual)
				{
					std::cout << "\tIP:" << addr->Address  << "/" << addr->PrefixLength << std::endl;					
				}
			}
		}
	}
	else
	{
		deviceProxy.soap_stream_fault(std::cerr);
	}
	
	// call Device::GetServices
	std::cout << "=>Device::GetServices" << std::endl;		
	_tds__GetServices         tds__GetServices;
	tds__GetServices.IncludeCapability = true;
	_tds__GetServicesResponse tds__GetServicesResponse;
	addSecurity(deviceProxy.soap, username, password);	
	if (deviceProxy.GetServices(&tds__GetServices, &tds__GetServicesResponse) == SOAP_OK)
	{
		for (auto service : tds__GetServicesResponse.Service)
		{
			std::cout << "\tns:" << service->Namespace << " " << service->XAddr << " Version:" << service->Version->Major << "." << service->Version->Minor << std::endl;
			if (service->Capabilities)
			{
				std::cout << "\t" << service->Capabilities->__any << std::endl;
			}
		}
	}
	
	// call Device::GetCapabilities
	std::cout << "=>Device::GetCapabilities" << std::endl;		
	_tds__GetCapabilities         tds__GetCapabilities;
	_tds__GetCapabilitiesResponse tds__GetCapabilitiesResponse;
	addSecurity(deviceProxy.soap, username, password);	
	if (deviceProxy.GetCapabilities(&tds__GetCapabilities, &tds__GetCapabilitiesResponse) == SOAP_OK)
	{
		std::unique_ptr<ImagingBindingProxy> imagingProxy;
		if ( (tds__GetCapabilitiesResponse.Capabilities->Extension != NULL) && (tds__GetCapabilitiesResponse.Capabilities->Imaging != NULL) )
		{
			std::cout << "\tImaging Url:" << tds__GetCapabilitiesResponse.Capabilities->Imaging->XAddr << std::endl;			
			imagingProxy.reset(new ImagingBindingProxy(tds__GetCapabilitiesResponse.Capabilities->Imaging->XAddr.c_str()));
		}
		std::unique_ptr<ReplayBindingProxy> replayProxy;
		if ( (tds__GetCapabilitiesResponse.Capabilities->Extension != NULL) && (tds__GetCapabilitiesResponse.Capabilities->Extension->Replay != NULL) )
		{
			std::cout << "\tReplay Url:" << tds__GetCapabilitiesResponse.Capabilities->Extension->Replay->XAddr << std::endl;			
			replayProxy.reset(new ReplayBindingProxy(tds__GetCapabilitiesResponse.Capabilities->Extension->Replay->XAddr.c_str()));
		}
		std::unique_ptr<MediaBindingProxy> mediaProxy;
		if (tds__GetCapabilitiesResponse.Capabilities->Media != NULL)
		{
			std::cout << "\tMedia Url:" << tds__GetCapabilitiesResponse.Capabilities->Media->XAddr << std::endl;			
			mediaProxy.reset(new MediaBindingProxy(tds__GetCapabilitiesResponse.Capabilities->Media->XAddr.c_str()));
		}
		std::unique_ptr<ReceiverBindingProxy> receiverProxy;
		if ( (tds__GetCapabilitiesResponse.Capabilities->Extension != NULL) && (tds__GetCapabilitiesResponse.Capabilities->Extension->Receiver != NULL) )
		{
			std::cout << "\tReceiver Url:" << tds__GetCapabilitiesResponse.Capabilities->Extension->Receiver->XAddr << std::endl;			
			receiverProxy.reset(new ReceiverBindingProxy(tds__GetCapabilitiesResponse.Capabilities->Extension->Receiver->XAddr.c_str()));
		}
		std::unique_ptr<RecordingBindingProxy> recordingProxy;
		if ( (tds__GetCapabilitiesResponse.Capabilities->Extension != NULL) && (tds__GetCapabilitiesResponse.Capabilities->Extension->Recording != NULL) )
		{
			std::cout << "\tRecording Url:" << tds__GetCapabilitiesResponse.Capabilities->Extension->Recording->XAddr << std::endl;			
			recordingProxy.reset(new RecordingBindingProxy(tds__GetCapabilitiesResponse.Capabilities->Extension->Recording->XAddr.c_str()));
		}		
		std::unique_ptr<SearchBindingProxy> searchProxy;
		if ( (tds__GetCapabilitiesResponse.Capabilities->Extension != NULL) && (tds__GetCapabilitiesResponse.Capabilities->Extension->Search != NULL) )
		{
			std::cout << "\tSearch Url:" << tds__GetCapabilitiesResponse.Capabilities->Extension->Search->XAddr << std::endl;			
			searchProxy.reset (new SearchBindingProxy(tds__GetCapabilitiesResponse.Capabilities->Extension->Search->XAddr.c_str()));
		}
		std::unique_ptr<EventBindingProxy> eventProxy;
		if (tds__GetCapabilitiesResponse.Capabilities->Events != NULL)
		{
			std::cout << "\tEvent Url:" << tds__GetCapabilitiesResponse.Capabilities->Events->XAddr << std::endl;			
			eventProxy.reset(new EventBindingProxy(tds__GetCapabilitiesResponse.Capabilities->Events->XAddr.c_str()));
		}
		
		
		if (mediaProxy.get() != NULL)
		{
			// call Media::GetVideoSources
			std::cout << "=>Media::GetVideoSources" << std::endl;				
			_trt__GetVideoSources         trt__GetVideoSources;
			_trt__GetVideoSourcesResponse trt__GetVideoSourcesResponse;			
			addSecurity(mediaProxy->soap, username, password);	
			if (mediaProxy->GetVideoSources(&trt__GetVideoSources, &trt__GetVideoSourcesResponse) == SOAP_OK)
			{		
				for (auto source : trt__GetVideoSourcesResponse.VideoSources)
				{
					std::cout << "\t" << source->token;
					if (source->Resolution)
					{
						std::cout << " " << source->Resolution->Width << "x" << source->Resolution->Height;
					}
					std::cout << std::endl;
					
					_trt__GetVideoEncoderConfiguration         trt__GetVideoEncoderConfiguration;
					trt__GetVideoEncoderConfiguration.ConfigurationToken = source->token;
					_trt__GetVideoEncoderConfigurationResponse trt__GetVideoEncoderConfigurationResponse;
					addSecurity(mediaProxy->soap, username, password);						
					if (mediaProxy->GetVideoEncoderConfiguration(&trt__GetVideoEncoderConfiguration, &trt__GetVideoEncoderConfigurationResponse) == SOAP_OK)
					{		
						std::cout << "\tEncoding:" << trt__GetVideoEncoderConfigurationResponse.Configuration->Encoding << std::endl;
						if (trt__GetVideoEncoderConfigurationResponse.Configuration->H264)
						{
							std::cout << "\tH264Profile:" << trt__GetVideoEncoderConfigurationResponse.Configuration->H264->H264Profile << std::endl;
						}
						if (trt__GetVideoEncoderConfigurationResponse.Configuration->Resolution)
						{
							std::cout << "\tResolution:" << trt__GetVideoEncoderConfigurationResponse.Configuration->Resolution->Width << "x" << trt__GetVideoEncoderConfigurationResponse.Configuration->Resolution->Height << std::endl;
						}
					}

					_trt__GetVideoEncoderConfigurationOptions         trt__GetVideoEncoderConfigurationOptions;
					trt__GetVideoEncoderConfigurationOptions.ConfigurationToken = soap_new_std__string(mediaProxy->soap);
					trt__GetVideoEncoderConfigurationOptions.ConfigurationToken->assign(source->token);
					_trt__GetVideoEncoderConfigurationOptionsResponse trt__GetVideoEncoderConfigurationOptionsResponse;
					addSecurity(mediaProxy->soap, username, password);						
					if (mediaProxy->GetVideoEncoderConfigurationOptions(&trt__GetVideoEncoderConfigurationOptions, &trt__GetVideoEncoderConfigurationOptionsResponse) == SOAP_OK)
					{	
						if (trt__GetVideoEncoderConfigurationOptionsResponse.Options->H264)
						{
							for (auto res : trt__GetVideoEncoderConfigurationOptionsResponse.Options->H264->ResolutionsAvailable)
							{
								std::cout << "\tResolution:" << res->Width << "x" << res->Height << std::endl;
							}
						}
					}
					
					if (imagingProxy.get() != NULL)
					{
						_timg__GetImagingSettings timg__GetImagingSettings;
						timg__GetImagingSettings.VideoSourceToken = source->token;
						_timg__GetImagingSettingsResponse timg__GetImagingSettingsResponse;
						addSecurity(imagingProxy->soap, username, password);						
						if (imagingProxy->GetImagingSettings(&timg__GetImagingSettings, &timg__GetImagingSettingsResponse) == SOAP_OK)
						{
							std::cout << "\tBrightness  :" << printPtr (timg__GetImagingSettingsResponse.ImagingSettings->Brightness)            << std::endl;
							std::cout << "\tContrast    :" << printPtr (timg__GetImagingSettingsResponse.ImagingSettings->Contrast)              << std::endl;
							std::cout << "\tSaturation  :" << printPtr (timg__GetImagingSettingsResponse.ImagingSettings->ColorSaturation)       << std::endl;
							std::cout << "\tSharpness   :" << printPtr (timg__GetImagingSettingsResponse.ImagingSettings->Sharpness)             << std::endl;
							std::cout << "\tBacklight   :" << printMode(timg__GetImagingSettingsResponse.ImagingSettings->BacklightCompensation) << std::endl;
							std::cout << "\tWideDynamic :" << printMode(timg__GetImagingSettingsResponse.ImagingSettings->WideDynamicRange) << std::endl;
							std::cout << "\tExposure    :" << printMode(timg__GetImagingSettingsResponse.ImagingSettings->Exposure)              << std::endl;
							if (timg__GetImagingSettingsResponse.ImagingSettings->Exposure)
								std::cout << "\t\tExposureTime :" << printPtr(timg__GetImagingSettingsResponse.ImagingSettings->Exposure->ExposureTime)          << std::endl;
							std::cout << "\tWhiteBalance:" << printMode(timg__GetImagingSettingsResponse.ImagingSettings->WhiteBalance)          << std::endl;
							
						}
						
						_timg__GetOptions timg__GetOptions;
						timg__GetOptions.VideoSourceToken = source->token;
						_timg__GetOptionsResponse timg__GetOptionsResponse;
						addSecurity(imagingProxy->soap, username, password);						
						if (imagingProxy->GetOptions(&timg__GetOptions, &timg__GetOptionsResponse) == SOAP_OK)
						{
							std::cout << "\tBrightness: " << printRangePtr(timg__GetOptionsResponse.ImagingOptions->Brightness)      << std::endl;
							std::cout << "\tContrast  : " << printRangePtr(timg__GetOptionsResponse.ImagingOptions->Contrast)        << std::endl;
							std::cout << "\tSaturation: " << printRangePtr(timg__GetOptionsResponse.ImagingOptions->ColorSaturation) << std::endl;
							std::cout << "\tSharpness : " << printRangePtr(timg__GetOptionsResponse.ImagingOptions->Sharpness)       << std::endl;
						}
						
					}
				}
			}
			
			std::cout << "=>Media::GetProfiles" << std::endl;					
			_trt__GetProfiles         trt__GetProfiles;
			_trt__GetProfilesResponse trt__GetProfilesResponse;
			addSecurity(mediaProxy->soap, username, password);								
			if (mediaProxy->GetProfiles(&trt__GetProfiles, &trt__GetProfilesResponse) == SOAP_OK)
			{		
				for (auto profile : trt__GetProfilesResponse.Profiles)
				{
					std::string token(profile->token);
					std::cout << "\tMediaProfile:" << token << std::endl;
					
					_trt__GetStreamUri         trt__GetStreamUri;
					_trt__GetStreamUriResponse trt__GetStreamUriResponse;
					trt__GetStreamUri.ProfileToken = token;
					trt__GetStreamUri.StreamSetup = soap_new_tt__StreamSetup(mediaProxy->soap);
					trt__GetStreamUri.StreamSetup->Transport = soap_new_tt__Transport(mediaProxy->soap);
					trt__GetStreamUri.StreamSetup->Transport->Protocol = tt__TransportProtocol__RTSP;
					addSecurity(mediaProxy->soap, username, password);						
					if (mediaProxy->GetStreamUri(&trt__GetStreamUri, &trt__GetStreamUriResponse) == SOAP_OK)
					{
						std::cout << "\tMediaUri:" << trt__GetStreamUriResponse.MediaUri->Uri << std::endl;
					}
				}
			}
		}	
		
		if (recordingProxy.get() != NULL)
		{
			std::cout << "=>Recording::GetRecordings" << std::endl;											
			_trc__GetRecordings         trc__GetRecordings;
			_trc__GetRecordingsResponse trc__GetRecordingsResponse;
			addSecurity(recordingProxy->soap, username, password);						
			if (recordingProxy->GetRecordings(&trc__GetRecordings, &trc__GetRecordingsResponse) == SOAP_OK)
			{
				for (auto recording : trc__GetRecordingsResponse.RecordingItem)
				{
					std::string token(recording->RecordingToken);
					std::cout << "\tRecording:" << token << std::endl;
					
					if (replayProxy.get() != NULL)
					{
						_trp__GetReplayUri         trp__GetReplayUri;
						_trp__GetReplayUriResponse trp__GetReplayUriResponse;
						trp__GetReplayUri.RecordingToken = token;
						addSecurity(replayProxy->soap, username, password);						
						if (replayProxy->GetReplayUri(&trp__GetReplayUri, &trp__GetReplayUriResponse) == SOAP_OK)
						{
							std::cout << "\tReplay Uri:" << trp__GetReplayUriResponse.Uri << std::endl;
						}
					}
				}
			}
			
			std::cout << "=>Recording::GetRecordingJobs" << std::endl;											
			_trc__GetRecordingJobs         trc__GetRecordingJobs;
			_trc__GetRecordingJobsResponse trc__GetRecordingJobsResponse;
			addSecurity(recordingProxy->soap, username, password);						
			if (recordingProxy->GetRecordingJobs(&trc__GetRecordingJobs, &trc__GetRecordingJobsResponse) == SOAP_OK)
			{
				for (auto job : trc__GetRecordingJobsResponse.JobItem)
				{
					std::string token(job->JobToken);
					std::cout << "\tRecordingJob:" << token << std::endl;
					
					if (job->JobConfiguration)
					{
						std::cout << "\tRecordingToken:" << job->JobConfiguration->RecordingToken << std::endl;						
						for (auto src : job->JobConfiguration->Source)
						{
							if (src->SourceToken)
							{
								std::cout << "\tSourceToken:" << src->SourceToken->Token << std::endl;						
								std::cout << "\tSourceType:" << src->SourceToken->Type << std::endl;						
							}
						}
					}
				}
			}
		}
		
		if (receiverProxy.get() != NULL)
		{
			std::cout << "=>Receiver::GetReceivers" << std::endl;								
			_trv__GetReceivers         trv__GetReceivers;
			_trv__GetReceiversResponse trv__GetReceiversResponse;
			addSecurity(receiverProxy->soap, username, password);									
			if (receiverProxy->GetReceivers(&trv__GetReceivers, &trv__GetReceiversResponse) == SOAP_OK)
			{
				for (auto receiver : trv__GetReceiversResponse.Receivers)
				{
					std::string token(receiver->Token);
					std::cout << "\tReceiver:" << token << std::endl;	

					if (receiver->Configuration)
					{
						std::cout << "\tReceiver mode:" << receiver->Configuration->Mode << " uri:" << receiver->Configuration->MediaUri << std::endl;	
					}
				}
			}
		}
		
		if (eventProxy.get() != NULL)
		{
			std::cout << "=>Event::CreatePullPoint" << std::endl;								
			_tev__CreatePullPointSubscription         tev__CreatePullPointSubscription;
			_tev__CreatePullPointSubscriptionResponse tev__CreatePullPointSubscriptionResponse;
			addSecurity(eventProxy->soap, username, password);						
			if (eventProxy->CreatePullPointSubscription(&tev__CreatePullPointSubscription, &tev__CreatePullPointSubscriptionResponse) == SOAP_OK)
			{
				std::cout << "\tPullpoint Url:" << tev__CreatePullPointSubscriptionResponse.SubscriptionReference.Address << std::endl;
				
				// pull
				PullPointSubscriptionBindingProxy pullpoint(tev__CreatePullPointSubscriptionResponse.SubscriptionReference.Address);
				soap_wsse_add_Security(pullpoint.soap);
				
				_tev__PullMessages         tev__PullMessages;
				tev__PullMessages.Timeout = "PT10S";
				tev__PullMessages.MessageLimit = 100;
				_tev__PullMessagesResponse tev__PullMessagesResponse;
				if (pullpoint.PullMessages(&tev__PullMessages, &tev__PullMessagesResponse) == SOAP_OK)
				{
					for (auto msg : tev__PullMessagesResponse.wsnt__NotificationMessage)
					{
						std::cout << "\tMessage:" << msg->Message.__any << std::endl;
					}
				}
				
				// subscribe
				NotificationConsumerBindingService consumer;
				consumer.soap->accept_timeout=5;
				consumer.bind(NULL,9090,10);
				std::thread th(&NotificationConsumerBindingService::run, &consumer, 0);
				
				NotificationProducerBindingProxy producer(tev__CreatePullPointSubscriptionResponse.SubscriptionReference.Address);
				soap_wsse_add_Security(producer.soap);
				
				_wsnt__Subscribe         wsnt__Subscribe;
				std::string url("http://127.0.0.1:9090");
				wsnt__Subscribe.ConsumerReference.Address = strcpy((char*)soap_malloc(producer.soap, url.size()+1), url.c_str());
				_wsnt__SubscribeResponse wsnt__SubscribeResponse;
				if (producer.Subscribe(&wsnt__Subscribe, &wsnt__SubscribeResponse) == SOAP_OK)
				{
				}	
				th.join();
			}
		}
	}
	
	return 0;
}
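This example calls addSecurity() before every request but does not show it. A plausible sketch using the gSOAP WS-Security plugin; soap_wsse_delete_Security and soap_wsse_add_UsernameTokenDigest are the real wsse API, while treating an empty username as "no auth" is an assumption:

#include "wsseapi.h"  // gSOAP WS-Security plugin
#include <string>

// Attach a WS-Security UsernameToken digest when credentials are given.
void addSecurity(struct soap* soap, const std::string& username,
                 const std::string& password) {
    soap_wsse_delete_Security(soap);  // drop any stale Security header
    if (!username.empty()) {
        soap_wsse_add_UsernameTokenDigest(soap, NULL, username.c_str(),
                                          password.c_str());
    }
}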
boolean pushAsyncReady( uint8_t pin){
	if(!IsAsync(pin)){
		//println_I("No asyinc on pin ");p_int_I(pin);print_I(" Mode 0x");prHEX8(GetChannelMode(pin),INFO_PRINT);
		return false; 
	}

	int32_t last;
	int32_t aval;
	int32_t db;
	//int i=pin;
	//EndCritical();

//	println_I("Checking timer \nMsTime: ");p_fl_I(tRef->MsTime);
//	print_I(" \nSetpoint: ");p_fl_I(tRef->setPoint);
//	print_I(" \nCurrentTime: ");p_fl_I(getMs());
	float timeout = RunEvery(getPinsScheduler( pin));
//	print_I(" \nTimeout: ");p_fl_I(timeout);
	if(GetChannelMode(pin)== IS_SERVO){
		aval = GetServoPos(pin);
	}else{
		aval = getDataTableCurrentValue(pin);
	}

	if(timeout !=0){
//		println_I("Time to do something");
		switch(getBcsIoDataTable(pin)->PIN.asyncDataType){
		case AUTOSAMP:
//			println_I("Auto samp ");p_int_I(pin);
			getBcsIoDataTable(pin)->PIN.asyncDataPreviousVal = aval;

			return true; 
		case NOTEQUAL:
			//
			if(aval != getBcsIoDataTable(pin)->PIN.asyncDataPreviousVal){
				//println_I("not equ ");p_int_I(pin);
				//print_I("\t");
				//p_int_I(getBcsIoDataTable(pin)->PIN.asyncDataPreviousVal);
				//print_I("\t");
				//p_int_I(getBcsIoDataTable(pin)->PIN.currentValue);
				getBcsIoDataTable(pin)->PIN.asyncDataPreviousVal = aval;
				return true; 
			}
			break;
		case DEADBAND:
			last = getBcsIoDataTable(pin)->PIN.asyncDataPreviousVal;
			db = getBcsIoDataTable(pin)->PIN.asyncDatadeadBandval;

			if(!bound(last,aval,db,db)){
				//println_I("deadband");p_int_I(pin);
				getBcsIoDataTable(pin)->PIN.asyncDataPreviousVal=aval;
				return true; 
			}
			break;
		case THRESHHOLD:
			//println_I("treshhold");p_int_I(pin);
			last = getBcsIoDataTable(pin)->PIN.asyncDataPreviousVal;
			db = getBcsIoDataTable(pin)->PIN.asyncDatathreshholdval;

			if(getBcsIoDataTable(pin)->PIN.asyncDatathreshholdedge == ASYN_RISING || getBcsIoDataTable(pin)->PIN.asyncDatathreshholdedge == ASYN_BOTH){
				if(last<= db && aval>db){
					getBcsIoDataTable(pin)->PIN.asyncDataPreviousVal = aval;
					return true; 
				}
			}
			if(getBcsIoDataTable(pin)->PIN.asyncDatathreshholdedge == ASYN_FALLING|| getBcsIoDataTable(pin)->PIN.asyncDatathreshholdedge == ASYN_BOTH){
				if(last> db && aval<=db){
					getBcsIoDataTable(pin)->PIN.asyncDataPreviousVal = aval;
					return true; 
				}
			}
			break;
		default:
			println_E("\nNo type defined!! chan: ");p_int_E(pin);
			print_E(" mode: ");printMode(pin,GetChannelMode(pin),ERROR_PRINT);
			print_E(" type: ");printAsyncType(pin,ERROR_PRINT);
			startAdvancedAsyncDefault(pin);
			break;
		}
	}else{
//		println_I("Nothing to do, returning");
	}
	return false; 
}
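The DEADBAND branch tests !bound(last, aval, db, db) but the helper is not shown. A stand-in under the assumption that bound() means "value lies within [center - below, center + above]", so the async event fires once the value escapes the deadband:

#include <stdint.h>

/* Assumed semantics of bound(center, value, below, above): true while
 * value stays inside [center - below, center + above]. */
static int bound(int32_t center, int32_t value, int32_t below, int32_t above) {
    return (value >= center - below) && (value <= center + above);
}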
Example no. 16
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    int i;
    GLdouble p[16];
    GLdouble m[16];

    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

    arglDispImage(gARTImage, &(gCparamLT->param), 1.0, gArglSettings);	// zoom = 1.0.
    gARTImage = NULL; // Invalidate image data.

    // Projection transformation.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
    glMatrixMode(GL_PROJECTION);
    glLoadMatrixd(p);
    glMatrixMode(GL_MODELVIEW);

    glEnable(GL_DEPTH_TEST);

    // Viewing transformation.
    glLoadIdentity();
    // Lighting and geometry that moves with the camera should go here.
    // (I.e. must be specified before viewing transformations.)
    //none

    for (i = 0; i < gObjectDataCount; i++) {

        if ((gObjectData[i].visible != 0) && (gObjectData[i].vrml_id >= 0)) {

            // Calculate the camera position for the object and draw it.
            // Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
            arglCameraViewRH(gObjectData[i].trans, m, VIEW_SCALEFACTOR);
            glLoadMatrixd(m);

            // All lighting and geometry to be drawn relative to the marker goes here.
            //ARLOGe("About to draw object %i\n", i);
            arVrmlDraw(gObjectData[i].vrml_id);
        }
    }

    // Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    //
    // Draw help text and mode.
    //
    if (gShowMode) {
        printMode();
    }
    if (gShowHelp) {
        if (gShowHelp == 1) {
            printHelpKeys();
        }
    }

    glutSwapBuffers();
}
Example no. 17
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    ARdouble p[16];
    ARdouble m[16];
	
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
	
	arglDispImage(gARTImage, &(gCparamLT->param), 1.0, gArglSettings);	// zoom = 1.0.
	gARTImage = NULL; // Invalidate image data.
				
	// Projection transformation.
	arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
	glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
    glLoadMatrixf(p);
#else
    glLoadMatrixd(p);
#endif
	glMatrixMode(GL_MODELVIEW);
		
	glEnable(GL_DEPTH_TEST);

	// Viewing transformation.
	glLoadIdentity();
	// Lighting and geometry that moves with the camera should go here.
	// (I.e. must be specified before viewing transformations.)
	//none
	
	if (gPatt_found) {
	
		// Calculate the camera position relative to the marker.
		// Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
		arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(m);
#else
        glLoadMatrixd(m);
#endif

		// All lighting and geometry to be drawn relative to the marker goes here.
		DrawCube();
	
	} // gPatt_found
	
	// Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    //
    // Draw help text and mode.
    //
    if (gShowMode) {
        printMode();
    }
    if (gShowHelp) {
        if (gShowHelp == 1) {
            printHelpKeys();
        }
    }
	
	glutSwapBuffers();
}
Example no. 18
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    ARdouble p[16];
    ARdouble m[16];
    double zoom;
	
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
	
    arglPixelBufferDataUpload(gArglSettings, gARTImage);
    arglDispImage(gArglSettings);
	gARTImage = NULL; // Invalidate image data.
				
	// Projection transformation.
	arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
	glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
    glLoadMatrixf(p);
#else
    glLoadMatrixd(p);
#endif
	glMatrixMode(GL_MODELVIEW);
		
	glEnable(GL_DEPTH_TEST);

	// Viewing transformation.
	glLoadIdentity();
	// Lighting and geometry that moves with the camera should go here.
	// (I.e. must be specified before viewing transformations.)
	//none
	
	if (gPatt_found) {
	
		// Calculate the camera position relative to the marker.
		// Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
		arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(m);
#else
        glLoadMatrixd(m);
#endif

		// All lighting and geometry to be drawn relative to the marker goes here.
        
        // Draw the movie frame.
        if (gMovieImage) {
            glPushMatrix();
            glRotatef(90.0f, 1.0f, 0.0f, 0.0f); // Place movie in x-z plane instead of x-y plane.
            glTranslated(-gPatt_width*0.5, 0.0f, 0.0f); // Movie origin is at lower-left of movie frame. Place this at the edge of the marker .
            zoom = 1.0/gMovieCparam.xsize * gPatt_width; // Scale the movie frame so that it is the same width as the marker.
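            // Note: zoom is computed above but never applied here; as written,
            // arglDispImageStateful() draws with the current GL state, so the
            // scaling described in the comment would still need e.g. a glScaled().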
            arglPixelBufferDataUpload(gMovieArglSettings, gMovieImage);
            arglDispImageStateful(gMovieArglSettings); // Show the movie frame.
            glPopMatrix();
        }
	
	} // gPatt_found
	
	// Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    //
    // Draw help text and mode.
    //
    if (gShowMode) {
        printMode();
    }
    if (gShowHelp) {
        if (gShowHelp == 1) {
            printHelpKeys();
        }
    }
	
	glutSwapBuffers();
}
Example no. 19
///////////////////////////////////////////////////////////////////////////////
// compareOne:  Compare results for a single test case
///////////////////////////////////////////////////////////////////////////////
void
BlendFuncTest::compareOne(BlendFuncResult& oldR, BlendFuncResult& newR) {
	BasicStats readbackStats;
	BasicStats blendStats;

	vector<BlendFuncResult::PartialResult>::const_iterator np;
	vector<BlendFuncResult::PartialResult>::const_iterator op;

	for (np = newR.results.begin(); np != newR.results.end(); ++np)
		// Find the matching case, if any, in the old results:
		for (op = oldR.results.begin(); op != oldR.results.end(); ++op)
			if (equalMode(*np, *op)) {
				readbackStats.sample(np->rbErr - op->rbErr);
				blendStats.sample(np->blErr - op->blErr);
			}

	if (readbackStats.n() == static_cast<int>(newR.results.size())
	    && newR.results.size() == oldR.results.size()
	    && readbackStats.mean() == 0.0 && blendStats.mean() == 0.0) {
		if (env->options.verbosity)
			env->log << name << ": SAME "
				<< newR.config->conciseDescription() << '\n';
	} else {
		env->log << name << ": DIFF "
			<< newR.config->conciseDescription() << '\n';

		if (readbackStats.mean() < 0.0)
			env->log << '\t' << env->options.db2Name
				<< " appears to have more accurate readback.\n";
		else if (readbackStats.mean() > 0.0)
			env->log << '\t' << env->options.db1Name
				<< " appears to have more accurate readback.\n";
		if (blendStats.mean() < 0.0)
			env->log << '\t' << env->options.db2Name
				<< " appears to have more accurate blending.\n";
		else if (blendStats.mean() > 0.0)
			env->log << '\t' << env->options.db1Name
				<< " appears to have more accurate blending.\n";
		if (readbackStats.n() != static_cast<int>(newR.results.size())){
			env->log << "\tThe following cases in "
				<< env->options.db2Name
				<< " have no matching test in "
				<< env->options.db1Name
				<< ":\n";
			for (np = newR.results.begin();
			    np != newR.results.end(); ++np) {
				for (op = oldR.results.begin();
				    op != oldR.results.end(); ++op)
					if (equalMode(*np, *op))
						break;
				if (op == oldR.results.end())
					printMode(*np);
			}
		}
		if (readbackStats.n() != static_cast<int>(oldR.results.size())){
			env->log << "\tThe following cases in "
				<< env->options.db1Name
				<< " have no matching test in "
				<< env->options.db2Name
				<< ":\n";
			for (op = oldR.results.begin();
			    op != oldR.results.end(); ++op) {
				for (np = newR.results.begin();
				    np != newR.results.end(); ++np)
					if (equalMode(*op, *np))
						break;
				if (np == newR.results.end())
					printMode(*op);
			}
		}
		if (env->options.verbosity) {
			env->log << "\tThe following cases appear in both "
				<< env->options.db1Name
				<< " and "
				<< env->options.db2Name
				<< ":\n";
			for (np = newR.results.begin();
			    np != newR.results.end(); ++np){
				for (op = oldR.results.begin();
				    op != oldR.results.end(); ++op)
					if (equalMode(*op, *np))
						break;
				if (op != oldR.results.end())
					printMode(*op);
			}
		}
	}
} // BlendFuncTest::compareOne
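compareOne() pairs up cases with equalMode(), which is not shown. A stand-in sketch: two partial results are presumably "the same case" when their source and destination blend factors match (the field names below are assumptions, not glean's real PartialResult layout):

// Stand-in for the equalMode() predicate used above.
struct PartialResult { int srcFactor; int dstFactor; };

static bool equalMode(const PartialResult& a, const PartialResult& b) {
    return a.srcFactor == b.srcFactor && a.dstFactor == b.dstFactor;
}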
BOOL setMode(BYTE pin,BYTE mode){

	ClearPinState(pin);
	println_I("Pin :");p_int_I(pin);print_I(" is mode: ");printMode(mode,INFO_PRINT);
	//BYTE pwm,dir;
	if (mode == NO_CHANGE){
		return TRUE;
	}
	switch (mode){
	case HIGH_IMPEDANCE:
		ClearPinState(pin);
		// Return here so as not to save this state to the eeprom
		return TRUE;
	case IS_UART_TX:
	case IS_UART_RX:
		if(pin == 17 || pin == 16){
			configPinMode(16,IS_UART_TX,OUTPUT,ON);
			configPinMode(17,IS_UART_RX,INPUT,ON);
			InitUART();
			return TRUE;
		}
		break;
	case IS_SPI_MOSI:
	case IS_SPI_MISO:
	case IS_SPI_SCK:
		if(pin == 0 || pin == 1||pin == 2   ){
			configPinMode(0,IS_SPI_SCK,INPUT,ON);
			configPinMode(1,IS_SPI_MISO,INPUT,ON);
			configPinMode(2,IS_SPI_MOSI,INPUT,ON);
			return TRUE;
		}
		break;
	case IS_ANALOG_IN:
		configPinMode(pin,mode,INPUT,OFF);
		if(InitADC(pin)){
			return TRUE;
		}
		break;
	case IS_PWM:
		if(InitPWM(pin)){
			return TRUE;
		}
		return FALSE;
	case IS_DC_MOTOR_VEL:
	case IS_DC_MOTOR_DIR:
		if(InitDCMotor(pin)){
			return TRUE;
		}
		return FALSE;
	case IS_SERVO:
		InitServo(pin);
		configPinMode(pin,mode,OUTPUT,OFF);
		return TRUE;
	case IS_DO:
		configPinMode(pin,mode,OUTPUT,OFF);
		return TRUE;
	case IS_DI:
	case IS_PPM_IN:
	case IS_COUNTER_OUTPUT_INT:
	case IS_COUNTER_OUTPUT_DIR:
	case IS_COUNTER_OUTPUT_HOME:
	case IS_COUNTER_INPUT_INT:
	case IS_COUNTER_INPUT_DIR:
	case IS_COUNTER_INPUT_HOME:
		configPinMode(pin,mode,INPUT,ON);
		return TRUE;
	default:
		configPinMode(pin,mode,INPUT,ON);
		return TRUE;
	}
	return FALSE;
}
Example no. 21
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
	// Select correct buffer for this context.
	glDrawBuffer(GL_BACK);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
    
    arglPixelBufferDataUpload(gArglSettings, gARTImage);
    arglDispImage(gArglSettings);
	gARTImage = NULL; // Invalidate image data.
				
    // Set up 3D mode.
	glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
	glLoadMatrixf(cameraLens);
#else
	glLoadMatrixd(cameraLens);
#endif
	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
    glEnable(GL_DEPTH_TEST);

    // Set any initial per-frame GL state you require here.
    // --->
    
    // Lighting and geometry that moves with the camera should be added here.
    // (I.e. should be specified before camera pose transform.)
    // --->
    
    VirtualEnvironmentHandleARViewDrawPreCamera();
    
    if (cameraPoseValid) {
        
#ifdef ARDOUBLE_IS_FLOAT
        glMultMatrixf(cameraPose);
#else
        glMultMatrixd(cameraPose);
#endif
        
        // All lighting and geometry to be drawn in world coordinates goes here.
        // --->
        VirtualEnvironmentHandleARViewDrawPostCamera();
    }
    
    // Set up 2D mode.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)gWindowW, 0, (GLdouble)gWindowH, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    // Add your own 2D overlays here.
    // --->
    
    VirtualEnvironmentHandleARViewDrawOverlay();

    //
    // Draw help text and mode.
    //
    if (gShowMode) {
        printMode();
    }
    if (gShowHelp) {
        if (gShowHelp == 1) {
            printHelpKeys();
        }
    }
	
	glutSwapBuffers();
}
BOOL setMode(BYTE pin,BYTE mode){
	println_I("Setting Mode: ");printMode(mode,INFO_PRINT);print_I(" on: ");p_int_I(pin);
	BYTE currentMode = GetChannelMode(pin);
	ClearCounter(pin);
	StopSPI(pin);
	clearPPM(pin);
	print_I(" \tHardware Cleared");
	switch (mode){
	case IS_SERVO:
		if(((pin < 12) && (isRegulated_0() == 0)) || ((pin >= 12) && (isRegulated_1()== 0))   ){
			print_I("|Mode is now servo");
			break;
		}else{
			if(getBrownOutDetect()){
				print_I(" Servo Mode could not be set, voltage invalid");
				return FALSE;
			}else{
				print_I(" Servo Mode set|");
				break;
			}
		}
	case IS_SPI_MOSI:
	case IS_SPI_MISO:
	case IS_SPI_SCK:
		if( pinHasFunction(pin, mode) != FALSE){
			print_I("|Mode is now SPI");
			InitSPI();
			break;
		}else{
			return FALSE;
		}
		break;
	case IS_COUNTER_INPUT_INT:
	case IS_COUNTER_INPUT_DIR:
	case IS_COUNTER_INPUT_HOME:
		if(pinHasFunction(pin, mode) != FALSE){
			print_I("|Mode is now Counter Input");
			StartCounterInput(pin);
			break;
		}else{
			print_I(", Counter Input not availible");
			return FALSE;
		}
		break;
	case IS_COUNTER_OUTPUT_INT:
	case IS_COUNTER_OUTPUT_DIR:
	case IS_COUNTER_OUTPUT_HOME:
		if(pinHasFunction(pin, mode) != FALSE){
			print_I("|Mode is now Counter Output");
			StartCounterOutput(pin);
			break;
		}else{
			print_I(", Counter Output not availible");
			return FALSE;
		}
		break;
	case IS_PPM_IN:
		println_I("Setting up PPM...");
		startPPM(pin);
		break;
	}
	print_I(" \tMode set");
	return TRUE;
}