Example #1
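A minimal frame-capture hook in an RSX (PS3 GPU) command-stream thread, apparently from RPCS3: end() clears the per-draw transform constants and, when frame capture is enabled, records the draw call labelled with its vertex count.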
	void thread::end()
	{
		transform_constants.clear();

		if (capture_current_frame)
		{
			capture_frame("Draw " + std::to_string(vertex_draw_count));
		}
	}
Example #2
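The same hook, extended to total the vertex counts of all queued first/count draw commands before recording the capture, and to reset the counter afterwards.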
	void thread::end()
	{
		transform_constants.clear();

		if (capture_current_frame)
		{
			for (const auto &first_count : first_count_commands)
				vertex_draw_count += first_count.second;
			capture_frame("Draw " + std::to_string(vertex_draw_count));
			vertex_draw_count = 0;
		}
	}
Example #3
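A later variant that additionally clears the per-vertex register info and data and reads its state through rsx::method_registers.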
	void thread::end()
	{
		rsx::method_registers.transform_constants.clear();

		// reset the per-vertex register info and data before the next draw
		for (u8 index = 0; index < rsx::limits::vertex_count; ++index)
		{
			rsx::method_registers.register_vertex_info[index].size = 0;
			rsx::method_registers.register_vertex_data[index].clear();
		}

		// with capture enabled, total the vertices of all queued first/count draw commands
		if (capture_current_frame)
		{
			for (const auto &first_count : first_count_commands)
			{
				vertex_draw_count += first_count.second;
			}

			capture_frame("Draw " + std::to_string(vertex_draw_count));
			vertex_draw_count = 0;
		}
	}
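The three snippets above appear to be successive revisions of the same hook. The self-contained sketch below (hypothetical names, not taken from the original source) isolates the accumulation pattern they share: draw calls queue (first, count) pairs, and end() sums the counts into the capture label before resetting the counter.

	// Minimal, compilable sketch of the pattern; draw_recorder and its members are illustrative only.
	#include <cstddef>
	#include <string>
	#include <utility>
	#include <vector>

	struct draw_recorder
	{
		// one (first vertex, vertex count) pair per queued draw command
		std::vector<std::pair<std::size_t, std::size_t>> first_count_commands;
		std::size_t vertex_draw_count = 0;
		bool capture_current_frame = true;

		void draw(std::size_t first, std::size_t count)
		{
			first_count_commands.emplace_back(first, count);
		}

		std::string end()
		{
			std::string label;
			if (capture_current_frame)
			{
				// sum the counts of all queued commands, then reset for the next frame
				for (const auto &first_count : first_count_commands)
					vertex_draw_count += first_count.second;

				label = "Draw " + std::to_string(vertex_draw_count);
				vertex_draw_count = 0;
			}
			first_count_commands.clear();
			return label;
		}
	};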
Example #4
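A standalone viewer that configures one or more FireWire cameras through libdc1394, hands the captured frames to HandVu for gesture recognition, and displays them in an Xv-backed X11 window; capture_frame() is called once per pass of the main event loop.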
int
main(int argc, char *argv[])
{
  XGCValues                 xgcv;
  long                      background = 0x010203;
  unsigned int              channel;
  unsigned int              speed;
  int                       i, p, cn;
  raw1394handle_t           raw_handle;
  struct raw1394_portinfo   ports[MAX_PORTS];

  get_options(argc, argv);
  /* process options */
  switch (fps) {
  case 1:
    fps = FRAMERATE_1_875;
    break;
  case 3:
    fps = FRAMERATE_3_75;
    break;
  case 15:
    fps = FRAMERATE_15;
    break;
  case 30:
    fps = FRAMERATE_30;
    break;
  case 60:
    fps = FRAMERATE_60;
    break;
  default:
    fps = FRAMERATE_7_5;
    break;
  }
  switch (res) {
  case 1:
    res = MODE_640x480_YUV411;
    device_width = 640;
    device_height = 480;
    format = XV_YUY2;
    break;
  case 2:
    res = MODE_640x480_RGB;
    device_width = 640;
    device_height = 480;
    format = XV_YUY2;
    break;
  default:
    res = MODE_320x240_YUV422;
    device_width = 320;
    device_height = 240;
    format = XV_UYVY;
    break;
  }

  /* get the number of ports (cards) */
  raw_handle = raw1394_new_handle();
  if (raw_handle == NULL) {
    perror("Unable to aquire a raw1394 handle\n");
    perror("did you load the drivers?\n");
    exit(-1);
  }

  numPorts = raw1394_get_port_info(raw_handle, ports, numPorts);
  raw1394_destroy_handle(raw_handle);
  if (verbose) printf("number of ports = %d\n", numPorts);

  /* get dc1394 handle to each port */
  for (p = 0; p < numPorts; p++) {
    int                       camCount;

    handles[p] = dc1394_create_handle(p);
    if (handles[p] == NULL) {
      perror("Unable to aquire a raw1394 handle\n");
      perror("did you load the drivers?\n");
      cleanup();
      exit(-1);
    }

    /* get the camera nodes and describe them as we find them */
    camera_nodes = dc1394_get_camera_nodes(handles[p], &camCount, verbose);

    /* setup cameras for capture */
    for (i = 0; i < camCount; i++) {
      cameras[numCameras].node = camera_nodes[i];

      if (dc1394_get_camera_feature_set
          (handles[p], cameras[numCameras].node,
           &features) != DC1394_SUCCESS) {
        printf("unable to get feature set\n");
      } else if (verbose) {
        dc1394_print_feature_set(&features);
      }

      if (dc1394_get_iso_channel_and_speed
          (handles[p], cameras[numCameras].node, &channel,
           &speed) != DC1394_SUCCESS) {
        printf("unable to get the iso channel number\n");
        cleanup();
        exit(-1);
      }

      if (dc1394_dma_setup_capture
          (handles[p], cameras[numCameras].node, i + 1 /*channel */ ,
           FORMAT_VGA_NONCOMPRESSED, res, SPEED_400, fps, NUM_BUFFERS,
           DROP_FRAMES, device_name,
           &cameras[numCameras]) != DC1394_SUCCESS) {
        fprintf(stderr,
                "unable to setup camera - check line %d of %s to make sure\n",
                __LINE__, __FILE__);
        perror("that the video mode, framerate and format are supported\n");
        printf("by your camera(s)\n");
        cleanup();
        exit(-1);
      }


      /*have the camera start sending us data */
      if (dc1394_start_iso_transmission
          (handles[p], cameras[numCameras].node) != DC1394_SUCCESS) {
        perror("unable to start camera iso transmission\n");
        cleanup();
        exit(-1);
      }
      numCameras++;
    }
  }

  fflush(stdout);
  if (numCameras < 1) {
    perror("no cameras found :(\n");
    cleanup();
    exit(-1);
  }


  //set_manual_exposure_gain(0, 440, 30);

  switch (format) {
  case XV_YV12:
    set_frame_length(device_width * device_height * 3 / 2, numCameras);
    break;
  case XV_YUY2:
  case XV_UYVY:
    set_frame_length(device_width * device_height * 2, numCameras);
    break;
  default:
    fprintf(stderr, "Unknown format set (internal error)\n");
    exit(255);
  }

  /* create OpenCV image wrappers */
  for (cn = 0; cn < MAX_CAMERAS; cn++) {
    if (cn < numCameras) {
      iplImages[cn] =
        cvCreateImage(cvSize(device_width, device_height),
		      IPL_DEPTH_8U, 3);
      readOnlyImg =
        cvCreateImageHeader(cvSize(device_width, device_height),
                            IPL_DEPTH_8U, 3);
    } else {
      iplImages[cn] = NULL;
    }
  }

  /* initialize handvu */
  hvInitialize(device_width, device_height);
  hvLoadConductor(string(conductor_fname));
  hvStartRecognition();
  hvSetOverlayLevel(3);
  if (async_processing) {
    hvAsyncSetup(num_async_bufs, displayCallback);
    hvAsyncGetImageBuffer(&m_async_image, &m_async_bufID);
    if (sync_display) async_display_image = cvCloneImage(iplImages[0]);
  }  

  /* make the window */
  display = XOpenDisplay(getenv("DISPLAY"));
  if (display == NULL) {
    fprintf(stderr, "Could not open display \"%s\"\n", getenv("DISPLAY"));
    cleanup();
    exit(-1);
  }

  QueryXv();

  if (adaptor < 0) {
    cleanup();
    exit(-1);
  }

  width = device_width;
  height = device_height * numCameras;

  window =
    XCreateSimpleWindow(display, DefaultRootWindow(display), 0, 0,
                        width, height, 0, WhitePixel(display,
                                                     DefaultScreen
                                                     (display)), background);

  XSelectInput(display, window, StructureNotifyMask | KeyPressMask);
  XMapWindow(display, window);
  connection = ConnectionNumber(display);

  gc = XCreateGC(display, window, 0, &xgcv);

  /* local main event loop */
  while (1) {

    if (async_processing) {
      // asynchronous processing in HandVu

      capture_frame();
      process_frame();
      if (sync_display) display_frame();
      handle_events();
      
    } else {
      // synchronous processing in HandVu
      capture_frame();
      process_frame();
      display_frame();
      handle_events();
    }

    /* XPending */

  }                             /* while not interrupted */

  exit(0);
}
Example #5
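The main() of an LED screen-capture tool built on the niftyled API: it loads a setup from a preferences file, allocates a matching frame buffer, and then repeatedly calls capture_frame() to grab a screen region, maps the pixels onto the LED chains, and sends them to the hardware at the configured frame rate.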
int main(int argc, char *argv[])
{
        /** return value of main() */
        int res = 1;
        /** current configuration */
        LedPrefs *prefs = NULL;
        /** current setup */
        LedSetup *setup = NULL;
        /** framebuffer for captured image */
        LedFrame *frame = NULL;



        /* check binary version compatibility */
        NFT_LED_CHECK_VERSION


        /* set default loglevel to INFO */
	nft_log_level_set(L_INFO);


	/* initialize exit handlers */
	int signals[] = { SIGHUP, SIGINT, SIGQUIT, SIGABRT };
	unsigned int i;
	for(i=0; i<sizeof(signals)/sizeof(int); i++)
	{
		if(signal(signals[i], _exit_signal_handler) == SIG_ERR)
		{
			NFT_LOG_PERROR("signal()");
			goto _m_exit;
		}
	}



        /* default fps */
        _c.fps = 25;

        /* default mechanism */
        _c.method = METHOD_MIN+1;

        /* default config-filename */
        if(!led_prefs_default_filename(_c.prefsfile, sizeof(_c.prefsfile), ".ledcap.xml"))
                goto _m_exit;

        /* parse cmdline-arguments */
        if(!_parse_args(argc, argv))
                goto _m_exit;


	/* print welcome msg */
	NFT_LOG(L_INFO, "%s %s (c) D.Hiepler 2006-2012", PACKAGE_NAME, ledcap_version_long());
	NFT_LOG(L_VERBOSE, "Loglevel: %s", nft_log_level_to_string(nft_log_level_get()));



	/* initialize preferences context */
    	if(!(prefs = led_prefs_init()))
    		return -1;

	/* parse prefs-file */
    	LedPrefsNode *pnode;
    	if(!(pnode = led_prefs_node_from_file(_c.prefsfile)))
    	{
		NFT_LOG(L_ERROR, "Failed to open configfile \"%s\"",
		        		_c.prefsfile);
		goto _m_exit;
	}

        /* create setup from prefs-node */
    	if(!(setup = led_prefs_setup_from_node(prefs, pnode)))
    	{
		NFT_LOG(L_ERROR, "No valid setup found in preferences file.");
		led_prefs_node_free(pnode);
		goto _m_exit;
	}

    	/* free preferences node */
    	led_prefs_node_free(pnode);




        /* determine width of input-frames */
        LedFrameCord width, height;
        if((width = led_setup_get_width(setup)) > _c.width)
        {
                NFT_LOG(L_WARNING, "LED-Setup width (%d) > our width (%d). Using setup-value",
                        width,_c.width);
                /* use dimensions of mapped chain */
                _c.width = width;
        }

        /* determine height of input-frames */
        if((height = led_setup_get_height(setup)) > _c.height)
        {
                NFT_LOG(L_WARNING, "LED-Setup height (%d) > our height (%d). Using setup-value.",
                        height, _c.height);
                /* use dimensions of mapped chain */
                _c.height = height;
        }


        if(_c.width < 0)
        {
                NFT_LOG(L_ERROR, "width (%d) < 0", _c.width);
                goto _m_exit;
        }

        if(_c.height < 0)
        {
                NFT_LOG(L_ERROR, "height (%d) < 0", _c.height);
                goto _m_exit;
        }

        /* sanitize x-offset @todo check for maximum */
        if(_c.x < 0)
        {
                NFT_LOG(L_ERROR, "Invalid x coordinate: %d, using 0", _c.x);
                _c.x = 0;
        }

        /* sanitize y-offset @todo check for maximum */
        if(_c.y < 0)
        {
                NFT_LOG(L_ERROR, "Invalid y coordinate: %d, using 0", _c.y);
                _c.y = 0;
        }


        /* initialize capture mechanism (only imlib for now) */
        if(!capture_init(_c.method))
                goto _m_exit;

        /* allocate framebuffer */
        NFT_LOG(L_INFO, "Allocating frame: %dx%d (%s)",
	        _c.width, _c.height, capture_format());
        if(!(frame = led_frame_new(_c.width, _c.height, led_pixel_format_from_string(capture_format()))))
                goto _m_exit;

        /* respect endianess */
        led_frame_set_big_endian(frame, capture_is_big_endian());

        /* get first hardware */
        LedHardware *hw;
        if(!(hw = led_setup_get_hardware(setup)))
                goto _m_exit;

        /* initialize pixel->led mapping */
        if(!led_hardware_list_refresh_mapping(hw))
                goto _m_exit;

        /* precalc memory offsets for actual mapping */
        if(!led_chain_map_from_frame(led_hardware_get_chain(hw), frame))
                goto _m_exit;

        /* set saved gain to all registered hardware instances */
        if(!led_hardware_list_refresh_gain(hw))
                goto _m_exit;


	/* print some debug-info */
        led_frame_print(frame, L_VERBOSE);
        led_hardware_print(hw, L_VERBOSE);


        /* initially sample time for frametiming */
        if(!led_fps_sample())
                goto _m_exit;


        /* output some useful info */
        NFT_LOG(L_INFO, "Capturing %dx%d pixels at position x/y: %d/%d", _c.width, _c.height, _c.x, _c.y);

        /* loop until _c.running is set to FALSE */
        _c.running = TRUE;
        while(_c.running)
        {
                /* capture frame */
                if(!(capture_frame(frame, _c.x, _c.y)))
                        break;

                /* print frame for debugging */
                //led_frame_buffer_print(frame);

                /* map from frame */
                LedHardware *h;
                for(h = hw; h; h = led_hardware_list_get_next(h))
                {
                        if(!led_chain_fill_from_frame(led_hardware_get_chain(h), frame))
                        {
                                NFT_LOG(L_ERROR, "Error while mapping frame");
                                break;
                        }
                }


                /* send frame to hardware(s) */
                led_hardware_list_send(hw);

                /* delay in respect to fps */
                if(!led_fps_delay(_c.fps))
                        break;

                /* show frame */
                led_hardware_list_show(hw);

                /* save time when frame is displayed */
                if(!led_fps_sample())
                        break;
        }



        /* mark success */
        res = 0;

_m_exit:
        /* deinitialize capture mechanism */
        capture_deinit();

        /* free frame */
        led_frame_destroy(frame);

        /* destroy setup */
        led_setup_destroy(setup);

        /* destroy preferences */
        led_prefs_deinit(prefs);


        return res;
}