static bool zigbee_protocol_waitAndcheckReply(uint32_t fd, uint8_t* receivedFrame, uint32_t sizeBuffer,
    zigbee_decodedFrame* decodedData)
{
  fd_set rfs;
  struct timeval waitTime;
  bool bSuccess;
  uint16_t nextSizeToRead;
  FD_ZERO(&rfs);
  FD_SET(fd, &rfs);
  waitTime.tv_sec = 2;
  waitTime.tv_usec = 0;
  bSuccess = false;
  nextSizeToRead = 0;

  if (select(fd + 1, &rfs, NULL, NULL, &waitTime) > 0)
  {
    if (FD_ISSET(fd, &rfs))
    {
      /* read the 3-byte header first to learn how many payload bytes follow */
      bSuccess = serial_read(fd, receivedFrame, 3);
      if (bSuccess)
      {
        bSuccess = zigbee_decodeHeader(receivedFrame, 3, &nextSizeToRead);
      }

      /* read the payload plus its trailing checksum byte, if it fits in the buffer */
      if (bSuccess && ((uint16_t)(3 + nextSizeToRead + 1) <= sizeBuffer))
      {
        bSuccess = serial_read(fd, &receivedFrame[3], nextSizeToRead + 1);
      }
      else
      {
        bSuccess = false;
      }

      if (bSuccess)
      {
        bSuccess = zigbee_decodeFrame(&receivedFrame[3], nextSizeToRead + 1, decodedData);
        if (bSuccess)
        {
          display_frame("received (ok)", receivedFrame, nextSizeToRead + 1 + 3);
        }
        else
        {
          display_frame("received (ko)", receivedFrame, nextSizeToRead + 1 + 3);
        }
      }
    }
  }
  else
  {
#ifdef TRACE_ACTIVATED
    fprintf(stdout, "Timeout\n");
#endif // TRACE_ACTIVATED
  }

#ifdef TRACE_ACTIVATED
  fprintf(stdout, "bSuccess = %d nextSizeToRead = %d\n", bSuccess, nextSizeToRead);
#endif // TRACE_ACTIVATED

  return bSuccess;
}
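A minimal usage sketch for the helper above, assuming it is called from the same file as zigbee_handleTx() (shown in a later example); zigbee_queryCommand() and the 256-byte receive buffer are hypothetical additions for illustration, not part of the original library.

/* Hypothetical illustration only: flush a queued command, then wait for its reply. */
static bool zigbee_queryCommand(zigbee_obj* zb, zigbee_decodedFrame* reply)
{
  uint8_t rxBuffer[256]; /* assumed to be large enough for one API frame */

  zigbee_handleTx(zb);   /* sends zb->frame over zb->fd and clears sizeOfFrameToSend */
  return zigbee_protocol_waitAndcheckReply(zb->fd, rxBuffer, sizeof(rxBuffer), reply);
}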
/* Validates incoming Control frames except READY_IND and
 * READY_QUERY which do not start with the common header.
 * Also calls display_frame() to print out conforming frames.
 * Returns < 0 for error
 */
int validate_frame(unsigned char *buff, int size)
{
        struct ready_frame *hdr; /* Ready is the shortest possible */

        if (size < (int)sizeof(struct ready_frame)) {
                diag(COMPONENT, DIAG_DEBUG, "short frame, size %d\n", size);
                return -1;
        }

        hdr = (struct ready_frame *)buff;
        if (hdr->marker   != htons(0xff00) ||
            hdr->protocol != 0x01 ||
            hdr->version  != 0x01)
                return -1;

        /* READY_* frames are shorter than others */
        if (hdr->opcode == htons(READY_QUERY) ||
            hdr->opcode == htons(READY_IND)) {
                diag(COMPONENT, DIAG_DEBUG, "Received a %s\n", opcode2text(hdr->opcode));
                return 0;
        }

        if (size < (int)sizeof(struct ctrl_frame)) {
                diag(COMPONENT, DIAG_DEBUG, "short frame, size %d\n", size);
                return -1;
        }

        display_frame(buff);

        return 0;
}
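For readability, a header layout consistent with the checks above could look like the sketch below; the real struct ready_frame is defined elsewhere, so the field order and packing shown here are assumptions.

/* Assumed layout only -- the real struct ready_frame is not shown in this excerpt. */
struct ready_frame_example {
	uint16_t marker;    /* always htons(0xff00) */
	uint8_t  protocol;  /* always 0x01 */
	uint8_t  version;   /* always 0x01 */
	uint16_t opcode;    /* READY_QUERY, READY_IND, or a control opcode (network byte order) */
} __attribute__((packed));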
Example #3
// Display the atlas statusbar
int main(int argc, char **argv) {
    
    // Enable multi threading
    XInitThreads();
    
    // Initialize gtk toolkit
    gtk_init(&argc, &argv);
    
    // Setup widgets
    set_style();
    
    // Display widgets
    display_frame();
    display_tags(1);
    display_date(attrList);
    display_battery();
    display_wifi();
    display_volume();
    display_brightness();
    
    signal(SIGUSR1, widget_updater);
    
    // Run the gtk loop
    gtk_main();
    
    return 0;    
}
Example #4
int
spectrum_frame( void )
{
  libspectrum_dword frame_length;
  int error;

  /* Reduce the t-state count of both the processor and all the events
     scheduled to occur. Done slightly differently if RZX playback is
     occurring */
  frame_length = rzx_playback ? tstates
			      : machine_current->timings.tstates_per_frame;

  error = event_frame( frame_length ); if( error ) return error;
  tstates -= frame_length;
  if( z80.interrupts_enabled_at >= 0 )
    z80.interrupts_enabled_at -= frame_length;

  if( sound_enabled ) sound_frame();

  if( display_frame() ) return 1;
  if( profile_active ) profile_frame( frame_length );
  printer_frame();

  /* Add an interrupt unless they're being generated by .rzx playback */
  if( !rzx_playback ) {
    if( event_add( machine_current->timings.tstates_per_frame,
		   spectrum_frame_event ) ) return 1;
  }

  loader_frame( frame_length );

  return 0;
}
void zigbee_handleTx(zigbee_obj* zb)
{
  if (zb->sizeOfFrameToSend != 0)
  {
    serial_write(zb->fd, zb->frame, zb->sizeOfFrameToSend);
    display_frame("sent", zb->frame, zb->sizeOfFrameToSend);
  }
  zb->sizeOfFrameToSend = 0;
}
Example #6
void displayCallback(IplImage* image, hvAction action)
{
  if (sync_display) {
    cvCopy(image, async_display_image);
  } else {
    cvCopy(image, iplImages[0]);
    display_frame();
  }
}
Example #7
void VideoPlayerTool::slotStepBackwardMovie()
{
	if(!m_fileVA || playFrame <=0)
		return;

	m_fileVA->setAbsolutePosition(m_fileVA->getPrevAbsolutePosition());

	display_frame();
	// display
//	detailsView->setWorkshopImage(detailsImage);
}
Example #8
	void process_frame(ClFrame &in_frame, ClFrame &out_frame, ClFrame &orig_frame) {
		PERF_START("process_frame");

		if(m_crop) {
			in_frame.set_origin(m_width/2, m_height/2, 0);
			in_frame.set_region( m_width/3, m_height/3, 1);
			out_frame.set_origin(m_width/2, m_height/2, 0);
			out_frame.set_region( m_width/3, m_height/3, 1);
#ifndef _WITH_FRAMECPY
			//in_frame.vflip(); // no need to flip for frame memcpy
#endif //_WITH_FRAMECPY
		}

		if(m_crop) {
			set_background_image(bg_img);
		}

		if(m_ocl_image) {
			if(copyimg.proc(in_frame, out_frame) == false) {
				return;
			}
		}

		if(m_motion) {
			if(transimg.proc(3, in_frame, out_frame) == false) {
				return;
			}
		}  

		if(m_target.is_search()) {
			find_target(in_frame.get());
		}

		if(m_ocl_image || m_motion) {
			display_frame(out_frame);
		} else {
			display_frame(in_frame);
		}
		PERF_END("process_frame");
	}
Example #9
void VideoPlayerTool::on_grayButton_toggled(bool gray) {
	playGrayscale = gray;
	if(playGrayscale) {
		QPixmap pixIcon;
		if(pixIcon.load(":/images/pixmaps/:images/22x22/view-gray.png"))
			grayButton->setPixmap(pixIcon);
		else
			grayButton->setText(tr("Gray"));
	} else {
		QPixmap pixIcon;
		if(pixIcon.load(":/images/pixmaps/:images/22x22/view-color.png"))
			grayButton->setPixmap(pixIcon);
		else
			grayButton->setText(tr("Color"));
	}
	display_frame();
}
Example #10
static int ws_eink_update_display(struct ws_eink_fb_par *par)
{
	int ret = 0;
	int i;
	int frame_size;
	u8 *vmem = par->info->screen_base;
	u8 *ssbuf = par->ssbuf;
	const u8 *lut;
	size_t lut_size;
	static int update_count = 0;

	if(++update_count == 10) {
		update_count = 0;
		lut = lut_full_update;
		lut_size = ARRAY_SIZE(lut_full_update);
	} else {
		lut = lut_partial_update;
		lut_size = ARRAY_SIZE(lut_partial_update);
	}

	ret = int_lut(par, lut, lut_size);
	if (ret)
		return ret;

	frame_size = par->props->height * par->props->width * par->props->bpp / 8;

	memcpy(ssbuf, vmem, frame_size);

	for (i = 0; i < frame_size; i++) {
		ssbuf[i] = bitrev8(ssbuf[i]);
	}

	ret = set_frame_memory(par, ssbuf);
	if (ret)
		return ret;

	ret = display_frame(par);
	if (ret)
		return ret;

	ret = ws_eink_sleep(par);

	return ret;
}
Example #11
static int ws_eink_init_display(struct ws_eink_fb_par *par)
{
	int ret;
	struct device *dev = &par->spi->dev;

	ret = devm_gpio_request_one(&par->spi->dev, par->rst,
				    GPIOF_OUT_INIT_LOW, "ws_eink_rst");
	if (ret) {
		dev_err(dev, "Couldn't request reset GPIO\n");
		return ret;
	}

	ret = devm_gpio_request_one(&par->spi->dev, par->dc, GPIOF_OUT_INIT_LOW,
				    "ws_eink_dc");
	if (ret) {
		dev_err(dev, "Couldn't request data/command GPIO\n");
		return ret;
	}

	ret = devm_gpio_request_one(&par->spi->dev, par->busy, GPIOF_IN,
				    "ws_eink_busy");
	if (ret) {
		dev_err(dev, "Couldn't request busy GPIO\n");
		return ret;
	}

	ret = int_lut(par, lut_full_update, ARRAY_SIZE(lut_full_update));
	if (ret)
		return ret;

	ret = clear_frame_memory(par, 0xFF);
	if (ret)
		return ret;

	ret = display_frame(par);
	if (ret)
		return ret;

	ret = int_lut(par, lut_partial_update, ARRAY_SIZE(lut_partial_update));

	return ret;
}
Example #12
void
handle_events()
{
  XEvent                    xev;

  while (XPending(display) > 0) {
    XNextEvent(display, &xev);
    switch (xev.type) {
    case ConfigureNotify:
      width = xev.xconfigure.width;
      height = xev.xconfigure.height;
      //      capture_frame();
      //      process_frame();
      display_frame();
      break;
    case KeyPress:
      switch (XKeycodeToKeysym(display, xev.xkey.keycode, 0)) {
      case XK_q:
      case XK_Q:
      case XK_Escape:
        cleanup();
        exit(0);
        break;
      case XK_r:
      case XK_R:
	hvStopRecognition();
	hvStartRecognition();
        break;
      case XK_u:
      case XK_U:
        if (hvCanCorrectDistortion()) {
          hvCorrectDistortion(!hvIsCorrectingDistortion());
        }
        break;
      case XK_comma:
      case XK_less:
        width = (width*3)/4;
        height = (height*3)/4;
        XResizeWindow(display, window, width, height);
        //        capture_frame();
        //        process_frame();
        display_frame();
        break;
      case XK_period:
      case XK_greater:
        width = (int) (1.25 * width);
        height = (int) (1.25 * height);
        XResizeWindow(display, window, width, height);
        //        capture_frame();
        //        process_frame();
        display_frame();
        break;
      case XK_0:
	hvSetOverlayLevel(0);
        break;
      case XK_1:
	hvSetOverlayLevel(1);
        break;
      case XK_2:
	hvSetOverlayLevel(2);
        break;
      case XK_3:
	hvSetOverlayLevel(3);
        break;
      }
      break;
    }
  }
}
Example #13
void VideoPlayerTool::slotStepMovie()
{
	if(!m_fileVA) {
		slotRewindStartMovie();
		if(!m_fileVA)
			return;
	}

	if(playFileSize <= 0) {
		fprintf(stderr, "[VideoPlayerT]:%s:%d : ERROR: file size=%llu\n", __func__, __LINE__,
					playFileSize);
		return;
	}

	// Get step time
	struct timeval tv1, tv2;
	struct timezone tz;
	gettimeofday(&tv1, &tz);

	bool got_picture = m_fileVA->GetNextFrame();

	// set Slider value
	unsigned long long pos = m_fileVA->getAbsolutePosition();
	int poscent = (int)(pos * 100 / playFileSize);

	if(poscent != playScrollBar->value()) {
		playScrollBar->blockSignals(TRUE);

	//	fprintf(stderr, "[VideoPlayerT]:%s:%d : set file to pos=%d %%\n", __func__, __LINE__,
	//			poscent);

		playScrollBar->setValue(poscent);
		playScrollBar->blockSignals(FALSE);
	}

	if (got_picture) {
		display_frame();

		playFrame++;
	}
	else {

		fprintf(stderr, "%s %s:%d : cannot read frame\n", __FILE__, __func__, __LINE__);

		if (playSize <= 0) { // stop timer
			if(playTimer) {
				if(playTimer->isActive()) {
					playTimer->stop();
				}
			}
		}

		playContinuous = false;
		QPixmap icon;

		if(icon.load(":/images/pixmaps/VcrPlay.png")) {
			playMovie->setPixmap(icon);
		}
	}


	if(m_fileVA->endOfFile()) {
		fprintf(stderr, "%s %s:%d : EOF\n", __FILE__, __func__, __LINE__);
		if(playTimer) { playTimer->stop(); }
		playContinuous = false;

		QPixmap icon;
		if(icon.load(":/images/pixmaps/VcrPlay.png")) {
			playMovie->setPixmap(icon);
		}
	}

	gettimeofday(&tv2, &tz);
}
static int start_loop(void)
{
	struct v4l2_buffer buf;
	static int captFrmCnt = 0;
	char *ptrPlanar = NULL;
	int dummy;

	ptrPlanar = (char *)calloc(1, nWidthFinal * nHeightFinal * 2);

	while (!quit) {
		fd_set fds;
		struct timeval tv;
		int r;

		if (stress_test) {
			start_loopCnt--;
			if (start_loopCnt == 0) {
				start_loopCnt = 50;
				break;
			}
		}

		if (!display_out) {
			FD_ZERO(&fds);
			FD_SET(fdCapture, &fds);

			/* Timeout */
			tv.tv_sec = 2;
			tv.tv_usec = 0;
			r = select(fdCapture + 1, &fds, NULL, NULL, &tv);
			if (-1 == r) {
				if (EINTR == errno)
					continue;
				printf("StartCameraCapture:select\n");
				return -1;
			}
		
			if (0 == r)
				continue;
		}

		if (display_out) {
			/* Wait for vertical sync */
			if (ioctl(fd_vid1, FBIO_WAITFORVSYNC, &dummy) < 0) {
				printf("Failed FBIO_WAITFORVSYNC\n");
				if (EAGAIN == errno) {
					printf("disp_again\n");
					continue;
				}
				return -1;
			}
		}

		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_USERPTR;

		/* determine ready buffer */
		if (-1 == ioctl(fdCapture, VIDIOC_DQBUF, &buf)) {
			if (EAGAIN == errno) {
				printf("capt_again\n");
				continue;
			}
			printf("StartCameraCapture:ioctl:VIDIOC_DQBUF\n");
			return -1;
		}
		if (captFrmCnt == 100) {
			printf("save_frame = %d \n", save_frame);
			if (save_frame) {
				if (vpfe_input == 2) 
					fwrite(user_io_buffers[buf.index].user_addr, 1,
					      (nWidthFinal * nHeightFinal * 1),
					      file_fp);
				else
					fwrite(user_io_buffers[buf.index].user_addr, 1,
					      (nWidthFinal * nHeightFinal * 2),
					      file_fp);
				save_frame = 0;
				fclose(file_fp);
				printf("Saved file %s\n", CAPTURE_FILE);
				return -1;
			}
		}
		if (display_out) {
			display_frame(VID1, user_io_buffers[buf.index].user_addr);
			display_bitmap_osd0();
		}
		if (print_fn)
			printf("time:%lu    frame:%u\n", (unsigned long)time(NULL), captFrmCnt++);


		/* requeue the buffer */
		if (-1 == ioctl(fdCapture, VIDIOC_QBUF, &buf)) {
			printf("StartCameraCaputre:ioctl:VIDIOC_QBUF\n");
			return -1;
		}
	}
	return SUCCESS;
}
Example #15
int main(int argc, char** argv)
{
  fprintf(stdout, "Essai lib zigbee\n");
  uint8_t buffer[1024];
  uint32_t frameSize;
  
  UNUSED(argc);
  UNUSED(argv);

  //zigbee_panID panID = { 0, 0, 0, 0 , 0, 0 , 0 , 0};
  frameSize = zigbee_encode_SetPanID(buffer, 1024, 1, (zigbee_panID*) &zigbee_randomPanID);
  display_frame(buffer, frameSize);
  
  frameSize = zigbee_encode_setScanChannelBitmask(buffer, 1024, 1, ZIGGEE_DEFAULT_BITMASK);
  display_frame(buffer, frameSize);

  frameSize = zigbee_encode_setScanDurationExponent(buffer, 1024, 1, ZIGBEE_DEFAULT_SCAN_DURATION_EXPONENT);
  display_frame(buffer, frameSize);
  
  frameSize = zigbee_encode_setStackProfile(buffer, 1024, 1, ZIGBEE_DEFAULT_STACK_PROFILE);
  display_frame(buffer, frameSize);
  
  frameSize = zigbee_encode_setEncryptionEnabled(buffer, 1024, 1, false);
  display_frame(buffer, frameSize);

  frameSize = zigbee_encode_setNetworkEncryptionKey(buffer, 1024, 1, (zigbee_encryptionKey*) &zigbee_randomEncryptionKey);
  display_frame(buffer, frameSize);

  frameSize = zigbee_encode_setLinkKey(buffer, 1024, 1, (zigbee_linkKey*) &zigbee_noLinkKey);
  display_frame(buffer, frameSize);
  
  frameSize = zigbee_encode_setEncryptionOptions(buffer, 1024, 1, ZIGBEE_DEFAULT_ENCRYPTION_OPTION);
  display_frame(buffer, frameSize);

  frameSize = zigbee_encode_SetJoinTime(buffer, 1024, 1, ZIGBEE_JOINING_ALWAYS_ACTIVATED);
  display_frame(buffer, frameSize);

  frameSize = zigbee_encode_getAssociationIndication(buffer, 1024, 1);
  display_frame(buffer, frameSize);
  
  frameSize = zigbee_encode_getHardwareVersion(buffer, 1024, 1);
  display_frame(buffer, frameSize);
  
  frameSize = zigbee_encode_getFirmwareVersion(buffer, 1024, 1);
  display_frame(buffer, frameSize);
  
  
  uint8_t test_frame[9] = { 0x7E, 0x00, 0x05, 0x88, 0x01, 'B', 'D', 0x00, 0xF0 };
  zigbee_decodedFrame decodedData;
  bool bCorrectlyDecoded;
  uint16_t sizeOfExpectedData;
  bCorrectlyDecoded = zigbee_decodeHeader(test_frame, 3, &sizeOfExpectedData);
  fprintf(stdout, "bCorrectlyDecoded = %d\n", bCorrectlyDecoded);
  fprintf(stdout, "sizeOfExpectedData = %d\n", sizeOfExpectedData);
  
  bCorrectlyDecoded = zigbee_decodeFrame(&test_frame[3], sizeOfExpectedData + 1, &decodedData); // 1 for checksum
  fprintf(stdout, "bCorrectlyDecoded = %d\n", bCorrectlyDecoded);
  fprintf(stdout, "decodedData.type = %.2x\n", decodedData.type);
  fprintf(stdout, "decodedData.frameID = %d\n", decodedData.atCmd.frameID);
  fprintf(stdout, "decodedData.atCmd.ATcmd = %c %c\n", decodedData.atCmd.ATcmd[0], decodedData.atCmd.ATcmd[1]);
  fprintf(stdout, "decodedData.atCmd.status = %d\n", decodedData.atCmd.status);  
  fprintf(stdout, "decodedData.atCmd.data = %p\n", decodedData.atCmd.data);
  fprintf(stdout, "decodedData.atCmd.size = %d\n", decodedData.atCmd.size);

  return EXIT_SUCCESS;
}
Example #16
int
main(int argc, char *argv[])
{
  XGCValues                 xgcv;
  long                      background = 0x010203;
  unsigned int              channel;
  unsigned int              speed;
  int                       i, p, cn;
  raw1394handle_t           raw_handle;
  struct raw1394_portinfo   ports[MAX_PORTS];

  get_options(argc, argv);
  /* process options */
  switch (fps) {
  case 1:
    fps = FRAMERATE_1_875;
    break;
  case 3:
    fps = FRAMERATE_3_75;
    break;
  case 15:
    fps = FRAMERATE_15;
    break;
  case 30:
    fps = FRAMERATE_30;
    break;
  case 60:
    fps = FRAMERATE_60;
    break;
  default:
    fps = FRAMERATE_7_5;
    break;
  }
  switch (res) {
  case 1:
    res = MODE_640x480_YUV411;
    device_width = 640;
    device_height = 480;
    format = XV_YUY2;
    break;
  case 2:
    res = MODE_640x480_RGB;
    device_width = 640;
    device_height = 480;
    format = XV_YUY2;
    break;
  default:
    res = MODE_320x240_YUV422;
    device_width = 320;
    device_height = 240;
    format = XV_UYVY;
    break;
  }

  /* get the number of ports (cards) */
  raw_handle = raw1394_new_handle();
  if (raw_handle == NULL) {
    perror("Unable to aquire a raw1394 handle\n");
    perror("did you load the drivers?\n");
    exit(-1);
  }

  numPorts = raw1394_get_port_info(raw_handle, ports, numPorts);
  raw1394_destroy_handle(raw_handle);
  if (verbose) printf("number of ports = %d\n", numPorts);

  /* get dc1394 handle to each port */
  for (p = 0; p < numPorts; p++) {
    int                       camCount;

    handles[p] = dc1394_create_handle(p);
    if (handles[p] == NULL) {
      perror("Unable to aquire a raw1394 handle\n");
      perror("did you load the drivers?\n");
      cleanup();
      exit(-1);
    }

    /* get the camera nodes and describe them as we find them */
    camera_nodes = dc1394_get_camera_nodes(handles[p], &camCount, verbose);

    /* setup cameras for capture */
    for (i = 0; i < camCount; i++) {
      cameras[numCameras].node = camera_nodes[i];

      if (dc1394_get_camera_feature_set
          (handles[p], cameras[numCameras].node,
           &features) != DC1394_SUCCESS) {
        printf("unable to get feature set\n");
      } else if (verbose) {
        dc1394_print_feature_set(&features);
      }

      if (dc1394_get_iso_channel_and_speed
          (handles[p], cameras[numCameras].node, &channel,
           &speed) != DC1394_SUCCESS) {
        printf("unable to get the iso channel number\n");
        cleanup();
        exit(-1);
      }

      if (dc1394_dma_setup_capture
          (handles[p], cameras[numCameras].node, i + 1 /*channel */ ,
           FORMAT_VGA_NONCOMPRESSED, res, SPEED_400, fps, NUM_BUFFERS,
           DROP_FRAMES, device_name,
           &cameras[numCameras]) != DC1394_SUCCESS) {
        fprintf(stderr,
                "unable to setup camera - check line %d of %s to make sure\n",
                __LINE__, __FILE__);
        perror("that the video mode, framerate and format are supported\n");
        printf("by your camera(s)\n");
        cleanup();
        exit(-1);
      }


      /*have the camera start sending us data */
      if (dc1394_start_iso_transmission
          (handles[p], cameras[numCameras].node) != DC1394_SUCCESS) {
        perror("unable to start camera iso transmission\n");
        cleanup();
        exit(-1);
      }
      numCameras++;
    }
  }

  fflush(stdout);
  if (numCameras < 1) {
    perror("no cameras found :(\n");
    cleanup();
    exit(-1);
  }


  //set_manual_exposure_gain(0, 440, 30);

  switch (format) {
  case XV_YV12:
    set_frame_length(device_width * device_height * 3 / 2, numCameras);
    break;
  case XV_YUY2:
  case XV_UYVY:
    set_frame_length(device_width * device_height * 2, numCameras);
    break;
  default:
    fprintf(stderr, "Unknown format set (internal error)\n");
    exit(255);
  }

  /* create OpenCV image wrappers */
  for (cn = 0; cn < MAX_CAMERAS; cn++) {
    if (cn < numCameras) {
      iplImages[cn] =
        cvCreateImage(cvSize(device_width, device_height),
		      IPL_DEPTH_8U, 3);
      readOnlyImg =
        cvCreateImageHeader(cvSize(device_width, device_height),
                            IPL_DEPTH_8U, 3);
    } else {
      iplImages[cn] = NULL;
    }
  }

  /* initialize handvu */
  hvInitialize(device_width, device_height);
  hvLoadConductor(string(conductor_fname));
  hvStartRecognition();
  hvSetOverlayLevel(3);
  if (async_processing) {
    hvAsyncSetup(num_async_bufs, displayCallback);
    hvAsyncGetImageBuffer(&m_async_image, &m_async_bufID);
    if (sync_display) async_display_image = cvCloneImage(iplImages[0]);
  }  

  /* make the window */
  display = XOpenDisplay(getenv("DISPLAY"));
  if (display == NULL) {
    fprintf(stderr, "Could not open display \"%s\"\n", getenv("DISPLAY"));
    cleanup();
    exit(-1);
  }

  QueryXv();

  if (adaptor < 0) {
    cleanup();
    exit(-1);
  }

  width = device_width;
  height = device_height * numCameras;

  window =
    XCreateSimpleWindow(display, DefaultRootWindow(display), 0, 0,
                        width, height, 0, WhitePixel(display,
                                                     DefaultScreen
                                                     (display)), background);

  XSelectInput(display, window, StructureNotifyMask | KeyPressMask);
  XMapWindow(display, window);
  connection = ConnectionNumber(display);

  gc = XCreateGC(display, window, 0, &xgcv);

  /* local main event loop */
  while (1) {

    if (async_processing) {
      // asynchronous processing in HandVu

      capture_frame();
      process_frame();
      if (sync_display) display_frame();
      handle_events();
      
    } else {
      // synchronous processing in HandVu
      capture_frame();
      process_frame();
      display_frame();
      handle_events();
    }

    /* XPending */

  }                             /* while not interrupted */

  exit(0);
}
Example #17
void MainWindow::process_frame(cv::Mat b4_tweak_input_image)
{
        //cv::Mat input_image = tweak_video_frame(b4_tweak_input_image);
        cv::Mat input_image;
        b4_tweak_input_image.copyTo(input_image);

        cv::Mat processed_image,Segmented_image,hole_detected_image;
        cv::Mat lab_image;
        //Converting to the given color space
        switch(cspace)
        {
        case COLOR_NONE:
            input_image.copyTo(processed_image);
            break;
        case LUMINANCE:
            processed_image = custom_Y(input_image);
            break;
        case X:
            processed_image = custom_x(input_image);
            break;
        case Y:
            processed_image = custom_y(input_image);
            break;
        case RED:
            processed_image = red_space(input_image);
            break;
        case GREEN:
            processed_image = green_space(input_image);
            break;
        case BLUE:
            processed_image = blue_space(input_image);
            break;
        case X2:
            processed_image = standard_x(input_image);
            break;
        case Y2:
            processed_image = standard_y(input_image);
            break;
        case LUMINANCE2:
            processed_image = standard_Y(input_image);
            break;
        case LAB:
            cv::cvtColor(input_image,lab_image,CV_RGB2Lab);
            break;
        case L:
            processed_image = L_space(input_image);
            break;
        case A:
            processed_image = a_space(input_image);
            break;
        case B:
            processed_image = b_space(input_image);
            break;
        }
        //Smoothing image before thresholding
        if(ui->Gaussian_checkBox->isChecked() && !processed_image.empty())
        {
            cv::GaussianBlur(processed_image,processed_image,cv::Size(Gaussian_kernel_size,Gaussian_kernel_size),0,0,cv::BORDER_DEFAULT);
        }
        //Light gradient equalization
        if((ui->Gradient_equalizer_checkBox->isChecked()) && (processed_image.channels()==1))
        {
            processed_image = Light_gradient_equalizer(processed_image);
        }

        if(ui->Morphological_gradient_equalizer_checkBox->isChecked() && processed_image.channels()==1)
        {
            processed_image = Morpological_light_gradient_equalizer(processed_image);

        }

        if(ui->Morphological_sharpen_checkBox->isChecked() && processed_image.channels()==1)
        {
            processed_image = Morpological_contrast_enhancement(processed_image);
        }
        //thresholding
        switch(thresh_met)
        {
        case NO_THRESH_MODE:
            break;
        case SOBEL:
            Segmented_image = Local_Sobel(processed_image,Local_Sobel_numberofSubImages, Local_Sobel_kernel_size,
                                          Local_Sobel_hist_percentile,Local_Sobel_dx,Local_Sobel_dy,ui->Otsu_in_edge_checkBox->isChecked(),
                                          (double)ui->Sobel_weight_dx_horizontalSlider->value()/100
                                          ,(double)ui->Sobel_weight_dy_horizontalSlider->value()/100);
            break;
        case SCHARR:
            Segmented_image = Local_Scharr(processed_image,Local_Scharr_numberofSubImages,ui->Scharr_histogram_percentile_checkBox->isChecked(),Local_Scharr_hist_percentile,
                                           ui->Scharr_threshold_checkBox->isChecked(),ui->Scharr_threshold_slider->value(),ui->Local_Scharr_dx_checkBox->isChecked(),ui->Local_Scharr_dy_checkBox->isChecked(),
                                           ui->Otsu_in_edge_checkBox->isChecked(),(double)ui->Scharr_weight_dx_horizontalSlider->value()/100
                                           ,(double)ui->Scharr_weight_dy_horizontalSlider->value()/100);
            break;
        case LAPLACIAN:
            Segmented_image = Local_Laplace(processed_image,Laplacian_numberofSubImages,Laplacian_kernel_size,Laplacian_hist_percentile);
            /*cv::Laplacian(processed_image,Segmented_image,CV_32F,3,1,0,cv::BORDER_REPLICATE);
            Segmented_image.convertTo(Segmented_image,CV_8U);*/
            break;
        case OTSU:
            Segmented_image = Local_Otsu(processed_image,Local_Otsu_numberofSubImages);
            break;
        case THRESHOLDING:
            Segmented_image = Naive_Thresholding(processed_image,Naive_threshold);
            break;
        case ADAPTIVE_THRESHOLDING:
            Segmented_image = Adaptive_Thresholding(processed_image,Adaptive_Thresholding_kernel_size,Adaptive_thresholding_C,
                                                    ui->Adaptive_Thresholding_gaussian_radioButton->isChecked());
            break;
        }
        //operations to improve the segmentation result
        if(!Segmented_image.empty())
        {
            if(ui->Inversion_checkbox->isChecked())
            {
                int num_pix = Segmented_image.rows*Segmented_image.cols;
                uchar* data = Segmented_image.ptr<uchar>(0);
                for(int i = 0; i<num_pix;i++)
                {
                    data[i] = 255-data[i];
                }
            }
            if(ui->Dilation_checkBox->isChecked())//maybe dilation before gaussian
            {
                cv::Mat Seg_copy;
                Segmented_image.copyTo(Seg_copy);
                cv::dilate(Seg_copy,Segmented_image,cv::Mat(),cv::Point(-1,-1),Dilation_iterations,cv::BORDER_CONSTANT,cv::morphologyDefaultBorderValue());
            }
        }
        switch(mode)
        {
        case NO_MODE:
            break;
        case HOLE_DETECTION:
            hole_detected_image = Hole_detection_algo(Segmented_image);
           // imwrite("Hole_detected_Image.png", hole_detected_image );
            break;
        case GROWTH_DETECTION:
            if((Percentage_foreground_clean_net > -1) && (!Segmented_image.empty()))
            {
                // do something
             //   qDebug() << "percentage foreground pixels on clean net: " << Percentage_foreground_clean_net;

                int percentage_growth = Growth_Detection_algo(Percentage_foreground_clean_net,Segmented_image);
                qDebug() << "percentage growth: " << percentage_growth;
            }
            break;
        }

        display_frame(processed_image,Segmented_image,hole_detected_image);
}
Example #18
int main(int argc, char **argv)
{
    int ret;
    AVPacket packet;
    AVFrame *frame = av_frame_alloc();
    AVFrame *filt_frame = av_frame_alloc();
    int got_frame;

    if (!frame || !filt_frame) {
        perror("Could not allocate frame");
        exit(1);
    }
    if (argc != 2) {
        fprintf(stderr, "Usage: %s file\n", argv[0]);
        exit(1);
    }

    avcodec_register_all();
    av_register_all();
    avfilter_register_all();

    if ((ret = open_input_file(argv[1])) < 0)
        goto end;
    if ((ret = init_filters(filter_descr)) < 0)
        goto end;

    /* read all packets */
    while (1) {
        if ((ret = av_read_frame(fmt_ctx, &packet)) < 0)
            break;

        if (packet.stream_index == video_stream_index) {
            avcodec_get_frame_defaults(frame);
            got_frame = 0;
            ret = avcodec_decode_video2(dec_ctx, frame, &got_frame, &packet);
            if (ret < 0) {
                av_log(NULL, AV_LOG_ERROR, "Error decoding video\n");
                break;
            }

            if (got_frame) {
                frame->pts = av_frame_get_best_effort_timestamp(frame);

                /* push the decoded frame into the filtergraph */
                if (av_buffersrc_add_frame_flags(buffersrc_ctx, frame, AV_BUFFERSRC_FLAG_KEEP_REF) < 0) {
                    av_log(NULL, AV_LOG_ERROR, "Error while feeding the filtergraph\n");
                    break;
                }

                /* pull filtered frames from the filtergraph */
                while (1) {
                    ret = av_buffersink_get_frame(buffersink_ctx, filt_frame);
                    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
                        break;
                    if (ret < 0)
                        goto end;
                    display_frame(filt_frame, buffersink_ctx->inputs[0]->time_base);
                    av_frame_unref(filt_frame);
                }
                av_frame_unref(frame);
            }
        }
        av_free_packet(&packet);
    }
end:
    avfilter_graph_free(&filter_graph);
    if (dec_ctx)
        avcodec_close(dec_ctx);
    avformat_close_input(&fmt_ctx);
    av_frame_free(&frame);
    av_frame_free(&filt_frame);

    if (ret < 0 && ret != AVERROR_EOF) {
        char buf[1024];
        av_strerror(ret, buf, sizeof(buf));
        fprintf(stderr, "Error occurred: %s\n", buf);
        exit(1);
    }

    exit(0);
}
Example #19
void* decoding_thread(void* arg)
{
    ffdec_context *ffd_context = (ffdec_context*) arg;
    ffdec_reserved *ffd_reserved = (ffdec_reserved*) ffd_context->reserved;
    AVCodecContext *codec_context = ffd_context->codec_context;

    AVPacket packet;
    int got_frame;

    int decode_buffer_length = 4096;
    uint8_t decode_buffer[decode_buffer_length + FF_INPUT_BUFFER_PADDING_SIZE];
    memset(decode_buffer + decode_buffer_length, 0, FF_INPUT_BUFFER_PADDING_SIZE);

    AVFrame *frame = avcodec_alloc_frame();

    while (ffd_reserved->running)
    {
        if (ffd_reserved->read_callback) packet.size = ffd_reserved->read_callback(ffd_context,
                decode_buffer, decode_buffer_length, ffd_reserved->read_callback_arg);

        if (packet.size <= 0) break;

        packet.data = decode_buffer;

        while (ffd_reserved->running && packet.size > 0)
        {
            // reset the AVPacket
            av_init_packet(&packet);

            got_frame = 0;
            int decode_result = avcodec_decode_video2(codec_context, frame, &got_frame, &packet);

            if (decode_result < 0)
            {
                fprintf(stderr, "Error while decoding video\n");
                ffd_reserved->running = false;
                break;
            }

            if (got_frame)
            {
                if (ffd_reserved->frame_callback) ffd_reserved->frame_callback(
                        ffd_context, frame, ffd_reserved->frame_callback_arg);

                display_frame(ffd_context, frame);
            }

            packet.size -= decode_result;
            packet.data += decode_result;
        }
    }

    if (ffd_reserved->running)
    {
        // reset the AVPacket
        av_init_packet(&packet);
        packet.data = NULL;
        packet.size = 0;

        got_frame = 0;
        avcodec_decode_video2(codec_context, frame, &got_frame, &packet);

        if (got_frame)
        {
            if (ffd_reserved->frame_callback) ffd_reserved->frame_callback(
                    ffd_context, frame, ffd_reserved->frame_callback_arg);

            display_frame(ffd_context, frame);
        }
    }

    av_free(frame);
    frame = NULL;

    if (ffd_reserved->close_callback) ffd_reserved->close_callback(
            ffd_context, ffd_reserved->close_callback_arg);

    return 0;
}