Ejemplo n.º 1
0
Archivo: x11.c Proyecto: 7max/glc
/*
 * X11 event hook: reacts to the capture/reload hotkeys.
 * Only KeyPress events are considered; events carrying the same timestamp
 * as the previously handled one are dropped as duplicates.
 */
void x11_event(Display *dpy, XEvent *event)
{
	if (!event || event->type != KeyPress)
		return;

	/* duplicate delivery of the same key event — ignore */
	if (event->xkey.time == x11.last_event_time)
		return;

	if (x11_match_key(dpy, event, x11.capture_key, x11.capture_key_mask)) {
		/* capture hotkey toggles: stop when running, start otherwise */
		if (lib.flags & LIB_CAPTURING)
			stop_capture();
		else
			start_capture();
	} else if (x11_match_key(dpy, event, x11.reload_key, x11.reload_key_mask)) {
		if (lib.flags & LIB_CAPTURING) {
			/* reload while capturing just stops */
			stop_capture();
		} else {
			/* reload while idle: advance to a fresh stream, then start */
			increment_capture();
			reload_stream();
			start_capture();
		}
	}

	x11.last_event_time = event->xkey.time;
}
Ejemplo n.º 2
0
/// Release every resource owned by the monitor and reset its state.
/// Every freed pointer is nulled afterwards so cleanup() is safe to call twice.
void VideoMonitor::cleanup()
{
	///stop capture thread if it's still running
	if(capture_thread) stop_capture();

	///delete recorder (delete on NULL is a no-op, so no guard needed)
	delete recorder;
	recorder = NULL;

	///delete buffer
	delete[] buffer;
	buffer   = NULL;

	///free input frame — BUG FIX: also null the pointer, matching how
	///recorder/buffer are handled, so a second cleanup() can't double-free it
	if(in_picture)
		av_free(in_picture);
	in_picture = NULL;

	///clear SDL stuff
	SDL_Quit();
	screen  = NULL;
	overlay = NULL;

	///signals
	signal  = SIG_QUIT;
	stop_monitor = true;
}
Ejemplo n.º 3
0
/// Monitor main loop: spawns the capture thread, then repeatedly copies the
/// latest frame out of the recorder, blits it through swscale onto an SDL
/// YUV overlay, and fires the meter-update dispatcher, until stop_monitor
/// is set. On exit it stops capture and releases everything via cleanup().
void VideoMonitor::run()
{
	//if not inited or already running, return
	if(!recorder || stop_monitor || capture_thread) return;

	///start capture_thread
	capture_thread = Glib::Thread::create(sigc::mem_fun(*this, &VideoMonitor::capture), true);

	///monitor thread itself
	Glib::Dispatcher &emit_update_meters = *signal_update_meters;
	const unsigned char *tmp = NULL;
	while(true)
	{
		///check if we are to stop
		///NOTE(review): lock order here is monitor mutex first, then the
		///recorder lock; the monitor lock is released before copying the
		///frame — confirm capture() follows the same order to avoid deadlock.
		Glib::Mutex::Lock lock(mutex); ///< lock monitor
		if(stop_monitor) break;

		recorder->lock(); ///< lock recorder to get a picture and it's info
		///get info
		motion = recorder->getMotion();
		peak   = recorder->getPeak();
		lock.release(); ///< unlock monitor

		///get picture (motion-highlighted or plain video buffer)
		if(highlight_motion) tmp = recorder->getMBuffer();
		else tmp = recorder->getVBuffer();
		memcpy(buffer, tmp, recorder->getVBSize());
		recorder->unlock(); ///< unlock recorder

		///display grabbed image
		if(screen && overlay && sws)
		{
			SDL_LockYUVOverlay(overlay);

			//fill in the picture
			avpicture_fill((AVPicture*)in_picture, buffer, in_fmt,
							width, height);

			//Convert the image into YUV format that SDL uses
			sws_scale(sws, in_picture->data, in_picture->linesize, 0,
					  height, overlay_frame.data, overlay_frame.linesize);

			SDL_UnlockYUVOverlay(overlay);

			//display the overlay
			SDL_DisplayYUVOverlay(overlay, &screen_rect);

			//pool events (or they'll pile up and freez the app O_O)
			while(SDL_PollEvent(&event));
		}
		emit_update_meters();
		///getFrameInterval() is presumably in seconds (scaled to µs here) — TODO confirm
		usleep(recorder->getFrameInterval()*1000000);
	}

	stop_capture();
	cleanup();
}
/*
 * ISR: service for P-path interrupt.
 *
 * Checks the CIPRSTATUS register, marks the buffer that just finished
 * DMA as holding RGB565 data, then either stops capture (CAMIF_CMD_STOP)
 * or applies any pending reconfiguration commands before re-arming.
 * Finally clears the command word and wakes waiters on cmdqueue.
 */
static irqreturn_t on_camif_irq_p(int irq, void * dev)
{
	u32 ciprstatus;

	u32 frame;
	struct s3c2440camif_dev * pdev;

	ciprstatus = ioread32(S3C244X_CIPRSTATUS);
	/* bit 21 presumably flags "P-path frame done"; not ours otherwise — TODO confirm */
	if ((ciprstatus & (1<<21))== 0)
	{
		return IRQ_RETVAL(IRQ_NONE);
	}

	pdev = (struct s3c2440camif_dev *)dev;

	/* valid img_buff[x] just DMAed. */
	/* bits 27:26 hold the index of the NEXT buffer; step back one (mod 4)
	   to get the buffer that was just filled */
	frame = (ciprstatus&(3<<26))>>26;
	frame = (frame+4-1)%4;

		img_buff[frame].state = CAMIF_BUFF_RGB565;

	if (pdev->cmdcode & CAMIF_CMD_STOP)
	{
		stop_capture(pdev);

		pdev->state = CAMIF_STATE_READY;
	}
	else
	{
		/* apply queued reconfiguration requests between frames */
		if (pdev->cmdcode & CAMIF_CMD_P2C)
		{
			camif_c2p(pdev);
		}

		if (pdev->cmdcode & CAMIF_CMD_WND)
		{
			update_target_wnd_regs(pdev);
		}

		if (pdev->cmdcode & CAMIF_CMD_TFMT)
		{
			update_target_fmt_regs(pdev);
		}

		if (pdev->cmdcode & CAMIF_CMD_ZOOM)
		{
			update_target_zoom_regs(pdev);
		}

		invalid_image_buffer();
	}
	/* command consumed; wake any process sleeping on the command queue */
	pdev->cmdcode = CAMIF_CMD_NONE;
	wake_up(&pdev->cmdqueue);

	return IRQ_RETVAL(IRQ_HANDLED);
}
Ejemplo n.º 5
0
  /// Wire every UI control to its handler slot in one place.
  void	connectAll() {
    // refresh of the filter window re-renders the packet list
    CaptureHandler::connect(filters_window, SIGNAL(refresh()),
			    this, SLOT(displayPacketList()));
    // toolbar buttons: filter dialog, file open, capture start/stop
    CaptureHandler::connect(filters, SIGNAL(clicked()), this, SLOT(exec()));
    CaptureHandler::connect(open, SIGNAL(clicked()), this, SLOT(display_file()));
    CaptureHandler::connect(start, SIGNAL(clicked()), this, SLOT(start_capture()));
    CaptureHandler::connect(stop, SIGNAL(clicked()), this, SLOT(stop_capture()));
    // "modify" hands the selected packet to the forge widget
    CaptureHandler::connect(modify, SIGNAL(clicked()),
			    forge, SLOT(setPacketToModify()));
  }
Ejemplo n.º 6
0
// Flip the capture state: start capturing when idle, stop when active.
void BBEyesNode::toggle()
{
    if (_active)
    {
        stop_capture();
        _active = false;
    }
    else
    {
        start_capture();
        _active = true;
    }
}
Ejemplo n.º 7
0
/*
High-level flow of this program:

initcamdev();
initscrdev();

while (!stop) {
	cnfg  = getcnfg();
	data  = getapic(cnfg);

	data1 = decode(data);

	show(data1, pos);
}

closedev();
*/
/* Entry point: open and initialize the camera, run the capture loop,
 * then stop capture and release the device. */
int main()
{
    int camera_fd;

    camera_fd = open_camera_device();
    init_camera_device(camera_fd);

    start_capture(camera_fd);
    mainloop(camera_fd);
    stop_capture(camera_fd);

    close_camera_device(camera_fd);
    return 0;
}
Ejemplo n.º 8
0
/*
** Signal handler: report why we are terminating, close the server socket
** if one is open, stop the capture machinery and exit cleanly.
** NOTE(review): fprintf/exit are not async-signal-safe; confirm that is
** acceptable for this program before relying on this handler.
*/
void	sig_handler(int sig)
{
	if (sig == SIGINT)
		fprintf(stderr, "interrupt\n");
	else if (sig == SIGPIPE)
		fprintf(stderr, "broken pipe\n");
	if (g_data.server > 2)
		close(g_data.server);
	stop_capture();
	fprintf(stderr, "successfully destroyed all components, leaving.\n");
	exit(0);
}
Ejemplo n.º 9
0
/* Thin module-local wrapper that forwards to stop_capture(). */
static void
stop_packet_capture( void ) {
  stop_capture();
}
Ejemplo n.º 10
0
/// Build every QAction used by the menus/toolbars and wire it to its slot.
/// BUG FIX: the standard shortcuts were mis-assigned — "open" carried
/// QKeySequence::New and "save" carried QKeySequence::Open. They now use
/// the matching standard keys (Open / Save).
void MainWindow::createactions()
{
    open_action = new QAction(tr("open"), this);
    open_action->setShortcut(QKeySequence::Open);
    open_action->setStatusTip(tr("open new pcap file"));
    connect(open_action, SIGNAL(triggered()), this, SLOT(open()));

    save_action = new QAction(tr("save"), this);
    save_action->setShortcut(QKeySequence::Save);
    save_action->setStatusTip(tr("save data to disk"));
    connect(save_action, SIGNAL(triggered()), this, SLOT(save()));

    // recent-file slots are created hidden; they become visible once a
    // recent file exists
    for(int i = 0; i < max_recentfiles; ++i)
    {
        recentfiles_actions[i] = new QAction(this);
        recentfiles_actions[i]->setVisible(false);
        connect(recentfiles_actions[i], SIGNAL(triggered()),
                this, SLOT(open_recent()));
    }

    close_action = new QAction(tr("close"), this);
    close_action->setStatusTip(tr("close current file"));
    connect(close_action, SIGNAL(triggered()), this, SLOT(close_file()));

    exit_action = new QAction(tr("quit"), this);
    exit_action->setShortcut(tr("Ctrl+Q"));
    exit_action->setStatusTip(tr("quit program"));
    connect(exit_action, SIGNAL(triggered()), this, SLOT(close()));

    // capture control: start / restart / stop
    start_action = new QAction(tr("start"), this);
    start_action->setStatusTip(tr("start to capture package"));
    connect(start_action, SIGNAL(triggered()),
            this, SLOT(start_capture()));

    restart_action = new QAction(tr("restart"), this);
    restart_action->setStatusTip(tr("restart to capture package"));
    connect(restart_action, SIGNAL(triggered()),
            this, SLOT(restart_capture()));

    stop_action = new QAction(tr("stop"), this);
    stop_action->setStatusTip(tr("stop"));
    connect(stop_action, SIGNAL(triggered()),
            this, SLOT(stop_capture()));

    avalible_action = new QAction(tr("available packages"), this);
    avalible_action->setStatusTip(tr("show packages that program cannot analysis"));
    connect(avalible_action, SIGNAL(triggered()),
            this, SLOT(show_avalible()));

    // help / about entries
    manual_action = new QAction(tr("help"), this);
    manual_action->setStatusTip(tr("help"));
    connect(manual_action, SIGNAL(triggered()),
            this, SLOT(show_manual()));

    about_action = new QAction(tr("about"), this);
    about_action->setStatusTip(tr("about"));
    connect(about_action, SIGNAL(triggered()),
            this, SLOT(about()));

    aboutqt_action = new QAction(tr("about Qt"), this);
    aboutqt_action->setStatusTip(tr("about Qt"));
    connect(aboutqt_action, SIGNAL(triggered()),
            qApp, SLOT(aboutQt()));
}
Ejemplo n.º 11
0
/// Create all menu actions (capture control, settings, file handling and
/// the password-cracking entries) and connect each to its slot.
/// Shortcut assignment is currently disabled (commented out) everywhere.
void MyMenu::createActions()
{
	// capture start/stop
	mstart_start = new QAction(tr("&Start"), this);
//	start_start->setShortcuts(QKeySequence(tr("Ctrl+S")));
	mstart_start->setStatusTip(tr("Start a new capture"));
	connect(mstart_start, SIGNAL(triggered()), this, SLOT(start_capture()));
	
	mstart_stop = new QAction(tr("&Stop"), this);
//	start_stop->setShortcuts(tr("Ctrl+D"));
	mstart_stop->setStatusTip(tr("Stop capturing"));
	connect(mstart_stop, SIGNAL(triggered()), this, SLOT(stop_capture()));

	// capture settings: interface and channel selection
	mset_interface = new QAction(tr("&Interface"), this);
//	start_stop->setShortcuts(tr("Ctrl+D"));
	mset_interface->setStatusTip(tr("Set interface to capture"));
	connect(mset_interface, SIGNAL(triggered()), this, SLOT(set_capture_interface()));	
	
	mset_channels = new QAction(tr("&Channels"), this);
//	start_stop->setShortcuts(tr("Ctrl+D"));
	mset_channels->setStatusTip(tr("Set channels to capture"));
	connect(mset_channels, SIGNAL(triggered()), this, SLOT(set_capture_channels()));	

	// file menu: new capture, open file, quit
	mfile_new = new QAction(tr("&New"), this);
//	start_stop->setShortcuts(tr("Ctrl+D"));
	mfile_new->setStatusTip(tr("Start a new capture"));
	connect(mfile_new, SIGNAL(triggered()), this, SLOT(new_capture()));	
	
	mfile_open = new QAction(tr("&Open"), this);
//	start_stop->setShortcuts(tr("Ctrl+D"));
	mfile_open->setStatusTip(tr("Open a file"));
	connect(mfile_open, SIGNAL(triggered()), this, SLOT(open_capture_file()));	
	
	
	mfile_quit = new QAction(tr("&Quit"), this);
//	start_stop->setShortcuts(tr("Ctrl+D"));
	mfile_quit->setStatusTip(tr("Exit program"));
	connect(mfile_quit, SIGNAL(triggered()), this, SLOT(quit()));	


	//crack
	mcrack_load_dict = new QAction(tr("&Load dictionary"), this);
//	start_stop->setShortcuts(tr("Ctrl+D"));
	mcrack_load_dict->setStatusTip(tr("Loads a file that has a wordlist"));
	connect(mcrack_load_dict, SIGNAL(triggered()), this, SLOT(load_dictionary()));	


	mcrack_set_target = new QAction(tr("&Set target"), this);
//	start_stop->setShortcuts(tr("Ctrl+D"));
	mcrack_set_target->setStatusTip(tr("You must capture at least first two messages form a handshake"));
	connect(mcrack_set_target, SIGNAL(triggered()), this, SLOT(set_target()));	
	
	//mcrack_start

	mcrack_start = new QAction(tr("&Start"), this);
//	start_stop->setShortcuts(tr("Ctrl+D"));
	mcrack_start->setStatusTip(tr("Try find the password"));
	connect(mcrack_start, SIGNAL(triggered()), this, SLOT(start_crack()));	

/*	
	mcrack_stop = new QAction(tr("&Stop"), this);
//	start_stop->setShortcuts(tr("Ctrl+D"));
	mcrack_stop->setStatusTip(tr("Stop finding the password"));
	connect(mcrack_stop, SIGNAL(triggered()), this, SLOT(stop_crack()));	
*/			
				
	
}
Ejemplo n.º 12
0
/// Capture loop with super-resolution + face recognition.
/// Grabs frames from the video device, downscales each by `scale` to build a
/// batch of `image_count` degraded images, runs the BTV super-resolution
/// filter over the batch, then detects and recognizes faces in the restored
/// image. ESC (key code 27) stops the loop; thread interruption also stops
/// capture before rethrowing.
void SRClassifier::run_capture(int32_t scale) {
	// code of the key pressed in the highgui window
	char ch = 0;
	// sequence number of the snapshot currently being collected
	int32_t index = 0;

	// create a frame image of the required size
	frame = cvCreateImage(cvSize(video_input.getWidth(device_id), video_input.getHeight(device_id)), IPL_DEPTH_8U, 3);


	/* Begin setup for capture and super-resolution */
	uint32_t image_count = 5;

	std::vector<cv::Mat> degrade_images; degrade_images.resize(image_count);
	std::vector<cv::SparseMat> DHF; DHF.resize(image_count);

	// image after the super-resolution filter has been applied
	cv::Mat dest = cv::Mat(cvSize(video_input.getWidth(device_id), video_input.getHeight(device_id)), CV_8UC3);
	// the original (ideal) image
	cv::Mat ideal = cv::Mat(cvSize(video_input.getWidth(device_id), video_input.getHeight(device_id)), CV_8UC3);
	// LR = HR / 2
	cv::Mat temp_1 = cv::Mat(cvSize(video_input.getWidth(device_id) / (scale / 2), video_input.getHeight(device_id) / (scale / 2)), CV_8UC3);
	// LR = HR / 4
	cv::Mat temp_2 = cv::Mat(cvSize(video_input.getWidth(device_id) / scale, video_input.getHeight(device_id) / scale), CV_8UC3);

	// number of SR algorithm iterations per image batch
	uint32_t number_of_iteration = 5; // 5

	// step size (asymptotic value) of the steepest-descent method
	float beta = 1.3f; // 0.6 = 24.1dB default = 1.3f

	// weight balancing data fidelity against smoothing;
	// regularization coefficient — increasing it smooths sharp edges
	float lambda = 0.03f; // default = 0.03f

	// spatial distribution parameter of BTV;
	// scalar weight adding a spatially decaying effect to the sum of
	// regularization terms
	float alpha = 0.7f; // default = 0.7f
	/* End setup for capture and super-resolution */



	/* Begin setup for recognition and the classifier */
	try {
		read_csv();
	}
	catch (cv::Exception& e) {
		std::cerr << "Error opening file \"" << csv_path << "\". Reason: " << e.msg << std::endl;
		exit(1);
	}

	int im_width = images[0].cols;
	int im_height = images[0].rows;

	// Create a FaceRecognizer and train it on the given images:
	cv::Ptr<cv::face::FaceRecognizer> model = cv::face::createEigenFaceRecognizer();
	model->train(images, labels);
	// That's it for learning the Face Recognition model. You now
	// need to create the classifier for the task of Face Detection.
	// We are going to use the haar cascade you have specified in the
	// command line arguments:
	//
	cv::CascadeClassifier haar_cascade;
	haar_cascade.load(haar_path);
	/* End setup for recognition and the classifier */

	try {
		while (true) {
			if (video_input.isFrameNew(device_id)) {
				video_input.getPixels(device_id, (unsigned char *)frame->imageData, false, true); 

				// a full batch has been collected — run SR + recognition
				if ((index > 0) && ((index % image_count) == 0))
				{
					index = 0;

					btv_sr->run_filter(degrade_images,
									   dest,
									   DHF,
									   image_count,
									   number_of_iteration,
									   beta,
									   lambda,
									   alpha,
									   cv::Size(7, 7),
									   NS_SuperResolution::SR_DATA_L1);

					// Clone the current frame:
					cv::Mat original = dest.clone();
					// Convert the current frame to grayscale:
					cv::Mat gray;
					cvtColor(original, gray, CV_BGR2GRAY);
					// Find the faces in the frame:
					std::vector< cv::Rect_<int32_t> > faces;
					haar_cascade.detectMultiScale(gray, faces);

					// At this point you have the position of the faces in
					// faces. Now we'll get the faces, make a prediction and
					// annotate it in the video. Cool or what?
					for (int32_t i = 0; i < faces.size(); i++) {
						// Process face by face:
						cv::Rect face_i = faces[i];
						// Crop the face from the image. So simple with OpenCV C++:
						cv::Mat face = gray(face_i);
						// Resizing the face is necessary for Eigenfaces and Fisherfaces. You can easily
						// verify this, by reading through the face recognition tutorial coming with OpenCV.
						// Resizing IS NOT NEEDED for Local Binary Patterns Histograms, so preparing the
						// input data really depends on the algorithm used.
						//
						// I strongly encourage you to play around with the algorithms. See which work best
						// in your scenario, LBPH should always be a contender for robust face recognition.
						//
						// Since I am showing the Fisherfaces algorithm here, I also show how to resize the
						// face you have just found:
						cv::Mat face_resized;
						cv::resize(face, face_resized, cv::Size(im_width, im_height), 1.0, 1.0, cv::INTER_CUBIC);
						// Now perform the prediction, see how easy that is:
						int prediction = model->predict(face_resized);
						// And finally write all we've found out to the original image!
						// First of all draw a green rectangle around the detected face:
						rectangle(original, face_i, CV_RGB(0, 255, 0), 1);
						// Create the text we will annotate the box with:
						std::string box_text = cv::format("Prediction = %d", prediction);
						// Calculate the position for annotated text (make sure we don't
						// put illegal values in there):
						int32_t pos_x = (std::max)(face_i.tl().x - 10, 0);
						int32_t pos_y = (std::max)(face_i.tl().y - 10, 0);
						// And now put it into the image:
						putText(original, box_text, cv::Point(pos_x, pos_y), cv::FONT_HERSHEY_PLAIN, 1.0, CV_RGB(0, 255, 0), 2.0);
					}

					// Show the result:
					sr_frame = cvCloneImage(&(IplImage)original);
					cvShowImage("SuperResolution", sr_frame);
				}
				else {
					// still collecting: remember the ideal frame, then degrade
					if (index == 0)
						ideal = cv::cvarrToMat(frame);

					cv::pyrDown(cv::cvarrToMat(frame), temp_1, cv::Size(temp_1.cols, temp_1.rows));
					cv::pyrDown(temp_1, temp_2, cv::Size(temp_2.cols, temp_2.rows));

					// show the image that super-resolution will be applied to
					cvShowImage(window_name.c_str(), cvCloneImage(&(IplImage)temp_2));

					degrade_images[index] = temp_2;
					DHF[index] = degrade_images[index];

					index++;
				}
			}

			ch = cvWaitKey(33);
			// If the ESC key was pressed, stop execution
			if (ch == 27) {
				stop_capture();
				break;
			}
		}
	}
	catch (boost::thread_interrupted) {
		stop_capture();
		throw boost::thread_interrupted();
	}
}
Ejemplo n.º 13
0
/* Stop streaming on the global capture context and release its device. */
void stopCapture(void) {
  stop_capture(cntx);
  uninit_device(cntx);
}
Ejemplo n.º 14
0
/*
 * Firmware main poll routine (FX2/8051):
 *  1. on a completed cmd_cfg_count EP0 transfer, resets the FIFOs, pulses
 *     PROG_B to put the FPGA into configuration mode and arms a GPIF write
 *     of the bitstream (cfg_enable);
 *  2. when the GPIF is idle and the FPGA is ready, streams the bitstream
 *     from EP2 and reports configuration status on IOA;
 *  3. dispatches host commands (start/stop capture, FPGA settings, DSO
 *     control) and pushes pending setting/control data out.
 */
void DSLogic_Poll(void)
{
  if (cfg_init && EP0BCL == sizeof(struct cmd_cfg_count))
  {
    cfg_init = FALSE;
    init_config_intf();   
    FIFORESET = 0x80;  // set NAKALL bit to NAK all transfers from host
    SYNCDELAY;
    FIFORESET = 0x02;  // reset EP2 FIFO
    SYNCDELAY;
    FIFORESET = 0x06;  // reset EP6 FIFO
    SYNCDELAY;
    FIFORESET = 0x00;  // clear NAKALL bit to resume normal operation
    SYNCDELAY;	 

    EP2FIFOCFG = 0x00; // allow core to see zero to one transition of auto out bit
    SYNCDELAY;
    EP2FIFOCFG = 0x10; // auto out mode, disable PKTEND zero length send, word ops
    SYNCDELAY;
    EP6FIFOCFG = 0x08; // auto in mode, disable PKTEND zero length send, word ops
    SYNCDELAY;   	 

    GPIFIDLECTL &= 0xFB;	//PROG_B signal low
    EZUSB_Delay1ms();		//PROG_B signal kept asserted for 1 ms
    GPIFIDLECTL |= 0x04;	//PROG_B signal high
    SYNCDELAY;

    // setup transaction count (24-bit bitstream length from the EP0 command)
    GPIFTCB0 = ((const struct cmd_cfg_count *)EP0BUF)->byte0;   
    SYNCDELAY;
    GPIFTCB1 = ((const struct cmd_cfg_count *)EP0BUF)->byte1;            		
    SYNCDELAY;
    GPIFTCB2 = ((const struct cmd_cfg_count *)EP0BUF)->byte2;
    SYNCDELAY;

    cfg_enable = TRUE;			
  }  
  
  if (cfg_enable && (GPIFTRIG & 0x80)) 		// if GPIF interface IDLE
  {        		
    if ( (EP24FIFOFLGS & 0x01) && (GPIFREADYSTAT & 0x01)) {
      // if there's a packet in the peripheral domain for EP2
      // and FPGA is ready to receive the configuration bitstream
      IFCONFIG = 0xA6;
      // 7	IFCLKSRC=1   , FIFOs executes on internal clk source
      // 6	xMHz=0       , 30MHz internal clk rate
      // 5	IFCLKOE=1    , Drive IFCLK pin signal at 30MHz
      // 4	IFCLKPOL=0   , Don't invert IFCLK pin signal from internal clk
      // 3	ASYNC=0      , master samples asynchronous
      // 2	GSTATE=1     , Drive GPIF states out on PORTE[2:0], debug WF
      // 1:0	IFCFG=10, FX2 in GPIF master mode
      SYNCDELAY;

      //delay(1);				//avoid CSI_B deasserted during sync words
      GPIFTRIG = GPIFTRIGWR | GPIF_EP2;  	// launch GPIF FIFO WRITE Transaction from EP2 FIFO
      SYNCDELAY;

      while( !( GPIFTRIG & 0x80 ) );      	// poll GPIFTRIG.7 GPIF Done bit
      SYNCDELAY;
      cfg_enable= FALSE;                 	//end of configuration

      /* Put the FX2 into GPIF master mode and setup the GPIF. */
      //init_capture_intf();

      if (GPIFREADYSTAT & 0x02) {	// FPGA Configure Done
        IOA |= 0x01;
        IOA &= 0xf5;
        EZUSB_Delay1ms();
        IOA |= 0x08;
      } else {
        IOA &= 0xfc;
      }
    }	
  }

  // dispatch the pending host command, if any
  switch(command)
  {
    case CMD_START:
	{
      if ((EP0CS & bmEPBUSY) != 0)
        break;
      if (EP0BCL == sizeof(struct cmd_start))
	  {
        // first byte carries the stop flag: stop vs. start capture
        if ((*(BYTE *)EP0BUF) & CMD_START_FLAGS_STOP)
          stop_capture();
        else
          start_capture();
      }
      command = 0;
      break;	
	}

	case CMD_SETTING:
    {
	  if ((EP0CS & bmEPBUSY) != 0)
        break;
 	  if (EP0BCL == sizeof(struct cmd_setting_count))
	  {
	    GPIFABORT = 0xff;
		SYNCDELAY;
		EP2FIFOCFG = 0x11; // auto out mode, disable PKTEND zero length send, word operation
		SYNCDELAY;
        setting_count_b0 = ((const struct cmd_setting_count *)EP0BUF)->byte0;
        setting_count_b1 = ((const struct cmd_setting_count *)EP0BUF)->byte1;
        setting_count_b2 = ((const struct cmd_setting_count *)EP0BUF)->byte2;
        set_enable = TRUE;
	  }
	  command = 0;
	  break;
    }

    case CMD_CONTROL:
    {
      if ((EP0CS & bmEPBUSY) != 0)
        break;
      if (EP0BCL == sizeof(struct cmd_control))
	  {
        dsoConfig[0] = ((const struct cmd_control *)EP0BUF)->byte0;
        dsoConfig[1] = ((const struct cmd_control *)EP0BUF)->byte1;
        dsoConfig[2] = ((const struct cmd_control *)EP0BUF)->byte2;
        dsoConfig[3] = ((const struct cmd_control *)EP0BUF)->byte3;
        set_dso_ctrl = TRUE;
      }
	  command = 0;
	  break;
    }

	default:
	  command = 0;
	  break;
  }

  if (set_enable && (GPIFTRIG & 0x80)) {	// if GPIF interface IDLE
    if (!(EP24FIFOFLGS & 0x02)) {
      SYNCDELAY;
      GPIFTCB2 = setting_count_b2;   
      SYNCDELAY;
      GPIFTCB1 = setting_count_b1;			// fpga setting count
      SYNCDELAY;
      GPIFTCB0 = setting_count_b0;
      SYNCDELAY;

      GPIFTRIG = GPIFTRIGWR | GPIF_EP2;  	// launch GPIF FIFO WRITE Transaction from EP2 FIFO
      SYNCDELAY;

      while( !( GPIFTRIG & 0x80 ) );      	// poll GPIFTRIG.7 GPIF Done bit
      SYNCDELAY;
      set_enable= FALSE;                 	//end of configuration

      /* Put the FX2 into GPIF master mode and setup the GPIF. */
      init_capture_intf();
    }	
  }

  // flush pending DSO control bytes to the I2C device at address 0x51
  if (set_dso_ctrl) {
    EZUSB_WriteI2C(0x51, 4, dsoConfig);
    set_dso_ctrl = FALSE;
  }

  poll_intf();  
}
Ejemplo n.º 15
0
/* Entry point: capture `stop` frames (or run forever when stop == 0) from
 * /dev/video1, scale and mux/encode each one, then tear everything down. */
int main(void) {
  open_log(NULL);
  
  ctx *context = init_ctx();
  
  strcpy(context->dev_name, "/dev/video1");
  
  open_device(context);
  
  //printf("%d %d\n", context->d_framerate.num, context->d_framerate.den);
  
  //context->c_top = 0;
  //context->c_left = 0;
  //context->c_width = 960;
  //context->c_height = 540;
  //
  //context->f_width = 960;
  //context->f_height = 540;
  
  //context->framerate.num = 1;
  //context->framerate.den = 7;
  
  init_device(context);
  
  //printf("%d %d\n", context->framerate.num, context->framerate.den);
  
  context->stop = 60;
  /* BUG FIX: `infinite` was declared uninitialized and only assigned when
   * stop == 0, so the loop condition below read an indeterminate value
   * (undefined behavior) whenever a frame limit was set. */
  bool infinite = false;
  if (context->stop == 0) {
    infinite = true;
  }
  
  conv_ctx *conv_context = conv_ctx_init(context);
  
  //strcpy(conv_context->filename, "test.ts");
  strcpy(conv_context->format, "avi");
  
  start_capture(context);

  //rgb24_init(context, conv_context);
  
  scaler_init(context, conv_context);
  mux_encoder_init(context, conv_context);

  /* bitwise OR is intentional: stop-- must be evaluated every iteration */
  while (context->stop-- | infinite) {
    for (;;) {
      capture_timeout(context);
      
      /* read frame and go on */
      if (read_frame(context)) {
        //rgb24(context, conv_context);
        scale(context, conv_context);
        mux_encode(context, conv_context);
        break;
      }
    }
  }

  write_cached(context, conv_context);

  //rgb24_uninit(context, conv_context);

  scaler_uninit(context, conv_context);
  mux_encoder_uninit(context, conv_context);

  conv_ctx_uninit(conv_context);

  stop_capture(context);
  uninit_device(context);
  
  close_device(context);
  
  close_log();

  return EXIT_SUCCESS;
}
Ejemplo n.º 16
0
/// Offline super-resolution test loop: pulls frames from `capture_frame`,
/// collects batches of `image_count` snapshots (saved to ./snapshots/),
/// runs the BTV super-resolution filter on each full batch, rewinds the
/// video and repeats for up to 6 test steps. ESC stops early; thread
/// interruption stops capture before rethrowing. Always returns 0.
int32_t VideoCapture::run_capture() {
	// code of the key pressed in the highgui window
	char ch = 0;
	// timer that paces snapshot taking
	boost::timer snapshot_timer;
	snapshot_timer.restart();

	char snapshot_name[80];

	// cvNamedWindow(window_name.c_str(), CV_WINDOW_AUTOSIZE);

	try {

		// sequence number of the snapshot currently being saved
		uint32_t index = 0;
		// number of frames the algorithm needs per batch
		uint32_t image_count = 10;
		// number of iterations of the sr_btv algorithm
		uint32_t number_of_iteration = 60; // 180
		// step size of the steepest-descent method
		float beta = 1.3f; // 1.3f
		// regularization coefficient; increasing it smooths sharp edges
		// (before noise is removed)
		float lambda = 0.03f; // 0.03f
		// scalar weight adding a spatially decaying effect to the sum of
		// regularization terms
		float alpha = 0.7f; // 0.7f

		uint32_t test_step = 0;

		std::vector<cv::Mat> degrade_images; degrade_images.resize(image_count);
		std::vector<cv::SparseMat> DHF; DHF.resize(image_count);
		cv::Mat dest = cv::Mat(cvSize(640, 480), CV_8UC3);
		cv::Mat ideal = cv::Mat(cvSize(640, 480), CV_8UC3);

		while (true) {
			frame = cvQueryFrame(capture_frame);
			// end of the video stream
			if (!frame) {
				stop_capture();
				break;
			}

			// limit the experiment to 6 passes over the video
			if (test_step > 5) {
				stop_capture();
				break;
			}

			// a full batch has been collected — run super-resolution
			if ((index > 0) && ((index % image_count) == 0))
			{
				index = 0;
				btv_sr->bilateral_total_variation_sr(degrade_images,
													 dest,
													 DHF,
													 image_count,
													 number_of_iteration,
													 beta,
													 lambda,
													 alpha,
													 cv::Size(7, 7),
													 NS_SuperResolution::SR_DATA_L1,
													 ideal,
													 test_step);
				test_step++;
				// rewind the video to its start for the next pass
				cvSetCaptureProperty(capture_frame, CV_CAP_PROP_POS_AVI_RATIO, 0);
				snapshot_timer.restart();
			}
			else {
				// take a snapshot every snapshot_delay milliseconds
				if (snapshot_timer.elapsed() > (snapshot_delay / 1000.0)) {
					snapshot_timer.restart();

					if (index == 0)
						ideal = cv::cvarrToMat(frame);

					degrade_images[index] = cv::cvarrToMat(frame);
					DHF[index] = cv::cvarrToMat(frame);

					sprintf(snapshot_name, ".//snapshots//Image%d.jpg", index);
					cvSaveImage(snapshot_name, frame);
					index++;
				}
			}

			// cvShowImage(window_name.c_str(), frame);
		    ch = cvWaitKey(33);
			// If the ESC key was pressed, stop execution
			if (ch == 27) {
			 	stop_capture();
				break;
			 }
		}
	}
	catch (boost::thread_interrupted) {
		stop_capture();
		throw boost::thread_interrupted();
	}

	return 0;
}
/******************************************************************************
 * Example to show vid1 in YUV format,OSD0 in RGB565 format
 *  and OSD1 is attribute format.
 *
 * Detects the current capture standard, configures the display output and
 * mode to match (NTSC/PAL/720P/1080I), runs the capture->display loop,
 * then stops and releases both the display and capture channels.
 * Returns 0 on success, FAILURE (or a negative errno-style code) otherwise.
 ******************************************************************************/
static int vpbe_UE_1(void)
{
	int ret = 0;
	v4l2_std_id cur_std;

	DBGENTER;

	/* Setup Capture */
	if (initialize_capture(&cur_std) < 0) {
		printf("Failed to intialize capture\n");
		/* BUG FIX: this path previously returned ret (still 0 = success)
		 * even though capture initialization failed */
		return FAILURE;
	}

	/* Setup Display: pick interface/mode and frame size per standard */
	if (cur_std & V4L2_STD_NTSC) {
		if (change_sysfs_attrib(ATTRIB_OUTPUT, DISPLAY_INTERFACE_COMPOSITE))
			return FAILURE;
		if (change_sysfs_attrib(ATTRIB_MODE, DISPLAY_MODE_NTSC))
			return FAILURE;
		file_size = WIDTH_NTSC * HEIGHT_NTSC * 2;
	}
	else if (cur_std & V4L2_STD_PAL) {
		if (change_sysfs_attrib(ATTRIB_OUTPUT, DISPLAY_INTERFACE_COMPOSITE))
			return FAILURE;
		if (change_sysfs_attrib(ATTRIB_MODE, DISPLAY_MODE_PAL))
			return FAILURE;
		file_size = WIDTH_PAL * HEIGHT_PAL * 2;
	}
	else if (cur_std & V4L2_STD_720P_60) {
		if (change_sysfs_attrib(ATTRIB_OUTPUT, DISPLAY_INTERFACE_COMPONENT))
			return FAILURE;
		if (change_sysfs_attrib(ATTRIB_MODE, DISPLAY_MODE_720P))
			return FAILURE;
		file_size = WIDTH_720P * HEIGHT_720P * 2;
	}
	else if (cur_std & V4L2_STD_1080I_60) {
		if (change_sysfs_attrib(ATTRIB_OUTPUT, DISPLAY_INTERFACE_COMPONENT))
			return FAILURE;
		if (change_sysfs_attrib(ATTRIB_MODE, DISPLAY_MODE_1080I))
			return FAILURE;
		file_size = WIDTH_1080I * HEIGHT_1080I * 2;
	} else {
		printf("Cannot display this standard\n");
		return FAILURE;
	}

	/* Setup VID1 output */
	if ((init_vid1_device(cur_std)) < 0) {
		printf("\nFailed to init vid1 window ");
		return FAILURE;
	}
	
	ret = start_loop();
	/* BUG FIX: the success message used to print unconditionally,
	 * immediately after the error message */
	if (ret)
		printf("\tError: Video loopback had some errors\n");
	else
		printf("Video loopback completed successfully\n");

	/*
	 * Once the streaming is done  stop the display
	 * hardware
	 */
	printf(" Test STREAM_OFF - \n");
	ret = stop_display(fd_vid1);
	if (ret < 0) {
		printf("\tError: Could not stop display\n");
		return ret;
	}

	/* Release display channel */
	printf(" Test buffer unmapping & closing of device - \n");
	release_display(&fd_vid1);

	ret = stop_capture(fdCapture);
	if (ret < 0)
		printf("Error in VIDIOC_STREAMOFF:capture\n");

	release_capture(&fdCapture);
	close(fd_osd0);
	printf("DONE ALL\n\n\n");

	DBGEXIT;
	return ret;
}
Ejemplo n.º 18
0
/// Destructor: stop the monitor loop, halt capture, then free all resources.
VideoMonitor::~VideoMonitor()
{
	stop();
	stop_capture();
	cleanup();
}
Ejemplo n.º 19
0
/// Destructor: make sure any in-progress capture is stopped.
CameraController::~CameraController()
{
    stop_capture();
}
Ejemplo n.º 20
0
/*
 * ISR: service for C-path interrupt.
 */
/*
 * ISR: service for C-path interrupt.
 *
 * Reads CICOSTATUS, marks the buffer that just finished DMA as holding
 * YCbCr422 data and records it as the last captured frame, then either
 * stops capture (CAMIF_CMD_STOP) or applies any pending reconfiguration
 * commands. Finally clears the command word and wakes waiters on cmdqueue.
 */
static irqreturn_t on_camif_irq_c(int irq, void * dev)
{
	u32 cicostatus;
	u32 frame;
  int oflow_y,oflow_cb,oflow_cr;
	s3c2440camif_dev * pdev;
  

	cicostatus = ioread32(S3C244X_CICOSTATUS);
	/* bit 21 presumably flags "C-path frame done"; not ours otherwise — TODO confirm */
	if ((cicostatus & (1<<21))== 0)
	{
		return IRQ_RETVAL(IRQ_NONE);
	}
  

	pdev = (s3c2440camif_dev *)dev;

	/* valid img_buff[x] just DMAed. */
	frame = (cicostatus&(3<<26))>>26;
	//frame = (frame+4-1)%4;
  
  /* per-channel FIFO overflow flags (read but currently unused) */
  oflow_y=(cicostatus&(1<<31))>>31;
  oflow_cb=(cicostatus&(1<<30))>>30; 
  oflow_cr=(cicostatus&(1<<29))>>29; 
  
  //img_buff[frame].state = CAMIF_BUFF_YCbCr420;
  pdev->frame[frame].state = CAMIF_BUFF_YCbCr422;
	
  //printk(KERN_ALERT"%d %d %d %d\n",img_buff[0].state,img_buff[1].state,img_buff[2].state,img_buff[3].state);

  pdev->last_frame=frame;
  //printk(KERN_ALERT"on_camif_irq_c %d %d %d %d\n",frame,oflow_y,oflow_cb,oflow_cr);

	if (pdev->cmdcode & CAMIF_CMD_STOP)
	{
		stop_capture(pdev);

		pdev->state = CAMIF_STATE_READY;
	}
	else
	{
		/* apply queued reconfiguration requests between frames */
		if (pdev->cmdcode & CAMIF_CMD_C2P)
		{
			camif_c2p(pdev);
		}

		if (pdev->cmdcode & CAMIF_CMD_WND)
		{
			update_target_wnd_regs(pdev);
		}

		if (pdev->cmdcode & CAMIF_CMD_TFMT)
		{
			update_target_fmt_regs(pdev);
		}

		if (pdev->cmdcode & CAMIF_CMD_ZOOM)
		{
			update_target_zoom_regs(pdev);
		}

		//invalid_image_buffer();
	}
	/* command consumed; wake any process sleeping on the command queue */
	pdev->cmdcode = CAMIF_CMD_NONE;
	wake_up(&pdev->cmdqueue);
	
  //wake_up(&pdev->cap_queue);


	return IRQ_RETVAL(IRQ_HANDLED);
}
Ejemplo n.º 21
0
/*
 * Allocate and fully initialize a liveStream capture context:
 * opens input `in` with `cfg`, derives the frame rate from the input
 * stream (falling back to 30 fps), sets up filters and the encoder for
 * output `out`, and allocates the output frame.
 *
 * Returns the opaque context pointer, or NULL on failure. On early input
 * failure the context is freed directly; once filters/encoder setup has
 * begun, cleanup is delegated to stop_capture() — presumably it releases
 * everything allocated so far, including ctx itself (TODO confirm).
 */
EXPORT void *init_capture(const char *in, const char *out, struct inputCfg *cfg)
{

	int ret = 0;
	// structure with ffmpeg variables
	struct liveStream *ctx = NULL;
	AVStream *stream = NULL;

	// allocation of Live Stream structure
	ctx = malloc(sizeof(struct liveStream));
	if(ctx == NULL)
	{
		fprintf(stderr,"Error in liveStream struct alloc\n");
		return NULL;
	}
	memset(ctx, 0, sizeof(*ctx));

	init_ffmpeg();

	ret = configure_input(ctx, in, cfg);
	if(ret < 0)
	{
		av_log(NULL,AV_LOG_ERROR,"unable to configure input\n");
		free(ctx);
		return  NULL;
	}

	stream = ctx->inputs[0].st;
	/** Initalize framerate coming from webcam */
	if(stream->avg_frame_rate.num && stream->avg_frame_rate.den)
	{
		ctx->video_avg_frame_rate.num = stream->avg_frame_rate.num;
		ctx->video_avg_frame_rate.den = stream->avg_frame_rate.den;
	}
	else if(stream->r_frame_rate.num && stream->r_frame_rate.den )
	{
		ctx->video_avg_frame_rate.num = stream->r_frame_rate.num;
		ctx->video_avg_frame_rate.den = stream->r_frame_rate.den;
	}
	else
	{
		// neither average nor real frame rate known: assume 30/1
		fprintf(stderr, "Unable to take out fps from webcam assuming 30fps\n");
		ctx->video_avg_frame_rate.num = 30;
		ctx->video_avg_frame_rate.den = 1;

	}
	ctx->have_filter = 1;

	ret = init_filters(ctx);
	if(ret < 0)
	{
		fprintf(stderr,"unable to initialize filter\n");
		goto end;
	}

	ret = init_encoder(ctx, out);
	if(ret < 0)
	{
		printf("Error in encoder init for %s\n", out);
		ret =-1;
		goto end;
	}

	ctx->OutFrame = av_frame_alloc();
end:
	// on any failure past input setup, tear down via stop_capture()
	if(ret < 0)
	{
		stop_capture((void*)ctx);
		return NULL;
	}
	return ctx;
}
Ejemplo n.º 22
0
/*
 * ISR: service for P-path interrupt.
 */
/*
 * ISR: service for P-path interrupt (per-device frame-buffer variant).
 *
 * Reads CIPRSTATUS, computes the buffer that just finished DMA, marks it
 * as RGB565 and records it as the last captured frame, then either stops
 * capture (CAMIF_CMD_STOP) or applies any pending reconfiguration
 * commands. Finally clears the command word and wakes waiters on cmdqueue.
 */
static irqreturn_t on_camif_irq_p(int irq, void * dev)
{
	u32 ciprstatus;

	u32 frame;
	s3c2440camif_dev * pdev;

  //printk(KERN_ALERT"on_camif_irq_p\n");
	ciprstatus = ioread32(S3C244X_CIPRSTATUS);
  
	/* bit 21 presumably flags "P-path frame done"; not ours otherwise — TODO confirm */
	if ((ciprstatus & (1<<21))== 0)
	{
		return IRQ_RETVAL(IRQ_NONE);
	}

	pdev = (s3c2440camif_dev *)dev;

	/* valid img_buff[x] just DMAed. */
	/* bits 27:26 hold the index of the NEXT buffer; step back one (mod 4) */
	frame = (ciprstatus&(3<<26))>>26;
	frame = (frame+4-1)%4;
  
  //printk(KERN_ALERT"on_camif_irq_p %d\n",frame);
  
  pdev->last_frame=frame;

  pdev->frame[frame].state = CAMIF_BUFF_RGB565;

	if (pdev->cmdcode & CAMIF_CMD_STOP)
	{
		stop_capture(pdev);

		pdev->state = CAMIF_STATE_READY;
	}
	else
	{
		/* apply queued reconfiguration requests between frames */
		if (pdev->cmdcode & CAMIF_CMD_P2C)
		{
			camif_c2p(pdev);
		}

		if (pdev->cmdcode & CAMIF_CMD_WND)
		{
			update_target_wnd_regs(pdev);
		}

		if (pdev->cmdcode & CAMIF_CMD_TFMT)
		{
			update_target_fmt_regs(pdev);
		}

		if (pdev->cmdcode & CAMIF_CMD_ZOOM)
		{
			update_target_zoom_regs(pdev);
		}
		//TODO signal that all frames are invalid
	}
	/* command consumed; wake any process sleeping on the command queue */
	pdev->cmdcode = CAMIF_CMD_NONE;
	wake_up(&pdev->cmdqueue);
  //wake_up(&pdev->cap_queue);

	return IRQ_RETVAL(IRQ_HANDLED);
}