Example #1
0
// Entry point for the markerless AR demo (no pattern-image variant).
// argc == 1: capture from the default camera; argc == 2: treat argv[1] as
// an image file first, then fall back to opening it as a video source.
// Returns 0 on success, 1 on bad arguments, 2 if the input cannot be opened.
int main(int argc, const char * argv[])
{
    // Change this calibration to yours:
    CameraCalibration calibration(545.31565719766058f, 545.31565719766058f, 326.0f, 183.5f);

    if (argc == 1)
    {
        // No arguments: stream from the default camera.
        processVideo(calibration, cv::VideoCapture());
    }
    else if (argc == 2)
    {
        std::string input = argv[1];

        // Try the argument as a still image first.
        cv::Mat testImage = cv::imread(input);
        if (!testImage.empty())
        {
            processSingleImage(calibration, testImage);
        }
        else
        {
            // Not an image: try it as a video file / stream URL.
            cv::VideoCapture cap;
            if (cap.open(input))
            {
                processVideo(calibration, cap);
            }
            else
            {
                // Previously this failure was silent; report it.
                std::cerr << "Could not open '" << input << "' as an image or video" << std::endl;
                return 2;
            }
        }
    }
    else
    {
        // Usage text belongs in the error path, not on every run.
        std::cerr << "Invalid number of arguments passed" << std::endl;
        std::cout << "Usage: markerless_ar_demo  [filepath to recorded video or image]" << std::endl;
        return 1;
    }

    return 0;
}
Example #2
0
// Dispatch an incoming request string to the matching handler.
// The request is split on utils::separator; the first token names the
// operation, the remaining tokens are forwarded to the handler.
// Unknown operations get utils::ERROR_MESSAGE sent back on the socket;
// an empty request is silently ignored.
void Request::executeRequest(const std::string& request) {
	if (request.empty())
		return;

	utils::StringUtils util;
	std::vector<std::string> tokens = util.split(request, utils::separator);
	const std::string& command = tokens.front();

	if (command == utils::NEW_VISION_AREA) {
		createVisionArea(tokens);
	} else if (command == utils::MODIFY_VISION_AREA) {
		modifyVisionArea(tokens);
	} else if (command == utils::DELETE_VISION_AREA) {
		deleteVisionArea(tokens);
	} else if (command == utils::NEW_PRODUCT) {
		createProduct(tokens);
	} else if (command == utils::MODIFY_PRODUCT) {
		modifyProduct(tokens);
	} else if (command == utils::DELETE_PRODUCT) {
		deleteProduct(tokens);
	} else if (command == utils::LIST_AREA) {
		listAreas();
	} else if (command == utils::LIST_PRODUCT) {
		listProducts();
	} else if (command == utils::PRODUCT_IMAGE) {
		listProductImages(tokens);
	} else if (command == utils::STOCK_GENERAL) {
		listStockGeneral(tokens);
	} else if (command == utils::STOCK_AREA) {
		listStockArea(tokens);
	} else if (command == utils::STOCK_HISTORIC) {
		listStockHistoric(tokens);
	} else if (command == utils::PROCESS_FILE) {
		processFile(tokens);
	} else if (command == utils::PROCESS_VIDEO) {
		processVideo(tokens);
	} else if (command == utils::QUIT) {
		this->stop();
	} else if (command == utils::DETAILS_AREA) {
		getAreas();
	} else if (command == utils::DETAILS_PRODUCT) {
		detallesProductos();
	} else if (command == utils::IMAGES) {
		imagenesDeProducto(tokens);
	} else {
		// Unrecognized command: tell the client instead of dropping it.
		this->socket->send(utils::ERROR_MESSAGE);
	}
}
Example #3
0
void MediaManager::scanDirectory(const QString &path)
{
    QDir root(path);
    for (QFileInfo& dir : root.entryInfoList(QDir::Dirs | QDir::NoDotAndDotDot))
    {
        if(!dir.isHidden() && dir.isReadable())
        {
            scanDirectory(dir.absoluteFilePath());
        }
    }

    for (QFileInfo& file : root.entryInfoList(supportedFormats, QDir::Files))
    {
        if(!file.isHidden() && file.isReadable())
        {
           processVideo(file.absoluteFilePath());
        }
    }
}
Example #4
0
File: main.cpp  Project: TBFMX/opencvWork
// Entry point for the markerless AR demo (pattern-image variant).
// argv[1] is the pattern image; optional argv[2] is an image or video input
// (default camera is used when absent). Returns 0 on success, 1 on bad
// arguments, 2 if the pattern image cannot be read.
int main(int argc, const char * argv[])
{
    // Camera intrinsics: focal lengths (fx, fy) and principal point (cx, cy).
    // Change this calibration to yours. The franquy values were dead stores
    // (immediately overwritten by the tablet values), so they are kept only
    // as a commented-out reference.
    //~ franquy parameters:
    //~ const float fx = 695.4521167717107f, fy = 694.5519610122569f;
    //~ const float cx = 337.2059936807979f, cy = 231.1645822893514f;
    // tablet parameters (note the f suffixes: the old double literals were
    // silently narrowed to float):
    const float fx = 628.6341119951087f;
    const float fy = 628.7519411113429f;
    const float cx = 325.3443919995285f;
    const float cy = 236.0028199018263f;

    //~ CameraCalibration calibration(526.58037684199849f, 524.65577209994706f, 318.41744018680112f, 202.96659047014398f);
    CameraCalibration calibration(fx, fy, cx, cy);

    if (argc < 2)
    {
        std::cout << "Input image not specified" << std::endl;
        std::cout << "Usage: markerless_ar_demo <pattern image> [filepath to recorded video or image]" << std::endl;
        return 1;
    }

    // Try to read the pattern:
    cv::Mat patternImage = cv::imread(argv[1]);
    if (patternImage.empty())
    {
        std::cout << "Input image cannot be read" << std::endl;
        return 2;
    }

    if (argc == 2)
    {
        // No input argument: stream from the default camera.
        cv::VideoCapture cap(0);
        processVideo(patternImage, calibration, cap);
    }
    else if (argc == 3)
    {
        std::string input = argv[2];

        // Try the argument as a still image first.
        cv::Mat testImage = cv::imread(input);
        if (!testImage.empty())
        {
            processSingleImage(patternImage, calibration, testImage);
        }
        else
        {
            // Not an image: open it as a video source. Default-construct the
            // capture — the old code opened device 0 first only to reopen it
            // with the file path, needlessly grabbing the camera.
            cv::VideoCapture cap;
            if (cap.open(input))
            {
                processVideo(patternImage, calibration, cap);
            }
        }
    }
    else
    {
        std::cerr << "Invalid number of arguments passed" << std::endl;
        return 1;
    }

    return 0;
}
Example #5
0
// ##############################################################################################################
void jevois::Gadget::run()
{
  // Main gadget thread loop: waits on the UVC gadget file descriptor for
  // kernel events (efds) and write-readiness (wfds), services UVC events,
  // and queues completed frames back to the driver so they are streamed to
  // the USB host. Runs until itsRunning is cleared (e.g. via processEvents()
  // or an unrecoverable select error).
  JEVOIS_TRACE(1);
  
  fd_set wfds; // For UVC video streaming
  fd_set efds; // For UVC events
  struct timeval tv;
  
  // Switch to running state:
  itsRunning.store(true);

  // We may have to wait until the device is opened:
  while (itsFd == -1) std::this_thread::sleep_for(std::chrono::milliseconds(1));

  // Wait for event from the gadget kernel driver and process them:
  while (itsRunning.load())
  {
    // Wait until we either receive an event or we are ready to send the next buffer over:
    // (fd_sets must be rebuilt before every select() call, as select modifies them.)
    FD_ZERO(&wfds); FD_ZERO(&efds); FD_SET(itsFd, &wfds); FD_SET(itsFd, &efds);
    tv.tv_sec = 0; tv.tv_usec = 10000; // 10ms timeout so the loop can also poll itsDoneImgs below
    
    int ret = select(itsFd + 1, nullptr, &wfds, &efds, &tv);
    
    // EINTR just means a signal interrupted the wait: retry. Any other
    // select error is fatal to this loop.
    if (ret == -1) { PLERROR("Select error"); if (errno == EINTR) continue; else break; }
    else if (ret > 0) // We have some events, handle them right away:
    {
      // Note: we may have more than one event, so here we try processEvents() several times to be sure:
      if (FD_ISSET(itsFd, &efds))
      {
        // First event, we will report error if any:
        try { processEvents(); } catch (...) { jevois::warnAndIgnoreException(); }

        // Let's try to dequeue one more, in most cases it should throw:
        while (true) try { processEvents(); } catch (...) { break; }
      }
        
      // fd writable: the driver can accept frame data now.
      if (FD_ISSET(itsFd, &wfds)) try { processVideo(); } catch (...) { jevois::warnAndIgnoreException(); }
    }

    // We timed out

    // Sometimes we miss events in the main loop, likely because more events come while we are unlocked in the USB UDC
    // driver and processing here. So let's try to dequeue one more, in most cases it should throw:
    while (true) try { processEvents(); } catch (...) { break; }

    // While the driver is not busy in select(), queue at most one buffer that is ready to send off:
    try
    {
      JEVOIS_TIMED_LOCK(itsMtx);
      if (itsDoneImgs.size())
      {
        LDEBUG("Queuing image " << itsDoneImgs.front() << " for sending over USB");
        
        // We need to prepare a legit v4l2_buffer, including bytesused:
        struct v4l2_buffer buf = { };
        
        buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = itsDoneImgs.front();
        buf.length = itsBuffers->get(buf.index)->length();

        // MJPEG frames are variable-size, so report the actual byte count;
        // uncompressed formats always fill the whole buffer.
        if (itsFormat.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG)
          buf.bytesused = itsBuffers->get(buf.index)->bytesUsed();
        else
          buf.bytesused = buf.length;

        buf.field = V4L2_FIELD_NONE;
        buf.flags = 0;
        gettimeofday(&buf.timestamp, nullptr);
        
        // Queue it up so it can be sent to the host:
        itsBuffers->qbuf(buf);
        
        // This one is done:
        itsDoneImgs.pop_front();
      }
    } catch (...) { jevois::warnAndIgnoreException(); std::this_thread::sleep_for(std::chrono::milliseconds(10)); }
    // On lock timeout or qbuf failure: warn, back off briefly, and retry on
    // the next loop iteration (the image stays at the front of itsDoneImgs).
  }

  // Switch out of running state in case we did interrupt the loop here by a break statement:
  itsRunning.store(false);
}