void PointSolver2::solve (cv::Mat &inputImage, Point3 &startPos, Quaternion &startOrientation)
{
	assert (inputImage.type() == CV_8UC1);

	position0 = startPos;
	orientation0 = startOrientation;
	image = inputImage;

	prepareImage ();

#define ITERATION 5

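	// Iteratively refine the initial pose: project the model lines with the
	// current view matrix, match each image point to its nearest projected line,
	// and solve for a pose correction.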
	for (int np=0; np<ITERATION; np++) {
		currentViewMatrix = createViewMatrix (position0, orientation0);

		projectLines ();
		findNearestLinePerPoint ();
		prepareMatrices ();
		solveForCorrection ();
	}
//	currentViewMatrix = createViewMatrix (position0, orientation0);
	debugDraw ("/tmp/draw.png", &position0, &orientation0);

	return;
}
bool MarkerDetector::findMarkers(const BGRAVideoFrame& frame, std::vector<Marker>& detectedMarkers)
{
  cv::Mat bgraMat(frame.height, frame.width, CV_8UC4, frame.data, frame.stride);
  
  // Convert the image to grayscale
  prepareImage(bgraMat, m_grayscaleImage);
  
  // Make it binary
  performThreshold(m_grayscaleImage, m_thresholdImg);
  
  // Detect contours
  findContours(m_thresholdImg, m_contours, m_grayscaleImage.cols / 20);
  
  // Find closed contours that can be approximated with 4 points
  findMarkerCandidates(m_contours, detectedMarkers);
  
  // Find out which candidates are actual markers
  detectMarkers(m_grayscaleImage, detectedMarkers);

  // Calculate their poses
  estimatePosition(detectedMarkers);

  //sort by id
  std::sort(detectedMarkers.begin(), detectedMarkers.end());
  return false;
}
Example #3
void ImageScanner::updateImage()
{
    prepareImage();

    DatabaseAccess().db()->updateItem(m_scanInfo.id, m_scanInfo.category,
                                      m_scanInfo.modificationDate, m_scanInfo.fileSize, m_scanInfo.uniqueHash);
}
Example #4
int initProgram(int argc,char *argv[], RAW_CSV &raw_data, STS &sts, char *&inpath, char *&outpath)
{
	if (argc == 1) {
		// Show help
		showHelp();
		return 0;
	} else if (argc == 2) {
		if (strcmp(argv[1], "-h") == 0) {
			showHelp();
			return 0;
		}
	}
	if (argc < 3 || argc > MAX_ARG) return 1;
	// Get the input parameters
	inpath = argv[1];
	outpath = argv[argc-1];
	
	// Get data from the input parameters
	// Load the raw data
	char *filedata = new char[strlen(inpath)+strlen(FILE_DATA)+2];
	sprintf(filedata, "%s/%s", inpath, FILE_DATA);

	if (initCSV(raw_data, filedata) == -1) {
		printf("Could not load the data\n");
		delete[] filedata;
		return 2;
	}
	// Process the raw data
	convertRAWtoST(raw_data, sts);

	// Prepare the image file
	prepareImage(sts, inpath, outpath);
	delete[] filedata;

	return 0;
}
SDL_Surface * TextureDefinition::getDefaultTextureImage() {
	SDL_Surface* loadedImage = IMG_Load(this->defaultImgSrc.c_str());
	if (loadedImage == NULL) {
		Logs::logErrorMessage(
				"Unable to load default tile texture: "
						+ string(SDL_GetError()));
		return NULL;
	}
	loadedImage = prepareImage(loadedImage);
	return loadedImage;
}
Example #6
void ImageScanner::addImage(int albumId)
{
    prepareImage();

    m_scanInfo.albumID          = albumId;
    m_scanInfo.status           = DatabaseItem::Visible;

    kDebug() << "Adding new item" << m_fileInfo.filePath();
    m_scanInfo.id               = DatabaseAccess().db()->addItem(m_scanInfo.albumID, m_scanInfo.itemName,
                                  m_scanInfo.status, m_scanInfo.category,
                                  m_scanInfo.modificationDate, m_scanInfo.fileSize,
                                  m_scanInfo.uniqueHash);
}
Example #7
int main( int argc, char ** argv) {
    Magick::InitializeMagick(*argv);
    readNetwork("base");
    int failed = 0, all = 0;
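    // Classify each input image and compare the result against the expected
    // character derived from the file name; count the mismatches.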
    for (int i = 1; i < argc; i++) {
        Magick::Image img(argv[i]);
        prepareImage(img);
        char res = runNetwork(img);
        char et = tolower(findChar(argv[i]));
        //printf("%c-%c\n", res, et);
        if (res != et) failed++;
        all++;
    }
    printf("%d/%d=%.2lf", failed, all, ((double)failed)/all);
}
Example #8
KisSprayShapeOption::KisSprayShapeOption()
    : KisPaintOpOption(i18n("Spray shape"), KisPaintOpOption::generalCategory(), true)
{
    m_checkable = true;
    // save this to be able to restore it back
    m_maxSize = 1000;

    m_options = new KisShapeOptionsWidget();
    m_useAspect = m_options->aspectButton->keepAspectRatio();
    computeAspect();

    // UI signals
    connect(m_options->proportionalBox, SIGNAL(clicked(bool)), SLOT(changeSizeUI(bool)));
    connect(m_options->aspectButton, SIGNAL(keepAspectRatioChanged(bool)), this, SLOT(aspectToggled(bool)));
    connect(m_options->imageUrl, SIGNAL(textChanged(QString)), this, SLOT(prepareImage()));

    connect(m_options->widthSpin, SIGNAL(valueChanged(int)), SLOT(updateHeight(int)));
    connect(m_options->heightSpin, SIGNAL(valueChanged(int)), SLOT(updateWidth(int)));

    setupBrushPreviewSignals();
    setConfigurationPage(m_options);
}
tResult CurveDetector::processImage(IMediaSample *mediaSample)
{
    const tVoid *buffer;

    if (IS_OK(mediaSample->Lock(&buffer)))
    {
        Mat image(Size(this->videoInputInfo.nWidth, this->videoInputInfo.nHeight), CV_8UC3, (char*)buffer);
        mediaSample->Unlock(buffer);

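        // Work on a copy so the original sample buffer is left untouched.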
        Mat result = image.clone();
        prepareImage(result);

        list<SD_Point> left, right;
        searchLines(result, left, right);

        drawPointsInImage(result, left, Scalar(0, 0, 255));
        drawPointsInImage(result, right, Scalar(0, 0, 255));

        transmitVideoOutput(result, this->rgbOutput);
    }

    RETURN_NOERROR;
}
Example #10
bool MarkerDetector::findMarkers(const cv::Mat& frame, std::vector<Marker>& detectedMarkers)
{
    // Convert the image to grayscale
    prepareImage(frame, m_grayscaleImage);

    // Make it binary
    performThreshold(m_grayscaleImage, m_thresholdImg);

    // Detect contours
    findContours(m_thresholdImg, m_contours, m_grayscaleImage.cols / 5);

    // Find closed contours that can be approximated with 4 points
    findCandidates(m_contours, detectedMarkers);

    // Find out which candidates are actual markers
    recognizeMarkers(m_grayscaleImage, detectedMarkers);

    // Calculate their poses
    estimatePosition(detectedMarkers);

    //sort by id
    std::sort(detectedMarkers.begin(), detectedMarkers.end());
    return false;
}
Example #11
CustomGraphicsView::CustomGraphicsView(QWidget *parent)
	: QGraphicsView(parent)
{
	//Set scene.
	this->setScene(&m_graphicsScene);

	//Set width to default View width
	m_nWidth = this->geometry().width();

	//Set height to default View height
	m_nHeight = this->geometry().height();

	//View X-offset
	m_nOffsetX = 20;

	//View Y-offset
	m_nOffsetY = 20;

	//Gap between Items.
	m_nItemSpace = 10;

	m_nRow = 0;

	m_nColoumn = 0;

	m_nMaxColumn = 0;

	m_sceneRect.setWidth(m_nWidth);

	m_graphicsScene.setSceneRect(m_sceneRect);

	m_itemTransformation = Qt::SmoothTransformation;

	hXPSConvert = LoadLibrary(L"XpsConvertQt.dll");

	docConvertThread = new DocConverterThread(this);

	docConvertThread->start();

	connect(docConvertThread, SIGNAL(addItem(QString)), this, SLOT(addItemToScene(QString)));

	//Set Rendering hint flags of GraphicsView.
	this->setRenderHints (QPainter::TextAntialiasing);

	this->setDragMode(QGraphicsView::RubberBandDrag);

	viewThread = new ViewWorkerThread(this);

	viewThread->start();

	connect(this, SIGNAL(loadImage(QList<QGraphicsItem*>)), viewThread, SLOT(prepareImage(QList<QGraphicsItem*>)));

	connect(viewThread, SIGNAL(updateItem(CustomItem* )), this, SLOT(updateItemImage(CustomItem*)));

	//Set Cache mode flag.
	//this->setCacheMode (QGraphicsView::NoCache);

	timer = new QTimer(this);

	connect(timer, SIGNAL(timeout()), this, SLOT(updateItem()));

	timer->setSingleShot(true);

	//Set Viewport Update mode of the view.
	this->setViewportUpdateMode (QGraphicsView::SmartViewportUpdate);

	connect(this->verticalScrollBar(), SIGNAL(valueChanged(int)), this, SLOT(onScrollValueChanged(int)));
}
Example #12
bool MonitorStream::sendFrame(Image *image, struct timeval *timestamp) {
  Image *send_image = prepareImage(image);
  if ( !config.timestamp_on_capture && timestamp )
    monitor->TimestampImage(send_image, timestamp);

#if HAVE_LIBAVCODEC
  if ( type == STREAM_MPEG ) {
    if ( !vid_stream ) {
      vid_stream = new VideoStream("pipe:", format, bitrate, effective_fps, send_image->Colours(), send_image->SubpixelOrder(), send_image->Width(), send_image->Height());
      fprintf(stdout, "Content-type: %s\r\n\r\n", vid_stream->MimeType());
      vid_stream->OpenStream();
    }
    static struct timeval base_time;
    struct DeltaTimeval delta_time;
    if ( !frame_count )
      base_time = *timestamp;
    DELTA_TIMEVAL(delta_time, *timestamp, base_time, DT_PREC_3);
    /* double pts = */ vid_stream->EncodeFrame(send_image->Buffer(), send_image->Size(), config.mpeg_timed_frames, delta_time.delta);
  } else
#endif // HAVE_LIBAVCODEC
  {
    static unsigned char temp_img_buffer[ZM_MAX_IMAGE_SIZE];

    int img_buffer_size = 0;
    unsigned char *img_buffer = temp_img_buffer;

    // Calculate how long it takes to actually send the frame
    struct timeval frameStartTime;
    gettimeofday(&frameStartTime, NULL);
    
    fputs("--ZoneMinderFrame\r\n", stdout);
    switch( type ) {
      case STREAM_JPEG :
        send_image->EncodeJpeg(img_buffer, &img_buffer_size);
        fputs("Content-Type: image/jpeg\r\n", stdout);
        break;
      case STREAM_RAW :
        fputs("Content-Type: image/x-rgb\r\n", stdout);
        img_buffer = (uint8_t*)send_image->Buffer();
        img_buffer_size = send_image->Size();
        break;
      case STREAM_ZIP :
        fputs("Content-Type: image/x-rgbz\r\n",stdout);
        unsigned long zip_buffer_size;
        send_image->Zip(img_buffer, &zip_buffer_size);
        img_buffer_size = zip_buffer_size;
        break;
      default :
        Error("Unexpected frame type %d", type);
        return false;
    }
    fprintf(stdout, "Content-Length: %d\r\n\r\n", img_buffer_size);
    if ( fwrite(img_buffer, img_buffer_size, 1, stdout) != 1 ) {
      if ( !zm_terminate ){ 
        // If the pipe was closed, we will get signalled SIGPIPE to exit, which will set zm_terminate
        Warning("Unable to send stream frame: %s", strerror(errno));
      }
      return false;
    }
    fputs("\r\n\r\n",stdout);
    fflush( stdout );

    struct timeval frameEndTime;
    gettimeofday( &frameEndTime, NULL );

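    // If sending took longer than one frame interval at the current maxfps,
    // throttle the target frame rate.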
    int frameSendTime = tvDiffMsec( frameStartTime, frameEndTime );
    if ( frameSendTime > 1000/maxfps ) {
      maxfps /= 1.5;
      Error( "Frame send time %d msec too slow, throttling maxfps to %.2f", frameSendTime, maxfps );
    }
  }
  last_frame_sent = TV_2_FLOAT( now );
  return( true );
} // end bool MonitorStream::sendFrame( Image *image, struct timeval *timestamp )