Example #1
	void ScrollBar::setPropertyOverride(const std::string& _key, const std::string& _value)
	{
		/// @wproperty{ScrollBar, Range, size_t} Scroll range.
		if (_key == "Range")
			setScrollRange(utility::parseValue<size_t>(_value));

		/// @wproperty{ScrollBar, RangePosition, size_t} Scroll position.
		else if (_key == "RangePosition")
			setScrollPosition(utility::parseValue<size_t>(_value));

		/// @wproperty{ScrollBar, Page, size_t} Scroll step when the start or end button is pressed.
		else if (_key == "Page")
			setScrollPage(utility::parseValue<size_t>(_value));

		/// @wproperty{ScrollBar, ViewPage, size_t} Scroll step when clicking one of the areas between a button and the tracker.
		else if (_key == "ViewPage")
			setScrollViewPage(utility::parseValue<size_t>(_value));

		/// @wproperty{ScrollBar, WheelPage, size_t} Scroll step when scrolling with the mouse wheel.
		else if (_key == "WheelPage")
			setScrollWheelPage(utility::parseValue<size_t>(_value));

		/// @wproperty{ScrollBar, MoveToClick, bool} Mode in which the tracker jumps to the click position.
		else if (_key == "MoveToClick")
			setMoveToClick(utility::parseValue<bool>(_value));

		/// @wproperty{ScrollBar, VerticalAlignment, bool} Vertical alignment.
		else if (_key == "VerticalAlignment")
			setVerticalAlignment(utility::parseValue<bool>(_value));

		/// @wproperty{ScrollBar, Repeat, bool} Sets whether scrollbar buttons should be triggered repeatedly so long as the mouse button is pressed down.
		else if (_key == "Repeat")
			setRepeatEnabled(utility::parseValue<bool>(_value));

		/// @wproperty{ScrollBar, RepeatTriggerTime, float} How long the mouse needs to be pressed on a scrollbar button for repeating to start.
		else if (_key == "RepeatTriggerTime")
			setRepeatTriggerTime(utility::parseValue<float>(_value));

		/// @wproperty{ScrollBar, RepeatStepTime, float} The time between each repeat step once repeating has started.
		else if (_key == "RepeatStepTime")
			setRepeatStepTime(utility::parseValue<float>(_value));

		else
		{
			Base::setPropertyOverride(_key, _value);
			return;
		}

		eventChangeProperty(this, _key, _value);
	}
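
This override is normally reached through the widget's generic string-property interface, so the same keys can come from a .layout file or from code. Below is a minimal usage sketch, assuming the class above is MyGUI's ScrollBar; the widget name "VScroll" and the helper function are hypothetical and not taken from the example.

#include <MyGUI.h>

// Hypothetical helper: the widget name "VScroll" is an assumption for illustration.
void configureScrollBar()
{
    MyGUI::ScrollBar* bar =
        MyGUI::Gui::getInstance().findWidget<MyGUI::ScrollBar>("VScroll");

    // Each call hands the string key/value pair to setPropertyOverride() above.
    bar->setProperty("Range", "200");
    bar->setProperty("Page", "10");
    bar->setProperty("Repeat", "true");            // enable press-and-hold repeat
    bar->setProperty("RepeatTriggerTime", "0.5");  // seconds before repeating starts
    bar->setProperty("RepeatStepTime", "0.1");     // seconds between repeat steps
}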
Example #2
  void open(const char* uri) {
    if (device.open(uri) != openni::STATUS_OK)
      BOOST_THROW_EXCEPTION(GrabberException("Failed to open device")
                            << GrabberException::ErrorInfo(openni::OpenNI::getExtendedError()));

    if (color_stream.create(device, openni::SENSOR_COLOR) != openni::STATUS_OK)
      BOOST_THROW_EXCEPTION(GrabberException("Failed to create color stream")
                            << GrabberException::ErrorInfo(openni::OpenNI::getExtendedError()));

    openni::VideoMode color_mode;
    color_mode.setFps(30);
    color_mode.setResolution(color_image_resolution.width, color_image_resolution.height);
    color_mode.setPixelFormat(openni::PIXEL_FORMAT_RGB888);
    color_stream.setVideoMode(color_mode);
    color_image_size = color_image_resolution.width * color_image_resolution.height * 3;
    color_stream.setMirroringEnabled(false);

    if (color_stream.start() != openni::STATUS_OK) {
      color_stream.destroy();
      BOOST_THROW_EXCEPTION(GrabberException("Failed to start color stream")
                            << GrabberException::ErrorInfo(openni::OpenNI::getExtendedError()));
    }

    streams.push_back(&color_stream);

    auto control = device.getPlaybackControl();
    if (control != nullptr) {
      // This is a file, make sure we get every frame
      control->setSpeed(-1.0f);
      control->setRepeatEnabled(false);
      num_frames = control->getNumberOfFrames(color_stream);
      is_file = true;
      if (num_frames == -1)
        BOOST_THROW_EXCEPTION(GrabberException("Unable to determine number of frames in ONI file"));
    }
  }
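
The streams vector filled at the end of open() is what a typical OpenNI2 read loop polls. The sketch below shows one way that loop could look, assuming streams is a std::vector<openni::VideoStream*> as the push_back above suggests; the helper name is illustrative and not part of the grabber.

#include <OpenNI.h>
#include <stdexcept>
#include <vector>

// Illustrative helper (not from the grabber): block until any registered
// stream has data, then read one frame from it.
openni::VideoFrameRef readNextFrame(std::vector<openni::VideoStream*>& streams) {
  int ready = -1;
  if (openni::OpenNI::waitForAnyStream(streams.data(), static_cast<int>(streams.size()),
                                       &ready) != openni::STATUS_OK)
    throw std::runtime_error(openni::OpenNI::getExtendedError());

  openni::VideoFrameRef frame;
  streams[ready]->readFrame(&frame);  // for the color stream this is RGB888, 3 bytes per pixel
  return frame;
}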
Example #3
TinyMainWindow::TinyMainWindow(QWidget *parent) :
	BasicMainWindow(parent),
	ui(new Ui::TinyMainWindow)
{
	ui->setupUi(this);
	setWindowFlags(Qt::Window | Qt::FramelessWindowHint);
	setAttribute(Qt::WA_QuitOnClose);

	setWindowState(windowState() | Qt::WindowFullScreen);

	pv->release_mouse_event = false;

	pv->status_label1 = new QLabel();
	ui->statusBar->addWidget(pv->status_label1, 1);
	pv->status_label2 = new QLabel();
	ui->statusBar->addWidget(pv->status_label2, 0);
	pv->status_label3 = new QLabel();
	ui->statusBar->addWidget(pv->status_label3, 0);

#if 0 //def Q_OS_WIN
	priv->folder_icon = QIcon(":/image/winfolder.png");
#else
	pv->folder_icon = QIcon(":/image/macfolder.png");
#endif

	{
		QString ss = makeStyleSheetText();
		setStyleSheet(ss);
	}

#ifdef Q_OS_MAC
#else
	ui->action_help_about->setText(tr("&About SkyMPC"));
#endif

	pv->menu.addAction(ui->action_help_about);
	pv->menu.addAction(ui->action_debug);

	setRepeatEnabled(false);
	setRandomEnabled(false);

	if (!start_with_shift_key) {
		MySettings settings;

		settings.beginGroup("Connection");
		QString addr = settings.value("Address").toString();
		int port = settings.value("Port").toInt();
		QString password = settings.value("Password").toString();
		settings.endGroup();
		pv->host = Host(addr, port);
		pv->host.setPassword(password);
	}

#if 0
	pv->command_action_map["random"] = ui->action_random;
	pv->command_action_map["repeat"] = ui->action_repeat;
	pv->command_action_map["play"] = ui->action_play_always;
	pv->command_action_map["stop"] = ui->action_stop;
	pv->command_action_map["prev"] = ui->action_previous;
	pv->command_action_map["next"] = ui->action_next;
	pv->command_action_map["single"] = ui->action_single;
	pv->command_action_map["exit"] = ui->action_file_close;
	pv->command_action_map["vu"] = ui->action_volume_up;
	pv->command_action_map["vd"] = ui->action_volume_down;
	pv->command_action_map["qs1"] = ui->action_playlist_quick_save_1;
	pv->command_action_map["qs2"] = ui->action_playlist_quick_save_2;
	pv->command_action_map["ql1"] = ui->action_playlist_quick_load_1;
	pv->command_action_map["ql2"] = ui->action_playlist_quick_load_2;
	pv->command_action_map["clear"] = ui->action_playlist_clear;

	//	priv->key_command_map[Qt::Key_P] = "play";
	//	priv->key_command_map[Qt::Key_S] = "stop";
#endif
}
Example #4
int main(int argc, char* argv[])
{
    try {
        openni::OpenNI::initialize();
        openni::Device device;

        const char* deviceURI = openni::ANY_DEVICE;
        if (argc > 1) {
            deviceURI = argv[1];
        }

        auto ret = device.open(deviceURI);

        if (ret != openni::STATUS_OK) {
            throw std::runtime_error("can't open device");
        }

        // getPlaybackControl() returns nullptr for live devices; this program expects a recorded .oni file.
        auto playbackControl = device.getPlaybackControl();
        if (playbackControl == nullptr)
            throw std::runtime_error("device has no playback control (not a recorded file)");
        playbackControl->setRepeatEnabled(false);

        openni::VideoStream colorStream;
        colorStream.create(device, openni::SensorType::SENSOR_COLOR);
        colorStream.start();

        openni::VideoStream depthStream;
        depthStream.create(device, openni::SensorType::SENSOR_DEPTH);
        depthStream.start();

        openni::VideoFrameRef depthFrame;
        openni::VideoFrameRef colorFrame;

        cv::Mat colorImage;
        cv::Mat depthImage;
        cv::Mat depthImageBack;
        cv::Mat depthImageFore;
        cv::Mat depthoutputImage;
        cv::Mat maskImage;
        cv::Mat zeroMaskBack;

        std::string videoID(device.getDeviceInfo().getUri());

        videoID.erase(videoID.begin(), videoID.end() - 7);
        videoID.erase(videoID.begin() + 3, videoID.end());

        std::string pathBase(device.getDeviceInfo().getUri());
        pathBase.erase(pathBase.end() - 11, pathBase.end());

        auto idPerson = std::atoi(videoID.c_str());

        auto oldNoFrame = 0;
        auto NoFrame = 1;

        do {
            // depth frame
            depthStream.readFrame(&depthFrame);
            if (depthFrame.isValid()) {
                depthImage = cv::Mat(depthFrame.getVideoMode().getResolutionY(),
                                     depthFrame.getVideoMode().getResolutionX(),
                                     CV_16UC1, (unsigned short*)depthFrame.getData());

                depthImage.convertTo(depthoutputImage, CV_8UC1, 255.0/10000);
                cv::cvtColor(depthoutputImage, depthoutputImage, CV_GRAY2BGR);
            }

            // color frame
            colorStream.readFrame(&colorFrame);
            if (colorFrame.isValid()) {
                colorImage = cv::Mat(colorStream.getVideoMode().getResolutionY(),
                                     colorStream.getVideoMode().getResolutionX(),
                                     CV_8UC3, (char*)colorFrame.getData());
                cv::cvtColor(colorImage, colorImage, CV_RGB2BGR);
            }

            // process
            if (NoFrame == 1) {
                depthImageBack = depthImage.clone();
                depthImageFore = cv::Mat::zeros(depthImage.rows,depthImage.cols, CV_16UC1);
                zeroMaskBack = depthImageBack == 0;
                std::cout << videoID <<";NULL;NULL;NULL;NULL;"
                          << depthFrame.getTimestamp() << ";" << depthFrame.getFrameIndex() << ";"
                          << colorFrame.getTimestamp() << ";" << colorFrame.getFrameIndex() << std::endl;

                std::string pathImgBack;
                pathImgBack = pathBase + "img/background.png";
                cv::imwrite(pathImgBack, colorImage);
                pathImgBack = pathBase + "depth/background.png";
                cv::imwrite(pathImgBack, depthImage);

            } else {
                depthImageFore = depthImageBack - depthImage;
                maskImage = (depthImageFore > MIN_HIGH) & (depthImageFore < MAX_HIGH);

                // morfology
                maskImage = morphology(maskImage);

                // find contours
                std::vector<std::vector<cv::Point> > contours;
                cv::findContours(maskImage.clone(), contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE);

                auto noPerson = 0;

                for (auto c_iter = contours.begin(); c_iter != contours.end(); ) {
                    auto area = cv::contourArea(*c_iter);
                    // filtering by area
                    if (area > MIN_AREA) {
                        noPerson++;
                        ++c_iter;
                    } else {
                        // remove small contours; erase() returns the iterator to the next element
                        c_iter = contours.erase(c_iter);
                    }
                }

                if (contours.size()) {
                    oldNoFrame = NoFrame;
                    // Draw contours
                    cv::Mat drawing = cv::Mat::zeros(maskImage.size(), CV_8UC1);
                    for (int i = 0; i < static_cast<int>(contours.size()); ++i)
                    {
                        cv::drawContours(drawing, contours, i, 255, -1);
                    }

                    cv::Mat zeroMask = depthImage == 0;
                    cv::Mat zeroMaskTot;
                    cv::bitwise_or(zeroMask, zeroMaskBack, zeroMaskTot);
                    drawing = drawing - zeroMaskTot;

                    cv::Mat depthImageFilter;
                    depthImageFore.copyTo(depthImageFilter, drawing);

                    // cv::imshow("depthImageFilter", depthImageFilter);
                    // cv::imshow("drawing", drawing);

                    if (NoFrame % 10 == 0) {
                        std::string pathImgColor;
                        pathImgColor = pathBase + "img/" + std::to_string(depthFrame.getFrameIndex()) + ".png";
                        cv::imwrite(pathImgColor, colorImage);

                        std::string pathImgDepth;
                        pathImgDepth = pathBase + "depth/" + std::to_string(depthFrame.getFrameIndex()) + ".png";
                        cv::imwrite(pathImgDepth, depthImage);

                        std::string pathImgMask;
                        pathImgMask = pathBase + "mask/" + std::to_string(depthFrame.getFrameIndex()) + ".png";
                        cv::imwrite(pathImgMask, drawing);
                    }

                    double maxValue;
                    cv::Point maxPoint;

                    cv::minMaxLoc(depthImageFilter, NULL, &maxValue, NULL, &maxPoint);

                    maxValue += int(depthImageBack.at<unsigned short>(cv::Point(depthImageBack.cols/2-1, depthImageBack.rows/2-1))) - depthImageBack.at<unsigned short>(maxPoint);

                    std::cout << videoID  << ";" << std::setfill('0') << std::setw(3) << idPerson << ";"
                              << maxValue << ";"
                              << maxPoint.x << ";"
                              << maxPoint.y << ";"
                              << depthFrame.getTimestamp() << ";" << depthFrame.getFrameIndex() << ";"
                              << colorFrame.getTimestamp() << ";" << colorFrame.getFrameIndex() << std::endl;
                    // cv::circle(colorImage, maxPoint, 5, cv::Scalar(255, 50, 0), -1, 8, 0);
                } else {
                    if (!(oldNoFrame - NoFrame + 1)) {
                        ++idPerson;
                    }
                    std::cout << videoID <<";NULL;NULL;NULL;NULL;"
                              << depthFrame.getTimestamp() << ";" << depthFrame.getFrameIndex() << ";"
                              << colorFrame.getTimestamp() << ";" << colorFrame.getFrameIndex() << std::endl;
                }
            }
            // cv::imshow("Color Camera", colorImage);
            // cv::imshow("Depth CameraFore", depthImageFore);

            int key = cv::waitKey(10);
            if (key == 'q') {
                break;
            }

            ++NoFrame;
        } while (depthFrame.getFrameIndex() < playbackControl->getNumberOfFrames(depthStream) && colorFrame.getFrameIndex() < playbackControl->getNumberOfFrames(colorStream));

        depthStream.destroy();
        colorStream.destroy();
        device.close();

        openni::OpenNI::shutdown();
        return 0;
    }
    catch (std::exception& e) {
        std::cout << e.what() << std::endl;
        std::cout << openni::OpenNI::getExtendedError() << std::endl;
        return 1;
    }
}