Code Example #1
File: glwidget.cpp Project: kazeevn/Mipt-ray-tracer
GLWidget::GLWidget(QWidget *parent)
    : QGLWidget(QGLFormat(QGL::SampleBuffers), parent)
{

    speed = 0.2;               // default camera movement speed
    widget_width = width();    // cache the initial widget size
    widget_height = height();

    normalView(); // set the default camera view
}
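
The project's normalView() implementation isn't shown on this page. As a rough illustration only, the sketch below assumes it resets a few hypothetical camera members (yaw, pitch, distance) to defaults and requests a repaint; the class name DemoGLWidget and those members are illustrative, not taken from kazeevn/Mipt-ray-tracer.

// Illustrative sketch only -- the camera members below are assumptions,
// not code from kazeevn/Mipt-ray-tracer.
#include <QGLWidget>

class DemoGLWidget : public QGLWidget
{
public:
    explicit DemoGLWidget(QWidget *parent = 0)
        : QGLWidget(QGLFormat(QGL::SampleBuffers), parent),
          speed(0.2), yaw(0.0), pitch(0.0), distance(5.0)
    {
        normalView(); // same pattern as the constructor above
    }

    // Hypothetical reset: put the camera back to its default orbit position.
    void normalView()
    {
        yaw = 0.0;
        pitch = 0.0;
        distance = 5.0;
        updateGL(); // request a repaint with the restored view
    }

private:
    double speed;
    double yaw, pitch, distance; // assumed camera state
};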
Code Example #2
Rational::Rational(int x, int y) :a(x), b(y), sign(false)
{
    normalView();
}
Code Example #3
Rational::Rational(int x) :a(x), b(1), sign(false)
{
    normalView();
}
Code Example #4
Rational::Rational() :a(0), b(1), sign(false)
{
    normalView();
}
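
All three Rational constructors delegate to normalView(), whose body isn't shown here. The standalone sketch below assumes normalView() canonicalizes the fraction by folding the signs of a and b into the sign flag and reducing by the greatest common divisor; this is an assumption about the intent, not the project's actual code.

// Standalone sketch -- the normalization logic is assumed, not taken from the project.
#include <cstdlib>
#include <iostream>

struct Rational {
    int a, b;    // numerator and denominator, kept non-negative here
    bool sign;   // true if the value is negative

    Rational(int x = 0, int y = 1) : a(x), b(y), sign(false) { normalView(); }

    void normalView()
    {
        // Fold the signs of numerator and denominator into the sign flag.
        sign = (a < 0) != (b < 0);
        a = std::abs(a);
        b = std::abs(b);

        // Reduce by the greatest common divisor (Euclid's algorithm).
        int g = a, h = b;
        while (h != 0) { int t = g % h; g = h; h = t; }
        if (g > 1) { a /= g; b /= g; }
    }
};

int main()
{
    Rational r(-6, 8); // stored as 3/4 with sign == true
    std::cout << (r.sign ? "-" : "") << r.a << "/" << r.b << std::endl;
    return 0;
}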
Code Example #5
/** Create actions required in the application */
void MainWindow::createActions()
{


    /** Create action for switching to normal view */
    normalViewAct = new QAction(QIcon(":/images/eye_icon&32.png"), tr("Webcam view"), this);
    normalViewAct->setStatusTip("Open normal view window");
    connect(normalViewAct, SIGNAL(triggered()), this, SLOT(normalView()));

    /** Create action for switching to background subtraction view */
    backgroundSubViewAct = new QAction(QIcon(":/images/clipboard_copy_icon&32.png"), tr("Background Subtracted View"), this);
    backgroundSubViewAct->setStatusTip("Open background subtracted view");
    connect(backgroundSubViewAct, SIGNAL(triggered()), this, SLOT(backroundSubView()));

    /** Create action for switching to hand detection view */
    handDetectViewAct = new QAction(QIcon(":/images/hand_1_icon&32.png"), tr("Hand Detection View"), this);
    handDetectViewAct->setStatusTip("Open hand detection view");
    connect(handDetectViewAct, SIGNAL(triggered()), this, SLOT(handDetectView()));

    /** Create action to open image file as input */
    openImageAct = new QAction(QIcon(":/images/folder_icon&32.png"), tr("Open Image"), this);
    openImageAct->setStatusTip("Open image as the input gesture");
    connect(openImageAct, SIGNAL(triggered()), this, SLOT(openImage()));

    /** Create action to open video file as input */
    openVideoAct = new QAction(QIcon(":/images/folder_open_icon&32.png"), tr("Open Video"), this);
    openVideoAct->setStatusTip("Open video as the input gesture");
    connect(openVideoAct, SIGNAL(triggered()), this, SLOT(openVideo())); // openVideo() slot assumed; the original wired this to openImage()

    /** Create action to start gesture recognition */
    startAct = new QAction(QIcon(":/images/playback_play_icon&32.png"), tr("Start recognition"), this);
    startAct->setStatusTip("Start gesture recognition");
    connect(startAct, SIGNAL(triggered()), this, SLOT(startRecognition()));

    /** Create action to stop gesture recognition */
    stopAct = new QAction(QIcon(":/images/playback_stop_icon&32.png"), tr("Stop recognition"), this);
    stopAct->setStatusTip("Stop gesture recognition");
    connect(stopAct, SIGNAL(triggered()), this, SLOT(stopRecognition()));

    /** Create mute action */
    muteAct = new QAction(QIcon(":/images/sound_mute_icon&32.png"), tr("Mute"), this);
    muteAct->setStatusTip("Mute");
    connect(muteAct, SIGNAL(triggered()), this, SLOT(mute()));

    /** Create volume change action */
    volumeAct = new QAction(QIcon(":/images/sound_high_icon&32.png"), tr("Adjust volume"), this);
    volumeAct->setStatusTip("Adjust volume");
    connect(volumeAct, SIGNAL(triggered()), this, SLOT(volume()));

    /** Create action to show the application's About box */
    aboutAct = new QAction(tr("&About"), this);
    aboutAct->setStatusTip(tr("Show the application's About box"));
    connect(aboutAct, SIGNAL(triggered()), this, SLOT(about()));

    /** Create action to show the About Qt box */
    aboutQtAct = new QAction(tr("About &Qt"), this);
    aboutQtAct->setStatusTip(tr("Show the Qt library's About box"));
    connect(aboutQtAct, SIGNAL(triggered()), qApp, SLOT(aboutQt()));

    /** Create exit action for the application */
    exitAct = new QAction(QIcon(":/images/on-off_icon&32.png"), tr("E&xit"), this);
    exitAct->setShortcuts(QKeySequence::Quit);
    exitAct->setStatusTip(tr("Exit the application"));
    connect(exitAct, SIGNAL(triggered()), this, SLOT(close()));

}
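
createActions() only constructs the QAction objects; the page doesn't show where they are used. A plausible companion, sketched below, attaches them to the menu bar and a toolbar with the standard QMainWindow API. The method name createMenusAndToolBar() is hypothetical.

// Hypothetical companion method -- not shown on this page.
#include <QMenu>
#include <QMenuBar>
#include <QToolBar>

void MainWindow::createMenusAndToolBar()
{
    /** File menu: input sources and exit */
    QMenu *fileMenu = menuBar()->addMenu(tr("&File"));
    fileMenu->addAction(openImageAct);
    fileMenu->addAction(openVideoAct);
    fileMenu->addSeparator();
    fileMenu->addAction(exitAct);

    /** Help menu: the two About boxes */
    QMenu *helpMenu = menuBar()->addMenu(tr("&Help"));
    helpMenu->addAction(aboutAct);
    helpMenu->addAction(aboutQtAct);

    /** Main toolbar: view switching, recognition control, audio */
    QToolBar *toolBar = addToolBar(tr("Main"));
    toolBar->addAction(normalViewAct);
    toolBar->addAction(backgroundSubViewAct);
    toolBar->addAction(handDetectViewAct);
    toolBar->addSeparator();
    toolBar->addAction(startAct);
    toolBar->addAction(stopAct);
    toolBar->addSeparator();
    toolBar->addAction(muteAct);
    toolBar->addAction(volumeAct);
}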