status_t BnSoundTrigger::onTransact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    switch(code) {
        case DETACH: {
            ALOGV("DETACH");
            CHECK_INTERFACE(ISoundTrigger, data, reply);
            detach();
            return NO_ERROR;
        }
        case LOAD_SOUND_MODEL: {
            CHECK_INTERFACE(ISoundTrigger, data, reply);
            sp<IMemory> modelMemory = interface_cast<IMemory>(
                data.readStrongBinder());
            sound_model_handle_t handle;
            status_t status = loadSoundModel(modelMemory, &handle);
            reply->writeInt32(status);
            if (status == NO_ERROR) {
                reply->write(&handle, sizeof(sound_model_handle_t));
            }
            return NO_ERROR;
        }
        case UNLOAD_SOUND_MODEL: {
            CHECK_INTERFACE(ISoundTrigger, data, reply);
            sound_model_handle_t handle;
            data.read(&handle, sizeof(sound_model_handle_t));
            status_t status = unloadSoundModel(handle);
            reply->writeInt32(status);
            return NO_ERROR;
        }
        case START_RECOGNITION: {
            CHECK_INTERFACE(ISoundTrigger, data, reply);
            sound_model_handle_t handle;
            data.read(&handle, sizeof(sound_model_handle_t));
            sp<IMemory> dataMemory;
            if (data.readInt32() != 0) {
                dataMemory = interface_cast<IMemory>(data.readStrongBinder());
            }
            status_t status = startRecognition(handle, dataMemory);
            reply->writeInt32(status);
            return NO_ERROR;
        }
        case STOP_RECOGNITION: {
            CHECK_INTERFACE(ISoundTrigger, data, reply);
            sound_model_handle_t handle;
            data.read(&handle, sizeof(sound_model_handle_t));
            status_t status = stopRecognition(handle);
            reply->writeInt32(status);
            return NO_ERROR;
        }
        default:
            return BBinder::onTransact(code, data, reply, flags);
    }
}
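For context, the proxy side of this interface has to marshal its arguments in exactly the order the handler above unmarshals them. The following is a minimal sketch inferred from the START_RECOGNITION case; the free-function name and the surrounding boilerplate are assumptions for illustration, not the actual platform source.

// Hypothetical client-side counterpart, mirroring the read order of the
// START_RECOGNITION case above: interface token, raw handle, presence flag,
// then the optional IMemory binder.
status_t startRecognitionViaBinder(const sp<IBinder>& remote,
                                   sound_model_handle_t handle,
                                   const sp<IMemory>& dataMemory)
{
    Parcel data, reply;
    data.writeInterfaceToken(ISoundTrigger::getInterfaceDescriptor());
    data.write(&handle, sizeof(sound_model_handle_t));
    data.writeInt32(dataMemory != 0 ? 1 : 0);
    if (dataMemory != 0) {
        data.writeStrongBinder(IInterface::asBinder(dataMemory));
    }
    status_t status = remote->transact(START_RECOGNITION, data, &reply);
    if (status == NO_ERROR) {
        status = (status_t)reply.readInt32();  // first reply field is the call status
    }
    return status;
}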
Example #2
// NAOqi SDK headers used below; the project's own headers declaring the
// HandVoiceControl and Diagnostics modules must be included as well.
#include <alcommon/albroker.h>
#include <alcommon/albrokermanager.h>
#include <alcommon/almodule.h>
#include <alproxies/almotionproxy.h>
#include <qi/os.hpp>
#include <qi/log.hpp>
#include <cstdlib>
#include <string>

int main(int argc, char** argv)
{
    std::string ip = "192.168.11.9";
    std::string port = "9559";

    if (argc > 1) ip = argv[1];
    if (argc > 2) port = argv[2];

    int portn = std::atoi(port.c_str());

    // Release any brokers left over from a previous run, then create a local
    // broker on port 54000 connected to the robot at ip:port.
    AL::ALBrokerManager::getInstance()->killAllBroker();
    AL::ALBroker::Ptr broker = AL::ALBroker::createBroker("main", "0.0.0.0", 54000, ip, portn);

    try
    {
        // Register the voice-control module on the broker and start listening
        // for voice commands.
        auto hvc = AL::ALModule::createModule<HandVoiceControl>(broker, "HandVoiceControl");
        hvc->startRecognition();

        AL::ALMotionProxy motion(broker);

        // Register the diagnostics module and report the robot's status once.
        auto d = AL::ALModule::createModule<Diagnostics>(broker, "Diagnostics");
        d->reportStatus();

        // Repeat the hand/head motion until 'finish' is set; it never is in
        // this example, so the loop runs until the process is killed.
        static bool finish = false;
        while (!finish)
        {
            motion.openHand("RHand");
            motion.setStiffnesses("HeadYaw", 0.1f);
            motion.angleInterpolation("HeadYaw", 0.2f, 1.0f, true);
            qi::os::msleep(1000);
            motion.closeHand("RHand");
            motion.angleInterpolation("HeadYaw", 0.0f, 1.0f, true);
            qi::os::msleep(1000);
        }

        hvc->stopRecognition();
    }
    catch (const AL::ALError& e)
    {
        qiLogError("error") << e.what() << std::endl;
    }

    //AL::ALMotionProxy motion(ip, portn);
    //int a = motion.post.openHand("RHand");
    //tts.callVoid("say", std::string("opening"));
    //int b = motion.post.closeHand("RHand");
    //tts.callVoid("say", std::string("closing"));

    return 0;
}
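AL::ALModule::createModule<HandVoiceControl> only works if HandVoiceControl follows the standard ALModule pattern (a constructor taking a broker and a name, plus an optional init()). The sketch below shows what a minimal version of such a module might look like; the class body and method contents are assumptions based on how the example uses it, not the project's actual code.

// Hypothetical minimal module following the usual NAOqi ALModule pattern.
// Only the methods the example above calls directly are sketched here.
#include <alcommon/almodule.h>
#include <alcommon/albroker.h>
#include <boost/shared_ptr.hpp>
#include <string>

class HandVoiceControl : public AL::ALModule
{
public:
    HandVoiceControl(boost::shared_ptr<AL::ALBroker> broker, const std::string& name)
        : AL::ALModule(broker, name)
    {
        setModuleDescription("Reacts to voice commands by opening/closing the hand.");
    }

    // Called by createModule after construction; one-time setup goes here.
    virtual void init() {}

    void startRecognition() { /* e.g. subscribe to speech recognition */ }
    void stopRecognition()  { /* e.g. unsubscribe */ }
};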
Example #3
/** Create actions required in the application */
void MainWindow::createActions()
{
    /** Create actions for switching to normal view */
    normalViewAct = new QAction(QIcon(":/images/eye_icon&32.png"), tr("Webcam view"), this);
    normalViewAct->setStatusTip("Open normal view window");
    connect(normalViewAct, SIGNAL(triggered()), this, SLOT(normalView()));

    /** Create actions for switching to background subtraction view */
    backgroundSubViewAct = new QAction(QIcon(":/images/clipboard_copy_icon&32.png"), tr("Background Subtracted View"), this);
    backgroundSubViewAct->setStatusTip("Open background subtracted view");
    connect(backgroundSubViewAct, SIGNAL(triggered()), this, SLOT(backroundSubView()));

    /** Create actions for switching to hand detection view */
    handDetectViewAct = new QAction(QIcon(":/images/hand_1_icon&32.png"), tr("Hand Detection View"), this);
    handDetectViewAct->setStatusTip("Open hand detection view");
    connect(handDetectViewAct, SIGNAL(triggered()), this, SLOT(handDetectView()));

    /** Create action to open image file as input */
    openImageAct = new QAction(QIcon(":/images/folder_icon&32.png"), tr("Open Image"), this);
    openImageAct->setStatusTip("Open image as the input gesture");
    connect(openImageAct, SIGNAL(triggered()), this, SLOT(openImage()));

    /** Create action to open video file as input */
    openVideoAct = new QAction(QIcon(":/images/folder_open_icon&32.png"), tr("Open Video"), this);
    openVideoAct->setStatusTip("Open video as the input gesture");
    connect(openVideoAct, SIGNAL(triggered()), this, SLOT(openVideo()));  // assumes MainWindow declares an openVideo() slot

    /** Create action to start gesture recognition */
    startAct = new QAction(QIcon(":/images/playback_play_icon&32.png"), tr("Start recognition"), this);
    startAct->setStatusTip("Start gesture recognition");
    connect(startAct, SIGNAL(triggered()), this, SLOT(startRecognition()));

    /** Create action to stop gesture recognition */
    stopAct = new QAction(QIcon(":/images/playback_stop_icon&32.png"), tr("Stop recognition"), this);
    stopAct->setStatusTip("Stop gesture recognition");
    connect(stopAct, SIGNAL(triggered()), this, SLOT(stopRecognition()));

    /** Create mute action */
    muteAct = new QAction(QIcon(":/images/sound_mute_icon&32.png"), tr("Mute"), this);
    muteAct->setStatusTip("Mute");
    connect(muteAct, SIGNAL(triggered()), this, SLOT(mute()));

    /** Create volume change action */
    volumeAct = new QAction(QIcon(":/images/sound_high_icon&32.png"), tr("Adjust volume"), this);
    volumeAct->setStatusTip("Adjust volume");
    connect(volumeAct, SIGNAL(triggered()), this, SLOT(volume()));

    /** Create action to view about help for application */
    aboutAct = new QAction(tr("&About"), this);
    aboutAct->setStatusTip(tr("Show the application's About box"));
    connect(aboutAct, SIGNAL(triggered()), this, SLOT(about()));

    /** Create action to change about Qt help for application */
    aboutQtAct = new QAction(tr("About &Qt"), this);
    aboutQtAct->setStatusTip(tr("Show the Qt library's About box"));
    connect(aboutQtAct, SIGNAL(triggered()), qApp, SLOT(aboutQt()));

    /** Create exit action for the application */
    exitAct = new QAction(QIcon(":/images/on-off_icon&32.png"), tr("E&xit"), this);
    exitAct->setShortcuts(QKeySequence::Quit);
    exitAct->setStatusTip(tr("Exit the application"));
    connect(exitAct, SIGNAL(triggered()), this, SLOT(close()));

}
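Each connect() call above resolves its SLOT() string against the receiver's declared slots at run time, so MainWindow's header has to declare matching slots. The sketch below reconstructs the declarations implied by createActions(); it is inferred from the calls above, not taken from the project's actual header.

// Hypothetical excerpt of the MainWindow declaration implied by the
// connect() calls above (Qt 4 style string-based connections).
#include <QMainWindow>
#include <QAction>

class MainWindow : public QMainWindow
{
    Q_OBJECT

private slots:
    void normalView();
    void backroundSubView();   // name must match the SLOT() string exactly
    void handDetectView();
    void openImage();
    void openVideo();
    void startRecognition();
    void stopRecognition();
    void mute();
    void volume();
    void about();

private:
    void createActions();
    QAction *normalViewAct, *backgroundSubViewAct, *handDetectViewAct;
    QAction *openImageAct, *openVideoAct, *startAct, *stopAct;
    QAction *muteAct, *volumeAct, *aboutAct, *aboutQtAct, *exitAct;
};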