Example #1
void HWController::setJointData(const ALValue &values) {
    if (values.getSize() < NUM_OF_JOINTS) return;
    // Build the new actuator buffer locally, then swap it in under the lock so
    // the consumer thread never observes a partially written vector.
    shared_ptr<vector<float> > local_actuator_values = make_shared<vector<float> >(NUM_OF_JOINTS);
    for (int i = 0; i < NUM_OF_JOINTS; ++i)
        local_actuator_values->at(i) = (float) values[i];
    lock_guard<mutex> guard(this->actuator_mutex);
    this->work_actuator_values.swap(local_actuator_values);
}
void APIDemonstration::move_joints(const ALValue& joints,
                                   const ALValue& target_angles,
                                   const ALValue& target_times,
                                   const bool& restore_pos,
                                   const std::string& phrase,
                                   const float& phrase_lag) {

    try {
        // Remember the current angles and stiffnesses so they can be restored afterwards.
        bool useSensors = false;
        std::vector<float> angles_before = motion_proxy.getAngles(joints, useSensors);
        std::vector<float> stiffness_before = motion_proxy.getStiffnesses(joints);

        // Stiffen every requested joint before moving it.
        int n = joints.isArray() ? joints.getSize() : 1;
        motion_proxy.setStiffnesses(joints, std::vector<float>(n, 1.0f));

        // Post the interpolation asynchronously so the speech below can overlap the motion.
        bool isAbsolute = true;
        int id = motion_proxy.post.angleInterpolation(joints, target_angles, target_times, isAbsolute);

        qi::os::sleep(phrase_lag);
        if (!phrase.empty()) {
            TTS_proxy.setLanguage("English");
            TTS_proxy.post.say(phrase);
        }

        // Optionally move back to the original pose, then restore the stiffnesses
        // and make sure the posted interpolation has finished.
        if (restore_pos)
            motion_proxy.angleInterpolation(joints, angles_before, std::vector<float>(n, 1.0f), true);
        motion_proxy.setStiffnesses(joints, stiffness_before);
        motion_proxy.wait(id, 0);
    }
    catch (const ALError& e) {
        std::cerr << "Caught exception: " << e.what() << std::endl;
    }
}
void APIDemonstration::face_detected() {
    qiLogInfo("module.example") << "Executing callback method on face_detected event" << std::endl;

    // Serialize callbacks: FaceDetected can fire again while this handler is still running.
    ALCriticalSection section(fCallbackMutexFaceDetection);
    ALValue data = memory_proxy.getData("FaceDetected");
    if (data.getSize() > 0) {
        TTS_proxy.say("Yep");
        b_face_detected = true;
    }
}
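A minimal caller sketch for move_joints, not part of the original example: the APIDemonstration instance "demo", the joint names and the angle/time values below are assumptions chosen for illustration.

// Hypothetical usage sketch: "demo" and all joint values are assumed, not from the source.
std::vector<std::string> names;
names.push_back("HeadYaw");
names.push_back("HeadPitch");
std::vector<float> angles(2, 0.0f);
angles[0] = 0.5f;    // radians
angles[1] = -0.2f;
std::vector<float> times(2, 1.5f);   // reach both targets after 1.5 s

demo.move_joints(AL::ALValue(names), AL::ALValue(angles), AL::ALValue(times),
                 /*restore_pos=*/true, "Hello there", /*phrase_lag=*/0.5f);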
Example #4
JNIEXPORT
void JNICALL Java_jp_ac_fit_asura_naoji_jal_JALMemory__1updateStringQuery(
		JNIEnv *env, jclass, jlong queryPtr) {
	Query *query = reinterpret_cast<Query*> (queryPtr);
	assert(query != NULL);

	try {
		// Read every requested key from ALMemory in a single round trip.
		ALValue data = query->jmemory->getProxy()->getListData(query->names);
		int size = data.getSize();
		assert(query->names.getSize() == size);

		// Copy each value into the Java String[] held by the query buffer.
		for (int i = 0; i < size; i++) {
			env->SetObjectArrayElement(query->buffer.s, i, env->NewStringUTF(
					((string) data[i]).c_str()));
		}
	} catch (const AL::ALError& err) {
		std::cerr << err.toString() << std::endl;
		assert(false);
	}
}
Example #5
JNIEXPORT
void JNICALL Java_jp_ac_fit_asura_naoji_jal_JALMemory__1updateIntQuery(
		JNIEnv *, jclass, jlong queryPtr) {
	Query *query = reinterpret_cast<Query*> (queryPtr);
	assert(query != NULL);

	try {
		// Same pattern as the String query: fetch all keys in one ALMemory call.
		ALValue data = query->jmemory->getProxy()->getListData(query->names);
		int size = data.getSize();
		assert(query->names.getSize() == size);

		// Write the results into the direct int buffer shared with Java.
		jint* buf = reinterpret_cast<jint*> (query->buffer.b);
		for (int i = 0; i < size; i++) {
			buf[i] = data[i];
		}
	} catch (const AL::ALError& err) {
		std::cerr << err.toString() << std::endl;
		assert(false);
	}
}
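Both JNI bindings above treat the query handle as an opaque pointer passed in from Java. The original Query definition is not part of this listing; the sketch below is a hedged reconstruction inferred only from the fields the two functions touch (jmemory, names, buffer.s, buffer.b), so the real layout and type names may differ.

// Hedged reconstruction, not the original declaration; field names follow the usage above.
struct Query {
	JALMemory *jmemory;    // wrapper object that owns the ALMemoryProxy (getProxy())
	AL::ALValue names;     // list of ALMemory keys fetched by getListData()
	union {
		jobjectArray s;    // Java String[] target used by the string query
		void *b;           // direct-buffer address used by the int query
	} buffer;
};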
Example #6
struct timespec KImageExtractor::fetchImage(IplImage *img)
{
    struct timespec rt; // Timestamp of the captured frame
    cout << "KImageExtractor::fetchImage():" << endl;
    if (doneSubscribe == false)
    {
        cout << "KImageExtractor: Warning! fetchImage() called although GVM Subscription has failed!" << endl;
        rt.tv_sec = 0;
        rt.tv_nsec = 0;
        return rt;
    }

#ifdef REMOTE_ON
    // Remote mode: the image is copied over the network as an ALValue.
    ALValue results;
#ifdef RAW
    results = (c->call<ALValue> ("getDirectRawImageRemote", GVM_name));
#else
    results = (c->call<ALValue> ("getImageRemote", GVM_name));
#endif
    // A valid reply is an array of 7 fields: width, height, layer count, color
    // space, timestamp (seconds, microseconds) and the pixel buffer.
    if (results.getType() != ALValue::TypeArray || results.getSize() != 7)
    {
        throw ALError("KImageExtractor", "fetchImage", "Invalid image returned.");
    }
    // Unpack the image description.
    int width = (int) results[0];
    int height = (int) results[1];
    int nChannels = (int) results[2];
    int colorSpace = (int) results[3];

    int size = width * height * nChannels;

    // Fetch the timestamp: seconds plus the microsecond part converted to nanoseconds.
    rt.tv_sec = (time_t) ((int) results[4]);
    rt.tv_nsec = (long) ((int) results[5]) * 1000L;

    // Reinitialise the IplImage header if the incoming frame size has changed.
    assert(img != NULL);
    if (img->imageSize != size)
    {
        cout << "KImageExtractor::fetchImage(): allocating new image data" << endl;
        cvInitImageHeader(img, cvSize(width, height), IPL_DEPTH_8U, nChannels);
    }

    // Copy the pixel buffer, allocating storage on first use.
    if (img->imageData == NULL)
        img->imageData = new char[img->imageSize];
    memcpy(img->imageData, (char*) (results[6].GetBinary()), results[6].getSize() * sizeof(unsigned char));
#else
    //cout << "Remote method off" << endl;
    //sleep(1);
    ALImage* imageIn = NULL;
    // Now you can get the pointer to the video structure.
#ifdef RAW
    imageIn = (ALImage*) (c->call<int> ("getDirectRawImage", GVM_name));
#else
    imageIn = (ALImage*) (c->call<int> ("getImageLocal", GVM_name));
#endif
    if (!imageIn)
    {
        throw ALError("KImageExtractor", "saveImageLocal", "Invalid image returned.");
    }
    //fLogProxy->info(getName(), imageIn->toString());
    // You can get some image information that you may find useful.
    int width = imageIn->fWidth;
    int height = imageIn->fHeight;
    const int nChannels = imageIn->fNbLayers;
    //		const int colorSpace = imageIn->fColorSpace;
    const long long timeStamp = imageIn->fTimeStamp;
    //		const int seconds = (int) (timeStamp / 1000000LL);
    const int size = width*height*nChannels;
    // Set the buffer we received to our IplImage header.
    //Fetch TimeStamp;
    rt.tv_sec=(time_t) (timeStamp / 1000000LL);
    rt.tv_nsec=(long)  ((timeStamp-rt.tv_sec*1LL)*1000LL);


    // Reinitialise the IplImage header if the incoming frame size has changed.
    if (img->imageSize != size)
    {
        free(img->imageData);
        cvInitImageHeader(img, cvSize(width, height), IPL_DEPTH_8U, nChannels);
        img->imageData = NULL;
    }

    // Copy the pixel buffer, allocating storage on first use.
    if (img->imageData == NULL)
        img->imageData = new char[size];
    memcpy(img->imageData, (char*) imageIn->getFrame(), size * sizeof(char));

    // The local image must be released back to the video device once we are done with it.
    c->call<int> ("releaseImage", GVM_name);
#endif
    return rt;
}
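A minimal caller sketch for fetchImage, not found in the original source: the KImageExtractor instance "extractor" and the 320x240, 3-channel header are assumptions for illustration; fetchImage reinitialises the header itself if the camera delivers a different size.

// Hypothetical usage sketch: "extractor" and the image dimensions are assumed values.
IplImage *frame = cvCreateImageHeader(cvSize(320, 240), IPL_DEPTH_8U, 3);
// imageData is still NULL here; fetchImage() allocates the pixel buffer on first use.
struct timespec stamp = extractor.fetchImage(frame);
cout << "Frame grabbed at " << stamp.tv_sec << "s " << stamp.tv_nsec << "ns" << endl;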