void renderNextBlock (AudioSampleBuffer& outputBuffer, int startSample, int numSamples)
{
    if (playing != notPlaying) {
        // While the key is released, scale the level by the decaying tail-off factor.
        const double levelMult = level * (playing == keyReleased ? tailOff : 1.0);

        for (int sample = startSample; sample < startSample + numSamples; ++sample) {
            // Secondary oscillator o1 modulates the amplitude of the wavetable output.
            const double o1 = sin (o1_angle * 2.0 * double_Pi);
            const double amplitude = 1.0 + (0.5 * o1);
            const float currentSampleVal =
                (float) (wavetable.lookup (currentAngle) * levelMult * amplitude);

            // Mix the sample into every output channel.
            for (int i = outputBuffer.getNumChannels(); --i >= 0;) {
                *outputBuffer.getSampleData (i, sample) += currentSampleVal;
            }

            // Advance and wrap both phase accumulators.
            currentAngle = angleWrap (currentAngle + angleDelta);
            o1_angle = angleWrap (o1_angle + o1_angleDelta);

            if (playing == keyReleased) {
                tailOff *= 0.99;

                if (tailOff <= 0.005) {
                    // The note has faded out completely; stop the voice.
                    clearCurrentNote();
                    playing = notPlaying;
                    angleDelta = 0.0;
                    break;
                }
            }
        }
    }
}
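All of the examples on this page call an angleWrap helper whose definition is not included. As a rough guide only, here is a minimal sketch of what such a helper commonly looks like, assuming it normalizes an angle in radians into [0, 2*pi); the actual implementations behind these snippets may use a different range (e.g. [-pi, pi]), wrap a normalized phase into [0, 1), or modify the argument in place, as Examples #3 and #4 appear to do.

#include <cmath>

// Minimal sketch (assumption, not the original code): wrap an angle into [0, 2*pi).
double angleWrap (double angle)
{
    const double twoPi = 2.0 * 3.14159265358979323846;
    angle = std::fmod (angle, twoPi);
    if (angle < 0.0)
        angle += twoPi;
    return angle;
}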
Example #2
// Returns the signed difference ang1 - ang2, normalized into [-pi, pi].
double angleDiff(double ang1, double ang2)
{
    ang1 = angleWrap(ang1);
    ang2 = angleWrap(ang2);
    double d = ang1 - ang2;
    if ( d > cStdMath::MATH_PI )
        d -= 2.0 * (double)cStdMath::MATH_PI;
    else if ( d < -(double)cStdMath::MATH_PI )
        d += 2.0 * (double)cStdMath::MATH_PI;
    return d;
}
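A quick usage sketch (hypothetical values, and assuming any angleWrap that maps into a 2*pi-wide interval, such as the sketch above): the signed difference stays small even when the raw subtraction crosses the wrap point.

#include <cstdio>

int main()
{
    std::printf("%f\n", angleDiff(0.3, 0.1));                          // prints roughly 0.2
    std::printf("%f\n", angleDiff(0.1, 2.0 * 3.14159265358979 - 0.1)); // also roughly 0.2, not -6.08
    return 0;
}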
Example #3
/**
 * @brief QrSlam::addObservationPos
 * Adds a landmark point (x, y) to the observations.
 * @param landmarktemp     corner point of the 2D mark
 * @param id               ID of the 2D mark
 * @return                 point (x, y, id), where id = ID*5 + j (j = 0..4)
 */
Point3ffi QrSlam::addObservationPos(ConerPoint landmarktemp, int id)
{
    // Generate observations. Assume the sensor can observe every feature within sd meters of the robot.  @param[in] sd
    Point2f delta;
    float dst;   // distance to the feature
    float theta; // bearing of the feature w.r.t. the robot frame
    Point3ffi mark_temp;
    delta = Point2f(landmarktemp.X, landmarktemp.Y); //-Point2f(RobotPos.X,RobotPos.Y); // already a delta value
    dst = norm(delta);
    theta = atan2(delta.y, delta.x); //- robot_info_.Theta ;//    ??? the true path of vehicle
    //    if(delta.x < 0)
    //        theta = 3.14159 + theta;   //- robot_info_.Theta ;//    ??? the true path of vehicle
    //        dst += genGaussianValue(sigma_r*sigma_r);
    //        theta += genGaussianValue(sigma_phi*sigma_phi); // add noise to the measurement
    angleWrap(theta);
    mark_temp.x = dst;
    mark_temp.y = theta;
    mark_temp.z = id; // temp value
    if ((id % 5 == 4) && (id / 5 == 19))
    {
        std::string text_id = "id: " + int2str(id / 5);
        cv::putText(cv_camera_, text_id, cvPoint(20,100), CV_FONT_HERSHEY_COMPLEX, 1, CV_RGB(0,0,255));

        std::string text1 = "d: " + float2str(mark_temp.x);
        cv::putText(cv_camera_, text1, cvPoint(20,150), CV_FONT_HERSHEY_COMPLEX, 1, CV_RGB(0,0,255));

        std::string text2 = "theta: " + float2str(mark_temp.y * 180 / 3.14159);
        cv::putText(cv_camera_, text2, cvPoint(20,200), CV_FONT_HERSHEY_COMPLEX, 1, CV_RGB(0,0,255));
    }
    return mark_temp;
}
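The return comment packs the mark ID and the corner index into a single id (id = ID*5 + j), and the drawing branch above unpacks it with id/5 and id%5. Purely for illustration (these helper names are not in the original source), the decoding could be factored out like this:

// Illustrative helper (not part of QrSlam): unpack the composite id used above.
struct MarkIndex { int marker_id; int corner; };

static MarkIndex decodeObservationId(int id)
{
    MarkIndex m;
    m.marker_id = id / 5;  // ID of the 2D mark
    m.corner    = id % 5;  // corner index j in 0..4
    return m;
}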
Example #4
/*
 * ekf_slam: algorithm design and implementation
 * Uses odometry velocity information + observed landmark information --> updated robot state and map
 *
 * Steps
 *     1. Initialize the state and the timestamp
 *     2. Motion model: velocity from the previous step (t-1) + dt
 *     3. Observation model: new landmark     --> augment miu with the observation
 *                           existing landmark --> substitute into the fusion update
 *     4. Data fusion: prediction/observation residual --> Kalman gain
 *        state update --> robot pose + map positions
 * Variables:
 *     1. miu_state(3+2n,1)    system state vector        miu_convar_p   system state covariance (3+2n,3+2n)
 *     2. observed_mark_num    total number of observed landmarks (n)    observed_mark_num_old   number already stored --> state augmentation
 */
void QrSlam::ekfSlam(float V, float W)
{
    cout<<"ekfslam start !"<<endl;
    if(init_EKF_value_)
    {
        ftime(&Time_);
        miu_state = Mat::zeros(3,1,CV_32FC1);       // s-related terms removed; this used to be 3+3*N

        miu_state.at<float>(0) = robot_info_.X + coordinate_x_;
        miu_state.at<float>(1) = robot_info_.Y + coordinate_y_;      // take y in the standard positive direction
        miu_state.at<float>(2) = robot_info_.Theta + coordinate_angle_ ;

        miu_convar_p =  Mat::zeros(3,3,CV_32FC1);    // could be moved into an initialization function  ?? initial value
        miu_convar_p.at<float>(0,0) = 0.1;//0.1;//1;//100;//0.1;
        miu_convar_p.at<float>(1,1) = 0.10;//0.1;//1;//100;//0.1;
        miu_convar_p.at<float>(2,2) = 0.38;//0.1;//0.38;

        init_EKF_value_ = false;
        TimeOld_ = Time_;
        velocities_.push_back(Point2f(V,W));
    }
    else
    {
        ftime(&Time_);
        delta_t_ = (Time_.time - TimeOld_.time) + (Time_.millitm - TimeOld_.millitm)/1000.0; //  seconds
        /////----------------------------------------------------------------------------------------------------------
        // landmark_vector_ = pQrDetect_->QrCodeMapping(visual_mark_num_,robot_pose); // the observations obtained are real physical distances
        cout<<"observation start !"<<endl;
        genObservations();
        getNumQrcode();
        /////----------------------------------------------------------------------------------------------------------
        int observed_mark_num = observed_landmark_num.size();
        cout<<"----------------"<<observed_mark_num_old<<endl;

        if(observed_mark_num > observed_mark_num_old)     // augment the state miu_SLAM: each code adds two quantities (x,y) to the system state   ??? add mark (r,theta) values; augmentation and counting
        {
            // augment miu_SLAM; refactor this into a member function later
            cv::Mat miu_state_new = Mat::zeros(3+2*observed_mark_num,1,CV_32FC1);   // s-related terms removed
            for(uint i=0;i<3+2*observed_mark_num_old;i++)
            {
                miu_state_new.at<float>(i) = miu_state.at<float>(i);
            }
            miu_state = Mat::zeros(3+2*observed_mark_num,1,CV_32FC1);   // s-related terms removed; this used to be 3+3*N
            for(uint i=0;i<3+2*observed_mark_num_old;i++)
            {
                miu_state.at<float>(i) = miu_state_new.at<float>(i);
            }
            displayMatrix(miu_state);
            //  augment xP_SLAM
            cv::Mat miu_convar_p_new =  Mat::zeros(3+2*observed_mark_num,3+2*observed_mark_num,CV_32FC1);  // could be moved into an initialization function  ?? initial value ???
            displayMatrix(miu_convar_p_new);
            displayMatrix(miu_convar_p);

            for(uint i=0;i<3+2*observed_mark_num_old ;i++)
            {
                for(uint j=0;j<3+2*observed_mark_num_old;j++)
                {
                    miu_convar_p_new.at<float>(i,j) = miu_convar_p.at<float>(i,j);
                }
            }

            miu_convar_p = Mat::zeros(3+2*observed_mark_num,3+2*observed_mark_num,CV_32FC1);   // s-related terms removed; this used to be 3+3*N
            for(uint i=0;i<3+2*observed_mark_num_old;i++)
            {
                for(uint j=0;j<3+2*observed_mark_num_old;j++)
                    miu_convar_p.at<float>(i,j) = miu_convar_p_new.at<float>(i,j);
            }

            for(uint i=3+2*observed_mark_num_old;i<3+2*observed_mark_num;i++)
            {
                miu_convar_p.at<float>(i,i) = 1000000;   // diagonal variance for a new landmark must be large: 1000000
            }
        }
        observed_mark_num_old = observed_mark_num;
        // question of which values are passed in for xPred_SLAM and xPPred_SLAM
        Mat miu_prediction = Mat::zeros(3+2*observed_mark_num, 1, CV_32FC1); // used to be 3+3*N
        //Point3f xLast; // no longer needed; the first pass uses the initialized value
        Mat x_convar_p_prediction = Mat::zeros(3+2*observed_mark_num, 3+2*observed_mark_num, CV_32FC1); // predicted covariance matrix
        Mat Fx = Mat::zeros(3, 3+2*observed_mark_num, CV_32FC1);
        Mat I_SLAM = Mat::eye(3+2*observed_mark_num, 3+2*observed_mark_num, CV_32FC1); // identity matrix used by the algorithm
        Mat Gt = Mat::zeros(3+2*observed_mark_num, 3+2*observed_mark_num, CV_32FC1);
        Mat Gt_temp = Mat::zeros(3, 3, CV_32FC1); // 3x3 matrix used to compute Gt
        Mat Ht = Mat::zeros(2, 3+2*observed_mark_num, CV_32FC1);
        Mat Kt = Mat::zeros(3+2*observed_mark_num, 2, CV_32FC1);

        Mat Fj;
        Mat cal_temp = Mat::zeros(3, 1, CV_32FC1); // 3x1 matrix used when computing xPred_SLAM (line 3 of the algorithm)
        Mat Qt = Mat::zeros(2, 2, CV_32FC1);
        Mat Ht_temp = Mat::zeros(2, 5, CV_32FC1); // 2x5 matrix used to compute Ht
//        Mat miu_temp_sum=Mat::zeros(3+2*observed_mark_num,1,CV_32FC1);
//        Mat Kt_i_Ht_sum=Mat::zeros(3+2*observed_mark_num,3+2*observed_mark_num,CV_32FC1);
//        Mat delta_z_observed =Mat::zeros(3+2*observed_mark_num,3+2*observed_mark_num,CV_32FC1);



        Mat St = Mat::zeros(2,2,CV_32FC1); //vv
        Mat Rt = Mat::zeros(3,3,CV_32FC1); //vv
        Mat Vt = Mat::zeros(3,2,CV_32FC1);
        Mat Mt = Mat::zeros(2,2,CV_32FC1);


        landmark_miu_conver_p_ = Mat::zeros(2,2,CV_64FC1); // related to the covariance ellipse
        ////////////////////////////////////////////////////////////////////////////////////
        //     motionModelIndex==0 // velocity mode
        velocities_.push_back(Point2f(V,W));
        Point2f VEL = velocities_.at(velocities_.size()-2);  // arrays start at 0, and a new value was just pushed

        Vd_ = VEL.x;
        Wd_ = VEL.y;  // take y in the standard positive direction
        cout<<"Vd: "<<Vd_<<"   "<<"Wd"<<Wd_<<endl;
        if( Wd_ < 0.00006 && Wd_ >=0)  Wd_ = 0.00006;
        if( Wd_ > -0.00006 && Wd_ <0)  Wd_ = -0.00006;
        cout<<"vd/wd"<<Vd_/Wd_<<endl;
        //        float Vr_noise, Wr_noise, Sigv, Sigw;
        //        Sigv = a1*Vd_*Vd_+a2*Wd_*Wd_;
        //        Sigw = a3*Vd_*Vd_+a4*Wd_*Wd_;
        //        Vr_noise = genGaussianValue(Sigv);
        //        Wr_noise = genGaussianValue(Sigw);

        //        Vd_ = VEL.x + Vr_noise;
        //        Wd_ = VEL.y + Wr_noise;
        //        if( Wd_ <0.00006 && Wd_ >=0)  Wd_ =0.0001;
        //        if( Wd_ >-0.00006 && Wd_ <0)  Wd_ =-0.0001;

        //// EKF-based SLAM: inputs are the current Observations and the robot pose estimated at the previous time step
        // compute Fx
        Fx.at<float>(0,0) = 1.0;
        Fx.at<float>(1,1) = 1.0;
        Fx.at<float>(2,2) = 1.0;
        // compute cal_temp; the prediction adds no disturbance
        float last_miu_theta = miu_state.at<float>(2) ; // heading angle from the previous miu_SLAM
        angleWrap(last_miu_theta);


        // velocity-model motion increment; Wd_ must not be 0
        cal_temp.at<float>(0) =  -Vd_/Wd_*sin(last_miu_theta) + Vd_/Wd_*sin(last_miu_theta+Wd_*delta_t_);
        cal_temp.at<float>(1) =   Vd_/Wd_*cos(last_miu_theta) - Vd_/Wd_*cos(last_miu_theta+Wd_*delta_t_);
        cal_temp.at<float>(2) =  Wd_*delta_t_;
        cout<<"cal_temp"<<cal_temp<<endl;

        ///prediction
        miu_prediction = miu_state + Fx.t()*cal_temp; // X'= X +Jaci_f(x)*delt(x)   predicted mean
        angleWrap(miu_prediction.at<float>(2));
        fre<<cal_temp.at<float>(0)<<"  "<<cal_temp.at<float>(1)<<" "<< cal_temp.at<float>(2)<<endl;
        // cout<<"miu_prediction"<<miu_prediction<<endl;

        // compute Gt, the motion Jacobian w.r.t. the state (x, y, theta)
        Gt_temp.at<float>(0,2) = -Vd_/Wd_*cos(last_miu_theta) + Vd_/Wd_*cos(last_miu_theta+Wd_*delta_t_);
        Gt_temp.at<float>(1,2) = -Vd_/Wd_*sin(last_miu_theta) + Vd_/Wd_*sin(last_miu_theta+Wd_*delta_t_);
        Gt=I_SLAM+Fx.t()*Gt_temp*Fx ;
        //change()
        //        Gt_temp.at<float>(0,2) = Vd_/Wd_*cos(last_miu_theta) - Vd_/Wd_*cos(last_miu_theta+Wd_*delta_t_);
        //        Gt_temp.at<float>(1,2) = Vd_/Wd_*sin(last_miu_theta) - Vd_/Wd_*sin(last_miu_theta+Wd_*delta_t_);
        //        Gt=I_SLAM+Fx.t()*Gt_temp*Fx ;


        // compute Vt, the motion Jacobian w.r.t. the controls (v, w)
        Vt.at<float>(0,0) = (-sin(last_miu_theta)+sin(last_miu_theta+Wd_*delta_t_))/Wd_;
        Vt.at<float>(0,1) = Vd_*(sin(last_miu_theta)-sin(last_miu_theta+Wd_*delta_t_))/Wd_/Wd_ + Vd_*cos(last_miu_theta+Wd_*delta_t_)*delta_t_/Wd_;
        Vt.at<float>(1,0) = (cos(last_miu_theta)-cos(last_miu_theta+Wd_*delta_t_))/Wd_;
        Vt.at<float>(1,1) = -Vd_*(cos(last_miu_theta)-cos(last_miu_theta+Wd_*delta_t_))/Wd_/Wd_ + Vd_*sin(last_miu_theta+Wd_*delta_t_)*delta_t_/Wd_;
        Vt.at<float>(2,0) = 0;
        Vt.at<float>(2,1) = delta_t_;

        // compute Mt, the motion noise in control space; why add the motion noise ?????
        Mt.at<float>(0,0) = a1*Vd_*Vd_+a2*Wd_*Wd_;
        Mt.at<float>(1,1) = a3*Vd_*Vd_+a4*Wd_*Wd_;
        Rt = Vt*Mt*Vt.t(); // compute Rt

        // compute the predicted covariance matrix
        cout<<"----------------------------------------"<<endl;
        // cout<<"miu_convar_p"<<miu_convar_p<<endl;
        x_convar_p_prediction = Gt*miu_convar_p*Gt.t() + Fx.t()*Rt*Fx; // predicted covariance Px  ??????????  xP_SLAM vs. xPred_SLAM

        // compute Qt, the measurement noise
        Qt.at<float>(0,0) = sigma_r*sigma_r;
        Qt.at<float>(1,1) = sigma_phi*sigma_phi;
        /////----------------------------------------------------------------------------------------------------------
        //    LandmarkVector = pLocalizer->QrCodeMapping(MarkVisualNum);
        //    GenObservations();

        ///?? the observations should later be changed to the x-y coordinate frame
        //    /////----------------------------------------------------------------------------------------------------------
        /////----------------------------------------------------------------------------------------------------------
        // update step
        int   j = 0;   // index of the Qr mark among the observed features; tied to the landmarks in the state vector
        int   Qid = 0; // the ID of the Qr mark
        int   observed_flag = 0 ; // flag: has this mark been seen before? 0 means never
        float dst;
        float theta;
        float q;
        Point2f delta;
        Point2f z,zp;
        Mat delta_z;
        for(int i=0; i< observations_.size(); i++)
        {
            Qid = observations_.at(i).z;
            z = Point2f(observations_.at(i).x,observations_.at(i).y); // observation, in polar coordinates (range, bearing)
            for(int c=0; c<Lm_observed_Num.size(); c++)
            {
                if(Qid == Lm_observed_Num[c])   // seen before: compare against the list of already-observed marks one by one
                {
                    observed_flag = 1 ;
                    j = c;                     // use the j-th landmark in the observation model and update the state
                }
            }
            if(observed_flag == 0) // never seen before
            {
                //           landmark_observed.at<float>(i)=j ; // remember the landmark number observed this time
                Lm_observed_Num.push_back(Qid) ;
                j = Lm_observed_Num.size()-1 ;                // note: arrays start at 0, hence the -1 ??????????
                // initialize the new landmark from the polar observation (range z.x, bearing z.y)
                miu_prediction.at<float>(2*j+3) = miu_prediction.at<float>(0) + z.x*cos( z.y + miu_prediction.at<float>(2) );  // x coordinate of the j-th landmark
                miu_prediction.at<float>(2*j+4) = miu_prediction.at<float>(1) + z.x*sin( z.y + miu_prediction.at<float>(2) );  // y coordinate of the j-th landmark
            }
            observed_flag=0 ;
            // predicted measurement   ?????? looked up from the motion prediction: Point2f(xPred_SLAM.at<float>(2*j+3),xPred_SLAM.at<float>(2*j+4))
            //             otherwise it is a mark just added to the map and unchanged; xPred_SLAM values are all in the world frame
            // delta is the landmark relative to the robot (in global coordinates: map value - predicted robot value); the observation z is the mark's measured range/bearing relative to the robot
            delta = Point2f(miu_prediction.at<float>(2*j+3),miu_prediction.at<float>(2*j+4))-Point2f(miu_prediction.at<float>(0),miu_prediction.at<float>(1));
            q = delta.x*delta.x+delta.y*delta.y ;
            dst = sqrt(q);
            theta = atan2(delta.y, delta.x) - miu_prediction.at<float>(2);      // bearing relative to the robot's heading, computed from x-y frame values
            zp.x= dst;
            angleWrap(theta);
            zp.y= theta;

            // compute Fj
            Fj=Mat::zeros(5,3+2*observed_mark_num,CV_32FC1);  // dimension reduced; it used to be 6 rows
            Fj.at<float>(0,0) = 1;
            Fj.at<float>(1,1) = 1;
            Fj.at<float>(2,2) = 1;
            Fj.at<float>(3,2*j+3) = 1;
            Fj.at<float>(4,2*j+4) = 1;

            // compute the measurement Jacobian Ht
            Ht_temp.at<float>(0,0) = -delta.x*dst;
            Ht_temp.at<float>(0,1) = -delta.y*dst;
            Ht_temp.at<float>(0,3) = delta.x*dst;
            Ht_temp.at<float>(0,4) = delta.y*dst;

            Ht_temp.at<float>(1,0) = delta.y;
            Ht_temp.at<float>(1,1) = -delta.x;
            Ht_temp.at<float>(1,2) = -q;
            Ht_temp.at<float>(1,3) = -delta.y;
            Ht_temp.at<float>(1,4) = delta.x;

            ///~~~~~~~~~~~~~~~~~~
            Ht=(1/q)*Ht_temp*Fj ;

            St = Ht*x_convar_p_prediction*Ht.t()+Qt;
            Kt = x_convar_p_prediction*Ht.t()*St.inv();

            z = z-zp;       // innovation (measured minus predicted) for the update
            angleWrap(z.y);
            delta_z = Mat::zeros(2,1,CV_32FC1);
            delta_z.at<float>(0) = z.x;
            delta_z.at<float>(1) = z.y;
            //××                        // xPred_SLAM is in the x-y frame while delta_z is in polar form; slightly questionable, but the observation model is built in polar coordinates, so Ht relates range and bearing and the expression for Kt still holds
            //                        miu_temp_sum = miu_temp_sum + Kt*delta_z ;
            //                        Kt_i_Ht_sum = Kt_i_Ht_sum + Kt*Ht;
            //                    }
            //                    miu_prediction =miu_prediction + miu_temp_sum ;  // xPred_SLAM stores landmark-related values in polar form
            //                    angleWrap(miu_prediction.at<float>(2));
            //                    x_convar_p_prediction= (I_SLAM-Kt_i_Ht_sum)*x_convar_p_prediction;
            //                    miu_state=miu_prediction;
            //                    miu_convar_p=x_convar_p_prediction;
            //×××
            ////××
            miu_prediction = miu_prediction +Kt*delta_z;  // state correction with the Kalman gain
            angleWrap(miu_prediction.at<float>(2));
            x_convar_p_prediction= (I_SLAM-Kt*Ht)*x_convar_p_prediction;
        }
        miu_state=miu_prediction;
        miu_convar_p=x_convar_p_prediction;
        ////×
        ///
        Point3f xPred;
        xPred.x=miu_prediction.at<float>(0);
        xPred.y=miu_prediction.at<float>(1);
        xPred.z=miu_prediction.at<float>(2);
        angleWrap(xPred.z);

        est_path_points_.push_back(xPred);
        miu_convar_p= 0.5*(miu_convar_p+miu_convar_p.t());
        //    xP_SLAM(Rect(0,0,3,3)).copyTo(xP);   // extract the pose covariance, used to draw the uncertainty ellipse

        //draw_mark();
        for(int t=0;t<3+2*Lm_observed_Num.size();t++)
        {
            cout <<" "<<miu_state.at<float>(t)<<" ";
        }
        cout <<" "<<endl;
        //   cvWaitKey(10);
        TimeOld_ = Time_;
    }
}
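For orientation, the prediction and correction performed by ekfSlam follow the standard EKF-SLAM equations; the symbols below are matched to the code's variable names where possible (g is the velocity-model increment stored in cal_temp, and Sigma corresponds to miu_convar_p / x_convar_p_prediction):

\[
\begin{aligned}
\bar{\mu}_t &= \mu_{t-1} + F_x^{\top}\, g(v, w, \mu_{t-1}) \\
\bar{\Sigma}_t &= G_t\, \Sigma_{t-1}\, G_t^{\top} + F_x^{\top} R_t F_x, \qquad R_t = V_t M_t V_t^{\top} \\
S_t &= H_t\, \bar{\Sigma}_t\, H_t^{\top} + Q_t, \qquad K_t = \bar{\Sigma}_t\, H_t^{\top} S_t^{-1} \\
\mu_t &= \bar{\mu}_t + K_t\,(z_t - \hat{z}_t), \qquad \Sigma_t = (I - K_t H_t)\, \bar{\Sigma}_t
\end{aligned}
\]

The last two lines are applied once per observed landmark inside the update loop.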