void App::joint_states_cb(const sensor_msgs::JointStateConstPtr& msg){
  pronto::force_torque_t force_torque;
  appendFootSensor(force_torque);

  bool send_est_robot_state = true;
  if (send_est_robot_state){
    pronto::robot_state_t m;
    m.pose.translation.x = 0;
    m.pose.translation.y = 0;
    m.pose.translation.z = 0.85;
    m.pose.rotation.w = last_ins_quat_[0];
    m.pose.rotation.x = last_ins_quat_[1];
    m.pose.rotation.y = last_ins_quat_[2];
    m.pose.rotation.z = last_ins_quat_[3];
    m.utime = (int64_t) floor(msg->header.stamp.toNSec()/1000);
    m.num_joints = msg->name.size();
    m.joint_name = msg->name;
    for (size_t i = 0; i < msg->name.size(); i++) {
      m.joint_position.push_back( msg->position[i] );
      m.joint_velocity.push_back( msg->velocity[i] );
      m.joint_effort.push_back( msg->effort[i] );
    }
    m.force_torque = force_torque;
    lcm_publish_.publish("EST_ROBOT_STATE", &m);

    bot_core::pose_t p;
    p.utime = m.utime;
    p.pos[0] = 0;
    p.pos[1] = 0;
    p.pos[2] = 0.85;
    memcpy(p.orientation, last_ins_quat_, 4*sizeof(double));
    lcm_publish_.publish("POSE_BODY", &p);
  }

  bool send_atlas_robot_state = true;
  if (send_atlas_robot_state){
    pronto::atlas_state_t m;
    m.utime = (int64_t) floor(msg->header.stamp.toNSec()/1000);
    m.num_joints = msg->name.size();
    for (size_t i = 0; i < msg->name.size(); i++) {
      m.joint_position.push_back( msg->position[i] );
      m.joint_velocity.push_back( msg->velocity[i] );
      m.joint_effort.push_back( msg->effort[i] );
    }
    m.force_torque = force_torque;
    lcm_publish_.publish("ATLAS_STATE", &m);
  }

  pronto::utime_t utime_msg;
  int64_t joint_utime = (int64_t) msg->header.stamp.toNSec()/1000; // from nsec to usec
  utime_msg.utime = joint_utime;
  lcm_publish_.publish("ROBOT_UTIME", &utime_msg);
  last_joint_state_utime_ = joint_utime;
}
void ForceAutonomousMode() {
  // step 1: ask for a single trajectory
  lcmt::tvlqr_controller_action trajectory_msg;
  trajectory_msg.timestamp = GetTimestampNow();
  trajectory_msg.trajectory_number = 0;
  lcm_->publish("rc-trajectory-commands", &trajectory_msg);
  ProcessAllLcmMessagesNoDelayedUpdate();

  // step 2: send an autonomous mode signal
  lcmt::timestamp autonomous_msg;
  autonomous_msg.timestamp = GetTimestampNow();
  lcm_->publish("state-machine-go-autonomous", &autonomous_msg);
  ProcessAllLcmMessagesNoDelayedUpdate();
}
void App::imu_cb(const sensor_msgs::ImuConstPtr& msg){
  imu_msg_ = *msg;

  microstrain::ins_t m;
  m.utime = (int64_t) floor(msg->header.stamp.toNSec()/1000);
  m.device_time = (int64_t) floor(msg->header.stamp.toNSec()/1000);
  m.gyro[0] = msg->angular_velocity.x;
  m.gyro[1] = msg->angular_velocity.y;
  m.gyro[2] = msg->angular_velocity.z;
  m.mag[0] = 0;
  m.mag[1] = 0;
  m.mag[2] = 0;
  m.accel[0] = msg->linear_acceleration.x;
  m.accel[1] = msg->linear_acceleration.y;
  m.accel[2] = msg->linear_acceleration.z;
  m.quat[0] = msg->orientation.w;
  m.quat[1] = msg->orientation.x;
  m.quat[2] = msg->orientation.y;
  m.quat[3] = msg->orientation.z;
  m.pressure = 0;
  m.rel_alt = 0;
  lcm_publish_.publish("MICROSTRAIN_INS", &m);

  memcpy(last_ins_quat_, m.quat, 4*sizeof(double));
}
void rgb_tool::kinectHandler(const lcm::ReceiveBuffer* rbuf, const std::string& channel,
                             const kinect::frame_msg_t* msg){
  bot_core::image_t lcm_img;
  lcm_img.utime = msg->image.timestamp;
  lcm_img.width = msg->image.width;
  lcm_img.height = msg->image.height;
  lcm_img.nmetadata = 0;
  int n_colors = 3;
  lcm_img.row_stride = n_colors * msg->image.width;

  if (msg->image.image_data_format == kinect::image_msg_t::VIDEO_RGB){
    lcm_img.pixelformat = bot_core::image_t::PIXEL_FORMAT_RGB;
    lcm_img.size = msg->image.image_data_nbytes;
    lcm_img.data = msg->image.image_data;
  } else if (msg->image.image_data_format == kinect::image_msg_t::VIDEO_RGB_JPEG){
    lcm_img.data = msg->image.image_data;
    lcm_img.size = msg->image.image_data_nbytes;
    lcm_img.pixelformat = bot_core::image_t::PIXEL_FORMAT_MJPEG;
  } else {
    std::cout << "Format not recognized: " << msg->image.image_data_format << std::endl;
  }

  lcm_->publish("KINECT_RGB", &lcm_img);
}
void updateStateThread() {
  while (0 == lcm1.handle()) {
    x0.at(0,0) = current_state.position[0]/1000.0;
    x0.at(3,0) = current_state.position[1]/1000.0;
    x0.at(6,0) = -(current_state.position[2]/1000.0 + simu_height) + better_height;
    x0.at(1,0) = current_state.velocity[0];
    x0.at(4,0) = current_state.velocity[1];
    x0.at(7,0) = -current_state.velocity[2];

    accumu_err_x = accumu_err_x + x0.at(0,0) * dT;
    accumu_err_y = accumu_err_y + x0.at(3,0) * dT;
    accumu_err_z = accumu_err_z + x0.at(6,0) * dT;
    x0.at(2,0) = accumu_err_x;
    x0.at(5,0) = accumu_err_y;
    x0.at(8,0) = accumu_err_z;

    usleep(5e4);
    // cout <<__LINE__<<endl;
  }
}
void stopRobot() {
  cmdvel_.linear_velocity.x = 0.0;
  cmdvel_.linear_velocity.y = 0.0;
  cmdvel_.linear_velocity.z = 0.0;
  cmdvel_.angular_velocity.x = 0.0;
  cmdvel_.angular_velocity.y = 0.0;
  cmdvel_.angular_velocity.z = 0.0;
  lcm.publish("NAV_CMDS", &cmdvel_);
  //pub_.publish(cmdvel_);
}
void SendStereoManyPointsTriple(vector<float> x_in, vector<float> y_in, vector<float> z_in) {
  lcmt::stereo msg;
  msg.timestamp = GetTimestampNow();

  vector<float> x, y, z;
  vector<unsigned char> grey;

  for (int i = 0; i < (int)x_in.size(); i++) {
    double this_point[3];
    this_point[0] = x_in[i];
    this_point[1] = y_in[i];
    this_point[2] = z_in[i];

    double point_transformed[3];
    GlobalToCameraFrame(this_point, point_transformed);

    //std::cout << "Point: (" << point_transformed[0] << ", " << point_transformed[1] << ", " << point_transformed[2] << ")" << std::endl;

    // each point is appended three times (hence "Triple")
    for (int copy = 0; copy < 3; copy++) {
      x.push_back(point_transformed[0]);
      y.push_back(point_transformed[1]);
      z.push_back(point_transformed[2]);
      grey.push_back(0);
    }
  }

  msg.x = x;
  msg.y = y;
  msg.z = z;
  msg.grey = grey;
  msg.number_of_points = x.size();
  msg.video_number = 0;
  msg.frame_number = 0;

  lcm_->publish("stereo", &msg);
}
bool waitForLCM(lcm::LCM& lcm, double timeout) {
  int lcmFd = lcm.getFileno();

  struct timeval tv;
  tv.tv_sec = 0;
  tv.tv_usec = timeout * 1e6;

  fd_set fds;
  FD_ZERO(&fds);
  FD_SET(lcmFd, &fds);

  int status = select(lcmFd + 1, &fds, 0, 0, &tv);
  if (status == -1 && errno != EINTR) {
    std::cout << "select() returned error: " << errno << std::endl;
  } else if (status == -1 && errno == EINTR) {
    std::cout << "select() interrupted" << std::endl;
  }
  return (status > 0 && FD_ISSET(lcmFd, &fds));
}
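// Hedged usage sketch (not part of the original source): a typical pump loop built on
// waitForLCM(), polling with a 10 ms timeout and only calling handle() when the LCM file
// descriptor is readable, so the loop never blocks indefinitely.  The `lcm` reference and
// `should_stop` flag are assumed to be provided by the caller.
void pumpLcm(lcm::LCM& lcm, volatile bool& should_stop) {
  while (!should_stop) {
    if (waitForLCM(lcm, 0.01)) {
      lcm.handle();  // a message is waiting, so this returns without blocking
    }
  }
}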
void ErraticKeyboardTeleopNode::keyboardLoop() {
  char c;
  double max_tv_forward = walk_vel_;
  double max_tv_strafe = walk_vel_;
  double max_rv = yaw_rate_;
  bool dirty = false;
  int speed = 0;
  int turn = 0;

  // get the console in raw mode
  tcgetattr(kfd, &cooked);
  memcpy(&raw, &cooked, sizeof(struct termios));
  raw.c_lflag &= ~(ICANON | ECHO);
  raw.c_cc[VEOL] = 1;
  raw.c_cc[VEOF] = 2;
  tcsetattr(kfd, TCSANOW, &raw);

  puts("Reading from keyboard");
  puts("Use Q E keys to strafe left/right");
  puts("Use WASD keys to control the robot");
  puts("Press Shift to move faster");

  struct pollfd ufd;
  ufd.fd = kfd;
  ufd.events = POLLIN;

  for(;;) {
    boost::this_thread::interruption_point();

    // get the next event from the keyboard
    int num;
    if ((num = poll(&ufd, 1, 450)) < 0) {
      perror("poll():");
      return;
    } else if (num > 0) {
      if (read(kfd, &c, 1) < 0) {
        perror("read():");
        return;
      }
    } else {
      // no key pressed within the poll timeout: stop the robot if it was moving
      if (dirty == true) {
        stopRobot();
        dirty = false;
      }
      continue;
    }

    switch(c) {
      case KEYCODE_W:
        max_tv_forward = walk_vel_;
        max_tv_strafe = 0;
        speed = 1; turn = 0;
        dirty = true;
        break;
      case KEYCODE_S:
        max_tv_forward = walk_vel_;
        max_tv_strafe = 0;
        speed = -1; turn = 0;
        dirty = true;
        break;
      case KEYCODE_Q:
        max_tv_forward = 0;
        max_tv_strafe = walk_vel_;
        speed = 1; turn = 0;
        dirty = true;
        break;
      case KEYCODE_E:
        max_tv_forward = 0;
        max_tv_strafe = walk_vel_;
        speed = -1; turn = 0;
        dirty = true;
        break;
      case KEYCODE_A:
        max_rv = yaw_rate_;
        speed = 0; turn = 1;
        dirty = true;
        break;
      case KEYCODE_D:
        max_rv = yaw_rate_;
        speed = 0; turn = -1;
        dirty = true;
        break;
      case KEYCODE_W_CAP:
        max_tv_forward = run_vel_;
        max_tv_strafe = 0;
        speed = 1; turn = 0;
        dirty = true;
        break;
      case KEYCODE_S_CAP:
        max_tv_forward = run_vel_;
        max_tv_strafe = 0;
        speed = -1; turn = 0;
        dirty = true;
        break;
      case KEYCODE_Q_CAP:
        max_tv_forward = 0;
        max_tv_strafe = run_vel_;
        speed = 1; turn = 0;
        dirty = true;
        break;
      case KEYCODE_E_CAP:
        // strafe at run speed, mirroring KEYCODE_E
        max_tv_forward = 0;
        max_tv_strafe = run_vel_;
        speed = -1; turn = 0;
        dirty = true;
        break;
      case KEYCODE_A_CAP:
        max_rv = yaw_rate_run_;
        max_tv_strafe = 0;
        speed = 0; turn = 1;
        dirty = true;
        break;
      case KEYCODE_D_CAP:
        max_rv = yaw_rate_run_;
        max_tv_strafe = 0;
        speed = 0; turn = -1;
        dirty = true;
        break;
      default:
        max_tv_forward = walk_vel_;
        max_rv = yaw_rate_;
        speed = 0; turn = 0;
        dirty = false;
    }

    cmdvel_.linear_velocity.x = speed * max_tv_forward;  // fwd
    cmdvel_.linear_velocity.y = speed * max_tv_strafe;   // strafe
    cmdvel_.angular_velocity.z = turn * max_rv;
    lcm.publish("NAV_CMDS", &cmdvel_);
    //pub_.publish(cmdvel_);
  }
}
void UnsubscribeLcmChannels() {
  lcm_->unsubscribe(pose_sub_);
  lcm_->unsubscribe(stereo_sub_);
  lcm_->unsubscribe(rc_traj_sub_);
  lcm_->unsubscribe(go_auto_sub_);
}
void SubscribeLcmChannels(StateMachineControl *fsm_control) {
  pose_sub_ = lcm_->subscribe(pose_channel_, &StateMachineControl::ProcessImuMsg, fsm_control);
  stereo_sub_ = lcm_->subscribe(stereo_channel_, &StateMachineControl::ProcessStereoMsg, fsm_control);
  rc_traj_sub_ = lcm_->subscribe(rc_trajectory_commands_channel_, &StateMachineControl::ProcessRcTrajectoryMsg, fsm_control);
  go_auto_sub_ = lcm_->subscribe(state_machine_go_autonomous_channel_, &StateMachineControl::ProcessGoAutonomousMsg, fsm_control);
}
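// Hedged wiring sketch (assumed, not from the original source): how SubscribeLcmChannels()
// and UnsubscribeLcmChannels() are typically paired around a blocking handle loop that feeds
// the StateMachineControl callbacks.  The loop runs until handle() reports an error.
void RunStateMachine(StateMachineControl *fsm_control) {
  SubscribeLcmChannels(fsm_control);
  while (lcm_->handle() == 0) {
    // each handle() call dispatches the next batch of messages to the FSM callbacks
  }
  UnsubscribeLcmChannels();
}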
void ProcessAllLcmMessagesNoDelayedUpdate() {
  while (NonBlockingLcm(lcm_->getUnderlyingLCM())) {}
}
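// NonBlockingLcm() is not defined in this section.  A minimal sketch consistent with how it
// is used above (return true if a pending message was dispatched, false otherwise), built on
// the LCM C API (lcm_get_fileno(), lcm_handle() from <lcm/lcm.h>) and a zero-timeout select():
bool NonBlockingLcm(lcm_t *lcm) {
  int fd = lcm_get_fileno(lcm);

  fd_set fds;
  FD_ZERO(&fds);
  FD_SET(fd, &fds);

  struct timeval timeout = {0, 0};  // return immediately if nothing is waiting
  int status = select(fd + 1, &fds, NULL, NULL, &timeout);

  if (status > 0 && FD_ISSET(fd, &fds)) {
    lcm_handle(lcm);  // dispatch the pending message(s)
    return true;
  }
  return false;  // nothing pending (or select() failed)
}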
void App::data_cb(const atlas_hardware_interface::AtlasPrefilterConstPtr& msg){
  if (js_counter % 500 == 0){
    std::cout << "J ST " << js_counter << "\n";
  }
  js_counter++;

  int64_t utime = (int64_t) floor(msg->filtered_imu.header.stamp.toNSec()/1000);

  // 1....................................
  pronto::atlas_behavior_t msg_out;
  msg_out.utime = utime;
  msg_out.behavior = (int) msg->current_behavior.state;
  lcm_publish_.publish("ATLAS_BEHAVIOR", &msg_out);

  // 2....................................
  bot_core::pose_t pose_msg;
  pose_msg.utime = utime;
  pose_msg.pos[0] = msg->pos_est.position.x;
  pose_msg.pos[1] = msg->pos_est.position.y;
  pose_msg.pos[2] = msg->pos_est.position.z;
  // what about orientation in imu msg?
  pose_msg.orientation[0] = msg->filtered_imu.orientation.w;
  pose_msg.orientation[1] = msg->filtered_imu.orientation.x;
  pose_msg.orientation[2] = msg->filtered_imu.orientation.y;
  pose_msg.orientation[3] = msg->filtered_imu.orientation.z;

  // This transformation is NOT correct for Trooper
  // April 2014: added conversion to body frame - so that both rates are in body frame
  Eigen::Matrix3d R = Eigen::Matrix3d( Eigen::Quaterniond( msg->filtered_imu.orientation.w, msg->filtered_imu.orientation.x,
                                                           msg->filtered_imu.orientation.y, msg->filtered_imu.orientation.z ));
  Eigen::Vector3d lin_body_vel = R*Eigen::Vector3d( msg->pos_est.velocity.x, msg->pos_est.velocity.y, msg->pos_est.velocity.z );
  pose_msg.vel[0] = lin_body_vel[0];
  pose_msg.vel[1] = lin_body_vel[1];
  pose_msg.vel[2] = lin_body_vel[2];

  // this is the body frame rate
  pose_msg.rotation_rate[0] = msg->filtered_imu.angular_velocity.x;
  pose_msg.rotation_rate[1] = msg->filtered_imu.angular_velocity.y;
  pose_msg.rotation_rate[2] = msg->filtered_imu.angular_velocity.z;

  // Frame?
  pose_msg.accel[0] = msg->filtered_imu.linear_acceleration.x;
  pose_msg.accel[1] = msg->filtered_imu.linear_acceleration.y;
  pose_msg.accel[2] = msg->filtered_imu.linear_acceleration.z;

  lcm_publish_.publish("POSE_BDI", &pose_msg);
  lcm_publish_.publish("POSE_BODY", &pose_msg); // for now

  // 3..........................................
  pronto::atlas_state_t js_out;
  js_out.utime = (int64_t) utime;
  int n_joints = 28;
  js_out.joint_position.assign(n_joints, std::numeric_limits<int>::min());
  js_out.joint_velocity.assign(n_joints, std::numeric_limits<int>::min());
  js_out.joint_effort.assign(n_joints, std::numeric_limits<int>::min());
  js_out.num_joints = n_joints;
  for (std::vector<int>::size_type i = 0; i < n_joints; i++) {
    js_out.joint_position[i] = msg->j[i].q;
    js_out.joint_velocity[i] = msg->j[i].qd;
    js_out.joint_effort[i]   = msg->j[i].f;
  }

  // Append FT sensor info
  pronto::force_torque_t ft_out;
  ft_out.l_foot_force_z  = msg->foot_sensors[0].force.z;
  ft_out.l_foot_torque_x = msg->foot_sensors[0].torque.x;
  ft_out.l_foot_torque_y = msg->foot_sensors[0].torque.y;
  ft_out.r_foot_force_z  = msg->foot_sensors[1].force.z;
  ft_out.r_foot_torque_x = msg->foot_sensors[1].torque.x;
  ft_out.r_foot_torque_y = msg->foot_sensors[1].torque.y;
  ft_out.l_hand_force[0] = 0;
  ft_out.l_hand_force[1] = 0;
  ft_out.l_hand_force[2] = 0;
  ft_out.l_hand_torque[0] = 0;
  ft_out.l_hand_torque[1] = 0;
  ft_out.l_hand_torque[2] = 0;
  ft_out.r_hand_force[0] = 0;
  ft_out.r_hand_force[1] = 0;
  ft_out.r_hand_force[2] = 0;
  ft_out.r_hand_torque[0] = 0;
  ft_out.r_hand_torque[1] = 0;
  ft_out.r_hand_torque[2] = 0;
  js_out.force_torque = ft_out;
  lcm_publish_.publish("ATLAS_STATE", &js_out);

  // 4........................................................
  pronto::utime_t utime_msg;
  int64_t joint_utime = (int64_t) utime;
  utime_msg.utime = utime;
  lcm_publish_.publish("ROBOT_UTIME", &utime_msg);

  // 5........................................................
  pronto::atlas_raw_imu_batch_t imu;
  imu.utime = (int64_t) utime;
  imu.num_packets = 15;
  for (size_t i = 0; i < 15; i++){
    pronto::atlas_raw_imu_t raw;
    raw.utime = (int64_t) floor(msg->raw_imu[i].imu_timestamp.toNSec()/1000);
    raw.packet_count = msg->raw_imu[i].packet_count;
    raw.delta_rotation[0] = msg->raw_imu[i].da.x;
    raw.delta_rotation[1] = msg->raw_imu[i].da.y;
    raw.delta_rotation[2] = msg->raw_imu[i].da.z;
    raw.linear_acceleration[0] = msg->raw_imu[i].dd.x;
    raw.linear_acceleration[1] = msg->raw_imu[i].dd.y;
    raw.linear_acceleration[2] = msg->raw_imu[i].dd.z;
    imu.raw_imu.push_back(raw);
  }
  lcm_publish_.publish("ATLAS_IMU_BATCH", &imu);
}
int main() {
  if(!lcm1.good())
    return 1;

  VideoCapture cap(1);
  cap >> frame;
  start_time = utime_now();

  float x = 0, y = 0, z = 0;
  int estPixel;

  // Temporary local variables
  Rect trackWindow;
  int hsize = 16;
  float hranges[] = {0,180};
  const float* phranges = hranges;
  Mat hsv, hue, mask, hist, histimg = Mat::zeros(200, 320, CV_8UC3), backproj;
  bool paused = false;
  int _vmin, _vmax;

  namedWindow( "CamShift Demo", 1 );
  setMouseCallback( "CamShift Demo", onMouse, 0 );

  while(!frame.empty()) {
    RotatedRect trackBox;

    if( !paused ) {
      //resize(frame,frame,Size(FRAME_W,FRAME_H));
      frame_number = cap.get(CV_CAP_PROP_POS_FRAMES);
    }

    frame.copyTo(image);

    if( !paused ) {
      cvtColor(image, hsv, COLOR_BGR2HSV);

      if( trackObject ) {
        _vmin = CAM_VMIN, _vmax = CAM_VMAX;
        inRange(hsv, Scalar(0, CAM_SMIN, MIN(_vmin,_vmax)), Scalar(180, 256, MAX(_vmin, _vmax)), mask);
        int ch[] = {0, 0};
        hue.create(hsv.size(), hsv.depth());
        mixChannels(&hsv, 1, &hue, 1, ch, 1);

        if( trackObject < 0 ) {
          // (re)initialize the histogram from the user's selection
          destroyAllWindows();
          Mat roi(hue, selection), maskroi(mask, selection);
          calcHist(&roi, 1, 0, maskroi, hist, 1, &hsize, &phranges);
          normalize(hist, hist, 0, 255, CV_MINMAX);

          trackWindow = selection;
          trackObject = 1;

          histimg = Scalar::all(0);
          int binW = histimg.cols / hsize;
          Mat buf(1, hsize, CV_8UC3);
          for( int i = 0; i < hsize; i++ )
            buf.at<Vec3b>(i) = Vec3b(saturate_cast<uchar>(i*180./hsize), 255, 255);
          cvtColor(buf, buf, CV_HSV2BGR);
          for( int i = 0; i < hsize; i++ ) {
            int val = saturate_cast<int>(hist.at<float>(i)*histimg.rows/255);
            rectangle( histimg, Point(i*binW, histimg.rows), Point((i+1)*binW, histimg.rows - val),
                       Scalar(buf.at<Vec3b>(i)), -1, 8 );
          }
        }

        calcBackProject(&hue, 1, 0, hist, backproj, &phranges);
        backproj &= mask;
        trackBox = CamShift(backproj, trackWindow,
                            TermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ));

        if( trackWindow.area() <= 1 ) {
          int cols = backproj.cols, rows = backproj.rows, r = (MIN(cols, rows) + 5)/6;
          trackWindow = Rect(trackWindow.x - r, trackWindow.y - r,
                             trackWindow.x + r, trackWindow.y + r) & Rect(0, 0, cols, rows);
          if (trackWindow.area() == 0)
            trackWindow = Rect(360,240,100,100);
        }

        rectangle(image, trackBox.boundingRect(), Scalar(0,0,255), 3);
      }

      if( backprojMode )
        cvtColor( backproj, image, COLOR_GRAY2BGR );
      else if( trackObject < 0 )
        paused = false;

      if( selectObject && selection.width > 0 && selection.height > 0 ) {
        Mat roi(image, selection);
        bitwise_not(roi, roi);
      }

      imshow( "CamShift Demo", image );
      waitKey(10);

      char c = (char)waitKey(30);
      if( c == 27 )
        exit(0);
      switch(c) {
        case 'b':
          backprojMode = !backprojMode;
          break;
        case 'c':
          trackObject = 0;
          histimg = Scalar::all(0);
          break;
        case 'h':
          showHist = !showHist;
          if( !showHist )
            destroyWindow( "Histogram" );
          else
            namedWindow( "Histogram", 1 );
          break;
        case 'p':
          paused = !paused;
          break;
        default:
          ;
      }

      if(!paused) {
        estPixel = trackBox.boundingRect().width;
        if(estPixel != 1) {
          z = hgtEst(estPixel, 0.0025, 3.31, 177.8);
          x = persProj(trackBox.boundingRect().x + ((trackBox.boundingRect().width)/2.0), z, 1325.55);
          y = persProj(trackBox.boundingRect().y + ((trackBox.boundingRect().height)/2.0), z, 1325.55);
        } else {
          x = 0; y = 0; z = 0;
        }

        cap >> frame;
        end_time = utime_now();
        time_diff = end_time - start_time;
        // cout<<time_diff<<endl;
        start_time = end_time;

        vision_msg.timestamp = utime_now();
        vision_msg.velocity[0] = (vision_msg.position[0] - x)/time_diff*1000;
        vision_msg.velocity[1] = (vision_msg.position[1] - y)/time_diff*1000;
        vision_msg.velocity[2] = (vision_msg.position[2] - z)/time_diff*1000;
        vision_msg.position[0] = x;
        vision_msg.position[1] = y;
        vision_msg.position[2] = z;

        lcm1.publish("vision state", &vision_msg);

        if( frame.empty() )
          break;
      }
    }
  }
}
int main() {
  if(!lcm1.good())
    return 1;

  lcm1.subscribe("vicon_state", &Handler::handleMessage, &handlerObject);
  initialize_Matrices();

  // initial conditions
  mat tmp0;
  tmp0 = {0, 0, 0, 1.2, 0, 0, 1.3, 0, 0, 0, 0, 0};
  tmp0 = trans(tmp0);
  mat z0(12*HORIZON, 1);
  mat beq(9*HORIZON, 1);

  x0 = {1, 0, 0, 1, 0, 0, 0, 0, 0};
  x0 = trans(x0);

  // for testing purposes
  float noise_mu = 0.0001;
  mat noise = noise_mu * x0;
  mat beqFirstHorizon = A_d_all*x0 + noise;
  for(int i = 0; i < 9; i++) {
    beq(i,0) = beqFirstHorizon(i, 0);
  }
  for(int i = 0; i < 12; i++) {
    z0(i,0) = tmp0(i,0);
  }

  // set the entries of beq for the other horizons
  for(int i = 9; i < HORIZON*9; i++) {
    beq(i, 0) = noise(i%9, 0);
  }
  for(int i = 12; i < HORIZON*12; i++) {
    z0(i,0) = tmp0(i%12, 0);
  }

  mat v0 = 0.05 * randi<mat>(9*HORIZON, 1, distr_param(0, 20));
  mat z = z0;
  mat v = v0;

  vec r(21);
  r.ones();
  vec r_d(12*HORIZON);
  vec r_p(9*HORIZON);
  float w_r_p = 1e-4;
  float w_d_p = 1e-4;
  int loop_num = 0;
  float fval;
  float x, y, z_h;
  mat HESSIAN, GRADIENT;
  float u1, u2, u3, h;
  float tmp_denum_grad_x_1, tmp_denum_grad_x_2, tmp_denum_grad_y_1, tmp_denum_grad_y_2;
  float d_phi_dx, d_phi_dy, d_phi_dz;
  float d_phi_du1, d_phi_du2, d_phi_du3;
  float tmp_denum_x_1, tmp_denum_x_2, tmp_denum_y_1, tmp_denum_y_2;
  float dd_phi_dxdx, dd_phi_dxdz, dd_phi_dydy, dd_phi_dydz, dd_phi_dzdz;
  float dd_phi_du1du1, dd_phi_du2du2, dd_phi_du3du3;

  // infeasible-start Newton method params
  vec d_v, d_z, beta;
  mat inv_PHI, Y;

  // line search parameters
  float beta_line_search;  // backtracking parameter
  float d;                 // step size
  vec line_search_tmp;

  clock_t start;
  double duration;
  start = clock();

  thread first(updateStateThread);

  while(1) {
    start = clock();
    beqFirstHorizon = A_d_all*x0 + noise;
    for(int i = 0; i < 9; i++) {
      beq(i,0) = beqFirstHorizon(i, 0);
    }

    while(norm(r) > 1e-4) {
      duration = ( clock() - start ) / (double) CLOCKS_PER_SEC;
      if(duration > 0.2) {
        break;  // give up on this solve once it exceeds the 0.2 s time budget
      }

      fval = dot(trans(z), (H * z));

      HESSIAN.zeros(12*HORIZON, 12*HORIZON);
      GRADIENT.zeros(12*HORIZON, 1);

      for (int i = 0; i < HORIZON; i++) {
        x = z.at(3 + i * 12);
        y = z.at(6 + i * 12);
        z_h = z.at(9 + i * 12);
        u1 = z.at(0 + 12 * i);
        u2 = z.at(1 + 12 * i);
        u3 = z.at(2 + 12 * i);
        h = -z_h + better_height;

        // gradient of the log-barrier terms
        tmp_denum_grad_x_1 = h*tan_half_phi - x;
        tmp_denum_grad_x_2 = h*tan_half_phi + x;
        tmp_denum_grad_y_1 = h*tan_half_psi - y;
        tmp_denum_grad_y_2 = h*tan_half_psi + y;

        d_phi_dx = 1/tmp_denum_grad_x_1 - 1/tmp_denum_grad_x_2;
        d_phi_dy = 1/tmp_denum_grad_y_1 - 1/tmp_denum_grad_y_2;
        d_phi_dz = 1/(z_max - z_h) - 1/(z_h - z_min)
                 + tan_half_phi/tmp_denum_grad_x_1 + tan_half_phi/tmp_denum_grad_x_2
                 + tan_half_psi/tmp_denum_grad_y_1 + tan_half_psi/tmp_denum_grad_y_2;
        d_phi_du1 = 1/(u1_max - u1) - 1/(u1 - u1_min);
        d_phi_du2 = 1/(u2_max - u2) - 1/(u2 - u2_min);
        d_phi_du3 = 1/(u3_max - u3) - 1/(u3 - u3_min);

        GRADIENT.at(i*12)     = mu_u*d_phi_du1;
        GRADIENT.at(i*12 + 1) = mu_u*d_phi_du2;
        GRADIENT.at(i*12 + 2) = mu_u*d_phi_du3;
        GRADIENT.at(i*12 + 3) = mu_state*d_phi_dx;
        GRADIENT.at(i*12 + 6) = mu_state*d_phi_dy;
        GRADIENT.at(i*12 + 9) = mu_state*d_phi_dz;

        // Hessian of the log-barrier terms
        tmp_denum_x_1 = (h*tan_half_phi - x)*(h*tan_half_phi - x);
        tmp_denum_x_2 = (h*tan_half_phi + x)*(h*tan_half_phi + x);
        tmp_denum_y_1 = (h*tan_half_psi - y)*(h*tan_half_psi - y);
        tmp_denum_y_2 = (h*tan_half_psi + y)*(h*tan_half_psi + y);

        dd_phi_dxdx = 1/tmp_denum_x_1 + 1/tmp_denum_x_2;
        dd_phi_dxdz = tan_half_phi/tmp_denum_x_1 - tan_half_phi/tmp_denum_x_2;
        dd_phi_dydy = 1/tmp_denum_y_1 + 1/tmp_denum_y_2;
        dd_phi_dydz = tan_half_psi/tmp_denum_y_1 - tan_half_psi/tmp_denum_y_2;
        dd_phi_dzdz = 1/(z_max - z_h)/(z_max - z_h) + 1/(z_h - z_min)/(z_h - z_min)
                    + tan_half_phi*tan_half_phi/tmp_denum_x_1 + tan_half_phi*tan_half_phi/tmp_denum_x_2
                    + tan_half_psi*tan_half_psi/tmp_denum_y_1 + tan_half_psi*tan_half_psi/tmp_denum_y_2;
        dd_phi_du1du1 = 1/(u1_max - u1)/(u1_max - u1) + 1/(u1 - u1_min)/(u1 - u1_min);
        dd_phi_du2du2 = 1/(u2_max - u2)/(u2_max - u2) + 1/(u2 - u2_min)/(u2 - u2_min);
        dd_phi_du3du3 = 1/(u3_max - u3)/(u3_max - u3) + 1/(u3 - u3_min)/(u3 - u3_min);

        HESSIAN.at(12*i,     12*i)     = mu_u*dd_phi_du1du1;
        HESSIAN.at(12*i + 1, 12*i + 1) = mu_u*dd_phi_du2du2;
        HESSIAN.at(12*i + 2, 12*i + 2) = mu_u*dd_phi_du3du3;
        HESSIAN.at(12*i + 3, 12*i + 3) = mu_state*dd_phi_dxdx;
        HESSIAN.at(12*i + 3, 12*i + 9) = mu_state*dd_phi_dxdz;
        HESSIAN.at(12*i + 6, 12*i + 6) = mu_state*dd_phi_dydy;
        HESSIAN.at(12*i + 6, 12*i + 9) = mu_state*dd_phi_dydz;
        HESSIAN.at(12*i + 9, 12*i + 9) = mu_state*dd_phi_dzdz;
        HESSIAN.at(12*i + 9, 12*i + 3) = mu_state*dd_phi_dxdz;
        HESSIAN.at(12*i + 9, 12*i + 6) = mu_state*dd_phi_dydz;
      }

      // residuals of the KKT conditions
      r_d = 2*H*z + GRADIENT + trans(C)*v;
      r_p = C*z - beq;
      r = r_d;
      r.insert_rows(12*HORIZON, r_p);

      // start = clock();
      inv_PHI = inv(2*H + HESSIAN);
      // duration = ( clock() - start ) / (double) CLOCKS_PER_SEC;
      // cout<<"duration: " <<duration <<endl;
      // return 0;
      // inv_PHI.print("inv_PHI");
      // C.print("C");
      Y = C*inv_PHI*C.t();
      // Y.print("Y");
      beta = -r_p + C*inv_PHI*r_d;
      d_v = -inv_sympd(Y)*beta;
      d_z = inv_PHI*(-r_d - C.t()*d_v);

      // backtracking line search
      beta_line_search = 0.5;
      d = 1;
      line_search_tmp = 2*H*(z + d*d_z) + GRADIENT + C.t()*(v + d*d_v);
      line_search_tmp.insert_rows(line_search_tmp.n_rows, (C*(z + d*d_z) - beq));
      while(norm(line_search_tmp) > (1 - 0.4*d)*norm(r)) {
        d = beta_line_search*d;
        // re-evaluate the residual at the shrunken step before testing again
        line_search_tmp = 2*H*(z + d*d_z) + GRADIENT + C.t()*(v + d*d_v);
        line_search_tmp.insert_rows(line_search_tmp.n_rows, (C*(z + d*d_z) - beq));
      }

      // update z and v
      z = z + d*d_z;
      v = v + d*d_v;
      // r.print("r");
      // cout<<__LINE__<<endl;
    }

    accel_msg.timestamp = utime_now();
    accel_msg.accel[0] = z(0);
    accel_msg.accel[1] = z(1);
    accel_msg.accel[2] = z(2);
    lcm1.publish("acceleration", &accel_msg);

    cout<<"duration: " <<duration <<endl;
    r.ones();
    usleep(5e4);
  }

  first.join();
  cout<<"control:u1 u2 u3 " <<z(0,0)<<", " <<z(1,0) <<", " <<z(2,0)<<endl;
}
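// For reference, the inner loop above is an infeasible-start Newton method for the
// equality-constrained barrier problem
//     minimize  z'Hz + mu*phi(z)    subject to  Cz = beq,
// where phi collects the log-barrier terms on the inputs (u1, u2, u3) and on the x, y, z
// state limits.  With residuals
//     r_d = 2Hz + grad(phi) + C'v,    r_p = Cz - beq,
// each iteration solves the KKT system
//     [ 2H + hess(phi)   C' ] [dz]   [-r_d]
//     [       C          0  ] [dv] = [-r_p]
// by block elimination through the Schur complement Y = C*(2H + hess(phi))^-1*C', then
// backtracks on the step size d until ||r(z + d*dz, v + d*dv)|| <= (1 - 0.4 d)*||r||.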