From 9af416d5b44671be4516665225942c90e6aa6131 Mon Sep 17 00:00:00 2001 From: sugihara Date: Sun, 18 Feb 2024 20:05:53 +0900 Subject: [PATCH 01/14] [spinal] publish gravitational vector instead of euler angle --- .../Jsk_Lib/state_estimate/attitude/attitude_estimate.h | 2 +- .../Jsk_Lib/state_estimate/attitude/complementary_ahrs.h | 5 ++--- .../lib/Jsk_Lib/state_estimate/attitude/estimator.h | 8 +++++++- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/aerial_robot_nerve/spinal/mcu_project/lib/Jsk_Lib/state_estimate/attitude/attitude_estimate.h b/aerial_robot_nerve/spinal/mcu_project/lib/Jsk_Lib/state_estimate/attitude/attitude_estimate.h index b21f029b8..5993df3ba 100644 --- a/aerial_robot_nerve/spinal/mcu_project/lib/Jsk_Lib/state_estimate/attitude/attitude_estimate.h +++ b/aerial_robot_nerve/spinal/mcu_project/lib/Jsk_Lib/state_estimate/attitude/attitude_estimate.h @@ -196,7 +196,7 @@ class AttitudeEstimate imu_msg_.acc_data[i] = estimator_->getAcc(Frame::BODY)[i]; imu_msg_.gyro_data[i] = estimator_->getAngular(Frame::BODY)[i]; #if ESTIMATE_TYPE == COMPLEMENTARY - imu_msg_.angles[i] = estimator_->getAttitude(Frame::BODY)[i]; // get the attitude at body frame3 + imu_msg_.angles[i] = estimator_->getEstG(Frame::BODY)[i]; // workaround to avoid the singularity of RPY Euler angles. #endif } #ifdef SIMULATION diff --git a/aerial_robot_nerve/spinal/mcu_project/lib/Jsk_Lib/state_estimate/attitude/complementary_ahrs.h b/aerial_robot_nerve/spinal/mcu_project/lib/Jsk_Lib/state_estimate/attitude/complementary_ahrs.h index 1b47d4fc5..1238673d7 100644 --- a/aerial_robot_nerve/spinal/mcu_project/lib/Jsk_Lib/state_estimate/attitude/complementary_ahrs.h +++ b/aerial_robot_nerve/spinal/mcu_project/lib/Jsk_Lib/state_estimate/attitude/complementary_ahrs.h @@ -26,11 +26,10 @@ class ComplementaryAHRS: public EstimatorAlgorithm { public: - ComplementaryAHRS():EstimatorAlgorithm(), est_g_(), est_m_(){} + ComplementaryAHRS():EstimatorAlgorithm() + {} private: - std::array est_g_, est_m_; - /* core esitmation process, using body frame */ void estimation() { diff --git a/aerial_robot_nerve/spinal/mcu_project/lib/Jsk_Lib/state_estimate/attitude/estimator.h b/aerial_robot_nerve/spinal/mcu_project/lib/Jsk_Lib/state_estimate/attitude/estimator.h index fa08dc0b3..609b10eb6 100644 --- a/aerial_robot_nerve/spinal/mcu_project/lib/Jsk_Lib/state_estimate/attitude/estimator.h +++ b/aerial_robot_nerve/spinal/mcu_project/lib/Jsk_Lib/state_estimate/attitude/estimator.h @@ -37,7 +37,9 @@ class EstimatorAlgorithm public: EstimatorAlgorithm(): - acc_(), gyro_(), mag_(), prev_mag_(), pre1_(), pre2_(), mag_dec_valid_(false) + acc_(), gyro_(), mag_(), prev_mag_(), + est_g_(), est_m_(), + pre1_(), pre2_(), mag_dec_valid_(false) { r_.identity(); mag_declination_ = 0; @@ -110,6 +112,8 @@ class EstimatorAlgorithm ap::Vector3f getAcc(uint8_t frame){return acc_[frame];} ap::Vector3f getMag(uint8_t frame){return mag_[frame];} ap::Matrix3f getDesiredCoord() {return r_;} + ap::Vector3f getEstG(uint8_t frame){return est_g_[frame];} + ap::Vector3f getEstM(uint8_t frame){return est_m_[frame];} #ifndef SIMULATION bool getMagDecValid() { return mag_dec_valid_; } @@ -128,6 +132,8 @@ class EstimatorAlgorithm ap::Vector3f prev_mag_; /* for lpf filter method for mag */ ap::Matrix3f r_; std::array rpy_; + std::array est_g_; /* estimated vetor of gravity */ + std::array est_m_; /* estimated vetor of magnet */ /* IIR lpf */ float rx_freq_; From c8dc28e45fc8e9e3070fda79ff5ac9d4b1990cb7 Mon Sep 17 00:00:00 2001 From: sugihara Date: Sun, 18 
Feb 2024 20:13:25 +0900 Subject: [PATCH 02/14] [aerial_robot_simulation] work around euler so3 control --- .../aerial_robot_hw_sim.h | 3 +++ .../src/aerial_robot_hw_sim.cpp | 27 ++++++++++--------- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/aerial_robot_simulation/include/aerial_robot_simulation/aerial_robot_hw_sim.h b/aerial_robot_simulation/include/aerial_robot_simulation/aerial_robot_hw_sim.h index 84d068341..63ea9fc5e 100644 --- a/aerial_robot_simulation/include/aerial_robot_simulation/aerial_robot_hw_sim.h +++ b/aerial_robot_simulation/include/aerial_robot_simulation/aerial_robot_hw_sim.h @@ -96,6 +96,9 @@ class AerialRobotHWSim : public gazebo_ros_control::DefaultRobotHWSim #endif std::string baselink_parent_; + double start_t_; + double spinal_init_wait_time_; + uint8_t control_mode_; ros::Subscriber sim_vel_sub_, sim_pos_sub_; ros::Publisher ground_truth_pub_; diff --git a/aerial_robot_simulation/src/aerial_robot_hw_sim.cpp b/aerial_robot_simulation/src/aerial_robot_hw_sim.cpp index 65eef6c9c..2468a3bdb 100644 --- a/aerial_robot_simulation/src/aerial_robot_hw_sim.cpp +++ b/aerial_robot_simulation/src/aerial_robot_hw_sim.cpp @@ -196,6 +196,9 @@ namespace gazebo_ros_control ground_truth_pub_ = model_nh.advertise("ground_truth", 1); mocap_pub_ = model_nh.advertise("mocap/pose", 1); + simulation_nh.param("spinal_init_wait_time", spinal_init_wait_time_, 5.0); // [sec] + start_t_ = ros::Time::now().toSec(); + return true; } @@ -240,7 +243,10 @@ namespace gazebo_ros_control else ROS_DEBUG_THROTTLE(1.0, "No magnetometer sensor handler to read sensor data"); - spinal_interface_.stateEstimate(); + if (time.toSec() - start_t_ > spinal_init_wait_time_) + spinal_interface_.stateEstimate(); + else + ROS_INFO_THROTTLE(1.0, "wait for robot configuration being stable"); /* publish ground truth value */ nav_msgs::Odometry odom_msg; @@ -249,12 +255,11 @@ namespace gazebo_ros_control odom_msg.pose.pose.position.y = baselink_pos.Y() + gazebo::gaussianKernel(ground_truth_pos_noise_); odom_msg.pose.pose.position.z = baselink_pos.Z() + gazebo::gaussianKernel(ground_truth_pos_noise_); - ignition::math::Vector3d euler = q.Euler(); - euler += ignition::math::Vector3d(gazebo::addNoise(ground_truth_rot_curr_drift_.X(), ground_truth_rot_drift_, ground_truth_rot_drift_frequency_, 0, ground_truth_rot_noise_, period.toSec()), - gazebo::addNoise(ground_truth_rot_curr_drift_.Y(), ground_truth_rot_drift_, ground_truth_rot_drift_frequency_, 0, ground_truth_rot_noise_, period.toSec()), - gazebo::addNoise(ground_truth_rot_curr_drift_.Z(), ground_truth_rot_drift_, ground_truth_rot_drift_frequency_, 0, ground_truth_rot_noise_, period.toSec())); + ignition::math::Vector3d delta_euler(gazebo::addNoise(ground_truth_rot_curr_drift_.X(), ground_truth_rot_drift_, ground_truth_rot_drift_frequency_, 0, ground_truth_rot_noise_, period.toSec()), + gazebo::addNoise(ground_truth_rot_curr_drift_.Y(), ground_truth_rot_drift_, ground_truth_rot_drift_frequency_, 0, ground_truth_rot_noise_, period.toSec()), + gazebo::addNoise(ground_truth_rot_curr_drift_.Z(), ground_truth_rot_drift_, ground_truth_rot_drift_frequency_, 0, ground_truth_rot_noise_, period.toSec())); - ignition::math::Quaterniond q_noise(euler); + ignition::math::Quaterniond q_noise = q * ignition::math::Quaterniond(delta_euler); odom_msg.pose.pose.orientation.x = q_noise.X(); odom_msg.pose.pose.orientation.y = q_noise.Y(); odom_msg.pose.pose.orientation.z = q_noise.Z(); @@ -295,12 +300,10 @@ namespace gazebo_ros_control pose_msg.pose.position.y = 
odom_msg.pose.pose.position.y + gazebo::gaussianKernel(mocap_pos_noise_); pose_msg.pose.position.z = odom_msg.pose.pose.position.z + gazebo::gaussianKernel(mocap_pos_noise_); - // pose.pose.orientation = odom_msg.pose.pose.orientation; - euler = q.Euler(); - euler += ignition::math::Vector3d(gazebo::gaussianKernel(mocap_rot_noise_), - gazebo::gaussianKernel(mocap_rot_noise_), - gazebo::gaussianKernel(mocap_rot_noise_)); - q_noise = ignition::math::Quaterniond(euler); + delta_euler = ignition::math::Vector3d(gazebo::gaussianKernel(mocap_rot_noise_), + gazebo::gaussianKernel(mocap_rot_noise_), + gazebo::gaussianKernel(mocap_rot_noise_)); + q_noise = q * ignition::math::Quaterniond(delta_euler); pose_msg.pose.orientation.x = q_noise.X(); pose_msg.pose.orientation.y = q_noise.Y(); pose_msg.pose.orientation.z = q_noise.Z(); From 8acb4b2bc0fe96f900d963442d2fc8836f27fef4 Mon Sep 17 00:00:00 2001 From: sugihara Date: Sun, 18 Feb 2024 20:31:10 +0900 Subject: [PATCH 03/14] [aerial_robot_model] remove desire coordinate subscriber in robot model --- .../aerial_robot_model/model/aerial_robot_model_ros.h | 2 -- aerial_robot_model/src/model/base_model/robot_model_ros.cpp | 6 ------ 2 files changed, 8 deletions(-) diff --git a/aerial_robot_model/include/aerial_robot_model/model/aerial_robot_model_ros.h b/aerial_robot_model/include/aerial_robot_model/model/aerial_robot_model_ros.h index 3b9fc9c7b..74e7ca1f1 100644 --- a/aerial_robot_model/include/aerial_robot_model/model/aerial_robot_model_ros.h +++ b/aerial_robot_model/include/aerial_robot_model/model/aerial_robot_model_ros.h @@ -60,7 +60,6 @@ namespace aerial_robot_model { private: //private attributes ros::ServiceServer add_extra_module_service_; - ros::Subscriber desire_coordinate_sub_; ros::Subscriber joint_state_sub_; tf2_ros::TransformBroadcaster br_; tf2_ros::StaticTransformBroadcaster static_br_; @@ -74,6 +73,5 @@ namespace aerial_robot_model { //private functions void jointStateCallback(const sensor_msgs::JointStateConstPtr& state); bool addExtraModuleCallback(aerial_robot_model::AddExtraModule::Request& req, aerial_robot_model::AddExtraModule::Response& res); - void desireCoordinateCallback(const spinal::DesireCoordConstPtr& msg); }; } //namespace aerial_robot_model diff --git a/aerial_robot_model/src/model/base_model/robot_model_ros.cpp b/aerial_robot_model/src/model/base_model/robot_model_ros.cpp index 77de238e2..7163ab5a1 100644 --- a/aerial_robot_model/src/model/base_model/robot_model_ros.cpp +++ b/aerial_robot_model/src/model/base_model/robot_model_ros.cpp @@ -41,7 +41,6 @@ namespace aerial_robot_model { joint_state_sub_ = nh_.subscribe("joint_states", 1, &RobotModelRos::jointStateCallback, this); } - desire_coordinate_sub_ = nh_.subscribe("desire_coordinate", 1, &RobotModelRos::desireCoordinateCallback, this); add_extra_module_service_ = nh_.advertiseService("add_extra_module", &RobotModelRos::addExtraModuleCallback, this); } @@ -90,9 +89,4 @@ namespace aerial_robot_model { ROS_ERROR("[extra module]: should not reach here "); return false; } - - void RobotModelRos::desireCoordinateCallback(const spinal::DesireCoordConstPtr& msg) - { - robot_model_->setCogDesireOrientation(msg->roll, msg->pitch, msg->yaw); - } } //namespace aerial_robot_model From b7435889490907f74608e6746b93954c42ffc7e5 Mon Sep 17 00:00:00 2001 From: sugihara Date: Sun, 18 Feb 2024 22:44:52 +0900 Subject: [PATCH 04/14] [aerial_robot_estimation] work around state estimation with so3 system --- .../aerial_robot_estimation/sensor/imu.h | 21 +- 
.../aerial_robot_estimation/sensor/vo.h | 4 + .../state_estimation.h | 158 +++++++----- aerial_robot_estimation/src/sensor/imu.cpp | 231 +++++++++--------- aerial_robot_estimation/src/sensor/mocap.cpp | 72 +++--- aerial_robot_estimation/src/sensor/vo.cpp | 48 ++-- .../src/state_estimation.cpp | 33 +-- 7 files changed, 288 insertions(+), 279 deletions(-) diff --git a/aerial_robot_estimation/include/aerial_robot_estimation/sensor/imu.h b/aerial_robot_estimation/include/aerial_robot_estimation/sensor/imu.h index 5e30aee2b..3f2db495b 100644 --- a/aerial_robot_estimation/include/aerial_robot_estimation/sensor/imu.h +++ b/aerial_robot_estimation/include/aerial_robot_estimation/sensor/imu.h @@ -58,11 +58,8 @@ namespace sensor_plugin ~Imu() {} Imu(); - inline tf::Vector3 getAttitude(uint8_t frame) { return euler_; } inline ros::Time getStamp(){return imu_stamp_;} - inline void treatImuAsGroundTruth(bool flag) { treat_imu_as_ground_truth_ = flag; } - protected: ros::Publisher acc_pub_; ros::Publisher imu_pub_; @@ -80,19 +77,19 @@ namespace sensor_plugin double sensor_dt_; /* imu */ - tf::Vector3 euler_; /* the euler angle of both body and cog frame */ - tf::Vector3 omega_; /* the omega both body and cog frame */ - tf::Vector3 mag_; /* the magnetometer both body and cog frame */ + tf::Vector3 wz_b_; /* unit z axis of world frame in baselink frame */ + tf::Vector3 omega_; /* the omega both of body frame */ + tf::Vector3 mag_; /* the magnetometer of body frame */ /* acc */ tf::Vector3 acc_b_; /* the acceleration in baselink frame */ - tf::Vector3 acc_l_; /* the acceleration in level frame as to baselink frame: previously is acc_i */ - tf::Vector3 acc_w_; /* the acceleration in world frame */ - tf::Vector3 acc_non_bias_w_; /* the acceleration without bias in world frame */ + std::array acc_w_; /* the acceleration in world frame, for estimate_mode and expriment_mode */ + std::array acc_non_bias_w_; /* the acceleration without bias in world frame for estimate_mode and expriment_mode */ /* acc bias */ tf::Vector3 acc_bias_b_; /* the acceleration bias in baselink frame, only use z axis */ - tf::Vector3 acc_bias_l_; /* the acceleration bias in level frame as to baselink frame: previously is acc_i */ - tf::Vector3 acc_bias_w_; /* the acceleration bias in world frame */ - bool treat_imu_as_ground_truth_; /* whether use imu value as ground truth */ + std::array acc_bias_w_; /* the acceleration bias in world frame for estimate_mode and expriment_mode*/ + + /* orientation */ + std::array cog_rot_, base_rot_; aerial_robot_msgs::States state_; /* for debug */ diff --git a/aerial_robot_estimation/include/aerial_robot_estimation/sensor/vo.h b/aerial_robot_estimation/include/aerial_robot_estimation/sensor/vo.h index c9c86128b..631cf8b0a 100644 --- a/aerial_robot_estimation/include/aerial_robot_estimation/sensor/vo.h +++ b/aerial_robot_estimation/include/aerial_robot_estimation/sensor/vo.h @@ -70,6 +70,9 @@ namespace sensor_plugin bool reset(); + const bool rotValid() const { return rot_valid_; } + const tf::Transform& getRawBaselinkTF() const { return baselink_tf_; } + private: /* ros */ ros::Subscriber vo_sub_; @@ -93,6 +96,7 @@ namespace sensor_plugin /* heuristic sepecial flag for fusion */ bool outdoor_; bool z_no_delay_; + bool rot_valid_; // the estimated orientatin by VO is whether valid or not. 
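Note: the imu.h hunk above turns acc_w_, acc_non_bias_w_, acc_bias_w_ and the new cog_rot_ / base_rot_ members into two-element arrays, one entry per estimation mode. A minimal sketch of that storage convention (illustrative only, not part of the patch; it assumes EGOMOTION_ESTIMATE = 0 and EXPERIMENT_ESTIMATE = 1, which the index-based loops in the later hunks rely on):

    std::array<tf::Vector3, 2> acc_w_;   // one world-frame acceleration per estimation mode
    acc_w_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE)  = tf::Vector3(0, 0, 0);
    acc_w_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE) = tf::Vector3(0, 0, 0);
    // the ground-truth mode is not stored in these arrays; as the member comments say,
    // only the two estimate modes are kept, which is why the loops below run with i < 2.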
/* servo */ diff --git a/aerial_robot_estimation/include/aerial_robot_estimation/state_estimation.h b/aerial_robot_estimation/include/aerial_robot_estimation/state_estimation.h index a67a4ada8..080b0e242 100644 --- a/aerial_robot_estimation/include/aerial_robot_estimation/state_estimation.h +++ b/aerial_robot_estimation/include/aerial_robot_estimation/state_estimation.h @@ -83,14 +83,25 @@ namespace State X_BASE, //x axis of Baselink Y_BASE, //y axis of Baselink Z_BASE, //y axis of Baselink - ROLL_COG, //roll of CoG in world coord - PITCH_COG, //pitch of CoG in world coord - YAW_COG, //yaw of CoG in world coord - ROLL_BASE, //roll of baselink in world coord - PITCH_BASE, //pitch of baselink in world coord - YAW_BASE, //yaw of baselink in world coord TOTAL_NUM, }; + + namespace CoG + { + enum + { + Rot = 10, + }; + }; + + namespace Base + { + enum + { + Rot = 11, + }; + }; + }; namespace Sensor @@ -135,29 +146,42 @@ namespace aerial_robot_estimation int getStateStatus(uint8_t axis, uint8_t estimate_mode) { boost::lock_guard lock(state_mutex_); - assert(axis < State::TOTAL_NUM); - return state_[axis][estimate_mode].first; + if(axis < State::TOTAL_NUM) + return state_[axis][estimate_mode].first; + else if(axis == State::Base::Rot) + return base_rot_status_.at(estimate_mode); + else if(axis == State::CoG::Rot) + return cog_rot_status_.at(estimate_mode); + else + return 0; } - void setStateStatus( uint8_t axis, uint8_t estimate_mode, bool status) + void setStateStatus(uint8_t axis, uint8_t estimate_mode, bool status) { boost::lock_guard lock(state_mutex_); - assert(axis < State::TOTAL_NUM); - if(status) state_[axis][estimate_mode].first ++; + + int* incre_status; + if(axis < State::TOTAL_NUM) + incre_status = &(state_[axis][estimate_mode].first); + else if(axis == State::Base::Rot) + incre_status = &(base_rot_status_.at(estimate_mode)); + else if(axis == State::CoG::Rot) + incre_status = &(cog_rot_status_.at(estimate_mode)); + else + return; + + if(status) (*incre_status) ++; else { - if(state_[axis][estimate_mode].first > 0) - state_[axis][estimate_mode].first --; + if(*incre_status > 0) (*incre_status) --; else - ROS_ERROR("wrong status update for axis: %d, estimate mode: %d", axis, estimate_mode); + ROS_WARN("wrong status update for axis: %d, estimate mode: %d", axis, estimate_mode); } } - /* axis: state axis (11) */ AxisState getState( uint8_t axis) { boost::lock_guard lock(state_mutex_); - assert(axis < State::TOTAL_NUM); return state_[axis]; } @@ -219,71 +243,72 @@ namespace aerial_robot_estimation tf::Matrix3x3 getOrientation(int frame, int estimate_mode) { boost::lock_guard lock(state_mutex_); - tf::Matrix3x3 r; - r.setRPY((state_[State::ROLL_COG + frame * 3][estimate_mode].second)[0], - (state_[State::PITCH_COG + frame * 3][estimate_mode].second)[0], - (state_[State::YAW_COG + frame * 3][estimate_mode].second)[0]); - return r; + + if(frame == Frame::COG) + return cog_rots_.at(estimate_mode); + else if(frame == Frame::BASELINK) + return base_rots_.at(estimate_mode); + + return tf::Matrix3x3::getIdentity(); } - tf::Vector3 getEuler(int frame, int estimate_mode) + void setOrientation(int frame, int estimate_mode, tf::Matrix3x3 rot) { boost::lock_guard lock(state_mutex_); - return tf::Vector3((state_[State::ROLL_COG + frame * 3][estimate_mode].second)[0], - (state_[State::PITCH_COG + frame * 3][estimate_mode].second)[0], - (state_[State::YAW_COG + frame * 3][estimate_mode].second)[0]); + if(frame == Frame::COG) + cog_rots_.at(estimate_mode) = rot; + else if(frame == Frame::BASELINK) + 
base_rots_.at(estimate_mode) = rot; } - void setEuler(int frame, int estimate_mode, tf::Vector3 euler) + tf::Vector3 getEuler(int frame, int estimate_mode) { - boost::lock_guard lock(state_mutex_); - (state_[State::ROLL_COG + frame * 3][estimate_mode].second)[0] = euler[0]; - (state_[State::PITCH_COG + frame * 3][estimate_mode].second)[0] = euler[1]; - (state_[State::YAW_COG + frame * 3][estimate_mode].second)[0] = euler[2]; + tf::Matrix3x3 rot = getOrientation(frame, estimate_mode); + tfScalar r = 0, p = 0, y = 0; + rot.getRPY(r, p, y); + + return tf::Vector3(r, p, y); } tf::Vector3 getAngularVel(int frame, int estimate_mode) { boost::lock_guard lock(state_mutex_); - return tf::Vector3((state_[State::ROLL_COG + frame * 3][estimate_mode].second)[1], - (state_[State::PITCH_COG + frame * 3][estimate_mode].second)[1], - (state_[State::YAW_COG + frame * 3][estimate_mode].second)[1]); + + if(frame == Frame::COG) + return cog_omegas_.at(estimate_mode); + else if(frame == Frame::BASELINK) + return base_omegas_.at(estimate_mode); + + return tf::Vector3(0,0,0); } void setAngularVel(int frame, int estimate_mode, tf::Vector3 omega) { boost::lock_guard lock(state_mutex_); - (state_[State::ROLL_COG + frame * 3][estimate_mode].second)[1] = omega[0]; - (state_[State::PITCH_COG + frame * 3][estimate_mode].second)[1] = omega[1]; - (state_[State::YAW_COG + frame * 3][estimate_mode].second)[1] = omega[2]; + + if(frame == Frame::COG) + cog_omegas_.at(estimate_mode) = omega; + else if(frame == Frame::BASELINK) + base_omegas_.at(estimate_mode) = omega; } inline void setQueueSize(const int& qu_size) {qu_size_ = qu_size;} - void updateQueue(const double timestamp, const double roll, const double pitch, const tf::Vector3 omega) + void updateQueue(const double timestamp, const tf::Matrix3x3 r_ee, const tf::Matrix3x3 r_ex, const tf::Vector3 omega) { - tf::Matrix3x3 r_ee, r_ex, r_gt; - r_ee.setRPY(roll, pitch, (getState(State::YAW_BASE, EGOMOTION_ESTIMATE))[0]); - r_ex.setRPY(roll, pitch, (getState(State::YAW_BASE, EXPERIMENT_ESTIMATE))[0]); - r_gt.setRPY(roll, pitch, (getState(State::YAW_BASE, GROUND_TRUTH))[0]); + boost::lock_guard lock(queue_mutex_); + timestamp_qu_.push_back(timestamp); + rot_ee_qu_.push_back(r_ee); + rot_ex_qu_.push_back(r_ex); + omega_qu_.push_back(omega); - { - boost::lock_guard lock(queue_mutex_); - timestamp_qu_.push_back(timestamp); - rot_ee_qu_.push_back(r_ee); - rot_ex_qu_.push_back(r_ex); - rot_gt_qu_.push_back(r_gt); - omega_qu_.push_back(omega); - - if(timestamp_qu_.size() > qu_size_) - { - timestamp_qu_.pop_front(); - rot_ee_qu_.pop_front(); - rot_ex_qu_.pop_front(); - rot_gt_qu_.pop_front(); - omega_qu_.pop_front(); - } - } + if(timestamp_qu_.size() > qu_size_) + { + timestamp_qu_.pop_front(); + rot_ee_qu_.pop_front(); + rot_ex_qu_.pop_front(); + omega_qu_.pop_front(); + } } bool findRotOmega(const double timestamp, const int mode, tf::Matrix3x3& r, tf::Vector3& omega, bool verbose = true) @@ -358,9 +383,6 @@ namespace aerial_robot_estimation case EXPERIMENT_ESTIMATE: r = rot_ex_qu_.at(candidate_index); break; - case GROUND_TRUTH: - r = rot_gt_qu_.at(candidate_index); - break; default: ROS_ERROR("estimation search state with timestamp: wrong mode %d", mode); return false; @@ -391,6 +413,8 @@ namespace aerial_robot_estimation /* latitude & longitude value for GPS based navigation */ inline void setCurrGpsPoint(const geographic_msgs::GeoPoint point) {curr_wgs84_poiont_ = point;} inline const geographic_msgs::GeoPoint& getCurrGpsPoint() const {return curr_wgs84_poiont_;} + inline const 
bool hasGroundTruthOdom() const {return has_groundtruth_odom_; } + inline void receiveGroundTruthOdom(bool flag) {has_groundtruth_odom_ = flag; } const SensorFuser& getFuser(int mode) @@ -452,13 +476,19 @@ namespace aerial_robot_estimation boost::shared_ptr robot_model_; std::string tf_prefix_; - /* 9: x_w, y_w, z_w, roll_w, pitch_w, yaw_cog_w, x_b, y_b, yaw_board_w */ - array state_; + /* 6: x_w, y_w, z_w, x_b, y_b */ + /* TODO: check to vector3 */ + array state_; + array base_rots_, cog_rots_; // TODO: should abolish the different between orientation of baselink and that of CoG + array base_omegas_, cog_omegas_; // TODO: should abolish the different between orientation of baselink and that of CoG + array base_rot_status_, cog_rot_status_; + + bool has_groundtruth_odom_; // whether receive entire groundthtruth odometry (e.g., for simulation mode) /* for calculate the sensor to baselink with the consideration of time delay */ int qu_size_; deque timestamp_qu_; - deque rot_ee_qu_, rot_ex_qu_, rot_gt_qu_; + deque rot_ee_qu_, rot_ex_qu_; deque omega_qu_; /* sensor fusion */ diff --git a/aerial_robot_estimation/src/sensor/imu.cpp b/aerial_robot_estimation/src/sensor/imu.cpp index 42cdf9e80..3a61abf12 100644 --- a/aerial_robot_estimation/src/sensor/imu.cpp +++ b/aerial_robot_estimation/src/sensor/imu.cpp @@ -34,6 +34,7 @@ *********************************************************************/ #include +#include namespace { @@ -47,17 +48,11 @@ namespace sensor_plugin sensor_plugin::SensorBase(), calib_count_(200), acc_b_(0, 0, 0), - euler_(0, 0, 0), + wz_b_(0, 0, 0), omega_(0, 0, 0), mag_(0, 0, 0), - acc_l_(0, 0, 0), - acc_w_(0, 0, 0), - acc_non_bias_w_(0, 0, 0), acc_bias_b_(0, 0, 0), - acc_bias_l_(0, 0, 0), - acc_bias_w_(0, 0, 0), - sensor_dt_(0), - treat_imu_as_ground_truth_(true) + sensor_dt_(0) { state_.states.resize(3); state_.states[0].id = "x"; @@ -66,6 +61,14 @@ namespace sensor_plugin state_.states[1].state.resize(2); state_.states[2].id = "z"; state_.states[2].state.resize(2); + + acc_w_.at(0) = tf::Vector3(0, 0, 0); + acc_w_.at(1) = tf::Vector3(0, 0, 0); + acc_non_bias_w_.at(0) = tf::Vector3(0, 0, 0); + acc_non_bias_w_.at(1) = tf::Vector3(0, 0, 0); + acc_bias_w_.at(0) = tf::Vector3(0, 0, 0); + acc_bias_w_.at(1) = tf::Vector3(0, 0, 0); + } void Imu::initialize(ros::NodeHandle nh, @@ -96,11 +99,11 @@ namespace sensor_plugin return; } - acc_b_[i] = imu_msg->acc_data[i]; - euler_[i] = imu_msg->angles[i]; - omega_[i] = imu_msg->gyro_data[i]; - mag_[i] = imu_msg->mag_data[i]; - } + acc_b_[i] = imu_msg->acc_data[i]; // baselink frame + omega_[i] = imu_msg->gyro_data[i]; // baselink frame + mag_[i] = imu_msg->mag_data[i]; // baselink frame + wz_b_[i] = imu_msg->angles[i]; // workaround to avoid the singularity of RPY Euler angles. 
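Since the spinal firmware now reports the world z axis expressed in the baselink frame instead of Euler angles, roll and pitch can be recovered from that single vector without evaluating the singular RPY decomposition of the baselink frame. A minimal sketch of that recovery (not part of the patch; it mirrors the atan2 expressions applied to the CoG frame just below):

      tf::Vector3 wz_b(0.0, 0.17, 0.985);  // example: world z axis seen from a slightly rolled baselink (~unit vector)
      double roll  = atan2( wz_b.y(), wz_b.z());
      double pitch = atan2(-wz_b.x(), sqrt(wz_b.y() * wz_b.y() + wz_b.z() * wz_b.z()));
      // yaw is unobservable from the gravity direction alone, so it is supplied later in
      // estimateProcess() from VO, mocap, or ground truth.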
+ } estimateProcess(); updateHealthStamp(); @@ -118,64 +121,78 @@ namespace sensor_plugin /* set the time internal */ sensor_dt_ = imu_stamp_.toSec() - prev_time.toSec(); - tf::Matrix3x3 r; r.setRPY(euler_[0], euler_[1], euler_[2]); + tf::Transform cog2baselink_tf; + tf::transformKDLToTF(robot_model_->getCog2Baselink(), cog2baselink_tf); - /* project acc onto level frame using body frame value */ - tf::Matrix3x3 orientation; - orientation.setRPY(euler_[0], euler_[1], 0); -#if 1 - acc_l_ = orientation * acc_b_ - tf::Vector3(0, 0, aerial_robot_estimation::G); /* use x,y for factor4 and z for factor3 */ - //acc_l_.setZ((orientation * tf::Vector3(0, 0, acc_[Frame::BODY].z())).z() - aerial_robot_estimation::G); -#else // use approximation - acc_l_ = orientation * tf::Vector3(0, 0, acc_b_.z()) - tf::Vector3(0, 0, aerial_robot_estimation::G); -#endif + // convert to CoG frame, since in the current system, CoG frame is never being sigular due to RPY euler + tf::Vector3 wz_c = cog2baselink_tf.getBasis() * wz_b_; + tf::Vector3 cog_euler; + cog_euler[0] = atan2(wz_c.y() , wz_c.z()); + cog_euler[1] = atan2(-wz_c.x() , sqrt(wz_c.y() * wz_c.y() + wz_c.z() * wz_c.z())); - /* base link */ - /* roll & pitch */ - estimator_->setState(State::ROLL_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE, 0, euler_[0]); - estimator_->setState(State::PITCH_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE, 0, euler_[1]); - estimator_->setState(State::ROLL_BASE, aerial_robot_estimation::EXPERIMENT_ESTIMATE, 0, euler_[0]); - estimator_->setState(State::PITCH_BASE, aerial_robot_estimation::EXPERIMENT_ESTIMATE, 0, euler_[1]); + // 1. egomotion estimate mode + // check whether have valid rotation from VO sensor + for(const auto& handler: estimator_->getVoHandlers()) + { + if(handler->getStatus() == Status::ACTIVE) + { + auto vo_handler = boost::dynamic_pointer_cast(handler); - /* yaw */ - if(!estimator_->getStateStatus(State::YAW_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE)) - estimator_->setState(State::YAW_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE, 0, euler_[2]); + if (vo_handler->rotValid()) + { + tf::Matrix3x3 cog_rot_by_vo = vo_handler->getRawBaselinkTF().getBasis() * cog2baselink_tf.getBasis().inverse(); + // we replace the yaw angle from the spinal with the value from VO. 
+ double r,p,y; + cog_rot_by_vo.getRPY(r, p, y); + cog_euler[2] = y; + break; + } + } + } - if(!estimator_->getStateStatus(State::YAW_BASE, aerial_robot_estimation::EXPERIMENT_ESTIMATE)) - estimator_->setState(State::YAW_BASE, aerial_robot_estimation::EXPERIMENT_ESTIMATE, 0, euler_[2]); + cog_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE).setRPY(cog_euler[0], cog_euler[1], cog_euler[2]); + estimator_->setOrientation(Frame::COG, aerial_robot_estimation::EGOMOTION_ESTIMATE, cog_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE)); + base_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE) = cog_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE) * cog2baselink_tf.getBasis(); + estimator_->setOrientation(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE, base_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE)); + estimator_->setAngularVel(Frame::COG, aerial_robot_estimation::EGOMOTION_ESTIMATE, cog2baselink_tf.getBasis() * omega_); estimator_->setAngularVel(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE, omega_); - estimator_->setAngularVel(Frame::BASELINK, aerial_robot_estimation::EXPERIMENT_ESTIMATE, omega_); - /* COG */ - /* TODO: only imu can assign to cog state for estimate mode and experiment mode */ - tf::Transform cog2baselink_tf; - tf::transformKDLToTF(robot_model_->getCog2Baselink(), cog2baselink_tf); - - double roll, pitch, yaw; - (estimator_->getOrientation(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE) * cog2baselink_tf.inverse().getBasis()).getRPY(roll, pitch, yaw); - estimator_->setEuler(Frame::COG, aerial_robot_estimation::EGOMOTION_ESTIMATE, tf::Vector3(roll, pitch, yaw)); - estimator_->setAngularVel(Frame::COG, aerial_robot_estimation::EGOMOTION_ESTIMATE, cog2baselink_tf.getBasis() * omega_); + // 2. experiment estimate mode + if(estimator_->getStateStatus(State::CoG::Rot, aerial_robot_estimation::GROUND_TRUTH)) + { + tf::Matrix3x3 gt_cog_rot = estimator_->getOrientation(Frame::COG, aerial_robot_estimation::GROUND_TRUTH); + double r,p,y; + gt_cog_rot.getRPY(r, p, y); + cog_euler[2] = y; + } + cog_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE).setRPY(cog_euler[0], cog_euler[1], cog_euler[2]); + estimator_->setOrientation(Frame::COG, aerial_robot_estimation::EXPERIMENT_ESTIMATE, cog_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE)); + base_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE) = cog_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE) * cog2baselink_tf.getBasis(); + estimator_->setOrientation(Frame::BASELINK, aerial_robot_estimation::EXPERIMENT_ESTIMATE, base_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE)); - (estimator_->getOrientation(Frame::BASELINK, aerial_robot_estimation::EXPERIMENT_ESTIMATE) * cog2baselink_tf.inverse().getBasis()).getRPY(roll, pitch, yaw); - estimator_->setEuler(Frame::COG, aerial_robot_estimation::EXPERIMENT_ESTIMATE, tf::Vector3(roll, pitch, yaw)); estimator_->setAngularVel(Frame::COG, aerial_robot_estimation::EXPERIMENT_ESTIMATE, cog2baselink_tf.getBasis() * omega_); + estimator_->setAngularVel(Frame::BASELINK, aerial_robot_estimation::EXPERIMENT_ESTIMATE, omega_); - /* Ground Truth if necessary */ - if(treat_imu_as_ground_truth_) + // 3. 
ground truth mode + if (!estimator_->hasGroundTruthOdom()) { - /* set baselink angles for roll and pitch, yaw is obtained from mocap */ - estimator_->setState(State::ROLL_BASE, aerial_robot_estimation::GROUND_TRUTH, 0, euler_[0]); - estimator_->setState(State::PITCH_BASE, aerial_robot_estimation::GROUND_TRUTH, 0, euler_[1]); - /* set cog angles for all axes */ - (estimator_->getOrientation(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH) * cog2baselink_tf.inverse().getBasis()).getRPY(roll, pitch, yaw); - estimator_->setEuler(Frame::COG, aerial_robot_estimation::GROUND_TRUTH, tf::Vector3(roll, pitch, yaw)); /* set baselink angular velocity for all axes using imu omega */ estimator_->setAngularVel(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH, omega_); /* set cog angular velocity for all axes using imu omega */ estimator_->setAngularVel(Frame::COG, aerial_robot_estimation::GROUND_TRUTH, cog2baselink_tf.getBasis() * omega_); } + // 4. set the rotation and angular velocity for the temporal queue for other sensor with time delay + estimator_->updateQueue(imu_stamp_.toSec(), base_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE), + base_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE), omega_); + + for (int i = 0; i < 2; i++) + { + acc_w_.at(i) = base_rot_.at(i) * acc_b_ - tf::Vector3(0, 0, aerial_robot_estimation::G); + acc_non_bias_w_.at(i) = acc_w_.at(i) - acc_bias_w_.at(i); + } + /* bais calibration */ if(bias_calib < calib_count_) { @@ -190,12 +207,16 @@ namespace sensor_plugin } /* acc bias */ - acc_bias_l_ += acc_l_; + for (int i = 0; i < 2; i++) + acc_bias_w_.at(i) += acc_w_.at(i); if(bias_calib == calib_count_) { - acc_bias_l_ /= calib_count_; - ROS_WARN("accX bias is %f, accY bias is %f, accZ bias is %f, dt is %f[sec]", acc_bias_l_.x(), acc_bias_l_.y(), acc_bias_l_.z(), sensor_dt_); + for (int i = 0; i < 2; i++) + acc_bias_w_.at(i) /= calib_count_; + + tf::Vector3 acc_bias_b = base_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE).inverse() * acc_bias_w_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE); + ROS_INFO("acc bias w.r.t body frame: [%f, %f, %f], dt: %f[sec]", acc_bias_b.x(), acc_bias_b.y(), acc_bias_b.z(), sensor_dt_); estimator_->setQueueSize(1/sensor_dt_); @@ -206,10 +227,6 @@ namespace sensor_plugin { if(!getFuserActivate(mode)) continue; - tf::Matrix3x3 orientation; - orientation.setRPY(0, 0, (estimator_->getState(State::YAW_BASE, mode))[0]); - acc_bias_w_ = orientation * acc_bias_l_; - for(auto& fuser : estimator_->getFuser(mode)) { string plugin_name = fuser.first; @@ -227,15 +244,15 @@ namespace sensor_plugin kf->setPredictionNoiseCovariance(input_noise_sigma); if(level_acc_bias_noise_sigma_ > 0) { - if(id & (1 << State::X_BASE)) kf->setInitState(acc_bias_w_.x(),2); - if(id & (1 << State::Y_BASE)) kf->setInitState(acc_bias_w_.y(),2); + if(id & (1 << State::X_BASE)) kf->setInitState(acc_bias_w_.at(mode).x(),2); + if(id & (1 << State::Y_BASE)) kf->setInitState(acc_bias_w_.at(mode).y(),2); } } if(id & (1 << State::Z_BASE)) { input_noise_sigma << z_acc_noise_sigma_, z_acc_bias_noise_sigma_; kf->setPredictionNoiseCovariance(input_noise_sigma); - if(z_acc_bias_noise_sigma_ > 0) kf->setInitState(acc_bias_w_.z(), 2); + if(z_acc_bias_noise_sigma_ > 0) kf->setInitState(acc_bias_w_.at(mode).z(), 2); } start_predict = true; } @@ -272,11 +289,6 @@ namespace sensor_plugin { if(getFuserActivate(mode)) { - tf::Matrix3x3 orientation; - orientation.setRPY(0, 0, (estimator_->getState(State::YAW_BASE, mode))[0]); - - acc_w_ = orientation * acc_l_; - 
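The lines removed here belong to the old yaw-only "level frame" projection; the new code earlier in this hunk forms the world-frame acceleration directly from the full baselink rotation. A minimal, self-contained sketch of that gravity compensation (illustrative values, not part of the patch):

      tf::Matrix3x3 R_wb;                    // world <- baselink rotation, normally taken from base_rot_
      R_wb.setRPY(0.10, -0.05, 1.57);        // example attitude
      tf::Vector3 acc_b(0.3, -0.1, 9.75);    // raw accelerometer reading in the baselink frame
      tf::Vector3 acc_w = R_wb * acc_b - tf::Vector3(0, 0, aerial_robot_estimation::G);
      // near hover acc_w is close to zero; the residual averaged over the calibration loop
      // below is what ends up in acc_bias_w_ for each estimation mode.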
acc_non_bias_w_ = orientation * (acc_l_ - acc_bias_l_); for(auto& fuser : estimator_->getFuser(mode)) { @@ -293,17 +305,17 @@ namespace sensor_plugin VectorXd input_val(1); if(id & (1 << State::X_BASE)) { - input_val << ((level_acc_bias_noise_sigma_ > 0)?acc_w_.x(): acc_non_bias_w_.x()); + input_val << ((level_acc_bias_noise_sigma_ > 0)?acc_w_.at(mode).x(): acc_non_bias_w_.at(mode).x()); axis = State::X_BASE; } else if(id & (1 << State::Y_BASE)) { - input_val << ((level_acc_bias_noise_sigma_ > 0)?acc_w_.y(): acc_non_bias_w_.y()); + input_val << ((level_acc_bias_noise_sigma_ > 0)?acc_w_.at(mode).y(): acc_non_bias_w_.at(mode).y()); axis = State::Y_BASE; } else if(id & (1 << State::Z_BASE)) { - input_val << ((z_acc_bias_noise_sigma_ > 0)?acc_w_.z(): acc_non_bias_w_.z()); + input_val << ((z_acc_bias_noise_sigma_ > 0)?acc_w_.at(mode).z(): acc_non_bias_w_.at(mode).z()); axis = State::Z_BASE; /* considering the undescend mode, such as the phase of takeoff, the velocity should not below than 0 */ @@ -311,7 +323,7 @@ namespace sensor_plugin kf->resetState(); /* get the estiamted offset(bias) */ - if(z_acc_bias_noise_sigma_ > 0) acc_bias_b_.setZ((kf->getEstimateState())(2)); + if(z_acc_bias_noise_sigma_ > 0) acc_bias_w_.at(mode).setZ((kf->getEstimateState())(2)); } kf->prediction(input_val, imu_stamp_.toSec(), params); @@ -322,14 +334,18 @@ namespace sensor_plugin if(plugin_name == "aerial_robot_base/kf_xy_roll_pitch_bias") { + tf::Vector3 acc_bias_b = base_rot_.at(mode).inverse() * acc_bias_w_.at(mode); + + double r, p, y; + base_rot_.at(mode).getRPY(r, p, y); if(id & (1 << State::X_BASE) && (id & (1 << State::Y_BASE))) { - params.push_back(euler_[0]); - params.push_back(euler_[1]); - params.push_back(euler_[2]); + params.push_back(r); + params.push_back(p); + params.push_back(y); params.push_back(acc_b_[0]); params.push_back(acc_b_[1]); - params.push_back(acc_b_[2] - acc_bias_b_.z()); + params.push_back(acc_b_[2] - acc_bias_b.z()); VectorXd input_val(5); input_val << @@ -352,37 +368,22 @@ namespace sensor_plugin } /* TODO: set z acc: should use kf reuslt? 
*/ - estimator_->setState(State::Z_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE, 2, acc_non_bias_w_.z()); - estimator_->setState(State::Z_BASE, aerial_robot_estimation::EXPERIMENT_ESTIMATE, 2, acc_non_bias_w_.z()); - - /* set the rotation and angular velocity for the temporal queue for other sensor with time delay */ - estimator_->updateQueue(imu_stamp_.toSec(), euler_[0], euler_[1], omega_); - /* TODO: we ignore yaw becuase it is relatively slower than other axes in the case of under -actuated system */ - - /* 2017.7.25: calculate the state in COG frame using the Baselink frame */ - /* pos_cog = pos_baselink - R * pos_cog2baselink */ - int estimate_mode = aerial_robot_estimation::EGOMOTION_ESTIMATE; - estimator_->setPos(Frame::COG, estimate_mode, - estimator_->getPos(Frame::BASELINK, estimate_mode) - + estimator_->getOrientation(Frame::BASELINK, estimate_mode) - * cog2baselink_tf.inverse().getOrigin()); - estimator_->setVel(Frame::COG, estimate_mode, - estimator_->getVel(Frame::BASELINK, estimate_mode) - + estimator_->getOrientation(Frame::BASELINK, estimate_mode) - * (estimator_->getAngularVel(Frame::BASELINK, estimate_mode).cross(cog2baselink_tf.inverse().getOrigin()))); - - - estimate_mode = aerial_robot_estimation::EXPERIMENT_ESTIMATE; - estimator_->setPos(Frame::COG, estimate_mode, - estimator_->getPos(Frame::BASELINK, estimate_mode) - + estimator_->getOrientation(Frame::BASELINK, estimate_mode) - * cog2baselink_tf.inverse().getOrigin()); - estimator_->setVel(Frame::COG, estimate_mode, - estimator_->getVel(Frame::BASELINK, estimate_mode) - + estimator_->getOrientation(Frame::BASELINK, estimate_mode) - * (estimator_->getAngularVel(Frame::BASELINK, estimate_mode).cross(cog2baselink_tf.inverse().getOrigin()))); - - /* no acc, we do not have the angular acceleration */ + for (int i = 0; i < 2; i++) + estimator_->setState(State::Z_BASE, i, 2, acc_non_bias_w_.at(i).z()); + + /* calculate the state in COG frame using the Baselink frame */ + /* TODO: the joint velocity */ + for (int i = 0; i < 2; i++) + { + estimator_->setPos(Frame::COG, i, + estimator_->getPos(Frame::BASELINK, i) + + estimator_->getOrientation(Frame::BASELINK, i) + * cog2baselink_tf.inverse().getOrigin()); + estimator_->setVel(Frame::COG, i, + estimator_->getVel(Frame::BASELINK, i) + + estimator_->getOrientation(Frame::BASELINK, i) + * (estimator_->getAngularVel(Frame::BASELINK, i).cross(cog2baselink_tf.inverse().getOrigin()))); + } publishAccData(); publishRosImuData(); @@ -399,9 +400,9 @@ namespace sensor_plugin state_.states[0].state[i].y = vel.x(); state_.states[1].state[i].y = vel.y(); state_.states[2].state[i].y = vel.z(); - state_.states[0].state[i].z = acc_w_.x(); - state_.states[1].state[i].z = acc_w_.y(); - state_.states[2].state[i].z = acc_w_.z(); + state_.states[0].state[i].z = acc_w_.at(i).x(); + state_.states[1].state[i].z = acc_w_.at(i).y(); + state_.states[2].state[i].z = acc_w_.at(i).z(); } state_pub_.publish(state_); } @@ -414,8 +415,8 @@ namespace sensor_plugin acc_data.header.stamp = imu_stamp_; tf::vector3TFToMsg(acc_b_, acc_data.acc_body_frame); - tf::vector3TFToMsg(acc_w_, acc_data.acc_world_frame); - tf::vector3TFToMsg(acc_non_bias_w_, acc_data.acc_non_bias_world_frame); + tf::vector3TFToMsg(acc_w_.at(0), acc_data.acc_world_frame); + tf::vector3TFToMsg(acc_non_bias_w_.at(0), acc_data.acc_non_bias_world_frame); acc_pub_.publish(acc_data); } @@ -424,7 +425,9 @@ namespace sensor_plugin { sensor_msgs::Imu imu_data; imu_data.header.stamp = imu_stamp_; - imu_data.orientation = 
tf::createQuaternionMsgFromRollPitchYaw(euler_[0], euler_[1], euler_[2]); + tf::Quaternion q; + base_rot_.at(0).getRotation(q); + tf::quaternionTFToMsg(q, imu_data.orientation); tf::vector3TFToMsg(omega_, imu_data.angular_velocity); tf::vector3TFToMsg(acc_b_, imu_data.linear_acceleration); imu_pub_.publish(imu_data); diff --git a/aerial_robot_estimation/src/sensor/mocap.cpp b/aerial_robot_estimation/src/sensor/mocap.cpp index 7098045e4..d487a26da 100644 --- a/aerial_robot_estimation/src/sensor/mocap.cpp +++ b/aerial_robot_estimation/src/sensor/mocap.cpp @@ -103,11 +103,11 @@ namespace sensor_plugin ground_truth_pose_.states[1].state.resize(2); ground_truth_pose_.states[2].id = "z"; ground_truth_pose_.states[2].state.resize(2); - ground_truth_pose_.states[3].id = "roll"; + ground_truth_pose_.states[3].id = "rot_x"; ground_truth_pose_.states[3].state.resize(2); - ground_truth_pose_.states[4].id = "pitch"; + ground_truth_pose_.states[4].id = "rot_y"; ground_truth_pose_.states[4].state.resize(2); - ground_truth_pose_.states[5].id = "yaw"; + ground_truth_pose_.states[5].id = "rot_z"; ground_truth_pose_.states[5].state.resize(2); } @@ -145,9 +145,6 @@ namespace sensor_plugin tf::Quaternion q; tf::quaternionMsgToTF(msg->pose.orientation, q); - tfScalar r = 0, p = 0, y = 0; - tf::Matrix3x3(q).getRPY(r, p, y); - tf::Vector3 euler = tf::Vector3(r, p, y); if(!first_flag) { @@ -160,6 +157,7 @@ namespace sensor_plugin */ tf::Matrix3x3 r_delta(prev_raw_q_.inverse() * q); + double r, p, y; r_delta.getRPY(r, p, y); tf::Vector3 raw_omega(r/delta_t, p/delta_t, y/delta_t); @@ -173,10 +171,13 @@ namespace sensor_plugin if(!receive_groundtruth_odom_) { omega = lpf_angular_.filterFunction(raw_omega); - estimator_->setState(State::YAW_BASE, aerial_robot_estimation::GROUND_TRUTH, 0, euler[2]); + estimator_->setOrientation(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH, tf::Matrix3x3(q)); + + tf::Transform cog2baselink_tf; + tf::transformKDLToTF(robot_model_->getCog2Baselink(), cog2baselink_tf); + estimator_->setOrientation(Frame::COG, aerial_robot_estimation::GROUND_TRUTH, + tf::Matrix3x3(q) * cog2baselink_tf.getBasis().inverse()); } - if(estimate_mode_ & (1 << aerial_robot_estimation::EXPERIMENT_ESTIMATE)) - estimator_->setState(State::YAW_BASE, aerial_robot_estimation::EXPERIMENT_ESTIMATE, 0, euler[2]); ground_truth_pose_.header.stamp = msg->header.stamp; @@ -191,7 +192,6 @@ namespace sensor_plugin } else { - ground_truth_pose_.states[i].state[0].x = euler[i - 3]; ground_truth_pose_.states[i].state[0].y = raw_omega[i - 3]; ground_truth_pose_.states[i].state[1].y = omega[i - 3]; } @@ -227,22 +227,19 @@ namespace sensor_plugin /* CoG */ /* 2017.7.25: calculate the state in COG frame using the Baselink frame */ - /* pos_cog = pos_baselink - R * pos_cog2baselink */ + tf::Transform cog2baselink_tf; tf::transformKDLToTF(robot_model_->getCog2Baselink(), cog2baselink_tf); - - int estimate_mode = aerial_robot_estimation::GROUND_TRUTH; - estimator_->setPos(Frame::COG, estimate_mode, - estimator_->getPos(Frame::BASELINK, estimate_mode) - + estimator_->getOrientation(Frame::BASELINK, estimate_mode) - * cog2baselink_tf.inverse().getOrigin()); - /* vel_cog = vel_baselink - R * (w x pos_cog2baselink) */ - estimator_->setVel(Frame::COG, estimate_mode, - estimator_->getVel(Frame::BASELINK, estimate_mode) - + estimator_->getOrientation(Frame::BASELINK, estimate_mode) - * (estimator_->getAngularVel(Frame::BASELINK, estimate_mode).cross(cog2baselink_tf.inverse().getOrigin()))); - - auto pos = estimator_->getPos(Frame::COG, 
estimate_mode); + tf::Vector3 base2cog_pos = cog2baselink_tf.inverse().getOrigin(); + tf::Matrix3x3 baselink_rot = estimator_->getOrientation(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH); + tf::Vector3 baselink_omega = estimator_->getAngularVel(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH); + + /* pos_cog = pos_baselink + R * pos_base2cog */ + estimator_->setPos(Frame::COG, aerial_robot_estimation::GROUND_TRUTH, + baselink_pos + baselink_rot * base2cog_pos); + /* vel_cog = vel_baselink + R * (w x pos_base2cog) */ + estimator_->setVel(Frame::COG, aerial_robot_estimation::GROUND_TRUTH, + baselink_vel + baselink_rot * (baselink_omega.cross(base2cog_pos))); } void groundTruthCallback(const nav_msgs::OdometryConstPtr & msg) @@ -256,25 +253,22 @@ namespace sensor_plugin /* baselink */ tf::Quaternion q; tf::quaternionMsgToTF(msg->pose.pose.orientation, q); - tfScalar r = 0, p = 0, y = 0; - tf::Matrix3x3(q).getRPY(r, p, y); - tf::Vector3 euler(r, p, y); tf::Vector3 omega; tf::vector3MsgToTF(msg->twist.twist.angular, omega); // This LPF simulates the smoothing of gyro in spinal (e.g., dragon, which use angular to do attitude control in ROS as well as spinal,) omega = lpf_angular_.filterFunction(omega); - estimator_->setEuler(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH, euler); + estimator_->setOrientation(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH, tf::Matrix3x3(q)); estimator_->setAngularVel(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH, omega); /* cog */ tf::Transform cog2baselink_tf; tf::transformKDLToTF(robot_model_->getCog2Baselink(), cog2baselink_tf); - (tf::Matrix3x3(q) * cog2baselink_tf.inverse().getBasis()).getRPY(r, p, y); - euler.setValue(r, p, y); - estimator_->setEuler(Frame::COG, aerial_robot_estimation::GROUND_TRUTH, euler); - estimator_->setAngularVel(Frame::COG, aerial_robot_estimation::GROUND_TRUTH, cog2baselink_tf.getBasis() * omega); // TODO: check the vibration + estimator_->setOrientation(Frame::COG, aerial_robot_estimation::GROUND_TRUTH, + tf::Matrix3x3(q) * cog2baselink_tf.getBasis().inverse()); + estimator_->setAngularVel(Frame::COG, aerial_robot_estimation::GROUND_TRUTH, + cog2baselink_tf.getBasis() * omega); // TODO: check the vibration setGroundTruthPosVel(baselink_pos, baselink_vel); } @@ -283,10 +277,8 @@ namespace sensor_plugin { ground_truth_first_flag = false; init(baselink_pos); + estimator_->receiveGroundTruthOdom(true); receive_groundtruth_odom_ = true; - - for(const auto& handler: estimator_->getImuHandlers()) - boost::dynamic_pointer_cast(handler)->treatImuAsGroundTruth(false); } } @@ -305,16 +297,14 @@ namespace sensor_plugin estimator_->setStateStatus(State::X_BASE, aerial_robot_estimation::GROUND_TRUTH, true); estimator_->setStateStatus(State::Y_BASE, aerial_robot_estimation::GROUND_TRUTH, true); estimator_->setStateStatus(State::Z_BASE, aerial_robot_estimation::GROUND_TRUTH, true); - estimator_->setStateStatus(State::YAW_BASE, aerial_robot_estimation::GROUND_TRUTH, true); - estimator_->setStateStatus(State::YAW_COG, aerial_robot_estimation::GROUND_TRUTH, true); + estimator_->setStateStatus(State::Base::Rot, aerial_robot_estimation::GROUND_TRUTH, true); + estimator_->setStateStatus(State::CoG::Rot, aerial_robot_estimation::GROUND_TRUTH, true); if(estimate_mode_ & (1 << aerial_robot_estimation::EXPERIMENT_ESTIMATE)) { estimator_->setStateStatus(State::X_BASE, aerial_robot_estimation::EXPERIMENT_ESTIMATE, true); estimator_->setStateStatus(State::Y_BASE, aerial_robot_estimation::EXPERIMENT_ESTIMATE, true); 
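The baselink-to-CoG conversion above is plain rigid-body kinematics between two frames fixed on the same body. A short numeric sketch (illustrative values, names assumed; not part of the patch):

      tf::Matrix3x3 R_wb = tf::Matrix3x3::getIdentity();      // world <- baselink rotation
      tf::Vector3 pos_baselink(1.0, 0.0, 1.0), vel_baselink(0.2, 0.0, 0.0);
      tf::Vector3 omega_b(0.0, 0.0, 0.5);                     // baselink angular velocity [rad/s]
      tf::Vector3 p_b2c(0.0, 0.0, -0.05);                     // baselink -> CoG offset in the baselink frame
      tf::Vector3 pos_cog = pos_baselink + R_wb * p_b2c;                   // rotated constant offset
      tf::Vector3 vel_cog = vel_baselink + R_wb * (omega_b.cross(p_b2c));  // lever-arm (w x r) term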
estimator_->setStateStatus(State::Z_BASE, aerial_robot_estimation::EXPERIMENT_ESTIMATE, true); - estimator_->setStateStatus(State::YAW_BASE, aerial_robot_estimation::EXPERIMENT_ESTIMATE, true); - estimator_->setStateStatus(State::YAW_COG, aerial_robot_estimation::EXPERIMENT_ESTIMATE, true); for(auto& fuser : estimator_->getFuser(aerial_robot_estimation::EXPERIMENT_ESTIMATE)) { @@ -325,7 +315,7 @@ namespace sensor_plugin /* x, y, z */ if(plugin_name == "kalman_filter/kf_pos_vel_acc") { - if(id < (1 << State::ROLL_COG)) + if(id < (1 << State::TOTAL_NUM)) kf->setInitState(init_pos[id >> (State::X_BASE + 1)], 0); } @@ -362,7 +352,7 @@ namespace sensor_plugin int id = kf->getId(); /* x_w, y_w, z_w */ - if(id < (1 << State::ROLL_COG)) + if(id < (1 << State::TOTAL_NUM)) { if(plugin_name == "kalman_filter/kf_pos_vel_acc") { diff --git a/aerial_robot_estimation/src/sensor/vo.cpp b/aerial_robot_estimation/src/sensor/vo.cpp index 3e156dc37..31aae0426 100644 --- a/aerial_robot_estimation/src/sensor/vo.cpp +++ b/aerial_robot_estimation/src/sensor/vo.cpp @@ -80,6 +80,7 @@ namespace sensor_plugin if(time_sync_) queuse_size = 10; vo_sub_ = nh_.subscribe(topic_name, queuse_size, &VisualOdometry::voCallback, this); + rot_valid_ = false; /* servo control timer */ if(variable_sensor_tf_flag_) @@ -115,11 +116,7 @@ namespace sensor_plugin /* check whether is force att control mode */ if(estimator_->getForceAttControlFlag() && getStatus() == Status::ACTIVE) - { - if(estimator_->getStateStatus(State::YAW_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE)) - estimator_->setStateStatus(State::YAW_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE, false); - setStatus(Status::INVALID); - } + setStatus(Status::INVALID); /* check the sensor value whether valid */ if(std::isnan(vo_msg->pose.pose.position.x) || @@ -170,7 +167,6 @@ namespace sensor_plugin return; } - /* to get the correction rotation and omega of baselink with the consideration of time delay, along with the yaw problem */ bool imu_initialized = false; for(const auto& handler: estimator_->getImuHandlers()) { @@ -242,7 +238,11 @@ namespace sensor_plugin } /** step1: ^{w}H_{b'}, b': level frame of b **/ - tf::Transform w_bdash_f(tf::createQuaternionFromYaw(estimator_->getState(State::YAW_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE)[0])); + tf::Transform w_bdash_f; + tf::Matrix3x3 base_rot = estimator_->getOrientation(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE); + double r,p,y; + base_rot.getRPY(r, p, y); // we assume the orientation of baselink at the initial phase should not be entire vertical + w_bdash_f.setRotation(tf::createQuaternionFromYaw(y)); tf::Vector3 baselink_pos = estimator_->getPos(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE); if(estimator_->getStateStatus(State::X_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE)) @@ -253,15 +253,16 @@ namespace sensor_plugin w_bdash_f.getOrigin().setZ(baselink_pos.z()); /* set the offset if we know the ground truth */ - if(estimator_->getStateStatus(State::YAW_BASE, aerial_robot_estimation::GROUND_TRUTH)) + if(estimator_->getStateStatus(State::Base::Rot, aerial_robot_estimation::GROUND_TRUTH)) { w_bdash_f.setOrigin(estimator_->getPos(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH)); - w_bdash_f.setRotation(tf::createQuaternionFromYaw(estimator_->getState(State::YAW_BASE, aerial_robot_estimation::GROUND_TRUTH)[0])); + base_rot = estimator_->getOrientation(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH); + base_rot.getRPY(r, p, y); + 
w_bdash_f.setRotation(tf::createQuaternionFromYaw(y)); } /** step2: ^{vo}H_{b'} **/ tf::Transform vo_bdash_f = raw_sensor_tf * sensor_tf_.inverse(); // ^{vo}H_{b} - double r,p,y; vo_bdash_f.getBasis().getRPY(r,p,y); vo_bdash_f.setRotation(tf::createQuaternionFromYaw(y)); // ^{vo}H_{b'} @@ -287,7 +288,7 @@ namespace sensor_plugin if(plugin_name == "kalman_filter/kf_pos_vel_acc") { - if(id < (1 << State::ROLL_COG)) + if(id < (1 << State::TOTAL_NUM)) { /* not need to initialize */ if(estimator_->getStateStatus(State::X_BASE + (id >> (State::X_BASE + 1)), aerial_robot_estimation::EGOMOTION_ESTIMATE)) @@ -375,7 +376,6 @@ namespace sensor_plugin if (time_sync_) { // TODO: what is the following previous tricky code? - // int mode = estimator_->getStateStatus(State::YAW_BASE, aerial_robot_estimation::GROUND_TRUTH)?aerial_robot_estimation::GROUND_TRUTH:aerial_robot_estimation::EGOMOTION_ESTIMATE; int mode = aerial_robot_estimation::EGOMOTION_ESTIMATE; if (raw_vel == tf::Vector3(0.0,0.0,0.0)) @@ -419,16 +419,15 @@ namespace sensor_plugin baselink_tf_.getOrigin().x(), baselink_tf_.getOrigin().y(), baselink_tf_.getOrigin().z()); baselink_tf_.getBasis().getRPY(r, p, y); - ROS_INFO("mocap yaw: %f, vo rot: [%f, %f, %f]", estimator_->getState(State::YAW_BASE, aerial_robot_estimation::GROUND_TRUTH)[0], r, p, y); + + double mocap_r, mocap_p, mocap_y; + tf::Matrix3x3 base_rot = estimator_->getOrientation(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH); + base_rot.getRPY(mocap_r, mocap_p, mocap_y); + ROS_INFO("mocap yaw: %f, vo rot: [%f, %f, %f]", mocap_y, r, p, y); } double start_time = ros::Time::now().toSec(); estimateProcess(); - /* - double time_du = ros::Time::now().toSec() - start_time; - if(max_du < time_du) max_du = time_du; - ROS_INFO("max du: %f, time du: %f", max_du, time_du); - */ /* publish */ vo_state_.header.stamp.fromSec(curr_timestamp_); @@ -461,6 +460,7 @@ namespace sensor_plugin if((sensor_view_rot * tf::Vector3(1,0,0)).z() < -0.8) { double height = estimator_->getState(State::Z_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE)[0]; + rot_valid_ = false; if(height < downwards_vo_min_height_ || height > downwards_vo_max_height_) { @@ -470,15 +470,7 @@ namespace sensor_plugin } else { - /* YAW */ - if(!estimator_->getStateStatus(State::YAW_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE)) - { - estimator_->setStateStatus(State::YAW_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE, true); - ROS_WARN_STREAM(indexed_nhp_.getNamespace() <<": set yaw estimate status true"); - } - tfScalar r,p,y; - baselink_tf_.getBasis().getRPY(r,p,y); - estimator_->setState(State::YAW_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE, 0, y); + rot_valid_ = true; } /* XYZ */ @@ -493,7 +485,7 @@ namespace sensor_plugin double timestamp = reference_timestamp_; double outlier_thresh = (fusion_mode_ == ONLY_VEL_MODE)?(vel_outlier_thresh_ / (vel_noise_sigma_) / (vel_noise_sigma_)):0; /* x_w, y_w, z_w */ - if(id < (1 << State::ROLL_COG)) + if(id < (1 << State::TOTAL_NUM)) { if(plugin_name == "kalman_filter/kf_pos_vel_acc") { diff --git a/aerial_robot_estimation/src/state_estimation.cpp b/aerial_robot_estimation/src/state_estimation.cpp index be3efd10b..162e20dbf 100644 --- a/aerial_robot_estimation/src/state_estimation.cpp +++ b/aerial_robot_estimation/src/state_estimation.cpp @@ -45,12 +45,13 @@ StateEstimator::StateEstimator() flying_flag_(false), un_descend_flag_(false), force_att_control_flag_(false), + has_groundtruth_odom_(false), imu_handlers_(0), alt_handlers_(0), vo_handlers_(0), gps_handlers_(0), 
plane_detection_handlers_(0) { fuser_[0].resize(0); fuser_[1].resize(0); - for(int i = 0; i < State::TOTAL_NUM; i ++) + for(int i = 0; i < 6; i ++) { for(int j = 0; j < 3; j++) { @@ -59,6 +60,16 @@ StateEstimator::StateEstimator() } } + for(int i = 0; i < 3; i ++) + { + base_rots_.at(i).setIdentity(); + cog_rots_.at(i).setIdentity(); + base_omegas_.at(i).setZero(); + cog_omegas_.at(i).setZero(); + base_rot_status_.at(i) = 0; + cog_rot_status_.at(i) = 0; + } + /* TODO: represented sensors unhealth level */ unhealth_level_ = 0; } @@ -88,7 +99,7 @@ void StateEstimator::statePublish(const ros::TimerEvent & e) aerial_robot_msgs::States full_state; full_state.header.stamp = imu_stamp; - for(int axis = 0; axis < State::TOTAL_NUM; axis++) + for(int axis = 0; axis < 6; axis++) { aerial_robot_msgs::State r_state; AxisState state = getState(axis); @@ -113,24 +124,6 @@ void StateEstimator::statePublish(const ros::TimerEvent & e) case State::Z_BASE: r_state.id = "z_b"; break; - case State::ROLL_COG: - r_state.id = "roll_cog"; - break; - case State::PITCH_COG: - r_state.id = "pitch_cog"; - break; - case State::YAW_COG: - r_state.id = "yaw_cog"; - break; - case State::ROLL_BASE: - r_state.id = "roll_b"; - break; - case State::PITCH_BASE: - r_state.id = "pitch_b"; - break; - case State::YAW_BASE: - r_state.id = "yaw_b"; - break; default: break; } From edc4afe46f0fb67849480749fc89426085811699 Mon Sep 17 00:00:00 2001 From: sugihara Date: Sun, 18 Feb 2024 22:59:51 +0900 Subject: [PATCH 05/14] [aerial_robot_control] refactor navigation and use so3 system for rotatonal motion --- .../aerial_robot_control/flight_navigation.h | 2 +- .../control/base/pose_linear_controller.cpp | 19 +++++++++++++++++-- .../src/flight_navigation.cpp | 8 ++++---- 3 files changed, 22 insertions(+), 7 deletions(-) diff --git a/aerial_robot_control/include/aerial_robot_control/flight_navigation.h b/aerial_robot_control/include/aerial_robot_control/flight_navigation.h index 9e9aca157..2f5a193b0 100644 --- a/aerial_robot_control/include/aerial_robot_control/flight_navigation.h +++ b/aerial_robot_control/include/aerial_robot_control/flight_navigation.h @@ -557,7 +557,7 @@ namespace aerial_robot_navigation void setTargetYawFromCurrentState() { - double yaw = estimator_->getState(State::YAW_COG, estimate_mode_)[0]; + double yaw = estimator_->getEuler(Frame::COG, estimate_mode_).z(); setTargetYaw(yaw); // set the velocty to zero diff --git a/aerial_robot_control/src/control/base/pose_linear_controller.cpp b/aerial_robot_control/src/control/base/pose_linear_controller.cpp index 31623e3b5..f8dbf72f9 100644 --- a/aerial_robot_control/src/control/base/pose_linear_controller.cpp +++ b/aerial_robot_control/src/control/base/pose_linear_controller.cpp @@ -46,6 +46,7 @@ namespace aerial_robot_control vel_(0,0,0), target_vel_(0,0,0), rpy_(0,0,0), target_rpy_(0,0,0), target_acc_(0,0,0), + target_omega_(0,0,0), start_rp_integration_(false) { pid_msg_.x.total.resize(1); @@ -187,6 +188,12 @@ namespace aerial_robot_control start_rp_integration_ = false; for(auto& controller: pid_controllers_) controller.reset(); + + target_pos_.setValue(0, 0, 0); + target_vel_.setValue(0, 0, 0); + target_acc_.setValue(0, 0, 0); + target_rpy_.setValue(0, 0, 0); + target_omega_.setValue(0, 0, 0); } bool PoseLinearController::update() @@ -207,10 +214,18 @@ namespace aerial_robot_control target_vel_ = navigator_->getTargetVel(); target_acc_ = navigator_->getTargetAcc(); - rpy_ = estimator_->getEuler(Frame::COG, estimate_mode_); + // rpy_ = estimator_->getEuler(Frame::COG, 
estimate_mode_); + tf::Quaternion cog2baselink_rot; + tf::quaternionKDLToTF(robot_model_->getCogDesireOrientation(), cog2baselink_rot); + tf::Matrix3x3 cog_rot = estimator_->getOrientation(Frame::BASELINK, estimate_mode_) * tf::Matrix3x3(cog2baselink_rot).inverse(); + double r, p, y; cog_rot.getRPY(r, p, y); + rpy_.setValue(r, p, y); + omega_ = estimator_->getAngularVel(Frame::COG, estimate_mode_); target_rpy_ = navigator_->getTargetRPY(); - target_omega_ = navigator_->getTargetOmega(); + tf::Matrix3x3 target_rot; target_rot.setRPY(target_rpy_.x(), target_rpy_.y(), target_rpy_.z()); + tf::Vector3 target_omega = navigator_->getTargetOmega(); // w.r.t. target cog frame + target_omega_ = cog_rot.inverse() * target_rot * target_omega; // w.r.t. current cog frame target_ang_acc_ = navigator_->getTargetAngAcc(); // time diff diff --git a/aerial_robot_control/src/flight_navigation.cpp b/aerial_robot_control/src/flight_navigation.cpp index c6cc14f1f..72385e7bf 100644 --- a/aerial_robot_control/src/flight_navigation.cpp +++ b/aerial_robot_control/src/flight_navigation.cpp @@ -258,7 +258,7 @@ void BaseNavigator::naviCallback(const aerial_robot_msgs::FlightNavConstPtr & ms } case LOCAL_FRAME: { - double yaw_angle = estimator_->getState(State::YAW_COG, estimate_mode_)[0]; + double yaw_angle = estimator_->getEuler(Frame::COG, estimate_mode_).z(); tf::Vector3 target_vel = frameConversion(tf::Vector3(msg->target_vel_x, msg->target_vel_y, 0), yaw_angle); setTargetVelX(target_vel.x()); setTargetVelY(target_vel.y()); @@ -300,7 +300,7 @@ void BaseNavigator::naviCallback(const aerial_robot_msgs::FlightNavConstPtr & ms } case LOCAL_FRAME: { - double yaw_angle = estimator_->getState(State::YAW_COG, estimate_mode_)[0]; + double yaw_angle = estimator_->getEuler(Frame::COG, estimate_mode_).z(); tf::Vector3 target_acc = frameConversion(tf::Vector3(msg->target_acc_x, msg->target_acc_y, 0), yaw_angle); setTargetAccX(target_acc.x()); setTargetAccY(target_acc.y()); @@ -549,7 +549,7 @@ void BaseNavigator::joyStickControl(const sensor_msgs::JoyConstPtr & joy_msg) std::string baselink = robot_model_->getBaselinkName(); tf::transformKDLToTF(segments_tf.at(baselink).Inverse() * segments_tf.at(teleop_local_frame_), teleop_local_frame_tf); - double yaw_angle = estimator_->getState(State::YAW_COG, estimate_mode_)[0]; + double yaw_angle = estimator_->getEuler(Frame::COG, estimate_mode_).z(); local_frame_rot = tf::Matrix3x3(tf::createQuaternionFromYaw(yaw_angle)) * teleop_local_frame_tf.getBasis(); } @@ -735,7 +735,7 @@ void BaseNavigator::update() setTargetAngAccZ(target_ang_acc_z); tf::Vector3 curr_pos = estimator_->getPos(Frame::COG, estimate_mode_); - double yaw_angle = estimator_->getState(State::YAW_COG, estimate_mode_)[0]; + double yaw_angle = estimator_->getEuler(Frame::COG, estimate_mode_).z(); ROS_INFO_THROTTLE(0.5, "[Nav] trajectory mode, target pos&yaw: [%f, %f, %f, %f], curr pos&yaw: [%f, %f, %f, %f]", target_state.p(0), target_state.p(1), target_state.p(2), target_yaw, curr_pos.x(), curr_pos.y(), curr_pos.z(), yaw_angle); } } From 19da029376e2cc784bd9744489ec568a223c363b Mon Sep 17 00:00:00 2001 From: sugikazu75 Date: Mon, 19 Feb 2024 00:21:06 +0900 Subject: [PATCH 06/14] [aerial_robot_estimation][mocap] bug fix for singularoty pose --- aerial_robot_estimation/src/sensor/mocap.cpp | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/aerial_robot_estimation/src/sensor/mocap.cpp b/aerial_robot_estimation/src/sensor/mocap.cpp index d487a26da..a457632fa 100644 --- 
a/aerial_robot_estimation/src/sensor/mocap.cpp +++ b/aerial_robot_estimation/src/sensor/mocap.cpp @@ -265,8 +265,13 @@ namespace sensor_plugin /* cog */ tf::Transform cog2baselink_tf; tf::transformKDLToTF(robot_model_->getCog2Baselink(), cog2baselink_tf); - estimator_->setOrientation(Frame::COG, aerial_robot_estimation::GROUND_TRUTH, - tf::Matrix3x3(q) * cog2baselink_tf.getBasis().inverse()); + double r, p, y; + cog2baselink_tf.getBasis().getRPY(r, p, y); + tf::Quaternion q_baselink2cog; + q_baselink2cog.setRPY(-r, -p, -y); + q = q_baselink2cog * q; + + estimator_->setOrientation(Frame::COG, aerial_robot_estimation::GROUND_TRUTH, tf::Matrix3x3(q)); estimator_->setAngularVel(Frame::COG, aerial_robot_estimation::GROUND_TRUTH, cog2baselink_tf.getBasis() * omega); // TODO: check the vibration From 8eeab6599464bd9a00471312d5b164b8916da9fe Mon Sep 17 00:00:00 2001 From: sugikazu75 Date: Mon, 19 Feb 2024 02:11:37 +0900 Subject: [PATCH 07/14] [dragon] workaround for so3 control --- .../aerial_robot_control/flight_navigation.h | 1 + .../dragon/control/full_vectoring_control.h | 1 + .../dragon/include/dragon/dragon_navigation.h | 19 +- .../dragon/model/full_vectoring_robot_model.h | 6 +- robots/dragon/scripts/desired_control_test.py | 15 +- robots/dragon/scripts/transformation_demo.py | 29 +-- .../scripts/transformation_demo_vertical.py | 7 - .../src/control/full_vectoring_control.cpp | 11 ++ robots/dragon/src/dragon_navigation.cpp | 180 ++++++++++++++++-- .../src/model/full_vectoring_robot_model.cpp | 103 +++++++--- robots/dragon/src/sensor/imu.cpp | 2 +- robots/dragon/test/dragon_control.test | 2 +- 12 files changed, 287 insertions(+), 89 deletions(-) diff --git a/aerial_robot_control/include/aerial_robot_control/flight_navigation.h b/aerial_robot_control/include/aerial_robot_control/flight_navigation.h index 2f5a193b0..c0055ec09 100644 --- a/aerial_robot_control/include/aerial_robot_control/flight_navigation.h +++ b/aerial_robot_control/include/aerial_robot_control/flight_navigation.h @@ -101,6 +101,7 @@ namespace aerial_robot_navigation inline void setTargetYaw(float value) { target_rpy_.setZ(value); } inline void addTargetYaw(float value) { setTargetYaw(angles::normalize_angle(target_rpy_.z() + value)); } inline void setTargetOmegaZ(float value) { target_omega_.setZ(value); } + inline void setTargetRPY(tf::Vector3 value) { target_rpy_ = value; } inline void setTargetAngAcc(tf::Vector3 acc) { target_ang_acc_ = acc; } inline void setTargetAngAcc(double x, double y, double z) { setTargetAngAcc(tf::Vector3(x, y, z)); } inline void setTargetZeroAngAcc() { setTargetAngAcc(tf::Vector3(0,0,0)); } diff --git a/robots/dragon/include/dragon/control/full_vectoring_control.h b/robots/dragon/include/dragon/control/full_vectoring_control.h index 380b3dd14..7d9c1d308 100644 --- a/robots/dragon/include/dragon/control/full_vectoring_control.h +++ b/robots/dragon/include/dragon/control/full_vectoring_control.h @@ -37,6 +37,7 @@ #include #include +#include #include #include #include diff --git a/robots/dragon/include/dragon/dragon_navigation.h b/robots/dragon/include/dragon/dragon_navigation.h index 92ed86255..ccf821f4d 100644 --- a/robots/dragon/include/dragon/dragon_navigation.h +++ b/robots/dragon/include/dragon/dragon_navigation.h @@ -36,7 +36,11 @@ #pragma once #include +#include +#include +#include #include +#include #include namespace aerial_robot_navigation @@ -55,12 +59,13 @@ namespace aerial_robot_navigation void update() override; inline const bool getLandingFlag() const { return landing_flag_; } + 
inline const bool getEqCoGWorldFlag() const { return eq_cog_world_; } private: - ros::Publisher curr_target_baselink_rot_pub_; + ros::Publisher target_baselink_rpy_pub_; // to spinal ros::Publisher joint_control_pub_; - ros::Subscriber final_target_baselink_rot_sub_; - ros::Subscriber target_baselink_rot_sub_; + ros::Subscriber final_target_baselink_rot_sub_, final_target_baselink_rpy_sub_; + ros::Subscriber target_rotation_motion_sub_; void halt() override; void reset() override; @@ -71,13 +76,15 @@ namespace aerial_robot_navigation void baselinkRotationProcess(); void rosParamInit() override; - void setFinalTargetBaselinkRotCallback(const spinal::DesireCoordConstPtr & msg); - void targetBaselinkRotCallback(const spinal::DesireCoordConstPtr& msg); + void targetBaselinkRotCallback(const geometry_msgs::QuaternionStampedConstPtr & msg); + void targetBaselinkRPYCallback(const geometry_msgs::Vector3StampedConstPtr & msg); + void targetRotationMotionCallback(const nav_msgs::OdometryConstPtr& msg); /* target baselink rotation */ double prev_rotation_stamp_; std::vector target_gimbal_angles_; - tf::Vector3 curr_target_baselink_rot_, final_target_baselink_rot_; + tf::Quaternion curr_target_baselink_rot_, final_target_baselink_rot_; + bool eq_cog_world_; /* landing process */ bool level_flag_; diff --git a/robots/dragon/include/dragon/model/full_vectoring_robot_model.h b/robots/dragon/include/dragon/model/full_vectoring_robot_model.h index 22bc859a3..556832fae 100644 --- a/robots/dragon/include/dragon/model/full_vectoring_robot_model.h +++ b/robots/dragon/include/dragon/model/full_vectoring_robot_model.h @@ -82,7 +82,7 @@ namespace Dragon // rewrite Eigen::VectorXd calcFeasibleControlFxyDists(const std::vector& gimbal_roll_lock, const std::vector& locked_roll_angles, int rotor_num, const std::vector& link_rot); - Eigen::VectorXd calcFeasibleControlTDists(const std::vector& gimbal_roll_lock, const std::vector& locked_roll_angles, int rotor_num, const std::vector& rotor_pos, const std::vector& link_rot); + Eigen::VectorXd calcFeasibleControlTDists(const std::vector& gimbal_roll_lock, const std::vector& locked_roll_angles, int rotor_num, const std::vector& rotor_pos, const std::vector& link_rot, const Eigen::Matrix3d& cog_rot); bool stabilityCheck(bool verbose) override; @@ -107,7 +107,9 @@ namespace Dragon double min_torque_weight_; double min_force_normalized_weight_; double min_torque_normalized_weight_; - std::vector prev_links_rotation_from_cog_; + std::vector prev_links_rotation_from_cog_; // for gimbal lock check + std::vector last_links_rotation_from_cog_; // for configuration check + double configuration_t_, confinguration_check_du_, fix_configuration_tresh_; int robot_model_refine_max_iteration_; double robot_model_refine_threshold_; std::mutex roll_locked_gimbal_mutex_; diff --git a/robots/dragon/scripts/desired_control_test.py b/robots/dragon/scripts/desired_control_test.py index cd016851e..7ec5f2dd6 100755 --- a/robots/dragon/scripts/desired_control_test.py +++ b/robots/dragon/scripts/desired_control_test.py @@ -4,24 +4,25 @@ import time import rospy import math +from geometry_msgs.msg import Vector3Stamped, Vector3 from sensor_msgs.msg import JointState -rospy.init_node("gimbal_control_test") +rospy.init_node("baselink_rot_test") -from spinal.msg import DesireCoord +att_control_pub = rospy.Publisher("/dragon/final_target_baselink_rpy", Vector3Stamped, queue_size=1) -att_control_pub = rospy.Publisher("/dragon/final_target_baselink_rot", DesireCoord, queue_size=1) +desire_att = 
Vector3Stamped(vector = Vector3(0,0,0)) +desire_att.header.stamp = ros.time.now() -desire_att = DesireCoord() time.sleep(0.5) while not rospy.is_shutdown(): - if desire_att.roll == -1.57: - desire_att.roll = 0 + if desire_att.vector.x == -1.57: + desire_att.vector.x = 0 else: - desire_att.roll = -1.57 + desire_att.vector.x = -1.57 att_control_pub.publish(desire_att) time.sleep(10) diff --git a/robots/dragon/scripts/transformation_demo.py b/robots/dragon/scripts/transformation_demo.py index 8e5f9aca3..abed68268 100755 --- a/robots/dragon/scripts/transformation_demo.py +++ b/robots/dragon/scripts/transformation_demo.py @@ -5,47 +5,48 @@ import rospy import math from sensor_msgs.msg import JointState -from spinal.msg import DesireCoord +from geometry_msgs.msg import Vector3Stamped, Vector3 rospy.init_node("dragon_transformation_demo") joint_control_pub = rospy.Publisher("/dragon/joints_ctrl", JointState, queue_size=10) -att_control_pub = rospy.Publisher("/dragon/final_target_baselink_rot", DesireCoord, queue_size=1) +att_control_pub = rospy.Publisher("/dragon/final_target_baselink_rpy", Vector3Stamped, queue_size=1) demo_mode = rospy.get_param("~mode", 0) reset = rospy.get_param("~reset", False) reverse_reset = rospy.get_param("~reverse_reset", False) desire_joint = JointState() -desire_att = DesireCoord() +desire_att = Vector3Stamped(vector = Vector3(0,0,0)) +desire_att.header.stamp = rospy.Time.now() half_pi = 1.56 # the limitation of the joint if demo_mode == 0: # sea horse desire_joint.position = [-0.6, -half_pi, -0.6, 0, 0, half_pi] - desire_att.pitch = -0.3 + desire_att.vector.y = -0.3 elif demo_mode == 1: # spiral model desire_joint.position = [-0.8, 1.2, -0.8, 1.2, -0.8, 1.2] - desire_att.pitch = -0.4 - desire_att.roll = 0.4 + desire_att.vector.x = 0.4 + desire_att.vector.y = -0.4 + elif demo_mode == 2: # mode model desire_joint.position = [0.0, half_pi, -1.5, 0.0, 0.0, half_pi] - desire_att.pitch = 0.75 -else: - sys.exit() + desire_att.vector.y = 0.75 if reset: desire_joint.position = [0.0, half_pi, 0, half_pi, 0.0, half_pi] - desire_att.pitch = 0 - desire_att.roll = 0 + desire_att.vector.x = 0.0 + desire_att.vector.y = 0.0 + if reverse_reset: desire_joint.position = [0.0, -half_pi, 0, -half_pi, 0.0, -half_pi] - desire_att.pitch = 0 - desire_att.roll = 0 + desire_att.vector.x = 0.0 + desire_att.vector.y = 0.0 + time.sleep(0.6) joint_control_pub.publish(desire_joint) att_control_pub.publish(desire_att) - diff --git a/robots/dragon/scripts/transformation_demo_vertical.py b/robots/dragon/scripts/transformation_demo_vertical.py index c0d096a84..25077a7d4 100755 --- a/robots/dragon/scripts/transformation_demo_vertical.py +++ b/robots/dragon/scripts/transformation_demo_vertical.py @@ -5,19 +5,16 @@ import rospy import math from sensor_msgs.msg import JointState -from spinal.msg import DesireCoord rospy.init_node("dragon_transformation_demo") joint_control_pub = rospy.Publisher("/dragon/joints_ctrl", JointState, queue_size=10) -att_control_pub = rospy.Publisher("/dragon/final_target_baselink_rot", DesireCoord, queue_size=1) demo_mode = rospy.get_param("~mode", 0) #reset = rospy.get_param("~reset", False) #reverse_reset = rospy.get_param("~reverse_reset", False) desire_joint = JointState() -desire_att = DesireCoord() half_pi = 1.56 # the limitation of the joint @@ -32,11 +29,7 @@ elif demo_mode == 2: # mode model desire_joint.name = ["joint2_yaw", "joint1_yaw"] desire_joint.position = [-half_pi, -half_pi] -else: - sys.exit() time.sleep(0.6) joint_control_pub.publish(desire_joint) - - diff 
--git a/robots/dragon/src/control/full_vectoring_control.cpp b/robots/dragon/src/control/full_vectoring_control.cpp index bbd11799b..b5d6610fc 100644 --- a/robots/dragon/src/control/full_vectoring_control.cpp +++ b/robots/dragon/src/control/full_vectoring_control.cpp @@ -469,6 +469,13 @@ void DragonFullVectoringController::rotorInterfereCompensation() void DragonFullVectoringController::controlCore() { /* TODO: saturation of z control */ + + // workaround to handle speical definition of CoG desired orientation + if(boost::dynamic_pointer_cast(navigator_)->getEqCoGWorldFlag()) + { + navigator_->setTargetYaw(0); + } + PoseLinearController::controlCore(); tf::Matrix3x3 uav_rot = estimator_->getOrientation(Frame::COG, estimate_mode_); @@ -543,6 +550,10 @@ void DragonFullVectoringController::controlCore() if(overlap_rotors_.size() > 0) ROS_DEBUG_STREAM("rotor interference: " << ss.str()); setTargetWrenchAccCog(target_wrench_acc_cog); + // TODO: we need to compensate a nonlinear term w x (Jw) for rotational motion. + // solution1: using the raw angular velocity: https://ieeexplore.ieee.org/document/5717652, problem is the noisy of raw omega + // solution2: using the target angular velocity: https://ieeexplore.ieee.org/document/6669644, problem is the larget gap between true omega and target one. but this paper claim this is oK + // solution3: ignore this term for low angular motion <- current is this #if 1 // iteratively find the target force and target gimbal angles KDL::Rotation cog_desire_orientation = robot_model_->getCogDesireOrientation(); diff --git a/robots/dragon/src/dragon_navigation.cpp b/robots/dragon/src/dragon_navigation.cpp index d75a9538a..e1e1d8ae1 100644 --- a/robots/dragon/src/dragon_navigation.cpp +++ b/robots/dragon/src/dragon_navigation.cpp @@ -8,9 +8,10 @@ DragonNavigator::DragonNavigator(): servo_torque_(false), level_flag_(false), landing_flag_(false), - curr_target_baselink_rot_(0, 0, 0), - final_target_baselink_rot_(0, 0, 0) + eq_cog_world_(false) { + curr_target_baselink_rot_.setRPY(0, 0, 0); + final_target_baselink_rot_.setRPY(0, 0, 0); } void DragonNavigator::initialize(ros::NodeHandle nh, ros::NodeHandle nhp, @@ -21,9 +22,11 @@ void DragonNavigator::initialize(ros::NodeHandle nh, ros::NodeHandle nhp, /* initialize the flight control */ BaseNavigator::initialize(nh, nhp, robot_model, estimator, loop_du); - curr_target_baselink_rot_pub_ = nh_.advertise("desire_coordinate", 1); + target_baselink_rpy_pub_ = nh_.advertise("desire_coordinate", 1); // to spinal joint_control_pub_ = nh_.advertise("joints_ctrl", 1); - final_target_baselink_rot_sub_ = nh_.subscribe("final_target_baselink_rot", 1, &DragonNavigator::setFinalTargetBaselinkRotCallback, this); + final_target_baselink_rot_sub_ = nh_.subscribe("final_target_baselink_rot", 1, &DragonNavigator::targetBaselinkRotCallback, this); + final_target_baselink_rpy_sub_ = nh_.subscribe("final_target_baselink_rpy", 1, &DragonNavigator::targetBaselinkRPYCallback, this); + target_rotation_motion_sub_ = nh_.subscribe("target_rotation_motion", 1, &DragonNavigator::targetRotationMotionCallback, this); prev_rotation_stamp_ = ros::Time::now().toSec(); } @@ -39,9 +42,76 @@ void DragonNavigator::update() } -void DragonNavigator::setFinalTargetBaselinkRotCallback(const spinal::DesireCoordConstPtr & msg) +void DragonNavigator::targetBaselinkRotCallback(const geometry_msgs::QuaternionStampedConstPtr & msg) { - final_target_baselink_rot_.setValue(msg->roll, msg->pitch, msg->yaw); + tf::quaternionMsgToTF(msg->quaternion, 
final_target_baselink_rot_); + target_omega_.setValue(0,0,0); // for sure to reset the target angular velocity + + // special process + if(getTargetRPY().z() != 0) + { + curr_target_baselink_rot_.setRPY(0, 0, getTargetRPY().z()); + eq_cog_world_ = true; + } +} + +void DragonNavigator::targetBaselinkRPYCallback(const geometry_msgs::Vector3StampedConstPtr & msg) +{ + final_target_baselink_rot_.setRPY(msg->vector.x, msg->vector.y, msg->vector.z); + target_omega_.setValue(0,0,0); // for sure to reset the target angular velocity +} + + +void DragonNavigator::targetRotationMotionCallback(const nav_msgs::OdometryConstPtr& msg) +{ + std::string frame = msg->header.frame_id; + + if(frame != std::string("cog") && frame != std::string("baselink")) + { + ROS_ERROR("frame %s is not support in target rotation motion",frame.c_str()); + return; + } + + tf::Quaternion q; + tf::quaternionMsgToTF(msg->pose.pose.orientation, q); + + tf::Vector3 w; + tf::vector3MsgToTF(msg->twist.twist.angular, w); + + // special process for current special definition of tranformation between CoG and Baselink + if(frame == std::string("baselink")) + { + // and directly send to aerial model + KDL::Rotation rot; + tf::quaternionMsgToKDL(msg->pose.pose.orientation, rot); + + double qx,qy,qz,qw; + rot.GetQuaternion(qx,qy,qz,qw); + robot_model_->setCogDesireOrientation(rot); + tf::quaternionMsgToTF(msg->pose.pose.orientation, curr_target_baselink_rot_); + final_target_baselink_rot_ = curr_target_baselink_rot_; + + // convert to target CoG frame from Baselink frame + eq_cog_world_ = true; + setTargetOmega(tf::Matrix3x3(q) * w); + + // send to spinal + spinal::DesireCoord msg; + double r,p,y; + rot.GetRPY(r, p, y); + msg.roll = r; + msg.pitch = p; + msg.yaw = y; + target_baselink_rpy_pub_.publish(msg); + } + else + { // CoG frame + + // This should be the standard process to receive desired rotation motion (orientation + angular velocity) + double r,p,y; tf::Matrix3x3(q).getRPY(r, p, y); + setTargetRPY(tf::Vector3(r, p, y)); + setTargetOmega(w); + } } void DragonNavigator::baselinkRotationProcess() @@ -50,15 +120,29 @@ void DragonNavigator::baselinkRotationProcess() if(ros::Time::now().toSec() - prev_rotation_stamp_ > baselink_rot_pub_interval_) { - if((final_target_baselink_rot_- curr_target_baselink_rot_).length() > baselink_rot_change_thresh_) - curr_target_baselink_rot_ += ((final_target_baselink_rot_ - curr_target_baselink_rot_).normalize() * baselink_rot_change_thresh_); + tf::Quaternion delta_q = curr_target_baselink_rot_.inverse() * final_target_baselink_rot_; + double angle = delta_q.getAngle(); + if (angle > M_PI) angle -= 2 * M_PI; + + if(fabs(angle) > baselink_rot_change_thresh_) + { + curr_target_baselink_rot_ *= tf::Quaternion(delta_q.getAxis(), fabs(angle) / angle * baselink_rot_change_thresh_); + } else curr_target_baselink_rot_ = final_target_baselink_rot_; - spinal::DesireCoord target_baselink_rot_msg; - target_baselink_rot_msg.roll = curr_target_baselink_rot_.x(); - target_baselink_rot_msg.pitch = curr_target_baselink_rot_.y(); - curr_target_baselink_rot_pub_.publish(target_baselink_rot_msg); + KDL::Rotation rot; + tf::quaternionTFToKDL(curr_target_baselink_rot_, rot); + robot_model_->setCogDesireOrientation(rot); + + // send to spinal + spinal::DesireCoord msg; + double r,p,y; + tf::Matrix3x3(curr_target_baselink_rot_).getRPY(r, p, y); + msg.roll = r; + msg.pitch = p; + msg.yaw = y; + target_baselink_rpy_pub_.publish(msg); prev_rotation_stamp_ = ros::Time::now().toSec(); } @@ -68,6 +152,8 @@ void 
DragonNavigator::landingProcess() { if(getForceLandingFlag() || getNaviState() == LAND_STATE) { + target_omega_.setValue(0,0,0); // for sure to target angular velocity is zero + if(!level_flag_) { const auto joint_state = robot_model_->kdlJointToMsg(robot_model_->getJointPositions()); @@ -86,18 +172,60 @@ void DragonNavigator::landingProcess() } } - joint_control_pub_.publish(joint_control_msg); - final_target_baselink_rot_.setValue(0, 0, 0); - double curr_roll = estimator_->getState(State::ROLL_BASE, estimate_mode_)[0]; - double curr_pitch = estimator_->getState(State::PITCH_BASE, estimate_mode_)[0]; + if (!eq_cog_world_) + { + final_target_baselink_rot_.setRPY(0, 0, 0); + + tf::Vector3 base_euler = estimator_->getEuler(Frame::BASELINK, estimate_mode_); + + if(fabs(fabs(base_euler.y()) - M_PI/2) > 0.2) + { + /* force set the current deisre tilt to current estimated tilt */ + curr_target_baselink_rot_.setRPY(base_euler.x(), base_euler.y(), 0); // case1 + } + else + { // singularity of XYZ Euler + + double r,p,y; + tf::Matrix3x3(curr_target_baselink_rot_).getRPY(r,p,y); + if(fabs(fabs(p) - M_PI/2) > 0.2) + curr_target_baselink_rot_.setRPY(0, base_euler.y(), 0); // case2 + + // case3: use the last curr_target_baselink_rot_ + } + } + else + { + tf::Quaternion base_rot; + estimator_->getOrientation(Frame::BASELINK, estimate_mode_).getRotation(base_rot); - if(fabs(fabs(curr_pitch) - M_PI/2) < 0.05 && fabs(curr_roll) > M_PI/2) // singularity of XYZ Euler - curr_roll = 0; + double delta_angle = (curr_target_baselink_rot_.inverse() * base_rot).getAngle(); + if (delta_angle > M_PI) delta_angle -= 2 * M_PI; + if(fabs(delta_angle) > 0.2) + { + curr_target_baselink_rot_ = base_rot; + } - /* force set the current deisre tilt to current estimated tilt */ - curr_target_baselink_rot_.setValue(curr_roll, curr_pitch, 0); + tf::Vector3 cross_v = tf::Vector3(0,0,1).cross(tf::Matrix3x3(curr_target_baselink_rot_.inverse()) * tf::Vector3(0,0,1)); + if(cross_v.length() < 0.001) // sine of delta_angle to world z frame + { + final_target_baselink_rot_ = curr_target_baselink_rot_; + } + else + { + // IMPORTANT: to avoid NaN because of asin(angle) when angle > 1, thus use tfAsin which has clamping process before do asin + tf::Quaternion delta_q(cross_v, tfAsin(cross_v.length())); + final_target_baselink_rot_ = curr_target_baselink_rot_ * delta_q; + // Note: normalize quaterinon just in case that curr_target_baselink_rot is not a perfect quaternion (e.g., manual rostopic pub) + double r,p,y; + tf::Matrix3x3(final_target_baselink_rot_).getRPY(r,p,y); + final_target_baselink_rot_.setRPY(0, 0, y); + ROS_DEBUG("refine final_target_baselink_rot_: [%f, %f, %f, %f]", final_target_baselink_rot_.x(), final_target_baselink_rot_.y(), + final_target_baselink_rot_.z(), final_target_baselink_rot_.w()); + } + } } level_flag_ = true; @@ -127,7 +255,9 @@ void DragonNavigator::landingProcess() } } - if(curr_target_baselink_rot_.length()) already_level = false; + double r,p,y; + tf::Matrix3x3(curr_target_baselink_rot_).getRPY(r,p,y); + if(fabs(r) > 0.01 || fabs(p) > 0.01) already_level = false; if(already_level && getNaviState() == HOVER_STATE) { @@ -189,6 +319,14 @@ void DragonNavigator::reset() level_flag_ = false; landing_flag_ = false; + + // reset SO3 + eq_cog_world_ = false; + curr_target_baselink_rot_.setRPY(0, 0, 0); + final_target_baselink_rot_.setRPY(0, 0, 0); + KDL::Rotation rot; + tf::quaternionTFToKDL(curr_target_baselink_rot_, rot); + robot_model_->setCogDesireOrientation(rot); } diff --git 
a/robots/dragon/src/model/full_vectoring_robot_model.cpp b/robots/dragon/src/model/full_vectoring_robot_model.cpp index 936707245..2694773ee 100644 --- a/robots/dragon/src/model/full_vectoring_robot_model.cpp +++ b/robots/dragon/src/model/full_vectoring_robot_model.cpp @@ -45,6 +45,7 @@ namespace int rotor_num = model->getRotorNum(); std::vector link_rot = model->getLinksRotationFromCog(); std::vector gimbal_roll_pos = model->getGimbalRollOriginFromCog(); + Eigen::Matrix3d cog_rot = model->getCogDesireOrientation(); auto model_for_plan = model->getRobotModelForPlan(); std::vector rotor_pos = model_for_plan->getRotorsOriginFromCog(); @@ -59,9 +60,9 @@ there is a diffiretial chain about the roll angle. But we here approximate it to } const auto f_min_list = model->calcFeasibleControlFxyDists(roll_locked_gimbal, x, rotor_num, link_rot); - const auto t_min_list = model->calcFeasibleControlTDists(roll_locked_gimbal, x, rotor_num, rotor_pos, link_rot); + const auto t_min_list = model->calcFeasibleControlTDists(roll_locked_gimbal, x, rotor_num, rotor_pos, link_rot, cog_rot); - return model->getMinForceNormalizedWeight() * f_min_list.minCoeff() + model->getMinTorqueNormalizedWeight() * t_min_list.minCoeff(); + return model->getMinForceNormalizedWeight() * f_min_list.minCoeff() + model->getMinTorqueNormalizedWeight() * t_min_list.minCoeff(); } } @@ -81,6 +82,7 @@ FullVectoringRobotModel::FullVectoringRobotModel(bool init_with_rosparam, bool v roll_locked_gimbal_.resize(rotor_num, 0); gimbal_roll_origin_from_cog_.resize(rotor_num); setGimbalNominalAngles(std::vector(0)); // for online initialize + configuration_t_ = 0; robot_model_for_plan_ = boost::make_shared(); @@ -106,6 +108,8 @@ void FullVectoringRobotModel::getParamFromRos() nh.param("gimbal_roll_change_threshold", gimbal_roll_change_threshold_, 0.02); // rad/s nh.param("min_force_weight", min_force_weight_, 1.0); nh.param("min_torque_weight", min_torque_weight_, 1.0); + nh.param("confinguration_check_du", confinguration_check_du_, 1.0); + nh.param("fix_configuration_tresh", fix_configuration_tresh_, 0.02); } void FullVectoringRobotModel::updateRobotModelImpl(const KDL::JntArray& joint_positions) @@ -152,6 +156,8 @@ void FullVectoringRobotModel::updateRobotModelImpl(const KDL::JntArray& joint_po robot_model_for_plan_->updateRobotModel(gimbal_processed_joint); setGimbalNominalAngles(gimbal_nominal_angles); + + last_links_rotation_from_cog_ = links_rotation_from_cog; // for configuration fix check } /* 2. 
check the orientation (pitch angle) of link to decide whether to lock gimbal roll */ @@ -213,11 +219,14 @@ void FullVectoringRobotModel::updateRobotModelImpl(const KDL::JntArray& joint_po /* robust: deal with the vibration of the link orientation from joint angles and cog attitude, not update the gimbal roll lock angles so often */ for(int i = 0; i < getRotorNum(); i++) { - tf::Quaternion delta_q; + tf::Quaternion delta_q, prev_q; tf::quaternionKDLToTF(prev_links_rotation_from_cog_.at(i).Inverse() * links_rotation_from_cog.at(i), delta_q); + tf::quaternionKDLToTF(prev_links_rotation_from_cog_.at(i), prev_q); + tf::Vector3 rotation_axis = tf::Matrix3x3(prev_q) * delta_q.getAxis(); if(fabs(delta_q.getAngle()) > link_att_change_threshold_) { - ROS_INFO_STREAM_NAMED("robot_model", "link " << i + 1 << ": the orientation is change more than threshold: " << delta_q.getAngle()); + ROS_INFO_STREAM_NAMED("robot_model", "link " << i + 1 << ": the orientation is change more than threshold: " << delta_q.getAngle() + << "; the axis: [" << rotation_axis.x() << ", " << rotation_axis.y() << ", " << rotation_axis.z() << "]"); roll_lock_status_change = true; } } @@ -246,6 +255,31 @@ void FullVectoringRobotModel::updateRobotModelImpl(const KDL::JntArray& joint_po prev_roll_locked_gimbal_ = roll_locked_gimbal; prev_links_rotation_from_cog_ = links_rotation_from_cog; } + else + { + // periodic check (e.g. 1s) whether the robot configuration is fixed with locked gimbal + if (ros::Time::now().toSec() - configuration_t_ > confinguration_check_du_) + { + bool fix_configuration = true; + for(int i = 0; i < getRotorNum(); ++i) + { + tf::Quaternion delta_q; + tf::quaternionKDLToTF(last_links_rotation_from_cog_.at(i).Inverse() * links_rotation_from_cog.at(i), delta_q); + double delta = delta_q.getAngle(); + if(delta > M_PI) delta -= 2* M_PI; + + if(fabs(delta) > fix_configuration_tresh_) fix_configuration = false; + } + + last_links_rotation_from_cog_ = links_rotation_from_cog; + configuration_t_ = ros::Time::now().toSec(); + + if(fix_configuration) + prev_links_rotation_from_cog_ = links_rotation_from_cog; + + ROS_DEBUG("the robot configuration is fix? %s", fix_configuration?std::string("Yes").c_str():std::string("No").c_str()); + } + } } else { @@ -454,64 +488,73 @@ void FullVectoringRobotModel::updateRobotModelImpl(const KDL::JntArray& joint_po Eigen::VectorXd FullVectoringRobotModel::calcFeasibleControlFxyDists(const std::vector& roll_locked_gimbal, const std::vector& locked_angles, int rotor_num, const std::vector& link_rot) { /* only consider F_x and F_y */ + // Note: no need to consider 2DoF gimbals because unit circle. 
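+  // (A 2DoF gimbal can direct its horizontal thrust component anywhere on the unit
+  //  circle, so each unlocked rotor contributes a constant margin of 1 to every facet
+  //  of the feasible F_xy region regardless of the configuration; only the roll-locked
+  //  rotors, whose horizontal force is confined to a single axis, need to be
+  //  enumerated as direction vectors below.)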
std::vector u(0); - int gimbal_lock_index = 0; + int gimbal_lock_cnt = 0; for (int i = 0; i < rotor_num; ++i) { - if(roll_locked_gimbal.at(i) == 0) - { - // ominidirectional: 2DoF - u.push_back(Eigen::Vector2d(1, 0)); // from the tilted x force - u.push_back(Eigen::Vector2d(0, 1)); // from the tilted y force - } - else + if(roll_locked_gimbal.at(i) == 1) { // lock gimbal roll: 1DOF - Eigen::Vector3d gimbal_roll_z_axis = link_rot.at(i) * aerial_robot_model::kdlToEigen(KDL::Rotation::RPY(locked_angles.at(gimbal_lock_index), 0, 0)) * Eigen::Vector3d(0, 0, 1); + // Note: need to be describe in world frame, or the level cog frame + Eigen::Vector3d gimbal_roll_z_axis = link_rot.at(i) * aerial_robot_model::kdlToEigen(KDL::Rotation::RPY(locked_angles.at(gimbal_lock_cnt), 0, 0)) * Eigen::Vector3d(0, 0, 1); u.push_back(Eigen::Vector2d(gimbal_roll_z_axis(0), gimbal_roll_z_axis(1)).normalized()); - gimbal_lock_index++; + gimbal_lock_cnt++; } } Eigen::VectorXd f_min(u.size()); // f_min_i; i in [0, u.size()] + if (gimbal_lock_cnt == 1) + { + // no change due to the locked gimbal + f_min(0) = rotor_num - gimbal_lock_cnt; // because of three 2DoF gimbals + return f_min; + } + + + // only consider the convex composed from the locked gimbal for (int i = 0; i < u.size(); ++i) { - double f_min_ij = 0.0; + double f_min_ij = 0; for (int j = 0; j < u.size(); ++j) { if (i == j) continue; f_min_ij += fabs(u.at(i).x()*u.at(j).y() - u.at(j).x()*u.at(i).y()); // we omit the norm of u.at(i), since u is unit vector } - f_min(i) = f_min_ij; + f_min(i) = f_min_ij + (rotor_num - gimbal_lock_cnt); } return f_min; } -Eigen::VectorXd FullVectoringRobotModel::calcFeasibleControlTDists(const std::vector& roll_locked_gimbal, const std::vector& locked_angles, int rotor_num, const std::vector& rotor_pos, const std::vector& link_rot) + +Eigen::VectorXd FullVectoringRobotModel::calcFeasibleControlTDists(const std::vector& roll_locked_gimbal, const std::vector& locked_angles, int rotor_num, const std::vector& rotor_pos, const std::vector& link_rot, const Eigen::Matrix3d& cog_rot) { + // Note: calculate in baselink frame to remove the influence of desired rotation + // thus, only the joint angles affects the reuslt std::vector v(0); - int gimbal_lock_index = 0; + int gimbal_lock_cnt = 0; for (int i = 0; i < rotor_num; ++i) { + Eigen::Vector3d p = cog_rot.inverse() * rotor_pos.at(i); // w.r.t. baselink frame if(roll_locked_gimbal.at(i) == 0) { // ominidirectional: 3DoF - v.push_back(rotor_pos.at(i).cross(Eigen::Vector3d(1, 0, 0))); // from the tilted x force - v.push_back(rotor_pos.at(i).cross(Eigen::Vector3d(0, 1, 0))); // from the tilted y force - v.push_back(rotor_pos.at(i).cross(Eigen::Vector3d(0, 0, 1))); // from the z force + v.push_back(p.cross(Eigen::Vector3d(1, 0, 0))); // from the tilted x force + v.push_back(p.cross(Eigen::Vector3d(0, 1, 0))); // from the tilted y force + v.push_back(p.cross(Eigen::Vector3d(0, 0, 1))); // from the z force } else { // lock gimbal roll: 2DOF - Eigen::Matrix3d gimbal_roll_rot = link_rot.at(i) * aerial_robot_model::kdlToEigen(KDL::Rotation::RPY(locked_angles.at(gimbal_lock_index), 0, 0)); - v.push_back(rotor_pos.at(i).cross(gimbal_roll_rot * Eigen::Vector3d(1, 0, 0))); // from the x force - v.push_back(rotor_pos.at(i).cross(gimbal_roll_rot * Eigen::Vector3d(0, 0, 1))); // from the z force - gimbal_lock_index++; + Eigen::Matrix3d gimbal_roll_rot = cog_rot.inverse() * link_rot.at(i) * aerial_robot_model::kdlToEigen(KDL::Rotation::RPY(locked_angles.at(gimbal_lock_cnt), 0, 0)); // w.r.t. 
baselink frame, but not cog frame, to keep the invariance against the desired rotation of CoG frame. + v.push_back(p.cross(gimbal_roll_rot * Eigen::Vector3d(1, 0, 0))); // from the x force + v.push_back(p.cross(gimbal_roll_rot * Eigen::Vector3d(0, 0, 1))); // from the z force + gimbal_lock_cnt++; } } @@ -549,7 +592,6 @@ Eigen::VectorXd FullVectoringRobotModel::calcFeasibleControlTDists(const std::ve return t_min; } - std::vector FullVectoringRobotModel::calcBestLockGimbalRoll(const std::vector& roll_locked_gimbal, const std::vector& prev_roll_locked_gimbal, const std::vector& prev_opt_locked_angles) { int rotor_num = getRotorNum(); @@ -612,7 +654,8 @@ there is a diffiretial chain about the roll angle. But we here approximate it to /* normalized the weight */ min_force_normalized_weight_ = min_force_weight_ / rotor_num; - double max_min_torque = calcFeasibleControlTDists(std::vector(rotor_num, 0), std::vector(), rotor_num, rotor_pos, link_rot).minCoeff(); + Eigen::Matrix3d cog_desire_rot = getCogDesireOrientation(); + double max_min_torque = calcFeasibleControlTDists(std::vector(rotor_num, 0), std::vector(), rotor_num, rotor_pos, link_rot, cog_desire_rot).minCoeff(); ROS_DEBUG_STREAM_NAMED("robot_model", "max_min_torque: " << max_min_torque); min_torque_normalized_weight_ = min_torque_weight_ / max_min_torque; @@ -626,10 +669,10 @@ there is a diffiretial chain about the roll angle. But we here approximate it to std::stringstream ss; for(auto angle: opt_locked_angles) ss << angle << ", "; - ROS_INFO_STREAM_NAMED("robot_model", "nlopt: locked angles: " << ss.str()); + ROS_INFO_STREAM_NAMED("robot_model", "nlopt: locked angles: [" << ss.str() << "]"); const auto f_min_list = calcFeasibleControlFxyDists(roll_locked_gimbal, opt_locked_angles, rotor_num, link_rot); - const auto t_min_list = calcFeasibleControlTDists(roll_locked_gimbal, opt_locked_angles, rotor_num, rotor_pos, link_rot); + const auto t_min_list = calcFeasibleControlTDists(roll_locked_gimbal, opt_locked_angles, rotor_num, rotor_pos, link_rot, cog_desire_rot); ROS_DEBUG_NAMED("robot_model", "opt F min: %f, opt T min: %f", f_min_list.minCoeff(), t_min_list.minCoeff()); @@ -644,7 +687,7 @@ there is a diffiretial chain about the roll angle. 
But we here approximate it to } const auto f_min_list = calcFeasibleControlFxyDists(roll_locked_gimbal, locked_angles, rotor_num, link_rot); - const auto t_min_list = calcFeasibleControlTDists(roll_locked_gimbal, locked_angles, rotor_num, rotor_pos, link_rot); + const auto t_min_list = calcFeasibleControlTDists(roll_locked_gimbal, locked_angles, rotor_num, rotor_pos, link_rot, cog_desire_rot); ROS_DEBUG_NAMED("robot_model", "F min: %f, T min: %f", f_min_list.minCoeff(), t_min_list.minCoeff()); return locked_angles; diff --git a/robots/dragon/src/sensor/imu.cpp b/robots/dragon/src/sensor/imu.cpp index 7006624f1..66eb95165 100644 --- a/robots/dragon/src/sensor/imu.cpp +++ b/robots/dragon/src/sensor/imu.cpp @@ -76,7 +76,7 @@ namespace sensor_plugin } acc_b_[i] = imu_msg->acc_data[i]; - euler_[i] = imu_msg->angles[i]; + wz_b_[i] = imu_msg->angles[i]; omega_[i] = imu_msg->gyro_data[i]; mag_[i] = imu_msg->mag_data[i]; } diff --git a/robots/dragon/test/dragon_control.test b/robots/dragon/test/dragon_control.test index e22d7d8af..01904fe13 100644 --- a/robots/dragon/test/dragon_control.test +++ b/robots/dragon/test/dragon_control.test @@ -60,7 +60,7 @@ init_joint_names: ['joint1_pitch', 'joint1_yaw', 'joint2_pitch', 'joint2_yaw', 'joint3_pitch', 'joint3_yaw'] init_joint_angles: [0, 1.57, 0, 1.57, 0, 1.57] tasks: - - command: "rostopic pub -1 /dragon/final_target_baselink_rot spinal/DesireCoord -- -1.5708 0.0 0.0 0" + - command: "rostopic pub -1 /dragon/final_target_baselink_rpy geometry_msgs/Vector3Stamped '{vector: {x: -1.5708, y: 0.0, z: 0.0}}'" threshold: [0.06, 0.06, 0.06] angle_threshold: 0.02 timeout: 15 From 07f9bc74d75f389cbf2a847176f1a1d88b688514 Mon Sep 17 00:00:00 2001 From: sugihara Date: Tue, 2 Apr 2024 15:08:58 +0900 Subject: [PATCH 08/14] Revert "[aerial_robot_estimation][mocap] bug fix for singularoty pose" This reverts commit 641b9114daf83481803fed59b80408d04185f241. 
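For reference, negating the XYZ-Euler angles of a rotation does not in general produce its inverse, since R = Rz(y) * Ry(p) * Rx(r) while R^-1 = Rx(-r) * Ry(-p) * Rz(-y) and the factors do not commute; the reverted hunk built the baselink-to-CoG rotation by exactly that negated-RPY composition. A minimal standalone check with plain tf types (arbitrary test angles, not part of any patch):

#include <cstdio>
#include <tf/transform_datatypes.h>

int main()
{
  double r = 0.3, p = -1.2, y = 0.8;              // arbitrary test angles
  tf::Matrix3x3 R;      R.setRPY(r, p, y);
  tf::Matrix3x3 R_neg;  R_neg.setRPY(-r, -p, -y); // "inverse" via negated Euler angles
  tf::Matrix3x3 residual = R_neg * R;             // identity only if R_neg == R^-1
  double dr, dp, dy;
  residual.getRPY(dr, dp, dy);
  printf("residual rpy: [%f, %f, %f]\n", dr, dp, dy);  // generally nonzero
  return 0;
}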
--- aerial_robot_estimation/src/sensor/mocap.cpp | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/aerial_robot_estimation/src/sensor/mocap.cpp b/aerial_robot_estimation/src/sensor/mocap.cpp index a457632fa..d487a26da 100644 --- a/aerial_robot_estimation/src/sensor/mocap.cpp +++ b/aerial_robot_estimation/src/sensor/mocap.cpp @@ -265,13 +265,8 @@ namespace sensor_plugin /* cog */ tf::Transform cog2baselink_tf; tf::transformKDLToTF(robot_model_->getCog2Baselink(), cog2baselink_tf); - double r, p, y; - cog2baselink_tf.getBasis().getRPY(r, p, y); - tf::Quaternion q_baselink2cog; - q_baselink2cog.setRPY(-r, -p, -y); - q = q_baselink2cog * q; - - estimator_->setOrientation(Frame::COG, aerial_robot_estimation::GROUND_TRUTH, tf::Matrix3x3(q)); + estimator_->setOrientation(Frame::COG, aerial_robot_estimation::GROUND_TRUTH, + tf::Matrix3x3(q) * cog2baselink_tf.getBasis().inverse()); estimator_->setAngularVel(Frame::COG, aerial_robot_estimation::GROUND_TRUTH, cog2baselink_tf.getBasis() * omega); // TODO: check the vibration From 778b509d953214e6aef906e07a51daf24e5a5882 Mon Sep 17 00:00:00 2001 From: sugihara Date: Tue, 2 Apr 2024 15:13:32 +0900 Subject: [PATCH 09/14] [aerial_robot_estimation][imu] bug fix for singularity pose: yaw angle from groundtruth --- aerial_robot_estimation/src/sensor/imu.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aerial_robot_estimation/src/sensor/imu.cpp b/aerial_robot_estimation/src/sensor/imu.cpp index 3a61abf12..c50a38b6f 100644 --- a/aerial_robot_estimation/src/sensor/imu.cpp +++ b/aerial_robot_estimation/src/sensor/imu.cpp @@ -161,7 +161,7 @@ namespace sensor_plugin // 2. experiment estimate mode if(estimator_->getStateStatus(State::CoG::Rot, aerial_robot_estimation::GROUND_TRUTH)) { - tf::Matrix3x3 gt_cog_rot = estimator_->getOrientation(Frame::COG, aerial_robot_estimation::GROUND_TRUTH); + tf::Matrix3x3 gt_cog_rot = estimator_->getOrientation(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH) * cog2baselink_tf.getBasis().inverse(); double r,p,y; gt_cog_rot.getRPY(r, p, y); cog_euler[2] = y; From 7b8a8cc5857f7b338b0f6ce891a7f2ceaa9ab2f5 Mon Sep 17 00:00:00 2001 From: sugikazu75 Date: Mon, 14 Oct 2024 20:46:12 +0900 Subject: [PATCH 10/14] [spinal] publish estimated mag data instead of raw mag data --- .../lib/Jsk_Lib/state_estimate/attitude/attitude_estimate.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aerial_robot_nerve/spinal/mcu_project/lib/Jsk_Lib/state_estimate/attitude/attitude_estimate.h b/aerial_robot_nerve/spinal/mcu_project/lib/Jsk_Lib/state_estimate/attitude/attitude_estimate.h index 5993df3ba..de0f31d9a 100644 --- a/aerial_robot_nerve/spinal/mcu_project/lib/Jsk_Lib/state_estimate/attitude/attitude_estimate.h +++ b/aerial_robot_nerve/spinal/mcu_project/lib/Jsk_Lib/state_estimate/attitude/attitude_estimate.h @@ -192,7 +192,7 @@ class AttitudeEstimate #endif for(int i = 0; i < 3 ; i ++) { - imu_msg_.mag_data[i] = estimator_->getMag(Frame::BODY)[i]; + imu_msg_.mag_data[i] = estimator_->getEstM(Frame::BODY)[i]; imu_msg_.acc_data[i] = estimator_->getAcc(Frame::BODY)[i]; imu_msg_.gyro_data[i] = estimator_->getAngular(Frame::BODY)[i]; #if ESTIMATE_TYPE == COMPLEMENTARY From 013e7565827fcf2700824c216c6d6045c441100e Mon Sep 17 00:00:00 2001 From: sugikazu75 Date: Mon, 14 Oct 2024 21:01:23 +0900 Subject: [PATCH 11/14] [aerial_robot_estimation][imu] calculate baselink rotation from imu and magnetmeter and update it with VO/mocap --- .../aerial_robot_estimation/sensor/imu.h | 2 +- 
aerial_robot_estimation/src/sensor/imu.cpp | 62 ++++++++++++------- 2 files changed, 39 insertions(+), 25 deletions(-) diff --git a/aerial_robot_estimation/include/aerial_robot_estimation/sensor/imu.h b/aerial_robot_estimation/include/aerial_robot_estimation/sensor/imu.h index 3f2db495b..a59e61599 100644 --- a/aerial_robot_estimation/include/aerial_robot_estimation/sensor/imu.h +++ b/aerial_robot_estimation/include/aerial_robot_estimation/sensor/imu.h @@ -77,7 +77,7 @@ namespace sensor_plugin double sensor_dt_; /* imu */ - tf::Vector3 wz_b_; /* unit z axis of world frame in baselink frame */ + tf::Vector3 g_b_; /* the *opposite* gravity vector in baselink frame */ tf::Vector3 omega_; /* the omega both of body frame */ tf::Vector3 mag_; /* the magnetometer of body frame */ /* acc */ diff --git a/aerial_robot_estimation/src/sensor/imu.cpp b/aerial_robot_estimation/src/sensor/imu.cpp index c50a38b6f..2e8fd9fb8 100644 --- a/aerial_robot_estimation/src/sensor/imu.cpp +++ b/aerial_robot_estimation/src/sensor/imu.cpp @@ -48,7 +48,7 @@ namespace sensor_plugin sensor_plugin::SensorBase(), calib_count_(200), acc_b_(0, 0, 0), - wz_b_(0, 0, 0), + g_b_(0, 0, 0), omega_(0, 0, 0), mag_(0, 0, 0), acc_bias_b_(0, 0, 0), @@ -102,7 +102,7 @@ namespace sensor_plugin acc_b_[i] = imu_msg->acc_data[i]; // baselink frame omega_[i] = imu_msg->gyro_data[i]; // baselink frame mag_[i] = imu_msg->mag_data[i]; // baselink frame - wz_b_[i] = imu_msg->angles[i]; // workaround to avoid the singularity of RPY Euler angles. + g_b_[i] = imu_msg->angles[i]; // workaround to avoid the singularity of RPY Euler angles. } estimateProcess(); @@ -124,11 +124,11 @@ namespace sensor_plugin tf::Transform cog2baselink_tf; tf::transformKDLToTF(robot_model_->getCog2Baselink(), cog2baselink_tf); - // convert to CoG frame, since in the current system, CoG frame is never being sigular due to RPY euler - tf::Vector3 wz_c = cog2baselink_tf.getBasis() * wz_b_; - tf::Vector3 cog_euler; - cog_euler[0] = atan2(wz_c.y() , wz_c.z()); - cog_euler[1] = atan2(-wz_c.x() , sqrt(wz_c.y() * wz_c.y() + wz_c.z() * wz_c.z())); + tf::Vector3 wz_b = g_b_.normalize(); + + tf::Vector3 mag = mag_.normalize(); + tf::Vector3 wx_b = mag.cross(wz_b); + wx_b.normalize(); // 1. egomotion estimate mode // check whether have valid rotation from VO sensor @@ -140,20 +140,26 @@ namespace sensor_plugin if (vo_handler->rotValid()) { - tf::Matrix3x3 cog_rot_by_vo = vo_handler->getRawBaselinkTF().getBasis() * cog2baselink_tf.getBasis().inverse(); - // we replace the yaw angle from the spinal with the value from VO. - double r,p,y; - cog_rot_by_vo.getRPY(r, p, y); - cog_euler[2] = y; + tf::Matrix3x3 vo_rot = vo_handler->getRawBaselinkTF().getBasis(); + // we replace the wx_b with the value from VO. 
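+              // vo_rot is the baselink orientation in the world frame, so its transpose
+              // maps world-frame vectors into baselink coordinates: the line below takes
+              // the world x-axis expressed in the body frame. Only the heading is thus
+              // taken from VO, while the tilt still comes from the gravity direction
+              // wz_b estimated on the spinal side.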
+ wx_b = vo_rot.transpose() * tf::Vector3(1,0,0); break; } } } - cog_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE).setRPY(cog_euler[0], cog_euler[1], cog_euler[2]); - estimator_->setOrientation(Frame::COG, aerial_robot_estimation::EGOMOTION_ESTIMATE, cog_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE)); - base_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE) = cog_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE) * cog2baselink_tf.getBasis(); - estimator_->setOrientation(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE, base_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE)); + tf::Vector3 wy_b = wz_b.cross(wx_b); + wy_b.normalize(); + tf::Matrix3x3 rot(wx_b.x(), wx_b.y(), wx_b.z(), + wy_b.x(), wy_b.y(), wy_b.z(), + wz_b.x(), wz_b.y(), wz_b.z()); + + base_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE) = rot; + estimator_->setOrientation(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE, rot); + + tf::Matrix3x3 rot_c = rot * cog2baselink_tf.getBasis().transpose(); + cog_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE) = rot_c; + estimator_->setOrientation(Frame::COG, aerial_robot_estimation::EGOMOTION_ESTIMATE, rot_c); estimator_->setAngularVel(Frame::COG, aerial_robot_estimation::EGOMOTION_ESTIMATE, cog2baselink_tf.getBasis() * omega_); estimator_->setAngularVel(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE, omega_); @@ -161,15 +167,23 @@ namespace sensor_plugin // 2. experiment estimate mode if(estimator_->getStateStatus(State::CoG::Rot, aerial_robot_estimation::GROUND_TRUTH)) { - tf::Matrix3x3 gt_cog_rot = estimator_->getOrientation(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH) * cog2baselink_tf.getBasis().inverse(); - double r,p,y; - gt_cog_rot.getRPY(r, p, y); - cog_euler[2] = y; + tf::Matrix3x3 gt_rot = estimator_->getOrientation(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH); + // we replace the wx_b with the value from ground truth. 
+ wx_b = gt_rot.transpose() * tf::Vector3(1,0,0); } - cog_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE).setRPY(cog_euler[0], cog_euler[1], cog_euler[2]); - estimator_->setOrientation(Frame::COG, aerial_robot_estimation::EXPERIMENT_ESTIMATE, cog_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE)); - base_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE) = cog_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE) * cog2baselink_tf.getBasis(); - estimator_->setOrientation(Frame::BASELINK, aerial_robot_estimation::EXPERIMENT_ESTIMATE, base_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE)); + + wy_b = wz_b.cross(wx_b); + wy_b.normalize(); + rot = tf::Matrix3x3(wx_b.x(), wx_b.y(), wx_b.z(), + wy_b.x(), wy_b.y(), wy_b.z(), + wz_b.x(), wz_b.y(), wz_b.z()); + + base_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE) = rot; + estimator_->setOrientation(Frame::BASELINK, aerial_robot_estimation::EXPERIMENT_ESTIMATE, rot); + + rot_c = rot * cog2baselink_tf.getBasis().transpose(); + cog_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE) = rot_c; + estimator_->setOrientation(Frame::COG, aerial_robot_estimation::EXPERIMENT_ESTIMATE, rot_c); estimator_->setAngularVel(Frame::COG, aerial_robot_estimation::EXPERIMENT_ESTIMATE, cog2baselink_tf.getBasis() * omega_); estimator_->setAngularVel(Frame::BASELINK, aerial_robot_estimation::EXPERIMENT_ESTIMATE, omega_); From af60251f67985b83ebb912eb201a8d72459fdbfc Mon Sep 17 00:00:00 2001 From: sugikazu75 Date: Mon, 14 Oct 2024 21:07:27 +0900 Subject: [PATCH 12/14] [aerial_robot_estimation][vo] fix initial vo sensor pose calculation method. use actual baselink orientation for coordinate transformation. --- aerial_robot_estimation/src/sensor/vo.cpp | 31 ++++++++++------------- 1 file changed, 13 insertions(+), 18 deletions(-) diff --git a/aerial_robot_estimation/src/sensor/vo.cpp b/aerial_robot_estimation/src/sensor/vo.cpp index 31aae0426..9e2b2e585 100644 --- a/aerial_robot_estimation/src/sensor/vo.cpp +++ b/aerial_robot_estimation/src/sensor/vo.cpp @@ -237,37 +237,32 @@ namespace sensor_plugin } } - /** step1: ^{w}H_{b'}, b': level frame of b **/ - tf::Transform w_bdash_f; + /** step1: ^{w}H_{b} **/ + tf::Transform w_b_f; tf::Matrix3x3 base_rot = estimator_->getOrientation(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE); - double r,p,y; - base_rot.getRPY(r, p, y); // we assume the orientation of baselink at the initial phase should not be entire vertical - w_bdash_f.setRotation(tf::createQuaternionFromYaw(y)); + w_b_f.setBasis(base_rot); tf::Vector3 baselink_pos = estimator_->getPos(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE); if(estimator_->getStateStatus(State::X_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE)) - w_bdash_f.getOrigin().setX(baselink_pos.x()); + w_b_f.getOrigin().setX(baselink_pos.x()); if(estimator_->getStateStatus(State::Y_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE)) - w_bdash_f.getOrigin().setY(baselink_pos.y()); + w_b_f.getOrigin().setY(baselink_pos.y()); if(estimator_->getStateStatus(State::Z_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE)) - w_bdash_f.getOrigin().setZ(baselink_pos.z()); + w_b_f.getOrigin().setZ(baselink_pos.z()); /* set the offset if we know the ground truth */ if(estimator_->getStateStatus(State::Base::Rot, aerial_robot_estimation::GROUND_TRUTH)) { - w_bdash_f.setOrigin(estimator_->getPos(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH)); + w_b_f.setOrigin(estimator_->getPos(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH)); base_rot = 
estimator_->getOrientation(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH); - base_rot.getRPY(r, p, y); - w_bdash_f.setRotation(tf::createQuaternionFromYaw(y)); + w_b_f.setBasis(base_rot); } - /** step2: ^{vo}H_{b'} **/ - tf::Transform vo_bdash_f = raw_sensor_tf * sensor_tf_.inverse(); // ^{vo}H_{b} - vo_bdash_f.getBasis().getRPY(r,p,y); - vo_bdash_f.setRotation(tf::createQuaternionFromYaw(y)); // ^{vo}H_{b'} + /** step2: ^{vo}H_{b} **/ + tf::Transform vo_b_f = raw_sensor_tf * sensor_tf_.inverse(); // ^{vo}H_{b} - /** step3: ^{w}H_{vo} = ^{w}H_{b'} * ^{b'}H_{vo} **/ - world_offset_tf_ = w_bdash_f * vo_bdash_f.inverse(); + /** step3: ^{w}H_{vo} = ^{w}H_{b} * ^{b}H_{vo} **/ + world_offset_tf_ = w_b_f * vo_b_f.inverse(); /* publish the offset tf if necessary */ geometry_msgs::TransformStamped static_transformStamped; @@ -277,7 +272,7 @@ namespace sensor_plugin tf::transformTFToMsg(world_offset_tf_, static_transformStamped.transform); static_broadcaster_.sendTransform(static_transformStamped); - tf::Vector3 init_pos = w_bdash_f.getOrigin(); + tf::Vector3 init_pos = w_b_f.getOrigin(); for(auto& fuser : estimator_->getFuser(aerial_robot_estimation::EGOMOTION_ESTIMATE)) From c0d532b8ad58c5e9a69f70e0a4cf1ecf49363c16 Mon Sep 17 00:00:00 2001 From: sugikazu75 Date: Mon, 14 Oct 2024 21:39:53 +0900 Subject: [PATCH 13/14] [dragon][imu] fix bug: changed protected variable name --- robots/dragon/src/sensor/imu.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/robots/dragon/src/sensor/imu.cpp b/robots/dragon/src/sensor/imu.cpp index 66eb95165..ce1428638 100644 --- a/robots/dragon/src/sensor/imu.cpp +++ b/robots/dragon/src/sensor/imu.cpp @@ -76,7 +76,7 @@ namespace sensor_plugin } acc_b_[i] = imu_msg->acc_data[i]; - wz_b_[i] = imu_msg->angles[i]; + g_b_[i] = imu_msg->angles[i]; omega_[i] = imu_msg->gyro_data[i]; mag_[i] = imu_msg->mag_data[i]; } From 47e6c27150ea0d87facfd0f49548196e42972d77 Mon Sep 17 00:00:00 2001 From: sugikazu75 Date: Fri, 18 Oct 2024 20:40:55 +0900 Subject: [PATCH 14/14] [aerial_robot_estimation] normalize quaternions before converting to message to suppress warning message --- aerial_robot_estimation/src/sensor/imu.cpp | 1 + aerial_robot_estimation/src/state_estimation.cpp | 2 ++ 2 files changed, 3 insertions(+) diff --git a/aerial_robot_estimation/src/sensor/imu.cpp b/aerial_robot_estimation/src/sensor/imu.cpp index 2e8fd9fb8..3de56673a 100644 --- a/aerial_robot_estimation/src/sensor/imu.cpp +++ b/aerial_robot_estimation/src/sensor/imu.cpp @@ -441,6 +441,7 @@ namespace sensor_plugin imu_data.header.stamp = imu_stamp_; tf::Quaternion q; base_rot_.at(0).getRotation(q); + q.normalize(); tf::quaternionTFToMsg(q, imu_data.orientation); tf::vector3TFToMsg(omega_, imu_data.angular_velocity); tf::vector3TFToMsg(acc_b_, imu_data.linear_acceleration); diff --git a/aerial_robot_estimation/src/state_estimation.cpp b/aerial_robot_estimation/src/state_estimation.cpp index 162e20dbf..bc89a7130 100644 --- a/aerial_robot_estimation/src/state_estimation.cpp +++ b/aerial_robot_estimation/src/state_estimation.cpp @@ -142,6 +142,7 @@ void StateEstimator::statePublish(const ros::TimerEvent & e) /* Baselink */ /* Rotation */ tf::Quaternion q; getOrientation(Frame::BASELINK, estimate_mode_).getRotation(q); + q.normalize(); tf::quaternionTFToMsg(q, odom_state.pose.pose.orientation); tf::vector3TFToMsg(getAngularVel(Frame::BASELINK, estimate_mode_), odom_state.twist.twist.angular); @@ -171,6 +172,7 @@ void StateEstimator::statePublish(const ros::TimerEvent & e) /* COG */ /* 
Rotation */ getOrientation(Frame::COG, estimate_mode_).getRotation(q); + q.normalize(); tf::quaternionTFToMsg(q, odom_state.pose.pose.orientation); tf::vector3TFToMsg(getAngularVel(Frame::COG, estimate_mode_), odom_state.twist.twist.angular); /* Translation */
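Taken together, patches 01 and 11 replace the Euler-angle pipeline with an orientation assembled directly from the estimated gravity vector and the magnetometer. A condensed standalone sketch of that construction with plain tf types (the helper name is illustrative; the inputs are assumed to be already expressed in the baselink frame, with g_b pointing along the world z-axis as published by the spinal side):

#include <tf/transform_datatypes.h>

// Sketch only: mirrors the orthonormal-basis construction added to
// aerial_robot_estimation/src/sensor/imu.cpp, without the VO/mocap heading override.
tf::Matrix3x3 rotFromGravityAndMag(tf::Vector3 g_b, tf::Vector3 mag_b)
{
  tf::Vector3 wz_b = g_b.normalized();              // world z-axis seen from the body
  tf::Vector3 wx_b = mag_b.normalized().cross(wz_b);
  wx_b.normalize();                                 // heading reference (not true north)
  tf::Vector3 wy_b = wz_b.cross(wx_b);
  wy_b.normalize();

  // Rows are the world axes expressed in the body frame, so the matrix maps
  // body-frame vectors into the world frame (^w R_b), free of any RPY singularity.
  return tf::Matrix3x3(wx_b.x(), wx_b.y(), wx_b.z(),
                       wy_b.x(), wy_b.y(), wy_b.z(),
                       wz_b.x(), wz_b.y(), wz_b.z());
}

Patch 14 then normalizes the quaternion extracted from this matrix before it is converted to a ROS message, which suppresses the non-unit-quaternion warnings.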