git commit -m "first commit for v2"

This commit is contained in:
2025-12-29 16:21:22 +07:00
commit aa3d832d5c
1807 changed files with 307078 additions and 0 deletions

View File

@@ -0,0 +1,389 @@
/*
* Copyright (c) 2014, 2015, 2016, Charles River Analytics, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "robot_localization/ekf.h"
#include "robot_localization/filter_common.h"
#include <XmlRpcException.h>
#include <iomanip>
#include <limits>
#include <sstream>
#include <vector>
namespace RobotLocalization
{
//! @brief Standard EKF constructor.
//!
//! The std::vector<double> argument is accepted for interface compatibility
//! with other filter types but is unused here; all real initialization
//! happens in the FilterBase constructor (which calls reset()).
Ekf::Ekf(std::vector<double>) :
FilterBase() // Must initialize filter base!
{
}
//! @brief Destructor. Nothing to tear down beyond what FilterBase releases.
Ekf::~Ekf()
{
}
//! @brief EKF correction step: fuses one measurement into the filter state.
//!
//! Only the state members flagged in measurement.updateVector_ (and whose
//! measured values are finite) are corrected. The method builds reduced
//! "subset" vectors/matrices for those members, computes the Kalman gain,
//! gates the innovation with a Mahalanobis distance test, and, when the
//! test passes, applies the state update and a Joseph-form covariance
//! update for numerical stability.
//!
//! @param[in] measurement - Measurement to fuse (values, covariance,
//! update vector, topic name, and Mahalanobis threshold).
void Ekf::correct(const Measurement &measurement)
{
FB_DEBUG("---------------------- Ekf::correct ----------------------\n" <<
"State is:\n" << state_ << "\n"
"Topic is:\n" << measurement.topicName_ << "\n"
"Measurement is:\n" << measurement.measurement_ << "\n"
"Measurement topic name is:\n" << measurement.topicName_ << "\n\n"
"Measurement covariance is:\n" << measurement.covariance_ << "\n");
// We don't want to update everything, so we need to build matrices that only update
// the measured parts of our state vector. Throughout prediction and correction, we
// attempt to maximize efficiency in Eigen.
// First, determine how many state vector values we're updating
std::vector<size_t> updateIndices;
for (size_t i = 0; i < measurement.updateVector_.size(); ++i)
{
if (measurement.updateVector_[i])
{
// Handle nan and inf values in measurements
if (std::isnan(measurement.measurement_(i)))
{
FB_DEBUG("Value at index " << i << " was nan. Excluding from update.\n");
}
else if (std::isinf(measurement.measurement_(i)))
{
FB_DEBUG("Value at index " << i << " was inf. Excluding from update.\n");
}
else
{
updateIndices.push_back(i);
}
}
}
FB_DEBUG("Update indices are:\n" << updateIndices << "\n");
size_t updateSize = updateIndices.size();
// Now set up the relevant matrices
Eigen::VectorXd stateSubset(updateSize); // x (in most literature)
Eigen::VectorXd measurementSubset(updateSize); // z
Eigen::MatrixXd measurementCovarianceSubset(updateSize, updateSize); // R
Eigen::MatrixXd stateToMeasurementSubset(updateSize, state_.rows()); // H
Eigen::MatrixXd kalmanGainSubset(state_.rows(), updateSize); // K
Eigen::VectorXd innovationSubset(updateSize); // z - Hx
stateSubset.setZero();
measurementSubset.setZero();
measurementCovarianceSubset.setZero();
stateToMeasurementSubset.setZero();
kalmanGainSubset.setZero();
innovationSubset.setZero();
// Now build the sub-matrices from the full-sized matrices
for (size_t i = 0; i < updateSize; ++i)
{
measurementSubset(i) = measurement.measurement_(updateIndices[i]);
stateSubset(i) = state_(updateIndices[i]);
for (size_t j = 0; j < updateSize; ++j)
{
measurementCovarianceSubset(i, j) = measurement.covariance_(updateIndices[i], updateIndices[j]);
}
// Handle negative (read: bad) covariances in the measurement. Rather
// than exclude the measurement or make up a covariance, just take
// the absolute value.
if (measurementCovarianceSubset(i, i) < 0)
{
FB_DEBUG("WARNING: Negative covariance for index " << i <<
" of measurement (value is" << measurementCovarianceSubset(i, i) <<
"). Using absolute value...\n");
measurementCovarianceSubset(i, i) = ::fabs(measurementCovarianceSubset(i, i));
}
// If the measurement variance for a given variable is very
// near 0 (as in e-50 or so) and the variance for that
// variable in the covariance matrix is also near zero, then
// the Kalman gain computation will blow up. Really, no
// measurement can be completely without error, so add a small
// amount in that case.
if (measurementCovarianceSubset(i, i) < 1e-9)
{
FB_DEBUG("WARNING: measurement had very small error covariance for index " << updateIndices[i] <<
". Adding some noise to maintain filter stability.\n");
measurementCovarianceSubset(i, i) = 1e-9;
}
}
// The state-to-measurement function, h, will now be a measurement_size x full_state_size
// matrix, with ones in the (i, i) locations of the values to be updated
for (size_t i = 0; i < updateSize; ++i)
{
stateToMeasurementSubset(i, updateIndices[i]) = 1;
}
FB_DEBUG("Current state subset is:\n" << stateSubset <<
"\nMeasurement subset is:\n" << measurementSubset <<
"\nMeasurement covariance subset is:\n" << measurementCovarianceSubset <<
"\nState-to-measurement subset is:\n" << stateToMeasurementSubset << "\n");
// (1) Compute the Kalman gain: K = (PH') / (HPH' + R)
Eigen::MatrixXd pht = estimateErrorCovariance_ * stateToMeasurementSubset.transpose();
// hphrInv is the inverse of the innovation covariance (HPH' + R); it is
// reused by the Mahalanobis distance gate below.
Eigen::MatrixXd hphrInv = (stateToMeasurementSubset * pht + measurementCovarianceSubset).inverse();
kalmanGainSubset.noalias() = pht * hphrInv;
innovationSubset = (measurementSubset - stateSubset);
// Wrap angles in the innovation
for (size_t i = 0; i < updateSize; ++i)
{
if (updateIndices[i] == StateMemberRoll ||
updateIndices[i] == StateMemberPitch ||
updateIndices[i] == StateMemberYaw)
{
// Normalize angular innovations into [-PI, PI] so the correction takes
// the short way around the circle.
while (innovationSubset(i) < -PI)
{
innovationSubset(i) += TAU;
}
while (innovationSubset(i) > PI)
{
innovationSubset(i) -= TAU;
}
}
}
// (2) Check Mahalanobis distance between mapped measurement and state.
if (checkMahalanobisThreshold(innovationSubset, hphrInv, measurement.mahalanobisThresh_))
{
// (3) Apply the gain to the difference between the state and measurement: x = x + K(z - Hx)
state_.noalias() += kalmanGainSubset * innovationSubset;
// (4) Update the estimate error covariance using the Joseph form: (I - KH)P(I - KH)' + KRK'
Eigen::MatrixXd gainResidual = identity_;
gainResidual.noalias() -= kalmanGainSubset * stateToMeasurementSubset;
estimateErrorCovariance_ = gainResidual * estimateErrorCovariance_ * gainResidual.transpose();
estimateErrorCovariance_.noalias() += kalmanGainSubset *
measurementCovarianceSubset *
kalmanGainSubset.transpose();
// Handle wrapping of angles
wrapStateAngles();
FB_DEBUG("Kalman gain subset is:\n" << kalmanGainSubset <<
"\nInnovation is:\n" << innovationSubset <<
"\nCorrected full state is:\n" << state_ <<
"\nCorrected full estimate error covariance is:\n" << estimateErrorCovariance_ <<
"\n\n---------------------- /Ekf::correct ----------------------\n");
}
}
//! @brief EKF prediction step: projects the state and error covariance
//! forward by delta seconds.
//!
//! The motion model rotates body-frame velocities/accelerations into the
//! world frame using the current roll/pitch/yaw, applies constant-
//! acceleration kinematics, and propagates the covariance through the
//! analytically-derived Jacobian of that transfer function.
//!
//! @param[in] referenceTime - Time at which prediction is requested; passed
//! to prepareControl() for control-timeout checks.
//! @param[in] delta - Prediction interval, in seconds.
void Ekf::predict(const double referenceTime, const double delta)
{
FB_DEBUG("---------------------- Ekf::predict ----------------------\n" <<
"delta is " << delta << "\n" <<
"state is " << state_ << "\n");
double roll = state_(StateMemberRoll);
double pitch = state_(StateMemberPitch);
double yaw = state_(StateMemberYaw);
double xVel = state_(StateMemberVx);
double yVel = state_(StateMemberVy);
double zVel = state_(StateMemberVz);
double pitchVel = state_(StateMemberVpitch);
double yawVel = state_(StateMemberVyaw);
double xAcc = state_(StateMemberAx);
double yAcc = state_(StateMemberAy);
double zAcc = state_(StateMemberAz);
// We'll need these trig calculations a lot.
// Shorthand: s=sin, c=cos, t=tan; r/p/y = roll/pitch/yaw; cpi = 1/cos(pitch).
double sp = ::sin(pitch);
double cp = ::cos(pitch);
// NOTE(review): unguarded division — cp == 0 (pitch at +/-90 deg, gimbal
// lock) makes cpi and tp blow up; confirm callers avoid this regime.
double cpi = 1.0 / cp;
double tp = sp * cpi;
double sr = ::sin(roll);
double cr = ::cos(roll);
double sy = ::sin(yaw);
double cy = ::cos(yaw);
prepareControl(referenceTime, delta);
// Prepare the transfer function
transferFunction_(StateMemberX, StateMemberVx) = cy * cp * delta;
transferFunction_(StateMemberX, StateMemberVy) = (cy * sp * sr - sy * cr) * delta;
transferFunction_(StateMemberX, StateMemberVz) = (cy * sp * cr + sy * sr) * delta;
transferFunction_(StateMemberX, StateMemberAx) = 0.5 * transferFunction_(StateMemberX, StateMemberVx) * delta;
transferFunction_(StateMemberX, StateMemberAy) = 0.5 * transferFunction_(StateMemberX, StateMemberVy) * delta;
transferFunction_(StateMemberX, StateMemberAz) = 0.5 * transferFunction_(StateMemberX, StateMemberVz) * delta;
transferFunction_(StateMemberY, StateMemberVx) = sy * cp * delta;
transferFunction_(StateMemberY, StateMemberVy) = (sy * sp * sr + cy * cr) * delta;
transferFunction_(StateMemberY, StateMemberVz) = (sy * sp * cr - cy * sr) * delta;
transferFunction_(StateMemberY, StateMemberAx) = 0.5 * transferFunction_(StateMemberY, StateMemberVx) * delta;
transferFunction_(StateMemberY, StateMemberAy) = 0.5 * transferFunction_(StateMemberY, StateMemberVy) * delta;
transferFunction_(StateMemberY, StateMemberAz) = 0.5 * transferFunction_(StateMemberY, StateMemberVz) * delta;
transferFunction_(StateMemberZ, StateMemberVx) = -sp * delta;
transferFunction_(StateMemberZ, StateMemberVy) = cp * sr * delta;
transferFunction_(StateMemberZ, StateMemberVz) = cp * cr * delta;
transferFunction_(StateMemberZ, StateMemberAx) = 0.5 * transferFunction_(StateMemberZ, StateMemberVx) * delta;
transferFunction_(StateMemberZ, StateMemberAy) = 0.5 * transferFunction_(StateMemberZ, StateMemberVy) * delta;
transferFunction_(StateMemberZ, StateMemberAz) = 0.5 * transferFunction_(StateMemberZ, StateMemberVz) * delta;
transferFunction_(StateMemberRoll, StateMemberVroll) = delta;
transferFunction_(StateMemberRoll, StateMemberVpitch) = sr * tp * delta;
transferFunction_(StateMemberRoll, StateMemberVyaw) = cr * tp * delta;
transferFunction_(StateMemberPitch, StateMemberVpitch) = cr * delta;
transferFunction_(StateMemberPitch, StateMemberVyaw) = -sr * delta;
transferFunction_(StateMemberYaw, StateMemberVpitch) = sr * cpi * delta;
transferFunction_(StateMemberYaw, StateMemberVyaw) = cr * cpi * delta;
transferFunction_(StateMemberVx, StateMemberAx) = delta;
transferFunction_(StateMemberVy, StateMemberAy) = delta;
transferFunction_(StateMemberVz, StateMemberAz) = delta;
// Prepare the transfer function Jacobian. This function is analytically derived from the
// transfer function.
// Naming: dFx_dR is the partial derivative of the predicted x w.r.t. roll,
// dFR_dP is the partial of predicted roll w.r.t. pitch, and so on.
double xCoeff = 0.0;
double yCoeff = 0.0;
double zCoeff = 0.0;
double oneHalfATSquared = 0.5 * delta * delta;
yCoeff = cy * sp * cr + sy * sr;
zCoeff = -cy * sp * sr + sy * cr;
double dFx_dR = (yCoeff * yVel + zCoeff * zVel) * delta +
(yCoeff * yAcc + zCoeff * zAcc) * oneHalfATSquared;
double dFR_dR = 1.0 + (cr * tp * pitchVel - sr * tp * yawVel) * delta;
xCoeff = -cy * sp;
yCoeff = cy * cp * sr;
zCoeff = cy * cp * cr;
double dFx_dP = (xCoeff * xVel + yCoeff * yVel + zCoeff * zVel) * delta +
(xCoeff * xAcc + yCoeff * yAcc + zCoeff * zAcc) * oneHalfATSquared;
double dFR_dP = (cpi * cpi * sr * pitchVel + cpi * cpi * cr * yawVel) * delta;
xCoeff = -sy * cp;
yCoeff = -sy * sp * sr - cy * cr;
zCoeff = -sy * sp * cr + cy * sr;
double dFx_dY = (xCoeff * xVel + yCoeff * yVel + zCoeff * zVel) * delta +
(xCoeff * xAcc + yCoeff * yAcc + zCoeff * zAcc) * oneHalfATSquared;
yCoeff = sy * sp * cr - cy * sr;
zCoeff = -sy * sp * sr - cy * cr;
double dFy_dR = (yCoeff * yVel + zCoeff * zVel) * delta +
(yCoeff * yAcc + zCoeff * zAcc) * oneHalfATSquared;
double dFP_dR = (-sr * pitchVel - cr * yawVel) * delta;
xCoeff = -sy * sp;
yCoeff = sy * cp * sr;
zCoeff = sy * cp * cr;
double dFy_dP = (xCoeff * xVel + yCoeff * yVel + zCoeff * zVel) * delta +
(xCoeff * xAcc + yCoeff * yAcc + zCoeff * zAcc) * oneHalfATSquared;
xCoeff = cy * cp;
yCoeff = cy * sp * sr - sy * cr;
zCoeff = cy * sp * cr + sy * sr;
double dFy_dY = (xCoeff * xVel + yCoeff * yVel + zCoeff * zVel) * delta +
(xCoeff * xAcc + yCoeff * yAcc + zCoeff * zAcc) * oneHalfATSquared;
yCoeff = cp * cr;
zCoeff = -cp * sr;
double dFz_dR = (yCoeff * yVel + zCoeff * zVel) * delta +
(yCoeff * yAcc + zCoeff * zAcc) * oneHalfATSquared;
double dFY_dR = (cr * cpi * pitchVel - sr * cpi * yawVel) * delta;
xCoeff = -cp;
yCoeff = -sp * sr;
zCoeff = -sp * cr;
double dFz_dP = (xCoeff * xVel + yCoeff * yVel + zCoeff * zVel) * delta +
(xCoeff * xAcc + yCoeff * yAcc + zCoeff * zAcc) * oneHalfATSquared;
double dFY_dP = (sr * tp * cpi * pitchVel + cr * tp * cpi * yawVel) * delta;
// Much of the transfer function Jacobian is identical to the transfer function
transferFunctionJacobian_ = transferFunction_;
transferFunctionJacobian_(StateMemberX, StateMemberRoll) = dFx_dR;
transferFunctionJacobian_(StateMemberX, StateMemberPitch) = dFx_dP;
transferFunctionJacobian_(StateMemberX, StateMemberYaw) = dFx_dY;
transferFunctionJacobian_(StateMemberY, StateMemberRoll) = dFy_dR;
transferFunctionJacobian_(StateMemberY, StateMemberPitch) = dFy_dP;
transferFunctionJacobian_(StateMemberY, StateMemberYaw) = dFy_dY;
transferFunctionJacobian_(StateMemberZ, StateMemberRoll) = dFz_dR;
transferFunctionJacobian_(StateMemberZ, StateMemberPitch) = dFz_dP;
transferFunctionJacobian_(StateMemberRoll, StateMemberRoll) = dFR_dR;
transferFunctionJacobian_(StateMemberRoll, StateMemberPitch) = dFR_dP;
transferFunctionJacobian_(StateMemberPitch, StateMemberRoll) = dFP_dR;
transferFunctionJacobian_(StateMemberYaw, StateMemberRoll) = dFY_dR;
transferFunctionJacobian_(StateMemberYaw, StateMemberPitch) = dFY_dP;
FB_DEBUG("Transfer function is:\n" << transferFunction_ <<
"\nTransfer function Jacobian is:\n" << transferFunctionJacobian_ <<
"\nProcess noise covariance is:\n" << processNoiseCovariance_ <<
"\nCurrent state is:\n" << state_ << "\n");
// Select static or dynamic process noise; pointer avoids copying the matrix.
Eigen::MatrixXd *processNoiseCovariance = &processNoiseCovariance_;
if (useDynamicProcessNoiseCovariance_)
{
computeDynamicProcessNoiseCovariance(state_, delta);
processNoiseCovariance = &dynamicProcessNoiseCovariance_;
}
// (1) Apply control terms, which are actually accelerations
state_(StateMemberVroll) += controlAcceleration_(ControlMemberVroll) * delta;
state_(StateMemberVpitch) += controlAcceleration_(ControlMemberVpitch) * delta;
state_(StateMemberVyaw) += controlAcceleration_(ControlMemberVyaw) * delta;
state_(StateMemberAx) = (controlUpdateVector_[ControlMemberVx] ?
controlAcceleration_(ControlMemberVx) : state_(StateMemberAx));
state_(StateMemberAy) = (controlUpdateVector_[ControlMemberVy] ?
controlAcceleration_(ControlMemberVy) : state_(StateMemberAy));
state_(StateMemberAz) = (controlUpdateVector_[ControlMemberVz] ?
controlAcceleration_(ControlMemberVz) : state_(StateMemberAz));
// (2) Project the state forward: x = Ax + Bu (really, x = f(x, u))
state_ = transferFunction_ * state_;
// Handle wrapping
wrapStateAngles();
FB_DEBUG("Predicted state is:\n" << state_ <<
"\nCurrent estimate error covariance is:\n" << estimateErrorCovariance_ << "\n");
// (3) Project the error forward: P = J * P * J' + Q
estimateErrorCovariance_ = (transferFunctionJacobian_ *
estimateErrorCovariance_ *
transferFunctionJacobian_.transpose());
estimateErrorCovariance_.noalias() += delta * (*processNoiseCovariance);
FB_DEBUG("Predicted estimate error covariance is:\n" << estimateErrorCovariance_ <<
"\n\n--------------------- /Ekf::predict ----------------------\n");
}
} // namespace RobotLocalization

View File

@@ -0,0 +1,51 @@
/*
* Copyright (c) 2014, 2015, 2016, Charles River Analytics, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "robot_localization/ros_filter_types.h"
#include <cstdlib>
#include <ros/ros.h>
int main(int argc, char **argv)
{
  // Bring up the ROS node that hosts the EKF state estimator.
  ros::init(argc, argv, "ekf_navigation_node");

  ros::NodeHandle node_handle;
  ros::NodeHandle private_handle("~");

  // Construct the ROS wrapper around the EKF, load its configuration, and
  // then hand control to the ROS event loop until shutdown.
  RobotLocalization::RosEkf ekf(node_handle, private_handle);
  ekf.initialize();

  ros::spin();

  return EXIT_SUCCESS;
}

View File

@@ -0,0 +1,64 @@
/*
* Copyright (c) 2017 Simon Gene Gottlieb
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "robot_localization/ros_filter_types.h"
#include <nodelet/nodelet.h>
#include <pluginlib/class_list_macros.h>
#include <ros/ros.h>
#include <memory>
namespace RobotLocalization
{
class EkfNodelet : public nodelet::Nodelet
{
private:
std::unique_ptr<RosEkf> ekf;
public:
virtual void onInit()
{
NODELET_DEBUG("Initializing nodelet...");
ros::NodeHandle nh = getNodeHandle();
ros::NodeHandle nh_priv = getPrivateNodeHandle();
ekf = std::make_unique<RosEkf>(nh, nh_priv, getName());
ekf->initialize();
}
};
} // namespace RobotLocalization
// Register the nodelet with pluginlib so it can be loaded at runtime by class name.
PLUGINLIB_EXPORT_CLASS(RobotLocalization::EkfNodelet, nodelet::Nodelet);

View File

@@ -0,0 +1,403 @@
/*
* Copyright (c) 2014, 2015, 2016, Charles River Analytics, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "robot_localization/filter_base.h"
#include "robot_localization/filter_common.h"
#include <iomanip>
#include <iostream>
#include <limits>
#include <sstream>
#include <vector>
namespace RobotLocalization
{
//! @brief Constructs the filter with all vectors and matrices sized for
//! the full state (STATE_SIZE) and control (TWIST_SIZE) dimensions.
//!
//! Eigen objects constructed with sizes alone are uninitialized; reset()
//! (called at the end of this constructor) is responsible for giving every
//! matrix/vector a well-defined initial value.
FilterBase::FilterBase() :
initialized_(false),
useControl_(false),
useDynamicProcessNoiseCovariance_(false),
lastMeasurementTime_(0.0),
latestControlTime_(0.0),
controlTimeout_(0.0),
sensorTimeout_(0.0),
controlUpdateVector_(TWIST_SIZE, 0),
accelerationGains_(TWIST_SIZE, 0.0),
accelerationLimits_(TWIST_SIZE, 0.0),
decelerationGains_(TWIST_SIZE, 0.0),
decelerationLimits_(TWIST_SIZE, 0.0),
controlAcceleration_(TWIST_SIZE),
latestControl_(TWIST_SIZE),
predictedState_(STATE_SIZE),
state_(STATE_SIZE),
covarianceEpsilon_(STATE_SIZE, STATE_SIZE),
dynamicProcessNoiseCovariance_(STATE_SIZE, STATE_SIZE),
estimateErrorCovariance_(STATE_SIZE, STATE_SIZE),
identity_(STATE_SIZE, STATE_SIZE),
processNoiseCovariance_(STATE_SIZE, STATE_SIZE),
transferFunction_(STATE_SIZE, STATE_SIZE),
transferFunctionJacobian_(STATE_SIZE, STATE_SIZE),
debugStream_(NULL),
debug_(false)
{
reset();
}
//! @brief Destructor. No owned resources beyond the members above.
FilterBase::~FilterBase()
{
}
//! @brief Resets the filter to its uninitialized startup configuration:
//! zeroed state, identity transfer function, default covariances, and
//! default sensor timeout. Safe to call repeatedly.
void FilterBase::reset()
{
initialized_ = false;
// Clear the state and predicted state
state_.setZero();
predictedState_.setZero();
controlAcceleration_.setZero();
// Prepare the invariant parts of the transfer
// function
transferFunction_.setIdentity();
// Clear the Jacobian
transferFunctionJacobian_.setZero();
// Set the estimate error covariance to a very small diagonal (1e-9).
// NOTE(review): the inherited comment claimed "large values" so that the
// first measurements would be accepted rapidly, but the code has used a
// tiny value for some time; in practice processMeasurement() overwrites
// both state and covariance from the first measurement anyway — confirm
// intent before changing.
estimateErrorCovariance_.setIdentity();
estimateErrorCovariance_ *= 1e-9;
// We need the identity for the update equations
identity_.setIdentity();
// Set the epsilon matrix to be a matrix with small values on the diagonal
// It is used to maintain the positive-definite property of the covariance
covarianceEpsilon_.setIdentity();
covarianceEpsilon_ *= 0.001;
// Assume 30Hz from sensor data (configurable)
sensorTimeout_ = 0.033333333;
// Initialize our measurement time
lastMeasurementTime_ = 0;
// These can be overridden via the launch parameters,
// but we need default values.
processNoiseCovariance_.setZero();
processNoiseCovariance_(StateMemberX, StateMemberX) = 0.05;
processNoiseCovariance_(StateMemberY, StateMemberY) = 0.05;
processNoiseCovariance_(StateMemberZ, StateMemberZ) = 0.06;
processNoiseCovariance_(StateMemberRoll, StateMemberRoll) = 0.03;
processNoiseCovariance_(StateMemberPitch, StateMemberPitch) = 0.03;
processNoiseCovariance_(StateMemberYaw, StateMemberYaw) = 0.06;
processNoiseCovariance_(StateMemberVx, StateMemberVx) = 0.025;
processNoiseCovariance_(StateMemberVy, StateMemberVy) = 0.025;
processNoiseCovariance_(StateMemberVz, StateMemberVz) = 0.04;
processNoiseCovariance_(StateMemberVroll, StateMemberVroll) = 0.01;
processNoiseCovariance_(StateMemberVpitch, StateMemberVpitch) = 0.01;
processNoiseCovariance_(StateMemberVyaw, StateMemberVyaw) = 0.02;
processNoiseCovariance_(StateMemberAx, StateMemberAx) = 0.01;
processNoiseCovariance_(StateMemberAy, StateMemberAy) = 0.01;
processNoiseCovariance_(StateMemberAz, StateMemberAz) = 0.015;
// The dynamic process noise starts as a copy of the static one; it is
// rescaled per-cycle by computeDynamicProcessNoiseCovariance() when enabled.
dynamicProcessNoiseCovariance_ = processNoiseCovariance_;
}
//! @brief Rescales the pose block of the process noise covariance by the
//! magnitude of the state's velocity, so that process noise grows with
//! speed and shrinks when the robot is (nearly) stationary.
//!
//! @param[in] state - State vector from which the velocity is read.
//! @param[in] delta - Prediction interval (currently unused here).
void FilterBase::computeDynamicProcessNoiseCovariance(const Eigen::VectorXd &state, const double delta)
{
  // A more principled approach would rotate a diagonal velocity matrix into
  // the world frame and scale the pose covariance with it, but that couples
  // rotational and linear error in undesirable ways. Instead, build a
  // uniform diagonal scale from the norm of the velocity sub-vector.
  const double speed = state.segment(POSITION_V_OFFSET, TWIST_SIZE).norm();

  const Eigen::MatrixXd scale = Eigen::MatrixXd::Identity(TWIST_SIZE, TWIST_SIZE) * speed;

  // Only the pose (position + orientation) block is rescaled; the rest of
  // dynamicProcessNoiseCovariance_ keeps its static values.
  dynamicProcessNoiseCovariance_.block<TWIST_SIZE, TWIST_SIZE>(POSITION_OFFSET, POSITION_OFFSET) =
      scale *
      processNoiseCovariance_.block<TWIST_SIZE, TWIST_SIZE>(POSITION_OFFSET, POSITION_OFFSET) *
      scale.transpose();
}
//! @return The most recent control vector.
const Eigen::VectorXd& FilterBase::getControl()
{
return latestControl_;
}
//! @return The time stamp of the most recent control vector.
double FilterBase::getControlTime()
{
return latestControlTime_;
}
//! @return Whether debug output is currently enabled.
bool FilterBase::getDebug()
{
return debug_;
}
//! @return The current estimate error covariance matrix (P).
const Eigen::MatrixXd& FilterBase::getEstimateErrorCovariance()
{
return estimateErrorCovariance_;
}
//! @return Whether the filter has been initialized by a first measurement.
bool FilterBase::getInitializedStatus()
{
return initialized_;
}
//! @return The time stamp of the most recently processed measurement.
double FilterBase::getLastMeasurementTime()
{
return lastMeasurementTime_;
}
//! @return The state as of the most recent prediction step.
const Eigen::VectorXd& FilterBase::getPredictedState()
{
return predictedState_;
}
//! @return The (static) process noise covariance matrix (Q).
const Eigen::MatrixXd& FilterBase::getProcessNoiseCovariance()
{
return processNoiseCovariance_;
}
//! @return The configured sensor timeout, in seconds.
double FilterBase::getSensorTimeout()
{
return sensorTimeout_;
}
//! @return The current full state vector.
const Eigen::VectorXd& FilterBase::getState()
{
return state_;
}
//! @brief Top-level measurement handler.
//!
//! For an initialized filter, runs a predict step if the measurement is
//! forward in time and then the correct step. For the very first
//! measurement, initializes state and covariance directly from the
//! measured values (only those flagged in the update vector).
//!
//! @param[in] measurement - Measurement to process.
void FilterBase::processMeasurement(const Measurement &measurement)
{
FB_DEBUG("------ FilterBase::processMeasurement (" << measurement.topicName_ << ") ------\n");
double delta = 0.0;
// If we've had a previous reading, then go through the predict/update
// cycle. Otherwise, set our state and covariance to whatever we get
// from this measurement.
if (initialized_)
{
// Determine how much time has passed since our last measurement
delta = measurement.time_ - lastMeasurementTime_;
FB_DEBUG("Filter is already initialized. Carrying out predict/correct loop...\n"
"Measurement time is " << std::setprecision(20) << measurement.time_ <<
", last measurement time is " << lastMeasurementTime_ << ", delta is " << delta << "\n");
// Only want to carry out a prediction if it's
// forward in time. Otherwise, just correct.
if (delta > 0)
{
validateDelta(delta);
predict(measurement.time_, delta);
// Return this to the user
predictedState_ = state_;
}
correct(measurement);
}
else
{
FB_DEBUG("First measurement. Initializing filter.\n");
// Initialize the filter, but only with the values we're using
size_t measurementLength = measurement.updateVector_.size();
for (size_t i = 0; i < measurementLength; ++i)
{
state_[i] = (measurement.updateVector_[i] ? measurement.measurement_[i] : state_[i]);
}
// Same for covariance
for (size_t i = 0; i < measurementLength; ++i)
{
for (size_t j = 0; j < measurementLength; ++j)
{
estimateErrorCovariance_(i, j) = (measurement.updateVector_[i] && measurement.updateVector_[j] ?
measurement.covariance_(i, j) :
estimateErrorCovariance_(i, j));
}
}
initialized_ = true;
}
// Only advance the measurement clock for in-order (or first) measurements;
// out-of-order measurements (delta < 0) leave it untouched.
if (delta >= 0.0)
{
lastMeasurementTime_ = measurement.time_;
}
FB_DEBUG("------ /FilterBase::processMeasurement (" << measurement.topicName_ << ") ------\n");
}
//! @brief Records the latest control vector and its time stamp; used by
//! prepareControl() during the next prediction.
//! @param[in] control - Latest control (command) vector.
//! @param[in] controlTime - Time at which the control was issued.
void FilterBase::setControl(const Eigen::VectorXd &control, const double controlTime)
{
latestControl_ = control;
latestControlTime_ = controlTime;
}
//! @brief Configures control handling and enables it (sets useControl_).
//! @param[in] updateVector - Per-axis flags for which control axes apply.
//! @param[in] controlTimeout - Seconds after which a control is considered stale.
//! @param[in] accelerationLimits - Per-axis acceleration limits.
//! @param[in] accelerationGains - Per-axis acceleration gains.
//! @param[in] decelerationLimits - Per-axis deceleration limits.
//! @param[in] decelerationGains - Per-axis deceleration gains.
void FilterBase::setControlParams(const std::vector<int> &updateVector, const double controlTimeout,
const std::vector<double> &accelerationLimits, const std::vector<double> &accelerationGains,
const std::vector<double> &decelerationLimits, const std::vector<double> &decelerationGains)
{
useControl_ = true;
controlUpdateVector_ = updateVector;
controlTimeout_ = controlTimeout;
accelerationLimits_ = accelerationLimits;
accelerationGains_ = accelerationGains;
decelerationLimits_ = decelerationLimits;
decelerationGains_ = decelerationGains;
}
//! @brief Enables or disables debug output.
//!
//! Debugging is switched on only when requested AND a valid output stream
//! is supplied; otherwise it is switched off. The stream pointer is stored
//! only in the enabled case, so a previously configured stream is left
//! untouched when debugging is turned off.
//!
//! @param[in] debug - Whether debug output is desired.
//! @param[in] outStream - Stream to which debug output should be written.
void FilterBase::setDebug(const bool debug, std::ostream *outStream)
{
  debug_ = (debug && outStream != NULL);

  if (debug_)
  {
    debugStream_ = outStream;
  }
}
//! @brief Enables/disables per-cycle velocity-scaled process noise.
void FilterBase::setUseDynamicProcessNoiseCovariance(const bool useDynamicProcessNoiseCovariance)
{
useDynamicProcessNoiseCovariance_ = useDynamicProcessNoiseCovariance;
}
//! @brief Overwrites the estimate error covariance (P).
void FilterBase::setEstimateErrorCovariance(const Eigen::MatrixXd &estimateErrorCovariance)
{
estimateErrorCovariance_ = estimateErrorCovariance;
}
//! @brief Overwrites the last measurement time stamp.
void FilterBase::setLastMeasurementTime(const double lastMeasurementTime)
{
lastMeasurementTime_ = lastMeasurementTime;
}
//! @brief Overwrites the process noise covariance (Q) and resets the
//! dynamic copy to match it.
void FilterBase::setProcessNoiseCovariance(const Eigen::MatrixXd &processNoiseCovariance)
{
processNoiseCovariance_ = processNoiseCovariance;
dynamicProcessNoiseCovariance_ = processNoiseCovariance_;
}
//! @brief Overwrites the sensor timeout, in seconds.
void FilterBase::setSensorTimeout(const double sensorTimeout)
{
sensorTimeout_ = sensorTimeout;
}
//! @brief Overwrites the full state vector.
void FilterBase::setState(const Eigen::VectorXd &state)
{
state_ = state;
}
//! @brief Sanity-checks a prediction interval, clamping absurdly large
//! values (which arise with use_sim_time during bag playback) to 0.01 s.
//! @param[in,out] delta - Interval to validate; modified in place.
void FilterBase::validateDelta(double &delta)
{
  // This handles issues with ROS time when use_sim_time is on and we're
  // playing from bags.
  const bool suspiciouslyLarge = (delta > 100000.0);

  if (suspiciouslyLarge)
  {
    FB_DEBUG("Delta was very large. Suspect playing from bag file. Setting to 0.01\n");

    delta = 0.01;
  }
}
//! @brief Converts the latest control command into the acceleration terms
//! applied during prediction.
//!
//! If control is disabled, controlAcceleration_ is simply zeroed. A stale
//! control (older than controlTimeout_ relative to referenceTime) is
//! treated as a zero command.
//!
//! @param[in] referenceTime - Time of the prediction being prepared.
//! @param[in] predictionDelta - Prediction interval (currently unused here).
void FilterBase::prepareControl(const double referenceTime, const double predictionDelta)
{
controlAcceleration_.setZero();
if (useControl_)
{
bool timedOut = ::fabs(referenceTime - latestControlTime_) >= controlTimeout_;
if (timedOut)
{
FB_DEBUG("Control timed out. Reference time was " << referenceTime << ", latest control time was " <<
latestControlTime_ << ", control timeout was " << controlTimeout_ << "\n");
}
// For each enabled control axis, compute an acceleration that drives the
// corresponding state velocity toward the commanded value (zero if stale).
for (size_t controlInd = 0; controlInd < TWIST_SIZE; ++controlInd)
{
if (controlUpdateVector_[controlInd])
{
controlAcceleration_(controlInd) = computeControlAcceleration(state_(controlInd + POSITION_V_OFFSET),
(timedOut ? 0.0 : latestControl_(controlInd)), accelerationLimits_[controlInd],
accelerationGains_[controlInd], decelerationLimits_[controlInd], decelerationGains_[controlInd]);
}
}
}
}
//! @brief Clamps the three orientation state members (roll, pitch, yaw)
//! using FilterUtilities::clampRotation().
void FilterBase::wrapStateAngles()
{
  const int angleIndices[] = {StateMemberRoll, StateMemberPitch, StateMemberYaw};

  for (const int index : angleIndices)
  {
    state_(index) = FilterUtilities::clampRotation(state_(index));
  }
}
//! @brief Gates a measurement by squared Mahalanobis distance.
//!
//! Computes innovation' * invCovariance * innovation and compares it
//! against nsigmas^2. Returns true when the measurement should be
//! accepted, false (with debug output) when it should be rejected.
//!
//! @param[in] innovation - Innovation vector (z - Hx).
//! @param[in] invCovariance - Inverse of the innovation covariance.
//! @param[in] nsigmas - Gate size in standard deviations.
//! @return true if the distance is within the gate.
bool FilterBase::checkMahalanobisThreshold(const Eigen::VectorXd &innovation,
const Eigen::MatrixXd &invCovariance,
const double nsigmas)
{
  const double squaredMahalanobis = innovation.dot(invCovariance * innovation);
  const double gate = nsigmas * nsigmas;

  if (squaredMahalanobis < gate)
  {
    return true;
  }

  FB_DEBUG("Innovation mahalanobis distance test failed. Squared Mahalanobis is: " << squaredMahalanobis << "\n" <<
    "Threshold is: " << gate << "\n" <<
    "Innovation is: " << innovation << "\n" <<
    "Innovation covariance is:\n" << invCovariance << "\n");

  return false;
}
} // namespace RobotLocalization

View File

@@ -0,0 +1,146 @@
/*
* Copyright (c) 2014, 2015, 2016, Charles River Analytics, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "robot_localization/filter_utilities.h"
#include "robot_localization/filter_common.h"
#include <string>
#include <vector>
// Streams an Eigen matrix as a bracketed block: one matrix row per output
// line, entries left-aligned in 12-character columns with 5 significant
// digits; continuation rows are indented by one space to line up under "[".
std::ostream& operator<<(std::ostream& os, const Eigen::MatrixXd &mat)
{
  os << "[";

  const int numRows = static_cast<int>(mat.rows());
  const int numCols = static_cast<int>(mat.cols());

  for (int r = 0; r < numRows; ++r)
  {
    if (r != 0)
    {
      os << " ";
    }

    for (int c = 0; c < numCols; ++c)
    {
      os << std::setiosflags(std::ios::left) << std::setw(12) << std::setprecision(5) << mat(r, c);
    }

    if (r + 1 < numRows)
    {
      os << "\n";
    }
  }

  os << "]\n";
  return os;
}
// Streams an Eigen vector as a single bracketed row of left-aligned,
// 12-character columns with 5 significant digits.
std::ostream& operator<<(std::ostream& os, const Eigen::VectorXd &vec)
{
  os << "[";

  const int numDims = static_cast<int>(vec.rows());
  for (int i = 0; i < numDims; ++i)
  {
    os << std::setiosflags(std::ios::left) << std::setw(12) << std::setprecision(5) << vec(i);
  }

  os << "]\n";
  return os;
}
// Streams a vector of indices as a single bracketed row of left-aligned,
// 12-character columns.
std::ostream& operator<<(std::ostream& os, const std::vector<size_t> &vec)
{
  os << "[";

  for (const size_t value : vec)
  {
    os << std::setiosflags(std::ios::left) << std::setw(12) << std::setprecision(5) << value;
  }

  os << "]\n";
  return os;
}
// Streams a vector of int-valued flags as a bracketed row of "t"/"f" markers,
// each left-aligned in a 3-character column (non-zero prints "t").
std::ostream& operator<<(std::ostream& os, const std::vector<int> &vec)
{
  os << "[";

  for (const int flag : vec)
  {
    os << std::setiosflags(std::ios::left) << std::setw(3) << (flag ? "t" : "f");
  }

  os << "]\n";
  return os;
}
namespace RobotLocalization
{
namespace FilterUtilities
{
// Joins an optional tf prefix onto a frame id in a tf2-compliant way: leading
// slashes are stripped from both inputs (tf2 forbids them), and a non-empty
// prefix is attached as "prefix/frameId". frameId is modified in place.
void appendPrefix(std::string tfPrefix, std::string &frameId)
{
  const auto stripLeadingSlash = [](std::string &str)
  {
    if (!str.empty() && str.front() == '/')
    {
      str.erase(0, 1);
    }
  };

  stripLeadingSlash(frameId);
  stripLeadingSlash(tfPrefix);

  if (!tfPrefix.empty())
  {
    frameId = tfPrefix + "/" + frameId;
  }
}
// Wraps an angle (radians) into the [-PI, PI] range by repeatedly shifting it
// by full turns (TAU) until it falls inside the interval.
double clampRotation(double rotation)
{
  for (; rotation > PI; rotation -= TAU)
  {
    // shift down one full turn
  }

  for (; rotation < -PI; rotation += TAU)
  {
    // shift up one full turn
  }

  return rotation;
}
} // namespace FilterUtilities
} // namespace RobotLocalization

View File

@@ -0,0 +1,915 @@
/*
* Copyright (c) 2014, 2015, 2016, Charles River Analytics, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "robot_localization/navsat_transform.h"
#include "robot_localization/filter_common.h"
#include "robot_localization/filter_utilities.h"
#include "robot_localization/navsat_conversions.h"
#include "robot_localization/ros_filter_utilities.h"
#include <tf2_geometry_msgs/tf2_geometry_msgs.h>
#include <XmlRpcException.h>
#include <string>
namespace RobotLocalization
{
// Constructor: reads all node parameters, optionally applies a manually
// specified datum, wires up the subscribers/publishers/services, and starts
// the periodic update timer. Parameter reads are order-sensitive: the
// deprecated broadcast_utm_* parameters intentionally overwrite the values
// read for their broadcast_cartesian_* replacements just above.
NavSatTransform::NavSatTransform(ros::NodeHandle nh, ros::NodeHandle nh_priv) :
  broadcast_cartesian_transform_(false),
  broadcast_cartesian_transform_as_parent_frame_(false),
  gps_updated_(false),
  has_transform_gps_(false),
  has_transform_imu_(false),
  has_transform_odom_(false),
  odom_updated_(false),
  publish_gps_(false),
  transform_good_(false),
  use_manual_datum_(false),
  use_odometry_yaw_(false),
  use_local_cartesian_(false),
  zero_altitude_(false),
  magnetic_declination_(0.0),
  yaw_offset_(0.0),
  base_link_frame_id_("base_link"),
  gps_frame_id_(""),
  utm_zone_(0),
  world_frame_id_("odom"),
  transform_timeout_(ros::Duration(0)),
  tf_listener_(tf_buffer_)
{
  ROS_INFO("Waiting for valid clock time...");
  ros::Time::waitForValid();
  ROS_INFO("Valid clock time received. Starting node.");
  latest_cartesian_covariance_.resize(POSE_SIZE, POSE_SIZE);
  latest_odom_covariance_.resize(POSE_SIZE, POSE_SIZE);
  double frequency;
  double delay = 0.0;
  double transform_timeout = 0.0;
  // Load the parameters we need
  nh_priv.getParam("magnetic_declination_radians", magnetic_declination_);
  nh_priv.param("yaw_offset", yaw_offset_, 0.0);
  nh_priv.param("broadcast_cartesian_transform", broadcast_cartesian_transform_, false);
  nh_priv.param("broadcast_cartesian_transform_as_parent_frame",
                broadcast_cartesian_transform_as_parent_frame_, false);
  nh_priv.param("zero_altitude", zero_altitude_, false);
  nh_priv.param("publish_filtered_gps", publish_gps_, false);
  nh_priv.param("use_odometry_yaw", use_odometry_yaw_, false);
  nh_priv.param("wait_for_datum", use_manual_datum_, false);
  nh_priv.param("use_local_cartesian", use_local_cartesian_, false);
  nh_priv.param("frequency", frequency, 10.0);
  nh_priv.param("delay", delay, 0.0);
  nh_priv.param("transform_timeout", transform_timeout, 0.0);
  // Default cartesian frame name depends on projection mode: local ENU vs UTM.
  nh_priv.param("cartesian_frame_id", cartesian_frame_id_, std::string(use_local_cartesian_ ? "local_enu" : "utm"));
  transform_timeout_.fromSec(transform_timeout);
  // Check for deprecated parameters. Note: if set, these getParam calls
  // overwrite the non-deprecated values read above.
  if (nh_priv.getParam("broadcast_utm_transform", broadcast_cartesian_transform_))
  {
    ROS_WARN("navsat_transform, Parameter 'broadcast_utm_transform' has been deprecated. Please use"
             "'broadcast_cartesian_transform' instead.");
  }
  if (nh_priv.getParam("broadcast_utm_transform_as_parent_frame", broadcast_cartesian_transform_as_parent_frame_))
  {
    ROS_WARN("navsat_transform, Parameter 'broadcast_utm_transform_as_parent_frame' has been deprecated. Please use"
             "'broadcast_cartesian_transform_as_parent_frame' instead.");
  }
  // Check if tf warnings should be suppressed. Left unchanged if the global
  // parameter is absent.
  nh.getParam("/silent_tf_failure", tf_silent_failure_);
  // Subscribe to the messages and services we need
  datum_srv_ = nh.advertiseService("datum", &NavSatTransform::datumCallback, this);
  to_ll_srv_ = nh.advertiseService("toLL", &NavSatTransform::toLLCallback, this);
  from_ll_srv_ = nh.advertiseService("fromLL", &NavSatTransform::fromLLCallback, this);
  set_utm_zone_srv_ = nh.advertiseService("setUTMZone", &NavSatTransform::setUTMZoneCallback, this);
  if (use_manual_datum_ && nh_priv.hasParam("datum"))
  {
    XmlRpc::XmlRpcValue datum_config;
    try
    {
      double datum_lat;
      double datum_lon;
      double datum_yaw;
      nh_priv.getParam("datum", datum_config);
      // Handle datum specification. Users should always specify a baseLinkFrameId_ in the
      // datum config, but we had a release where it wasn't used, so we'll maintain compatibility.
      ROS_ASSERT(datum_config.getType() == XmlRpc::XmlRpcValue::TypeArray);
      ROS_ASSERT(datum_config.size() >= 3);
      if (datum_config.size() > 3)
      {
        ROS_WARN_STREAM("Deprecated datum parameter configuration detected. Only the first three parameters "
            "(latitude, longitude, yaw) will be used. frame_ids will be derived from odometry and navsat inputs.");
      }
      // Round-trip the XmlRpc values through a string stream so both int and
      // double-typed parameters parse into doubles.
      std::ostringstream ostr;
      ostr << std::setprecision(20) << datum_config[0] << " " << datum_config[1] << " " << datum_config[2];
      std::istringstream istr(ostr.str());
      istr >> datum_lat >> datum_lon >> datum_yaw;
      // Try to resolve tf_prefix
      std::string tf_prefix = "";
      std::string tf_prefix_path = "";
      if (nh_priv.searchParam("tf_prefix", tf_prefix_path))
      {
        nh_priv.getParam(tf_prefix_path, tf_prefix);
      }
      // Append the tf prefix in a tf2-friendly manner
      FilterUtilities::appendPrefix(tf_prefix, world_frame_id_);
      FilterUtilities::appendPrefix(tf_prefix, base_link_frame_id_);
      // Feed the manual datum through the same path as the "datum" service.
      robot_localization::SetDatum::Request request;
      request.geo_pose.position.latitude = datum_lat;
      request.geo_pose.position.longitude = datum_lon;
      request.geo_pose.position.altitude = 0.0;
      tf2::Quaternion quat;
      quat.setRPY(0.0, 0.0, datum_yaw);
      request.geo_pose.orientation = tf2::toMsg(quat);
      robot_localization::SetDatum::Response response;
      datumCallback(request, response);
    }
    catch (XmlRpc::XmlRpcException &e)
    {
      ROS_ERROR_STREAM("ERROR reading sensor config: " << e.getMessage() <<
                       " for process_noise_covariance (type: " << datum_config.getType() << ")");
    }
  }
  odom_sub_ = nh.subscribe("odometry/filtered", 1, &NavSatTransform::odomCallback, this);
  gps_sub_ = nh.subscribe("gps/fix", 1, &NavSatTransform::gpsFixCallback, this);
  // The IMU is only needed to derive the datum heading; skip it when the yaw
  // comes from odometry or a manual datum.
  if (!use_odometry_yaw_ && !use_manual_datum_)
  {
    imu_sub_ = nh.subscribe("imu/data", 1, &NavSatTransform::imuCallback, this);
  }
  gps_odom_pub_ = nh.advertise<nav_msgs::Odometry>("odometry/gps", 10);
  if (publish_gps_)
  {
    filtered_gps_pub_ = nh.advertise<sensor_msgs::NavSatFix>("gps/filtered", 10);
  }
  // Sleep for the parameterized amount of time, to give
  // other nodes time to start up (not always necessary)
  ros::Duration start_delay(delay);
  start_delay.sleep();
  periodicUpdateTimer_ = nh.createTimer(ros::Duration(1./frequency), &NavSatTransform::periodicUpdate, this);
}
// Destructor: no explicit cleanup needed; member destructors handle teardown.
NavSatTransform::~NavSatTransform() = default;
// void NavSatTransform::run()
// Timer callback: until the datum transform exists, each tick attempts to
// compute it; afterwards, each tick publishes GPS odometry and (optionally)
// the filtered GPS fix.
void NavSatTransform::periodicUpdate(const ros::TimerEvent& event)
{
  if (!transform_good_)
  {
    computeTransform();

    if (transform_good_ && !use_odometry_yaw_ && !use_manual_datum_)
    {
      // Once we have the transform, we don't need the IMU
      imu_sub_.shutdown();
    }

    return;
  }

  nav_msgs::Odometry gps_odom;
  if (prepareGpsOdometry(gps_odom))
  {
    gps_odom_pub_.publish(gps_odom);
  }

  if (publish_gps_)
  {
    sensor_msgs::NavSatFix odom_gps;
    if (prepareFilteredGps(odom_gps))
    {
      filtered_gps_pub_.publish(odom_gps);
    }
  }
}
// Computes the static world-frame -> cartesian-frame transform from the first
// GPS fix (position), first odometry pose, and the IMU heading. On success,
// sets transform_good_ and optionally broadcasts the transform over tf.
void NavSatTransform::computeTransform()
{
  // Only do this if:
  // 1. We haven't computed the odom_frame->cartesian_frame transform before
  // 2. We've received the data we need
  if (!transform_good_ &&
      has_transform_odom_ &&
      has_transform_gps_ &&
      has_transform_imu_)
  {
    // The cartesian pose we have is given at the location of the GPS sensor on the robot. We need to get the
    // cartesian pose of the robot's origin.
    tf2::Transform transform_cartesian_pose_corrected;
    if (!use_manual_datum_)
    {
      getRobotOriginCartesianPose(transform_cartesian_pose_, transform_cartesian_pose_corrected, ros::Time(0));
    }
    else
    {
      // A manual datum already refers to the robot origin; no correction.
      transform_cartesian_pose_corrected = transform_cartesian_pose_;
    }
    // Get the IMU's current RPY values. Need the raw values (for yaw, anyway).
    tf2::Matrix3x3 mat(transform_orientation_);
    // Convert to RPY
    double imu_roll;
    double imu_pitch;
    double imu_yaw;
    mat.getRPY(imu_roll, imu_pitch, imu_yaw);
    /* The IMU's heading was likely originally reported w.r.t. magnetic north.
     * However, all the nodes in robot_localization assume that orientation data,
     * including that reported by IMUs, is reported in an ENU frame, with a 0 yaw
     * value being reported when facing east and increasing counter-clockwise (i.e.,
     * towards north). To make the world frame ENU aligned, where X is east
     * and Y is north, we have to take into account three additional considerations:
     *   1. The IMU may have its non-ENU frame data transformed to ENU, but there's
     *      a possibility that its data has not been corrected for magnetic
     *      declination. We need to account for this. A positive magnetic
     *      declination is counter-clockwise in an ENU frame. Therefore, if
     *      we have a magnetic declination of N radians, then when the sensor
     *      is facing a heading of N, it reports 0. Therefore, we need to add
     *      the declination angle.
     *   2. To account for any other offsets that may not be accounted for by the
     *      IMU driver or any interim processing node, we expose a yaw offset that
     *      lets users work with navsat_transform_node.
     *   3. UTM grid isn't aligned with True East\North. To account for the difference
     *      we need to add meridian convergence angle when using UTM. This value will be
     *      0.0 when use_local_cartesian is TRUE.
     */
    imu_yaw += (magnetic_declination_ + yaw_offset_ + utm_meridian_convergence_);
    ROS_INFO_STREAM("Corrected for magnetic declination of " << std::fixed << magnetic_declination_ <<
                    ", user-specified offset of " << yaw_offset_ <<
                    " and meridian convergence of " << utm_meridian_convergence_ << "." <<
                    " Transform heading factor is now " << imu_yaw);
    // Convert to tf-friendly structures
    tf2::Quaternion imu_quat;
    imu_quat.setRPY(0.0, 0.0, imu_yaw);
    // The transform order will be orig_odom_pos * orig_cartesian_pos_inverse * cur_cartesian_pos.
    // Doing it this way will allow us to cope with having non-zero odometry position
    // when we get our first GPS message.
    tf2::Transform cartesian_pose_with_orientation;
    cartesian_pose_with_orientation.setOrigin(transform_cartesian_pose_corrected.getOrigin());
    cartesian_pose_with_orientation.setRotation(imu_quat);
    // Remove roll and pitch from odometry pose
    // Must be done because roll and pitch is removed from cartesian_pose_with_orientation
    double odom_roll, odom_pitch, odom_yaw;
    tf2::Matrix3x3(transform_world_pose_.getRotation()).getRPY(odom_roll, odom_pitch, odom_yaw);
    tf2::Quaternion odom_quat;
    odom_quat.setRPY(0.0, 0.0, odom_yaw);
    tf2::Transform transform_world_pose_yaw_only(transform_world_pose_);
    transform_world_pose_yaw_only.setRotation(odom_quat);
    // world->cartesian = (yaw-only odom pose) * (cartesian pose)^-1
    cartesian_world_transform_.mult(transform_world_pose_yaw_only, cartesian_pose_with_orientation.inverse());
    cartesian_world_trans_inverse_ = cartesian_world_transform_.inverse();
    ROS_INFO_STREAM("Transform world frame pose is: " << transform_world_pose_);
    ROS_INFO_STREAM("World frame->cartesian transform is " << cartesian_world_transform_);
    transform_good_ = true;
    // Send out the (static) Cartesian transform in case anyone else would like to use it.
    if (broadcast_cartesian_transform_)
    {
      // Parent/child direction and which transform to publish depend on
      // broadcast_cartesian_transform_as_parent_frame_.
      geometry_msgs::TransformStamped cartesian_transform_stamped;
      cartesian_transform_stamped.header.stamp = ros::Time::now();
      cartesian_transform_stamped.header.frame_id = (broadcast_cartesian_transform_as_parent_frame_ ?
                                                     cartesian_frame_id_ : world_frame_id_);
      cartesian_transform_stamped.child_frame_id = (broadcast_cartesian_transform_as_parent_frame_ ?
                                                    world_frame_id_ : cartesian_frame_id_);
      cartesian_transform_stamped.transform = (broadcast_cartesian_transform_as_parent_frame_ ?
                                               tf2::toMsg(cartesian_world_trans_inverse_) :
                                               tf2::toMsg(cartesian_world_transform_));
      cartesian_transform_stamped.transform.translation.z = (zero_altitude_ ?
                                                             0.0 : cartesian_transform_stamped.transform.translation.z);
      cartesian_broadcaster_.sendTransform(cartesian_transform_stamped);
    }
  }
}
// Service handler: installs a manually specified datum (lat/lon/alt + yaw) by
// synthesizing a GPS fix, a zero odometry pose, and an IMU orientation, and
// feeding them through the normal setTransform*/imuCallback paths. Forces
// recomputation of the transform on the next periodic update.
bool NavSatTransform::datumCallback(robot_localization::SetDatum::Request& request,
                                    robot_localization::SetDatum::Response&)
{
  // If we get a service call with a manual datum, even if we already computed the transform using the robot's
  // initial pose, then we want to assume that we are using a datum from now on, and we want other methods to
  // not attempt to transform the values we are specifying here.
  use_manual_datum_ = true;
  transform_good_ = false;
  // The raw pointers below are immediately adopted by the ConstPtr
  // (shared_ptr) wrappers, which own and free them.
  sensor_msgs::NavSatFix *fix = new sensor_msgs::NavSatFix();
  fix->latitude = request.geo_pose.position.latitude;
  fix->longitude = request.geo_pose.position.longitude;
  fix->altitude = request.geo_pose.position.altitude;
  fix->header.stamp = ros::Time::now();
  fix->position_covariance[0] = 0.1;
  fix->position_covariance[4] = 0.1;
  fix->position_covariance[8] = 0.1;
  // NOTE(review): this assigns NavSatStatus::STATUS_FIX to
  // position_covariance_type; a NavSatFix COVARIANCE_TYPE_* constant appears
  // to have been intended — confirm before changing, as the numeric value
  // (and thus behavior) would differ.
  fix->position_covariance_type = sensor_msgs::NavSatStatus::STATUS_FIX;
  sensor_msgs::NavSatFixConstPtr fix_ptr(fix);
  setTransformGps(fix_ptr);
  // Datum odometry is the identity pose in the world frame.
  nav_msgs::Odometry *odom = new nav_msgs::Odometry();
  odom->pose.pose.orientation.x = 0;
  odom->pose.pose.orientation.y = 0;
  odom->pose.pose.orientation.z = 0;
  odom->pose.pose.orientation.w = 1;
  odom->pose.pose.position.x = 0;
  odom->pose.pose.position.y = 0;
  odom->pose.pose.position.z = 0;
  odom->header.frame_id = world_frame_id_;
  odom->child_frame_id = base_link_frame_id_;
  nav_msgs::OdometryConstPtr odom_ptr(odom);
  setTransformOdometry(odom_ptr);
  // The datum yaw is delivered via a synthetic IMU message.
  sensor_msgs::Imu *imu = new sensor_msgs::Imu();
  imu->orientation = request.geo_pose.orientation;
  imu->header.frame_id = base_link_frame_id_;
  sensor_msgs::ImuConstPtr imu_ptr(imu);
  imuCallback(imu_ptr);
  return true;
}
// Service handler: converts a point in the world (map) frame to latitude,
// longitude, and altitude. Fails until the datum transform is available.
bool NavSatTransform::toLLCallback(robot_localization::ToLL::Request& request,
                                   robot_localization::ToLL::Response& response)
{
  if (!transform_good_)
  {
    ROS_ERROR("No transform available (yet)");
    return false;
  }

  tf2::Vector3 map_point;
  tf2::fromMsg(request.map_point, map_point);

  mapToLL(map_point, response.ll_point.latitude, response.ll_point.longitude, response.ll_point.altitude);

  return true;
}
// Service handler: converts a latitude/longitude/altitude point into the
// world (map) frame. Returns false when the datum transform is not yet
// available or when the UTM projection fails.
bool NavSatTransform::fromLLCallback(robot_localization::FromLL::Request& request,
                                     robot_localization::FromLL::Response& response)
{
  // Fail fast if we cannot transform at all. (The original checked this only
  // after the projection work, wasting it — and its error messages — when the
  // transform was missing.)
  if (!transform_good_)
  {
    ROS_ERROR("No transform available (yet)");
    return false;
  }

  const double altitude = request.ll_point.altitude;
  const double longitude = request.ll_point.longitude;
  const double latitude = request.ll_point.latitude;

  double cartesian_x = 0.0;
  double cartesian_y = 0.0;
  double cartesian_z = 0.0;

  if (use_local_cartesian_)
  {
    gps_local_cartesian_.Forward(latitude, longitude, altitude, cartesian_x, cartesian_y, cartesian_z);
  }
  else
  {
    // Project into the fixed zone utm_zone_; the unconstrained zone and
    // hemisphere outputs are not needed here.
    int zone_tmp;
    bool northp_tmp;
    try
    {
      GeographicLib::UTMUPS::Forward(latitude, longitude, zone_tmp, northp_tmp, cartesian_x, cartesian_y, utm_zone_);
    }
    catch (const GeographicLib::GeographicErr& e)
    {
      ROS_ERROR_STREAM_THROTTLE(1.0, e.what());
      return false;
    }
  }

  // Note: z carries the requested altitude, not the projected cartesian_z.
  tf2::Transform cartesian_pose;
  cartesian_pose.setOrigin(tf2::Vector3(cartesian_x, cartesian_y, altitude));
  cartesian_pose.setRotation(tf2::Quaternion::getIdentity());

  response.map_point = cartesianToMap(cartesian_pose).pose.pose.position;

  return true;
}
// Service handler: pins the UTM zone and hemisphere used for all subsequent
// projections. The request string is parsed as an MGRS grid reference; only
// the zone and hemisphere outputs are kept — the position and precision
// outputs are discarded.
bool NavSatTransform::setUTMZoneCallback(robot_localization::SetUTMZone::Request& request,
                                         robot_localization::SetUTMZone::Response& response)
{
  double x_unused;
  double y_unused;
  int prec_unused;
  // Final 'true' enables centerp (center-of-square convention) per the
  // GeographicLib MGRS API.
  GeographicLib::MGRS::Reverse(request.utm_zone, utm_zone_, northp_, x_unused, y_unused, prec_unused, true);
  ROS_INFO("UTM zone set to %d %s", utm_zone_, northp_ ? "north" : "south");
  return true;
}
// Applies the cartesian->world transform to the given cartesian pose and
// returns it as an odometry message in the world frame. The orientation is
// flattened to identity (a GPS position carries no heading), and z is zeroed
// when zero_altitude_ is set.
nav_msgs::Odometry NavSatTransform::cartesianToMap(const tf2::Transform& cartesian_pose) const
{
  nav_msgs::Odometry odom_msg{};

  tf2::Transform world_pose{};
  world_pose.mult(cartesian_world_transform_, cartesian_pose);
  world_pose.setRotation(tf2::Quaternion::getIdentity());

  // Stamp with the GPS time so consumers know when the fix was taken.
  odom_msg.header.frame_id = world_frame_id_;
  odom_msg.header.stamp = gps_update_time_;

  tf2::toMsg(world_pose, odom_msg.pose.pose);
  odom_msg.pose.pose.position.z = (zero_altitude_ ? 0.0 : odom_msg.pose.pose.position.z);

  return odom_msg;
}
// Converts a point in the world (map) frame to latitude/longitude/altitude by
// applying the world->cartesian transform and then reversing the projection.
// Altitude is taken from the transformed z coordinate, not from the
// projection's altitude output.
void NavSatTransform::mapToLL(const tf2::Vector3& point, double& latitude, double& longitude, double& altitude) const
{
  tf2::Transform odom_as_cartesian{};
  tf2::Transform pose{};
  pose.setOrigin(point);
  pose.setRotation(tf2::Quaternion::getIdentity());
  odom_as_cartesian.mult(cartesian_world_trans_inverse_, pose);
  odom_as_cartesian.setRotation(tf2::Quaternion::getIdentity());
  if (use_local_cartesian_)
  {
    // Reverse is fed z = 0.0 and its altitude output is discarded; the
    // reported altitude is the cartesian z instead.
    double altitude_tmp = 0.0;
    gps_local_cartesian_.Reverse(odom_as_cartesian.getOrigin().getX(),
                                 odom_as_cartesian.getOrigin().getY(),
                                 0.0,
                                 latitude,
                                 longitude,
                                 altitude_tmp);
    altitude = odom_as_cartesian.getOrigin().getZ();
  }
  else
  {
    // UTM reverse projection using the fixed zone/hemisphere.
    GeographicLib::UTMUPS::Reverse(utm_zone_,
                                   northp_,
                                   odom_as_cartesian.getOrigin().getX(),
                                   odom_as_cartesian.getOrigin().getY(),
                                   latitude,
                                   longitude);
    altitude = odom_as_cartesian.getOrigin().getZ();
  }
}
// Converts a cartesian pose measured at the GPS antenna into the cartesian
// pose of the robot's origin (base_link), using the base_link->gps transform
// from tf. Falls back to the raw GPS pose when that transform is unavailable.
void NavSatTransform::getRobotOriginCartesianPose(const tf2::Transform &gps_cartesian_pose,
                                                  tf2::Transform &robot_cartesian_pose,
                                                  const ros::Time &transform_time)
{
  robot_cartesian_pose.setIdentity();
  // Get linear offset from origin for the GPS
  tf2::Transform offset;
  bool can_transform = RosFilterUtilities::lookupTransformSafe(tf_buffer_,
                                                               base_link_frame_id_,
                                                               gps_frame_id_,
                                                               transform_time,
                                                               ros::Duration(transform_timeout_),
                                                               offset,
                                                               tf_silent_failure_);
  if (can_transform)
  {
    // Get the orientation we'll use for our Cartesian->world transform
    tf2::Quaternion cartesian_orientation = transform_orientation_;
    tf2::Matrix3x3 mat(cartesian_orientation);
    // Add the offsets
    double roll;
    double pitch;
    double yaw;
    mat.getRPY(roll, pitch, yaw);
    // Apply the same heading corrections used in computeTransform.
    yaw += (magnetic_declination_ + yaw_offset_ + utm_meridian_convergence_);
    cartesian_orientation.setRPY(roll, pitch, yaw);
    // Rotate the GPS linear offset by the orientation
    // Zero out the orientation, because the GPS orientation is meaningless, and if it's non-zero, it will make the
    // the computation of robot_cartesian_pose erroneous.
    offset.setOrigin(tf2::quatRotate(cartesian_orientation, offset.getOrigin()));
    offset.setRotation(tf2::Quaternion::getIdentity());
    // Update the initial pose
    robot_cartesian_pose = offset.inverse() * gps_cartesian_pose;
  }
  else
  {
    // Warn only once (and only when we actually know the GPS frame id).
    if (gps_frame_id_ != "")
    {
      ROS_WARN_STREAM_ONCE("Unable to obtain " << base_link_frame_id_ << "->" << gps_frame_id_ <<
        " transform. Will assume navsat device is mounted at robot's origin");
    }
    robot_cartesian_pose = gps_cartesian_pose;
  }
}
// Converts a world-frame pose of the GPS antenna into the world-frame pose of
// the robot's origin by removing the base_link->gps offset, rotated into the
// world frame using the robot's current orientation. Leaves the offset in
// place (with a throttled warning) when either tf lookup fails.
void NavSatTransform::getRobotOriginWorldPose(const tf2::Transform &gps_odom_pose,
                                              tf2::Transform &robot_odom_pose,
                                              const ros::Time &transform_time)
{
  robot_odom_pose.setIdentity();
  // Remove the offset from base_link
  tf2::Transform gps_offset_rotated;
  bool can_transform = RosFilterUtilities::lookupTransformSafe(tf_buffer_,
                                                               base_link_frame_id_,
                                                               gps_frame_id_,
                                                               transform_time,
                                                               transform_timeout_,
                                                               gps_offset_rotated,
                                                               tf_silent_failure_);
  if (can_transform)
  {
    // Need the robot's current world orientation to rotate the offset.
    tf2::Transform robot_orientation;
    can_transform = RosFilterUtilities::lookupTransformSafe(tf_buffer_,
                                                            world_frame_id_,
                                                            base_link_frame_id_,
                                                            transform_time,
                                                            transform_timeout_,
                                                            robot_orientation,
                                                            tf_silent_failure_);
    if (can_transform)
    {
      // Zero out rotation because we don't care about the orientation of the
      // GPS receiver relative to base_link
      gps_offset_rotated.setOrigin(tf2::quatRotate(robot_orientation.getRotation(), gps_offset_rotated.getOrigin()));
      gps_offset_rotated.setRotation(tf2::Quaternion::getIdentity());
      robot_odom_pose = gps_offset_rotated.inverse() * gps_odom_pose;
    }
    else
    {
      ROS_WARN_STREAM_THROTTLE(5.0, "Could not obtain " << world_frame_id_ << "->" << base_link_frame_id_ <<
        " transform. Will not remove offset of navsat device from robot's origin.");
    }
  }
  else
  {
    ROS_WARN_STREAM_THROTTLE(5.0, "Could not obtain " << base_link_frame_id_ << "->" << gps_frame_id_ <<
      " transform. Will not remove offset of navsat device from robot's origin.");
  }
}
// GPS fix subscriber callback: projects a valid fix into the cartesian frame
// (local ENU or the fixed UTM zone), caches its pose, covariance, and stamp,
// and — before the datum transform exists — records the fix as the datum.
void NavSatTransform::gpsFixCallback(const sensor_msgs::NavSatFixConstPtr& msg)
{
  gps_frame_id_ = msg->header.frame_id;
  if (gps_frame_id_.empty())
  {
    ROS_WARN_STREAM_ONCE("NavSatFix message has empty frame_id. Will assume navsat device is mounted at robot's "
      "origin.");
  }
  // Make sure the GPS data is usable: needs a fix and finite lat/lon/alt.
  bool good_gps = (msg->status.status != sensor_msgs::NavSatStatus::STATUS_NO_FIX &&
                   !std::isnan(msg->altitude) &&
                   !std::isnan(msg->latitude) &&
                   !std::isnan(msg->longitude));
  if (good_gps)
  {
    // If we haven't computed the transform yet, then
    // store this message as the initial GPS data to use
    if (!transform_good_ && !use_manual_datum_)
    {
      setTransformGps(msg);
    }
    double cartesian_x = 0.0;
    double cartesian_y = 0.0;
    double cartesian_z = 0.0;
    if (use_local_cartesian_)
    {
      gps_local_cartesian_.Forward(msg->latitude, msg->longitude, msg->altitude,
                                   cartesian_x, cartesian_y, cartesian_z);
    }
    else
    {
      // Transform to UTM using the fixed utm_zone_
      int zone_tmp;
      bool northp_tmp;
      try
      {
        GeographicLib::UTMUPS::Forward(msg->latitude, msg->longitude,
                                       zone_tmp, northp_tmp, cartesian_x, cartesian_y, utm_zone_);
      }
      catch (const GeographicLib::GeographicErr& e)
      {
        // Projection failure (e.g. fix far outside the pinned zone): drop the fix.
        ROS_ERROR_STREAM_THROTTLE(1.0, e.what());
        return;
      }
    }
    // z carries the raw GPS altitude, not the projected cartesian z.
    latest_cartesian_pose_.setOrigin(tf2::Vector3(cartesian_x, cartesian_y, msg->altitude));
    latest_cartesian_covariance_.setZero();
    // Copy the measurement's covariance matrix so that we can rotate it later
    for (size_t i = 0; i < POSITION_SIZE; i++)
    {
      for (size_t j = 0; j < POSITION_SIZE; j++)
      {
        latest_cartesian_covariance_(i, j) = msg->position_covariance[POSITION_SIZE * i + j];
      }
    }
    gps_update_time_ = msg->header.stamp;
    gps_updated_ = true;
  }
}
// IMU subscriber callback: captures the heading used to build the datum
// transform. Corrects the reported orientation for the IMU's mounting
// relative to base_link, then sets has_transform_imu_.
void NavSatTransform::imuCallback(const sensor_msgs::ImuConstPtr& msg)
{
  // We need the baseLinkFrameId_ from the odometry message, so
  // we need to wait until we receive it.
  if (has_transform_odom_)
  {
    /* This method only gets called if we don't yet have the
     * IMU data (the subscriber gets shut down once we compute
     * the transform), so we can assumed that every IMU message
     * that comes here is meant to be used for that purpose. */
    tf2::fromMsg(msg->orientation, transform_orientation_);
    // Correct for the IMU's orientation w.r.t. base_link
    tf2::Transform target_frame_trans;
    bool can_transform = RosFilterUtilities::lookupTransformSafe(tf_buffer_,
                                                                 base_link_frame_id_,
                                                                 msg->header.frame_id,
                                                                 msg->header.stamp,
                                                                 transform_timeout_,
                                                                 target_frame_trans,
                                                                 tf_silent_failure_);
    if (can_transform)
    {
      double roll_offset = 0;
      double pitch_offset = 0;
      double yaw_offset = 0;
      double roll = 0;
      double pitch = 0;
      double yaw = 0;
      RosFilterUtilities::quatToRPY(target_frame_trans.getRotation(), roll_offset, pitch_offset, yaw_offset);
      RosFilterUtilities::quatToRPY(transform_orientation_, roll, pitch, yaw);
      ROS_DEBUG_STREAM("Initial orientation is " << transform_orientation_);
      // Apply the offset (making sure to bound them), and throw them in a vector
      tf2::Vector3 rpy_angles(FilterUtilities::clampRotation(roll - roll_offset),
                              FilterUtilities::clampRotation(pitch - pitch_offset),
                              FilterUtilities::clampRotation(yaw - yaw_offset));
      // Now we need to rotate the roll and pitch by the yaw offset value.
      // Imagine a case where an IMU is mounted facing sideways. In that case
      // pitch for the IMU's world frame is roll for the robot.
      tf2::Matrix3x3 mat;
      mat.setRPY(0.0, 0.0, yaw_offset);
      rpy_angles = mat * rpy_angles;
      transform_orientation_.setRPY(rpy_angles.getX(), rpy_angles.getY(), rpy_angles.getZ());
      ROS_DEBUG_STREAM("Initial corrected orientation roll, pitch, yaw is (" <<
                       rpy_angles.getX() << ", " << rpy_angles.getY() << ", " << rpy_angles.getZ() << ")");
      has_transform_imu_ = true;
    }
  }
}
// Odometry subscriber callback: adopts the world/base_link frame ids from the
// message, caches the latest pose, covariance, and stamp, and — before the
// datum transform exists — records this pose as the datum odometry.
void NavSatTransform::odomCallback(const nav_msgs::OdometryConstPtr& msg)
{
  world_frame_id_ = msg->header.frame_id;
  base_link_frame_id_ = msg->child_frame_id;

  if (!transform_good_ && !use_manual_datum_)
  {
    setTransformOdometry(msg);
  }

  tf2::fromMsg(msg->pose.pose, latest_world_pose_);

  // Unpack the flattened 6x6 pose covariance from the message.
  latest_odom_covariance_.setZero();
  for (size_t row_ind = 0; row_ind < POSE_SIZE; ++row_ind)
  {
    for (size_t col_ind = 0; col_ind < POSE_SIZE; ++col_ind)
    {
      latest_odom_covariance_(row_ind, col_ind) = msg->pose.covariance[row_ind * POSE_SIZE + col_ind];
    }
  }

  odom_update_time_ = msg->header.stamp;
  odom_updated_ = true;
}
// Builds a NavSatFix message from the latest filtered odometry pose, by
// inverse-transforming it back to lat/lon/alt. Returns true (and fills
// filtered_gps) only when fresh odometry has arrived since the last call.
bool NavSatTransform::prepareFilteredGps(sensor_msgs::NavSatFix &filtered_gps)
{
  bool new_data = false;
  if (transform_good_ && odom_updated_)
  {
    mapToLL(latest_world_pose_.getOrigin(), filtered_gps.latitude, filtered_gps.longitude, filtered_gps.altitude);
    // Rotate the covariance as well. The rotation is applied to the full 6x6
    // covariance as a block-diagonal (position, orientation) rotation.
    tf2::Matrix3x3 rot(cartesian_world_trans_inverse_.getRotation());
    Eigen::MatrixXd rot_6d(POSE_SIZE, POSE_SIZE);
    rot_6d.setIdentity();
    for (size_t rInd = 0; rInd < POSITION_SIZE; ++rInd)
    {
      rot_6d(rInd, 0) = rot.getRow(rInd).getX();
      rot_6d(rInd, 1) = rot.getRow(rInd).getY();
      rot_6d(rInd, 2) = rot.getRow(rInd).getZ();
      rot_6d(rInd+POSITION_SIZE, 3) = rot.getRow(rInd).getX();
      rot_6d(rInd+POSITION_SIZE, 4) = rot.getRow(rInd).getY();
      rot_6d(rInd+POSITION_SIZE, 5) = rot.getRow(rInd).getZ();
    }
    // Rotate the covariance
    latest_odom_covariance_ = rot_6d * latest_odom_covariance_.eval() * rot_6d.transpose();
    // Copy the measurement's covariance matrix back. Only the 3x3 position
    // block fits in NavSatFix's position_covariance.
    for (size_t i = 0; i < POSITION_SIZE; i++)
    {
      for (size_t j = 0; j < POSITION_SIZE; j++)
      {
        filtered_gps.position_covariance[POSITION_SIZE * i + j] = latest_odom_covariance_(i, j);
      }
    }
    filtered_gps.position_covariance_type = sensor_msgs::NavSatFix::COVARIANCE_TYPE_KNOWN;
    filtered_gps.status.status = sensor_msgs::NavSatStatus::STATUS_GBAS_FIX;
    filtered_gps.header.frame_id = base_link_frame_id_;
    filtered_gps.header.stamp = odom_update_time_;
    // Mark this GPS as used
    odom_updated_ = false;
    new_data = true;
  }
  return new_data;
}
// Builds a world-frame odometry message from the latest GPS fix: transforms
// the cartesian GPS pose into the world frame, shifts it to the robot origin,
// and rotates the fix covariance accordingly. Returns true (and fills
// gps_odom) only when both a fresh GPS fix and fresh odometry are available.
bool NavSatTransform::prepareGpsOdometry(nav_msgs::Odometry &gps_odom)
{
  bool new_data = false;
  if (transform_good_ && gps_updated_ && odom_updated_)
  {
    gps_odom = cartesianToMap(latest_cartesian_pose_);
    tf2::Transform transformed_cartesian_gps;
    tf2::fromMsg(gps_odom.pose.pose, transformed_cartesian_gps);
    // Want the pose of the vehicle origin, not the GPS
    tf2::Transform transformed_cartesian_robot;
    getRobotOriginWorldPose(transformed_cartesian_gps, transformed_cartesian_robot, gps_odom.header.stamp);
    // Rotate the covariance as well: build a block-diagonal 6x6 rotation from
    // the cartesian->world rotation.
    tf2::Matrix3x3 rot(cartesian_world_transform_.getRotation());
    Eigen::MatrixXd rot_6d(POSE_SIZE, POSE_SIZE);
    rot_6d.setIdentity();
    for (size_t rInd = 0; rInd < POSITION_SIZE; ++rInd)
    {
      rot_6d(rInd, 0) = rot.getRow(rInd).getX();
      rot_6d(rInd, 1) = rot.getRow(rInd).getY();
      rot_6d(rInd, 2) = rot.getRow(rInd).getZ();
      rot_6d(rInd+POSITION_SIZE, 3) = rot.getRow(rInd).getX();
      rot_6d(rInd+POSITION_SIZE, 4) = rot.getRow(rInd).getY();
      rot_6d(rInd+POSITION_SIZE, 5) = rot.getRow(rInd).getZ();
    }
    // Rotate the covariance
    latest_cartesian_covariance_ = rot_6d * latest_cartesian_covariance_.eval() * rot_6d.transpose();
    // Now fill out the message. Set the orientation to the identity.
    tf2::toMsg(transformed_cartesian_robot, gps_odom.pose.pose);
    gps_odom.pose.pose.position.z = (zero_altitude_ ? 0.0 : gps_odom.pose.pose.position.z);
    // Copy the measurement's covariance matrix so that we can rotate it later
    for (size_t i = 0; i < POSE_SIZE; i++)
    {
      for (size_t j = 0; j < POSE_SIZE; j++)
      {
        gps_odom.pose.covariance[POSE_SIZE * i + j] = latest_cartesian_covariance_(i, j);
      }
    }
    // Mark this GPS as used
    gps_updated_ = false;
    new_data = true;
  }
  return new_data;
}
/**
 * @brief Records the GPS datum: converts the fix to cartesian coordinates and
 *        stores it as the origin of the cartesian->world transform.
 */
void NavSatTransform::setTransformGps(const sensor_msgs::NavSatFixConstPtr& msg)
{
  // Cartesian coordinates of the datum fix
  double datum_x = 0;
  double datum_y = 0;
  double datum_z = 0;

  if (!use_local_cartesian_)
  {
    // UTM conversion; keep the zone and hemisphere, and remember the meridian
    // convergence (in radians) for later heading corrections.
    double utm_scale;
    double convergence_degrees;
    GeographicLib::UTMUPS::Forward(msg->latitude, msg->longitude, utm_zone_, northp_,
                                   datum_x, datum_y, convergence_degrees, utm_scale);
    utm_meridian_convergence_ = convergence_degrees * NavsatConversions::RADIANS_PER_DEGREE;
  }
  else
  {
    // Reset the local-cartesian origin to this fix (at zero ellipsoid height),
    // then convert the fix itself.
    const double hae_altitude = 0.0;
    gps_local_cartesian_.Reset(msg->latitude, msg->longitude, hae_altitude);
    gps_local_cartesian_.Forward(msg->latitude, msg->longitude, msg->altitude, datum_x, datum_y, datum_z);
    // UTM meridian convergence is not meaningful when using local cartesian, so set it to 0.0
    utm_meridian_convergence_ = 0.0;
  }

  ROS_INFO_STREAM("Datum (latitude, longitude, altitude) is (" << std::fixed << msg->latitude << ", " <<
                  msg->longitude << ", " << msg->altitude << ")");
  ROS_INFO_STREAM("Datum " << ((use_local_cartesian_)? "Local Cartesian" : "UTM") <<
                  " coordinate is (" << std::fixed << datum_x << ", " << datum_y << ") zone " << utm_zone_);

  // Store the datum as a pure translation (identity rotation) in cartesian space.
  transform_cartesian_pose_.setOrigin(tf2::Vector3(datum_x, datum_y, msg->altitude));
  transform_cartesian_pose_.setRotation(tf2::Quaternion::getIdentity());
  has_transform_gps_ = true;
}
/**
 * @brief Records the odometry pose used for the cartesian->world transform and,
 *        if configured, bootstraps the heading from the odometry orientation.
 */
void NavSatTransform::setTransformOdometry(const nav_msgs::OdometryConstPtr& msg)
{
  tf2::fromMsg(msg->pose.pose, transform_world_pose_);
  has_transform_odom_ = true;
  ROS_INFO_STREAM_ONCE("Initial odometry pose is " << transform_world_pose_);

  // Users can optionally use the (potentially fused) heading from the odometry
  // source, which may have multiple fused sources of heading data, and so would
  // act as a better heading for the Cartesian->world_frame transform.
  const bool derive_heading_from_odom = !transform_good_ && use_odometry_yaw_ && !use_manual_datum_;
  if (derive_heading_from_odom)
  {
    // Repackage the odometry orientation as an IMU message and feed it through the
    // regular IMU path; the shared ConstPtr takes ownership of the raw pointer.
    sensor_msgs::Imu *odom_as_imu = new sensor_msgs::Imu();
    odom_as_imu->orientation = msg->pose.pose.orientation;
    odom_as_imu->header.frame_id = msg->child_frame_id;
    odom_as_imu->header.stamp = msg->header.stamp;
    sensor_msgs::ImuConstPtr imu_ptr(odom_as_imu);
    imuCallback(imu_ptr);
  }
}
} // namespace RobotLocalization

View File

@@ -0,0 +1,51 @@
/*
* Copyright (c) 2014, 2015, 2016, Charles River Analytics, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "robot_localization/navsat_transform.h"
#include <ros/ros.h>
/// Entry point for the navsat_transform node: constructs the transformer (which
/// subscribes/advertises in its constructor) and hands control to the ROS loop.
int main(int argc, char **argv)
{
  ros::init(argc, argv, "navsat_transform_node");

  ros::NodeHandle node_handle;
  ros::NodeHandle private_node_handle("~");

  RobotLocalization::NavSatTransform navsat_transform(node_handle, private_node_handle);

  ros::spin();

  return EXIT_SUCCESS;
}

View File

@@ -0,0 +1,63 @@
/*
* Copyright (c) 2017 Simon Gene Gottlieb
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "robot_localization/navsat_transform.h"
#include <nodelet/nodelet.h>
#include <pluginlib/class_list_macros.h>
#include <ros/ros.h>
#include <memory>
namespace RobotLocalization
{
class NavSatTransformNodelet : public nodelet::Nodelet
{
private:
std::unique_ptr<RobotLocalization::NavSatTransform> trans;
public:
virtual void onInit()
{
NODELET_DEBUG("Initializing nodelet...");
ros::NodeHandle nh = getNodeHandle();
ros::NodeHandle nh_priv = getPrivateNodeHandle();
trans = std::make_unique<RobotLocalization::NavSatTransform>(nh, nh_priv);
}
};
} // namespace RobotLocalization
PLUGINLIB_EXPORT_CLASS(RobotLocalization::NavSatTransformNodelet, nodelet::Nodelet);

View File

@@ -0,0 +1,212 @@
/*
* Copyright (c) 2016, TNO IVS Helmond.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "robot_localization/robot_localization_estimator.h"
#include "robot_localization/ekf.h"
#include "robot_localization/ukf.h"
#include <stdexcept>
#include <vector>
namespace RobotLocalization
{
/**
 * @brief Constructs the estimator with a bounded state history and a prediction filter.
 *
 * @param buffer_capacity maximum number of states kept in the history buffer
 * @param filter_type which filter (EKF or UKF) to use for prediction
 * @param process_noise_covariance process noise handed to the filter
 * @param filter_args extra parameters (only used by the UKF)
 * @throws std::invalid_argument if filter_type is neither EKF nor UKF
 */
RobotLocalizationEstimator::RobotLocalizationEstimator(unsigned int buffer_capacity,
                                                       FilterType filter_type,
                                                       const Eigen::MatrixXd& process_noise_covariance,
                                                       const std::vector<double>& filter_args)
{
  state_buffer_.set_capacity(buffer_capacity);

  // Set up the filter that is used for predictions
  if ( filter_type == FilterTypes::EKF )
  {
    filter_ = new Ekf;
  }
  else if ( filter_type == FilterTypes::UKF )
  {
    filter_ = new Ukf(filter_args);
  }
  else
  {
    // Bug fix: previously an unrecognized filter type left filter_ uninitialized and
    // the setProcessNoiseCovariance() call below dereferenced an indeterminate
    // pointer (undefined behavior). Fail loudly instead.
    throw std::invalid_argument("RobotLocalizationEstimator: unknown filter type requested");
  }

  filter_->setProcessNoiseCovariance(process_noise_covariance);
}
// Destructor: the estimator owns the raw filter pointer allocated in the
// constructor, so release it here.
RobotLocalizationEstimator::~RobotLocalizationEstimator()
{
delete filter_;
}
/**
 * @brief Adds a state to the history buffer, keeping the buffer sorted by time stamp.
 *
 * A state with a stamp equal to the newest buffered stamp is silently dropped.
 */
void RobotLocalizationEstimator::setState(const EstimatorState& state)
{
  // Common case: the new state is the most recent one; append it.
  if ( state_buffer_.empty() || state_buffer_.back().time_stamp < state.time_stamp )
  {
    state_buffer_.push_back(state);
    return;
  }

  // Out-of-order arrival: insert before the first buffered state with a later stamp.
  for ( boost::circular_buffer<EstimatorState>::iterator it = state_buffer_.begin();
        it != state_buffer_.end(); ++it )
  {
    if ( state.time_stamp < it->time_stamp )
    {
      state_buffer_.insert(it, state);
      return;
    }
  }
  // No later state found: a state with this stamp already closes the buffer; drop it.
}
/**
 * @brief Returns the estimated state at the requested time.
 *
 * Searches the history buffer from newest to oldest. If an exact stamp match is
 * found it is returned; otherwise the state is interpolated between neighbors or
 * extrapolated past either end of the buffer using the prediction filter.
 *
 * @param time requested time stamp (seconds)
 * @param[out] state estimate at the requested time
 * @return how the result was obtained (Exact, Interpolation, Extrapolation*, ...)
 */
EstimatorResult RobotLocalizationEstimator::getState(const double time,
EstimatorState& state) const
{
// If there's nothing in the buffer, there's nothing to give.
if ( state_buffer_.size() == 0 )
{
return EstimatorResults::EmptyBuffer;
}
// Set state to the most recent one for now
state = state_buffer_.back();
// Go through buffer from new to old
EstimatorState last_state_before_time = state_buffer_.front();
EstimatorState next_state_after_time = state_buffer_.back();
bool previous_state_found = false;
bool next_state_found = false;
for (boost::circular_buffer<EstimatorState>::const_reverse_iterator it = state_buffer_.rbegin();
it != state_buffer_.rend(); ++it)
{
/* If the time stamp of the current state from the buffer is
* older than the requested time, store it as the last state
* before the requested time. If it is younger, save it as the
* next one after, and go on to find the last one before.
*/
if ( it->time_stamp == time )
{
state = *it;
return EstimatorResults::Exact;
}
else if ( it->time_stamp <= time )
{
// First (i.e., newest) state at or before the requested time; search done.
last_state_before_time = *it;
previous_state_found = true;
break;
}
else
{
// Still newer than the requested time; remember it as the closest later state.
next_state_after_time = *it;
next_state_found = true;
}
}
// If we found a previous state and a next state, we can do interpolation
if ( previous_state_found && next_state_found )
{
interpolate(last_state_before_time, next_state_after_time, time, state);
return EstimatorResults::Interpolation;
}
// If only a previous state is found, we can do extrapolation into the future
else if ( previous_state_found )
{
extrapolate(last_state_before_time, time, state);
return EstimatorResults::ExtrapolationIntoFuture;
}
// If only a next state is found, we'll have to extrapolate into the past.
else if ( next_state_found )
{
extrapolate(next_state_after_time, time, state);
return EstimatorResults::ExtrapolationIntoPast;
}
else
{
return EstimatorResults::Failed;
}
}
// Sets the maximum number of states kept in the history buffer. If the new
// capacity is smaller than the current size, boost::circular_buffer discards the
// excess elements (see its set_capacity documentation).
void RobotLocalizationEstimator::setBufferCapacity(const int capacity)
{
state_buffer_.set_capacity(capacity);
}
// Removes all states from the history buffer; the buffer capacity is unchanged.
void RobotLocalizationEstimator::clearBuffer()
{
state_buffer_.clear();
}
// Returns the maximum number of states the history buffer can hold.
unsigned int RobotLocalizationEstimator::getBufferCapacity() const
{
return state_buffer_.capacity();
}
// Returns the number of states currently stored in the history buffer.
unsigned int RobotLocalizationEstimator::getSize() const
{
return state_buffer_.size();
}
/**
 * @brief Predicts the state at requested_time by running the filter forward (or
 *        backward) from the given boundary state.
 *
 * @param boundary_state state to start the prediction from
 * @param requested_time time to predict to (seconds)
 * @param[out] state_at_req_time predicted state and covariance
 */
void RobotLocalizationEstimator::extrapolate(const EstimatorState& boundary_state,
                                             const double requested_time,
                                             EstimatorState& state_at_req_time) const
{
  // Seed the prediction filter with the boundary state and its covariance.
  filter_->setState(boundary_state.state);
  filter_->setEstimateErrorCovariance(boundary_state.covariance);

  // Predict over the interval between the boundary state and the requested time
  // (negative when extrapolating into the past).
  const double dt = requested_time - boundary_state.time_stamp;
  filter_->predict(boundary_state.time_stamp, dt);

  // Read back the predicted state.
  state_at_req_time.time_stamp = requested_time;
  state_at_req_time.state = filter_->getState();
  state_at_req_time.covariance = filter_->getEstimateErrorCovariance();
}
/**
 * @brief Computes the state at requested_time, which lies between the stamps of
 *        the two given states.
 *
 * Currently only extrapolates forward from given_state_1; given_state_2 is
 * accepted for the eventual true interpolation (see TODO) but is unused.
 */
void RobotLocalizationEstimator::interpolate(const EstimatorState& given_state_1,
const EstimatorState& given_state_2,
const double requested_time,
EstimatorState& state_at_req_time) const
{
/*
* TODO: Right now, we only extrapolate from the last known state before the requested time. But as the state after
* the requested time is also known, we may want to perform interpolation between states.
*/
extrapolate(given_state_1, requested_time, state_at_req_time);
return;
}
} // namespace RobotLocalization

View File

@@ -0,0 +1,98 @@
/*
* Copyright (c) 2016, TNO IVS Helmond.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "robot_localization/ros_robot_localization_listener.h"
#include "robot_localization/GetState.h"
#include <string>
namespace RobotLocalization
{
class RobotLocalizationListenerNode
{
public:
RobotLocalizationListenerNode()
{
service_ = n_.advertiseService("get_state", &RobotLocalizationListenerNode::getStateCallback, this);
}
std::string getService()
{
return service_.getService();
}
private:
RosRobotLocalizationListener rll_;
ros::NodeHandle n_;
ros::ServiceServer service_;
bool getStateCallback(robot_localization::GetState::Request &req,
robot_localization::GetState::Response &res)
{
Eigen::VectorXd state(STATE_SIZE);
Eigen::MatrixXd covariance(STATE_SIZE, STATE_SIZE);
if ( !rll_.getState(req.time_stamp, req.frame_id, state, covariance) )
{
ROS_ERROR("Robot Localization Listener Node: Listener instance returned false at getState call.");
return false;
}
for (size_t i = 0; i < STATE_SIZE; i++)
{
res.state[i] = (*(state.data() + i));
}
for (size_t i = 0; i < STATE_SIZE * STATE_SIZE; i++)
{
res.covariance[i] = (*(covariance.data() + i));
}
ROS_DEBUG("Robot Localization Listener Node: Listener responded with state and covariance at the requested time.");
return true;
}
};
} // namespace RobotLocalization
int main(int argc, char **argv)
{
ros::init(argc, argv, "robot_localization_listener_node");
RobotLocalization::RobotLocalizationListenerNode rlln;
ROS_INFO_STREAM("Robot Localization Listener Node: Ready to handle GetState requests at " << rlln.getService());
ros::spin();
return 0;
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,195 @@
/*
* Copyright (c) 2014, 2015, 2016, Charles River Analytics, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "robot_localization/ros_filter_utilities.h"
#include "robot_localization/filter_common.h"
#include <tf2_geometry_msgs/tf2_geometry_msgs.h>
#include <ros/console.h>
#include <string>
#include <vector>
// Streams a tf2 vector as "(x y z)" with high precision, newline-terminated.
std::ostream& operator<<(std::ostream& os, const tf2::Vector3 &vec)
{
  os << "(" << std::setprecision(20)
     << vec.getX() << " "
     << vec.getY() << " "
     << vec.getZ() << ")\n";
  return os;
}
// Streams a tf2 quaternion as fixed-axis "(roll, pitch, yaw)" for readability.
std::ostream& operator<<(std::ostream& os, const tf2::Quaternion &quat)
{
  double roll;
  double pitch;
  double yaw;
  tf2::Matrix3x3(quat).getRPY(roll, pitch, yaw);
  os << "(" << std::setprecision(20) << roll << ", " << pitch << ", " << yaw << ")\n";
  return os;
}
// Streams a tf2 transform by delegating to the Vector3 and Quaternion printers.
std::ostream& operator<<(std::ostream& os, const tf2::Transform &trans)
{
  os << "Origin: " << trans.getOrigin()
     << "Rotation (RPY): " << trans.getRotation();
  return os;
}
// Streams a vector of doubles as "(v0 v1 ... )" with high precision,
// newline-terminated. An empty vector prints as "()".
std::ostream& operator<<(std::ostream& os, const std::vector<double> &vec)
{
  os << "(" << std::setprecision(20);
  for (const double value : vec)
  {
    os << value << " ";
  }
  os << ")\n";
  return os;
}
namespace RobotLocalization
{
namespace RosFilterUtilities
{
// Extracts the fixed-axis yaw angle (radians) from a quaternion.
double getYaw(const tf2::Quaternion quat)
{
  double roll;
  double pitch;
  double yaw;
  tf2::Matrix3x3(quat).getRPY(roll, pitch, yaw);
  return yaw;
}
/**
 * @brief Looks up the transform from sourceFrame to targetFrame at the given time,
 *        falling back to the latest available transform if the timed lookup fails.
 *
 * @param buffer tf2 buffer to query
 * @param targetFrame frame to transform into
 * @param sourceFrame frame to transform from
 * @param time requested transform time
 * @param timeout how long the timed lookup may block
 * @param[out] targetFrameTrans the resulting transform (identity if target == source
 *             and no transform could be found)
 * @param silent suppress warning output on failure/fallback
 * @return true if targetFrameTrans holds a usable transform
 */
bool lookupTransformSafe(const tf2_ros::Buffer &buffer,
const std::string &targetFrame,
const std::string &sourceFrame,
const ros::Time &time,
const ros::Duration &timeout,
tf2::Transform &targetFrameTrans,
const bool silent)
{
bool retVal = true;
// First try to transform the data at the requested time
try
{
tf2::fromMsg(buffer.lookupTransform(targetFrame, sourceFrame, time, timeout).transform,
targetFrameTrans);
}
catch (tf2::TransformException &ex)
{
// The issue might be that the transforms that are available are not close
// enough temporally to be used. In that case, just use the latest available
// transform and warn the user.
try
{
// ros::Time(0) means "latest available" to tf2.
tf2::fromMsg(buffer.lookupTransform(targetFrame, sourceFrame, ros::Time(0)).transform,
targetFrameTrans);
if (!silent)
{
ROS_WARN_STREAM_THROTTLE(2.0, "Transform from " << sourceFrame << " to " << targetFrame <<
" was unavailable for the time requested. Using latest instead.\n");
}
}
catch(tf2::TransformException &ex)
{
// Note: this inner ex intentionally shadows the outer one; we report the
// latest-lookup failure, which is the more informative error here.
if (!silent)
{
ROS_WARN_STREAM_THROTTLE(2.0, "Could not obtain transform from " << sourceFrame <<
" to " << targetFrame << ". Error was " << ex.what() << "\n");
}
retVal = false;
}
}
// Transforming from a frame id to itself can fail when the tf tree isn't
// being broadcast (e.g., for some bag files). This is the only failure that
// would throw an exception, so check for this situation before giving up.
if (!retVal)
{
if (targetFrame == sourceFrame)
{
targetFrameTrans.setIdentity();
retVal = true;
}
}
return retVal;
}
// Convenience overload: same as the full lookupTransformSafe, but with a zero
// timeout (non-blocking lookup).
bool lookupTransformSafe(const tf2_ros::Buffer &buffer,
const std::string &targetFrame,
const std::string &sourceFrame,
const ros::Time &time,
tf2::Transform &targetFrameTrans,
const bool silent)
{
return lookupTransformSafe(buffer, targetFrame, sourceFrame, time, ros::Duration(0), targetFrameTrans, silent);
}
// Converts a quaternion to fixed-axis roll/pitch/yaw angles (radians).
void quatToRPY(const tf2::Quaternion &quat, double &roll, double &pitch, double &yaw)
{
tf2::Matrix3x3 orTmp(quat);
orTmp.getRPY(roll, pitch, yaw);
}
// Packs the pose portion (position + RPY orientation) of a filter state vector
// into a tf2 transform.
void stateToTF(const Eigen::VectorXd &state, tf2::Transform &stateTF)
{
  // Orientation: build a quaternion from the state's fixed-axis RPY angles.
  tf2::Quaternion orientation;
  orientation.setRPY(state(StateMemberRoll),
                     state(StateMemberPitch),
                     state(StateMemberYaw));
  stateTF.setRotation(orientation);

  // Position: copy the translational components.
  stateTF.setOrigin(tf2::Vector3(state(StateMemberX),
                                 state(StateMemberY),
                                 state(StateMemberZ)));
}
// Unpacks a tf2 transform into the pose portion (position + RPY orientation) of
// a filter state vector. Velocity/acceleration entries are left untouched.
void TFtoState(const tf2::Transform &stateTF, Eigen::VectorXd &state)
{
  const tf2::Vector3 origin = stateTF.getOrigin();
  state(StateMemberX) = origin.getX();
  state(StateMemberY) = origin.getY();
  state(StateMemberZ) = origin.getZ();
  quatToRPY(stateTF.getRotation(), state(StateMemberRoll), state(StateMemberPitch), state(StateMemberYaw));
}
} // namespace RosFilterUtilities
} // namespace RobotLocalization

View File

@@ -0,0 +1,539 @@
/*
* Copyright (c) 2016, TNO IVS Helmond.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "robot_localization/ros_robot_localization_listener.h"
#include "robot_localization/ros_filter_utilities.h"
#include <map>
#include <string>
#include <vector>
#include <Eigen/Dense>
#include <tf2/LinearMath/Matrix3x3.h>
#include <tf2/LinearMath/Quaternion.h>
#include <tf2_geometry_msgs/tf2_geometry_msgs.h>
#include <yaml-cpp/yaml.h>
#include <eigen_conversions/eigen_msg.h>
#include <XmlRpcException.h>
namespace RobotLocalization
{
// Maps the configuration string ("ekf"/"ukf") onto the filter type enum;
// anything unrecognized maps to NotDefined so the caller can reject it.
FilterType filterTypeFromString(const std::string& filter_type_str)
{
  if ( filter_type_str == "ekf" )
  {
    return FilterTypes::EKF;
  }
  if ( filter_type_str == "ukf" )
  {
    return FilterTypes::UKF;
  }
  return FilterTypes::NotDefined;
}
/**
 * @brief Constructs the listener: reads parameters, builds the internal estimator,
 *        subscribes to synchronized odometry/acceleration topics, and blocks until
 *        the first message pair fixes the base/world frame ids.
 *
 * Parameters (under ~/robot_localization, or ~parameter_namespace if set):
 *   buffer_size, filter_type ("ekf"/"ukf"), process_noise_covariance, filter_args.
 */
RosRobotLocalizationListener::RosRobotLocalizationListener():
nh_p_("robot_localization"),
odom_sub_(nh_, "odometry/filtered", 1),
accel_sub_(nh_, "accel/filtered", 1),
sync_(odom_sub_, accel_sub_, 10),
base_frame_id_(""),
world_frame_id_(""),
tf_listener_(tf_buffer_)
{
int buffer_size;
nh_p_.param("buffer_size", buffer_size, 10);
// Parameters may live in a different namespace than this node's.
std::string param_ns;
nh_p_.param("parameter_namespace", param_ns, nh_p_.getNamespace());
ros::NodeHandle nh_param(param_ns);
std::string filter_type_str;
nh_param.param("filter_type", filter_type_str, std::string("ekf"));
FilterType filter_type = filterTypeFromString(filter_type_str);
if ( filter_type == FilterTypes::NotDefined )
{
// Bail out early; estimator_ is never created in this case.
ROS_ERROR("RosRobotLocalizationListener: Parameter filter_type invalid");
return;
}
// Load up the process noise covariance (from the launch file/parameter server)
// TODO(reinzor): this code is copied from ros_filter. In a refactor, this could be moved to a function in
// ros_filter_utilities
Eigen::MatrixXd process_noise_covariance(STATE_SIZE, STATE_SIZE);
process_noise_covariance.setZero();
XmlRpc::XmlRpcValue process_noise_covar_config;
if (!nh_param.hasParam("process_noise_covariance"))
{
ROS_FATAL_STREAM("Process noise covariance not found in the robot localization listener config (namespace " <<
nh_param.getNamespace() << ")! Use the ~parameter_namespace to specify the parameter namespace.");
}
else
{
try
{
nh_param.getParam("process_noise_covariance", process_noise_covar_config);
ROS_ASSERT(process_noise_covar_config.getType() == XmlRpc::XmlRpcValue::TypeArray);
int mat_size = process_noise_covariance.rows();
for (int i = 0; i < mat_size; i++)
{
for (int j = 0; j < mat_size; j++)
{
try
{
// These matrices can cause problems if all the types
// aren't specified with decimal points. Handle that
// using string streams.
std::ostringstream ostr;
process_noise_covar_config[mat_size * i + j].write(ostr);
std::istringstream istr(ostr.str());
istr >> process_noise_covariance(i, j);
}
// NOTE(review): both inner handlers just rethrow to the outer catch below;
// this try/catch is redundant and could be removed.
catch(XmlRpc::XmlRpcException &e)
{
throw e;
}
catch(...)
{
throw;
}
}
}
ROS_DEBUG_STREAM("Process noise covariance is:\n" << process_noise_covariance << "\n");
}
catch (XmlRpc::XmlRpcException &e)
{
ROS_ERROR_STREAM("ERROR reading robot localization listener config: " <<
e.getMessage() <<
" for process_noise_covariance (type: " <<
process_noise_covar_config.getType() << ")");
}
}
std::vector<double> filter_args;
nh_param.param("filter_args", filter_args, std::vector<double>());
// Owned raw pointer; released in the destructor.
estimator_ = new RobotLocalizationEstimator(buffer_size, filter_type, process_noise_covariance, filter_args);
sync_.registerCallback(&RosRobotLocalizationListener::odomAndAccelCallback, this);
ROS_INFO_STREAM("Ros Robot Localization Listener: Listening to topics " <<
odom_sub_.getTopic() << " and " << accel_sub_.getTopic());
// Wait until the base and world frames are set by the incoming messages.
// NOTE(review): this blocks the constructor (spinning manually) until the first
// synchronized message pair arrives or ROS shuts down.
while (ros::ok() && base_frame_id_.empty())
{
ros::spinOnce();
ROS_INFO_STREAM_THROTTLE(1.0, "Ros Robot Localization Listener: Waiting for incoming messages on topics " <<
odom_sub_.getTopic() << " and " << accel_sub_.getTopic());
ros::Duration(0.1).sleep();
}
}
// Destructor: releases the estimator allocated in the constructor. Safe even if
// construction bailed out early (deleting a null/uninitialized pointer is the
// header's concern; the member is expected to be null-initialized there).
RosRobotLocalizationListener::~RosRobotLocalizationListener()
{
delete estimator_;
}
/**
 * @brief Synchronized odometry/acceleration callback: packs both messages into an
 *        EstimatorState and pushes it into the estimator's history buffer.
 *
 * Also latches the base and world frame ids from the first odometry message.
 */
void RosRobotLocalizationListener::odomAndAccelCallback(const nav_msgs::Odometry& odom,
const geometry_msgs::AccelWithCovarianceStamped& accel)
{
// Instantiate a state that can be added to the robot localization estimator
EstimatorState state;
// Set its time stamp and the state received from the messages
state.time_stamp = odom.header.stamp.toSec();
// Get the base frame id from the odom message
if ( base_frame_id_.empty() )
base_frame_id_ = odom.child_frame_id;
// Get the world frame id from the odom message
if ( world_frame_id_.empty() )
world_frame_id_ = odom.header.frame_id;
// Pose: Position
state.state(StateMemberX) = odom.pose.pose.position.x;
state.state(StateMemberY) = odom.pose.pose.position.y;
state.state(StateMemberZ) = odom.pose.pose.position.z;
// Pose: Orientation (converted to fixed-axis RPY for the state vector)
tf2::Quaternion orientation_quat;
tf2::fromMsg(odom.pose.pose.orientation, orientation_quat);
double roll, pitch, yaw;
RosFilterUtilities::quatToRPY(orientation_quat, roll, pitch, yaw);
state.state(StateMemberRoll) = roll;
state.state(StateMemberPitch) = pitch;
state.state(StateMemberYaw) = yaw;
// Pose: Covariance (6x6 block starting at POSITION_OFFSET)
for ( unsigned int i = 0; i < POSE_SIZE; i++ )
{
for ( unsigned int j = 0; j < POSE_SIZE; j++ )
{
state.covariance(POSITION_OFFSET + i, POSITION_OFFSET + j) = odom.pose.covariance[i*POSE_SIZE + j];
}
}
// Velocity: Linear
state.state(StateMemberVx) = odom.twist.twist.linear.x;
state.state(StateMemberVy) = odom.twist.twist.linear.y;
state.state(StateMemberVz) = odom.twist.twist.linear.z;
// Velocity: Angular
state.state(StateMemberVroll) = odom.twist.twist.angular.x;
state.state(StateMemberVpitch) = odom.twist.twist.angular.y;
state.state(StateMemberVyaw) = odom.twist.twist.angular.z;
// Velocity: Covariance (6x6 block starting at POSITION_V_OFFSET)
for ( unsigned int i = 0; i < TWIST_SIZE; i++ )
{
for ( unsigned int j = 0; j < TWIST_SIZE; j++ )
{
state.covariance(POSITION_V_OFFSET + i, POSITION_V_OFFSET + j) = odom.twist.covariance[i*TWIST_SIZE + j];
}
}
// Acceleration: Linear
state.state(StateMemberAx) = accel.accel.accel.linear.x;
state.state(StateMemberAy) = accel.accel.accel.linear.y;
state.state(StateMemberAz) = accel.accel.accel.linear.z;
// Acceleration: Angular is not available in state
// Acceleration: Covariance. The accel message covariance is a 6x6 array, so the
// row stride is TWIST_SIZE (6) while the loop bounds (ACCELERATION_SIZE) select
// only the top-left 3x3 linear-acceleration block.
for ( unsigned int i = 0; i < ACCELERATION_SIZE; i++ )
{
for ( unsigned int j = 0; j < ACCELERATION_SIZE; j++ )
{
state.covariance(POSITION_A_OFFSET + i, POSITION_A_OFFSET + j) = accel.accel.covariance[i*TWIST_SIZE + j];
}
}
// Add the state to the buffer, so that we can later interpolate between this and earlier states
estimator_->setState(state);
return;
}
// Returns true if target_frame is an ancestor of (or equal to) source_frame in the
// YAML frame tree. Walks up the parent chain until the target is hit or a frame
// without a parent is reached. Equivalent to the original tail recursion.
bool findAncestorRecursiveYAML(YAML::Node& tree, const std::string& source_frame, const std::string& target_frame)
{
  std::string frame = source_frame;
  while ( frame != target_frame )
  {
    std::string parent_frame = tree[frame]["parent"].Scalar();
    if ( parent_frame.empty() )
    {
      return false;
    }
    frame = parent_frame;
  }
  return true;
}
// Ancestor/descendant lookup caches for findAncestor(), keyed by source frame id.
// Assumes the TF tree's parent/child relationships do not change over time.
// NOTE(review): file-scope mutable state — not safe under concurrent callbacks;
// confirm single-threaded spinner usage.
static std::map<std::string, std::vector<std::string> > ancestor_map;
static std::map<std::string, std::vector<std::string> > descendant_map;
// Returns true if target_frame is an ancestor of source_frame in the TF tree.
// Positive ancestor/descendant findings are cached; a cache miss parses the
// buffer's frame tree (YAML) and walks the parent chains.
bool findAncestor(const tf2_ros::Buffer& buffer, const std::string& source_frame, const std::string& target_frame)
{
  // Consult the caches first. operator[] default-constructs an empty entry on
  // first use, which is harmless.
  const std::vector<std::string>& known_ancestors = ancestor_map[source_frame];
  if (std::find(known_ancestors.begin(), known_ancestors.end(), target_frame) != known_ancestors.end())
  {
    return true;
  }
  const std::vector<std::string>& known_descendants = descendant_map[source_frame];
  if (std::find(known_descendants.begin(), known_descendants.end(), target_frame) != known_descendants.end())
  {
    return false;
  }

  // Cache miss: parse the frame tree and check both directions.
  std::stringstream frames_stream(buffer.allFramesAsYAML());
  YAML::Node frames_yaml = YAML::Load(frames_stream);
  const bool is_ancestor = findAncestorRecursiveYAML(frames_yaml, source_frame, target_frame);
  const bool is_descendant = findAncestorRecursiveYAML(frames_yaml, target_frame, source_frame);

  // Record positive findings for future calls.
  if (is_ancestor)
  {
    ancestor_map[source_frame].push_back(target_frame);
  }
  if (is_descendant)
  {
    descendant_map[source_frame].push_back(target_frame);
  }
  return is_ancestor;
}
/**
 * Computes the state (pose and twist) of frame_id at the given time,
 * expressed with respect to world_frame_id, derived from the estimator's
 * state of the base frame. The pose covariance is rotated into the target
 * frame as well.
 *
 * @param time           requested time stamp, in seconds
 * @param frame_id       frame whose state is requested; must be a child of the base frame
 * @param state          output: STATE_SIZE state vector (acceleration entries are left zero)
 * @param covariance     output: STATE_SIZE x STATE_SIZE matrix; only the pose block is filled
 * @param world_frame_id frame in which to express the result; empty selects the estimator's world frame
 * @return true on success; false when frame ids are unset/invalid or a TF lookup fails
 */
bool RosRobotLocalizationListener::getState(const double time,
                                            const std::string& frame_id,
                                            Eigen::VectorXd& state,
                                            Eigen::MatrixXd& covariance,
                                            std::string world_frame_id) const
{
  EstimatorState estimator_state;
  // Zero the outputs up front so members never written below (e.g. the
  // acceleration entries) are well-defined.
  state.resize(STATE_SIZE);
  state.setZero();
  covariance.resize(STATE_SIZE, STATE_SIZE);
  covariance.setZero();
  // Without base/world frame ids we cannot interpret any request.
  if ( base_frame_id_.empty() || world_frame_id_.empty() )
  {
    if ( estimator_->getSize() == 0 )
    {
      ROS_WARN("Ros Robot Localization Listener: The base or world frame id is not set. "
               "No odom/accel messages have come in.");
    }
    else
    {
      ROS_ERROR("Ros Robot Localization Listener: The base or world frame id is not set. "
                "Are the child_frame_id and the header.frame_id in the odom messages set?");
    }
    return false;
  }
  // Query the estimator buffer; warn (but continue) when the requested time
  // predates everything in the buffer, i.e. the result is extrapolated.
  if ( estimator_->getState(time, estimator_state) == EstimatorResults::ExtrapolationIntoPast )
  {
    ROS_WARN("Ros Robot Localization Listener: A state is requested at a time stamp older than the oldest in the "
             "estimator buffer. The result is an extrapolation into the past. Maybe you should increase the buffer "
             "size?");
  }
  // If no world_frame_id is specified, we will default to the world frame_id of the received odometry message
  if (world_frame_id.empty())
  {
    world_frame_id = world_frame_id_;
  }
  if ( frame_id == base_frame_id_ && world_frame_id == world_frame_id_ )
  {
    // If the state of the base frame is requested and the world frame equals the world frame of the robot_localization
    // estimator, we can simply return the state we got from the state estimator
    state = estimator_state.state;
    covariance = estimator_state.covariance;
    return true;
  }
  // - - - - - - - - - - - - - - - - - -
  // Get the transformation between the requested world frame and the world_frame of the estimator
  // - - - - - - - - - - - - - - - - - -
  Eigen::Affine3d world_pose_requested_frame;
  // If the requested frame is the same as the tracker, set to identity
  if (world_frame_id == world_frame_id_)
  {
    world_pose_requested_frame.setIdentity();
  }
  else
  {
    geometry_msgs::TransformStamped world_requested_to_world_transform;
    try
    {
      // TODO(reinzor): magic number
      world_requested_to_world_transform = tf_buffer_.lookupTransform(world_frame_id,
                                                                      world_frame_id_,
                                                                      ros::Time(time),
                                                                      ros::Duration(0.1));
      // A world frame that hangs below the robot base in the TF tree would
      // move with the robot, so rejecting the request is the only sane option.
      if ( findAncestor(tf_buffer_, world_frame_id, base_frame_id_) )
      {
        ROS_ERROR_STREAM("You are trying to get the state with respect to world frame " << world_frame_id <<
                         ", but this frame is a child of robot base frame " << base_frame_id_ <<
                         ", so this doesn't make sense.");
        return false;
      }
    }
    catch ( const tf2::TransformException &e )
    {
      ROS_WARN_STREAM("Ros Robot Localization Listener: Could not look up transform: " << e.what());
      return false;
    }
    // Convert to pose
    tf::transformMsgToEigen(world_requested_to_world_transform.transform, world_pose_requested_frame);
  }
  // - - - - - - - - - - - - - - - - - -
  // Calculate the state of the requested frame from the state of the base frame.
  // - - - - - - - - - - - - - - - - - -
  // First get the transform from base to target
  geometry_msgs::TransformStamped base_to_target_transform;
  try
  {
    base_to_target_transform = tf_buffer_.lookupTransform(base_frame_id_,
                                                          frame_id,
                                                          ros::Time(time),
                                                          ros::Duration(0.1));  // TODO(reinzor): magic number
    // Check that frame_id is a child of the base frame. If it is not, it does not make sense to request its state.
    // Do this after tf lookup, so we know that there is a connection.
    if ( !findAncestor(tf_buffer_, frame_id, base_frame_id_) )
    {
      ROS_ERROR_STREAM("You are trying to get the state of " << frame_id << ", but this frame is not a child of the "
                       "base frame: " << base_frame_id_ << ".");
      return false;
    }
  }
  catch ( const tf2::TransformException &e )
  {
    ROS_WARN_STREAM("Ros Robot Localization Listener: Could not look up transform: " << e.what());
    return false;
  }
  // And convert it to an eigen Affine transformation
  Eigen::Affine3d target_pose_base;
  tf::transformMsgToEigen(base_to_target_transform.transform, target_pose_base);
  // Then convert the base pose to an Eigen Affine transformation
  Eigen::Vector3d base_position(estimator_state.state(StateMemberX),
                                estimator_state.state(StateMemberY),
                                estimator_state.state(StateMemberZ));
  // Build the orientation as yaw * pitch * roll (intrinsic ZYX convention).
  Eigen::AngleAxisd roll_angle(estimator_state.state(StateMemberRoll), Eigen::Vector3d::UnitX());
  Eigen::AngleAxisd pitch_angle(estimator_state.state(StateMemberPitch), Eigen::Vector3d::UnitY());
  Eigen::AngleAxisd yaw_angle(estimator_state.state(StateMemberYaw), Eigen::Vector3d::UnitZ());
  Eigen::Quaterniond base_orientation(yaw_angle * pitch_angle * roll_angle);
  Eigen::Affine3d base_pose(Eigen::Translation3d(base_position) * base_orientation);
  // Now we can calculate the transform from odom to the requested frame (target)...
  Eigen::Affine3d target_pose_odom = world_pose_requested_frame * base_pose * target_pose_base;
  // ... and put it in the output state
  state(StateMemberX) = target_pose_odom.translation().x();
  state(StateMemberY) = target_pose_odom.translation().y();
  state(StateMemberZ) = target_pose_odom.translation().z();
  // eulerAngles(2, 1, 0) returns [yaw, pitch, roll]; unpack accordingly.
  Eigen::Vector3d ypr = target_pose_odom.rotation().eulerAngles(2, 1, 0);
  state(StateMemberRoll)  = ypr[2];
  state(StateMemberPitch) = ypr[1];
  state(StateMemberYaw)   = ypr[0];
  // Now let's calculate the twist of the target frame
  // First get the base's twist
  Twist base_velocity;
  Twist target_velocity_base;
  base_velocity.linear = Eigen::Vector3d(estimator_state.state(StateMemberVx),
                                         estimator_state.state(StateMemberVy),
                                         estimator_state.state(StateMemberVz));
  base_velocity.angular = Eigen::Vector3d(estimator_state.state(StateMemberVroll),
                                          estimator_state.state(StateMemberVpitch),
                                          estimator_state.state(StateMemberVyaw));
  // Then calculate the target frame's twist as a result of the base's twist.
  /*
   * We first calculate the coordinates of the velocity vectors (linear and angular) in the base frame. We have to keep
   * in mind that a rotation of the base frame, together with the translational offset of the target frame from the base
   * frame, induces a translational velocity of the target frame.
   */
  target_velocity_base.linear = base_velocity.linear + base_velocity.angular.cross(target_pose_base.translation());
  target_velocity_base.angular = base_velocity.angular;
  // Now we can transform that to the target frame (rotation transpose == inverse rotation)
  Twist target_velocity;
  target_velocity.linear = target_pose_base.rotation().transpose() * target_velocity_base.linear;
  target_velocity.angular = target_pose_base.rotation().transpose() * target_velocity_base.angular;
  state(StateMemberVx) = target_velocity.linear(0);
  state(StateMemberVy) = target_velocity.linear(1);
  state(StateMemberVz) = target_velocity.linear(2);
  state(StateMemberVroll) = target_velocity.angular(0);
  state(StateMemberVpitch) = target_velocity.angular(1);
  state(StateMemberVyaw) = target_velocity.angular(2);
  // Rotate the covariance as well
  Eigen::MatrixXd rot_6d(POSE_SIZE, POSE_SIZE);
  rot_6d.setIdentity();
  rot_6d.block<POSITION_SIZE, POSITION_SIZE>(POSITION_OFFSET, POSITION_OFFSET) = target_pose_base.rotation();
  rot_6d.block<ORIENTATION_SIZE, ORIENTATION_SIZE>(ORIENTATION_OFFSET, ORIENTATION_OFFSET) =
      target_pose_base.rotation();
  // Rotate the covariance
  // NOTE: only the pose (position + orientation) block is transformed here;
  // the twist-related covariance entries stay zero as initialized above.
  covariance.block<POSE_SIZE, POSE_SIZE>(POSITION_OFFSET, POSITION_OFFSET) =
      rot_6d * estimator_state.covariance.block<POSE_SIZE, POSE_SIZE>(POSITION_OFFSET, POSITION_OFFSET) *
      rot_6d.transpose();
  return true;
}
/**
 * Convenience overload taking a ros::Time stamp. A zero stamp is interpreted
 * as "now"; otherwise the stamp is converted to seconds and forwarded to the
 * double-based getState().
 */
bool RosRobotLocalizationListener::getState(const ros::Time& ros_time, const std::string& frame_id,
                                            Eigen::VectorXd& state, Eigen::MatrixXd& covariance,
                                            const std::string& world_frame_id) const
{
  double time = ros_time.toSec();
  if ( ros_time.isZero() )
  {
    ROS_INFO("Ros Robot Localization Listener: State requested at time = zero, returning state at current time");
    time = ros::Time::now().toSec();
  }
  return getState(time, frame_id, state, covariance, world_frame_id);
}
// Returns the frame id of the robot base, as learned from incoming odometry.
const std::string& RosRobotLocalizationListener::getBaseFrameId() const
{
  return base_frame_id_;
}
// Returns the frame id of the estimator's world frame, as learned from incoming odometry.
const std::string& RosRobotLocalizationListener::getWorldFrameId() const
{
  return world_frame_id_;
}
} // namespace RobotLocalization

View File

@@ -0,0 +1,458 @@
/*
* Copyright (c) 2014, 2015, 2016, Charles River Analytics, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "robot_localization/ukf.h"
#include "robot_localization/filter_common.h"
#include <angles/angles.h>
#include <assert.h>
#include <Eigen/Cholesky>
#include <vector>
namespace RobotLocalization
{
/**
 * Constructs the UKF and precomputes the sigma point weights from the three
 * tuning parameters: args = {alpha, kappa, beta}.
 */
Ukf::Ukf(std::vector<double> args) :
  FilterBase(),  // Must initialize filter base!
  uncorrected_(true)
{
  // Exactly three tuning parameters are expected.
  assert(args.size() == 3);
  const double alpha = args[0];
  const double kappa = args[1];
  const double beta = args[2];
  // A UKF uses 2n + 1 sigma points for an n-dimensional state.
  size_t sigmaCount = (STATE_SIZE << 1) + 1;
  sigmaPoints_.resize(sigmaCount, Eigen::VectorXd(STATE_SIZE));
  // Scaling parameter lambda and the weights for state and covariance sums.
  lambda_ = alpha * alpha * (STATE_SIZE + kappa) - STATE_SIZE;
  stateWeights_.resize(sigmaCount);
  covarWeights_.resize(sigmaCount);
  stateWeights_[0] = lambda_ / (STATE_SIZE + lambda_);
  covarWeights_[0] = stateWeights_[0] + (1 - (alpha * alpha) + beta);
  sigmaPoints_[0].setZero();
  // All non-central sigma points share one weight; compute it once.
  const double sharedWeight = 1 / (2 * (STATE_SIZE + lambda_));
  for (size_t i = 1; i < sigmaCount; ++i)
  {
    sigmaPoints_[i].setZero();
    stateWeights_[i] = sharedWeight;
    covarWeights_[i] = sharedWeight;
  }
}
// Nothing to release explicitly: all members manage their own storage.
Ukf::~Ukf()
{
}
/**
 * UKF correction step: fuses one measurement into the current state estimate.
 * Builds subset vectors/matrices covering only the measured state members,
 * pushes the sigma points through the (selector) measurement function,
 * forms the predicted measurement covariance and the state/measurement
 * cross covariance, and applies the standard UKF update, guarded by a
 * Mahalanobis-distance outlier check. Euler-angle members are averaged
 * and differenced on the circle to avoid wrap-around artifacts.
 */
void Ukf::correct(const Measurement &measurement)
{
  FB_DEBUG("---------------------- Ukf::correct ----------------------\n" <<
           "State is:\n" << state_ <<
           "\nMeasurement is:\n" << measurement.measurement_ <<
           "\nMeasurement covariance is:\n" << measurement.covariance_ << "\n");
  // In our implementation, it may be that after we call predict once, we call correct
  // several times in succession (multiple measurements with different time stamps). In
  // that event, the sigma points need to be updated to reflect the current state.
  // Throughout prediction and correction, we attempt to maximize efficiency in Eigen.
  if (!uncorrected_)
  {
    generateSigmaPoints();
  }
  // We don't want to update everything, so we need to build matrices that only update
  // the measured parts of our state vector
  // First, determine how many state vector values we're updating
  std::vector<size_t> updateIndices;
  for (size_t i = 0; i < measurement.updateVector_.size(); ++i)
  {
    if (measurement.updateVector_[i])
    {
      // Handle nan and inf values in measurements
      if (std::isnan(measurement.measurement_(i)))
      {
        FB_DEBUG("Value at index " << i << " was nan. Excluding from update.\n");
      }
      else if (std::isinf(measurement.measurement_(i)))
      {
        FB_DEBUG("Value at index " << i << " was inf. Excluding from update.\n");
      }
      else
      {
        updateIndices.push_back(i);
      }
    }
  }
  FB_DEBUG("Update indices are:\n" << updateIndices << "\n");
  size_t updateSize = updateIndices.size();
  // Now set up the relevant matrices
  Eigen::VectorXd stateSubset(updateSize);                              // x (in most literature)
  Eigen::VectorXd measurementSubset(updateSize);                        // z
  Eigen::MatrixXd measurementCovarianceSubset(updateSize, updateSize);  // R
  Eigen::MatrixXd stateToMeasurementSubset(updateSize, STATE_SIZE);     // H
  Eigen::MatrixXd kalmanGainSubset(STATE_SIZE, updateSize);             // K
  Eigen::VectorXd innovationSubset(updateSize);                         // z - Hx
  Eigen::VectorXd predictedMeasurement(updateSize);
  Eigen::VectorXd sigmaDiff(updateSize);
  Eigen::MatrixXd predictedMeasCovar(updateSize, updateSize);
  Eigen::MatrixXd crossCovar(STATE_SIZE, updateSize);
  std::vector<Eigen::VectorXd> sigmaPointMeasurements(sigmaPoints_.size(), Eigen::VectorXd(updateSize));
  stateSubset.setZero();
  measurementSubset.setZero();
  measurementCovarianceSubset.setZero();
  stateToMeasurementSubset.setZero();
  kalmanGainSubset.setZero();
  innovationSubset.setZero();
  predictedMeasurement.setZero();
  predictedMeasCovar.setZero();
  crossCovar.setZero();
  // Now build the sub-matrices from the full-sized matrices
  for (size_t i = 0; i < updateSize; ++i)
  {
    measurementSubset(i) = measurement.measurement_(updateIndices[i]);
    stateSubset(i) = state_(updateIndices[i]);
    for (size_t j = 0; j < updateSize; ++j)
    {
      measurementCovarianceSubset(i, j) = measurement.covariance_(updateIndices[i], updateIndices[j]);
    }
    // Handle negative (read: bad) covariances in the measurement. Rather
    // than exclude the measurement or make up a covariance, just take
    // the absolute value.
    if (measurementCovarianceSubset(i, i) < 0)
    {
      FB_DEBUG("WARNING: Negative covariance for index " << i <<
               " of measurement (value is" << measurementCovarianceSubset(i, i) <<
               "). Using absolute value...\n");
      measurementCovarianceSubset(i, i) = ::fabs(measurementCovarianceSubset(i, i));
    }
    // If the measurement variance for a given variable is very
    // near 0 (as in e-50 or so) and the variance for that
    // variable in the covariance matrix is also near zero, then
    // the Kalman gain computation will blow up. Really, no
    // measurement can be completely without error, so add a small
    // amount in that case.
    if (measurementCovarianceSubset(i, i) < 1e-9)
    {
      measurementCovarianceSubset(i, i) = 1e-9;
      FB_DEBUG("WARNING: measurement had very small error covariance for index " <<
               updateIndices[i] <<
               ". Adding some noise to maintain filter stability.\n");
    }
  }
  // The state-to-measurement function, h, will now be a measurement_size x full_state_size
  // matrix, with ones in the (i, i) locations of the values to be updated
  for (size_t i = 0; i < updateSize; ++i)
  {
    stateToMeasurementSubset(i, updateIndices[i]) = 1;
  }
  FB_DEBUG("Current state subset is:\n" << stateSubset <<
           "\nMeasurement subset is:\n" << measurementSubset <<
           "\nMeasurement covariance subset is:\n" << measurementCovarianceSubset <<
           "\nState-to-measurement subset is:\n" << stateToMeasurementSubset << "\n");
  // Accumulators for the weighted circular means of the roll/pitch/yaw
  // measurement components (unit-vector sums, recombined via atan2 below).
  double roll_sum_x {};
  double roll_sum_y {};
  double pitch_sum_x {};
  double pitch_sum_y {};
  double yaw_sum_x {};
  double yaw_sum_y {};
  // (1) Generate sigma points, use them to generate a predicted measurement
  for (size_t sigmaInd = 0; sigmaInd < sigmaPoints_.size(); ++sigmaInd)
  {
    sigmaPointMeasurements[sigmaInd] = stateToMeasurementSubset * sigmaPoints_[sigmaInd];
    predictedMeasurement.noalias() += stateWeights_[sigmaInd] * sigmaPointMeasurements[sigmaInd];
    // Euler angle averaging requires special care
    for (size_t i = 0; i < updateSize; ++i)
    {
      if (updateIndices[i] == StateMemberRoll)
      {
        roll_sum_x += stateWeights_[sigmaInd] * ::cos(sigmaPointMeasurements[sigmaInd](i));
        roll_sum_y += stateWeights_[sigmaInd] * ::sin(sigmaPointMeasurements[sigmaInd](i));
      }
      else if (updateIndices[i] == StateMemberPitch)
      {
        pitch_sum_x += stateWeights_[sigmaInd] * ::cos(sigmaPointMeasurements[sigmaInd](i));
        pitch_sum_y += stateWeights_[sigmaInd] * ::sin(sigmaPointMeasurements[sigmaInd](i));
      }
      else if (updateIndices[i] == StateMemberYaw)
      {
        yaw_sum_x += stateWeights_[sigmaInd] * ::cos(sigmaPointMeasurements[sigmaInd](i));
        yaw_sum_y += stateWeights_[sigmaInd] * ::sin(sigmaPointMeasurements[sigmaInd](i));
      }
    }
  }
  // Replace the naively-averaged angle components with their circular means.
  for (size_t i = 0; i < updateSize; ++i)
  {
    if (updateIndices[i] == StateMemberRoll)
    {
      predictedMeasurement(i) = ::atan2(roll_sum_y, roll_sum_x);
    }
    else if (updateIndices[i] == StateMemberPitch)
    {
      predictedMeasurement(i) = ::atan2(pitch_sum_y, pitch_sum_x);
    }
    else if (updateIndices[i] == StateMemberYaw)
    {
      predictedMeasurement(i) = ::atan2(yaw_sum_y, yaw_sum_x);
    }
  }
  // (2) Use the sigma point measurements and predicted measurement to compute a predicted
  // measurement covariance matrix P_zz and a state/measurement cross-covariance matrix P_xz.
  for (size_t sigmaInd = 0; sigmaInd < sigmaPoints_.size(); ++sigmaInd)
  {
    sigmaDiff = sigmaPointMeasurements[sigmaInd] - predictedMeasurement;
    Eigen::VectorXd sigmaStateDiff = sigmaPoints_[sigmaInd] - state_;
    // Angle differences must be wrapped to (-pi, pi] before being squared
    // into the covariance sums.
    for (size_t i = 0; i < updateSize; ++i)
    {
      if (updateIndices[i] == StateMemberRoll ||
          updateIndices[i] == StateMemberPitch ||
          updateIndices[i] == StateMemberYaw)
      {
        sigmaDiff(i) = angles::normalize_angle(sigmaDiff(i));
        sigmaStateDiff(i) = angles::normalize_angle(sigmaStateDiff(i));
      }
    }
    predictedMeasCovar.noalias() += covarWeights_[sigmaInd] * (sigmaDiff * sigmaDiff.transpose());
    crossCovar.noalias() += covarWeights_[sigmaInd] * (sigmaStateDiff * sigmaDiff.transpose());
  }
  // (3) Compute the Kalman gain, making sure to use the actual measurement covariance: K = P_xz * (P_zz + R)^-1
  Eigen::MatrixXd invInnovCov = (predictedMeasCovar + measurementCovarianceSubset).inverse();
  kalmanGainSubset = crossCovar * invInnovCov;
  // (4) Apply the gain to the difference between the actual and predicted measurements: x = x + K(z - z_hat)
  innovationSubset = (measurementSubset - predictedMeasurement);
  // Wrap angles in the innovation
  for (size_t i = 0; i < updateSize; ++i)
  {
    if (updateIndices[i] == StateMemberRoll ||
        updateIndices[i] == StateMemberPitch ||
        updateIndices[i] == StateMemberYaw)
    {
      innovationSubset(i) = angles::normalize_angle(innovationSubset(i));
    }
  }
  // (5) Check Mahalanobis distance of innovation; reject outliers entirely
  // (state and covariance are left untouched when the check fails).
  if (checkMahalanobisThreshold(innovationSubset, invInnovCov, measurement.mahalanobisThresh_))
  {
    state_.noalias() += kalmanGainSubset * innovationSubset;
    // (6) Compute the new estimate error covariance P = P - (K * P_zz * K')
    estimateErrorCovariance_.noalias() -= (kalmanGainSubset * predictedMeasCovar * kalmanGainSubset.transpose());
    wrapStateAngles();
    // Mark that we need to re-compute sigma points for successive corrections
    uncorrected_ = false;
    FB_DEBUG("Predicated measurement covariance is:\n" << predictedMeasCovar <<
             "\nCross covariance is:\n" << crossCovar <<
             "\nKalman gain subset is:\n" << kalmanGainSubset <<
             "\nInnovation:\n" << innovationSubset <<
             "\nCorrected full state is:\n" << state_ <<
             "\nCorrected full estimate error covariance is:\n" << estimateErrorCovariance_ <<
             "\n\n---------------------- /Ukf::correct ----------------------\n");
  }
}
/**
 * UKF prediction step: propagates every sigma point through the motion model,
 * recombines them into a predicted state (using circular means for the Euler
 * angles) and a predicted estimate error covariance, then adds the (possibly
 * dynamic) process noise scaled by delta.
 * @param referenceTime time at which prediction is requested
 * @param delta         period, in seconds, over which to predict
 */
void Ukf::predict(const double referenceTime, const double delta)
{
  FB_DEBUG("---------------------- Ukf::predict ----------------------\n" <<
           "delta is " << delta <<
           "\nstate is " << state_ << "\n");
  prepareControl(referenceTime, delta);
  generateSigmaPoints();
  // Accumulators for the weighted circular means of roll/pitch/yaw
  // (unit-vector sums, recombined via atan2 below).
  double roll_sum_x {};
  double roll_sum_y {};
  double pitch_sum_x {};
  double pitch_sum_y {};
  double yaw_sum_x {};
  double yaw_sum_y {};
  // Sum the weighted sigma points to generate a new state prediction
  state_.setZero();
  for (size_t sigmaInd = 0; sigmaInd < sigmaPoints_.size(); ++sigmaInd)
  {
    // Apply the state transition function to this sigma point
    projectSigmaPoint(sigmaPoints_[sigmaInd], delta);
    state_.noalias() += stateWeights_[sigmaInd] * sigmaPoints_[sigmaInd];
    // Euler angle averaging requires special care
    roll_sum_x += stateWeights_[sigmaInd] * ::cos(sigmaPoints_[sigmaInd](StateMemberRoll));
    roll_sum_y += stateWeights_[sigmaInd] * ::sin(sigmaPoints_[sigmaInd](StateMemberRoll));
    pitch_sum_x += stateWeights_[sigmaInd] * ::cos(sigmaPoints_[sigmaInd](StateMemberPitch));
    pitch_sum_y += stateWeights_[sigmaInd] * ::sin(sigmaPoints_[sigmaInd](StateMemberPitch));
    yaw_sum_x += stateWeights_[sigmaInd] * ::cos(sigmaPoints_[sigmaInd](StateMemberYaw));
    yaw_sum_y += stateWeights_[sigmaInd] * ::sin(sigmaPoints_[sigmaInd](StateMemberYaw));
  }
  // Recover average Euler angles (overwrites the naive weighted sums above)
  state_(StateMemberRoll) = ::atan2(roll_sum_y, roll_sum_x);
  state_(StateMemberPitch) = ::atan2(pitch_sum_y, pitch_sum_x);
  state_(StateMemberYaw) = ::atan2(yaw_sum_y, yaw_sum_x);
  // Now use the sigma points and the predicted state to compute a predicted covariance
  estimateErrorCovariance_.setZero();
  Eigen::VectorXd sigmaDiff(STATE_SIZE);
  for (size_t sigmaInd = 0; sigmaInd < sigmaPoints_.size(); ++sigmaInd)
  {
    sigmaDiff = (sigmaPoints_[sigmaInd] - state_);
    // Angle residuals must be wrapped before being squared into the covariance
    sigmaDiff(StateMemberRoll) = angles::normalize_angle(sigmaDiff(StateMemberRoll));
    sigmaDiff(StateMemberPitch) = angles::normalize_angle(sigmaDiff(StateMemberPitch));
    sigmaDiff(StateMemberYaw) = angles::normalize_angle(sigmaDiff(StateMemberYaw));
    estimateErrorCovariance_.noalias() += covarWeights_[sigmaInd] * (sigmaDiff * sigmaDiff.transpose());
  }
  // Not strictly in the theoretical UKF formulation, but necessary here
  // to ensure that we actually incorporate the processNoiseCovariance_
  Eigen::MatrixXd *processNoiseCovariance = &processNoiseCovariance_;
  if (useDynamicProcessNoiseCovariance_)
  {
    computeDynamicProcessNoiseCovariance(state_, delta);
    processNoiseCovariance = &dynamicProcessNoiseCovariance_;
  }
  estimateErrorCovariance_.noalias() += delta * (*processNoiseCovariance);
  // Keep the angles bounded
  wrapStateAngles();
  // Mark that we can keep these sigma points
  uncorrected_ = true;
  FB_DEBUG("Predicted state is:\n" << state_ <<
           "\nPredicted estimate error covariance is:\n" << estimateErrorCovariance_ <<
           "\n\n--------------------- /Ukf::predict ----------------------\n");
}
/**
 * Regenerates the 2n + 1 sigma points from the current state and estimate
 * error covariance: point 0 is the state itself, points 1..n and n+1..2n are
 * the state displaced by +/- each column of the scaled covariance square root
 * (lower-triangular Cholesky factor of (n + lambda) * P).
 */
void Ukf::generateSigmaPoints()
{
  weightedCovarSqrt_ = ((static_cast<double>(STATE_SIZE) + lambda_) * estimateErrorCovariance_).llt().matrixL();
  // Central sigma point is the current state estimate.
  sigmaPoints_[0] = state_;
  // Symmetric pairs around the mean, one pair per state dimension.
  for (size_t col = 0; col < STATE_SIZE; ++col)
  {
    sigmaPoints_[col + 1] = state_ + weightedCovarSqrt_.col(col);
    sigmaPoints_[col + 1 + STATE_SIZE] = state_ - weightedCovarSqrt_.col(col);
  }
}
/**
 * Applies the 3D kinematic motion model to one sigma point in place: fills
 * the orientation-dependent entries of transferFunction_ from the point's
 * current roll/pitch/yaw, then left-multiplies the point by the matrix.
 * NOTE(review): assumes the constant entries of transferFunction_ (e.g. the
 * identity diagonal) were initialized elsewhere -- confirm in FilterBase.
 * @param sigmaPoint sigma point to project forward (modified in place)
 * @param delta      prediction period in seconds
 */
void Ukf::projectSigmaPoint(Eigen::VectorXd& sigmaPoint, double delta)
{
  double roll = sigmaPoint(StateMemberRoll);
  double pitch = sigmaPoint(StateMemberPitch);
  double yaw = sigmaPoint(StateMemberYaw);
  // We'll need these trig calculations a lot.
  double sp = ::sin(pitch);
  double cp = ::cos(pitch);
  double cpi = 1.0 / cp;  // unbounded near pitch = +/- pi/2 (gimbal lock)
  double tp = sp * cpi;
  double sr = ::sin(roll);
  double cr = ::cos(roll);
  double sy = ::sin(yaw);
  double cy = ::cos(yaw);
  // Prepare the transfer function: body-frame velocities/accelerations are
  // rotated into the world frame and integrated over delta.
  transferFunction_(StateMemberX, StateMemberVx) = cy * cp * delta;
  transferFunction_(StateMemberX, StateMemberVy) = (cy * sp * sr - sy * cr) * delta;
  transferFunction_(StateMemberX, StateMemberVz) = (cy * sp * cr + sy * sr) * delta;
  // Acceleration contributes the second-order term 0.5 * a * delta^2.
  transferFunction_(StateMemberX, StateMemberAx) = 0.5 * transferFunction_(StateMemberX, StateMemberVx) * delta;
  transferFunction_(StateMemberX, StateMemberAy) = 0.5 * transferFunction_(StateMemberX, StateMemberVy) * delta;
  transferFunction_(StateMemberX, StateMemberAz) = 0.5 * transferFunction_(StateMemberX, StateMemberVz) * delta;
  transferFunction_(StateMemberY, StateMemberVx) = sy * cp * delta;
  transferFunction_(StateMemberY, StateMemberVy) = (sy * sp * sr + cy * cr) * delta;
  transferFunction_(StateMemberY, StateMemberVz) = (sy * sp * cr - cy * sr) * delta;
  transferFunction_(StateMemberY, StateMemberAx) = 0.5 * transferFunction_(StateMemberY, StateMemberVx) * delta;
  transferFunction_(StateMemberY, StateMemberAy) = 0.5 * transferFunction_(StateMemberY, StateMemberVy) * delta;
  transferFunction_(StateMemberY, StateMemberAz) = 0.5 * transferFunction_(StateMemberY, StateMemberVz) * delta;
  transferFunction_(StateMemberZ, StateMemberVx) = -sp * delta;
  transferFunction_(StateMemberZ, StateMemberVy) = cp * sr * delta;
  transferFunction_(StateMemberZ, StateMemberVz) = cp * cr * delta;
  transferFunction_(StateMemberZ, StateMemberAx) = 0.5 * transferFunction_(StateMemberZ, StateMemberVx) * delta;
  transferFunction_(StateMemberZ, StateMemberAy) = 0.5 * transferFunction_(StateMemberZ, StateMemberVy) * delta;
  transferFunction_(StateMemberZ, StateMemberAz) = 0.5 * transferFunction_(StateMemberZ, StateMemberVz) * delta;
  // Body-frame angular rates map onto Euler angle rates.
  transferFunction_(StateMemberRoll, StateMemberVroll) = delta;
  transferFunction_(StateMemberRoll, StateMemberVpitch) = sr * tp * delta;
  transferFunction_(StateMemberRoll, StateMemberVyaw) = cr * tp * delta;
  transferFunction_(StateMemberPitch, StateMemberVpitch) = cr * delta;
  transferFunction_(StateMemberPitch, StateMemberVyaw) = -sr * delta;
  transferFunction_(StateMemberYaw, StateMemberVpitch) = sr * cpi * delta;
  transferFunction_(StateMemberYaw, StateMemberVyaw) = cr * cpi * delta;
  // Linear velocities integrate the accelerations.
  transferFunction_(StateMemberVx, StateMemberAx) = delta;
  transferFunction_(StateMemberVy, StateMemberAy) = delta;
  transferFunction_(StateMemberVz, StateMemberAz) = delta;
  sigmaPoint.applyOnTheLeft(transferFunction_);
}
} // namespace RobotLocalization

View File

@@ -0,0 +1,57 @@
/*
* Copyright (c) 2014, 2015, 2016, Charles River Analytics, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "robot_localization/ros_filter_types.h"
#include <ros/ros.h>
#include <cstdlib>
#include <vector>
/// Entry point for the stand-alone UKF node: reads the three UKF tuning
/// parameters from the private namespace, starts the filter, and spins.
int main(int argc, char **argv)
{
  ros::init(argc, argv, "ukf_navigation_node");
  ros::NodeHandle nh;
  ros::NodeHandle nhLocal("~");
  // UKF tuning parameters with their customary defaults.
  double alpha;
  double kappa;
  double beta;
  nhLocal.param("alpha", alpha, 0.001);
  nhLocal.param("kappa", kappa, 0.0);
  nhLocal.param("beta", beta, 2.0);
  std::vector<double> args{alpha, kappa, beta};
  RobotLocalization::RosUkf ukf(nh, nhLocal, args);
  ukf.initialize();
  ros::spin();
  return EXIT_SUCCESS;
}

View File

@@ -0,0 +1,71 @@
/*
* Copyright (c) 2017 Simon Gene Gottlieb
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "robot_localization/ros_filter_types.h"
#include <nodelet/nodelet.h>
#include <pluginlib/class_list_macros.h>
#include <ros/ros.h>
#include <memory>
#include <vector>
namespace RobotLocalization
{
class UkfNodelet : public nodelet::Nodelet
{
private:
std::unique_ptr<RosUkf> ukf;
public:
virtual void onInit()
{
NODELET_DEBUG("Initializing nodelet...");
ros::NodeHandle nh = getNodeHandle();
ros::NodeHandle nh_priv = getPrivateNodeHandle();
std::vector<double> args(3, 0);
nh_priv.param("alpha", args[0], 0.001);
nh_priv.param("kappa", args[1], 0.0);
nh_priv.param("beta", args[2], 2.0);
ukf = std::make_unique<RosUkf>(nh, nh_priv, getName(), args);
ukf->initialize();
}
};
} // namespace RobotLocalization
PLUGINLIB_EXPORT_CLASS(RobotLocalization::UkfNodelet, nodelet::Nodelet);