#include <gtsam/nonlinear/GncParams.h>
#include <boost/math/distributions/chi_squared.hpp>
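// Chi2inv returns the quantile (inverse CDF) of a chi-squared distribution with
// `dofs` degrees of freedom, evaluated at probability alpha.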
static double Chi2inv(const double alpha, const size_t dofs) {
  boost::math::chi_squared_distribution<double> chi2(dofs);
  return boost::math::quantile(chi2, alpha);
}
template<class GncParameters>
class GncOptimizer {
  // ...
  GncParameters params_;  ///< GNC parameters.
GncOptimizer(const NonlinearFactorGraph& graph, const Values& initialValues,
             const GncParameters& params = GncParameters())
    : state_(initialValues), params_(params) {
  nfg_.resize(graph.size());
  for (size_t i = 0; i < graph.size(); i++) {
    auto factor = boost::dynamic_pointer_cast<NoiseModelFactor>(graph[i]);
    auto robust = boost::dynamic_pointer_cast<noiseModel::Robust>(
        factor->noiseModel());
    // If the factor has a robust loss, strip it and keep the underlying noise model.
    nfg_[i] = robust ? factor->cloneWithNewNoiseModel(robust->noise()) : factor;
  }
  // Check that no measurement is flagged as both a known inlier and a known outlier.
  std::vector<size_t> inconsistentlySpecifiedWeights;
  std::set_intersection(params.knownInliers.begin(), params.knownInliers.end(),
                        params.knownOutliers.begin(), params.knownOutliers.end(),
                        std::inserter(inconsistentlySpecifiedWeights,
                                      inconsistentlySpecifiedWeights.begin()));
  if (inconsistentlySpecifiedWeights.size() > 0) {
    params.print("params\n");
    throw std::runtime_error(
        "GncOptimizer::constructor: the user has selected one or more measurements"
        " to be BOTH a known inlier and a known outlier.");
  }
  // Each known-inlier index must refer to a factor that exists in the graph.
  for (size_t i = 0; i < params.knownInliers.size(); i++) {
    if (params.knownInliers[i] > nfg_.size() - 1) {
      throw std::runtime_error(
          "GncOptimizer::constructor: the user has selected one or more measurements"
          " that are not in the factor graph to be known inliers.");
    }
  }
  // Each known-outlier index must refer to a factor that exists in the graph.
  for (size_t i = 0; i < params.knownOutliers.size(); i++) {
    if (params.knownOutliers[i] > nfg_.size() - 1) {
      throw std::runtime_error(
          "GncOptimizer::constructor: the user has selected one or more measurements"
          " that are not in the factor graph to be known outliers.");
    }
  }
  weights_ = initializeWeightsFromKnownInliersAndOutliers();
/// Set the maximum weighted residual error for an inlier (same for all factors).
void setInlierCostThresholds(const double inth) {
  barcSq_ = inth * Vector::Ones(nfg_.size());
}
/// Set the inlier thresholds from the probability alpha that inlier residuals fall below them.
void setInlierCostThresholdsAtProbability(const double alpha) {
  barcSq_ = Vector::Ones(nfg_.size());
  for (size_t k = 0; k < nfg_.size(); k++) {
    // Factor error is 0.5 * ||whitened residual||^2, hence the 0.5 on the chi-squared quantile.
    barcSq_[k] = 0.5 * Chi2inv(alpha, nfg_[k]->dim());
  }
}
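// Illustrative numbers (alpha = 0.99 and a 3-dof factor are assumed, not from the header):
// Chi2inv(0.99, 3) ~= 11.34, so the resulting inlier threshold on the factor error is ~= 5.67.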
/// Set weights for each factor.
void setWeights(const Vector w) {
  if (size_t(w.size()) != nfg_.size()) {
    throw std::runtime_error(
        "GncOptimizer::setWeights: the number of specified weights"
        " does not match the size of the factor graph.");
  }
  weights_ = w;
}
const GncParameters& getParams() const { return params_; }
Vector initializeWeightsFromKnownInliersAndOutliers() const {
  Vector weights = Vector::Ones(nfg_.size());
  // Known outliers start at zero weight; everything else starts as an inlier.
  for (size_t i = 0; i < params_.knownOutliers.size(); i++) {
    weights[params_.knownOutliers[i]] = 0.0;
  }
  return weights;
}
Values optimize() {
  NonlinearFactorGraph graph_initial = this->makeWeightedGraph(weights_);
  BaseOptimizer baseOptimizer(graph_initial, state_, params_.baseOptimizerParams);
  Values result = baseOptimizer.optimize();
  double mu = initializeMu();
  double prev_cost = graph_initial.error(result);
  double cost = 0.0;  // updated in the main GNC loop

  // Number of measurements not already fixed as known inliers or outliers.
  int nrUnknownInOrOut = nfg_.size()
      - (params_.knownInliers.size() + params_.knownOutliers.size());
  // Degenerate cases: residuals are already small at initialization, or every
  // measurement is already a known inlier/outlier; return the base solution.
  if (mu <= 0 || nrUnknownInOrOut == 0) {
    if (mu <= 0 && params_.verbosity >= GncParameters::Verbosity::SUMMARY) {
      std::cout << "GNC Optimizer stopped because maximum residual at "
                   "initialization is small." << std::endl;
    }
    if (nrUnknownInOrOut == 0 && params_.verbosity >= GncParameters::Verbosity::SUMMARY) {
      std::cout << "GNC Optimizer stopped because all measurements are already known to be inliers or outliers"
                << std::endl;
    }
    if (params_.verbosity >= GncParameters::Verbosity::MU) {
      std::cout << "mu: " << mu << std::endl;
    }
    if (params_.verbosity >= GncParameters::Verbosity::VALUES) {
      result.print("result\n");
    }
    return result;
  }
  size_t iter;
  for (iter = 0; iter < params_.maxIterations; iter++) {
    // Display per-iteration info at the requested verbosity.
    if (params_.verbosity >= GncParameters::Verbosity::MU) {
      std::cout << "iter: " << iter << std::endl;
      std::cout << "mu: " << mu << std::endl;
    }
    if (params_.verbosity >= GncParameters::Verbosity::WEIGHTS) {
      std::cout << "weights: " << weights_ << std::endl;
    }
    if (params_.verbosity >= GncParameters::Verbosity::VALUES) {
      result.print("result\n");
    }
    // GNC weight update, then re-optimize the weighted graph.
    weights_ = calculateWeights(result, mu);
    NonlinearFactorGraph graph_iter = this->makeWeightedGraph(weights_);
    BaseOptimizer baseOptimizer_iter(graph_iter, state_, params_.baseOptimizerParams);
    result = baseOptimizer_iter.optimize();
    cost = graph_iter.error(result);
    if (params_.verbosity >= GncParameters::Verbosity::VALUES) {
      std::cout << "previous cost: " << prev_cost << std::endl;
      std::cout << "current cost: " << cost << std::endl;
    }
  }

  if (params_.verbosity >= GncParameters::Verbosity::SUMMARY) {
    std::cout << "final iterations: " << iter << std::endl;
    std::cout << "final mu: " << mu << std::endl;
    std::cout << "previous cost: " << prev_cost << std::endl;
    std::cout << "current cost: " << cost << std::endl;
  }
  if (params_.verbosity >= GncParameters::Verbosity::WEIGHTS) {
    std::cout << "final weights: " << weights_ << std::endl;
  }
  return result;
}
/// Initialize the GNC parameter mu such that the surrogate loss is approximately convex (Remark 5 in the GNC paper).
double initializeMu() const {
  double mu_init = 0.0;
  switch (params_.lossType) {
    case GncLossType::GM:
      // The GM surrogate is convex for large mu; since barcSq_ may differ per factor,
      // take the max over factors of 2 * error_k / barcSq_k.
      for (size_t k = 0; k < nfg_.size(); k++) {
        mu_init = std::max(mu_init, 2 * nfg_[k]->error(state_) / barcSq_[k]);
      }
      return mu_init;
    case GncLossType::TLS:
      // For TLS, take the min over factors of barcSq_k / (2 * error_k - barcSq_k),
      // restricted to factors with 2 * error_k - barcSq_k > 0.
      mu_init = std::numeric_limits<double>::infinity();
      for (size_t k = 0; k < nfg_.size(); k++) {
        double rk = nfg_[k]->error(state_);
        mu_init = (2 * rk - barcSq_[k]) > 0 ?
            std::min(mu_init, barcSq_[k] / (2 * rk - barcSq_[k])) : mu_init;
      }
      if (mu_init >= 0 && mu_init < 1e-6) {
        mu_init = 1e-6;  // avoid mu = 0 when some residuals are very large
      }
      // mu <= 0 or mu = inf means all residuals are already below the threshold:
      // return -1, which terminates the main GNC loop.
      return mu_init > 0 && !std::isinf(mu_init) ? mu_init : -1;
    default:
      throw std::runtime_error(
          "GncOptimizer::initializeMu: called with unknown loss type.");
  }
}
/// Update mu to gradually increase non-convexity.
double updateMu(const double mu) const {
  switch (params_.lossType) {
    case GncLossType::GM:
      // mu decreases toward 1; at mu = 1 the surrogate matches the original GM loss.
      return std::max(1.0, mu / params_.muStep);
    case GncLossType::TLS:
      // mu increases, moving the surrogate toward the truncated least-squares loss.
      return mu * params_.muStep;
    default:
      throw std::runtime_error(
          "GncOptimizer::updateMu: called with unknown loss type.");
  }
}
/// Check if mu has reached the value at which the surrogate loss matches the original loss.
bool checkMuConvergence(const double mu) const {
  bool muConverged = false;
  switch (params_.lossType) {
    case GncLossType::GM:
      muConverged = std::fabs(mu - 1.0) < 1e-9;  // mu = 1 recovers the original GM loss
      break;
    case GncLossType::TLS:
      muConverged = false;  // for TLS, mu grows without bound, so it never "converges"
      break;
    default:
      throw std::runtime_error(
          "GncOptimizer::checkMuConvergence: called with unknown loss type.");
  }
  if (muConverged && params_.verbosity >= GncParameters::Verbosity::SUMMARY)
    std::cout << "muConverged = true " << std::endl;
  return muConverged;
}
/// Check convergence of relative cost differences.
bool checkCostConvergence(const double cost, const double prev_cost) const {
  bool costConverged = std::fabs(cost - prev_cost) / std::max(prev_cost, 1e-7)
      < params_.relativeCostTol;
  if (costConverged && params_.verbosity >= GncParameters::Verbosity::SUMMARY) {
    std::cout << "checkCostConvergence = true (prev. cost = " << prev_cost
              << ", curr. cost = " << cost << ")" << std::endl;
  }
  return costConverged;
}
/// Check convergence of the weights to binary values.
bool checkWeightsConvergence(const Vector& weights) const {
  bool weightsConverged = false;
  switch (params_.lossType) {
    case GncLossType::GM:
      weightsConverged = false;  // GM weights have no binary convergence criterion
      break;
    case GncLossType::TLS:
      weightsConverged = true;
      for (int i = 0; i < weights.size(); i++) {
        if (std::fabs(weights[i] - std::round(weights[i])) > params_.weightsTol) {
          weightsConverged = false;
          break;
        }
      }
      break;
    default:
      throw std::runtime_error(
          "GncOptimizer::checkWeightsConvergence: called with unknown loss type.");
  }
  if (weightsConverged
      && params_.verbosity >= GncParameters::Verbosity::SUMMARY)
    std::cout << "weightsConverged = true " << std::endl;
  return weightsConverged;
}
/// Check for convergence between consecutive GNC iterations.
bool checkConvergence(const double mu, const Vector& weights,
                      const double cost, const double prev_cost) const {
  return checkCostConvergence(cost, prev_cost)
      || checkWeightsConvergence(weights) || checkMuConvergence(mu);
}
/// Create a graph where each factor is weighted by the GNC weights.
NonlinearFactorGraph makeWeightedGraph(const Vector& weights) const {
  NonlinearFactorGraph newGraph;
  newGraph.resize(nfg_.size());
  for (size_t i = 0; i < nfg_.size(); i++) {
    auto factor = boost::dynamic_pointer_cast<NoiseModelFactor>(nfg_[i]);
    auto noiseModel = boost::dynamic_pointer_cast<noiseModel::Gaussian>(
        factor->noiseModel());
    if (noiseModel) {
      // Scale the information matrix by the factor's GNC weight.
      Matrix newInfo = weights[i] * noiseModel->information();
      auto newNoiseModel = noiseModel::Gaussian::Information(newInfo);
      newGraph[i] = factor->cloneWithNewNoiseModel(newNoiseModel);
    } else {
      throw std::runtime_error(
          "GncOptimizer::makeWeightedGraph: unexpected non-Gaussian noise model.");
    }
  }
  return newGraph;
}
/// Calculate the GNC weights.
Vector calculateWeights(const Values& currentEstimate, const double mu) {
  Vector weights = initializeWeightsFromKnownInliersAndOutliers();

  // Only update weights that the user has not fixed as known inliers or outliers.
  std::vector<size_t> allWeights;
  for (size_t k = 0; k < nfg_.size(); k++) {
    allWeights.push_back(k);
  }
  std::vector<size_t> knownWeights;
  std::set_union(params_.knownInliers.begin(), params_.knownInliers.end(),
                 params_.knownOutliers.begin(), params_.knownOutliers.end(),
                 std::inserter(knownWeights, knownWeights.begin()));
  std::vector<size_t> unknownWeights;
  std::set_difference(allWeights.begin(), allWeights.end(),
                      knownWeights.begin(), knownWeights.end(),
                      std::inserter(unknownWeights, unknownWeights.begin()));
  switch (params_.lossType) {
    case GncLossType::GM: {  // GM weight update
      for (size_t k : unknownWeights) {
        double u2_k = nfg_[k]->error(currentEstimate);
        weights[k] = std::pow((mu * barcSq_[k]) / (u2_k + mu * barcSq_[k]), 2);
      }
      return weights;
    }
    case GncLossType::TLS: {  // TLS weight update
      for (size_t k : unknownWeights) {
        double u2_k = nfg_[k]->error(currentEstimate);
        double upperbound = (mu + 1) / mu * barcSq_[k];
        double lowerbound = mu / (mu + 1) * barcSq_[k];
        weights[k] = std::sqrt(barcSq_[k] * mu * (mu + 1) / u2_k) - mu;
        if (u2_k >= upperbound || weights[k] < 0) {
          weights[k] = 0;  // certain outlier
        } else if (u2_k <= lowerbound || weights[k] > 1) {
          weights[k] = 1;  // certain inlier
        }
      }
      return weights;
    }
    default:
      throw std::runtime_error(
          "GncOptimizer::calculateWeights: called with unknown loss type.");
  }
}
NonlinearFactorGraph.h
Factor Graph consisting of non-linear factors.
gtsam
Global functions in a separate testing namespace.
Definition: chartTesting.h:28
bool equal(const T &obj1, const T &obj2, double tol)
Call equal on the object.
Definition: Testable.h:84
virtual void resize(size_t size)
Directly resize the number of factors in the graph.
Definition: FactorGraph.h:381
size_t size() const
return the number of factors (including any null factors set by remove() ).
Definition: FactorGraph.h:326
static shared_ptr Information(const Matrix &M, bool smart=true)
A Gaussian noise model created by specifying an information matrix.
Definition: NoiseModel.cpp:100
Base class for robust error models. The robust M-estimators above simply tell us how to re-weight the ...
Definition: NoiseModel.h:642
GncOptimizer
Definition: GncOptimizer.h:45
bool checkWeightsConvergence(const Vector &weights) const
Check convergence of weights to binary values.
Definition: GncOptimizer.h:364
void setWeights(const Vector w)
Set weights for each factor.
Definition: GncOptimizer.h:144
bool checkMuConvergence(const double mu) const
Check if we have reached the value of mu for which the surrogate loss matches the original loss.
Definition: GncOptimizer.h:334
GncParameters::OptimizerType BaseOptimizer
For each parameter, specify the corresponding optimizer: e.g., GaussNewtonParams -> GaussNewtonOptimizer.
Definition: GncOptimizer.h:48
Values optimize()
Compute optimal solution using graduated non-convexity.
Definition: GncOptimizer.h:185
const Vector & getInlierCostThresholds() const
Get the inlier threshold.
Definition: GncOptimizer.h:166
bool checkCostConvergence(const double cost, const double prev_cost) const
Check convergence of relative cost differences.
Definition: GncOptimizer.h:353
const Values & getState() const
Access a copy of the internal values.
Definition: GncOptimizer.h:157
void setInlierCostThresholds(const Vector &inthVec)
Set the maximum weighted residual error for an inlier (one for each factor).
Definition: GncOptimizer.h:125
Vector calculateWeights(const Values &currentEstimate, const double mu)
Calculate gnc weights.
Definition: GncOptimizer.h:423
double initializeMu() const
Initialize the gnc parameter mu such that loss is approximately convex (remark 5 in GNC paper).
Definition: GncOptimizer.h:274
double updateMu(const double mu) const
Update the gnc parameter mu to gradually increase nonconvexity.
Definition: GncOptimizer.h:319
bool equals(const GncOptimizer &other, double tol=1e-9) const
Equals.
Definition: GncOptimizer.h:169
bool checkConvergence(const double mu, const Vector &weights, const double cost, const double prev_cost) const
Check for convergence between consecutive GNC iterations.
Definition: GncOptimizer.h:391
void setInlierCostThresholds(const double inth)
Set the maximum weighted residual error for an inlier (same for all factors).
Definition: GncOptimizer.h:117
void setInlierCostThresholdsAtProbability(const double alpha)
Set the maximum weighted residual error threshold by specifying the probability alpha that the inlier...
Definition: GncOptimizer.h:132
NonlinearFactorGraph makeWeightedGraph(const Vector &weights) const
Create a graph where each factor is weighted by the gnc weights.
Definition: GncOptimizer.h:398
const NonlinearFactorGraph & getFactors() const
Access a copy of the internal factor graph.
Definition: GncOptimizer.h:154
const Vector & getWeights() const
Access a copy of the GNC weights.
Definition: GncOptimizer.h:163
const GncParameters & getParams() const
Access a copy of the parameters.
Definition: GncOptimizer.h:160
GncOptimizer(const NonlinearFactorGraph &graph, const Values &initialValues, const GncParameters &params=GncParameters())
Constructor.
Definition: GncOptimizer.h:59
A nonlinear sum-of-squares factor with a zero-mean noise model implementing the density. Templated on ...
Definition: NonlinearFactor.h:174
boost::shared_ptr< This > shared_ptr
Noise model.
Definition: NonlinearFactor.h:186
Definition: NonlinearFactorGraph.h:55
bool equals(const NonlinearFactorGraph &other, double tol=1e-9) const
Test equality.
Definition: NonlinearFactorGraph.cpp:97
double error(const Values &values) const
unnormalized error, in the most common case
Definition: NonlinearFactorGraph.cpp:170
A non-templated config holding any types of Manifold-group elements.
Definition: Values.h:65
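Not part of the header above: a minimal usage sketch of how GncOptimizer is typically driven, assuming a GTSAM build that ships GncOptimizer and GncParams. The toy Pose2 graph, keys, noise sigmas, and the deliberately wrong measurement are invented for illustration, and header locations may differ across GTSAM versions.

#include <gtsam/geometry/Pose2.h>
#include <gtsam/nonlinear/GaussNewtonOptimizer.h>
#include <gtsam/nonlinear/GncOptimizer.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/slam/BetweenFactor.h>
#include <iostream>

using namespace gtsam;

int main() {
  // Small pose graph: a prior on pose 0, one good odometry factor, and one gross outlier.
  NonlinearFactorGraph graph;
  auto priorNoise = noiseModel::Diagonal::Sigmas(Vector3(0.1, 0.1, 0.05));
  auto odomNoise = noiseModel::Diagonal::Sigmas(Vector3(0.2, 0.2, 0.1));
  graph.add(PriorFactor<Pose2>(0, Pose2(0.0, 0.0, 0.0), priorNoise));
  graph.add(BetweenFactor<Pose2>(0, 1, Pose2(1.0, 0.0, 0.0), odomNoise));  // inlier
  graph.add(BetweenFactor<Pose2>(0, 1, Pose2(5.0, 5.0, 1.5), odomNoise));  // outlier

  Values initial;
  initial.insert(0, Pose2(0.0, 0.0, 0.0));
  initial.insert(1, Pose2(0.9, 0.1, 0.0));

  // GNC wraps a base optimizer; here Gauss-Newton, selected via the params type.
  GncParams<GaussNewtonParams> gncParams;
  gncParams.setLossType(GncLossType::TLS);
  gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::SUMMARY);

  GncOptimizer<GncParams<GaussNewtonParams>> gnc(graph, initial, gncParams);
  Values estimate = gnc.optimize();

  estimate.print("GNC estimate:\n");
  // Weights near 1 mark inliers; the rejected outlier factor ends up near 0.
  std::cout << "weights: " << gnc.getWeights().transpose() << std::endl;
  return 0;
}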