move all implementation to cpp file
varunagrawal committed Oct 15, 2024
1 parent 1de1386 commit 69729d6
Showing 2 changed files with 134 additions and 128 deletions.
139 changes: 131 additions & 8 deletions gtsam/nonlinear/NonlinearConjugateGradientOptimizer.cpp
@@ -16,11 +16,11 @@
* @date Jun 11, 2012
*/

#include <gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h>
#include <gtsam/nonlinear/internal/NonlinearOptimizerState.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/nonlinear/internal/NonlinearOptimizerState.h>

#include <cmath>

@@ -34,8 +34,8 @@ typedef internal::NonlinearOptimizerState State;
* @param values a linearization point
* Can be moved to NonlinearFactorGraph.h if desired
*/
static VectorValues gradientInPlace(const NonlinearFactorGraph &nfg,
const Values &values) {
static VectorValues gradientInPlace(const NonlinearFactorGraph& nfg,
const Values& values) {
// Linearize graph
GaussianFactorGraph::shared_ptr linear = nfg.linearize(values);
return linear->gradientAtZero();
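
A quick note on what gradientAtZero returns: assuming GTSAM's usual least-squares convention, the linearized objective is one-half the squared residual norm, and its gradient evaluated at the linearization point (x = 0) is

$$\nabla_x \left. \tfrac{1}{2}\,\lVert Ax - b \rVert^2 \right|_{x=0} \;=\; \left. A^{\top}(Ax - b) \right|_{x=0} \;=\; -A^{\top} b.$$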
@@ -48,8 +48,7 @@ NonlinearConjugateGradientOptimizer::NonlinearConjugateGradientOptimizer(
new State(initialValues, graph.error(initialValues)))),
params_(params) {}

double NonlinearConjugateGradientOptimizer::error(
const Values& state) const {
double NonlinearConjugateGradientOptimizer::error(const Values& state) const {
return graph_.error(state);
}

@@ -83,5 +82,129 @@ const Values& NonlinearConjugateGradientOptimizer::optimize() {
return state_->values;
}

} /* namespace gtsam */
double NonlinearConjugateGradientOptimizer::lineSearch(
const Values& currentValues, const VectorValues& gradient) const {
/* The gradient norm scales the initial step so it corresponds to a
unit-length move along the gradient. */
const double g = gradient.norm();

// Perform the golden-section search algorithm to decide the optimal
// step size; for details, refer to
// http://en.wikipedia.org/wiki/Golden_section_search
const double phi = 0.5 * (1.0 + std::sqrt(5.0)), resphi = 2.0 - phi,
tau = 1e-5;
double minStep = -1.0 / g, maxStep = 0,
newStep = minStep + (maxStep - minStep) / (phi + 1.0);

Values newValues = advance(currentValues, newStep, gradient);
double newError = error(newValues);

while (true) {
const bool flag = (maxStep - newStep > newStep - minStep);
const double testStep = flag ? newStep + resphi * (maxStep - newStep)
: newStep - resphi * (newStep - minStep);

if ((maxStep - minStep) < tau * (std::abs(testStep) + std::abs(newStep))) {
return 0.5 * (minStep + maxStep);
}

const Values testValues = advance(currentValues, testStep, gradient);
const double testError = error(testValues);

// update the working range
if (testError >= newError) {
if (flag)
maxStep = testStep;
else
minStep = testStep;
} else {
if (flag) {
minStep = newStep;
newStep = testStep;
newError = testError;
} else {
maxStep = newStep;
newStep = testStep;
newError = testError;
}
}
}
return 0.0;
}

std::tuple<Values, int>
NonlinearConjugateGradientOptimizer::nonlinearConjugateGradient(
const Values& initial, const NonlinearOptimizerParams& params,
const bool singleIteration, const bool gradientDescent) const {
size_t iteration = 0;

// check if we're already close enough
double currentError = error(initial);
if (currentError <= params.errorTol) {
if (params.verbosity >= NonlinearOptimizerParams::ERROR) {
std::cout << "Exiting, as error = " << currentError << " < "
<< params.errorTol << std::endl;
}
return {initial, iteration};
}

Values currentValues = initial;
VectorValues currentGradient = gradient(currentValues), prevGradient,
direction = currentGradient;

/* do one step of gradient descent */
Values prevValues = currentValues;
double prevError = currentError;
double alpha = lineSearch(currentValues, direction);
currentValues = advance(prevValues, alpha, direction);
currentError = error(currentValues);

// Maybe show output
if (params.verbosity >= NonlinearOptimizerParams::ERROR)
std::cout << "Initial error: " << currentError << std::endl;

// Iterative loop
do {
if (gradientDescent) {
direction = gradient(currentValues);
} else {
prevGradient = currentGradient;
currentGradient = gradient(currentValues);
// Polak-Ribiere: beta = g_n' * (g_n - g_{n-1}) / (g_{n-1}' * g_{n-1})
const double beta =
std::max(0.0, currentGradient.dot(currentGradient - prevGradient) /
prevGradient.dot(prevGradient));
direction = currentGradient + (beta * direction);
}

alpha = lineSearch(currentValues, direction);

prevValues = currentValues;
prevError = currentError;

currentValues = advance(prevValues, alpha, direction);
currentError = error(currentValues);

// User hook:
if (params.iterationHook)
params.iterationHook(iteration, prevError, currentError);

// Maybe show output
if (params.verbosity >= NonlinearOptimizerParams::ERROR)
std::cout << "iteration: " << iteration
<< ", currentError: " << currentError << std::endl;
} while (++iteration < params.maxIterations && !singleIteration &&
!checkConvergence(params.relativeErrorTol, params.absoluteErrorTol,
params.errorTol, prevError, currentError,
params.verbosity));

// Printing if verbose
if (params.verbosity >= NonlinearOptimizerParams::ERROR &&
iteration >= params.maxIterations)
std::cout << "nonlinearConjugateGradient: Terminating because reached "
"maximum iterations"
<< std::endl;

return {currentValues, iteration};
}

} /* namespace gtsam */
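
For reference, the Polak-Ribiere update computed inside the loop above is, in standard notation,

$$\beta_k \;=\; \max\!\left(0,\; \frac{g_k^{\top}\,(g_k - g_{k-1})}{g_{k-1}^{\top}\, g_{k-1}}\right), \qquad d_k \;=\; g_k + \beta_k\, d_{k-1},$$

where clamping beta at zero restarts the method with a plain steepest-descent step whenever the conjugacy term turns negative, a standard safeguard for the Polak-Ribiere variant.

The line search itself is plain golden-section bracketing. For intuition, here is a minimal, self-contained sketch of the same bracketing logic applied to a scalar function; goldenSection, the test function f, the interval, and the tolerance are illustrative and not part of GTSAM:

#include <cmath>
#include <functional>
#include <iostream>

// Golden-section minimization over [a, b], mirroring the bracketing
// logic of lineSearch() above.
double goldenSection(const std::function<double(double)>& f, double a,
                     double b, double tol = 1e-5) {
  const double phi = 0.5 * (1.0 + std::sqrt(5.0)), resphi = 2.0 - phi;
  double x = a + (b - a) / (phi + 1.0);  // interior probe point
  double fx = f(x);
  while (true) {
    const bool rightIsLarger = (b - x > x - a);
    // Probe inside the larger sub-interval.
    const double t =
        rightIsLarger ? x + resphi * (b - x) : x - resphi * (x - a);
    if ((b - a) < tol * (std::abs(t) + std::abs(x))) return 0.5 * (a + b);
    const double ft = f(t);
    if (ft >= fx) {
      // The probe is worse: shrink the bracket on the probe's side.
      if (rightIsLarger) b = t; else a = t;
    } else {
      // The probe is better: it becomes the new interior point.
      if (rightIsLarger) a = x; else b = x;
      x = t;
      fx = ft;
    }
  }
}

int main() {
  auto f = [](double x) { return (x - 2.0) * (x - 2.0); };
  std::cout << goldenSection(f, 0.0, 5.0) << std::endl;  // prints ~2
}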
123 changes: 3 additions & 120 deletions gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h
@@ -65,54 +65,8 @@ class GTSAM_EXPORT NonlinearConjugateGradientOptimizer
const Values &optimize() override;

/** Implement the golden-section line search algorithm */
double lineSearch(const Values &currentValues, const VectorValues &gradient) const {
/* The gradient norm scales the initial step so it corresponds to a
unit-length move along the gradient. */
const double g = gradient.norm();

// Perform the golden-section search algorithm to decide the optimal
// step size; for details, refer to
// http://en.wikipedia.org/wiki/Golden_section_search
const double phi = 0.5 * (1.0 + std::sqrt(5.0)), resphi = 2.0 - phi,
tau = 1e-5;
double minStep = -1.0 / g, maxStep = 0,
newStep = minStep + (maxStep - minStep) / (phi + 1.0);

Values newValues = advance(currentValues, newStep, gradient);
double newError = error(newValues);

while (true) {
const bool flag = (maxStep - newStep > newStep - minStep);
const double testStep = flag ? newStep + resphi * (maxStep - newStep)
: newStep - resphi * (newStep - minStep);

if ((maxStep - minStep) <
tau * (std::abs(testStep) + std::abs(newStep))) {
return 0.5 * (minStep + maxStep);
}

const Values testValues = advance(currentValues, testStep, gradient);
const double testError = error(testValues);

// update the working range
if (testError >= newError) {
if (flag)
maxStep = testStep;
else
minStep = testStep;
} else {
if (flag) {
minStep = newStep;
newStep = testStep;
newError = testError;
} else {
maxStep = newStep;
newStep = testStep;
newError = testError;
}
}
}
return 0.0;
}
double lineSearch(const Values &currentValues,
const VectorValues &gradient) const;

/**
* Implement the nonlinear conjugate gradient method using the Polak-Ribiere
@@ -124,78 +78,7 @@ class GTSAM_EXPORT NonlinearConjugateGradientOptimizer
*/
std::tuple<Values, int> nonlinearConjugateGradient(
const Values &initial, const NonlinearOptimizerParams &params,
const bool singleIteration, const bool gradientDescent = false) const {
size_t iteration = 0;

// check if we're already close enough
double currentError = error(initial);
if (currentError <= params.errorTol) {
if (params.verbosity >= NonlinearOptimizerParams::ERROR) {
std::cout << "Exiting, as error = " << currentError << " < "
<< params.errorTol << std::endl;
}
return {initial, iteration};
}

Values currentValues = initial;
VectorValues currentGradient = gradient(currentValues), prevGradient,
direction = currentGradient;

/* do one step of gradient descent */
Values prevValues = currentValues;
double prevError = currentError;
double alpha = lineSearch(currentValues, direction);
currentValues = advance(prevValues, alpha, direction);
currentError = error(currentValues);

// Maybe show output
if (params.verbosity >= NonlinearOptimizerParams::ERROR)
std::cout << "Initial error: " << currentError << std::endl;

// Iterative loop
do {
if (gradientDescent) {
direction = gradient(currentValues);
} else {
prevGradient = currentGradient;
currentGradient = gradient(currentValues);
// Polak-Ribiere: beta = g_n' * (g_n - g_{n-1}) / (g_{n-1}' * g_{n-1})
const double beta =
std::max(0.0, currentGradient.dot(currentGradient - prevGradient) /
prevGradient.dot(prevGradient));
direction = currentGradient + (beta * direction);
}

alpha = lineSearch(currentValues, direction);

prevValues = currentValues;
prevError = currentError;

currentValues = advance(prevValues, alpha, direction);
currentError = error(currentValues);

// User hook:
if (params.iterationHook)
params.iterationHook(iteration, prevError, currentError);

// Maybe show output
if (params.verbosity >= NonlinearOptimizerParams::ERROR)
std::cout << "iteration: " << iteration
<< ", currentError: " << currentError << std::endl;
} while (++iteration < params.maxIterations && !singleIteration &&
!checkConvergence(params.relativeErrorTol, params.absoluteErrorTol,
params.errorTol, prevError, currentError,
params.verbosity));

// Printing if verbose
if (params.verbosity >= NonlinearOptimizerParams::ERROR &&
iteration >= params.maxIterations)
std::cout << "nonlinearConjugateGradient: Terminating because reached "
"maximum iterations"
<< std::endl;

return {currentValues, iteration};
}
const bool singleIteration, const bool gradientDescent = false) const;
};

} // namespace gtsam
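
Since only the definitions moved, callers of the optimizer are unaffected. A minimal usage sketch follows; the toy factor graph, key, and noise model are illustrative and assume a recent GTSAM 4.x API:

#include <gtsam/geometry/Point2.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/slam/PriorFactor.h>

using namespace gtsam;

int main() {
  // Toy problem: one prior factor pulling a 2-D point toward the origin.
  NonlinearFactorGraph graph;
  auto noise = noiseModel::Isotropic::Sigma(2, 1.0);
  graph.emplace_shared<PriorFactor<Point2>>(0, Point2(0.0, 0.0), noise);

  Values initial;
  initial.insert(0, Point2(3.0, 4.0));  // start away from the minimum

  NonlinearOptimizerParams params;
  params.maxIterations = 100;

  NonlinearConjugateGradientOptimizer optimizer(graph, initial, params);
  Values result = optimizer.optimize();  // drives nonlinearConjugateGradient()
  result.print("result");
  return 0;
}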
