From 69d4adf22828f26858e89de560078e1b5ed1714a Mon Sep 17 00:00:00 2001
From: Michael Orlitzky
Date: Fri, 8 Mar 2013 20:51:40 -0500
Subject: [PATCH] Add some step length functions, untested.

---
 optimization/step_length_cgm.m               | 40 ++++++++++++++++++++
 optimization/step_length_positive_definite.m | 36 ++++++++++++++++++
 2 files changed, 76 insertions(+)
 create mode 100644 optimization/step_length_cgm.m
 create mode 100644 optimization/step_length_positive_definite.m

diff --git a/optimization/step_length_cgm.m b/optimization/step_length_cgm.m
new file mode 100644
index 0000000..60f7cd4
--- /dev/null
+++ b/optimization/step_length_cgm.m
@@ -0,0 +1,40 @@
+function alpha = step_length_cgm(r, A, p)
+  ##
+  ## Compute the step length for the conjugate gradient method (CGM).
+  ## The CGM attempts to solve,
+  ##
+  ##   Ax = b
+  ##
+  ## or equivalently,
+  ##
+  ##   min[phi(x) = (1/2)<x, Ax> - <b, x>]
+  ##
+  ## where ``A`` is positive-definite. In the process, we need to
+  ## compute a number of search directions ``p`` and optimal step
+  ## lengths ``alpha``; i.e.,
+  ##
+  ##   x_{k+1} = x_{k} + alpha_{k}*p_{k}
+  ##
+  ## This function computes alpha_{k} in the formula above.
+  ##
+  ## INPUT:
+  ##
+  ##   - ``r`` -- The residual, Ax - b, at the current step.
+  ##
+  ##   - ``A`` -- The matrix ``A`` in the formulation above.
+  ##
+  ##   - ``p`` -- The current search direction.
+  ##
+  ## OUTPUT:
+  ##
+  ##   - ``alpha`` -- The minimizer of ``phi(alpha) = f(x + alpha*p)``
+  ##     along ``p``.
+  ##
+  ## NOTES:
+  ##
+  ## All vectors are assumed to be *column* vectors.
+  ##

+  ## A simple calculation should convince you that the gradient of
+  ## phi(x) above is Ax - b == r.
+  alpha = step_length_positive_definite(r, A, p);
+end
diff --git a/optimization/step_length_positive_definite.m b/optimization/step_length_positive_definite.m
new file mode 100644
index 0000000..b1e5c48
--- /dev/null
+++ b/optimization/step_length_positive_definite.m
@@ -0,0 +1,36 @@
+function alpha = step_length_positive_definite(g, Q, p)
+  ##
+  ## Find the minimizer alpha of,
+  ##
+  ##   phi(alpha) = f(x + alpha*p)
+  ##
+  ## where ``p`` is a descent direction,
+  ##
+  ##   f(x) = (1/2)<x, Qx> - <b, x>
+  ##
+  ## and ``Q`` is positive-definite.
+  ##
+  ## The closed-form solution to this problem is given in Nocedal and
+  ## Wright, (3.55).
+  ##
+  ## INPUT:
+  ##
+  ##   - ``g`` -- The gradient of f.
+  ##
+  ##   - ``Q`` -- The positive-definite matrix in the definition of
+  ##     ``f`` above.
+  ##
+  ##   - ``p`` -- The direction in which ``f`` decreases. The line
+  ##     along which we minimize f(x + alpha*p).
+  ##
+  ## OUTPUT:
+  ##
+  ##   - ``alpha`` -- The value which causes ``f`` to decrease the
+  ##     most.
+  ##
+  ## NOTES:
+  ##
+  ## All vectors are assumed to be *column* vectors.
+  ##
+  ## Semicolon added so Octave does not echo ``alpha`` on every call.
+  alpha = -(g' * p)/(p' * Q * p);
+end
-- 
2.44.2