From: Michael Orlitzky
Date: Fri, 22 Mar 2013 01:51:22 +0000 (-0400)
Subject: Update comments in the *conjugate_gradient_method() functions.
X-Git-Url: https://gitweb.michael.orlitzky.com/?a=commitdiff_plain;h=af1b47d92cb94b9289987babefd34633f3bbe804;p=octave.git

Update comments in the *conjugate_gradient_method() functions.
---

diff --git a/optimization/conjugate_gradient_method.m b/optimization/conjugate_gradient_method.m
index a6401e5..0acf5b2 100644
--- a/optimization/conjugate_gradient_method.m
+++ b/optimization/conjugate_gradient_method.m
@@ -35,6 +35,9 @@ function [x, k] = conjugate_gradient_method(A, b, x0, tolerance, max_iterations)
   %
   % All vectors are assumed to be *column* vectors.
   %
+  % The rather verbose name of this function was chosen to avoid
+  % conflicts with other implementations.
+  %
   n = length(x0);
   M = eye(n);
 
diff --git a/optimization/preconditioned_conjugate_gradient_method.m b/optimization/preconditioned_conjugate_gradient_method.m
index 6394348..eb2089f 100644
--- a/optimization/preconditioned_conjugate_gradient_method.m
+++ b/optimization/preconditioned_conjugate_gradient_method.m
@@ -39,7 +39,7 @@ function [x, k] = preconditioned_conjugate_gradient_method(Q,
   %
   % OUTPUT:
   %
-  % - ``x`` - The solution to Qx=b.
+  % - ``x`` - The computed solution to Qx=b.
   %
   % - ``k`` - The ending value of k; that is, the number of
   %   iterations that were performed.
@@ -52,6 +52,9 @@ function [x, k] = preconditioned_conjugate_gradient_method(Q,
   %   Conjugate-Gradient Method", we are supposed to define
   %   d_{0} = -z_{0}, not -r_{0} as written.
   %
+  % The rather verbose name of this function was chosen to avoid
+  % conflicts with other implementations.
+  %
   % REFERENCES:
   %
   %   1. Guler, Osman. Foundations of Optimization. New York, Springer,
@@ -59,7 +62,7 @@ function [x, k] = preconditioned_conjugate_gradient_method(Q,
   %
 
   % Set k=0 first, that way the references to xk,rk,zk,dk which
-  % immediately follow correspond to x0,r0,z0,d0 respectively.
+  % immediately follow correspond (semantically) to x0,r0,z0,d0.
   k = 0;
 
   xk = x0;
@@ -68,9 +71,11 @@ function [x, k] = preconditioned_conjugate_gradient_method(Q,
   dk = -zk;
 
   for k = [ 0 : max_iterations ]
+
     if (norm(rk) < tolerance)
-      x = xk;
-      return;
+      % Check our stopping condition. This should catch the k=0 case.
+      x = xk;
+      return;
     end
 
     % Used twice, avoid recomputation.
@@ -80,6 +85,8 @@ function [x, k] = preconditioned_conjugate_gradient_method(Q,
     % do them both, so we precompute the more expensive operation.
     Qdk = Q * dk;
 
+    % After substituting the two previously-created variables, the
+    % following algorithm occurs verbatim in the reference.
     alpha_k = rkzk/(dk' * Qdk);
     x_next = xk + (alpha_k * dk);
     r_next = rk + (alpha_k * Qdk);
@@ -94,5 +101,7 @@ function [x, k] = preconditioned_conjugate_gradient_method(Q,
     dk = d_next;
   end
 
+  % The algorithm didn't converge, but we still want to return the
+  % terminal value of xk.
   x = xk;
 end
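
Usage note (not part of the commit): the first hunk header above shows the full signature of the unpreconditioned routine, conjugate_gradient_method(A, b, x0, tolerance, max_iterations). The sketch below is one way to exercise it, assuming the optimization/ directory is on the Octave path; the test matrix, tolerance, and iteration cap are illustrative values chosen here, not ones taken from the repository.

  % Build a symmetric positive-definite system (what CG expects) and a
  % column-vector right-hand side, as the function's comments require.
  n = 10;
  R = rand(n);
  A = (R' * R) + n*eye(n);   % symmetric positive-definite by construction
  b = rand(n, 1);
  x0 = zeros(n, 1);          % initial guess
  tolerance = 1e-10;
  max_iterations = 1000;

  [x, k] = conjugate_gradient_method(A, b, x0, tolerance, max_iterations);
  printf("residual %g after %d iterations\n", norm(A*x - b), k);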