X-Git-Url: http://gitweb.michael.orlitzky.com/?a=blobdiff_plain;f=optimization%2Fpreconditioned_conjugate_gradient_method.m;h=35fecaa99cdb81b7f22836791797c0bb2fe1acac;hb=8df0aa3b8e47b596626cf0f833d9e0c0143bf4d5;hp=4e68ccb583e7fab2bc8cf9f34ce170eeab7fe3a2;hpb=48a7e4e418ee26465a4d3a24e45e26cf7e90eb71;p=octave.git

diff --git a/optimization/preconditioned_conjugate_gradient_method.m b/optimization/preconditioned_conjugate_gradient_method.m
index 4e68ccb..35fecaa 100644
--- a/optimization/preconditioned_conjugate_gradient_method.m
+++ b/optimization/preconditioned_conjugate_gradient_method.m
@@ -77,7 +77,7 @@ function [x, k] = preconditioned_conjugate_gradient_method(Q, ...
   zk = M \ rk;
   dk = -zk;
 
-  for k = [ 0 : max_iterations ]
+  while (k <= max_iterations)
 
     if (norm(rk) < tolerance)
       % Check our stopping condition. This should catch the k=0 case.
@@ -109,6 +109,10 @@ function [x, k] = preconditioned_conjugate_gradient_method(Q, ...
     beta_next = (r_next' * z_next)/rkzk;
     d_next = -z_next + beta_next*dk;
 
+    % We potentially just performed one more iteration than necessary
+    % in order to simplify the loop. Note that due to the structure of
+    % our loop, we will have k > max_iterations when we fail to
+    % converge.
     k = k + 1;
     xk = x_next;
     rk = r_next;
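
The comment added in the second hunk is easier to see in isolation. Below is a minimal standalone Octave sketch (not the repository's function) of the counter behaviour it describes; it assumes, as the hunk's context suggests, that k starts at zero before the loop and is incremented at the bottom of each pass, so a run that never converges exits with k equal to max_iterations + 1.

% Standalone sketch of the counter behaviour described in the added
% comment. "residual" is an illustrative stand-in for norm(rk); the
% other names mirror the variables visible in the hunks.
max_iterations = 5;
tolerance = 1e-10;
residual = 1;   % pretend the iteration never converges
k = 0;

while (k <= max_iterations)
  if (residual < tolerance)
    break;      % converged: k counts the iterations actually performed
  end

  % ... one PCG update would shrink the residual here ...

  k = k + 1;
end

% Having never converged, k is now max_iterations + 1 = 6, so a caller
% can detect failure by testing k > max_iterations.
disp(k)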