From: Michael Orlitzky
Date: Mon, 14 Jun 2010 08:14:09 +0000 (-0400)
Subject: Added the SimplexIteration class to the LinearProgramming module.
X-Git-Url: https://gitweb.michael.orlitzky.com/?a=commitdiff_plain;h=9e873928a42a91b468e56bf95217f8504c12dad2;p=dead%2Fcensus-tools.git

Added the SimplexIteration class to the LinearProgramming module.
---

diff --git a/src/LinearProgramming.py b/src/LinearProgramming.py
index d779dc0..ceb2e5f 100644
--- a/src/LinearProgramming.py
+++ b/src/LinearProgramming.py
@@ -4,6 +4,7 @@ integration with lp_solve.
 """
 
 import fractions
+from numpy import *
 import os
 import site
 import sys
@@ -11,8 +12,10 @@ import sys
 # Add LP_SOLVE_PATH to our path. There is no point to this variable
 # other than to make the site.addsitedir() line fit within 80
 # characters.
-LP_SOLVE_PATH = '/../../lib/lp_solve'
-site.addsitedir(os.path.dirname(os.path.abspath(sys.argv[0])) + LP_SOLVE_PATH)
+LP_SOLVE_PATH1 = '/../../lib/lp_solve'
+LP_SOLVE_PATH2 = '/../lib/lp_solve'
+site.addsitedir(os.path.dirname(os.path.abspath(sys.argv[0])) + LP_SOLVE_PATH1)
+site.addsitedir(os.path.dirname(os.path.abspath(sys.argv[0])) + LP_SOLVE_PATH2)
 
 from lpsolve55 import *
 
@@ -335,3 +338,152 @@ class LinearProgram(object):
         Return the GCD of all objective function coefficients.
         """
         return reduce(fractions.gcd, self.objective_coefficients)
+
+
+
+class SimplexIteration(object):
+    """
+    Represents the 'current' iteration of the simplex method at some
+    point. It needs an A, b, x, and c corresponding to the linear
+    program in standard form. It can then determine whether or not the
+    current vertex (x) is optimal, and possibly move to a better one.
+    """
+
+    @property
+    def constraint_matrix(self):
+        return self._constraint_matrix.tolist()
+
+    @constraint_matrix.setter
+    def constraint_matrix(self, value):
+        self._constraint_matrix = matrix(value)
+
+    @property
+    def rhs(self):
+        return self._rhs
+
+    @rhs.setter
+    def rhs(self, value):
+        self._rhs = value
+
+    @property
+    def objective_coefficients(self):
+        return self._objective_coefficients
+
+    @objective_coefficients.setter
+    def objective_coefficients(self, value):
+        self._objective_coefficients = value
+
+    @property
+    def solution_vector(self):
+        return self._solution_vector.tolist()
+
+    @solution_vector.setter
+    def solution_vector(self, value):
+        self._solution_vector = array(value)
+
+
+    @property
+    def basic_variables(self):
+        # The current set of basic variables. Constructed from the
+        # "true" source, our list of basic indices.
+        idxs = self.basic_indices
+        return map(lambda x: "x" + str(x+1), idxs)
+
+
+    @basic_variables.setter
+    def basic_variables(self, value):
+        """
+        Syntactic sugar to set the basic indices. We take a string
+        of the form x1,x2,...,xn, and subtract one from each of the
+        subscripts to get the basic indices.
+        """
+        basic_indices = []
+        vars = value.split(',')
+
+        for var in vars:
+            var = var.strip()
+            var = var.replace('x', '')
+            basic_indices.append(int(var)-1)
+
+        self.basic_indices = basic_indices
+
+
+    @property
+    def nonbasic_variables(self):
+        # All elements of the solution vector that have value zero.
+        idxs = self.nonbasic_indices
+        return map(lambda x: "x" + str(x+1), idxs)
+
+
+    @property
+    def basic_indices(self):
+        return self._basic_indices
+
+
+    @basic_indices.setter
+    def basic_indices(self, value):
+        self._basic_indices = value
+
+
+    @property
+    def nonbasic_indices(self):
+        all_indices = range(0, len(self.solution_vector))
+        return list(set(all_indices) - set(self.basic_indices))
+
+
+    @property
+    def optimal(self):
+        # True if the current solution is optimal, false if not.
+        for idx in self.nonbasic_indices:
+            if self.reduced_cost(idx) < 0.0:
+                return False
+
+        return True
+
+
+    def constraint_column(self, idx):
+        # Return the column of the constraint matrix corresponding
+        # to index idx.
+        bm = matrix(self.constraint_matrix).transpose().tolist()
+        return bm[idx]
+
+    def negative_constraint_column(self, idx):
+        # Return the column of the constraint matrix corresponding
+        # to index idx, multiplied by negative one.
+        bm = (-matrix(self.constraint_matrix).transpose()).tolist()
+        return bm[idx]
+
+    def basis_matrix(self):
+        # Return the columns of our constraint matrix corresponding
+        # to the basic variables.
+        idxs = self.nonbasic_indices
+        bm = matrix(self.constraint_matrix)
+        bm = delete(bm, idxs, axis=1)
+        return bm.tolist()
+
+    def reduced_cost(self, idx):
+        # Find the reduced cost of the variable whose column has index
+        # idx.
+        dx = array(self.delta_x(idx))
+        c = array(self.objective_coefficients)
+        return dot(dx, c)
+
+    def delta_x(self, idx):
+        # Return the array of deltas that would result from increasing
+        # the variable with index idx by one.
+        A = matrix(self.basis_matrix())
+        b = array(self.negative_constraint_column(idx))
+        x = linalg.solve(A,b).tolist()
+
+        # Now we get to add back the nonbasic columns. The 'idx' column
+        # should have a value of 1, and the other non-basics should
+        # have a value of 0.
+        xfull = [0.0] * len(self.solution_vector)
+        bidxs = self.basic_indices
+
+        for bidx in bidxs:
+            xfull[bidx] = x.pop(0)
+
+        xfull[idx] = 1.0
+
+        return xfull
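
A minimal usage sketch of the new class, assuming Python 2 with numpy available
and an importable lpsolve55 (LinearProgramming.py imports it at load time). The
program below is invented for illustration: minimize -x1 - x2 subject to
x1 + x3 = 4 and x2 + x4 = 3 with all variables nonnegative, starting from the
all-slack vertex x = (0, 0, 4, 3).

    from LinearProgramming import SimplexIteration

    # Hypothetical standard-form data; A, b, c and the starting vertex
    # are made up for this example.
    si = SimplexIteration()
    si.constraint_matrix = [[1, 0, 1, 0],
                            [0, 1, 0, 1]]
    si.rhs = [4, 3]
    si.objective_coefficients = [-1, -1, 0, 0]
    si.solution_vector = [0, 0, 4, 3]
    si.basic_variables = "x3,x4"      # sugar for basic_indices = [2, 3]

    print si.basic_variables          # ['x3', 'x4']
    print si.nonbasic_indices         # [0, 1]
    print si.reduced_cost(0)          # -1.0: raising x1 improves the objective
    print si.optimal                  # False: this vertex is not optimal

Since the class defines no __init__, everything is wired up through the
property setters; basic_variables is only sugar over basic_indices, which the
rest of the class treats as the source of truth.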