author    Nao Pross <np@0hm.ch>  2024-05-08 10:18:37 +0200
committer Nao Pross <np@0hm.ch>  2024-05-08 10:21:00 +0200
commit    462d0a61d8bd2286b0708dc1f08fcdd672c91414 (patch)
tree      215b51e78861c60cad73f45c718de36c30eb3b11
parent    Add __str__ dunder methods to a few classes (diff)
Rewrite and rename solve_cone to solve_sos_cone with new structure
This is a first draft; it has some bugs.
-rw-r--r--  sumofsquares/__init__.py  |   5
-rw-r--r--  sumofsquares/abc.py       |   2
-rw-r--r--  sumofsquares/cvxopt.py    | 561
-rw-r--r--  sumofsquares/problems.py  |  34
4 files changed, 173 insertions, 429 deletions
diff --git a/sumofsquares/__init__.py b/sumofsquares/__init__.py
index d71d38f..27f5e87 100644
--- a/sumofsquares/__init__.py
+++ b/sumofsquares/__init__.py
@@ -95,10 +95,11 @@ def make_problem(
def solve_problem(
cost: Expression,
constraints: Iterable[Constraint] = (),
- solver: Solver = Solver.CVXOPT
+ solver: Solver = Solver.CVXOPT,
+ verbose: bool = False
) -> tuple[Problem, Result]:
"""
Solve a sum-of-squares optimization problem.
"""
prob = make_problem(cost, constraints, solver)
- return prob, prob.solve()
+ return prob, prob.solve(verbose)
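
For orientation, a minimal call sketch of the updated entry point. The `cost` expression and the `constraints` iterable are assumed to be built elsewhere with the package's existing helpers (their construction is not shown), and the variable passed to `value_of` is purely illustrative:

from sumofsquares import solve_problem

# `verbose=True` is the new flag: it is forwarded to Problem.solve() and, for
# the CVXOPT backend, toggles the solver's progress output.
prob, result = solve_problem(cost, constraints, verbose=True)
print(result.value_of(gamma))  # `gamma` stands in for one of the problem's decision variables
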
diff --git a/sumofsquares/abc.py b/sumofsquares/abc.py
index 95eacad..78e485a 100644
--- a/sumofsquares/abc.py
+++ b/sumofsquares/abc.py
@@ -99,5 +99,5 @@ class Problem(ABC):
# --- Methods ---
@abstractmethod
- def solve(self) -> Result:
+ def solve(self, verbose: bool = False) -> Result:
""" Solve the optimization problem. """
diff --git a/sumofsquares/cvxopt.py b/sumofsquares/cvxopt.py
index d656d83..ce7b093 100644
--- a/sumofsquares/cvxopt.py
+++ b/sumofsquares/cvxopt.py
@@ -1,424 +1,149 @@
-import dataclasses
import cvxopt
-import polymatrix
import numpy as np
-import math
-@dataclasses.dataclass
-class CVXOptConeQPResult:
- x: np.ndarray
- y: np.ndarray
- s: np.ndarray
- z: np.ndarray
- status: str
- gap: float
- relative_gap: float
- primal_objective: float
- dual_objective: float
- primal_infeasibility: float
- dual_infeasibility: float
- primal_slack: float
- dual_slack: float
- iterations: int
- x0: dict
-
-
-
-def solve_cone(
- state,
- variables,
- cost,
- s_inequality = tuple(),
- q_inequality = tuple(),
- l_inequality = tuple(),
- # previous_result=None,
- print_info=False,
- print_matrix=False,
- ):
-
- try:
- state, matrix_repr = polymatrix.to_matrix_repr(
- (cost,) + l_inequality + q_inequality + s_inequality,
- polymatrix.v_stack(variables),
- ).apply(state)
- except:
- state, matrix_repr = polymatrix.to_matrix_repr(
- (cost,),
- polymatrix.v_stack(variables),
- ).apply(state)
-
- if len(l_inequality):
- state, matrix_repr = polymatrix.to_matrix_repr(
- l_inequality,
- polymatrix.v_stack(variables),
- ).apply(state)
-
- if len(q_inequality):
- state, matrix_repr = polymatrix.to_matrix_repr(
- q_inequality,
- polymatrix.v_stack(variables),
- ).apply(state)
-
- if len(s_inequality):
- state, matrix_repr = polymatrix.to_matrix_repr(
- s_inequality,
- polymatrix.v_stack(variables),
- ).apply(state)
-
-
- # maximum degree of cost function must be 2
- max_degree_cost = matrix_repr.data[0].get_max_degree()
- assert max_degree_cost <= 2, f'{max_degree_cost=}'
-
- # maximum degree of constraint must be 1
- max_degree_constraints = max(data.get_max_degree() for data in matrix_repr.data[1:])
- assert max_degree_constraints == 1, f'{max_degree_constraints=}'
-
- cost = matrix_repr.data[0]
-
- # cost[1] is a 1 x n vector
- q = cost[1] / 2
-
- Gl = matrix_repr.data[1:1+len(l_inequality)]
- dim_l = sum(G[0].shape[0] for G in Gl)
-
- Gq = matrix_repr.data[1+len(l_inequality):1+len(l_inequality)+len(q_inequality)]
- dim_q = list(G[0].shape[0] for G in Gq)
-
- Gs = matrix_repr.data[1+len(l_inequality)+len(q_inequality):]
-
- def gen_square_matrix_dim():
- for Gs_raw in Gs:
- dim = np.sqrt(Gs_raw[0].shape[0])
-
- assert math.isclose(int(dim), dim), f'{dim=}'
-
- yield int(dim)
-
- dim_s = list(gen_square_matrix_dim())
-
- G = np.vstack(tuple(-v[1] for v in matrix_repr.data[1:]))
- h = np.vstack(tuple(v[0] for v in matrix_repr.data[1:]))
-
- if print_info:
- print(f'number of variables: {G.shape[1]}')
- print(f'{dim_l=}, {dim_q=}, {dim_s=}')
- n_constr_q = sum(dim_q)
- n_constr_s = sum(i**2 for i in dim_s)
- print(f'{n_constr_q=}, {n_constr_s=}')
-
- if print_matrix:
- print(f'cost={q}')
- print(f'G={G}')
- print(f'h={h}')
-
- # if previous_result is None:
- # primalstart = None
-
- # else:
- # primalstart = {
- # 'x': cvxopt.matrix(previous_result.x),
- # 'y': cvxopt.matrix(previous_result.y),
- # 's': cvxopt.matrix(previous_result.s),
- # 'z': cvxopt.matrix(previous_result.z),
- # }
-
- if max_degree_cost == 1:
- return_val = cvxopt.solvers.conelp(
- c=cvxopt.matrix(q.reshape(-1, 1)),
- G=cvxopt.matrix(G),
- h=cvxopt.matrix(h),
- dims={'l': dim_l, 'q': dim_q, 's': dim_s},
- # primalstart=primalstart,
- )
-
- else:
- # cost[2] is a 1 x n*n vector
- nP = int(np.sqrt(cost[2].shape[1]))
- P = cost[2].reshape(nP, nP).toarray()
-
- if print_matrix:
- print(f'{P=}')
-
- if print_matrix:
- print(f'{dim_l=}, {dim_q=}, {dim_s=}')
- print(f'{P=}')
- print(f'{q=}')
- print(f'{G=}')
- print(f'{h=}')
-
- return_val = cvxopt.solvers.coneqp(
- P=cvxopt.matrix(P),
- q=cvxopt.matrix(q.reshape(-1, 1)),
- G=cvxopt.matrix(G),
- h=cvxopt.matrix(h),
- dims={'l': dim_l, 'q': dim_q, 's': dim_s},
- # initvals=primalstart,
- )
-
- x = np.array(return_val['x']).reshape(-1)
-
- if x[0] == None:
- x0 = {}
- else:
- x0 = {var.name: matrix_repr.get_value(var, x) for var in variables}
-
- result = CVXOptConeQPResult(
- x=x,
- y=np.array(return_val['y']).reshape(-1),
- s=np.array(return_val['s']).reshape(-1),
- z=np.array(return_val['z']).reshape(-1),
- status=return_val['status'],
- gap=return_val['gap'],
- relative_gap=return_val['relative gap'],
- primal_objective=return_val['primal objective'],
- dual_objective=return_val['dual objective'],
- primal_infeasibility=return_val['primal infeasibility'],
- dual_infeasibility=return_val['dual infeasibility'],
- primal_slack=return_val['primal slack'],
- dual_slack=return_val['dual slack'],
- iterations=return_val['iterations'],
- x0=x0
- )
-
- return state, result
-
-def solve_cone2(
- state,
- variables,
- cost,
- s_inequality = tuple(),
- q_inequality = tuple(),
- l_inequality = tuple(),
- previous_result=None,
- print_info=False,
- print_matrix=False,
- ):
-
- try:
- state, matrix_repr = polymatrix.to_matrix_repr(
- (cost,) + l_inequality + q_inequality + s_inequality,
- polymatrix.v_stack(variables),
- ).apply(state)
- except:
- state, matrix_repr = polymatrix.to_matrix_repr(
- (cost,),
- polymatrix.v_stack(variables),
- ).apply(state)
-
- if len(l_inequality):
- state, matrix_repr = polymatrix.to_matrix_repr(
- l_inequality,
- polymatrix.v_stack(variables),
- ).apply(state)
-
- if len(q_inequality):
- state, matrix_repr = polymatrix.to_matrix_repr(
- q_inequality,
- polymatrix.v_stack(variables),
- ).apply(state)
-
- if len(s_inequality):
- state, matrix_repr = polymatrix.to_matrix_repr(
- s_inequality,
- polymatrix.v_stack(variables),
- ).apply(state)
-
-
-
- # maximum degree of cost function must be 2
- max_degree_cost = matrix_repr.data[0].get_max_degree()
- assert max_degree_cost <= 2, f'{max_degree_cost=}'
-
- # maximum degree of constraint must be 1
- max_degree_constraints = max(data.get_max_degree() for data in matrix_repr.data[1:])
- assert max_degree_constraints == 1, f'{max_degree_constraints=}'
-
- cost = matrix_repr.data[0]
-
- # cost[1] is a 1 x n vector
- q = cost[1] / 2
-
- Gl = matrix_repr.data[1:1+len(l_inequality)]
- dim_l = sum(G[0].shape[0] for G in Gl)
-
- Gq = matrix_repr.data[1+len(l_inequality):1+len(l_inequality)+len(q_inequality)]
- dim_q = list(G[0].shape[0] for G in Gq)
-
- Gs = matrix_repr.data[1+len(l_inequality)+len(q_inequality):]
-
- def gen_square_matrix_dim():
- for Gs_raw in Gs:
- dim = np.sqrt(Gs_raw[0].shape[0])
-
- assert math.isclose(int(dim), dim), f'{dim=}'
-
- yield int(dim)
-
- dim_s = list(gen_square_matrix_dim())
-
- G = np.vstack(tuple(-v[1] for v in matrix_repr.data[1:]))
- h = np.vstack(tuple(v[0] for v in matrix_repr.data[1:]))
-
- if print_info:
- print(f'number of variables: {G.shape[1]}')
- print(f'{dim_l=}, {dim_q=}, {dim_s=}')
-
- if print_matrix:
- print(f'{q=}')
- print(f'{G=}')
- print(f'{h=}')
-
- if previous_result is None:
- primalstart = None
-
- else:
- primalstart = {
- 'x': cvxopt.matrix(previous_result.x),
- 'y': cvxopt.matrix(previous_result.y),
- 's': cvxopt.matrix(previous_result.s),
- 'z': cvxopt.matrix(previous_result.z),
- }
-
- if max_degree_cost == 1:
- return_val = cvxopt.solvers.conelp(
- c=cvxopt.matrix(q.reshape(-1, 1)),
- G=cvxopt.matrix(G),
- h=cvxopt.matrix(h),
- dims={'l': dim_l, 'q': dim_q, 's': dim_s},
- primalstart=primalstart,
- )
-
+from collections import UserDict
+from numpy.typing import NDArray
+from dataclasses import dataclass, fields
+from pprint import pprint
+
+import polymatrix as poly
+from polymatrix.expression.expression import Expression
+from polymatrix.polymatrix.index import MonomialIndex, VariableIndex
+
+from .abc import Problem, SolverInfo
+from .constraints import NonNegative, EqualToZero
+from .variable import OptVariable
+
+
+class CVXOPTInfo(SolverInfo, UserDict):
+ """ Dictionary returned by CVXOPT. """
+
+
+def solve_sos_cone(prob: Problem, verbose: bool = False, *args, **kwargs) -> tuple[dict[OptVariable, float], CVXOPTInfo]:
+ r"""
+ Solve a conic problem in the cone of SOS polynomials :math:`\mathbf{\Sigma}_d(x)`.
+
+ Any additional `*args` and `**kwargs` are passed on to the CVXOPT solver.
+ """
+ # Check that the problem can be solved by CVXOpt, i.e.
+ # - Cost function is at most quadratic
+ # - Constraints are at most linear
+
+ cost = poly.to_affine(prob.cost.read(prob.state))
+ if cost.degree > 2:
+ raise ValueError("CVXOpt can solve at most quadratic conic programs, "
+ f"but the given problem has degree {cost.degree} > 2.")
+
+ is_qp = (cost.degree == 2)
+
+ # We need to convert the problem to the primal canonical form (given in the
+ # CVXOPT doc) with x = vstack(prob.variables)
+ #
+ # minimize .5 * x.T @ P @ x + q.T @ x
+ #
+ # subject to G @ x + s = h
+ # A @ x = b
+ # s >= 0
+ #
+ # If the problem is an LP, the CVXOPT doc calls the coefficient in the cost
+ # function `c`; here we use `q` for both LP and QP.
+
+ A_rows, b_rows = [], []
+ G_rows, h_rows = [], []
+ l_dims, q_dims, s_dims = [], [], []
+
+ # Collect variable indices in dictionary and sort them by value
+ # i.e. we want variable_indices.values() to be sorted
+ variable_indices: dict[OptVariable, VariableIndex] = dict(sorted({
+ # FIXME there can be more than one index associated with v
+ v: next(prob.state.get_indices_as_variable_index(v))
+ for v in prob.variables
+ }.items(), key=lambda item: item[1]))
+
+ # cost linear term
+ cost_coeffs = dict(cost.affine_coefficients_by_degrees(variable_indices.values()))
+ q = cost_coeffs[1].T
+
+ # cost quadratic term
+ if is_qp:
+ n = len(prob.variables)
+ P = np.zeros((n, n))
+ # This works because of how monomial indices are sorted
+ # see also polymatrix.index.MonomialIndex.__lt__
+ P[np.triu_indices(n)] = cost_coeffs[2]
+
+ # Make symmetric. There is no 0.5 factor because it is already
+ # present in the canonical form of CVXOPT
+ P = (P + P.T)
+
+ x = poly.v_stack(prob.polynomial_variables)
+ for c in prob.constraints:
+ # Convert constraints to affine expression in decision variables
+ constr = poly.to_affine(c.expression.quadratic_in(x).read(prob.state))
+ if constr.degree > 1:
+ raise ValueError("CVXOpt can solve problem with constraints that are "
+ f"at most linear, but {str(c.expression)} has degree {constr.degree}.")
+
+ # For each constraint we add a PSD matrix
+ nrows, ncols = constr.shape
+ s_dims.append(nrows)
+
+ # Constant term
+ c_coeff = constr.affine_coefficient(MonomialIndex.constant())
+ c_stacked = c_coeff.T.reshape((nrows * ncols,))
+
+ # Linear term
+ l_stacked_rows = []
+ for v in variable_indices.values():
+ # Get the affine coefficient associated to this variable
+ linear = MonomialIndex((v,))
+ v_coeff = constr.affine_coefficient(linear)
+
+ # Stack the columns of the coefficient matrix into a wide row vector (column-major order).
+ v_stacked = v_coeff.T.reshape((nrows * ncols,))
+ l_stacked_rows.append(v_stacked)
+
+ l_stacked = np.vstack(l_stacked_rows)
+
+ if isinstance(c, EqualToZero):
+ A_rows.append(l_stacked)
+ # Factor of -1 because it is on the right hand side
+ b_rows.append(-1 * c_stacked)
+
+ elif isinstance(c, NonNegative):
+ if c.domain:
+ raise ValueError("Cannot convert non-negativity constraint to CVXOPT format. "
+ "Domain restricted non-negativity must be "
+ "preprocessed by using a Positivstellensatz!")
+
+ # Factor of -1 because it is on the right hand side
+ G_rows.append(-1 * l_stacked)
+ h_rows.append(c_stacked)
+
+ else:
+ raise NotImplementedError(f"Cannot convert constraint of type {type(c)} "
+ "into the canonical form of CVXOPT.")
+
+ # Convert to CVXOPT matrices
+ q = cvxopt.matrix(q)
+ G = cvxopt.matrix(np.hstack(G_rows).T) if G_rows else None
+ h = cvxopt.matrix(np.hstack(h_rows)) if h_rows else None
+ A = cvxopt.matrix(np.hstack(A_rows).T) if A_rows else None
+ b = cvxopt.matrix(np.hstack(b_rows)) if b_rows else None
+ dims = {'l': l_dims, 'q': q_dims, 's': s_dims}
+
+ cvxopt.solvers.options['show_progress'] = verbose
+
+ if is_qp:
+ P = cvxopt.matrix(P)
+ info = cvxopt.solvers.coneqp(P=P, q=q, G=G, h=h, A=A, b=b,
+ dims=dims, *args, **kwargs)
else:
- # cost[2] is a 1 x n*n vector
- nP = int(np.sqrt(cost[2].shape[1]))
- P = cost[2].reshape(nP, nP).toarray()
-
- if print_matrix:
- print(f'{P=}')
-
- if print_matrix:
- print(f'{dim_l=}, {dim_q=}, {dim_s=}')
- print(f'{P=}')
- print(f'{q=}')
- print(f'{G=}')
- print(f'{h=}')
-
- return_val = cvxopt.solvers.coneqp(
- P=cvxopt.matrix(P),
- q=cvxopt.matrix(q.reshape(-1, 1)),
- G=cvxopt.matrix(G),
- h=cvxopt.matrix(h),
- dims={'l': dim_l, 'q': dim_q, 's': dim_s,},
- initvals=primalstart,
- )
-
- x = np.array(return_val['x']).reshape(-1)
-
- # if x[0] == None:
- # x0 = {}
- # else:
- # x0 = {var.name: matrix_repr.get_value(var, x) for var in variables}
-
- # result = CVXOptConeQPResult(
- # x=x,
- # y=np.array(return_val['y']).reshape(-1),
- # s=np.array(return_val['s']).reshape(-1),
- # z=np.array(return_val['z']).reshape(-1),
- # status=return_val['status'],
- # gap=return_val['gap'],
- # relative_gap=return_val['relative gap'],
- # primal_objective=return_val['primal objective'],
- # dual_objective=return_val['dual objective'],
- # primal_infeasibility=return_val['primal infeasibility'],
- # dual_infeasibility=return_val['dual infeasibility'],
- # primal_slack=return_val['primal slack'],
- # dual_slack=return_val['dual slack'],
- # iterations=return_val['iterations'],
- # x0=x0
- # )
-
- # return state, result
- return matrix_repr.get_value(variables[0], x), return_val['status']
-
-
-def solve_sos_problem(
- cost: tuple[polymatrix.Expression],
- sos_constraints: tuple[SOSExpr],
- state: polymatrix.ExpressionState,
- free_param: tuple[ParamSOSExpr] | None = None,
- x0: dict[ParamSOSExpr, np.ndarray] | None = None,
-):
- if x0 is None:
- x0 = {}
-
- def gen_all_param_expr():
- for sos_constraint in sos_constraints:
- yield from sos_constraint.dependence
-
- all_param_expr = tuple(set(gen_all_param_expr()))
-
- if free_param is None:
- free_param = all_param_expr
-
- sub_vals = tuple((param_expr.param, x0[param_expr.param.name]) for param_expr in all_param_expr if param_expr not in free_param)
-
- def gen_inequality():
- for sos_constraint in sos_constraints:
- if any(param_expr in free_param for param_expr in sos_constraint.dependence):
- yield sos_constraint.sos_matrix_vec.eval(sub_vals)
-
- inequality = tuple(gen_inequality())
-
- state, result = solve_cone(
- state,
- variables=tuple(param_expr.param for param_expr in free_param),
- cost=sum(cost),
- s_inequality=inequality,
- )
-
- return state, result
-
+ info = cvxopt.solvers.conelp(c=q, G=G, h=h, A=A, b=b,
+ *args, **kwargs)
-def solve_sos_problem2(
- cost: tuple[polymatrix.Expression],
- sos_constraints: tuple[SOSExpr],
- state: polymatrix.ExpressionState,
- subs: tuple[ParamSOSExpr] | None = None,
- x0: dict[ParamSOSExpr, np.ndarray] | None = None,
- print_info = False,
-):
- if x0 is None:
- x0 = {}
-
- if subs is None:
- subs = tuple()
-
- def gen_free_param_expr():
- for sos_constraint in sos_constraints:
- for param_expr in sos_constraint.dependence:
- if param_expr not in subs:
- yield param_expr
-
- free_param_expr = tuple(set(gen_free_param_expr()))
+ results = {}
+ for i, v in enumerate(variable_indices.keys()):
+ results[v] = info['x'][i]
- # print(f'{tuple(val.param.name for val in free_param_expr)=}')
-
- sub_vals = tuple((sub.param, x0[sub.param.name]) for sub in subs if sub is not None and sub.param.name in x0)
-
- def gen_inequality():
- for sos_constraint in sos_constraints:
- if any(param_expr in free_param_expr for param_expr in sos_constraint.dependence):
- yield sos_constraint.sos_matrix_vec.eval(sub_vals)
-
- inequality = tuple(gen_inequality())
-
- state, result = solve_cone(
- state,
- variables=tuple(param_expr.param for param_expr in free_param_expr),
- cost=sum(cost),
- s_inequality=inequality,
- print_info=print_info,
- )
-
- return state, result
+ return results, CVXOPTInfo(info)
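
The comment block in the new solve_sos_cone spells out the primal canonical form that the constraints are packed into. As a standalone illustration of that target form (toy data, independent of the library), the snippet below minimizes x subject to [[1, x], [x, 1]] being positive semidefinite (optimum x = -1), stacking the constraint matrices column-major into a single 's' block the way CVXOPT expects:

import cvxopt
import numpy as np

# Constraint F0 + x*F1 >= 0 (PSD), rewritten as G @ [x] + s = h with s in the
# semidefinite cone: G holds -vec(F1), h holds vec(F0), both column-major.
F0 = np.eye(2)
F1 = np.array([[0., 1.],
               [1., 0.]])

def vec(M):
    # Column-major stacking, as used for the 's' blocks (same as M.flatten(order="F"))
    return M.T.reshape(-1)

c = cvxopt.matrix([1.0])                       # cost: minimize x
G = cvxopt.matrix(-vec(F1).reshape(-1, 1))     # one column per decision variable
h = cvxopt.matrix(vec(F0))
dims = {'l': 0, 'q': [], 's': [2]}             # a single 2x2 semidefinite block

cvxopt.solvers.options['show_progress'] = False
info = cvxopt.solvers.conelp(c=c, G=G, h=h, dims=dims)
print(info['status'], info['x'][0])            # 'optimal', approximately -1.0
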
diff --git a/sumofsquares/problems.py b/sumofsquares/problems.py
index 035e756..8b3eaae 100644
--- a/sumofsquares/problems.py
+++ b/sumofsquares/problems.py
@@ -10,7 +10,7 @@ from typing import Any, Sequence
from typing_extensions import override
import polymatrix as poly
-from polymatrix.expression.expression import Expression
+from polymatrix.expression.expression import Expression, VariableExpression
from polymatrix.expressionstate.abc import ExpressionState
from polymatrix.variable.abc import Variable
@@ -26,7 +26,19 @@ class SOSResult(Result):
solver_info: Any
@override
- def value_of(self, var: OptVariable) -> float:
+ def value_of(self, var: OptVariable | VariableExpression) -> float:
+ if isinstance(var, VariableExpression):
+ if not isinstance(var.underlying, OptVariable):
+ # TODO: error message
+ raise ValueError
+
+ # Unwrap the expression
+ var = var.underlying
+
+ if var not in self.values:
+ # TODO: error message
+ raise KeyError
+
return self.values[var]
@@ -56,19 +68,25 @@ class PutinarSOSProblem(Problem):
# Add Lagrange-multiplier-like polynomials to SOS constraints, while
# leaving the rest untouched.
variables = list(self.variables)
- constraints = list(filterfalse(need_positivstellensatz, self.constraints))
- for c in filter(need_positivstellensatz, self.constraints):
- # FIXME: implement
+ constraints = filterfalse(need_positivstellensatz, self.constraints)
+ multipliers = []
+ for i, c in enumerate(filter(need_positivstellensatz, self.constraints)):
+ d = c.expression.degree()
+ x = poly.v_stack(self.polynomial_variables)
+ # FIXME: check that there are no variables with these names
+ # m = x.combinations(d).parametrize(fr"\gamma_{i}")
+ # multipliers.append(NonNegative(m))
raise NotImplementedError
- return replace(self, constraints=constraints, variables=variables)
+ return replace(self, variables=variables,
+ constraints=tuple(constraints) + tuple(multipliers))
@override
- def solve(self) -> Result:
+ def solve(self, verbose: bool = False) -> Result:
if not self.solver == Solver.CVXOPT:
raise NotImplementedError(f"Cannot solve problem {self} using solver {str(self.solver)}.")
prob = self.after_positivstellensatz()
- result, info = cvxopt_solve_sos_cone(prob)
+ result, info = cvxopt_solve_sos_cone(prob, verbose)
return SOSResult(result, info)
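
With the value_of change above, results can be queried either with a bare OptVariable or with a VariableExpression wrapping one. A hypothetical sketch of the two equivalent calls (`result` and `gamma` are assumed to come from a solved problem):

value = result.value_of(gamma)             # gamma: VariableExpression; unwrapped via gamma.underlying
value = result.value_of(gamma.underlying)  # equivalent, passing the OptVariable directly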