summaryrefslogtreecommitdiffstats
path: root/sumofsquares/cvxopt.py
diff options
context:
space:
mode:
Diffstat (limited to '')
-rw-r--r--sumofsquares/cvxopt.py216
1 files changed, 216 insertions, 0 deletions
diff --git a/sumofsquares/cvxopt.py b/sumofsquares/cvxopt.py
new file mode 100644
index 0000000..72dd869
--- /dev/null
+++ b/sumofsquares/cvxopt.py
@@ -0,0 +1,216 @@
+import dataclasses
+import cvxopt
+import polymatrix
+import numpy as np
+import math
+from sumofsquares.abc.sosexpr import ParamSOSExpr
+
+from sumofsquares.sosconstraint.abc import SOSConstraint
+
+
@dataclasses.dataclass
class CVXOptConeQPResult:
    """Result of a CVXOPT ``conelp``/``coneqp`` call.

    Mirrors the entries of the solver's result dictionary, plus ``x0``,
    a mapping from problem variable to its decoded solution value.
    """

    x: np.ndarray                   # primal solution vector
    y: np.ndarray                   # dual variables of the equality constraints
    s: np.ndarray                   # primal slack variables
    z: np.ndarray                   # dual variables of the cone inequalities
    status: str                     # solver status string, e.g. 'optimal'
    gap: float                      # duality gap
    relative_gap: float             # gap relative to the objective magnitude
    primal_objective: float
    dual_objective: float
    primal_infeasibility: float
    dual_infeasibility: float
    primal_slack: float
    dual_slack: float
    iterations: int                 # number of interior-point iterations
    x0: dict                        # {variable: value} decoded from x; empty if no solution
+
+
+
def solve_cone(state, variables, cost, s_inequality, l_inequality = tuple(), previous_result=None, print_info=False, print_matrix=False):
    """Solve a cone program (LP or QP) with CVXOPT.

    The ``cost`` expression must be at most quadratic in ``variables``;
    all inequality expressions must be affine.  Each expression ``e`` in
    ``l_inequality`` (componentwise) and ``s_inequality`` (semidefinite,
    given as a flattened d*d vector) is interpreted as ``e >= 0``.

    Args:
        state: polymatrix expression state threaded through the call.
        variables: decision variables, stacked in order.
        cost: scalar polymatrix expression to minimize.
        s_inequality: tuple of semidefinite-cone constraint expressions.
        l_inequality: tuple of componentwise constraint expressions.
        previous_result: optional CVXOptConeQPResult used to warm-start.
        print_info: print problem dimensions before solving.
        print_matrix: print the assembled cost and constraint matrices.

    Returns:
        ``(state, CVXOptConeQPResult)``.
    """
    state, matrix_repr = polymatrix.to_matrix_repr(
        (cost,) + l_inequality + s_inequality,
        polymatrix.v_stack(variables),
    ).apply(state)

    # maximum degree of cost function must be 2
    max_degree_cost = matrix_repr.data[0].get_max_degree()
    assert max_degree_cost <= 2, f'{max_degree_cost=}'

    # maximum degree of constraint must be 1 (affine)
    max_degree_constraints = max(data.get_max_degree() for data in matrix_repr.data[1:])
    assert max_degree_constraints == 1, f'{max_degree_constraints=}'

    cost = matrix_repr.data[0]

    # cost[1] is a 1 x n vector; the factor 1/2 rescales the objective to
    # match coneqp's (1/2) x'Px + q'x form and does not change the minimizer
    q = cost[1] / 2

    # number of rows belonging to the componentwise (linear) cone
    Gl = matrix_repr.data[1:1+len(l_inequality)]
    l = sum(G[0].shape[0] for G in Gl)

    Gs = matrix_repr.data[1+len(l_inequality):]

    def gen_square_matrix_dim():
        # each semidefinite block arrives as a flattened d*d column; recover d
        for Gs_raw in Gs:
            dim = np.sqrt(Gs_raw[0].shape[0])

            assert math.isclose(int(dim), dim), f'{dim=}'

            yield int(dim)

    s = list(gen_square_matrix_dim())

    # CVXOPT convention is G x + slack = h, so the constraint
    # c0 + c1 x >= 0 becomes (-c1) x <= c0
    G = np.vstack(tuple(-v[1] for v in matrix_repr.data[1:]))
    h = np.vstack(tuple(v[0] for v in matrix_repr.data[1:]))

    if print_info:
        print(f'number of variables: {G.shape[1]}')
        print(f'PSD matrix dimensions: {s}')

    if print_matrix:
        print(f'cost={q}')
        print(f'G={G}')
        print(f'h={h}')

    if previous_result is None:
        primalstart = None

    else:
        primalstart = {
            'x': cvxopt.matrix(previous_result.x),
            'y': cvxopt.matrix(previous_result.y),
            's': cvxopt.matrix(previous_result.s),
            'z': cvxopt.matrix(previous_result.z),
        }

    # same cone layout for both solver branches; the rows of G are ordered
    # linear-cone first, then the semidefinite blocks
    dims = {'l': l, 'q': [], 's': s}

    if max_degree_cost == 1:
        # linear cost: cone LP.
        # Bug fix: dims previously hard-coded 'l': 0 here, which mis-assigned
        # any linear-inequality rows of G to the semidefinite cones.
        return_val = cvxopt.solvers.conelp(
            c=cvxopt.matrix(q.reshape(-1, 1)),
            G=cvxopt.matrix(G),
            h=cvxopt.matrix(h),
            dims=dims,
            primalstart=primalstart,
        )

    else:
        # quadratic cost: cone QP. cost[2] is a 1 x n*n vector
        nP = int(np.sqrt(cost[2].shape[1]))
        P = cost[2].reshape(nP, nP).toarray()

        return_val = cvxopt.solvers.coneqp(
            P=cvxopt.matrix(P),
            q=cvxopt.matrix(q.reshape(-1, 1)),
            G=cvxopt.matrix(G),
            h=cvxopt.matrix(h),
            dims=dims,
            initvals=primalstart,
        )

    x = np.array(return_val['x']).reshape(-1)

    # when the solver fails, 'x' may be None; np.array(None).reshape(-1)
    # yields an object array holding a single None
    if x[0] is None:
        x0 = {}
    else:
        x0 = {var: matrix_repr.get_value(var, x) for var in variables}

    result = CVXOptConeQPResult(
        x=x,
        y=np.array(return_val['y']).reshape(-1),
        s=np.array(return_val['s']).reshape(-1),
        z=np.array(return_val['z']).reshape(-1),
        status=return_val['status'],
        gap=return_val['gap'],
        relative_gap=return_val['relative gap'],
        primal_objective=return_val['primal objective'],
        dual_objective=return_val['dual objective'],
        primal_infeasibility=return_val['primal infeasibility'],
        dual_infeasibility=return_val['dual infeasibility'],
        primal_slack=return_val['primal slack'],
        dual_slack=return_val['dual slack'],
        iterations=return_val['iterations'],
        x0=x0
    )

    return state, result
+
+
def solve_sos_problem(
    cost: tuple[polymatrix.Expression],
    sos_constraints: tuple[SOSConstraint],
    state: polymatrix.ExpressionState,
    free_param: tuple[ParamSOSExpr] = None,
    x0: dict[ParamSOSExpr, np.ndarray] = None,
):
    """Assemble and solve an SOS problem via :func:`solve_cone`.

    Args:
        cost: cost expressions; their sum is minimized.
        sos_constraints: SOS constraints of the problem.
        state: polymatrix expression state threaded through the call.
        free_param: parameters treated as decision variables; defaults to
            every parameter any constraint depends on.  Parameters not in
            ``free_param`` are substituted with their value from ``x0``.
        x0: values for the substituted (non-free) parameters.

    Returns:
        ``(state, result)`` as produced by :func:`solve_cone`.
    """
    if x0 is None:
        x0 = {}

    # every parameter expression referenced by some constraint, deduplicated
    all_param_expr = tuple({
        param_expr
        for sos_constraint in sos_constraints
        for param_expr in sos_constraint.dependence
    })

    if free_param is None:
        free_param = all_param_expr

    # fixed (non-free) parameters are replaced by their value from x0
    sub_vals = tuple(
        (param_expr.param, x0[param_expr.param])
        for param_expr in all_param_expr
        if param_expr not in free_param
    )

    # keep only constraints that still involve at least one free parameter
    inequality = tuple(
        sos_constraint.constraint.eval(sub_vals)
        for sos_constraint in sos_constraints
        if any(param_expr in free_param for param_expr in sos_constraint.dependence)
    )

    return solve_cone(
        state,
        variables=tuple(param_expr.param for param_expr in free_param),
        cost=sum(cost),
        s_inequality=inequality,
    )
+
+
+# def solve_sos_problem(
+# cost: tuple[polymatrix.Expression],
+# sos_constraints: tuple[SOSConstraint],
+# state: polymatrix.ExpressionState,
+# subs: tuple[ParamSOSExpr] = None,
+# x0: dict[ParamSOSExpr, np.ndarray] = None,
+# ):
+# if x0 is None:
+# x0 = {}
+
+# if subs is None:
+# subs = tuple()
+
+# def gen_free_param_expr():
+# for sos_constraint in sos_constraints:
+# for param_expr in sos_constraint.dependence:
+# if param_expr not in subs:
+# yield param_expr
+
+# free_param_expr = tuple(set(gen_free_param_expr()))
+
+# sub_vals = tuple((sub.param, x0[sub.param]) for sub in subs)
+
+# def gen_inequality():
+# for sos_constraint in sos_constraints:
+# if any(param_expr in free_param_expr for param_expr in sos_constraint.dependence):
+# # print(tuple(d.name for d in sos_constraint.dependence))
+# yield sos_constraint.constraint.eval(sub_vals)
+
+# inequality = tuple(gen_inequality())
+
+# state, result = solve_cone(
+# state,
+# variables=tuple(param_expr.param for param_expr in free_param_expr),
+# cost=sum(cost),
+# s_inequality=inequality,
+# )
+
+# return state, result