path: root/sumofsquares/cvxopt.py
Diffstat
-rw-r--r--  sumofsquares/cvxopt.py  233
1 file changed, 201 insertions, 32 deletions
diff --git a/sumofsquares/cvxopt.py b/sumofsquares/cvxopt.py
index ed2146d..cc7d555 100644
--- a/sumofsquares/cvxopt.py
+++ b/sumofsquares/cvxopt.py
@@ -28,7 +28,17 @@ class CVXOptConeQPResult:
-def solve_cone(state, variables, cost, s_inequality, l_inequality = tuple(), previous_result=None, print_info=False, print_matrix=False):
+def solve_cone(
+ state,
+ variables,
+ cost,
+ s_inequality,
+ # q_inequality = tuple(),
+ l_inequality = tuple(),
+ previous_result=None,
+ print_info=False,
+ print_matrix=False,
+ ):
state, matrix_repr = polymatrix.to_matrix_repr(
(cost,) + l_inequality + s_inequality,
polymatrix.v_stack(variables),
@@ -135,6 +145,164 @@ def solve_cone(state, variables, cost, s_inequality, l_inequality = tuple(), pre
return state, result
+def solve_cone2(
+ state,
+ variables,
+ cost,
+ s_inequality = tuple(),
+ q_inequality = tuple(),
+ l_inequality = tuple(),
+ previous_result=None,
+ print_info=False,
+ print_matrix=False,
+ ):
+
+    # stack the cost together with all cone constraints into a single matrix
+    # representation over the stacked decision variables; when the inequality
+    # tuples are empty this reduces to the cost alone
+    state, matrix_repr = polymatrix.to_matrix_repr(
+        (cost,) + l_inequality + q_inequality + s_inequality,
+        polymatrix.v_stack(variables),
+    ).apply(state)
+
+    # the cost function must be at most quadratic in the decision variables
+    max_degree_cost = matrix_repr.data[0].get_max_degree()
+    assert max_degree_cost <= 2, f'{max_degree_cost=}'
+
+    # every cone constraint must be affine (degree 1) in the decision variables
+    max_degree_constraints = max(data.get_max_degree() for data in matrix_repr.data[1:])
+    assert max_degree_constraints == 1, f'{max_degree_constraints=}'
+
+ cost = matrix_repr.data[0]
+
+    # cost[1] is the 1 x n vector of linear coefficients; halving it (with
+    # P = cost[2] below) amounts to minimizing half the original objective,
+    # which matches coneqp's 1/2 x'Px + q'x form without changing the minimizer
+    q = cost[1] / 2
+
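+    # the remaining entries of matrix_repr.data follow the stacking order of
+    # the constraints: linear cone blocks first, then second-order cone blocks,
+    # then semidefinite cone blocks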
+ Gl = matrix_repr.data[1:1+len(l_inequality)]
+ dim_l = sum(G[0].shape[0] for G in Gl)
+
+ Gq = matrix_repr.data[1+len(l_inequality):1+len(l_inequality)+len(q_inequality)]
+ dim_q = list(G[0].shape[0] for G in Gq)
+
+ Gs = matrix_repr.data[1+len(l_inequality)+len(q_inequality):]
+
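+    # each semidefinite block is stored as a vectorized n x n matrix with n*n
+    # rows, so its side length is the square root of the row count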
+ def gen_square_matrix_dim():
+ for Gs_raw in Gs:
+ dim = np.sqrt(Gs_raw[0].shape[0])
+
+ assert math.isclose(int(dim), dim), f'{dim=}'
+
+ yield int(dim)
+
+ dim_s = list(gen_square_matrix_dim())
+
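+    # cvxopt expects conic constraints as h - G @ x in the cone; each block
+    # stores the affine expression as (constant, coefficient), hence the
+    # coefficient part is negated when stacking G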
+ G = np.vstack(tuple(-v[1] for v in matrix_repr.data[1:]))
+ h = np.vstack(tuple(v[0] for v in matrix_repr.data[1:]))
+
+ if print_info:
+ print(f'number of variables: {G.shape[1]}')
+ print(f'{dim_l=}, {dim_q=}, {dim_s=}')
+
+ if print_matrix:
+ print(f'{q=}')
+ print(f'{G=}')
+ print(f'{h=}')
+
+ if previous_result is None:
+ primalstart = None
+
+ else:
+ primalstart = {
+ 'x': cvxopt.matrix(previous_result.x),
+ 'y': cvxopt.matrix(previous_result.y),
+ 's': cvxopt.matrix(previous_result.s),
+ 'z': cvxopt.matrix(previous_result.z),
+ }
+
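+    # an affine cost is passed to the cone LP solver; a quadratic cost
+    # additionally provides the Hessian P and goes through the cone QP solver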
+ if max_degree_cost == 1:
+ return_val = cvxopt.solvers.conelp(
+ c=cvxopt.matrix(q.reshape(-1, 1)),
+ G=cvxopt.matrix(G),
+ h=cvxopt.matrix(h),
+ dims={'l': dim_l, 'q': dim_q, 's': dim_s},
+ primalstart=primalstart,
+ )
+
+ else:
+ # cost[2] is a 1 x n*n vector
+ nP = int(np.sqrt(cost[2].shape[1]))
+ P = cost[2].reshape(nP, nP).toarray()
+
+        if print_matrix:
+            print(f'{dim_l=}, {dim_q=}, {dim_s=}')
+            print(f'{P=}')
+
+ return_val = cvxopt.solvers.coneqp(
+ P=cvxopt.matrix(P),
+ q=cvxopt.matrix(q.reshape(-1, 1)),
+ G=cvxopt.matrix(G),
+ h=cvxopt.matrix(h),
+ dims={'l': dim_l, 'q': dim_q, 's': dim_s,},
+ initvals=primalstart,
+ )
+
+ x = np.array(return_val['x']).reshape(-1)
+
+ # if x[0] == None:
+ # x0 = {}
+ # else:
+ # x0 = {var.name: matrix_repr.get_value(var, x) for var in variables}
+
+ # result = CVXOptConeQPResult(
+ # x=x,
+ # y=np.array(return_val['y']).reshape(-1),
+ # s=np.array(return_val['s']).reshape(-1),
+ # z=np.array(return_val['z']).reshape(-1),
+ # status=return_val['status'],
+ # gap=return_val['gap'],
+ # relative_gap=return_val['relative gap'],
+ # primal_objective=return_val['primal objective'],
+ # dual_objective=return_val['dual objective'],
+ # primal_infeasibility=return_val['primal infeasibility'],
+ # dual_infeasibility=return_val['dual infeasibility'],
+ # primal_slack=return_val['primal slack'],
+ # dual_slack=return_val['dual slack'],
+ # iterations=return_val['iterations'],
+ # x0=x0
+ # )
+
+ # return state, result
+ return matrix_repr.get_value(variables[0], x), return_val['status']
+
def solve_sos_problem(
cost: tuple[polymatrix.Expression],
@@ -175,42 +343,43 @@ def solve_sos_problem(
return state, result
-# def solve_sos_problem(
-# cost: tuple[polymatrix.Expression],
-# sos_constraints: tuple[SOSConstraint],
-# state: polymatrix.ExpressionState,
-# subs: tuple[ParamSOSExpr] = None,
-# x0: dict[ParamSOSExpr, np.ndarray] = None,
-# ):
-# if x0 is None:
-# x0 = {}
+def solve_sos_problem2(
+ cost: tuple[polymatrix.Expression],
+ sos_constraints: tuple[SOSConstraint],
+ state: polymatrix.ExpressionState,
+ subs: tuple[ParamSOSExpr] = None,
+    x0: dict[str, np.ndarray] = None,
+):
+ if x0 is None:
+ x0 = {}
-# if subs is None:
-# subs = tuple()
+ if subs is None:
+ subs = tuple()
-# def gen_free_param_expr():
-# for sos_constraint in sos_constraints:
-# for param_expr in sos_constraint.dependence:
-# if param_expr not in subs:
-# yield param_expr
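+    # parameters that are not substituted stay free and become the decision
+    # variables of the cone program below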
+ def gen_free_param_expr():
+ for sos_constraint in sos_constraints:
+ for param_expr in sos_constraint.dependence:
+ if param_expr not in subs:
+ yield param_expr
-# free_param_expr = tuple(set(gen_free_param_expr()))
+ free_param_expr = tuple(set(gen_free_param_expr()))
+
+ # print(f'{tuple(val.param.name for val in free_param_expr)=}')
-# sub_vals = tuple((sub.param, x0[sub.param]) for sub in subs)
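+    # substituted parameters are fixed to the values supplied in x0, keyed by
+    # parameter name; parameters without an entry in x0 are skipped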
+ sub_vals = tuple((sub.param, x0[sub.param.name]) for sub in subs if sub is not None and sub.param.name in x0)
-# def gen_inequality():
-# for sos_constraint in sos_constraints:
-# if any(param_expr in free_param_expr for param_expr in sos_constraint.dependence):
-# # print(tuple(d.name for d in sos_constraint.dependence))
-# yield sos_constraint.constraint.eval(sub_vals)
+ def gen_inequality():
+ for sos_constraint in sos_constraints:
+ if any(param_expr in free_param_expr for param_expr in sos_constraint.dependence):
+ yield sos_constraint.constraint.eval(sub_vals)
-# inequality = tuple(gen_inequality())
+ inequality = tuple(gen_inequality())
-# state, result = solve_cone(
-# state,
-# variables=tuple(param_expr.param for param_expr in free_param_expr),
-# cost=sum(cost),
-# s_inequality=inequality,
-# )
+ state, result = solve_cone(
+ state,
+ variables=tuple(param_expr.param for param_expr in free_param_expr),
+ cost=sum(cost),
+ s_inequality=inequality,
+ )
-# return state, result
+ return state, result