author    Michael Schneeberger <michael.schneeberger@fhnw.ch>  2023-12-13 10:02:36 +0100
committer Michael Schneeberger <michael.schneeberger@fhnw.ch>  2023-12-13 10:02:36 +0100
commit    a5f4e44293c017b80688090aab92fcf9c7f584da (patch)
tree      9ce8676a23970d3117cd73aa0b3fd685ee593af3
parent    add putinar n-satz (diff)
include q_inequality in solve_cone
-rw-r--r--  sumofsquares/cvxopt.py                   | 87
-rw-r--r--  sumofsquares/init/initputinarepsilon.py  |  1
-rw-r--r--  sumofsquares/mixins/sosexpropmixin.py    | 10
3 files changed, 73 insertions(+), 25 deletions(-)
diff --git a/sumofsquares/cvxopt.py b/sumofsquares/cvxopt.py
index cc7d555..a703104 100644
--- a/sumofsquares/cvxopt.py
+++ b/sumofsquares/cvxopt.py
@@ -32,17 +32,43 @@ def solve_cone(
state,
variables,
cost,
- s_inequality,
- # q_inequality = tuple(),
+ s_inequality = tuple(),
+ q_inequality = tuple(),
l_inequality = tuple(),
- previous_result=None,
+ # previous_result=None,
print_info=False,
print_matrix=False,
):
- state, matrix_repr = polymatrix.to_matrix_repr(
- (cost,) + l_inequality + s_inequality,
- polymatrix.v_stack(variables),
- ).apply(state)
+
+ try:
+ state, matrix_repr = polymatrix.to_matrix_repr(
+ (cost,) + l_inequality + q_inequality + s_inequality,
+ polymatrix.v_stack(variables),
+ ).apply(state)
+ except:
+ state, matrix_repr = polymatrix.to_matrix_repr(
+ (cost,),
+ polymatrix.v_stack(variables),
+ ).apply(state)
+
+ if len(l_inequality):
+ state, matrix_repr = polymatrix.to_matrix_repr(
+ l_inequality,
+ polymatrix.v_stack(variables),
+ ).apply(state)
+
+ if len(q_inequality):
+ state, matrix_repr = polymatrix.to_matrix_repr(
+ q_inequality,
+ polymatrix.v_stack(variables),
+ ).apply(state)
+
+ if len(s_inequality):
+ state, matrix_repr = polymatrix.to_matrix_repr(
+ s_inequality,
+ polymatrix.v_stack(variables),
+ ).apply(state)
+
# maximum degree of cost function must be 2
max_degree_cost = matrix_repr.data[0].get_max_degree()
@@ -58,9 +84,12 @@ def solve_cone(
q = cost[1] / 2
Gl = matrix_repr.data[1:1+len(l_inequality)]
- l = sum(G[0].shape[0] for G in Gl)
+ dim_l = sum(G[0].shape[0] for G in Gl)
+
+ Gq = matrix_repr.data[1+len(l_inequality):1+len(l_inequality)+len(q_inequality)]
+ dim_q = list(G[0].shape[0] for G in Gq)
- Gs = matrix_repr.data[1+len(l_inequality):]
+ Gs = matrix_repr.data[1+len(l_inequality)+len(q_inequality):]
def gen_square_matrix_dim():
for Gs_raw in Gs:
@@ -70,52 +99,62 @@ def solve_cone(
yield int(dim)
- s = list(gen_square_matrix_dim())
+ dim_s = list(gen_square_matrix_dim())
G = np.vstack(tuple(-v[1] for v in matrix_repr.data[1:]))
h = np.vstack(tuple(v[0] for v in matrix_repr.data[1:]))
if print_info:
print(f'number of variables: {G.shape[1]}')
- print(f'PSD matrix dimensions: {s}')
+ print(f'{dim_l=}, {dim_q=}, {dim_s=}')
if print_matrix:
print(f'cost={q}')
print(f'G={G}')
print(f'h={h}')
- if previous_result is None:
- primalstart = None
+ # if previous_result is None:
+ # primalstart = None
- else:
- primalstart = {
- 'x': cvxopt.matrix(previous_result.x),
- 'y': cvxopt.matrix(previous_result.y),
- 's': cvxopt.matrix(previous_result.s),
- 'z': cvxopt.matrix(previous_result.z),
- }
+ # else:
+ # primalstart = {
+ # 'x': cvxopt.matrix(previous_result.x),
+ # 'y': cvxopt.matrix(previous_result.y),
+ # 's': cvxopt.matrix(previous_result.s),
+ # 'z': cvxopt.matrix(previous_result.z),
+ # }
if max_degree_cost == 1:
return_val = cvxopt.solvers.conelp(
c=cvxopt.matrix(q.reshape(-1, 1)),
G=cvxopt.matrix(G),
h=cvxopt.matrix(h),
- dims={'l': 0, 'q': [], 's': s},
- primalstart=primalstart,
+ dims={'l': dim_l, 'q': dim_q, 's': dim_s},
+ # primalstart=primalstart,
)
else:
# cost[2] is a 1 x n*n vector
nP = int(np.sqrt(cost[2].shape[1]))
P = cost[2].reshape(nP, nP).toarray()
+
+ if print_matrix:
+ print(f'{P=}')
+
+ if print_matrix:
+ print(f'{dim_l=}, {dim_q=}, {dim_s=}')
+ print(f'{P=}')
+ print(f'{q=}')
+ print(f'{G=}')
+ print(f'{h=}')
return_val = cvxopt.solvers.coneqp(
P=cvxopt.matrix(P),
q=cvxopt.matrix(q.reshape(-1, 1)),
G=cvxopt.matrix(G),
h=cvxopt.matrix(h),
- dims={'l': l, 'q': [], 's': s,},
- initvals=primalstart,
+ dims={'l': dim_l, 'q': dim_q, 's': dim_s},
+ # initvals=primalstart,
)
x = np.array(return_val['x']).reshape(-1)
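For reference, a minimal self-contained example (not taken from this repository) of the cvxopt cone-LP interface that solve_cone drives after this change: dims declares one nonnegative-orthant row ('l'), one 3-dimensional second-order cone ('q') and one 2x2 semidefinite block ('s'), and the rows of G and h are stacked in exactly that order. coneqp accepts the same dims together with a quadratic cost matrix P.

import numpy as np
import cvxopt

# minimize x1 + x2  subject to
#   x1 >= 0                       (one 'l' row)
#   ||(x1, x2)|| <= 2             (one second-order cone of dimension 3)
#   diag(x1 + 1, x2 + 1) PSD      (one 2x2 semidefinite block, stored as a full 2*2 column-major vector)
c = cvxopt.matrix(np.array([1.0, 1.0]))
G = cvxopt.matrix(np.array([
    [-1.0,  0.0],   # 'l' block
    [ 0.0,  0.0],   # 'q' block; the first entry of h - G*x is the cone's "t" component (here the constant 2)
    [-1.0,  0.0],
    [ 0.0, -1.0],
    [-1.0,  0.0],   # 's' block: 2*2 = 4 rows
    [ 0.0,  0.0],
    [ 0.0,  0.0],
    [ 0.0, -1.0],
]))
h = cvxopt.matrix(np.array([0.0, 2.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0]))
dims = {'l': 1, 'q': [3], 's': [2]}

sol = cvxopt.solvers.conelp(c=c, G=G, h=h, dims=dims)
x = np.array(sol['x']).reshape(-1)   # optimum approximately (0, -1)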
diff --git a/sumofsquares/init/initputinarepsilon.py b/sumofsquares/init/initputinarepsilon.py
index d3e348d..6a4ccb2 100644
--- a/sumofsquares/init/initputinarepsilon.py
+++ b/sumofsquares/init/initputinarepsilon.py
@@ -23,7 +23,6 @@ def init_putinar_epsilon(
yield key, init_param_expr_from_reference(
name=f'gamma_{key}_{name}',
reference=f0,
- # reference=condition,
multiplicand=val,
)
diff --git a/sumofsquares/mixins/sosexpropmixin.py b/sumofsquares/mixins/sosexpropmixin.py
index f7cf0c2..9d6dacd 100644
--- a/sumofsquares/mixins/sosexpropmixin.py
+++ b/sumofsquares/mixins/sosexpropmixin.py
@@ -102,6 +102,16 @@ class SOSExprOPMixin(SOSExprMixin):
def __sub__(self, other: typing.Union[polymatrix.Expression, 'SOSExprOPMixin']) -> 'SOSExprOPMixin':
return self._binary(Expression.__sub__, self, other)
+ def __getitem__(self, key: tuple[int, int]):
+ return dataclasses.replace(
+ self,
+ underlying=init_sos_expr_base(
+ expr=self.expr[key[0], key[1]],
+ variables=self.variables,
+ dependence=self.dependence,
+ ),
+ )
+
def cache(self) -> 'SOSExprOPMixin':
return self._unary(Expression.cache, self)
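The new __getitem__ in sosexpropmixin.py follows the same immutable style as the other operators: instead of modifying the expression in place, it returns a copy via dataclasses.replace whose underlying polymatrix expression is the selected (row, col) entry, while variables and dependence are carried over unchanged. A minimal self-contained sketch of that replace-based indexing pattern; the Wrapped class below is illustrative only and not part of this repository:

import dataclasses

@dataclasses.dataclass(frozen=True)
class Wrapped:
    # stand-in for the SOS expression wrapper: `data` plays the role of `expr`,
    # `label` the role of the fields that are carried over unchanged
    data: tuple[tuple[int, ...], ...]
    label: str

    def __getitem__(self, key: tuple[int, int]) -> 'Wrapped':
        row, col = key
        # build a new instance that keeps `label` and replaces only `data`
        return dataclasses.replace(self, data=((self.data[row][col],),))

w = Wrapped(data=((1, 2), (3, 4)), label='example')
print(w[0, 1].data)   # ((2,),) -- a fresh Wrapped; the original w is unchanged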