dual_example.py
import diffcp
import numpy as np
import utils
np.set_printoptions(precision=5, suppress=True)
# We generate a random cone program with a cone
# defined as a product of a 3-d fixed cone, 3-d positive orthant cone,
# and a 5-d second order cone.
K = {
    'f': 3,
    'l': 3,
    'q': [5]
}
m = 3 + 3 + 5
n = 5
np.random.seed(0)
A, b, c = utils.random_cone_prog(m, n, K)
# We solve the cone program and get the derivative and its adjoint
x, y, s, derivative, adjoint_derivative = diffcp.solve_and_derivative(
    A, b, c, K, eps=1e-10)
print("x =", x)
print("y =", y)
print("s =", s)
# We evaluate the gradient of the objective c^T x with respect to A, b and c
# by applying the adjoint of the derivative to (dx, dy, ds) = (c, 0, 0).
dA, db, dc = adjoint_derivative(
    c, np.zeros(m), np.zeros(m), atol=1e-10, btol=1e-10)
# The gradient of the objective with respect to b should be
# equal to minus the dual variable y (see, e.g., page 268 of Convex Optimization by
# Boyd & Vandenberghe).
print("db =", db)
print("-y =", -y)