Commit

few fixes to pass tests
riceric22 committed May 3, 2017
1 parent f7383d7 commit a59c1d8
Showing 2 changed files with 46 additions and 36 deletions.
41 changes: 8 additions & 33 deletions newton_admm/newton_admm.py
@@ -165,33 +165,11 @@ def _setup_benchmark():
 
 def error_from_x(x, benchmark, m):
     """ From x, return beta """
-    # prob, beta, betastar = benchmark
-    # scs_output = {
-    #     "info" : {
-    #             'status': 'Solved',
-    #             'statusVal': 1,
-    #             'resPri': 1,
-    #             'resInfeas': 1,
-    #             'solveTime': 1,
-    #             'relGap': 1,
-    #             'iter': 1,
-    #             'dobj': 1,
-    #             'pobj': 1,
-    #             'setupTime': 1,
-    #             'resUnbdd': 1,
-    #             'resDual': 1},
-    #     "y" : np.zeros(m),
-    #     "x" : x,
-    #     "s" : np.zeros(m)
-    # }
-    # prob.unpack_results(cp.SCS, scs_output)
     beta_from_x, betastar = benchmark
-    return np.linalg.norm(beta_from_x(x) - betastar)
-    # if isinstance(beta, cp.Variable):
-    #     return np.linalg.norm(beta.value - betastar)
-    # else:
-    #     return np.sum(np.linalg.norm(b1.value-b2)
-    #                   for b1, b2 in zip(beta, betastar))
+    if isinstance(betastar, (tuple, list)):
+        return sum(np.linalg.norm(a-b) for a,b in zip(beta_from_x(x), betastar))
+    else:
+        return np.linalg.norm(beta_from_x(x) - betastar)
 
 def update_benchmark(b, u, v, c, t, zerotol, benchmark, m):
     n = len(c)
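The reworked error_from_x above now accepts a (beta_from_x, betastar) pair and handles both single-variable and multi-variable benchmarks. A minimal standalone sketch of the two branches (names, sizes, and data hypothetical, not from the repo):

    import numpy as np

    x = np.zeros(8)  # stand-in for the solver's primal iterate

    # Single-variable benchmark: beta_from_x maps x to the fitted
    # coefficients, compared against a known optimum betastar.
    beta_from_x = lambda x: x[:5]
    betastar = np.ones(5)
    err = np.linalg.norm(beta_from_x(x) - betastar)

    # Multi-variable benchmark (e.g. robust PCA recovers (Lhat, Mhat)):
    # the tuple branch sums per-variable errors instead of subtracting tuples.
    beta_from_x2 = lambda x: (x[:4].reshape(2, 2), x[4:].reshape(2, 2))
    betastar2 = (np.eye(2), np.zeros((2, 2)))
    err2 = sum(np.linalg.norm(a - b)
               for a, b in zip(beta_from_x2(x), betastar2))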
@@ -215,7 +193,7 @@ def J_matvec(x, n, m, k, d, ridge, IplusQ, J_cones, cone_lengths):
     for K_op, K_len in zip(J_cones, cone_lengths):
         out[k+n+i:k+n+i+K_len] = -K_op(x[n+i:n+i+K_len]) + (1+ridge)*x[k+n+i:k+n+i+K_len] + K_op(x[2*k+n+i:2*k+n+i+K_len])
         i += K_len
-
+    assert(m==i)
     out[k+n+m] = -d*x[n+m] + (1+ridge)*x[k+n+m] + d*x[2*k+n+m]
     out[2*k:] = x[:k] - x[k:-k]
     return out
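J_matvec applies the 3k-by-3k Newton-system Jacobian block by block without ever materializing the matrix, which is what lets newton_admm below hand it to a matrix-free Krylov solver through SciPy's LinearOperator. A minimal sketch of that pattern, with a trivial placeholder matvec standing in for J_matvec (size and operator hypothetical):

    import numpy as np
    import scipy.sparse.linalg as sla

    k = 10

    def matvec(x):
        # placeholder for J_matvec(x, n, m, k, d, ridge, IplusQ, ...)
        return 2.0 * x

    J_lo = sla.LinearOperator((3 * k, 3 * k), matvec=matvec)
    rhs = np.ones(3 * k)
    sol, info = sla.lgmres(J_lo, rhs, maxiter=50)  # info == 0 on convergence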
@@ -257,7 +235,7 @@ def newton_admm(data, cone, maxiters=100, admm_maxiters=1,
 
     # save benchmarks in a dictionary
     b = _setup_benchmark()
-    extra_iters = 1
+    extra_iters = 0
 
     for iters in range(admm_maxiters):
         if benchmark is not None:
@@ -267,8 +245,6 @@ def newton_admm(data, cone, maxiters=100, admm_maxiters=1,
         utilde = solve_IplusQ(u + v)
         u = _proj_onto_C(utilde - v, cone, n)
         v = v - utilde + u
-        # print("#0", utilde, u)
-        # assert False
 
         if benchmark is not None:
             update_benchmark(b, u, v, c, time() - start_time, zerotol, benchmark, m)
@@ -313,7 +289,7 @@ def newton_admm(data, cone, maxiters=100, admm_maxiters=1,
         d = np.array(utilde[-1] - v[-1] >=
                      0.0).astype(np.float64).reshape(1, 1)
         J_cones, cone_lengths = _J_onto_Kstar((utilde - v)[n:-1], cone)
-        # print("#1", u)
+
         J_lo = sla.LinearOperator((3 * k, 3 * k), matvec=
             lambda x: J_matvec(x, n, m, k, d, ridge, IplusQ, J_cones, cone_lengths))
         # approximately solve then newton step
@@ -322,8 +298,7 @@ def newton_admm(data, cone, maxiters=100, admm_maxiters=1,
                               M=M_lo,
                               tol=1e-12,
                               maxiter=gmres_maxiters(iters) + extra_iters)
-        # print("#2")
-        # assert False
+
         dutilde, du, dv = dall[0:k], dall[k:2 * k], dall[2 * k:]
 
         # backtracking line search
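The line-search body is collapsed in this view. For reference only, a generic backtracking scheme on a fixed-point residual looks roughly like the sketch below; the parameter names and acceptance test are assumptions, not this repo's exact code:

    import numpy as np

    def backtrack(residual_norm, u, du, alpha=0.001, beta=0.5, max_steps=50):
        # Shrink the Newton step t*du until the residual drops enough.
        r0 = residual_norm(u)
        t = 1.0
        for _ in range(max_steps):
            u_new = u - t * du
            if residual_norm(u_new) <= (1 - alpha * t) * r0:
                break
            t *= beta
        return u_new, t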
41 changes: 38 additions & 3 deletions newton_admm/problems.py
@@ -90,6 +90,34 @@ def portfolio_opt(p):
     })
 
 
+def cvxpy_beta_from_x(prob, beta, m):
+    def beta_from_x(x):
+        """ From x, return beta """
+        scs_output = {
+            "info" : {
+                'status': 'Solved',
+                'statusVal': 1,
+                'resPri': 1,
+                'resInfeas': 1,
+                'solveTime': 1,
+                'relGap': 1,
+                'iter': 1,
+                'dobj': 1,
+                'pobj': 1,
+                'setupTime': 1,
+                'resUnbdd': 1,
+                'resDual': 1},
+            "y" : np.zeros(m),
+            "x" : x,
+            "s" : np.zeros(m)
+        }
+        prob.unpack_results(cp.SCS, scs_output)
+        if isinstance(beta, cp.Variable):
+            return beta.value
+        else:
+            return tuple(b.value for b in beta)
+    return beta_from_x
+
 def logistic_regression(N, p, suppfrac):
     """ Create a logistic regression problem with N examples, p dimensions,
         and at most suppfrac of the optimal solution to be non-zero. """
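The new cvxpy_beta_from_x helper fakes a solved SCS output so that prob.unpack_results populates each cvxpy Variable's .value from a raw primal vector x; the dummy info/y/s entries exist only to satisfy the unpacking code. A hedged usage sketch, assuming the cvxpy 0.4-era API this repo targets (get_problem_data returning a dict, unpack_results taking a raw solver output):

    import numpy as np
    import cvxpy as cp

    beta = cp.Variable(3)
    prob = cp.Problem(cp.Minimize(cp.sum_squares(beta - np.arange(3))))
    data = prob.get_problem_data(cp.SCS)

    recover = cvxpy_beta_from_x(prob, beta, data['A'].shape[0])
    x = np.zeros(data['c'].shape[0])  # stand-in SCS primal vector
    print(recover(x))                 # beta.value, filled in by unpack_results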
@@ -129,8 +157,11 @@ def logistic_regression(N, p, suppfrac):
     betahat = cp.Variable(p)
     logloss = sum(cp.log_sum_exp(
         cp.hstack(0, y[i] * X[i, :] * betahat)) for i in range(N))
-    return (betahat,
-            cp.Problem(cp.Minimize(logloss + lam * cp.norm(betahat, 1))))
+    prob = cp.Problem(cp.Minimize(logloss + lam * cp.norm(betahat, 1)))
+
+    data = prob.get_problem_data(cp.SCS)
+    data['beta_from_x'] = cvxpy_beta_from_x(prob, betahat, data['A'].shape[0])
+    return (betahat, prob, data)
 
 
 def robust_pca(p, suppfrac):
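logistic_regression now returns (betahat, prob, data) rather than (betahat, prob), with data carrying the SCS problem matrices plus the beta_from_x closure. A hedged sketch of wiring this into the solver's benchmark hook; betastar (a known optimum) and the cone argument are assumptions, not part of this commit:

    betahat, prob, data = logistic_regression(N=50, p=10, suppfrac=0.2)
    benchmark = (data['beta_from_x'], betastar)
    # e.g.: newton_admm(data, data['dims'], benchmark=benchmark)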
@@ -162,5 +193,9 @@ def robust_pca(p, suppfrac):
 
     Lhat = cp.Variable(p, p)
     Mhat = cp.Variable(p, p)
-    return cp.Problem(cp.Minimize(cp.norm(Lhat, "nuc") + cp.sum_squares(Lhat)),
+    prob = cp.Problem(cp.Minimize(cp.norm(Lhat, "nuc") + cp.sum_squares(Lhat)),
                       [cp.norm(Mhat, 1) <= lam, Lhat + Mhat == X])
+
+    data = prob.get_problem_data(cp.SCS)
+    data['beta_from_x'] = cvxpy_beta_from_x(prob, (Lhat, Mhat), data['A'].shape[0])
+    return ((Lhat, Mhat), prob, data)
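robust_pca is the multi-variable case: beta is the tuple (Lhat, Mhat), so the returned closure yields a tuple of values and error_from_x takes its tuple branch. A short sketch with Lstar, Mstar as a hypothetical known optimum:

    import numpy as np

    (Lhat, Mhat), prob, data = robust_pca(p=5, suppfrac=0.1)
    recover = data['beta_from_x']  # maps an SCS primal x to (Lhat.value, Mhat.value)
    # err = sum(np.linalg.norm(a - b)
    #           for a, b in zip(recover(x), (Lstar, Mstar)))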
