Skip to content
Snippets Groups Projects
Commit 5e306099 authored by asilador's avatar asilador
Browse files

Cleaning up code

parent b794f878
No related branches found
No related tags found
No related merge requests found
......@@ -7,6 +7,7 @@ c = np.asmatrix(np.loadtxt('c.txt'))
b = np.transpose(b) #make b a column vector
D = np.asmatrix(np.ones(np.size(b)))
m = 0
# Make a guess for x vector
#x = np.asmatrix(np.zeros(np.size(b)))
#x = np.transpose(x) #make column vector
......@@ -55,16 +56,16 @@ def grad_opt(epsilon,x,count,alpha):
#print('alpha0 is ', alpha)
count += 1
if count%1000==0:
print('f(x) is ', f(Q,b,c,x))
print 'f(x) is ', f(Q,b,c,x)
#print('norm of gradf(x) is ', np.linalg.norm(gradf(Q,b,x)))
x -= alpha*gradf(Q,b,x)
alpha = alpha0
print('Done')
print('x* is ', x)
print('f(x*) is ', f(Q,b,c,x))
print('epsilon is ', epsilon)
print '\nDone at ', count,'th iteration'
print 'x* is ', x
print 'f(x*) is ', f(Q,b,c,x)
print 'epsilon is ', epsilon
return 0
......@@ -74,11 +75,7 @@ def run(epsilon):
grad_opt(epsilon,xstart,countstart,alpha0)
return 0
run(0.889)
run(0.8)
#def simpgrad_opt(e)
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment