diff --git a/Assignment 1/assignment1.py b/Assignment 1/assignment1.py
index 7938956970908526ef8687a03dc700e8b9da7891..5c132534b2bbe77e2dccee56ca60dbb6b584c109 100644
--- a/Assignment 1/assignment1.py
+++ b/Assignment 1/assignment1.py
@@ -7,6 +7,7 @@ c = np.asmatrix(np.loadtxt('c.txt'))
 b = np.transpose(b) #make b a column vector
 D = np.asmatrix(np.ones(np.size(b)))
 m = 0
+
 # Make a guess for x vector
 #x = np.asmatrix(np.zeros(np.size(b)))
 #x = np.transpose(x) #make column vector
@@ -55,16 +56,16 @@ def grad_opt(epsilon,x,count,alpha):
         #print('alpha0 is ', alpha)
         count += 1
         if count%1000==0:
             print('f(x) is ', f(Q,b,c,x))
         #print('norm of gradf(x) is ', np.linalg.norm(gradf(Q,b,x)))
         x -= alpha*gradf(Q,b,x)
         alpha = alpha0
 
-    print('Done')
-    print('x* is ', x)
-    print('f(x*) is ', f(Q,b,c,x))
-    print('epsilon is ', epsilon)
+    print('\nDone at ', count, 'th iteration')
+    print('x* is ', x)
+    print('f(x*) is ', f(Q,b,c,x))
+    print('epsilon is ', epsilon)
     return 0
 
 
 
@@ -74,11 +75,7 @@ def run(epsilon):
 
     grad_opt(epsilon,xstart,countstart,alpha0)
     return 0
-run(0.889)
-
+run(0.8)
 
 
 
-
-
-#def simpgrad_opt(e)