Skip to content
Snippets Groups Projects
Commit a4e1f547 authored by asilador's avatar asilador
Browse files

Successful optimization

parent 77ce72ec
No related branches found
No related tags found
No related merge requests found
# Fix: the duplicated/typo'd "iimport numpy as np" line (a diff artifact) was a
# SyntaxError; a single correct import remains.
import numpy as np

# Initialize iteration counter
# Import text files (Q is the quadratic's Hessian, loaded as a matrix)
Q = np.asmatrix(np.loadtxt('Q.txt'))
......@@ -10,7 +10,7 @@ m = 0
# Make a guess for x vector (zero vector the same length as b)
x = np.asmatrix(np.zeros(np.size(b)))
x = np.transpose(x)  # make column vector
# Initial line-search step length. Fix: the earlier duplicate assignment
# alpha0 = 1 (a diff artifact) was dead code — the final value 10 is kept.
alpha0 = 10
count = 0  # iteration counter
# Define f(x)
def f(Q,b,c,x):
......@@ -24,18 +24,19 @@ def gradf(Q,b,x):
def armijo(alpha0, Q, b, c, D, m):
    """Backtracking (Armijo-rule) line search along direction D from the global x.

    Starting from step alpha0, shrink the step as beta**m * s until the
    sufficient-decrease condition
        f(x + alpha*D) <= f(x) + sigma * alpha * gradf(x)^T D
    is satisfied, then return the accepted step length alpha.

    NOTE(review): reads the module-level iterate ``x`` and calls the
    module-level ``f``/``gradf``; D appears to be a row-vector search
    direction (it is transposed before being added to the column x).

    Fixes vs. the original:
    - sigma was 10e-1 (= 1.0), which makes the condition unattainable for a
      positive-definite Q; 1e-1 was almost certainly intended.
    - beta = 1/10 was dead code, immediately overwritten by 1.0/2.
    - the recursive self-call + return inside the while loop duplicated the
      loop's own backtracking; the plain iteration below accepts the same
      smallest m with beta**m * s satisfying the condition.
    """
    alpha = alpha0
    print('alpha is ', alpha)
    s = 1            # base step scale
    sigma = 1e-1     # sufficient-decrease constant (original typo: 10e-1 == 1.0)
    beta = 1.0 / 2   # backtracking shrink factor
    while f(Q, b, c, x + np.transpose(alpha * D)) > \
            f(Q, b, c, x) + sigma * alpha * np.transpose(gradf(Q, b, x)) * np.transpose(D):
        m += 1
        alpha = beta**m * s
    return alpha
def xval():
    """Return the module-level iterate ``x`` (the starting guess vector)."""
    return x
......@@ -45,32 +46,29 @@ def countval():
# Begin Gradient Descent Algorithm
def grad_opt(epsilon, x, count):
    """Run gradient descent from x until ||gradf(x)|| < epsilon; return 0.

    Each pass computes an Armijo step length, updates x, and prints progress;
    on convergence it prints the minimizer and its objective value.

    Fixes vs. the original:
    - tail recursion (grad_opt calling itself once per step) is rewritten as a
      loop, so long runs cannot hit Python's recursion limit;
    - the unused ``f1`` local and the dead ``xnew`` bookkeeping are removed;
    - the code after the if/else (both branches returned) was unreachable and
      is dropped.

    NOTE(review): the update term transpose(g)*I*g evaluates to a 1x1 matrix,
    so the *same scalar* is subtracted from every component of x; conventional
    steepest descent would be ``x - alpha*gradf(Q, b, x)``. Preserved as-is
    pending confirmation of gradf's return shape.
    """
    while True:
        # Module-level alpha0, Q, b, c, D, m feed the line search, as before.
        alpha = armijo(alpha0, Q, b, c, D, m)
        print('alpha is ', alpha)
        x = x - alpha * np.transpose(gradf(Q, b, x)) * (np.identity(np.size(b))) * gradf(Q, b, x)
        if np.linalg.norm(gradf(Q, b, x)) < epsilon:
            break
        count += 1
        print('x is ', x)
    print('Done')
    print('x* is ', x)
    print('f(x*) is ', f(Q, b, c, x))
    print('epsilon is ', epsilon)
    return 0
def run(epsilon):
    """Launch gradient descent from the stored initial guess; return 0.

    epsilon -- convergence tolerance on the gradient norm.

    Fix: the original overwrote the caller's epsilon with a hard-coded 0.1,
    silently ignoring the argument (e.g. run(0.8895) actually ran with 0.1).
    The parameter is now honored.
    """
    xstart = xval()
    countstart = countval()
    grad_opt(epsilon, xstart, countstart)
    return 0
# Script entry point: launch the optimization with the requested tolerance.
run(0.8895)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment