Skip to content
Snippets Groups Projects
Commit 8e98296b authored by rcam2's avatar rcam2
Browse files

Upload New File

parent 05813132
No related branches found
No related tags found
No related merge requests found
lab1.py 0 → 100644
import numpy as np
from scipy import optimize
import matplotlib.pyplot as plt
def f(x):
    """Evaluate the quadratic objective f(x) = x^T Q x + b^T x + c.

    Relies on the module-level globals Q (matrix), b (vector), and c
    (scalar), which are loaded from text files later in this script.

    Parameters
    ----------
    x : ndarray
        Point at which to evaluate the objective (same length as b).

    Returns
    -------
    float
        The objective value at x.
    """
    quadratic_term = x @ Q @ x      # x^T Q x
    linear_term = b @ x             # b^T x
    return quadratic_term + linear_term + c
# Problem data for f(x) = x^T Q x + b^T x + c, loaded from text files.
# NOTE: the original `global Q/b/c` statements were no-ops — `global` has
# no effect at module scope — so they are omitted; these are plain
# module-level names that f() reads.
Q = np.loadtxt("Q.txt")
b = np.loadtxt("b.txt")
c = np.loadtxt("c.txt")
print(Q)
print(b)
print(c)
print(b.size)

n = b.size          # problem dimension
err_tol = 1e-5      # stop when ||grad f(x)|| drops below this
alpha = 1           # initial step size for the backtracking search
# recommended sigma [10^-5, 10^-1]
sigma = 1e-1        # Armijo sufficient-decrease parameter
# recommended beta [1/10, 1/2]
beta = 0.1          # multiplicative step-size shrink factor

# Random starting point and its gradient: grad f(x) = 2 Q x + b.
x = np.random.uniform(-1e4, 1e4, n)
gradient = 2 * np.dot(Q, x) + b
print(x)
print(gradient)

# Per-iteration histories for the plots at the end of the script.
alpha_arr = np.array([])
f_x_arr = np.array([])
x_arr = np.array([])
# Gradient descent with an Armijo backtracking line search.
# Iterate until the gradient norm falls below the tolerance.
while np.linalg.norm(gradient) >= err_tol:
    f_x = f(x)  # current objective value (uses the f() helper above)

    # Record histories for plotting.
    alpha_arr = np.append(alpha_arr, alpha)
    f_x_arr = np.append(f_x_arr, f_x)
    x_arr = np.append(x_arr, x)  # NOTE: np.append flattens, so this is a 1-D concat of iterates

    # ||grad f(x)||^2, reused by every Armijo test this iteration.
    grad_sq = np.linalg.norm(gradient) * np.linalg.norm(gradient)

    # Backtracking: shrink alpha until the sufficient-decrease condition
    #   f(x - alpha*grad) <= f(x) - sigma*alpha*||grad||^2
    # holds.
    # NOTE(review): alpha is never reset between outer iterations, so it
    # only ever shrinks over the whole run — preserved from the original;
    # confirm this is the intended variant of the method.
    x_ = x - alpha * gradient
    f_x_ = f(x_)
    while f_x_ > (f_x - alpha * sigma * grad_sq):
        alpha = alpha * beta
        x_ = x - alpha * gradient
        f_x_ = f(x_)

    # Accept the step and refresh the gradient: grad f(x) = 2 Q x + b.
    x = x_
    gradient = 2 * np.dot(Q, x) + b
    print(alpha)
    print(x)
    print(f_x)
# Diagnostic plots of the step-size and objective-value histories.
plt.plot(alpha_arr)
plt.title("Alpha values over iterations")
plt.show()
plt.plot(f_x_arr)
plt.title("Function values over iterations")
plt.show()

# Closed-form minimizer: grad f(x) = 2 Q x + b = 0  =>  Q x = -b/2.
# Solve the linear system directly — np.linalg.solve is cheaper and
# numerically more stable than forming the explicit inverse.
x_min = np.linalg.solve(Q, -b / 2)
print(x_min)
print(f(x_min))

# Cross-check against scipy's derivative-free simplex search (fmin),
# started from a fresh random point.
x0 = np.random.uniform(-1e5, 1e5, n)
minimum = optimize.fmin(f, x0)
print(minimum)
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment