gradientDescent_regLinearRegression.py
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
from numpy.linalg import inv
""" GET THE Left 100x100 Green intensity Pixels and stack them int the vector """
def get_feature_vector():
x = np.zeros((10,100*100));
width = 100; height=100;
for i in range(N): #FOR ALL IMAGES
im = Image.open("MontBlanc{}.png".format(i+1));
rgb_im = im.convert('RGB'); # Converts the image into RGB
count = 0;
for row in range(width):
for col in range(height):
r,g,b = rgb_im.getpixel((row,col));
x[i,count] = g/(255);
count +=1;
""" standardized the values"""
_mean = np.mean(x);
_std_dev = np.std(x);
x = (x - _mean)/_std_dev;
return x;
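# A minimal stand-in for get_feature_vector(), assuming the MontBlanc*.png
# images are not available locally (this helper is not part of the original
# script): it returns a standardized random matrix with the same 10x10000
# shape, which is enough to exercise gradient_descent() below.
def get_synthetic_feature_vector(seed=0):
    rng = np.random.RandomState(seed)       # hypothetical seed parameter
    x = rng.rand(10, 100 * 100)             # fake green intensities in [0, 1)
    return (x - np.mean(x)) / np.std(x)     # same standardization as above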
""" Gradient Descent Algorithm"""
"""
Equation to find is:
Gradw = 2/N * [xTxw * x - xTy];
which will give Gradw with dimensions: 1x10000
w = 10000x1;
X = 10x10000;
y = 10x1"""
""" gradient_descent(feature_vector,trainng_data,error_margin)"""
def gradient_descent(x, y, a, _lambda):
    ER = []                             # empirical risk per iteration
    N = 10                              # sample size
    w = np.zeros((10**4, 1))
    xT = np.transpose(x)
    xTx = np.dot(xT, x)                 # precompute x^T x (10000x10000)
    xTy = np.dot(xT, y)                 # precompute x^T y (10000x1)
    for i in range(4000):
        """ Calculate Gradw """
        Gradw = (2 / N) * (np.dot(xTx, w) - xTy) + 2 * _lambda * w
        w = w - a * Gradw
        """ Calculate empirical risk = (1/N) * sum_i [y(i) - w^T x(i)]^2 """
        xw_y = np.dot(x, w) - y
        e = (1 / N) * np.dot(np.transpose(xw_y), xw_y)
        ER.append(e[0, 0])
    return ER
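# For reference, a closed-form minimizer of the same regularized objective,
# useful as a sanity check on gradient descent (a sketch, not part of the
# original script). Setting Gradw = 0 gives w* = (x^T x + N*lambda*I)^{-1} x^T y;
# the push-through identity rewrites this as w* = x^T (x x^T + N*lambda*I)^{-1} y,
# so the matrix inverted is only 10x10 instead of 10000x10000.
# Usage: w_star = closed_form_ridge(x, y, 2), then compare the empirical risk
# at w_star with the tail of the ER curve returned by gradient_descent().
def closed_form_ridge(x, y, _lambda, N=10):
    xxT = np.dot(x, np.transpose(x))    # 10x10 Gram matrix
    return np.dot(np.transpose(x), np.dot(inv(xxT + N * _lambda * np.eye(N)), y))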
if __name__ == '__main__':
    """ Original vector dimensions:
        x = 10x10000;
        w = 10000x1;
        y = 10x1.
    """
    y = np.array([[211], [271], [121], [31], [341], [401], [241], [181], [301], [301]])
    N = 10                              # sample size
    x = get_feature_vector()
    a = [10**-4, 7 * 10**-5, 5 * 10**-5]    # step sizes
    _lam0 = 2
    _lam1 = 5
    ER1 = gradient_descent(x, y, a[0], _lam0)
    ER2 = gradient_descent(x, y, a[1], _lam0)
    ER3 = gradient_descent(x, y, a[2], _lam0)
    ER4 = gradient_descent(x, y, a[0], _lam1)
    ER5 = gradient_descent(x, y, a[1], _lam1)
    ER6 = gradient_descent(x, y, a[2], _lam1)
    x_range = list(range(4000))
    plt.xlabel('Iterations', fontsize=14)
    plt.ylabel('Empirical Risk', fontsize=14)
    plt.plot(x_range, ER1, label='lambda = 2, a = 10^-4')
    plt.plot(x_range, ER2, label='lambda = 2, a = 7x10^-5')
    plt.plot(x_range, ER3, label='lambda = 2, a = 5x10^-5')
    plt.plot(x_range, ER4, label='lambda = 5, a = 10^-4')
    plt.plot(x_range, ER5, label='lambda = 5, a = 7x10^-5')
    plt.plot(x_range, ER6, label='lambda = 5, a = 5x10^-5')
    plt.legend(loc='best')
    plt.show()