# -*- coding: utf-8 -*-
"""ML-HW2.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1Xs6z2FGDz3glvuD0XBmqLboFPEBx6bO_
Anshika Gupta [ag8800]
Q1. Perceptron with stochastic gradient descent
"""
import scipy.io
import numpy as np
import matplotlib.pyplot as plt
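
# The perceptron SGD scheme used below: draw one random sample (x_i, y_i)
# per iteration and, whenever it is misclassified (y_i * w.x_i <= 0), apply
#     w <- w + lr * y_i * x_i
# For linearly separable data this converges after finitely many updates
# (the classical perceptron convergence theorem).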
# Load the MATLAB dataset: the first two columns of "data" are the
# features, the last column is the label.
Dataset_MATLAB = scipy.io.loadmat('data3.mat')
X = Dataset_MATLAB["data"][:, :2]
Y = Dataset_MATLAB["data"][:, -1:]
# Map raw outputs to class labels as per the given constraints:
# negative values -> -1, non-negative values -> +1.
# Returns a new array rather than mutating the input in place.
def y_func(y):
    y = np.copy(y)
    y[y < 0] = -1
    y[y >= 0] = 1
    return y

# Empirical perceptron risk: average of sign(-y * y_pred) over N samples
# (misclassified points contribute +1, correctly classified ones -1).
def get_risk(y, y_pred, N):
    risk = np.sum(y_func(np.multiply(y_pred, -y)))
    return float(risk / N)

# Classification error: fraction of samples whose predicted sign
# disagrees with the true label.
def get_error(y, y_pred, N):
    error = np.count_nonzero(y_func(y_pred) != y)
    return float(error / N)
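
# A minimal sanity check of the helpers on hypothetical toy values
# (not from data3.mat): one of the two predictions is negative, and one of
# the three toy samples disagrees in sign with its label.
assert y_func(np.array([[-0.3], [0.7]])).tolist() == [[-1.0], [1.0]]
_y_toy = np.array([[1.0], [-1.0], [1.0]])
_pred_toy = np.array([[0.5], [0.2], [2.0]])  # middle sample misclassified
assert abs(get_error(_y_toy, _pred_toy, 3) - 1 / 3) < 1e-12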
def perceptron(x, y, max_iters=1000):
    """Train a perceptron with stochastic gradient descent.

    Each iteration draws one sample at random; if it is misclassified,
    the weights are nudged toward it.
    """
    num_sample, num_feature = x.shape
    lr = 1  # learning rate
    weights = np.random.random_sample((num_feature, 1))
    error_list = []
    risk_list = []
    iters = 0
    while iters < max_iters:
        i = np.random.randint(num_sample)
        output = x[i].reshape((1, num_feature)).dot(weights)
        # Update only on a misclassified sample: w <- w + lr * y_i * x_i
        if y_func(np.multiply(y[i], output))[0] == -1:
            weights += lr * x[i].reshape((num_feature, 1)).dot(y[i].reshape((1, 1)))
        y_pred = x.dot(weights)
        error_list.append(get_error(y, y_pred, num_sample))
        risk_list.append(get_risk(y, y_pred, num_sample))
        iters += 1
        # Stop early once every sample is classified correctly.
        if error_list[-1] == 0:
            break
    return weights, iters, error_list, risk_list
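
# A small usage sketch on synthetic, linearly separable data (hypothetical,
# generated here rather than loaded from data3.mat). Points with a clear
# margin around the line x1 = x2 are kept, so the loop should reach zero
# error and stop early.
_toy_x2 = np.random.random_sample((50, 2))
_toy_y = np.where(_toy_x2[:, :1] - _toy_x2[:, 1:] > 0.1, 1.0, -1.0)
_mask = np.abs(_toy_x2[:, 0] - _toy_x2[:, 1]) > 0.1  # enforce a margin
_toy_x = np.concatenate((_toy_x2[_mask], np.ones((_mask.sum(), 1))), axis=1)
_w, _it, _err, _ = perceptron(_toy_x, _toy_y[_mask], max_iters=500)
print("toy run: iterations =", _it, "final error =", _err[-1])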
# Append a bias column of ones so the threshold is learned as a weight.
X_new = np.concatenate((X, np.ones((X.shape[0], 1))), axis=1)
max_iters = 1000
weights, iters, error_list, risk_list = perceptron(X_new, Y, max_iters)
num_sample = X_new.shape[0]
# Scatter the data points, then draw the learned decision boundary
# w0*x1 + w1*x2 + w2 = 0, i.e. x2 = (-w2 - w0*x1) / w1.
plt.plot(X_new[:, 0], X_new[:, 1], 'r.')
max_x = max(X_new[:, 0])
min_x = min(X_new[:, 0])
w0, w1, w2 = weights[:, 0]
y_max_x = (-w2 - w0 * max_x) / w1
y_min_x = (-w2 - w0 * min_x) / w1
plt.plot([min_x, max_x], [y_min_x, y_max_x], '-b')
plt.xlabel('x1')
plt.ylabel('x2')
plt.title("Decision Boundary")
plt.show()
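
# Hedged example: classifying a hypothetical new point with the learned
# weights (append the bias term, then take the sign of the dot product).
_new_point = np.array([[0.5, 0.5, 1.0]])  # (x1, x2, bias)
print("predicted label:", y_func(_new_point.dot(weights))[0, 0])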
# Plot the per-iteration classification error and perceptron risk.
plt.plot(range(iters), error_list, 'r-', label='Error')
plt.plot(range(iters), risk_list, 'b-', label='Risk')
plt.xlabel('Number of iterations')
plt.legend()
plt.show()
print("Iterations until convergence:", iters)

"""Hence, the perceptron converges after the number of iterations printed above."""