K-nearest_neighbors.py
import numpy as np


class KNN:
    """K-nearest neighbors classifier using Euclidean distance and majority voting."""

    def __init__(self, k=3):
        # Number of neighbors that take part in the vote
        self.k = k

    def fit(self, X, y):
        # KNN is a lazy learner: "fitting" just stores the training data
        self.X_train = X
        self.y_train = y

    def euclidean_distance(self, x1, x2):
        # Straight-line distance between two feature vectors
        return np.sqrt(np.sum((x1 - x2) ** 2))

    def predict(self, X):
        # Predict a class label for every sample in X
        y_pred = [self._predict(x) for x in X]
        return np.array(y_pred)

    def _predict(self, x):
        # Compute distances between x and all examples in the training set
        distances = [self.euclidean_distance(x, x_train) for x_train in self.X_train]
        # Sort by distance and keep the indices of the k nearest neighbors
        k_indices = np.argsort(distances)[:self.k]
        # Extract the labels of the k nearest neighbor training samples
        k_nearest_labels = [self.y_train[i] for i in k_indices]
        # Return the most common class label (majority vote)
        most_common = np.argmax(np.bincount(k_nearest_labels))
        return most_common
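
# A quick worked note on the voting step above (illustration only, not part of
# the original script): if the three nearest neighbors have labels [0, 0, 1],
# np.bincount([0, 0, 1]) returns array([2, 1]) (two votes for class 0, one for
# class 1) and np.argmax picks index 0, i.e. the majority class.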
# Create a simple dataset
X = np.array([[1, 2], [1.5, 1.8], [5, 8], [8, 8], [1, 0.6], [9, 11]])
y = np.array([0, 0, 1, 1, 0, 1])
# Instantiate the KNN classifier
knn = KNN(k=3)
# Train the classifier
knn.fit(X, y)
# Test data
X_test = np.array([[3, 4], [5, 6], [2, 2], [4, 4]])
# Predict the labels for the test data
predictions = knn.predict(X_test)
# Print the predicted labels
print(predictions)
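
# Optional cross-check (a minimal sketch, assuming scikit-learn is available;
# the library is not used in the original script): scikit-learn's
# KNeighborsClassifier with n_neighbors=3 applies the same Euclidean-distance
# majority vote and is expected to produce the same labels for X_test.
try:
    from sklearn.neighbors import KNeighborsClassifier

    sk_knn = KNeighborsClassifier(n_neighbors=3)
    sk_knn.fit(X, y)
    print(sk_knn.predict(X_test))  # expected to match `predictions`
except ImportError:
    # scikit-learn is not installed; skip the comparison
    pass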