# NOTE(review): garbled duplicate of the import header — the real imports are below.
# (was: "from sklearn base import BaseEstimator ClassifierMixin from sklearn ne")

# NOTE(review): stray line-number artifacts from text extraction removed here.
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.neighbors import KDTree
from math import fabs
from operator import itemgetter
# weight(x) is a kernel function giving the voting importance of a neighbor
# at normalized distance x (distance divided by the window width).
class MetricClassifier(BaseEstimator, ClassifierMixin):
    """k-nearest-neighbour classifier with a variable-width Parzen window.

    For each query point the k+1 nearest training points are found; the
    distance to the (k+1)-th neighbour is used as the window width pk, and
    each of the k nearest neighbours votes for its label with a quartic
    (biweight) kernel weight of its normalized distance dist/pk.
    """

    def __init__(self, k=1, metric='euclidean'):
        # Number of voting neighbours.
        self.k = k
        # Quartic (biweight) kernel: (15/16)(1 - x^2)^2 on |x| <= 1, else 0.
        self.weight = lambda x: (15.0 / 16.0) * (1 - x ** 2) ** 2 * int(fabs(x) <= 1)
        self.metric = metric
        self.collections = None
        self.kdtree = None

    def fit(self, data, label):
        """Store the training sample and build the KD-tree index.

        Parameters
        ----------
        data : array-like of shape (n_samples, n_features)
        label : sequence of n_samples class labels

        Returns self (sklearn convention) so calls can be chained.
        """
        self.data = data
        self.labels = set(label)
        self.kdtree = KDTree(data, metric=self.metric)
        # Label of training point i, addressed by KD-tree row index.
        # This replaces the fragile tuple(row)->label dict lookup, which
        # silently collides when duplicate rows carry different labels.
        self.train_labels = list(label)
        # Kept for backward compatibility with any code reading this attribute.
        self.data_label_map = dict(zip(map(tuple, data), label))
        return self

    def predict(self, items):
        """Predict a class label for each row of `items`.

        Returns a list of labels, one per query row.
        """
        # One batched query for all items instead of one query per item.
        # This also satisfies KDTree.query's requirement that its input be
        # a 2-D array (a single 1-D row is rejected by modern sklearn).
        dists, idxs = self.kdtree.query(items, k=self.k + 1, return_distance=True)
        results = []
        for dist_row, idx_row in zip(dists, idxs):
            # Window width: distance to the (k+1)-th (farthest) neighbour.
            # Its own kernel weight is exactly 0, so it never votes.
            pk = dist_row[-1]
            votes = {}
            for idx, dist in zip(idx_row[:-1], dist_row[:-1]):
                lbl = self.train_labels[idx]
                votes[lbl] = votes.get(lbl, 0.0) + self.weight(dist / pk)
            # argmax over all known labels; labels with no neighbour score 0.
            results.append(max(self.labels, key=lambda lbl: votes.get(lbl, 0.0)))
        return results