# ==============================================================================
# Copyright 2014 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# daal4py KD-Tree KNN example for shared memory systems
from pathlib import Path

import daal4py as d4p
import numpy as np

from readcsv import pd_read_csv


def main(readcsv=pd_read_csv):
    # Input data set parameters
    data_path = Path(__file__).parent / "data" / "batch"
    train_file = data_path / "k_nearest_neighbors_train.csv"
    predict_file = data_path / "k_nearest_neighbors_test.csv"

    # Read the training data; use 5 features per observation
    nFeatures = 5
    nClasses = 5
    train_data = readcsv(train_file, usecols=range(nFeatures))
    train_labels = readcsv(train_file, usecols=range(nFeatures, nFeatures + 1))

    # Create a training algorithm object and call compute
    train_algo = d4p.kdtree_knn_classification_training(nClasses=nClasses)
    # 'weights' is an optional argument; with equal weights the results
    # must be the same as without weights
    weights = np.ones((train_data.shape[0], 1))
    train_result = train_algo.compute(train_data, train_labels, weights)

    # Now let's do some prediction
    predict_data = readcsv(predict_file, usecols=range(nFeatures))
    predict_labels = readcsv(predict_file, usecols=range(nFeatures, nFeatures + 1))

    # Create a prediction algorithm object and call compute on the trained model
    predict_algo = d4p.kdtree_knn_classification_prediction(nClasses=nClasses)
    predict_result = predict_algo.compute(predict_data, train_result.model)
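    # predict_result.prediction holds one predicted class label per row of predict_data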

    # We expect fewer than 180 mispredicted values
    assert np.count_nonzero(predict_labels != predict_result.prediction) < 180

    return (train_result, predict_result, predict_labels)


if __name__ == "__main__":
    (train_result, predict_result, predict_labels) = main()
    print("KD-tree based kNN classification results:")
    print("Ground truth (observations #30-34):\n", predict_labels[30:35])
    print(
        "Classification results (observations #30-34):\n",
        predict_result.prediction[30:35],
    )
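
# A minimal sketch of an alternative CSV reader with the same (path, usecols)
# signature, assuming only NumPy is available. The name np_read_csv is used
# here purely for illustration and is not guaranteed to exist in the readcsv
# module:
#
#     def np_read_csv(path, usecols=None):
#         # np.loadtxt returns a 2D float64 array, which is what compute() expects
#         return np.loadtxt(path, delimiter=",", usecols=usecols, ndmin=2)
#
#     train_result, predict_result, predict_labels = main(readcsv=np_read_csv)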