I am using the machine learning algorithm kNN and instead of dividing the dataset into 66,6% for training and 33,4% for tests I need to use cross-validation with the following parameters: K=3, 1/euclidean.
K=3 has no mystery, I simply add to the code:
Classifier = KNeighborsClassifier(n_neighbors=3, p=2, metric='euclidean')
and it's solved. What I can't understand is the 1/euclidean part — how could I apply that in the code?
import pandas as pd
import time
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import cross_val_score
from sklearn import metrics
def openfile(path='Testfile - kNN.csv'):
    """Load the dataset from a CSV file into a DataFrame.

    Parameters
    ----------
    path : str, optional
        Path to the CSV file. Defaults to the original hard-coded
        filename so existing callers (``openfile()``) are unaffected.

    Returns
    -------
    pandas.DataFrame
        The parsed dataset.
    """
    return pd.read_csv(path)
def main():
    """Run the kNN experiment end to end and print evaluation metrics."""
    start_time = time.time()

    # Load the data; 'Label' is the target column, everything else a feature.
    data = openfile()
    features = data.drop(columns=['Label'])
    labels = data['Label'].values

    # Hold-out split with a fixed seed for repeatable results.
    X_train, X_test, y_train, y_test = train_test_split(features, labels, random_state=0)

    # 3-nearest-neighbours classifier with the Euclidean metric.
    knn = KNeighborsClassifier(n_neighbors=3, p=2, metric='euclidean')
    knn.fit(X_train, y_train)
    predictions = knn.predict(X_test)

    # 10-fold cross-validation over the full dataset (independent of the
    # hold-out split above).
    cv_scores = cross_val_score(knn, features, labels, cv=10)

    # Probability of the positive (second) class for each test sample.
    probabilities = knn.predict_proba(X_test)[:, 1]

    print("accuracy_score:", metrics.accuracy_score(y_test, predictions),'\n')
    print("confusion matrix")
    print(metrics.confusion_matrix(y_test, predictions),'\n')
    print("Background precision score:", metrics.precision_score(y_test, predictions, labels=['background'], average='micro')*100,"%")
    print("Botnet precision score:", metrics.precision_score(y_test, predictions, labels=['bot'], average='micro')*100,"%")
    print("Normal precision score:", metrics.precision_score(y_test, predictions, labels=['normal'], average='micro')*100,"%",'\n')
    print(metrics.classification_report(y_test, predictions, digits=2),'\n')
    print(cv_scores,'\n')
    print(cv_scores.mean(),'\n')
    print("--- %s seconds ---" % (time.time() - start_time))
You can create your own function and pass it as a callable to the `metric` parameter.
Create your function something like below:
from scipy.spatial import distance
def inverse_euc(a, b):
    """Return the inverse of the Euclidean distance between two points.

    Parameters
    ----------
    a, b : array-like
        The two points to compare.

    Returns
    -------
    float
        ``1 / euclidean(a, b)``, or ``inf`` when the points coincide
        (the raw division would raise ZeroDivisionError for ``a == b``).

    NOTE(review): an inverse distance is not a true metric (identical
    points should give 0, not inf) — if the goal is "1/euclidean"
    weighting of neighbours, ``weights='distance'`` on the classifier
    is likely the intended approach; confirm against the assignment.
    """
    d = distance.euclidean(a, b)
    return float('inf') if d == 0 else 1 / d
Now use it as the callable metric in your kNN constructor:
# Pass the custom function as the `metric` callable; `algorithm='ball_tree'`
# is forced because a user-defined metric cannot use the kd-tree/brute
# precomputed paths.  NOTE(review): with a callable metric, `p=2` is
# presumably ignored (it only applies to the Minkowski metric) — confirm.
Classifier = KNeighborsClassifier(algorithm='ball_tree',n_neighbors=3, p=2, metric=inverse_euc)