diff --git a/Chapter 02/code/naive_bayes.py b/Chapter 02/code/naive_bayes.py
index 3268e49..58eced1 100644
--- a/Chapter 02/code/naive_bayes.py
+++ b/Chapter 02/code/naive_bayes.py
@@ -1,7 +1,7 @@
 import numpy as np
 import matplotlib.pyplot as plt
 from sklearn.naive_bayes import GaussianNB
-from sklearn import cross_validation
+from sklearn import model_selection
 
 from utilities import visualize_classifier
 
@@ -32,7 +32,7 @@
 # Cross validation
 
 # Split data into training and test data
-X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.2, random_state=3)
+X_train, X_test, y_train, y_test = model_selection.train_test_split(X, y, test_size=0.2, random_state=3)
 classifier_new = GaussianNB()
 classifier_new.fit(X_train, y_train)
 y_test_pred = classifier_new.predict(X_test)
@@ -48,19 +48,19 @@
 # Scoring functions
 
 num_folds = 3
-accuracy_values = cross_validation.cross_val_score(classifier,
+accuracy_values = model_selection.cross_val_score(classifier,
         X, y, scoring='accuracy', cv=num_folds)
 print("Accuracy: " + str(round(100*accuracy_values.mean(), 2)) + "%")
 
-precision_values = cross_validation.cross_val_score(classifier,
+precision_values = model_selection.cross_val_score(classifier,
         X, y, scoring='precision_weighted', cv=num_folds)
 print("Precision: " + str(round(100*precision_values.mean(), 2)) + "%")
 
-recall_values = cross_validation.cross_val_score(classifier,
+recall_values = model_selection.cross_val_score(classifier,
         X, y, scoring='recall_weighted', cv=num_folds)
 print("Recall: " + str(round(100*recall_values.mean(), 2)) + "%")
 
-f1_values = cross_validation.cross_val_score(classifier,
+f1_values = model_selection.cross_val_score(classifier,
         X, y, scoring='f1_weighted', cv=num_folds)
 print("F1: " + str(round(100*f1_values.mean(), 2)) + "%")
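
Note on the change: scikit-learn relocated `train_test_split` and `cross_val_score` from the deprecated `sklearn.cross_validation` module to `sklearn.model_selection`, and the old module was removed in scikit-learn 0.20, so the unpatched chapter code fails at import on current releases. The sketch below is a minimal, self-contained check of the updated calls; the synthetic two-class data and the metric loop are illustrative stand-ins for the chapter's input file and its four separate scoring blocks, not part of the patched script.

```python
import numpy as np
from sklearn.naive_bayes import GaussianNB
from sklearn import model_selection

# Synthetic two-class data (illustrative stand-in for the chapter's data file).
rng = np.random.RandomState(3)
X = np.vstack([rng.normal(0, 1, (50, 2)), rng.normal(3, 1, (50, 2))])
y = np.array([0] * 50 + [1] * 50)

# Hold-out split via the relocated API, same arguments as the patched line.
X_train, X_test, y_train, y_test = model_selection.train_test_split(
        X, y, test_size=0.2, random_state=3)
classifier = GaussianNB()
classifier.fit(X_train, y_train)
print("Test accuracy: " + str(round(100 * classifier.score(X_test, y_test), 2)) + "%")

# Cross-validated metrics, mirroring the four cross_val_score calls in the patch.
num_folds = 3
for metric in ['accuracy', 'precision_weighted', 'recall_weighted', 'f1_weighted']:
    values = model_selection.cross_val_score(classifier, X, y,
            scoring=metric, cv=num_folds)
    print(metric + ": " + str(round(100 * values.mean(), 2)) + "%")
```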