Commit 4dcecf0f authored by Jaralus

update Part_A.py and Part_B.py

parent a83d569e
Part_A.py
# Import libraries and modules
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from skopt import BayesSearchCV
from skopt.space import Real, Categorical, Integer
from sklearn.model_selection import cross_val_score
from sklearn.utils import shuffle
from sklearn.preprocessing import StandardScaler
-from sklearn.metrics import accuracy_score, f1_score
+from sklearn.metrics import accuracy_score, f1_score, confusion_matrix, ConfusionMatrixDisplay
from sklearn.linear_model import LogisticRegression
from sklearn.neural_network import MLPClassifier
from sklearn.svm import SVC
@@ -56,7 +57,7 @@ def main():
    # Train the classifiers
    classifiers = [
-       ("Logistical Regression" , LogisticRegression(max_iter = 10000, solver = "newton-cg", C = 9.416).fit(train_features_scaled, train_labels)),
+       ("Logistic Regression" , LogisticRegression(max_iter = 10000, solver = "newton-cg", C = 9.416).fit(train_features_scaled, train_labels)),
        ("Multi-layer Perceptron" , MLPClassifier(max_iter = 10000, solver = "adam", activation = "tanh", learning_rate = "constant").fit(train_features_scaled, train_labels)),
        ("C-Support Vector" , SVC(C = 7.989999999999979, kernel = "linear").fit(train_features_scaled, train_labels)),
@@ -92,9 +93,14 @@ def main():
            best_classifier_name = classifier_name
            best_classifier = classifier
            best_classifier_average = cv_average
+           best_classifier_predictions = train_predicted_labels

    print(f"The best classifier is {best_classifier_name}.\n")

+   # Draw confusion matrix for the best classifier
+   ConfusionMatrixDisplay(confusion_matrix = confusion_matrix(train_labels, best_classifier_predictions)).plot()
+   plt.show()

    # Read the test data
    test_features = pd.read_csv("TestingDataBinary.csv", header = None)
......
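Note: the hunks above only show fragments of the model-selection logic. The following is a minimal, self-contained sketch of what that loop appears to do, using synthetic data in place of the training CSV and plain 5-fold cross_val_score as the selection score; the data, the fold count, and the metric are assumptions, not values taken from the commit.

# Hypothetical reconstruction of the selection loop shown only partially in the
# hunks above; synthetic data stands in for the real training features/labels.
import matplotlib.pyplot as plt
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import confusion_matrix, ConfusionMatrixDisplay
from sklearn.model_selection import cross_val_score
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC

train_features, train_labels = make_classification(n_samples = 200, n_features = 10, random_state = 0)
train_features_scaled = StandardScaler().fit_transform(train_features)

classifiers = [
    ("Logistic Regression", LogisticRegression(max_iter = 10000).fit(train_features_scaled, train_labels)),
    ("C-Support Vector",    SVC(kernel = "linear").fit(train_features_scaled, train_labels)),
]

best_classifier_name, best_classifier, best_classifier_average, best_classifier_predictions = None, None, -1.0, None

for classifier_name, classifier in classifiers:
    # Average 5-fold cross-validation score for this classifier
    cv_average = cross_val_score(classifier, train_features_scaled, train_labels, cv = 5).mean()
    train_predicted_labels = classifier.predict(train_features_scaled)

    # Update the best classifier if the current classifier has a better average score
    if best_classifier_average < cv_average:
        best_classifier_name = classifier_name
        best_classifier = classifier
        best_classifier_average = cv_average
        best_classifier_predictions = train_predicted_labels

print(f"The best classifier is {best_classifier_name}.\n")

# Draw confusion matrix for the best classifier
ConfusionMatrixDisplay(confusion_matrix = confusion_matrix(train_labels, best_classifier_predictions)).plot()
plt.show()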
Part_B.py
# Import libraries and modules
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from skopt import BayesSearchCV
from skopt.space import Real, Categorical, Integer
from sklearn.model_selection import cross_val_score
from sklearn.utils import shuffle
from sklearn.preprocessing import StandardScaler
-from sklearn.metrics import accuracy_score, f1_score
+from sklearn.metrics import accuracy_score, f1_score, confusion_matrix, ConfusionMatrixDisplay
from sklearn.linear_model import LogisticRegression
from sklearn.neural_network import MLPClassifier
from sklearn.svm import SVC
@@ -56,7 +57,7 @@ def main():
    # Train the classifiers
    classifiers = [
-       ("Logistical Regression" , LogisticRegression(max_iter = 10000, solver = "newton-cg", C = 9.088000000000001).fit(train_features_scaled, train_labels)),
+       ("Logistic Regression" , LogisticRegression(max_iter = 10000, solver = "newton-cg", C = 9.088000000000001).fit(train_features_scaled, train_labels)),
        ("Multi-layer Perceptron" , MLPClassifier(max_iter = 10000, solver = "adam", activation = "tanh", learning_rate = "adaptive").fit(train_features_scaled, train_labels)),
        ("C-Support Vector" , SVC(C = 9.59, kernel = "linear").fit(train_features_scaled, train_labels))
    ]
@@ -87,15 +88,14 @@ def main():
            best_classifier_name = classifier_name
            best_classifier = classifier
            best_classifier_average = cv_average

-       # Update the best classifier if the current classifier has a better average score
-       if (best_classifier_average < cv_average):
-           best_classifier_name = classifier_name
-           best_classifier = classifier
-           best_classifier_average = cv_average
+           best_classifier_predictions = train_predicted_labels

    print(f"The best classifier is {best_classifier_name}.\n")

+   # Draw confusion matrix for the best classifier
+   ConfusionMatrixDisplay(confusion_matrix = confusion_matrix(train_labels, best_classifier_predictions)).plot()
+   plt.show()

    # Read the test data
    test_features = pd.read_csv("TestingDataMulti.csv", header = None)
......
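Note: neither diff shows the skopt code actually being used, but the BayesSearchCV / Real / Categorical / Integer imports suggest that hard-coded hyperparameters such as C = 9.416 and C = 9.59 were found by a Bayesian search run beforehand. A rough, self-contained sketch of such a search follows; the synthetic data, the search-space bounds, the iteration count, and the scoring metric are illustrative assumptions, not values from the commit.

# Hypothetical sketch of the hyperparameter search implied by the skopt imports;
# nothing below is taken from the committed scripts except the general approach.
from skopt import BayesSearchCV
from skopt.space import Real, Categorical
from sklearn.datasets import make_classification
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC

features, labels = make_classification(n_samples = 200, n_features = 10, random_state = 0)
features_scaled = StandardScaler().fit_transform(features)

search = BayesSearchCV(
    estimator = SVC(),
    search_spaces = {
        "C"      : Real(0.01, 10.0, prior = "uniform"),   # range wide enough to cover values like 7.99 or 9.59
        "kernel" : Categorical(["linear", "rbf"]),
    },
    n_iter = 32,        # number of hyperparameter settings sampled
    cv = 5,             # 5-fold cross-validation per setting
    scoring = "f1",
    random_state = 0,
)
search.fit(features_scaled, labels)
print(search.best_params_)   # the kind of values that end up pinned in the classifier constructors

Pinning the discovered values directly in the constructors, as both scripts do, keeps the committed training runs independent of the search.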