Commit a83d569e authored by Jaralus

update Part_A.py and Part_B.py

parent 33072e24
@@ -60,7 +60,7 @@ def main():
         ("Multi-layer Perceptron" , MLPClassifier(max_iter = 10000, solver = "adam", activation = "tanh", learning_rate = "constant").fit(train_features_scaled, train_labels)),
         ("C-Support Vector" , SVC(C = 7.989999999999979, kernel = "linear").fit(train_features_scaled, train_labels)),
-        # These algorithms were optimised and trained but were found to be overfitting
+        # These algorithms were optimised and can be trained, but were commented out as they were found to be overfitting
         #("Random Forest" , RandomForestClassifier(n_estimators = 418, max_depth = 5).fit(train_features_scaled, train_labels)),
         #("Decision Tree" , DecisionTreeClassifier(max_features = "sqrt", criterion = "gini", max_depth = 19).fit(train_features_scaled, train_labels)),
         #("K-Nearest Neighbours" , KNeighborsClassifier(n_neighbors = 4, n_jobs = -1, leaf_size = 68, metric = "manhattan", weights = "distance", algorithm = "kd_tree").fit(train_features_scaled, train_labels))
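Note on the comment reworded in this hunk: the overfitting it refers to is typically spotted by comparing training accuracy against accuracy on a held-out split. The sketch below is not part of this commit; the synthetic dataset, the 75/25 split, and the scaling step are illustrative assumptions, while the Random Forest hyperparameters mirror the commented-out line above.

# Illustrative sketch only; the dataset, split, and scaling are assumptions,
# not code from this repository.
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler

# Synthetic stand-in for the project's data
features, labels = make_classification(n_samples = 500, n_features = 20, random_state = 0)
train_features, test_features, train_labels, test_labels = train_test_split(
    features, labels, test_size = 0.25, random_state = 0)

# Scale both splits with statistics fitted on the training split only
scaler = StandardScaler().fit(train_features)
train_features_scaled = scaler.transform(train_features)
test_features_scaled = scaler.transform(test_features)

# Hyperparameters taken from the commented-out Random Forest in the hunk above
clf = RandomForestClassifier(n_estimators = 418, max_depth = 5).fit(train_features_scaled, train_labels)

# A large gap between training and held-out accuracy is a simple overfitting signal
train_acc = clf.score(train_features_scaled, train_labels)
test_acc = clf.score(test_features_scaled, test_labels)
print(f"train accuracy = {train_acc:.3f}, test accuracy = {test_acc:.3f}")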
@@ -56,9 +56,9 @@ def main():
     # Train the classifiers
     classifiers = [
-        #("Logistical Regression" , LogisticRegression(max_iter = 10000, solver = "newton-cg", C = 9.088000000000001).fit(train_features_scaled, train_labels)),
+        ("Logistical Regression" , LogisticRegression(max_iter = 10000, solver = "newton-cg", C = 9.088000000000001).fit(train_features_scaled, train_labels)),
         ("Multi-layer Perceptron" , MLPClassifier(max_iter = 10000, solver = "adam", activation = "tanh", learning_rate = "adaptive").fit(train_features_scaled, train_labels)),
-        #("C-Support Vector" , SVC(C = 9.59, kernel = "linear").fit(train_features_scaled, train_labels))
+        ("C-Support Vector" , SVC(C = 9.59, kernel = "linear").fit(train_features_scaled, train_labels))
     ]
     # Evaluate the performance of the trained classifiers
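The evaluation step referenced by the final context line above is collapsed out of this diff. A hedged sketch of how the (name, fitted classifier) tuples in the list could be evaluated follows; accuracy_score and classification_report are an assumption about that step, and classifiers, test_features_scaled, and test_labels are assumed to be defined earlier in the script, as in the hunks above.

# Assumes `classifiers`, `test_features_scaled`, and `test_labels` exist as in the
# surrounding script; the metrics used here are illustrative, not the commit's code.
from sklearn.metrics import accuracy_score, classification_report

for name, classifier in classifiers:
    predictions = classifier.predict(test_features_scaled)
    print(f"{name}: accuracy = {accuracy_score(test_labels, predictions):.3f}")
    print(classification_report(test_labels, predictions))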