| Model (fixed parameters) | Hyperparameter search space | Selected values |
| --- | --- | --- |
| LogisticRegression | penalty: {"l1", "l2", "elasticnet", "none"}; C: {1, 0.1, 0.01}; solver: {"lbfgs", "newton-cg", "liblinear", "sag", "saga"} | penalty="l1", C=0.1, solver="liblinear" |
| SVM (kernel="rbf", degree=3) | — | — |
| DecisionTree | criterion: {"gini", "entropy"}; max_depth: {None, 4, 5, 6, 7, 8}; min_samples_split: {2, 4, 8, 10, 20, 30, 40} | criterion="entropy", max_depth=None, min_samples_split=2 |
| RandomForest | n_estimators: {30, 100, 200}; max_depth: {4, 5, 6, 7, 8}; min_samples_split: {2, 4, 6, 8}; min_samples_leaf: {1, 2, 3} | n_estimators=100, max_depth=8, min_samples_split=2, min_samples_leaf=1 |
| KNeighborsClassifier | n_neighbors: {5, 10, 15, 20, 30}; weights: {"uniform", "distance"}; metric: {"minkowski", "euclidean", "manhattan"} | n_neighbors=5, weights="distance", metric="manhattan" |
| AdaBoostClassifier | n_estimators: {5, 10, 30, 50, 100, 500}; learning_rate: {0.01, 0.05, 0.1, 0.15, 0.2, 0.3, 0.5, 1.0}; algorithm: {"SAMME", "SAMME.R"} | n_estimators=500, learning_rate=1.0, algorithm="SAMME.R" |
| XGBClassifier (eval_metric="aucpr", n_estimators=100, booster="gbtree", colsample_bytree=1, learning_rate=0.3) | eta: {0.01, 0.05, 0.1, 0.15, 0.2, 0.3}; gamma: {0, 1, 5}; max_depth: {5, 6, 8, 10}; min_child_weight: {0, 1, 2, 5, 10} | eta=0.2, gamma=0, max_depth=5, min_child_weight=0 |
| MLPClassifier (hidden_layer_sizes=(64, 64, 64), activation="relu") | solver: {"adam", "lbfgs"}; alpha: {0.0001, 0.001, 0.01, 0.05, 0.1}; learning_rate: {"constant", "adaptive"}; learning_rate_init: {0.01, 0.005, 0.001}; batch_size: {150, 300, 500} | solver="lbfgs", alpha=0.1, learning_rate="constant", learning_rate_init=0.01, batch_size=150 |
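For concreteness, below is a minimal sketch of how one row of this table could be searched with scikit-learn's `GridSearchCV`. It is an assumption that a grid search of this kind was used; the table does not state the search procedure. The dataset (`X`, `y` from `make_classification`), the cross-validation setup, and `max_iter` are placeholders, not taken from the table; only `param_grid` mirrors the LogisticRegression row above.

```python
# Sketch: grid search over the LogisticRegression row of the table.
# Data, CV folds, and max_iter are illustrative placeholders.
import numpy as np
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import GridSearchCV

X, y = make_classification(n_samples=1000, random_state=0)  # placeholder data

# Search space copied from the LogisticRegression row. Recent scikit-learn
# versions spell the "none" penalty as None.
param_grid = {
    "penalty": ["l1", "l2", "elasticnet", None],
    "C": [1, 0.1, 0.01],
    "solver": ["lbfgs", "newton-cg", "liblinear", "sag", "saga"],
}

# Not every penalty/solver pair is valid (e.g. "lbfgs" cannot fit "l1");
# error_score=np.nan lets the search score failing combinations as NaN
# instead of aborting.
search = GridSearchCV(
    LogisticRegression(max_iter=1000),
    param_grid,
    cv=5,
    error_score=np.nan,
)
search.fit(X, y)
print(search.best_params_)
```

The same pattern extends to the other rows by swapping in the corresponding estimator (with its fixed parameters from the first column) and its search space as `param_grid`.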