AdaBoost
{"max_depth": [1, 2, 3],
 "learning_rate": [0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1],
 "n_estimators": [50, 100, 150, 200],
 "min_samples_leaf": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]}
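In scikit-learn, max_depth and min_samples_leaf are parameters of the base decision tree rather than of AdaBoostClassifier itself, so a grid search has to route them to the base estimator. A minimal sketch, assuming scikit-learn >= 1.2 (older releases use the base_estimator__ prefix instead) and pre-split X_train / y_train arrays:

from sklearn.ensemble import AdaBoostClassifier
from sklearn.model_selection import GridSearchCV
from sklearn.tree import DecisionTreeClassifier

# Tree-specific settings are routed to the base estimator via the
# "estimator__" prefix; learning_rate and n_estimators are AdaBoost's own.
param_grid = {
    "estimator__max_depth": [1, 2, 3],
    "estimator__min_samples_leaf": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
    "learning_rate": [0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1],
    "n_estimators": [50, 100, 150, 200],
}

search = GridSearchCV(
    AdaBoostClassifier(estimator=DecisionTreeClassifier()),
    param_grid,
    cv=5,  # assumed fold count; the source does not specify one
)
search.fit(X_train, y_train)  # X_train / y_train assumed to exist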
Random Forest
{"max_depth": [3, 4, 5, 6, 7, 8, 9, 10],
 "min_samples_split": [2, 4, 8, 12, 16],
 "n_estimators": [50, 100, 150],
 "criterion": ["gini", "entropy"],
 "max_features": ["auto", 2, 3, 4, 6, 8, 10, 11],
 "min_samples_leaf": [2, 3, 4, 5, 6, 7, 8]}
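Fully enumerating this Random Forest grid means 8 x 5 x 3 x 2 x 8 x 7 = 13,440 candidate models per cross-validation fold, so randomized search is a common alternative; the sketch below swaps in RandomizedSearchCV under that assumption. Note also that max_features="auto" was removed in scikit-learn 1.3, where "sqrt" is the equivalent setting for classifiers:

from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import RandomizedSearchCV

param_distributions = {
    "max_depth": [3, 4, 5, 6, 7, 8, 9, 10],
    "min_samples_split": [2, 4, 8, 12, 16],
    "n_estimators": [50, 100, 150],
    "criterion": ["gini", "entropy"],
    "max_features": ["sqrt", 2, 3, 4, 6, 8, 10, 11],  # "sqrt" replaces "auto"
    "min_samples_leaf": [2, 3, 4, 5, 6, 7, 8],
}

search = RandomizedSearchCV(
    RandomForestClassifier(random_state=0),
    param_distributions,
    n_iter=100,      # assumed budget: sample 100 of the 13,440 combinations
    cv=5,            # assumed fold count
    random_state=0,
)
search.fit(X_train, y_train)  # X_train / y_train assumed to exist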
XGBoost
{"max_depth": [3, 4, 5, 6],
 "learning_rate": [0.01, 0.05, 0.1, 0.2],
 "n_estimators": [50, 100, 150, 200],
 "min_child_weight": [1, 2, 3, 4],
 "subsample": [0.8, 0.9, 1],
 "colsample_bytree": [0.8, 0.9, 1]}
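The XGBoost grid maps directly onto the constructor parameters of XGBClassifier, xgboost's scikit-learn wrapper, so it can be searched with the same GridSearchCV pattern. A sketch under those assumptions:

from xgboost import XGBClassifier
from sklearn.model_selection import GridSearchCV

param_grid = {
    "max_depth": [3, 4, 5, 6],
    "learning_rate": [0.01, 0.05, 0.1, 0.2],
    "n_estimators": [50, 100, 150, 200],
    "min_child_weight": [1, 2, 3, 4],   # minimum summed instance weight per child
    "subsample": [0.8, 0.9, 1],         # row sampling per boosting round
    "colsample_bytree": [0.8, 0.9, 1],  # column sampling per tree
}

search = GridSearchCV(XGBClassifier(), param_grid, cv=5)  # cv=5 assumed
search.fit(X_train, y_train)  # X_train / y_train assumed to exist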
Logistic Regression
{"penalty": ["l1", "l2", "elasticnet", "none"],
 "C": [0.001, 0.01, 0.1, 1, 10],
 "solver": ["newton-cg", "lbfgs", "liblinear", "sag", "saga"],
 "max_iter": [50, 100, 200]}
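Not every penalty/solver pair in this grid is valid in scikit-learn: "l1" requires liblinear or saga, "elasticnet" requires saga plus an l1_ratio, and liblinear does not support fitting without a penalty. One way to keep the search to valid combinations is a list of sub-grids, sketched below (the l1_ratio values are assumptions, and recent scikit-learn versions use penalty=None in place of the string "none"):

from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import GridSearchCV

C_values = [0.001, 0.01, 0.1, 1, 10]
max_iters = [50, 100, 200]

# Each sub-grid pairs a penalty only with the solvers that support it.
param_grid = [
    {"penalty": ["l2"], "C": C_values, "max_iter": max_iters,
     "solver": ["newton-cg", "lbfgs", "liblinear", "sag", "saga"]},
    {"penalty": ["l1"], "C": C_values, "max_iter": max_iters,
     "solver": ["liblinear", "saga"]},
    {"penalty": ["elasticnet"], "C": C_values, "max_iter": max_iters,
     "solver": ["saga"], "l1_ratio": [0.2, 0.5, 0.8]},  # l1_ratio values assumed
    {"penalty": [None], "max_iter": max_iters,          # C is ignored without a penalty
     "solver": ["newton-cg", "lbfgs", "sag", "saga"]},
]

search = GridSearchCV(LogisticRegression(), param_grid, cv=5)  # cv=5 assumed
search.fit(X_train, y_train)  # X_train / y_train assumed to exist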