baselines/classifiers.py

from sklearn.linear_model import LogisticRegression
from xgboost import XGBClassifier
from sklearn.svm import SVC as SVM
from sklearn.ensemble import RandomForestClassifier
from sklearn.neighbors import KNeighborsClassifier


# logistic regression (solver='liblinear' added: the default lbfgs solver does not support the l1 penalty)
lr = LogisticRegression(C=0.01, dual=False, penalty='l1', solver='liblinear', verbose=100, max_iter=5000)
# nearest neighbors classification
knn = KNeighborsClassifier(n_neighbors=5, algorithm='auto')
# xgboost
xgb = XGBClassifier(min_child_weight=7, n_estimators=160, nthread=1, subsample=0.8)
# c-support vector classification
svm = SVM(probability=True, max_iter=1000)
# random forest
# rf = RandomForestClassifier(n_estimators=160, criterion='entropy', max_features='sqrt')
rf = RandomForestClassifier(warm_start=True, n_estimators=160, criterion='entropy', max_features='sqrt')
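

# Usage sketch (illustrative addition, not part of the original file): fit each
# baseline above with 5-fold cross-validation on a synthetic dataset. The data
# from make_classification and the `models` dict are assumptions made here
# purely for demonstration.
if __name__ == "__main__":
    from sklearn.datasets import make_classification
    from sklearn.model_selection import cross_val_score

    X, y = make_classification(n_samples=500, n_features=20, random_state=0)
    models = {"lr": lr, "knn": knn, "xgb": xgb, "svm": svm, "rf": rf}
    for name, model in models.items():
        scores = cross_val_score(model, X, y, cv=5, scoring="accuracy")
        print(f"{name}: mean accuracy {scores.mean():.3f} (+/- {scores.std():.3f})")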