Diff of /Hyperparameter.py [000000] .. [0bd3e5]

# Hyperparameter Optimization
#
# Imports required by this snippet; N_GENES, N_LAYERS, myRndSeeds, pipe,
# X and y are assumed to be defined elsewhere in the project.
import pandas as pd
from sklearn import naive_bayes, tree
from sklearn.ensemble import AdaBoostClassifier
from sklearn.model_selection import GridSearchCV, KFold
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neural_network import MLPClassifier

tuned_parameters = [
    # KNN Classifier (k = 2, 3, 4)
    {'featureSelection__w': N_GENES,
     'classify': [KNeighborsClassifier()],
     'classify__n_neighbors': [2, 3, 4]
     },
    # Decision Tree Classifier (scikit-learn's CART, used in place of J48)
    {'featureSelection__w': N_GENES,
     'classify': [tree.DecisionTreeClassifier()],
     'classify__criterion': ['gini', 'entropy'],
     'classify__min_samples_leaf': [1, 3, 5],
     'classify__max_depth': [3, 6, 9],
     'classify__presort': [True]   # note: presort was removed in newer scikit-learn releases
     },
    # Neural Network (Multi-layer Perceptron) Classifier
    {'featureSelection__w': N_GENES,
     'classify': [MLPClassifier()],
     'classify__hidden_layer_sizes': N_LAYERS,
     'classify__activation': ['logistic'],
     'classify__alpha': [0.05, 0.01, 0.005, 0.001],
     'classify__max_iter': [1000],
     'classify__solver': ['lbfgs'],
     'classify__verbose': [True]
     },
    # Naïve Bayes Classifier
    {'featureSelection__w': N_GENES,
     'classify': [naive_bayes.GaussianNB()]
     },
    # AdaBoost Classifier
    {'featureSelection__w': N_GENES,
     'classify': [AdaBoostClassifier()]
     }
]
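
# Illustrative sketch, not part of the original file: the grid keys above
# target a Pipeline named `pipe` with steps 'featureSelection' and 'classify'.
# The project's actual feature-selection transformer (whose `w` parameter is
# the number of genes to keep) is defined elsewhere; a minimal hypothetical
# stand-in, built on SelectKBest, could look like this.
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.feature_selection import SelectKBest, f_classif
from sklearn.pipeline import Pipeline

class GeneSelector(BaseEstimator, TransformerMixin):
    """Hypothetical stand-in: keep the w highest-scoring genes (ANOVA F-test)."""
    def __init__(self, w=10):
        self.w = w

    def fit(self, X, y=None):
        self.selector_ = SelectKBest(f_classif, k=self.w).fit(X, y)
        return self

    def transform(self, X):
        return self.selector_.transform(X)

pipe = Pipeline([('featureSelection', GeneSelector()),
                 ('classify', KNeighborsClassifier())])  # 'classify' is swapped out by each grid entry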

# Model Selection using Pipeline and Cross validation
kfolds = KFold(n_splits=5, shuffle=True, random_state=myRndSeeds)
model = GridSearchCV(pipe, tuned_parameters, cv=kfolds,
                     return_train_score=True)
model.fit(X, y)
results = pd.DataFrame(model.cv_results_)
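
# Illustrative usage, not in the original file: once fitted, GridSearchCV
# exposes the winning configuration and per-candidate scores, which can be
# read from `model` and the `results` DataFrame built above.
print("Best pipeline parameters:", model.best_params_)
print("Best mean CV score:", model.best_score_)

# Rank every candidate by cross-validated test score; these column names
# follow the cv_results_ convention ('param_classify' and
# 'param_featureSelection__w' exist because they are keys in tuned_parameters).
summary = (results[['param_classify', 'param_featureSelection__w',
                    'mean_test_score', 'std_test_score', 'rank_test_score']]
           .sort_values('rank_test_score'))
print(summary.head(10))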