src/biodiscml/Main.java
/*
 * Run all routines to execute the training
 */
package biodiscml;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.HashMap;

/**
 *
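 * Entry point of BioDiscML: parses the command-line options and the
 * configuration file, then runs training, best model selection and/or
 * prediction on new data.
 *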
 * @author Mickael
 */
public class Main {

    public static boolean debug = false;
    public static boolean debug2 = false;
    public static boolean printFailedModels = false; //print errors of failed models
    public static boolean isClassification = true;

    public static String wd = "";
    public static String project = "myProject";

    //program functions
    public static String configFile = ""; //config file
    public static boolean needConfigFile = true;
    public static boolean training = false;
    public static boolean predictNewData = false;
    public static boolean trainingBestModel = false;
    public static HashMap<String, String> hmTrainingBestModelList = new HashMap<>(); //modelID, identifier prefix

    public static String modelFile = "";
    public static HashMap<String, String> hmExcludedFeatures = new HashMap<>(); //features to exclude from the final dataset

    //config
    public static String mergingID = "Instance";

    //source files
    public static HashMap<String, String> hmTrainFiles = new HashMap<>(); //filename, identifier prefix
    public static HashMap<String, String> hmNewDataFiles = new HashMap<>(); //filename, identifier prefix

    //options
    public static Boolean doClassification = false;
    public static String classificationClassName = "class";
    public static String regressionClassName = "class";
    public static String separator = "";
    public static Boolean doRegression = false;
    public static Boolean classificationFastWay = false;
    public static Integer numberOfBestModels = 1;
    public static ArrayList<String> classificationFastWayCommands = new ArrayList<>(); //classifier, optimizer
    public static ArrayList<String> classificationBruteForceCommands = new ArrayList<>(); //classifier, optimizer
    public static String classificationOptimizers = "auc, mcc, fdr, ber, acc";
    public static String searchmodes = "f,fb,b,bf,top1,top5,top10,top15,top20,top30,top40,top50,top75,top100,top200,all";
    public static Boolean metaCostSensitiveClassifier = false;
    public static Boolean regressionFastWay = false;
    public static String regressionOptimizers = "CC, RMSE";
    public static Boolean metaAdditiveRegression = false;
    public static ArrayList<String> regressionFastWayCommands = new ArrayList<>(); //classifier, optimizer
    public static ArrayList<String> regressionBruteForceCommands = new ArrayList<>(); //classifier, optimizer
    public static double pAUC_lower = 0;
    public static double pAUC_upper = 0.3;
    public static double spearmanCorrelation_lower = -0.99;
    public static double spearmanCorrelation_upper = 0.99;
    public static double pearsonCorrelation_lower = -0.99;
    public static double pearsonCorrelation_upper = 0.99;
    public static String bestModelsSortingMetric = "AVG_MCC";
    public static double bestModelsSortingMetricThreshold = 0.1;
    public static Integer maxNumberOfFeaturesInModel = 200;
    public static int maxNumberOfSelectedFeatures = 1000;
    public static boolean doSampling = true;
    public static int samplingFold = 3; //separate the set into x parts, keep 1 for test, others for training
    public static int bootstrapAndRepeatedHoldoutFolds = 100; // Also used for repeated holdout
    public static String cpus = "max";
    public static boolean combineModels = false;
    public static boolean retrieveCorrelatedGenes = true;

    public static String combinationRule = "AVG";
    public static double maxRankingScoreDifference = 0.005; //for correlated gene retrieval
    public static boolean loocv = true;
    public static boolean repeatedHoldout = true;
    public static boolean bootstrap = true;
    public static boolean computeBestModel = true;

    public static boolean resumeTraining = false;
    public static boolean restoreRun = false;
    public static String previousRunPath = "";
    public static String previousRunProjectName = "";

    public static Boolean noFeatureSelection = false;
    public static boolean generateModelWithCorrelatedGenes = false;
    public static String missingValueToReplace = "?"; //for testing, if a feature is missing, set a value to replace missing data

    //benchmark
    public static String bench_AUC = "";

    //TODO
    static boolean retreiveCorrelatedGenesByRankingScore = true; //avoid for non-binary classes and regression
    static boolean ROCcurves = false; //experimental
    static boolean UpSetR = false; //experimental
    static boolean performShortTest = true;

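    /* Illustrative configuration snippet covering some of the keys handled by
       setOption() below (file names and values here are examples only):
         project=myProject
         trainFile=training_data.csv,px
         doClassification=true
         classificationClassName=class
         numberOfBestModels=5
     */
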
    public static void main(String[] args) throws IOException {
        System.out.println("#### BioDiscML ####\n");
        // check java version
        String version = System.getProperty("java.version");
        if (!version.contains("1.8")) {
            String arg = "";
            for (String argl : args) {
                arg += argl + " ";
            }
            System.out.println("ERROR: Java version is " + version + "."
                    + "\nYou need java 1.8. If it is installed, try this command:\n"
            );
            if (System.getProperty("os.name").toLowerCase().contains("windows")) {
                System.out.println("\"c:\\Program Files\\Java\\jdk1.8.0_111\\bin\\java.exe\" -jar biodiscml.jar " + arg);
            } else {
                System.out.println("/mnt/software/jvm/jdk1.8.0_371/bin/java -jar biodiscml.jar " + arg);
            }
            System.exit(0);
        }
        //read configuration file
        System.out.println("#### Parsing options...");
        setOptionsFromCommandLine(args); //from command line

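        // Illustrative invocation (the config file name is an example):
        //   java -jar biodiscml.jar -config config.conf -train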
        if (!configFile.isEmpty() && needConfigFile) {
            setConfiguration();
        }

        //set models
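        // classifiers.conf uses the same key=value format as the configuration
        // file: ccmd= lines feed classificationBruteForceCommands and rcmd=
        // lines feed regressionBruteForceCommands (see the parsing just below).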
        if (!classificationFastWay && !regressionFastWay) {
            try {
                String line;
                File classifiers = new File(wd + "/" + "classifiers.conf");
                BufferedReader br;
                if (!classifiers.exists()) {
                    try {
                        br = new BufferedReader(new InputStreamReader(Main.class.getResourceAsStream("/classifiers.conf")));
                    } catch (Exception e) {
                        br = new BufferedReader(new FileReader("/classifiers.conf"));
                    }
                } else {
                    br = new BufferedReader(new FileReader(classifiers));
                }
                // read each line before processing it, so the last entry of the
                // file is not skipped
                while (br.ready()) {
                    line = br.readLine();
                    if (!line.startsWith("#") && !line.trim().isEmpty()) {
                        String option = line.split("=")[0].trim();
                        String value = line.split("=")[1].trim();
                        switch (option) {
                            case "ccmd":
                                classificationBruteForceCommands.add(value.trim());
                                break;
                            case "rcmd":
                                regressionBruteForceCommands.add(value.trim());
                                break;
                        }
                    }
                }
                br.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        } else {
            System.out.println("Model search mode: Fast way mode");
        }

        //set number of max cpus to use
        if (!cpus.equals("max")) {
            System.setProperty("java.util.concurrent.ForkJoinPool.common.parallelism", cpus);
        }

        // Retrieve previously existing a (train and test data) and b (infogain) files
        if (restoreRun) {
            resumeTraining = true;
            // get current directory
            if (wd.isEmpty()) {
                wd = new java.io.File(".").getCanonicalPath() + java.io.File.separator;
            }
            System.out.println("Restoring data from previous project " + previousRunProjectName
                    + "\nCopying files from " + previousRunPath);
            //copy previous run
            try {
                Path source = Paths.get(previousRunPath + previousRunProjectName + "_a.classification.all_data.arff");
                Path destination = Paths.get(wd + "/" + project + "a.classification.all_data.arff");
                Files.copy(source, destination, StandardCopyOption.REPLACE_EXISTING);

                source = Paths.get(previousRunPath + previousRunProjectName + "_a.classification.all_data.csv");
                destination = Paths.get(wd + "/" + project + "a.classification.all_data.csv");
                Files.copy(source, destination, StandardCopyOption.REPLACE_EXISTING);

                source = Paths.get(previousRunPath + previousRunProjectName + "_a.classification.data_to_test.arff");
                destination = Paths.get(wd + "/" + project + "a.classification.data_to_test.arff");
                Files.copy(source, destination, StandardCopyOption.REPLACE_EXISTING);

                source = Paths.get(previousRunPath + previousRunProjectName + "_a.classification.data_to_test.csv");
                destination = Paths.get(wd + "/" + project + "a.classification.data_to_test.csv");
                Files.copy(source, destination, StandardCopyOption.REPLACE_EXISTING);

                source = Paths.get(previousRunPath + previousRunProjectName + "_a.classification.data_to_train.arff");
                destination = Paths.get(wd + "/" + project + "a.classification.data_to_train.arff");
                Files.copy(source, destination, StandardCopyOption.REPLACE_EXISTING);

                source = Paths.get(previousRunPath + previousRunProjectName + "_a.classification.data_to_train.csv");
                destination = Paths.get(wd + "/" + project + "a.classification.data_to_train.csv");
                Files.copy(source, destination, StandardCopyOption.REPLACE_EXISTING);

                source = Paths.get(previousRunPath + previousRunProjectName + "_b.featureSelection.infoGain.arff");
                destination = Paths.get(wd + "/" + project + "b.featureSelection.infoGain.arff");
                Files.copy(source, destination, StandardCopyOption.REPLACE_EXISTING);

                source = Paths.get(previousRunPath + previousRunProjectName + "_b.featureSelection.infoGain.csv");
                destination = Paths.get(wd + "/" + project + "b.featureSelection.infoGain.csv");
                Files.copy(source, destination, StandardCopyOption.REPLACE_EXISTING);

                File f = new File(previousRunPath + previousRunProjectName + "_c.classification.results.csv");
                f.createNewFile();
            } catch (Exception e) {
                e.printStackTrace();
            }

        }

        // Go to training
        if (training) {
            System.out.println("#### Start training...");
            //CLASSIFICATION
            if (doClassification) {
                isClassification = true;
                //put data together in the same file for ML
                System.out.println("## Preprocessing of the input file(s)");
                String CLASSIFICATION_FILE = wd + project + "a.classification.data_to_train.csv"; //output of AdaptDatasetToWeka()
                if (debug) {
                    System.out.println("CLASSIFICATION_FILE: " + CLASSIFICATION_FILE);
                }
                if (new File(CLASSIFICATION_FILE).exists() && resumeTraining) {
                    System.out.println("Preprocessing of the input file(s) already done... skipping");
                } else {
                    AdaptDatasetToTraining c = new AdaptDatasetToTraining(CLASSIFICATION_FILE);
                }
                //execute feature selection and training
                System.out.println("## Feature selection and training");
                String FEATURE_SELECTION_FILE = wd + project + "b.featureSelection.infoGain.csv"; // output of Training(), feature selection result
                String TRAINING_RESULTS_FILE = wd + project + "c.classification.results.csv"; // output of Training(), models performances
                Training m = new Training(CLASSIFICATION_FILE, TRAINING_RESULTS_FILE, FEATURE_SELECTION_FILE, "class");

                //choose best model
                if (computeBestModel) {
                    System.out.println("## Best model selection");
                    BestModelSelectionAndReport b = new BestModelSelectionAndReport(CLASSIFICATION_FILE, FEATURE_SELECTION_FILE, TRAINING_RESULTS_FILE,
                            "classification");
                }
            }

            //REGRESSION
            if (doRegression) {
                isClassification = false;
                //put data together in the same file for ML
                System.out.println("## Preprocessing of the input file(s)");
                String REGRESSION_FILE = wd + project + "a.regression.data_to_train.csv";
                if (new File(REGRESSION_FILE).exists() && resumeTraining) {
                    System.out.println("Preprocessing of the input file(s) already done... skipped by resumeTraining");
                } else {
                    AdaptDatasetToTraining c = new AdaptDatasetToTraining(REGRESSION_FILE);
                }

                //execute training
                System.out.println("## Feature selection and training");
                String FEATURE_SELECTION_FILE = wd + project + "b.featureSelection.RELIEFF.csv"; //filled by feature selection algo type
                String TRAINING_RESULTS_FILE = wd + project + "c.regression.results.csv";
                Training m = new Training(REGRESSION_FILE, TRAINING_RESULTS_FILE, FEATURE_SELECTION_FILE, "reg");

                //choose best model
                if (computeBestModel) {
                    System.out.println("## Best model selection");
                    BestModelSelectionAndReport b = new BestModelSelectionAndReport(REGRESSION_FILE, FEATURE_SELECTION_FILE, TRAINING_RESULTS_FILE,
                            "regression");
                }
            }

            if (!doClassification && !doRegression) {
                System.err.println("[error] No prediction type has been set (classification or regression)."
                        + " Set doClassification or doRegression to true");
                System.exit(0);
            }
        }

        if (predictNewData) {
            System.out.println("#### Start predicting new data...");
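            // Prediction needs a trained model (modelFile) and at least one new
            // data file (newDataFile or validationFile in the configuration);
            // both requirements are checked below.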
            if (modelFile.isEmpty()) {
                System.err.println("[error] No model file has been provided (Set a modelFile in the config file)");
                System.exit(0);
            }
            if (hmNewDataFiles.isEmpty()) {
                System.err.println("[error] No new data file has been provided (Set a newDataFile in the config file)");
                System.exit(0);
            }

            //put data together in the same file for ML
            String NEWDATA_FILE = wd + project + ".data_to_predict.csv"; //output of AdaptDatasetToWeka()
            AdaptDatasetToTesting c = null;
            if (doClassification) {
                c = new AdaptDatasetToTesting(classificationClassName, hmNewDataFiles,
                        NEWDATA_FILE, separator, wd + modelFile);
            } else {
                c = new AdaptDatasetToTesting(regressionClassName, hmNewDataFiles,
                        NEWDATA_FILE, separator, wd + modelFile);
            }

            //execute prediction and evaluation
            String PREDICTIONS_RESULTS_FILE = wd + project + modelFile + "_" + ".prediction.results.txt"; // output of Testing
            TestingAndEvaluate t = new TestingAndEvaluate();
            t.TestingAndEvaluate(wd + modelFile, NEWDATA_FILE, PREDICTIONS_RESULTS_FILE, c.isMissingClass());
        }

        if (trainingBestModel) {
            System.out.println("#### Start best model selection...");
            if (doClassification) {
                String CLASSIFICATION_FILE = wd + project + "a.classification.data_to_train.csv";
                String TRAINING_RESULTS_FILE = wd + project + "c.classification.results.csv"; // output of Training(), models performances
                String FEATURE_SELECTION_FILE = wd + project + "b.featureSelection.infoGain.csv"; // output of Training(), feature selection result
                BestModelSelectionAndReport b = new BestModelSelectionAndReport(CLASSIFICATION_FILE, FEATURE_SELECTION_FILE, TRAINING_RESULTS_FILE,
                        "classification");

            } else {
                String REGRESSION_FILE = wd + project + "a.regression.data_to_train.csv";
                String TRAINING_RESULTS_FILE = wd + project + "c.regression.results.csv"; // output of Training(), models performances
                String FEATURE_SELECTION_FILE = wd + project + "b.featureSelection.RELIEFF.csv"; // output of Training(), feature selection result
                BestModelSelectionAndReport b = new BestModelSelectionAndReport(REGRESSION_FILE, FEATURE_SELECTION_FILE, TRAINING_RESULTS_FILE,
                        "regression");
            }
        }

        /*
         Exit
         */
        //System.out.println("##Finished with success !");
        //System.exit(0);
    }

    public static void Main() {
        setConfiguration();
    }

    /**
     * set options read from command line
     *
     * @param args
     */
    public static void setOptionsFromCommandLine(String[] args) {
        //parse options
        String cmd = " ";
        for (String s : args) {
            cmd += s + " ";
        }
        String[] options = cmd.split(" -");

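        // Each entry of "options" is one command-line option: "key value",
        // "key=value", or a bare flag such as "train" (the leading dash is
        // consumed by the split above).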
        //in case ccmd/rcmd options are present (only allowed at the end of the command line for now)
        //this implementation is temporary; a more general approach is needed
        for (int i = 0; i < options.length; i++) {
            if (options[i].startsWith("ccmd") || options[i].startsWith("rcmd")) {
                for (int j = i + 1; j < options.length; j++) {
                    if (!options[j].startsWith("ccmd")
                            && !options[j].startsWith("rcmd") //TODO implement: && NOT A RESERVED KEYWORD
                            ) {
                        options[i] += " -" + options[j];
                        options[j] = "";
                    } else {
                        break;
                    }
                }
            }

        }

        //set options
        boolean prefixesDefined = false;
        for (String s : options) {
            if (s.equals("help")) {
                System.out.println("Read readme.md file or https://github.com/mickaelleclercq/BioDiscML");
                System.exit(0);
            }
            if (s.contains("=")) {
                setOption(s.split("=")[0], s.split("=")[1]);
            }
            // get config
            if (s.startsWith("config") && configFile.isEmpty()) {
                configFile = s.split(" ")[1].trim();
            }
            // training
            if (s.trim().equals("train")) {
                training = true;
            }

            //bestmodel
            if (s.startsWith("bestmodel")) {
                trainingBestModel = true;
                if (s.trim().contains(" ")) {
                    String modelID[] = s.split(" ");
                    for (int i = 1; i < modelID.length; i++) {
                        hmTrainingBestModelList.put(modelID[i], i + "");
                    }
                }
            }

            //Predict new data
            if (s.startsWith("predict")) {
                predictNewData = true;
            }

        }

        //if no config file is provided for training, check if we have enough
        //information to start biodiscml
        if (configFile.isEmpty() && training) {
            if (!hmTrainFiles.isEmpty()) {
                needConfigFile = false;
            }
        }

        if (!prefixesDefined) {
            for (String file : hmTrainFiles.keySet()) {
                hmTrainFiles.put(file, "");
            }
        }

        if (predictNewData) {
            System.out.println("#### Mode: Prediction");
            System.out.println("Merging ID: " + mergingID);
            System.out.println("Configuration file: " + configFile);
            if (isClassification) {
                System.out.println("Prediction type: Classification");
            } else {
                System.out.println("Prediction type: Regression");
            }
        } else if (training) {
            System.out.println("#### Mode: Training");
        } else if (trainingBestModel) {
            System.out.println("#### Mode: Best model");
        } else {
            System.err.println("[error] No mode selected (train, bestmodel or predict). "
                    + "Add -train or -bestmodel or -predict to your command line");
        }

        //export a config file when options are given directly on the command line
        if (configFile.isEmpty()) {
            try {
                if (debug) {
                    System.out.println("Export config file in " + wd + project + ".config");
                }
                PrintWriter pw = new PrintWriter(new FileWriter(wd + project + ".config"));
                for (String option : options) {
                    if (option.contains("=")) {
                        pw.println(option);
                    }
                }
                pw.close();
            } catch (Exception e) {
                e.printStackTrace();
            }

        }

    }

    /**
     * read config file
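     * Lines starting with '#' and blank lines are ignored; every other line is
     * parsed as a key=value pair and passed to setOption().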
     */
    public static void setConfiguration() {
        if (!new File(configFile).exists()) {
            if (configFile.isEmpty()) {
                configFile = "empty";
            }
            System.err.println("[error] Configuration file not found (provided source: " + configFile + "). Set the config file with the -config option");
            System.exit(0);
        }
        System.out.println("#### Reading configuration file " + configFile);
        String line = null;
        try {
            BufferedReader br = new BufferedReader(new FileReader(configFile));
            while (br.ready()) {
                line = br.readLine();
                if (!line.startsWith("#") && !line.trim().isEmpty()) {
                    String option = line.split("=")[0].trim();
                    String value = line.split("=")[1].trim();
                    //System.out.println(option + ":" + value);
                    setOption(option, value);
                }
            }

        } catch (Exception e) {
            System.err.println("Parsing error in config file at line " + line);
            e.printStackTrace();
            System.exit(0);
        }

    }

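    /**
     * Map a single configuration key to the corresponding static field; values
     * are trimmed and converted to the expected type.
     */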
    private static void setOption(String option, String value) {
        switch (option) {
            case "config":
                configFile = value.trim();
                break;
            case "debug":
                debug = Boolean.valueOf(value.trim());
                break;
            case "debug2":
                debug2 = Boolean.valueOf(value.trim());
                break;
            case "wd":
                wd = value.trim();
                if (!wd.endsWith(File.separator)) {
                    wd = wd + File.separator;
                }
                break;
            case "project":
                project = value.trim() + "_";
                System.out.println("Project name: " + project);
                break;
            case "trainFile":
                try {
                    hmTrainFiles.put(wd + value.split(",")[0].trim(), value.split(",")[1].trim()); //filename,prefix
                } catch (Exception e) {
                    hmTrainFiles.put(wd + value.replace(",", "").trim(), ""); //filename
                }
                break;
            case "newDataFile":
                try {
                    hmNewDataFiles.put(wd + value.split(",")[0].trim(), value.split(",")[1].trim()); //filename,prefix
                } catch (Exception e) {
                    hmNewDataFiles.put(wd + value.replace(",", "").trim(), ""); //filename
                }
                break;
            case "validationFile":
                try {
                    hmNewDataFiles.put(wd + value.split(",")[0].trim(), value.split(",")[1].trim()); //filename,prefix
                } catch (Exception e) {
                    hmNewDataFiles.put(wd + value.replace(",", "").trim(), ""); //filename
                }
                break;
            case "excluded":
                String excluded[] = value.split(",");
                for (String ex : excluded) {
                    hmExcludedFeatures.put(ex.trim(), "");
                }
                break;
            case "mergingID":
                mergingID = value.trim();
                if (!trainingBestModel) {
                    System.out.println("Merging ID: " + mergingID);
                }
                break;
            case "separator":
                separator = value.trim();
                break;
            case "classification":
                isClassification = true;
                break;
            case "regression":
                isClassification = false;
                break;

            case "doClassification":
                doClassification = Boolean.valueOf(value.trim());
                break;

            case "classificationClassName":
                classificationClassName = value.trim();
                if (doClassification) {
                    System.out.println("ClassificationClassName: " + classificationClassName);
                }
                break;
            case "classificationFastWay":
                classificationFastWay = Boolean.valueOf(value.trim());
                break;
            case "numberOfBestModels":
                numberOfBestModels = Integer.valueOf(value.trim());
                break;
            case "numberOfBestModelsSortingMetric":
                bestModelsSortingMetric = value.trim().toUpperCase();
                break;
            case "numberOfBestModelsSortingMetricThreshold":
                bestModelsSortingMetricThreshold = Double.valueOf(value.trim());
                break;
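            // ccmd/rcmd values have the form "classifier[, optimizer[, searchmode]]";
            // missing parts default to allopt and allsearch.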
            case "ccmd":
                switch (value.split(",").length) {
                    case 1:
                        classificationFastWayCommands.add(value.trim()
                                + ":allopt:allsearch");
                        break;
                    case 2:
                        classificationFastWayCommands.add(value.split(",")[0].trim()
                                + ":" + value.split(",")[1].trim().toLowerCase() + ":allsearch");
                        break;
                    case 3:
                        classificationFastWayCommands.add(value.split(",")[0].trim()
                                + ":" + value.split(",")[1].trim().toLowerCase()
                                + ":" + value.split(",")[2].trim().toLowerCase());
                        break;
                    default:
                        break;
                }
                break;
            case "rcmd":
                switch (value.split(",").length) {
                    case 1:
                        regressionFastWayCommands.add(value.trim()
                                + ":allopt:allsearch");
                        break;
                    case 2:
                        regressionFastWayCommands.add(value.split(",")[0].trim()
                                + ":" + value.split(",")[1].trim().toLowerCase() + ":allsearch");
                        break;
                    case 3:
                        regressionFastWayCommands.add(value.split(",")[0].trim()
                                + ":" + value.split(",")[1].trim().toLowerCase()
                                + ":" + value.split(",")[2].trim().toLowerCase());
                        break;
                    default:
                        break;
                }
                break;
            case "coptimizers":
                classificationOptimizers = value.trim().toLowerCase();
                break;
            case "searchmodes":
                searchmodes = value.trim().toLowerCase();
                break;
            case "doRegression":
                doRegression = Boolean.valueOf(value.trim());
                break;
            case "regressionClassName":
                regressionClassName = value.trim();
                break;
            case "regressionFastWay":
                regressionFastWay = Boolean.valueOf(value.trim());
                break;

            case "roptimizers":
                regressionOptimizers = value.trim().toLowerCase();
                break;
            case "maxNumberOfSelectedFeatures":
                maxNumberOfSelectedFeatures = Integer.valueOf(value.trim());
                break;
            case "maxNumberOfFeaturesInModel":
                maxNumberOfFeaturesInModel = Integer.valueOf(value.trim());
                break;
            case "bootstrapFolds":
                bootstrapAndRepeatedHoldoutFolds = Integer.valueOf(value.trim());
                break;
            case "spearmanCorrelation_lower":
                spearmanCorrelation_lower = Double.valueOf(value.trim());
                break;
            case "spearmanCorrelation_upper":
                spearmanCorrelation_upper = Double.valueOf(value.trim());
                break;
            case "pearsonCorrelation_lower":
                pearsonCorrelation_lower = Double.valueOf(value.trim());
                break;
            case "pearsonCorrelation_upper":
                pearsonCorrelation_upper = Double.valueOf(value.trim());
                break;
            case "maxRankingScoreDifference":
                maxRankingScoreDifference = Double.valueOf(value.trim());
                break;
            case "retreiveCorrelatedGenesByRankingScore":
                retreiveCorrelatedGenesByRankingScore = Boolean.valueOf(value.trim());
                break;
            case "combineModels":
                combineModels = Boolean.valueOf(value.trim());
                break;
            case "retrieveCorrelatedGenes":
                retrieveCorrelatedGenes = Boolean.valueOf(value.trim());
                break;
            case "generateModelWithCorrelatedGenes":
                generateModelWithCorrelatedGenes = Boolean.valueOf(value.trim());
                break;
            case "combinationRule":
                combinationRule = value.trim().toUpperCase();
                break;
            case "sampling":
                doSampling = Boolean.valueOf(value.trim());
                break;
            case "roc_curves":
                ROCcurves = Boolean.valueOf(value.trim());
                break;
            case "loocv":
                loocv = Boolean.valueOf(value.trim());
                break;
            case "samplingFold":
                samplingFold = Integer.valueOf(value.trim());
                break;
            case "cpus":
                cpus = value.trim();
                break;
            case "computeBestModel":
                computeBestModel = Boolean.valueOf(value.trim());
                break;
            case "modelFile":
                modelFile = value.trim();
                break;
            case "printFailedModels":
                printFailedModels = Boolean.valueOf(value.trim());
                break;
            case "resumeTraining":
                resumeTraining = Boolean.valueOf(value.trim());
                break;
            case "upsetr":
                UpSetR = Boolean.valueOf(value.trim());
                break;
            case "repeatedHoldoutTrain":
                repeatedHoldout = Boolean.valueOf(value.trim());
                break;
            case "bootstrap":
                bootstrap = Boolean.valueOf(value.trim());
                break;
            case "restoreRun":
                restoreRun = Boolean.valueOf(value.trim());
                break;
            case "noFeatureSelection":
                noFeatureSelection = Boolean.valueOf(value.trim());
                break;
            case "previousRunPath":
                previousRunPath = value.trim();
                break;
            case "previousRunProjectName":
                previousRunProjectName = value.trim();
                break;
            case "performShortTest":
                performShortTest = Boolean.valueOf(value.trim());
                break;
            case "missingValueToReplace":
                missingValueToReplace = value.trim();
                break;
        }
    }

}