"""
Load and save dataset example
=============================
In this example, we show how to load and save braindecode datasets.
"""
# Authors: Lukas Gemein <l.gemein@gmail.com>
#
# License: BSD (3-clause)
import tempfile

from braindecode.datasets import MOABBDataset
from braindecode.datautil import load_concat_dataset
from braindecode.preprocessing import (
    Preprocessor,
    create_windows_from_events,
    preprocess,
)
###############################################################################
# First, we load a dataset using MOABB.
dataset = MOABBDataset(
    dataset_name="BNCI2014001",
    subject_ids=[1],
)
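###############################################################################
# As a quick sanity check (an addition to the original example), we can look
# at the dataset's description, a pandas DataFrame with one row per recording:
dataset.description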
###############################################################################
# We can apply preprocessing steps to the dataset. It is also possible to skip
# this step and not apply any preprocessing.
preprocess(
    concat_ds=dataset,
    preprocessors=[Preprocessor(fn="resample", sfreq=10)],
)
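###############################################################################
# Beyond resampling, several steps can be chained in a single ``preprocess``
# call. Below is a minimal sketch: 'pick_types' and 'filter' are standard MNE
# ``Raw`` methods applied via ``Preprocessor``, but the parameters chosen here
# are illustrative assumptions, not part of the original example. The
# commented line would apply them in place.
extra_preprocessors = [
    Preprocessor(fn="pick_types", eeg=True, meg=False, stim=False),
    Preprocessor(fn="filter", l_freq=None, h_freq=4.0),  # low-pass at 4 Hz (assumed)
]
# preprocess(concat_ds=dataset, preprocessors=extra_preprocessors)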
###############################################################################
# We save the dataset to an existing directory. This creates a '.fif' file
# for every dataset in the concat dataset. Additionally, it creates two
# JSON files: the first holds the description of the dataset, the second
# holds the name of the target. If you want to save to the same directory
# several times, for example when trying different preprocessing, you can
# choose to overwrite the existing files.
tmpdir = tempfile.mkdtemp()  # write in a temporary directory
dataset.save(
    path=tmpdir,
    overwrite=False,
)
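###############################################################################
# To see what was written, we can list the directory contents. The exact
# layout and file names depend on the braindecode version, so treat this as
# a sanity check rather than a specification.
import os

sorted(os.listdir(tmpdir))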
##############################################################################
# We load the saved dataset from a directory. Signals can be preloaded into
# memory, as with MNE. Optionally, only specific '.fif' files can be loaded
# by specifying their ids. The target name can be changed if the dataset
# supports it (TUHAbnormal, for example, supports 'pathological', 'age', and
# 'gender'; if you stored a preprocessed version with target 'pathological',
# it is possible to change the target upon loading).
dataset_loaded = load_concat_dataset(
    path=tmpdir,
    preload=True,
    ids_to_load=[1, 3],
    target_name=None,
)
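###############################################################################
# A quick look at the description of the reloaded subset confirms which
# datasets were actually loaded (an addition to the original example):
dataset_loaded.description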
##############################################################################
# The serialization utility also supports WindowsDatasets, so we create
# compute windows next.
windows_dataset = create_windows_from_events(
    concat_ds=dataset_loaded,
    trial_start_offset_samples=0,
    trial_stop_offset_samples=0,
)
windows_dataset.description
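###############################################################################
# Individual windows can be accessed by integer index. In current braindecode
# versions a windows dataset item unpacks into the signal, the target, and the
# window indices; if your version differs, treat this unpacking as an
# assumption.
X, y, window_ind = windows_dataset[0]
X.shape, y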
##############################################################################
# Again, we save the dataset to an existing directory. This creates a
# '-epo.fif' file for every dataset in the concat dataset. Additionally, it
# creates a JSON file holding the description of the dataset. If you want
# to save to the same directory several times, for example when trying
# different windowing parameters, you can choose to overwrite the existing
# files.
windows_dataset.save(
    path=tmpdir,
    overwrite=True,
)
##############################################################################
# We load the saved dataset from a directory. Signals can be preloaded into
# memory, as with MNE. Optionally, only specific '-epo.fif' files can be
# loaded by specifying their ids.
windows_dataset_loaded = load_concat_dataset(
    path=tmpdir,
    preload=False,
    ids_to_load=[0],
    target_name=None,
)
windows_dataset_loaded.description
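###############################################################################
# As a final check (an addition to the original example), the reloaded object
# is again a concat dataset, and its length is the total number of windows:
len(windows_dataset_loaded)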