@@ -10,14 +10,21 @@ from torch_models.torch_utils.utils import get_gpu_memory
classEEGNet(BaseNet):
"""
The EEGNet architecture used as baseline. This is the architecture explained in the paper
'EEGNet: A Compact Convolutional Network for EEG-based Brain-Computer Interfaces' with authors
Vernon J. Lawhern, Amelia J. Solon, Nicholas R. Waytowich, Stephen M. Gordon, Chou P. Hung, Brent J. Lance
In our implementation it is built on BaseNet and can therefore use the same interface for training as models based on ConvNet.
We only define the layers we need, the forward pass, and a method that returns the number of hidden units before the output layer, which is accessed by BaseNet to create the same output layer as for ConvNet models.
# Remove the subject counter from the labels and keep only the label column(s)
# relevant to the current task. Row layout of the direction data:
# [subjectid, amplitude, angle]; other datasets use [subjectid, label].
if config['task'] == 'amplitude-reg':
    # Keep only the amplitude column (index 1) — drop subjectid and angle.
    y_train = y_train[:, 1:2]
    y_val = y_val[:, 1:2]
    y_test = y_test[:, 1:2]
else:
    # angle-reg: the angle sits at index 2 of [subjectid, amplitude, angle];
    # every other task stores its label(s) from index 1 on ([subjectid, label]).
    offset = 2 if config['task'] == 'angle-reg' else 1
    y_train = y_train[:, offset:]
    y_val = y_val[:, offset:]
    y_test = y_test[:, offset:]
# Log shapes so a bad slice is visible immediately in the run log
logging.info(f"Training data shapes X, y: {X_train.shape, y_train.shape}")
logging.info(f"Test data shapes X, y: {X_test.shape, y_test.shape}")
...
...
@@ -76,37 +78,22 @@ class Ensemble_torch:
# Metrics to save across the ensemble
loss, accuracy = [], []
# saves the predicted batches in the test dataloader across models
prediction_list = []