To receive notifications about scheduled maintenance, please subscribe to the mailing-list gitlab-operations@sympa.ethz.ch. You can subscribe to the mailing-list at https://sympa.ethz.ch

Commit 75060c3f authored by Lukas Wolf's avatar Lukas Wolf
Browse files

update config

parent 6f80708a
......@@ -40,15 +40,13 @@ config['root_dir'] = '.'
# Choose task and dataset
##################################################################
config['preprocessing'] = 'max' # options: min and max
config['preprocessing'] = 'min' # options: min and max
config['task'] = 'prosaccade-clf'
#config['task'] = 'gaze-reg'
#config['task'] = 'prosaccade-clf'
config['task'] = 'gaze-reg'
#config['task'] = 'angle-reg'
if config['task'] != 'prosaccade-clf':
#config['dataset'] = 'processing_speed_task'
config['dataset'] = 'calibration_task'
config['input_shape'] = (500,129)
##################################################################
# Choose framework
......@@ -61,15 +59,15 @@ config['framework'] = 'pytorch'
# Choose model
##################################################################
config['ensemble'] = 2 #number of models in the ensemble
config['ensemble'] = 5 #number of models in the ensemble
config['pretrained'] = False # We can use a model pretrained on processing speed task
# MODELS FOR BENCHMARK
#config['model'] = 'cnn'
config['model'] = 'cnn'
#config['model'] = 'inception'
config['model'] = 'eegnet'
#config['model'] = 'eegnet'
#config['model'] = 'xception'
#config['model'] = 'pyramidal_cnn'
#config['model'] = 'pyramidal_cnn'
# EXPERIMENTAL MODELS
#config['model'] = 'deepeye'
......@@ -84,10 +82,10 @@ with open('hyperparams.json', 'r') as file:
params = json.load(file)
config['learning_rate'] = params[config['model']][config['task']]['learning_rate']
config['regularization'] = params[config['model']][config['task']]['regularization']
config['epochs'] = 5
config['epochs'] = 50
config['batch_size'] = 64
config['early_stopping'] = False
config['patience'] = 20
config['patience'] = 10
##################################################################
# Choose between ensemble and kerasTuner
......@@ -110,6 +108,58 @@ config['downsampled'] = False
config['split'] = False
#config['cluster'] = clustering()
##################################################################
# Manage the model directory and output directory structure
##################################################################
# Create a unique output directory for this experiment.
timestamp = str(int(time.time()))
model_folder_name = timestamp
model_folder_name += "_tf" if config['framework']=='tensorflow' else '_pytorch'
model_folder_name += "_pretrained_" + config['model'] if config['pretrained'] else "_" + config['model']
# Modify the model folder name depending on which task runs
model_folder_name += "_" + config['task']
model_folder_name += "_prep" + config['preprocessing']
if config['split']:
model_folder_name += '_cluster'
if config['downsampled']:
model_folder_name += '_downsampled'
if config['ensemble']>1:
model_folder_name += '_ensemble'
config['model_dir'] = os.path.abspath(os.path.join(config['log_dir'], model_folder_name))
if not os.path.exists(config['model_dir']):
os.makedirs(config['model_dir'])
config['info_log'] = config['model_dir'] + '/' + 'info.log'
config['batches_log'] = config['model_dir'] + '/' + 'batches.log'
# Create a directory to store logs for tensorboard
if config['tensorboard_on']:
config['tensorboard_log_dir'] = config['model_dir'] + "/logs/fit/" + timestamp
if not os.path.exists(config['tensorboard_log_dir']):
os.makedirs(config['tensorboard_log_dir'])
if not os.path.exists(config['model_dir'] + "/best_models/"):
os.makedirs(config['model_dir'] + "/best_models/")
if not os.path.exists(config['model_dir'] + "/plots/"):
os.makedirs(config['model_dir'] + "/plots/")
if not os.path.exists(config['model_dir'] + "/metrics/"):
os.makedirs(config['model_dir'] + "/metrics/")
# Save config to model dir
import pickle
config_path = config['model_dir'] + "/config.p"
pickle.dump(config, open(config_path, "wb"))
"""
We don't use these settings anymore since we have prepared our datasets
##################################################################
# Parameter for the padding
##################################################################
......@@ -136,6 +186,7 @@ elif config['task'] == 'angle-reg':
##################################################################
# Define parameter and input_shape for each model
##################################################################
# CNN
config['cnn'] = {}
# PyrCNN
......@@ -205,45 +256,4 @@ else:
config['eegnet']['channels'] = 129
config['eegnet']['samples'] = 125 if config['downsampled'] else 500
# Create a unique output directory for this experiment.
timestamp = str(int(time.time()))
model_folder_name = timestamp
model_folder_name += "_tf" if config['framework']=='tensorflow' else '_pytorch'
model_folder_name += "_pretrained_" + config['model'] if config['pretrained'] else "_" + config['model']
# Modify the model folder name depending on which task runs
model_folder_name += "_" + config['task']
model_folder_name += "_prep" + config['preprocessing']
if config['split']:
model_folder_name += '_cluster'
if config['downsampled']:
model_folder_name += '_downsampled'
if config['ensemble']>1:
model_folder_name += '_ensemble'
config['model_dir'] = os.path.abspath(os.path.join(config['log_dir'], model_folder_name))
if not os.path.exists(config['model_dir']):
os.makedirs(config['model_dir'])
config['info_log'] = config['model_dir'] + '/' + 'info.log'
config['batches_log'] = config['model_dir'] + '/' + 'batches.log'
# Create a directory to store logs for tensorboard
if config['tensorboard_on']:
config['tensorboard_log_dir'] = config['model_dir'] + "/logs/fit/" + timestamp
if not os.path.exists(config['tensorboard_log_dir']):
os.makedirs(config['tensorboard_log_dir'])
if not os.path.exists(config['model_dir'] + "/best_models/"):
os.makedirs(config['model_dir'] + "/best_models/")
if not os.path.exists(config['model_dir'] + "/plots/"):
os.makedirs(config['model_dir'] + "/plots/")
if not os.path.exists(config['model_dir'] + "/metrics/"):
os.makedirs(config['model_dir'] + "/metrics/")
# Save config to model dir
import pickle
config_path = config['model_dir'] + "/config.p"
pickle.dump(config, open(config_path, "wb"))
\ No newline at end of file
"""
......@@ -59,7 +59,7 @@ class Ensemble_torch:
# Log shapes
logging.info(f"Training data shapes X, y: {X_train.shape, y_train.shape}")
logging.info(f"Test data shapes X, y: {X_test.shape, y_test.shape}")
logging.info(f"Validation data shapes: {X_val.shape, y_val.shape}")
logging.info(f"Validation data shapes X, y: {X_val.shape, y_val.shape}")
# Create dataloaders
train_dataloader = create_dataloader(X_train, y_train, config['batch_size'], 'train')
validation_dataloader = create_dataloader(X_val, y_val, config['batch_size'], 'val')
......@@ -106,20 +106,20 @@ def create_model(model_type, model_number):
Returns the specified torch model as nn.Module built on BaseNet
"""
if model_type == 'cnn':
model = CNN(input_shape=config['cnn']['input_shape'], kernel_size=64, epochs = config['epochs'], nb_filters=16, batch_size=config['batch_size'],
model = CNN(input_shape=config['input_shape'], kernel_size=64, epochs = config['epochs'], nb_filters=16, batch_size=config['batch_size'],
verbose=True, use_residual=True, depth=12, model_number=model_number)
elif model_type == 'inception':
model = Inception(input_shape=config['inception']['input_shape'], use_residual=True, model_number=model_number, batch_size=config['batch_size'],
model = Inception(input_shape=config['input_shape'], use_residual=True, model_number=model_number, batch_size=config['batch_size'],
kernel_size=64, nb_filters=16, depth=12, bottleneck_size=16, epochs=config['epochs'])
elif model_type == 'xception':
model = XCEPTION(input_shape=config['inception']['input_shape'], use_residual=True, model_number=model_number,
model = XCEPTION(input_shape=config['input_shape'], use_residual=True, model_number=model_number,
kernel_size=40, nb_filters=64, depth=18, epochs=config['epochs'], batch_size=config['batch_size'])
elif model_type == 'eegnet':
model = EEGNet(input_shape=(config['eegnet']['samples'], config['eegnet']['channels']), batch_size=config['batch_size'],
model = EEGNet(input_shape=(config['input_shape'], config['eegnet']['channels']), batch_size=config['batch_size'],
model_number=model_number, epochs=config['epochs'])
elif model_type == 'pyramidal_cnn':
model = PyramidalCNN(input_shape=config['cnn']['input_shape'], epochs=config['epochs'],
model = PyramidalCNN(input_shape=config['input_shape'], epochs=config['epochs'],
model_number=model_number, batch_size=config['batch_size'])
# elif model_type == 'gazenet':
# model = gazeNET(input_shape=config['gazenet']['input_shape'], seed=42, batch_size=config['batch_size'])
# model = gazeNET(input_shape=config['input_shape'], seed=42, batch_size=config['batch_size'])
return model
\ No newline at end of file
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment