from config import config
from ensemble import run
from SimpleClassifiers.sklearnclassifier import try_sklearn_classifiers, cross_validate_kNN, cross_validate_RFC, \
    cross_validate_SVC
from utils.utils import select_best_model, comparison_plot_loss, comparison_plot_accuracy
from utils import IOHelper
from kerasTuner import tune
import logging
import time
import sys


class Tee(object):
    """Duplicates writes to several file-like objects (e.g. stdout and a log file)."""

    def __init__(self, *files):
        self.files = files

    def write(self, obj):
        for f in self.files:
            f.write(obj)
            f.flush()  # Flush so the output is visible immediately

    def flush(self):
        for f in self.files:
            f.flush()


def main():
    # Set up logging
    logging.basicConfig(filename=config['info_log'], level=logging.INFO)
    logging.info('Started the Logging')

    # Mirror stdout to a file so progress from verbose methods is also captured
    f = open(config['model_dir'] + '/console.out', 'w')
    original = sys.stdout
    sys.stdout = Tee(sys.stdout, f)

    # Start of the main program
    start_time = time.time()

    trainX, trainY = IOHelper.get_mat_data(config['data_dir'], verbose=True)
    print(trainX.shape)
    print(trainY.shape)

    if config['model'] == 'simple_classifier':
        try_sklearn_classifiers(trainX, trainY)
        # cross_validate_kNN(trainX, trainY)
        # cross_validate_SVC(trainX, trainY)
        cross_validate_RFC(trainX, trainY)
    else:
        # tune(trainX, trainY)
        run(trainX, trainY)
        # comparison_plot_accuracy()
        # comparison_plot_loss()

    logging.info("--- Runtime: %s seconds ---" % (time.time() - start_time))
    logging.info('Finished Logging')

    # Restore stdout and close the console log file
    sys.stdout = original
    f.close()


if __name__ == '__main__':
    main()