Commit bf70e31c authored by Ard Kastrati's avatar Ard Kastrati
Browse files

Added all results for pyramidal CNN

parent 74df1c7f
......@@ -48,7 +48,7 @@ config['split'] = False
config['cluster'] = clustering()
config['ensemble'] = 1 #number of models in the ensemble method
config['ensemble'] = 5 #number of models in the ensemble method
config['trainX_file'] = 'noweEEG.mat' if config['downsampled'] else 'all_EEGprocuesan.mat'
config['trainY_file'] = 'all_trialinfoprosan.mat'
......
......@@ -35,9 +35,11 @@ def run(trainX, trainY):
elif config['model'] == 'cnn':
classifier = Classifier_CNN(input_shape=config['cnn']['input_shape'])
elif config['model'] == 'pyramidal_cnn':
classifier = Classifier_PyramidalCNN(input_shape=config['cnn']['input_shape'])
classifier = Classifier_PyramidalCNN(input_shape=config['cnn']['input_shape'], epochs=50)
elif config['model'] == 'eegnet':
classifier = Classifier_EEGNet()
classifier = Classifier_EEGNet(dropoutRate = 0.5, kernLength = 250, F1 = 16,
D = 4, F2 = 256, norm_rate = 0.5, dropoutType = 'Dropout',
epochs = 50)
elif config['model'] == 'inception':
classifier = Classifier_INCEPTION(input_shape=config['inception']['input_shape'], use_residual=True,
kernel_size=64, nb_filters=16, depth=12, bottleneck_size=16, epochs=50)
......
This diff is collapsed.
INFO:root:Started the Logging
INFO:root:X training loaded.
INFO:root:(129, 500, 36223)
INFO:root:y training loaded.
INFO:root:(1, 36223)
INFO:root:Setting the shapes
INFO:root:(36223, 500, 129)
INFO:root:(36223, 1)
INFO:root:Started running pyramidal_cnn. If you want to run other methods please choose another model in the config.py file.
INFO:root:Parameters:
INFO:root:--------------- use residual : False
INFO:root:--------------- depth : 6
INFO:root:--------------- batch size : 64
INFO:root:--------------- kernel size : 16
INFO:root:--------------- nb filters : 16
INFO:root:--------------- preprocessing: False
INFO:root:Parameters:
INFO:root:--------------- use residual : False
INFO:root:--------------- depth : 6
INFO:root:--------------- batch size : 64
INFO:root:--------------- kernel size : 16
INFO:root:--------------- nb filters : 16
INFO:root:--------------- preprocessing: False
INFO:root:Parameters:
INFO:root:--------------- use residual : False
INFO:root:--------------- depth : 6
INFO:root:--------------- batch size : 64
INFO:root:--------------- kernel size : 16
INFO:root:--------------- nb filters : 16
INFO:root:--------------- preprocessing: False
INFO:root:Parameters:
INFO:root:--------------- use residual : False
INFO:root:--------------- depth : 6
INFO:root:--------------- batch size : 64
INFO:root:--------------- kernel size : 16
INFO:root:--------------- nb filters : 16
INFO:root:--------------- preprocessing: False
INFO:root:Parameters:
INFO:root:--------------- use residual : False
INFO:root:--------------- depth : 6
INFO:root:--------------- batch size : 64
INFO:root:--------------- kernel size : 16
INFO:root:--------------- nb filters : 16
INFO:root:--------------- preprocessing: False
INFO:root:**********
INFO:root:--- Runtime: 3552.163224220276 seconds ---
INFO:root:Finished Logging
best_model_train_loss,best_model_val_loss,best_model_train_acc,best_model_val_acc
0.014074278995394707,0.1619231104850769,0.9947891235351562,0.9387163561076605
loss,accuracy,val_loss,val_accuracy
0.5229793190956116,0.7286907434463501,0.4002792537212372,0.824568668046929
0.3748096525669098,0.8308026790618896,0.28798213601112366,0.895376121463078
0.25542691349983215,0.8919870257377625,0.24307061731815338,0.8989648033126294
0.19659088551998138,0.9171095490455627,0.21363912522792816,0.9247757073844031
0.16168683767318726,0.9324315190315247,0.21933799982070923,0.9180124223602485
0.13632336258888245,0.9446821808815002,0.22479282319545746,0.9137336093857833
0.11832697689533234,0.9528262615203857,0.19527417421340942,0.9214630779848171
0.09940213710069656,0.9597970843315125,0.26611241698265076,0.8829537612146308
0.0888088122010231,0.9652494788169861,0.3040655851364136,0.8623878536922015
0.07452460378408432,0.9708744287490845,0.19487033784389496,0.9318150448585231
0.06493431329727173,0.9750845432281494,0.16251970827579498,0.9363699102829538
0.0541088841855526,0.9787769913673401,0.23134759068489075,0.8956521739130435
0.04514310508966446,0.9825729727745056,0.1648266315460205,0.9327812284334024
0.03617515787482262,0.9872316718101501,0.22155921161174774,0.90904071773637
0.04141424223780632,0.9840568900108337,0.17723608016967773,0.9319530710835059
0.02809196524322033,0.9892331957817078,0.23127569258213043,0.9228433402346445
0.031985290348529816,0.988163411617279,0.2066156566143036,0.9146997929606625
0.022428086027503014,0.9916833639144897,0.240288645029068,0.9101449275362319
0.026490220800042152,0.9899923801422119,0.17816978693008423,0.9330572808833678
0.025913532823324203,0.9904755353927612,0.17431998252868652,0.9359558316080056
0.019009703770279884,0.9935468435287476,0.1768779158592224,0.9315389924085576
0.021951014176011086,0.9919593930244446,0.16628137230873108,0.937888198757764
0.02061690390110016,0.9925460815429688,0.24237284064292908,0.9086266390614217
0.019781801849603653,0.9930636882781982,0.1749597191810608,0.9323671497584541
0.017480894923210144,0.9937539100646973,0.21324694156646729,0.9206349206349206
0.01854133792221546,0.9926496148109436,0.19413962960243225,0.9244996549344375
0.016374070197343826,0.9939609169960022,0.17688243091106415,0.9318150448585231
0.012890681624412537,0.9956173896789551,0.18018275499343872,0.9272601794340924
0.017822811380028725,0.9933397769927979,0.1621977984905243,0.9354037267080745
0.01794067770242691,0.9945476055145264,0.17774489521980286,0.9337474120082816
0.014074278995394707,0.9947891235351562,0.1619231104850769,0.9387163561076605
0.018548591062426567,0.9939609169960022,0.1732415109872818,0.936231884057971
0.010394802317023277,0.9964110851287842,0.18912450969219208,0.9305728088336784
0.014689675532281399,0.9948581457138062,0.18225820362567902,0.9327812284334024
0.013839841820299625,0.994961678981781,0.21405284106731415,0.916632160110421
0.010851029306650162,0.995858907699585,0.1782805174589157,0.9342995169082126
0.012510291300714016,0.9961349964141846,0.18734367191791534,0.9268461007591442
0.01152802538126707,0.9957553744316101,0.18014012277126312,0.9336093857832988
0.005823355168104172,0.9982055425643921,0.18190234899520874,0.9342995169082126
0.01612170971930027,0.9942024946212769,0.22751621901988983,0.9302967563837129
0.013920621015131474,0.9955828785896301,0.178383469581604,0.9341614906832298
0.011387034319341183,0.9962385296821594,0.18261277675628662,0.93567977915804
0.01182890497148037,0.9957208633422852,0.1837872564792633,0.9352657004830918
0.009694692678749561,0.996514618396759,0.1846180260181427,0.9300207039337474
0.007035564631223679,0.9975498914718628,0.1772298812866211,0.9325051759834369
0.008658022619783878,0.9963075518608093,0.17883582413196564,0.9342995169082126
0.01129349134862423,0.99565190076828,0.18449130654335022,0.9302967563837129
0.009486435912549496,0.996721625328064,0.18653514981269836,0.9311249137336094
0.008878315798938274,0.9964455962181091,0.20953772962093353,0.9286404416839199
0.009336929768323898,0.996549129486084,0.2040371596813202,0.9225672877846791
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment