# deepEye3.py
import tensorflow as tf
import numpy as np
import tensorflow.keras as keras
from tensorflow.keras.callbacks import CSVLogger
from config import config
from utils.utils import *
import logging


def run(trainX, trainY):
    """Build the DeepEye3 classifier, fit it on the training data, and save the loss/accuracy plots, logs and model parameters."""
    logging.info("Starting DeepEye3.")
    classifier = Classifier_DEEPEYE3(input_shape=config['deepeye3']['input_shape'])
    hist = classifier.fit(trainX, trainY)
    plot_loss(hist, config['model_dir'], config['model'], True)
    plot_acc(hist, config['model_dir'], config['model'], True)
    save_logs(hist, config['model_dir'], config['model'], pytorch=False)
    save_model_param(classifier.model, config['model_dir'], config['model'], pytorch=False)

class Classifier_DEEPEYE3:
    """Inception-style 1D CNN classifier with residual shortcuts and a sigmoid output for binary classification."""

    def __init__(self, input_shape, verbose=True, build=True, batch_size=64, nb_filters=32,
                 use_residual=True, use_bottleneck=True, depth=6, kernel_size=40, nb_epochs=1500):

        self.nb_filters = nb_filters
        self.use_residual = use_residual
        self.use_bottleneck = use_bottleneck
        self.depth = depth
        self.kernel_size = kernel_size
        self.callbacks = None
        self.batch_size = batch_size
        self.bottleneck_size = 32
        self.nb_epochs = nb_epochs
        self.verbose = verbose

        if build:
            self.model = self._build_model(input_shape)
            if self.verbose:
                self.model.summary()

    def _inception_module(self, input_tensor, nb_filters=32, use_bottleneck=True, kernel_size=40, bottleneck_size=32,
                          stride=1, activation='linear'):

        # Optional 1x1 bottleneck to reduce the channel dimension before the parallel convolutions
        if use_bottleneck and int(input_tensor.shape[-1]) > 1:
            input_inception = tf.keras.layers.Conv1D(filters=bottleneck_size, kernel_size=1, padding='same', activation=activation, use_bias=False)(input_tensor)
        else:
            input_inception = input_tensor

        # Parallel convolutions over the bottlenecked input with decreasing kernel sizes, e.g. [40, 20] for kernel_size=40
        kernel_size_s = [kernel_size // (2 ** i) for i in range(2)]
        conv_list = []

        for i in range(len(kernel_size_s)):
            conv_list.append(
                tf.keras.layers.Conv1D(filters=nb_filters, kernel_size=kernel_size_s[i], strides=stride, padding='same', activation=activation, use_bias=False)(input_inception))

        # Max-pooling branch followed by a 1x1 convolution
        max_pool_1 = tf.keras.layers.MaxPool1D(pool_size=10, strides=stride, padding='same')(input_tensor)
        conv_6 = tf.keras.layers.Conv1D(filters=nb_filters, kernel_size=1, padding='same', activation=activation, use_bias=False)(max_pool_1)

        # Second max-pooling branch with a wider kernel and fewer filters (integer division keeps the filter count an int)
        max_pool_2 = tf.keras.layers.MaxPool1D(pool_size=10, strides=stride, padding='same')(input_tensor)
        conv_7 = tf.keras.layers.Conv1D(filters=nb_filters // 8, kernel_size=16, padding='same', activation=activation, use_bias=False)(max_pool_2)

        # Depthwise-separable convolution branch applied directly to the input
        conv_8 = tf.keras.layers.SeparableConv1D(filters=nb_filters, kernel_size=32, padding='same', activation=activation, use_bias=False, depth_multiplier=1)(input_tensor)
        conv_list.append(conv_6)
        conv_list.append(conv_7)
        conv_list.append(conv_8)

        # Concatenate all branches along the channel axis, then batch-normalise and apply ReLU
        x = tf.keras.layers.Concatenate(axis=2)(conv_list)
        x = tf.keras.layers.BatchNormalization()(x)
        x = tf.keras.layers.Activation(activation='relu')(x)
        return x

    def _shortcut_layer(self, input_tensor, out_tensor):
        # Project the residual input to the output's channel count with a 1x1 convolution before adding
        shortcut_y = tf.keras.layers.Conv1D(filters=int(out_tensor.shape[-1]), kernel_size=1, padding='same', use_bias=False)(input_tensor)
        shortcut_y = tf.keras.layers.BatchNormalization()(shortcut_y)
        x = keras.layers.Add()([shortcut_y, out_tensor])
        x = keras.layers.Activation('relu')(x)
        return x

    def _build_model(self, input_shape, use_residual=True, depth=9):
        input_layer = tf.keras.layers.Input(input_shape)

        x = input_layer
        input_res = input_layer

        # Stack inception modules, adding a residual shortcut after every third one.
        # Note: this method uses its own depth/use_residual defaults rather than the values stored in __init__.
        for d in range(depth):
            x = self._inception_module(x)

            if use_residual and d % 3 == 2:
                x = self._shortcut_layer(input_res, x)
                input_res = x

        gap_layer = tf.keras.layers.GlobalAveragePooling1D()(x)
        output_layer = tf.keras.layers.Dense(1, activation='sigmoid')(gap_layer)
        model = tf.keras.models.Model(inputs=input_layer, outputs=output_layer)
        model.compile(loss='binary_crossentropy', optimizer=keras.optimizers.Adam(), metrics=['accuracy'])
        return model

    def fit(self, inception_x, y):
        # Log per-epoch metrics to the configured CSV file; hold out 20% of the data for validation
        csv_logger = CSVLogger(config['batches_log'], append=True, separator=';')
        hist = self.model.fit(inception_x, y, verbose=1, validation_split=0.2, epochs=self.nb_epochs,
                              batch_size=self.batch_size, callbacks=[csv_logger])
        return hist
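

# Minimal smoke-test sketch (illustrative addition, not part of the original module).
# Assumes config['deepeye3']['input_shape'] is (timesteps, channels) and that
# config['batches_log'] points to a writable CSV path; the data below is random.
if __name__ == '__main__':
    timesteps, channels = config['deepeye3']['input_shape']
    dummy_x = np.random.rand(16, timesteps, channels).astype(np.float32)
    dummy_y = np.random.randint(0, 2, size=(16,)).astype(np.float32)
    clf = Classifier_DEEPEYE3(input_shape=(timesteps, channels), nb_epochs=2, verbose=False)
    hist = clf.fit(dummy_x, dummy_y)
    print(hist.history['loss'])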