Commit af594851 authored by Lukas Wolf

added xception in torch

parent 7b735dae
import torch.nn as nn

from torch_models.ConvNetTorch import ConvNet
from torch_models.Modules import TCSConv1d

class XCEPTION(ConvNet):
"""
The Xception architecture. This is inspired by Xception paper, which describes how 'extreme' convolutions can be represented
as separable convolutions and can achieve better accuracy than the Inception architecture. It is made of modules in a specific depth.
Each module, in our implementation, consists of a separable convolution followed by batch normalization and a ReLu activation layer.
"""
    def __init__(self, input_shape, kernel_size=40, nb_filters=128, verbose=True, epochs=1, batch_size=64,
                 use_residual=True, depth=6, model_number=0, regularization=0):
        self.regularization = regularization
        self.nb_features = nb_filters  # Xception passes a tensor of shape (timesamples, nb_filters) through the network
        super(XCEPTION, self).__init__(input_shape, kernel_size=kernel_size, nb_filters=nb_filters,
                                       verbose=verbose, epochs=epochs, batch_size=batch_size,
                                       use_residual=use_residual, depth=depth,
                                       model_number=model_number, preprocessing=False)
    def _module(self, depth):
        """
        The module of Xception: a separable convolution followed by batch normalization and a ReLU
        activation function.
        """
        return nn.Sequential(
            TCSConv1d(mother=self, depth=depth),
            nn.BatchNorm1d(num_features=self.nb_features),
            nn.ReLU()
        )
"""
Tensorflow code:
x = tf.keras.layers.SeparableConv1D(filters=self.nb_filters, kernel_size=self.kernel_size, padding='same',
use_bias=False, depth_multiplier=1)(input_tensor)
x = tf.keras.layers.BatchNormalization()(x)
x = tf.keras.layers.Activation(activation='relu')(x)
"""