from ..global_config import get_setting
from ..core.graph import StuettNode

import dask
import numpy as np
import xarray as xr
import scipy.signal
import pandas as pd
import lttb


class MinMaxDownsampling(StuettNode):
    def __init__(self, rate=1, dim=None):
        # dim is by default the last dimension
        # since we always choose two values (min and max) per bucket, the
        # internal downsampling rate must be a factor of two larger than
        # the effective (and desired) downsampling rate
        # (e.g. rate=2 keeps one min and one max out of every four samples)
        self.rate = rate * 2
        self.dim = dim

    def forward(self, data=None, request=None):
        dim = self.dim
        if dim is None:
            dim = data.dims[-1]

        # rolling = data.rolling({dim:self.rate})
        # x_min = rolling.construct("buckets", stride=self.rate).min("time").rename({"buckets":"time"}).dropna('time')
        # x_max = rolling.construct("buckets", stride=self.rate).max("time").rename({"buckets":"time"}).dropna('time')

        rolling = data.rolling({dim: self.rate}, stride=self.rate)
        x_min = rolling.min().dropna(dim)
        x_max = rolling.max().dropna(dim)

        x_ds = xr.concat(
            [x_min, x_max], dim
        )  # TODO: better interleave instead of concat
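        # A possible interleaving (untested sketch, assumes 1-D data and that
        # the min/max buckets come back aligned and in order):
        #   interleaved = np.empty(x_min.sizes[dim] + x_max.sizes[dim])
        #   interleaved[0::2] = x_min.values
        #   interleaved[1::2] = x_max.values
        # the dim coordinate would need the same interleaving, after which the
        # sortby below becomes unnecessary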

        x_ds = x_ds.sortby(dim)  # TODO: try to avoid this by using interleaving

        return x_ds
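
# Usage sketch (illustrative only, not executed by this module): with rate=4,
# buckets of 8 samples each contribute one min and one max, so an 800-sample
# hourly series reduces to roughly 200 values (edge buckets may be dropped by
# dropna). The input data below is made up.
#
#   da = xr.DataArray(
#       np.random.rand(800),
#       dims=["time"],
#       coords={"time": pd.date_range("2017-01-01", periods=800, freq="H")},
#   )
#   downsampled = MinMaxDownsampling(rate=4).forward(data=da)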


class LTTBDownsampling(StuettNode):
    def __init__(self, rate=1, dim=None):
        # Based on and many thanks to https://github.com/javiljoen/lttb.py
        self.rate = rate
        self.dim = dim

    def forward(self, data=None, request=None):
        dim = self.dim
        if dim is None:
            dim = data.dims[-1]

        n_out = data.sizes[dim] // self.rate

        """ The following tries to re-implement the LTTB for xarray
            There are several issues:
            1. We cannot use the LTTB package
               it expects the index and the data in one numpy array. Since our index is
               usually in a xarray coordinate, we would need to disassemble the xarray
               and then reassemble it. (As done in the current implementation).
               We would potentially loose some information from the original datastructure
               and we would need to assume certain datatypes for certain dimensions.
            2. For multi-dimensional arrays, we run into issues since since the dimension
               coordinate we want to reduce on applies to multiple axes and on each axis
               LTTB chooses a different pair of (index, value). Solution would be to choose
               the label for each window statically but choose the value depending on LTTB.
        """
        # rolling = data.rolling({dim: self.rate},stride=self.rate)
        # print(rolling)
        # # print(data.sizes['time']//self.rate)

        # time_bins = data[dim].rolling({dim: self.rate},stride=self.rate).construct("buckets")
        # print('time',time_bins)

        # data_bins = rolling.construct("buckets")
        # print(data_bins)

        # # return
        # # rolling_dim = utils.get_temp_dimname(self.obj.dims, "_rolling_dim")
        # # windows = self.construct(rolling_dim, stride=self.stride)
        # # result = windows.reduce(func, dim=rolling_dim, **kwargs)
        # # # Find valid windows based on count.
        # # counts = self._counts()
        # # return result.where(counts >= self._min_periods)
        # # mean = rolling.mean()

        # mean = data_bins.mean("buckets")
        # mean_time = time_bins.mean("buckets")
        # print(mean_time)
        # out = []
        # prev = None
        # for i in range(data_bins.sizes[dim]-1):
        #     print(i)
        #     item = data_bins.isel({dim:i})
        #     time = time_bins.isel({dim:i})
        #     if i == 0:
        #         prev_data = item.isel(buckets=-1)
        #         prev_time = time.isel(buckets=-1)
        #         out.append(prev_data)
        #         continue

        #     a = prev_data
        #     a_time = prev_time
        #     bs = item
        #     bs_time = time
        #     if i < data_bins.sizes[dim]-1:
        #         c = mean.isel({dim:i+1})
        #         c_time = mean_time.isel({dim:i+1})
        #     else:
        #         c = data_bins.isel({dim:-1})
        #         c_time = time_bins.isel({dim:-1})

        #     # calculate areas of triangle
        #     bs_minus_a = bs - a
        #     a_minus_bs = a - bs

        #     areas = 0.5 * abs((a[0] - c[0]) * (bs_minus_a[:, 1])
        #                 - (a_minus_bs[:, 0]) * (c[1] - a[1]))

        # x_min = rolling.min().dropna("time")

        # This is quite hacky and works only if the selected dimension is a datetime
        if len(data.squeeze().dims) > 1:
            raise RuntimeError("Can only work with arrays of one dimension")

        d = np.array([data[dim].values.astype(np.int64), data.squeeze().values]).T
        small_data = lttb.downsample(d, n_out=n_out)

        array = xr.DataArray(
            small_data[:, 1],
            dims=[dim],
            coords={dim: (dim, pd.to_datetime(small_data[:, 0]))},
        )

        array.attrs = data.attrs
        return array
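
# Usage sketch (illustrative only): LTTB keeps n_out = len(data) // rate points,
# choosing per bucket the point that forms the largest triangle with its
# neighbours, which preserves visual extremes better than plain averaging.
#
#   da = xr.DataArray(
#       np.sin(np.linspace(0, 20, 1000)),
#       dims=["time"],
#       coords={"time": pd.date_range("2017-01-01", periods=1000, freq="S")},
#   )
#   downsampled = LTTBDownsampling(rate=10).forward(data=da)  # ~100 points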


class Downsampling(StuettNode):
    def __init__(self):
        raise NotImplementedError()
        # TODO: high level downsampling node which uses one of the other downsampling
        #       classes depending on the user request
        pass


class Spectrogram(StuettNode):
    def __init__(
        self,
        nfft=2048,
        stride=1024,
        dim=None,
        sampling_rate=None,
        window=("tukey", 0.25),
        detrend="constant",
        return_onesided=True,
        scaling="density",
        mode="psd",
    ):
        super().__init__(
            nfft=nfft,
            stride=stride,
            dim=dim,
            sampling_rate=sampling_rate,
            window=window,
            detrend=detrend,
            return_onesided=return_onesided,
            scaling=scaling,
            mode=mode,
        )

    def forward(self, data=None, request=None):
        config = self.config.copy()  # TODO: do we need a deep copy?
        if request is not None:
            config.update(request)

        if config["dim"] is None:
            config["dim"] = data.dims[-1]

        axis = data.get_axis_num(config["dim"])

        if config["sampling_rate"] is None:
            if "sampling_rate" not in data.attrs:
                raise RuntimeError(
                    "Please provide a sampling_rate attribute "
                    "to your config or your input data"
                )
            else:
                config["sampling_rate"] = data.attrs["sampling_rate"]

        samples = data.values
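        # scipy expresses the hop between consecutive windows via their overlap:
        # a stride of `stride` samples between `nfft`-sample windows leaves
        # nfft - stride overlapping samples (the defaults nfft=2048, stride=1024
        # give 50% overlap)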
        noverlap = config["nfft"] - config["stride"]

        freqs, spectrum_time, spectrum = scipy.signal.spectrogram(
            samples,
            nfft=config["nfft"],
            nperseg=config["nfft"],
            noverlap=noverlap,
            fs=config["sampling_rate"],
            axis=axis,
            detrend=config["detrend"],
            scaling=config["scaling"],
            return_onesided=config["return_onesided"],
            mode=config["mode"],
            window=config["window"],
        )

        # TODO: check if this is what we want. Currently: the earliest timestamp
        #       of the input plus the segment time offsets computed by scipy
        ds_coords = pd.to_datetime(
            pd.to_datetime(data[config["dim"]][0].values)
            + pd.to_timedelta(spectrum_time, "s")
        ).tz_localize(
            None
        )  # TODO: change when xarray #3291 is fixed

        # Create a new DataArray for the spectrogram
        dims = (
            data.dims[:axis] + ("frequency",) + data.dims[axis + 1 :] + (config["dim"],)
        )
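        # note: scipy replaces the reduced axis with frequencies and appends
        # the segment-time axis last, hence config["dim"] moves to the end
        # (e.g. 1-D input with dims ("time",) yields ("frequency", "time"))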
        coords = dict(data.coords)
        coords.update({"frequency": ("frequency", freqs), config["dim"]: ds_coords})
        dataarray = xr.DataArray(spectrum, dims=dims, coords=coords)

        return dataarray
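
# Usage sketch (illustrative only; passing sampling_rate via the request dict
# relies on the config.update(request) above, and it could equally come from a
# sampling_rate attribute on the input data):
#
#   da = xr.DataArray(
#       np.random.rand(16384),
#       dims=["time"],
#       coords={"time": pd.date_range("2017-01-01", periods=16384, freq="ms")},
#   )
#   spec = Spectrogram(nfft=2048, stride=1024).forward(
#       data=da, request={"sampling_rate": 1000}
#   )
#   # spec.dims == ("frequency", "time"), with nfft // 2 + 1 = 1025 frequencies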