
Commit 2d528049 authored by matthmey

merge fork

parents 43a909bd 9c624c2d
pyproject.toml
@@ -27,7 +27,7 @@ toolz = "^0.10.0"
 obspy = "^1.1.1"
 numpy = "1.16.5"
 appdirs = "^1.4.3"
-obsplus = {git = "https://github.com/niosh-mining/obsplus" }
+obsplus = { git = "https://github.com/niosh-mining/obsplus" }
 zarr = "^2.3.2"
 xarray = { git = "https://github.com/niowniow/xarray.git", branch = "strided_rolling" }
 pillow = "^6.2.1"
@@ -36,6 +36,7 @@ lttb = "^0.2.0"
 pyarrow = "^0.15.1"
 torch = "^1.3.1"
 torchvision = "^0.4.2"
+tqdm = "^4.39.0"

 # Optional dependencies (extras)
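Two of these dependencies are pulled straight from GitHub: obsplus from its upstream repository and an xarray fork pinned to the strided_rolling branch, while tqdm is newly added. As a hedged sanity check (not part of the commit), after a fresh poetry install the git-sourced and newly added packages should simply import and report their versions:

# Hedged sanity-check sketch, not part of the diff: confirm that the
# git-sourced dependencies and the newly added tqdm resolved correctly.
import obsplus
import tqdm
import xarray

print(obsplus.__version__, tqdm.__version__, xarray.__version__)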
@@ -64,4 +64,10 @@ def read_csv_with_store(store, filename, pandas_kwargs=None):
     bytes_buffer = io.BytesIO(store[str(filename)])
     StreamReader = codecs.getreader("utf-8")
     string_buffer = StreamReader(bytes_buffer)
-    return pd.read_csv(string_buffer,**pandas_kwargs)
+    return pd.read_csv(string_buffer)
+
+
+def indexers_to_request(indexers):
+    request = {"start_" + k: v.start for k, v in indexers.items()}
+    request.update({"end_" + k: v.stop for k, v in indexers.items()})
+    return request
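For context, here is a hedged usage sketch of the two helpers in this hunk: read_csv_with_store decodes a CSV stored as raw bytes in a dict-like store, and indexers_to_request flattens slice-based indexers into start_*/end_* keys. The store contents and the file name below are invented for illustration.

import datetime as dt

# Illustration only: a plain dict stands in for the byte store, and the
# CSV path is made up; read_csv_with_store and indexers_to_request are
# the functions defined in the module above.
store = {"timeseries/temperature.csv": b"time,temperature\n2017-08-06T09:52:12,3.1\n"}
df = read_csv_with_store(store, "timeseries/temperature.csv")

# Slice-based indexers become a flat request dict with start_/end_ keys.
indexers = {
    "time": slice(
        dt.datetime(2017, 8, 6, 9, 52, tzinfo=dt.timezone.utc),
        dt.datetime(2017, 8, 6, 10, 0, tzinfo=dt.timezone.utc),
    )
}
request = indexers_to_request(indexers)
# request == {"start_time": <slice start>, "end_time": <slice stop>}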
@@ -194,8 +194,6 @@ def test_mhdslrimage():
     config["output_format"] = "base64"
     data = node(config)
-
-    # TODO: assert data
     from PIL import Image

     img = Image.open(base_dir.joinpath("2017-08-06", "20170806_095212.JPG"))
@@ -203,6 +201,18 @@ def test_mhdslrimage():
     assert data[0].values == img_base64

+    # Check a period where there is no image
+    start_time = dt.datetime(2017, 8, 6, 9, 55, 12, tzinfo=dt.timezone.utc)
+    end_time = dt.datetime(2017, 8, 6, 10, 10, 10, tzinfo=dt.timezone.utc)
+    config = {
+        "start_time": start_time,
+        "end_time": end_time,
+    }
+    data = node(config)
+    # print(data)
+    assert data.shape == (0, 0, 0, 0)
+

 # test_mhdslrimage()
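The new block in test_mhdslrimage asserts that querying a window containing no images yields a zero-sized array rather than raising. Below is a hedged sketch of how calling code might rely on the same contract; node stands for the image source node built earlier in the test, which is not shown in this hunk.

import datetime as dt

# Hypothetical caller-side guard, assuming the same zero-sized-array
# contract that the test above asserts. `node` is the image source node
# configured earlier in the test (not shown here).
config = {
    "start_time": dt.datetime(2017, 8, 6, 9, 55, 12, tzinfo=dt.timezone.utc),
    "end_time": dt.datetime(2017, 8, 6, 10, 10, 10, tzinfo=dt.timezone.utc),
}
data = node(config)
if 0 in data.shape:
    # No images fall inside the requested window; skip downstream processing.
    pass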
@@ -230,7 +240,7 @@ def test_csv():
     # TODO: test with start and end time

-test_csv()
+# test_csv()


 def test_annotations():
@@ -272,7 +282,7 @@ def test_datasets():
         batch_dims={"time": pd.to_timedelta(24, "m")},
     )

-    x = dataset[0]
+    # x = dataset[0]


 test_datasets()