Commit 0cb254a2 authored by Remy Moll

Last data-point. Sorry for the delay

parent 11f2e86f
*.npy
data/data_converted/
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
......
......
@@ -3,7 +3,7 @@ import matplotlib.pyplot as plt
#%% Measurements
voltage_1 = np.array([1900, 1975, 2000, 2010, 2015, 2020, 2030, 2050, 2075, 2100]) # We define 2020 as end of the plateau
-count_1 = np.array([ 224, 685, 1200, 1938, 2322, 2388, 4788, 6588, 21124, 51420])
+count_1 = np.array([224, 685, 1200, 1938, 2322, 2388, 4788, 6588, 21124, 51420])
count_frequency_1 = count_1/120
voltage_2 = np.array([1975, 2015, 2010, 2020, 2030, 2050, 2100])
......
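A minimal sketch (not part of the commit) of how the plateau data above could be visualised: the 120 s divisor matches count_frequency_1 = count_1/120, the 2020 V marker comes from the comment in the diff, and the Poisson counting errors are an assumption.

import numpy as np
import matplotlib.pyplot as plt

voltage_1 = np.array([1900, 1975, 2000, 2010, 2015, 2020, 2030, 2050, 2075, 2100])
count_1 = np.array([224, 685, 1200, 1938, 2322, 2388, 4788, 6588, 21124, 51420])
rate_1 = count_1 / 120                  # counts per second over a 120 s measurement
rate_err = np.sqrt(count_1) / 120       # assumed Poisson uncertainty on the counts

plt.errorbar(voltage_1, rate_1, yerr=rate_err, fmt="o")
plt.axvline(2020, linestyle="--", label="end of plateau (2020 V)")
plt.yscale("log")
plt.xlabel("Tube voltage [V]")
plt.ylabel("Count rate [1/s]")
plt.legend()
plt.show()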
This source diff could not be displayed because it is stored in LFS.
......
@@ -207,6 +207,7 @@ class ArrayLoader:
        for f in files:
            if fname_fuzzy in f:
                matches.append(f)
        if len(matches) > 1:
            print("Warning: The measurement file was not unambiguous. Please specify more arguments")
            return
......
@@ -214,7 +215,10 @@ class ArrayLoader:
            print("Warning: No data found.")
            return
        else:
-            fname = self.base_path + matches[0]
+            f = matches[0]
+            delay = int(f[f.find("_d") + 2 : f.find(".dat")])
+            fname = self.base_path + f
            fname_converted = fname.replace(self.base_path, self.base_path + "data_converted/").replace(".dat", ".npy")
            # change /data/... to /data/data_converted and change .dat to .npy
......
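An illustration (with a hypothetical file name) of what the new parsing above does: the delay is read from the part of the file name between "_d" and ".dat", and the converted file is looked up under data_converted/ with a .npy extension.

f = "muon_run_d250.dat"                                # hypothetical example file name
delay = int(f[f.find("_d") + 2 : f.find(".dat")])      # -> 250
base_path = "data/"                                    # as passed to ArrayLoader
fname = base_path + f                                  # -> "data/muon_run_d250.dat"
fname_converted = fname.replace(base_path, base_path + "data_converted/").replace(".dat", ".npy")
# -> "data/data_converted/muon_run_d250.npy"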
from os import device_encoding
import numpy as np
from scipy import signal
-import decode
-DECODER = decode.ArrayLoader("data/")
+from . import decode
+DECODER = decode.ArrayLoader("driftchamber_computations/data/")
......
@@ -96,9 +95,18 @@ class Event:
        return (good >= self.min_good_channels)  # and ends_good

-    def recreate_muon_path(self, drift_velocity):
-        pass
+    def recreate_muon_path(self, drift_velocity, delta_v, minimum_times):
+        x_pos = []
+        x_err = []
+        for i, ch in enumerate(self.channels):
+            delta_t = ch.peak_time - minimum_times[i]  # each channel has a different minimum time
+            x_pos.append(drift_velocity * delta_t)
+            if not ch.is_usable:
+                x_err.append(0.3)
+            else:
+                x_err.append(delta_v * delta_t)
+        return np.array(x_pos), np.array(x_err)
class Measurement:
......
@@ -114,6 +122,7 @@ class Measurement:
            self.events.append(Event(loaded[i,...], nbins, min_good_channels))
        del loaded
        self.n_events = len(self.events)
        self.n_channels = self.events[0].n_channels

    def filter_events(self):
......
@@ -134,4 +143,37 @@ class Measurement:
            else:
                timings[j,i] = -1
-        return timings
\ No newline at end of file
+        return timings
+
+    def get_per_channel_histogram(self, nbins):
+        """Returns the raw per-channel timings, their binned histograms and the bin centers."""
+        timings = self.get_peak_timings()
+        hists = []
+        bin_centers = []
+        raw_datas = []
+        self._min_times = []
+        for i in range(timings.shape[0]):  # usually 8 channels
+            hist, bin_edges = np.histogram(timings[i,...], bins=nbins, density=True)
+            bin_center = (bin_edges[:-1] + bin_edges[1:]) / 2
+            self._min_times.append(bin_center[1])  # 0th element is 0
+            hists.append(hist)
+            bin_centers.append(bin_center)
+            raw_datas.append(timings[i,...])
+        return raw_datas, hists, bin_centers
+
+    @property
+    def per_channel_min_times(self):
+        try:
+            return self._min_times
+        except AttributeError:
+            raise Exception("Please call get_per_channel_histogram first.")
+
+    def get_total_histogram(self, nbins):
+        # get_per_channel_histogram returns three lists; the raw data is not needed here
+        _, hists, bin_centers = self.get_per_channel_histogram(nbins)
+        avg_hist = np.mean(hists, axis=0)
+        avg_bin_centers = np.mean(bin_centers, axis=0)
+        return avg_hist, avg_bin_centers
\ No newline at end of file
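A usage sketch (assumed workflow, not part of the commit) tying the new pieces together: the per-channel histograms provide the per-channel minimum drift times, which recreate_muon_path then converts into positions via x = drift_velocity * (peak_time - minimum_time). The measurement instance, the drift velocity and its uncertainty below are placeholders.

# `measurement` is an already constructed Measurement instance (hypothetical here).
raw_datas, hists, bin_centers = measurement.get_per_channel_histogram(nbins=50)
min_times = measurement.per_channel_min_times  # only available after the call above
drift_velocity = 0.05   # placeholder drift velocity (e.g. mm/ns); not taken from the commit
delta_v = 0.005         # assumed uncertainty on the drift velocity
for event in measurement.events:
    x_pos, x_err = event.recreate_muon_path(drift_velocity, delta_v, min_times)
    # x_pos[i] is the reconstructed drift distance in channel i, x_err[i] its uncertainty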