Commit d5a41e0f authored by Remy Moll

First non-binary files

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
import numpy as np
import matplotlib.pyplot as plt
#%% Measurements
voltage_1 = np.array([1900, 1975, 2000, 2010, 2015, 2020, 2030, 2050, 2075, 2100]) # We define 2020 as end of the plateau
count_1 = np.array([ 224, 685, 1200, 1938, 2322, 2388, 4788, 6588, 21124, 51420])
count_frequency_1 = count_1/120
voltage_2 = np.array([1975, 2015, 2010, 2020, 2030, 2050, 2100])
count_2 = np.array([866, 1657, 1573, 2330, 3424, 10252, 64206])
new_voltage_2 = np.array([2075, 1975, 2015, 2010, 2020, 2030, 2050, 2100, 1900, 2000, 2040])
new_count_2 = np.array([5769, 866, 1435, 1331, 1583, 1977, 2944, 7691, 286, 1164, 2302])
count_frequency_2 = count_2/120
new_count_frequency_2 = new_count_2/120
# Sort the (voltage, count rate) pairs by voltage; argsort keeps the pairing intact
order_1 = np.argsort(voltage_1)
array_1 = np.vstack([voltage_1, count_frequency_1])[:, order_1]
print(array_1)
order_2 = np.argsort(new_voltage_2)
new_array = np.vstack([new_voltage_2, new_count_frequency_2])[:, order_2]
print(new_array)
#%% Anode Measurement
anode_voltage = np.array([2000, 2050, 2100, 2150, 2200, 2250, 2300, 2350, 2400, 2450, 2500])
mean_peak_1 = np.array([20.5, 33.2, 65.0, 75.0, 144, 200, 357, 401, 544, 502, 545])
uncertainty_1 = np.array([0.2, 0.2, 0.6, 0.7, 1.0, 1.5, 1.5, 2, 3, 5, 5])
mean_peak_2 = np.array([15.2, 24.0, 41.0, 48.6, 93.5, 163.2, 269, 325, 450, 409, 480])
uncertainty_2 = np.array([0.1, 0.1, 0.5, 0.4, 0.5, 0.3, 2.5, 1.5, 10, 5, 5])
mean_peak_3 = np.array([8.2, 11.3, 17.0, 25.5, 49.3, 81.0, 167, 253, 350, 320, 385])
uncertainty_3 = np.array([0.05, 0.07, 0.2, 0.5, 0.3, 0.5, 1.5, 1, 10, 15, 8])
mean_peak_4 = np.array([6.8, 8.0, 11.7, 16.8, 31.6, 55.0, 117.5, 200, 280, 260, 304])
uncertainty_4 = np.array([0.025, 0.04, 0.1, 0.2, 0.2, 1, 1.5, 1.5, 20, 8, 10])
# 578, 0.8; 523, 1; 423, 1.5;360, 0.8
#%% Functions
def plot_count(voltage, count_frequency, number):
    dpi_set = 600
    #plt.figure(figsize=(12,8))
    plt.scatter(voltage, count_frequency, label=f'PMT {number}')
    #plt.plot(voltage,count)
    plt.xlabel('Voltage [V]')
    plt.ylabel('Count frequency [Hz]')
    plt.title(f'Count frequency vs PMT {number} Voltage')
    plt.legend(loc='best')
    plt.savefig(f"Count frequency vs PMT {number} Voltage.pdf", dpi=dpi_set)
    plt.show()
def plot_anode(voltage, mean_peak, uncertainty, number):
    dpi_set = 600
    #plt.figure(figsize=(12,8))
    plt.errorbar(voltage, mean_peak, yerr=uncertainty, label=f'Channel {number}')
    #plt.plot(voltage,count)
    plt.xlabel('Anode Voltage [V]')
    plt.ylabel('Mean Peak Voltage [mV]')
    plt.title(f'Mean Peak Voltage Channel {number} vs Anode Voltage')
    plt.legend(loc='best')
    plt.savefig(f"Mean Peak Voltage Channel {number} vs Anode Voltage.pdf", dpi=dpi_set)
    plt.show()
def plot_anode_average(voltage, mean_peak_list, uncertainty_list):
    dpi_set = 600
    # Average the mean peak voltage over all channels and propagate the
    # per-channel uncertainties in quadrature
    mean_peak_average = np.mean(mean_peak_list, axis=0)
    uncertainty_average = np.sqrt(np.sum(np.array(uncertainty_list)**2, axis=0)) / len(uncertainty_list)
    #plt.figure(figsize=(12,8))
    plt.errorbar(voltage, mean_peak_average, yerr=uncertainty_average, label='Channel average')
    plt.xlabel('Anode Voltage [V]')
    plt.ylabel('Mean Peak Voltage [mV]')
    plt.title('Mean Peak Voltage (channel average) vs Anode Voltage')
    plt.legend(loc='best')
    plt.savefig("Mean Peak Voltage channel average vs Anode Voltage.pdf", dpi=dpi_set)
    plt.show()
#%% Main
def count_frequency_plots():
    plot_count(voltage_1, count_frequency_1, 1)
    plot_count(voltage_2, count_frequency_2, 2)
    plot_count(new_voltage_2, new_count_frequency_2, 'New Count 2')
def anode_voltage_plots():
    plot_anode(anode_voltage, mean_peak_1, uncertainty_1, 1)
    plot_anode(anode_voltage, mean_peak_2, uncertainty_2, 2)
    plot_anode(anode_voltage, mean_peak_3, uncertainty_3, 3)
    plot_anode(anode_voltage, mean_peak_4, uncertainty_4, 4)
if __name__ == '__main__':
    #count_frequency_plots()
    anode_voltage_plots()
#10252 for 2050 initial measurement
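# Example (not in the original script, shown as a sketch): an averaged anode curve
# could be produced with the helper above by passing all four channels at once:
# plot_anode_average(anode_voltage,
#                    [mean_peak_1, mean_peak_2, mean_peak_3, mean_peak_4],
#                    [uncertainty_1, uncertainty_2, uncertainty_3, uncertainty_4])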
#!/usr/bin/env python
"""
Script to convert binary format to root for DRS4 evaluation boards.
http://www.psi.ch/drs/evaluation-board
Jonas Rembser (rembserj@phys.ethz.ch), 2016-04-15 based on work by
Gregor Kasieczka, ETHZ, 2014-01-15
based on decode.C by Dmitry Hits
"""
import sys, os
from numpy import array, uint32, cumsum, roll, zeros, float32, save, load
from struct import unpack
class BinaryConverter:
"""Converts .dat binary files into numpy arrays and stores them in readable array-files"""
def __init__(self) -> None:
pass
def convert(self, fname):
print("Now converting '{}'".format(fname))
with open(fname, "rb") as fstream:
result = self.read_channels(fstream)
fbase = os.path.splitext(fname)[0] # discard the .dat ending
location, f = os.path.split(fbase)
fnew = location + "/data_converted/" + f + ".npy"
        os.makedirs(os.path.dirname(fnew), exist_ok=True)  # create directories if they do not exist yet
with open(fnew, "wb") as fn:
save(fn, result)
print("Successfully converted '{}' -> saved into '{}'".format(fname, fnew))
return fnew
def read_channels(self, fstream):
"""
This is the main loop One iteration corresponds to reading one channel every
few channels a new event can start We know that this if the case if we see
"EHDR" instead of "C00x" (x=1..4) If we have a new event: Fill the tree, reset
the branches, increment event counter The binary format is described in:
http://www.psi.ch/drs/DocumentationEN/manual_rev40.pdf (page 24)
What happens when multiple boards are daisychained: after the C004 voltages of
the first board, there is the serial number of the next board before it starts
again with C001.
"""
event_serial = array([0], dtype=uint32)
events = []
current_board = 0
tcell = 0 # current trigger cell
t_00 = 0 # time in first cell in first channel for alignment
is_new_event = True
timebins, channels_t, channels_v = self.get_cell_information(fstream)
# Read the header, this is either
# EHDR -> finish event
# C00x -> read the data
# "" -> end of file
header = ""
while header != b"":
# Start of Event
if is_new_event:
event_serial[0] = unpack("I", fstream.read(4))[0]
if event_serial[0]%100 == 0:
print("Event : ", event_serial[0], ' ', end='\r')
is_new_event = False
# Set the timestamp, where the milliseconds need to be converted to
# nanoseconds to fit the function arguments
dt_list = unpack("H"*8, fstream.read(16))
# Fluff the serial number and read in trigger cell
fluff = fstream.read(4)
tcell = unpack('H', fstream.read(4)[2:])[0]
# Reset current board number
current_board = 0
continue
# Read the header, this is either
# EHDR -> finish event
# C00x -> read the data
# "" -> end of file
header = fstream.read(4)
# Handle next board
if header.startswith(b"B#"):
current_board = current_board + 1
tcell = unpack(b'H', fstream.read(4)[2:])[0]
continue
# End of Event
elif header == b"EHDR":
event = []
for i in range(len(channels_t)): # number of channels
event.append(list(zip(channels_t[i],channels_v[i])))
events.append(array(event))
print("Parsing event no. {} ...".format(len(events)), end="\r")
is_new_event = True
# Read and store data from a channel read from the header
elif header.startswith(b'C'):
# the voltage info is 1024 floats with 2-byte precision
chn_i = int(header.decode('ascii')[-1]) + current_board * 4
scaler = unpack('I', fstream.read(4))
voltage_ints = unpack(b'H'*1024, fstream.read(2*1024))
"""
Calculate precise timing using the time bins and trigger cell
see p. 24 of the DRS4 manual for the explanation
the following lines sum up the times of all cells starting from the trigger cell
to the i_th cell and select only even members, because the amplitude of the adjacent cells are averaged.
The width of the bins 1024-2047 is identical to the bins 0-1023, that is why the arrays are simply extended
before performing the cumsum operation
"""
timebins_full = list(roll(timebins[chn_i-1], -tcell))+list(roll(timebins[chn_i-1], -tcell))
t = cumsum(timebins_full)[::2]
# time of first cell for correction, find the time of the first cell for each channel,
# because only these cells are aligned in time
t_0 = t[(1024-tcell)%1024]
if chn_i % 4 == 1:
t_00 = t_0
# Align all channels with the first channel
t = t - (t_0 - t_00) # correction
# TODO: it is a bit unclear how to do the correction with
# TODO: multiple boards, so the boards are just corrected independently for now
# TODO: find the alignment of the boards by sending the same signal to both boards
for i, x in enumerate(voltage_ints): # i is the sample number, x is the voltage value of the sample in ADC channels
channels_v[chn_i-1][i] = ((x / 65535.) - 0.5)
channels_t[chn_i-1][i] = t[i]
print("Conversion of {} events done.".format(len(events)))
return events
def get_cell_information(self, fstream):
"""This loop extracts time information for each DRS4 cell"""
# To hold to the total number of channels and boards
n_ch = 0
n_boards = 0
# Empty lists for containing the variables connected to the tree branches
channels_t = []
channels_v = []
timebins = []
board_serials = []
header = fstream.read(4)
while header != b"EHDR": # End the loop if header is not CXX or a serial number
# only consider the following two headers, ignore the initial time header for instance
if header.startswith(b"C"):
n_ch = n_ch + 1
# Create variables ...
channels_t.append(zeros(1024, dtype=float32))
channels_v.append(zeros(1024, dtype=float32))
# Write timebins to numpy array
timebins.append(array(unpack('f'*1024, fstream.read(4*1024))))
# Increment the number of boards when seeing a new serial number
# and store the serial numbers in the board serial numbers vector
elif header.startswith(b"B#"):
board_serial = unpack(b'H', header[2:])[0]
board_serials.append(board_serial)
n_boards = n_boards + 1
header = fstream.read(4)
print("Reading in events measurend with {} channels on {} board(s).".format(n_ch, n_boards))
return timebins, channels_t, channels_v
class ArrayLoader:
"""Loads data for a given cathode voltage. This part assumes the following naming scheme for the data:
data_v{}.dat where {} gets replaced by the voltage in Volts
"""
def __init__(self, base_path) -> None:
self.converter = BinaryConverter()
self.converted_path_base = base_path + "data_converted/data{}.npy"
self.binary_path_base = base_path + "data{}.dat"
self.voltage_placeholder = "_v{}"
self.events_placeholder = "_e{}"
self.delay_placeholder = "_d{}"
def get_data(self, voltage, events=0, delay=0):
f_extension = self.voltage_placeholder.format(voltage)
if events != 0:
f_extension += self.events_placeholder.format(events)
if delay != 0:
f_extension += self.delay_placeholder.format(delay)
fname_converted = self.converted_path_base.format(f_extension)
fname_binary = self.binary_path_base.format(f_extension)
if not os.path.exists(fname_converted):
# If the file was never converted before, do it once and save it to the data_converted directory. Only then proceed to the loading
converted = self.converter.convert(fname_binary)
return load(fname_converted)
if __name__ == "__main__":
os.chdir(sys.path[0])
if not len(sys.argv) == 2:
print("Wrong number of arguments!")
print("Usage: python decode.py filename.dat")
print("Exiting...")
sys.exit(1)
input_filename = sys.argv[1]
conv = BinaryConverter()
outfile = conv.convert(input_filename)
print("Numpy array saved to {}".format(outfile))
# Experiment: Drift-chamber
---
[TOC]
---
## Questions
* Do muons tunnel through matter, e.g. through the cathode sheets?
## Preparation (calibration)
### Trigger calibration
Goal: maximize signal efficiency (high gain, low noise)
PMT voltage fixed at 2000 V.
Scintillator 1:
| Threshold (mV) | Counts / 20 s |
| -------------- | ------------- |
| 304.0          | 148           |
| 296.3          | 183 / 187     |
| 301.9          | 149           |
| 315.1          | 146           |
| 335.0          | 115           |
Scintillator 2:
| Threshold (mV) | Counts / 20 s |
| -------------- | ------------- |
| 475.9          | 195 (?)       |
| 500.2          | 129           |
| 430.1          | 196           |
| 406.3          | 242           |
| 451.0          | 172           |
| 441.0          | 189           |
Since we want both scintillators to register a similar number of hits, we chose thresholds
of **296.3 mV** and **441.0 mV** and fixed them.
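As a quick cross-check (a sketch added here, not part of the original measurement log), the two working points can be compared as count rates with Poisson uncertainties, using the 20 s counts from the tables above:

```python
import numpy as np

# 20 s counts at the chosen working points (values from the tables above)
counts_1 = 183    # scintillator 1 at 296.3 mV (second run gave 187)
counts_2 = 189    # scintillator 2 at 441.0 mV
t_meas = 20.0     # measurement time [s]

rate_1, rate_2 = counts_1 / t_meas, counts_2 / t_meas
# Poisson uncertainty on a rate: sqrt(N) / t
err_1, err_2 = np.sqrt(counts_1) / t_meas, np.sqrt(counts_2) / t_meas

diff_sigma = abs(rate_1 - rate_2) / np.hypot(err_1, err_2)
print(f"{rate_1:.2f} ± {err_1:.2f} Hz vs {rate_2:.2f} ± {err_2:.2f} Hz "
      f"-> rates agree within {diff_sigma:.1f} sigma")
```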
### Optimize input voltage
With the threshold voltages fixed as above, vary the PMT voltage for each scintillator:
Scintillator 1:
| PMT Voltage (V) | Counts / 2 min |
| --------------- | -------------- |
| 1900 | 224 |
| 2000 | 1200 |
| 2100 | 51420 |
| 2050 | 6588 |
| 2030 | 4788 |
| 2020 | 2384 |
| 2010 | 1938 |
| 2015 | 2322 |
| 1975 | 658 |
| 2075 | 21124 |
Preferred voltage: **2020 V** (end of the plateau)
Scintillator 2:
| PMT Voltage (V) | Counts / 2 min |
| --------------- | --------------------- |
| 1900 | 286 |
| 2000 | 1164 |
| 2100 | 64206 -> 7691 -> 9207 |
| 2050            | 10252 (first run, discarded) -> 2944 |
| 2030            | 3424 (first run, discarded) -> 1977 |
| 2020            | 2330 (first run, discarded) -> 1583 |
| 2010            | 1573 (first run, discarded) -> 1331 |
| 2015            | 1657 (first run, discarded) -> 1435 |
| 1975 | 866 |
| 2075 | 5769 |
| 2040 | 2302 |
Preferred voltage: **2020 V** (end of the plateau here as well)
NOTE: The uncertainty of this voltage must be estimated as at least $\pm 1$ V, since an exact setting was hard to maintain: even when set very carefully, after a few minutes the PSU voltage would jump back and forth between 2019 V and 2020 V.
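To get a feeling for what $\pm 1$ V means for the rate (a rough estimate, not from the original notes), the local slope of the plateau around 2020 V can be read off the remeasured values in the table above:

```python
# Counts per 2 min around the working point (scintillator 2, remeasured values)
counts = {2015: 1435, 2020: 1583, 2030: 1977}

# Local slope of the plateau in counts per volt
slope = (counts[2030] - counts[2015]) / (2030 - 2015)
rel_change = slope / counts[2020]          # relative rate change per volt
print(f"~{slope:.0f} counts/V -> about {100 * rel_change:.1f}% rate change for a 1 V drift")
```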
### Calibration of anode wire amplification
Remove the scintillators and use an electron source (Sr-90). Cathode voltage fixed at 3500 V.
Vary the anode voltage (2000-2500 V) and measure the mean signal voltage (after amplification). For this we have to trigger dynamically once a threshold is met (the threshold has to be raised along with the amplification).
We measure the mean peak-to-peak amplitude and plot it for each voltage. Where the exponential increase breaks off, we have reached the optimal amplification voltage. In our case this corresponds to about **2400 V** (see Michel's plots).
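A minimal sketch of how the break-off can be made visible, using the channel-1 values from the analysis script above: on a semi-logarithmic scale the exponential gain region is roughly a straight line, and the break-off shows up as a flattening towards 2400-2500 V.

```python
import numpy as np
import matplotlib.pyplot as plt

# Channel-1 values from the analysis script above
anode_voltage = np.array([2000, 2050, 2100, 2150, 2200, 2250, 2300, 2350, 2400, 2450, 2500])
mean_peak_1 = np.array([20.5, 33.2, 65.0, 75.0, 144, 200, 357, 401, 544, 502, 545])

# On a log scale the exponential gain region is (roughly) a straight line;
# the flattening towards 2400-2500 V marks the break-off of the amplification
plt.semilogy(anode_voltage, mean_peak_1, 'o-')
plt.xlabel('Anode Voltage [V]')
plt.ylabel('Mean Peak Voltage [mV]')
plt.title('Channel 1: gain curve on a log scale')
plt.show()
```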
### Timing and offset calibration
With the delay set to the software maximum (1690 ns) we measure a few calibration events. The curves already look Gaussian and symmetric; the first channels do, however, show a clear cutoff, suggesting that the recording window is cut short by the delay. Reducing the delay to 1012 ns shifts the $\mu$ value of the histograms, and the shift is coherent across channels.
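A minimal sketch of how the per-channel $\mu$ values could be extracted from the converted data (assuming the .npy files written by the decoder above load as a single 4-D array; the filename is hypothetical and the peak is simply taken as the sample with the largest absolute voltage):

```python
import numpy as np

# Hypothetical converted file written by the decoder above (naming scheme of ArrayLoader)
events = np.load("data_converted/data_v3500.npy")   # shape: (n_events, n_channels, 1024, 2)

for ch in range(events.shape[1]):
    t = events[:, ch, :, 0]                          # cell times [ns]
    v = events[:, ch, :, 1]                          # cell voltages, normalised to [-0.5, 0.5]
    # take the peak of each event as the sample with the largest absolute voltage
    peak_times = t[np.arange(len(events)), np.argmax(np.abs(v), axis=1)]
    print(f"channel {ch + 1}: mu = {peak_times.mean():.1f} ns, sigma = {peak_times.std():.1f} ns")
```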
## Experimental procedure
## Measurements
## Data analysis
## Error analysis
## Footnotes
brachistochrone?