Skip to content
Snippets Groups Projects
Commit 659f32a1 authored by Günter Quast's avatar Günter Quast
Browse files

added components of mimoCoRB

parent 996e82c7
No related branches found
No related tags found
No related merge requests found
Showing
with 621 additions and 0 deletions
# Dict with uid's as key and a nested dict with configuration variables
general:
runtime: 600 # desired runtime in seconds
runevents: &number_of_events 100000
number_of_samples: &number_of_samples 2048
analogue_offset: &analogue_offset 0
sample_time_ns: &sample_time_ns 32
trigger_level: &trigger_level 50
trigger_channel: &trigger_channel '1'
trigger_direction: &trigger_direction 'rising'
pre_trigger_samples: &pre_trigger_samples 103 # 5%
find_peaks:
sample_time_ns: *sample_time_ns
analogue_offset: *analogue_offset
number_of_samples: *number_of_samples
pre_trigger_samples: *pre_trigger_samples
peak_minimal_prominence: 50 # has to be positive and higher than avg. noise peaks to not cause havoc!
trigger_channel: *trigger_channel
peak_minimal_distance: 10 # minimal distance between two peaks in number of samples
peak_minimal_width: 7 # in number of samples
trigger_position_tolerance: 7 # in number of samples
# dict for RedPitaya redPoscdaq
redP_to_rb:
ip_address: '192.168.0.103'
eventcount: *number_of_events
sample_time_ns: *sample_time_ns
number_of_samples: *number_of_samples
pre_trigger_samples: *pre_trigger_samples
trigger_channel: *trigger_channel
trigger_level: *trigger_level
trigger_mode: 'norm'
# Dict for simul_source.py
simul_source:
sample_time_ns: *sample_time_ns
number_of_samples: *number_of_samples
pre_trigger_samples: *pre_trigger_samples
analogue_offset: *analogue_offset
eventcount: *number_of_events
sleeptime: 0.03
random: true
# Dict for push_simul
push_simul:
sample_time_ns: *sample_time_ns
number_of_samples: *number_of_samples
pre_trigger_samples: *pre_trigger_samples
analogue_offset: *analogue_offset
eventcount: *number_of_events
sleeptime: 0.03
random: true
save_to_txt:
filename: "spectrum"
save_parquet:
filename: "spectrum"
plot_waveform:
title: "Muon waveform"
min_sleeptime: 0.5 # time to wait between graphics updates
number_of_samples: *number_of_samples
sample_time_ns: *sample_time_ns
analogue_offset: *analogue_offset # analogue offset in V
pre_trigger_samples: *pre_trigger_samples
channel_range: 4096 # channel range in mV
trigger_channel: *trigger_channel # Channel name in the PicoScope. Valid values are 'A', 'B', 'C' or 'D'
trigger_level: *trigger_level # value in mV, take account of analogue_offset, which is added to input voltage !
plot_histograms:
title: "on-line histograms"
# define histograms
histograms:
# name min max nbins ymax name lin/log
chA_height: [50., 3000., 250, 5.9, "ph 1A", 0]
chB_height: [50., 3000., 250, 5.9, "ph 1B", 0]
"""
**redPitaya_source**: mimoCoRB source compatible to redPoscdaq
Input data is provided as numpy-arry of shape (number_of_channels, number_of_samples).
"""
import sys
import time
from multiprocessing import Event

import numpy as np

from mimocorb.buffer_control import rbImport
def simul_source(source_list=None, sink_list=None, observe_list=None, config_dict=None, **rb_info):
    """
    Read waveform data from a RedPitaya (via redPoscdaq) and pass it to a
    mimoCoRB buffer.

    The class mimocorb.buffer_control.rbImport is used to interface to the
    newBuffer and Writer classes of the package mimoCoRB.mimo_buffer.
    This example may serve as a template for other data sources.

    :param config_dict: configuration dictionary
      - eventcount: number of events to be read or 0 for infinite
      - number_of_samples, sample_time_ns, pre_trigger_samples and
        analogue_offset describe the waveform data (as for oscilloscope setup)
    """
    global databuffer
    data_ready = Event()
    data_ready.clear()

    def rp_data(data):
        """callback for the RedPitaya DAQ: hand one waveform to yield_data()"""
        # fix: 'global' was missing here — without it, the assignment below
        # created a local variable and yield_data() never saw new data
        global databuffer
        # wait until the previous event has been consumed
        while data_ready.is_set():
            time.sleep(0.01)
        databuffer = data
        data_ready.set()

    events_required = 1000 if "eventcount" not in config_dict else config_dict["eventcount"]

    def yield_data():
        """deliver data, called by instance of class mimoCoRB.rbImport"""
        event_count = 0
        while events_required == 0 or event_count < events_required:
            data_ready.wait()
            # deliver pulse data and no metadata
            yield (databuffer, None)
            data_ready.clear()
            event_count += 1

    # fix: the callback must be defined before being handed to run_rpControl
    # (the original called run_rpControl(callback=rp_data) before rp_data existed)
    # NOTE(review): run_rpControl is not imported in this module — presumably
    # redPoscdaq.run_rpControl; confirm and import it explicitly.
    run_rpControl(callback=rp_data)

    datasource = rbImport(config_dict=config_dict, sink_list=sink_list, ufunc=yield_data, **rb_info)
    # possibly check consistency of provided dtype with the data source!
    # TODO: Change to logger!
    datasource()
File added
File added
File added
File added
File added
File added
File added
File added
File added
File added
from mimocorb import mimo_buffer as bm
from scipy import signal
import numpy as np
from numpy.lib import recfunctions as rfn
import sys
import os
def normed_pulse(ch_input, position, prominence, analogue_offset):
    """Crop the pulse around *position* and normalize it to unit integral.

    :param ch_input: raw waveform samples of one channel
    :param position: sample index of the peak
    :param prominence: peak prominence as found by scipy.signal.find_peaks
    :param analogue_offset: analogue offset to subtract from the raw data
    :return: (normalized pulse samples, left crop index, pulse integral)
    """
    # compensate for the analogue offset
    trace = ch_input - analogue_offset
    pos = int(position)
    # rel_height is chosen such that the pulse is always cut 10 mV above the
    # analogue offset; a fixed relative height is unreliable because of the
    # quantized nature of the scope data
    rel_h = (trace[pos] - 10) / prominence
    width_info = signal.peak_widths(trace, [pos], rel_height=rel_h)
    left_edge = int(np.floor(width_info[2]))
    right_edge = int(np.ceil(width_info[3]))
    # crop the pulse region and normalize it to unit area
    pulse = trace[left_edge:right_edge]
    integral = sum(pulse)
    pulse *= 1 / integral
    return pulse, left_edge, integral
def correlate_pulses(data_pulse, reference_pulse):
    """Return the lag (in samples) of *data_pulse* relative to *reference_pulse*.

    The shift is the lag at which the cross-correlation of the two pulses
    is maximal.
    """
    xcorr = signal.correlate(data_pulse, reference_pulse, mode="same")
    lags = signal.correlation_lags(data_pulse.size, reference_pulse.size, mode="same")
    return lags[np.argmax(xcorr)]
def tag_peaks(input_data, prominence, distance, width):
    """Run scipy.signal.find_peaks on every channel of a structured array.

    :param input_data: structured ndarray, one field per channel
    :param prominence: minimal peak prominence
    :param distance: minimal distance between peaks (in samples)
    :param width: minimal peak width (in samples)
    :return: two dicts keyed by channel name — peak positions and the
        corresponding peak properties
    """
    found, props = {}, {}
    for channel in input_data.dtype.names:
        found[channel], props[channel] = signal.find_peaks(
            input_data[channel], prominence=prominence, distance=distance, width=width
        )
    return found, props
def correlate_peaks(peaks, tolerance):
    """Group peaks from different channels that occur at (nearly) the same time.

    :param peaks: dict channel_name -> array of peak positions (ascending),
        as returned by tag_peaks()
    :param tolerance: maximal distance (in samples) for two peaks to be
        counted as coincident
    :return: structured array with one field per channel and one row per
        coincidence; each entry is the index of the matched peak in that
        channel's peak list, or -1 if the channel has no peak in the window
    """
    m_dtype = []
    for key in peaks.keys():
        m_dtype.append((key, np.int32))
    # next_peak holds, per channel, the position of the next unmatched peak
    next_peak = {}
    for key, data in peaks.items():
        if len(data) > 0:
            next_peak[key] = data[0]
    correlation_list = []
    # repeatedly take the earliest pending peak and collect all channels whose
    # next peak lies within the tolerance window around it
    while len(next_peak) > 0:
        minimum = min(next_peak.values())
        line = []
        for key, data in peaks.items():
            if key in next_peak:
                if abs(next_peak[key] - minimum) < tolerance:
                    idx = data.tolist().index(next_peak[key])
                    line.append(idx)
                    # advance this channel's pointer, or retire the channel
                    # once all of its peaks have been consumed
                    if len(data) > idx + 1:
                        next_peak[key] = data[idx + 1]
                    else:
                        del next_peak[key]
                else:
                    # channel has pending peaks, but none in this window
                    line.append(-1)
            else:
                # channel has no peaks left at all
                line.append(-1)
        correlation_list.append(line)
    array = np.zeros(len(correlation_list), dtype=m_dtype)
    for idx, line in enumerate(correlation_list):
        array[idx] = tuple(line)
    return array
def match_signature(peak_matrix, signature):
    """Check whether the found peaks match the required signature.

    :param peak_matrix: structured array as returned by correlate_peaks();
        entries >= 0 mark a found peak
    :param signature: sequence of "+" (all channels must have a peak) and
        "-" (no channel may have a peak), one entry per coincidence line
        (1st line checked against 1st signature entry, etc.)
    :return: True if the first len(signature) lines fulfil the signature
    """
    if len(signature) > len(peak_matrix):
        return False
    # Boolean array with found peaks
    input_peaks = rfn.structured_to_unstructured(peak_matrix) >= 0
    # fix: np.str0 was deprecated and removed in NumPy 2.0 — use builtin str
    must_have_peak = np.array(signature, dtype=str) == "+"
    must_not_have_peak = np.array(signature, dtype=str) == "-"
    match = True
    # Check the signature for each peak (1st peak with 1st signature, 2nd peak with 2nd signature, ...)
    for idx in range(len(signature)):
        # Is everywhere a peak, where the signature expects one -> material_conditional(A, B): (not A) OR B
        first = (~must_have_peak[idx]) | input_peaks[idx]
        # Is everywhere no peak, where the signature expects no peak -> NAND(A, B): not (A and B)
        second = ~(must_not_have_peak[idx] & input_peaks[idx])
        match = match & (np.all(first) & np.all(second))
    return match
if __name__ == "__main__":
    # this file is a library of filter helpers; running it directly only
    # prints an informational message
    print("Script: " + os.path.basename(sys.argv[0]))
    print("Python: ", sys.version, "\n".ljust(22, "-"))
    print("THIS IS A MODULE AND NOT MEANT FOR STANDALONE EXECUTION")
"""
**plot_histograms**: histogram variable(s) from buffer using mimoCoRB.histogram_buffer
"""
import sys
import os
from mimocorb.histogram_buffer import histogram_buffer
import matplotlib
# select matplotlib frontend if needed
matplotlib.use("TkAgg")
def plot_histograms(source_list=None, sink_list=None, observe_list=None, config_dict=None, **rb_info):
    """Online display of histogram(s) of variable(s) from a mimoCoRB buffer.

    :param config_dict: configuration dictionary for the histogram display
    """
    # all work is delegated to the mimoCoRB histogram reader, which is
    # constructed and then run as a callable
    histogram_buffer(source_list, sink_list, observe_list, config_dict, **rb_info)()
if __name__ == "__main__":
    # this file is a module; running it directly only prints an info message
    print("Script: " + os.path.basename(sys.argv[0]))
    print("Python: ", sys.version, "\n".ljust(22, "-"))
    print("THIS IS A MODULE AND NOT MEANT FOR STANDALONE EXECUTION")
"""
**plot**: plotting waveforms from buffer using mimoCoRB.buffer_control.OberserverData
"""
import sys
import os
from mimocorb.plot_buffer import plot_buffer
import matplotlib
# select matplotlib frontend if needed
matplotlib.use("TkAgg")
def plot_waveform(source_list=None, sink_list=None, observe_list=None, config_dict=None, **rb_info):
    """Show waveform data from a mimoCoRB buffer in an online display.

    :param config_dict: configuration dictionary
      - plot_title: graphics title to be shown on graph
      - min_sleeptime: time between updates
      - sample_time_ns, channel_range, pre_trigger_samples and
        analogue_offset describe the waveform data as for the
        oscilloscope setup
    """
    # construct the mimoCoRB observer and run it
    observer = plot_buffer(source_list, sink_list, observe_list, config_dict, **rb_info)
    observer()
if __name__ == "__main__":
    # this file is a module; running it directly only prints an info message
    print("Script: " + os.path.basename(sys.argv[0]))
    print("Python: ", sys.version, "\n".ljust(22, "-"))
    print("THIS IS A MODULE AND NOT MEANT FOR STANDALONE EXECUTION")
"""
**simul_source**: a simple template for a mimoCoRB source to
enter simulated wave form data in a mimoCoRB buffer.
Input data is provided as numpy-arry of shape (number_of_channels, number_of_samples).
"""
from mimocorb.buffer_control import rbImport
import numpy as np
import sys, time
from pulseSimulator import pulseSimulator
def simul_source(source_list=None, sink_list=None, observe_list=None, config_dict=None, **rb_info):
    """
    Generate simulated waveform data and pass it to a mimoCoRB buffer.

    The class mimocorb.buffer_control/rbImport is used to interface to the
    newBuffer and Writer classes of the package mimoCoRB.mimo_buffer.
    This example may serve as a template for other data sources.

    :param config_dict: configuration dictionary
      - eventcount: number of events to be simulated or 0 for infinite
      - sleeptime: (mean) time between events
      - random: random time between events according to a Poisson process
      - number_of_samples, sample_time_ns, pre_trigger_samples and
        analogue_offset describe the waveform data to be generated
        (as for oscilloscope setup)

    Internal parameters of the simulated physics process (the decay of a
    muon) are (presently) not exposed to the user.
    """
    events_required = config_dict.get("eventcount", 1000)

    def yield_data():
        """generate simulated data, called by instance of class mimoCoRB.rbImport"""
        delivered = 0
        while events_required == 0 or delivered < events_required:
            # deliver pulse data and no metadata
            yield (dataSource(number_of_channels), None)
            delivered += 1

    # set up the pulse simulator and the buffer importer; the number of
    # channels is taken from the dtype of the sink ring buffer
    dataSource = pulseSimulator(config_dict)
    simulsource = rbImport(config_dict=config_dict, sink_list=sink_list, ufunc=yield_data, **rb_info)
    number_of_channels = len(simulsource.sink.dtype)
    # possibly check consistency of provided dtype with simulation !
    # TODO: Change to logger!
    simulsource()
"""Module save_files to handle file I/O for data in txt and parquet format
This module relies on classes in mimocorb.buffer_control
"""
import sys
import os
from mimocorb.buffer_control import rb_toTxtfile, rb_toParquetfile
# def save_to_txt(source_dict):
def save_to_txt(source_list=None, sink_list=None, observe_list=None, config_dict=None, **rb_info):
    """Save data from a mimoCoRB buffer to a file in text (csv) format."""
    writer = rb_toTxtfile(source_list=source_list, config_dict=config_dict, **rb_info)
    writer()
def save_parquet(source_list=None, sink_list=None, observe_list=None, config_dict=None, **rb_info):
    """Save data from a mimoCoRB buffer to a file in Parquet format."""
    writer = rb_toParquetfile(source_list=source_list, config_dict=config_dict, **rb_info)
    writer()
if __name__ == "__main__":
    # this file is a module; running it directly only prints an info message
    print("Script: " + os.path.basename(sys.argv[0]))
    print("Python: ", sys.version, "\n".ljust(22, "-"))
    print("THIS IS A MODULE AND NOT MEANT FOR STANDALONE EXECUTION")
"""Module **pulse_filter**
This (rather complex) module filters waveform data to search for valid signal pulses.
The code first validates the trigger pulse, identifies coincidences of signals in
different layers (indiating the passage of a cosmic ray particle, a muon) and finally
searches for double-pulse signatures indicating that a muon was stopped in or near
a detection layer where the resulting decay-electron produced a delayed pulse.
The time difference between the initial and the delayed pulses is the individual
lifetime of the muon.
The decay time and the properties of the signal pulses (height, integral and
postition in time) are written to a buffer; the raw wave forms are optionally
also written to another buffer.
The callable functions *find_peaks()* and *calulate_decay_time()* depend on the
buffer manager *mimoCoRB* and provide the filter functionality described above.
These functions support multiple sinks to be configured for output.
The relevant configuration parameters can be found in the section *find_peaks:*
and *calculate_decay_time:* in the configuration file.
"""
from mimocorb.buffer_control import rbTransfer
import numpy as np
import pandas as pd
import os, sys
from filters import *
def find_peaks(source_list=None, sink_list=None, observe_list=None, config_dict=None, **rb_info):
    """filter client for mimoCoRB: Find valid signal pulses in waveform data

    Input:

      - wave form data from source buffer defined in source_list

    Returns:

      - None if filter not passed
      - list of list(s) of pulse parameters, written to sinks defined in sink_list
    """
    if config_dict is None:
        raise ValueError("ERROR! Wrong configuration passed (in lifetime_modules: calculate_decay_time)!!")

    # Load configuration
    sample_time_ns = config_dict["sample_time_ns"]
    # analogue offset is configured in V, waveform samples are in mV
    analogue_offset = config_dict["analogue_offset"]*1000
    peak_minimal_prominence = config_dict["peak_minimal_prominence"]
    peak_minimal_distance = config_dict["peak_minimal_distance"]
    peak_minimal_width = config_dict["peak_minimal_width"]
    pre_trigger_samples = config_dict["pre_trigger_samples"]
    trigger_channel = config_dict["trigger_channel"]
    if trigger_channel not in ['A','B','C','D']:
        # no valid scope channel configured -> external or no trigger
        trigger_channel = None
    trigger_position_tolerance = config_dict["trigger_position_tolerance"]

    # dtype of the output (pulse-parameter) ring buffer
    pulse_par_dtype = sink_list[-1]['dtype']

    def tag_pulses(input_data):
        """find all valid pulses

        This function to be called by instance of class mimoCoRB.rbTransfer

        Args: input data as structured ndarray

        Returns: list of parameterized pulses
        """
        # Find all the peaks and store them in a dictionary
        peaks, peaks_prop = tag_peaks(input_data, peak_minimal_prominence, peak_minimal_distance, peak_minimal_width)

        # identify trigger channel, validate trigger pulse and get time of trigger pulse
        if trigger_channel is not None:
            trigger_peaks = peaks['ch' + trigger_channel]
            if len(trigger_peaks) == 0:
                return None
            # trigger pulse = peak closest to the nominal trigger position
            reference_position = trigger_peaks[np.argmin(np.abs(trigger_peaks - pre_trigger_samples))]
        else:  # external or no trigger: set to nominal position
            reference_position = pre_trigger_samples

        peak_data= np.zeros( (1,), dtype=pulse_par_dtype)
        for key in peaks.keys():
            for position, height, left_ips, right_ips in zip(
                    peaks[key], peaks_prop[key]['prominences'],
                    peaks_prop[key]['left_ips'], peaks_prop[key]['right_ips']):
                # keep only pulses coincident with the (validated) trigger
                if np.abs(reference_position - position) < trigger_position_tolerance:
                    peak_data[0][key+'_position'] = position
                    peak_data[0][key+'_height'] = input_data[key][position] - analogue_offset  # height
                    left = int(np.floor(left_ips))
                    right = int(np.ceil(right_ips))
                    # NOTE(review): the 1e-9/50/5 factor presumably converts
                    # mV*samples to charge assuming 50 Ohm termination and a
                    # scale factor of 5 — confirm units with the DAQ setup
                    peak_data[0][key+'_integral'] = \
                        sum(input_data[key][left:right] - analogue_offset) * sample_time_ns * 1e-9/50/5
        return [peak_data]

    p_filter = rbTransfer(source_list=source_list, sink_list=sink_list, config_dict=config_dict,
                          ufunc=tag_pulses, **rb_info)
    p_filter()
if __name__ == "__main__":
    # this file is a module; running it directly only prints an info message
    print("Script: " + os.path.basename(sys.argv[0]))
    print("Python: ", sys.version, "\n".ljust(22, '-'))
    print("THIS IS A MODULE AND NOT MEANT FOR STANDALONE EXECUTION")
"""
**redP_mimoCoRB**: a simple template to use mimoCoRB with the RedPitaya and redPoscdaq.py
Input data is provided as numpy-arry of shape (number_of_channels, number_of_samples).
"""
import time
import sys
import redPoscdaq as rp
def redP_to_rb(source_list=None, sink_list=None, observe_list=None, config_dict=None, **rb_info):
    """
    Get data from a RedPitaya (via redPoscdaq) and pass it to a mimoCoRB buffer.

    The class redPoscdaq.redP_mimocorb wraps the mimoCoRB writer interface;
    data taking is driven by redPoscdaq.run_rpControl, which invokes the
    data_sink callback for every recorded event.

    :param config_dict: configuration dictionary
      - eventcount: number of events to be taken or 0 for infinite
      - ip_address, trigger_mode etc. configure the RedPitaya oscilloscope
      - number_of_samples, sample_time_ns, pre_trigger_samples and
        analogue_offset describe the waveform data (as for oscilloscope setup)
    """
    # initialize mimocorb writer class defined inside redPoscdaq
    datasource= rp.redP_mimocorb(config_dict=config_dict, sink_list=sink_list, **rb_info)
    # start the oscilloscope; events are delivered via the data_sink callback
    rp.run_rpControl(callback=datasource.data_sink, conf_dict=config_dict)
#daq = run_mimoDAQ('redP_mimoCoRB.yaml')
#daq.setup()
#RB_1 = daq.ringbuffers['RB_1']
#sink_dict = RB_1.new_writer()
#datasource= rp.redP_mimocorb(config_dict={}, sink_list=[sink_dict], RB_1='write')
#print("data source initialized")
#rp.run_rpControl(callback=datasource.data_sink)
#print("starting DAQ")
#daq.run()
# Configuration for recording two channels with mimoCoRB
# -----------------------------------------------------
#
# configure two buffers:
# - RB_1 for raw waveforms
# - RB_2 for derived pulse parameters
# data from RB_2, the result buffer, are saved to a file in csv (text) format.
#
# - data from RB_1 are also passed to an obsever process driving a real-time display
# - data from RB_2 are passed to a Reader process driving a real-time histogram display
#
# Notes:
#
# 1. additional config files controlling the user functions are
# located in the subdirectory config/
# 2. necessary application-specific user code is located
# in the subdirectory modules/
#
# ----------------------------------------------------------------------------
#
RingBuffer:
# define ring buffers
- RB_1:
# raw input data buffer (from picoScope, file or simulation)
number_of_slots: 16
channel_per_slot: 2048
data_type:
1: ['chA', "float32"]
2: ['chB', "float32"]
- RB_2:
# buffer with correct signature double pulse parameters
number_of_slots: 16
channel_per_slot: 1
data_type:
1: ['chA_height', "float32"]
2: ['chA_position', "int32"]
3: ['chA_integral', "float32"]
4: ['chB_height', "float32"]
5: ['chB_position', "int32"]
6: ['chB_integral', "float32"]
Functions:
# define functions and ringbuffer assignment
- Fkt_main:
config_file: "config/spectrum_config.yaml"
- Fkt_1:
## for simulation with rbPut
file_name: "redP_mimoCoRB"
fkt_name: "redP_to_rb"
num_process: 1
RB_assign:
RB_1: "write"
- Fkt_2:
file_name: "modules/spectrum_filter"
fkt_name: "find_peaks"
num_process: 2
RB_assign:
RB_1: "read"
RB_2: "write"
- Fkt_3:
file_name: "modules/save_files"
fkt_name: "save_to_txt"
num_process: 1
RB_assign:
RB_2: "read"
# --- the following functions are optional
- Fkt_4:
file_name: "modules/plot_waveform"
fkt_name: "plot_waveform"
num_process: 1
RB_assign:
RB_1: "observe"
- Fkt_5:
file_name: "modules/plot_histograms"
fkt_name: "plot_histograms"
num_process: 1
RB_assign:
RB_2: "read" # pulse parameters
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment