Skip to content
Snippets Groups Projects
Commit 5525a430 authored by Laurent Mercadier's avatar Laurent Mercadier
Browse files

clean up get_digitizer_peaks() function

parent 90c9a732
No related branches found
No related tags found
1 merge request!215Clean digitizers peak functions and add documentation
......@@ -24,7 +24,8 @@ __all__ = [
'get_laser_peaks',
'get_peaks',
'get_tim_peaks',
'digitizer_signal_description'
'digitizer_signal_description',
'get_dig_avg_trace'
]
log = logging.getLogger(__name__)
......@@ -261,6 +262,7 @@ def get_peaks(run,
pattern = bunchPattern
if bunchPattern == 'None':
bpt = None
# 1. Peak-integrated data from digitizer
if useRaw is False:
# 1.1 No bunch pattern provided
......@@ -297,16 +299,17 @@ def get_peaks(run,
integParams = find_integ_params(trace, min_distance=min_distance)
log.debug(f'Auto find peaks result: {integParams}')
required_keys = ['pulseStart', 'pulseStop', 'baseStart',
'baseStop', 'period', 'npulses']
if integParams is None or not all(name in integParams
for name in required_keys):
raise TypeError('All keys of integParams argument '
f'{required_keys} are required when '
'bunch pattern info is missing.')
# 2.1. No bunch pattern provided
if bpt is None:
log.info('Missing bunch pattern info.')
required_keys = ['pulseStart', 'pulseStop', 'baseStart',
'baseStop', 'period', 'npulses']
if integParams is None or not all(name in integParams
for name in required_keys):
raise TypeError('All keys of integParams argument '
f'{required_keys} are required when '
'bunch pattern info is missing.')
log.debug(f'Retrieving {integParams["npulses"]} pulses.')
if extra_dim is None:
extra_dim = 'pulseId'
......@@ -320,7 +323,8 @@ def get_peaks(run,
# 2.2 Bunch pattern is provided
# load mask and extract pulse Id:
dim_names = {'sase3': 'sa3_pId', 'sase1': 'sa1_pId', 'scs_ppl': 'ol_pId'}
dim_names = {'sase3': 'sa3_pId', 'sase1': 'sa1_pId',
'scs_ppl': 'ol_pId', 'None': 'pulseId'}
extra_dim = dim_names[pattern]
valid_tid = np.intersect1d(arr.trainId, bpt.trainId, assume_unique=True)
mask = is_pulse_at(bpt.sel(trainId=valid_tid), pattern)
......@@ -592,13 +596,13 @@ def get_peak_params(run, mnemonic, raw_trace=None, ntrains=200):
min_distance = 24 if digitizer == "FastADC" else 440
title = 'Auto-find peak params'
if raw_trace is None:
raw_trace = get_coarse_average(run, mnemonic)
raw_trace = get_dig_avg_trace(run, mnemonic)
params = find_integ_params(raw_trace, min_distance=min_distance)
log.debug(f'{title} for {mnemonic}: {params}')
return params
def get_coarse_average(run, mnemonic, ntrains=200):
def get_dig_avg_trace(run, mnemonic, ntrains=None):
"""
Compute the average over ntrains evenly spaced across all trains
of a digitizer trace.
......@@ -611,6 +615,7 @@ def get_coarse_average(run, mnemonic, ntrains=200):
ToolBox mnemonic of the digitizer data, e.g. 'MCP2apd'.
ntrains: int
Number of trains used to calculate the average raw trace.
If None, all trains are used.
Returns
-------
......@@ -619,6 +624,8 @@ def get_coarse_average(run, mnemonic, ntrains=200):
"""
run_mnemonics = mnemonics_for_run(run)
total_tid = len(run.train_ids)
if ntrains is None:
ntrains = total_tid
stride = int(np.max([1, np.floor(total_tid/ntrains)]))
s = np.s_[0:None:stride]
sel = run.select_trains(s)
......@@ -673,7 +680,7 @@ def check_peak_params(run, mnemonic, raw_trace=None, ntrains=200, params=None,
mnemo_raw = mnemonic.replace('peaks', 'raw').replace('apd', 'raw')
title = 'Digitizer peak params'
if raw_trace is None:
raw_trace = get_coarse_average(run, mnemonic)
raw_trace = get_dig_avg_trace(run, mnemonic)
if params is None:
params = get_peak_params(run, mnemonic, raw_trace)
if 'enable' in params and params['enable'] == 0:
......@@ -839,7 +846,7 @@ def get_tim_peaks(run, mnemonics=None, merge_with=None,
the calculated peak values (e.g. "MCP2raw" becomes
"MCP2peaks"), merged with Dataset *merge_with* if provided.
"""
return get_digitizer_peaks(run, mnemonics, 'ADQ412', merge_with,
return get_digitizer_peaks(run, mnemonics, merge_with,
bunchPattern, integParams,
keepAllSase)
......@@ -880,13 +887,13 @@ def get_laser_peaks(run, mnemonics=None, merge_with=None,
the calculated peak values (e.g. "FastADC2raw" becomes
"FastADC2peaks").
"""
return get_digitizer_peaks(run, mnemonics, 'FastADC', merge_with,
return get_digitizer_peaks(run, mnemonics, merge_with,
bunchPattern, integParams, False)
def get_digitizer_peaks(run, mnemonics, digitizer, merge_with=None,
bunchPattern='sase3', integParams=None,
keepAllSase=False):
def get_digitizer_peaks(run, mnemonics=None, merge_with=None,
bunchPattern='None', integParams=None,
digitizer=None, keepAllSase=False):
"""
Automatically computes digitizer peaks. Sources can be loaded on the
fly via the mnemonics argument, or processed from an existing data set
......@@ -902,14 +909,12 @@ def get_digitizer_peaks(run, mnemonics, digitizer, merge_with=None,
"MCP3apd"]. If None and no merge_with dataset is provided,
defaults to "MCP2apd" if digitizer is ADQ412 or
"FastADC5raw" if digitizer is FastADC.
digitizer: str
value in {'FastADC', 'ADQ412'}
merge_with: xarray Dataset
If provided, the resulting Dataset will be merged with this
one. The FastADC variables of merge_with (if any) will also be
computed and merged.
bunchPattern: str
'sase1' or 'sase3' or 'scs_ppl', bunch pattern
'sase1' or 'sase3' or 'scs_ppl', 'None': bunch pattern
used to extract peaks.
integParams: dict
dictionary for raw trace integration, e.g.
......@@ -926,11 +931,35 @@ def get_digitizer_peaks(run, mnemonics, digitizer, merge_with=None,
the calculated peak values (e.g. "FastADC2raw" becomes
"FastADC2peaks").
"""
# get the list of mnemonics to process
if mnemonics is None and merge_with is None:
raise ValueError("at least one of mnemonics or merge_with "
"arguments is expected.")
run_mnemonics = mnemonics_for_run(run)
# find digitizer type and get the list of mnemonics to process
def to_processed_name(name):
return name.replace('raw', 'peaks').replace('apd', 'peaks')
mnemonics = mnemonics_to_process(mnemonics, merge_with,
digitizer, to_processed_name)
if isinstance(mnemonics, str):
mnemonics = [mnemonics]
digitizer = digitizer_type(mnemonics[0], run_mnemonics)
else:
# find mnemonics to process in merge_with
for v in merge_with:
if 'FastADC2_' in v:
digitizer = 'FastADC2'
break
if 'FastADC' in v:
digitizer = 'FastADC'
break
if 'MCP' in v:
digitizer = 'ADQ412'
break
if digitizer is None:
log.warning(f'No array with digitizer data '
'to extract. Skipping.')
return merge_with
mnemonics = mnemonics_to_process(mnemonics, merge_with,
digitizer, to_processed_name)
if len(mnemonics) == 0:
log.info(f'No array with unaligned {digitizer} peaks to extract. '
......@@ -939,7 +968,6 @@ def get_digitizer_peaks(run, mnemonics, digitizer, merge_with=None,
else:
log.info(f'Extracting {digitizer} peaks from {mnemonics}.')
run_mnemonics = mnemonics_for_run(run)
# check if bunch pattern table exists
if bool(merge_with) and 'bunchPatternTable' in merge_with:
bpt = merge_with['bunchPatternTable']
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment