Skip to content
Snippets Groups Projects
Commit 7a4075f0 authored by Laurent Mercadier's avatar Laurent Mercadier
Browse files

Merge branch 'no-bpt' into 'master'

Only use bunch pattern table when necessary

See merge request !234
parents de92cf11 2a85e89d
No related branches found
No related tags found
1 merge request!234Only use bunch pattern table when necessary
This diff is collapsed.
......@@ -4,18 +4,17 @@ The data stored in the bunch pattern table (mnemonic *bunchPatternTable*) can be
.. code:: ipython3
import toolbox_scs as tb
import toolbox_scs.misc as tbm
proposalNB = 2511
runNB = 176
proposalNB = 2953
runNB = 507
run, data = tb.load(proposalNB, runNB, "bunchPatternTable")
ppl_mask = tbm.is_ppl(data.bunchPatternTable)
run, ds = tb.load(proposalNB, runNB, "bunchPatternTable")
ppl_mask = tb.is_ppl(ds["bunchPatternTable"])
ppl_mask is a boolean DataArray of dimensions trainId x 2700, where True values indicate where a laser pulse from the PP laser was triggered.
.. note::
The position of the PP laser pulses with respect to that of the SASE 3 pulses is arbitrary. The PP laser pattern always starts at pulse Id 0, while that of SASE 3 can vary, depending on the machine parameters.
The position of the PP laser pulses with respect to that of the SASE 3 pulses is arbitrary. The PP laser pattern always starts at pulse Id 0, while that of SASE 3 can vary, depending on the machine parameters. One can use the ToolBox function `align_ol_to_fel_pId()` to shift the PP laser pulse Ids to match those of SASE 3.
From this mask, one can obtain the number of pulses per train by summing along the 'pulse_slot' dimension:
......@@ -23,3 +22,14 @@ From this mask, one can obtain the number of pulses per train by summing along t
ppl_npulses = ppl_mask.sum(dim='pulse_slot')
There is also the function `get_sase_pId()` that can be used to directly extract the pulse Ids of a particular location:
.. code:: ipython3
ppl_pId = tb.get_sase_pId(run, loc='scs_ppl')
This provides a list of the pulse Ids used during a run but does not track changes in the number of pulses. For this, the mnemonic `npulses_laser` can be loaded to get the number of pulses in each trainId and the mnemonic `laser` can be loaded to get the pulse Ids of the scs_ppl in each trainId:
.. code:: ipython3
run, ds = tb.load(proposalNB, runNB, ['laser', 'npulses_laser'])
......@@ -17,6 +17,7 @@ unreleased
- Improved hRIXS class and utilities :mr:`182`
- Documentation on extracting digitizer peaks, clean up of digitizer functions :mr:`215`
- Improved peak-finding algorithm for digitizer traces :mr:`216`, :mr:`227`
- Only load bunch pattern table when necessary :mr:`234`
- **New Features**
......
......@@ -17,7 +17,7 @@ Loading data in memory is performed as follows:
run, data = tb.load(proposalNr, runNr, fields)
run is an extra_data dataCollection and data is an xarray Dataset containing all variables listed in fields. For convenience, data also contains the variable bunchPatternTable, which is used by other functions of the ToolBox. All variables are aligned by train Id.
run is an extra_data dataCollection and data is an xarray Dataset containing all variables listed in fields. All variables are aligned by train Id.
**Option 2**:
......@@ -26,11 +26,10 @@ run is an extra_data dataCollection and data is an xarray Dataset containing all
import toolbox_scs as tb
# get entry for single data source defined in mnemonics
mnemonic = tb.mnemonics["scannerX"]
proposalNr = 2565
runNr = 19
run, _ = tb.load(proposalNr, runNr)
data = run.get_array(*mnemonic.values())
run = tb.open_run(proposalNr, runNr)
data = tb.get_array(run, "scannerX")
run is an extra_data dataCollection and data an xarray dataArray for a single data source.
......@@ -14,6 +14,9 @@ mnemonics = {
"sase1": ({'source': 'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key': 'sase1.pulseIds.value',
'dim': ['bunchId']},),
"laser": ({'source': 'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key': 'laser.pulseIds.value',
'dim': ['bunchId']},),
"maindump": ({'source': 'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key': 'maindump.pulseIds.value',
'dim': ['bunchId']},),
......@@ -33,6 +36,9 @@ mnemonics = {
"npulses_sase1": ({'source': 'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key': 'sase1.nPulses.value',
'dim': None},),
"npulses_laser": ({'source': 'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key': 'laser.nPulses.value',
'dim': None},),
"bunchPatternTable_SA3": (
{'source': 'SA3_BR_UTC/TSYS/TIMESERVER:outputBunchPattern',
'key': 'data.bunchPatternTable',
......
......@@ -7,14 +7,14 @@
"""
import logging
import numpy as np
import xarray as xr
from ..misc.bunch_pattern_external import is_pulse_at
from ..mnemonics_machinery import (mnemonics_to_process,
mnemonics_for_run)
from ..mnemonics_machinery import mnemonics_for_run
from ..constants import mnemonics as _mnemonics
from ..misc.bunch_pattern import (npulses_has_changed,
get_unique_sase_pId, load_bpt)
from toolbox_scs.load import get_array
__all__ = [
'get_bam',
......@@ -24,7 +24,110 @@ __all__ = [
log = logging.getLogger(__name__)
def get_bam(run, mnemonics=None, merge_with=None, bunchPattern='sase3'):
def get_bam(run, mnemonics=None, merge_with=None, bunchPattern='sase3',
            pulseIds=None):
    """
    Load beam arrival monitor (BAM) data and align their pulse ID
    according to the bunch pattern. Sources can be loaded on the fly
    via the mnemonics argument, or processed from an existing data set
    (merge_with).

    Parameters
    ----------
    run: extra_data.DataCollection
        DataCollection containing the bam data.
    mnemonics: str or list of str
        mnemonics for BAM, e.g. "BAM1932M" or ["BAM414", "BAM1932M"].
        If None, defaults to "BAM1932M" in case no merge_with dataset
        is provided.
    merge_with: xarray Dataset
        If provided, the resulting Dataset will be merged with this
        one. The BAM variables of merge_with (if any) will also be
        selected, aligned and merged.
    bunchPattern: str
        'sase1' or 'sase3' or 'scs_ppl', bunch pattern
        used to extract peaks. The pulse ID dimension will be named
        'sa1_pId', 'sa3_pId' or 'ol_pId', respectively.
    pulseIds: list, 1D array
        Pulse Ids. If None, they are automatically loaded.

    Returns
    -------
    xarray Dataset with pulse-resolved BAM variables aligned,
    merged with Dataset *merge_with* if provided.

    Example
    -------
    >>> import toolbox_scs as tb
    >>> run, ds = tb.load(2711, 303, 'BAM1932S')
    >>> ds['BAM1932S']
    """
    # NOTE(review): nesting below was reconstructed from a flattened diff;
    # verify indentation of the alignment branch against upstream.
    # Select the BAM mnemonics to process: those requested explicitly plus
    # any BAM variable already present in merge_with (substring match).
    bam_mnemos = ['BAM4', 'BAM1']
    m2 = []
    if mnemonics is not None:
        mnemonics = [mnemonics] if isinstance(mnemonics, str) else mnemonics
        for m in mnemonics:
            if any([(k in m) for k in bam_mnemos]):
                m2.append(m)
    if merge_with is not None:
        in_mw = []
        for m, da in merge_with.items():
            # only unaligned BAM arrays (raw 'BAMbunchId' dimension)
            if any([(k in m) for k in bam_mnemos]) and 'BAMbunchId' in da.dims:
                in_mw.append(m)
        m2 += in_mw
    if len(m2) == 0:
        log.info('no BAM mnemonics to process. Skipping.')
        return merge_with
    mnemonics = list(set(m2))
    # Prepare the dataset of non-BAM data to merge with
    if bool(merge_with):
        ds_mw = merge_with.drop(mnemonics, errors='ignore')
    else:
        ds_mw = xr.Dataset()
    ds = xr.Dataset()
    dim_names = {'sase3': 'sa3_pId', 'sase1': 'sa1_pId',
                 'scs_ppl': 'ol_pId'}
    run_mnemonics = mnemonics_for_run(run)
    if pulseIds is None:
        # Fast path: if the pulse count was constant, the pulse ids can be
        # read from run values without loading the bunch pattern table.
        npulses_changed = npulses_has_changed(run, bunchPattern,
                                              run_mnemonics)
        if npulses_changed is False:
            pulseIds = get_unique_sase_pId(run, bunchPattern,
                                           run_mnemonics)
    # NOTE(review): if pulseIds is passed by the caller, the branch above is
    # skipped and `npulses_changed` is never assigned -> NameError below.
    # Confirm intended behavior (probably npulses_changed = False).
    for m in mnemonics:
        if merge_with is not None and m in merge_with:
            da_bam = merge_with[m]
        else:
            da_bam = get_array(run, m)
        # keep every other slot only: BAM records 5400 values per train,
        # i.e. two entries per 4.5 MHz pulse slot — TODO confirm
        da_bam = da_bam.sel(BAMbunchId=slice(0, 5400, 2))
        if npulses_changed is False:
            # constant pulse pattern: select and label slots directly
            da_bam = da_bam.isel(BAMbunchId=pulseIds)
            da_bam = da_bam.assign_coords(BAMbunchId=pulseIds)
            da_bam = da_bam.rename(BAMbunchId=dim_names[bunchPattern])
        else:
            # align the pulse Id via the bunch pattern table
            bpt = load_bpt(run, ds_mw)
            if bpt is not None and len(ds.variables) > 0:
                if bpt.name not in ds_mw:
                    log.warning('Number of pulses changed during '
                                'the run. Loading bunch pattern table.')
                    ds_mw = ds_mw.merge(bpt, join='inner')
                mask = is_pulse_at(bpt, bunchPattern)
                mask = mask.rename({'pulse_slot': dim_names[bunchPattern]})
                da_bam = da_bam.rename(BAMbunchId=dim_names[bunchPattern])
                da_bam = da_bam.where(mask, drop=True)
        # all BAM keys except lowChargeArrivalTime are in fs; convert to ps
        # — TODO confirm units
        if run_mnemonics[m]['key'] != 'data.lowChargeArrivalTime':
            da_bam *= 1e-3
        ds = ds.merge(da_bam, join='inner')
    # merge with non-BAM dataset
    ds = ds_mw.merge(ds, join='inner')
    return ds
'''
def get_bam_old(run, mnemonics=None, merge_with=None, bunchPattern='sase3'):
"""
Load beam arrival monitor (BAM) data and align their pulse ID
according to the bunch pattern. Sources can be loaded on the fly
......@@ -117,6 +220,7 @@ def get_bam(run, mnemonics=None, merge_with=None, bunchPattern='sase3'):
ds = mw_ds.merge(ds, join='inner')
return ds
'''
def get_bam_params(run, mnemo_or_source='BAM1932S'):
......
......@@ -13,8 +13,10 @@ import xarray as xr
import matplotlib.pyplot as plt
from ..misc.bunch_pattern_external import is_sase_1, is_sase_3
from ..mnemonics_machinery import (mnemonics_to_process,
mnemonics_for_run)
from ..misc.bunch_pattern import (npulses_has_changed,
get_unique_sase_pId, load_bpt)
from ..mnemonics_machinery import mnemonics_for_run
from toolbox_scs.load import get_array
__all__ = [
'calibrate_xgm',
......@@ -25,7 +27,160 @@ __all__ = [
log = logging.getLogger(__name__)
def get_xgm(run, mnemonics=None, merge_with=None, keepAllSase=False,
def get_xgm(run, mnemonics=None, merge_with=None,
            indices=slice(0, None), sase3=None):
    """
    Load and/or computes XGM data. Sources can be loaded on the
    fly via the mnemonics argument, or processed from an existing dataset
    (merge_with). The bunch pattern table is used to assign the pulse
    id coordinates if the number of pulses has changed during the run.

    Parameters
    ----------
    run: extra_data.DataCollection
        DataCollection containing the xgm data.
    mnemonics: str or list of str
        mnemonics for XGM, e.g. "SCS_SA3" or ["XTD10_XGM", "SCS_XGM"].
        If None, defaults to "SCS_SA3" in case no merge_with dataset
        is provided.
    merge_with: xarray Dataset
        If provided, the resulting Dataset will be merged with this
        one. The XGM variables of merge_with (if any) will also be
        computed and merged.
    indices: slice, list, 1D array
        Pulse indices of the XGM array in case bunch pattern is missing.
    sase3: list, 1D array
        Pulse Ids of sase3. If None, they are automatically loaded.

    Returns
    -------
    xarray Dataset with pulse-resolved XGM variables aligned,
    merged with Dataset *merge_with* if provided.

    Example
    -------
    >>> import toolbox_scs as tb
    >>> run, ds = tb.load(2212, 213, 'SCS_SA3')
    >>> ds['SCS_SA3']
    """
    # NOTE(review): nesting below was reconstructed from a flattened diff;
    # verify the else-branches against upstream.
    # Select the XGM mnemonics to process: those requested explicitly plus
    # any XGM variable already present in merge_with (substring match).
    xgm_mnemos = ['XTD10_SA', 'XTD10_XGM', 'SCS_SA', 'SCS_XGM']
    m2 = []
    if mnemonics is not None:
        mnemonics = [mnemonics] if isinstance(mnemonics, str) else mnemonics
        for m in mnemonics:
            if any([(k in m) for k in xgm_mnemos]):
                m2.append(m)
    if merge_with is not None:
        in_mw = []
        for m, da in merge_with.items():
            # only unaligned XGM arrays (raw 'XGMbunchId' dimension)
            if any([(k in m) for k in xgm_mnemos]) and 'XGMbunchId' in da.dims:
                in_mw.append(m)
        m2 += in_mw
    if len(m2) == 0:
        log.info('no XGM mnemonics to process. Skipping.')
        return merge_with
    mnemonics = list(set(m2))
    # Prepare the dataset of non-XGM data to merge with
    if bool(merge_with):
        ds_mw = merge_with.drop(mnemonics, errors='ignore')
    else:
        ds_mw = xr.Dataset()
    sase1 = None
    sase1_changed = sase3_changed = False
    run_mnemonics = mnemonics_for_run(run)
    ds = xr.Dataset()
    for m in mnemonics:
        if merge_with is not None and m in merge_with:
            da_xgm = merge_with[m]
        else:
            da_xgm = get_array(run, m)
        # Check (once) whether the pulse count was constant for the SASEs
        # this mnemonic involves; 'XGM' mnemonics carry both SA1 and SA3.
        if sase1 is None and ('XGM' in m or 'SA1' in m):
            sase1_changed = npulses_has_changed(run, 'sase1',
                                                run_mnemonics)
        if sase3 is None and ('XGM' in m or 'SA3' in m):
            sase3_changed = npulses_has_changed(run, 'sase3',
                                                run_mnemonics)
        if sase3_changed is False and sase1_changed is False:
            # fast path: constant pattern, no bunch pattern table needed
            sase1 = get_unique_sase_pId(run, 'sase1', run_mnemonics)
            sase3 = get_unique_sase_pId(run, 'sase3', run_mnemonics)
            ds_xgm = load_xgm_array(run, da_xgm, m, sase1, sase3)
        else:
            # pulse count changed: align using the bunch pattern table
            bpt = load_bpt(run, merge_with)
            if bpt is not None:
                ds_xgm = align_xgm_array(da_xgm, bpt)
                if bpt.name not in ds_mw:
                    log.warning('Number of pulses changed during '
                                'the run. Loading bunch pattern table.')
                    ds_mw = ds_mw.merge(bpt, join='inner')
            else:
                # no bunch pattern table available: drop filler values
                # (0 and 1 mark empty slots) and select `indices`.
                # NOTE(review): this writes NaNs into the underlying
                # array of da_xgm in place — it can mutate merge_with
                # data; confirm intended.
                xgm_val = da_xgm.values
                xgm_val[xgm_val == 1] = np.nan
                xgm_val[xgm_val == 0] = np.nan
                da_xgm.values = xgm_val
                da_xgm = da_xgm.dropna(dim='XGMbunchId', how='all')
                ds_xgm = da_xgm.fillna(0).sel(XGMbunchId=indices).to_dataset()
        ds = ds.merge(ds_xgm, join='inner')
    # merge with non-XGM dataset
    ds = ds_mw.merge(ds, join='inner')
    return ds
def load_xgm_array(run, xgm, mnemonic, sase1, sase3):
    """
    From a raw array xgm, extract and assign pulse Id coordinates
    when the number of pulses does not change during the run.
    If 'XGM' in mnemonic, the data is split in two variables
    'SA1' and 'SA3'.

    Parameters
    ----------
    run: extra_data.DataCollection
        DataCollection containing the xgm data.
    xgm: xarray.DataArray
        the raw XGM array
    mnemonic: str
        the XGM mnemonic
    sase1: list or 1D array
        the sase1 pulse ids
    sase3: list or 1D array
        the sase3 pulse ids

    Returns
    -------
    ds_xgm: xarray.Dataset
        the dataset containing the aligned XGM variable(s).
    """
    # Values 0 and 1 mark unused pulse slots in the raw XGM data.
    # Use `where` instead of writing NaNs into xgm.values: the previous
    # in-place assignment mutated the caller's underlying array (e.g.
    # data coming from merge_with).
    xgm = xgm.where((xgm != 0) & (xgm != 1))
    # drop slots that are empty in every train, zero-fill remaining gaps
    xgm = xgm.dropna(dim='XGMbunchId', how='all')
    xgm = xgm.fillna(0)
    if 'XGM' in mnemonic:
        # Combined SA1 + SA3 signal: split it into one variable per SASE.
        # sase1_3 is sorted, so searchsorted yields each pulse id's
        # position in the combined list (replaces the per-element
        # argwhere scan, which was O(n^2)).
        sase1_3 = np.sort(np.concatenate([sase1, sase3]))
        sase1_idx = np.searchsorted(sase1_3, sase1)
        sase3_idx = np.searchsorted(sase1_3, sase3)
        xgm_sa1 = xgm.isel(XGMbunchId=sase1_idx).rename(XGMbunchId='sa1_pId')
        xgm_sa1 = xgm_sa1.assign_coords(sa1_pId=sase1)
        xgm_sa1 = xgm_sa1.rename(mnemonic.replace('XGM', 'SA1'))
        xgm_sa3 = xgm.isel(XGMbunchId=sase3_idx).rename(XGMbunchId='sa3_pId')
        xgm_sa3 = xgm_sa3.assign_coords(sa3_pId=sase3)
        xgm_sa3 = xgm_sa3.rename(mnemonic.replace('XGM', 'SA3'))
        xgm = xr.merge([xgm_sa1, xgm_sa3])
    elif 'SA1' in mnemonic:
        # SA1-only signal: label slots with the sase1 pulse ids
        xgm = xgm.rename(XGMbunchId='sa1_pId')
        xgm = xgm.assign_coords(sa1_pId=sase1).rename(mnemonic)
        xgm = xgm.to_dataset()
    elif 'SA3' in mnemonic:
        # SA3-only signal: label slots with the sase3 pulse ids
        xgm = xgm.rename(XGMbunchId='sa3_pId')
        xgm = xgm.assign_coords(sa3_pId=sase3).rename(mnemonic)
        xgm = xgm.to_dataset()
    return xgm
'''
def get_xgm_old(run, mnemonics=None, merge_with=None, keepAllSase=False,
indices=slice(0, None)):
"""
Load and/or computes XGM data. Sources can be loaded on the
......@@ -103,6 +258,7 @@ def get_xgm(run, mnemonics=None, merge_with=None, keepAllSase=False,
arr = arr.where(arr != 1., drop=True).sel(XGMbunchId=indices)
ds = ds.merge(arr, join='inner')
return ds
'''
def align_xgm_array(xgm_arr, bpt):
......
......@@ -20,6 +20,8 @@ from .constants import mnemonics as _mnemonics
from .mnemonics_machinery import mnemonics_for_run
from .util.exceptions import ToolBoxValueError
import toolbox_scs.detectors as tbdet
from .misc.bunch_pattern import (npulses_has_changed,
get_unique_sase_pId, load_bpt)
__all__ = [
'concatenateRuns',
......@@ -55,7 +57,7 @@ def load(proposalNB=None, runNB=None,
laser_bp=None,
):
"""
Load a run and extract the data. Output is an xarray with aligned
Load a run and extract the data. Output is an xarray with aligned
trainIds.
Parameters
......@@ -97,9 +99,9 @@ def load(proposalNB=None, runNB=None,
'FastADC3peaks') and aligns the pulse Id according to the fadc_bp bunch
pattern.
extract_fadc2: bool
If True, extracts the peaks from FastADC2 variables (e.g. 'FastADC2_5raw',
'FastADC2_3peaks') and aligns the pulse Id according to the fadc2_bp bunch
pattern.
If True, extracts the peaks from FastADC2 variables (e.g.
'FastADC2_5raw', 'FastADC2_3peaks') and aligns the pulse Id according
to the fadc2_bp bunch pattern.
extract_xgm: bool
If True, extracts the values from XGM variables (e.g. 'SCS_SA3',
'XTD10_XGM') and aligns the pulse Id with the sase1 / sase3 bunch
......@@ -152,12 +154,18 @@ def load(proposalNB=None, runNB=None,
data_arrays = []
run_mnemonics = mnemonics_for_run(run)
# load pulse pattern info
bpt = load_bpt(run, run_mnemonics=run_mnemonics)
if bpt is None:
log.warning('Bunch pattern table not found in run. Skipping!')
# load pulse pattern info only if number of sase 3 pulses changed
sase3 = None
if npulses_has_changed(run, run_mnemonics=run_mnemonics) is False:
sase3 = get_unique_sase_pId(run, run_mnemonics=run_mnemonics)
else:
data_arrays.append(bpt)
log.warning('Number of pulses changed during the run. '
'Loading bunch pattern table.')
bpt = load_bpt(run, run_mnemonics=run_mnemonics)
if bpt is None:
log.warning('Bunch pattern table not found in run. Skipping!')
else:
data_arrays.append(bpt)
for f in fields:
if type(f) == dict:
......@@ -216,7 +224,7 @@ def load(proposalNB=None, runNB=None,
data = xr.merge(data_arrays, join='inner')
data.attrs['runFolder'] = runFolder
# backward compatibility with old-defined variables:
if extract_tim is not None:
extract_adq412 = extract_tim
......@@ -226,13 +234,14 @@ def load(proposalNB=None, runNB=None,
adq412_bp = tim_bp
if laser_bp is not None:
fadc_bp = laser_bp
adq412 = [k for k in run_mnemonics if 'MCP' in k and k in data]
if extract_adq412 and len(adq412) > 0:
data = tbdet.get_digitizer_peaks(run, mnemonics=adq412, merge_with=data,
bunchPattern=adq412_bp)
data = tbdet.get_digitizer_peaks(run, mnemonics=adq412,
merge_with=data,
bunchPattern=adq412_bp)
fadc = [k for k in run_mnemonics if ('FastADC' in k)
fadc = [k for k in run_mnemonics if ('FastADC' in k)
and ('FastADC2_' not in k) and (k in data)]
if extract_fadc and len(fadc) > 0:
data = tbdet.get_digitizer_peaks(run, mnemonics=fadc, merge_with=data,
......@@ -247,7 +256,8 @@ def load(proposalNB=None, runNB=None,
'SCS_SA1', 'SCS_SA1_sigma', 'SCS_SA3', 'SCS_SA3_sigma']
xgm = [k for k in xgm if k in data]
if extract_xgm and len(xgm) > 0:
data = tbdet.get_xgm(run, mnemonics=xgm, merge_with=data)
data = tbdet.get_xgm(run, mnemonics=xgm, merge_with=data,
sase3=sase3)
bam = [k for k in run_mnemonics if 'BAM' in k and k in data]
if extract_bam and len(bam) > 0:
......@@ -492,20 +502,3 @@ def concatenateRuns(runs):
for k in orderedRuns[0].attrs.keys():
result.attrs[k] = [run.attrs[k] for run in orderedRuns]
return result
def load_bpt(run, merge_with=None, run_mnemonics=None):
    """
    Fetch the bunch pattern table for a run.

    Prefers a table already present in *merge_with*; otherwise loads it
    from the DataCollection using the first matching mnemonic
    ('bunchPatternTable', then 'bunchPatternTable_SA3'). Returns None
    when no table can be found.
    """
    if run_mnemonics is None:
        run_mnemonics = mnemonics_for_run(run)
    for key in ('bunchPatternTable', 'bunchPatternTable_SA3'):
        # reuse the table if the provided dataset already carries it
        if bool(merge_with) and key in merge_with:
            log.debug(f'Using {key} from merge_with dataset.')
            return merge_with[key]
        if key in run_mnemonics:
            bpt = run.get_array(*run_mnemonics[key].values(),
                                name='bunchPatternTable')
            log.debug(f'Loaded {key} from DataCollection.')
            return bpt
    log.debug('Could not find bunch pattern table.')
    return None
# -*- coding: utf-8 -*-
""" Toolbox for SCS.
Various utility functions to quickly process data measured at the SCS instruments.
Various utility functions to quickly process data
measured at the SCS instruments.
Copyright (2019) SCS Team.
"""
import os
import logging
import numpy as np
import xarray as xr
......@@ -15,13 +17,172 @@ from extra_data import RunDirectory
# import and hide variable, such that it does not alter namespace.
from ..constants import mnemonics as _mnemonics_bp
from ..mnemonics_machinery import mnemonics_for_run
from .bunch_pattern_external import is_pulse_at
__all__ = [
'extractBunchPattern',
'get_sase_pId',
'npulses_has_changed',
'pulsePatternInfo',
'repRate'
'repRate',
]
log = logging.getLogger(__name__)
def npulses_has_changed(run, loc='sase3', run_mnemonics=None):
    """
    Checks if the number of pulses has changed during the run for
    a specific location `loc` (='sase1', 'sase3', 'scs_ppl' or 'laser')
    If the source is not found in the run, returns True.

    Parameters
    ----------
    run: extra_data.DataCollection
        DataCollection containing the data.
    loc: str
        The location where to check: {'sase1', 'sase3', 'scs_ppl'}
    run_mnemonics: dict
        the mnemonics for the run (see `mnemonics_for_run`)

    Returns
    -------
    ret: bool
        True if the number of pulses has changed or the source was not
        found, False if the number of pulses did not change.

    Raises
    ------
    ValueError
        if `loc` is not one of 'sase1', 'sase3', 'laser', 'scs_ppl'.
    """
    sase_list = ['sase1', 'sase3', 'laser', 'scs_ppl']
    if loc not in sase_list:
        # fixed message: "Unknow" typo and missing space before the list
        raise ValueError(f"Unknown sase location '{loc}'. Expected one "
                         f"in {sase_list}")
    if run_mnemonics is None:
        run_mnemonics = mnemonics_for_run(run)
    # the optical laser pattern is recorded under the 'laser' mnemonic
    if loc == 'scs_ppl':
        loc = 'laser'
    if loc not in run_mnemonics:
        return True
    source = run_mnemonics[loc]['source']
    if run_mnemonics[loc]['key'] not in run[source].keys():
        log.info(f'Mnemonic {loc} not found in run.')
        return True
    npulses = run.get_array(*run_mnemonics['npulses_' + loc].values())
    # a single unique value means the pulse count never changed
    if len(np.unique(npulses)) == 1:
        return False
    return True
def get_unique_sase_pId(run, loc='sase3', run_mnemonics=None):
    """
    Assuming that the number of pulses did not change during the run,
    returns the pulse Ids as the run value of the sase mnemonic.

    Parameters
    ----------
    run: extra_data.DataCollection
        DataCollection containing the data.
    loc: str
        The location where to check: {'sase1', 'sase3', 'scs_ppl'}
    run_mnemonics: dict
        the mnemonics for the run (see `mnemonics_for_run`)

    Returns
    -------
    pulseIds: np.array
        the pulse ids at the specified location. Returns None if the
        mnemonic is not in the run.
    """
    if run_mnemonics is None:
        run_mnemonics = mnemonics_for_run(run)
    # the optical laser pattern is recorded under the 'laser' mnemonic
    key = 'laser' if loc == 'scs_ppl' else loc
    if key not in run_mnemonics:
        # bunch pattern not recorded
        return None
    count_mnemo = run_mnemonics['npulses_' + key]
    npulses = run.get_run_value(count_mnemo['source'], count_mnemo['key'])
    pid_mnemo = run_mnemonics[key]
    all_ids = run.get_run_value(pid_mnemo['source'], pid_mnemo['key'])
    # the recorded id array is padded; only the first npulses are valid
    return all_ids[:npulses]
def get_sase_pId(run, loc='sase3', run_mnemonics=None,
                 bpt=None, merge_with=None):
    """
    Returns the pulse Ids of the specified `loc` during a run.
    If the number of pulses has changed during the run, it loads the
    bunch pattern table and extract all pulse Ids used.

    Parameters
    ----------
    run: extra_data.DataCollection
        DataCollection containing the data.
    loc: str
        The location where to check: {'sase1', 'sase3', 'scs_ppl'}
    run_mnemonics: dict
        the mnemonics for the run (see `mnemonics_for_run`)
    bpt: 2D-array
        The bunch pattern table. Used only if the number of pulses
        has changed. If None, it is loaded on the fly.
    merge_with: xarray.Dataset
        dataset that may contain the bunch pattern table to use in
        case the number of pulses has changed.

    Returns
    -------
    pulseIds: np.array
        the pulse ids at the specified location. Returns None if the
        mnemonic is not in the run.
    """
    # Fast path: constant pulse count -> read the ids from run values.
    if not npulses_has_changed(run, loc, run_mnemonics):
        return get_unique_sase_pId(run, loc, run_mnemonics)
    # Slow path: collect every pulse slot ever used from the table.
    if bpt is None:
        bpt = load_bpt(run, merge_with, run_mnemonics)
    if bpt is None:
        return None
    mask = is_pulse_at(bpt, loc)
    # column indices (pulse slots) with at least one hit over the run
    return np.unique(np.nonzero(mask.values)[1])
def load_bpt(run, merge_with=None, run_mnemonics=None):
    """
    Load the bunch pattern table. It returns the one contained in
    merge_with if possible.

    Parameters
    ----------
    run: extra_data.DataCollection
        DataCollection containing the data.
    merge_with: xarray.Dataset
        dataset that may contain the bunch pattern table.
    run_mnemonics: dict
        the mnemonics for the run (see `mnemonics_for_run`)

    Returns
    -------
    bpt: xarray.Dataset
        the bunch pattern table as specified by the mnemonics
        'bunchPatternTable'
    """
    if run_mnemonics is None:
        run_mnemonics = mnemonics_for_run(run)
    # Prefer the SCS timeserver table, fall back to the SASE 3 one.
    for key in ['bunchPatternTable', 'bunchPatternTable_SA3']:
        if merge_with is not None and key in merge_with:
            log.debug(f'Using {key} from merge_with dataset.')
            return merge_with[key]
        if key in run_mnemonics:
            bpt = run.get_array(*run_mnemonics[key].values(),
                                name='bunchPatternTable')
            log.debug(f'Loaded {key} from DataCollection.')
            if merge_with is not None:
                # NOTE(review): this rebinds the *local* name only —
                # xarray's Dataset.merge returns a new object, so the
                # caller's merge_with is NOT modified. Confirm whether
                # the merge-back was meant to propagate.
                merge_with = merge_with.merge(bpt, join='inner')
            return bpt
    log.debug('Could not find bunch pattern table.')
    return None
def extractBunchPattern(bp_table=None, key='sase3', runDir=None):
''' generate the bunch pattern and number of pulses of a source directly from the
......@@ -177,13 +338,16 @@ def repRate(data=None, runNB=None, proposalNB=None, key='sase3'):
''' Calculates the pulse repetition rate (in kHz) in sase
according to the bunch pattern and assuming a grid of
4.5 MHz.
Inputs:
-------
data: xarray Dataset containing pulse pattern, needed if runNB is none
runNB: int or str, run number. Needed if data is None
proposal: int or str, proposal where to find the run. Needed if data is None
key: str in [sase1, sase2, sase3, scs_ppl], source for which the
repetition rate is calculated
Output:
-------
f: repetition rate in kHz
'''
if runNB is None and data is None:
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment