diff --git a/Load.py b/Load.py
index 2eed6eccf1fe87514824fd32b723424a31728176..0d343b71f1f0955707c6f26ccd00f7f386579872 100644
--- a/Load.py
+++ b/Load.py
@@ -10,6 +10,7 @@ from karabo_data import by_index, RunDirectory
 from karabo_data.read_machinery import find_proposal
 import xarray as xr
 import os
+from ToolBox.bunch_pattern import extractBunchPattern
 
 mnemonics = {
     # Machine
@@ -28,6 +29,9 @@ mnemonics = {
     "bunchpattern": {'source':'SCS_RR_UTC/TSYS/TIMESERVER',
                      'key':'readBunchPatternTable.value',
                      'dim':None},
+    "bunchPatternTable": {'source':'SCS_RR_UTC/TSYS/TIMESERVER',
+                          'key':'bunchPatternTable.value',
+                          'dim':['pulse_slot']},
     "npulses_sase3": {'source':'SCS_RR_UTC/MDL/BUNCH_DECODER',
                       'key':'sase3.nPulses.value',
                       'dim':None},
@@ -377,7 +381,7 @@ mnemonics = {
 }
 
 def load(fields, runNB, proposalNB, subFolder='raw', display=False, validate=False,
-         subset=by_index[:], rois={}):
+         subset=by_index[:], rois={}, useBPTable=True):
     """ Load a run and extract the data. Output is an xarray with aligned trainIds
 
         Inputs:
@@ -396,6 +400,9 @@ def load(fields, runNB, proposalNB, subFolder='raw', display=False, validate=Fal
                 names, for example {'fastccd':{'ref':{'roi':by_index[730:890, 535:720],
                 'dim': ['ref_x', 'ref_y']}, 'sam':{'roi':by_index[1050:1210, 535:720],
                 'dim': ['sam_x', 'sam_y']}}}
+            useBPTable: If True, uses the raw bunch pattern table to extract the sase
+                pulse numbers and indices in the trains. If False, loads the data from
+                the BUNCH_DECODER middle layer device.
 
         Outputs:
             res: an xarray DataSet with aligned trainIds
@@ -417,8 +424,21 @@ def load(fields, runNB, proposalNB, subFolder='raw', display=False, validate=Fal
     keys = []
     vals = []
 
-    # always load pulse pattern infos
-    fields += ["sase1", "sase3", "npulses_sase3", "npulses_sase1"]
+    # load pulse pattern info
+    if useBPTable:
+        bp_mnemo = mnemonics['bunchPatternTable']
+        if bp_mnemo['source'] not in run.all_sources:
+            print('Source {} not found in run. Skipping!'.format(
+                mnemonics['bunchPatternTable']['source']))
+        else:
+            bp_table = run.get_array(bp_mnemo['source'], bp_mnemo['key'],
+                                     extra_dims=bp_mnemo['dim'])
+            sase1, npulses_sase1 = extractBunchPattern(bp_table, 'sase1')
+            sase3, npulses_sase3 = extractBunchPattern(bp_table, 'sase3')
+            keys += ["sase1", "npulses_sase1", "sase3", "npulses_sase3"]
+            vals += [sase1, npulses_sase1, sase3, npulses_sase3]
+    else:
+        fields += ["sase1", "sase3", "npulses_sase3", "npulses_sase1"]
 
     for f in fields:
 
@@ -456,7 +476,7 @@ def load(fields, runNB, proposalNB, subFolder='raw', display=False, validate=Fal
             for nk,nv in rois[k].items():
                 vals.append(run.get_array(v['source'], v['key'], extra_dims=nv['dim'], roi=nv['roi']))
                 keys.append(nk)
-    
+
     aligned_vals = xr.align(*vals, join='inner')
     result = dict(zip(keys, aligned_vals))
     result = xr.Dataset(result)
@@ -486,3 +506,4 @@ def concatenateRuns(runs):
     for k in orderedRuns[0].attrs.keys():
         result.attrs[k] = [run.attrs[k] for run in orderedRuns]
     return result
+
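
A minimal usage sketch of the new useBPTable switch (not part of the patch; the run and
proposal numbers below are placeholders):

    from ToolBox.Load import load

    # With useBPTable=True (the default), the returned dataset carries 'sase1',
    # 'sase3', 'npulses_sase1' and 'npulses_sase3' decoded from the raw bunch
    # pattern table rather than from the BUNCH_DECODER middle layer device.
    data = load(['bunchpattern'], runNB=125, proposalNB=2212, useBPTable=True)
    print(data['npulses_sase3'][0].values, data['sase3'][0].values)
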
diff --git a/__init__.py b/__init__.py
index 4af2bd2dcf5cdc52da5618e658f1064cee5a412f..ee34d75e82c0a49e2a417f4f11842be049b66376 100644
--- a/__init__.py
+++ b/__init__.py
@@ -6,4 +6,5 @@ from ToolBox.Laser_utils import *
 from ToolBox.DSSC import DSSC
 from ToolBox.azimuthal_integrator import *
 from ToolBox.DSSC1module import *
+from ToolBox.bunch_pattern import *
 from ToolBox.FastCCD import *
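
With bunch_pattern re-exported in __init__.py, its helpers become reachable from the
package namespace. A hedged sketch (the run path is a placeholder, and it assumes the
package also exposes the mnemonics dict from Load.py, as extractBunchPattern itself expects):

    import ToolBox as tb
    from karabo_data import RunDirectory

    run = RunDirectory('/path/to/proposal/raw/r0125')  # placeholder path
    # decode SASE 3 pulse indices and per-train pulse counts straight from the run
    sase3, npulses_sase3 = tb.extractBunchPattern(runDir=run, key='sase3')
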
diff --git a/bunch_pattern.py b/bunch_pattern.py
new file mode 100644
index 0000000000000000000000000000000000000000..8e2b8557a8a02bf412a5f12f6a0dec48b707341b
--- /dev/null
+++ b/bunch_pattern.py
@@ -0,0 +1,182 @@
+# -*- coding: utf-8 -*-
+""" Toolbox for SCS.
+
+    Various utility functions to quickly process data measured at the SCS instruments.
+
+    Copyright (2019) SCS Team.
+"""
+
+import numpy as np
+import xarray as xr
+import matplotlib.pyplot as plt
+import ToolBox as tb
+
+def extractBunchPattern(bp_table=None, key='sase3', runDir=None):
+    ''' generate the bunch pattern and number of pulses of a source directly from the
+        bunch pattern table and not using the MDL device BUNCH_DECODER. This is
+        inspired by the euxfel_bunch_pattern package,
+        https://git.xfel.eu/gitlab/karaboDevices/euxfel_bunch_pattern
+        Inputs:
+            bp_table: DataArray corresponding to the mnemonic "bunchPatternTable".
+                If None, the bunch pattern table is loaded using runDir.
+            key: str, ['sase1', 'sase2', 'sase3', 'scs_ppl']
+            runDir: karabo_data run directory. Required only if bp_table is None.
+
+        Outputs:
+            bunchPattern: DataArray containing indices of the sase/laser pulses for
+                each train
+            npulses: DataArray containing the number of pulses for each train
+
+    '''
+    keys=['sase1', 'sase2', 'sase3', 'scs_ppl']
+    if key not in keys:
+        raise ValueError(f'Invalid key "{key}", possible values are {keys}')
+    if bp_table is None:
+        if runDir is None:
+            raise ValueError('bp_table and runDir cannot both be None')
+        bp_mnemo = tb.mnemonics['bunchPatternTable']
+        if bp_mnemo['source'] not in runDir.all_sources:
+            raise ValueError('Source {} not found in run'.format(
+                bp_mnemo['source']))
+        else:
+            bp_table = runDir.get_array(bp_mnemo['source'],bp_mnemo['key'],
+                                        extra_dims=bp_mnemo['dim'])
+    # define relevant masks, see euxfel_bunch_pattern package for details
+    DESTINATION_MASK = 0xf << 18
+    DESTINATION_T4D = 4 << 18         # SASE1/3 dump
+    DESTINATION_T5D = 2 << 18         # SASE2 dump
+    PHOTON_LINE_DEFLECTION = 1 << 27  # Soft kick (e.g. SA3)
+    LASER_SEED6 = 1 << 13
+    if 'sase' in key:
+        sase = int(key[4])
+        destination = DESTINATION_T5D if (sase == 2) else DESTINATION_T4D
+        matched = (bp_table & DESTINATION_MASK) == destination
+        if sase == 1:
+            # Pulses to SASE 1 when soft kick is off
+            matched &= (bp_table & PHOTON_LINE_DEFLECTION) == 0
+        elif sase == 3:
+            # Pulses to SASE 3 when soft kick is on
+            matched &= (bp_table & PHOTON_LINE_DEFLECTION) != 0
+    elif key=='scs_ppl':
+        matched = (bp_table & LASER_SEED6) != 0
+
+    # create table of indices where bunch pattern and mask match
+    nz = np.nonzero(matched.values)
+    dim_pId = matched.shape[1]
+    bunchPattern = np.ones(matched.shape, dtype=np.uint64)*dim_pId
+    bunchPattern[nz] = nz[1]
+    bunchPattern = np.sort(bunchPattern)
+    npulses = np.count_nonzero(bunchPattern<dim_pId, axis=1)
+    bunchPattern[bunchPattern == dim_pId] = 0
+
+    bunchPattern = xr.DataArray(bunchPattern[:,:1000], dims=['trainId', 'bunchId'],
+                                coords={'trainId':matched.trainId},
+                                name=key)
+    npulses = xr.DataArray(npulses, dims=['trainId'],
+                           coords={'trainId':matched.trainId},
+                           name=f'npulses_{key}')
+    return bunchPattern, npulses
+
+
+def pulsePatternInfo(data, plot=False):
+    ''' display general information on the pulse patterns operated by SASE1 and SASE3.
+        This is useful to track changes of number of pulses or mode of operation of
+        SASE1 and SASE3. It also determines which SASE comes first in the train and
+        the minimum separation between the two SASE sub-trains.
+
+        Inputs:
+            data: xarray Dataset containing pulse pattern info from the bunch decoder MDL:
+                {'sase1, sase3', 'npulses_sase1', 'npulses_sase3'}
+            plot: bool enabling/disabling the plotting of the pulse patterns
+
+        Outputs:
+            print of pulse pattern info. If plot==True, plot of the pulse pattern.
+    '''
+    #Which SASE comes first?
+    npulses_sa3 = data['npulses_sase3']
+    npulses_sa1 = data['npulses_sase1']
+    dedicated = False
+    if np.all(npulses_sa1.where(npulses_sa3 !=0, drop=True) == 0):
+        dedicated = True
+        print('No SASE 1 pulses during SASE 3 operation')
+    if np.all(npulses_sa3.where(npulses_sa1 !=0, drop=True) == 0):
+        dedicated = True
+        print('No SASE 3 pulses during SASE 1 operation')
+    if dedicated==False:
+        pulseIdmin_sa1 = data['sase1'].where(npulses_sa1 != 0).where(data['sase1']>1).min().values
+        pulseIdmax_sa1 = data['sase1'].where(npulses_sa1 != 0).where(data['sase1']>1).max().values
+        pulseIdmin_sa3 = data['sase3'].where(npulses_sa3 != 0).where(data['sase3']>1).min().values
+        pulseIdmax_sa3 = data['sase3'].where(npulses_sa3 != 0).where(data['sase3']>1).max().values
+        #print(pulseIdmin_sa1, pulseIdmax_sa1, pulseIdmin_sa3, pulseIdmax_sa3)
+        if pulseIdmin_sa1 > pulseIdmax_sa3:
+            t = 0.220*(pulseIdmin_sa1 - pulseIdmax_sa3 + 1)
+            print('SASE 3 pulses come before SASE 1 pulses (minimum separation %.1f µs)'%t)
+        elif pulseIdmin_sa3 > pulseIdmax_sa1:
+            t = 0.220*(pulseIdmin_sa3 - pulseIdmax_sa1 + 1)
+            print('SASE 1 pulses come before SASE 3 pulses (minimum separation %.1f µs)'%t)
+        else:
+            print('Interleaved mode')
+
+    #What is the pulse pattern of each SASE?
+    for key in['sase3', 'sase1']:
+        print('\n*** %s pulse pattern: ***'%key.upper())
+        npulses = data['npulses_%s'%key]
+        sase = data[key]
+        if not np.all(npulses == npulses[0]):
+            print('Warning: number of pulses per train changed during the run!')
+        #take the derivative along the trainId to track changes in pulse number:
+        diff = npulses.diff(dim='trainId')
+        #only keep trainIds where a change occurred:
+        diff = diff.where(diff !=0, drop=True)
+        #get a list of indices where a change occurred:
+        idx_change = np.argwhere(np.isin(npulses.trainId.values,
+                                         diff.trainId.values, assume_unique=True))[:,0]
+        #add index 0 to get the initial pulse number per train:
+        idx_change = np.insert(idx_change, 0, 0)
+        print('npulses\tindex From\tindex To\ttrainId From\ttrainId To\trep. rate [kHz]')
+        for i,idx in enumerate(idx_change):
+            n = npulses[idx]
+            idxFrom = idx
+            trainIdFrom = npulses.trainId[idx]
+            if i < len(idx_change)-1:
+                idxTo = idx_change[i+1]-1
+            else:
+                idxTo = npulses.shape[0]-1
+            trainIdTo = npulses.trainId[idxTo]
+            if n <= 1:
+                print('%i\t%i\t\t%i\t\t%i\t%i'%(n, idxFrom, idxTo, trainIdFrom, trainIdTo))
+            else:
+                f = 1/((sase[idxFrom,1] - sase[idxFrom,0])*222e-6)
+                print('%i\t%i\t\t%i\t\t%i\t%i\t%.0f'%(n, idxFrom, idxTo, trainIdFrom, trainIdTo, f))
+        print('\n')
+    if plot:
+        plt.figure(figsize=(6,3))
+        plt.plot(data['npulses_sase3'].trainId, data['npulses_sase3'], 'o-',
+                 ms=3, label='SASE 3')
+        plt.xlabel('trainId')
+        plt.ylabel('pulses per train')
+        plt.plot(data['npulses_sase1'].trainId, data['npulses_sase1'], '^-',
+                 ms=3, color='C2', label='SASE 1')
+        plt.legend()
+        plt.tight_layout()
+
+
+def repRate(data, sase='sase3'):
+    ''' Calculates the pulse repetition rate (in kHz) in sase
+        according to the bunch pattern and assuming a grid of
+        4.5 MHz.
+        Inputs:
+            data: xarray Dataset containing pulse pattern
+            sase: sase in which the repetition rate is
+                calculated (1,2 or 3)
+        Output:
+            f: repetition rate in kHz
+    '''
+    assert sase in data, 'key "{}" not found in data!'.format(sase)
+    sase = data[sase].where(data['npulses_{}'.format(sase)]>1,
+                            drop=True).values
+    if len(sase)==0:
+        print('Not enough pulses to extract repetition rate')
+        return 0
+    f = 1/((sase[0,1] - sase[0,0])*12e-3/54.1666667)
+    return f
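
A short usage sketch tying the new pieces together (not part of the patch; run and
proposal numbers are placeholders). It decodes the optical laser pulses from the table
loaded through the "bunchPatternTable" mnemonic and prints the SASE 3 repetition rate:

    from ToolBox.Load import load
    from ToolBox.bunch_pattern import extractBunchPattern, repRate, pulsePatternInfo

    data = load(['bunchPatternTable'], runNB=125, proposalNB=2212)
    # pulse indices of the SCS pump-probe laser, decoded from the same table
    laser, npulses_laser = extractBunchPattern(data['bunchPatternTable'], key='scs_ppl')
    # summary of the SASE 1 / SASE 3 patterns and the SASE 3 repetition rate
    pulsePatternInfo(data)
    print('SASE 3 rep. rate: %.0f kHz' % repRate(data, 'sase3'))
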
diff --git a/xgm.py b/xgm.py
index dbc47a40921ffb49aabe0c9d392ea15a75807b5d..d38165e6b2485f86b598aa00e89813f4aee6b94e 100644
--- a/xgm.py
+++ b/xgm.py
@@ -10,111 +10,6 @@ import numpy as np
 import xarray as xr
 from scipy.signal import find_peaks
 
-# Machine
-def pulsePatternInfo(data, plot=False):
-    ''' display general information on the pulse patterns operated by SASE1 and SASE3.
-        This is useful to track changes of number of pulses or mode of operation of
-        SASE1 and SASE3. It also determines which SASE comes first in the train and
-        the minimum separation between the two SASE sub-trains.
-
-        Inputs:
-            data: xarray Dataset containing pulse pattern info from the bunch decoder MDL:
-                {'sase1, sase3', 'npulses_sase1', 'npulses_sase3'}
-            plot: bool enabling/disabling the plotting of the pulse patterns
-
-        Outputs:
-            print of pulse pattern info. If plot==True, plot of the pulse pattern.
-    '''
-    #Which SASE comes first?
-    npulses_sa3 = data['npulses_sase3']
-    npulses_sa1 = data['npulses_sase1']
-    dedicated = False
-    if np.all(npulses_sa1.where(npulses_sa3 !=0, drop=True) == 0):
-        dedicated = True
-        print('No SASE 1 pulses during SASE 3 operation')
-    if np.all(npulses_sa3.where(npulses_sa1 !=0, drop=True) == 0):
-        dedicated = True
-        print('No SASE 3 pulses during SASE 1 operation')
-    if dedicated==False:
-        pulseIdmin_sa1 = data['sase1'].where(npulses_sa1 != 0).where(data['sase1']>1).min().values
-        pulseIdmax_sa1 = data['sase1'].where(npulses_sa1 != 0).where(data['sase1']>1).max().values
-        pulseIdmin_sa3 = data['sase3'].where(npulses_sa3 != 0).where(data['sase3']>1).min().values
-        pulseIdmax_sa3 = data['sase3'].where(npulses_sa3 != 0).where(data['sase3']>1).max().values
-        #print(pulseIdmin_sa1, pulseIdmax_sa1, pulseIdmin_sa3, pulseIdmax_sa3)
-        if pulseIdmin_sa1 > pulseIdmax_sa3:
-            t = 0.220*(pulseIdmin_sa1 - pulseIdmax_sa3 + 1)
-            print('SASE 3 pulses come before SASE 1 pulses (minimum separation %.1f µs)'%t)
-        elif pulseIdmin_sa3 > pulseIdmax_sa1:
-            t = 0.220*(pulseIdmin_sa3 - pulseIdmax_sa1 + 1)
-            print('SASE 1 pulses come before SASE 3 pulses (minimum separation %.1f µs)'%t)
-        else:
-            print('Interleaved mode')
-
-    #What is the pulse pattern of each SASE?
-    for key in['sase3', 'sase1']:
-        print('\n*** %s pulse pattern: ***'%key.upper())
-        npulses = data['npulses_%s'%key]
-        sase = data[key]
-        if not np.all(npulses == npulses[0]):
-            print('Warning: number of pulses per train changed during the run!')
-        #take the derivative along the trainId to track changes in pulse number:
-        diff = npulses.diff(dim='trainId')
-        #only keep trainIds where a change occured:
-        diff = diff.where(diff !=0, drop=True)
-        #get a list of indices where a change occured:
-        idx_change = np.argwhere(np.isin(npulses.trainId.values,
-                                         diff.trainId.values, assume_unique=True))[:,0]
-        #add index 0 to get the initial pulse number per train:
-        idx_change = np.insert(idx_change, 0, 0)
-        print('npulses\tindex From\tindex To\ttrainId From\ttrainId To\trep. rate [kHz]')
-        for i,idx in enumerate(idx_change):
-            n = npulses[idx]
-            idxFrom = idx
-            trainIdFrom = npulses.trainId[idx]
-            if i < len(idx_change)-1:
-                idxTo = idx_change[i+1]-1
-            else:
-                idxTo = npulses.shape[0]-1
-            trainIdTo = npulses.trainId[idxTo]
-            if n <= 1:
-                print('%i\t%i\t\t%i\t\t%i\t%i'%(n, idxFrom, idxTo, trainIdFrom, trainIdTo))
-            else:
-                f = 1/((sase[idxFrom,1] - sase[idxFrom,0])*222e-6)
-                print('%i\t%i\t\t%i\t\t%i\t%i\t%.0f'%(n, idxFrom, idxTo, trainIdFrom, trainIdTo, f))
-        print('\n')
-    if plot:
-        plt.figure(figsize=(6,3))
-        plt.plot(data['npulses_sase3'].trainId, data['npulses_sase3'], 'o-',
-                 ms=3, label='SASE 3')
-        plt.xlabel('trainId')
-        plt.ylabel('pulses per train')
-        plt.plot(data['npulses_sase1'].trainId, data['npulses_sase1'], '^-',
-                 ms=3, color='C2', label='SASE 1')
-        plt.legend()
-        plt.tight_layout()
-
-
-def repRate(data, sase='sase3'):
-    ''' Calculates the pulse repetition rate in sase according
-        to the bunch pattern and assuming a minimum pulse
-        separation of 221.54e-9 seconds.
-        Inputs:
-            data: xarray Dataset containing pulse pattern
-            sase: sase in which the repetition rate is
-                calculated (1,2 or 3)
-        Output:
-            f: repetition rate in kHz
-    '''
-    assert sase in data, 'key "{}" not found in data!'.format(sase)
-    sase = data[sase].where(data['npulses_{}'.format(sase)]>1,
-                            drop=True).values
-    if len(sase)==0:
-        print('Not enough pulses to extract repetition rate')
-        return 0
-    f = 1/((sase[0,1] - sase[0,0])*221.54e-6)
-    return f
-
-
 # XGM
 def cleanXGMdata(data, npulses=None, sase3First=True):
     ''' Cleans the XGM data arrays obtained from load() function.