# -*- coding: utf-8 -*-
""" Toolbox for SCS.
Various utilities function to quickly process data measured at the SCS instruments.
Copyright (2019) SCS Team.
"""
import numpy as np
import xarray as xr
import ToolBox as tb
def extractBunchPattern(bp_table=None, key='sase3', runDir=None):
    ''' generate the bunch pattern and number of pulses of a source directly from the
        bunch pattern table and not using the MDL device BUNCH_DECODER. This is
        inspired by the euxfel_bunch_pattern package,
        https://git.xfel.eu/gitlab/karaboDevices/euxfel_bunch_pattern
        Inputs:
            bp_table: DataArray corresponding to the mnemonics "bunchPatternTable".
                If None, the bunch pattern table is loaded using runDir.
            key: str, ['sase1', 'sase2', 'sase3', 'scs_ppl']
            runDir: karabo_data run directory. Required only if bp_table is None.
        Outputs:
            bunchPattern: DataArray containing indices of the sase/laser pulses for
                each train
            npulses: DataArray containing the number of pulses for each train
            matched: 2-D DataArray mask (trainId x 2700), True where 'key' has pulses
    '''
    keys = ['sase1', 'sase2', 'sase3', 'scs_ppl']
    if key not in keys:
        raise ValueError(f'Invalid key "{key}", possible values are {keys}')
    if bp_table is None:
        if runDir is None:
            raise ValueError('bp_table and runDir cannot both be None')
        bp_mnemo = tb.mnemonics['bunchPatternTable']
        if bp_mnemo['source'] not in runDir.all_sources:
            raise ValueError('Source {} not found in run'.format(
                bp_mnemo['source']))
        else:
            bp_table = runDir.get_array(bp_mnemo['source'], bp_mnemo['key'],
                                        extra_dims=bp_mnemo['dim'])
    # define relevant masks, see euxfel_bunch_pattern package for details
    DESTINATION_MASK = 0xf << 18
    DESTINATION_T4D = 4 << 18  # SASE1/3 dump
    DESTINATION_T5D = 2 << 18  # SASE2 dump
    PHOTON_LINE_DEFLECTION = 1 << 27  # Soft kick (e.g. SA3)
    LASER_SEED6 = 1 << 13
    if 'sase' in key:
        sase = int(key[4])
        destination = DESTINATION_T5D if (sase == 2) else DESTINATION_T4D
        matched = (bp_table & DESTINATION_MASK) == destination
        if sase == 1:
            # Pulses to SASE 1 when soft kick is off
            matched &= (bp_table & PHOTON_LINE_DEFLECTION) == 0
        elif sase == 3:
            # Pulses to SASE 3 when soft kick is on
            matched &= (bp_table & PHOTON_LINE_DEFLECTION) != 0
    elif key == 'scs_ppl':
        matched = (bp_table & LASER_SEED6) != 0
    # create table of indices where bunch pattern and mask match
    nz = np.nonzero(matched.values)
    dim_pId = matched.shape[1]
    # fill unmatched slots with dim_pId so that sorting pushes them to the end
    bunchPattern = np.ones(matched.shape, dtype=np.uint64) * dim_pId
    bunchPattern[nz] = nz[1]
    bunchPattern = np.sort(bunchPattern)
    npulses = np.count_nonzero(bunchPattern < dim_pId, axis=1)
    # NOTE(review): filler slots become 0, indistinguishable from a real pulse
    # at slot 0 — downstream code filters with '> 1' to avoid this ambiguity
    bunchPattern[bunchPattern == dim_pId] = 0
    bunchPattern = xr.DataArray(bunchPattern[:, :1000], dims=['trainId', 'bunchId'],
                                coords={'trainId': matched.trainId},
                                name=key)
    npulses = xr.DataArray(npulses, dims=['trainId'],
                           coords={'trainId': matched.trainId},
                           name=f'npulses_{key}')
    return bunchPattern, npulses, matched
def pulsePatternInfo(data, plot=False):
    ''' display general information on the pulse patterns operated by SASE1 and SASE3.
        This is useful to track changes of number of pulses or mode of operation of
        SASE1 and SASE3. It also determines which SASE comes first in the train and
        the minimum separation between the two SASE sub-trains.
        Inputs:
            data: xarray Dataset containing pulse pattern info from the bunch decoder MDL:
                {'sase1, sase3', 'npulses_sase1', 'npulses_sase3'}
            plot: bool enabling/disabling the plotting of the pulse patterns
        Outputs:
            print of pulse pattern info. If plot==True, plot of the pulse pattern.
    '''
    # Which SASE comes first?
    npulses_sa3 = data['npulses_sase3']
    npulses_sa1 = data['npulses_sase1']
    dedicated = False
    if np.all(npulses_sa1.where(npulses_sa3 != 0, drop=True) == 0):
        dedicated = True
        print('No SASE 1 pulses during SASE 3 operation')
    if np.all(npulses_sa3.where(npulses_sa1 != 0, drop=True) == 0):
        dedicated = True
        print('No SASE 3 pulses during SASE 1 operation')
    if not dedicated:
        # pulse ids > 1 only: slot 0/1 presumably act as fillers — see
        # extractBunchPattern, which writes 0 into unmatched slots
        pulseIdmin_sa1 = data['sase1'].where(npulses_sa1 != 0).where(data['sase1'] > 1).min().values
        pulseIdmax_sa1 = data['sase1'].where(npulses_sa1 != 0).where(data['sase1'] > 1).max().values
        pulseIdmin_sa3 = data['sase3'].where(npulses_sa3 != 0).where(data['sase3'] > 1).min().values
        pulseIdmax_sa3 = data['sase3'].where(npulses_sa3 != 0).where(data['sase3'] > 1).max().values
        if pulseIdmin_sa1 > pulseIdmax_sa3:
            # 0.220 µs = spacing between two neighboring 4.5 MHz pulse slots
            t = 0.220 * (pulseIdmin_sa1 - pulseIdmax_sa3 + 1)
            print('SASE 3 pulses come before SASE 1 pulses (minimum separation %.1f µs)' % t)
        elif pulseIdmin_sa3 > pulseIdmax_sa1:
            t = 0.220 * (pulseIdmin_sa3 - pulseIdmax_sa1 + 1)
            print('SASE 1 pulses come before SASE 3 pulses (minimum separation %.1f µs)' % t)
        else:
            print('Interleaved mode')

    # What is the pulse pattern of each SASE?
    for key in ['sase3', 'sase1']:
        print('\n*** %s pulse pattern: ***' % key.upper())
        npulses = data['npulses_%s' % key]
        sase = data[key]
        if not np.all(npulses == npulses[0]):
            print('Warning: number of pulses per train changed during the run!')
        # take the derivative along the trainId to track changes in pulse number
        diff = npulses.diff(dim='trainId')
        # only keep trainIds where a change occurred
        diff = diff.where(diff != 0, drop=True)
        # get a list of indices where a change occurred
        idx_change = np.argwhere(np.isin(npulses.trainId.values,
                                         diff.trainId.values, assume_unique=True))[:, 0]
        # add index 0 to get the initial pulse number per train
        idx_change = np.insert(idx_change, 0, 0)
        print('npulses\tindex From\tindex To\ttrainId From\ttrainId To\trep. rate [kHz]')
        for i, idx in enumerate(idx_change):
            n = npulses[idx]
            idxFrom = idx
            trainIdFrom = npulses.trainId[idx]
            if i < len(idx_change) - 1:
                idxTo = idx_change[i + 1] - 1
            else:
                idxTo = npulses.shape[0] - 1
            trainIdTo = npulses.trainId[idxTo]
            if n <= 1:
                print('%i\t%i\t\t%i\t\t%i\t%i' % (n, idxFrom, idxTo, trainIdFrom, trainIdTo))
            else:
                # repetition rate from the spacing of the first two pulses
                f = 1 / ((sase[idxFrom, 1] - sase[idxFrom, 0]) * 222e-6)
                print('%i\t%i\t\t%i\t\t%i\t%i\t%.0f' % (n, idxFrom, idxTo, trainIdFrom, trainIdTo, f))
        print('\n')
    if plot:
        # bug fix: plt was used but matplotlib is never imported in this file,
        # which raised NameError whenever plot=True. Import locally so the
        # dependency is only required when plotting is requested.
        import matplotlib.pyplot as plt
        plt.figure(figsize=(6, 3))
        plt.plot(data['npulses_sase3'].trainId, data['npulses_sase3'], 'o-',
                 ms=3, label='SASE 3')
        plt.xlabel('trainId')
        plt.ylabel('pulses per train')
        plt.plot(data['npulses_sase1'].trainId, data['npulses_sase1'], '^-',
                 ms=3, color='C2', label='SASE 1')
        plt.legend()
        plt.tight_layout()
def repRate(data, sase='sase3'):
    ''' Calculates the pulse repetition rate (in kHz) in sase
        according to the bunch pattern and assuming a grid of
        4.5 MHz.
        Inputs:
            data: xarray Dataset containing pulse pattern
            sase: sase in which the repetition rate is
                calculated (1,2 or 3)
        Output:
            f: repetition rate in kHz
    '''
    assert sase in data, 'key "{}" not found in data!'.format(sase)
    # keep only trains with at least two pulses: a single pulse
    # carries no spacing information
    multi_pulse = data['npulses_{}'.format(sase)] > 1
    pattern = data[sase].where(multi_pulse, drop=True).values
    if len(pattern) == 0:
        print('Not enough pulses to extract repetition rate')
        return 0
    # spacing between the first two pulses of the first usable train,
    # converted from 4.5 MHz slot units to kHz
    slot_gap = pattern[0, 1] - pattern[0, 0]
    return 1 / (slot_gap * 12e-3 / 54.1666667)
def sortBAMdata(data, key='sase3'):
    ''' Extracts beam arrival monitor data from the raw arrays 'BAM6', 'BAM7', etc...
        according to the bunchPatternTable. The BAM arrays contain 7220 values, which
        corresponds to FLASH burst length of 800 us @ 9 MHz. The bunchPatternTable
        only has 2700 values, corresponding to XFEL 600 us burst length @ 4.5 MHz.
        Hence, we truncate the BAM arrays to 5400 with a stride of 2 and match them
        to the bunchPatternTable. If key is one of the sase, the given dimension name
        of the bam arrays is 'sa[sase number]_pId', to match other data (XGM, TIM...).
        Inputs:
            data: xarray Dataset containing BAM arrays
            key: str, ['sase1', 'sase2', 'sase3', 'scs_ppl']
        Output:
            ndata: xarray Dataset with same keys as input data (but new bam arrays)
    '''
    # mask of pulse slots belonging to 'key', derived from the bunch pattern table
    a, b, mask = extractBunchPattern(key=key, runDir=data.attrs['run'])
    mask = mask.rename({'pulse_slot': 'BAMbunchId'})
    dropList = []
    mergeList = []
    for k in data:
        if 'BAM' in k:
            dropList.append(k)
            # every other value of the first 5400 -> 2700 values on the
            # 4.5 MHz grid, matching the bunch pattern table
            bam = data[k].isel(BAMbunchId=slice(0, 5400, 2))
            bam = bam.where(mask, drop=True)
            if 'sase' in key:
                # rename dimension to match other data sources (XGM, TIM...)
                name = f'sa{key[4]}_pId'
                bam = bam.rename({'BAMbunchId': name})
            mergeList.append(bam)
    mergeList.append(data.drop(dropList))
    ndata = xr.merge(mergeList, join='inner')
    # xr.merge does not propagate attrs; copy them over explicitly
    for k in data.attrs.keys():
        ndata.attrs[k] = data.attrs[k]
    return ndata