Skip to content
Snippets Groups Projects
Commit fd8ae729 authored by Loïc Le Guyader's avatar Loïc Le Guyader
Browse files

Merge branch 'XTD10_XGM_SA1' into 'master'

Correct SA3 slow data by SASE1 contribution

See merge request !30
parents e3ce00fe 57925d4a
No related branches found
No related tags found
No related merge requests found
......@@ -59,31 +59,81 @@ mnemonics = {
'key':'data.image.pixels',
'dim':['x','y']},
# XGMs
"SA3_XGM": {'source':'SA3_XTD10_XGM/XGM/DOOCS:output',
# XTD10 XGM
## keithley
"XTD10_photonFlux": {'source':'SA3_XTD10_XGM/XGM/DOOCS',
'key':'pulseEnergy.photonFlux.value',
'dim':None},
"XTD10_photonFlux_sigma": {'source':'SA3_XTD10_XGM/XGM/DOOCS',
'key':'pulseEnergy.photonFluxSigma.value',
'dim':None},
## ADC
"XTD10_XGM": {'source':'SA3_XTD10_XGM/XGM/DOOCS:output',
'key':'data.intensityTD',
'dim':['XGMbunchId']},
"SA3_XGM_SA3": {'source':'SA3_XTD10_XGM/XGM/DOOCS:output',
"XTD10_XGM_sigma": {'source':'SA3_XTD10_XGM/XGM/DOOCS:output',
'key':'data.intensitySigmaTD',
'dim':['XGMbunchId']},
"XTD10_SA3": {'source':'SA3_XTD10_XGM/XGM/DOOCS:output',
'key':'data.intensitySa3TD',
'dim':['XGMbunchId']},
"SA3_XGM_SA1": {'source':'SA3_XTD10_XGM/XGM/DOOCS:output',
"XTD10_SA3_sigma": {'source':'SA3_XTD10_XGM/XGM/DOOCS:output',
'key':'data.intensitySa3SigmaTD',
'dim':['XGMbunchId']},
"XTD10_SA1": {'source':'SA3_XTD10_XGM/XGM/DOOCS:output',
'key':'data.intensitySa1TD',
'dim':['XGMbunchId']},
"SA3_XGM_SLOW": {'source':'SA3_XTD10_XGM/XGM/DOOCS',
"XTD10_SA1_sigma": {'source':'SA3_XTD10_XGM/XGM/DOOCS:output',
'key':'data.intensitySa1SigmaTD',
'dim':['XGMbunchId']},
## low pass averaged ADC
"XTD10_slowTrain": {'source':'SA3_XTD10_XGM/XGM/DOOCS',
'key':'controlData.slowTrain.value',
'dim':None},
"XTD10_slowTrain_SA1": {'source':'SA3_XTD10_XGM/XGM/DOOCS',
'key':'controlData.slowTrainSa1.value',
'dim':None},
"XTD10_slowTrain_SA3": {'source':'SA3_XTD10_XGM/XGM/DOOCS',
'key':'controlData.slowTrainSa3.value',
'dim':None},
# SCS XGM
## keithley
"SCS_photonFlux": {'source':'SCS_BLU_XGM/XGM/DOOCS',
'key':'pulseEnergy.photonFlux.value',
'dim':None},
"SCS_photonFlux_sigma": {'source':'SCS_BLU_XGM/XGM/DOOCS',
'key':'pulseEnergy.photonFluxSigma.value',
'dim':None},
## ADC
"SCS_XGM": {'source':'SCS_BLU_XGM/XGM/DOOCS:output',
'key':'data.intensityTD',
'dim':['XGMbunchId']},
"SCS_XGM_SA1": {'source':'SCS_BLU_XGM/XGM/DOOCS:output',
"SCS_XGM_sigma": {'source':'SCS_BLU_XGM/XGM/DOOCS:output',
'key':'data.intensitySigmaTD',
'dim':['XGMbunchId']},
"SCS_SA1": {'source':'SCS_BLU_XGM/XGM/DOOCS:output',
'key':'data.intensitySa1TD',
'dim':['XGMbunchId']},
"SCS_XGM_SA3": {'source':'SCS_BLU_XGM/XGM/DOOCS:output',
"SCS_SA1_sigma": {'source':'SCS_BLU_XGM/XGM/DOOCS:output',
'key':'data.intensitySa1SigmaTD',
'dim':['XGMbunchId']},
"SCS_SA3": {'source':'SCS_BLU_XGM/XGM/DOOCS:output',
'key':'data.intensitySa3TD',
'dim':['XGMbunchId']},
"SCS_XGM_SLOW": {'source':'SCS_BLU_XGM/XGM/DOOCS',
'key':'pulseEnergy.photonFlux.value',
'dim':None},
"SCS_SA3_sigma": {'source':'SCS_BLU_XGM/XGM/DOOCS:output',
'key':'data.intensitySa3SigmaTD',
'dim':['XGMbunchId']},
## low pass averaged ADC
"SCS_slowTrain": {'source':'SCS_BLU_XGM/XGM/DOOCS',
'key':'controlData.slowTrain.value',
'dim':None},
"SCS_slowTrain_SA1": {'source':'SCS_BLU_XGM/XGM/DOOCS',
'key':'controlData.slowTrainSa1.value',
'dim':None},
"SCS_slowTrain_SA3": {'source':'SCS_BLU_XGM/XGM/DOOCS',
'key':'controlData.slowTrainSa3.value',
'dim':None},
# KBS
"HFM_CAPB": {'source':'SCS_KBS_HFM/ASENS/CAPB',
......@@ -325,7 +375,8 @@ def load(fields, runNB, proposalNB, semesterNB, topic='SCS', display=False,
k = f
else:
print('Unknow mnemonic "{}". Skipping!'.format(f))
continue
if k in keys:
continue # already loaded, skip
......
......@@ -97,7 +97,7 @@ def binning(x, data, func, bins=100, bin_length=None):
return bins, res
def xas(nrun, bins=None, Iokey='SCS_XGM', Itkey='MCP3apd', nrjkey='nrj', Iooffset=0):
def xas(nrun, bins=None, Iokey='SCS_SA3', Itkey='MCP3apd', nrjkey='nrj', Iooffset=0):
""" Compute the XAS spectra from a xarray nrun.
Inputs:
......@@ -119,7 +119,7 @@ def xas(nrun, bins=None, Iokey='SCS_XGM', Itkey='MCP3apd', nrjkey='nrj', Iooffse
counts: the number of events in each bin
"""
stacked = nrun.stack(x=['trainId','pId'])
stacked = nrun.stack(x=['trainId','sa3_pId'])
Io = stacked[Iokey].values + Iooffset
It = stacked[Itkey].values
......
......@@ -97,7 +97,7 @@ def pulsePatternInfo(data, plot=False):
def repRate(data, sase='sase3'):
''' Calculates the pulse repetition rate in sase according
to the bunch pattern and assuming a minimum pulse
separation of 222e-9 seconds.
separation of 221.54e-9 seconds.
Inputs:
data: xarray Dataset containing pulse pattern
sase: sase in which the repetition rate is
......@@ -111,11 +111,76 @@ def repRate(data, sase='sase3'):
if len(sase)==0:
print('Not enough pulses to extract repetition rate')
return 0
f = 1/((sase[0,1] - sase[0,0])*222e-6)
f = 1/((sase[0,1] - sase[0,0])*221.54e-6)
return f
# XGM
def cleanXGMdata(data, npulses=None, sase3First=True):
    ''' Cleans the XGM data arrays obtained from load() function.
        The XGM "TD" data arrays have arbitrary size of 1000 and default value 1.0
        when there is no pulse. This function sorts the SASE 1 and SASE 3 pulses.
        For recent DAQ runs, sase-resolved arrays can be used. For older runs,
        the function selectSASEinXGM can be used to extract sase-resolved pulses.
        Inputs:
            data: xarray Dataset containing XGM TD arrays.
            npulses: number of pulses, needed if pulse pattern not available.
            sase3First: bool, needed if pulse pattern not available.
        Output:
            xarray Dataset containing sase- and pulse-resolved XGM data, with
            dimension names 'sa1_pId' and 'sa3_pId'
    '''
    dropList = []   # variables replaced by sase-resolved versions, dropped at the end
    mergeList = []  # sase-resolved DataArrays to merge into the output Dataset
    # Older DAQ runs recorded only the combined intensityTD array; detect that
    # case by the absence of the sase-resolved mnemonics.
    if ("XTD10_SA3" not in data and "XTD10_XGM" in data) or (
            "SCS_SA3" not in data and "SCS_XGM" in data):
        #no SASE-resolved arrays available
        if 'SCS_XGM' in data:
            # split the combined SCS XGM array into per-sase arrays using the
            # bunch pattern (or npulses/sase3First when no pattern is available)
            sa3 = selectSASEinXGM(data, xgm='SCS_XGM', sase='sase3', npulses=npulses,
                                  sase3First=sase3First).rename({'XGMbunchId':'sa3_pId'}).rename('SCS_SA3')
            mergeList.append(sa3)
            sa1 = selectSASEinXGM(data, xgm='SCS_XGM', sase='sase1', npulses=npulses,
                                  sase3First=sase3First).rename({'XGMbunchId':'sa1_pId'}).rename('SCS_SA1')
            mergeList.append(sa1)
            dropList.append('SCS_XGM')
        if 'XTD10_XGM' in data:
            sa3 = selectSASEinXGM(data, xgm='XTD10_XGM', sase='sase3', npulses=npulses,
                                  sase3First=sase3First).rename({'XGMbunchId':'sa3_pId'}).rename('XTD10_SA3')
            mergeList.append(sa3)
            sa1 = selectSASEinXGM(data, xgm='XTD10_XGM', sase='sase1', npulses=npulses,
                                  sase3First=sase3First).rename({'XGMbunchId':'sa1_pId'}).rename('XTD10_SA1')
            mergeList.append(sa1)
            dropList.append('XTD10_XGM')
        keys = []
    else:
        # Recent runs: the DAQ already provides sase-resolved arrays; only the
        # placeholder (1.0) entries need to be stripped below.
        keys = ["XTD10_XGM", "XTD10_SA3", "XTD10_SA1",
                "XTD10_XGM_sigma", "XTD10_SA3_sigma", "XTD10_SA1_sigma"]
        keys += ["SCS_XGM", "SCS_SA3", "SCS_SA1",
                 "SCS_XGM_sigma", "SCS_SA3_sigma", "SCS_SA1_sigma"]
    for key in keys:
        if key not in data:
            continue
        # pick the output dimension prefix from the mnemonic name
        if "sa3" in key.lower():
            sase = 'sa3_'
        elif "sa1" in key.lower():
            sase = 'sa1_'
        else:
            # combined (non-sase-resolved) array: superseded, just drop it
            dropList.append(key)
            continue
        # 1.0 is the XGM filler value for "no pulse": drop those entries and
        # rename the generic bunch dimension to the sase-specific pulse id
        res = data[key].where(data[key] != 1.0, drop=True).rename(
            {'XGMbunchId':'{}pId'.format(sase)}).rename(key)
        dropList.append(key)
        mergeList.append(res)
    mergeList.append(data.drop(dropList))
    # join='inner' keeps only trains present in every merged array
    subset = xr.merge(mergeList, join='inner')
    # xr.merge does not propagate attrs; restore the run handle explicitly
    subset.attrs['run'] = data.attrs['run']
    return subset
def selectSASEinXGM(data, sase='sase3', xgm='SCS_XGM', sase3First=True, npulses=None):
''' Extract SASE1- or SASE3-only XGM data.
There are various cases depending on i) the mode of operation (10 Hz
......@@ -241,7 +306,7 @@ def selectSASEinXGM(data, sase='sase3', xgm='SCS_XGM', sase3First=True, npulses=
return result
def saseContribution(data, sase='sase1', xgm='SA3_XGM'):
def saseContribution(data, sase='sase1', xgm='XTD10_XGM'):
''' Calculate the relative contribution of SASE 1 or SASE 3 pulses
for each train in the run. Supports fresh bunch, dedicated trains
and pulse on demand modes.
......@@ -249,7 +314,7 @@ def saseContribution(data, sase='sase1', xgm='SA3_XGM'):
Inputs:
data: xarray Dataset containing xgm data
sase: key of sase for which the contribution is computed: {'sase1', 'sase3'}
xgm: key of xgm to select: {'SA3_XGM', 'SCS_XGM'}
xgm: key of xgm to select: {'XTD10_XGM', 'SCS_XGM'}
Output:
1D DataArray equal to sum(sase)/sum(sase1+sase3)
......@@ -268,23 +333,49 @@ def saseContribution(data, sase='sase1', xgm='SA3_XGM'):
else:
return 1 - contrib
def calibrateXGMs(data, rollingWindow=200, plot=False):
''' Calibrate the fast (pulse-resolved) signals of the XTD10 and SCS XGM
(read in intensityTD property) to the respective slow ion signal
(photocurrent read by Keithley, channel 'pulseEnergy.photonFlux.value').
If the sase-resolved signal (introduced in May 2019) are recorded, the
calibration is defined as the mean ratio between the photocurrent and
the low-pass slowTrain signal. Otherwise, calibrateXGMsFromAllPulses()
is called.
def filterOnTrains(data, key='sase3'):
    ''' Removes train ids for which there was no pulse in the given SASE branch.
        Inputs:
            data: xarray Dataset
            key: SASE on which to filter: {'sase1', 'sase3'}
        Output:
            filtered xarray Dataset
    '''
    # the pulse count per train is stored under the 'npulses_<sase>' variable
    return data.where(data['npulses_' + key] > 0, drop=True)
def calibrateXGMs(data, rollingWindow=200, plot=False):
    ''' Calibrate the fast (pulse-resolved) signals of the XTD10 and SCS XGM
        to the respective slow ion signal (photocurrent read by Keithley,
        'pulseEnergy.photonFlux.value'). If the sase-resolved low-pass
        slowTrain signals are recorded, the calibration factor is the mean
        ratio photonFlux / slowTrain. Otherwise the pulse-based
        calibrateXGMsFromAllPulses() is used.
        Inputs:
            data: xarray Dataset
            rollingWindow: length of running average, only used by the
                           pulse-based fallback
            plot: boolean, plot the calibration output (fallback only;
                  TODO for the slowTrain-based path)
        Output:
            factors: numpy ndarray of shape 1 x 2 containing
                     [XTD10 calibration factor, SCS calibration factor]
    '''
    if "XTD10_slowTrain" not in data and "SCS_slowTrain" not in data:
        # no low-pass averaged ADC data recorded: fall back to the
        # pulse-resolved calibration scheme
        return calibrateXGMsFromAllPulses(data, rollingWindow, plot)
    factors = [np.nan, np.nan]
    if "XTD10_slowTrain" in data:
        factors[0] = np.mean(data.XTD10_photonFlux/data.XTD10_slowTrain)
    else:
        print('no XTD10 XGM data. Skipping calibration for XTD10 XGM')
    if "SCS_slowTrain" in data:
        factors[1] = np.mean(data.SCS_photonFlux/data.SCS_slowTrain)
    else:
        print('no SCS XGM data. Skipping calibration for SCS XGM')
    #TODO: plot the results of calibration
    return np.array(factors)
def calibrateXGMsFromAllPulses(data, rollingWindow=200, plot=False):
''' Calibrate the fast (pulse-resolved) signals of the XTD10 and SCS XGM
(read in intensityTD property) to the respective slow ion signal
(photocurrent read by Keithley, channel 'pulseEnergy.photonFlux.value').
......@@ -300,27 +391,30 @@ def calibrateXGMs(data, rollingWindow=200, plot=False):
F = (N1+N3) * E_avg * C/(N3 * E_fast_avg_sase3), where N1 and N3 are the number
of pulses in SASE1 and SASE3, E_fast_avg_sase3 is the rolling average (with window
rollingWindow) of the SASE3-only fast signal.
Inputs:
data: xarray Dataset
rollingWindow: length of running average to calculate E_fast_avg
plot: boolean, plot the calibration output
Output:
factors: numpy ndarray of shape 1 x 2 containing
[XTD10 calibration factor, SCS calibration factor]
'''
noSCS = noSA3 = False
sa3_calib_factor = None
scs_calib_factor = None
XTD10_factor = np.nan
SCS_factor = np.nan
noSCS = noXTD10 = False
if 'SCS_XGM' not in data:
print('no SCS XGM data. Skipping calibration for SCS XGM')
noSCS = True
if 'SA3_XGM' not in data:
print('no SASE3 XGM data. Skipping calibration for SASE3 XGM')
noSA3 = True
if noSCS and noSA3:
return np.array([None, None])
if 'XTD10_XGM' not in data:
print('no XTD10 XGM data. Skipping calibration for XTD10 XGM')
noXTD10 = True
if noSCS and noXTD10:
return np.array([XTD10_factor, SCS_factor])
if not noSCS and noXTD10:
print('XTD10 data is needed to calibrate SCS XGM.')
return np.array([XTD10_factor, SCS_factor])
start = 0
stop = None
npulses = data['npulses_sase3']
......@@ -335,56 +429,59 @@ def calibrateXGMs(data, rollingWindow=200, plot=False):
print('not enough consecutive data points with the largest number of pulses per train')
start += rollingWindow
stop = np.min((ntrains, stop+rollingWindow))
# Calibrate SASE3 XGM with all signal from SASE1 and SASE3
if not noSA3:
xgm_avg = data['SA3_XGM'].where(data['SA3_XGM'] != 1.0).mean(axis=1)
# Calculate SASE3 slow data
sa3contrib = saseContribution(data, 'sase3', 'XTD10_XGM')
SA3_SLOW = data['XTD10_photonFlux']*(data['npulses_sase3']+data['npulses_sase1'])*sa3contrib/data['npulses_sase3']
SA1_SLOW = data['XTD10_photonFlux']*(data['npulses_sase3']+data['npulses_sase1'])*(1-sa3contrib)/data['npulses_sase1']
# Calibrate XTD10 XGM with all signal from SASE1 and SASE3
if not noXTD10:
xgm_avg = selectSASEinXGM(data, 'sase3', 'XTD10_XGM').mean(axis=1)
rolling_sa3_xgm = xgm_avg.rolling(trainId=rollingWindow).mean()
ratio = data['SA3_XGM_SLOW']/rolling_sa3_xgm
sa3_calib_factor = ratio[start:stop].mean().values
print('calibration factor SA3 XGM: %f'%sa3_calib_factor)
ratio = SA3_SLOW/rolling_sa3_xgm
XTD10_factor = ratio[start:stop].mean().values
print('calibration factor XTD10 XGM: %f'%XTD10_factor)
# Calibrate SCS XGM with SASE3-only contribution
sa3contrib = saseContribution(data, 'sase3', 'SA3_XGM')
if not noSCS:
SCS_SLOW = data['SCS_photonFlux']*(data['npulses_sase3']+data['npulses_sase1'])*sa3contrib/data['npulses_sase3']
scs_sase3_fast = selectSASEinXGM(data, 'sase3', 'SCS_XGM').mean(axis=1)
meanFast = scs_sase3_fast.rolling(trainId=rollingWindow).mean()
ratio = ((data['npulses_sase3']+data['npulses_sase1']) *
data['SCS_XGM_SLOW'] * sa3contrib) / (meanFast * data['npulses_sase3'])
scs_calib_factor = ratio[start:stop].median().values
print('calibration factor SCS XGM: %f'%scs_calib_factor)
ratio = SCS_SLOW/meanFast
SCS_factor = ratio[start:stop].median().values
print('calibration factor SCS XGM: %f'%SCS_factor)
if plot:
plt.figure(figsize=(8,8))
if noSCS ^ noXTD10:
plt.figure(figsize=(8,4))
else:
plt.figure(figsize=(8,8))
plt.subplot(211)
plt.title('E[uJ] = %.2f x IntensityTD' %(sa3_calib_factor))
plt.plot(data['SA3_XGM_SLOW'], label='SA3 slow', color='C1')
plt.plot(rolling_sa3_xgm*sa3_calib_factor,
plt.title('E[uJ] = %.2f x IntensityTD' %(XTD10_factor))
plt.plot(SA3_SLOW, label='SA3 slow', color='C1')
plt.plot(rolling_sa3_xgm*XTD10_factor,
label='SA3 fast signal rolling avg', color='C4')
plt.plot(xgm_avg*XTD10_factor, label='SA3 fast signal train avg', alpha=0.2, color='C4')
plt.ylabel('Energy [uJ]')
plt.xlabel('train in run')
plt.legend(loc='upper left', fontsize=10)
plt.twinx()
plt.plot(xgm_avg*sa3_calib_factor, label='SA3 fast signal train avg', alpha=0.2, color='C4')
plt.ylabel('Calibrated SA3 fast signal [uJ]')
plt.plot(SA1_SLOW, label='SA1 slow', alpha=0.2, color='C2')
plt.ylabel('SA1 slow signal [uJ]')
plt.legend(loc='lower right', fontsize=10)
plt.subplot(212)
plt.title('E[uJ] = %.2f x HAMP' %scs_calib_factor)
plt.plot(data['SCS_XGM_SLOW'], label='SCS slow (all SASE)', color='C0')
slow_avg_sase3 = data['SCS_XGM_SLOW']*(data['npulses_sase1']
+data['npulses_sase3'])*sa3contrib/data['npulses_sase3']
plt.plot(slow_avg_sase3, label='SCS slow (SASE3 only)', color='C1')
plt.plot(meanFast*scs_calib_factor, label='SCS HAMP rolling avg', color='C2')
plt.title('E[uJ] = %.2g x HAMP' %SCS_factor)
plt.plot(SCS_SLOW, label='SCS slow', color='C1')
plt.plot(meanFast*SCS_factor, label='SCS HAMP rolling avg', color='C2')
plt.ylabel('Energy [uJ]')
plt.xlabel('train in run')
plt.plot(scs_sase3_fast*SCS_factor, label='SCS HAMP train avg', alpha=0.2, color='C2')
plt.legend(loc='upper left', fontsize=10)
plt.twinx()
plt.plot(scs_sase3_fast*scs_calib_factor, label='SCS HAMP train avg', alpha=0.2, color='C2')
plt.ylabel('Calibrated HAMP signal [uJ]')
plt.legend(loc='lower right', fontsize=10)
plt.tight_layout()
return np.array([sa3_calib_factor, scs_calib_factor])
return np.array([XTD10_factor, SCS_factor])
# TIM
......@@ -581,18 +678,18 @@ def calibrateTIM(data, rollingWindow=200, mcp=1, plot=False, use_apd=True, intst
start += rollingWindow
stop = np.min((ntrains, stop+rollingWindow))
filteredTIM = getTIMapd(data, mcp, use_apd, intstart, intstop, bkgstart, bkgstop, t_offset, npulses_apd)
sa3contrib = saseContribution(data, 'sase3', 'SA3_XGM')
sa3contrib = saseContribution(data, 'sase3', 'XTD10_XGM')
avgFast = filteredTIM.mean(axis=1).rolling(trainId=rollingWindow).mean()
ratio = ((data['npulses_sase3']+data['npulses_sase1']) *
data['SCS_XGM_SLOW'] * sa3contrib) / (avgFast*data['npulses_sase3'])
data['SCS_photonFlux'] * sa3contrib) / (avgFast*data['npulses_sase3'])
F = float(ratio[start:stop].median().values)
if plot:
fig = plt.figure(figsize=(8,5))
ax = plt.subplot(211)
ax.set_title('E[uJ] = {:2e} x TIM (MCP{})'.format(F, mcp))
ax.plot(data['SCS_XGM_SLOW'], label='SCS XGM slow (all SASE)', color='C0')
slow_avg_sase3 = data['SCS_XGM_SLOW']*(data['npulses_sase1']
ax.plot(data['SCS_photonflux'], label='SCS XGM slow (all SASE)', color='C0')
slow_avg_sase3 = data['SCS_photonflux']*(data['npulses_sase1']
+data['npulses_sase3'])*sa3contrib/data['npulses_sase3']
ax.plot(slow_avg_sase3, label='SCS XGM slow (SASE3 only)', color='C1')
ax.plot(avgFast*F, label='Calibrated TIM rolling avg', color='C2')
......@@ -816,40 +913,34 @@ def matchXgmTimPulseId(data, use_apd=True, intstart=None, intstop=None,
no bunch pattern info is available.
Output:
xr DataSet containing XGM and TIM signals with the share
dimension 'pId'. Raw traces, raw XGM and raw APD are dropped.
xr DataSet containing XGM and TIM signals with the share d
dimension 'sa3_pId'. Raw traces, raw XGM and raw APD are dropped.
'''
res = selectSASEinXGM(data, xgm='SCS_XGM', npulses=npulses,
sase3First=sase3First).rename({'XGMbunchId':'pId'}).rename('SCS_XGM')
dropList = ['SCS_XGM']
mergeList = [res]
if 'SA3_XGM' in data:
res2 = selectSASEinXGM(data, xgm='SA3_XGM', npulses=npulses,
sase3First=sase3First).rename({'XGMbunchId':'pId'}).rename('SA3_XGM')
dropList.append('SA3_XGM')
mergeList.append(res2)
dropList = []
mergeList = []
ndata = cleanXGMdata(data, npulses, sase3First)
for mcp in range(1,5):
if 'MCP{}apd'.format(mcp) in data or 'MCP{}raw'.format(mcp) in data:
MCPapd = getTIMapd(data, mcp=mcp, use_apd=use_apd, intstart=intstart,
MCPapd = getTIMapd(ndata, mcp=mcp, use_apd=use_apd, intstart=intstart,
intstop=intstop,bkgstart=bkgstart, bkgstop=bkgstop,
t_offset=t_offset, npulses=npulses,
stride=stride).rename('MCP{}apd'.format(mcp))
if use_apd:
MCPapd = MCPapd.rename({'apdId':'pId'})
MCPapd = MCPapd.rename({'apdId':'sa3_pId'})
else:
MCPapd = MCPapd.rename({'MCP{}fromRaw'.format(mcp):'pId'})
MCPapd = MCPapd.rename({'MCP{}fromRaw'.format(mcp):'sa3_pId'})
mergeList.append(MCPapd)
if 'MCP{}raw'.format(mcp) in data:
if 'MCP{}raw'.format(mcp) in ndata:
dropList.append('MCP{}raw'.format(mcp))
if 'MCP{}apd'.format(mcp) in data:
dropList.append('MCP{}apd'.format(mcp))
mergeList.append(data.drop(dropList))
mergeList.append(ndata.drop(dropList))
subset = xr.merge(mergeList, join='inner')
subset.attrs['run'] = data.attrs['run']
subset.attrs['run'] = ndata.attrs['run']
return subset
# Fast ADC
def fastAdcPeaks(data, channel, intstart, intstop, bkgstart, bkgstop, period=None, npulses=None):
''' Computes peak integration from raw FastADC traces.
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment