# -*- coding: utf-8 -*-
""" Toolbox for SCS.
Various utility functions to quickly process data measured at the SCS instruments.
Copyright (2019) SCS Team.
"""
import numpy as np
from karabo_data import RunDirectory
import xarray as xr
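
# Mapping of short mnemonic names to the Karabo source name, property key and
# extra dimension labels passed to karabo_data's RunDirectory.get_array() in load().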
mnemonics = {
# Machine
"sase3": {'source':'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key':'sase3.pulseIds.value',
'dim':['bunchId']},
"sase2": {'source':'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key':'sase2.pulseIds.value',
'dim':['bunchId']},
"sase1": {'source':'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key':'sase1.pulseIds.value',
'dim':['bunchId']},
"maindump": {'source':'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key':'maindump.pulseIds.value',
'dim':['bunchId']},
"bunchpattern": {'source':'SCS_RR_UTC/TSYS/TIMESERVER',
'key':'readBunchPatternTable.value',
'dim':None},
"npulses_sase3": {'source':'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key':'sase3.nPulses.value',
'dim':None},
"npulses_sase1": {'source':'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key':'sase1.nPulses.value',
'dim':None},
# SA3
"nrj": {'source':'SA3_XTD10_MONO/MDL/PHOTON_ENERGY',
'key':'actualEnergy.value',
'dim':None},
"M2BEND": {'source': 'SA3_XTD10_MIRR-2/MOTOR/BENDER',
'key': 'actualPosition.value',
'dim':None},
"VSLIT": {'source':'SA3_XTD10_VSLIT/MDL/BLADE',
'key':'actualGap.value',
'dim':None},
"ESLIT": {'source':'SCS_XTD10_ESLIT/MDL/MAIN',
'key':'actualGap.value',
'dim':None},
"HSLIT": {'source':'SCS_XTD10_HSLIT/MDL/BLADE',
'key':'actualGap.value',
'dim':None},
"transmission": {'source':'SA3_XTD10_GATT/MDL/GATT_TRANSMISSION_MONITOR',
'key':'Estimated_Tr.value',
'dim':None},
"GATT_pressure": {'source':'P_GATT',
'key':'value.value',
'dim':None},
# XGMs
"SA3_XGM": {'source':'SA3_XTD10_XGM/XGM/DOOCS:output',
'key':'data.intensityTD',
'dim':['XGMbunchId']},
"SA3_XGM_SLOW": {'source':'SA3_XTD10_XGM/XGM/DOOCS',
'key':'pulseEnergy.photonFlux.value',
'dim':None},
"SCS_XGM": {'source':'SCS_BLU_XGM/XGM/DOOCS:output',
'key':'data.intensityTD',
'dim':['XGMbunchId']},
"SCS_XGM_SLOW": {'source':'SCS_BLU_XGM/XGM/DOOCS',
'key':'pulseEnergy.photonFlux.value',
'dim':None},
# KBS
"HFM_CAPB": {'source':'SCS_KBS_HFM/ASENS/CAPB',
'key':'value.value',
'dim':None},
"HFM_CAPF": {'source':'SCS_KBS_HFM/ASENS/CAPF',
'key':'value.value',
'dim':None},
"HFM_CAPM": {'source':'SCS_KBS_HFM/ASENS/CAPM',
'key':'value.value',
'dim':None},
"HFM_BENDERB": {'source':'SCS_KBS_HFM/MOTOR/BENDERB',
'key':'encoderPosition.value',
'dim':None},
"HFM_BENDERF": {'source':'SCS_KBS_HFM/MOTOR/BENDERF',
'key':'encoderPosition.value',
'dim':None},
"VFM_CAPB": {'source':'SCS_KBS_VFM/ASENS/CAPB',
'key':'value.value',
'dim':None},
"VFM_CAPF": {'source':'SCS_KBS_VFM/ASENS/CAPF',
'key':'value.value',
'dim':None},
"VFM_CAPM": {'source':'SCS_KBS_VFM/ASENS/CAPM',
'key':'value.value',
'dim':None},
"VFM_BENDERB": {'source':'SCS_KBS_VFM/MOTOR/BENDERB',
'key':'encoderPosition.value',
'dim':None},
"VFM_BENDERF": {'source':'SCS_KBS_VFM/MOTOR/BENDERF',
'key':'encoderPosition.value',
'dim':None},
# FFT
"scannerX": {'source':'SCS_CDIFFT_SAM/LMOTOR/SCANNERX',
'key':'actualPosition.value',
'dim':None},
"scannerY": {'source':'SCS_CDIFFT_SAM/MOTOR/SCANNERY',
'key':'actualPosition.value',
'dim':None},
"scannerY_enc": {'source':'SCS_CDIFFT_SAM/ENC/SCANNERY',
'key':'value.value',
'dim':None},
"SAM-Z": {'source':'SCS_CDIFFT_MOV/ENC/SAM_Z',
'key':'value.value',
'dim':None},
"magnet": {'source':'SCS_CDIFFT_MAG/SUPPLY/CURRENT',
'key':'actual_current.value',
'dim':None},
# FastCCD
"fastccd": {'source':'SCS_CDIDET_FCCD2M/DAQ/FCCD:daqOutput',
'key':'data.image.pixels',
'dim':['x', 'y']},
    # TIM (MCP 1, 2, 3, 4 are read out on digitizer channels D, C, B, A respectively)
"MCP1apd": {'source':'SCS_UTC1_ADQ/ADC/1:network',
'key':'digitizers.channel_1_D.apd.pulseIntegral',
'dim':['apdId']},
"MCP1raw": {'source':'SCS_UTC1_ADQ/ADC/1:network',
'key':'digitizers.channel_1_D.raw.samples',
'dim':['samplesId']},
"MCP2apd": {'source':'SCS_UTC1_ADQ/ADC/1:network',
'key':'digitizers.channel_1_C.apd.pulseIntegral',
'dim':['apdId']},
"MCP2raw": {'source':'SCS_UTC1_ADQ/ADC/1:network',
               'key':'digitizers.channel_1_C.raw.samples',
'dim':['samplesId']},
"MCP3apd": {'source':'SCS_UTC1_ADQ/ADC/1:network',
'key':'digitizers.channel_1_B.apd.pulseIntegral',
'dim':['apdId']},
"MCP3raw": {'source':'SCS_UTC1_ADQ/ADC/1:network',
               'key':'digitizers.channel_1_B.raw.samples',
'dim':['samplesId']},
"MCP4apd": {'source':'SCS_UTC1_ADQ/ADC/1:network',
'key':'digitizers.channel_1_A.apd.pulseIntegral',
'dim':['apdId']},
"MCP4raw": {'source':'SCS_UTC1_ADQ/ADC/1:network',
               'key':'digitizers.channel_1_A.raw.samples',
'dim': ['samplesId']}
}
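

# Illustrative sketch (not part of the original file): each mnemonic entry above
# supplies the arguments for karabo_data's RunDirectory.get_array(). The run
# path below is a placeholder, purely for illustration.
#
#   run = RunDirectory('/gpfs/exfel/exp/SCS/201901/p001234/raw/r0001/')
#   xgm = mnemonics['SCS_XGM']
#   arr = run.get_array(xgm['source'], xgm['key'], extra_dims=xgm['dim'])
#   # arr is an xarray DataArray with dimensions ('trainId', 'XGMbunchId')
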
def load(fields, runNB, proposalNB, semesterNB, topic='SCS', display=False, validate=False):
""" Load a run and extract the data. Output is an xarray with aligned trainIds
Inputs:
fields: list of mnemonic strings to load specific data such as "fastccd", "SCS_XGM",
or dictionnaries defining a custom mnemonic such as
{"extra": {'SCS_CDIFFT_MAG/SUPPLY/CURRENT', 'actual_current.value', None}}
runNB: run number as integer
proposalNB: string of the proposal number
semesterNB: string of the semester number where the proposal data are saved
topic: string of the topic
display: boolean, whether to show the run.info or not
validate: boolean, whether to run karabo-data-validate or not
Outputs:
res: an xarray DataSet with aligned trainIds
"""
    # raw data folder on GPFS: /gpfs/exfel/exp/<topic>/<semester>/<proposal>/raw/r<runNB>
    runFolder = '/gpfs/exfel/exp/{}/{}/{}/raw/r{:04d}/'.format(topic, semesterNB, proposalNB, runNB)
run = RunDirectory(runFolder)
    if validate:
        # karabo-data-validate is invoked through the shell; this requires an IPython/Jupyter session
        get_ipython().system('karabo-data-validate ' + runFolder)
if display:
run.info()
keys = []
vals = []
    # always load the pulse pattern info (copy first so the caller's list is not modified)
    fields = list(fields) + ["sase1", "sase3", "npulses_sase3", "npulses_sase1"]
    for f in fields:
        if isinstance(f, dict):
            # extract a mnemonic defined on the spot
            if len(f.keys()) > 1:
                print('Loading only one "on-the-spot" mnemonic at a time, skipping all others!')
            k = list(f.keys())[0]
            v = f[k]
        else:
            # extract the mnemonic from the table
            if f in mnemonics:
                v = mnemonics[f]
                k = f
            else:
                print('Unknown mnemonic "{}". Skipping!'.format(f))
                continue
        if k in keys:
            continue  # already loaded, skip
if display:
print('Loading {}'.format(k))
if v['source'] not in run.all_sources:
print('Source {} not found in run. Skipping!'.format(v['source']))
continue
vals.append(run.get_array(v['source'], v['key'], extra_dims=v['dim']))
keys.append(k)
    # align on trainId and keep only trains present in every loaded array
    aligned_vals = xr.align(*vals, join='inner')
result = dict(zip(keys, aligned_vals))
return xr.Dataset(result)
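
# Usage sketch with placeholder run/proposal numbers (not taken from the original file):
#
#   data = load(['SCS_XGM', 'MCP1apd',
#                {'extra': {'source': 'SCS_CDIFFT_MAG/SUPPLY/CURRENT',
#                           'key': 'actual_current.value', 'dim': None}}],
#               runNB=1, proposalNB='p001234', semesterNB='201901')
#   data['SCS_XGM'].mean(dim='XGMbunchId')  # average XGM intensity per train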