# -*- coding: utf-8 -*-
""" Toolbox for SCS.

    Various utilities function to quickly process data measured at the SCS instruments.

    Copyright (2019) SCS Team.
"""

import subprocess

import numpy as np
import xarray as xr
from karabo_data import RunDirectory

mnemonics = {
    "SCS_XGM": ['SCS_BLU_XGM/XGM/DOOCS:output', 'data.intensityTD', 1],
    "SCS_XGM_SLOW": ['SCS_BLU_XGM/XGM/DOOCS', 'pulseEnergy.photonFlux.value', 0],
    "SA3_XGM": ['SA3_XTD10_XGM/XGM/DOOCS:output', 'data.intensityTD', 1],
    "SA3_XGM_SLOW": ['SA3_XTD10_XGM/XGM/DOOCS', 'pulseEnergy.photonFlux.value', 0],
    "MCP1apd": ['SCS_UTC1_ADQ/ADC/1:network', 'digitizers.channel_1_D.apd.pulseIntegral', 1],
    "MCP1raw": ['SCS_UTC1_ADQ/ADC/1:network', 'digitizers.channel_1_D.raw.samples', 1],
    "MCP2apd": ['SCS_UTC1_ADQ/ADC/1:network', 'digitizers.channel_1_C.apd.pulseIntegral', 1],
    "MCP2raw": ['SCS_UTC1_ADQ/ADC/1:network', 'digitizers.channel_1_D.raw.samples', 1],
    "MCP3apd": ['SCS_UTC1_ADQ/ADC/1:network', 'digitizers.channel_1_B.apd.pulseIntegral', 1],
    "MCP3raw": ['SCS_UTC1_ADQ/ADC/1:network', 'digitizers.channel_1_D.raw.samples', 1],
    "MCP4apd": ['SCS_UTC1_ADQ/ADC/1:network', 'digitizers.channel_1_A.apd.pulseIntegral', 1],
    "MCP4raw": ['SCS_UTC1_ADQ/ADC/1:network', 'digitizers.channel_1_D.raw.samples', 1],
    "nrj": ['SA3_XTD10_MONO/MDL/PHOTON_ENERGY','actualEnergy.value', 0],
    "sase3": ['SCS_RR_UTC/MDL/BUNCH_DECODER','sase3.pulseIds.value', 1],
    "sase2": ['SCS_RR_UTC/MDL/BUNCH_DECODER','sase2.pulseIds.value', 1],
    "sase1": ['SCS_RR_UTC/MDL/BUNCH_DECODER','sase1.pulseIds.value', 1],
    "maindump": ['SCS_RR_UTC/MDL/BUNCH_DECODER','maindump.pulseIds.value', 1],
    "bunchpattern": ['SCS_RR_UTC/TSYS/TIMESERVER','readBunchPatternTable.value', 0],
    "npulses_sase3": ['SCS_RR_UTC/MDL/BUNCH_DECODER','sase3.nPulses.value',0],
    "npulses_sase1": ['SCS_RR_UTC/MDL/BUNCH_DECODER','sase1.nPulses.value',0],
    "fastccd": ['SCS_CDIDET_FCCD2M/DAQ/FCCD:daqOutput','data.image.pixels', 0],
    "transmission": ['SA3_XTD10_GATT/MDL/GATT_TRANSMISSION_MONITOR', 'Estimated_Tr.value', 0],
    "GATT_pressure": ['P_GATT', 'value.value', 0],
    "scannerX": ['SCS_CDIFFT_SAM/LMOTOR/SCANNERX', 'actualPosition.value', 0],
    "scannerY": ['SCS_CDIFFT_SAM/MOTOR/SCANNERY', 'actualPosition.value', 0],
    "scannerY_enc": ['SCS_CDIFFT_SAM/ENC/SCANNERY', 'value.value', 0],
    "magnet": ['SCS_CDIFFT_MAG/SUPPLY/CURRENT', 'actual_current.value', 0]
}

def load(fields, runNB, proposalNB, semesterNB, topic='SCS', display=False):
    """ Load a run and extract the data. Output is an xarray Dataset.

        Inputs:
            fields: list of mnemonic strings to load specific data such as "fastccd", "SCS_XGM",
                or dictionaries defining a custom mnemonic such as
                {"extra": ['SCS_CDIFFT_MAG/SUPPLY/CURRENT', 'actual_current.value', 0]}
            runNB: run number as integer
            proposalNB: string of the proposal number
            semesterNB: string of the semester number where the proposal data are saved
            topic: string of the topic
            display: boolean, whether to show the run.info or not

        Outputs:
            res: an xarray DataSet with aligned trainIds
    """
    import subprocess  # local import keeps the module importable even if the deps block is unchanged

    runFolder = '/gpfs/exfel/exp/{}/{}/{}/raw/r{:04d}/'.format(topic, semesterNB, proposalNB, runNB)
    run = RunDirectory(runFolder)
    # Validate the run files. subprocess.run with an argument list (shell=False)
    # replaces the former get_ipython().system() call, which raised NameError
    # outside an IPython session and concatenated the path into a shell string.
    subprocess.run(['karabo-data-validate', runFolder])
    if display:
        run.info()

    keys = []
    vals = []

    # Always load pulse pattern info. Build a new list instead of 'fields += ...'
    # so the caller's list is not mutated in place.
    fields = list(fields) + ["sase1", "sase3", "npulses_sase3", "npulses_sase1"]

    for f in fields:
        if isinstance(f, dict):
            # mnemonic defined on the spot: {name: [source, key, extra_dim_flag]}
            for k in f.keys():
                if k in keys:
                    # already loaded under this name
                    continue
                if display:
                    print('Loading {}'.format(k))
                if f[k][2]:
                    vals.append(run.get_array(f[k][0], f[k][1], extra_dims=[k+'_dim',]))
                else:
                    vals.append(run.get_array(f[k][0], f[k][1]))
                keys.append(k)
        elif f in mnemonics:
            if f in keys:
                # already loaded (e.g. user also requested a pulse-pattern field)
                continue
            if display:
                print('Loading {}'.format(f))
            if mnemonics[f][0] not in run.all_sources:
                print('source {} not found in run. Skipping!'.format(mnemonics[f][0]))
                continue
            if mnemonics[f][2]:
                vals.append(run.get_array(mnemonics[f][0], mnemonics[f][1], extra_dims=[f+'_dim',]))
            else:
                vals.append(run.get_array(mnemonics[f][0], mnemonics[f][1]))
            keys.append(f)
        else:
            print('Unknown mnemonic "{}". Skipping!'.format(f))

    # Intersect all trainId coordinates so every variable covers the same trains.
    aligned_vals = xr.align(*vals, join='inner')
    result = dict(zip(keys, aligned_vals))
    return xr.Dataset(result)