Commits on Source (710)
Showing with 14837 additions and 642 deletions
doc/changelog.rst merge=union
.ipynb*
src/*.egg*
*.pyc
*__pycache__*
tmp/
# The Docker image that will be used to build your app
image: sphinxdoc/sphinx

# Functions that should be executed before the build script is run
before_script: []

pages:
  script:
    - apt-get update
    - apt-get install -y pandoc
    - pip3 install sphinx-autoapi
    - pip3 install nbsphinx
    - pip3 install pydata-sphinx-theme
    - sphinx-build -b html doc public
  pages: True
  artifacts:
    paths:
      # The folder that contains the files to be exposed at the Page URL
      - public
  rules:
    # This ensures that only pushes to the default branch will trigger
    # a pages deploy
    - if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH
import numpy as np


def positionToDelay(pos, origin=0, invert=False, reflections=1):
    ''' Converts a motor position in mm into optical delay in picoseconds.
        Inputs:
            pos: array-like delay stage motor position
            origin: motor position of time zero in mm
            invert: bool, inverts the sign of delay if True
            reflections: number of bounces in the delay stage
        Output:
            delay in picoseconds
    '''
    c_ = 299792458 * 1e-9  # speed of light in mm/ps
    x = -1 if invert else 1
    return 2 * reflections * (pos - origin) * x / c_


def degToRelPower(x, theta0=0):
    ''' Converts a half-wave plate position in degrees into relative power
        between 0 and 1.
        Inputs:
            x: array-like positions of half-wave plate, in degrees
            theta0: position for which relative power is zero
        Output:
            array-like relative power
    '''
    return np.sin(2 * (x - theta0) * np.pi / 180)**2
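A quick usage sketch of these two helpers; the motor positions and angles below are purely illustrative:

# Illustrative values only, not from a real run.
import numpy as np

positions = np.array([22.5, 23.0, 23.5])         # delay stage positions in mm
print(positionToDelay(positions, origin=23.0))   # optical delay in ps relative to time zero

angles = np.array([0.0, 22.5, 45.0])             # half-wave plate angles in degrees
print(degToRelPower(angles))                     # relative power: 0.0, 0.5, 1.0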
# -*- coding: utf-8 -*-
""" Toolbox for SCS.
Various utility functions to quickly process data measured at the SCS instrument.
Copyright (2019) SCS Team.
"""
import numpy as np
from karabo_data import by_index, RunDirectory
from karabo_data.read_machinery import find_proposal
import xarray as xr
import os
from ToolBox.bunch_pattern import extractBunchPattern
mnemonics = {
# Machine
"sase3": {'source':'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key':'sase3.pulseIds.value',
'dim':['bunchId']},
"sase2": {'source':'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key':'sase2.pulseIds.value',
'dim':['bunchId']},
"sase1": {'source':'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key':'sase1.pulseIds.value',
'dim':['bunchId']},
"maindump": {'source':'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key':'maindump.pulseIds.value',
'dim':['bunchId']},
"bunchpattern": {'source':'SCS_RR_UTC/TSYS/TIMESERVER',
'key':'readBunchPatternTable.value',
'dim':None},
"bunchPatternTable": {'source':'SCS_RR_UTC/TSYS/TIMESERVER',
'key':'bunchPatternTable.value',
'dim':['pulse_slot']},
"npulses_sase3": {'source':'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key':'sase3.nPulses.value',
'dim':None},
"npulses_sase1": {'source':'SCS_RR_UTC/MDL/BUNCH_DECODER',
'key':'sase1.nPulses.value',
'dim':None},
#Bunch Arrival Monitors
"BAM6": {'source':'SCS_ILH_LAS/DOOCS/BAM_414_B2:output',
'key':'data.lowChargeArrivalTime',
'dim':['BAMbunchId']},
"BAM7": {'source':'SCS_ILH_LAS/DOOCS/BAM_1932M_TL:output',
'key':'data.lowChargeArrivalTime',
'dim':['BAMbunchId']},
"BAM8": {'source':'SCS_ILH_LAS/DOOCS/BAM_1932S_TL:output',
'key':'data.lowChargeArrivalTime',
'dim':['BAMbunchId']},
# SA3
"nrj": {'source':'SA3_XTD10_MONO/MDL/PHOTON_ENERGY',
'key':'actualEnergy.value',
'dim':None},
"M2BEND": {'source': 'SA3_XTD10_MIRR-2/MOTOR/BENDER',
'key': 'actualPosition.value',
'dim':None},
"VSLIT": {'source':'SA3_XTD10_VSLIT/MDL/BLADE',
'key':'actualGap.value',
'dim':None},
"ESLIT": {'source':'SCS_XTD10_ESLIT/MDL/MAIN',
'key':'actualGap.value',
'dim':None},
"HSLIT": {'source':'SCS_XTD10_HSLIT/MDL/BLADE',
'key':'actualGap.value',
'dim':None},
"transmission": {'source':'SA3_XTD10_GATT/MDL/GATT_TRANSMISSION_MONITOR',
'key':'Estimated_Tr.value',
'dim':None},
"GATT_pressure": {'source':'P_GATT',
'key':'value.value',
'dim':None},
"navitar": {'source':'SCS_XTD10_IMGES/CAM/BEAMVIEW_NAVITAR:daqOutput',
'key':'data.image.pixels',
'dim':['x','y']},
"UND": {'source':'SA3_XTD10_UND/DOOCS/PHOTON_ENERGY',
'key':'actualPosition.value',
'dim':None},
#DPS imagers
"DPS2CAM2": {'source':'SCS_BLU_DPS-2/CAM/IMAGER2CAMERA:daqOutput',
'key':'data.image.pixels',
'dim':['dps2cam2_y', 'dps2cam2_x']},
# XTD10 XGM
## keithley
"XTD10_photonFlux": {'source':'SA3_XTD10_XGM/XGM/DOOCS',
'key':'pulseEnergy.photonFlux.value',
'dim':None},
"XTD10_photonFlux_sigma": {'source':'SA3_XTD10_XGM/XGM/DOOCS',
'key':'pulseEnergy.photonFluxSigma.value',
'dim':None},
## ADC
"XTD10_XGM": {'source':'SA3_XTD10_XGM/XGM/DOOCS:output',
'key':'data.intensityTD',
'dim':['XGMbunchId']},
"XTD10_XGM_sigma": {'source':'SA3_XTD10_XGM/XGM/DOOCS:output',
'key':'data.intensitySigmaTD',
'dim':['XGMbunchId']},
"XTD10_SA3": {'source':'SA3_XTD10_XGM/XGM/DOOCS:output',
'key':'data.intensitySa3TD',
'dim':['XGMbunchId']},
"XTD10_SA3_sigma": {'source':'SA3_XTD10_XGM/XGM/DOOCS:output',
'key':'data.intensitySa3SigmaTD',
'dim':['XGMbunchId']},
"XTD10_SA1": {'source':'SA3_XTD10_XGM/XGM/DOOCS:output',
'key':'data.intensitySa1TD',
'dim':['XGMbunchId']},
"XTD10_SA1_sigma": {'source':'SA3_XTD10_XGM/XGM/DOOCS:output',
'key':'data.intensitySa1SigmaTD',
'dim':['XGMbunchId']},
## low pass averaged ADC
"XTD10_slowTrain": {'source':'SA3_XTD10_XGM/XGM/DOOCS',
'key':'controlData.slowTrain.value',
'dim':None},
"XTD10_slowTrain_SA1": {'source':'SA3_XTD10_XGM/XGM/DOOCS',
'key':'controlData.slowTrainSa1.value',
'dim':None},
"XTD10_slowTrain_SA3": {'source':'SA3_XTD10_XGM/XGM/DOOCS',
'key':'controlData.slowTrainSa3.value',
'dim':None},
# SCS XGM
## keithley
"SCS_photonFlux": {'source':'SCS_BLU_XGM/XGM/DOOCS',
'key':'pulseEnergy.photonFlux.value',
'dim':None},
"SCS_photonFlux_sigma": {'source':'SCS_BLU_XGM/XGM/DOOCS',
'key':'pulseEnergy.photonFluxSigma.value',
'dim':None},
## ADC
"SCS_XGM": {'source':'SCS_BLU_XGM/XGM/DOOCS:output',
'key':'data.intensityTD',
'dim':['XGMbunchId']},
"SCS_XGM_sigma": {'source':'SCS_BLU_XGM/XGM/DOOCS:output',
'key':'data.intensitySigmaTD',
'dim':['XGMbunchId']},
"SCS_SA1": {'source':'SCS_BLU_XGM/XGM/DOOCS:output',
'key':'data.intensitySa1TD',
'dim':['XGMbunchId']},
"SCS_SA1_sigma": {'source':'SCS_BLU_XGM/XGM/DOOCS:output',
'key':'data.intensitySa1SigmaTD',
'dim':['XGMbunchId']},
"SCS_SA3": {'source':'SCS_BLU_XGM/XGM/DOOCS:output',
'key':'data.intensitySa3TD',
'dim':['XGMbunchId']},
"SCS_SA3_sigma": {'source':'SCS_BLU_XGM/XGM/DOOCS:output',
'key':'data.intensitySa3SigmaTD',
'dim':['XGMbunchId']},
## low pass averaged ADC
"SCS_slowTrain": {'source':'SCS_BLU_XGM/XGM/DOOCS',
'key':'controlData.slowTrain.value',
'dim':None},
"SCS_slowTrain_SA1": {'source':'SCS_BLU_XGM/XGM/DOOCS',
'key':'controlData.slowTrainSa1.value',
'dim':None},
"SCS_slowTrain_SA3": {'source':'SCS_BLU_XGM/XGM/DOOCS',
'key':'controlData.slowTrainSa3.value',
'dim':None},
# KBS
"HFM_CAPB": {'source':'SCS_KBS_HFM/ASENS/CAPB',
'key':'value.value',
'dim':None},
"HFM_CAPF": {'source':'SCS_KBS_HFM/ASENS/CAPF',
'key':'value.value',
'dim':None},
"HFM_CAPM": {'source':'SCS_KBS_HFM/ASENS/CAPM',
'key':'value.value',
'dim':None},
"HFM_BENDERB": {'source':'SCS_KBS_HFM/MOTOR/BENDERB',
'key':'encoderPosition.value',
'dim':None},
"HFM_BENDERF": {'source':'SCS_KBS_HFM/MOTOR/BENDERF',
'key':'encoderPosition.value',
'dim':None},
"VFM_CAPB": {'source':'SCS_KBS_VFM/ASENS/CAPB',
'key':'value.value',
'dim':None},
"VFM_CAPF": {'source':'SCS_KBS_VFM/ASENS/CAPF',
'key':'value.value',
'dim':None},
"VFM_CAPM": {'source':'SCS_KBS_VFM/ASENS/CAPM',
'key':'value.value',
'dim':None},
"VFM_BENDERB": {'source':'SCS_KBS_VFM/MOTOR/BENDERB',
'key':'encoderPosition.value',
'dim':None},
"VFM_BENDERF": {'source':'SCS_KBS_VFM/MOTOR/BENDERF',
'key':'encoderPosition.value',
'dim':None},
# AFS LASER
"AFS_PhaseShifter": {'source':'SCS_ILH_LAS/PHASESHIFTER/DOOCS',
'key':'actualPosition.value',
'dim':None},
"AFS_DelayLine": {'source':'SCS_ILH_LAS/MOTOR/LT3',
'key':'AActualPosition.value',
'dim':None},
"AFS_HalfWP": {'source':'SCS_ILH_LAS/MOTOR/ROT_OPA_BWP1',
'key':'actualPosition.value',
'dim':None},
"AFS_FocusLens": {'source':'SCS_ILH_LAS/MOTOR/LT_SPARE1',
'key':'actualPosition.value',
'dim':None},
# 2nd lens of telescope
"AFS_TeleLens": {'source':'SCS_ILH_LAS/MOTOR/LT2',
'key':'actualPosition.value',
'dim':None},
# PP LASER 800 nm path
"PP800_PhaseShifter": {'source':'SCS_ILH_LAS/DOOCS/PP800_PHASESHIFTER',
'key':'actualPosition.value',
'dim':None},
"PP800_SynchDelayLine": {'source':'SCS_ILH_LAS/DOOCS/PPL_OPT_DELAY',
'key':'actualPosition.value',
'dim':None},
"PP800_DelayLine": {'source':'SCS_ILH_LAS/MOTOR/LT3',
'key':'AActualPosition.value',
'dim':None},
"PP800_HalfWP": {'source':'SCS_ILH_LAS/MOTOR/ROT8WP1',
'key':'actualPosition.value',
'dim':None},
"PP800_FocusLens": {'source':'SCS_ILH_LAS/MOTOR/LT_SPARE1',
'key':'actualPosition.value',
'dim':None},
# 1st lens of telescope (setup of August 2019)
"PP800_TeleLens": {'source':'SCS_ILH_LAS/MOTOR/LT7',
'key':'actualPosition.value',
'dim':None},
"ILH_8CAM1": {'source':'SCS_ILH_LAS/CAM/8CAM1:daqOutput',
'key':'data.image.pixels',
'dim':['8cam1_y', '8cam1_x']},
# FFT
"scannerX": {'source':'SCS_CDIFFT_SAM/LMOTOR/SCANNERX',
'key':'actualPosition.value',
'dim':None},
"scannerY": {'source':'SCS_CDIFFT_SAM/MOTOR/SCANNERY',
'key':'actualPosition.value',
'dim':None},
"scannerY_enc": {'source':'SCS_CDIFFT_SAM/ENC/SCANNERY',
'key':'value.value',
'dim':None},
"SAM-Z": {'source':'SCS_CDIFFT_MOV/ENC/SAM_Z',
'key':'value.value',
'dim':None},
"magnet": {'source':'SCS_CDIFFT_MAG/ASENS/CURRENT',
'key':'value.value',
'dim':None},
"magnet_old": {'source':'SCS_CDIFFT_MAG/SUPPLY/CURRENT',
'key':'actualCurrent.value',
'dim':None},
"Vertical_FDM": {'source':'SCS_CDIFFT_LDM/CAM/CAMERA1A:daqOutput',
'key':'data.image.pixels',
'dim':['vfdm_y', 'vfdm_x']},
# FastCCD: raw images if loaded from the raw folder,
# dark-subtracted and relative-gain-corrected if loaded from the proc folder
"fastccd": {'source':'SCS_CDIDET_FCCD2M/DAQ/FCCD:daqOutput',
'key':'data.image.pixels',
'dim':['x', 'y']},
# FastCCD with common mode correction
"fastccd_cm": {'source':'SCS_CDIDET_FCCD2M/DAQ/FCCD:daqOutput',
'key':'data.image.pixels_cm',
'dim':['x', 'y']},
# FastCCD charge split correction in very low photon count regime
"fastccd_classified": {'source':'SCS_CDIDET_FCCD2M/DAQ/FCCD:daqOutput',
'key':'data.image.pixels_classified',
'dim':['x', 'y']},
# FastCCD event multiplicity from the charge split correction:
# 0: no events
# 100, 101: single events
# 200-203: charge split into two pixels in four different orientations
# 300-303: charge split into three pixels in four different orientations
# 400-403: charge split into four pixels in four different orientations
# 1000: charge in more than four neighboring pixels. Cannot be produced by a single photon alone.
"fastccd_patterns": {'source':'SCS_CDIDET_FCCD2M/DAQ/FCCD:daqOutput',
'key':'data.image.patterns',
'dim':['x', 'y']},
# FastCCD gain map, 0 high gain, 1 medium gain, 2 low gain
"fastccd_gain": {'source':'SCS_CDIDET_FCCD2M/DAQ/FCCD:daqOutput',
'key':'data.image.gain',
'dim':['x', 'y']},
# FastCCD mask, bad pixel map to be ignored if > 0
"fastccd_mask": {'source':'SCS_CDIDET_FCCD2M/DAQ/FCCD:daqOutput',
'key':'data.image.mask',
'dim':['x', 'y']},
# TIM
"MCP1apd": {'source':'SCS_UTC1_ADQ/ADC/1:network',
'key':'digitizers.channel_1_D.apd.pulseIntegral',
'dim':['apdId']},
"MCP1raw": {'source':'SCS_UTC1_ADQ/ADC/1:network',
'key':'digitizers.channel_1_D.raw.samples',
'dim':['samplesId']},
"MCP2apd": {'source':'SCS_UTC1_ADQ/ADC/1:network',
'key':'digitizers.channel_1_C.apd.pulseIntegral',
'dim':['apdId']},
"MCP2raw": {'source':'SCS_UTC1_ADQ/ADC/1:network',
'key':'digitizers.channel_1_C.raw.samples',
'dim':['samplesId']},
"MCP3apd": {'source':'SCS_UTC1_ADQ/ADC/1:network',
'key':'digitizers.channel_1_B.apd.pulseIntegral',
'dim':['apdId']},
"MCP3raw": {'source':'SCS_UTC1_ADQ/ADC/1:network',
'key':'digitizers.channel_1_B.raw.samples',
'dim':['samplesId']},
"MCP4apd": {'source':'SCS_UTC1_ADQ/ADC/1:network',
'key':'digitizers.channel_1_A.apd.pulseIntegral',
'dim':['apdId']},
"MCP4raw": {'source':'SCS_UTC1_ADQ/ADC/1:network',
'key':'digitizers.channel_1_A.raw.samples',
'dim': ['samplesId']},
# FastADC
"FastADC0peaks": {'source':'SCS_UTC1_MCP/ADC/1:channel_0.output',
'key':'data.peaks',
'dim':['peakId']},
"FastADC0raw": {'source':'SCS_UTC1_MCP/ADC/1:channel_0.output',
'key':'data.rawData',
'dim':['fadc_samplesId']},
"FastADC1peaks": {'source':'SCS_UTC1_MCP/ADC/1:channel_1.output',
'key':'data.peaks',
'dim':['peakId']},
"FastADC1raw": {'source':'SCS_UTC1_MCP/ADC/1:channel_1.output',
'key':'data.rawData',
'dim':['fadc_samplesId']},
"FastADC2peaks": {'source':'SCS_UTC1_MCP/ADC/1:channel_2.output',
'key':'data.peaks',
'dim':['peakId']},
"FastADC2raw": {'source':'SCS_UTC1_MCP/ADC/1:channel_2.output',
'key':'data.rawData',
'dim':['fadc_samplesId']},
"FastADC3peaks": {'source':'SCS_UTC1_MCP/ADC/1:channel_3.output',
'key':'data.peaks',
'dim':['peakId']},
"FastADC3raw": {'source':'SCS_UTC1_MCP/ADC/1:channel_3.output',
'key':'data.rawData',
'dim':['fadc_samplesId']},
"FastADC4peaks": {'source':'SCS_UTC1_MCP/ADC/1:channel_4.output',
'key':'data.peaks',
'dim':['peakId']},
"FastADC4raw": {'source':'SCS_UTC1_MCP/ADC/1:channel_4.output',
'key':'data.rawData',
'dim':['fadc_samplesId']},
"FastADC5peaks": {'source':'SCS_UTC1_MCP/ADC/1:channel_5.output',
'key':'data.peaks',
'dim':['peakId']},
"FastADC5raw": {'source':'SCS_UTC1_MCP/ADC/1:channel_5.output',
'key':'data.rawData',
'dim':['fadc_samplesId']},
"FastADC6peaks": {'source':'SCS_UTC1_MCP/ADC/1:channel_6.output',
'key':'data.peaks',
'dim':['peakId']},
"FastADC6raw": {'source':'SCS_UTC1_MCP/ADC/1:channel_6.output',
'key':'data.rawData',
'dim':['fadc_samplesId']},
"FastADC7peaks": {'source':'SCS_UTC1_MCP/ADC/1:channel_7.output',
'key':'data.peaks',
'dim':['peakId']},
"FastADC7raw": {'source':'SCS_UTC1_MCP/ADC/1:channel_7.output',
'key':'data.rawData',
'dim':['fadc_samplesId']},
"FastADC8peaks": {'source':'SCS_UTC1_MCP/ADC/1:channel_8.output',
'key':'data.peaks',
'dim':['peakId']},
"FastADC8raw": {'source':'SCS_UTC1_MCP/ADC/1:channel_8.output',
'key':'data.rawData',
'dim':['fadc_samplesId']},
"FastADC9peaks": {'source':'SCS_UTC1_MCP/ADC/1:channel_9.output',
'key':'data.peaks',
'dim':['peakId']},
"FastADC9raw": {'source':'SCS_UTC1_MCP/ADC/1:channel_9.output',
'key':'data.rawData',
'dim':['fadc_samplesId']},
# KARABACON
"KARABACON": {'source':'SCS_DAQ_SCAN/MDL/KARABACON',
'key': 'actualStep.value',
'dim': None},
#GOTTHARD
"Gotthard1": {'source':'SCS_PAM_XOX/DET/GOTTHARD_RECEIVER1:daqOutput',
'key': 'data.adc',
'dim': ['gott_pId','pixelId']},
"Gotthard2": {'source':'SCS_PAM_XOX/DET/GOTTHARD_RECEIVER2:daqOutput',
'key': 'data.adc',
'dim': ['gott_pId','pixelId']}
}
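As a rough illustration of how a mnemonic maps onto a karabo_data call (the run path below is a placeholder), a single entry can also be used directly:

# Placeholder run folder; adapt to an actual proposal and run.
from karabo_data import RunDirectory

run = RunDirectory('/gpfs/exfel/exp/SCS/201901/p002212/raw/r0001')
mnemo = mnemonics['SCS_SA3']
sa3 = run.get_array(mnemo['source'], mnemo['key'], extra_dims=mnemo['dim'])
# sa3 is an xarray DataArray with dimensions ('trainId', 'XGMbunchId')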
def load(fields, runNB, proposalNB, subFolder='raw', display=False, validate=False,
         subset=by_index[:], rois={}, useBPTable=True):
    """ Load a run and extract the data. Output is an xarray with aligned trainIds.
        Inputs:
            fields: list of mnemonic strings to load specific data such as "fastccd",
                "SCS_XGM", or dictionaries defining a custom mnemonic such as
                {"extra": {'source': 'SCS_CDIFFT_MAG/SUPPLY/CURRENT',
                           'key': 'actual_current.value', 'dim': None}}
            runNB: (str, int) run number, e.g. 'r0103' or 103
            proposalNB: (str, int) proposal number, e.g. 'p002252' or 2252
            subFolder: (str) sub-folder from which to load the data. Use 'raw' for raw
                data or 'proc' for processed data.
            display: (bool) whether to show the run.info or not
            validate: (bool) whether to run karabo-data-validate or not
            subset: a subset of trains that can be loaded with by_index[:5] for the
                first 5 trains
            rois: a dictionary of mnemonics with ROI definitions and the desired
                names, for example {'fastccd': {'ref': {'roi': by_index[730:890, 535:720],
                'dim': ['ref_x', 'ref_y']}, 'sam': {'roi': by_index[1050:1210, 535:720],
                'dim': ['sam_x', 'sam_y']}}}
            useBPTable: If True, uses the raw bunch pattern table to extract sase pulse
                number and indices in the trains. If False, loads the data from the
                BUNCH_DECODER middle layer device.
        Outputs:
            res: an xarray DataSet with aligned trainIds
    """
    if isinstance(runNB, int):
        runNB = 'r{:04d}'.format(runNB)
    if isinstance(proposalNB, int):
        proposalNB = 'p{:06d}'.format(proposalNB)
    runFolder = os.path.join(find_proposal(proposalNB), subFolder, runNB)
    run = RunDirectory(runFolder).select_trains(subset)
    if validate:
        get_ipython().system('karabo-data-validate ' + runFolder)
    if display:
        print('Loading data from {}'.format(runFolder))
        run.info()
    keys = []
    vals = []

    # load pulse pattern info
    if useBPTable:
        bp_mnemo = mnemonics['bunchPatternTable']
        if bp_mnemo['source'] not in run.all_sources:
            print('Source {} not found in run. Skipping!'.format(
                mnemonics['bunchPatternTable']['source']))
        else:
            bp_table = run.get_array(bp_mnemo['source'], bp_mnemo['key'],
                                     extra_dims=bp_mnemo['dim'])
            sase1, npulses_sase1, dummy = extractBunchPattern(bp_table, 'sase1')
            sase3, npulses_sase3, dummy = extractBunchPattern(bp_table, 'sase3')
            keys += ["sase1", "npulses_sase1", "sase3", "npulses_sase3"]
            vals += [sase1, npulses_sase1, sase3, npulses_sase3]
    else:
        fields += ["sase1", "sase3", "npulses_sase3", "npulses_sase1"]

    for f in fields:
        if type(f) == dict:
            # extracting mnemonic defined on the spot
            if len(f.keys()) > 1:
                print('Loading only one "on-the-spot" mnemonic at a time, '
                      'skipping all others!')
            k = list(f.keys())[0]
            v = f[k]
        else:
            # extracting mnemonic from the table
            if f in mnemonics:
                v = mnemonics[f]
                k = f
            else:
                print('Unknown mnemonic "{}". Skipping!'.format(f))
                continue
        if k in keys:
            continue  # already loaded, skip
        if display:
            print('Loading {}'.format(k))
        if v['source'] not in run.all_sources:
            print('Source {} not found in run. Skipping!'.format(v['source']))
            continue
        if k not in rois:
            # no ROI selection, we read everything
            vals.append(run.get_array(v['source'], v['key'], extra_dims=v['dim']))
            keys.append(k)
        else:
            # ROI selection: for each ROI we select a region of the data and
            # save it with a new name and dimensions
            for nk, nv in rois[k].items():
                vals.append(run.get_array(v['source'], v['key'],
                                          extra_dims=nv['dim'], roi=nv['roi']))
                keys.append(nk)

    aligned_vals = xr.align(*vals, join='inner')
    result = dict(zip(keys, aligned_vals))
    result = xr.Dataset(result)
    result.attrs['run'] = run
    result.attrs['runFolder'] = runFolder
    return result
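A minimal usage sketch of load(); the run number below is a placeholder, the proposal number and ROI follow the examples in the docstring:

# Placeholder run number; adapt to an actual experiment.
from karabo_data import by_index

fields = ['SCS_XGM', 'nrj', 'PP800_DelayLine', 'fastccd']
rois = {'fastccd': {'sam': {'roi': by_index[1050:1210, 535:720],
                            'dim': ['sam_x', 'sam_y']}}}
data = load(fields, runNB=155, proposalNB=2252, rois=rois, display=True)
# data is an xarray Dataset whose variables share an aligned trainId coordinate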
def concatenateRuns(runs):
    """ Sorts and concatenates a list of runs with identical data variables along the
        trainId dimension.
        Input:
            runs: (list) the xarray Datasets to concatenate
        Output:
            a concatenated xarray Dataset
    """
    firstTid = {i: int(run.trainId[0].values) for i, run in enumerate(runs)}
    orderedDict = dict(sorted(firstTid.items(), key=lambda t: t[1]))
    orderedRuns = [runs[i] for i in orderedDict]
    keys = orderedRuns[0].keys()
    for run in orderedRuns[1:]:
        if run.keys() != keys:
            print('Data fields between different runs are not identical. Cannot combine runs.')
            return

    result = xr.concat(orderedRuns, dim='trainId')
    for k in orderedRuns[0].attrs.keys():
        result.attrs[k] = [run.attrs[k] for run in orderedRuns]
    return result
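For example, two datasets returned by load() can be combined as follows (run numbers are placeholders):

# Placeholder run numbers for illustration.
run_a = load(['SCS_XGM'], runNB=155, proposalNB=2252)
run_b = load(['SCS_XGM'], runNB=156, proposalNB=2252)
combined = concatenateRuns([run_b, run_a])  # runs are sorted by first trainId before concatenation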
Documentation
#############
Online documentation can be found `here <https://scs.pages.xfel.eu/toolbox/>`_.
# SCS ToolBox
## Kernel
The SCS ToolBox is designed to work in the exfel_anaconda3 environment. This can
be selected on the online cluster by running:
`module load exfel exfel_anaconda3`
before launching the jupyter-notebook, or on max-jhub by selecting the 'xfel'
kernel instead of the 'Python 3' anaconda environment maintained by DESY.
1.7.0
from ToolBox.Load import *
from ToolBox.xgm import *
from ToolBox.XAS import *
from ToolBox.knife_edge import *
from ToolBox.Laser_utils import *
from ToolBox.DSSC import DSSC
from ToolBox.azimuthal_integrator import *
from ToolBox.DSSC1module import *
from ToolBox.bunch_pattern import *
from ToolBox.FastCCD import *
import numpy as np
class azimuthal_integrator(object):
    def __init__(self, imageshape, center, polar_range, dr=2, aspect=204/236):
        '''
        Create a reusable integrator for repeated azimuthal integration of similar
        images. Calculates array indices for a given parameter set that allows
        fast recalculation.

        Parameters
        ==========
        imageshape : tuple of ints
            The shape of the images to be integrated over.
        center : tuple of ints
            center coordinates in pixels
        polar_range : tuple of ints
            start and stop polar angle (in degrees) to restrict integration to wedges
        dr : int, default 2
            radial width of the integration slices. Takes non-square DSSC pixels into account.
        aspect : float, default 204/236 for DSSC
            aspect ratio of the pixel pitch

        Returns
        =======
        ai : azimuthal_integrator instance
            Instance can directly be called with image data:
            > az_intensity = ai(image)
            radial distances and the polar mask are accessible as attributes:
            > ai.distance
            > ai.polar_mask
        '''
        self.shape = imageshape
        cx, cy = center
        print(f'azimuthal center: {center}')
        sx, sy = imageshape
        xcoord, ycoord = np.ogrid[:sx, :sy]
        xcoord -= cx
        ycoord -= cy

        # distance from center, hexagonal pixel shape taken into account
        dist_array = np.hypot(xcoord * aspect, ycoord)

        # array of polar angles
        if np.abs(polar_range[1] - polar_range[0]) > 180:
            raise ValueError('Integration angle too wide, should be within 180 degrees')
        if np.abs(polar_range[1] - polar_range[0]) < 1e-6:
            raise ValueError('Integration angle too narrow')
        tmin, tmax = np.deg2rad(np.sort(polar_range)) % np.pi
        polar_array = np.arctan2(xcoord, ycoord)
        polar_array = np.mod(polar_array, np.pi)
        self.polar_mask = (polar_array > tmin) * (polar_array < tmax)

        self.maxdist = max(sx - cx, sy - cy)

        ix, iy = np.indices(dimensions=(sx, sy))
        self.index_array = np.ravel_multi_index((ix, iy), (sx, sy))

        self.distance = np.array([])
        self.flat_indices = []
        for dist in range(dr, self.maxdist, dr):
            ring_mask = self.polar_mask * (dist_array >= (dist - dr)) * (dist_array < dist)
            self.flat_indices.append(self.index_array[ring_mask])
            self.distance = np.append(self.distance, dist)

    def __call__(self, image):
        assert self.shape == image.shape, 'image shape does not match'
        image_flat = image.flatten()
        return np.array([np.nansum(image_flat[indices])
                         for indices in self.flat_indices])
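A minimal usage sketch; the image size, center and angular range below are illustrative only:

# Illustrative geometry; adapt center and polar_range to the actual detector.
import numpy as np

image = np.random.rand(128, 512)   # e.g. one DSSC-module-sized frame
ai = azimuthal_integrator(image.shape, center=(64, 256), polar_range=(0, 90), dr=2)
az_intensity = ai(image)           # summed intensity in each radial ring
# plot az_intensity versus ai.distance to obtain the azimuthally integrated profile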
BOZ: Beam-Splitting Off-axis Zone plate analysis
------------------------------------------------
The BOZ analysis consists of 4 notebooks and a script. The first notebook
:doc:`BOZ analysis part I.a Correction determination <BOZ analysis part I.a Correction determination>`
is used to determine all the necessary corrections, that is the flat field
correction from the zone plate optics and the non-linearity correction from the
DSSC gain. The inputs are a dark run and a run with X-rays on three broken or
empty membranes. For the latter, an alternative is to use pre-edge data on an
actual sample. The result is a JSON file that contains the flat field and
non-linearity corrections as well as the parameters used for their determination,
so that the procedure can be reproduced and investigated in case of issues. The
determination of the flat field correction is rather quick (a few minutes) and is
the most important correction for the change in XAS computed from the -1st and
+1st orders. For a quick correction of the online preview one can bypass the
non-linearity calculation by taking the JSON file as soon as it appears.
The determination of the non-linearity correction takes much longer, some 2 to
8 hours depending on the number of pulses in the train. For this reason, the
computation can also be done on GPUs, in about 30 min instead. A GPU notebook
adapted for CHEM experiments with a liquid jet, with normalization implemented
for the S K-edge, is available at
:doc:`OnlineGPU BOZ analysis part I.a Correction determination S K-egde <OnlineGPU BOZ analysis part I.a Correction determination S K-egde>`.
The other option is to use a script
that can be downloaded from :download:`scripts/boz_parameters_job.sh` and
reads as:
.. literalinclude:: scripts/boz_parameters_job.sh
   :language: bash
   :linenos:
It uses the first notebook and is launched via slurm:
``sbatch ./boz_parameters_job.sh -p 2937 -d 615 -r 614 -g 3``
where 2937 is the proposal number, 615 is the dark run number, 614 is the run
on 3 broken membranes and 3 is the DSSC gain in photons per bin. The proposal
run number is defined inside the script file.
The second notebook
:doc:`BOZ analysis part I.b Correction validation <BOZ analysis part I.b Correction validation>`
can be used to check how well the calculated corrections still work on a
characterization run recorded later, i.e. on 3 broken or empty membranes.
The third notebook
:doc:`BOZ analysis part II.1 Small data <BOZ analysis part II.1 Small data>`
then uses the JSON correction file to load all needed corrections and
processes a run, saving the ROI-extracted DSSC data as well as aligning it to
photon energy and delay stage in a small data h5 file.
That small data h5 file can then be loaded and the data binned to compute a
spectrum or a time-resolved XAS scan using the fourth and final notebook
:doc:`BOZ analysis part II.2 Binning <BOZ analysis part II.2 Binning>`.
%% Cell type:code id:1bfd1581 tags:
``` python
import numpy as np
%matplotlib inline
import matplotlib.pyplot as plt
plt.rcParams['figure.constrained_layout.use'] = True
import dask
print(f'dask: {dask.__version__}')
import toolbox_scs as tb
print(tb.__file__)
import toolbox_scs.routines.boz as boz
```
%% Output
dask: 2.11.0
/home/lleguy/notebooks/ToolBox/src/toolbox_scs/__init__.py
%% Cell type:markdown id:e94a5c90 tags:
# Improved method
The delay is scanned and saved in the DAQ while recording a single run.
%% Cell type:code id:975a36d1 tags:
``` python
proposal = 900257
dNB = 67
rNB = 68
```
%% Cell type:code id:23144fcb tags:
``` python
arr_dark, tid_dark = boz.load_dssc_module(proposal, dNB, drop_intra_darks=False)
```
%% Cell type:code id:6a7a4169 tags:
``` python
arr_dark
```
%% Output
dask.array<reshape, shape=(758, 82, 128, 512), dtype=uint16, chunksize=(128, 82, 128, 512), chunktype=numpy.ndarray>
%% Cell type:code id:6d233216 tags:
``` python
dark = arr_dark.mean(axis=0).compute()
```
%% Cell type:code id:43934f12 tags:
``` python
arr, tid = boz.load_dssc_module(proposal, rNB, drop_intra_darks=False)
arr = arr - dark
```
%% Cell type:code id:300ce9c6 tags:
``` python
fields = ["DSSC_delay"]
run, data = tb.load(proposal, rNB, fields)
```
%% Output
Bunch pattern table not found in run. Skipping!
%% Cell type:code id:555a7a99 tags:
``` python
data
```
%% Output
<xarray.Dataset>
Dimensions: (trainId: 3987)
Coordinates:
* trainId (trainId) uint64 1356431136 1356431137 ... 1356435121 1356435122
Data variables:
DSSC_delay (trainId) int32 4756934 4756934 4756934 ... 4756963 4756963
Attributes:
runFolder: /gpfs/exfel/exp/SCS/202230/p900257/raw/r0068
%% Cell type:code id:133621fc tags:
``` python
intensity = (arr[:,::2,:,:] - arr[:,1::2,:,:]).mean(axis=(1,2,3)).compute()
```
%% Cell type:code id:aa37ecee tags:
``` python
import xarray as xr
```
%% Cell type:code id:b2ed51a8 tags:
``` python
xrintensity = xr.DataArray(intensity, coords={'trainId':tid}, dims=['trainId']).to_dataset(name='intensity')
```
%% Cell type:code id:40ecf913 tags:
``` python
xrintensity
```
%% Output
<xarray.Dataset>
Dimensions: (trainId: 3987)
Coordinates:
* trainId (trainId) uint64 1356431136 1356431137 ... 1356435121 1356435122
Data variables:
intensity (trainId) float64 -0.05101 -0.05173 -0.04971 ... 0.1101 0.09043
%% Cell type:code id:b12050ba tags:
``` python
r = xr.merge([data, xrintensity], join='inner', combine_attrs='no_conflicts')
```
%% Cell type:code id:6c222b5e tags:
``` python
rbin = r.groupby('DSSC_delay').mean()
```
%% Cell type:code id:be0a0b31 tags:
``` python
offset = 4756949
plt.figure()
plt.plot(rbin['DSSC_delay'] - offset, rbin['intensity'], marker='o')
plt.title(f'{proposal} r:{rNB} d:{dNB}')
plt.xlabel(f'DSSC fine delay - {offset} (delay unit)')
plt.ylabel('Intensity')
```
%% Output
Text(0, 0.5, 'Intensity')
%% Cell type:markdown id:d1e47f42 tags:
# Legacy
One run is recorded for each fine delay in the sequence; the delay itself is not saved in the data.
%% Cell type:code id:69c17cda tags:
``` python
runs = np.arange(775, 814+1)
delay = np.arange(4756930, 4756969+1)
```
%% Cell type:code id:b436e44e tags:
``` python
proposal = 2937
```
%% Cell type:markdown id:fdec2470 tags:
Use the first run as dark.
%% Cell type:code id:6d5673f2 tags:
``` python
arr_dark, tid_dark = boz.load_dssc_module(proposal, runs[0])
dark = boz.average_module(arr_dark).compute()
```
%% Cell type:code id:542d140c tags:
``` python
intensity = np.zeros((len(runs)))
for k,r in enumerate(runs):
arr, tid = boz.load_dssc_module(proposal, r)
data = boz.average_module(arr, dark=dark).compute()
sensor = data[:,:,:256]
intensity[k] = sensor.mean(axis=(0,1,2))
```
%% Cell type:code id:eab988af tags:
``` python
plt.figure()
plt.plot(delay-delay[0], intensity)
plt.xlabel(f'delay - {delay[0]}')
plt.ylabel('<intensity>')
```
%% Output
Text(0, 0.5, '<intensity>')
%% Cell type:code id:369b3289 tags:
``` python
```