Compare revisions

Changes are shown as if the source revision was being merged into the target revision.
Commits on Source (16)
%% Cell type:code id: tags:
``` python
import numpy as np
%matplotlib notebook
import matplotlib.pyplot as plt
plt.rcParams['figure.constrained_layout.use'] = True
import dask
print(f'dask: {dask.__version__}')
import dask.array as da
import xarray as xr
```
%% Output
dask: 2.11.0
%% Cell type:code id: tags:
``` python
from psutil import virtual_memory
import gc
# gc.collect() # run garbage collection to free possible memory
mem = virtual_memory()
print(f'Physical memory: {mem.total/1024/1024/1024:.0f} Gb') # total physical memory available
```
%% Output
Physical memory: 504 Gb
%% Cell type:code id: tags:
``` python
import logging
logging.basicConfig(filename='example.log', level=logging.DEBUG)
```
%% Cell type:code id: tags:
``` python
%load_ext autoreload
%autoreload 2
import toolbox_scs as tb
print(tb.__file__)
from toolbox_scs.routines.boz import load_dssc_module
from extra_data import open_run
```
%% Output
/home/lleguy/notebooks/ToolBox/src/toolbox_scs/__init__.py
%% Cell type:markdown id: tags:
# Parameters
%% Cell type:code id: tags:parameters
``` python
proposalNB = 2719
dark_runNB = 180
runNB = 179
module_group = 0
pulse_pattern = ['pumped', 'unpumped']
xaxis = 'delay' # 'nrj'
bin_width = 0.1 # [ps]
path = f'/gpfs/exfel/exp/SCS/202002/p002719/scratch/tests/r{runNB}/'
```
%% Cell type:code id: tags:
``` python
proposalNB = int(proposalNB)
dark_runNB = int(dark_runNB)
runNB = int(runNB)
module_group = int(module_group)
bin_width = float(bin_width)
moduleNB = list(range(module_group*4, (module_group+1)*4))
```
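The first cell under *Parameters* is tagged `parameters` so that an external driver can override its values when the notebook is run non-interactively, for instance from the SLURM scripts mentioned in the documentation. A minimal sketch of such a launch, assuming papermill is the driver (the notebook file names below are hypothetical):

``` python
# Hedged sketch: execute this notebook with injected parameters via papermill.
# The notebook file names are placeholders, not actual ToolBox paths.
import papermill as pm

pm.execute_notebook(
    'BinningOfDSSCData.ipynb',                # hypothetical input notebook
    'BinningOfDSSCData-p2719-r179-g0.ipynb',  # executed copy with outputs
    parameters=dict(proposalNB=2719, dark_runNB=180,
                    runNB=179, module_group=0),
)
```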
%% Cell type:markdown id: tags:
# Processing function
%% Cell type:code id: tags:
``` python
def process(module):
    # Load dark
    arr_dark, tid_dark = load_dssc_module(proposalNB, dark_runNB, module, drop_intra_darks=False)
    arr_dark = arr_dark.rechunk((100, -1, -1, -1))
    dark_img = arr_dark.mean(axis=0).compute()
    # Load module data
    arr, tid = load_dssc_module(proposalNB, runNB, module, drop_intra_darks=False)
    arr = arr.rechunk((100, -1, -1, -1))
    # dark and intra dark correction
    arr = arr - dark_img
    arr = arr[:, ::2, :, :] - arr[:, 1::2, :, :]
    # Load slow data against which to bin
    if xaxis == 'delay':
        run, v = tb.load(proposalNB, runNB, ['PP800_DelayLine', 'BAM1932M', 'SCS_XGM'])
    else:
        run, v = tb.load(proposalNB, runNB, [xaxis, 'SCS_XGM'])
    # select part of the run
    # v = v.isel({'trainId':slice(0,3000)})
    # combine slow and DSSC module data
    xr_data = xr.DataArray(arr,
                           coords={'trainId': tid,
                                   'sa3_pId': v['sa3_pId'].values},
                           dims=['trainId', 'sa3_pId', 'y', 'x'])
    xr_data = xr_data.expand_dims(module=[module], axis=2)
    r = xr.merge([xr_data.to_dataset(name='DSSC'), v], join='inner')
    # calculate bins
    if xaxis == 'delay':
        r['delay'] = tb.misc.positionToDelay(r['PP800_DelayLine'])
        bam = r['BAM1932M'] - r['BAM1932M'].mean()
        r['bin_delay'] = ((r['delay'] - bam)/bin_width).round()*bin_width
    else:
        r['bin_' + xaxis] = (r[xaxis]/bin_width).round()*bin_width
    # add the pulse pattern coordinates
    Nrepeats = int(len(v['sa3_pId'].values)/len(pulse_pattern))
    pp = pulse_pattern*Nrepeats
    pp = np.array(pp)
    r = r.assign_coords(pp=("sa3_pId", pp))
    # select pattern and bin data
    bin_data = None
    for p in np.unique(pp):
        # slice using non-index coordinates
        # https://github.com/pydata/xarray/issues/2028
        sub_r = r.sel(sa3_pId=(r.pp == p))
        # calculate mean on bin, then mean to remove the dimension
        res = sub_r.groupby('bin_'+xaxis).mean().mean(['sa3_pId'])
        if bin_data is None:
            bin_data = res
            bin_data['DSSC'] = res['DSSC'].expand_dims(pp=[p])
            bin_data['SCS_SA3'] = res['SCS_SA3'].expand_dims(pp=[p])
        else:
            bin_data = xr.merge([bin_data,
                                 res['DSSC'].expand_dims(pp=[p]),
                                 res['SCS_SA3'].expand_dims(pp=[p])])
    # save the result
    fname = path + f'run{runNB}-darkrun{dark_runNB}-module{module}.h5'
    print(fname)
    bin_data.to_netcdf(fname, format='NETCDF4', engine='h5netcdf')
```
%% Cell type:markdown id: tags:
# Processing
%% Cell type:code id: tags:
``` python
for m in moduleNB:
    process(m)
```
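Each call to `process` writes one netCDF file per DSSC module. A minimal sketch of how these per-module files could be read back and stacked afterwards, reusing the `path`, `runNB`, `dark_runNB` and `moduleNB` defined above (illustration only, not the ToolBox helper referred to in the documentation):

``` python
# Hedged sketch: reload the per-module results written by process() and
# concatenate them along the 'module' dimension.
module_files = [path + f'run{runNB}-darkrun{dark_runNB}-module{m}.h5'
                for m in moduleNB]
data = xr.concat([xr.open_dataset(f, engine='h5netcdf') for f in module_files],
                 dim='module')
```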
@@ -5,9 +5,15 @@ unreleased
----------
- **Bug fixes**
  - fix :issue:`45`: SLURM scripts embedded in, and downloadable from, the documentation :mr:`171`
  - fix :issue:`8` on azimuthal integration with pyFAI and hexagonal DSSC pixel splitting by providing an example notebook :mr:`174`
  - fix :issue:`46` caused by a change in the dask groupby mean behavior :mr:`174`
- **Improvements**
  - update version of BAM mnemonics :mr:`175`
  - update version of GATT-related mnemonics, add `transmission_col2` :mr:`172`
  - reorganize the Howto section :mr:`169`
- **New Features**
@@ -41,7 +41,14 @@ which will be repeated. XGM data will also be binned similarly to the DSSC
data.
Since this data reduction step can be quite time consuming for large datasets,
it is recommended to launch the notebook via a SLURM script. The script can be
downloaded from :download:`scripts/bin_dssc_module_job.sh` and reads as:

.. literalinclude:: scripts/bin_dssc_module_job.sh
   :language: bash
   :linenos:

It is launched with the following:

.. code:: bash

@@ -66,7 +73,11 @@ toolbox source. These files can then be loaded and combined with:
DSSC azimuthal integration
##########################
Azimuthal integration can be performed with pyFAI_, which can use the
hexagonal pixel shape information from the DSSC geometry to split the
intensity of a pixel into the bins it covers. An example notebook
:doc:`Azimuthal integration of DSSC with pyFAI <Azimuthal integration of DSSC with pyFAI>` is available.
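A minimal sketch of a pyFAI integration call (illustration only, not the
content of the example notebook; the distance, beam centre, pixel sizes and
wavelength below are placeholders rather than the real DSSC geometry):

.. code:: python

    import numpy as np
    from pyFAI.azimuthalIntegrator import AzimuthalIntegrator

    # Placeholder geometry standing in for the real DSSC layout.
    ai = AzimuthalIntegrator(dist=0.2, poni1=0.06, poni2=0.06,
                             pixel1=236e-6, pixel2=204e-6,
                             wavelength=1.38e-9)

    img = np.random.random((128, 512))            # placeholder module image
    q, intensity = ai.integrate1d(img, npt=200,   # 200 radial bins
                                  unit="q_A^-1",
                                  method="splitpixel")
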
Legacy DSSC binning procedure
#############################
@@ -101,8 +112,14 @@ non-linearity calculation by taking the JSON file as soon as it appears.
The determination of the non-linearity correction takes considerably longer,
some 2 to 8 hours depending on the number of pulses in the
train. For this reason it is possible to use a script
that can be downloaded from :download:`scripts/boz_parameters_job.sh` and
reads as:

.. literalinclude:: scripts/boz_parameters_job.sh
   :language: bash
   :linenos:

It uses the first notebook and is launched via SLURM:

``sbatch ./boz_parameters_job.sh 615 614 3``
@@ -130,3 +147,5 @@ Detectors that produce one point per pulse, or 0D detectors, are all handled in
* :doc:`extract data from point detectors <point_detectors/point_detectors>`.
.. _pyFAI: https://pyfai.readthedocs.io
File moved
File moved
@@ -40,12 +40,21 @@ mnemonics = {
    # Bunch Arrival Monitors
    "BAM414": ({'source': 'SCS_ILH_LAS/DOOCS/BAM_414_B2:output',
                'key': 'data.absoluteTD',
                'dim': ['BAMbunchId']},
               {'source': 'SCS_ILH_LAS/DOOCS/BAM_414_B2:output',
                'key': 'data.lowChargeArrivalTime',
                'dim': ['BAMbunchId']},),
    "BAM1932M": ({'source': 'SCS_ILH_LAS/DOOCS/BAM_1932M_TL:output',
                  'key': 'data.absoluteTD',
                  'dim': ['BAMbunchId']},
                 {'source': 'SCS_ILH_LAS/DOOCS/BAM_1932M_TL:output',
                  'key': 'data.lowChargeArrivalTime',
                  'dim': ['BAMbunchId']},),
    "BAM1932S": ({'source': 'SCS_ILH_LAS/DOOCS/BAM_1932S_TL:output',
                  'key': 'data.absoluteTD',
                  'dim': ['BAMbunchId']},
                 {'source': 'SCS_ILH_LAS/DOOCS/BAM_1932S_TL:output',
                  'key': 'data.lowChargeArrivalTime',
                  'dim': ['BAMbunchId']},),
@@ -66,10 +75,20 @@ mnemonics = {
    "HSLIT": ({'source': 'SCS_XTD10_HSLIT/MDL/BLADE',
               'key': 'actualGap.value',
               'dim': None},),
    "transmission": ({'source': 'SA3_XTD10_VAC/MDL/GATT_TRANSMISSION_MONITOR',
                      'key': 'Estimated_Tr.value',
                      'dim': None},
                     {'source': 'SA3_XTD10_GATT/MDL/GATT_TRANSMISSION_MONITOR',
                      'key': 'Estimated_Tr.value',
                      'dim': None},),
    "transmission_col2": (
        {'source': 'SA3_XTD10_VAC/MDL/GATT_TRANSMISSION_MONITOR',
         'key': 'second_color_Estimated_Tr.value',
         'dim': None},),
    "GATT_pressure": ({'source': 'SA3_XTD10_VAC/MDL/GATT_P_CELL',
                       'key': 'value.value',
                       'dim': None},
                      {'source': 'P_GATT',
                       'key': 'value.value',
                       'dim': None},),
    "navitar": ({'source': 'SCS_XTD10_IMGES/CAM/BEAMVIEW_NAVITAR:daqOutput',
@@ -78,6 +97,12 @@ mnemonics = {
    "UND": ({'source': 'SA3_XTD10_UND/DOOCS/PHOTON_ENERGY',
             'key': 'actualPosition.value',
             'dim': None},),
    "UND2": ({'source': 'SA3_XTD10_UND/DOOCS/PHOTON_ENERGY_COLOR2',
              'key': 'actualPosition.value',
              'dim': None},),
    "UND3": ({'source': 'SA3_XTD10_UND/DOOCS/PHOTON_ENERGY_COLOR3',
              'key': 'actualPosition.value',
              'dim': None},),
    # PES
    "PES_N_raw": ({'source': 'SA3_XTD10_PES/ADC/1:network',
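The new entries are requested by name like any other mnemonic, e.g. through `tb.load` as in the notebook above. A hedged usage sketch, assuming the chosen run actually records these sources:

``` python
# Hedged sketch: load some of the new mnemonics for a run (the sources must
# exist in that run, otherwise the ToolBox will not find them).
import toolbox_scs as tb

run, ds = tb.load(2719, 179, ['transmission', 'transmission_col2', 'UND2'])
print(ds['transmission_col2'])
```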