Commit 42769e28 authored by Loïc Le Guyader

Merge branch 'pyFAI' into 'master'

Example on using pyFAI and pixel splitting for azimuthal integration of DSSC

Closes #46 and #8

See merge request !174
parents ab937207 cc6e5156
source diff could not be displayed: it is too large.
%% Cell type:code id: tags:
``` python
import numpy as np
%matplotlib notebook
import matplotlib.pyplot as plt
plt.rcParams['figure.constrained_layout.use'] = True
import dask
print(f'dask: {dask.__version__}')
import dask.array as da
import xarray as xr
```
%% Output
dask: 2.11.0
%% Cell type:code id: tags:
``` python
from psutil import virtual_memory
import gc
# gc.collect() # run garbage collection to free memory if needed
mem = virtual_memory()
print(f'Physical memory: {mem.total/1024/1024/1024:.0f} GB') # total physical memory available
```
%% Output
Physical memory: 504 GB
%% Cell type:code id: tags:
``` python
import logging
logging.basicConfig(filename='example.log', level=logging.DEBUG)
```
%% Cell type:code id: tags:
``` python
%load_ext autoreload
%autoreload 2
import toolbox_scs as tb
print(tb.__file__)
from toolbox_scs.routines.boz import load_dssc_module
from extra_data import open_run
```
%% Output
/home/lleguy/notebooks/ToolBox/src/toolbox_scs/__init__.py
%% Cell type:markdown id: tags:
# Parameters
%% Cell type:code id: tags:parameters
``` python
proposalNB = 2719
dark_runNB = 180
runNB = 179
module_group = 0
pulse_pattern = ['pumped', 'unpumped']
xaxis = 'delay' # 'nrj'
bin_width = 0.1 # [ps]
path = f'/gpfs/exfel/exp/SCS/202002/p002719/scratch/tests/r{runNB}/'
```
%% Cell type:code id: tags:
``` python
proposalNB = int(proposalNB)
dark_runNB = int(dark_runNB)
runNB = int(runNB)
module_group = int(module_group)
bin_width = float(bin_width)
moduleNB = list(range(module_group*4, (module_group+1)*4))
```
%% Cell type:markdown id: tags:
# Processing function
%% Cell type:code id: tags:
``` python
def process(module):
    # Load dark
    arr_dark, tid_dark = load_dssc_module(proposalNB, dark_runNB, module, drop_intra_darks=False)
    arr_dark = arr_dark.rechunk((100, -1, -1, -1))
    dark_img = arr_dark.mean(axis=0).compute()
    # Load module data
    arr, tid = load_dssc_module(proposalNB, runNB, module, drop_intra_darks=False)
    arr = arr.rechunk((100, -1, -1, -1))
    # dark and intra dark correction
    arr = arr - dark_img
    arr = arr[:, ::2, :, :] - arr[:, 1::2, :, :]
    # Load slow data against which to bin
    if xaxis == 'delay':
        run, v = tb.load(proposalNB, runNB, ['PP800_DelayLine', 'BAM1932M', 'SCS_XGM'])
    else:
        run, v = tb.load(proposalNB, runNB, [xaxis, 'SCS_XGM'])
    # select part of the run
    # v = v.isel({'trainId': slice(0, 3000)})
    # combine slow and DSSC module data
    xr_data = xr.DataArray(arr,
                           coords={'trainId': tid,
                                   'sa3_pId': v['sa3_pId'].values},
                           dims=['trainId', 'sa3_pId', 'y', 'x'])
    xr_data = xr_data.expand_dims(module=[module], axis=2)
    r = xr.merge([xr_data.to_dataset(name='DSSC'), v], join='inner')
    # calculate bins
    if xaxis == 'delay':
        r['delay'] = tb.misc.positionToDelay(r['PP800_DelayLine'])
        bam = r['BAM1932M'] - r['BAM1932M'].mean()
        r['bin_delay'] = ((r['delay'] - bam)/bin_width).round()*bin_width
    else:
        r['bin_' + xaxis] = (r[xaxis]/bin_width).round()*bin_width
    # add the pulse pattern coordinates
    Nrepeats = int(len(v['sa3_pId'].values)/len(pulse_pattern))
    pp = pulse_pattern*Nrepeats
    pp = np.array(pp)
    r = r.assign_coords(pp=("sa3_pId", pp))
    # select pattern and bin data
    bin_data = None
    for p in np.unique(pp):
        # slice using non-index coordinates
        # https://github.com/pydata/xarray/issues/2028
        sub_r = r.sel(sa3_pId=(r.pp == p))
        # calculate the mean in each bin, then average over the remaining pulse dimension
        res = sub_r.groupby('bin_' + xaxis).mean().mean(['sa3_pId'])
        if bin_data is None:
            bin_data = res
            bin_data['DSSC'] = res['DSSC'].expand_dims(pp=[p])
            bin_data['SCS_SA3'] = res['SCS_SA3'].expand_dims(pp=[p])
        else:
            bin_data = xr.merge([bin_data,
                                 res['DSSC'].expand_dims(pp=[p]),
                                 res['SCS_SA3'].expand_dims(pp=[p])])
    # save the result
    fname = path + f'run{runNB}-darkrun{dark_runNB}-module{module}.h5'
    print(fname)
    bin_data.to_netcdf(fname, format='NETCDF4', engine='h5netcdf')
```
%% Cell type:markdown id: tags:
# Processing
%% Cell type:code id: tags:
``` python
for m in moduleNB:
    process(m)
```
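%% Cell type:markdown id: tags:
The per-module files written above can then be read back and combined along the module dimension. A minimal sketch, assuming all four modules of the group were processed by `process`:
%% Cell type:code id: tags:
``` python
# read the saved per-module results and stack them along 'module'
data = xr.concat([xr.open_dataset(path + f'run{runNB}-darkrun{dark_runNB}-module{m}.h5',
                                  engine='h5netcdf')
                  for m in moduleNB], dim='module')
```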
@@ -7,7 +7,9 @@ unreleased
- **Bug fixes**
- fix :issue:`45` SLURM scripts embedded in and download link available from documentation :mr:`171`
- fix :issue:`8` regarding azimuthal integration with pyFAI and hexagonal DSSC pixel splitting by providing an example notebook :mr:`174`
- fix :issue:`46` with a change in dask groupby mean behavior :mr:`174`
- **Improvements**
- update version of BAM mnemonics :mr:`175`
...@@ -73,7 +73,11 @@ toolbox source. This files can then be loaded and combined with: ...@@ -73,7 +73,11 @@ toolbox source. This files can then be loaded and combined with:
DSSC azimuthal integration
##########################
Azimuthal integration can be performed with pyFAI_, which can use the
hexagonal pixel shape information from the DSSC geometry to split
the intensity of each pixel over the azimuthal bins it covers. An example notebook
:doc:`Azimuthal integration of DSSC with pyFAI <Azimuthal integration of DSSC with pyFAI>` is available.
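A minimal sketch of the mechanism, using pyFAI's generic ``Detector`` with
explicit pixel corners (the hexagon vertices, detector size and geometry
values below are simplified placeholders, not the real DSSC geometry):

.. code-block:: python

    import numpy as np
    from pyFAI.detectors import Detector
    from pyFAI.azimuthalIntegrator import AzimuthalIntegrator

    # toy detector with hexagonal pixels: pyFAI takes a corner array of
    # shape (Ny, Nx, Nc, 3), the last axis holding (z, y, x) in metre
    Ny, Nx = 128, 512
    p1, p2 = 236e-6, 204e-6  # rough DSSC pixel pitch, placeholder values
    hexagon = np.array([(0.0, 0.5), (0.25, 1.0), (0.75, 1.0),
                        (1.0, 0.5), (0.75, 0.0), (0.25, 0.0)])  # (y, x)
    yy, xx = np.mgrid[0:Ny, 0:Nx]
    corners = np.zeros((Ny, Nx, 6, 3))
    corners[..., 1] = (yy[..., None] + hexagon[:, 0]) * p1  # y coordinate
    corners[..., 2] = (xx[..., None] + hexagon[:, 1]) * p2  # x coordinate

    det = Detector(pixel1=p1, pixel2=p2)
    det.set_pixel_corners(corners)
    ai = AzimuthalIntegrator(dist=0.2, detector=det, wavelength=1e-10)

    img = np.random.random((Ny, Nx))
    # 'splitpixel' spreads each pixel's signal over all radial bins its
    # polygonal footprint overlaps, instead of using the pixel centre only
    q, I = ai.integrate1d(img, npt=200, unit='q_nm^-1', method='splitpixel')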
Legacy DSSC binning procedure
#############################
@@ -143,3 +147,5 @@ Detectors that produce one point per pulse, or 0D detectors, are all handled in
* :doc:`extract data from point detectors <point_detectors/point_detectors>`.
.. _pyFAI: https://pyfai.readthedocs.io