Commit db7d2c83 authored by Karim Ahmed

Limit number of plotted trains to 500 Jungfrau Correct

parent a6c9a7ce
1 merge request: !732 Limit number of plotted trains to 500 Jungfrau Correct
%% Cell type:markdown id: tags:

# Jungfrau Offline Correction #

Author: European XFEL Detector Group, Version: 2.0

Offline Calibration for the Jungfrau Detector

%% Cell type:code id: tags:

``` python
in_folder = "/gpfs/exfel/exp/SPB/202130/p900204/raw"  # the folder to read data from, required
out_folder = "/gpfs/exfel/data/scratch/ahmedk/test/remove"  # the folder to output to, required
run = 91  # run to process, required
metadata_folder = ""  # Directory containing calibration_metadata.yml when run by xfel-calibrate
sequences = [-1]  # sequences to correct, set to [-1] for all, range allowed
sequences_per_node = 1  # number of sequence files per cluster node if run as SLURM job, set to 0 to not run SLURM parallel

# Parameters used to access raw data.
karabo_id = "SPB_IRDA_JF4M"  # karabo prefix of Jungfrau devices
karabo_da = ['JNGFR01', 'JNGFR02', 'JNGFR03', 'JNGFR04', 'JNGFR05', 'JNGFR06', 'JNGFR07', 'JNGFR08']  # data aggregators
receiver_template = "JNGFR{:02d}"  # Detector receiver template for accessing raw data files, e.g. "JNGFR{:02d}"
instrument_source_template = '{}/DET/{}:daqOutput'  # template for source name (filled with karabo_id & receiver_id), e.g. 'SPB_IRDA_JF4M/DET/JNGFR01:daqOutput'
ctrl_source_template = '{}/DET/CONTROL'  # template for control source name (filled with karabo_id_control)
karabo_id_control = ""  # if control is on a different ID; set to empty string if it is the same as karabo_id

# Parameters for calibration database.
use_dir_creation_date = True  # use the creation date of the input dir for database queries
cal_db_interface = "tcp://max-exfl016:8017#8025"  # the database interface to use
cal_db_timeout = 180000  # timeout on caldb requests

# Parameters affecting corrected data.
relative_gain = True  # do relative gain correction
limit_images = 0  # ONLY FOR TESTING. Process only the first N images; use 0 to process all.

# Parameters for retrieving calibration constants
manual_slow_data = False  # if True, use the manually entered bias_voltage, integration_time, gain_setting, and gain_mode values
integration_time = 4.96  # integration time in us; overwritten by the value in the file unless manual_slow_data is True
gain_setting = 0  # 0 for dynamic gain, 1 for dynamic HG0; overwritten by the value in the file unless manual_slow_data is True
gain_mode = 0  # 0 for runs with dynamic gain setting, 1 for fixed gain; overwritten by the value in the file unless manual_slow_data is True
mem_cells = -1  # Set mem_cells to -1 to automatically use the value stored in RAW data.
bias_voltage = 180  # overwritten by the value in the file unless manual_slow_data is True

# Parameters for plotting
skip_plots = False  # exit after writing corrected files
plot_trains = 500  # Number of trains to plot for RAW and CORRECTED plots. Set to -1 to automatically plot all trains.
cell_id_preview = 15  # cell ID used for preview in single-shot plots

# Parameters for ROI selection and reduction
roi_definitions = [-1]  # List with groups of 6 values defining ROIs, e.g. [3, 120, 180, 200, 550, -2] for module 3 (JNGFR03), slice 120:180, 200:550, average along axis -2 (slow scan, or -1 for fast scan); see the parsing sketch after this cell


def balance_sequences(in_folder, run, sequences, sequences_per_node, karabo_da):
    from xfel_calibrate.calibrate import balance_sequences as bs
    return bs(in_folder, run, sequences, sequences_per_node, karabo_da)
```
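%% Cell type:markdown id: tags:

The flat `roi_definitions` list is consumed in groups of six values. The following is a minimal standalone sketch of that parsing, not part of the correction itself; the sample ROI values are hypothetical.

%% Cell type:code id: tags:

``` python
# Hypothetical example: module 3, rows 120:180, columns 200:550, mean over axis -2.
example_rois = [3, 120, 180, 200, 550, -2]
for i in range(len(example_rois) // 6):
    module, a1, a2, b1, b2, mean_axis = example_rois[i*6 : (i+1)*6]
    print(f"Module {module}: slice {a1}:{a2}, {b1}:{b2}, averaged along axis {mean_axis}")
```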
%% Cell type:code id: tags:

``` python
import multiprocessing
import sys
import warnings
from functools import partial
from pathlib import Path

import h5py
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pasha as psh
import tabulate
from IPython.display import Latex, Markdown, display
from extra_data import H5File, RunDirectory, by_id, components
from extra_geom import JUNGFRAUGeometry
from matplotlib.colors import LogNorm

from cal_tools import h5_copy_except
from cal_tools.jungfraulib import JungfrauCtrl
from cal_tools.enums import BadPixels
from cal_tools.step_timing import StepTimer
from cal_tools.tools import (
    get_constant_from_db_and_time,
    get_dir_creation_date,
    get_pdu_from_db,
    map_seq_files,
    write_compressed_frames,
    CalibrationMetadata,
)
from iCalibrationDB import Conditions, Constants

warnings.filterwarnings('ignore')

matplotlib.use('agg')
%matplotlib inline
```

%% Cell type:code id: tags:

``` python
in_folder = Path(in_folder)
out_folder = Path(out_folder)
run_folder = in_folder / f'r{run:04d}'
run_dc = RunDirectory(run_folder)
instrument_src = instrument_source_template.format(karabo_id, receiver_template)

out_folder.mkdir(parents=True, exist_ok=True)

print(f"Run is: {run}")
print(f"Instrument H5File source: {instrument_src}")
print(f"Process modules: {karabo_da}")

creation_time = None
if use_dir_creation_date:
    creation_time = get_dir_creation_date(in_folder, run)
    print(f"Using {creation_time} as creation time")

if karabo_id_control == "":
    karabo_id_control = karabo_id

if any(axis_no not in {-2, -1, 2, 3} for axis_no in roi_definitions[5::6]):
    print("ROI averaging must be on axis 2/3 (or equivalently -2/-1). "
          f"Axis numbers given: {roi_definitions[5::6]}")
    sys.exit(1)
```
%% Cell type:code id: tags:

``` python
# Read available sequence files to correct.
mapped_files, num_seq_files = map_seq_files(
    run_folder, karabo_da, sequences)

if not len(mapped_files):
    raise IndexError(
        "No sequence files available to correct for the selected sequences and karabo_da.")
```
%% Cell type:code id: tags:

``` python
print(f"Processing a total of {num_seq_files} sequence files")
table = []
fi = 0
for kda, sfiles in mapped_files.items():
    for k, f in enumerate(sfiles):
        if k == 0:
            table.append((fi, kda, k, f))
        else:
            table.append((fi, "", k, f))
        fi += 1
md = display(Latex(tabulate.tabulate(
    table, tablefmt='latex',
    headers=["#", "module", "# module", "file"])))
```
%% Cell type:code id: tags:

``` python
ctrl_src = ctrl_source_template.format(karabo_id_control)
ctrl_data = JungfrauCtrl(run_dc, ctrl_src)

if mem_cells < 0:
    memory_cells, sc_start = ctrl_data.get_memory_cells()
    mem_cells_name = "single cell" if memory_cells == 1 else "burst"
    print(f"Run is in {mem_cells_name} mode.\nStorage cell start: {sc_start:02d}")
else:
    memory_cells = mem_cells
    mem_cells_name = "single cell" if memory_cells == 1 else "burst"
    print(f"Run is manually set to {mem_cells_name} mode, with {memory_cells} memory cells.")

if not manual_slow_data:
    integration_time = ctrl_data.get_integration_time()
    bias_voltage = ctrl_data.get_bias_voltage()
    gain_setting = ctrl_data.get_gain_setting()
    gain_mode = ctrl_data.get_gain_mode()

print(f"Integration time is {integration_time} us")
print(f"Gain setting is {gain_setting} (run settings: "
      f"{ctrl_data.run_settings.value if ctrl_data.run_settings else ctrl_data.run_settings})")  # noqa
print(f"Gain mode is {gain_mode}")
print(f"Bias voltage is {bias_voltage} V")
print(f"Number of memory cells is {memory_cells}")
```
%% Cell type:markdown id: tags:

### Retrieving calibration constants ###

%% Cell type:code id: tags:

``` python
condition = Conditions.Dark.jungfrau(
    memory_cells=memory_cells,
    bias_voltage=bias_voltage,
    integration_time=integration_time,
    gain_setting=gain_setting,
    gain_mode=gain_mode,
)

empty_constants = {
    "Offset": np.zeros((512, 1024, memory_cells, 3), dtype=np.float32),
    "BadPixelsDark": np.zeros((512, 1024, memory_cells, 3), dtype=np.uint32),
    "RelativeGain": None,
    "BadPixelsFF": None,
}

metadata = CalibrationMetadata(metadata_folder or out_folder)
# NOTE: this notebook will not overwrite the calibration metadata file
const_yaml = metadata.get("retrieved-constants", {})


def get_constants_for_module(karabo_da: str):
    """Get calibration constants for a given module of Jungfrau

    :return:
        offset_map (offset map),
        mask (mask of bad pixels),
        gain_map (map of relative gain factors),
        db_module (name of DB module),
        when (dictionary: constant - creation time)
    """
    when = dict()
    const_data = dict()
    if const_yaml:
        for cname, mdata in const_yaml[karabo_da]["constants"].items():
            const_data[cname] = dict()
            when[cname] = mdata["creation-time"]
            if when[cname]:
                with h5py.File(mdata["file-path"], "r") as cf:
                    const_data[cname] = np.copy(
                        cf[f"{mdata['dataset-name']}/data"])
            else:
                const_data[cname] = empty_constants[cname]
    else:
        retrieval_function = partial(
            get_constant_from_db_and_time,
            karabo_id=karabo_id,
            karabo_da=karabo_da,
            cal_db_interface=cal_db_interface,
            creation_time=creation_time,
            timeout=cal_db_timeout,
            print_once=False,
        )
        for cname, cempty in empty_constants.items():
            const_data[cname], when[cname] = retrieval_function(
                condition=condition,
                constant=getattr(Constants.jungfrau, cname)(),
                empty_constant=cempty,
            )

    offset_map = const_data["Offset"]
    mask = const_data["BadPixelsDark"]
    gain_map = const_data["RelativeGain"]
    mask_ff = const_data["BadPixelsFF"]

    # Combine masks
    if mask_ff is not None:
        mask |= np.moveaxis(mask_ff, 0, 1)

    if memory_cells > 1:
        # move from x, y, cell, gain to cell, x, y, gain
        offset_map = np.moveaxis(offset_map, [0, 1], [1, 2])
        mask = np.moveaxis(mask, [0, 1], [1, 2])
    else:
        offset_map = np.squeeze(offset_map)
        mask = np.squeeze(mask)

    # masking double size pixels
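    # These rows/columns lie on the boundaries between readout ASICs, where
    # Jungfrau pixels are physically larger than standard pixels (hence the
    # NON_STANDARD_SIZE flag); they are flagged rather than treated as normal.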
    mask[..., [255, 256], :, :] |= BadPixels.NON_STANDARD_SIZE
    mask[..., [255, 256, 511, 512, 767, 768], :] |= BadPixels.NON_STANDARD_SIZE

    if gain_map is not None:
        if memory_cells > 1:
            gain_map = np.moveaxis(gain_map, [0, 2], [2, 0])
            # add an extra empty cell constant
            b = np.ones(((1,) + gain_map.shape[1:]))
            gain_map = np.concatenate((gain_map, b), axis=0)
        else:
            gain_map = np.moveaxis(np.squeeze(gain_map), 1, 0)

    return offset_map, mask, gain_map, karabo_da, when


with multiprocessing.Pool() as pool:
    r = pool.map(get_constants_for_module, karabo_da)

# Print timestamps for the retrieved constants.
constants = {}
for offset_map, mask, gain_map, k_da, when in r:
    print(f'Constants for module {k_da}:')
    for const in when:
        print(f'  {const} injected at {when[const]}')
    if gain_map is None:
        print("No gain map found")
        relative_gain = False
    constants[k_da] = (offset_map, mask, gain_map)
```
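%% Cell type:markdown id: tags:

To make the axis reshuffling above concrete, here is a small self-contained sketch (dummy arrays only, not real constants) of the move from the `(x, y, cell, gain)` storage order to the `(cell, x, y, gain)` order used during correction:

%% Cell type:code id: tags:

``` python
import numpy as np

# Dummy constant in the database storage order (x, y, cell, gain); the real
# Offset constant has shape (512, 1024, memory_cells, 3).
dummy = np.zeros((512, 1024, 16, 3), dtype=np.float32)

# Same operation as above: axes 0 and 1 (x, y) move to positions 1 and 2,
# so the memory-cell axis comes first and can be indexed per train.
reordered = np.moveaxis(dummy, [0, 1], [1, 2])
print(reordered.shape)  # (16, 512, 1024, 3)
```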
%% Cell type:code id: tags:

``` python
# Correct a chunk of images for offset and gain
def correct_train(wid, index, d):
    d = d.astype(np.float32)  # [cells, x, y]
    g = gain[index]

    # Jungfrau gains 0[00], 1[01], 3[11]
    g[g==3] = 2
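    # Remapping the raw gain value 3 (low gain) to 2 lets g index the last
    # axis of the constants directly (size 3: high, medium, low gain).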
    # Select memory cells
    if memory_cells > 1:
        """
        Even though the memory cell pattern is expected to be the same
        across all trains (for one correction run taken with one
        acquisition), it is preferred not to assume this, to account for
        exceptions that can happen.
        """
        m = memcells[index].copy()
        # A cell value of 255 points to no cell image data (an image of 0 pixels).
        # The corresponding image is corrected with the constants of cell 0 to
        # avoid values of 0. This relies on the modified memory cells not being
        # stored in the corrected data.
        m[m==255] = 0
        offset_map_cell = offset_map[m, ...]  # [16 + empty cell, x, y]
        mask_cell = mask[m, ...]
    else:
        offset_map_cell = offset_map
        mask_cell = mask

    # Offset correction (per-pixel constant selection via np.choose is
    # illustrated after this cell)
    offset = np.choose(g, np.moveaxis(offset_map_cell, -1, 0))
    d -= offset

    # Gain correction
    if relative_gain:
        if memory_cells > 1:
            gain_map_cell = gain_map[m, ...]
        else:
            gain_map_cell = gain_map
        cal = np.choose(g, np.moveaxis(gain_map_cell, -1, 0))
        d /= cal

    msk = np.choose(g, np.moveaxis(mask_cell, -1, 0))

    data_corr[index, ...] = d
    mask_corr[index, ...] = msk
```
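%% Cell type:markdown id: tags:

The per-pixel constant selection above relies on `np.choose`. A minimal standalone sketch with toy shapes and made-up values (not detector data):

%% Cell type:code id: tags:

``` python
import numpy as np

# Toy per-pixel gain indices (0, 1 or 2) for a 2x2 "image".
g = np.array([[0, 1], [2, 0]])

# Toy constants with the gain stage on the last axis, as in offset_map_cell.
consts = np.stack([np.full((2, 2), 10.0),   # high gain
                   np.full((2, 2), 20.0),   # medium gain
                   np.full((2, 2), 30.0)],  # low gain
                  axis=-1)

# Moving the gain axis to the front lets np.choose pick, per pixel,
# the constant matching that pixel's gain stage.
picked = np.choose(g, np.moveaxis(consts, -1, 0))
print(picked)  # [[10. 20.] [30. 10.]]
```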
%% Cell type:code id: tags:

``` python
step_timer = StepTimer()
n_cpus = multiprocessing.cpu_count()
context = psh.context.ProcessContext(num_workers=n_cpus)
print(f"Using {n_cpus} workers for correction.")
```
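%% Cell type:markdown id: tags:

For reference, a minimal standalone example of the `pasha` pattern used below (a shared output array from `alloc` plus a kernel receiving `(worker_id, index, item)`); the array contents here are made up:

%% Cell type:code id: tags:

``` python
import numpy as np
import pasha as psh

ctx = psh.context.ProcessContext(num_workers=2)
inputs = np.arange(8, dtype=np.float32).reshape(4, 2)
result = ctx.alloc(shape=inputs.shape, dtype=np.float32)

def kernel(wid, index, item):
    # Each worker writes its processed item into the shared array,
    # just as correct_train fills data_corr and mask_corr.
    result[index] = item * 2

ctx.map(kernel, inputs)
print(result)
```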
%% Cell type:code id: tags:

``` python
def save_reduced_rois(ofile, data_corr, mask_corr, karabo_da):
    """If ROIs are defined for this karabo_da, reduce them and save to the output file"""
    rois_defined = 0
    module_no = int(karabo_da[-2:])
    params_source = f'{karabo_id}/ROIPROC/{karabo_da}'
    rois_source = f'{params_source}:output/data'

    for i in range(len(roi_definitions) // 6):
        roi_module, a1, a2, b1, b2, mean_axis = roi_definitions[i*6 : (i+1)*6]
        if roi_module == module_no:
            rois_defined += 1
            # Apply the mask and average the remaining pixels to 1D
            roi_data = data_corr[..., a1:a2, b1:b2].mean(
                axis=mean_axis, where=(mask_corr[..., a1:a2, b1:b2] == 0)
            )
            ofile.create_dataset(
                f'INSTRUMENT/{rois_source}/roi{rois_defined}/data',
                data=roi_data
            )
            ofile.require_group(f'CONTROL/{params_source}')
            params_grp = ofile.create_group(f'RUN/{params_source}/roi{rois_defined}')
            params_grp['region'] = np.array([[a1, a2, b1, b2]])
            params_grp['reduce_axis'] = np.array([mean_axis])

    if rois_defined:
        # Copy the index for the new source
        ofile.copy(f'INDEX/{karabo_id}/DET/{karabo_da}:daqOutput/data',
                   f'INDEX/{rois_source}')
        ntrains = ofile['INDEX/trainId'].shape[0]
        ofile.create_dataset(f'INDEX/{params_source}/count', shape=(ntrains,), dtype=np.uint64)
        ofile.create_dataset(f'INDEX/{params_source}/first', shape=(ntrains,), dtype=np.uint64)

        # Add the new source to the list in METADATA
        if 'dataSourceId' in ofile['METADATA']:
            # Older file format
            data_sources_grp = ofile['METADATA']
        else:
            # Newer file format
            data_sources_grp = ofile['METADATA/dataSources']

        def extend(dset, values):
            dset.resize(dset.shape[0] + len(values), axis=0)
            dset[-len(values):] = values

        extend(data_sources_grp['root'], ['CONTROL', 'INSTRUMENT'])
        extend(data_sources_grp['deviceId'], [params_source, rois_source])
        extend(data_sources_grp['dataSourceId'], [
            f'CONTROL/{params_source}', f'INSTRUMENT/{rois_source}']
        )
```
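%% Cell type:markdown id: tags:

The masked averaging in `save_reduced_rois` uses the `where=` argument of `ndarray.mean` (available since NumPy 1.20): only pixels whose mask value is 0 contribute. A small sketch with made-up numbers:

%% Cell type:code id: tags:

``` python
import numpy as np

roi = np.array([[1.0, 2.0], [3.0, 100.0]])
bad = np.array([[0, 0], [0, 1]], dtype=np.uint32)  # non-zero marks bad pixels

# The flagged 100.0 is excluded, so the second column averages to 2.0.
print(roi.mean(axis=0, where=(bad == 0)))  # [2. 2.]
```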
%% Cell type:markdown id: tags:

### Correcting RAW data ###

%% Cell type:code id: tags:

``` python
# Loop over modules
for local_karabo_da, mapped_files_module in mapped_files.items():
    instrument_src_kda = instrument_src.format(int(local_karabo_da[-2:]))
    data_path = "INSTRUMENT/" + instrument_src_kda + "/data"

    for sequence_file in mapped_files_module:  # noqa
        sequence_file = Path(sequence_file)
        seq_dc = H5File(sequence_file)

        # Save corrected data in an output file with the name
        # of the corresponding raw sequence file.
        ofile_name = sequence_file.name.replace("RAW", "CORR")
        out_file = out_folder / ofile_name

        # Load the data shape: memory cells and detector size (imgs, cells, x, y).
        # dshape[0] = number of available images to correct.
        dshape = seq_dc[instrument_src_kda, "data.adc"].shape
        if dshape[0] == 0:
            print(f"\t- WARNING: No image data for {ofile_name}: data shape is {dshape}")
            continue

        # Number of trains in the file, including trains with empty data.
        n_trains = len(seq_dc.train_ids)
        n_imgs = dshape[0]

        # For testing, only correct limit_images
        if limit_images > 0:
            n_imgs = min(n_imgs, limit_images)
        print(f"\nNumber of images to correct: {n_imgs} for {ofile_name}")

        if n_trains - dshape[0] != 0:
            print(f"\t- WARNING: {sequence_file.name} has {n_trains - dshape[0]} "
                  "trains with empty data.")

        # Load constants from the constants dictionary.
        offset_map, mask, gain_map = constants[local_karabo_da]

        # Allocate shared arrays for corrected data.
        data_corr = context.alloc(shape=dshape, dtype=np.float32)
        mask_corr = context.alloc(shape=dshape, dtype=np.uint32)

        step_timer.start()

        seq_dc = seq_dc.select(
            instrument_src_kda, "*", require_all=True).select_trains(np.s_[:n_imgs])
        data = seq_dc[instrument_src_kda, "data.adc"].ndarray()
        gain = seq_dc[instrument_src_kda, "data.gain"].ndarray()
        memcells = seq_dc[instrument_src_kda, "data.memoryCell"].ndarray()

        if memory_cells > 1:
            # For plotting, assume that memory cells are sorted the same for all trains.
            found_cells = memcells[0] == cell_id_preview
            if any(found_cells):
                cell_idx_preview = np.where(found_cells)[0][0]
            else:
                print(f"WARNING: The selected cell_id_preview {cell_id_preview} is not available in burst mode. "
                      f"Previewing cell `{memcells[0]}`.")
                cell_idx_preview = 0
        else:
            cell_idx_preview = 0

        context.map(correct_train, data)
        step_timer.done_step('Correction time.')

        step_timer.start()

        # Create CORR files and add corrected data sources.
        # Exclude raw data images (data/adc).
        with h5py.File(out_file, 'w') as ofile:
            # Copy RAW non-calibrated sources.
            with h5py.File(sequence_file, 'r') as sfile:
                h5_copy_except.h5_copy_except_paths(
                    sfile, ofile, [f"{data_path}/adc"])

            # Create datasets with the available corrected data
            ddset = ofile.create_dataset(
                f"{data_path}/adc",
                data=data_corr,
                chunks=((1,) + dshape[1:]),  # 1 chunk == 1 image
                dtype=np.float32,
            )
            write_compressed_frames(
                mask_corr,
                ofile,
                dataset_path=f"{data_path}/mask",
                comp_threads=n_cpus,
            )
            save_reduced_rois(ofile, data_corr, mask_corr, local_karabo_da)
        step_timer.done_step('Saving data time.')
```
%% Cell type:markdown id: tags:

### Processing time summary ###

%% Cell type:code id: tags:

``` python
print(f"Total processing time {step_timer.timespan():.01f} s")
step_timer.print_summary()
```

%% Cell type:code id: tags:

``` python
if skip_plots:
    print('Skipping plots')
    import sys
    sys.exit(0)
```
%% Cell type:code id: tags:

``` python
# Positions are given in pixels
mod_width = (256 * 4) + (2 * 3)  # inc. 2px gaps between tiles
mod_height = (256 * 2) + 2

if karabo_id == "SPB_IRDA_JF4M":
    # The first 4 modules are rotated 180 degrees relative to the others.
    # We pass the bottom, beam-right corner of the module regardless of its
    # orientation, requiring a subtraction from the symmetric positions we'd
    # otherwise calculate.
    x_start, y_start = 1125, 1078
    module_pos = [
        (x_start - mod_width, y_start - mod_height - (i * (mod_height + 33)))
        for i in range(4)
    ] + [
        (-x_start, -y_start + (i * (mod_height + 33))) for i in range(4)
    ]
    orientations = [(-1, -1) for _ in range(4)] + [(1, 1) for _ in range(4)]
elif karabo_id == "FXE_XAD_JF1M":
    module_pos = ((-mod_width//2, 33), (-mod_width//2, -mod_height - 33))
    orientations = [(-1, -1), (1, 1)]
else:
    module_pos = ((-mod_width//2, -mod_height//2),)
    orientations = None

geom = JUNGFRAUGeometry.from_module_positions(module_pos, orientations=orientations, asic_gap=0)
```
%% Cell type:code id: tags:

``` python
first_seq = 0 if sequences == [-1] else sequences[0]

with RunDirectory(out_folder, f"*{run}*S{first_seq:05d}*") as corr_dc:
    # Reading CORR data for plotting.
    jf_corr = components.JUNGFRAU(
        corr_dc,
        detector_name=karabo_id,
    ).select_trains(np.s_[:plot_trains])
    tid, jf_corr_data = next(iter(jf_corr.trains(require_all=True)))

    # Shape = [modules, trains, cells, x, y]
    corrected = jf_corr.get_array("data.adc")[:, :, cell_idx_preview, ...].values
    corrected_train = jf_corr_data["data.adc"][
        :, cell_idx_preview, ...
    ].values  # lose the train axis.
    mask = jf_corr.get_array("data.mask")[:, :, cell_idx_preview, ...].values
    mask_train = jf_corr_data["data.mask"][:, cell_idx_preview, ...].values

with RunDirectory(f"{in_folder}/r{run:04d}/", f"*S{first_seq:05d}*") as raw_dc:
    # Reading RAW data for plotting.
    jf_raw = components.JUNGFRAU(raw_dc, detector_name=karabo_id).select_trains(
        np.s_[:plot_trains]
    )

    raw = jf_raw.get_array("data.adc")[:, :, cell_idx_preview, ...].values
    raw_train = (
        jf_raw.select_trains(by_id[[tid]])
        .get_array("data.adc")[:, 0, cell_idx_preview, ...]
        .values
    )
    gain = jf_raw.get_array("data.gain")[:, :, cell_idx_preview, ...].values
    gain_train_cells = (
        jf_raw.select_trains(by_id[[tid]]).get_array("data.gain")[:, :, :, ...].values
    )
```
%% Cell type:code id: tags:

``` python
db_modules = get_pdu_from_db(
    karabo_id=karabo_id,
    karabo_da=karabo_da,
    constant=Constants.jungfrau.Offset(),
    condition=condition,
    cal_db_interface=cal_db_interface,
    snapshot_at=creation_time,
)
```
%% Cell type:markdown id: tags:

### Mean RAW Preview

%% Cell type:code id: tags:

``` python
print(f"The per pixel mean of the first {raw.shape[1]} trains of the first sequence file")
fig, ax = plt.subplots(figsize=(18, 10))
raw_mean = np.mean(raw, axis=1)
geom.plot_data_fast(
    raw_mean,
    ax=ax,
    vmin=min(0.75*np.median(raw_mean[raw_mean > 0]), 2000),
    vmax=max(1.5*np.median(raw_mean[raw_mean > 0]), 16000),
    cmap="jet",
    colorbar={'shrink': 1, 'pad': 0.01},
)
ax.set_title(f'{karabo_id} - Mean RAW', size=18)
plt.show()
```
%% Cell type:markdown id: tags:

### Mean CORRECTED Preview

%% Cell type:code id: tags:

``` python
print(f"The per pixel mean of the first {corrected.shape[1]} trains of the first sequence file")
fig, ax = plt.subplots(figsize=(18, 10))
corrected_mean = np.mean(corrected, axis=1)
_corrected_vmin = min(0.75*np.median(corrected_mean[corrected_mean > 0]), -0.5)
_corrected_vmax = max(2.*np.median(corrected_mean[corrected_mean > 0]), 100)
geom.plot_data_fast(
    corrected_mean,
    ax=ax,
    vmin=_corrected_vmin,
    vmax=_corrected_vmax,
    cmap="jet",
    colorbar={'shrink': 1, 'pad': 0.01},
)
ax.set_title(f'{karabo_id} - Mean CORRECTED', size=18)
plt.show()
```
%% Cell type:code id: tags:

``` python
fig, ax = plt.subplots(figsize=(18, 10))
corrected_masked = corrected.copy()
corrected_masked[mask != 0] = np.nan
corrected_masked_mean = np.nanmean(corrected_masked, axis=1)
del corrected_masked
geom.plot_data_fast(
    corrected_masked_mean,
    ax=ax,
    vmin=_corrected_vmin,
    vmax=_corrected_vmax,
    cmap="jet",
    colorbar={'shrink': 1, 'pad': 0.01},
)
ax.set_title(f'{karabo_id} - Mean CORRECTED with mask', size=18)
plt.show()
```
%% Cell type:code id: tags:

``` python
display(Markdown(f"#### A single image from train {tid}"))
fig, ax = plt.subplots(figsize=(18, 10))
geom.plot_data_fast(
    corrected_train,
    ax=ax,
    vmin=min(0.75 * np.median(corrected_train[corrected_train > 0]), -0.5),
    vmax=max(2.0 * np.median(corrected_train[corrected_train > 0]), 100),
    cmap="jet",
    colorbar={"shrink": 1, "pad": 0.01},
)
ax.set_title(f"{karabo_id} - CORRECTED train: {tid}", size=18)
plt.show()
```
%% Cell type:code id: tags:

``` python
def do_2d_plot(data, edges, y_axis, x_axis, title):
    fig = plt.figure(figsize=(10, 10))
    ax = fig.add_subplot(111)
    extent = [
        np.min(edges[1]),
        np.max(edges[1]),
        np.min(edges[0]),
        np.max(edges[0]),
    ]
    im = ax.imshow(
        data[::-1, :],
        extent=extent,
        aspect="auto",
        norm=LogNorm(vmin=1, vmax=np.max(data))
    )
    ax.set_xlabel(x_axis)
    ax.set_ylabel(y_axis)
    ax.set_title(title)
    cb = fig.colorbar(im)
    cb.set_label("Counts")
```
%% Cell type:markdown id: tags:

### Gain Bit Value

%% Cell type:code id: tags:

``` python
for i, (pdu, mod) in enumerate(zip(db_modules, karabo_da)):
    h, ex, ey = np.histogram2d(
        raw[i].flatten(),
        gain[i].flatten(),
        bins=[100, 4],
        range=[[0, 10000], [0, 4]],
    )
    do_2d_plot(
        h,
        (ex, ey),
        "Signal (ADU)",
        "Gain Bit Value (high gain=0[00], medium gain=1[01], low gain=3[11])",
        f"Module {mod} ({pdu})",
    )
```
%% Cell type:markdown id: tags:

## Signal Distribution ##

%% Cell type:code id: tags:

``` python
for i, (pdu, mod) in enumerate(zip(db_modules, karabo_da)):
    fig, axs = plt.subplots(nrows=2, ncols=1, figsize=(18, 10))
    corrected_flatten = corrected[i].flatten()
    for ax, hist_range in zip(axs, [(-100, 1000), (-1000, 10000)]):
        h = ax.hist(
            corrected_flatten,
            bins=1000,
            range=hist_range,
            log=True,
        )
        l = ax.set_xlabel("Signal (keV)")
        l = ax.set_ylabel("Counts")
        _ = ax.set_title(f'Module {mod} ({pdu})')
```
%% Cell type:markdown id: tags:

### Maximum GAIN Preview

%% Cell type:code id: tags:

``` python
display(Markdown(f"#### The per pixel maximum of train {tid} of the GAIN data"))
fig, ax = plt.subplots(figsize=(18, 10))
gain_max = np.max(gain_train_cells, axis=(1, 2))
geom.plot_data_fast(
    gain_max,
    ax=ax,
    cmap="jet",
    colorbar={'shrink': 1, 'pad': 0.01},
)
plt.show()
```
%% Cell type:markdown id: tags:

## Bad Pixels ##

The mask contains dedicated entries for all pixels and memory cells, as well as all three gain stages. Each mask entry is encoded in 32 bits as:

%% Cell type:code id: tags:

``` python
table = []
for item in BadPixels:
    table.append(
        (item.name, f"{item.value:016b}"))

md = display(Latex(tabulate.tabulate(
    table, tablefmt='latex',
    headers=["Bad pixel type", "Bit mask"])))
```
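%% Cell type:markdown id: tags:

Since each mask entry is a bit field, individual defect types can be tested with a bitwise AND. A brief illustrative sketch using the `mask_train` array read earlier (the flag chosen here is just an example):

%% Cell type:code id: tags:

``` python
# Count pixels flagged with a given defect type in the previewed train.
# Any combination of BadPixels flags can be tested the same way.
n_flagged = np.count_nonzero(mask_train & BadPixels.NON_STANDARD_SIZE)
print(f"Pixels flagged NON_STANDARD_SIZE in the previewed train: {n_flagged}")
```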
%% Cell type:markdown id: tags:

### Single Image Bad Pixels ###

A single-image bad pixel map for the first image of the first train

%% Cell type:code id: tags:

``` python
display(Markdown(f"#### Bad pixels image for train {tid}"))
fig, ax = plt.subplots(figsize=(18, 10))
geom.plot_data_fast(
    np.log2(mask_train),
    ax=ax,
    vmin=0, vmax=32, cmap="jet",
    colorbar={'shrink': 1, 'pad': 0.01},
)
plt.show()
```