%% Cell type:markdown id: tags:
# Jungfrau Offline Correction #
Author: European XFEL Detector Group, Version: 2.0
Offline Calibration for the Jungfrau Detector
%% Cell type:code id: tags:
``` python
in_folder = "/gpfs/exfel/exp/SPB/202130/p900204/raw" # the folder to read data from, required
out_folder = "/gpfs/exfel/data/scratch/ahmedk/test/remove" # the folder to output to, required
run = 91 # run to process, required
metadata_folder = "" # Directory containing calibration_metadata.yml when run by xfel-calibrate
sequences = [-1] # sequences to correct, set to [-1] for all, range allowed
sequences_per_node = 1 # number of sequence files per cluster node if run as slurm job, set to 0 to not run SLURM parallel
# Parameters used to access raw data.
karabo_id = "SPB_IRDA_JF4M" # karabo prefix of Jungfrau devices
karabo_da = ['JNGFR01', 'JNGFR02', 'JNGFR03', 'JNGFR04', 'JNGFR05', 'JNGFR06', 'JNGFR07', 'JNGFR08'] # data aggregators
receiver_template = "JNGFR{:02d}" # Detector receiver template for accessing raw data files. e.g. "JNGFR{:02d}"
instrument_source_template = '{}/DET/{}:daqOutput' # template for source name (filled with karabo_id & receiver_id). e.g. 'SPB_IRDA_JF4M/DET/JNGFR01:daqOutput'
ctrl_source_template = '{}/DET/CONTROL' # template for control source name (filled with karabo_id_control)
karabo_id_control = "" # if control is on a different ID, set to empty string if it is the same a karabo-id
# Parameters for calibration database.
cal_db_interface = "tcp://max-exfl016:8017#8025" # the database interface to use
cal_db_timeout = 180000 # timeout on caldb requests
creation_time = "" # To overwrite the measured creation_time. Required Format: YYYY-MM-DD HR:MN:SC e.g. "2022-06-28 13:00:00"
# Parameters affecting corrected data.
relative_gain = True # do relative gain correction.
strixel_sensor = False # reordering for strixel detector layout.
strixel_double_norm = 2.0 # normalization to use for double-size pixels, only applied for strixel sensors.
limit_trains = 0 # ONLY FOR TESTING. Process only the first N trains; use 0 to process all.
chunks_ids = 32 # HDF chunk size for memoryCell and frameNumber.
chunks_data = 1 # HDF chunk size for pixel data in number of frames.
# Parameters for retrieving calibration constants
manual_slow_data = False # if true, use manually entered bias_voltage, integration_time, gain_setting, and gain_mode values
integration_time = 4.96 # integration time in us, will be overwritten by value in file
gain_setting = 0 # 0 for dynamic gain, 1 for dynamic HG0, will be overwritten by value in file
gain_mode = 0 # 0 for runs with dynamic gain setting, 1 for fixed gain. It will be overwritten by the value in the file, unless manual_slow_data is set to True.
mem_cells = -1 # Set mem_cells to -1 to automatically use the value stored in RAW data.
bias_voltage = 180 # will be overwritten by value in file
# Parameters for plotting
skip_plots = False # exit after writing corrected files
plot_trains = 500 # Number of trains to plot for RAW and CORRECTED plots. Set to -1 to automatically plot all trains.
cell_id_preview = 15 # cell Id used for preview in single-shot plots
# Parameters for ROI selection and reduction
roi_definitions = [-1] # List with groups of 6 values defining ROIs, e.g. [3, 120, 180, 200, 550, -2] for module 3 (JNGFR03), slice 120:180, 200:550, average along axis -2 (slow scan, or -1 for fast scan)
def balance_sequences(in_folder, run, sequences, sequences_per_node, karabo_da):
    from xfel_calibrate.calibrate import balance_sequences as bs
    return bs(in_folder, run, sequences, sequences_per_node, karabo_da)
```
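%% Cell type:markdown id: tags:
The `roi_definitions` parameter packs each ROI into six integers. A small sketch unpacking the example from the parameter comment above (illustrative only, not used by the correction itself):
%% Cell type:code id: tags:
``` python
# Hypothetical ROI definition from the comment above: module 3 (JNGFR03),
# slow-scan slice 120:180, fast-scan slice 200:550, averaged along axis -2.
example_roi = [3, 120, 180, 200, 550, -2]
module_no, a1, a2, b1, b2, mean_axis = example_roi
print(f"Module {module_no}: slice {a1}:{a2}, {b1}:{b2}, mean over axis {mean_axis}")
```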
%% Cell type:code id: tags:
``` python
import fnmatch
import multiprocessing
import sys
import warnings
from logging import warning
from pathlib import Path
import h5py
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pasha as psh
import tabulate
from IPython.display import Latex, Markdown, display
from extra_data import DataCollection, H5File, RunDirectory, by_id, components
from extra_geom import JUNGFRAUGeometry
from matplotlib.colors import LogNorm
import cal_tools.restful_config as rest_cfg
from cal_tools.calcat_interface import JUNGFRAU_CalibrationData
from cal_tools.enums import BadPixels
from cal_tools.files import DataFile
from cal_tools.jungfraulib import JungfrauCtrl
from cal_tools.plotting import init_jungfrau_geom
from cal_tools.step_timing import StepTimer
from cal_tools.tools import (
    calcat_creation_time,
    map_seq_files,
    CalibrationMetadata,
)
warnings.filterwarnings('ignore')
matplotlib.use('agg')
%matplotlib inline
```
%% Cell type:code id: tags:
``` python
in_folder = Path(in_folder)
out_folder = Path(out_folder)
run_folder = in_folder / f'r{run:04d}'
run_dc = RunDirectory(run_folder)
instrument_src = instrument_source_template.format(karabo_id, receiver_template)
metadata = CalibrationMetadata(metadata_folder or out_folder)
# NOTE: this notebook will not overwrite calibration metadata file
const_yaml = metadata.get("retrieved-constants", {})
out_folder.mkdir(parents=True, exist_ok=True)
print(f"Run is: {run}")
print(f"Instrument H5File source: {instrument_src}")
karabo_da = sorted(karabo_da)
print(f"Process modules: {karabo_da}")
# Run's creation time:
creation_time = calcat_creation_time(in_folder, run, creation_time)
print(f"Creation time: {creation_time}")
if karabo_id_control == "":
    karabo_id_control = karabo_id

if any(axis_no not in {-2, -1, 2, 3} for axis_no in roi_definitions[5::6]):
    print("ROI averaging must be on axis 2/3 (or equivalently -2/-1). "
          f"Axis numbers given: {roi_definitions[5::6]}")
    sys.exit(1)
```
%% Cell type:code id: tags:
``` python
ctrl_src = ctrl_source_template.format(karabo_id_control)
ctrl_data = JungfrauCtrl(run_dc, ctrl_src)
if mem_cells < 0:
    memory_cells, sc_start = ctrl_data.get_memory_cells()

    mem_cells_name = "single cell" if memory_cells == 1 else "burst"
    print(f"Run is in {mem_cells_name} mode.\nStorage cell start: {sc_start:02d}")
else:
    memory_cells = mem_cells
    mem_cells_name = "single cell" if memory_cells == 1 else "burst"
    print(f"Run is manually set to {mem_cells_name} mode with {memory_cells} memory cells.")

if not manual_slow_data:
    integration_time = ctrl_data.get_integration_time()
    bias_voltage = ctrl_data.get_bias_voltage()
    gain_setting = ctrl_data.get_gain_setting()
    gain_mode = ctrl_data.get_gain_mode()

print(f"Integration time is {integration_time} us")
print(f"Gain setting is {gain_setting} (run settings: {ctrl_data.run_settings})")
print(f"Gain mode is {gain_mode} ({ctrl_data.run_mode})")
print(f"Bias voltage is {bias_voltage} V")
print(f"Number of memory cells is {memory_cells}")
```
%% Cell type:markdown id: tags:
### Retrieving calibration constants
%% Cell type:code id: tags:
``` python
jf_cal = JUNGFRAU_CalibrationData(
detector_name=karabo_id,
sensor_bias_voltage=bias_voltage,
event_at=creation_time,
modules=karabo_da,
memory_cells=memory_cells,
integration_time=integration_time,
gain_setting=gain_setting,
gain_mode=gain_mode,
client=rest_cfg.calibration_client(),
)
da_to_pdu = {}
for mod_info in jf_cal.physical_detector_units.values():
    da_to_pdu[mod_info["karabo_da"]] = mod_info["physical_name"]

if const_yaml:
    const_data = dict()
    for mod, constants in const_yaml.items():
        if mod not in karabo_da:
            continue  # skip other keys like time-summary
        const_data[mod] = dict()
        for cname, mdata in constants["constants"].items():
            const_data[mod][cname] = dict()
            if mdata["creation-time"]:
                with h5py.File(mdata["path"], "r") as cf:
                    const_data[mod][cname] = np.copy(
                        cf[f"{mdata['dataset']}/data"])
else:
    constant_names = ["Offset10Hz", "BadPixelsDark10Hz"]
    if relative_gain:
        constant_names += ["BadPixelsFF10Hz", "RelativeGain10Hz"]

    const_data = jf_cal.ndarray_map(calibrations=constant_names)
```
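%% Cell type:markdown id: tags:
Either branch above yields a nested mapping of module name to constant name to array. An optional sanity check of what was retrieved (shapes are whatever the database provides; unavailable constants appear as empty entries):
%% Cell type:code id: tags:
``` python
# Optional sketch: list the retrieved constants and their array shapes.
for mod, cals in const_data.items():
    for cname, arr in cals.items():
        print(f"{mod}: {cname} -> {getattr(arr, 'shape', None)}")
```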
%% Cell type:code id: tags:
``` python
# Validate the constants' availability and raise/warn accordingly.
for mod in karabo_da[:]:
    calibrations = const_data.get(mod, {})
    missing_dark_constants = {"Offset10Hz", "BadPixelsDark10Hz"} - set(calibrations)
    missing_gain_constants = {"BadPixelsFF10Hz", "RelativeGain10Hz"} - set(calibrations)

    if missing_dark_constants:
        warning(
            f"Dark constants {missing_dark_constants} are not available to correct {mod}."
            f" Module {mod} won't be corrected.")
        karabo_da.remove(mod)

    if relative_gain and missing_gain_constants:
        warning(f"Gain constants {missing_gain_constants} were not retrieved for {mod}."
                " No relative gain correction for this module.")

if not karabo_da:  # Dark constants are missing for all modules.
    raise ValueError("Dark constants are missing for all modules.")
```
%% Cell type:code id: tags:
``` python
def prepare_constants(module: str):
    """Prepare constant arrays.

    :param module: The module name (karabo_da)
    :return:
        offset_map (offset map),
        mask (mask of bad pixels),
        gain_map (map of relative gain factors),
        module (name of module),
    """
    constant_arrays = const_data[module]
    offset_map = constant_arrays["Offset10Hz"]
    mask = constant_arrays["BadPixelsDark10Hz"]
    gain_map = constant_arrays.get("RelativeGain10Hz")
    mask_ff = constant_arrays.get("BadPixelsFF10Hz")

    # Combine masks
    if mask_ff is not None:
        mask |= np.moveaxis(mask_ff, 0, 1)

    if memory_cells > 1:
        # Move from x, y, cell, gain to cell, x, y, gain
        offset_map = np.moveaxis(offset_map, [0, 1], [1, 2])
        mask = np.moveaxis(mask, [0, 1], [1, 2])
    else:
        offset_map = np.squeeze(offset_map)
        mask = np.squeeze(mask)

    # Mask double-size pixels
    mask[..., [255, 256], :, :] |= BadPixels.NON_STANDARD_SIZE
    mask[..., [255, 256, 511, 512, 767, 768], :] |= BadPixels.NON_STANDARD_SIZE

    if gain_map is not None:
        if memory_cells > 1:
            gain_map = np.moveaxis(gain_map, [0, 2], [2, 0])
            # Add an extra constant for the empty cell
            b = np.ones(((1,)+gain_map.shape[1:]))
            gain_map = np.concatenate((gain_map, b), axis=0)
        else:
            gain_map = np.moveaxis(np.squeeze(gain_map), 1, 0)

    return offset_map, mask, gain_map, module


with multiprocessing.Pool() as pool:
    r = pool.map(prepare_constants, karabo_da)

# Collect the prepared constants per module.
constants = {}
for offset_map, mask, gain_map, k_da in r:
    constants[k_da] = (offset_map, mask, gain_map)

const_data.clear()
```
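%% Cell type:markdown id: tags:
The axis reordering in `prepare_constants()` is easiest to see on a toy array. This sketch assumes a stored layout of (rows, columns, cells, gains); the sizes are made up:
%% Cell type:code id: tags:
``` python
import numpy as np

# Assumed burst-mode constant layout (rows, cols, cells, gains); sizes are hypothetical.
toy_offset_map = np.zeros((512, 1024, 16, 3), dtype=np.float32)

# The same moveaxis call as above: bring the cell axis to the front so that
# correct_train() can index constants by memory cell.
cell_first = np.moveaxis(toy_offset_map, [0, 1], [1, 2])
assert cell_first.shape == (16, 512, 1024, 3)
```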
%% Cell type:code id: tags:
``` python
# Read available sequence files to correct.
mapped_files, num_seq_files = map_seq_files(
    run_folder, karabo_da, sequences)

if not len(mapped_files):
    raise IndexError(
        "No sequence files available to correct for the selected sequences and karabo_da.")
```
%% Cell type:code id: tags:
``` python
print(f"Processing a total of {num_seq_files} sequence files")
table = []
fi = 0
for kda, sfiles in mapped_files.items():
    for k, f in enumerate(sfiles):
        if k == 0:
            table.append((fi, kda, k, f))
        else:
            table.append((fi, "", k, f))
        fi += 1

md = display(Latex(tabulate.tabulate(
    table, tablefmt='latex',
    headers=["#", "module", "# module", "file"])))
```
%% Cell type:code id: tags:
``` python
if strixel_sensor:
    from cal_tools.jfstrixel import (
        STRIXEL_SHAPE as strixel_frame_shape, double_pixel_indices, to_strixel)
    Ydouble, Xdouble = double_pixel_indices()
    print('Strixel sensor transformation enabled')
```
%% Cell type:code id: tags:
``` python
# Correct a chunk of images for offset and gain
def correct_train(wid, index, d):
    d = d.astype(np.float32)  # [cells, x, y]
    g = gain[index]

    # Copy gain over first to keep it at the original 3 for low gain.
    if strixel_sensor:
        to_strixel(g, out=gain_corr[index, ...])
    else:
        gain_corr[index, ...] = g

    # Jungfrau gains 0[00], 1[01], 3[11]
    # Change low gain to 2 for indexing purposes.
    g[g==3] = 2

    # Select memory cells
    if memory_cells > 1:
        # Even though it is correct to assume that the memory cell pattern
        # is the same across all trains (for one correction run taken with
        # one acquisition), it is preferred not to assume this, to account
        # for exceptions that can happen.
        m = memcells[index].copy()
        # 255 is a cell value pointing to no cell image data (image of 0 pixels).
        # The corresponding image will be corrected with the constant of cell 0,
        # to avoid values of 0. This relies on not storing the modified memory
        # cells in the corrected data.
        m[m==255] = 0

        offset_map_cell = offset_map[m, ...]  # [16 + empty cell, x, y]
        mask_cell = mask[m, ...]
    else:
        offset_map_cell = offset_map
        mask_cell = mask

    # Offset correction
    offset = np.choose(g, np.moveaxis(offset_map_cell, -1, 0))
    d -= offset

    # Gain correction
    if relative_gain and gain_map is not None:
        if memory_cells > 1:
            gain_map_cell = gain_map[m, ...]
        else:
            gain_map_cell = gain_map
        cal = np.choose(g, np.moveaxis(gain_map_cell, -1, 0))
        d /= cal

    msk = np.choose(g, np.moveaxis(mask_cell, -1, 0))

    if strixel_sensor:
        to_strixel(d, out=data_corr[index, ...])
        data_corr[index, :, Ydouble, Xdouble] /= strixel_double_norm
        to_strixel(msk, out=mask_corr[index, ...])
    else:
        data_corr[index, ...] = d
        mask_corr[index, ...] = msk
```
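%% Cell type:markdown id: tags:
The constant selection above hinges on remapping the raw low-gain bit value 3 to index 2, then letting `np.choose` pick one constant per pixel. A toy example with made-up numbers:
%% Cell type:code id: tags:
``` python
import numpy as np

toy_g = np.array([[0, 1], [3, 1]], dtype=np.uint8)  # raw gain bits; 3 marks low gain
toy_g[toy_g == 3] = 2                               # remap for indexing, as in correct_train()

toy_consts = np.empty((2, 2, 3), dtype=np.float32)  # toy (..., gain) constant map
toy_consts[..., 0] = 10.0  # high gain
toy_consts[..., 1] = 20.0  # medium gain
toy_consts[..., 2] = 30.0  # low gain

picked = np.choose(toy_g, np.moveaxis(toy_consts, -1, 0))
assert picked[0, 0] == 10.0 and picked[1, 0] == 30.0  # per-pixel selection by gain
```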
%% Cell type:code id: tags:
``` python
step_timer = StepTimer()
n_cpus = multiprocessing.cpu_count()
context = psh.context.ProcessContext(num_workers=n_cpus)
print(f"Using {n_cpus} workers for correction.")
```
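%% Cell type:markdown id: tags:
For reference, a minimal self-contained illustration of the pasha pattern used below: allocate a shared output array, then map a kernel (signature: worker id, index, value) over the first axis of the input. This mirrors how `correct_train` is applied to the raw data.
%% Cell type:code id: tags:
``` python
import numpy as np
import pasha as psh

toy_ctx = psh.context.ProcessContext(num_workers=2)
toy_in = np.arange(6.0).reshape(3, 2)
toy_out = toy_ctx.alloc(shape=toy_in.shape, dtype=np.float64)

def toy_kernel(wid, index, value):
    toy_out[index] = value * 2  # writes land in the shared array

toy_ctx.map(toy_kernel, toy_in)
assert np.allclose(toy_out, toy_in * 2)
```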
%% Cell type:code id: tags:
``` python
def save_reduced_rois(ofile, data_corr, mask_corr, karabo_da):
    """If ROIs are defined for this karabo_da, reduce them and save to the output file."""
    rois_defined = 0
    module_no = int(karabo_da[-2:])
    params_source = f'{karabo_id}/ROIPROC/{karabo_da}'
    rois_source = f'{params_source}:output'

    if roi_definitions != [-1]:
        # Create INSTRUMENT and CONTROL sections to later add datasets.
        outp_source = ofile.create_instrument_source(rois_source)
        ctrl_source = ofile.create_control_source(params_source)

        for i in range(len(roi_definitions) // 6):
            roi_module, a1, a2, b1, b2, mean_axis = roi_definitions[i*6 : (i+1)*6]
            if roi_module == module_no:
                rois_defined += 1
                # Apply the mask and average the remaining pixels to 1D
                roi_data = data_corr[..., a1:a2, b1:b2].mean(
                    axis=mean_axis, where=(mask_corr[..., a1:a2, b1:b2] == 0)
                )
                # Add ROI corrected datasets
                outp_source.create_key(f'data.roi{rois_defined}.data', data=roi_data)
                # Add ROI run control datasets.
                ctrl_source.create_run_key(f'roi{rois_defined}.region', np.array([[a1, a2, b1, b2]]))
                ctrl_source.create_run_key(f'roi{rois_defined}.reduce_axis', np.array([mean_axis]))

        if rois_defined:
            # Copy the index for the new source.
            # Create count/first datasets at INDEX source.
            ofile.copy(f'INDEX/{karabo_id}/DET/{karabo_da}:daqOutput/data',
                       f'INDEX/{rois_source}/data')
            ntrains = ofile['INDEX/trainId'].shape[0]
            ctrl_source.create_index(ntrains)
```
%% Cell type:markdown id: tags:
### Correcting RAW data ###
%% Cell type:code id: tags:
``` python
# Loop over modules
empty_seq = 0
corrected_files = []

for local_karabo_da, mapped_files_module in mapped_files.items():
    instrument_src_kda = instrument_src.format(int(local_karabo_da[-2:]))

    for sequence_file in mapped_files_module:
        # Save corrected data in an output file with the name
        # of the corresponding raw sequence file.
        ofile_name = sequence_file.name.replace("RAW", "CORR")
        out_file = out_folder / ofile_name
        corrected_files.append(ofile_name)

        # Load the sequence file data collection, the data.adc keydata,
        # the data shape (to later create arrays of the same shape),
        # and the number of available trains to correct.
        seq_dc = H5File(sequence_file)
        seq_dc_adc = seq_dc[instrument_src_kda, "data.adc"]
        ishape = seq_dc_adc.shape  # input shape.
        corr_ntrains = ishape[0]  # number of available trains to correct.
        all_train_ids = seq_dc_adc.train_ids

        # Raise a WARNING if this sequence has no trains to correct.
        # Otherwise, print the number of trains with no data.
        if corr_ntrains == 0:
            warning(f"No trains to correct for {sequence_file.name}: "
                    "Skipping the processing of this file.")
            empty_seq += 1
            continue
        elif len(all_train_ids) != corr_ntrains:
            print(f"{sequence_file.name} has {len(all_train_ids) - corr_ntrains} "
                  "trains with missing data.")

        # For testing, limit corrected trains, i.e. to get output faster.
        if limit_trains > 0:
            print(f"\nCorrected trains are limited to: {limit_trains} trains")
            corr_ntrains = min(corr_ntrains, limit_trains)
        print(f"\nNumber of corrected trains is: {corr_ntrains} for {ofile_name}")

        # Load constants from the constants dictionary.
        # These arrays are used by the `correct_train()` function.
        offset_map, mask, gain_map = constants[local_karabo_da]

        # Determine total output shape.
        if strixel_sensor:
            oshape = (*ishape[:-2], *strixel_frame_shape)
        else:
            oshape = ishape

        # Allocate shared arrays for corrected data. Used in `correct_train()`.
        data_corr = context.alloc(shape=oshape, dtype=np.float32)
        gain_corr = context.alloc(shape=oshape, dtype=np.uint8)
        mask_corr = context.alloc(shape=oshape, dtype=np.uint32)

        step_timer.start()
        # Overwrite seq_dc after eliminating empty trains and/or applying the train limit.
        seq_dc = seq_dc.select(
            instrument_src_kda, "*", require_all=True).select_trains(np.s_[:corr_ntrains])

        # Load raw images (adc), gain, memory cells, and frame numbers.
        data = seq_dc[instrument_src_kda, "data.adc"].ndarray()
        gain = seq_dc[instrument_src_kda, "data.gain"].ndarray()
        memcells = seq_dc[instrument_src_kda, "data.memoryCell"].ndarray()
        frame_number = seq_dc[instrument_src_kda, "data.frameNumber"].ndarray()

        # Validate that the selected cell id to preview is available in raw data.
        if memory_cells > 1:
            # For plotting, assume that memory cells are sorted the same for all trains.
            found_cells = memcells[0] == cell_id_preview
            if any(found_cells):
                cell_idx_preview = np.where(found_cells)[0][0]
            else:
                print(f"The selected cell_id_preview {cell_id_preview} is not available in burst mode. "
                      f"Previewing cell `{memcells[0]}`.")
                cell_idx_preview = 0
        else:
            cell_idx_preview = 0

        # Correct data per train
        context.map(correct_train, data)
        step_timer.done_step("Correction time.")

        step_timer.start()
        # Create CORR files and add corrected data sections.
        image_counts = seq_dc[instrument_src_kda, "data.adc"].data_counts(labelled=False)

        with DataFile(out_file, 'w') as outp_file:
            # Create INDEX datasets.
            outp_file.create_index(seq_dc.train_ids, from_file=seq_dc.files[0])

            # Create INSTRUMENT section to later add corrected datasets.
            outp_source = outp_file.create_instrument_source(instrument_src_kda)

            # Create count/first datasets at INDEX source.
            outp_source.create_index(data=image_counts)

            # RAW memoryCell and frameNumber are not corrected, but only
            # the values for the corrected trains are stored.
            outp_source.create_key(
                "data.memoryCell", data=memcells,
                chunks=(min(chunks_ids, memcells.shape[0]), 1))
            outp_source.create_key(
                "data.frameNumber", data=frame_number,
                chunks=(min(chunks_ids, frame_number.shape[0]), 1))

            # Add the main corrected `data.adc` dataset and store corrected data.
            outp_source.create_key(
                "data.adc", data=data_corr,
                chunks=(min(chunks_data, data_corr.shape[0]), *oshape[1:]))
            outp_source.create_compressed_key(
                "data.gain", data=gain_corr)
            outp_source.create_compressed_key(
                "data.mask", data=mask_corr)

            # Temporary hotfix for FXE assuming this dataset is in corrected files.
            outp_source.create_key(
                "data.trainId", data=seq_dc.train_ids,
                chunks=(min(50, len(seq_dc.train_ids))))

            save_reduced_rois(outp_file, data_corr, mask_corr, local_karabo_da)

            # Create METADATA datasets.
            outp_file.create_metadata(like=seq_dc)
        step_timer.done_step('Saving data time.')

if empty_seq == sum([len(i) for i in mapped_files.values()]):
    warning("No valid trains for RAW data to correct.")
    sys.exit(0)
```
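%% Cell type:markdown id: tags:
Once written, the reduced ROIs can be read back with extra_data under the source and key names constructed in `save_reduced_rois`. A sketch (the module `JNGFR03` and `roi1` are assumptions; they exist only if a matching ROI was defined):
%% Cell type:code id: tags:
``` python
# Optional sketch: read a reduced ROI back from the first corrected file.
from extra_data import H5File

if roi_definitions != [-1]:
    roi_dc = H5File(out_folder / corrected_files[0])
    roi = roi_dc[f"{karabo_id}/ROIPROC/JNGFR03:output", "data.roi1.data"].ndarray()
    print(roi.shape)
```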
%% Cell type:markdown id: tags:
### Processing time summary ###
%% Cell type:code id: tags:
``` python
print(f"Total processing time {step_timer.timespan():.01f} s")
step_timer.print_summary()
```
%% Cell type:code id: tags:
``` python
if skip_plots:
    print('Skipping plots')
    sys.exit(0)
```
%% Cell type:code id: tags:
``` python
# Positions are given in pixels
mod_width = (256 * 4) + (2 * 3) # inc. 2px gaps between tiles
mod_height = (256 * 2) + 2
if karabo_id == "SPB_IRDA_JF4M":
    # The first 4 modules are rotated 180 degrees relative to the others.
    # We pass the bottom, beam-right corner of the module regardless of its
    # orientation, requiring a subtraction from the symmetric positions we'd
    # otherwise calculate.
    x_start, y_start = 1125, 1078
    module_pos = [
        (x_start - mod_width, y_start - mod_height - (i * (mod_height + 33)))
        for i in range(4)
    ] + [
        (-x_start, -y_start + (i * (mod_height + 33))) for i in range(4)
    ]
    orientations = [(-1, -1) for _ in range(4)] + [(1, 1) for _ in range(4)]
elif karabo_id == "FXE_XAD_JF1M":
    module_pos = ((-mod_width//2, 33), (-mod_width//2, -mod_height - 33))
    orientations = [(-1, -1), (1, 1)]
else:
    module_pos = ((-mod_width//2, -mod_height//2),)
    orientations = None

geom = JUNGFRAUGeometry.from_module_positions(module_pos, orientations=orientations, asic_gap=0)

# The shared helper builds the same geometry and supersedes the manual construction above.
_, geom = init_jungfrau_geom(karabo_id=karabo_id, karabo_da=karabo_da)
```
%% Cell type:code id: tags:
``` python
first_seq = 0 if sequences == [-1] else sequences[0]

corrected_files = [
    out_folder / f for f in fnmatch.filter(corrected_files, f"*{run}*S{first_seq:05d}*")
]

with DataCollection.from_paths(corrected_files) as corr_dc:
    # Reading CORR data for plotting.
    jf_corr = components.JUNGFRAU(
        corr_dc,
        detector_name=karabo_id,
    ).select_trains(np.s_[:plot_trains])
    tid, jf_corr_data = next(iter(jf_corr.trains(require_all=True)))

    # Shape = [modules, trains, cells, x, y]
    # TODO: Fix the case when not all modules were requested to be corrected,
    # e.g. if only one module was corrected, an assertion error is expected
    # at `geom.plot_data_fast` while plotting corrected images.
    corrected = jf_corr.get_array("data.adc")[:, :, cell_idx_preview, ...].values
    corrected_train = jf_corr_data["data.adc"][
        :, cell_idx_preview, ...
    ].values  # drop the train axis.
    mask = jf_corr.get_array("data.mask")[:, :, cell_idx_preview, ...].values
    mask_train = jf_corr_data["data.mask"][:, cell_idx_preview, ...].values

with RunDirectory(f"{in_folder}/r{run:04d}/", f"*S{first_seq:05d}*", _use_voview=False) as raw_dc:
    # Reading RAW data for plotting.
    jf_raw = components.JUNGFRAU(raw_dc, detector_name=karabo_id).select_trains(
        np.s_[:plot_trains]
    )

    raw = jf_raw.get_array("data.adc")[:, :, cell_idx_preview, ...].values
    raw_train = (
        jf_raw.select_trains(by_id[[tid]])
        .get_array("data.adc")[:, 0, cell_idx_preview, ...]
        .values
    )
    gain = jf_raw.get_array("data.gain")[:, :, cell_idx_preview, ...].values
    gain_train_cells = (
        jf_raw.select_trains(by_id[[tid]]).get_array("data.gain")[:, :, :, ...].values
    )
```
%% Cell type:markdown id: tags:
### Mean RAW Preview
%% Cell type:code id: tags:
``` python
print(f"The per pixel mean of the first {raw.shape[1]} trains of the first sequence file")
fig, ax = plt.subplots(figsize=(18, 10))
raw_mean = np.mean(raw, axis=1)
geom.plot_data_fast(
raw_mean,
ax=ax,
vmin=min(0.75*np.median(raw_mean[raw_mean > 0]), 2000),
vmax=max(1.5*np.median(raw_mean[raw_mean > 0]), 16000),
cmap="jet",
colorbar={'shrink': 1, 'pad': 0.01},
)
ax.set_title(f'{karabo_id} - Mean RAW', size=18)
plt.show()
```
%% Cell type:markdown id: tags:
### Mean CORRECTED Preview
%% Cell type:code id: tags:
``` python
print(f"The per pixel mean of the first {corrected.shape[1]} trains of the first sequence file")
fig, ax = plt.subplots(figsize=(18, 10))
corrected_mean = np.mean(corrected, axis=1)
_corrected_vmin = min(0.75*np.median(corrected_mean[corrected_mean > 0]), -0.5)
_corrected_vmax = max(2.*np.median(corrected_mean[corrected_mean > 0]), 100)
mean_plot_kwargs = dict(
vmin=_corrected_vmin, vmax=_corrected_vmax, cmap="jet"
)
if not strixel_sensor:
geom.plot_data_fast(
corrected_mean,
ax=ax,
colorbar={'shrink': 1, 'pad': 0.01},
**mean_plot_kwargs
)
else:
ax.imshow(corrected_mean.squeeze(), aspect=10, **mean_plot_kwargs)
ax.set_title(f'{karabo_id} - Mean CORRECTED', size=18)
plt.show()
```
%% Cell type:code id: tags:
``` python
fig, ax = plt.subplots(figsize=(18, 10))
corrected_masked = corrected.copy()
corrected_masked[mask != 0] = np.nan
corrected_masked_mean = np.nanmean(corrected_masked, axis=1)
del corrected_masked

if not strixel_sensor:
    geom.plot_data_fast(
        corrected_masked_mean,
        ax=ax,
        colorbar={'shrink': 1, 'pad': 0.01},
        **mean_plot_kwargs
    )
else:
    # Use the masked mean here as well, matching the geometry branch above.
    ax.imshow(corrected_masked_mean.squeeze(), aspect=10, **mean_plot_kwargs)
ax.set_title(f'{karabo_id} - Mean CORRECTED with mask', size=18)
plt.show()
```
%% Cell type:code id: tags:
``` python
display(Markdown(f"#### A single image from train {tid}"))

fig, ax = plt.subplots(figsize=(18, 10))
single_plot_kwargs = dict(
    vmin=min(0.75 * np.median(corrected_train[corrected_train > 0]), -0.5),
    vmax=max(2.0 * np.median(corrected_train[corrected_train > 0]), 100),
    cmap="jet",
)

if not strixel_sensor:
    geom.plot_data_fast(
        corrected_train,
        ax=ax,
        colorbar={"shrink": 1, "pad": 0.01},
        **single_plot_kwargs
    )
else:
    ax.imshow(corrected_train.squeeze(), aspect=10, **single_plot_kwargs)
ax.set_title(f"{karabo_id} - CORRECTED train: {tid}", size=18)
plt.show()
```
%% Cell type:code id: tags:
``` python
def do_2d_plot(data, edges, y_axis, x_axis, title):
    fig = plt.figure(figsize=(10, 10))
    ax = fig.add_subplot(111)
    extent = [
        np.min(edges[1]),
        np.max(edges[1]),
        np.min(edges[0]),
        np.max(edges[0]),
    ]
    im = ax.imshow(
        data[::-1, :],
        extent=extent,
        aspect="auto",
        norm=LogNorm(vmin=1, vmax=np.max(data))
    )
    ax.set_xlabel(x_axis)
    ax.set_ylabel(y_axis)
    ax.set_title(title)
    cb = fig.colorbar(im)
    cb.set_label("Counts")
```
%% Cell type:markdown id: tags:
### Gain Bit Value
%% Cell type:code id: tags:
``` python
for i, mod in enumerate(karabo_da):
    pdu = da_to_pdu[mod]
    h, ex, ey = np.histogram2d(
        raw[i].flatten(),
        gain[i].flatten(),
        bins=[100, 4],
        range=[[0, 10000], [0, 4]],
    )
    do_2d_plot(
        h,
        (ex, ey),
        "Signal (ADU)",
        "Gain Bit Value (high gain=0[00], medium gain=1[01], low gain=3[11])",
        f"Module {mod} ({pdu})",
    )
```
%% Cell type:markdown id: tags:
## Signal Distribution ##
%% Cell type:code id: tags:
``` python
for i, mod in enumerate(karabo_da):
    pdu = da_to_pdu[mod]
    fig, axs = plt.subplots(nrows=2, ncols=1, figsize=(18, 10))
    corrected_flatten = corrected[i].flatten()
    for ax, hist_range in zip(axs, [(-100, 1000), (-1000, 10000)]):
        h = ax.hist(
            corrected_flatten,
            bins=1000,
            range=hist_range,
            log=True,
        )
        l = ax.set_xlabel("Signal (keV)")
        l = ax.set_ylabel("Counts")
        _ = ax.set_title(f'Module {mod} ({pdu})')
```
%% Cell type:markdown id: tags:
### Maximum GAIN Preview
%% Cell type:code id: tags:
``` python
display(Markdown(f"#### The per-pixel maximum of train {tid} of the GAIN data"))

fig, ax = plt.subplots(figsize=(18, 10))
gain_max = np.max(gain_train_cells, axis=(1, 2))
geom.plot_data_fast(
    gain_max,
    ax=ax,
    cmap="jet",
    colorbar={'shrink': 1, 'pad': 0.01},
)
plt.show()
```
%% Cell type:markdown id: tags:
## Bad Pixels ##
The mask contains dedicated entries for all pixels and memory cells as well as all three gain stages. Each mask entry is encoded in 32 bits as:
%% Cell type:code id: tags:
``` python
table = []
for item in BadPixels:
    table.append(
        (item.name, f"{item.value:016b}"))

md = display(Latex(tabulate.tabulate(
    table, tablefmt='latex',
    headers=["Bad pixel type", "Bit mask"])))
```
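%% Cell type:markdown id: tags:
Individual flags can be tested with bitwise operations on the stored 32-bit mask. A small sketch using one flag from the table (the flag name comes from `cal_tools.enums.BadPixels`):
%% Cell type:code id: tags:
``` python
# Sketch: count entries carrying a specific bad-pixel flag in the preview train.
n_flagged = np.count_nonzero(mask_train & BadPixels.OFFSET_OUT_OF_THRESHOLD.value)
print(f"{n_flagged} pixel entries flagged as OFFSET_OUT_OF_THRESHOLD")
```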
%% Cell type:markdown id: tags:
### Single Image Bad Pixels ###
A single image bad pixel map for the first image of the first train
%% Cell type:code id: tags:
``` python
display(Markdown(f"#### Bad pixels image for train {tid}"))
fig, ax = plt.subplots(figsize=(18, 10))
if not strixel_sensor:
geom.plot_data_fast(
np.log2(mask_train),
ax=ax,
vmin=0, vmax=32, cmap="jet",
colorbar={'shrink': 1, 'pad': 0.01},
)
else:
ax.imshow(np.log2(mask_train).squeeze(), vmin=0, vmax=32, cmap='jet', aspect=10)
plt.show()
```
......
%% Cell type:markdown id: tags:
# Jungfrau Dark Summary
Author: European XFEL Detector Department, Version: 1.0
Summary of the processed dark constants and a comparison with previously injected constants taken under the same conditions.
%% Cell type:code id: tags:
``` python
out_folder = "/gpfs/exfel/data/scratch/ahmedk/test/jungfrau_assembeled_dark" # path to output to, required
metadata_folder = "" # Directory containing calibration_metadata.yml when run by xfel-calibrate.
# Parameters used to access raw data.
karabo_da = [] # list of data aggregators, which correspond to different JF modules. Only needed for single-module detectors.
karabo_id = "FXE_XAD_JF1M" # detector identifier.
# Parameters to be used for injecting dark calibration constants.
local_output = True # Boolean indicating that local constants were stored in the out_folder
# Skip the whole notebook if local_output is false in the preceding notebooks.
if not local_output:
    print('No local constants saved. Skipping summary plots')
    import sys
    sys.exit(0)
```
%% Cell type:code id: tags:
``` python
import warnings
from collections import OrderedDict
from pathlib import Path
warnings.filterwarnings('ignore')
import h5py
import matplotlib
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
import numpy as np
import yaml
from IPython.display import Markdown, display
matplotlib.use("agg")
%matplotlib inline
from cal_tools.enums import BadPixels, JungfrauSettings
from cal_tools.ana_tools import get_range
from cal_tools.plotting import init_jungfrau_geom, show_processed_modules_jungfrau
from cal_tools.tools import CalibrationMetadata
from XFELDetAna.plotting.simpleplot import simplePlot
```
%% Cell type:code id: tags:
``` python
# Prepare paths and load previous constants' metadata.
out_folder = Path(out_folder)
metadata = CalibrationMetadata(metadata_folder or out_folder)
mod_mapping = metadata.setdefault("modules-mapping", {})
dark_constants = ["Offset", "Noise", "BadPixelsDark"]
prev_const_metadata = {}
for fn in Path(metadata_folder or out_folder).glob("module_metadata_*.yml"):
    with fn.open("r") as fd:
        fdict = yaml.safe_load(fd)
    module = fdict["module"]
    mod_mapping[module] = fdict["pdu"]
    prev_const_metadata[module] = fdict["old-constants"]

metadata.save()
```
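%% Cell type:markdown id: tags:
The `module_metadata_*.yml` files consumed above follow a simple per-module layout. A hypothetical example of the parsed content (all values made up for illustration):
%% Cell type:code id: tags:
``` python
# Hypothetical parsed module_metadata_*.yml content (illustrative values only).
example_fdict = {
    "module": "JNGFR01",
    "pdu": "JUNGFRAU_M001",  # physical detector unit name (made up)
    "old-constants": {
        "Offset": {
            "filepath": "/gpfs/exfel/d/cal/caldb_store/...",  # path left elided
            "h5path": "...",  # dataset group inside the file
        },
    },
}
```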
%% Cell type:code id: tags:
``` python
expected_modules, geom = init_jungfrau_geom(
    karabo_id=karabo_id, karabo_da=karabo_da)
nmods = len(expected_modules)
```
%% Cell type:markdown id: tags:
Prepare the newly injected and the previous constants from the local folder produced in out_folder.
%% Cell type:code id: tags:
``` python
fixed_gain = False  # constant is adaptive by default.

# Get the constant shape from one of the local constants.
# This is one way to determine the number of memory cells.
with h5py.File(list(out_folder.glob("const_Offset_*"))[0], 'r') as f:
    const_shape = f["data"][()].shape
    # Get the fixed gain value to decide offset vmin, vmax
    # for later constant map plots.
    gain_mode = "condition/Gain mode/value"
    if gain_mode in f:
        fixed_gain = f[gain_mode][()]

initial_stacked_constants = np.full(((nmods,)+const_shape), np.nan)
curr_constants = {c: initial_stacked_constants.copy() for c in dark_constants}
prev_constants = {c: initial_stacked_constants.copy() for c in dark_constants}

excluded_constants = []  # constants excluded from comparison plots.

# Loop over constants and modules.
for cname in dark_constants:
    excluded_modules = []  # modules with no previous constants.
    for i, mod in enumerate(sorted(expected_modules)):
        # Loop over expected dark constants in out_folder.
        # Some constants can be missing in out_folder.
        pdu = mod_mapping[mod]

        # First load the new constant.
        fpath = out_folder / f"const_{cname}_{pdu}.h5"
        with h5py.File(fpath, 'r') as f:
            curr_constants[cname][i, ...] = f["data"][()]

        # Load the previous constants.
        old_mod_mdata = prev_const_metadata[mod]
        if cname in old_mod_mdata:  # a module can be missing from detector dark processing.
            filepath = old_mod_mdata[cname]["filepath"]
            h5path = old_mod_mdata[cname]["h5path"]
            if not filepath or not h5path:
                excluded_modules.append(mod)
                prev_constants[cname][i, ...].fill(np.nan)
            else:
                with h5py.File(filepath, "r") as fd:
                    prev_constants[cname][i, ...] = fd[f"{h5path}/data"][()]

    if excluded_modules:
        print(f"Previous {cname} constants for {excluded_modules} are not available.\n")
        # Exclude constants from comparison plots, if the corresponding
        # previous constants are not available for all modules.
        if len(excluded_modules) == nmods:
            excluded_constants.append(cname)
            print(f"No comparison plots for {cname}.\n")
```
%% Cell type:code id: tags:
``` python
display(Markdown('## Processed modules'))
processed_modules = list(mod_mapping.keys())
processed_pdus = list(mod_mapping.values())
show_processed_modules_jungfrau(
    jungfrau_geom=geom,
    constants=curr_constants,
    processed_modules=processed_modules,
    expected_modules=expected_modules,
    display_module_names=processed_pdus,
)
```
%% Cell type:code id: tags:
``` python
gainstages = 3
gain_names = ["High Gain", "Medium Gain", "Low Gain"]

const_range = {
    "Offset": [(0, 8000), (8000, 16000), (8000, 16000)],
    "Noise": [(0., 50.), (0., 50.), (0., 50.)],
    "BadPixelsDark": [(0., 5.), (0., 5.), (0., 5.)],
}
# vmin and vmax are different for Offset for fixed gain constants.
if fixed_gain:
    const_range["Offset"] = [(0, 8000), (0, 8000), (0, 8000)]

diff_const_range = {
    "Offset": [(0, 500), (0, 500), (0, 500)],
    "Noise": [(0., 5.), (0., 5.), (0., 5.)],
    "BadPixelsDark": [(0., 5.), (0., 5.), (0., 5.)],
}
percentage_range = (0, 100)
perc_const_range = {c: [percentage_range]*3 for c in dark_constants}

gs = gridspec.GridSpec(2, 4)

axes = {
    "ax0": {
        "gs": gs[0, 1:3],
        "shrink": 0.7,
        "pad": 0.05,
        "label": "ADCu",
        "title": "{}",
        "location": "right",
        "range": const_range,
    },
    "ax1": {
        "gs": gs[1, :2],
        "shrink": 0.7,
        "pad": 0.02,
        "label": "ADCu",
        "location": "left",
        "title": "Difference with previous {}",
        "range": diff_const_range,
    },
    "ax2": {
        "gs": gs[1, 2:],
        "shrink": 0.7,
        "pad": 0.02,
        "label": "%",
        "location": "right",
        "title": "Difference with previous {} %",
        "range": perc_const_range,
    },
}
```
%% Cell type:markdown id: tags:
## Summary figures across pixels and memory cells.
The following plots give an overview of calibration constants averaged across pixels and memory cells. A bad pixel mask is applied.
%% Cell type:code id: tags:
``` python
for cname, const in curr_constants.items():
    # Prepare the stacked mean of the constant,
    # the difference from the previous constant,
    # and the fraction of that difference.
    mean_const = np.nanmean(const, axis=3)
    mean_diff = np.abs(np.nanmean(const, axis=3) - np.nanmean(prev_constants[cname], axis=3))  # noqa
    mean_frac = np.abs(mean_diff / mean_const) * 100

    for gain in range(gainstages):
        data_to_plot = {
            'ax0': mean_const[..., gain],
            'ax1': mean_diff[..., gain],
            'ax2': mean_frac[..., gain],
        }

        # Plot the constant over all modules.
        display(Markdown(f'### {cname} - {gain_names[gain]}'))
        if nmods > 1:
            fig = plt.figure(figsize=(20, 20))
        else:
            fig = plt.figure(figsize=(20, 10))

        for axname, axv in axes.items():
            # Avoid difference plots if previous constants
            # are missing for the detector.
            if cname in excluded_constants and axname != "ax0":
                break
            ax = fig.add_subplot(axv["gs"])
            vmin, vmax = axv["range"][cname][gain]

            geom.plot_data(
                data_to_plot[axname],
                vmin=vmin, vmax=vmax, ax=ax,
                colorbar={
                    "shrink": axv["shrink"],
                    "pad": axv["pad"],
                    "location": axv["location"],
                },
            )
            colorbar = ax.images[0].colorbar
            colorbar.set_label(axv["label"], fontsize=15)
            colorbar.ax.tick_params(labelsize=15)

            ax.tick_params(labelsize=1)
            ax.set_title(axv["title"].format(
                f"{cname} {gain_names[gain]}"), fontsize=15)

            if axname == "ax0":
                ax.set_xlabel('Columns', fontsize=15)
                ax.set_ylabel('Rows', fontsize=15)
                ax.tick_params(labelsize=15)
            else:
                ax.tick_params(labelsize=0)
                # Remove axes labels for comparison plots.
                ax.set_xlabel('')
                ax.set_ylabel('')
        plt.show()
```
%% Cell type:code id: tags:
``` python
if curr_constants["Offset"].shape[-2] > 1:
    display(Markdown("## Summary across pixels per memory cell"))

    # Plot the mean and std across pixels for each memory cell,
    # per module, gain, and constant.
    for const_name, const in curr_constants.items():
        display(Markdown(f'### {const_name}'))
        for gain in range(gainstages):
            data = np.copy(const[..., gain])
            if const_name == 'BadPixelsDark':
                data[data > 0] = 1.0
                datamean = np.nanmean(data, axis=(1, 2))
                datamean[datamean == 1.0] = np.nan
                fig = plt.figure(
                    figsize=(15, 6),
                    tight_layout={'pad': 0.2, 'w_pad': 1.3, 'h_pad': 1.3})
                label = 'Fraction of bad pixels'
                ax = fig.add_subplot(1, 1, 1)
            else:
                datamean = np.nanmean(data, axis=(1, 2))
                fig = plt.figure(
                    figsize=(15, 6),
                    tight_layout={'pad': 0.2, 'w_pad': 1.3, 'h_pad': 1.3})
                label = f'{const_name} value [ADU], good pixels only'
                ax = fig.add_subplot(1, 2, 1)

            d = []
            for i, mod in enumerate(datamean):
                d.append({
                    'x': np.arange(mod.shape[0]),
                    'y': mod,
                    'drawstyle': 'steps-pre',
                    'label': processed_modules[i],
                })
            simplePlot(
                d, figsize=(10, 10), xrange=(-12, 510),
                x_label='Memory Cell ID',
                y_label=label,
                use_axis=ax,
                title=f'{gain_names[gain]}',
                title_position=[0.5, 1.18],
                legend='outside-top-ncol6-frame',
                legend_size='18%',
                legend_pad=0.00,
            )

            # Extra sigma plot for Offset and Noise constants.
            if const_name != 'BadPixelsDark':
                ax = fig.add_subplot(1, 2, 2)
                label = f'$\\sigma$ {const_name} [ADU], good pixels only'
                d = []
                for i, mod in enumerate(np.nanstd(data, axis=(1, 2))):
                    d.append({
                        'x': np.arange(mod.shape[0]),
                        'y': mod,
                        'drawstyle': 'steps-pre',
                        'label': processed_modules[i],
                    })
                simplePlot(
                    d, figsize=(10, 10), xrange=(-12, 510),
                    x_label='Memory Cell ID',
                    y_label=label,
                    use_axis=ax,
                    title=f'{gain_names[gain]} $\\sigma$',
                    title_position=[0.5, 1.18],
                    legend='outside-top-ncol6-frame',
                    legend_size='18%',
                    legend_pad=0.00,
                )
            plt.show()
```
......
@@ -64,7 +64,7 @@ install_requires = [
"dynaconf==3.1.4",
"env_cache==0.1",
"extra_data==1.12.0",
"extra_geom==1.8.0",
"extra_geom==1.10.0",
"gitpython==3.1.0",
"h5py==3.5.0",
"iminuit==1.3.8",
......
"""Interfaces to calibration constant data."""
import re
import socket
from datetime import date, datetime, time, timezone
from functools import lru_cache
from os import getenv
from pathlib import Path
from weakref import WeakKeyDictionary
@@ -367,6 +364,7 @@ class CalibrationData:
calibrations = set()
default_client = None
_default_caldb_root = ...
def __init__(
self,
@@ -375,6 +373,7 @@
client=None,
event_at=None,
module_naming="da",
caldb_root=None,
):
"""Initialize a new CalibrationData object.
@@ -397,6 +396,8 @@
integers in karabo_da.
`qm`: QxMx naming convention is used. Virtual names for
AGIPD, DSSC, and LPD.
caldb_root (str or None): Path to the root directory for caldb
files, finds folder for production caldb by default.
**condition_params: Operating condition parameters defined
on an instance level.
"""
@@ -406,6 +407,10 @@
self.event_at = event_at
self.pdu_snapshot_at = event_at
self.module_naming = module_naming
if caldb_root is None:
self.caldb_root = self._get_default_caldb_root()
else:
self.caldb_root = Path(caldb_root)
if client is None:
@@ -486,29 +491,19 @@
)
return CalibrationData.default_client
-    @property
-    def caldb_root(self):
-        """Root directory for calibration constant data.
-        Returns:
-            (Path or None) Location of caldb store or
-                None if not available.
-        """
-        if not hasattr(CalibrationData, "_caldb_root"):
-            if getenv("SASE"):
-                # ONC
-                CalibrationData._caldb_root = Path("/common/cal/caldb_store")
-            elif re.match(r"^max-(.+)\.desy\.de$", socket.getfqdn()):
-                # Maxwell
-                CalibrationData._caldb_root = Path(
-                    "/gpfs/exfel/d/cal/caldb_store"
-                )
+    @staticmethod
+    def _get_default_caldb_root():
+        if CalibrationData._default_caldb_root is ...:
+            onc_path = Path("/common/cal/caldb_store")
+            maxwell_path = Path("/gpfs/exfel/d/cal/caldb_store")
+            if onc_path.is_dir():
+                CalibrationData._default_caldb_root = onc_path
+            elif maxwell_path.is_dir():
+                CalibrationData._default_caldb_root = maxwell_path
             else:
                 # Probably unavailable
-                CalibrationData._caldb_root = None
+                CalibrationData._default_caldb_root = None
-        return CalibrationData._caldb_root
+        return CalibrationData._default_caldb_root
@property
def client(self):
@@ -947,6 +942,7 @@ class AGIPD_CalibrationData(SplitConditionCalibrationData):
gain_setting=None,
gain_mode=None,
module_naming="da",
caldb_root=None,
integration_time=12,
source_energy=9.2,
pixels_x=512,
@@ -958,6 +954,7 @@
client,
event_at,
module_naming,
caldb_root,
)
self.sensor_bias_voltage = sensor_bias_voltage
@@ -1021,6 +1018,7 @@ class LPD_CalibrationData(SplitConditionCalibrationData):
client=None,
event_at=None,
module_naming="da",
caldb_root=None,
):
super().__init__(
detector_name,
@@ -1028,6 +1026,7 @@
client,
event_at,
module_naming,
caldb_root,
)
self.sensor_bias_voltage = sensor_bias_voltage
@@ -1072,6 +1071,7 @@ class DSSC_CalibrationData(CalibrationData):
client=None,
event_at=None,
module_naming="da",
caldb_root=None,
):
super().__init__(
detector_name,
@@ -1079,6 +1079,7 @@
client,
event_at,
module_naming,
caldb_root,
)
self.sensor_bias_voltage = sensor_bias_voltage
@@ -1126,6 +1127,7 @@ class JUNGFRAU_CalibrationData(CalibrationData):
client=None,
event_at=None,
module_naming="da",
caldb_root=None,
):
super().__init__(
detector_name,
@@ -1133,6 +1135,7 @@
client,
event_at,
module_naming,
caldb_root,
)
self.sensor_bias_voltage = sensor_bias_voltage
@@ -1193,6 +1196,7 @@ class PNCCD_CalibrationData(SplitConditionCalibrationData):
client=None,
event_at=None,
module_naming="da",
caldb_root=None,
):
# Ignore modules for this detector.
super().__init__(
@@ -1201,6 +1205,7 @@
client,
event_at,
module_naming,
caldb_root,
)
self.sensor_bias_voltage = sensor_bias_voltage
@@ -1249,6 +1254,7 @@ class EPIX100_CalibrationData(SplitConditionCalibrationData):
client=None,
event_at=None,
module_naming="da",
caldb_root=None,
):
# Ignore modules for this detector.
super().__init__(
@@ -1257,6 +1263,7 @@
client,
event_at,
module_naming,
caldb_root,
)
self.sensor_bias_voltage = sensor_bias_voltage
@@ -1299,6 +1306,7 @@ class GOTTHARD2_CalibrationData(CalibrationData):
client=None,
event_at=None,
module_naming="da",
caldb_root=None,
):
# Ignore modules for this detector.
super().__init__(
@@ -1307,6 +1315,7 @@
client,
event_at,
module_naming,
caldb_root,
)
self.sensor_bias_voltage = sensor_bias_voltage
......
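For context, the new `caldb_root` argument threaded through these constructors would be used as below. This is a sketch mirroring the notebook cell earlier in this diff; the condition values are illustrative, and omitting `caldb_root` keeps the auto-detected default.

``` python
from cal_tools.calcat_interface import JUNGFRAU_CalibrationData

# Sketch: overriding the caldb store location (illustrative condition values).
jf_cal = JUNGFRAU_CalibrationData(
    detector_name="SPB_IRDA_JF4M",
    sensor_bias_voltage=180,
    memory_cells=1,
    integration_time=4.96,
    gain_setting=0,
    gain_mode=0,
    caldb_root="/gpfs/exfel/d/cal/caldb_store",
)
```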
@@ -415,7 +415,7 @@ def init_jungfrau_geom(
karabo_da: List[str]
) -> Tuple[List[str], JUNGFRAUGeometry]:
""" Initiate JUNGFRAUGeometry object based on the selected detector
-    (SPB_IRDA_JF4M, FXE_XAD_JF1M, or a single module detector).
+    (JF4M, JF1M, or JF500K detectors).
:param karabo_id: the detector identifer of an expected multimodular
detector or a single module detector.
@@ -429,7 +429,7 @@
mod_width = (256 * 4) + (2 * 3) # inc. 2px gaps between tiles
mod_height = (256 * 2) + 2
-    if karabo_id == "SPB_IRDA_JF4M":
+    if "JF4M" in karabo_id:
nmods = 8
expected_modules = [f"JNGFR{i:02d}" for i in range(1, nmods+1)]
# The first 4 modules are rotated 180 degrees relative to the others.
@@ -445,12 +445,12 @@
]
orientations = [
(-1, -1) for _ in range(4)] + [(1, 1) for _ in range(4)]
-    elif karabo_id == "FXE_XAD_JF1M":
+    elif "JF1M" in karabo_id:
nmods = 2
expected_modules = [f"JNGFR{i:02d}" for i in range(1, nmods+1)]
module_pos = ((-mod_width//2, 33), (-mod_width//2, -mod_height-33))
orientations = [(-1,-1), (1,1)]
-    else:
+    else:  # e.g. HED_IA1_JF500K1, FXE_XAD_JF500K, FXE_XAD_JFHZ
nmods = 1
expected_modules = karabo_da
module_pos = ((-mod_width//2, -mod_height//2),)
......
@@ -98,6 +98,19 @@ notebooks = {
"cluster cores": 1},
}
},
"LPDMINI": {
"DARK": {
"notebook": "notebooks/LPDMini/LPD_Mini_Char_Darks_NBC.ipynb",
"concurrency": {"parameter": None},
},
"CORRECT": {
"notebook": "notebooks/LPDMini/LPD_Mini_Correct.ipynb",
"concurrency": {"parameter": "sequences",
"default concurrency": [-1],
"use function": "balance_sequences",
"cluster cores": 16},
},
},
"PNCCD": {
"DARK": {
"notebook": "notebooks/pnCCD/Characterize_pnCCD_Dark_NBC.ipynb",
......
@@ -1029,8 +1029,15 @@ class ActionsServer:
dconfig = data_conf[karabo_id]
# check for files according to mapping in raw run dir.
# data-mapping for LPD mini uses karabo-da names like
# LPDMINI00/8 to identify individual modules. The /8 is not
# part of the file name
data_agg_names = {
kda.split('/')[0] for kda in dconfig['karabo-da']
}
if any(y in x for x in fl
-    for y in dconfig['karabo-da']):
+    for y in data_agg_names):
thisconf = copy.copy(dconfig)
if isinstance(pconf[karabo_id], dict):
thisconf.update(copy.copy(pconf[karabo_id]))
......
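The aggregator-name normalization above can be checked in isolation (toy values; the `LPDMINI00/8`-style names come from the comment in the hunk):

``` python
# Toy check of the aggregator-name normalization in the hunk above.
toy_karabo_da = ["LPDMINI00/8", "LPDMINI00/16", "JNGFR01"]
data_agg_names = {kda.split('/')[0] for kda in toy_karabo_da}
assert data_agg_names == {"LPDMINI00", "JNGFR01"}
```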