Commit d94695b8 authored by Karim Ahmed
keep track of latest badpixels modifications

parent b7feeb4e
1 merge request: !684 [Gotthard-II] Dark Processing.
%% Cell type:markdown id:49b6577f-96a5-4dd2-bdd9-da661b2c4619 tags:
# Gotthard2 Dark Image Characterization
Author: European XFEL Detector Group, Version: 1.0
%% Cell type:code id:818e24e8 tags:
``` python
in_folder = "/gpfs/exfel/exp/DETLAB/202230/p900276/raw"  # the folder to read data from, required
out_folder = "/gpfs/exfel/data/scratch/ahmedk/test/gotthard2/darks"  # the folder to output to, required
run_high = 10  # run number for G0 dark run, required
run_med = 10  # run number for G1 dark run, required
run_low = 10  # run number for G2 dark run, required
sequences = [-1]  # sequences to correct, set to [-1] for all, range allowed
sequences_per_node = 1  # number of sequence files per node if notebook executed through xfel-calibrate, set to 0 to not run SLURM parallel

# Parameters used to access raw data.
karabo_id = "DET_LAB_G2"  # karabo prefix of Gotthard2 devices
karabo_da = ["DA01"]  # data aggregators
receiver_template = "GOT{:02d}"  # receiver template used to read INSTRUMENT keys.
control_template = "CTRL{:02d}"  # control template used to read CONTROL keys.
instrument_source_template = '{}/DET/{}:daqOutput'  # template for source name (filled with karabo_id & receiver_id). e.g. 'SPB_IRDA_JF4M/DET/JNGFR01:daqOutput'
ctrl_source_template = '{}/DET/{}'  # template for control source name (filled with karabo_id_control)
karabo_id_control = ""  # if control is on a different ID, set to an empty string if it is the same as karabo_id

# Parameters for the calibration database.
use_dir_creation_date = True
cal_db_interface = "tcp://max-exfl016:8020"  # calibration DB interface to use
cal_db_timeout = 300000  # timeout on caldb requests
db_output = False  # Output constants to the calibration database
local_output = True  # Output constants locally

# Conditions used for injected calibration constants.

# Parameters used during selecting raw data trains.
min_trains = 1  # Minimum number of trains that should be available to process dark constants. Default 1.
max_trains = 1000  # Maximum number of trains to use for processing dark constants. Set to 0 to use all available trains.

# Don't delete! myMDC sends this by default.
operation_mode = ''  # Detector operation mode, optional
```
%% Cell type:code id:8085f9aa tags:
``` python
import multiprocessing
from pathlib import Path

import h5py
import matplotlib.pyplot as plt
import numpy as np
import pasha as psh
from extra_data import RunDirectory
from IPython.display import Markdown, display

from cal_tools.enums import BadPixels
from cal_tools.gotthard2algs import convert_to_10bit
from cal_tools.step_timing import StepTimer
from cal_tools.tools import (
    get_dir_creation_date,
    get_from_db,
    get_pdu_from_db,
    get_report,
    save_const_to_h5,
    send_to_db,
)
from iCalibrationDB import Conditions, Constants
```
%% Cell type:code id:18fe4379 tags:
``` python
run_nums = [run_high, run_med, run_low]
in_folder = Path(in_folder)
out_folder = Path(out_folder)
out_folder.mkdir(exist_ok=True)

print(f"Process modules: {karabo_da}")

run_dc = RunDirectory(in_folder / f"r{run_high:04d}")
file_loc = f"proposal:{run_dc.run_metadata()['proposalNumber']} runs:{run_high} {run_med} {run_low}"  # noqa

instrument_src = instrument_source_template.format(karabo_id, receiver_template)
ctrl_src = ctrl_source_template.format(karabo_id_control, control_template)

# Read report path to associate it later with injected constants.
report = get_report(out_folder)

if use_dir_creation_date:
    creation_time = get_dir_creation_date(in_folder, run_high)
    print(f"Using {creation_time.isoformat()} as creation time")

if not karabo_id_control:
    karabo_id_control = karabo_id
```
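%% Cell type:markdown tags:
The instrument source is built in two stages: the template is first filled with `karabo_id` and the receiver template, and only later with the module number parsed from the data aggregator name. A minimal sketch of that expansion, reusing the parameter values defined above:
%% Cell type:code tags:
``` python
# Minimal sketch of how the source-name template expands into the final INSTRUMENT source.
example_src = instrument_source_template.format(karabo_id, receiver_template)
print(example_src)  # e.g. 'DET_LAB_G2/DET/GOT{:02d}:daqOutput'

# The module number is taken from the data aggregator name, e.g. "DA01" -> 1.
example_mod = karabo_da[0]
print(example_src.format(int(example_mod[-2:])))  # e.g. 'DET_LAB_G2/DET/GOT01:daqOutput'
```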
%% Cell type:code id:108be688 tags:
``` python
step_timer = StepTimer()
```
%% Cell type:code id:fb80b98e tags:
``` python
# Load constants temporarily from defined local paths.
constants_file = "/gpfs/exfel/data/user/mramilli/gotthard2/constants/GH2-0124/calibration_constants_GH2-0124.h5"
with h5py.File(constants_file, 'r') as cfile:
    lut = cfile["LUT"][()]
```
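%% Cell type:markdown tags:
`convert_to_10bit` (imported above from `cal_tools.gotthard2algs`) applies this LUT to the raw ADC codes. As a schematic of what a lookup-table conversion does in general, here is a toy example with a simplified, purely hypothetical 1D table; the layout of the real LUT is handled inside `convert_to_10bit` and is not assumed here.
%% Cell type:code tags:
``` python
# Schematic only (not the cal_tools implementation): every raw 12-bit ADC code
# is replaced by a pre-computed 10-bit value looked up in a table.
toy_lut = np.arange(4096, dtype=np.uint16) >> 2     # hypothetical mapping: drop the two LSBs
raw_adc = np.array([0, 7, 4095], dtype=np.uint16)   # example 12-bit codes
converted = toy_lut[raw_adc]
print(converted)  # -> [   0    1 1023]
```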
%% Cell type:code id:ff9149fc tags:
``` python
# Read parameter conditions.
step_timer.start()
run_dcs_dict = dict()
ctrl_src = ctrl_source_template.format(karabo_id_control, control_template)

for gain, run in enumerate(run_nums):
    run_dc = RunDirectory(in_folder / f"r{run:04d}/")
    run_dcs_dict[run] = [gain, run_dc]

# TODO: Read every condition from slow data.
# TODO: Validate that the conditions are the same and as expected for all runs.
```
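%% Cell type:markdown tags:
The TODOs above ask for the operating conditions to be read from slow data. A hedged sketch of how a per-run value could be read with EXtra-data: the control source still needs the module number filled in, and `"exposureTime"` is a placeholder key name, not a verified Gotthard2 CONTROL property.
%% Cell type:code tags:
``` python
# Hedged sketch for the TODOs above: read one operating condition per run from
# the CONTROL source. "exposureTime" is a placeholder key name.
ctrl_mod_src = ctrl_src.format(int(karabo_da[0][-2:]))
run_conditions = {}
for run, (gain, run_dc) in run_dcs_dict.items():
    if ctrl_mod_src in run_dc.control_sources:
        run_conditions[run] = run_dc.get_run_value(ctrl_mod_src, "exposureTime")
print(run_conditions)
```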
%% Cell type:code id:ac9c5dc3-bc66-4e7e-b6a1-360259be535c tags:
``` python
def specify_trains_to_process(
    img_key_data: "extra_data.KeyData",  # noqa
    max_trains: int = 0,
    min_trains: int = 0,
):
    """Specify the total number of trains to process,
    based on the given min_trains and max_trains.
    Print the number of trains to process and the number of empty trains.
    Raise ValueError if fewer than min_trains trains are available.
    """
    n_trains = img_key_data.shape[0]
    all_trains = len(img_key_data.train_ids)
    print(f"{mod} has {all_trains - n_trains} "
          f"trains with empty frames out of {all_trains} trains")

    if n_trains < min_trains:
        raise ValueError(
            f"Less than {min_trains} trains are available in RAW data."
            " Not enough data to process darks.")

    if max_trains > 0:
        n_trains = min(n_trains, max_trains)

    print(f"Processing {n_trains} trains.")

    return n_trains
```
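%% Cell type:markdown tags:
`specify_trains_to_process` only reads `.shape` and `.train_ids` from the key data, so its selection logic can be sanity-checked without opening a run. A small illustrative check with a stand-in object (the numbers mirror the output shown further below; frame and strip dimensions are illustrative):
%% Cell type:code tags:
``` python
# Purely illustrative check of the train-selection logic with a stand-in object
# that only mimics the two attributes the function reads (shape and train_ids).
from types import SimpleNamespace

fake_key_data = SimpleNamespace(
    shape=(999, 2700, 1280),        # 999 non-empty trains; other sizes are illustrative
    train_ids=list(range(2041)),    # 2041 trains recorded in total
)
mod = karabo_da[0]  # the function prints the module name from the notebook scope
n = specify_trains_to_process(fake_key_data, max_trains=1000, min_trains=1)
assert n == 999
```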
%% Cell type:code id:e2eb2fc0-df9c-4887-9691-f81474f8c131 tags:
``` python
def convert_train(wid, index, tid, d):
    """Convert a Gotthard2 train from 12bit to 10bit."""
    d_10bit = np.zeros_like(d[instr_mod_src]["data.adc"], dtype=np.float32)
    convert_to_10bit(d[instr_mod_src]["data.adc"], lut, d_10bit)
    data_10bit[index, ...] = d_10bit
```
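%% Cell type:markdown tags:
`convert_train` follows the pasha kernel convention: it receives the worker id, the entry index, the train id and the train data, and writes into a shared array allocated with `context.alloc`. A minimal, self-contained sketch of that pattern with a plain NumPy array (all names here are illustrative only):
%% Cell type:code tags:
``` python
# Tiny illustration of the pasha pattern used below: kernels receive
# (worker_id, index, value) and write into a shared-memory array from context.alloc().
demo_ctx = psh.context.ProcessContext(num_workers=2)
inp = np.arange(6, dtype=np.float32).reshape(3, 2)
out = demo_ctx.alloc(shape=inp.shape, dtype=np.float32)

def double_row(wid, index, row):
    out[index] = row * 2

demo_ctx.map(double_row, inp)
print(out)  # each row of inp doubled
```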
%% Cell type:code id:4e8ffeae tags:
``` python
# Calculate noise and offset per pixel and global average, std and median.
noise_map = dict()
offset_map = dict()
badpixels_map = dict()

context = psh.context.ProcessContext(num_workers=multiprocessing.cpu_count())

for mod in karabo_da:
    # Path to the pixels' ADC values.
    instr_mod_src = instrument_src.format(int(mod[-2:]))
    data_path = "INSTRUMENT/" + instr_mod_src + "/data"

    # TODO: Validate the final shape to store constants.
    cshape = (3, 2, 1280)

    # Allocate the shared-memory constant maps once per module,
    # so the results of all three gain runs are accumulated.
    offset_map[mod] = context.alloc(shape=cshape, dtype=np.float32)
    noise_map[mod] = context.alloc(like=offset_map[mod])
    badpixels_map[mod] = context.alloc(like=offset_map[mod], dtype=np.uint32)

    for run_num, [gain, run_dc] in run_dcs_dict.items():
        step_timer.start()
        n_trains = specify_trains_to_process(
            run_dc[instr_mod_src, "data.adc"], max_trains, min_trains)

        # Select the requested number of trains to process.
        dc = run_dc.select(
            instr_mod_src, require_all=True).select_trains(np.s_[:n_trains])  # noqa
        step_timer.done_step("preparing raw data")

        step_timer.start()
        # Convert 12bit data to 10bit.
        data_10bit = context.alloc(
            shape=dc[instr_mod_src, "data.adc"].shape,
            dtype=np.float32,
        )
        context.map(convert_train, dc)
        step_timer.done_step("convert to 10bit")

        step_timer.start()
        # Split even and odd data to calculate the two storage cell constants.
        even_data = data_10bit[:, ::2, :]
        odd_data = data_10bit[:, 1::2, :]

        data_gain = dc[instr_mod_src, "data.gain"].ndarray()
        even_gain = data_gain[:, ::2, :]
        odd_gain = data_gain[:, 1::2, :]

        def offset_noise_cell(wid, index, d):
            offset_map[mod][gain, index, ...] = np.mean(d, axis=(0, 1))
            noise_map[mod][gain, index, ...] = np.std(d, axis=(0, 1))

        context.map(offset_noise_cell, (even_data, odd_data))

        # The expected raw gain value for the last gain stage (G2) is 3.
        raw_g = 3 if gain == 2 else gain

        def badpixels_cell(wid, index, g):
            """Check for wrong gain values.
            Flag pixels with a wrong gain value across all trains for each cell."""
            badpixels_map[mod][gain, index,
                               np.mean(g, axis=(0, 1)) != raw_g] |= BadPixels.WRONG_GAIN_VALUE.value

        context.map(badpixels_cell, (even_gain, odd_gain))
        step_timer.done_step("Processing darks")
```
%% Output
DA01 has 1042 trains with empty frames out of 2041 trains
Processing 999 trains.
preparing raw data: 0.0 s
convert to 10bit: 4.3 s
Processing darks: 12.1 s
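%% Cell type:markdown tags:
Each train interleaves frames from the two storage cells, which is why the cell above splits `data_10bit` into even and odd frames before averaging. A synthetic sketch of that reduction (array sizes are illustrative, not the real frame counts):
%% Cell type:code tags:
``` python
# Synthetic illustration of the even/odd storage-cell split: taking every second
# frame and averaging over trains and frames yields one value per cell and strip.
rng = np.random.default_rng(0)
trains, frames, strips = 4, 6, 1280  # illustrative sizes only
toy = rng.normal(100., 5., size=(trains, frames, strips)).astype(np.float32)

even = toy[:, ::2, :]   # storage cell 0
odd = toy[:, 1::2, :]   # storage cell 1

toy_offset = np.stack([even.mean(axis=(0, 1)), odd.mean(axis=(0, 1))])
toy_noise = np.stack([even.std(axis=(0, 1)), odd.std(axis=(0, 1))])
print(toy_offset.shape, toy_noise.shape)  # (2, 1280) (2, 1280) -> one row per storage cell
```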
%% Cell type:code id:4b81a3e7 tags:
``` python
badpixels_map[mod].shape
```
%% Output
(3, 2, 1280)
%% Cell type:code id:7ec0e569 tags:
``` python
np.where(badpixels_map[mod] != 0)
```
%% Output
array([], dtype=uint8)
%% Cell type:code id:3fc17e05-17ab-4ac4-9e79-c95399278bb9 tags:
``` python
def print_bp_entry(bp):
    print("{:<30s} {:032b} -> {}".format(bp.name, bp.value, int(bp.value)))

print_bp_entry(BadPixels.OFFSET_OUT_OF_THRESHOLD)
print_bp_entry(BadPixels.NOISE_OUT_OF_THRESHOLD)
print_bp_entry(BadPixels.OFFSET_NOISE_EVAL_ERROR)
print_bp_entry(BadPixels.WRONG_GAIN_VALUE)

def eval_bpidx(d):
    # Median and std over the strip axis, per gain and storage cell.
    mdn = np.nanmedian(d, axis=2)[:, :, None]
    std = np.nanstd(d, axis=2)[:, :, None]
    idx = (d > badpixel_threshold_sigma * std + mdn) | (d < -badpixel_threshold_sigma * std + mdn)
    return idx
```
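%% Cell type:markdown tags:
The bad pixel constant is a bitmask: every reason is a single bit, so several reasons can be OR-ed into the same entry and decoded again later. A short demonstration of that encoding. Note that `badpixel_threshold_sigma`, used by `eval_bpidx` above, is not yet defined in the parameter cell; the value below is a stand-in for this sketch only.
%% Cell type:code tags:
``` python
# Demonstration of the bad pixel bitmask logic: flags are OR-ed per pixel.
# NOTE: badpixel_threshold_sigma is not defined in the parameter cell yet;
# the value below is a local stand-in for this demonstration only.
badpixel_threshold_sigma = 5.

toy_mask = np.zeros(4, dtype=np.uint32)
toy_mask[1] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value
toy_mask[1] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value
toy_mask[3] |= BadPixels.WRONG_GAIN_VALUE.value

for i, v in enumerate(toy_mask):
    reasons = [bp.name for bp in BadPixels if int(v) & bp.value]
    print(i, f"{int(v):#010x}", reasons or ["GOOD"])
```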
%% Cell type:code id:40c34cc5-fe93-4b83-bf39-f465f37c40b4 tags:
``` python
from XFELDetAna.plotting.heatmap import heatmapPlot

step_timer.start()
for mod in karabo_da:
    display(Markdown(f"### Badpixels for module {mod}:"))

    badpixels_map[mod][eval_bpidx(offset_map[mod])] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value
    badpixels_map[mod][~np.isfinite(offset_map[mod])] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value
    badpixels_map[mod][eval_bpidx(noise_map[mod])] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value
    badpixels_map[mod][~np.isfinite(noise_map[mod])] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value

    # TODO: define offset_abs_threshold ([low, high] absolute limits per gain) in the parameter cell.
    offset_abs_threshold = np.array(offset_abs_threshold)
    badpixels_map[mod][
        (offset_map[mod] < offset_abs_threshold[0][:, None, None]) |
        (offset_map[mod] > offset_abs_threshold[1][:, None, None])
    ] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value  # noqa

    for gain in range(3):
        heatmapPlot(
            badpixels_map[mod][gain],
            y_label="Row",
            x_label="Column",
            lut_label="Badpixels",
            aspect=1.,
            vmin=0, vmax=5,
            title=f"Even / Odd Bad pixel map G{gain} - Module {mod}",
        )
        plt.show()
step_timer.done_step("Creating bad pixels constant and plotting it.")
```
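%% Cell type:markdown tags:
As a quick cross-check of the maps just created, the flagged fraction per gain and storage cell can be summarized directly from `badpixels_map` (optional, purely informational):
%% Cell type:code tags:
``` python
# Optional summary: fraction of flagged strips per gain and storage cell,
# computed from the bad pixel masks produced above.
for mod in karabo_da:
    frac = (badpixels_map[mod] != 0).mean(axis=2) * 100
    for gain in range(frac.shape[0]):
        for cell in range(frac.shape[1]):
            print(f"{mod} G{gain} cell {cell}: {frac[gain, cell]:.2f}% bad strips")
```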
%% Cell type:code id:6e10ed93-66de-4fb1-bf97-f8d25af22edb tags:
``` python
# # Set the operating condition
# # TODO: add the final conditions for constants.
# condition = Conditions.Dark.Gotthard2(
#     bias_voltage=bias_voltage,
# )

# db_modules = get_pdu_from_db(
#     karabo_id=karabo_id,
#     karabo_da=karabo_da,
#     constant=Constants.Gotthard2.Offset(),
#     condition=condition,
#     cal_db_interface=cal_db_interface,
#     snapshot_at=creation_time)
```
%% Cell type:code id:fde8e1cf-bc74-462f-b6e5-cfee8279090d tags:
``` python
from XFELDetAna.plotting.heatmap import heatmapPlot

unit = '[ADCu]'

for mod in karabo_da:
    for _, [gain, _] in run_dcs_dict.items():
        heatmapPlot(
            offset_map[mod][gain],
            y_label="Row",
            x_label="Column",
            lut_label=unit,
            title=f"Even / Odd Offset map G{gain} - Module {mod}",  # TODO: add PDU name ({pdu})
        )
        plt.show()

        heatmapPlot(
            noise_map[mod][gain],
            y_label="Row",
            x_label="Column",
            lut_label=unit,
            title=f"Even / Odd noise map G{gain} - Module {mod}",  # TODO: add PDU name ({pdu})
        )
        plt.show()
```
%% Cell type:code id:1c4eddf7-7d6e-49f4-8cbb-12d2bc496a8f tags:
``` python
step_timer.start()
for mod, db_mod in zip(karabo_da, db_modules):
    constants = {
        'Offset': offset_map[mod],
        'Noise': noise_map[mod],
    }

    md = None

    for key, const_data in constants.items():
        const = getattr(Constants.Gotthard2, key)()
        const.data = const_data

        if db_output:
            md = send_to_db(
                db_module=db_mod,
                karabo_id=karabo_id,
                constant=const,
                condition=condition,
                file_loc=file_loc,
                report_path=report,
                cal_db_interface=cal_db_interface,
                creation_time=creation_time,
                timeout=cal_db_timeout,
            )
        if local_output:
            md = save_const_to_h5(
                db_module=db_mod,
                karabo_id=karabo_id,
                constant=const,
                condition=condition,
                data=const.data,
                file_loc=file_loc,
                report=report,
                creation_time=creation_time,
                out_folder=out_folder,
            )
            print(f"Calibration constant {key} is stored locally at {out_folder}.\n")

    print("Constants parameter conditions are:\n")
    # TODO: add the final conditions for constants.
    print(
        f"• Bias voltage: {bias_voltage}\n"
        f"• Creation time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\n")  # noqa

step_timer.done_step("Injecting constants.")
```
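%% Cell type:markdown tags:
`get_from_db` is imported above but not used yet. Once the Gotthard2 conditions are finalized, the injected constants could be read back for validation along these lines; the call is kept commented out, like the condition cell above, and the argument names are assumptions mirroring the `send_to_db` call rather than a verified signature.
%% Cell type:code tags:
``` python
# Hedged sketch: read back the injected Offset constant for validation once the
# conditions are finalized. Argument names are assumptions, not a verified API.
# retrieved, metadata = get_from_db(
#     karabo_id=karabo_id,
#     karabo_da=karabo_da[0],
#     constant=Constants.Gotthard2.Offset(),
#     condition=condition,
#     empty_constant=None,
#     cal_db_interface=cal_db_interface,
#     creation_time=creation_time,
#     timeout=cal_db_timeout,
# )
# if retrieved is not None:
#     print("Retrieved Offset with shape:", retrieved.shape)
```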