Skip to content
Snippets Groups Projects
Commit a7215c50 authored by David Hammer's avatar David Hammer
Browse files

Old const background retrieval, handle qm, minor cleanup

This commit does too many things, sorry, but stuff was all tangled up:
- clean up a bit (imports, unused variables)
- map to PDU with one query instead of in loop (each query gets all anyway)
- start fetching old constants (for comparison) in the background
- add qm layer to dict holding old constants (see issue #49)
parent 05327835
No related branches found
No related tags found
2 merge requests!492[AGIPD][DARK] Correctly retrieve old constants for comparison,!465[AGIPD][darks] speed up darks processing
%% Cell type:markdown id: tags: %% Cell type:markdown id: tags:
# AGIPD Characterize Dark Images # # AGIPD Characterize Dark Images #
Author: S. Hauf, Version: 0.1 Author: S. Hauf, Version: 0.1
The following code analyzes a set of dark images taken with the AGIPD detector to deduce detector offsets, noise, bad-pixel maps and thresholding. All four types of constants are evaluated per-pixel and per-memory cell. Data for the detector's three gain stages needs to be present, separated into separate runs. The following code analyzes a set of dark images taken with the AGIPD detector to deduce detector offsets, noise, bad-pixel maps and thresholding. All four types of constants are evaluated per-pixel and per-memory cell. Data for the detector's three gain stages needs to be present, separated into separate runs.
The evaluated calibration constants are stored locally and injected in the calibration data base. The evaluated calibration constants are stored locally and injected in the calibration data base.
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
in_folder = "/gpfs/exfel/d/raw/CALLAB/202031/p900113" # path to input data, required in_folder = "/gpfs/exfel/d/raw/CALLAB/202031/p900113" # path to input data, required
out_folder = "/gpfs/exfel/data/scratch/hammerd/agipd-fixed-gain" # path to output to, required out_folder = "/gpfs/exfel/data/scratch/hammerd/agipd-fixed-gain" # path to output to, required
sequences = [0] # sequence files to evaluate. sequences = [0] # sequence files to evaluate.
modules = [-1] # list of modules to evaluate, RANGE ALLOWED modules = [-1] # list of modules to evaluate, RANGE ALLOWED
run_high = 9985 # run number in which high gain data was recorded, required run_high = 9985 # run number in which high gain data was recorded, required
run_med = 9984 # run number in which medium gain data was recorded, required run_med = 9984 # run number in which medium gain data was recorded, required
run_low = 9983 # run number in which low gain data was recorded, required run_low = 9983 # run number in which low gain data was recorded, required
operation_mode = "ADAPTIVE_GAIN" # Detector operation mode, optional (defaults to "ADAPTIVE_GAIN") operation_mode = "ADAPTIVE_GAIN" # Detector operation mode, optional (defaults to "ADAPTIVE_GAIN")
karabo_id = "HED_DET_AGIPD500K2G" # karabo karabo_id karabo_id = "HED_DET_AGIPD500K2G" # karabo karabo_id
karabo_da = ['-1'] # a list of data aggregators names, Default [-1] for selecting all data aggregators karabo_da = ['-1'] # a list of data aggregators names, Default [-1] for selecting all data aggregators
receiver_id = "{}CH0" # inset for receiver devices receiver_id = "{}CH0" # inset for receiver devices
path_template = 'RAW-R{:04d}-{}-S{:05d}.h5' # the template to use to access data path_template = 'RAW-R{:04d}-{}-S{:05d}.h5' # the template to use to access data
h5path = '/INSTRUMENT/{}/DET/{}:xtdf/image' # path in the HDF5 file to images h5path = '/INSTRUMENT/{}/DET/{}:xtdf/image' # path in the HDF5 file to images
h5path_idx = '/INDEX/{}/DET/{}:xtdf/image' # path in the HDF5 file to images h5path_idx = '/INDEX/{}/DET/{}:xtdf/image' # path in the HDF5 file to images
h5path_ctrl = '/CONTROL/{}/MDL/FPGA_COMP' # path to control information h5path_ctrl = '/CONTROL/{}/MDL/FPGA_COMP' # path to control information
karabo_id_control = "HED_EXP_AGIPD500K2G" # karabo-id for control device ' karabo_id_control = "HED_EXP_AGIPD500K2G" # karabo-id for control device '
karabo_da_control = "AGIPD500K2G00" # karabo DA for control information karabo_da_control = "AGIPD500K2G00" # karabo DA for control information
use_dir_creation_date = True # use dir creation date as data production reference date use_dir_creation_date = True # use dir creation date as data production reference date
cal_db_interface = "tcp://max-exfl016:8020" # the database interface to use cal_db_interface = "tcp://max-exfl016:8020" # the database interface to use
cal_db_timeout = 3000000 # timeout on caldb requests" cal_db_timeout = 3000000 # timeout on caldb requests"
local_output = True # output constants locally local_output = True # output constants locally
db_output = False # output constants to database db_output = False # output constants to database
mem_cells = 0 # number of memory cells used, set to 0 to automatically infer mem_cells = 0 # number of memory cells used, set to 0 to automatically infer
bias_voltage = 0 # detector bias voltage bias_voltage = 0 # detector bias voltage
gain_setting = 0.1 # the gain setting, use 0.1 to try to auto-determine gain_setting = 0.1 # the gain setting, use 0.1 to try to auto-determine
acq_rate = 0. # the detector acquisition rate, use 0 to try to auto-determine acq_rate = 0. # the detector acquisition rate, use 0 to try to auto-determine
interlaced = False # assume interlaced data format, for data prior to Dec. 2017 interlaced = False # assume interlaced data format, for data prior to Dec. 2017
rawversion = 2 # RAW file format version rawversion = 2 # RAW file format version
thresholds_offset_sigma = 3. # offset sigma thresholds for offset deduced bad pixels thresholds_offset_sigma = 3. # offset sigma thresholds for offset deduced bad pixels
thresholds_offset_hard = [0, 0] # For setting the same threshold offset for the 3 gains. Left for backwards compatibility. Default [0, 0] to take the following parameters. thresholds_offset_hard = [0, 0] # For setting the same threshold offset for the 3 gains. Left for backwards compatibility. Default [0, 0] to take the following parameters.
thresholds_offset_hard_hg = [3000, 7000] # High-gain thresholds in absolute ADU terms for offset deduced bad pixels thresholds_offset_hard_hg = [3000, 7000] # High-gain thresholds in absolute ADU terms for offset deduced bad pixels
thresholds_offset_hard_mg = [6000, 10000] # Medium-gain thresholds in absolute ADU terms for offset deduced bad pixels thresholds_offset_hard_mg = [6000, 10000] # Medium-gain thresholds in absolute ADU terms for offset deduced bad pixels
thresholds_offset_hard_lg = [6000, 10000] # Low-gain thresholds in absolute ADU terms for offset deduced bad pixels thresholds_offset_hard_lg = [6000, 10000] # Low-gain thresholds in absolute ADU terms for offset deduced bad pixels
thresholds_offset_hard_hg_fixed = [3500, 6500] # Same as thresholds_offset_hard_hg, but for fixed gain operation thresholds_offset_hard_hg_fixed = [3500, 6500] # Same as thresholds_offset_hard_hg, but for fixed gain operation
thresholds_offset_hard_mg_fixed = [3500, 6500] # Same as thresholds_offset_hard_mg, but for fixed gain operation thresholds_offset_hard_mg_fixed = [3500, 6500] # Same as thresholds_offset_hard_mg, but for fixed gain operation
thresholds_offset_hard_lg_fixed = [3500, 6500] # Same as thresholds_offset_hard_lg, but for fixed gain operation thresholds_offset_hard_lg_fixed = [3500, 6500] # Same as thresholds_offset_hard_lg, but for fixed gain operation
thresholds_noise_sigma = 5. # noise sigma thresholds for offset deduced bad pixels thresholds_noise_sigma = 5. # noise sigma thresholds for offset deduced bad pixels
thresholds_noise_hard = [0, 0] # For setting the same threshold noise for the 3 gains. Left for backwards compatibility. Default [0, 0] to take the following parameters. thresholds_noise_hard = [0, 0] # For setting the same threshold noise for the 3 gains. Left for backwards compatibility. Default [0, 0] to take the following parameters.
thresholds_noise_hard_hg = [4, 20] # High-gain thresholds in absolute ADU terms for offset deduced bad pixels thresholds_noise_hard_hg = [4, 20] # High-gain thresholds in absolute ADU terms for offset deduced bad pixels
thresholds_noise_hard_mg = [4, 20] # Medium-gain thresholds in absolute ADU terms for offset deduced bad pixels thresholds_noise_hard_mg = [4, 20] # Medium-gain thresholds in absolute ADU terms for offset deduced bad pixels
thresholds_noise_hard_lg = [4, 20] # Low-gain thresholds in absolute ADU terms for offset deduced bad pixels thresholds_noise_hard_lg = [4, 20] # Low-gain thresholds in absolute ADU terms for offset deduced bad pixels
thresholds_gain_sigma = 5. # Gain separation sigma threshold thresholds_gain_sigma = 5. # Gain separation sigma threshold
high_res_badpix_3d = False # set this to True if you need high-resolution 3d bad pixel plots. ~7mins extra time for 64 memory cells high_res_badpix_3d = False # set this to True if you need high-resolution 3d bad pixel plots. ~7mins extra time for 64 memory cells
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
import os import os
from collections import OrderedDict from collections import OrderedDict
from datetime import datetime from datetime import datetime
from typing import Tuple from typing import Tuple
import dateutil.parser import dateutil.parser
import h5py import h5py
import matplotlib import matplotlib
import numpy as np import numpy as np
import pasha as psh import pasha as psh
import tabulate import tabulate
import yaml import yaml
matplotlib.use('agg') matplotlib.use('agg')
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
from IPython.display import Latex, Markdown, display from IPython.display import Latex, Markdown, display
%matplotlib inline %matplotlib inline
import itertools import itertools
import multiprocessing import multiprocessing
from cal_tools.agipdlib import ( from cal_tools.agipdlib import (
get_acq_rate, get_acq_rate,
get_bias_voltage, get_bias_voltage,
get_gain_mode, get_gain_mode,
get_gain_setting, get_gain_setting,
get_num_cells, get_num_cells,
) )
from cal_tools.enums import AgipdGainMode, BadPixels from cal_tools.enums import AgipdGainMode, BadPixels
from cal_tools.plotting import ( from cal_tools.plotting import (
create_constant_overview, create_constant_overview,
plot_badpix_3d, plot_badpix_3d,
show_overview, show_overview,
show_processed_modules, show_processed_modules,
) )
from cal_tools.tools import ( from cal_tools.tools import (
get_dir_creation_date, get_dir_creation_date,
get_from_db, get_from_db,
get_pdu_from_db,
get_random_db_interface, get_random_db_interface,
get_report, get_report,
map_gain_stages, map_gain_stages,
module_index_to_qm, module_index_to_qm,
run_prop_seq_from_path, run_prop_seq_from_path,
save_const_to_h5, save_const_to_h5,
send_to_db, send_to_db,
) )
from iCalibrationDB import Conditions, Constants, Detectors import iCalibrationDB
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
# insert control device if format string (does nothing otherwise) # insert control device if format string (does nothing otherwise)
h5path_ctrl = h5path_ctrl.format(karabo_id_control) h5path_ctrl = h5path_ctrl.format(karabo_id_control)
max_cells = mem_cells max_cells = mem_cells
offset_runs = OrderedDict() offset_runs = OrderedDict()
offset_runs["high"] = run_high offset_runs["high"] = run_high
offset_runs["med"] = run_med offset_runs["med"] = run_med
offset_runs["low"] = run_low offset_runs["low"] = run_low
creation_time=None creation_time=None
if use_dir_creation_date: if use_dir_creation_date:
creation_time = get_dir_creation_date(in_folder, run_high) creation_time = get_dir_creation_date(in_folder, run_high)
print(f"Using {creation_time} as creation time of constant.") print(f"Using {creation_time} as creation time of constant.")
run, prop, seq = run_prop_seq_from_path(in_folder) run, prop, seq = run_prop_seq_from_path(in_folder)
cal_db_interface = get_random_db_interface(cal_db_interface) cal_db_interface = get_random_db_interface(cal_db_interface)
print(f'Calibration database interface: {cal_db_interface}') print(f'Calibration database interface: {cal_db_interface}')
instrument = karabo_id.split("_")[0] instrument = karabo_id.split("_")[0]
if instrument == "SPB": if instrument == "SPB":
dinstance = "AGIPD1M1" dinstance = "AGIPD1M1"
nmods = 16 nmods = 16
elif instrument == "MID": elif instrument == "MID":
dinstance = "AGIPD1M2" dinstance = "AGIPD1M2"
nmods = 16 nmods = 16
elif instrument == "HED": elif instrument == "HED":
dinstance = "AGIPD500K" dinstance = "AGIPD500K"
nmods = 8 nmods = 8
control_names = [f'{in_folder}/r{r:04d}/RAW-R{r:04d}-{karabo_da_control}-S00000.h5' control_names = [f'{in_folder}/r{r:04d}/RAW-R{r:04d}-{karabo_da_control}-S00000.h5'
for r in (run_high, run_med, run_low)] for r in (run_high, run_med, run_low)]
if operation_mode not in ("ADAPTIVE_GAIN", "FIXED_GAIN"): if operation_mode not in ("ADAPTIVE_GAIN", "FIXED_GAIN"):
print(f"WARNING: unknown operation_mode \"{operation_mode}\" parameter set") print(f"WARNING: unknown operation_mode \"{operation_mode}\" parameter set")
run_gain_modes = [get_gain_mode(fn, h5path_ctrl) for fn in control_names] run_gain_modes = [get_gain_mode(fn, h5path_ctrl) for fn in control_names]
if all(gm == AgipdGainMode.ADAPTIVE_GAIN for gm in run_gain_modes): if all(gm == AgipdGainMode.ADAPTIVE_GAIN for gm in run_gain_modes):
fixed_gain_mode = False fixed_gain_mode = False
if operation_mode == "FIXED_GAIN": if operation_mode == "FIXED_GAIN":
print("WARNING: operation_mode parameter is FIXED_GAIN, slow data indicates adaptive gain") print("WARNING: operation_mode parameter is FIXED_GAIN, slow data indicates adaptive gain")
elif run_gain_modes == [AgipdGainMode.FIXED_HIGH_GAIN, AgipdGainMode.FIXED_MEDIUM_GAIN, AgipdGainMode.FIXED_LOW_GAIN]: elif run_gain_modes == [AgipdGainMode.FIXED_HIGH_GAIN, AgipdGainMode.FIXED_MEDIUM_GAIN, AgipdGainMode.FIXED_LOW_GAIN]:
if operation_mode == "ADAPTIVE_GAIN": if operation_mode == "ADAPTIVE_GAIN":
print("WARNING: operation_mode parameter is ADAPTIVE_GAIN, slow data indicates fixed gain") print("WARNING: operation_mode parameter is ADAPTIVE_GAIN, slow data indicates fixed gain")
fixed_gain_mode = True fixed_gain_mode = True
else: else:
print(f'Something is clearly wrong; slow data indicates gain modes {run_gain_modes}') print(f'Something is clearly wrong; slow data indicates gain modes {run_gain_modes}')
print(f"Detector in use is {karabo_id}") print(f"Detector in use is {karabo_id}")
print(f"Instrument {instrument}") print(f"Instrument {instrument}")
print(f"Detector instance {dinstance}") print(f"Detector instance {dinstance}")
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
runs = [run_high, run_med, run_low] runs = [run_high, run_med, run_low]
if gain_setting == 0.1: if gain_setting == 0.1:
if creation_time.replace(tzinfo=None) < dateutil.parser.parse('2020-01-31'): if creation_time.replace(tzinfo=None) < dateutil.parser.parse('2020-01-31'):
print("Set gain-setting to None for runs taken before 2020-01-31") print("Set gain-setting to None for runs taken before 2020-01-31")
gain_setting = None gain_setting = None
else: else:
try: try:
# extract gain setting and validate that all runs have the same setting # extract gain setting and validate that all runs have the same setting
gsettings = [] gsettings = []
for r in runs: for r in runs:
control_fname = '{}/r{:04d}/RAW-R{:04d}-{}-S00000.h5'.format(in_folder, r, r, control_fname = '{}/r{:04d}/RAW-R{:04d}-{}-S00000.h5'.format(in_folder, r, r,
karabo_da_control) karabo_da_control)
gsettings.append(get_gain_setting(control_fname, h5path_ctrl)) gsettings.append(get_gain_setting(control_fname, h5path_ctrl))
if not all(g == gsettings[0] for g in gsettings): if not all(g == gsettings[0] for g in gsettings):
raise ValueError(f"Different gain settings for the 3 input runs {gsettings}") raise ValueError(f"Different gain settings for the 3 input runs {gsettings}")
gain_setting = gsettings[0] gain_setting = gsettings[0]
except Exception as e: except Exception as e:
print(f'Error while reading gain setting from: \n{control_fname}') print(f'Error while reading gain setting from: \n{control_fname}')
print(f'Error: {e}') print(f'Error: {e}')
if "component not found" in str(e): if "component not found" in str(e):
print("Gain setting is not found in the control information") print("Gain setting is not found in the control information")
print("Data will not be processed") print("Data will not be processed")
sequences = [] sequences = []
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
if karabo_da[0] == '-1': if karabo_da[0] == '-1':
if modules[0] == -1: if modules[0] == -1:
modules = list(range(nmods)) modules = list(range(nmods))
karabo_da = ["AGIPD{:02d}".format(i) for i in modules] karabo_da = ["AGIPD{:02d}".format(i) for i in modules]
else: else:
modules = [int(x[-2:]) for x in karabo_da] modules = [int(x[-2:]) for x in karabo_da]
h5path = h5path.format(karabo_id, receiver_id) h5path = h5path.format(karabo_id, receiver_id)
h5path_idx = h5path_idx.format(karabo_id, receiver_id) h5path_idx = h5path_idx.format(karabo_id, receiver_id)
if bias_voltage == 0: if bias_voltage == 0:
# Read the bias voltage from files, if recorded. # Read the bias voltage from files, if recorded.
# If not available, make use of the historical voltage the detector is running at # If not available, make use of the historical voltage the detector is running at
bias_voltage = get_bias_voltage(control_names[0], karabo_id_control) bias_voltage = get_bias_voltage(control_names[0], karabo_id_control)
bias_voltage = bias_voltage if bias_voltage is not None else 300 bias_voltage = bias_voltage if bias_voltage is not None else 300
print("Parameters are:") print("Parameters are:")
print(f"Proposal: {prop}") print(f"Proposal: {prop}")
print(f"Memory cells: {mem_cells}/{max_cells}") print(f"Memory cells: {mem_cells}/{max_cells}")
print("Runs: {}".format([ v for v in offset_runs.values()])) print("Runs: {}".format([ v for v in offset_runs.values()]))
print(f"Sequences: {sequences}") print(f"Sequences: {sequences}")
print(f"Interlaced mode: {interlaced}") print(f"Interlaced mode: {interlaced}")
print(f"Using DB: {db_output}") print(f"Using DB: {db_output}")
print(f"Input: {in_folder}") print(f"Input: {in_folder}")
print(f"Output: {out_folder}") print(f"Output: {out_folder}")
print(f"Bias voltage: {bias_voltage}V") print(f"Bias voltage: {bias_voltage}V")
print(f"Gain setting: {gain_setting}") print(f"Gain setting: {gain_setting}")
print(f"Operation mode is {'fixed' if fixed_gain_mode else 'adaptive'} gain mode") print(f"Operation mode is {'fixed' if fixed_gain_mode else 'adaptive'} gain mode")
``` ```
%% Cell type:markdown id: tags: %% Cell type:markdown id: tags:
The following lines will create a queue of files which will then be executed module-parallel. Distinguishing between different gains. The following lines will create a queue of files which will then be executed module-parallel. Distinguishing between different gains.
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
# set everything up filewise # set everything up filewise
os.makedirs(out_folder, exist_ok=True) os.makedirs(out_folder, exist_ok=True)
gmf = map_gain_stages(in_folder, offset_runs, path_template, karabo_da, sequences) gmf = map_gain_stages(in_folder, offset_runs, path_template, karabo_da, sequences)
gain_mapped_files, total_sequences, total_file_size = gmf gain_mapped_files, total_sequences, total_file_size = gmf
print(f"Will process a total of {total_sequences} files.") print(f"Will process a total of {total_sequences} files.")
``` ```
%% Cell type:markdown id: tags: %% Cell type:markdown id: tags:
## Calculate Offsets, Noise and Thresholds ## ## Calculate Offsets, Noise and Thresholds ##
The calculation is performed per-pixel and per-memory-cell. Offsets are simply the median value for a set of dark data taken at a given gain, noise the standard deviation, and gain-bit values the medians of the gain array. The calculation is performed per-pixel and per-memory-cell. Offsets are simply the median value for a set of dark data taken at a given gain, noise the standard deviation, and gain-bit values the medians of the gain array.
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
if thresholds_offset_hard != [0, 0]: if thresholds_offset_hard != [0, 0]:
# if set, this will override the individual parameters # if set, this will override the individual parameters
thresholds_offset_hard = [thresholds_offset_hard] * 3 thresholds_offset_hard = [thresholds_offset_hard] * 3
elif fixed_gain_mode: elif fixed_gain_mode:
thresholds_offset_hard = [ thresholds_offset_hard = [
thresholds_offset_hard_hg_fixed, thresholds_offset_hard_hg_fixed,
thresholds_offset_hard_mg_fixed, thresholds_offset_hard_mg_fixed,
thresholds_offset_hard_lg_fixed, thresholds_offset_hard_lg_fixed,
] ]
else: else:
thresholds_offset_hard = [ thresholds_offset_hard = [
thresholds_offset_hard_hg, thresholds_offset_hard_hg,
thresholds_offset_hard_mg, thresholds_offset_hard_mg,
thresholds_offset_hard_lg, thresholds_offset_hard_lg,
] ]
print(f"Will use the following hard offset thresholds") print(f"Will use the following hard offset thresholds")
for name, value in zip(("High", "Medium", "Low"), thresholds_offset_hard): for name, value in zip(("High", "Medium", "Low"), thresholds_offset_hard):
print(f"- {name} gain: {value}") print(f"- {name} gain: {value}")
if thresholds_noise_hard != [0, 0]: if thresholds_noise_hard != [0, 0]:
thresholds_noise_hard = [thresholds_noise_hard] * 3 thresholds_noise_hard = [thresholds_noise_hard] * 3
else: else:
thresholds_noise_hard = [ thresholds_noise_hard = [
thresholds_noise_hard_hg, thresholds_noise_hard_hg,
thresholds_noise_hard_mg, thresholds_noise_hard_mg,
thresholds_noise_hard_lg, thresholds_noise_hard_lg,
] ]
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
def characterize_module(fast_data_filename: str, channel: int, gg: int) -> Tuple[np.array, np.array, np.array, np.array, int, np.array, int, float]: def characterize_module(fast_data_filename: str, channel: int, gg: int) -> Tuple[np.array, np.array, np.array, np.array, int, np.array, int, float]:
if max_cells == 0: if max_cells == 0:
num_cells = get_num_cells(fast_data_filename, karabo_id, channel) num_cells = get_num_cells(fast_data_filename, karabo_id, channel)
else: else:
num_cells = max_cells num_cells = max_cells
print(f"Using {num_cells} memory cells") print(f"Using {num_cells} memory cells")
if acq_rate == 0.: if acq_rate == 0.:
slow_paths = control_names[gg], karabo_id_control slow_paths = control_names[gg], karabo_id_control
fast_paths = fast_data_filename, karabo_id, channel fast_paths = fast_data_filename, karabo_id, channel
local_acq_rate = get_acq_rate(fast_paths, slow_paths) local_acq_rate = get_acq_rate(fast_paths, slow_paths)
else: else:
local_acq_rate = acq_rate local_acq_rate = acq_rate
local_thresholds_offset_hard = thresholds_offset_hard[gg] local_thresholds_offset_hard = thresholds_offset_hard[gg]
local_thresholds_noise_hard = thresholds_noise_hard[gg] local_thresholds_noise_hard = thresholds_noise_hard[gg]
h5path_f = h5path.format(channel) h5path_f = h5path.format(channel)
h5path_idx_f = h5path_idx.format(channel) h5path_idx_f = h5path_idx.format(channel)
with h5py.File(fast_data_filename, "r") as infile: with h5py.File(fast_data_filename, "r") as infile:
if rawversion == 2: if rawversion == 2:
count = np.squeeze(infile[f"{h5path_idx_f}/count"]) count = np.squeeze(infile[f"{h5path_idx_f}/count"])
first = np.squeeze(infile[f"{h5path_idx_f}/first"]) first = np.squeeze(infile[f"{h5path_idx_f}/first"])
last_index = int(first[count != 0][-1]+count[count != 0][-1]) last_index = int(first[count != 0][-1]+count[count != 0][-1])
first_index = int(first[count != 0][0]) first_index = int(first[count != 0][0])
else: else:
status = np.squeeze(infile[f"{h5path_idx_f}/status"]) status = np.squeeze(infile[f"{h5path_idx_f}/status"])
if np.count_nonzero(status != 0) == 0: if np.count_nonzero(status != 0) == 0:
return return
last = np.squeeze(infile[f"{h5path_idx_f}/last"]) last = np.squeeze(infile[f"{h5path_idx_f}/last"])
first = np.squeeze(infile[f"{h5path_idx_f}/first"]) first = np.squeeze(infile[f"{h5path_idx_f}/first"])
last_index = int(last[status != 0][-1]) + 1 last_index = int(last[status != 0][-1]) + 1
first_index = int(first[status != 0][0]) first_index = int(first[status != 0][0])
im = np.array(infile[f"{h5path_f}/data"][first_index:last_index,...]) im = np.array(infile[f"{h5path_f}/data"][first_index:last_index,...])
cellIds = np.squeeze(infile[f"{h5path_f}/cellId"][first_index:last_index,...]) cellIds = np.squeeze(infile[f"{h5path_f}/cellId"][first_index:last_index,...])
if interlaced: if interlaced:
if not fixed_gain_mode: if not fixed_gain_mode:
ga = im[1::2, 0, ...] ga = im[1::2, 0, ...]
im = im[0::2, 0, ...].astype(np.float32) im = im[0::2, 0, ...].astype(np.float32)
cellIds = cellIds[::2] cellIds = cellIds[::2]
else: else:
if not fixed_gain_mode: if not fixed_gain_mode:
ga = im[:, 1, ...] ga = im[:, 1, ...]
im = im[:, 0, ...].astype(np.float32) im = im[:, 0, ...].astype(np.float32)
im = np.rollaxis(im, 2) im = np.rollaxis(im, 2)
im = np.rollaxis(im, 2, 1) im = np.rollaxis(im, 2, 1)
if not fixed_gain_mode: if not fixed_gain_mode:
ga = np.rollaxis(ga, 2) ga = np.rollaxis(ga, 2)
ga = np.rollaxis(ga, 2, 1) ga = np.rollaxis(ga, 2, 1)
offset = psh.array((im.shape[0], im.shape[1], num_cells)) offset = psh.array((im.shape[0], im.shape[1], num_cells))
noise = psh.array((im.shape[0], im.shape[1], num_cells)) noise = psh.array((im.shape[0], im.shape[1], num_cells))
if fixed_gain_mode: if fixed_gain_mode:
gains = None gains = None
gains_std = None gains_std = None
else: else:
gains = psh.array((im.shape[0], im.shape[1], num_cells)) gains = psh.array((im.shape[0], im.shape[1], num_cells))
gains_std = psh.array((im.shape[0], im.shape[1], num_cells)) gains_std = psh.array((im.shape[0], im.shape[1], num_cells))
def process_cell(worker_id, array_index, cell_number): def process_cell(worker_id, array_index, cell_number):
cell_slice_index = (cellIds == cell_number) cell_slice_index = (cellIds == cell_number)
im_slice = im[..., cell_slice_index] im_slice = im[..., cell_slice_index]
offset[..., cell_number] = np.median(im_slice, axis=2) offset[..., cell_number] = np.median(im_slice, axis=2)
noise[..., cell_number] = np.std(im_slice, axis=2) noise[..., cell_number] = np.std(im_slice, axis=2)
if not fixed_gain_mode: if not fixed_gain_mode:
ga_slice = ga[..., cell_slice_index] ga_slice = ga[..., cell_slice_index]
gains[..., cell_number] = np.median(ga_slice, axis=2) gains[..., cell_number] = np.median(ga_slice, axis=2)
gains_std[..., cell_number] = np.std(ga_slice, axis=2) gains_std[..., cell_number] = np.std(ga_slice, axis=2)
psh.map(process_cell, np.arange(num_cells)) psh.map(process_cell, np.arange(num_cells))
# bad pixels # bad pixels
bp = np.zeros(offset.shape, np.uint32) bp = np.zeros(offset.shape, np.uint32)
# offset related bad pixels # offset related bad pixels
offset_mn = np.nanmedian(offset, axis=(0,1)) offset_mn = np.nanmedian(offset, axis=(0,1))
offset_std = np.nanstd(offset, axis=(0,1)) offset_std = np.nanstd(offset, axis=(0,1))
bp[(offset < offset_mn-thresholds_offset_sigma*offset_std) | bp[(offset < offset_mn-thresholds_offset_sigma*offset_std) |
(offset > offset_mn+thresholds_offset_sigma*offset_std)] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value (offset > offset_mn+thresholds_offset_sigma*offset_std)] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value
bp[(offset < local_thresholds_offset_hard[0]) | ( bp[(offset < local_thresholds_offset_hard[0]) | (
offset > local_thresholds_offset_hard[1])] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value offset > local_thresholds_offset_hard[1])] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value
bp[~np.isfinite(offset)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value bp[~np.isfinite(offset)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value
# noise related bad pixels # noise related bad pixels
noise_mn = np.nanmedian(noise, axis=(0,1)) noise_mn = np.nanmedian(noise, axis=(0,1))
noise_std = np.nanstd(noise, axis=(0,1)) noise_std = np.nanstd(noise, axis=(0,1))
bp[(noise < noise_mn-thresholds_noise_sigma*noise_std) | bp[(noise < noise_mn-thresholds_noise_sigma*noise_std) |
(noise > noise_mn+thresholds_noise_sigma*noise_std)] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value (noise > noise_mn+thresholds_noise_sigma*noise_std)] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value
bp[(noise < local_thresholds_noise_hard[0]) | (noise > local_thresholds_noise_hard[1])] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value bp[(noise < local_thresholds_noise_hard[0]) | (noise > local_thresholds_noise_hard[1])] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value
bp[~np.isfinite(noise)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value bp[~np.isfinite(noise)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value
return offset, noise, gains, gains_std, gg, bp, num_cells, local_acq_rate return offset, noise, gains, gains_std, gg, bp, num_cells, local_acq_rate
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
parallel_num_procs = 4 parallel_num_procs = 4
parallel_num_threads = multiprocessing.cpu_count() // parallel_num_procs parallel_num_threads = multiprocessing.cpu_count() // parallel_num_procs
psh.set_default_context("threads", num_workers=parallel_num_threads) psh.set_default_context("threads", num_workers=parallel_num_threads)
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
offset_g = OrderedDict() offset_g = OrderedDict()
noise_g = OrderedDict() noise_g = OrderedDict()
badpix_g = OrderedDict() badpix_g = OrderedDict()
if not fixed_gain_mode: if not fixed_gain_mode:
gain_g = OrderedDict() gain_g = OrderedDict()
gainstd_g = OrderedDict() gainstd_g = OrderedDict()
all_cells = [] all_cells = []
all_acq_rate = [] all_acq_rate = []
inp = [] inp = []
for gg, (gain, mapped_files) in enumerate(gain_mapped_files.items()): for gg, (gain, mapped_files) in enumerate(gain_mapped_files.items()):
dones = []
for i in modules: for i in modules:
qm = module_index_to_qm(i) qm = module_index_to_qm(i)
if qm in mapped_files and not mapped_files[qm].empty(): if qm in mapped_files and not mapped_files[qm].empty():
fname_in = mapped_files[qm].get() fname_in = mapped_files[qm].get()
print("Process file: ", fname_in) print(f"Process file: {fname_in} for {qm}")
dones.append(mapped_files[qm].empty())
else: else:
continue continue
inp.append((fname_in, i, gg)) inp.append((fname_in, i, gg))
with multiprocessing.Pool(processes=parallel_num_procs) as pool: with multiprocessing.Pool(processes=parallel_num_procs) as pool:
results = pool.starmap(characterize_module, inp) results = pool.starmap(characterize_module, inp)
for offset, noise, gains, gains_std, gg, bp, thiscell, thisacq in results: for offset, noise, gains, gains_std, gg, bp, thiscell, thisacq in results:
all_cells.append(thiscell) all_cells.append(thiscell)
all_acq_rate.append(thisacq) all_acq_rate.append(thisacq)
for i in modules: for i in modules:
qm = module_index_to_qm(i) qm = module_index_to_qm(i)
if qm not in offset_g: if qm not in offset_g:
offset_g[qm] = np.zeros((offset.shape[0], offset.shape[1], offset.shape[2], 3)) offset_g[qm] = np.zeros((offset.shape[0], offset.shape[1], offset.shape[2], 3))
noise_g[qm] = np.zeros_like(offset_g[qm]) noise_g[qm] = np.zeros_like(offset_g[qm])
badpix_g[qm] = np.zeros_like(offset_g[qm], np.uint32) badpix_g[qm] = np.zeros_like(offset_g[qm], np.uint32)
if not fixed_gain_mode: if not fixed_gain_mode:
gain_g[qm] = np.zeros_like(offset_g[qm]) gain_g[qm] = np.zeros_like(offset_g[qm])
gainstd_g[qm] = np.zeros_like(offset_g[qm]) gainstd_g[qm] = np.zeros_like(offset_g[qm])
offset_g[qm][...,gg] = offset offset_g[qm][...,gg] = offset
noise_g[qm][...,gg] = noise noise_g[qm][...,gg] = noise
badpix_g[qm][...,gg] = bp badpix_g[qm][...,gg] = bp
if not fixed_gain_mode: if not fixed_gain_mode:
gain_g[qm][...,gg] = gains gain_g[qm][...,gg] = gains
gainstd_g[qm][..., gg] = gains_std gainstd_g[qm][..., gg] = gains_std
max_cells = np.max(all_cells) max_cells = np.max(all_cells)
print(f"Using {max_cells} memory cells") print(f"Using {max_cells} memory cells")
acq_rate = np.max(all_acq_rate) acq_rate = np.max(all_acq_rate)
print(f"Using {acq_rate} MHz acquisition rate") print(f"Using {acq_rate} MHz acquisition rate")
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
# Add a badpixel due to bad gain separation # Add a badpixel due to bad gain separation
if not fixed_gain_mode: if not fixed_gain_mode:
for g in range(2): for g in range(2):
# Bad pixels during bad gain separation. # Bad pixels during bad gain separation.
# Fraction of pixels in the module with separation lower than "thresholds_gain_sigma". # Fraction of pixels in the module with separation lower than "thresholds_gain_sigma".
bad_sep = (gain_g[qm][..., g+1] - gain_g[qm][..., g]) / np.sqrt(gainstd_g[qm][..., g+1]**2 + gainstd_g[qm][..., g]**2) bad_sep = (gain_g[qm][..., g+1] - gain_g[qm][..., g]) / np.sqrt(gainstd_g[qm][..., g+1]**2 + gainstd_g[qm][..., g]**2)
badpix_g[qm][...,g+1][(bad_sep)<thresholds_gain_sigma]|= BadPixels.GAIN_THRESHOLDING_ERROR.value badpix_g[qm][...,g+1][(bad_sep)<thresholds_gain_sigma]|= BadPixels.GAIN_THRESHOLDING_ERROR.value
``` ```
%% Cell type:markdown id: tags: %% Cell type:markdown id: tags:
The thresholds for gain switching are then defined as the mean value between in individual gain bit levels. Note that these thresholds need to be refined with charge induced thresholds, as the two are not the same. The thresholds for gain switching are then defined as the mean value between in individual gain bit levels. Note that these thresholds need to be refined with charge induced thresholds, as the two are not the same.
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
if not fixed_gain_mode: if not fixed_gain_mode:
thresholds_g = {} thresholds_g = {}
for qm in gain_g.keys(): for qm in gain_g.keys():
thresholds_g[qm] = np.zeros((gain_g[qm].shape[0], gain_g[qm].shape[1], gain_g[qm].shape[2], 5)) thresholds_g[qm] = np.zeros((gain_g[qm].shape[0], gain_g[qm].shape[1], gain_g[qm].shape[2], 5))
thresholds_g[qm][...,0] = (gain_g[qm][...,1]+gain_g[qm][...,0])/2 thresholds_g[qm][...,0] = (gain_g[qm][...,1]+gain_g[qm][...,0])/2
thresholds_g[qm][...,1] = (gain_g[qm][...,2]+gain_g[qm][...,1])/2 thresholds_g[qm][...,1] = (gain_g[qm][...,2]+gain_g[qm][...,1])/2
for i in range(3): for i in range(3):
thresholds_g[qm][...,2+i] = gain_g[qm][...,i] thresholds_g[qm][...,2+i] = gain_g[qm][...,i]
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
res = OrderedDict() res = OrderedDict()
for i in modules: for i in modules:
qm = module_index_to_qm(i) qm = module_index_to_qm(i)
res[qm] = { res[qm] = {
'Offset': offset_g[qm], 'Offset': offset_g[qm],
'Noise': noise_g[qm], 'Noise': noise_g[qm],
'BadPixelsDark': badpix_g[qm] 'BadPixelsDark': badpix_g[qm]
} }
if not fixed_gain_mode: if not fixed_gain_mode:
res[qm]['ThresholdsDark'] = thresholds_g[qm] res[qm]['ThresholdsDark'] = thresholds_g[qm]
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
# Read report path and create file location tuple to add with the injection # Read report path and create file location tuple to add with the injection
proposal = list(filter(None, in_folder.strip('/').split('/')))[-2] proposal = list(filter(None, in_folder.strip('/').split('/')))[-2]
file_loc = 'proposal:{} runs:{} {} {}'.format(proposal, run_low, run_med, run_high) file_loc = 'proposal:{} runs:{} {} {}'.format(proposal, run_low, run_med, run_high)
report = get_report(out_folder) report = get_report(out_folder)
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
# TODO: add db_module when received from myMDC # TODO: add db_module when received from myMDC
# Create the modules dict of karabo_das and PDUs # Create the modules dict of karabo_das and PDUs
qm_dict = OrderedDict() qm_dict = OrderedDict()
for i, k_da in zip(modules, karabo_da): for i, k_da in zip(modules, karabo_da):
qm = module_index_to_qm(i) qm = module_index_to_qm(i)
qm_dict[qm] = { qm_dict[qm] = {
"karabo_da": k_da, "karabo_da": k_da,
"db_module": "" "db_module": ""
} }
# going through tools.get_pdu_from_db seems wasteful
all_pdus = iCalibrationDB.ConstantMetaData().retrieve_pdus_for_detector(
karabo_id=karabo_id,
receiver=cal_db_interface,
snapshot_at=creation_time.isoformat(),
timeout=cal_db_timeout
)
karabo_da_to_pdu = {d["karabo_da"]: d["pdu_physical_name"] for d in all_pdus}
for qm_attr in qm_dict.values():
qm_attr["db_module"] = karabo_da_to_pdu[qm_attr["karabo_da"]]
for pdu_dict in all_pdus:
this_karabo_da = pdu_dict["karabo_da"]
if this_karabo_da in qm_dict:
qm_dict[this_karabo_da]["db_module"] = pdu_dict["pdu_physical_name"]
```
%% Cell type:code id: tags:
``` python
with open(f"{out_folder}/module_mapping.yml","w") as fd:
yaml.safe_dump({
"module_mapping": {
qm: qm_dict[qm]["db_module"] for qm in qm_dict
}
}, fd)
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
# set the operating condition # set the operating condition
# note: iCalibrationDB only adds gain_mode if it is truthy, so we don't need to handle None # note: iCalibrationDB only adds gain_mode if it is truthy, so we don't need to handle None
condition = Conditions.Dark.AGIPD(memory_cells=max_cells, condition = Conditions.Dark.AGIPD(memory_cells=max_cells,
bias_voltage=bias_voltage, bias_voltage=bias_voltage,
acquisition_rate=acq_rate, acquisition_rate=acq_rate,
gain_setting=gain_setting, gain_setting=gain_setting,
gain_mode=fixed_gain_mode) gain_mode=fixed_gain_mode)
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
# Retrieve existing constants for comparison # Retrieve existing constants for comparison
old_const = {} qm_x_const = [(qm, const) for const in res[qm] for qm in res]
old_mdata = {}
print('Retrieve pre-existing constants for comparison.') print('Retrieve pre-existing constants for comparison.')
for qm in res: def boom(qm, const):
qm_db = qm_dict[qm] qm_db = qm_dict[qm]
karabo_da = qm_db["karabo_da"] this_karabo_da = qm_db["karabo_da"]
for const in res[qm]: dconst = getattr(iCalibrationDB.Constants.AGIPD, const)()
dconst = getattr(Constants.AGIPD, const)()
# This should be used in case of running notebook # This should be used in case of running notebook
# by a different method other than myMDC which already # by a different method other than myMDC which already
# sends CalCat info. # sends CalCat info.
# TODO: Set db_module to "" by default in the first cell # TODO: Set db_module to "" by default in the first cell
if not qm_db["db_module"]:
qm_db["db_module"] = get_pdu_from_db(karabo_id, karabo_da, dconst, data, mdata = get_from_db(
condition, cal_db_interface, karabo_id,
snapshot_at=creation_time)[0] this_karabo_da,
constant=dconst,
data, mdata = get_from_db(karabo_id, karabo_da, condition=condition,
dconst, condition, empty_constant=None,
None, cal_db_interface, cal_db_interface=cal_db_interface,
creation_time=creation_time, creation_time=creation_time,
verbosity=2, timeout=cal_db_timeout) verbosity=2,
timeout=cal_db_timeout
old_const[const] = data )
if mdata is not None and data is not None:
time = mdata.calibration_constant_version.begin_at if mdata is None or data is None:
old_mdata[const] = time.isoformat() timestamp = "Not found"
os.makedirs('{}/old/'.format(out_folder), exist_ok=True) else:
save_const_to_h5(qm_db["db_module"], karabo_id, timestamp = mdata.calibration_constant_version.begin_at.isoformat()
dconst, condition, data,
file_loc, report, creation_time, return data, timestamp
f'{out_folder}/old/')
else: old_retrieval_pool = multiprocessing.Pool()
old_mdata[const] = "Not found" old_retrieval_res = old_retrieval_pool.starmap_async(boom, qm_x_const)
with open(f"{out_folder}/module_mapping_{qm}.yml","w") as fd: ```
yaml.safe_dump({"module_mapping": {qm: qm_db["db_module"]}}, fd)
%% Cell type:code id: tags:
``` python
old_retrieval_pool.close()
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
md = None md = None
for qm in res: for qm in res:
karabo_da = qm_dict[qm]["karabo_da"]
db_module = qm_dict[qm]["db_module"] db_module = qm_dict[qm]["db_module"]
for const in res[qm]: for const in res[qm]:
dconst = getattr(Constants.AGIPD, const)() dconst = getattr(iCalibrationDB.Constants.AGIPD, const)()
dconst.data = res[qm][const] dconst.data = res[qm][const]
if db_output: if db_output:
md = send_to_db(db_module, karabo_id, dconst, condition, file_loc, md = send_to_db(db_module, karabo_id, dconst, condition, file_loc,
report, cal_db_interface, creation_time=creation_time, report, cal_db_interface, creation_time=creation_time,
timeout=cal_db_timeout) timeout=cal_db_timeout)
if local_output: if local_output:
md = save_const_to_h5(db_module, karabo_id, dconst, condition, dconst.data, md = save_const_to_h5(db_module, karabo_id, dconst, condition, dconst.data,
file_loc, report, creation_time, out_folder) file_loc, report, creation_time, out_folder)
print(f"Calibration constant {const} is stored locally.\n") print(f"Calibration constant {const} for {qm} is stored locally in {file_loc}.\n")
print("Constants parameter conditions are:\n") print("Constants parameter conditions are:\n")
print(f"• memory_cells: {max_cells}\n• bias_voltage: {bias_voltage}\n" print(f"• memory_cells: {max_cells}\n• bias_voltage: {bias_voltage}\n"
f"• acquisition_rate: {acq_rate}\n• gain_setting: {gain_setting}\n" f"• acquisition_rate: {acq_rate}\n• gain_setting: {gain_setting}\n"
f"• gain_mode: {fixed_gain_mode}\n" f"• gain_mode: {fixed_gain_mode}\n"
f"• creation_time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\n") f"• creation_time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\n")
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
mnames=[] mnames=[]
for i in modules: for i in modules:
qm = module_index_to_qm(i) qm = module_index_to_qm(i)
mnames.append(qm) mnames.append(qm)
display(Markdown(f'## Position of the module {qm} and its ASICs##')) display(Markdown(f'## Position of the module {qm} and its ASICs##'))
show_processed_modules(dinstance, constants=None, mnames=mnames, mode="position") show_processed_modules(dinstance, constants=None, mnames=mnames, mode="position")
``` ```
%% Cell type:markdown id: tags: %% Cell type:markdown id: tags:
## Single-Cell Overviews ## ## Single-Cell Overviews ##
Single cell overviews allow to identify potential effects on all memory cells, e.g. on sensor level. Additionally, they should serve as a first sanity check on expected behaviour, e.g. if structuring on the ASIC level is visible in the offsets, but otherwise no immediate artifacts are visible. Single cell overviews allow to identify potential effects on all memory cells, e.g. on sensor level. Additionally, they should serve as a first sanity check on expected behaviour, e.g. if structuring on the ASIC level is visible in the offsets, but otherwise no immediate artifacts are visible.
%% Cell type:markdown id: tags: %% Cell type:markdown id: tags:
### High Gain ### ### High Gain ###
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
cell = 3 cell = 3
gain = 0 gain = 0
show_overview(res, cell, gain, infix="{}-{}-{}".format(*offset_runs.values())) show_overview(res, cell, gain, infix="{}-{}-{}".format(*offset_runs.values()))
``` ```
%% Cell type:markdown id: tags: %% Cell type:markdown id: tags:
### Medium Gain ### ### Medium Gain ###
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
cell = 3 cell = 3
gain = 1 gain = 1
show_overview(res, cell, gain, infix="{}-{}-{}".format(*offset_runs.values())) show_overview(res, cell, gain, infix="{}-{}-{}".format(*offset_runs.values()))
``` ```
%% Cell type:markdown id: tags: %% Cell type:markdown id: tags:
### Low Gain ### ### Low Gain ###
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
cell = 3 cell = 3
gain = 2 gain = 2
show_overview(res, cell, gain, infix="{}-{}-{}".format(*offset_runs.values())) show_overview(res, cell, gain, infix="{}-{}-{}".format(*offset_runs.values()))
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
cols = {BadPixels.NOISE_OUT_OF_THRESHOLD.value: (BadPixels.NOISE_OUT_OF_THRESHOLD.name, '#FF000080'), cols = {BadPixels.NOISE_OUT_OF_THRESHOLD.value: (BadPixels.NOISE_OUT_OF_THRESHOLD.name, '#FF000080'),
BadPixels.OFFSET_NOISE_EVAL_ERROR.value: (BadPixels.OFFSET_NOISE_EVAL_ERROR.name, '#0000FF80'), BadPixels.OFFSET_NOISE_EVAL_ERROR.value: (BadPixels.OFFSET_NOISE_EVAL_ERROR.name, '#0000FF80'),
BadPixels.OFFSET_OUT_OF_THRESHOLD.value: (BadPixels.OFFSET_OUT_OF_THRESHOLD.name, '#00FF0080'), BadPixels.OFFSET_OUT_OF_THRESHOLD.value: (BadPixels.OFFSET_OUT_OF_THRESHOLD.name, '#00FF0080'),
BadPixels.GAIN_THRESHOLDING_ERROR.value: (BadPixels.GAIN_THRESHOLDING_ERROR.name, '#FF40FF40'), BadPixels.GAIN_THRESHOLDING_ERROR.value: (BadPixels.GAIN_THRESHOLDING_ERROR.name, '#FF40FF40'),
BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value: ('OFFSET_OUT_OF_THRESHOLD + NOISE_OUT_OF_THRESHOLD', '#DD00DD80'), BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value: ('OFFSET_OUT_OF_THRESHOLD + NOISE_OUT_OF_THRESHOLD', '#DD00DD80'),
BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value | BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value |
BadPixels.GAIN_THRESHOLDING_ERROR.value: ('MIXED', '#BFDF009F')} BadPixels.GAIN_THRESHOLDING_ERROR.value: ('MIXED', '#BFDF009F')}
if high_res_badpix_3d: if high_res_badpix_3d:
display(Markdown(""" display(Markdown("""
## Global Bad Pixel Behaviour ## ## Global Bad Pixel Behaviour ##
The following plots show the results of bad pixel evaluation for all evaluated memory cells. The following plots show the results of bad pixel evaluation for all evaluated memory cells.
Cells are stacked in the Z-dimension, while pixels values in x/y are rebinned with a factor of 2. Cells are stacked in the Z-dimension, while pixels values in x/y are rebinned with a factor of 2.
This excludes single bad pixels present only in disconnected pixels. This excludes single bad pixels present only in disconnected pixels.
Hence, any bad pixels spanning at least 4 pixels in the x/y-plane, or across at least two memory cells are indicated. Hence, any bad pixels spanning at least 4 pixels in the x/y-plane, or across at least two memory cells are indicated.
Colors encode the bad pixel type, or mixed type. Colors encode the bad pixel type, or mixed type.
""")) """))
gnames = ['High Gain', 'Medium Gain', 'Low Gain'] gnames = ['High Gain', 'Medium Gain', 'Low Gain']
for gain in range(3): for gain in range(3):
display(Markdown(f'### {gnames[gain]} ###')) display(Markdown(f'### {gnames[gain]} ###'))
for mod, data in badpix_g.items(): for mod, data in badpix_g.items():
plot_badpix_3d(data[...,gain], cols, title=mod, rebin_fac=1) plot_badpix_3d(data[...,gain], cols, title=mod, rebin_fac=1)
plt.show() plt.show()
``` ```
%% Cell type:markdown id: tags: %% Cell type:markdown id: tags:
## Aggregate values, and per Cell behaviour ## ## Aggregate values, and per Cell behaviour ##
The following tables and plots give an overview of statistical aggregates for each constant, as well as per cell behavior. The following tables and plots give an overview of statistical aggregates for each constant, as well as per cell behavior.
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
create_constant_overview(offset_g, "Offset (ADU)", max_cells, 4000, 8000, create_constant_overview(offset_g, "Offset (ADU)", max_cells, 4000, 8000,
badpixels=[badpix_g, np.nan]) badpixels=[badpix_g, np.nan])
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
create_constant_overview(noise_g, "Noise (ADU)", max_cells, 0, 100, create_constant_overview(noise_g, "Noise (ADU)", max_cells, 0, 100,
badpixels=[badpix_g, np.nan]) badpixels=[badpix_g, np.nan])
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
if not fixed_gain_mode: if not fixed_gain_mode:
# Plot only three gain threshold maps. # Plot only three gain threshold maps.
bp_thresh = OrderedDict() bp_thresh = OrderedDict()
for mod, con in badpix_g.items(): for mod, con in badpix_g.items():
bp_thresh[mod] = np.zeros((con.shape[0], con.shape[1], con.shape[2], 5), dtype=con.dtype) bp_thresh[mod] = np.zeros((con.shape[0], con.shape[1], con.shape[2], 5), dtype=con.dtype)
bp_thresh[mod][...,:2] = con[...,:2] bp_thresh[mod][...,:2] = con[...,:2]
bp_thresh[mod][...,2:] = con bp_thresh[mod][...,2:] = con
create_constant_overview(thresholds_g, "Threshold (ADU)", max_cells, 4000, 10000, 5, create_constant_overview(thresholds_g, "Threshold (ADU)", max_cells, 4000, 10000, 5,
badpixels=[bp_thresh, np.nan], badpixels=[bp_thresh, np.nan],
gmap=['HG-MG Threshold', 'MG-LG Threshold', 'High gain', 'Medium gain', 'low gain'], gmap=['HG-MG Threshold', 'MG-LG Threshold', 'High gain', 'Medium gain', 'low gain'],
marker=['d','d','','',''] marker=['d','d','','','']
) )
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
bad_pixel_aggregate_g = OrderedDict() bad_pixel_aggregate_g = OrderedDict()
for m, d in badpix_g.items(): for m, d in badpix_g.items():
bad_pixel_aggregate_g[m] = d.astype(np.bool).astype(np.float) bad_pixel_aggregate_g[m] = d.astype(np.bool).astype(np.float)
create_constant_overview(bad_pixel_aggregate_g, "Bad pixel fraction", max_cells, 0, 0.10, 3) create_constant_overview(bad_pixel_aggregate_g, "Bad pixel fraction", max_cells, 0, 0.10, 3)
``` ```
%% Cell type:markdown id: tags: %% Cell type:markdown id: tags:
## Summary tables ## ## Summary tables ##
The following tables show summary information for the evaluated module. Values for currently evaluated constants are compared with values for pre-existing constants retrieved from the calibration database. The following tables show summary information for the evaluated module. Values for currently evaluated constants are compared with values for pre-existing constants retrieved from the calibration database.
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
display(Markdown('The following pre-existing constants are used for comparison: \n')) # now we need the old constants
for key in old_mdata: old_const = {}
display(Markdown('**{}** at {}'.format(key, old_mdata[key]))) old_mdata = {}
old_retrieval_res.wait()
```
%% Cell type:code id: tags:
``` python
for (qm, const), (data, timestamp) in zip(qm_x_const, old_retrieval_res.get()):
old_const.setdefault(qm, {})[const] = data
old_mdata.setdefault(qm, {})[const] = timestamp
```
%% Cell type:code id: tags:
``` python
display(Markdown("The following pre-existing constants are used for comparison:"))
for qm, consts in old_mdata.items():
display(Markdown(f"- {qm}"))
for const in consts:
display(Markdown(f" - {const} at {consts[const]}"))
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
table = [] table = []
gain_names = ['High', 'Medium', 'Low'] gain_names = ['High', 'Medium', 'Low']
bits = [BadPixels.NOISE_OUT_OF_THRESHOLD, BadPixels.OFFSET_OUT_OF_THRESHOLD, BadPixels.OFFSET_NOISE_EVAL_ERROR, BadPixels.GAIN_THRESHOLDING_ERROR] bits = [BadPixels.NOISE_OUT_OF_THRESHOLD, BadPixels.OFFSET_OUT_OF_THRESHOLD, BadPixels.OFFSET_NOISE_EVAL_ERROR, BadPixels.GAIN_THRESHOLDING_ERROR]
for qm in badpix_g.keys(): for qm in badpix_g.keys():
for gain in range(3): for gain in range(3):
l_data = [] l_data = []
l_data_old = [] l_data_old = []
data = np.copy(badpix_g[qm][:,:,:,gain]) data = np.copy(badpix_g[qm][:,:,:,gain])
datau32 = data.astype(np.uint32) datau32 = data.astype(np.uint32)
l_data.append(len(datau32[datau32>0].flatten())) l_data.append(len(datau32[datau32>0].flatten()))
for bit in bits: for bit in bits:
l_data.append(np.count_nonzero(badpix_g[qm][:,:,:,gain] & bit.value)) l_data.append(np.count_nonzero(badpix_g[qm][:,:,:,gain] & bit.value))
if old_const['BadPixelsDark'] is not None: if old_const['BadPixelsDark'] is not None:
dataold = np.copy(old_const['BadPixelsDark'][:, :, :, gain]) dataold = np.copy(old_const['BadPixelsDark'][:, :, :, gain])
datau32old = dataold.astype(np.uint32) datau32old = dataold.astype(np.uint32)
l_data_old.append(len(datau32old[datau32old>0].flatten())) l_data_old.append(len(datau32old[datau32old>0].flatten()))
for bit in bits: for bit in bits:
l_data_old.append(np.count_nonzero(old_const['BadPixelsDark'][:, :, :, gain] & bit.value)) l_data_old.append(np.count_nonzero(old_const['BadPixelsDark'][:, :, :, gain] & bit.value))
l_data_name = ['All bad pixels', 'NOISE_OUT_OF_THRESHOLD', l_data_name = ['All bad pixels', 'NOISE_OUT_OF_THRESHOLD',
'OFFSET_OUT_OF_THRESHOLD', 'OFFSET_NOISE_EVAL_ERROR', 'GAIN_THRESHOLDING_ERROR'] 'OFFSET_OUT_OF_THRESHOLD', 'OFFSET_NOISE_EVAL_ERROR', 'GAIN_THRESHOLDING_ERROR']
l_threshold = ['', f'{thresholds_noise_sigma}' f'{thresholds_noise_hard[gain]}', l_threshold = ['', f'{thresholds_noise_sigma}' f'{thresholds_noise_hard[gain]}',
f'{thresholds_offset_sigma}' f'{thresholds_offset_hard[gain]}', f'{thresholds_offset_sigma}' f'{thresholds_offset_hard[gain]}',
'', f'{thresholds_gain_sigma}'] '', f'{thresholds_gain_sigma}']
for i in range(len(l_data)): for i in range(len(l_data)):
line = [f'{l_data_name[i]}, {gain_names[gain]} gain', l_threshold[i], l_data[i]] line = [f'{l_data_name[i]}, {gain_names[gain]} gain', l_threshold[i], l_data[i]]
if old_const['BadPixelsDark'] is not None: if old_const['BadPixelsDark'] is not None:
line += [l_data_old[i]] line += [l_data_old[i]]
else: else:
line += ['-'] line += ['-']
table.append(line) table.append(line)
table.append(['', '', '', '']) table.append(['', '', '', ''])
display(Markdown(''' display(Markdown('''
### Number of bad pixels ### ### Number of bad pixels ###
One pixel can be bad for different reasons, therefore, the sum of all types of bad pixels can be more than the number of all bad pixels. One pixel can be bad for different reasons, therefore, the sum of all types of bad pixels can be more than the number of all bad pixels.
''')) '''))
if len(table)>0: if len(table)>0:
md = display(Latex(tabulate.tabulate(table, tablefmt='latex', md = display(Latex(tabulate.tabulate(table, tablefmt='latex',
headers=["Pixel type", "Threshold", headers=["Pixel type", "Threshold",
"New constant", "Old constant"]))) "New constant", "Old constant"])))
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
header = ['Parameter', header = ['Parameter',
"New constant", "Old constant ", "New constant", "Old constant ",
"New constant", "Old constant ", "New constant", "Old constant ",
"New constant", "Old constant ", "New constant", "Old constant ",
"New constant", "Old constant "] "New constant", "Old constant "]
if fixed_gain_mode: if fixed_gain_mode:
constants = ['Offset', 'Noise'] constants = ['Offset', 'Noise']
else: else:
constants = ['Offset', 'Noise', 'ThresholdsDark'] constants = ['Offset', 'Noise', 'ThresholdsDark']
constants_x_qms = list(itertools.product(constants, res.keys())) constants_x_qms = list(itertools.product(constants, res.keys()))
def compute_table(const, qm): def compute_table(const, qm):
if const == 'ThresholdsDark': if const == 'ThresholdsDark':
table = [['','HG-MG threshold', 'HG-MG threshold', 'MG-LG threshold', 'MG-LG threshold']] table = [['','HG-MG threshold', 'HG-MG threshold', 'MG-LG threshold', 'MG-LG threshold']]
else: else:
table = [['','High gain', 'High gain', 'Medium gain', 'Medium gain', 'Low gain', 'Low gain']] table = [['','High gain', 'High gain', 'Medium gain', 'Medium gain', 'Low gain', 'Low gain']]
compare_with_old_constant = old_const[const] is not None and old_const['BadPixelsDark'] is not None compare_with_old_constant = old_const[qm][const] is not None and \
old_const[qm]['BadPixelsDark'] is not None
data = np.copy(res[qm][const]) data = np.copy(res[qm][const])
if const == 'ThresholdsDark': if const == 'ThresholdsDark':
data[...,0][res[qm]['BadPixelsDark'][...,0]>0] = np.nan data[...,0][res[qm]['BadPixelsDark'][...,0]>0] = np.nan
data[...,1][res[qm]['BadPixelsDark'][...,1]>0] = np.nan data[...,1][res[qm]['BadPixelsDark'][...,1]>0] = np.nan
else: else:
data[res[qm]['BadPixelsDark']>0] = np.nan data[res[qm]['BadPixelsDark']>0] = np.nan
if compare_with_old_constant: if compare_with_old_constant:
data_old = np.copy(old_const[const]) data_old = np.copy(old_const[qm][const])
if const == 'ThresholdsDark': if const == 'ThresholdsDark':
data_old[...,0][old_const['BadPixelsDark'][...,0]>0] = np.nan data_old[...,0][old_const[qm]['BadPixelsDark'][...,0]>0] = np.nan
data_old[...,1][old_const['BadPixelsDark'][...,1]>0] = np.nan data_old[...,1][old_const[qm]['BadPixelsDark'][...,1]>0] = np.nan
else: else:
data_old[old_const['BadPixelsDark']>0] = np.nan data_old[old_const[qm]['BadPixelsDark']>0] = np.nan
f_list = [np.nanmedian, np.nanmean, np.nanstd, np.nanmin, np.nanmax] f_list = [np.nanmedian, np.nanmean, np.nanstd, np.nanmin, np.nanmax]
n_list = ['Median', 'Mean', 'Std', 'Min', 'Max'] n_list = ['Median', 'Mean', 'Std', 'Min', 'Max']
def compute_row(i): def compute_row(i):
line = [n_list[i]] line = [n_list[i]]
for gain in range(3): for gain in range(3):
# Compare only 3 threshold gain-maps # Compare only 3 threshold gain-maps
if gain == 2 and const == 'ThresholdsDark': if gain == 2 and const == 'ThresholdsDark':
continue continue
stat_measure = f_list[i](data[...,gain]) stat_measure = f_list[i](data[...,gain])
line.append(f"{stat_measure:6.1f}") line.append(f"{stat_measure:6.1f}")
if compare_with_old_constant: if compare_with_old_constant:
old_stat_measure = f_list[i](data_old[...,gain]) old_stat_measure = f_list[i](data_old[...,gain])
line.append(f"{old_stat_measure:6.1f}") line.append(f"{old_stat_measure:6.1f}")
else: else:
line.append("-") line.append("-")
return line return line
with multiprocessing.pool.ThreadPool(processes=multiprocessing.cpu_count() // len(constants_x_qms)) as pool: with multiprocessing.pool.ThreadPool(processes=multiprocessing.cpu_count() // len(constants_x_qms)) as pool:
rows = pool.map(compute_row, range(len(f_list))) rows = pool.map(compute_row, range(len(f_list)))
table.extend(rows) table.extend(rows)
return table return table
with multiprocessing.Pool(processes=len(constants_x_qms)) as pool: with multiprocessing.Pool(processes=len(constants_x_qms)) as pool:
tables = pool.starmap(compute_table, constants_x_qms) tables = pool.starmap(compute_table, constants_x_qms)
for (const, qm), table in zip(constants_x_qms, tables): for (const, qm), table in zip(constants_x_qms, tables):
display(Markdown(f"### {qm}: {const} [ADU], good pixels only")) display(Markdown(f"### {qm}: {const} [ADU], good pixels only"))
display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=header))) display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=header)))
``` ```
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment