Commit 1fb92c2a authored by Karim Ahmed

Fix plotting if cellID data is the same for all trains

parent 7ff7dbae
1 merge request: !755 [LPD][DARK] Fix plotting if cellID data is the same for all trains
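The change guards the pedestal-histogram binning in the "Raw pedestal distribution" cell below: if the cellId data is the same for all trains, the plotted values collapse to a single number, the histogram range has zero width, and the computed bin count becomes zero. A minimal sketch of the failure mode (illustrative values, not from the run data):

``` python
import numpy as np

data = np.full(100, 512.0)  # every train carries the same value
xrange = [np.nanmin(data), np.nanmax(data)]  # [512.0, 512.0]
nbins = int(xrange[1] - xrange[0])  # 0 bins for a zero-width range
# np.histogram(data, bins=nbins, range=xrange) then raises a ValueError,
# which is what the guard added in this commit avoids.
```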
%% Cell type:markdown id: tags:
# LPD Offset, Noise and Dead Pixels Characterization #
Author: M. Karnevskiy, S. Hauf
This notebook re-characterizes dark images to derive offset, noise and bad-pixel maps. All three types of constants are evaluated per-pixel and per-memory cell.
The notebook will correctly handle veto settings, but note that if you veto cells you will not be able to use these offsets for runs with different veto settings - vetoed cells will have zero offset.
The evaluated calibration constants are stored locally and injected into the calibration database.
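As an illustration of the per-pixel, per-memory-cell evaluation, a minimal sketch (illustrative shapes and values, not the notebook's actual file I/O): the offset is the median over all frames belonging to a memory cell, the noise the standard deviation, mirroring the characterization function below.

``` python
import numpy as np

rng = np.random.default_rng(0)
frames = rng.normal(1000.0, 5.0, size=(64, 64, 300)).astype(np.float32)  # x, y, frame
cellid = np.tile(np.arange(100), 3)  # memory cell id of each frame

offset = np.zeros((64, 64, 100), np.float32)
noise = np.zeros_like(offset)
for cc in range(100):
    sel = cellid == cc
    offset[..., cc] = np.median(frames[:, :, sel], axis=2)  # dark offset per cell
    noise[..., cc] = np.std(frames[:, :, sel], axis=2)      # dark noise per cell
```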
%% Cell type:code id: tags:
``` python
cluster_profile = "noDB"  # The ipcluster profile to use
in_folder = "/gpfs/exfel/exp/FXE/202030/p900121/raw"  # path to input data, required
out_folder = "/gpfs/exfel/data/scratch/ahmedk/test/LPD/"  # path to output to, required
metadata_folder = ""  # Directory containing calibration_metadata.yml when run by xfel-calibrate
sequence = 0  # sequence files to evaluate
modules = [-1]  # list of modules to evaluate, RANGE ALLOWED
run_high = 120  # run number in which high gain data was recorded, required
run_med = 121  # run number in which medium gain data was recorded, required
run_low = 122  # run number in which low gain data was recorded, required
karabo_id = "FXE_DET_LPD1M-1"  # karabo id of the detector
karabo_da = ['-1']  # a list of data aggregator names, default ['-1'] selects all data aggregators
receiver_id = "{}CH0"  # inset for receiver devices
path_template = 'RAW-R{:04d}-{}-S{:05d}.h5'  # the template to use to access data
h5path = '/INSTRUMENT/{}/DET/{}:xtdf/image'  # path in the HDF5 file to images
h5path_idx = '/INDEX/{}/DET/{}:xtdf/image'  # path in the HDF5 file to image indices
use_dir_creation_date = True  # use the creation date of the directory for database time derivation
cal_db_interface = "tcp://max-exfl016:8015#8025"  # the database interface to use
cal_db_timeout = 300000  # timeout on caldb requests
local_output = True  # output constants locally
db_output = False  # output constants to database
capacitor_setting = 5  # capacitor_setting for which data was taken
mem_cells = 512  # number of memory cells used
bias_voltage = 250  # detector bias voltage
thresholds_offset_sigma = 3.  # bad pixel relative threshold in terms of n sigma offset
thresholds_offset_hard = [400, 1500]  # bad pixel hard threshold
thresholds_noise_sigma = 7.  # bad pixel relative threshold in terms of n sigma noise
thresholds_noise_hard = [1, 35]  # bad pixel hard threshold
skip_first_ntrains = 10  # number of first trains to skip
instrument = "FXE"  # instrument name
ntrains = 100  # number of trains to use
high_res_badpix_3d = False  # plot bad-pixel summary in high resolution
test_for_normality = False  # perform normality test
operation_mode = ''  # detector operation mode, optional
```
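For orientation, the sigma and hard thresholds above combine into the bad-pixel criterion evaluated later in the notebook: per gain stage and memory cell, a pixel with offset $O$ is flagged `OFFSET_OUT_OF_THRESHOLD` if $O < \tilde{O} - n_\sigma \sigma_O$ or $O > \tilde{O} + n_\sigma \sigma_O$, where $\tilde{O}$ and $\sigma_O$ are the median and standard deviation of the offsets over all pixels of the module and $n_\sigma$ is `thresholds_offset_sigma`, or if $O$ falls outside the hard window `thresholds_offset_hard` = [400, 1500] ADU. The noise criterion is analogous with the `thresholds_noise_*` parameters, and pixels with non-finite offset or noise are flagged `OFFSET_NOISE_EVAL_ERROR`.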
%% Cell type:code id: tags:
``` python
import copy
import os
import warnings
from collections import OrderedDict
from datetime import datetime
from functools import partial

warnings.filterwarnings('ignore')

import dateutil.parser
import h5py
import matplotlib
from ipyparallel import Client
from IPython.display import Latex, Markdown, display

matplotlib.use("agg")
import matplotlib.patches as patches
import matplotlib.pyplot as plt

%matplotlib inline
import numpy as np
import tabulate
import yaml
from iCalibrationDB import Conditions, Constants, Detectors, Versions
from XFELDetAna.plotting.heatmap import heatmapPlot
from XFELDetAna.plotting.simpleplot import simplePlot

from cal_tools.enums import BadPixels
from cal_tools.plotting import (
    create_constant_overview,
    plot_badpix_3d,
    show_overview,
    show_processed_modules,
)
from cal_tools.tools import (
    get_dir_creation_date,
    get_from_db,
    get_notebook_name,
    get_pdu_from_db,
    get_random_db_interface,
    get_report,
    map_gain_stages,
    module_index_to_qm,
    parse_runs,
    run_prop_seq_from_path,
    save_const_to_h5,
    send_to_db,
)
```
%% Cell type:code id: tags:
``` python
client = Client(profile=cluster_profile)
view = client[:]
view.use_dill()

gains = np.arange(3)
max_cells = mem_cells
cells = np.arange(max_cells)
gain_names = ['High', 'Medium', 'Low']

if karabo_da[0] == '-1':
    if modules[0] == -1:
        modules = list(range(16))
    karabo_da = ['LPD{:02d}'.format(i) for i in modules]
else:
    modules = [int(x[-2:]) for x in karabo_da]

gain_runs = OrderedDict()
if capacitor_setting == 5:
    gain_runs["high_5pf"] = run_high
    gain_runs["med_5pf"] = run_med
    gain_runs["low_5pf"] = run_low
elif capacitor_setting == 50:
    gain_runs["high_50pf"] = run_high
    gain_runs["med_50pf"] = run_med
    gain_runs["low_50pf"] = run_low

capacitor_settings = [capacitor_setting]
capacitor_settings = ['{}pf'.format(c) for c in capacitor_settings]

h5path = h5path.format(karabo_id, receiver_id)
h5path_idx = h5path_idx.format(karabo_id, receiver_id)

creation_time = None
if use_dir_creation_date:
    creation_time = get_dir_creation_date(in_folder, run_high)
    print("Using {} as creation time".format(creation_time))

run, prop, seq = run_prop_seq_from_path(in_folder)

cal_db_interface = get_random_db_interface(cal_db_interface)

display(Markdown('## Evaluated parameters'))
print('CalDB Interface {}'.format(cal_db_interface))
print("Proposal: {}".format(prop))
print("Memory cells: {}/{}".format(mem_cells, max_cells))
print("Runs: {}, {}, {}".format(run_high, run_med, run_low))
print("Sequence: {}".format(sequence))
print("Using DB: {}".format(db_output))
print("Input: {}".format(in_folder))
print("Output: {}".format(out_folder))
print("Bias voltage: {}V".format(bias_voltage))
```
%% Cell type:code id: tags:
``` python
# set everything up filewise
gmf = map_gain_stages(in_folder, gain_runs, path_template, karabo_da, [sequence])
gain_mapped_files, total_sequences, total_file_size = gmf
print(f"Will process a total of {total_sequences} files.")
```
%% Cell type:markdown id: tags:
## Data processing
%% Cell type:code id: tags:
``` python
# the actual characterization
def characterize_module(cells, bp_thresh, skip_first_ntrains, ntrains, test_for_normality,
                        h5path, h5path_idx, inp):
    import copy
    import h5py
    import numpy as np
    import scipy.stats
    from cal_tools.enums import BadPixels

    def splitOffGainLPD(d):
        # Bits 0-11 of each raw 16-bit word carry the ADU value,
        # bits 12-13 the gain stage (clipped to 0-2).
        msk = np.zeros(d.shape, np.uint16)
        msk[...] = 0b0000111111111111
        data = np.bitwise_and(d, msk)
        msk[...] = 0b0011000000000000
        gain = np.bitwise_and(d, msk)//4096
        gain[gain > 2] = 2
        return data, gain

    filename, channel, gg, cap = inp
    thresholds_offset_hard, thresholds_offset_sigma, thresholds_noise_hard, thresholds_noise_sigma = bp_thresh

    infile = h5py.File(filename, "r")
    h5path = h5path.format(channel)
    h5path_idx = h5path_idx.format(channel)

    count = infile[f"{h5path_idx}/count"][()]
    first = infile[f"{h5path_idx}/first"][()]
    valid = count != 0
    count, first = count[valid], first[valid]
    first_image = int(first[skip_first_ntrains] if first.shape[0] > skip_first_ntrains else 0)
    last_image = int(first_image + np.sum(count[skip_first_ntrains:skip_first_ntrains+ntrains]))

    im = np.array(infile[f"{h5path}/data"][first_image:last_image, ...])
    cellid = np.squeeze(np.array(infile[f"{h5path}/cellId"][first_image:last_image, ...]))
    infile.close()

    im, g = splitOffGainLPD(im[:, 0, ...])
    im = im.astype(np.float32)
    im = np.rollaxis(im, 2)
    im = np.rollaxis(im, 2, 1)

    offset = np.zeros((im.shape[0], im.shape[1], cells))
    noise = np.zeros((im.shape[0], im.shape[1], cells))
    normal_test = np.zeros((im.shape[0], im.shape[1], cells))
    for cc in range(cells):
        idx = cellid == cc
        if np.any(idx):
            offset[..., cc] = np.median(im[:, :, idx], axis=2)
            noise[..., cc] = np.std(im[:, :, idx], axis=2)
            if test_for_normality:
                _, normal_test[..., cc] = scipy.stats.normaltest(
                    im[:, :, idx], axis=2)

    # bad pixels
    bp = np.zeros(offset.shape, np.uint32)
    # offset related bad pixels
    offset_mn = np.nanmedian(offset, axis=(0, 1))
    offset_std = np.nanstd(offset, axis=(0, 1))

    bp[(offset < offset_mn-thresholds_offset_sigma*offset_std) |
       (offset > offset_mn+thresholds_offset_sigma*offset_std)] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value
    bp[(offset < thresholds_offset_hard[0]) | (
        offset > thresholds_offset_hard[1])] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value
    bp[~np.isfinite(offset)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value

    # noise related bad pixels
    noise_mn = np.nanmedian(noise, axis=(0, 1))
    noise_std = np.nanstd(noise, axis=(0, 1))

    bp[(noise < noise_mn-thresholds_noise_sigma*noise_std) |
       (noise > noise_mn+thresholds_noise_sigma*noise_std)] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value
    bp[(noise < thresholds_noise_hard[0]) | (
        noise > thresholds_noise_hard[1])] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value
    bp[~np.isfinite(noise)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value

    idx = cellid == 12
    return offset, noise, channel, gg, cap, bp, im[12, 12, idx], normal_test


offset_g = OrderedDict()
noise_g = OrderedDict()
badpix_g = OrderedDict()
data_g = OrderedDict()
ntest_g = OrderedDict()

gg = 0
old_cap = None
start = datetime.now()
inp = []
for gain, mapped_files in gain_mapped_files.items():
    cap = gain.split("_")[1]
    if cap != old_cap:
        gg = 0
        old_cap = cap
        offset_g[cap] = OrderedDict()
        noise_g[cap] = OrderedDict()
        badpix_g[cap] = OrderedDict()
        data_g[cap] = OrderedDict()
        ntest_g[cap] = OrderedDict()

    for i in modules:
        qm = module_index_to_qm(i)
        if qm in mapped_files and not mapped_files[qm].empty():
            fname_in = mapped_files[qm].get()
            print("Process file: ", fname_in)
            inp.append((fname_in, i, gg, cap))
    gg += 1

p = partial(characterize_module, max_cells,
            (thresholds_offset_hard, thresholds_offset_sigma,
             thresholds_noise_hard, thresholds_noise_sigma),
            skip_first_ntrains, ntrains, test_for_normality,
            h5path, h5path_idx)

# Don't remove. Used for debugging.
# results = list(map(p, inp))
results = view.map_sync(p, inp)

for ir, r in enumerate(results):
    offset, noise, i, gg, cap, bp, data, normal = r
    qm = module_index_to_qm(i)
    if qm not in offset_g[cap]:
        offset_g[cap][qm] = np.zeros(
            (offset.shape[0], offset.shape[1], offset.shape[2], 3))
        noise_g[cap][qm] = np.zeros_like(offset_g[cap][qm])
        badpix_g[cap][qm] = np.zeros_like(offset_g[cap][qm], dtype=np.uint32)
        data_g[cap][qm] = np.full((ntrains, 3), np.nan)
        ntest_g[cap][qm] = np.zeros_like(offset_g[cap][qm])

    offset_g[cap][qm][..., gg] = offset
    noise_g[cap][qm][..., gg] = noise
    badpix_g[cap][qm][..., gg] = bp
    data_g[cap][qm][:data.shape[0], gg] = data
    ntest_g[cap][qm][..., gg] = normal

    hn, cn = np.histogram(data, bins=20)
    print(f"{gain_names[gg]} gain, Capacitor {cap}, Module: {qm}. "
          f"Number of processed trains per cell: {data.shape[0]}.")
```
%% Cell type:code id: tags:
``` python
# Read report path and create file location tuple to add with the injection
proposal = list(filter(None, in_folder.strip('/').split('/')))[-2]
file_loc = 'proposal:{} runs:{} {} {}'.format(proposal, run_low, run_med, run_high)
report = get_report(metadata_folder)
```
%% Cell type:code id: tags:
``` python
# TODO: add db_module when received from myMDC
# Create the modules dict of karabo_das and PDUs
qm_dict = OrderedDict()
for i, k_da in zip(modules, karabo_da):
    qm = module_index_to_qm(i)
    qm_dict[qm] = {"karabo_da": k_da,
                   "db_module": ""}
```
%% Cell type:code id: tags:
``` python
# Retrieve existing constants for comparison
clist = ["Offset", "Noise", "BadPixelsDark"]
old_const = {}
old_mdata = {}
dinstance = "LPD1M1"
detinst = getattr(Detectors, dinstance)

print('Retrieve pre-existing constants for comparison.')
for cap in capacitor_settings:
    old_const[cap] = {}
    old_mdata[cap] = {}
    for qm in offset_g[cap].keys():
        old_const[cap][qm] = {}
        old_mdata[cap][qm] = {}
        qm_db = qm_dict[qm]
        karabo_da = qm_db["karabo_da"]
        condition = Conditions.Dark.LPD(memory_cells=max_cells,
                                        bias_voltage=bias_voltage,
                                        capacitor=cap)
        for const in clist:
            constant = getattr(Constants.LPD, const)()
            if not qm_db["db_module"]:
                # Used when the notebook is run by a method other than
                # myMDC, which already sends the CalCat info.
                qm_db["db_module"] = get_pdu_from_db(karabo_id, [karabo_da], constant,
                                                     condition, cal_db_interface,
                                                     snapshot_at=creation_time)[0]
            data, mdata = get_from_db(karabo_id, karabo_da,
                                      constant,
                                      condition, None,
                                      cal_db_interface,
                                      creation_time=creation_time,
                                      verbosity=2, timeout=cal_db_timeout)
            old_const[cap][qm][const] = data
            if mdata is None or data is None:
                old_mdata[cap][qm][const] = {
                    "timestamp": "Not found",
                    "filepath": None,
                    "h5path": None
                }
            else:
                timestamp = mdata.calibration_constant_version.begin_at.isoformat()
                filepath = os.path.join(
                    mdata.calibration_constant_version.hdf5path,
                    mdata.calibration_constant_version.filename
                )
                h5path = mdata.calibration_constant_version.h5path
                old_mdata[cap][qm][const] = {
                    "timestamp": timestamp,
                    "filepath": filepath,
                    "h5path": h5path
                }
        with open(f"{out_folder}/module_metadata_{qm}.yml", "w") as fd:
            yaml.safe_dump(
                {
                    "module": qm,
                    "pdu": qm_db["db_module"],
                    "old-constants": old_mdata[cap][qm]
                }, fd)
```
%% Cell type:code id: tags:
``` python
res = OrderedDict()
for cap in capacitor_settings:
    res[cap] = OrderedDict()
    for i in modules:
        qm = module_index_to_qm(i)
        res[cap][qm] = {'Offset': offset_g[cap][qm],
                        'Noise': noise_g[cap][qm],
                        'BadPixelsDark': badpix_g[cap][qm]
                        }
```
%% Cell type:code id: tags:
``` python
# Save constants in the calibration DB
md = None
for cap in capacitor_settings:
    for qm in res[cap]:
        karabo_da = qm_dict[qm]["karabo_da"]
        db_module = qm_dict[qm]["db_module"]

        # Do not store empty constants.
        # In case of 0 trains data_g is initialized with NaNs and never refilled.
        if np.count_nonzero(~np.isnan(data_g[cap][qm])) == 0:
            continue

        for const in res[cap][qm]:
            dconst = getattr(Constants.LPD, const)()
            dconst.data = res[cap][qm][const]

            # set the operating condition
            condition = Conditions.Dark.LPD(memory_cells=max_cells,
                                            bias_voltage=bias_voltage,
                                            capacitor=cap)

            if db_output:
                md = send_to_db(db_module, karabo_id, dconst, condition,
                                file_loc, report_path=report,
                                cal_db_interface=cal_db_interface,
                                creation_time=creation_time,
                                timeout=cal_db_timeout)
            if local_output:
                md = save_const_to_h5(db_module, karabo_id, dconst, condition,
                                      dconst.data, file_loc, report, creation_time, out_folder)
                print(f"Calibration constant {const} is stored locally.\n")

        print("Constants parameter conditions are:\n")
        print(f"• memory_cells: {max_cells}\n• bias_voltage: {bias_voltage}\n"
              f"• capacitor: {cap}\n"
              f"• creation_time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\n")
```
%% Cell type:code id: tags:
``` python
show_processed_modules(
    dinstance=dinstance,
    constants=None,
    mnames=[module_index_to_qm(i) for i in modules],
    mode="position"
)
```
%% Cell type:markdown id: tags:
## Raw pedestal distribution ##
Distribution of the pedestal (in ADU) over trains for pixel (12, 12), memory cell 12. The median of the distribution is shown in yellow, the one-standard-deviation band in red. The green line shows the average over all pixels for the given memory cell and gain stage.
%% Cell type:code id: tags:
``` python
fig, grid = plt.subplots(3, 1, sharex="col", sharey="row", figsize=(10, 7))
fig.subplots_adjust(wspace=0, hspace=0)
for cap in capacitor_settings:
    for i in modules:
        qm = module_index_to_qm(i)
        if np.count_nonzero(~np.isnan(data_g[cap][qm])) == 0:
            break
        for gain in range(3):
            data = data_g[cap][qm][:, gain]
            offset = np.nanmedian(data)
            noise = np.nanstd(data)
            xrange = [np.nanmin(data_g[cap][qm]), np.nanmax(data_g[cap][qm])]
            if len(set(xrange)) == 1:
                # Degenerate range: all values are identical (e.g. cellId data
                # the same for all trains); fall back to a synthetic range/binning.
                xrange = [0, xrange[0]+xrange[0]//2]
                nbins = data_g[cap][qm].shape[0]
            else:
                nbins = int(xrange[1] - xrange[0])
            hn, cn = np.histogram(data, bins=nbins, range=xrange)

            grid[gain].hist(data, range=xrange, bins=nbins)
            grid[gain].plot([offset-noise, offset-noise], [0, np.nanmax(hn)],
                            linewidth=1.5, color='red',
                            label='1 $\sigma$ deviation')
            grid[gain].plot([offset+noise, offset+noise],
                            [0, np.nanmax(hn)], linewidth=1.5, color='red')
            grid[gain].plot([offset, offset], [0, 0],
                            linewidth=1.5, color='y', label='median')
            grid[gain].plot([np.nanmedian(offset_g[cap][qm][:, :, 12, gain]),
                             np.nanmedian(offset_g[cap][qm][:, :, 12, gain])],
                            [0, np.nanmax(hn)], linewidth=1.5, color='green',
                            label='average over pixels')
            grid[gain].set_xlim(xrange)
            grid[gain].set_ylim(0, np.nanmax(hn)*1.1)
            grid[gain].set_xlabel("Offset value [ADU]")
            grid[gain].set_ylabel("# of occurrences")
            if gain == 0:
                leg = grid[gain].legend(
                    loc='upper center', ncol=3,
                    bbox_to_anchor=(0.1, 0.25, 0.7, 1.0))
            grid[gain].text(820, np.nanmax(hn)*0.4,
                            "{} gain".format(gain_names[gain]), fontsize=20)

        a = plt.axes([.125, .1, 0.775, .8], frame_on=False)
        a.patch.set_alpha(0.05)
        a.set_xlim(xrange)
        plt.plot([offset, offset], [0, 1], linewidth=1.5, color='y')
        plt.xticks([])
        plt.yticks([])

        ypos = 0.9
        x1pos = (np.nanmedian(data_g[cap][qm][:, 0]) +
                 np.nanmedian(data_g[cap][qm][:, 2]))/2.
        x2pos = (np.nanmedian(data_g[cap][qm][:, 2]) +
                 np.nanmedian(data_g[cap][qm][:, 1]))/2.-10

        plt.annotate("", xy=(np.nanmedian(data_g[cap][qm][:, 0]), ypos), xycoords='data',
                     xytext=(np.nanmedian(data_g[cap][qm][:, 2]), ypos), textcoords='data',
                     arrowprops=dict(arrowstyle="<->", connectionstyle="arc3"))
        plt.annotate('{}'.format(np.nanmedian(data_g[cap][qm][:, 0])-np.nanmedian(data_g[cap][qm][:, 2])),
                     xy=(x1pos, ypos), xycoords='data', xytext=(5, 5), textcoords='offset points')
        plt.annotate("", xy=(np.nanmedian(data_g[cap][qm][:, 2]), ypos), xycoords='data',
                     xytext=(np.nanmedian(data_g[cap][qm][:, 1]), ypos), textcoords='data',
                     arrowprops=dict(arrowstyle="<->", connectionstyle="arc3"))
        plt.annotate('{}'.format(np.nanmedian(data_g[cap][qm][:, 2])-np.nanmedian(data_g[cap][qm][:, 1])),
                     xy=(x2pos, ypos), xycoords='data', xytext=(5, 5), textcoords='offset points')
plt.show()
```
%% Cell type:markdown id: tags:
## Normality test ##
The distributions of raw pedestal values have been tested for normality. A normality test has been performed for each pixel and each memory cell. The plots below show a histogram of p-values and a 2D distribution for memory cell 12.
%% Cell type:code id: tags:
``` python
# Loop over capacitor settings, modules, constants
for cap in capacitor_settings:
    if not test_for_normality:
        print('Normality test was not requested. Flag `test_for_normality` False')
        break
    for i in modules:
        qm = module_index_to_qm(i)

        data = np.copy(ntest_g[cap][qm][:, :, :, :])
        data[badpix_g[cap][qm][:, :, :, :] > 0] = 1.01

        hn, cn = np.histogram(data[:, :, :, 0], bins=100)
        d = [{'x': np.arange(100)*0.01+0.01,
              'y': np.histogram(data[:, :, :, 0], bins=100)[0],
              'drawstyle': 'steps-pre',
              'label': 'High gain',
              },
             {'x': np.arange(100)*0.01+0.01,
              'y': np.histogram(data[:, :, :, 1], bins=100)[0],
              'drawstyle': 'steps-pre',
              'label': 'Medium gain',
              },
             {'x': np.arange(100)*0.01+0.01,
              'y': np.histogram(data[:, :, :, 2], bins=100)[0],
              'drawstyle': 'steps-pre',
              'label': 'Low gain',
              },
             ]

        fig = plt.figure(figsize=(15, 15), tight_layout={'pad': 0.5, 'w_pad': 0.3})
        for gain in range(3):
            ax = fig.add_subplot(221+gain)
            heatmapPlot(data[:, :, 12, gain], add_panels=False, cmap='viridis', figsize=(10, 10),
                        y_label='Rows', x_label='Columns',
                        lut_label='p-Value',
                        use_axis=ax,
                        title='p-Value for cell 12, {} gain'.format(gain_names[gain]))

        ax = fig.add_subplot(224)
        _ = simplePlot(d,  # aspect=1.6,
                       x_label="p-Value",
                       y_label="# of occurrences",
                       use_axis=ax,
                       y_log=False, legend='outside-top-ncol3-frame', legend_pad=0.05, legend_size='5%')
        ax.ticklabel_format(style='sci', axis='y', scilimits=(4, 6))
```
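As a standalone illustration of the test used above (scipy's D'Agostino-Pearson `normaltest`, applied along the train axis; shapes here are illustrative):

``` python
import numpy as np
import scipy.stats

rng = np.random.default_rng(1)
samples = rng.normal(1000.0, 5.0, size=(4, 4, 200))  # pixels x pixels x trains
stat, pvalue = scipy.stats.normaltest(samples, axis=2)
print(pvalue.shape)  # (4, 4): one p-value per pixel
# Small p-values reject normality; the histograms above bin exactly these p-values.
```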
%% Cell type:raw id: tags:
.. raw:: latex
\newpage
%% Cell type:markdown id: tags:
## Single-Cell Overviews ##
Single-cell overviews make it possible to identify effects common to all memory cells, e.g. at the sensor level. Additionally, they serve as a first sanity check on expected behaviour, e.g. whether structuring at the ASIC level is visible in the offsets while no other immediate artifacts appear.
%% Cell type:code id: tags:
``` python
cell = 12
for cap in capacitor_settings:
    for gain in range(3):
        display(
            Markdown('### Cell-12 overview - {} gain'.format(gain_names[gain])))

        fig = plt.figure(figsize=(18, 22), tight_layout={'pad': 0.1, 'w_pad': 0.1})
        for qm in res[cap]:
            for iconst, const in enumerate(['Offset', 'Noise', 'BadPixelsDark']):
                ax = fig.add_subplot(321+iconst)

                data = res[cap][qm][const][:, :, 12, gain]
                vmax = 1.5 * np.nanmedian(res[cap][qm][const][:, :, 12, gain])
                title = const
                label = '{} value [ADU]'.format(const)
                title = '{} value'.format(const)
                if const == 'BadPixelsDark':
                    vmax = 4
                    bpix_code = data.astype(np.float32)
                    bpix_code[bpix_code == 0] = np.nan
                    title = 'Bad pixel code'
                    label = title

                    cb_labels = ['1 {}'.format(BadPixels.NOISE_OUT_OF_THRESHOLD.name),
                                 '2 {}'.format(BadPixels.OFFSET_NOISE_EVAL_ERROR.name),
                                 '3 {}'.format(BadPixels.OFFSET_OUT_OF_THRESHOLD.name),
                                 '4 {}'.format('MIXED')]

                    heatmapPlot(bpix_code, add_panels=False, cmap='viridis',
                                y_label='Rows', x_label='Columns',
                                lut_label='', vmax=vmax,
                                use_axis=ax, cb_ticklabels=cb_labels, cb_ticks=np.arange(4)+1,
                                title='{}'.format(title))
                    del bpix_code
                else:
                    heatmapPlot(data, add_panels=False, cmap='viridis',
                                y_label='Rows', x_label='Columns',
                                lut_label=label, vmax=vmax,
                                use_axis=ax,
                                title='{}'.format(title))

        for qm in res[cap]:
            for iconst, const in enumerate(['Offset', 'Noise']):
                data = res[cap][qm][const]
                dataBP = np.copy(data)
                dataBP[res[cap][qm]['BadPixelsDark'] > 0] = -1

                x_ranges = [[0, 1500], [0, 40]]
                hn, cn = np.histogram(
                    data[:, :, :, gain], bins=100, range=x_ranges[iconst])
                hnBP, cnBP = np.histogram(dataBP[:, :, :, gain], bins=cn)

                d = [{'x': cn[:-1],
                      'y': hn,
                      'drawstyle': 'steps-pre',
                      'label': 'All data',
                      },
                     {'x': cnBP[:-1],
                      'y': hnBP,
                      'drawstyle': 'steps-pre',
                      'label': 'Bad pixels masked',
                      },
                     ]

                ax = fig.add_subplot(325+iconst)
                _ = simplePlot(d, figsize=(5, 7), aspect=1,
                               x_label="{} value [ADU]".format(const),
                               y_label="# of occurrences",
                               title='', legend_pad=0.1, legend_size='10%',
                               use_axis=ax,
                               y_log=True, legend='outside-top-2col-frame')
        plt.show()
```
%% Cell type:raw id: tags:
.. raw:: latex
\newpage
%% Cell type:markdown id: tags:
%% Cell type:code id: tags:
``` python
cols = {BadPixels.NOISE_OUT_OF_THRESHOLD.value: (BadPixels.NOISE_OUT_OF_THRESHOLD.name, '#FF000080'),
        BadPixels.OFFSET_NOISE_EVAL_ERROR.value: (BadPixels.OFFSET_NOISE_EVAL_ERROR.name, '#0000FF80'),
        BadPixels.OFFSET_OUT_OF_THRESHOLD.value: (BadPixels.OFFSET_OUT_OF_THRESHOLD.name, '#00FF0080'),
        BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value: ('MIXED', '#DD00DD80')}

if high_res_badpix_3d:
    display(Markdown("""
    ## Global Bad Pixel Behaviour ##

    The following plots show the results of a bad pixel evaluation for all evaluated memory cells.
    Cells are stacked in the Z-dimension, while pixel values in x/y are re-binned with a factor of 2.
    This excludes isolated single bad pixels.
    Hence, any bad pixels spanning at least 4 pixels in the x/y-plane, or across at least two memory cells, are indicated.
    Colors encode the bad pixel type, or mixed type.
    """))

    # Switch rebin to 1 for full resolution and
    # no interpolation for badpixel values.
    rebin = 2
    for gain in range(3):
        display(Markdown('### Bad pixel behaviour - {} gain ###'.format(gain_names[gain])))
        for cap in capacitor_settings:
            for mod, data in badpix_g[cap].items():
                plot_badpix_3d(data[..., gain], cols, title='', rebin_fac=rebin)
                ax = plt.gca()
                leg = ax.get_legend()
                leg.set(alpha=0.5)
                plt.show()
```
%% Cell type:raw id: tags:
.. raw:: latex
\newpage
%% Cell type:markdown id: tags:
## Summary across tiles ##
The plots give an overview of the calibration constants averaged across tiles, with the bad pixel mask applied. The constants are compared with pre-existing constants retrieved from the calibration database; the differences $\Delta$ between the old and new constants are shown.
%% Cell type:code id: tags:
``` python
time_summary = []
for cap, cap_data in old_mdata.items():
    time_summary.append(f"The following pre-existing constants are used for comparison for capacitor setting **{cap}**:")
    for qm, qm_data in cap_data.items():
        time_summary.append(f"- Module {qm}")
        for const, const_data in qm_data.items():
            time_summary.append(f"  - {const} created at {const_data['timestamp']}")
display(Markdown("\n".join(time_summary)))
```
%% Cell type:code id: tags:
``` python
# Loop over capacitor settings, modules, constants
for cap in res:
    for qm in res[cap]:
        for gain in range(3):
            display(Markdown('### Summary across tiles - {} gain'.format(gain_names[gain])))

            for const in res[cap][qm]:
                data = np.copy(res[cap][qm][const][:, :, :, gain])
                label = 'Fraction of bad pixels'
                if const != 'BadPixelsDark':
                    data[badpix_g[cap][qm][:, :, :, gain] > 0] = np.nan
                    label = '{} value [ADU]'.format(const)
                else:
                    data[data > 0] = 1.0

                # Average within each tile (32 rows x 128 columns)
                data = data.reshape(
                    int(data.shape[0] / 32),
                    32,
                    int(data.shape[1] / 128),
                    128,
                    data.shape[2])
                data = np.nanmean(data, axis=(1, 3)).swapaxes(
                    0, 2).reshape(512, 16)

                fig = plt.figure(figsize=(15, 6))
                ax = fig.add_subplot(121)
                _ = heatmapPlot(data[:510, :], add_panels=True,
                                y_label='Memory Cell ID', x_label='Tile ID',
                                lut_label=label, use_axis=ax,
                                panel_y_label=label, panel_x_label=label,
                                cmap='viridis',  # cb_loc='right', cb_aspect=15,
                                x_ticklabels=np.arange(16)+1,
                                x_ticks=np.arange(16)+0.5)
                if old_const[cap][qm][const] is not None:
                    ax = fig.add_subplot(122)
                    dataold = np.copy(old_const[cap][qm][const][:, :, :, gain])
                    label = '$\Delta$ {}'.format(label)
                    if const != 'BadPixelsDark':
                        if old_const[cap][qm]['BadPixelsDark'] is not None:
                            dataold[old_const[cap][qm]['BadPixelsDark'][:, :, :, gain] > 0] = np.nan
                        else:
                            dataold[:] = np.nan
                    else:
                        dataold[dataold > 0] = 1.0

                    dataold = dataold.reshape(
                        int(dataold.shape[0] / 32),
                        32,
                        int(dataold.shape[1] / 128),
                        128,
                        dataold.shape[2])
                    dataold = np.nanmean(dataold, axis=(
                        1, 3)).swapaxes(0, 2).reshape(512, 16)
                    dataold = dataold - data

                    _ = heatmapPlot(dataold[:510, :], add_panels=True,
                                    y_label='Memory Cell ID', x_label='Tile ID',
                                    lut_label=label, use_axis=ax,
                                    panel_y_label=label, panel_x_label=label,
                                    cmap='viridis',  # cb_loc='right', cb_aspect=15,
                                    x_ticklabels=np.arange(16)+1,
                                    x_ticks=np.arange(16)+0.5)
                plt.show()
```
%% Cell type:raw id: tags:
.. raw:: latex
\newpage
%% Cell type:markdown id: tags:
## Variation of offset and noise across Tiles and ASICs ##
The following plots show the standard deviation $\sigma$ of the calibration constants. The plot of the standard deviation across tiles shows the pixels of one tile ($128 \times 32$); the value of each pixel is the standard deviation across the 16 tiles. The standard deviation across ASICs is shown over all tiles: the plot shows the pixels of one ASIC ($16 \times 32$), where each value is the standard deviation across all ASICs of the module.
%% Cell type:code id: tags:
``` python
# Loop over capacitor settings, modules, constants
for cap in res:
    for qm in res[cap]:
        for gain in range(3):
            display(Markdown('### Variation of offset and noise across ASICs - {} gain'.format(gain_names[gain])))

            fig = plt.figure(figsize=(15, 6))
            for iconst, const in enumerate(['Offset', 'Noise']):
                data = np.copy(res[cap][qm][const][:, :, :, gain])
                data[badpix_g[cap][qm][:, :, :, gain] > 0] = np.nan
                label = '$\sigma$ {} [ADU]'.format(const)

                dataA = np.nanmean(data, axis=2)  # average over cells
                dataA = dataA.reshape(8, 32, 16, 16)
                dataA = np.nanstd(dataA, axis=(0, 2))  # std across ASICs
                ax = fig.add_subplot(121+iconst)
                _ = heatmapPlot(dataA, add_panels=True,
                                y_label='rows', x_label='columns',
                                lut_label=label, use_axis=ax,
                                panel_y_label=label, panel_x_label=label,
                                cmap='viridis'
                                )
            plt.show()
```
%% Cell type:code id: tags:
``` python
# Loop over capacitor settings, modules, constants
for cap in res:
    for qm in res[cap]:
        for gain in range(3):
            display(Markdown('### Variation of offset and noise across tiles - {} gain'.format(gain_names[gain])))

            fig = plt.figure(figsize=(15, 6))
            for iconst, const in enumerate(['Offset', 'Noise']):
                data = np.copy(res[cap][qm][const][:, :, :, gain])
                data[badpix_g[cap][qm][:, :, :, gain] > 0] = np.nan
                label = '$\sigma$ {} [ADU]'.format(const)

                # Regroup into tiles (32 rows x 128 columns each), take the
                # std across tiles, then average over memory cells.
                dataT = data.reshape(
                    int(data.shape[0] / 32),
                    32,
                    int(data.shape[1] / 128),
                    128,
                    data.shape[2])
                dataT = np.nanstd(dataT, axis=(0, 2))
                dataT = np.nanmean(dataT, axis=2)

                ax = fig.add_subplot(121+iconst)
                _ = heatmapPlot(dataT, add_panels=True,
                                y_label='rows', x_label='columns',
                                lut_label=label, use_axis=ax,
                                panel_y_label=label, panel_x_label=label,
                                cmap='viridis')
            plt.show()
```
%% Cell type:raw id: tags:
.. raw:: latex
\newpage
%% Cell type:markdown id: tags:
## Aggregate values and per-cell behaviour ##
The following tables and plots give an overview of statistical aggregates for each constant, as well as per-cell behaviour averaged across pixels.
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
# Loop over capacitor settings, modules, constants
for cap in res:
    for qm in res[cap]:
        for gain in range(3):
            display(Markdown('### Mean over pixels - {} gain'.format(gain_names[gain])))
            fig = plt.figure(figsize=(9, 11))
            for iconst, const in enumerate(res[cap][qm]):
                ax = fig.add_subplot(311 + iconst)
                data = res[cap][qm][const][:, :, :510, gain]
                if const == 'BadPixelsDark':
                    data[data > 0] = 1.0
                dataBP = np.copy(data)
                dataBP[badpix_g[cap][qm][:, :, :510, gain] > 0] = -10  # mark bad pixels with a sentinel
                data = np.nanmean(data, axis=(0, 1))      # average over pixels, per memory cell
                dataBP = np.nanmean(dataBP, axis=(0, 1))
                d = [{'y': data,
                      'x': np.arange(data.shape[0]),
                      'drawstyle': 'steps-mid',
                      'label': 'All data'
                      }
                     ]
                if const != 'BadPixelsDark':
                    d.append({'y': dataBP,
                              'x': np.arange(data.shape[0]),
                              'drawstyle': 'steps-mid',
                              'label': 'good pixels only'
                              })
                    y_title = "{} value [ADU]".format(const)
                    title = "{} value, {} gain".format(const, gain_names[gain])
                else:
                    y_title = "Fraction of Bad Pixels"
                    title = "Fraction of Bad Pixels, {} gain".format(gain_names[gain])
                # Choose a y-range that keeps the bulk of the cells visible while
                # leaving room for the inset showing the first ~20 cells.
                data_min = np.min([data, dataBP]) if const != 'BadPixelsDark' else np.min([data])
                data_max = np.max([data[20:], dataBP[20:]])
                data_dif = data_max - data_min
                local_max = np.max([data[200:300], dataBP[200:300]])
                frac = 0.35
                new_max = (local_max - data_min * (1 - frac)) / frac
                new_max = np.max([data_max, new_max])
                _ = simplePlot(d, figsize=(10, 10), aspect=2, xrange=(-12, 510),
                               x_label='Memory Cell ID',
                               y_label=y_title, use_axis=ax,
                               title=title,
                               title_position=[0.5, 1.15],
                               inset='xy-coord-right', inset_x_range=(0, 20), inset_indicated=True,
                               inset_labeled=True, inset_coord=[0.2, 0.5, 0.6, 0.95],
                               inset_lw=1.0, y_range=[data_min - data_dif * 0.05, new_max + data_dif * 0.05],
                               y_log=False, legend='outside-top-ncol2-frame', legend_size='18%',
                               legend_pad=0.00)
            plt.tight_layout(pad=1.08, h_pad=0.35)
            plt.show()
```
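%% Cell type:markdown id: tags:
The y-range heuristic in the cell above can be read as follows: `new_max` is picked so that the local maximum over cells 200-300 lands at a fraction `frac` = 0.35 of the plotted span above `data_min`. Solving

$$\frac{\text{local\_max} - \text{data\_min}}{\text{new\_max} - \text{data\_min}} = \text{frac}
\qquad\Longrightarrow\qquad
\text{new\_max} = \frac{\text{local\_max} - (1 - \text{frac})\,\text{data\_min}}{\text{frac}}$$

reproduces the expression in the code. The final `np.max([data_max, new_max])` guarantees the range never clips the actual data maximum, and `y_range` then pads both ends by 5% of the data spread.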
%% Cell type:raw id: tags:
.. raw:: latex
\newpage
%% Cell type:markdown id: tags:
## Summary tables ##
The following tables show summary information for the evaluated module. Values of the newly evaluated constants are compared with those of pre-existing constants retrieved from the calibration database.
%% Cell type:code id: tags:
``` python
table = []
bits = [BadPixels.NOISE_OUT_OF_THRESHOLD, BadPixels.OFFSET_OUT_OF_THRESHOLD, BadPixels.OFFSET_NOISE_EVAL_ERROR]

for cap in res:
    for qm in res[cap]:
        for gain in range(3):
            l_data = []
            l_data_old = []

            data = np.copy(res[cap][qm]['BadPixelsDark'][:, :, :, gain])
            l_data.append(len(data[data > 0].flatten()))
            for bit in bits:
                l_data.append(np.count_nonzero(badpix_g[cap][qm][:, :, :, gain] & bit.value))

            if old_const[cap][qm]['BadPixelsDark'] is not None:
                old_const[cap][qm]['BadPixelsDark'] = old_const[cap][qm]['BadPixelsDark'].astype(np.uint32)
                dataold = np.copy(old_const[cap][qm]['BadPixelsDark'][:, :, :, gain])
                l_data_old.append(len(dataold[dataold > 0].flatten()))
                for bit in bits:
                    l_data_old.append(np.count_nonzero(old_const[cap][qm]['BadPixelsDark'][:, :, :, gain] & bit.value))

            l_data_name = ['All bad pixels', 'NOISE_OUT_OF_THRESHOLD',
                           'OFFSET_OUT_OF_THRESHOLD', 'OFFSET_NOISE_EVAL_ERROR']
            l_threshold = ['', f'{thresholds_noise_sigma}', f'{thresholds_offset_sigma}',
                           f'{thresholds_offset_hard}/{thresholds_noise_hard}']

            for i in range(len(l_data)):
                line = [f'{l_data_name[i]}, gain {gain_names[gain]}', l_threshold[i], l_data[i]]
                if old_const[cap][qm]['BadPixelsDark'] is not None:
                    line += [l_data_old[i]]
                else:
                    line += ['-']
                table.append(line)
            table.append(['', '', '', ''])

display(Markdown('''
### Number of bad pixels ###
A pixel can be flagged bad for more than one reason, so the sum over the individual bad-pixel types can exceed the total number of bad pixels.
'''))

if len(table) > 0:
    md = display(Latex(tabulate.tabulate(table, tablefmt='latex',
                                         headers=["Pixel type", "Threshold",
                                                  "New constant", "Old constant"])))
```
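%% Cell type:markdown id: tags:
`BadPixelsDark` is a bit field: one pixel can carry several flags at once, which is why the per-type counts above can add up to more than the total. A minimal sketch of the counting logic on synthetic data; the enum below is a hypothetical stand-in, not the `BadPixels` enum the notebook imports.
%% Cell type:code id: tags:
``` python
# Minimal sketch of bit-field counting. FakeBadPixels is a stand-in enum;
# the real flag values come from the BadPixels enum used by the notebook.
import numpy as np
from enum import IntEnum

class FakeBadPixels(IntEnum):
    OFFSET_OUT_OF_THRESHOLD = 1 << 0
    NOISE_OUT_OF_THRESHOLD = 1 << 1
    OFFSET_NOISE_EVAL_ERROR = 1 << 2

badpix = np.zeros((4, 4), dtype=np.uint32)
badpix[0, 0] = FakeBadPixels.OFFSET_OUT_OF_THRESHOLD
badpix[0, 1] = (FakeBadPixels.OFFSET_OUT_OF_THRESHOLD
                | FakeBadPixels.NOISE_OUT_OF_THRESHOLD)  # two reasons, one pixel

total = np.count_nonzero(badpix > 0)  # each bad pixel counted exactly once
per_type = {bit.name: np.count_nonzero(badpix & bit.value)
            for bit in FakeBadPixels}

print(total)     # 2
print(per_type)  # per-type counts sum to 3 > 2, one pixel carries two flags
```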
%% Cell type:code id: tags:
``` python
header = ['Parameter',
          "New constant", "Old constant ",
          "New constant", "Old constant ",
          "New constant", "Old constant "]

for const in ['Offset', 'Noise']:
    table = [['', 'High gain', 'High gain', 'Medium gain', 'Medium gain', 'Low gain', 'Low gain']]
    for cap in res:
        for qm in res[cap]:
            data = np.copy(res[cap][qm][const])
            data[res[cap][qm]['BadPixelsDark'] > 0] = np.nan

            if old_const[cap][qm][const] is not None and old_const[cap][qm]['BadPixelsDark'] is not None:
                dataold = np.copy(old_const[cap][qm][const])
                dataold[old_const[cap][qm]['BadPixelsDark'] > 0] = np.nan

            f_list = [np.nanmedian, np.nanmean, np.nanstd, np.nanmin, np.nanmax]
            n_list = ['Median', 'Mean', 'Std', 'Min', 'Max']

            for i, f in enumerate(f_list):
                line = [n_list[i]]
                for gain in range(3):
                    line.append('{:6.1f}'.format(f(data[..., gain])))
                    if old_const[cap][qm][const] is not None and old_const[cap][qm]['BadPixelsDark'] is not None:
                        line.append('{:6.1f}'.format(f(dataold[..., gain])))
                    else:
                        line.append('-')
                table.append(line)

    display(Markdown('### {} [ADU], good pixels only ###'.format(const)))
    md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=header)))
```
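%% Cell type:markdown id: tags:
The statistics above exclude bad pixels by overwriting them with `np.nan` and then using the NaN-aware reducers (`np.nanmedian`, `np.nanmean`, ...), which skip NaN entries. A minimal sketch of that masking pattern on made-up numbers:
%% Cell type:code id: tags:
``` python
# Minimal sketch: NaN-mask bad pixels, then use NaN-aware statistics.
import numpy as np

values = np.array([100.0, 101.0, 5000.0, 99.0])  # one wildly-off pixel
badpix = np.array([0, 0, 1, 0])                  # non-zero marks a bad pixel

good = values.copy()
good[badpix > 0] = np.nan

print(np.mean(values))   # 1325.0 -- skewed by the bad pixel
print(np.nanmean(good))  # 100.0  -- bad pixel ignored
```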