Commit 56a9d056 authored by Thomas Kluyver's avatar Thomas Kluyver
Allow for CalCat-style constant files in shared summary NB

parent ad0d30fc
1 merge request: !1133 [LPD] [Dark] Inject constants using new CalCat API
%% Cell type:code id: tags:
``` python
# Author: European XFEL Detector Group, Version: 1.0
# Summary of processed dark calibration constants and a comparison with previously injected constants.
out_folder = "/gpfs/exfel/data/scratch/kluyvert/lpd-dark-p900320-r26_27_28" # path to output to, required
metadata_folder = "" # Directory containing calibration_metadata.yml when run by xfel-calibrate
karabo_id = "FXE_DET_LPD1M-1" # detector instance
gain_names = ['High gain', 'Medium gain', 'Low gain'] # a list of gain names to be used in plotting
threshold_names = ['HG-MG threshold', 'MG_LG threshold'] # a list of threshold names to be used in plotting
local_output = True # Boolean indicating that local constants were stored in the out_folder
# Skip the whole notebook if local_output was set to False in the preceding notebooks.
if not local_output:
print('No local constants saved. Skipping summary plots')
import sys
sys.exit(0)
```
%% Cell type:code id: tags:
``` python
import warnings
from pathlib import Path
warnings.filterwarnings('ignore')
import h5py
import matplotlib
import numpy as np
import pasha as psh
import yaml
from IPython.display import Latex, Markdown, display
matplotlib.use("agg")
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
%matplotlib inline
import extra_geom
import tabulate
from cal_tools import step_timing
from cal_tools.ana_tools import get_range
from cal_tools.enums import BadPixels
from cal_tools.plotting import agipd_single_module_geometry, show_processed_modules
from cal_tools.tools import CalibrationMetadata, module_index_to_qm
from XFELDetAna.plotting.simpleplot import simplePlot
```
%% Cell type:code id: tags:
``` python
def bp_entry(bp):
return [f"{bp.name:<30s}", f"{bp.value:032b}", f"{int(bp.value)}"]
```
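%% Cell type:markdown id: tags:
As a purely illustrative example, the helper above turns a single `BadPixels` flag into a table row of padded name, 32-bit binary representation and integer value:
%% Cell type:code id: tags:
``` python
# Illustrative only: one row as produced by bp_entry() for a single flag.
print(bp_entry(BadPixels.OFFSET_OUT_OF_THRESHOLD))
```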
%% Cell type:code id: tags:
``` python
if "AGIPD" in karabo_id:
module_shape = (512, 128)
if "AGIPD1M" in karabo_id:
nmods = 16
elif "AGIPD500K" in karabo_id:
nmods = 8
else:
nmods = 1
# This list must keep this order: whether the detector was in adaptive or fixed
# gain mode is decided later from the condition stored with the Offset constant.
expected_constants = ['Offset', 'Noise', 'ThresholdsDark', 'BadPixelsDark']
table = []
badpixels = [
BadPixels.OFFSET_OUT_OF_THRESHOLD,
BadPixels.NOISE_OUT_OF_THRESHOLD,
BadPixels.OFFSET_NOISE_EVAL_ERROR,
BadPixels.GAIN_THRESHOLDING_ERROR,
]
for bp in badpixels:
table.append(bp_entry(bp))
display(Markdown("""
# Summary of AGIPD dark characterization #
The following report shows a set of dark images taken with the AGIPD detector to deduce detector offsets,
noise, bad-pixel maps and thresholding. All four types of constants are evaluated per-pixel and per-memory cell.
**The offset** ($O$) is defined as the median ($M$) of the dark signal ($Ds$) over trains ($t$) for a given pixel
($x,y$) and memory cell ($c$).
**The noise** $N$ is the standard deviation $\sigma$ of the dark signal.
$$ O_{x,y,c} = M(Ds)_{t} ,\,\,\,\,\,\, N_{x,y,c} = \sigma(Ds)_{t}$$
**The bad pixel** mask is encoded as a bit mask."""))
display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=["Name", "bit value", "integer value"])))
display(Markdown("""
**"OFFSET_OUT_OF_THRESHOLD":**
Offset outside of bounds:
$$M(O)_{x,y} - \sigma(O)_{x,y} * \mathrm{thresholds\_offset\_sigma} < O < M(O)_{x,y} + \sigma(O)_{x,y} * \mathrm{thresholds\_offset\_sigma} $$
or offset outside of hard limits
$$ \mathrm{thresholds\_offset\_hard}_\mathrm{low} < O < \mathrm{thresholds\_offset\_hard}_\mathrm{high} $$
**"NOISE_OUT_OF_THRESHOLD":**
Noise outside of bounds:
$$M(N)_{x,y} - \sigma(N)_{x,y} * \mathrm{thresholds\_noise\_sigma} < N < M(N)_{x,y} + \sigma(N)_{x,y} * \mathrm{thresholds\_noise\_sigma} $$
or noise outside of hard limits
$$\mathrm{thresholds\_noise\_hard}_\mathrm{low} < N < \mathrm{thresholds\_noise\_hard}_\mathrm{high} $$
**"OFFSET_NOISE_EVAL_ERROR":**
Offset and/or noise evaluation failed (resulting in $NaN$ values).
Values: $\mathrm{thresholds\_offset\_sigma}$, $\mathrm{thresholds\_offset\_hard}$, $\mathrm{thresholds\_noise\_sigma}$, $\mathrm{thresholds\_noise\_hard}$ are given as parameters.
**"GAIN_THRESHOLDING_ERROR":**
Pixels with poorly separated gain stages, i.e. with a sigma separation below gain_separation_sigma_threshold:
$$ sigma\_separation = \\frac{\mathrm{gain\_offset} - \mathrm{previous\_gain\_offset}}{\sqrt{\mathrm{gain\_offset_{std}}^\mathrm{2} + \mathrm{previous\_gain\_offset_{std}}^\mathrm{2}}}$$
$$ Bad\_separation = sigma\_separation < \mathrm{gain\_separation\_sigma\_threshold} $$
"""))
elif "LPD" in karabo_id:
nmods = 16
module_shape = (256, 256)
expected_constants = ['Offset', 'Noise', 'BadPixelsDark']
table = []
badpixels = [
BadPixels.OFFSET_OUT_OF_THRESHOLD,
BadPixels.NOISE_OUT_OF_THRESHOLD,
BadPixels.OFFSET_NOISE_EVAL_ERROR,
]
for bp in badpixels:
table.append(bp_entry(bp))
display(Markdown("""
# Summary of LPD dark characterization #
The following report shows a set of dark images taken with the LPD detector to deduce detector offsets, noise and bad-pixel maps. All three types of constants are evaluated per-pixel and per-memory cell.
**The offset** ($O$) is defined as the median ($M$) of the dark signal ($Ds$) over trains ($t$) for a given pixel ($x,y$) and memory cell ($c$).
**The noise** $N$ is the standard deviation $\sigma$ of the dark signal.
$$ O_{x,y,c} = M(Ds)_{t} ,\,\,\,\,\,\, N_{x,y,c} = \sigma(Ds)_{t}$$
**The bad pixel** mask is encoded as a bit mask."""))
display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=["Name", "bit value", "integer value"])))
display(Markdown("""
**"OFFSET_OUT_OF_THRESHOLD":**
Offset outside of bounds:
$$M(O)_{x,y} - \sigma(O)_{x,y} * \mathrm{thresholds\_offset\_sigma} < O < M(O)_{x,y} + \sigma(O)_{x,y} * \mathrm{thresholds\_offset\_sigma} $$
or offset outside of hard limits
$$ \mathrm{thresholds\_offset\_hard}_\mathrm{low} < O < \mathrm{thresholds\_offset\_hard}_\mathrm{high} $$
**"NOISE_OUT_OF_THRESHOLD":**
Noise outside of bounds:
$$M(N)_{x,y} - \sigma(N)_{x,y} * \mathrm{thresholds\_noise\_sigma} < N < M(N)_{x,y} + \sigma(N)_{x,y} * \mathrm{thresholds\_noise\_sigma} $$
or noise outside of hard limits
$$\mathrm{thresholds\_noise\_hard}_\mathrm{low} < N < \mathrm{thresholds\_noise\_hard}_\mathrm{high} $$
**"OFFSET_NOISE_EVAL_ERROR":**
Offset and/or noise evaluation failed (resulting in $NaN$ values).
Values: $\mathrm{thresholds\_offset\_sigma}$, $\mathrm{thresholds\_offset\_hard}$, $\mathrm{thresholds\_noise\_sigma}$, $\mathrm{thresholds\_noise\_hard}$ are given as parameters.
"""))
elif "DSSC" in karabo_id:
nmods = 16
module_shape = (128, 512)
expected_constants = ['Offset', 'Noise']
display(Markdown("""
# Summary of DSSC dark characterization #
"""))
```
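%% Cell type:markdown id: tags:
The bad-pixel constants summarised below store these flags as a combined bit mask per pixel and memory cell. As a purely illustrative sketch (using the `BadPixels` enum imported above), such a value can be decoded back into individual flags like this:
%% Cell type:code id: tags:
``` python
# Illustrative only: decode a combined bad-pixel bit mask into its flag names.
example_value = (BadPixels.OFFSET_OUT_OF_THRESHOLD.value
                 | BadPixels.NOISE_OUT_OF_THRESHOLD.value)
set_flags = [bp.name for bp in BadPixels if bp.value & example_value]
print(f"{example_value} -> {set_flags}")
```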
%% Cell type:code id: tags:
``` python
step_timer = step_timing.StepTimer()
out_folder = Path(out_folder)
metadata = CalibrationMetadata(metadata_folder or out_folder)
mod_mapping = metadata.setdefault("modules-mapping", {})
old_constant_metadata = {}
for fn in out_folder.glob("module_metadata_*.yml"):
with fn.open("r") as fd:
fdict = yaml.safe_load(fd)
module = fdict["module"]
mod_mapping[module] = fdict["pdu"]
old_constant_metadata[module] = fdict["old-constants"]
metadata.save()
```
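%% Cell type:markdown id: tags:
For reference, the cell above expects each `module_metadata_*.yml` file to carry the module name, its physical detector unit (PDU) and metadata about previously injected constants. A hypothetical example of the structure returned by `yaml.safe_load` (all values made up; only the keys read in this notebook are shown):
%% Cell type:code id: tags:
``` python
# Hypothetical example of one loaded module_metadata_*.yml file.
example_fdict = {
    "module": "Q1M1",                       # module name (made-up value)
    "pdu": "SOME_PHYSICAL_DETECTOR_UNIT",   # physical detector unit (made-up)
    "old-constants": {                      # previous constants, per constant name
        "Offset": {
            "filepath": "/path/to/previous/const.h5",  # made-up path
            "h5path": "group/within/file",             # made-up HDF5 path
        },
    },
}
```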
%% Cell type:code id: tags:
``` python
# In AGIPD fixed gain mode, ThresholdsDark is not expected
if 'AGIPD' in karabo_id:
for i in range(nmods):
qm = module_index_to_qm(i)
if not mod_mapping.get(qm):
continue
mod_pdu = mod_mapping[qm]
fpath = out_folder / f"const_Offset_{mod_pdu}.h5"
if not fpath.exists():
continue
with h5py.File(fpath, 'r') as f:
if 'Gain mode' in f['condition']:
if f["condition"]["Gain mode"]["value"][()]:
expected_constants.remove("ThresholdsDark")
break
```
%% Cell type:markdown id: tags:
Prepare the newly injected constants, and the corresponding previous constants, from the local files produced in out_folder.
%% Cell type:code id: tags:
``` python
step_timer.start()
# Get shape, dtype, and number of files for each constant.
# Also build lists of the files involved, to be loaded in parallel in a later cell.
const_shape_and_dtype = {}
found_module_nums = set()
pieces_to_load = []
pieces_to_load_prev = []
for cname in expected_constants:
for i in range(nmods):
qm = module_index_to_qm(i)
if not mod_mapping.get(qm):
continue
mod_pdu = mod_mapping[qm]
fpath = out_folder / f"const_{cname}_{mod_pdu}.h5"
if not fpath.exists():
continue
# Newer write_ccv code uses this (also for constants in DB);
# older save_const_to_h5 puts 'data' in the root of the file
# (see the layout sketch after this cell).
h5path_maybe = f"{mod_pdu}/{cname}/0"
pieces_to_load.append((cname, i, fpath, h5path_maybe))
found_module_nums.add(i)
# try finding old constants using paths from CalCat store
if qm not in old_constant_metadata:
continue
qm_mdata = old_constant_metadata[qm]
if cname not in qm_mdata:
continue
fpath_prev = qm_mdata[cname]["filepath"]
h5path_prev = qm_mdata[cname]["h5path"]
if fpath_prev and h5path_prev:
pieces_to_load_prev.append((cname, i, fpath_prev, h5path_prev))
# Get the constant shape from one of the module files
with h5py.File(fpath, 'r') as f:
if 'data' in f:
dset = f['data']
else:
dset = f[h5path_maybe]['data']
const_shape_and_dtype[cname] = (dset.shape, dset.dtype)
# Allocate arrays for these constants (without space for missing modules)
nmods_found = len(found_module_nums)
constants = {
cname: psh.alloc((nmods_found,) + module_const_shape, dtype=dt, fill=0)
for cname, (module_const_shape, dt) in const_shape_and_dtype.items()
}
prev_const = {
cname: psh.alloc((nmods_found,) + module_const_shape, dtype=dt, fill=0)
for cname, (module_const_shape, dt) in const_shape_and_dtype.items()
}
step_timer.done_step("Preparing arrays for old and new constants.")
```
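%% Cell type:markdown id: tags:
As noted in the comments above, two file layouts are supported: the older `save_const_to_h5` layout with `data` at the root of the file, and the newer CalCat-style layout written by `write_ccv`, where the dataset sits under `{pdu}/{constant}/0/data`. A minimal sketch (mirroring the detection logic used in these cells) for telling them apart:
%% Cell type:code id: tags:
``` python
# Illustrative sketch: report which of the two supported layouts a constant
# file uses, following the same check as the loading code above and below.
def describe_layout(fpath, mod_pdu, cname):
    with h5py.File(fpath, 'r') as f:
        if 'data' in f:
            return "legacy save_const_to_h5 layout: /data"
        return f"CalCat-style write_ccv layout: /{mod_pdu}/{cname}/0/data"
```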
%% Cell type:code id: tags:
``` python
step_timer.start()
# Load the constant data in parallel
found_module_nums = sorted(found_module_nums)
mod_names = [module_index_to_qm(n) for n in found_module_nums]
def load_piece(wid, ix, entry):
cname, mod_no, fpath, h5path_maybe = entry
mod_ix = found_module_nums.index(mod_no)
with h5py.File(fpath, 'r') as f:
if 'data' in f:
dset = f['data']
else:
dset = f[h5path_maybe]['data']
dset.read_direct(constants[cname][mod_ix])
psh.map(load_piece, pieces_to_load)
print(f"Loaded constant data from {len(pieces_to_load)} files")
def load_piece_prev(wid, ix, entry):
cname, mod_no, fpath, h5path = entry
mod_ix = found_module_nums.index(mod_no)
with h5py.File(fpath, 'r') as f:
f[h5path]['data'].read_direct(prev_const[cname][mod_ix])
psh.map(load_piece_prev, pieces_to_load_prev)
print(f"Loaded previous constant data from {len(pieces_to_load_prev)} files")
step_timer.done_step("Loading constants data.")
```
%% Cell type:code id: tags:
``` python
display(Markdown('## Processed modules'))
show_processed_modules(karabo_id, constants, mod_names, mode="processed")
```
%% Cell type:markdown id: tags:
## Summary figures across Modules ##
The following plots give an overview of the calibration constants averaged across memory cells, together with the difference from the previously injected constants.
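The lower panels of each figure show this difference both in absolute ADU and as a relative difference
$$ \Delta_{\%} = 100 \cdot \frac{C_{\mathrm{new}} - C_{\mathrm{prev}}}{C_{\mathrm{new}}}. $$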
%% Cell type:code id: tags:
``` python
if "LPD" in karabo_id:
geom = extra_geom.LPD_1MGeometry.from_quad_positions(
quad_pos=[
(11.4, 299),
(-11.5, 8),
(254.5, -16),
(278.5, 275)])
elif "AGIPD1M" in karabo_id:
geom = extra_geom.AGIPD_1MGeometry.from_quad_positions(
quad_pos=[
(-525, 625),
(-550, -10),
(520, -160),
(542.5, 475)])
elif "AGIPD500K" in karabo_id:
geom = extra_geom.AGIPD_500K2GGeometry.from_origin()
elif "AGIPD" in karabo_id: # single module detector
geom = agipd_single_module_geometry()
elif "DSSC" in karabo_id:
quadpos = [(-130, 5), (-130, -125), (5, -125), (5, 5)]
geom = extra_geom.DSSC_1MGeometry.from_quad_positions(quadpos)
```
%% Cell type:code id: tags:
``` python
def plot_const_and_delta(const, delta, const_name, gain_name):
gs = gridspec.GridSpec(2, 2)
fig = plt.figure(figsize=(24, 32))
ax0 = fig.add_subplot(gs[0, :])
vmin, vmax = get_range(const, 2)
geom.plot_data_fast(
const, vmin=vmin, vmax=vmax, ax=ax0, colorbar={
'shrink': 0.9, 'pad': 0.05, 'label': 'ADUs'
})
ax0.set_title(f"{const_name} - {gain_name}", fontsize=15)
if np.count_nonzero(delta) == np.count_nonzero(np.isnan(delta)):
fig.text(0.5, 0.4, "No difference from previous constant",
ha='center', va='center', fontsize=15)
return
# Plot delta from previous constant
ax1 = fig.add_subplot(gs[1, 0])
vmin, vmax = get_range(delta, 2)
vmax = max(vmax, abs(vmin)) # Center around zero
geom.plot_data_fast(
delta, vmin=-vmax, vmax=vmax, ax=ax1, colorbar={
'shrink': 0.6, 'pad': 0.1, 'label': 'ADUs'
})
ax1.set_title(f"Difference with previous {const_name} - {gain_name}", fontsize=15)
# Plot % delta from previous constant
delta_pct = delta / const * 100
ax2 = fig.add_subplot(gs[1, 1])
vmin, vmax = get_range(delta_pct, 2)
vmax = max(vmax, abs(vmin)) # Center around zero
geom.plot_data_fast(
delta_pct, vmin=-vmax, vmax=vmax, ax=ax2, colorbar={
'shrink': 0.6, 'pad': 0.1, 'label': '%'
})
ax2.set_title("Percentage difference", fontsize=15)
```
%% Cell type:code id: tags:
``` python
psh_ctx = psh.ProcessContext(nmods)
```
%% Cell type:code id: tags:
``` python
step_timer.start()
gainstages = 1
for const_name, const in constants.items():
if const_name == 'BadPixelsDark':
continue
# Check whether the constant has a gain dimension (e.g. AGIPD, LPD)
if len(const.shape) == 5:
gainstages = 3
else:
gainstages = 1
display(Markdown(f'##### {const_name}'))
print_once = True
for gain in range(gainstages):
if const_name == 'ThresholdsDark':
if gain > 1:
continue
glabel = threshold_names[gain]
else:
glabel = gain_names[gain]
stacked_const = psh_ctx.alloc((nmods,) + module_shape, dtype=np.float64, fill=0)
stacked_delta = psh_ctx.alloc((nmods,) + module_shape, dtype=np.float64, fill=0)
def average_module(wid, i, _):
qm = module_index_to_qm(i)
if qm in mod_names:
m_idx = mod_names.index(qm)
# Constants with 5 dimensions (e.g. AGIPD, LPD) have a gain axis
if const.ndim == 5:
values = np.nanmean(const[m_idx, :, :, :, gain], axis=2)
prev_val = np.nanmean(prev_const[const_name][m_idx, :, :, :, gain], axis=2)
else:
values = np.nanmean(const[m_idx, :, :, :], axis=2)
prev_val = np.nanmean(prev_const[const_name][m_idx, :, :, :], axis=2)
values[values == 0] = np.nan
prev_val[prev_val == 0] = np.nan
stacked_const[i] = np.moveaxis(values, 0, -1)
stacked_delta[i] = np.moveaxis(values - prev_val, 0, -1)
else:
# If the module is not available, fill its space with NaN
stacked_const[i] = np.nan
psh_ctx.map(average_module, range(nmods))
# Plot the constant over all modules.
display(Markdown(f'###### {glabel} ######'))
plot_const_and_delta(stacked_const, stacked_delta, const_name, glabel)
plt.show()
step_timer.done_step("Plotting constants and relative differences.")
```
%% Cell type:code id: tags:
``` python
# Loop over modules and constants
step_timer.start()
for const_name, const in constants.items():
if const_name == 'BadPixelsDark':
continue # Displayed separately below
display(Markdown(f'### Summary across Modules - {const_name}'))
for gain in range(gainstages):
if const_name == 'ThresholdsDark':
if gain == 2:
continue
glabel = threshold_names[gain]
else:
glabel = gain_names[gain]
if const.ndim == 5:
data = const[:, :, :, :, gain]
else:
data = const
# Bad pixels are per gain stage, and you need thresholds to pick the gain
# stage, so we don't mask the thresholds.
if ('BadPixelsDark' in constants) and (const_name != 'ThresholdsDark'):
label = f'{const_name} value [ADU], good pixels only'
if const.ndim == 5:
goodpix = constants['BadPixelsDark'][:, :, :, :, gain] == 0
else:
goodpix = constants['BadPixelsDark'] == 0
else:
label = f'{const_name} value [ADU], good and bad pixels'
goodpix = [True] * data.shape[0]
# Reduce data in parallel (one worker per module):
datamean = psh_ctx.alloc(data.shape[:1] + data.shape[3:], data.dtype)
datastd = psh_ctx.alloc(data.shape[:1] + data.shape[3:], data.dtype)
def average_mem_cells(wid, i, _):
datamean[i] = np.mean(data[i], axis=(0, 1), where=goodpix[i])
datastd[i] = np.std(data[i], axis=(0, 1), where=goodpix[i])
psh_ctx.map(average_mem_cells, range(data.shape[0]))
fig = plt.figure(figsize=(15, 6), tight_layout={
'pad': 0.2, 'w_pad': 1.3, 'h_pad': 1.3})
ax = fig.add_subplot(121)
d = []
for im, mod in enumerate(datamean):
d.append({'x': np.arange(mod.shape[0]),
'y': mod,
'drawstyle': 'steps-pre',
'label': mod_names[im],
})
_ = simplePlot(d, figsize=(10, 10), xrange=(-12, 510),
x_label='Memory Cell ID',
y_label=label,
use_axis=ax,
title=glabel,
title_position=[0.5, 1.18],
legend='outside-top-ncol6-frame', legend_size='18%',
legend_pad=0.00)
# Plot standard deviation
ax = fig.add_subplot(122)
if "BadPixelsDark" in constants.keys():
label = f'$\sigma$ {const_name} [ADU], good pixels only'
else:
label = f'$\sigma$ {const_name} [ADU], good and bad pixels'
d = []
for im, mod in enumerate(datastd):
d.append({'x': np.arange(mod.shape[0]),
'y': mod,
'drawstyle': 'steps-pre',
'label': mod_names[im],
})
_ = simplePlot(d, figsize=(10, 10), xrange=(-12, 510),
x_label='Memory Cell ID',
y_label=label,
use_axis=ax,
title=f'{glabel} $\sigma$',
title_position=[0.5, 1.18],
legend='outside-top-ncol6-frame', legend_size='18%',
legend_pad=0.00)
plt.show()
step_timer.done_step("Plotting summary across modules.")
```
%% Cell type:code id: tags:
``` python
if 'BadPixelsDark' in constants:
step_timer.start()
display(Markdown(f'### Summary across Modules - BadPixelsDark'))
bad_px_dark = constants['BadPixelsDark']
for gain in range(gainstages):
glabel = gain_names[gain]
if bad_px_dark.ndim == 5:
data = bad_px_dark[:, :, :, :, gain]
else:
data = bad_px_dark[:, :, :, :]
bad_px_per_cell = np.count_nonzero(data, axis=(1, 2))
fraction_bad_per_cell = bad_px_per_cell / (data.shape[1] * data.shape[2])
fraction_bad_per_cell[fraction_bad_per_cell == 1.0] = np.nan
fig = plt.figure(figsize=(15, 6), tight_layout={
'pad': 0.2, 'w_pad': 1.3, 'h_pad': 1.3})
ax = fig.add_subplot(1, 1, 1)
d = []
for im, mod in enumerate(fraction_bad_per_cell):
d.append({'x': np.arange(mod.shape[0]),
'y': mod,
'drawstyle': 'steps-pre',
'label': mod_names[im],
})
_ = simplePlot(d, figsize=(10, 10), xrange=(-12, 510),
x_label='Memory Cell ID',
y_label='Fraction of bad pixels',
use_axis=ax,
title=glabel,
title_position=[0.5, 1.18],
legend='outside-top-ncol6-frame', legend_size='18%',
legend_pad=0.00)
plt.show()
step_timer.done_step("Summary across modules for BadPixels.")
```
%% Cell type:markdown id: tags:
## Summary tables across Modules ##
Tables show values averaged across all pixels and memory cells of a given detector module.
%% Cell type:code id: tags:
``` python
# Let LaTeX markup in table cells (e.g. $\pm$ below) pass through tabulate unescaped.
if u'$' in tabulate.LATEX_ESCAPE_RULES:
del tabulate.LATEX_ESCAPE_RULES[u'$']
if u'\\' in tabulate.LATEX_ESCAPE_RULES:
del tabulate.LATEX_ESCAPE_RULES[u'\\']
```
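%% Cell type:markdown id: tags:
A quick, purely illustrative check that LaTeX math such as `$\pm$` now passes through `tabulate` unescaped (the module name and values here are made up):
%% Cell type:code id: tags:
``` python
# Illustrative only: with '$' and '\' removed from the escape rules,
# LaTeX math in table cells is kept verbatim in the generated LaTeX table.
print(tabulate.tabulate([['Q1M1', '100.0 $\\pm$ 5.0']],
                        tablefmt='latex', headers=['Module', 'Offset [ADU]']))
```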
%% Cell type:code id: tags:
``` python
step_timer.start()
head = ['Module', 'High gain', 'Medium gain', 'Low gain']
head_th = ['Module', 'HG_MG threshold', 'MG_LG threshold']
for const_name, const in constants.items():
if const_name == 'BadPixelsDark':
continue # Handled below
if const.ndim == 4: # Add gain dimension if not present
const = const[:, :, :, :, np.newaxis]
if ('BadPixelsDark' in constants) and (const_name != 'ThresholdsDark'):
goodpix = constants['BadPixelsDark'] == 0
if goodpix.ndim == 4:
goodpix = goodpix[:, :, :, :, np.newaxis]
label = f'### Average {const_name} [ADU], good pixels only'
else:
goodpix = [True] * const.shape[0]
label = f'### Average {const_name} [ADU], good and bad pixels'
# Reduce data in parallel (one worker per module)
mean_by_module_gain = psh.alloc(const.shape[:1] + const.shape[4:], const.dtype)
std_by_module_gain = psh.alloc(const.shape[:1] + const.shape[4:], const.dtype)
def average_module(wid, i, _):
mean_by_module_gain[i] = np.mean(const[i], axis=(0, 1, 2), where=goodpix[i])
std_by_module_gain[i] = np.std(const[i], axis=(0, 1, 2), where=goodpix[i])
psh_ctx.map(average_module, range(const.shape[0]))
table = []
for i_mod, mod in enumerate(mod_names):
t_line = [mod]
for gain in range(gainstages):
if const_name == 'ThresholdsDark' and gain == 2:
continue
datamean = mean_by_module_gain[i_mod, gain]
datastd = std_by_module_gain[i_mod, gain]
t_line.append(f'{datamean:6.1f} $\\pm$ {datastd:6.1f}')
table.append(t_line)
display(Markdown(label))
header = head_th if const_name == 'ThresholdsDark' else head
md = display(Latex(tabulate.tabulate(
table, tablefmt='latex', headers=header)))
step_timer.done_step("Summary tables across modules.")
```
%% Cell type:code id: tags:
``` python
# Bad pixels summary table
if 'BadPixelsDark' in constants:
step_timer.start()
bad_px_dark = constants['BadPixelsDark']
table = []
for i_mod, mod in enumerate(mod_names):
t_line = [mod]
for gain in range(gainstages):
if bad_px_dark.ndim == 5:
data = bad_px_dark[i_mod, :, :, :, gain]
else:
data = bad_px_dark[i_mod]
datasum = np.count_nonzero(data)
datamean = datasum / data.size
t_line.append(f'{datasum:6.0f} ({datamean:6.3f}) ')
label = '## Number (fraction) of bad pixels'
table.append(t_line)
display(Markdown(label))
md = display(Latex(tabulate.tabulate(
table, tablefmt='latex', headers=head)))
step_timer.done_step("Summary table across modules for BadPixels.")
```