{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Author: European XFEL Detector Group, Version: 1.0\n",
"\n",
"# Summary for processed of dark calibration constants and a comparison with previous injected constants.\n",
"out_folder = \"/gpfs/exfel/data/scratch/kluyvert/lpd-dark-p900320-r26_27_28\" # path to output to, required\n",
"metadata_folder = \"\" # Directory containing calibration_metadata.yml when run by xfel-calibrate\n",
"karabo_id = \"FXE_DET_LPD1M-1\" # detector instance\n",
"gain_names = ['High gain', 'Medium gain', 'Low gain'] # a list of gain names to be used in plotting\n",
"threshold_names = ['HG-MG threshold', 'MG_LG threshold'] # a list of gain names to be used in plotting\n",
"local_output = True # Boolean indicating that local constants were stored in the out_folder\n",
"\n",
"# Skip the whole notebook if local_output is false in the preceding notebooks.\n",
"if not local_output:\n",
" print('No local constants saved. Skipping summary plots')\n",
" import sys\n",
" sys.exit(0)"
]
},
{
"cell_type": "code",
"execution_count": null,
"import copy\n",
"import os\n",
"import warnings\n",
"from collections import OrderedDict\n",
"from pathlib import Path\n",
"warnings.filterwarnings('ignore')\n",
"\n",
"import glob\n",
"import h5py\n",
"import matplotlib\n",
"import yaml\n",
"from IPython.display import Latex, Markdown, display\n",
"\n",
"matplotlib.use(\"agg\")\n",
"import matplotlib.gridspec as gridspec\n",
"import matplotlib.pyplot as plt\n",
"import tabulate\n",
"from cal_tools.ana_tools import get_range\n",
"from cal_tools.plotting import show_processed_modules\n",
"from cal_tools.tools import CalibrationMetadata, module_index_to_qm\n",
"from XFELDetAna.plotting.simpleplot import simplePlot"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"if \"AGIPD\" in karabo_id:\n",
" if \"SPB\" in karabo_id:\n",
" dinstance = \"AGIPD1M1\"\n",
" elif \"MID\" in karabo_id:\n",
" dinstance = \"AGIPD1M2\"\n",
" # This list needs to be in that order as later Adaptive or fixed gain is\n",
" # decided based on the condition for the Offset constant.\n",
" expected_constants = ['Offset', 'Noise', 'ThresholdsDark', 'BadPixelsDark']\n",
" display(Markdown(\"\"\"\n",
" \n",
"# Summary of AGIPD dark characterization #\n",
"\n",
"The following report shows a set of dark images taken with the AGIPD detector to deduce detector offsets, noise, bad-pixel maps and thresholding. All four types of constants are evaluated per-pixel and per-memory cell.\n",
"\n",
"\n",
"**The offset** ($O$) is defined as the median ($M$) of the dark signal ($Ds$) over trains ($t$) for a given pixel ($x,y$) and memory cell ($c$). \n",
"\n",
"**The noise** $N$ is the standard deviation $\\sigma$ of the dark signal.\n",
"\n",
"$$ O_{x,y,c} = M(Ds)_{t} ,\\,\\,\\,\\,\\,\\, N_{x,y,c} = \\sigma(Ds)_{t}$$\n",
"\n",
"**The bad pixel** mask is encoded as a bit mask.\n",
"\n",
"**\"OFFSET_OUT_OF_THRESHOLD\":**\n",
"\n",
"Offset outside of bounds:\n",
"\n",
"$$M(O)_{x,y} - \\sigma(O)_{x,y} * \\mathrm{thresholds\\_offset\\_sigma} < O < M(O)_{x,y} + \\sigma(O)_{x,y} * \\mathrm{thresholds\\_offset\\_sigma} $$\n",
"\n",
"or offset outside of hard limits\n",
"\n",
"$$ \\mathrm{thresholds\\_offset\\_hard}_\\mathrm{low} < O < \\mathrm{thresholds\\_offset\\_hard}_\\mathrm{high} $$\n",
"\n",
"**\"NOISE_OUT_OF_THRESHOLD\":**\n",
"\n",
"Noise outside of bounds:\n",
"\n",
"$$M(N)_{x,y} - \\sigma(N)_{x,y} * \\mathrm{thresholds\\_noise\\_sigma} < N < M(N)_{x,y} + \\sigma(N)_{x,y} * \\mathrm{thresholds\\_noise\\_sigma} $$\n",
"\n",
"or noise outside of hard limits\n",
"\n",
"$$\\mathrm{thresholds\\_noise\\_hard}_\\mathrm{low} < N < \\mathrm{thresholds\\_noise\\_hard}_\\mathrm{high} $$\n",
"\n",
"**\"OFFSET_NOISE_EVAL_ERROR\":**\n",
"\n",
"Offset and Noise both not $nan$ values\n",
"Values: $\\mathrm{thresholds\\_offset\\_sigma}$, $\\mathrm{thresholds\\_offset\\_hard}$, $\\mathrm{thresholds\\_noise\\_sigma}$, $\\mathrm{thresholds\\_noise\\_hard}$ are given as parameters.\n",
"\n",
"\"**\\\"GAIN_THRESHOLDING_ERROR\\\":**\n",
"Bad gain separated pixels with sigma separation less than gain_separation_sigma_threshold\n",
"$$ sigma\\_separation = \\\\frac{\\mathrm{gain\\_offset} - \\mathrm{previous\\_gain\\_offset}}{\\sqrt{\\mathrm{gain\\_offset_{std}}^\\mathrm{2} + \\mathrm{previuos\\_gain\\_offset_{std}}^\\mathrm{2}}}$$ \n",
"$$ Bad\\_separation = sigma\\_separation < \\mathrm{gain\\_separation\\_sigma\\_threshold} $$\n",
"\n",
"\"\"\"))\n",
" \n",
"elif \"LPD\" in karabo_id:\n",
" dinstance = \"LPD1M1\"\n",
" expected_constants = ['Offset', 'Noise', 'BadPixelsDark']\n",
" display(Markdown(\"\"\"\n",
" \n",
"# Summary of LPD dark characterization #\n",
"\n",
"The following report shows a set of dark images taken with the LPD detector to deduce detector offsets, noise, bad-pixel maps. All three types of constants are evaluated per-pixel and per-memory cell.\n",
"\n",
"**The offset** ($O$) is defined as the median ($M$) of the dark signal ($Ds$) over trains ($t$) for a given pixel ($x,y$) and memory cell ($c$). \n",
"\n",
"**The noise** $N$ is the standard deviation $\\sigma$ of the dark signal.\n",
"\n",
"$$ O_{x,y,c} = M(Ds)_{t} ,\\,\\,\\,\\,\\,\\, N_{x,y,c} = \\sigma(Ds)_{t}$$\n",
"\n",
"**The bad pixel** mask is encoded as a bit mask.\n",
"\n",
"**\"OFFSET_OUT_OF_THRESHOLD\":**\n",
"\n",
"Offset outside of bounds:\n",
"\n",
"$$M(O)_{x,y} - \\sigma(O)_{x,y} * \\mathrm{thresholds\\_offset\\_sigma} < O < M(O)_{x,y} + \\sigma(O)_{x,y} * \\mathrm{thresholds\\_offset\\_sigma} $$\n",
"\n",
"or offset outside of hard limits\n",
"\n",
"$$ \\mathrm{thresholds\\_offset\\_hard}_\\mathrm{low} < O < \\mathrm{thresholds\\_offset\\_hard}_\\mathrm{high} $$\n",
"\n",
"**\"NOISE_OUT_OF_THRESHOLD\":**\n",
"\n",
"Noise outside of bounds:\n",
"\n",
"$$M(N)_{x,y} - \\sigma(N)_{x,y} * \\mathrm{thresholds\\_noise\\_sigma} < N < M(N)_{x,y} + \\sigma(N)_{x,y} * \\mathrm{thresholds\\_noise\\_sigma} $$\n",
"\n",
"or noise outside of hard limits\n",
"\n",
"$$\\mathrm{thresholds\\_noise\\_hard}_\\mathrm{low} < N < \\mathrm{thresholds\\_noise\\_hard}_\\mathrm{high} $$\n",
"\n",
"**\"OFFSET_NOISE_EVAL_ERROR\":**\n",
"\n",
"Offset and Noise both not $nan$ values \n",
"\n",
"\"Values: $\\\\mathrm{thresholds\\\\_offset\\\\_sigma}$, $\\\\mathrm{thresholds\\\\_offset\\\\_hard}$, $\\\\mathrm{thresholds\\\\_noise\\\\_sigma}$, $\\\\mathrm{thresholds\\\\_noise\\\\_hard}$ are given as parameters.\\n\",\n",
"\"\"\"))\n",
"elif \"DSSC\" in karabo_id:\n",
" dinstance = \"DSSC1M1\"\n",
" expected_constants = ['Offset', 'Noise', 'BadPixelsDark']\n",
" display(Markdown(\"\"\"\n",
" \n",
"# Summary of DSSC dark characterization #\n",
" \n",
" \"\"\"))"
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"out_folder = Path(out_folder)\n",
"metadata = CalibrationMetadata(metadata_folder or out_folder)\n",
"mod_mapping = metadata.setdefault(\"modules-mapping\", {})\n",
"old_constant_metadata = {}\n",
"for fn in out_folder.glob(\"module_metadata_*.yml\"):\n",
" with fn.open(\"r\") as fd:\n",
" fdict = yaml.safe_load(fd)\n",
" module = fdict[\"module\"]\n",
" mod_mapping[module] = fdict[\"pdu\"]\n",
" old_constant_metadata[module] = fdict[\"old-constants\"]\n",
"\n",
"metadata.save()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# In AGIPD fixed gain mode, ThresholdsDark is not expected\n",
"if 'AGIPD' in karabo_id:\n",
" for i in range(nmods):\n",
" qm = module_index_to_qm(i)\n",
" if not mod_mapping.get(qm):\n",
" continue\n",
" mod_pdu = mod_mapping[qm]\n",
" fpath = out_folder / f\"const_Offset_{mod_pdu}.h5\"\n",
" if not fpath.exists():\n",
" continue\n",
"\n",
" with h5py.File(fpath, 'r') as f:\n",
" if 'Gain Mode' in f['condition']:\n",
" if f[\"condition\"][\"Gain Mode\"][\"value\"][()]:\n",
" expected_constants.remove(\"ThresholdsDark\")\n",
" break"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Preparing newly injected and previous constants from produced local folder in out_folder."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Get shape, dtype, and number of files for each constant.\n",
"# Also build lists of the files involved, to be loaded in parallel in a later cell.\n",
"const_shape_and_dtype = {}\n",
"found_module_nums = set()\n",
"pieces_to_load = []\n",
"pieces_to_load_prev = []\n",
"\n",
"for cname in expected_constants:\n",
" for i in range(nmods):\n",
" qm = module_index_to_qm(i)\n",
" if not mod_mapping.get(qm):\n",
" continue\n",
" mod_pdu = mod_mapping[qm]\n",
" fpath = out_folder / f\"const_{cname}_{mod_pdu}.h5\"\n",
" if not fpath.exists():\n",
" \n",
" pieces_to_load.append((cname, i, fpath))\n",
" found_module_nums.add(i)\n",
" # try finding old constants using paths from CalCat store\n",
" if qm not in old_constant_metadata:\n",
" continue\n",
" qm_mdata = old_constant_metadata[qm]\n",
" if cname not in qm_mdata:\n",
" continue\n",
" fpath_prev = qm_mdata[cname][\"filepath\"]\n",
" h5path_prev = qm_mdata[cname][\"h5path\"]\n",
" if fpath_prev and h5path_prev:\n",
" pieces_to_load_prev.append((cname, i, fpath_prev, h5path_prev))\n",
" \n",
" # Get the constant shape from one of the module files\n",
" with h5py.File(fpath, 'r') as f:\n",
" const_shape_and_dtype[cname] = (f['data'].shape, f['data'].dtype)\n",
" \n",
"# Allocate arrays for these constants (without space for missing modules)\n",
"nmods_found = len(found_module_nums)\n",
"constants = {\n",
" cname: psh.alloc((nmods_found,) + module_const_shape, dtype=dt, fill=0)\n",
" for cname, (module_const_shape, dt) in const_shape_and_dtype.items()\n",
"}\n",
"prev_const = {\n",
" cname: psh.alloc((nmods_found,) + module_const_shape, dtype=dt, fill=0)\n",
" for cname, (module_const_shape, dt) in const_shape_and_dtype.items()\n",
"}"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Load the constant data in parallel\n",
"found_module_nums = sorted(found_module_nums)\n",
"mod_names = [module_index_to_qm(n) for n in found_module_nums]\n",
"\n",
"def load_piece(wid, ix, entry):\n",
" cname, mod_no, fpath = entry\n",
" mod_ix = found_module_nums.index(mod_no)\n",
" \n",
" with h5py.File(fpath, 'r') as f:\n",
" f['data'].read_direct(constants[cname][mod_ix])\n",
"\n",
"psh.map(load_piece, pieces_to_load)\n",
"print(f\"Loaded constant data from {len(pieces_to_load)} files\")\n",
"\n",
"def load_piece_prev(wid, ix, entry):\n",
" cname, mod_no, fpath, h5path = entry\n",
" mod_ix = found_module_nums.index(mod_no)\n",
" \n",
" with h5py.File(fpath, 'r') as f:\n",
" f[h5path]['data'].read_direct(prev_const[cname][mod_ix])\n",
"\n",
"psh.map(load_piece_prev, pieces_to_load_prev)\n",
"print(f\"Loaded previous constant data from {len(pieces_to_load_prev)} files\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"display(Markdown('## Processed modules'))\n",
"show_processed_modules(dinstance, constants, mod_names, mode=\"processed\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Summary figures across Modules ##\n",
"\n",
"The following plots give an overview of calibration constants averaged across pixels and memory cells. A bad pixel mask is applied."
]
},
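{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a brief illustrative aside (toy arrays with assumed names, not part of this notebook's data flow), the next cell shows the `where=` masking that the averaging cells further down rely on: flagged pixels are simply left out of the mean instead of biasing it."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Illustrative only: how a boolean good-pixel mask enters an average\n",
"import numpy as np\n",
"\n",
"toy_vals = np.array([[1.0, 2.0], [100.0, 4.0]])\n",
"toy_good = np.array([[True, True], [False, True]])  # False marks a bad pixel\n",
"\n",
"print(np.mean(toy_vals, where=toy_good))  # ~2.33, the 100.0 outlier is ignored\n",
"print(np.mean(toy_vals))                  # 26.75 without the mask"
]
},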
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"if \"LPD\" in dinstance:\n",
" geom = extra_geom.LPD_1MGeometry.from_quad_positions(quad_pos=[(11.4, 299),\n",
" (-11.5, 8),\n",
" (254.5, -16),\n",
" (278.5, 275)])\n",
" module_shape = (256, 256)\n",
"elif dinstance in ('AGIPD1M1', 'AGIPD1M2'):\n",
" geom = extra_geom.AGIPD_1MGeometry.from_quad_positions(quad_pos=[(-525, 625),\n",
" (-550, -10),\n",
" (520, -160),\n",
" (542.5, 475)])\n",
" module_shape = (512, 128)\n",
"elif dinstance == \"AGIPD500K\":\n",
" geom = extra_geom.AGIPD_500K2GGeometry.from_origin()\n",
" module_shape = (512, 128)\n",
"elif \"DSSC\" in dinstance:\n",
" module_shape = (128, 512)\n",
" quadpos = [(-130, 5), (-130, -125), (5, -125), (5, 5)]\n",
" geom = extra_geom.DSSC_1MGeometry.from_quad_positions(quadpos)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def plot_const_and_delta(const, delta, const_name, gain_name):\n",
" gs = gridspec.GridSpec(2, 2)\n",
" fig = plt.figure(figsize=(24, 32))\n",
" \n",
" ax0 = fig.add_subplot(gs[0, :])\n",
" vmin, vmax = get_range(const, 2)\n",
" geom.plot_data_fast(\n",
" const, vmin=vmin, vmax=vmax, ax=ax0, colorbar={\n",
" 'shrink': 0.9, 'pad': 0.05, 'label': 'ADUs'\n",
" })\n",
" ax0.set_title(f\"{const_name} - {gain_name}\", fontsize=15)\n",
" \n",
" if np.count_nonzero(delta) == np.count_nonzero(np.isnan(delta)):\n",
" fig.text(0.5, 0.4, \"No difference from previous constant\",\n",
" ha='center', va='center', fontsize=15)\n",
" return\n",
" \n",
" # Plot delta from previous constant\n",
" ax1 = fig.add_subplot(gs[1, 0])\n",
" vmin, vmax = get_range(delta, 2)\n",
" vmax = max(vmax, abs(vmin)) # Center around zero\n",
" geom.plot_data_fast(\n",
" delta, vmin=-vmax, vmax=vmax, ax=ax1, cmap=\"RdBu\", colorbar={\n",
" 'shrink': 0.6, 'pad': 0.1, 'label': 'ADUs'\n",
" })\n",
" ax1.set_title(f\"Difference with previous {const_name} - {gain_name}\", fontsize=15)\n",
" \n",
" # Plot % delta from previous constant\n",
" delta_pct = delta / const * 100\n",
" ax2 = fig.add_subplot(gs[1, 1])\n",
" vmin, vmax = get_range(delta_pct, 2)\n",
" vmax = max(vmax, abs(vmin)) # Center around zero\n",
" geom.plot_data_fast(\n",
" delta_pct, vmin=-vmax, vmax=vmax, ax=ax2, cmap=\"RdBu\", colorbar={\n",
" 'shrink': 0.6, 'pad': 0.1, 'label': '%'\n",
" })\n",
" ax2.set_title(\"Percentage difference\", fontsize=15)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"psh_ctx = psh.ProcessContext(nmods)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": false
},
"outputs": [],
"source": [
"for const_name, const in constants.items():\n",
" if const_name == 'BadPixelsDark':\n",
" continue\n",
" # Check if constant gain available in constant e.g. AGIPD, LPD\n",
" if len(const.shape) == 5:\n",
" gainstages = 3\n",
" else:\n",
" gainstages = 1\n",
" \n",
" display(Markdown(f'##### {const_name}'))\n",
" print_once = True\n",
" for gain in range(gainstages):\n",
" glabel = threshold_names[gain]\n",
" glabel = gain_names[gain]\n",
"\n",
" stacked_const = psh_ctx.alloc((nmods,) + module_shape, dtype=np.float64, fill=0)\n",
" stacked_delta = psh_ctx.alloc((nmods,) + module_shape, dtype=np.float64, fill=0)\n",
" def average_module(wid, i, _):\n",
" qm = module_index_to_qm(i)\n",
" if qm in mod_names:\n",
" m_idx = mod_names.index(qm)\n",
" # Check if constant shape of 5 indices e.g. AGIPD, LPD\n",
" values = np.nanmean(const[m_idx, :, :, :, gain], axis=2)\n",
" prev_val = np.nanmean(prev_const[const_name][m_idx, :, :, :, gain], axis=2)\n",
" else:\n",
" values = np.nanmean(const[m_idx, :, :, :], axis=2)\n",
" prev_val = np.nanmean(prev_const[const_name][m_idx, :, :, :], axis=2)\n",
" prev_val[prev_val == 0] = np.nan\n",
" stacked_const[i] = np.moveaxis(values, 0, -1)\n",
" stacked_delta[i] = np.moveaxis(values - prev_val, 0, -1)\n",
" # if module not available fill space with nan\n",
" stacked_const[i] = np.nan\n",
"\n",
" psh_ctx.map(average_module, range(nmods))\n",
" \n",
" display(Markdown(f'###### {glabel} ######'))\n",
"\n",
" plot_const_and_delta(stacked_const, stacked_delta, const_name, glabel)\n",
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": false
},
"outputs": [],
"source": [
"# Loop over modules and constants\n",
"for const_name, const in constants.items():\n",
" if const_name == 'BadPixelsDark':\n",
" continue # Displayed separately below\n",
" \n",
" display(Markdown(f'### Summary across Modules - {const_name}'))\n",
" for gain in range(gainstages):\n",
" if const_name == 'ThresholdsDark':\n",
" if gain == 2:\n",
" continue\n",
" glabel = threshold_names[gain]\n",
" glabel = gain_names[gain]\n",
" if const.ndim == 5:\n",
" data = const[:, :, :, :, gain]\n",
" data = const\n",
" \n",
" if \"BadPixelsDark\" in constants.keys():\n",
" label = f'{const_name} value [ADU], good pixels only'\n",
" goodpix = constants['BadPixelsDark'][:, :, :, :, gain] == 0\n",
" goodpix = constants['BadPixelsDark'] == 0\n",
" label = f'{const_name} value [ADU], good and bad pixels'\n",
" goodpix = [True] * data.shape[0]\n",
" # Reduce data in parallel (one worker per module):\n",
" datamean = psh_ctx.alloc(data.shape[:1] + data.shape[3:], data.dtype)\n",
" datastd = psh_ctx.alloc(data.shape[:1] + data.shape[3:], data.dtype)\n",
" \n",
" def average_mem_cells(wid, i, _):\n",
" datamean[i] = np.mean(data[i], axis=(0, 1), where=goodpix[i])\n",
" datastd[i] = np.std(data[i], axis=(0, 1), where=goodpix[i])\n",
" psh_ctx.map(average_mem_cells, range(data.shape[0]))\n",
"\n",
" fig = plt.figure(figsize=(15, 6), tight_layout={\n",
" 'pad': 0.2, 'w_pad': 1.3, 'h_pad': 1.3})\n",
" ax = fig.add_subplot(121)\n",
"\n",
" d = []\n",
" for im, mod in enumerate(datamean):\n",
" d.append({'x': np.arange(mod.shape[0]),\n",
" 'y': mod,\n",
" 'drawstyle': 'steps-pre',\n",
" 'label': mod_names[im],\n",
" })\n",
"\n",
" _ = simplePlot(d, figsize=(10, 10), xrange=(-12, 510),\n",
" x_label='Memory Cell ID',\n",
" y_label=label,\n",
" use_axis=ax,\n",
" title_position=[0.5, 1.18],\n",
" legend='outside-top-ncol6-frame', legend_size='18%',\n",
" legend_pad=0.00)\n",
"\n",
" # Plot standard deviation\n",
" ax = fig.add_subplot(122)\n",
" if \"BadPixelsDark\" in constants.keys():\n",
" label = f'$\\sigma$ {const_name} [ADU], good pixels only'\n",
" else:\n",
" label = f'$\\sigma$ {const_name} [ADU], good and bad pixels'\n",
" d = []\n",
" for im, mod in enumerate(datastd):\n",
" d.append({'x': np.arange(mod.shape[0]),\n",
" 'y': mod,\n",
" 'drawstyle': 'steps-pre',\n",
" 'label': mod_names[im],\n",
" })\n",
"\n",
" _ = simplePlot(d, figsize=(10, 10), xrange=(-12, 510),\n",
" x_label='Memory Cell ID',\n",
" y_label=label,\n",
" use_axis=ax,\n",
" title=f'{glabel} $\\sigma$',\n",
" title_position=[0.5, 1.18],\n",
" legend='outside-top-ncol6-frame', legend_size='18%',\n",
" legend_pad=0.00)\n",
"\n",
" plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"display(Markdown(f'### Summary across Modules - BadPixelsDark'))\n",
"\n",
"bad_px_dark = constants['BadPixelsDark']\n",
"\n",
"for gain in range(gainstages):\n",
" glabel = gain_names[gain]\n",
"\n",
" if bad_px_dark.ndim == 5:\n",
" data = bad_px_dark[:, :, :, :, gain]\n",
" else:\n",
" data = bad_px_dark[:, :, :, :]\n",
"\n",
" bad_px_per_cell = np.count_nonzero(data, axis=(1, 2))\n",
" fraction_bad_per_cell = bad_px_per_cell / (data.shape[1] * data.shape[2])\n",
" fraction_bad_per_cell[fraction_bad_per_cell == 1.0] = np.nan\n",
"\n",
" fig = plt.figure(figsize=(15, 6), tight_layout={\n",
" 'pad': 0.2, 'w_pad': 1.3, 'h_pad': 1.3})\n",
" ax = fig.add_subplot(1, 1, 1)\n",
"\n",
" d = []\n",
" for im, mod in enumerate(fraction_bad_per_cell):\n",
" d.append({'x': np.arange(mod.shape[0]),\n",
" 'y': mod,\n",
" 'drawstyle': 'steps-pre',\n",
" 'label': mod_names[im],\n",
" })\n",
"\n",
" _ = simplePlot(d, figsize=(10, 10), xrange=(-12, 510),\n",
" x_label='Memory Cell ID',\n",
" y_label='Fraction of bad pixels',\n",
" use_axis=ax,\n",
" title=glabel,\n",
" title_position=[0.5, 1.18],\n",
" legend='outside-top-ncol6-frame', legend_size='18%',\n",
" legend_pad=0.00)\n",
"\n",
" plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Summary tables across Modules ##\n",
"\n",
"Tables show values averaged across all pixels and memory cells of a given detector module."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"if u'$' in tabulate.LATEX_ESCAPE_RULES:\n",
" del(tabulate.LATEX_ESCAPE_RULES[u'$'])\n",
" \n",
"if u'\\\\' in tabulate.LATEX_ESCAPE_RULES:\n",
" del(tabulate.LATEX_ESCAPE_RULES[u'\\\\'])"
]
},
{
"cell_type": "code",
"execution_count": null,
"outputs": [],
"source": [
"head = ['Module', 'High gain', 'Medium gain', 'Low gain']\n",
"head_th = ['Module', 'HG_MG threshold', 'MG_LG threshold']\n",
"for const_name, const in constants.items():\n",
" if const_name == 'BadPixelsDark':\n",
" continue # Handled below\n",
"\n",
" if const.ndim == 4: # Add gain dimension if not present\n",
" const = const[:, :, : , :, np.newaxis]\n",
" \n",
" if \"BadPixelsDark\" in constants.keys():\n",
" goodpix = constants['BadPixelsDark'] == 0\n",
" if goodpix.ndim == 4:\n",
" goodpix = goodpix[:, :, : , :, np.newaxis]\n",
" \n",
" label = f'### Average {const_name} [ADU], good pixels only'\n",
" else:\n",
" goodpix = [True] * const.shape[0]\n",
" label = f'### Average {const_name} [ADU], good and bad pixels'\n",
"\n",
" # Reduce data in parallel (one worker per module)\n",
" mean_by_module_gain = psh.alloc(const.shape[:1] + const.shape[4:], const.dtype)\n",
" std_by_module_gain = psh.alloc(const.shape[:1] + const.shape[4:], const.dtype)\n",
" \n",
" def average_module(wid, i, _):\n",
" mean_by_module_gain[i] = np.mean(const[i], axis=(0, 1, 2), where=goodpix[i])\n",
" std_by_module_gain[i] = np.std (const[i], axis=(0, 1, 2), where=goodpix[i])\n",
" psh_ctx.map(average_module, range(const.shape[0]))\n",
"\n",
" table = []\n",
"\n",
" for i_mod, mod in enumerate(mod_names):\n",
" t_line = [mod]\n",
" for gain in range(gainstages):\n",
" if const_name == 'ThresholdsDark'and gain == 2:\n",
" continue\n",
" datamean = mean_by_module_gain[i_mod, gain]\n",
" datastd = std_by_module_gain[i_mod, gain]\n",
" t_line.append(f'{datamean:6.1f} $\\\\pm$ {datastd:6.1f}')\n",
"\n",
" table.append(t_line)\n",
"\n",
" display(Markdown(label))\n",
" header = head_th if const_name == 'ThresholdsDark' else head\n",
" md = display(Latex(tabulate.tabulate(\n",
" table, tablefmt='latex', headers=header)))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Bad pixels summary table\n",
"table = []\n",
"\n",
"for i_mod, mod in enumerate(mod_names):\n",
"\n",
" t_line = [mod]\n",
" for gain in range(gainstages):\n",
" if bad_px_dark.ndim == 5: \n",
" data = bad_px_dark[i_mod, :, :, :, gain]\n",
" else:\n",
" data = bad_px_dark[i_mod]\n",
"\n",
" datasum = np.count_nonzero(data)\n",
" datamean = datasum / data.size\n",
"\n",
" t_line.append(f'{datasum:6.0f} ({datamean:6.3f}) ')\n",
" label = '## Number(fraction) of bad pixels'\n",
"\n",
" table.append(t_line)\n",
"\n",
"display(Markdown(label))\n",
"md = display(Latex(tabulate.tabulate(\n",
" table, tablefmt='latex', headers=head)))"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Offline Cal",
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
}
},
"nbformat": 4,
"nbformat_minor": 2
}