From 8536a23c34a111d81c9bb9f292a7e90e1a9ff785 Mon Sep 17 00:00:00 2001
From: Nuno Duarte <duarten@max-exfl001.desy.de>
Date: Mon, 20 Jun 2022 17:02:02 +0200
Subject: [PATCH] Initial commit: Add common mode to Dark Characterization NB

---
 .../Characterize_Darks_ePix100_NBC.ipynb      | 724 ++++++++++++++----
 1 file changed, 593 insertions(+), 131 deletions(-)

diff --git a/notebooks/ePix100/Characterize_Darks_ePix100_NBC.ipynb b/notebooks/ePix100/Characterize_Darks_ePix100_NBC.ipynb
index 8fe339e74..f422a3ac8 100644
--- a/notebooks/ePix100/Characterize_Darks_ePix100_NBC.ipynb
+++ b/notebooks/ePix100/Characterize_Darks_ePix100_NBC.ipynb
@@ -12,7 +12,13 @@
     "\n",
     "The following notebook provides dark image analysis and calibration constants of the ePix100 detector.\n",
     "\n",
-    "Dark characterization evaluates offset and noise of the detector and gives information about bad pixels. Resulting maps are saved as .h5 files for a latter use and injected to calibration DB."
+    "Dark characterization evaluates offset and noise of the detector and gives information about bad pixels. \n",
+    "\n",
+    "Noise and bad pixels maps are calculated independently for each of the 4 ASICs of ePix100, since their noise behaviour can be significantly different.\n",
+    "\n",
+    "Common mode correction can be applied to increase sensitivity to noise related bad pixels. Common mode correction is achieved by subtracting the median of all pixels that are read out at the same time along a row/column. This is done in an iterative process, by which a new bad pixels map is calculated and used to mask data as the common mode values across the rows/columns is updated.\n",
+    "\n",
+    "Resulting maps are saved as .h5 files for a later use and injected to calibration DB."
    ]
   },
   {
@@ -21,15 +27,15 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "in_folder = '/gpfs/exfel/exp/HED/202230/p900247/raw' # input folder, required\n",
+    "in_folder = '/gpfs/exfel/exp/HED/202201/p002804/raw' # input folder, required\n",
     "out_folder = '' # output folder, required\n",
     "metadata_folder = ''  # Directory containing calibration_metadata.yml when run by xfel-calibrate\n",
     "sequence = 0 # sequence file to use\n",
-    "run = 45 # which run to read data from, required\n",
+    "run = 281 # which run to read data from, required\n",
     "\n",
     "# Parameters for accessing the raw data.\n",
     "karabo_id = \"HED_IA1_EPX100-1\" # karabo karabo_id\n",
-    "karabo_da = [\"EPIX01\"]  # data aggregators\n",
+    "karabo_da = \"EPIX01\"  # data aggregators\n",
     "receiver_template = \"RECEIVER\" # detector receiver template for accessing raw data files\n",
     "path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # the template to use to access data\n",
     "instrument_source_template = '{}/DET/{}:daqOutput' # instrument detector data source in h5files\n",
@@ -47,7 +53,11 @@
     "fix_integration_time = -1 # Integration time. Set to -1 to read from .h5 file\n",
     "fix_temperature = -1 # Fixed temperature in Kelvin. Set to -1 to read from .h5 file\n",
     "temp_limits = 5 # Limit for parameter Operational temperature\n",
-    "badpixel_threshold_sigma = 5.  # Bad pixels defined by values outside n times this std from median\n",
+    "badpixel_threshold_sigma = 5.  # Bad pixels defined by values outside n times this std from median. Default: 5\n",
+    "CM_N_iterations = 2 # Number of iterations for common mode correction. Set to 0 to skip it\n",
+    "\n",
+    "# Visualization parameters\n",
+    "frame_display = -1 # Example frame to display. Set to -1 display last acquired frame.\n",
     "\n",
     "# Parameters used during selecting raw data trains.\n",
     "min_trains = 1 # Minimum number of trains that should be available to process dark constants. Default 1.\n",
@@ -69,14 +79,18 @@
     "import os\n",
     "import warnings\n",
     "\n",
+    "import matplotlib.pyplot as plt\n",
     "import numpy as np\n",
     "import pasha as psh\n",
     "from extra_data import RunDirectory\n",
+    "from prettytable import PrettyTable\n",
     "\n",
     "import XFELDetAna.xfelprofiler as xprof\n",
     "from XFELDetAna import xfelpyanatools as xana\n",
+    "from XFELDetAna import xfelpycaltools as xcal\n",
     "from XFELDetAna.plotting.util import prettyPlotting\n",
     "from cal_tools.enums import BadPixels\n",
+    "from cal_tools.step_timing import StepTimer\n",
     "from cal_tools.tools import (\n",
     "    get_dir_creation_date,\n",
     "    get_pdu_from_db,\n",
@@ -183,6 +197,41 @@
     "print(f\"Operated in vacuum: {in_vacuum}\")"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Passing repetitive code along the notebook to a function   \n",
+    "def stats_calc(data):\n",
+    "    '''\n",
+    "    Calculates basic statistical parameters of the input data: \n",
+    "    mean, standard deviation, median, minimum and maximum.\n",
+    "    \n",
+    "    Returns dictionary with keys: 'mean', 'std', 'median', \n",
+    "    'min', 'max' and 'legend'.\n",
+    "    \n",
+    "    Parameters\n",
+    "    ----------\n",
+    "    data : ndarray\n",
+    "           Data to analyse.\n",
+    "    '''\n",
+    "    \n",
+    "    stats = {}\n",
+    "    stats['mean'] = np.nanmean(data)\n",
+    "    stats['std'] = np.nanstd(data)\n",
+    "    stats['median'] = np.nanmedian(data)\n",
+    "    stats['min'] = np.nanmin(data)\n",
+    "    stats['max'] = np.nanmax(data)\n",
+    "    stats['legend'] = ('mean: ' + str(np.round(stats['mean'],2)) +\n",
+    "                       '\\nstd: ' + str(np.round(stats['std'],2)) +\n",
+    "                       '\\nmedian: ' + str(np.round(stats['median'],2)) +\n",
+    "                       '\\nmin: ' + str(np.round(stats['min'],2)) +\n",
+    "                       '\\nmax: ' + str(np.round(stats['max'],2)))\n",
+    "    return stats"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,
@@ -191,45 +240,24 @@
    },
    "outputs": [],
    "source": [
-    "# Calculate noise and offset per pixel and global average, std and median\n",
+    "step_timer = StepTimer()\n",
+    "step_timer.start()\n",
+    "\n",
+    "# Read data\n",
     "data_dc = run_dir.select(\n",
     "    *pixels_src, require_all=True).select_trains(np.s_[:n_trains])\n",
     "\n",
     "data = data_dc[pixels_src].ndarray()\n",
     "\n",
-    "noise_data = np.std(data, axis=0)\n",
-    "offset_data = np.mean(data, axis=0)\n",
-    "\n",
-    "noise_mean = np.mean(noise_data)\n",
-    "noise_sigma = np.std(noise_data)\n",
-    "noise_median = np.median(noise_data)\n",
-    "noise_min = np.min(noise_data)\n",
-    "noise_max = np.max(noise_data)\n",
-    "\n",
-    "offset_mean = np.mean(offset_data)\n",
-    "offset_sigma = np.std(offset_data)\n",
-    "offset_median = np.median(offset_data)\n",
-    "offset_min = np.min(offset_data)\n",
-    "offset_max = np.max(offset_data)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Create and fill offset and noise maps\n",
-    "constant_maps = {}\n",
-    "constant_maps['Offset'] = offset_data[..., np.newaxis]\n",
-    "constant_maps['Noise'] = noise_data[..., np.newaxis]"
+    "# Instantiate constant maps to be filled with offset, noise and bad pixels maps\n",
+    "constant_maps = {}"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## Offset ###"
+    "## Offset Map"
    ]
   },
   {
@@ -238,56 +266,49 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "#************** OFFSET HEAT MAP **************#\n",
+    "step_timer.start()\n",
+    "\n",
+    "# Calculate offset per pixel and store it in constant_maps\n",
+    "constant_maps['Offset'] = np.nanmean(data, axis=0)[..., np.newaxis]\n",
+    "\n",
+    "# Calculate basic statistical parameters\n",
+    "stats = stats_calc(constant_maps['Offset'].flatten())\n",
+    "\n",
+    "# Offset map\n",
     "fig = xana.heatmapPlot(\n",
-    "    constant_maps['Offset'][:, :, 0],\n",
+    "    constant_maps['Offset'].squeeze(),\n",
     "    lut_label='[ADU]',\n",
     "    x_label='Column',\n",
     "    y_label='Row',\n",
-    "    x_range=(0, sensor_size[0]),\n",
-    "    y_range=(0, sensor_size[1]), \n",
-    "    vmin=max(0, offset_median - badpixel_threshold_sigma*offset_sigma), \n",
-    "    vmax=min(np.power(2,14)-1, offset_median + badpixel_threshold_sigma*offset_sigma)\n",
+    "    vmin=max(0, np.round((stats['median']-stats['std'])/250)*250),\n",
+    "    vmax=min(np.power(2,14)-1, np.round((stats['median']+stats['std'])/250)*250)\n",
     ")\n",
     "fig.suptitle('Offset Map', x=.48, y=.9, fontsize=16)\n",
     "fig.set_size_inches(h=15, w=15)\n",
     "\n",
-    "#************** OFFSET HISTOGRAM **************#\n",
-    "binwidth = offset_sigma/100\n",
-    "ho, co = np.histogram(\n",
+    "# Offset Histogram\n",
+    "h, c = np.histogram(\n",
     "    constant_maps['Offset'].flatten(),\n",
-    "    bins = np.arange(offset_min, offset_max, binwidth)\n",
+    "    bins = np.arange(stats['min'], stats['max'], stats['std']/100)\n",
     ")\n",
-    "do = {'x': co[:-1],\n",
-    "      'y': ho,\n",
-    "      'y_err': np.sqrt(ho[:]),\n",
-    "      'drawstyle': 'bars',\n",
-    "      'color': 'cornflowerblue'}\n",
+    "d = {'x': c[:-1],\n",
+    "      'y': h,\n",
+    "      'color': 'blue'\n",
+    "     }\n",
     "\n",
     "fig = xana.simplePlot(\n",
-    "    do, \n",
+    "    d,\n",
     "    aspect=1.5,\n",
     "    x_label='Offset [ADU]',\n",
     "    y_label='Counts',\n",
     "    x_range=(0, np.power(2,14)-1),\n",
-    "    y_range=(0, max(ho)*1.1),\n",
+    "    y_range=(0, max(h)*1.1),\n",
     "    y_log=True\n",
-    ")\n",
-    "fig.suptitle('Offset Distribution', x=.5,y =.92, fontsize=16)\n",
-    "\n",
-    "stats_str = (\n",
-    "    f'mean: {np.round(offset_mean,2)}\\n'\n",
-    "    f'std : {np.round(offset_sigma,2)}\\n'\n",
-    "    f'median: {np.round(offset_median,2)}\\n'\n",
-    "    f'min: {np.round(offset_min,2)}\\n'\n",
-    "    f'max: {np.round(offset_max,2)}'\n",
-    ")\n",
-    "fig.text(\n",
-    "    s=stats_str,\n",
-    "    x=.7,\n",
-    "    y=.7,\n",
-    "    fontsize=14,\n",
-    "    bbox=dict(facecolor='yellow', edgecolor='black', alpha=.1));"
+    ");\n",
+    "fig.text(s=stats['legend'],x=.7,y=.7,fontsize=14,bbox=dict(facecolor='yellow', edgecolor='black', alpha=.1))\n",
+    "fig.suptitle('Offset Map Histogram', x=.5,y =.92, fontsize=16);\n",
+    "\n",
+    "step_timer.done_step('Offset Map created. Elapsed Time')"
    ]
   },
   {
@@ -296,7 +317,7 @@
     "tags": []
    },
    "source": [
-    "## Noise ###"
+    "## Initial Noise Map"
    ]
   },
   {
@@ -305,66 +326,60 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "#************** NOISE HEAT MAP **************#\n",
+    "step_timer.start()\n",
+    "\n",
+    "# Calculate noise per pixel and store it in constant_maps\n",
+    "constant_maps['Noise'] = np.nanstd(data, axis=0)[..., np.newaxis]\n",
+    "\n",
+    "# Calculate basic statistical parameters\n",
+    "stats = stats_calc(constant_maps['Noise'].flatten())\n",
+    "\n",
+    "# Noise map\n",
     "fig = xana.heatmapPlot(\n",
-    "    constant_maps['Noise'][:, :, 0],\n",
+    "    constant_maps['Noise'].squeeze(),\n",
     "    lut_label='[ADU]',\n",
     "    x_label='Column', \n",
     "    y_label='Row',\n",
-    "    x_range=(0, sensor_size[0]),\n",
-    "    y_range=(0, sensor_size[1]),\n",
-    "    vmin=max(0, noise_median - badpixel_threshold_sigma*noise_sigma), \n",
-    "    vmax=noise_median + badpixel_threshold_sigma*noise_sigma\n",
+    "    vmin=max(0, np.round((stats['median'] - badpixel_threshold_sigma*stats['std']))), \n",
+    "    vmax=np.round(stats['median'] +badpixel_threshold_sigma*stats['std'])\n",
     ")\n",
-    "fig.suptitle('Noise Map', x=.5, y=.9, fontsize=16)\n",
+    "fig.suptitle('Initial Noise Map', x=.5, y=.9, fontsize=16)\n",
     "fig.set_size_inches(h=15, w=15)\n",
     "\n",
-    "#************** NOISE HISTOGRAM **************#\n",
-    "binwidth = noise_sigma/100\n",
-    "hn, cn = np.histogram(constant_maps['Noise'].flatten(),\n",
-    "                      bins=np.arange((min(constant_maps['Noise'].flatten())),\n",
-    "                                 max(constant_maps['Noise'].flatten()) + binwidth,\n",
-    "                                 binwidth))\n",
-    "dn = {'x': cn[:-1],\n",
-    "      'y': hn,\n",
-    "      'y_err': np.sqrt(hn[:]),\n",
-    "      'drawstyle': 'bars',\n",
-    "      'color': 'cornflowerblue'}\n",
+    "# Noise histogram\n",
+    "bins = np.arange(stats['min'], stats['max'], stats['std']/100)\n",
     "\n",
+    "h, c = np.histogram(\n",
+    "    constant_maps['Noise'].flatten(),\n",
+    "    bins = bins\n",
+    ")\n",
+    "d = {'x': c[:-1],\n",
+    "      'y': h,\n",
+    "      'color': 'blue'\n",
+    "     }\n",
     "fig = xana.simplePlot(\n",
-    "    dn,\n",
+    "    d,\n",
     "    aspect=1.5,\n",
     "    x_label='Noise [ADU]',\n",
     "    y_label='Counts',\n",
-    "    x_range=(max(0, noise_median - badpixel_threshold_sigma*noise_sigma),\n",
-    "             noise_median + badpixel_threshold_sigma*noise_sigma),\n",
-    "    y_range=(0, max(hn)*1.1),\n",
+    "    x_range=(max(0, stats['median'] - badpixel_threshold_sigma*stats['std']),\n",
+    "             stats['median'] + badpixel_threshold_sigma*stats['std']),\n",
+    "    y_range=(0, max(h)*1.1),\n",
     "    y_log=True\n",
     ")\n",
-    "fig.suptitle('Noise Distribution',x=.5,y=.92,fontsize=16);\n",
-    "\n",
-    "stats_str = (\n",
-    "    f'mean: {np.round(noise_mean,2)}\\n'\n",
-    "    f'std: {np.round(noise_sigma,2)}\\n'\n",
-    "    f'median: {np.round(noise_median,2)}\\n'\n",
-    "    f'min: {np.round(noise_min,2)}\\n'\n",
-    "    f'max: {np.round(noise_max,2)}'\n",
-    ")\n",
-    "fig.text(\n",
-    "    s=stats_str,\n",
-    "    x=.7,\n",
-    "    y=.7,\n",
-    "    fontsize=14,\n",
-    "    bbox=dict(facecolor='yellow', edgecolor='black', alpha=.1));"
+    "fig.text(s=stats['legend'],x=.75,y=.7,fontsize=14,bbox=dict(facecolor='yellow', edgecolor='black', alpha=.1))\n",
+    "fig.suptitle('Initial Noise Map Histogram',x=.5,y=.92,fontsize=16)\n",
+    "\n",
+    "step_timer.done_step('Initial Noise Map created. Elapsed Time')"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## Bad Pixels ###\n",
+    "## Bad Pixels\n",
     "\n",
-    "The bad pixel map is deduced by comparing offset and noise of each pixel ($v_i$) against the median value of the respective maps ($v_k$):\n",
+    "The bad pixel map is deduced by comparing offset and noise of each pixel ($v_i$) against the median value of the respective maps for each ASIC ($v_k$):\n",
     "\n",
     "$$ \n",
     "v_i > \\mathrm{median}(v_{k}) + n \\sigma_{v_{k}}\n",
@@ -380,7 +395,9 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "scrolled": true
+   },
    "outputs": [],
    "source": [
     "def print_bp_entry(bp):\n",
@@ -391,7 +408,7 @@
     "    ----------\n",
     "    bp : enum 'BadPixels'\n",
     "    '''\n",
-    "    print('{:<30s} {:032b}'.format(bp.name, bp.value))\n",
+    "    print('{:<23s}: {:032b} ({})'.format(bp.name, bp.value, bp.real))\n",
     "\n",
     "print_bp_entry(BadPixels.OFFSET_OUT_OF_THRESHOLD)\n",
     "print_bp_entry(BadPixels.NOISE_OUT_OF_THRESHOLD)\n",
@@ -404,12 +421,13 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "def eval_bpidx(const_map, n_sigma):\n",
+    "def eval_bpidx(const_map, n_sigma, block_size):\n",
     "    '''\n",
-    "    Evaluates bad pixels by comparing offset and noise of each\n",
-    "    pixel against the median value of the respective maps.\n",
+    "    Evaluates bad pixels by comparing the average offset and \n",
+    "    noise of each pixel against the median value of the\n",
+    "    respective maps of each ASIC.\n",
     "    \n",
-    "    Returns boolean array\n",
+    "    Returns boolean array.\n",
     "    \n",
     "    Parameters\n",
     "    ----------\n",
@@ -418,14 +436,66 @@
     "    n_sigma : float\n",
     "              Standard deviation multiplicity interval outside\n",
     "              which bad pixels are defined.\n",
+    "    block_size : ndarray\n",
+    "                 dimensions ([x,y]) of each ASIC.\n",
+    "    '''\n",
+    "    \n",
+    "    blocks = {} # Each block corresponds to 1 ASIC\n",
+    "    blocks['1'] = np.copy(const_map[:int(block_size[1]), :int(block_size[0])]) # bottom left\n",
+    "    blocks['2'] = np.copy(const_map[int(block_size[1]):, :int(block_size[0])]) # top left\n",
+    "    blocks['3'] = np.copy(const_map[int(block_size[1]):, int(block_size[0]):]) # top right\n",
+    "    blocks['4'] = np.copy(const_map[:int(block_size[1]), int(block_size[0]):]) # bottom right\n",
+    "    \n",
+    "    idx = {}\n",
+    "    for b in range(1, len(blocks)+1):\n",
+    "        mdn = np.nanmedian(blocks[str(b)])\n",
+    "        std = np.nanstd(blocks[str(b)])\n",
+    "        idx[str(b)] = ( (blocks[str(b)] > mdn + n_sigma*std) | (blocks[str(b)] < mdn - n_sigma*std) )\n",
+    "\n",
+    "    idx_output = np.zeros(const_map.shape,dtype=bool)\n",
+    "    idx_output[:int(block_size[1]), :int(block_size[0])] = idx['1']\n",
+    "    idx_output[int(block_size[1]):, :int(block_size[0])] = idx['2']\n",
+    "    idx_output[int(block_size[1]):, int(block_size[0]):] = idx['3']\n",
+    "    idx_output[:int(block_size[1]), int(block_size[0]):] = idx['4']\n",
+    "\n",
+    "    return idx_output"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def bp_analysis_table(bad_pixels_map, title = ''):\n",
     "    '''\n",
+    "    Prints a table with short analysis of the number and \n",
+    "    percentage of bad pixels on count of offset or noise \n",
+    "    out of threshold, or evaluation error.\n",
     "    \n",
-    "    mdn = np.nanmedian(const_map)\n",
-    "    std = np.nanstd(const_map)  \n",
-    "    idx = ( (const_map > mdn + n_sigma*std)\n",
-    "          | (const_map < mdn - n_sigma*std) )\n",
+    "    Returns PrettyTable.\n",
+    "    \n",
+    "    Parameters\n",
+    "    ----------\n",
+    "    bad_pixels_map : ndarray\n",
+    "                     Bad pixel map to analyse.\n",
+    "    title : string, optional\n",
+    "            Table title to be used. \n",
+    "    '''\n",
+    "\n",
+    "    num_bp = np.sum(bad_pixels_map!=0)\n",
+    "    offset_bp = np.sum(bad_pixels_map==1)\n",
+    "    noise_bp = np.sum(bad_pixels_map==2)\n",
+    "    eval_error_bp = np.sum(bad_pixels_map==4)\n",
     "\n",
-    "    return idx"
+    "    t = PrettyTable()\n",
+    "    t.field_names = [title]\n",
+    "    t.add_row(['Total number of Bad Pixels : {:<5} ({:<.2f}%)'.format(num_bp, 100*num_bp/np.prod(bad_pixels_map.shape))])\n",
+    "    t.add_row(['  Offset out of threshold  : {:<5} ({:<.2f}%)'.format(offset_bp, 100*offset_bp/np.prod(bad_pixels_map.shape))])\n",
+    "    t.add_row(['  Noise out of threshold   : {:<5} ({:<.2f}%)'.format(noise_bp, 100*noise_bp/np.prod(bad_pixels_map.shape))])\n",
+    "    t.add_row(['  Evaluation error         : {:<5} ({:<.2f}%)'.format(0, 100*eval_error_bp/np.prod(bad_pixels_map.shape))])\n",
+    "    \n",
+    "    return t"
    ]
   },
   {
@@ -436,20 +506,89 @@
    },
    "outputs": [],
    "source": [
-    "# Add BadPixels to constant_maps\n",
+    "# Add bad pixels from darks to constant_maps\n",
     "constant_maps['BadPixelsDark'] = np.zeros(constant_maps['Offset'].shape, np.uint32)\n",
     "\n",
-    "# Noise related bad pixels\n",
-    "constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Noise'], badpixel_threshold_sigma)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
+    "# Find noise related bad pixels\n",
+    "constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Noise'], badpixel_threshold_sigma, sensor_size//2)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
     "constant_maps['BadPixelsDark'][~np.isfinite(constant_maps['Noise'])] = BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
     "\n",
-    "# Offset related bad pixels\n",
-    "constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Offset'], badpixel_threshold_sigma)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n",
+    "# Find offset related bad pixels\n",
+    "constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Offset'], badpixel_threshold_sigma, sensor_size//2)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n",
     "constant_maps['BadPixelsDark'][~np.isfinite(constant_maps['Offset'])] = BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
     "\n",
-    "num_bad_pixels = np.sum(constant_maps['BadPixelsDark']!=0)\n",
+    "# Plot Bad Pixels Map\n",
+    "fig = xana.heatmapPlot(\n",
+    "    np.exp(np.nan_to_num(np.log(constant_maps['BadPixelsDark']),neginf=np.nan)).squeeze(), # convert zeros to NaN\n",
+    "    lut_label='Bad pixel value [ADU]',                                                     # for plotting purposes\n",
+    "    x_label='Column',\n",
+    "    y_label='Row',\n",
+    "    x_range=(0, sensor_size[0]),\n",
+    "    y_range=(0, sensor_size[1])\n",
+    ")\n",
+    "fig.suptitle('Initial Bad Pixels Map', x=.5, y=.9, fontsize=16)\n",
+    "fig.set_size_inches(h=15, w=15)\n",
+    "\n",
+    "step_timer.done_step('Initial Bad Pixels Map created. Elapsed Time')\n",
+    "print(bp_analysis_table(constant_maps['BadPixelsDark'], title='Initial Bad Pixel Analysis'))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "tags": []
+   },
+   "source": [
+    "## Common Mode Correction\n",
+    "\n",
+    "Common mode correction is an iterative process. Each iteration is composed by the follwing steps:\n",
+    "\n",
+    "1 - Common mode noise is calculated and subtracted from data.<div> </div>\n",
+    "2 - Noise map is recalculated.<div> </div>\n",
+    "3 - Bad pixels are recalulated based on the new noise map. <div> </div>\n",
+    "4 - Data is masked based on the new bad pixels map.<div> </div>"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "step_timer.start()\n",
+    "data = data.astype(float) # This conversion is needed for offset subtraction\n",
+    "data_raw = np.copy(data)  # Save data before corrections for later analysis\n",
+    "\n",
+    "# Subtract Offset\n",
+    "data -= constant_maps['Offset'].squeeze()\n",
+    "\n",
+    "step_timer.done_step('Offset correction applied. Elapsed Time')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if CM_N_iterations<1:\n",
+    "    print('Common mode correction not applied.')\n",
+    "else:\n",
+    "    \n",
+    "    commonModeBlockSize = sensor_size//2\n",
+    "\n",
+    "    # Instantiate common mode calculators for column and row CM correction\n",
+    "    cmCorrection_col = xcal.CommonModeCorrection(\n",
+    "        sensor_size.tolist(),\n",
+    "        commonModeBlockSize.tolist(),\n",
+    "        'col',\n",
+    "        parallel=False)\n",
     "\n",
-    "print('Number of Bad Pixels: ' + str(num_bad_pixels) + ' (' + str(np.round(100*num_bad_pixels/(sensor_size[0]*sensor_size[1]),2)) + '%)')"
+    "    cmCorrection_row = xcal.CommonModeCorrection(\n",
+    "        sensor_size.tolist(),\n",
+    "        commonModeBlockSize.tolist(),\n",
+    "        'row',\n",
+    "        parallel=False)"
    ]
   },
   {
@@ -458,17 +597,340 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "#************** BAD PIXELS HEAT MAP **************#\n",
+    "if CM_N_iterations<1:\n",
+    "    print('Common mode correction not applied.')    \n",
+    "else:\n",
+    "    noise_map_initial = np.copy(constant_maps['Noise']) # Save uncorrected noise map for later comparison\n",
+    "\n",
+    "    bp_offset = [np.sum(constant_maps['BadPixelsDark']==1)]\n",
+    "    bp_noise = [np.sum(constant_maps['BadPixelsDark']==2)]\n",
+    "\n",
+    "    for it in range (0,CM_N_iterations):\n",
+    "        step_timer.start()\n",
+    "\n",
+    "        # Mask bad pixels\n",
+    "        BadPixels_mask = np.squeeze(constant_maps['BadPixelsDark'] != 0)\n",
+    "        BadPixels_mask = np.repeat(BadPixels_mask[np.newaxis,...],data.shape[0],axis=0)\n",
+    "        data[BadPixels_mask] = np.nan\n",
+    "\n",
+    "        # Common mode correction\n",
+    "        data = np.swapaxes(data,0,-1)\n",
+    "        data = cmCorrection_col.correct(data)\n",
+    "        data = cmCorrection_row.correct(data)\n",
+    "        data = np.swapaxes(data,0,-1)\n",
+    "\n",
+    "        # Update noise map\n",
+    "        constant_maps['Noise'] = np.nanstd(data, axis=0)[..., np.newaxis]\n",
+    "\n",
+    "        # Update bad pixels map \n",
+    "        constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Noise'], badpixel_threshold_sigma, sensor_size//2)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
+    "        bp_offset.append(np.sum(constant_maps['BadPixelsDark']==1))\n",
+    "        bp_noise.append(np.sum(constant_maps['BadPixelsDark']==2))\n",
+    "\n",
+    "        print(bp_analysis_table(constant_maps['BadPixelsDark'], title=f'{it+1} CM correction iterations'));\n",
+    "        step_timer.done_step('Elapsed Time');\n",
+    "        print('\\n')\n",
+    "\n",
+    "\n",
+    "    # Apply final bad pixels mask\n",
+    "    BadPixels_mask = np.broadcast_to(np.squeeze(constant_maps['BadPixelsDark'] != 0), data.shape)\n",
+    "    data[BadPixels_mask] = np.nan\n",
+    "\n",
+    "    # Mask bad pixels in noise map\n",
+    "    constant_maps['Noise'][constant_maps['BadPixelsDark']!=0] = np.nan\n",
+    "    noise_map_initial[constant_maps['BadPixelsDark']!=0] = np.nan\n",
+    "\n",
+    "    it = np.arange(0,CM_N_iterations+1)\n",
+    "\n",
+    "    plt.figure()\n",
+    "    plt.plot(it, np.sum((bp_offset,bp_noise),axis=0), c = 'black', ls = '--', marker = 'o', label = 'Total')\n",
+    "    plt.plot(it, bp_noise, c = 'red', ls = '--', marker = 'v', label = 'Noise out of threshold')\n",
+    "    plt.plot(it, bp_offset, c = 'blue', ls = '--', marker = 's',label = 'Offset out of threshold')\n",
+    "    plt.xticks(it)\n",
+    "    plt.xlabel('CM correction iteration')\n",
+    "    plt.ylabel('# Bad Pixels')\n",
+    "    plt.legend()\n",
+    "    plt.grid(linestyle = ':')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Raw & Corrected Frame Visualization "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Select example frame to display \n",
+    "try:\n",
+    "    if (frame_display <-1) | (frame_display >= data.shape[0]):\n",
+    "        raise ValueError\n",
+    "except ValueError: \n",
+    "    frame_display = -1        \n",
+    "    print(f'Frame number out of range. The last acquired frame ({data.shape[0]-1}) will be used.')\n",
+    "if frame_display == -1:\n",
+    "    frame_display = data.shape[0]-1 # Last frame\n",
+    "\n",
+    "# Define equal colorbar limits to compare images\n",
+    "stats = stats_calc(data[frame_display,:,:].flatten())\n",
+    "cb_vmin = stats['median'] - badpixel_threshold_sigma*stats['std']\n",
+    "cb_vmax = stats['median'] + badpixel_threshold_sigma*stats['std']\n",
+    "cb_lim = np.floor(np.mean(np.abs((cb_vmin,cb_vmax))))\n",
+    "\n",
+    "# Plot offset corrected dark image\n",
+    "image_offset_corrected = data_raw[frame_display] - constant_maps['Offset'].squeeze()\n",
+    "image_offset_corrected[constant_maps['BadPixelsDark'].squeeze()!=0] = np.nan\n",
     "fig = xana.heatmapPlot(\n",
-    "    np.log2(constant_maps['BadPixelsDark'][:, :, 0]),\n",
-    "    lut_label='Bad pixel bit assinged',\n",
+    "    image_offset_corrected,\n",
+    "    lut_label='[ADU]',\n",
     "    x_label='Column',\n",
     "    y_label='Row',\n",
-    "    x_range=(0, sensor_size[0]),\n",
-    "    y_range=(0, sensor_size[1])\n",
+    "    vmin=-cb_lim,\n",
+    "    vmax=cb_lim\n",
     ")\n",
-    "fig.suptitle('Bad Pixels Map', x=.5, y=.9, fontsize=16)\n",
-    "fig.set_size_inches(h=15, w=15)"
+    "fig.suptitle(f'Dark Image #{frame_display+1} - Offset Subtracted', x=.48, y=.9, fontsize=16)\n",
+    "fig.set_size_inches(h=15, w=15)\n",
+    "\n",
+    "if CM_N_iterations<1:\n",
+    "    print('Common mode correction not applied.')\n",
+    "else:\n",
+    "\n",
+    "    # Plot common mode profile to be subtracted\n",
+    "    CM_map = data_raw[frame_display]-constant_maps['Offset'].squeeze()-data[frame_display]\n",
+    "    fig = xana.heatmapPlot(\n",
+    "        CM_map,\n",
+    "        lut_label='[ADU]',\n",
+    "        x_label='Column', \n",
+    "        y_label='Row',\n",
+    "        vmax = -cb_lim,\n",
+    "        vmin = cb_lim\n",
+    "    )\n",
+    "    fig.suptitle(f'Dark Image #{frame_display+1} - Common Mode Profile', x=.48, y=.9, fontsize=16)\n",
+    "    fig.set_size_inches(h=15, w=15)\n",
+    "\n",
+    "    # Plot CM corrected dark image\n",
+    "    stats = stats_calc(data[frame_display,:,:].flatten())\n",
+    "    fig = xana.heatmapPlot(\n",
+    "        data[frame_display,:,:],\n",
+    "        lut_label='[ADU]',\n",
+    "        x_label='Column',\n",
+    "        y_label='Row',\n",
+    "        vmin=-cb_lim,\n",
+    "        vmax=cb_lim\n",
+    "    )\n",
+    "    fig.suptitle(f'Dark Image #{frame_display+1} - Common Mode Corrected', x=.48, y=.9, fontsize=16)\n",
+    "    fig.set_size_inches(h=15, w=15)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Corrected Bad Pixels Map"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if CM_N_iterations<1:\n",
+    "    print('Common mode correction not applied.')\n",
+    "else:\n",
+    "\n",
+    "    # Plot final bad pixels map\n",
+    "    fig = xana.heatmapPlot(\n",
+    "        np.exp(np.nan_to_num(np.log(constant_maps['BadPixelsDark']),neginf=np.nan)).squeeze(), # convert zeros to NaN\n",
+    "        lut_label='Bad pixel value [ADU]',                                                     # for plotting purposes\n",
+    "        x_label='Column',\n",
+    "        y_label='Row',\n",
+    "        x_range=(0, sensor_size[0]),\n",
+    "        y_range=(0, sensor_size[1])\n",
+    "    )\n",
+    "    fig.suptitle('Final Bad Pixels Map', x=.5, y=.9, fontsize=16)\n",
+    "    fig.set_size_inches(h=15, w=15)\n",
+    "\n",
+    "    print(bp_analysis_table(constant_maps['BadPixelsDark'], title='Final Bad Pixels Analysis'))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Corrected Noise Map"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if CM_N_iterations<1:\n",
+    "    print('Common mode correction not applied.')\n",
+    "else:\n",
+    "\n",
+    "    # Define equal colorbar limits to compare images\n",
+    "    stats = stats_calc(constant_maps['Noise'].flatten())\n",
+    "    cb_vmin = np.floor(stats['min'])\n",
+    "    cb_vmax = np.ceil(stats['max'])\n",
+    "\n",
+    "    # Plot initial noise map\n",
+    "    fig = xana.heatmapPlot(\n",
+    "        noise_map_initial.squeeze(),\n",
+    "        lut_label='[ADU]',\n",
+    "        x_label='Column', \n",
+    "        y_label='Row',\n",
+    "        vmin=cb_vmin,\n",
+    "        vmax=cb_vmax\n",
+    "    )\n",
+    "    fig.suptitle(f'Initial Noise Map', x=.5, y=.9, fontsize=16)\n",
+    "    fig.set_size_inches(h=15, w=15)\n",
+    "\n",
+    "    # Plot corrected noise map\n",
+    "    fig = xana.heatmapPlot(\n",
+    "        constant_maps['Noise'].squeeze(),\n",
+    "        lut_label='[ADU]',\n",
+    "        x_label='Column', \n",
+    "        y_label='Row',\n",
+    "        vmin=cb_vmin,\n",
+    "        vmax=cb_vmax\n",
+    "    )\n",
+    "    fig.suptitle(f'Corrected Noise Map', x=.5, y=.9, fontsize=16)\n",
+    "    fig.set_size_inches(h=15, w=15)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if CM_N_iterations<1:\n",
+    "    print('Common mode correction not applied.')\n",
+    "else:\n",
+    "\n",
+    "    # Initial noise map histogram\n",
+    "    stats = stats_calc(noise_map_initial.flatten())\n",
+    "    bins = np.arange(np.floor(stats['min']), np.ceil(stats['max']), stats['std']/100)\n",
+    "    d = []\n",
+    "\n",
+    "    h, c = np.histogram(noise_map_initial.flatten(),bins=bins)\n",
+    "    d.append({'x': c[:-1],\n",
+    "          'y': h,\n",
+    "          'color': 'blue',\n",
+    "          'label': 'Initial noise map'})\n",
+    "\n",
+    "    print('Initial noise map   : median = ' + str(np.round(stats['median'],2))  + \n",
+    "          ' | std = ' + str(np.round(stats['std'],2)))    \n",
+    "\n",
+    "    d.append({'x': stats['median']*np.ones([2]),\n",
+    "              'y': np.array([0, np.max(h)*2]),\n",
+    "              'color': 'blue',\n",
+    "              'linestyle': '--'})\n",
+    "\n",
+    "    # Corrected noise map histogram\n",
+    "    stats = stats_calc(constant_maps['Noise'].flatten())\n",
+    "    h, c = np.histogram(constant_maps['Noise'].flatten(), bins=bins)\n",
+    "    d.append({'x': c[:-1],\n",
+    "              'y': h,\n",
+    "              'color': 'green',\n",
+    "              'label': 'Corrected noise map'})\n",
+    "    d.append({'x': stats['median']*np.ones([2]),\n",
+    "              'y': np.array([0, np.max(h)*2]),\n",
+    "              'color': 'red',\n",
+    "              'linestyle': '--'})\n",
+    "\n",
+    "    print('Corrected noise map : median = ' + str(np.round(stats['median'],2))  + \n",
+    "          ' | std = ' + str(np.round(stats['std'],2)))    \n",
+    "\n",
+    "    # Plot\n",
+    "    fig = xana.simplePlot(\n",
+    "        d,\n",
+    "        aspect=1.5,\n",
+    "        x_label='Noise [ADU]',\n",
+    "        y_label='Counts',\n",
+    "        x_range=(bins[0],np.ceil(bins[-1])),\n",
+    "        y_range=(0,np.max([d[0]['y'],d[2]['y']])*1.05)\n",
+    "    )\n",
+    "\n",
+    "    plt.grid(linestyle = ':')\n",
+    "    plt.legend(fontsize = 12)\n",
+    "\n",
+    "    fig.suptitle('Noise Map Histogram',x=.5,y=.92,fontsize=16);"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "ASIC = []\n",
+    "ASIC.append({'noise': constant_maps['Noise'][:int(sensor_size[1]//2), :int(sensor_size[0]//2)],\n",
+    "            'label': 'ASIC 1 (bottom left)',\n",
+    "            'color': 'blue'})\n",
+    "ASIC.append({'noise': constant_maps['Noise'][int(sensor_size[1]//2):, :int(sensor_size[0]//2)],\n",
+    "            'label': 'ASIC 2 (top left)',\n",
+    "            'color': 'red'})\n",
+    "ASIC.append({'noise': constant_maps['Noise'][int(sensor_size[1]//2):, int(sensor_size[0]//2):],\n",
+    "            'label': 'ASIC 3 (top right)',\n",
+    "            'color': 'green'})\n",
+    "ASIC.append({'noise': constant_maps['Noise'][:int(sensor_size[1]//2), int(sensor_size[0]//2):],\n",
+    "            'label': 'ASIC 4 (bottom right)',\n",
+    "            'color': 'magenta'})\n",
+    "d = []\n",
+    "\n",
+    "stats = stats_calc(constant_maps['Noise'][constant_maps['BadPixelsDark']==0].flatten())\n",
+    "bins = np.arange(np.floor(stats['min']), np.ceil(stats['max']), stats['std']/100)\n",
+    "\n",
+    "min_std = np.inf\n",
+    "\n",
+    "for a in ASIC:\n",
+    "    h, c = np.histogram(a['noise'].flatten(), bins=bins)\n",
+    "    d.append({'x': c[:-1], 'y': h, 'color': a['color'], 'label': a['label']})\n",
+    "    min_std = np.nanmin((min_std, np.nanstd(a['noise'])))\n",
+    "    print(a['label'][:6] + \n",
+    "          ': median = ' + \"{:.2f}\".format(np.round(np.nanmedian(a['noise']),2)) +  \n",
+    "          ' | std = ' + \"{:.2f}\".format(np.round(np.nanstd(a['noise']),2)) +  \n",
+    "          a['label'][6:])\n",
+    "\n",
+    "\n",
+    "arg_max_median = 0;\n",
+    "for i in range(0,np.size(d)):\n",
+    "    arg_max_median = np.max((arg_max_median, np.argmax(d[i]['y'])))        \n",
+    "\n",
+    "fig = xana.simplePlot(\n",
+    "    d,\n",
+    "    aspect=1.5,\n",
+    "    x_label='Noise [ADU]',\n",
+    "    y_label='Counts',\n",
+    "    x_range=(bins[0], np.ceil(d[0]['x'][arg_max_median]+min_std*5)),\n",
+    "    y_range=(0, np.max([d[0]['y'], d[1]['y'], d[2]['y'], d[3]['y']])*1.1),\n",
+    "    y_log=False\n",
+    ")\n",
+    "plt.grid(linestyle = ':')\n",
+    "leg = fig.legend(bbox_to_anchor=(.42, .88),fontsize = 14)\n",
+    "\n",
+    "if CM_N_iterations<1:\n",
+    "    fig.suptitle('Uncorrected Noise Map Histogram per ASIC',x=.5,y=.92,fontsize=16);\n",
+    "else:\n",
+    "    fig.suptitle('Corrected Noise Map Histogram per ASIC',x=.5,y=.92,fontsize=16);"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Calibration Constants DB\n",
+    "\n",
+    "Send the dark constants to the database and/or save them locally."
    ]
   },
   {
-- 
GitLab