diff --git a/cal_tools/cal_tools/plotting.py b/cal_tools/cal_tools/plotting.py
index 03355d0d6f53c34dc3712d5887ca7fdc552c4f1b..0e9d9983578db0a1f2f8598be8adcb2199220783 100644
--- a/cal_tools/cal_tools/plotting.py
+++ b/cal_tools/cal_tools/plotting.py
@@ -61,6 +61,7 @@ def rebin(a, *args):
     are factors of the original dimensions. eg. An array with 6 columns and 4 rows
     can be reduced to have 6,3,2 or 1 columns and 4,2 or 1 rows.
     example usages:
+    https://scipy-cookbook.readthedocs.io/items/Rebinning.html
     >>> a=rand(6,4); b=rebin(a,3,2)
     >>> a=rand(6); b=rebin(a,2)
     '''
@@ -71,7 +72,7 @@ def rebin(a, *args):
              ['args[%d],factor[%d],'%(i,i) for i in range(lenShape)] + \
              [')'] + ['.sum(%d)'%(i+1) for i in range(lenShape)] + \
              ['/factor[%d]'%i for i in range(lenShape-1)]
-    
+
     ta = eval(''.join(evList))
     return ta.astype(np.uint32), np.indices([s + 1 for s in ta.shape])
 
@@ -89,7 +90,7 @@ def plot_badpix_3d(data, definitions, title=None, rebin_fac=2, azim=22.5):
 
     fig = plt.figure(figsize=(15,10))
     ax = fig.gca(projection="3d")
-    vox = ax.voxels(xx*rebin_fac, yy*rebin_fac, zz, voxels, facecolors=colors)
+    ax.voxels(xx*rebin_fac, yy*rebin_fac, zz, voxels, facecolors=colors)
     ax.view_init(elev=25., azim=azim)
     ax.set_xlabel("pixels")
     ax.set_ylabel("pixels")
@@ -100,9 +101,9 @@ def plot_badpix_3d(data, definitions, title=None, rebin_fac=2, azim=22.5):
 
     for k, c in cols.items():
         ax.plot([-1,], [-1,], color=c[1], label=c[0])
-    l = ax.legend()
+    ax.legend()
     if title:
-        t = ax.set_title(title)
+        ax.set_title(title)
 
 def create_constant_overview(constant, name, cells, vmin=None, vmax=None,
                              entries=3, out_folder=None, infix=None,
diff --git a/notebooks/AGIPD/Characterize_AGIPD_Gain_Darks_NBC.ipynb b/notebooks/AGIPD/Characterize_AGIPD_Gain_Darks_NBC.ipynb
index ca345b5b104b1a46c06b9d4a01ccd3f0ccede587..d4299cd575ba43756c838cf74e06ac438e1dd8c6 100644
--- a/notebooks/AGIPD/Characterize_AGIPD_Gain_Darks_NBC.ipynb
+++ b/notebooks/AGIPD/Characterize_AGIPD_Gain_Darks_NBC.ipynb
@@ -25,13 +25,13 @@
    "outputs": [],
    "source": [
     "cluster_profile = \"noDB\" # The ipcluster profile to use\n",
-    "in_folder = \"/gpfs/exfel/d/raw/SPB/202030/p900138/\" # path to input data, required\n",
-    "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/AGIPD3\" # path to output to, required\n",
+    "in_folder = \"/gpfs/exfel/d/raw/SPB/202030/p900138\" # path to input data, required\n",
+    "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/AGIPDbad_sep64\" # path to output to, required\n",
     "sequences = [0] # sequence files to evaluate.\n",
     "modules = [-1]  # list of modules to evaluate, RANGE ALLOWED\n",
-    "run_high = 264 # run number in which high gain data was recorded, required\n",
-    "run_med = 265 # run number in which medium gain data was recorded, required\n",
-    "run_low = 266 # run number in which low gain data was recorded, required\n",
+    "run_high = 167 # run number in which high gain data was recorded, required\n",
+    "run_med = 168 # run number in which medium gain data was recorded, required\n",
+    "run_low = 169 # run number in which low gain data was recorded, required\n",
     "\n",
     "karabo_id = \"SPB_DET_AGIPD1M-1\" # karabo karabo_id\n",
     "karabo_da = [-1]  # data aggregators\n",
@@ -40,8 +40,8 @@
     "h5path = '/INSTRUMENT/{}/DET/{}:xtdf/image' # path in the HDF5 file to images\n",
     "h5path_idx = '/INDEX/{}/DET/{}:xtdf/image' # path in the HDF5 file to images\n",
     "h5path_ctrl = '/CONTROL/{}/MDL/FPGA_COMP_TEST' # path to control information\n",
-    "karabo_id_control = \"SPB_IRU_AGIPD1M1\" # karabo-id for control device\n",
-    "karabo_da_control = 'DA02' # karabo DA for control infromation\n",
+    "karabo_id_control = \"SPB_IRU_AGIPD1M1\" # karabo-id for control device '\n",
+    "karabo_da_control = \"AGIPD1MCTRL00\" # karabo DA for control infromation\n",
     "\n",
     "use_dir_creation_date = True  # use dir creation date as data production reference date\n",
     "cal_db_interface = \"tcp://max-exfl016:8020\" # the database interface to use\n",
@@ -56,13 +56,21 @@
     "interlaced = False # assume interlaced data format, for data prior to Dec. 2017\n",
     "rawversion = 2 # RAW file format version\n",
     "\n",
-    "thresholds_offset_sigma = 3. # thresholds in terms of n sigma noise for offset deduced bad pixels\n",
-    "thresholds_offset_hard = [4000, 8500] # thresholds in absolute ADU terms for offset deduced bad pixels\n",
+    "thresholds_offset_sigma = 3. # offset sigma thresholds for offset deduced bad pixels\n",
+    "thresholds_offset_hard = [0, 0] # For setting the same threshold offset for the 3 gains. Left for backcompatability. Default [0, 0] to take the following parameters.\n",
+    "thresholds_offset_hard_hg = [3000, 7000] # High-gain thresholds in absolute ADU terms for offset deduced bad pixels\n",
+    "thresholds_offset_hard_mg = [6000, 10000] # Medium-gain thresholds in absolute ADU terms for offset deduced bad pixels\n",
+    "thresholds_offset_hard_lg = [6000, 10000] # Low-gain thresholds in absolute ADU terms for offset deduced bad pixels\n",
     "\n",
-    "thresholds_noise_sigma = 5. # thresholds in terms of n sigma noise for offset deduced bad pixels\n",
-    "thresholds_noise_hard = [4, 20] # thresholds in absolute ADU terms for offset deduced bad pixels\n",
+    "thresholds_noise_sigma = 5. # noise sigma thresholds for offset deduced bad pixels\n",
+    "thresholds_noise_hard = [0, 0] # For setting the same threshold noise for the 3 gains. Left for backcompatability. Default [0, 0] to take the following parameters.\n",
+    "thresholds_noise_hard_hg = [4, 20] # High-gain thresholds in absolute ADU terms for offset deduced bad pixels\n",
+    "thresholds_noise_hard_mg = [4, 20] # Medium-gain thresholds in absolute ADU terms for offset deduced bad pixels\n",
+    "thresholds_noise_hard_lg = [4, 20] # Low-gain thresholds in absolute ADU terms for offset deduced bad pixels\n",
     "\n",
-    "high_res_badpix_3d = False # set this to True if you need high-resolution 3d bad pixel plots. Runtime: ~ 1h"
+    "thresholds_gain_sigma = 5. # Gain separation sigma threshold\n",
+    "\n",
+    "high_res_badpix_3d = False # set this to True if you need high-resolution 3d bad pixel plots. ~7mins extra time for 64 memory cells"
    ]
   },
   {
@@ -123,6 +131,7 @@
     "creation_time=None\n",
     "if use_dir_creation_date:\n",
     "    creation_time = get_dir_creation_date(in_folder, run_high)\n",
+    "\n",
     "print(f\"Using {creation_time} as creation time of constant.\")\n",
     "\n",
     "run, prop, seq = run_prop_seq_from_path(in_folder)\n",
@@ -152,6 +161,8 @@
    "source": [
     "control_fname = '{}/r{:04d}/RAW-R{:04d}-{}-S00000.h5'.format(in_folder, run_high, \n",
     "                                                             run_high, karabo_da_control)\n",
+    "if '{' in h5path_ctrl:\n",
+    "    h5path_ctrl = h5path_ctrl.format(karabo_id_control)\n",
     "\n",
     "if gain_setting == 0.1:\n",
     "    if creation_time.replace(tzinfo=None) < dateutil.parser.parse('2020-01-31'):\n",
@@ -185,10 +196,8 @@
     "    karabo_da = [\"AGIPD{:02d}\".format(i) for i in modules]\n",
     "else:\n",
     "    modules = [int(x[-2:]) for x in karabo_da]\n",
-    "\n",
     "h5path = h5path.format(karabo_id, receiver_id)\n",
     "h5path_idx = h5path_idx.format(karabo_id, receiver_id)\n",
-    "h5path_ctrl = h5path_ctrl.format(karabo_id_control)\n",
     "\n",
     "print(\"Parameters are:\")\n",
     "print(f\"Proposal: {prop}\")\n",
@@ -244,7 +253,8 @@
     "ExecuteTime": {
      "end_time": "2019-02-20T10:50:55.839958Z",
      "start_time": "2019-02-20T10:50:55.468134Z"
-    }
+    },
+    "scrolled": false
    },
    "outputs": [],
    "source": [
@@ -257,8 +267,8 @@
     "    import h5py\n",
     "    from cal_tools.enums import BadPixels\n",
     "    from cal_tools.agipdlib import get_num_cells, get_acq_rate\n",
-    "    \n",
-    "    filename, filename_out, channel = inp\n",
+    "\n",
+    "    filename, channel, gg = inp\n",
     "    \n",
     "    if cells == 0:\n",
     "        cells = get_num_cells(filename, loc, channel)\n",
@@ -267,9 +277,10 @@
     "    \n",
     "    if acq_rate == 0.:\n",
     "        acq_rate = get_acq_rate(filename, loc, channel)\n",
-    "    \n",
-    "    thresholds_offset_hard, thresholds_offset_sigma, thresholds_noise_hard, thresholds_noise_sigma = bp_thresh \n",
     "\n",
+    "    thresholds_offset, thresholds_offset_sigma, thresholds_noise, thresholds_noise_sigma = bp_thresh \n",
+    "    thresholds_offset_hard = thresholds_offset[gg]\n",
+    "    thresholds_noise_hard = thresholds_noise[gg]\n",
     "    infile = h5py.File(filename, \"r\", driver=\"core\")\n",
     "    \n",
     "    h5path = h5path.format(channel)\n",
@@ -300,7 +311,7 @@
     "    else:\n",
     "        ga = im[:, 1, ...]\n",
     "        im = im[:, 0, ...].astype(np.float32)\n",
-    "        \n",
+    "\n",
     "    im = np.rollaxis(im, 2)\n",
     "    im = np.rollaxis(im, 2, 1)\n",
     "\n",
@@ -311,84 +322,100 @@
     "    offset = np.zeros((im.shape[0], im.shape[1], mcells))\n",
     "    gains = np.zeros((im.shape[0], im.shape[1], mcells))\n",
     "    noise = np.zeros((im.shape[0], im.shape[1], mcells))\n",
+    "    gains_std = np.zeros((im.shape[0], im.shape[1], mcells))\n",
     "    \n",
     "    for cc in np.unique(cellIds[cellIds < mcells]):\n",
     "        cellidx = cellIds == cc\n",
     "        offset[...,cc] = np.median(im[..., cellidx], axis=2)\n",
     "        noise[...,cc] = np.std(im[..., cellidx], axis=2)\n",
     "        gains[...,cc] = np.median(ga[..., cellidx], axis=2)\n",
-    "        \n",
+    "        gains_std[...,cc] = np.std(ga[..., cellidx], axis=2)\n",
+    "\n",
     "    # bad pixels\n",
     "    bp = np.zeros(offset.shape, np.uint32)\n",
     "    # offset related bad pixels\n",
     "    offset_mn = np.nanmedian(offset, axis=(0,1))\n",
-    "    offset_std = np.nanstd(offset, axis=(0,1))    \n",
-    "    \n",
+    "    offset_std = np.nanstd(offset, axis=(0,1))\n",
+    "\n",
     "    bp[(offset < offset_mn-thresholds_offset_sigma*offset_std) |\n",
     "       (offset > offset_mn+thresholds_offset_sigma*offset_std)] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n",
     "    bp[(offset < thresholds_offset_hard[0]) | (\n",
     "        offset > thresholds_offset_hard[1])] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n",
     "    bp[~np.isfinite(offset)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
-    "    \n",
+    "\n",
     "    # noise related bad pixels\n",
     "    noise_mn = np.nanmedian(noise, axis=(0,1))\n",
     "    noise_std = np.nanstd(noise, axis=(0,1))    \n",
-    "    \n",
     "    bp[(noise < noise_mn-thresholds_noise_sigma*noise_std) |\n",
     "       (noise > noise_mn+thresholds_noise_sigma*noise_std)] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
     "    bp[(noise < thresholds_noise_hard[0]) | (noise > thresholds_noise_hard[1])] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
     "    bp[~np.isfinite(noise)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
     "\n",
+    "    return offset, noise, gains, gains_std, gg, bp, cells, acq_rate\n",
     "\n",
-    "    return offset, noise, gains, bp, cells, acq_rate\n",
-    "        \n",
-    "        \n",
     "offset_g = OrderedDict()\n",
     "noise_g = OrderedDict()\n",
     "gain_g = OrderedDict()\n",
+    "gainstd_g = OrderedDict()\n",
     "badpix_g = OrderedDict()\n",
     "gg = 0\n",
     "\n",
     "start = datetime.now()\n",
     "all_cells = []\n",
     "all_acq_rate = []\n",
-    "for gain, mapped_files in gain_mapped_files.items():\n",
+    "\n",
+    "if thresholds_offset_hard == [0, 0]:\n",
+    "    thresholds_offset_hard = [thresholds_offset_hard_hg, thresholds_offset_hard_mg, thresholds_offset_hard_lg]\n",
+    "else:\n",
+    "    thresholds_offset_hard = [thresholds_offset_hard] * 3\n",
+    "\n",
+    "if thresholds_noise_hard == [0, 0]:\n",
+    "    thresholds_noise_hard = [thresholds_noise_hard_hg, thresholds_noise_hard_mg, thresholds_noise_hard_lg]\n",
+    "else:\n",
+    "    thresholds_noise_hard = [thresholds_noise_hard] * 3\n",
+    "\n",
     "    \n",
-    "    inp = []\n",
+    "inp = []\n",
+    "for gain, mapped_files in gain_mapped_files.items():\n",
     "    dones = []\n",
     "    for i in modules:\n",
-    "        qm = \"Q{}M{}\".format(i//4 +1, i % 4 + 1)    \n",
+    "        qm = f\"Q{i//4+1}M{i%4+1}\"\n",
     "        if qm in mapped_files and not mapped_files[qm].empty():\n",
-    "            fname_in = mapped_files[qm].get()            \n",
+    "            fname_in = mapped_files[qm].get()\n",
     "            dones.append(mapped_files[qm].empty())\n",
     "        else:\n",
     "            continue\n",
-    "        fout = os.path.abspath(\"{}/{}\".format(out_folder, (os.path.split(fname_in)[-1]).replace(\"RAW\", \"CORR\")))\n",
-    "        inp.append((fname_in, fout, i))\n",
-    "    first = False\n",
-    "    p = partial(characterize_module, IL_MODE, max_cells,\n",
-    "               (thresholds_offset_hard, thresholds_offset_sigma,\n",
-    "                thresholds_noise_hard, thresholds_noise_sigma), \n",
-    "                rawversion, karabo_id, acq_rate, h5path, h5path_idx)\n",
-    "    #results = list(map(p, inp))\n",
-    "    results = view.map_sync(p, inp)\n",
-    "    for ii, r in enumerate(results):\n",
-    "        i = modules[ii]\n",
-    "        offset, noise, gain, bp, thiscell, thisacq = r\n",
-    "        all_cells.append(thiscell)\n",
-    "        all_acq_rate.append(thisacq)\n",
-    "        qm = \"Q{}M{}\".format(i//4 +1, i % 4 + 1)\n",
-    "        if qm not in offset_g:\n",
-    "            offset_g[qm] = np.zeros((offset.shape[0], offset.shape[1], offset.shape[2], 3))\n",
-    "            noise_g[qm] = np.zeros_like(offset_g[qm])\n",
-    "            gain_g[qm] = np.zeros_like(offset_g[qm])\n",
-    "            badpix_g[qm] = np.zeros_like(offset_g[qm], np.uint32)\n",
+    "        inp.append((fname_in, i, gg))\n",
     "        \n",
-    "        offset_g[qm][...,gg] = offset\n",
-    "        noise_g[qm][...,gg] = noise\n",
-    "        gain_g[qm][...,gg] = gain\n",
-    "        badpix_g[qm][...,gg] = bp\n",
-    "    gg +=1\n",
+    "    gg += 1\n",
+    "\n",
+    "p = partial(characterize_module, IL_MODE, max_cells,\n",
+    "           (thresholds_offset_hard, thresholds_offset_sigma,\n",
+    "            thresholds_noise_hard, thresholds_noise_sigma),\n",
+    "            rawversion, karabo_id, acq_rate, h5path, h5path_idx)\n",
+    "\n",
+    "# Don't remove. Used for Debugging.\n",
+    "#results = list(map(p, inp))\n",
+    "results = view.map_sync(p, inp)\n",
+    "\n",
+    "for ii, r in enumerate(results):\n",
+    "    offset, noise, gains, gains_std, gg, bp, thiscell, thisacq = r\n",
+    "    all_cells.append(thiscell)\n",
+    "    all_acq_rate.append(thisacq)\n",
+    "    qm = \"Q{}M{}\".format(i//4 + 1, i % 4 + 1)\n",
+    "    if qm not in offset_g:\n",
+    "        offset_g[qm] = np.zeros((offset.shape[0], offset.shape[1], offset.shape[2], 3))\n",
+    "        noise_g[qm] = np.zeros_like(offset_g[qm])\n",
+    "        gain_g[qm] = np.zeros_like(offset_g[qm])\n",
+    "        gainstd_g[qm] = np.zeros_like(offset_g[qm])\n",
+    "        badpix_g[qm] = np.zeros_like(offset_g[qm], np.uint32)\n",
+    "\n",
+    "    offset_g[qm][...,gg] = offset\n",
+    "    noise_g[qm][...,gg] = noise\n",
+    "    gain_g[qm][...,gg] = gains\n",
+    "    gainstd_g[qm][..., gg] = gains_std\n",
+    "    badpix_g[qm][...,gg] = bp\n",
+    "    \n",
     "\n",
     "duration = (datetime.now()-start).total_seconds()\n",
     "logger.runtime_summary_entry(success=True, runtime=duration,\n",
@@ -401,6 +428,20 @@
     "print(f\"Using {acq_rate} MHz acquisition rate\")"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Add a badpixel due to bad gain separation\n",
+    "for g in range(2):\n",
+    "    # Bad pixels during bad gain separation.\n",
+    "    # Fraction of pixels in the module with separation lower than \"thresholds_gain_sigma\".\n",
+    "    bad_sep = (gain_g[qm][..., g+1] - gain_g[qm][..., g]) / np.sqrt(gainstd_g[qm][..., g+1]**2 + gainstd_g[qm][..., g]**2)\n",
+    "    badpix_g[qm][...,g+1][(bad_sep)<thresholds_gain_sigma]|= BadPixels.GAIN_THRESHOLDING_ERROR.value"
+   ]
+  },
   {
    "cell_type": "markdown",
    "metadata": {},
@@ -447,7 +488,7 @@
     "               'ThresholdsDark': thresholds_g[qm],\n",
     "               'BadPixelsDark': badpix_g[qm]    \n",
     "               }\n",
-    "    \n",
+    "\n",
     "if local_output:\n",
     "    for qm in offset_g.keys():\n",
     "        ofile = \"{}/agipd_offset_store_{}_{}.h5\".format(out_folder,\n",
@@ -530,8 +571,8 @@
     "\n",
     "    print(f\"Injecting constants {const} with conditions:\\n\")\n",
     "    print(\"1. memory_cells: {}\\n2. bias_voltage: {}\\n\"\n",
-    "              \"3. acquisition_rate: {}\\n4. gain_setting: {}\\n\"\n",
-    "              \"5. creation_time: {}\\n\".format(max_cells, bias_voltage,\n",
+    "          \"3. acquisition_rate: {}\\n4. gain_setting: {}\\n\"\n",
+    "          \"5. creation_time: {}\\n\".format(max_cells, bias_voltage,\n",
     "                                           acq_rate, gain_setting,\n",
     "                                           creation_time))\n",
     "    for const in res[qm]:\n",
@@ -566,7 +607,7 @@
     "                print(msg.format(const, qm,\n",
     "                                 metadata.calibration_constant_version.begin_at))\n",
     "            except Exception as e:\n",
-    "                print(e)\n",
+    "                print(\"Error:\", e)\n",
     "\n",
     "        if local_output:\n",
     "            save_const_to_h5(metadata, out_folder)\n",
@@ -610,7 +651,7 @@
     "                ax.add_patch(matplotlib.patches.Rectangle((x,(q_x[1]-q_st*im+3)), l_x, l_y/2, linewidth=2,edgecolor='c',\n",
     "                                           facecolor='sandybrown', fill=True))\n",
     "            x += asic_pos\n",
-    "            \n",
+    "\n",
     "        if iq*4+im == modules[0]:\n",
     "            # Change Text for current processed module.\n",
     "            ax.text(q_x[0]+13, q_x[1]-q_st*im+1.5, 'Q{}M{}'.format(\n",
@@ -694,22 +735,6 @@
     "show_overview(res, cell, gain, out_folder=out_folder, infix=\"{}-{}-{}\".format(*offset_runs.values()))"
    ]
   },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Global Bad Pixel Behaviour ##\n",
-    "\n",
-    "The following plots show the results of bad pixel evaluation for all evaluated memory cells. Cells are stacked in the Z-dimension, while pixels values in x/y are rebinned with a factor of 2. This excludes single bad pixels present only in disconnected pixels. Hence, any bad pixels spanning at least 4 pixels in the x/y-plane, or across at least two memory cells are indicated. Colors encode the bad pixel type, or mixed type."
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### High Gain ###"
-   ]
-  },
   {
    "cell_type": "code",
    "execution_count": null,
@@ -721,49 +746,30 @@
     "cols = {BadPixels.NOISE_OUT_OF_THRESHOLD.value: (BadPixels.NOISE_OUT_OF_THRESHOLD.name, '#FF000080'),\n",
     "        BadPixels.OFFSET_NOISE_EVAL_ERROR.value: (BadPixels.OFFSET_NOISE_EVAL_ERROR.name, '#0000FF80'),\n",
     "        BadPixels.OFFSET_OUT_OF_THRESHOLD.value: (BadPixels.OFFSET_OUT_OF_THRESHOLD.name, '#00FF0080'),\n",
-    "        BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value: ('MIXED', '#DD00DD80')}\n",
+    "        BadPixels.GAIN_THRESHOLDING_ERROR.value: (BadPixels.GAIN_THRESHOLDING_ERROR.name, '#FF40FF40'),\n",
+    "        BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value: ('OFFSET_OUT_OF_THRESHOLD + NOISE_OUT_OF_THRESHOLD', '#DD00DD80'),\n",
+    "        BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value | \n",
+    "        BadPixels.GAIN_THRESHOLDING_ERROR.value: ('MIXED', '#BFDF009F')}\n",
     "\n",
-    "rebin = 8 if not high_res_badpix_3d else 2\n",
+    "if high_res_badpix_3d:\n",
+    "    display(Markdown(\"\"\"\n",
+    "    \n",
+    "    ## Global Bad Pixel Behaviour ##\n",
     "\n",
-    "gain = 0\n",
-    "for mod, data in badpix_g.items():\n",
-    "    plot_badpix_3d(data[...,gain], cols, title=mod, rebin_fac=rebin)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Medium Gain ###"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "gain = 1\n",
-    "for mod, data in badpix_g.items():\n",
-    "    plot_badpix_3d(data[...,gain], cols, title=mod, rebin_fac=rebin)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Low Gain ###"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "gain = 2\n",
-    "for mod, data in badpix_g.items():\n",
-    "    plot_badpix_3d(data[...,gain], cols, title=mod, rebin_fac=rebin)"
+    "    The following plots show the results of bad pixel evaluation for all evaluated memory cells. \n",
+    "    Cells are stacked in the Z-dimension, while pixels values in x/y are rebinned with a factor of 2. \n",
+    "    This excludes single bad pixels present only in disconnected pixels. \n",
+    "    Hence, any bad pixels spanning at least 4 pixels in the x/y-plane, or across at least two memory cells are indicated. \n",
+    "    Colors encode the bad pixel type, or mixed type.\n",
+    "\n",
+    "    \"\"\"))\n",
+    "\n",
+    "    gnames = ['High Gain', 'Medium Gain', 'Low Gain']\n",
+    "    for gain in range(3):\n",
+    "        display(Markdown(f'### {gnames[gain]} ###'))\n",
+    "        for mod, data in badpix_g.items():\n",
+    "            plot_badpix_3d(data[...,gain], cols, title=mod, rebin_fac=1)\n",
+    "            plt.show()"
    ]
   },
   {
@@ -783,9 +789,9 @@
    },
    "outputs": [],
    "source": [
-    "create_constant_overview(offset_g, \"Offset (ADU)\", max_cells, 4000, 8000,\n",
+    "create_constant_overview(offset_g, \"Offset (ADU)\", max_cells, 4000, 10000,\n",
     "                         out_folder=out_folder, infix=\"{}-{}-{}\".format(*offset_runs.values()),\n",
-    "                         badpixels=[badpix_g, np.nan]))"
+    "                         badpixels=[badpix_g, np.nan])"
    ]
   },
   {
@@ -815,8 +821,8 @@
     "    bp_thresh[mod] = np.zeros((con.shape[0], con.shape[1], con.shape[2], 5), dtype=con.dtype)\n",
     "    bp_thresh[mod][...,:2] = con[...,:2]\n",
     "    bp_thresh[mod][...,2:] = con\n",
-    "    \n",
-    "    \n",
+    "\n",
+    "\n",
     "create_constant_overview(thresholds_g, \"Threshold (ADU)\", max_cells, 4000, 10000, 5,\n",
     "                         out_folder=out_folder, infix=\"{}-{}-{}\".format(*offset_runs.values()),\n",
     "                         badpixels=[bp_thresh, np.nan],\n",
@@ -855,6 +861,7 @@
    "source": [
     "table = []\n",
     "gain_names = ['High', 'Medium', 'Low']\n",
+    "bits = [BadPixels.NOISE_OUT_OF_THRESHOLD, BadPixels.OFFSET_OUT_OF_THRESHOLD, BadPixels.OFFSET_NOISE_EVAL_ERROR, BadPixels.GAIN_THRESHOLDING_ERROR]\n",
     "for qm in badpix_g.keys():\n",
     "    for gain in range(3):\n",
     "\n",
@@ -863,42 +870,45 @@
     "\n",
     "        data = np.copy(badpix_g[qm][:,:,:,gain])\n",
     "        datau32 = data.astype(np.uint32)\n",
-    "        l_data.append(data)\n",
-    "        l_data.append(datau32 - np.bitwise_or(datau32,BadPixels.NOISE_OUT_OF_THRESHOLD.value))\n",
-    "        l_data.append(datau32 - np.bitwise_or(datau32,BadPixels.OFFSET_OUT_OF_THRESHOLD.value))\n",
-    "        l_data.append(datau32 - np.bitwise_or(datau32,BadPixels.OFFSET_NOISE_EVAL_ERROR.value))\n",
+    "        l_data.append(len(datau32[datau32>0].flatten()))\n",
+    "        for bit in bits:\n",
+    "            l_data.append(np.count_nonzero(badpix_g[qm][:,:,:,gain] & bit.value))\n",
     "\n",
     "        if old_const['BadPixelsDark'] is not None:\n",
     "            dataold = np.copy(old_const['BadPixelsDark'][:, :, :, gain])\n",
     "            datau32old = dataold.astype(np.uint32)\n",
-    "            l_data_old.append(dataold)\n",
-    "            l_data_old.append(datau32old - np.bitwise_or(datau32old,BadPixels.NOISE_OUT_OF_THRESHOLD.value))\n",
-    "            l_data_old.append(datau32old - np.bitwise_or(datau32old,BadPixels.OFFSET_OUT_OF_THRESHOLD.value))\n",
-    "            l_data_old.append(datau32old - np.bitwise_or(datau32old,BadPixels.OFFSET_NOISE_EVAL_ERROR.value))\n",
+    "            l_data_old.append(len(datau32old[datau32old>0].flatten()))\n",
+    "            for bit in bits:\n",
+    "                l_data_old.append(np.count_nonzero(old_const['BadPixelsDark'][:, :, :, gain] & bit.value))\n",
     "\n",
     "        l_data_name = ['All bad pixels', 'NOISE_OUT_OF_THRESHOLD', \n",
-    "                       'OFFSET_OUT_OF_THRESHOLD', 'OFFSET_NOISE_EVAL_ERROR']\n",
+    "                       'OFFSET_OUT_OF_THRESHOLD', 'OFFSET_NOISE_EVAL_ERROR', 'GAIN_THRESHOLDING_ERROR']\n",
     "\n",
-    "        l_threshold = ['', '{}'.format(thresholds_noise_sigma), '{}'.format(thresholds_offset_sigma),\n",
-    "                      '{}/{}'.format(thresholds_offset_hard, thresholds_noise_hard)]\n",
+    "        l_threshold = ['', f'{thresholds_noise_sigma}', f'{thresholds_offset_sigma}',\n",
+    "                      f'{thresholds_offset_hard[gain]}/{thresholds_noise_hard[gain]}', '']\n",
     "\n",
     "        for i in range(len(l_data)):\n",
-    "            line = ['{}, {} gain'.format(l_data_name[i], gain_names[gain]),\n",
-    "                      l_threshold[i],\n",
-    "                      len(l_data[i][l_data[i]>0].flatten())\n",
-    "                   ]\n",
+    "            line = [f'{l_data_name[i]}, {gain_names[gain]} gain', l_threshold[i], l_data[i]]\n",
+    "\n",
     "            if old_const['BadPixelsDark'] is not None:\n",
-    "                line += [len(l_data_old[i][l_data_old[i]>0].flatten())]\n",
+    "                line += [l_data_old[i]]\n",
     "            else:\n",
     "                line += ['-']\n",
     "\n",
     "            table.append(line)\n",
     "        table.append(['', '', '', ''])\n",
     "\n",
-    "display(Markdown('### Number of bad pixels ###'.format(qm)))\n",
+    "display(Markdown('''\n",
+    "\n",
+    "### Number of bad pixels ###\n",
+    "\n",
+    "One pixel can be bad for different reasons, therefore, the sum of all types of bad pixels can be more than the number of all bad pixels.\n",
+    "\n",
+    "'''))\n",
     "if len(table)>0:\n",
     "    md = display(Latex(tabulate.tabulate(table, tablefmt='latex', \n",
-    "                                     headers=[\"Pixel type\", \"Threshold\", \"New constant\", \"Old constant \"])))  "
+    "                                         headers=[\"Pixel type\", \"Threshold\", \n",
+    "                                                  \"New constant\", \"Old constant\"])))  "
    ]
   },
   {
@@ -957,6 +967,13 @@
     "    md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=header)))  "
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  },
   {
    "cell_type": "code",
    "execution_count": null,
diff --git a/notebooks/DSSC/Characterize_DSSC_Darks_NBC.ipynb b/notebooks/DSSC/Characterize_DSSC_Darks_NBC.ipynb
index 6b3d7b71146b03ec10859f336733b040775dd0d6..35926c6e6a8d98a3c5c6718f2adba4594b02a03a 100644
--- a/notebooks/DSSC/Characterize_DSSC_Darks_NBC.ipynb
+++ b/notebooks/DSSC/Characterize_DSSC_Darks_NBC.ipynb
@@ -491,15 +491,6 @@
     "show_overview(res, cell, gain, out_folder=out_folder, infix=\"_{}\".format(run))"
    ]
   },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Global Bad Pixel Behaviour ##\n",
-    "\n",
-    "The following plots show the results of bad pixel evaluation for all evaluated memory cells. Cells are stacked in the Z-dimension, while pixels values in x/y are rebinned with a factor of 2. This excludes single bad pixels present only in disconnected pixels. Hence, any bad pixels spanning at least 4 pixels in the x/y-plane, or across at least two memory cells are indicated. Colors encode the bad pixel type, or mixed type."
-   ]
-  },
   {
    "cell_type": "code",
    "execution_count": null,
@@ -513,11 +504,24 @@
     "        BadPixels.OFFSET_OUT_OF_THRESHOLD.value: (BadPixels.OFFSET_OUT_OF_THRESHOLD.name, '#00FF0080'),\n",
     "        BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value: ('MIXED', '#DD00DD80')}\n",
     "\n",
-    "rebin = 8 if not high_res_badpix_3d else 2\n",
-    "\n",
-    "gain = 0\n",
-    "for mod, data in badpix_g.items():\n",
-    "    plot_badpix_3d(data, cols, title=mod, rebin_fac=rebin)"
+    "if high_res_badpix_3d:\n",
+    "    display(Markdown(\"\"\"\n",
+    "    \n",
+    "    ## Global Bad Pixel Behaviour ##\n",
+    "\n",
+    "    The following plots show the results of bad pixel evaluation for all evaluated memory cells. \n",
+    "    Cells are stacked in the Z-dimension, while pixels values in x/y are rebinned with a factor of 2. \n",
+    "    This excludes single bad pixels present only in disconnected pixels. \n",
+    "    Hence, any bad pixels spanning at least 4 pixels in the x/y-plane, or across at least two memory cells are indicated. \n",
+    "    Colors encode the bad pixel type, or mixed type.\n",
+    "\n",
+    "    \"\"\"))\n",
+    "    # set rebin_fac to 1 for avoiding rebining and\n",
+    "    # losing real values of badpixels(High resolution).\n",
+    "    gain = 0\n",
+    "    for mod, data in badpix_g.items():\n",
+    "        plot_badpix_3d(data, cols, title=mod, rebin_fac=2)\n",
+    "        plt.show()"
    ]
   },
   {
@@ -597,7 +601,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.7.6"
+   "version": "3.6.7"
   }
  },
  "nbformat": 4,
diff --git a/notebooks/LPD/LPDChar_Darks_NBC.ipynb b/notebooks/LPD/LPDChar_Darks_NBC.ipynb
index d710ba8afa0e142d267a9b5cf5592c52e91707f4..11ed8987c1435aefbe7951d3ed715fe6c9d1289b 100644
--- a/notebooks/LPD/LPDChar_Darks_NBC.ipynb
+++ b/notebooks/LPD/LPDChar_Darks_NBC.ipynb
@@ -57,7 +57,7 @@
    "source": [
     "cluster_profile = \"noDB\" # The ipcluster profile to use\n",
     "in_folder = \"/gpfs/exfel/exp/FXE/201931/p900088/raw\" # path to input data, required\n",
-    "out_folder = \"/gpfs/exfel/data/scratch/karnem/LPD/\" # path to output to, required\n",
+    "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/LPD/\" # path to output to, required\n",
     "sequence = 0 # sequence files to evaluate\n",
     "modules = [-1] # list of modules to evaluate, RANGE ALLOWED\n",
     "run_high = 112 # run number in which high gain data was recorded, required\n",
@@ -816,11 +816,7 @@
   {
    "cell_type": "markdown",
    "metadata": {},
-   "source": [
-    "## Global Bad Pixel Behaviour ##\n",
-    "\n",
-    "The following plots shows the results of a bad pixel evaluation for all evaluated memory cells. Cells are stacked in the Z-dimension, while pixels values in x/y are re-binned with a factor of 2. This excludes single bad pixels present only in disconnected pixels. Hence, any bad pixels spanning at least 4 pixels in the x/y-plane, or across at least two memory cells are indicated. Colors encode the bad pixel type, or mixed type."
-   ]
+   "source": []
   },
   {
    "cell_type": "code",
@@ -833,17 +829,30 @@
     "        BadPixels.OFFSET_OUT_OF_THRESHOLD.value: (BadPixels.OFFSET_OUT_OF_THRESHOLD.name, '#00FF0080'),\n",
     "        BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value: ('MIXED', '#DD00DD80')}\n",
     "\n",
-    "rebin = 8 if not high_res_badpix_3d else 2\n",
-    "\n",
-    "for gain in range(3):\n",
-    "    display(Markdown('### Bad pixel behaviour - {} gain ###'.format(gain_names[gain])))\n",
-    "    for cap in capacitor_settings:\n",
-    "        for mod, data in badpix_g[cap].items():\n",
-    "            plot_badpix_3d(data[...,gain], cols, title='', rebin_fac=rebin)\n",
-    "            ax = plt.gca()\n",
-    "            leg = ax.get_legend()\n",
-    "            leg.set(alpha=0.5)\n",
-    "    plt.show()"
+    "if high_res_badpix_3d:\n",
+    "    display(Markdown(\"\"\"\n",
+    "    \n",
+    "    ## Global Bad Pixel Behaviour ##\n",
+    "\n",
+    "    The following plots shows the results of a bad pixel evaluation for all evaluated memory cells.\n",
+    "    Cells are stacked in the Z-dimension, while pixels values in x/y are re-binned with a factor of 2.\n",
+    "    This excludes single bad pixels present only in disconnected pixels.\n",
+    "    Hence, any bad pixels spanning at least 4 pixels in the x/y-plane, or across at least two memory cells are indicated.\n",
+    "    Colors encode the bad pixel type, or mixed type.\n",
+    "\n",
+    "        \"\"\"))\n",
+    "    # set rebin_fac to 1 for avoiding rebining and\n",
+    "    # losing real values of badpixels(High resolution).\n",
+    "    rebin = 2\n",
+    "    for gain in range(3):\n",
+    "        display(Markdown('### Bad pixel behaviour - {} gain ###'.format(gain_names[gain])))\n",
+    "        for cap in capacitor_settings:\n",
+    "            for mod, data in badpix_g[cap].items():\n",
+    "                plot_badpix_3d(data[...,gain], cols, title='', rebin_fac=rebin)\n",
+    "                ax = plt.gca()\n",
+    "                leg = ax.get_legend()\n",
+    "                leg.set(alpha=0.5)\n",
+    "        plt.show()"
    ]
   },
   {
@@ -1155,7 +1164,7 @@
    "outputs": [],
    "source": [
     "table = []\n",
-    "\n",
+    "bits = [BadPixels.NOISE_OUT_OF_THRESHOLD, BadPixels.OFFSET_OUT_OF_THRESHOLD, BadPixels.OFFSET_NOISE_EVAL_ERROR]\n",
     "for cap in res:\n",
     "    for qm in res[cap]:\n",
     "        for gain in range(3):\n",
@@ -1165,42 +1174,45 @@
     "            \n",
     "            data = np.copy(res[cap][qm]['BadPixelsDark'][:,:,:,gain])\n",
     "            datau32 = data.astype(np.uint32)\n",
-    "            l_data.append(data)\n",
-    "            l_data.append(datau32 - np.bitwise_or(datau32,BadPixels.NOISE_OUT_OF_THRESHOLD.value))\n",
-    "            l_data.append(datau32 - np.bitwise_or(datau32,BadPixels.OFFSET_OUT_OF_THRESHOLD.value))\n",
-    "            l_data.append(datau32 - np.bitwise_or(datau32,BadPixels.OFFSET_NOISE_EVAL_ERROR.value))\n",
+    "            l_data.append(len(datau32[datau32>0].flatten()))\n",
+    "            for bit in bits:\n",
+    "                l_data.append(np.count_nonzero(badpix_g[cap][qm][:,:,:,gain].astype(np.uint32) & bit.value))\n",
     "            \n",
     "            if old_const['BadPixelsDark'] is not None:\n",
     "                dataold = np.copy(old_const['BadPixelsDark'][:, :, :, gain])\n",
     "                datau32old = dataold.astype(np.uint32)\n",
-    "                l_data_old.append(dataold)\n",
-    "                l_data_old.append(datau32old - np.bitwise_or(datau32old,BadPixels.NOISE_OUT_OF_THRESHOLD.value))\n",
-    "                l_data_old.append(datau32old - np.bitwise_or(datau32old,BadPixels.OFFSET_OUT_OF_THRESHOLD.value))\n",
-    "                l_data_old.append(datau32old - np.bitwise_or(datau32old,BadPixels.OFFSET_NOISE_EVAL_ERROR.value))\n",
+    "                l_data_old.append(len(datau32old[datau32old>0].flatten()))\n",
+    "                for bit in bits:\n",
+    "                    l_data_old.append(np.count_nonzero(old_const['BadPixelsDark'][:, :, :, gain].astype(np.uint32) & bit.value))\n",
     "\n",
     "            l_data_name = ['All bad pixels', 'NOISE_OUT_OF_THRESHOLD', \n",
     "                           'OFFSET_OUT_OF_THRESHOLD', 'OFFSET_NOISE_EVAL_ERROR']\n",
     "            \n",
-    "            l_threshold = ['', '{}'.format(thresholds_noise_sigma), '{}'.format(thresholds_offset_sigma),\n",
-    "                          '{}/{}'.format(thresholds_offset_hard, thresholds_noise_hard)]\n",
+    "            l_threshold = ['', f'{thresholds_noise_sigma}', f'{thresholds_offset_sigma}',\n",
+    "                           f'{thresholds_offset_hard}/{thresholds_noise_hard}']\n",
     "            \n",
     "            for i in range(len(l_data)):\n",
-    "                line = ['{}, gain {}'.format(l_data_name[i], gain_names[gain]),\n",
-    "                          l_threshold[i],\n",
-    "                          len(l_data[i][l_data[i]>0].flatten())\n",
-    "                       ]\n",
+    "                line = [f'{l_data_name[i]}, gain {gain_names[gain]}', l_threshold[i], l_data[i]]\n",
     "            \n",
     "                if old_const['BadPixelsDark'] is not None:\n",
-    "                    line += [len(l_data_old[i][l_data_old[i]>0].flatten())]\n",
+    "                    line += [l_data_old[i]]\n",
     "                else:\n",
     "                    line += ['-']\n",
     "                    \n",
     "                table.append(line)\n",
+    "            table.append(['', '', '', ''])\n",
+    "\n",
+    "display(Markdown('''\n",
     "\n",
-    "display(Markdown('### Number of bad pixels ###'.format(qm)))\n",
+    "### Number of bad pixels ###\n",
+    "\n",
+    "One pixel can be bad for different reasons, therefore, the sum of all types of bad pixels can be more than the number of all bad pixels.\n",
+    "\n",
+    "'''))\n",
     "if len(table)>0:\n",
     "    md = display(Latex(tabulate.tabulate(table, tablefmt='latex', \n",
-    "                                     headers=[\"Pixel type\", \"Threshold\", \"New constant\", \"Old constant \"])))  "
+    "                                     headers=[\"Pixel type\", \"Threshold\",\n",
+    "                                              \"New constant\", \"Old constant\"])))  "
    ]
   },
   {
@@ -1244,6 +1256,13 @@
     "    display(Markdown('### {} [ADU], good pixels only ###'.format(const)))\n",
     "    md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=header)))  "
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
   }
  ],
  "metadata": {
@@ -1262,7 +1281,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.7.6"
+   "version": "3.6.7"
   }
  },
  "nbformat": 4,
diff --git a/notebooks/generic/overallmodules_Darks_Summary_NBC.ipynb b/notebooks/generic/overallmodules_Darks_Summary_NBC.ipynb
index e2f6df1dcdedff5eb3031bb15e860ca412458a10..3509955b32c9969afd80d19efe7c55b63e1e18a1 100644
--- a/notebooks/generic/overallmodules_Darks_Summary_NBC.ipynb
+++ b/notebooks/generic/overallmodules_Darks_Summary_NBC.ipynb
@@ -41,7 +41,14 @@
     "\n",
     "Offset and Noise both not $nan$ values \n",
     "\n",
-    "Values: $\\mathrm{thresholds\\_offset\\_sigma}$, $\\mathrm{thresholds\\_offset\\_hard}$, $\\mathrm{thresholds\\_noise\\_sigma}$, $\\mathrm{thresholds\\_noise\\_hard}$ are given as parameters."
+    "Values: $\\mathrm{thresholds\\_offset\\_sigma}$, $\\mathrm{thresholds\\_offset\\_hard}$, $\\mathrm{thresholds\\_noise\\_sigma}$, $\\mathrm{thresholds\\_noise\\_hard}$ are given as parameters.\n",
+    "\n",
+    "**\"GAIN_THRESHOLDING_ERROR\":**\n",
+    "\n",
+    "Bad gain separated pixels with sigma separation less than gain_separation_sigma_threshold\n",
+    "\n",
+    "$$ sigma\\_separation = \\frac{\\mathrm{gain\\_offset} - \\mathrm{previous\\_gain\\_offset}}{\\sqrt{\\mathrm{gain\\_offset_{std}}^\\mathrm{2} + \\mathrm{previuos\\_gain\\_offset_{std}}^\\mathrm{2}}}$$ \n",
+    "$$ Bad\\_separation = sigma\\_separation < \\mathrm{gain\\_separation\\_sigma\\_threshold} $$"
    ]
   },
   {
@@ -51,7 +58,7 @@
    "outputs": [],
    "source": [
     "cluster_profile = \"noDB\" # The ipcluster profile to use\n",
-    "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/SPB/DARK/AGIPD1/\" # path to output to, required\n",
+    "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/AGIPD_bad_gain_separation/\" # path to output to, required\n",
     "dinstance = \"AGIPD1M1\" # detector instance\n",
     "gain_names = ['High gain', 'Medium gain', 'Low gain'] # a list of gain names to be used in plotting\n",
     "threshold_names = ['HG-MG threshold', 'MG_LG threshold'] # a list of gain names to be used in plotting"
@@ -445,7 +452,9 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "scrolled": false
+   },
    "outputs": [],
    "source": [
     "head = ['Module', 'High gain', 'Medium gain', 'Low gain']\n",
@@ -468,24 +477,22 @@
     "            data = np.copy(const[i_mod, :, :, :, gain])\n",
     "            if const_name == 'BadPixelsDark':\n",
     "                data[data > 0] = 1.0\n",
+    "\n",
     "                datasum = np.nansum(data)\n",
     "                datamean = np.nanmean(data)\n",
     "                if datamean == 1.0:\n",
     "                    datamean = np.nan\n",
     "                    datasum = np.nan\n",
     "\n",
-    "                t_line.append('{:6.0f} ({:6.3f}) '.format(\n",
-    "                    datasum, datamean))\n",
-    "\n",
-    "                label = '### Number (fraction) of bad pixels ###'\n",
+    "                t_line.append(f'{datasum:6.0f} ({datamean:6.3f}) ')\n",
+    "                label = '## Number(fraction) of bad pixels'\n",
     "            else:\n",
     "\n",
     "                data[constants['BadPixelsDark']\n",
     "                     [i_mod, :, :, :, gain] > 0] = np.nan\n",
     "\n",
-    "                t_line.append('{:6.1f} $\\\\pm$ {:6.1f}'.format(\n",
-    "                    np.nanmean(data), np.nanstd(data)))\n",
-    "                label = '### Average {} [ADU], good pixels only ###'.format(const_name)\n",
+    "                t_line.append(f'{np.nanmean(data):6.1f} $\\\\pm$ {np.nanstd(data):6.1f}')\n",
+    "                label = f'## Average {const_name} [ADU], good pixels only'\n",
     "\n",
     "        table.append(t_line)\n",
     "\n",
@@ -501,6 +508,13 @@
    "outputs": [],
    "source": []
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  },
   {
    "cell_type": "code",
    "execution_count": null,