diff --git a/notebooks/AGIPD/CS_Characterization_unequalClockStep_NBC.ipynb b/notebooks/AGIPD/CS_Characterization_unequalClockStep_NBC.ipynb
index 62976688bd9a1ed75318c898df61af5c6be2314e..aceb6595cbb3ca3c198fcdbb3a060b1b0b9de58d 100644
--- a/notebooks/AGIPD/CS_Characterization_unequalClockStep_NBC.ipynb
+++ b/notebooks/AGIPD/CS_Characterization_unequalClockStep_NBC.ipynb
@@ -48,7 +48,7 @@
     "metadata_folder = \"\"  # Directory containing calibration_metadata.yml when run by xfel-calibrate\n",
     "dark_run = 5 # run containning CS specific darks, required\n",
     "\n",
-    "modules = [7] # modules to work on, required, range allowed\n",
+    "modules = [8] # modules to work on, required, range allowed\n",
     "karabo_da = [\"all\"]\n",
     "karabo_id_control = \"SPB_IRU_AGIPD1M1\"  # karabo-id for the control device e.g. \"MID_EXP_AGIPD1M1\", or \"SPB_IRU_AGIPD1M1\"\n",
     "karabo_id = \"SPB_DET_AGIPD1M-1\"\n",
@@ -616,13 +616,8 @@
     "    ax4.set_xlabel('Integration time (clk)', fontsize=11)\n",
     "    ax3.title.set_text('CS signal fits')\n",
     "    \n",
-    "    fig5 = plt.figure(figsize=(9, 4))\n",
-    "    gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])\n",
-    "    ax5 = plt.subplot(gs[0])\n",
-    "    ax6 = plt.subplot(gs[1])\n",
+    "    fig5, ax5 = plt.subplots(figsize=(9, 4))\n",
     "    ax5.grid(zorder=0, lw=1.5)\n",
-    "    ax6.set_xlabel(\"Pixel counter\", fontsize=11)\n",
-    "    ax6.set_ylabel(\"Relative\\ndifference\", fontsize=11)\n",
     "    ax5.set_ylabel('Fitted slope ratio', fontsize=11)\n",
     "    ax5.title.set_text('Ratios of fitted gains')\n",
     "    ax5.set_ylim(0,60)\n",
@@ -720,7 +715,6 @@
     "            else:\n",
     "                no_entry += 1\n",
     "\n",
-    "    ax6.hlines(np.mean(reldiff), 0, counter, color='k', zorder=10)\n",
     "    leg = ax5.legend()\n",
     "\n",
     "        \n",
@@ -909,6 +903,16 @@
     "fres_copy = copy.deepcopy(fres)"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "for g in range(0,3):\n",
+    "    fres[g]['m'][~np.isfinite(fres[g]['m'])] = -1."
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,
@@ -940,7 +944,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "def calc_median(roi):\n",
+    "def calc_median(roi, useBP=None):\n",
     "    \"\"\"Calculate median for a specified block of pixels.\"\"\"\n",
     "    \n",
     "    index = []\n",
@@ -959,13 +963,26 @@
     "\n",
     "        for r_ac in range(index.shape[0]):\n",
     "            idx = index[r_ac]\n",
-    "            val = np.nanmedian(slopes[g][:, idx[1][0]:idx[1][1], \n",
-    "                                            idx[0][0]:idx[0][1]], axis=(1,2))\n",
-    "            means[:, idx[1][0]:idx[1][1], idx[0][0]:idx[0][1]]=np.repeat(val, roi[0]*roi[1]).reshape(fshape[0],roi[0],roi[1])\n",
-    "\n",
-    "            val = np.nanmedian(intercepts[g][:, idx[1][0]:idx[1][1], \n",
-    "                                            idx[0][0]:idx[0][1]], axis=(1,2))\n",
-    "            means_b[:, idx[1][0]:idx[1][1], idx[0][0]:idx[0][1]]=np.repeat(val, roi[0]*roi[1]).reshape(fshape[0],roi[0],roi[1])\n",
+    "            if useBP:\n",
+    "                val_m = []\n",
+    "                val_b = []\n",
+    "                for cell in range(fshape[0]):\n",
+    "                    sel_m = slopes[g][cell, idx[1][0]:idx[1][1], idx[0][0]:idx[0][1]][BPmap[cell, idx[1][0]:idx[1][1], idx[0][0]:idx[0][1]] == 0]\n",
+    "                    sel_b = intercepts[g][cell, idx[1][0]:idx[1][1], idx[0][0]:idx[0][1]][BPmap[cell, idx[1][0]:idx[1][1], idx[0][0]:idx[0][1]] == 0]\n",
+    "                    val_m_int = np.nanmedian(sel_m)\n",
+    "                    val_b_int = np.nanmedian(sel_b)\n",
+    "                    val_m.append(val_m_int)\n",
+    "                    val_b.append(val_b_int)\n",
+    "                val_m = np.array(val_m)\n",
+    "                val_b = np.array(val_b)\n",
+    "            else:    \n",
+    "                sel_m = slopes[g][:, idx[1][0]:idx[1][1], idx[0][0]:idx[0][1]]\n",
+    "                sel_b = intercepts[g][:, idx[1][0]:idx[1][1], idx[0][0]:idx[0][1]]\n",
+    "                val_m = np.nanmedian(sel_m, axis=(1,2))                    \n",
+    "                val_b = np.nanmedian(sel_b, axis=(1,2))\n",
+    "                \n",
+    "            means[:, idx[1][0]:idx[1][1], idx[0][0]:idx[0][1]] = np.repeat(val_m, roi[0]*roi[1]).reshape(fshape[0],roi[0],roi[1])\n",
+    "            means_b[:, idx[1][0]:idx[1][1], idx[0][0]:idx[0][1]] = np.repeat(val_b, roi[0]*roi[1]).reshape(fshape[0],roi[0],roi[1])\n",
     "        median_m[g, ...] = means\n",
     "        median_b[g, ...] = means_b\n",
     "    return median_m, median_b"
@@ -977,10 +994,13 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# First we calculate median with higher granularity to cope with inhomogeneity of CS injection over module\n",
-    "# The median values are used to replace bad pixel values.\n",
-    "roi = [4,8]\n",
-    "median_m, median_b = calc_median(roi)"
+    "# First we calculate median with lower granularity to cope with inhomogeneity of CS injection over module\n",
+    "\n",
+    "roi = [64,64]\n",
+    "median_m, median_b = calc_median(roi)\n",
+    "\n",
+    "for g in range(0,3):\n",
+    "    median_m[g][median_m[g] == 0.] = -1."
    ]
   },
   {
@@ -1021,7 +1041,6 @@
     "    mask[(~np.isfinite(fres[g]['fit_dev'])) & (mask==0)] |= BadPixels.CI_LINEAR_DEVIATION.value\n",
     "    fit_dev_thr = np.nanmean(fres[g]['fit_dev'][np.isfinite(fres[g]['fit_dev'])])+5*np.nanstd(fres[g]['fit_dev'][np.isfinite(fres[g]['fit_dev'])])    \n",
     "    mask[(fres[g]['fit_dev'] > fit_dev_thr) & (mask==0)] |= BadPixels.CI_LINEAR_DEVIATION.value\n",
-    "#     mask[((slopes[g] <= 0.1) | (intercepts[g] <= 1.)) & (mask==0)] |= BadPixels.CI_EVAL_ERROR.value #this causes most pixels mask in A0\n",
     "    mask[((~np.isfinite(slopes[g])) | (~np.isfinite(intercepts[g]))) & (mask==0)] |= BadPixels.CI_EVAL_ERROR.value     \n",
     "\n",
     "    bad_pixels[g] = mask"
@@ -1089,6 +1108,22 @@
     "It is a known behaviour that cells around cell Id higher than 300 show peculiar CS signal, hence fitting procedure fails, leading to almost 100% of bad pixels. These cells are later on filled with median values calculated over all memory cells."
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# This is needed to determine how many ASICs are not working properly to exclude them from BP counts\n",
+    "counter = 0\n",
+    "for row in range(0,128,64):\n",
+    "    for col in range(0, 512, 64):\n",
+    "        mins = np.nanmedian(median_m[0][1, row:row+64, col:col+64])\n",
+    "        if  mins == -1.:\n",
+    "            counter += 1\n",
+    "print(f'Number of not working ASICs: {counter}')"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,
@@ -1097,7 +1132,7 @@
    "source": [
     "nonzeros = []\n",
     "for i in range(BPmap.shape[0]):\n",
-    "    nonzeros.append(np.count_nonzero(BPmap[i, ...]))\n",
+    "    nonzeros.append(np.count_nonzero(BPmap[i, ...]) - counter*(64*64))\n",
     "\n",
     "worst_cell = np.where(nonzeros == np.max(nonzeros))[0][0] \n",
     "print('Percentage of bad pixels in cell 1: {:.2f}%'.format(np.asarray(nonzeros[1])/(512*128)*100))\n",
@@ -1130,7 +1165,6 @@
     "plt.ylabel('# Bad pixels %')\n",
     "plt.xlabel('Memory cell Id')\n",
     "plt.grid(axis='y', ls='dotted')\n",
-    "# plt.ylim(0,30)\n",
     "plt.show()"
    ]
   },
@@ -1146,7 +1180,9 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "scrolled": true
+   },
    "outputs": [],
    "source": [
     "total = np.count_nonzero(BPmap)\n",
@@ -1216,7 +1252,6 @@
     "    ax.grid(zorder=0)\n",
     "    ax.set_xlabel(name)\n",
     "    ax.set_yscale('log')\n",
-    "#     ax.set_title(name)\n",
     "    ax.legend()\n",
     "    def statistic(stat, colour, shift):\n",
     "        textstr = \"\"\n",
@@ -1264,11 +1299,87 @@
     "for g in range(0,3):\n",
     "    slopes[g][BPmap > 0] = median_m[g, ...][BPmap > 0]\n",
     "    intercepts[g][BPmap > 0] = median_b[g, ...][BPmap > 0]\n",
-    "ratio_HM = slopes[0] / slopes [1]\n",
-    "ratio_ML = slopes[1] / slopes [2]\n",
+    "ratio_HM[BPmap > 0] = ratio_HM_med[BPmap > 0]\n",
+    "ratio_ML[BPmap > 0] = ratio_ML_med[BPmap > 0]\n",
     "\n",
-    "ratio_HM[np.isnan(ratio_HM) | np.isinf(ratio_HM)] = 0.\n",
-    "ratio_ML[np.isnan(ratio_ML) | np.isinf(ratio_ML)] = 0."
+    "# Here we recalculate median values with higher granularity over already sanitized data.\n",
+    "# This is to minimise the effect of the horizontal streaks over the rows. New medians are then applied to bad pixels.\n",
+    "roi = [64, 32]\n",
+    "median_m, median_b = calc_median(roi, useBP=True)\n",
+    "ratio_HM_med = median_m[0] / median_m[1]\n",
+    "ratio_ML_med = median_m[1] / median_m[2]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "for g in range(0,3):\n",
+    "    slopes[g][BPmap > 0] = median_m[g, ...][BPmap > 0]\n",
+    "    intercepts[g][BPmap > 0] = median_b[g, ...][BPmap > 0]\n",
+    "ratio_HM[BPmap > 0] = ratio_HM_med[BPmap > 0]\n",
+    "ratio_ML[BPmap > 0] = ratio_ML_med[BPmap > 0]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Here we recalculate median values with higher granularity over already sanitized data.\n",
+    "# This is to minimise the effect of the horizontal streaks over the rows. New medians are then applied to bad pixels.\n",
+    "roi = [4,8]\n",
+    "median_m, median_b = calc_median(roi)\n",
+    "ratio_HM_med = median_m[0] / median_m[1]\n",
+    "ratio_ML_med = median_m[1] / median_m[2]\n",
+    "\n",
+    "for g in range(0,3):\n",
+    "    slopes[g][BPmap > 0] = median_m[g, ...][BPmap > 0]\n",
+    "    intercepts[g][BPmap > 0] = median_b[g, ...][BPmap > 0]\n",
+    "ratio_HM[BPmap > 0] = ratio_HM_med[BPmap > 0]\n",
+    "ratio_ML[BPmap > 0] = ratio_ML_med[BPmap > 0]\n",
+    "ratio_HM[ratio_HM < thr_mean_HM[0]] = thr_mean_HM[0]\n",
+    "ratio_HM[ratio_HM > thr_mean_HM[1]] = thr_mean_HM[1]\n",
+    "ratio_ML[ratio_ML < thr_mean_ML[0]] = thr_mean_ML[0]\n",
+    "ratio_ML[ratio_ML > thr_mean_ML[1]] = thr_mean_ML[1]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "HGMG_mean = np.nanmedian(ratio_HM, axis=0)\n",
+    "MGLG_mean = np.nanmedian(ratio_ML, axis=0)\n",
+    "\n",
+    "slopes_median = {}\n",
+    "intercepts_median = {}\n",
+    "for g in range(0,3):\n",
+    "    slopes_median[g] = np.nanmedian(slopes[g], axis=0)\n",
+    "    intercepts_median[g] = np.nanmedian(intercepts[g], axis=0)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Sanitisation of cells having more than 20% of bad pixels.\n",
+    "for cell in range(0,352):\n",
+    "    if BP_rtio[cell] > 20:\n",
+    "        ratio_HM[cell, ...] = HGMG_mean\n",
+    "        ratio_ML[cell, ...] = MGLG_mean\n",
+    "        \n",
+    "for g in range(0,3):\n",
+    "    for cell in range(0,352):\n",
+    "        if BP_rtio[cell] > 20:\n",
+    "            slopes[g][cell, ...] = slopes_median[g]\n",
+    "            intercepts[g ][cell, ...] = intercepts_median[g]"
    ]
   },
   {
@@ -1435,71 +1546,6 @@
     "    preview_fitted_params(data, g, 310)"
    ]
   },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Here we recalculate median values with lower granularity over already sanitized data.\n",
-    "# This is to minise effect of the horizontal streaks over the rows. New medians are then applied to bad pixels.\n",
-    "roi = [32,64]\n",
-    "median_m, median_b = calc_median(roi)\n",
-    "\n",
-    "for g in range(0,3):\n",
-    "    slopes[g][BPmap > 0] = median_m[g, ...][BPmap > 0]\n",
-    "    intercepts[g][BPmap > 0] = median_b[g, ...][BPmap > 0]\n",
-    "ratio_HM = slopes[0] / slopes [1]\n",
-    "ratio_ML = slopes[1] / slopes [2]\n",
-    "\n",
-    "ratio_HM[np.isnan(ratio_HM) | np.isinf(ratio_HM)] = 0.\n",
-    "ratio_ML[np.isnan(ratio_ML) | np.isinf(ratio_ML)] = 0."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "HGMG_mean = np.nanmedian(ratio_HM, axis=0)\n",
-    "MGLG_mean = np.nanmedian(ratio_ML, axis=0)\n",
-    "\n",
-    "slopes_median = {}\n",
-    "intercepts_median = {}\n",
-    "for g in range(0,3):\n",
-    "    slopes_median[g] = np.nanmedian(slopes[g], axis=0)\n",
-    "    intercepts_median[g] = np.nanmedian(intercepts[g], axis=0)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Sanitisation of cells having more than 20% of bad pixels.\n",
-    "for cell in range(0,352):\n",
-    "    if BP_rtio[cell] > 20:\n",
-    "        ratio_HM[cell, ...] = HGMG_mean\n",
-    "        ratio_ML[cell, ...] = MGLG_mean\n",
-    "        \n",
-    "for g in range(0,3):\n",
-    "    for cell in range(0,352):\n",
-    "        if BP_rtio[cell] > 20:\n",
-    "            slopes[g][cell, ...] = slopes_median[g]\n",
-    "            intercepts[g ][cell, ...] = intercepts_median[g]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    " fres = copy.deepcopy(fres_copy) # this is needed to have raw fits without sanitization"
-   ]
-  },
   {
    "cell_type": "markdown",
    "metadata": {},
@@ -1519,7 +1565,7 @@
     "\n",
     "for i, cell in enumerate(cells):\n",
     "    vmin, vmax = get_range(ratio_HM, 8)\n",
-    "    fig = xana.heatmapPlot(ratio_HM[cell, ...], cmap='jet', add_panels=False, aspect=1,\n",
+    "    fig = xana.heatmapPlot(ratio_HM[cell, ...][BPmap[cell, ...] == 0], cmap='jet', add_panels=False, aspect=1,\n",
     "                    lut_label=f'H-M ratio, Cell: {cell}', \n",
     "                         cb_loc='bottom', vmin=vmin, vmax=vmax)\n",
     "\n",
@@ -1660,8 +1706,11 @@
    "outputs": [],
    "source": [
     "avg_m = {}\n",
+    "avg_b = {}\n",
     "for g in range(0,3):\n",
-    "    avg_m[g] = np.nanmedian(slopes[g])"
+    "    avg_m[g] = np.nanmean(slopes[g])\n",
+    "    avg_b[g] = np.nanmean(intercepts[g])\n",
+    "    print('Avg. slope values for g{}: {:.2f}'.format(g, avg_m[g]))"
    ]
   },
   {
@@ -1671,6 +1720,32 @@
     "## Validation plot"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from cal_tools.tools import get_from_db\n",
+    "dconst = getattr(Constants.AGIPD, 'Offset')()\n",
+    "condition = Conditions.Dark.AGIPD(\n",
+    "            memory_cells=mem_cells,\n",
+    "            bias_voltage=bias_voltage,\n",
+    "            acquisition_rate=4.5,\n",
+    "            gain_setting=gain_setting)\n",
+    "\n",
+    "offset_d = get_from_db(karabo_id=karabo_id,\n",
+    "            karabo_da=karabo_da[0],\n",
+    "            constant=dconst,\n",
+    "            condition=condition,\n",
+    "            empty_constant=None,\n",
+    "            cal_db_interface=cal_db_interface,\n",
+    "            creation_time=creation_time-timedelta(seconds=1),\n",
+    "            verbosity=1)\n",
+    "offset_d = np.moveaxis(offset_d[0], 2, 0)\n",
+    "print(f'Offset retrieved for: {karabo_da[0]}')"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,
@@ -1678,13 +1753,13 @@
    "outputs": [],
    "source": [
     "one_photon = 73 #assuming 10keV ph\n",
-    "tpix_range2 = [(35,40), (38,44)]\n",
+    "tpix_range2 = [(35,55), (38,44)]\n",
     "\n",
     "test_pixels = []\n",
     "for i in range(*tpix_range2[0]):\n",
     "    for j in range(*tpix_range2[1]):\n",
     "        test_pixels.append((j,i))\n",
-    "test_cells = [1, 35]\n",
+    "test_cells = [35, 50]\n",
     "tcell = np.array(test_cells)\n",
     "tcell = tcell[tcell < mem_cells]\n",
     "if tcell.size == 0:\n",
@@ -1692,12 +1767,13 @@
     "else:\n",
     "    test_cells = tcell.tolist()\n",
     "\n",
-    "bins = (np.linspace(-100,  6000,  100),\n",
-    "        np.linspace(5000,  13000,  1000),\n",
-    "       )    \n",
+    "bins = (np.linspace(0,  5000,  100),\n",
+    "        np.linspace(0,  5000,  100),\n",
+    "       )     \n",
     "    \n",
     "markers = ['o', '.', 'x', 'v']\n",
     "colors = ['tab:blue', 'tab:orange', 'tab:green']\n",
+    "cmaps = ['viridis', 'plasma', 'winter']\n",
     "\n",
     "fig1, ax1 = plt.subplots(figsize=(9, 5))\n",
     "ax1.grid(zorder=0, lw=1.5)\n",
@@ -1711,11 +1787,16 @@
     "ax2.set_xlabel('Integration time (clk)', fontsize=11)\n",
     "ax2.title.set_text('Corrected CS signal response')\n",
     "ax2.set_xscale('log')\n",
+    "\n",
+    "fig3, ax3 = plt.subplots(figsize=(9, 5))\n",
+    "ax3.set_ylabel(\"AGIPD response (#photon)\")\n",
+    "ax3.set_xlabel(\"Integration time (clk)\")\n",
+    "ax3.grid(lw=1.5)\n",
     "    \n",
     "ana = cs_data[0] \n",
     "\n",
-    "H = [0, 0, 0, 0, 0]\n",
-    "gains = [0, 1, 1, 2, 2]\n",
+    "H = [0, 0, 0,]\n",
+    "gains = [0, 1, 2]\n",
     "ex, ey = None, None\n",
     "for pix in test_pixels:\n",
     "    for cell in test_cells:\n",
@@ -1741,22 +1822,44 @@
     "                    \n",
     "                else:\n",
     "                    y_sg = y[labels==g][:bounds[g]]\n",
-    "                    hg = slopes[0][cell, pix[0], pix[1]]\n",
+    "                    y_corr[labels==g] = y[labels==g] - offset_d[cell, pix[0], pix[1], g]\n",
     "                    og = slopes[g][cell, pix[0], pix[1]]\n",
-    "                    chg = hg / avg_m[0]\n",
+    "                    r = [1., ratio_HM[cell, pix[0], pix[1]], ratio_ML[cell, pix[0], pix[1]]*ratio_HM[cell, pix[0], pix[1]]]\n",
+    "\n",
     "                    cog = og / avg_m[g]\n",
     "                    cm = avg_m[0] / avg_m[g]\n",
-    "                    A_offset = intercepts[g][cell, pix[0], pix[1]]-intercepts[0][cell, pix[0], pix[1]] \n",
-    "                    y_corr = (y[labels==g] - A_offset) / cog * cm\n",
-    "                    y_sg = y_corr[:bounds[g]]\n",
+    "                    b_corr = intercepts[g][cell, pix[0], pix[1]] - offset_d[cell, pix[0], pix[1], g]\n",
+    "                    A_offset = intercepts[0][cell, pix[0], pix[1]] - b_corr * r[g]\n",
+    "                    y_newcorr = (y_corr[labels==g]) * r[g] / cog + A_offset\n",
+    "                    y_sg = y_newcorr[:bounds[g]]\n",
     "\n",
     "\n",
+    "                h, ex, ey = np.histogram2d(x_sg, y_sg/one_photon,\n",
+    "                                           range=((0,5e3), (0, 5e3)), bins=bins)\n",
+    "                H[g] += h\n",
     "                ax2.plot(x_sg[::5], (y_sg/one_photon)[::5], ls='None', marker=markers[g], \n",
     "                         color=colors[g], alpha=0.3)\n",
-    "x = x_eq[:cs_data[0].shape[0]]\n",
-    "ideal = avg_m[1] * x / one_photon\n",
+    "x = x_eq[:710]\n",
+    "ideal = (avg_m[0] * x + avg_b[0]) / one_photon\n",
+    "\n",
+    "for g in range(3):\n",
+    "    H[g][H[g]==0] = np.nan \n",
+    "    ax3.imshow(H[g].T, origin=\"lower\", extent=[ex[0], ex[-1], ey[0], ey[-1]],\n",
+    "          aspect='auto', cmap=cmaps[g], alpha=0.7, vmin=0, vmax=300)\n",
+    "ax3.plot(x, ideal, color='red')\n",
+    "\n",
     "fig1.show()\n",
-    "fig2.show()"
+    "fig2.show()\n",
+    "fig3.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    " fres = copy.deepcopy(fres_copy) # this is needed to have raw fits without sanitization"
    ]
   },
   {