diff --git a/notebooks/AGIPD/CS_Characterization_unequalClockStep_NBC.ipynb b/notebooks/AGIPD/CS_Characterization_unequalClockStep_NBC.ipynb
index 9464321c24ee53b707d66e6b4b2b5f0033e4db0b..f2504afb5becdd50cd966ba98cc953ecaf45342e 100644
--- a/notebooks/AGIPD/CS_Characterization_unequalClockStep_NBC.ipynb
+++ b/notebooks/AGIPD/CS_Characterization_unequalClockStep_NBC.ipynb
@@ -112,9 +112,6 @@
     "from cal_tools.tools import (\n",
     "    get_constant_from_db_and_time,\n",
     "    calcat_creation_time,\n",
-    "    get_pdu_from_db,\n",
-    "    get_report,\n",
-    "    send_to_db,\n",
     ")\n",
     "from iCalibrationDB import Conditions, Constants\n",
     "\n",
@@ -344,7 +342,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## Mark gain stage\n"
+    "## Mark gain stage"
    ]
   },
   {
@@ -494,7 +492,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "# Inspection of gain stage marking "
+    "## Inspection of gain stage marking"
    ]
   },
   {
@@ -568,7 +566,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "# Example from Pixel Subset\n",
+    "## Example from Pixel Subset\n",
     "The following is to verify the labeling and fitting procedure for a small sample of pixels from two regions of interest.\n",
     "\n",
     "Plots visualize:\n",
@@ -766,7 +764,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "# Parallel fitting of the whole module"
+    "## Parallel fitting of the whole module"
    ]
   },
   {
@@ -993,7 +991,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## Example of high gain median values of cell 1"
+    "### Example of high gain median values of cell 1"
    ]
   },
   {
@@ -1506,105 +1504,6 @@
     " fres = copy.deepcopy(fres_copy) # this is needed to have raw fits without sanitization"
    ]
   },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Sanitised constants keys:\n",
-    "# mH: high gain slope\n",
-    "# mM: medium gain slope\n",
-    "# mL: low gain slope\n",
-    "#\n",
-    "# bH: high gain intercept\n",
-    "# bM: medium gain intercept\n",
-    "# bL: low gain intercept\n",
-    "# \n",
-    "# H-M: ratio of high gain and medium gain slope\n",
-    "# M-L: ratio of medium gain and low gain slope\n",
-    "\n",
-    "sanitised_const = {}\n",
-    "gain_keys = ['mH', 'mM', 'mL', 'bH', 'bM', 'bL']\n",
-    "\n",
-    "for g, key, in enumerate(gain_keys):\n",
-    "    if g < 3:\n",
-    "        sanitised_const[key] = slopes[g]\n",
-    "    else:\n",
-    "        sanitised_const[key] = intercepts[g-3]\n",
-    "\n",
-    "sanitised_const['H-M'] = ratio_HM\n",
-    "sanitised_const['M-L'] = ratio_ML"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def slope_dict_to_arr(d):\n",
-    "    \"\"\"Convert dictionary to numpy array.\"\"\"\n",
-    "\n",
-    "    arr = np.zeros((8,mem_cells,128,512), np.float32)\n",
-    "    for i, key in enumerate(d):\n",
-    "        arr[i,...] = d[key]\n",
-    "        \n",
-    "    return arr"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "proposal = list(filter(None, in_folder.strip('/').split('/')))[-2]\n",
-    "file_loc = f'Proposal: {proposal}, Run: {dark_run}'\n",
-    "\n",
-    "report = get_report(metadata_folder)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "md = None\n",
-    "\n",
-    "# set the operating condition\n",
-    "condition = Conditions.Dark.AGIPD(memory_cells=mem_cells, \n",
-    "                                  bias_voltage=bias_voltage,\n",
-    "                                  acquisition_rate=acq_rate, \n",
-    "                                  gain_setting=gain_setting,\n",
-    "                                  integration_time=integration_time)\n",
-    "\n",
-    "db_modules = get_pdu_from_db(karabo_id, karabo_da, Constants.AGIPD.SlopesCS(),\n",
-    "                             condition, cal_db_interface,\n",
-    "                             snapshot_at=creation_time)\n",
-    "\n",
-    "if db_output:\n",
-    "    for pdu in db_modules:\n",
-    "        for const in [\"SlopesCS\", \"BadPixelsCS\"]:\n",
-    "            dbconst = getattr(Constants.AGIPD, const)()\n",
-    "            \n",
-    "            if const == \"SlopesCS\":\n",
-    "                dbconst.data = slope_dict_to_arr(sanitised_const)\n",
-    "            else:\n",
-    "                dbconst.data = BPmap \n",
-    "\n",
-    "            md = send_to_db(pdu, karabo_id, dbconst, condition,\n",
-    "                            file_loc, report, cal_db_interface,\n",
-    "                            creation_time=creation_time)\n",
-    "\n",
-    "    print(\"Constants parameter conditions are:\\n\")\n",
-    "    print(f\"• memory_cells: {mem_cells}\\n• bias_voltage: {bias_voltage}\\n\"\n",
-    "      f\"• acquisition_rate: {acq_rate}\\n• gain_setting: {gain_setting}\\n\"\n",
-    "      f\"• integration_time: {integration_time}\\n\"\n",
-    "      f\"• creation_time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\\n\")"
-   ]
-  },
   {
    "cell_type": "markdown",
    "metadata": {},
diff --git a/notebooks/AGIPD/CS_Characterization_unequalClockStep_Summary.ipynb b/notebooks/AGIPD/CS_Characterization_unequalClockStep_Summary.ipynb
index c5c1ca82dfbdc083b5266fe421eba1d9eea67965..926adb27801c0401fd53c7de6ba298a6fe0c2c6a 100644
--- a/notebooks/AGIPD/CS_Characterization_unequalClockStep_Summary.ipynb
+++ b/notebooks/AGIPD/CS_Characterization_unequalClockStep_Summary.ipynb
@@ -16,7 +16,7 @@
    "outputs": [],
    "source": [
     "in_folder = \"\" # in this notebook, in_folder is not used as the data source is in the destination folder\n",
-    "out_folder = \"/gpfs/exfel/exp/SPB/202330/p900340/scratch/CS_Processing/test\"  # the folder to output to, required\n",
+    "out_folder = \"/gpfs/exfel/exp/SPB/202330/p900340/scratch/CS_Processing/test/230914/\"  # the folder to output to, required\n",
     "metadata_folder = \"\"  # Directory containing calibration_metadata.yml when run by xfel-calibrate\n",
     "proc_folder = \"\" # Path to corrected image data used to create histograms and validation plots\n",
     "raw_folder = '/gpfs/exfel/exp/SPB/202330/p900340/raw/'  # folder of raw data. This is used to save information of source data of generated constants, required\n",
@@ -28,6 +28,8 @@
     "\n",
     "creation_time = \"\" # To overwrite the measured creation_time. Required Format: YYYY-MM-DD HR:MN:SC e.g. \"2022-06-28 13:00:00\"\n",
     "creation_date_offset = \"00:00:00\" # add an offset to creation date, e.g. to get different constants\n",
+    "cal_db_interface = \"tcp://max-exfl-cal002:8015#8045\"  # the database interface to use\n",
+    "db_output = False  # output constants to database\n",
     "\n",
     "# Detector conditions\n",
     "bias_voltage = -1 # detector bias voltage, use -1 to use value stored in slow data.\n",
@@ -43,7 +45,6 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import traceback\n",
     "import warnings\n",
     "\n",
     "import h5py\n",
@@ -63,9 +64,14 @@
     "from cal_tools.enums import BadPixels\n",
     "from cal_tools.tools import ( \n",
     "    module_index_to_qm,\n",
-    "    calcat_creation_time\n",
+    "    calcat_creation_time,\n",
+    "    get_report,\n",
+    "    get_pdu_from_db,\n",
+    "    send_to_db\n",
     ")\n",
     "\n",
+    "from iCalibrationDB import Conditions, Constants\n",
+    "\n",
     "from extra_data import RunDirectory\n",
     "from extra_geom import AGIPD_1MGeometry, AGIPD_500K2GGeometry\n",
     "\n",
@@ -142,8 +148,24 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "# Sanitised constants keys:\n",
+    "# mH: high gain slope\n",
+    "# mM: medium gain slope\n",
+    "# mL: low gain slope\n",
+    "#\n",
+    "# bH: high gain intercept\n",
+    "# bM: medium gain intercept\n",
+    "# bL: low gain intercept\n",
+    "# \n",
+    "# H-M: ratio of high gain and medium gain slope\n",
+    "# M-L: ratio of medium gain and low gain slope\n",
+    "\n",
+    "# Consts saved to DB as (8,mem_cells,128,512) \n",
+    "# order: ['mH', 'mM', 'mL', 'bH', 'bM', 'bL']\n",
+    "\n",
     "keys_fit = ['m', 'b']\n",
-    "keys = ['H-M', 'M-L']\n",
+    "gs = ['H', 'M', 'L']\n",
+    "keys_ratio = ['H-M', 'M-L']\n",
     "\n",
     "labels = {'m': 'Slope (m)',\n",
     "          'b': 'Intercept (b)',\n",
@@ -154,6 +176,7 @@
     "fit_data = {}\n",
     "ratios = {}\n",
     "BPmap = {}\n",
+    "sanitised_const = {}\n",
     "\n",
     "modules = []\n",
     "karabo_da = []\n",
@@ -161,23 +184,124 @@
     "    qm = module_index_to_qm(mod)\n",
     "    ratios[mod] = {}\n",
     "    fit_data[mod] = {}\n",
+    "    sanitised_const[mod] = {}\n",
     "    constants_file = f'{out_folder}/CSconst_{karabo_id}_M{mod}.h5'\n",
     "\n",
     "    if os.path.exists(constants_file):\n",
     "        print(f'Data available for module {qm}')\n",
+    "        \n",
     "        with h5py.File(constants_file, 'r') as hf:\n",
     "            BPmap[mod] = hf['/BadPixels/data'][()].swapaxes(1,2) \n",
-    "            for key in keys:\n",
+    "            \n",
+    "            for key in keys_fit:\n",
+    "                fit_data[mod][key] = {}\n",
+    "                for g in range(0,3):\n",
+    "                    # loop 1st through keys, then through gains \n",
+    "                    # to have right order of constants for DB injection\n",
+    "                    fit_data[mod][key][g] = hf[f'/SanitizedConsts/{g}/{key}/data'][()].swapaxes(1,2)\n",
+    "                    sanitised_const[mod][f'{key}{gs[g]}'] = hf[f'/SanitizedConsts/{g}/{key}/data'][()]\n",
+    "            \n",
+    "            for key in keys_ratio:\n",
     "                ratios[mod][key]= hf[f'/SanitizedConsts/Ratios/{key}/data'][()].swapaxes(1,2) \n",
-    "            for g in range(0,3):\n",
-    "                fit_data[mod][g] = {}\n",
-    "                for key in keys_fit:\n",
-    "                    fit_data[mod][g][key] = hf[f'/SanitizedConsts/{g}/{key}/data'][()].swapaxes(1,2) \n",
+    "                sanitised_const[mod][key] = hf[f'/SanitizedConsts/Ratios/{key}/data'][()]\n",
     "    \n",
     "        modules.append(mod)\n",
     "        karabo_da.append(f\"AGIPD{mod:02d}\")\n",
     "    else:\n",
-    "        print(f\"No fit data available for module {qm}\")"
+    "        print(f\"No fit data available for module {qm}\")\n",
+    "\n",
+    "# Change the order of dict keys to maintain compatibility for the rest of code\n",
+    "fit_data = {mod: {g: {f: fit_data[mod][f][g] for f in keys_fit\n",
+    "                     } \n",
+    "                  for g in range(0,3)\n",
+    "                 } \n",
+    "            for mod in modules\n",
+    "           }"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def slope_dict_to_arr(d):\n",
+    "    \"\"\"Convert dictionary to numpy array.\"\"\"\n",
+    "\n",
+    "    arr = np.zeros((8,mem_cells,128,512), np.float32)\n",
+    "    for i, key in enumerate(d):\n",
+    "        arr[i,...] = d[key]\n",
+    "        \n",
+    "    return arr"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "proposal = list(filter(None, raw_folder.strip('/').split('/')))[-2]\n",
+    "file_loc = f'Proposal: {proposal}, Run: {dark_run}'\n",
+    "\n",
+    "report = get_report(metadata_folder)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Injection to DB"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if not db_output:\n",
+    "    print('Injection to DB not requested.')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "md = None\n",
+    "\n",
+    "# set the operating condition\n",
+    "condition = Conditions.Dark.AGIPD(memory_cells=mem_cells, \n",
+    "                                  bias_voltage=bias_voltage,\n",
+    "                                  acquisition_rate=acq_rate, \n",
+    "                                  gain_setting=gain_setting,\n",
+    "                                  integration_time=integration_time)\n",
+    "\n",
+    "db_modules = get_pdu_from_db(karabo_id, karabo_da, Constants.AGIPD.SlopesCS(),\n",
+    "                             condition, cal_db_interface,\n",
+    "                             snapshot_at=creation_time)\n",
+    "\n",
+    "if db_output:\n",
+    "    for mod, pdu in zip(modules, db_modules):\n",
+    "        for const in [\"SlopesCS\", \"BadPixelsCS\"]:\n",
+    "            dbconst = getattr(Constants.AGIPD, const)()\n",
+    "            \n",
+    "            if const == \"SlopesCS\":\n",
+    "                dbconst.data = slope_dict_to_arr(sanitised_const[mod])\n",
+    "            else:\n",
+    "                dbconst.data = BPmap[mod]\n",
+    "\n",
+    "            md = send_to_db(pdu, karabo_id, dbconst, condition,\n",
+    "                            file_loc, report, cal_db_interface,\n",
+    "                            creation_time=creation_time)\n",
+    "\n",
+    "    print(\"Constants parameter conditions are:\\n\")\n",
+    "    print(f\"• memory_cells: {mem_cells}\\n• bias_voltage: {bias_voltage}\\n\"\n",
+    "      f\"• acquisition_rate: {acq_rate}\\n• gain_setting: {gain_setting}\\n\"\n",
+    "      f\"• integration_time: {integration_time}\\n\"\n",
+    "      f\"• creation_time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\\n\")"
    ]
   },
   {
@@ -211,17 +335,17 @@
     "\n",
     "# const_data contains ratios of slopes and BP\n",
     "const_data = {}\n",
-    "for key in keys:\n",
+    "for key in keys_ratio:\n",
     "    const_data[key] = np.full((nmods, mem_cells, 512, 128), np.nan)\n",
-    "    for i in range(nmods):\n",
-    "        if key in ratios[i]:\n",
-    "            const_data[key][i,:,pixel_range[0]:pixel_range[2],\n",
-    "                               pixel_range[1]:pixel_range[3]] = ratios[i][key]\n",
+    "    for mod in modules:\n",
+    "        if key in ratios[mod]:\n",
+    "            const_data[key][mod,:,pixel_range[0]:pixel_range[2],\n",
+    "                               pixel_range[1]:pixel_range[3]] = ratios[mod][key]\n",
     "            \n",
     "const_data['mask'] = np.full((nmods, mem_cells, 512, 128), np.nan)\n",
-    "for i in BPmap:\n",
-    "    const_data['mask'][i,:,pixel_range[0]:pixel_range[2],\n",
-    "                    pixel_range[1]:pixel_range[3]] = BPmap[i]\n",
+    "for mod in modules:\n",
+    "    const_data['mask'][mod,:,pixel_range[0]:pixel_range[2],\n",
+    "                    pixel_range[1]:pixel_range[3]] = BPmap[mod]\n",
     "    \n",
     "# fit_const_data contains fitted slopes and intercepts for each gain stage        \n",
     "fit_const_data = {}\n",
@@ -356,23 +480,20 @@
    "outputs": [],
    "source": [
     "table = []\n",
-    "for i in modules:\n",
+    "for mod in modules:\n",
     "        \n",
-    "    table.append((i,\n",
-    "                  f\"{np.nanmean(ratios[i]['H-M']):0.1f} +- {np.nanstd(ratios[i]['H-M']):0.2f}\",\n",
-    "                  f\"{np.nanmean(ratios[i]['M-L']):0.1f} +- {np.nanstd(ratios[i]['M-L']):0.2f}\",\n",
-    "                  f\"{np.nanmean(BPmap[i]>0)*100:0.1f} ({np.nansum(BPmap[i]>0)})\"\n",
+    "    table.append((mod,\n",
+    "                  f\"{np.nanmean(ratios[mod]['H-M']):0.1f} +- {np.nanstd(ratios[mod]['H-M']):0.2f}\",\n",
+    "                  f\"{np.nanmean(ratios[mod]['M-L']):0.1f} +- {np.nanstd(ratios[mod]['M-L']):0.2f}\",\n",
+    "                  f\"{np.nanmean(BPmap[mod]>0)*100:0.1f} ({np.nansum(BPmap[mod]>0)})\"\n",
     "                        ))\n",
     "\n",
     "all_HM = []\n",
     "all_ML = []\n",
     "for mod in modules:\n",
-    "    #try except is used for the script to work with less than 16 modules\n",
-    "#     try:\n",
     "    all_HM.extend(ratios[mod]['H-M'])\n",
     "    all_ML.extend(ratios[mod]['M-L'])\n",
-    "#     except Exception as e:\n",
-    "#         continue\n",
+    "\n",
     "all_HM = np.array(all_HM)\n",
     "all_ML = np.array(all_ML)\n",
     "\n",