diff --git a/notebooks/ePix100/Correction_ePix100_NBC.ipynb b/notebooks/ePix100/Correction_ePix100_NBC.ipynb
index 4d7fbc3363291e4182834af7bf492b76f44f5f48..83ffb1e5983c8f2666be9b7f394ea54af5c83ad0 100644
--- a/notebooks/ePix100/Correction_ePix100_NBC.ipynb
+++ b/notebooks/ePix100/Correction_ePix100_NBC.ipynb
@@ -17,9 +17,9 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "cluster_profile = \"noDB0\"  # ipcluster profile to use\n",
+    "cluster_profile = \"noDB\"  # ipcluster profile to use\n",
     "in_folder = \"/gpfs/exfel/exp/MID/202121/p002929/raw\"  # input folder, required\n",
-    "out_folder = \"/home/cascella/scratch/epix-test/\"  # output folder, required\n",
+    "out_folder = \"\"  # output folder, required\n",
     "sequences = [-1]  # sequences to correct, set to -1 for all, range allowed\n",
     "run = 126  # which run to read data from, required\n",
     "\n",
@@ -121,11 +121,10 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "\n",
     "h5path = h5path.format(karabo_id, receiver_id)\n",
     "h5path_t = h5path_t.format(karabo_id, receiver_id)\n",
     "h5path_cntrl = h5path_cntrl.format(karabo_id)\n",
-    "plot_unit = 'ADU'\n"
+    "plot_unit = 'ADU'"
    ]
   },
   {
@@ -337,10 +336,7 @@
     "    nCells=memoryCells,\n",
     "    cores=cpuCores,\n",
     "    blockSize=blockSize\n",
-    ")\n",
-    "\n",
-    "offsetCorrection.debug()\n",
-    "histCalOffsetCor.debug()"
+    ")"
    ]
   },
   {
@@ -565,7 +561,7 @@
     "                histCalRelGainCor.fill(data)\n",
     "\n",
     "            ddset[...] = np.moveaxis(data, 2, 0)\n",
-    "            \n",
+    "\n",
     "            if pattern_classification:\n",
     "                ddsetc = ofile.create_dataset(\n",
     "                    h5path+\"/pixels_classified\",\n",
@@ -579,9 +575,8 @@
     "                    chunks=(chunk_size_idim, oshape[1], oshape[2]),\n",
     "                    dtype=np.int32, compression=\"gzip\")\n",
     "\n",
-    "\n",
     "                data_clu, patterns = patternClassifier.classify(data)\n",
-    "                \n",
+    "\n",
     "                data_clu[data_clu < (split_evt_primary_threshold*const_data[\"Noise\"])] = 0  # noqa\n",
     "                ddsetc[...] = np.moveaxis(data_clu, 2, 0)\n",
     "                ddsetp[...] = np.moveaxis(patterns, 2, 0)\n",
@@ -592,13 +587,13 @@
     "            # absolute gain correction\n",
     "            # changes data from ADU to keV (or n. of photons)\n",
     "            if absolute_gain:\n",
-    "                data = data*gain_cnst\n",
+    "                data = data * gain_cnst\n",
     "                if photon_energy > 0:\n",
     "                    data /= photon_energy\n",
     "                histCalAbsGainCor.fill(data)\n",
     "\n",
     "                if pattern_classification:\n",
-    "                    data_clu = data_clu*gain_cnst\n",
+    "                    data_clu = data_clu *gain_cnst\n",
     "                    if photon_energy > 0:\n",
     "                        data_clu /= photon_energy\n",
     "                    ddsetc[...] = np.moveaxis(data_clu, 2, 0)\n",