diff --git a/notebooks/LPD/LPDChar_Darks_NBC.ipynb b/notebooks/LPD/LPDChar_Darks_NBC.ipynb
index 050dafc9fc0db493e5a051681fb645857f6755e7..1c17b282be509aff6c370a8ab6254a5d7294b3dc 100644
--- a/notebooks/LPD/LPDChar_Darks_NBC.ipynb
+++ b/notebooks/LPD/LPDChar_Darks_NBC.ipynb
@@ -82,7 +82,7 @@
     "instrument = \"FXE\" # instrument name\n",
     "ntrains = 300 # number of trains to use\n",
     "high_res_badpix_3d = False # plot bad-pixel summary in high resolution\n",
-    "do_norm_test = False # permorm normality test"
+    "test_for_normality = False # permorm normality test"
    ]
   },
   {
@@ -212,7 +212,7 @@
    "outputs": [],
    "source": [
     "# the actual characterization - to not eded this without consultation\n",
-    "def characterize_module(cells, bp_thresh, skip_first_ntrains, ntrains, do_norm_test, inp):\n",
+    "def characterize_module(cells, bp_thresh, skip_first_ntrains, ntrains, test_for_normality, inp):\n",
     "    import numpy as np\n",
     "    import copy\n",
     "    import h5py\n",
@@ -228,17 +228,19 @@
     "        gain[gain > 2] = 2\n",
     "        return data, gain\n",
     "\n",
-    "    filename, filename_out, channel = inp\n",
+    "    filename, channel, gg, cap = inp\n",
     "    thresholds_offset_hard, thresholds_offset_sigma, thresholds_noise_hard, thresholds_noise_sigma = bp_thresh\n",
     "\n",
     "    infile = h5py.File(filename, \"r\", driver=\"core\")\n",
     "    \n",
-    "    first = np.array(infile[\"/INDEX/FXE_DET_LPD1M-1/DET/{}CH0:xtdf/image/first\".format(channel)])\n",
-    "    count = np.array(infile[\"/INDEX/FXE_DET_LPD1M-1/DET/{}CH0:xtdf/image/count\".format(channel)])\n",
-    "    n_images = count[-1]+first[-1]\n",
-    "    first_image = skip_first_ntrains*cells\n",
-    "    last_image = min(skip_first_ntrains*cells+ntrains*cells, n_images)\n",
-    "    \n",
+    "    bpath = \"/INDEX/FXE_DET_LPD1M-1/DET/{}CH0:xtdf/image\".format(channel)\n",
+    "    count = infile[f\"{bpath}/count\"][()]\n",
+    "    first = infile[f\"{bpath}/first\"][()]\n",
+    "    valid = count != 0\n",
+    "    count, first = count[valid], first[valid]\n",
+    "    first_image = first[skip_first_ntrains]\n",
+    "    last_image = min(first[-1]+count[-1], first_image+ntrains*count[-1])\n",
+    "\n",
     "    im = np.array(infile[\"/INSTRUMENT/FXE_DET_LPD1M-1/DET/{}CH0:xtdf/image/data\".format(\n",
     "        channel)][first_image:last_image, ...])\n",
     "    cellid = np.squeeze(np.array(infile[\"/INSTRUMENT/FXE_DET_LPD1M-1/DET/{}CH0:xtdf/image/cellId\".format(\n",
@@ -260,7 +262,7 @@
     "\n",
     "            offset[..., cc] = np.median(im[:, :, idx], axis=2)\n",
     "            noise[..., cc] = np.std(im[:, :, idx], axis=2)\n",
-    "            if do_norm_test:\n",
+    "            if test_for_normality:\n",
     "                _, normal_test[..., cc] = scipy.stats.normaltest(\n",
     "                    im[:, :, idx], axis=2)\n",
     "\n",
@@ -287,7 +289,7 @@
     "    bp[~np.isfinite(noise)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
     "\n",
     "    idx = cellid == 12\n",
-    "    return offset, noise, channel, bp, im[12, 12, idx], normal_test\n",
+    "    return offset, noise, channel, gg, cap, bp, im[12, 12, idx], normal_test\n",
     "\n",
     "\n",
     "offset_g = OrderedDict()\n",
@@ -297,7 +299,6 @@
     "ntest_g = OrderedDict()\n",
     "\n",
     "gg = 0\n",
-    "pars = []\n",
     "old_cap = None\n",
     "start = datetime.now()\n",
     "inp = []\n",
@@ -317,11 +318,8 @@
     "        qm = \"Q{}M{}\".format(i//4 + 1, i % 4 + 1)\n",
     "        if qm in mapped_files and not mapped_files[qm].empty():\n",
     "            fname_in = mapped_files[qm].get()\n",
-    "            fout = os.path.abspath(\n",
-    "                \"{}/{}\".format(out_folder, (os.path.split(fname_in)[-1]).replace(\"RAW\", \"CORR\")))\n",
-    "            print(\"Process file: \", fout)\n",
-    "            inp.append((fname_in, fout, i))\n",
-    "            pars.append((gg, cap))\n",
+    "            print(\"Process file: \", fname_in)\n",
+    "            inp.append((fname_in, i, gg, cap))\n",
     "\n",
     "    gg+=1\n",
     "\n",
@@ -329,13 +327,12 @@
     "p = partial(characterize_module, max_cells,\n",
     "                (thresholds_offset_hard, thresholds_offset_sigma,\n",
     "                 thresholds_noise_hard, thresholds_noise_sigma),\n",
-    "                skip_first_ntrains, ntrains, do_norm_test)\n",
+    "                skip_first_ntrains, ntrains, test_for_normality)\n",
     "results = view.map_sync(p, inp)\n",
     "\n",
     "for ir, r in enumerate(results):\n",
-    "    offset, noise, i, bp, data, normal = r\n",
+    "    offset, noise, i, gg, cap, bp, data, normal = r\n",
     "    qm = \"Q{}M{}\".format(i//4 + 1, i % 4 + 1)\n",
-    "    gg, cap = pars[ir]\n",
     "    if qm not in offset_g[cap]:\n",
     "        offset_g[cap][qm] = np.zeros(\n",
     "            (offset.shape[0], offset.shape[1], offset.shape[2], 3))\n",
@@ -645,8 +642,8 @@
    "source": [
     "# Loop over capacitor settings, modules, constants\n",
     "for cap in capacitor_settings:\n",
-    "    if not do_norm_test:\n",
-    "        print('Normality test was not requested. Flag `do_norm_test` False')\n",
+    "    if not test_for_normality:\n",
+    "        print('Normality test was not requested. Flag `test_for_normality` False')\n",
     "        break\n",
     "    for i in modules:\n",
     "        qm = \"Q{}M{}\".format(i//4+1, i%4+1)\n",