diff --git a/notebooks/LPD/LPDChar_Darks_NBC.ipynb b/notebooks/LPD/LPDChar_Darks_NBC.ipynb
index 8f793f13bf319d7d5ed59f78c92f1ed489b2afae..68258048916fde65c3f1800c0aba3367173d5b6f 100644
--- a/notebooks/LPD/LPDChar_Darks_NBC.ipynb
+++ b/notebooks/LPD/LPDChar_Darks_NBC.ipynb
@@ -81,7 +81,8 @@
     "not_use_dir_creation_date = False # do not use the creation date of the directory for database time derivation\n",
     "instrument = \"FXE\" # instrument name\n",
     "ntrains = 300 # number of trains to use\n",
-    "high_res_badpix_3d = False # plot bad-pixel summary in high resolution"
+    "high_res_badpix_3d = False # plot bad-pixel summary in high resolution\n",
+    "test_for_normality = False # perform normality test"
    ]
   },
   {
@@ -211,7 +212,7 @@
    "outputs": [],
    "source": [
     "# the actual characterization - to not eded this without consultation\n",
-    "def characterize_module(cells, bp_thresh, skip_first_ntrains, ntrains, inp):\n",
+    "def characterize_module(cells, bp_thresh, skip_first_ntrains, ntrains, test_for_normality, inp):\n",
     "    import numpy as np\n",
     "    import copy\n",
     "    import h5py\n",
@@ -227,14 +228,23 @@
     "        gain[gain > 2] = 2\n",
     "        return data, gain\n",
     "\n",
-    "    filename, filename_out, channel = inp\n",
+    "    filename, channel, gg, cap = inp\n",
     "    thresholds_offset_hard, thresholds_offset_sigma, thresholds_noise_hard, thresholds_noise_sigma = bp_thresh\n",
     "\n",
     "    infile = h5py.File(filename, \"r\", driver=\"core\")\n",
+    "    \n",
+    "    bpath = \"/INDEX/FXE_DET_LPD1M-1/DET/{}CH0:xtdf/image\".format(channel)\n",
+    "    count = infile[f\"{bpath}/count\"][()]\n",
+    "    first = infile[f\"{bpath}/first\"][()]\n",
+    "    valid = count != 0\n",
+    "    count, first = count[valid], first[valid]\n",
+    "    first_image = first[skip_first_ntrains]\n",
+    "    last_image = first_image + np.sum(count[skip_first_ntrains:skip_first_ntrains+ntrains])\n",
+    "\n",
     "    im = np.array(infile[\"/INSTRUMENT/FXE_DET_LPD1M-1/DET/{}CH0:xtdf/image/data\".format(\n",
-    "        channel)][skip_first_ntrains*cells:skip_first_ntrains*cells+ntrains*cells, ...])\n",
+    "        channel)][first_image:last_image, ...])\n",
     "    cellid = np.squeeze(np.array(infile[\"/INSTRUMENT/FXE_DET_LPD1M-1/DET/{}CH0:xtdf/image/cellId\".format(\n",
-    "        channel)][skip_first_ntrains*cells:skip_first_ntrains*cells+ntrains*cells, ...]))\n",
+    "        channel)][first_image:last_image, ...]))\n",
     "    infile.close()\n",
     "\n",
     "    im, g = splitOffGainLPD(im[:, 0, ...])\n",
@@ -252,8 +262,9 @@
     "\n",
     "            offset[..., cc] = np.median(im[:, :, idx], axis=2)\n",
     "            noise[..., cc] = np.std(im[:, :, idx], axis=2)\n",
-    "            _, normal_test[..., cc] = scipy.stats.normaltest(\n",
-    "                im[:, :, idx], axis=2)\n",
+    "            if test_for_normality:\n",
+    "                _, normal_test[..., cc] = scipy.stats.normaltest(\n",
+    "                    im[:, :, idx], axis=2)\n",
     "\n",
     "    # bad pixels\n",
     "    bp = np.zeros(offset.shape, np.uint32)\n",
@@ -278,7 +289,7 @@
     "    bp[~np.isfinite(noise)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
     "\n",
     "    idx = cellid == 12\n",
-    "    return offset, noise, channel, bp, im[12, 12, idx], normal_test\n",
+    "    return offset, noise, channel, gg, cap, bp, im[12, 12, idx], normal_test\n",
     "\n",
     "\n",
     "offset_g = OrderedDict()\n",
@@ -290,6 +301,8 @@
     "gg = 0\n",
     "old_cap = None\n",
     "start = datetime.now()\n",
+    "inp = []\n",
+    "    \n",
     "for gain, mapped_files in gain_mapped_files.items():\n",
     "    cap = gain.split(\"_\")[1]\n",
     "    if cap != old_cap:\n",
@@ -301,49 +314,42 @@
     "        data_g[cap] = OrderedDict()\n",
     "        ntest_g[cap] = OrderedDict()\n",
     "\n",
-    "    dones = []\n",
-    "    inp = []\n",
-    "\n",
     "    for i in modules:\n",
     "        qm = \"Q{}M{}\".format(i//4 + 1, i % 4 + 1)\n",
     "        if qm in mapped_files and not mapped_files[qm].empty():\n",
     "            fname_in = mapped_files[qm].get()\n",
-    "            dones.append(mapped_files[qm].empty())\n",
+    "            print(\"Process file: \", fname_in)\n",
+    "            inp.append((fname_in, i, gg, cap))\n",
     "\n",
-    "        else:\n",
-    "            continue\n",
-    "        fout = os.path.abspath(\n",
-    "            \"{}/{}\".format(out_folder, (os.path.split(fname_in)[-1]).replace(\"RAW\", \"CORR\")))\n",
-    "        print(\"Process file: \", fout)\n",
-    "        inp.append((fname_in, fout, i))\n",
-    "    first = False\n",
-    "    p = partial(characterize_module, max_cells,\n",
+    "    gg+=1\n",
+    "\n",
+    "\n",
+    "p = partial(characterize_module, max_cells,\n",
     "                (thresholds_offset_hard, thresholds_offset_sigma,\n",
     "                 thresholds_noise_hard, thresholds_noise_sigma),\n",
-    "                skip_first_ntrains, ntrains)\n",
-    "    results = view.map_sync(p, inp)\n",
-    "    for r in results:\n",
-    "        offset, noise, i, bp, data, normal = r\n",
-    "        qm = \"Q{}M{}\".format(i//4 + 1, i % 4 + 1)\n",
-    "        if qm not in offset_g[cap]:\n",
-    "            offset_g[cap][qm] = np.zeros(\n",
-    "                (offset.shape[0], offset.shape[1], offset.shape[2], 3))\n",
-    "            noise_g[cap][qm] = np.zeros_like(offset_g[cap][qm])\n",
-    "            badpix_g[cap][qm] = np.zeros_like(offset_g[cap][qm])\n",
-    "            data_g[cap][qm] = np.zeros((ntrains, 3))\n",
-    "            ntest_g[cap][qm] = np.zeros_like(offset_g[cap][qm])\n",
-    "\n",
-    "        offset_g[cap][qm][..., gg] = offset\n",
-    "        noise_g[cap][qm][..., gg] = noise\n",
-    "        badpix_g[cap][qm][..., gg] = bp\n",
-    "        data_g[cap][qm][:data.shape[0], gg] = data\n",
-    "        ntest_g[cap][qm][..., gg] = normal\n",
-    "\n",
-    "        hn, cn = np.histogram(data, bins=20)\n",
-    "        print(\"{} gain. Module: {}. Number of processed trains per cell: {}.\\n\".format(\n",
-    "            gain_names[gg], qm, data.shape[0]))\n",
-    "    gg += 1\n",
-    "    plt.show()\n",
+    "                skip_first_ntrains, ntrains, test_for_normality)\n",
+    "results = view.map_sync(p, inp)\n",
+    "\n",
+    "for ir, r in enumerate(results):\n",
+    "    offset, noise, i, gg, cap, bp, data, normal = r\n",
+    "    qm = \"Q{}M{}\".format(i//4 + 1, i % 4 + 1)\n",
+    "    if qm not in offset_g[cap]:\n",
+    "        offset_g[cap][qm] = np.zeros(\n",
+    "            (offset.shape[0], offset.shape[1], offset.shape[2], 3))\n",
+    "        noise_g[cap][qm] = np.zeros_like(offset_g[cap][qm])\n",
+    "        badpix_g[cap][qm] = np.zeros_like(offset_g[cap][qm])\n",
+    "        data_g[cap][qm] = np.full((ntrains, 3), np.nan)\n",
+    "        ntest_g[cap][qm] = np.zeros_like(offset_g[cap][qm])\n",
+    "\n",
+    "    offset_g[cap][qm][..., gg] = offset\n",
+    "    noise_g[cap][qm][..., gg] = noise\n",
+    "    badpix_g[cap][qm][..., gg] = bp\n",
+    "    data_g[cap][qm][:data.shape[0], gg] = data\n",
+    "    ntest_g[cap][qm][..., gg] = normal\n",
+    "\n",
+    "    hn, cn = np.histogram(data, bins=20)\n",
+    "    print(\"{} gain, Capacitor {}, Module: {}. Number of processed trains per cell: {}.\\n\".format(\n",
+    "        gain_names[gg], cap, qm, data.shape[0]))\n",
     "\n",
     "duration = (datetime.now()-start).total_seconds()\n",
     "logger.runtime_summary_entry(success=True, runtime=duration,\n",
@@ -636,6 +642,9 @@
    "source": [
     "# Loop over capacitor settings, modules, constants\n",
     "for cap in capacitor_settings:\n",
+    "    if not test_for_normality:\n",
+    "        print('Normality test was not requested. Flag `test_for_normality` False')\n",
+    "        break\n",
     "    for i in modules:\n",
     "        qm = \"Q{}M{}\".format(i//4+1, i%4+1)\n",
     "\n",