diff --git a/notebooks/AGIPD/AGIPD_Correct_and_Verify.ipynb b/notebooks/AGIPD/AGIPD_Correct_and_Verify.ipynb
index 81134340ab4facbb443af90bd9ed7b1a73526778..9770751f9a3bc86acfe33f00c52f71cf52d7369b 100644
--- a/notebooks/AGIPD/AGIPD_Correct_and_Verify.ipynb
+++ b/notebooks/AGIPD/AGIPD_Correct_and_Verify.ipynb
@@ -459,17 +459,12 @@
     "    if use_litframe_finder not in ['auto', 'offline', 'online']:\n",
     "        raise ValueError(\"Unexpected value in 'use_litframe_finder'.\")\n",
     "\n",
-    "    selection_type = {'off': 0, 'cm': 1, 'final': 2}\n",
-    "    if use_super_selection not in selection_type:\n",
-    "        raise ValueError(\"Unexpected value in 'common_selection'.\")\n",
-    "\n",
     "    inst = karabo_id_control[:3]\n",
     "    litfrm = make_litframe_finder(inst, dc, litframe_device_id)\n",
     "    try:\n",
-    "        get_data = {'auto': litfrm.read_or_process, 'offline':litfrm.process, 'online': litfrm.read}\n",
+    "        get_data = {'auto': litfrm.read_or_process, 'offline': litfrm.process, 'online': litfrm.read}\n",
     "        r = get_data[use_litframe_finder]()\n",
-    "        cell_sel = LitFrameSelection(r, train_ids, max_pulses, energy_threshold,\n",
-    "                                     selection_type[use_super_selection])\n",
+    "        cell_sel = LitFrameSelection(r, train_ids, max_pulses, energy_threshold, use_super_selection)\n",
     "        cell_sel.print_report()\n",
     "    except LitFrameFinderError as err:\n",
     "        warn(f\"Cannot use AgipdLitFrameFinder due to:\\n{err}\")\n",
@@ -720,13 +715,13 @@
     "            # In common mode corrected is enabled.\n",
     "            # Cell selection is only activated after common mode correction.\n",
     "            # Perform cross-file correction parallel over asics\n",
-    "            file_ix = [i_proc for i_proc, n_img in enumerate(img_counts) if n_img > 0]\n",
+    "            image_files_idx = [i_proc for i_proc, n_img in enumerate(img_counts) if n_img > 0]\n",
     "            pool.starmap(agipd_corr.cm_correction, itertools.product(\n",
-    "                file_ix, range(16)  # 16 ASICs per module\n",
+    "                image_files_idx, range(16)  # 16 ASICs per module\n",
     "            ))\n",
     "            step_timer.done_step(\"Common-mode correction\")\n",
     "\n",
-    "            img_counts = pool.map(agipd_corr.apply_selected_pulses, file_ix)\n",
+    "            img_counts = pool.map(agipd_corr.apply_selected_pulses, image_files_idx)\n",
     "            step_timer.done_step(\"Applying selected cells after common mode correction\")\n",
     "            \n",
     "        # Perform image-wise correction\"\n",
diff --git a/src/cal_tools/agipdlib.py b/src/cal_tools/agipdlib.py
index cb12a91b6c8a019d8e49c3c46ee664e98464b8a0..39fb187b74fe8a65333b700eecabb716712c04f8 100644
--- a/src/cal_tools/agipdlib.py
+++ b/src/cal_tools/agipdlib.py
@@ -303,6 +303,8 @@ class CellSelection:
         """Returns mask of cells selected for processing
 
         :param train_sel: list of train ids selected for processing
+        :param nfrm: the number of frames expected for every train in
+            the list `train_sel`
         :param cm: flag indicating whether to return the final selection
             or the interim selection for the common-mode correction
 
@@ -1033,7 +1035,7 @@ class AgipdCorrections:
 
         ntrains = data_dict["n_valid_trains"][0]
         train_ids = data_dict["valid_trains"][:ntrains]
-        nimg_in_trains = data_dict["nimg_in_trains"][:ntrains].astype(int)
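+        # "nimg_in_trains" is allocated as int64, no cast needed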
+        nimg_in_trains = data_dict["nimg_in_trains"][:ntrains]
 
         # Initializing can_calibrate array
         can_calibrate = self.cell_sel.get_cells_on_trains(
@@ -1544,7 +1546,7 @@ class AgipdCorrections:
             self.shared_dict[i]["cm_presel"] = sharedmem.empty(1, dtype="b")
             self.shared_dict[i]["n_valid_trains"] = sharedmem.empty(1, dtype="i4")  # noqa
             self.shared_dict[i]["valid_trains"] = sharedmem.empty(1024, dtype="u8")  # noqa
-            self.shared_dict[i]["nimg_in_trains"] = sharedmem.empty(1024, dtype="i4")  # noqa            
+            self.shared_dict[i]["nimg_in_trains"] = sharedmem.empty(1024, dtype="i8")  # noqa
 
         if self.corr_bools.get("round_photons"):
             self.shared_hist_preround = sharedmem.empty(len(self.hist_bins_preround) - 1, dtype="i8")
@@ -1660,30 +1662,36 @@ class LitFrameSelection(CellSelection):
                  train_ids: List[int],
                  crange: Optional[List[int]] = None,
                  energy_threshold: float = -1000,
-                 super_selection: int = 0):
+                 use_super_selection: str = 'off'):
         """Initialize lit frame selection
 
         :param litfrmdata: AgipdLitFrameFinder output data
         :param train_ids: the list of selected trains
         :param crange: range parameters of selected cells,
             list up to 3 elements
+        :param energy_threshold: the minimum allowed value for
+            pulse energy
+        :param use_super_selection: the stage at which the super selection
+            should be applied: `off`, `cm` or `final`
         """
         from extra_redu import FrameSelection, SelType
         self.dev = litfrmdata.meta.litFrmDev
         self.crange = validate_selected_pulses(crange, self.ncell_max)
         self.energy_threshold = energy_threshold
-        self.super_selection = super_selection
-        if super_selection == 0:
+        self.use_super_selection = use_super_selection
+
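+        # choose the frame-selection types used for the common-mode (cm)
+        # and final selections according to the requested stage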
+        if use_super_selection == 'off':
             self.cm_sel_type = SelType.ROW
             self.final_sel_type = SelType.CELL
-        elif super_selection == 1:
+        elif use_super_selection == 'cm':
             self.cm_sel_type = SelType.SUPER_ROW
             self.final_sel_type = SelType.CELL
-        elif super_selection == 2:
+        elif use_super_selection == 'final':
             self.cm_sel_type = SelType.SUPER_ROW
             self.final_sel_type = SelType.SUPER_CELL
         else:
-            raise ValueError("param 'super_selection' takes only 0,1,2")
+            raise ValueError("param 'use_super_selection' takes only "
+                             "'off', 'cm' or 'final'")
 
         self._sel = FrameSelection(
             litfrmdata, guess_missed=True, crange=slice(*self.crange),