diff --git a/notebooks/LPD/LPDChar_Darks_NBC.ipynb b/notebooks/LPD/LPDChar_Darks_NBC.ipynb
index c9508aa5c5d43a540dec87f08045217e229a7ed0..ed1686ed9c72a2ef7c04631e80c805050f957b93 100644
--- a/notebooks/LPD/LPDChar_Darks_NBC.ipynb
+++ b/notebooks/LPD/LPDChar_Darks_NBC.ipynb
@@ -22,21 +22,17 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "in_folder = \"/gpfs/exfel/exp/FXE/202030/p900121/raw\" # path to input data, required\n",
-    "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/LPD/\" # path to output to, required\n",
+    "in_folder = \"/gpfs/exfel/exp/FXE/202304/p003338/raw\" # path to input data, required\n",
+    "out_folder = \"/gpfs/exfel/data/scratch/kluyvert/lpd-darks-p3338-r133-134-135/\" # path to output to, required\n",
     "metadata_folder = \"\"  # Directory containing calibration_metadata.yml when run by xfel-calibrate\n",
-    "sequence = 0 # sequence files to evaluate\n",
     "modules = [-1] # list of modules to evaluate, RANGE ALLOWED\n",
-    "run_high = 120 # run number in which high gain data was recorded, required\n",
-    "run_med = 121 # run number in which medium gain data was recorded, required\n",
-    "run_low = 122 # run number in which low gain data was recorded, required\n",
+    "run_high = 133 # run number in which high gain data was recorded, required\n",
+    "run_med = 134 # run number in which medium gain data was recorded, required\n",
+    "run_low = 135 # run number in which low gain data was recorded, required\n",
     "\n",
     "karabo_id = \"FXE_DET_LPD1M-1\" # karabo karabo_id\n",
     "karabo_da = ['-1']  # a list of data aggregators names, Default [-1] for selecting all data aggregators\n",
-    "receiver_id = \"{}CH0\" # inset for receiver devices\n",
-    "path_template = 'RAW-R{:04d}-{}-S{:05d}.h5' # the template to use to access data\n",
-    "h5path = '/INSTRUMENT/{}/DET/{}:xtdf/image' # path in the HDF5 file to images\n",
-    "h5path_idx = '/INDEX/{}/DET/{}:xtdf/image' # path in the HDF5 file to images\n",
+    "source_name = \"{}/DET/{}CH0:xtdf\"  # Source name for raw detector data - filled with karabo_id & module number\n",
     "\n",
     "use_dir_creation_date = True # use the creation date of the directory for database time derivation\n",
     "cal_db_interface = \"tcp://max-exfl016:8015#8025\" # the database interface to use\n",
@@ -51,13 +47,14 @@
     "thresholds_offset_hard = [400, 1500] # bad pixel hard threshold\n",
     "thresholds_noise_sigma = 7. # bad pixel relative threshold in terms of n sigma noise\n",
     "thresholds_noise_hard = [1, 35] # bad pixel hard threshold\n",
-    "skip_first_ntrains = 10 # Number of first trains to skip\n",
+    "\n",
+    "ntrains = 500  # maximum number of trains to use in each gain stage\n",
+    "skip_first_ntrains = 10  # Number of first trains to skip\n",
+    "min_trains = 370  # minimum number of trains needed for each gain stage\n",
     "\n",
     "# Parameters for plotting\n",
     "skip_plots = False  # exit after writing corrected files\n",
     "\n",
-    "instrument = \"FXE\" # instrument name\n",
-    "ntrains = 100 # number of trains to use\n",
     "high_res_badpix_3d = False # plot bad-pixel summary in high resolution\n",
     "test_for_normality = False # permorm normality test\n",
     "inject_cell_order = False  # Include memory cell order as part of the detector condition\n",
@@ -72,17 +69,15 @@
    "source": [
     "import copy\n",
     "import multiprocessing\n",
-    "import os\n",
     "import warnings\n",
-    "from collections import OrderedDict\n",
     "from datetime import datetime\n",
     "from functools import partial\n",
     "from logging import warning\n",
+    "from pathlib import Path\n",
     "\n",
     "warnings.filterwarnings('ignore')\n",
     "\n",
     "import dateutil.parser\n",
-    "import h5py\n",
     "import matplotlib\n",
     "import pasha as psh\n",
     "import scipy.stats\n",
@@ -99,6 +94,7 @@
     "from iCalibrationDB import Conditions, Constants, Detectors, Versions\n",
     "from XFELDetAna.plotting.heatmap import heatmapPlot\n",
     "from XFELDetAna.plotting.simpleplot import simplePlot\n",
+    "from extra_data import RunDirectory\n",
     "\n",
     "from cal_tools.enums import BadPixels\n",
     "from cal_tools.plotting import (\n",
@@ -117,6 +113,7 @@
     "    map_gain_stages,\n",
     "    module_index_to_qm,\n",
     "    parse_runs,\n",
+    "    reorder_axes,\n",
     "    run_prop_seq_from_path,\n",
     "    save_const_to_h5,\n",
     "    send_to_db,\n",
@@ -141,21 +138,7 @@
     "else:\n",
     "    modules = [int(x[-2:]) for x in karabo_da]\n",
     "\n",
-    "gain_runs = OrderedDict()\n",
-    "if capacitor_setting == 5:\n",
-    "    gain_runs[\"high_5pf\"] = run_high\n",
-    "    gain_runs[\"med_5pf\"] =  run_med\n",
-    "    gain_runs[\"low_5pf\"] =  run_low\n",
-    "elif capacitor_setting == 50:\n",
-    "    gain_runs[\"high_50pf\"] = run_high\n",
-    "    gain_runs[\"med_50pf\"] =  run_med\n",
-    "    gain_runs[\"low_50pf\"] =  run_low\n",
-    "\n",
-    "capacitor_settings = [capacitor_setting]\n",
-    "capacitor_settings = ['{}pf'.format(c) for c in capacitor_settings]\n",
-    "\n",
-    "h5path = h5path.format(karabo_id, receiver_id)\n",
-    "h5path_idx = h5path_idx.format(karabo_id, receiver_id)\n",
+    "capacitor_setting_s = f'{capacitor_setting}pf'\n",
     "\n",
     "creation_time = None\n",
     "if use_dir_creation_date:\n",
@@ -171,23 +154,11 @@
     "print(\"Proposal: {}\".format(prop))\n",
     "print(\"Memory cells: {}/{}\".format(mem_cells, max_cells))\n",
     "print(\"Runs: {}, {}, {}\".format(run_high, run_med, run_low))\n",
-    "print(\"Sequence: {}\".format(sequence))\n",
     "print(\"Using DB: {}\".format(db_output))\n",
     "print(\"Input: {}\".format(in_folder))\n",
     "print(\"Output: {}\".format(out_folder))\n",
-    "print(\"Bias voltage: {}V\".format(bias_voltage))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set everything up filewise\n",
-    "gmf = map_gain_stages(in_folder, gain_runs, path_template, karabo_da, [sequence])\n",
-    "gain_mapped_files, total_sequences, total_file_size = gmf\n",
-    "print(f\"Will process a total of {total_sequences} files.\")"
+    "print(\"Bias voltage: {}V\".format(bias_voltage))\n",
+    "print(f\"Capacitor setting: {capacitor_setting_s}\")"
    ]
   },
   {
@@ -207,41 +178,35 @@
     "parallel_num_threads = multiprocessing.cpu_count() // parallel_num_procs\n",
     "\n",
     "# the actual characterization\n",
-    "def characterize_module(filename, channel, gg, cap):\n",
-    "\n",
-    "    def splitOffGainLPD(d):\n",
-    "        msk = np.zeros(d.shape, np.uint16)\n",
-    "        msk[...] = 0b0000111111111111\n",
-    "        data = np.bitwise_and(d, msk)\n",
-    "        msk[...] = 0b0011000000000000\n",
-    "        gain = np.bitwise_and(d, msk)//4096\n",
-    "        gain[gain > 2] = 2\n",
-    "        return data, gain\n",
-    "\n",
-    "    infile = h5py.File(filename, \"r\")\n",
+    "def characterize_module(run_path, channel, gg):\n",
+    "    run = RunDirectory(run_path)\n",
+    "    det_source = source_name.format(karabo_id, channel)\n",
+    "    data = run[det_source, 'image.data'].drop_empty_trains()\n",
+    "    data = data[skip_first_ntrains : skip_first_ntrains + ntrains]\n",
+    "    cell_ids = run[det_source, 'image.cellId'].drop_empty_trains()\n",
+    "    cell_ids = cell_ids[skip_first_ntrains : skip_first_ntrains + ntrains]\n",
     "    \n",
-    "    instrument_src = h5path.format(channel)\n",
-    "    index_src = h5path_idx.format(channel)\n",
-    "    count = infile[f\"{index_src}/count\"][()]\n",
-    "    first = infile[f\"{index_src}/first\"][()]\n",
-    "    valid = count != 0\n",
-    "    count, first = count[valid], first[valid]\n",
-    "    first_image = int(first[skip_first_ntrains] if first.shape[0] > skip_first_ntrains else 0)\n",
-    "    last_image = int(first_image + np.sum(count[skip_first_ntrains:skip_first_ntrains+ntrains]))\n",
-    "\n",
-    "    im = np.array(infile[\"{}/data\".format(instrument_src, channel)][first_image:last_image, ...])\n",
-    "    cellid = np.squeeze(np.array(infile[\"{}/cellId\".format(instrument_src, channel)][first_image:last_image, ...]))\n",
-    "    infile.close()\n",
-    "    if im.shape[0] == 0:  # No data\n",
-    "        return None, None, channel, gg, cap, None, None, None, None\n",
-    "\n",
-    "    cellid_pattern = cellid[:count[0]]\n",
-    "\n",
-    "    im, g = splitOffGainLPD(im[:, 0, ...])\n",
-    "    im = im.astype(np.float32)\n",
+    "    if len(data.train_ids) < min_trains:\n",
+    "        raise Exception(f\"Run {run_path} only contains {len(data.train_ids)} trains, but {min_trains} required\")\n",
     "\n",
-    "    im = np.rollaxis(im, 2)\n",
-    "    im = np.rollaxis(im, 2, 1)\n",
+    "    im = data.ndarray()\n",
+    "    if im.ndim > 3:\n",
+    "        im = im[:, 0]  # Drop extra dimension\n",
+    "    \n",
+    "    cellid = cell_ids.ndarray()\n",
+    "    cellid_pattern = cell_ids[0].ndarray()\n",
+    "    if cellid.ndim > 1:\n",
+    "        cellid = cellid[:, 0]\n",
+    "        cellid_pattern = cellid_pattern[:, 0]\n",
+    "\n",
+    "    # Mask off gain bits, leaving only data\n",
+    "    im &= 0b0000111111111111\n",
+    "\n",
+    "    im = im.astype(np.float32)\n",
+    "    im = reorder_axes(im,\n",
+    "        from_order=('frames', 'slow_scan', 'fast_scan'),\n",
+    "        to_order=('fast_scan', 'slow_scan', 'frames'),\n",
+    "    )\n",
     "\n",
     "    context = psh.context.ThreadContext(num_workers=parallel_num_threads)\n",
     "    offset = context.alloc(shape=(im.shape[0], im.shape[1], max_cells), dtype=np.float64)\n",
@@ -281,80 +246,63 @@
     "    bp[~np.isfinite(noise)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
     "\n",
     "    idx = (cellid == cellid[0])\n",
-    "    return offset, noise, channel, gg, cap, bp, im[12, 12, idx], normal_test, cellid_pattern"
+    "    return offset, noise, channel, gg, bp, im[12, 12, idx], normal_test, cellid_pattern"
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "scrolled": false
+   },
    "outputs": [],
    "source": [
-    "offset_g = OrderedDict()\n",
-    "noise_g = OrderedDict()\n",
-    "badpix_g = OrderedDict()\n",
-    "data_g = OrderedDict()\n",
-    "ntest_g = OrderedDict()\n",
+    "offset_g = {}\n",
+    "noise_g = {}\n",
+    "badpix_g = {}\n",
+    "data_g = {}\n",
+    "ntest_g = {}\n",
     "# Should be the same cell order for all modules & all gain stages\n",
     "cellid_patterns_g = {}\n",
     "\n",
-    "gg = 0\n",
-    "old_cap = None\n",
-    "start = datetime.now()\n",
-    "inp = []\n",
-    "    \n",
-    "for gain, mapped_files in gain_mapped_files.items():\n",
-    "    cap = gain.split(\"_\")[1]\n",
-    "    if cap != old_cap:\n",
-    "        gg = 0\n",
-    "        old_cap = cap\n",
-    "        offset_g[cap] = OrderedDict()\n",
-    "        noise_g[cap] = OrderedDict()\n",
-    "        badpix_g[cap] = OrderedDict()\n",
-    "        data_g[cap] = OrderedDict()\n",
-    "        ntest_g[cap] = OrderedDict()\n",
-    "        cellid_patterns_g[cap] = {}\n",
     "\n",
+    "inp = []\n",
+    "for gg, run_num in enumerate([run_high, run_med, run_low]):\n",
+    "    run_path = Path(in_folder, f\"r{run_num:04d}\")\n",
     "    for i in modules:\n",
-    "        qm = module_index_to_qm(i)\n",
-    "        if qm in mapped_files and not mapped_files[qm].empty():\n",
-    "            fname_in = mapped_files[qm].get()\n",
-    "            print(\"Process file: \", fname_in)\n",
-    "            inp.append((fname_in, i, gg, cap))\n",
-    "\n",
-    "    gg+=1\n",
+    "        inp.append((run_path, i, gg))\n",
     "\n",
     "with multiprocessing.Pool(processes=parallel_num_procs) as pool:\n",
     "    results = pool.starmap(characterize_module, inp)\n",
     "\n",
     "for ir, r in enumerate(results):\n",
-    "    offset, noise, i, gg, cap, bp, data, normal, cellid_pattern = r\n",
+    "    offset, noise, i, gg, bp, data, normal, cellid_pattern = r\n",
     "    if data is None:\n",
     "        warning(f\"No data for module {i} of gain {gg}\")\n",
     "        skip_plots = True\n",
     "        continue\n",
     "    qm = module_index_to_qm(i)\n",
-    "    if qm not in offset_g[cap]:\n",
-    "        offset_g[cap][qm] = np.zeros(\n",
-    "            (offset.shape[0], offset.shape[1], offset.shape[2], 3))\n",
-    "        noise_g[cap][qm] = np.zeros_like(offset_g[cap][qm])\n",
-    "        badpix_g[cap][qm] = np.zeros_like(offset_g[cap][qm], dtype=np.uint32)\n",
-    "        data_g[cap][qm] = np.full((ntrains, 3), np.nan)\n",
-    "        ntest_g[cap][qm] = np.zeros_like(offset_g[cap][qm])\n",
-    "        cellid_patterns_g[cap][qm] = cellid_pattern\n",
+    "    if qm not in offset_g:\n",
+    "        offset_g[qm] = np.zeros(offset.shape[:3] + (3,))\n",
+    "        print(\"Constant shape:\", offset_g[qm].shape)\n",
+    "        noise_g[qm] = np.zeros_like(offset_g[qm])\n",
+    "        badpix_g[qm] = np.zeros_like(offset_g[qm], dtype=np.uint32)\n",
+    "        data_g[qm] = np.full((ntrains, 3), np.nan)\n",
+    "        ntest_g[qm] = np.zeros_like(offset_g[qm])\n",
+    "        cellid_patterns_g[qm] = cellid_pattern\n",
     "    else:\n",
-    "        if not np.array_equal(cellid_pattern, cellid_patterns_g[cap][qm]):\n",
+    "        if not np.array_equal(cellid_pattern, cellid_patterns_g[qm]):\n",
     "            raise ValueError(\"Inconsistent cell ID pattern between gain stages\")\n",
     "            \n",
     "\n",
-    "    offset_g[cap][qm][..., gg] = offset\n",
-    "    noise_g[cap][qm][..., gg] = noise\n",
-    "    badpix_g[cap][qm][..., gg] = bp\n",
-    "    data_g[cap][qm][:data.shape[0], gg] = data\n",
-    "    ntest_g[cap][qm][..., gg] = normal\n",
+    "    offset_g[qm][..., gg] = offset\n",
+    "    noise_g[qm][..., gg] = noise\n",
+    "    badpix_g[qm][..., gg] = bp\n",
+    "    data_g[qm][:data.shape[0], gg] = data\n",
+    "    ntest_g[qm][..., gg] = normal\n",
     "\n",
     "    hn, cn = np.histogram(data, bins=20)\n",
-    "    print(f\"{gain_names[gg]} gain, Capacitor {cap}, Module: {qm}. \"\n",
+    "    print(f\"{gain_names[gg]} gain, Module: {qm}. \"\n",
     "          f\"Number of processed trains per cell: {data.shape[0]}.\")"
    ]
   },
@@ -391,7 +339,7 @@
    "source": [
     "# TODO: add db_module when received from myMDC\n",
     "# Create the modules dict of karabo_das and PDUs\n",
-    "qm_dict = OrderedDict()\n",
+    "qm_dict = {}\n",
     "for i, k_da in zip(modules, karabo_da):\n",
     "    qm = module_index_to_qm(i)\n",
     "    qm_dict[qm] = {\"karabo_da\": k_da,\n",
@@ -406,76 +354,74 @@
    "source": [
     "# Retrieve existing constants for comparison\n",
     "clist = [\"Offset\", \"Noise\", \"BadPixelsDark\"]\n",
-    "old_const = {}\n",
-    "old_mdata = {}\n",
     "\n",
     "dinstance = \"LPD1M1\"\n",
     "detinst = getattr(Detectors, dinstance)\n",
     "print('Retrieve pre-existing constants for comparison.')\n",
-    "for cap in capacitor_settings:\n",
-    "    old_const[cap] = {}\n",
-    "    old_mdata[cap] = {}\n",
-    "    for qm in offset_g[cap].keys():\n",
-    "        old_const[cap][qm] = {}\n",
-    "        old_mdata[cap][qm] = {}\n",
-    "        qm_db = qm_dict[qm]\n",
-    "        karabo_da = qm_db[\"karabo_da\"]\n",
-    "        cellid_pattern = cellid_patterns_g[cap][qm]\n",
-    "        if inject_cell_order:\n",
-    "            mem_cell_order = \",\".join([str(c) for c in cellid_pattern]) + \",\"\n",
-    "        else:\n",
-    "            mem_cell_order = None\n",
     "\n",
-    "        condition = Conditions.Dark.LPD(memory_cells=max_cells,\n",
-    "                                        bias_voltage=bias_voltage,\n",
-    "                                        capacitor=cap,\n",
-    "                                        memory_cell_order=mem_cell_order,\n",
-    "                                       )\n",
-    "        for const in clist:\n",
-    "            constant = getattr(Constants.LPD, const)()\n",
-    "            if not qm_db[\"db_module\"]:\n",
-    "                # This should be used in case of running notebook\n",
-    "                # by a different method other than myMDC which already\n",
-    "                # sends CalCat info.\n",
-    "                qm_db[\"db_module\"] = get_pdu_from_db(karabo_id, [karabo_da], constant,\n",
-    "                                                     condition, cal_db_interface,\n",
-    "                                                     snapshot_at=creation_time)[0]\n",
-    "\n",
-    "            data, mdata = get_from_db(karabo_id, karabo_da,\n",
-    "                                      constant,\n",
-    "                                      condition, None,\n",
-    "                                      cal_db_interface,\n",
-    "                                      creation_time=creation_time,\n",
-    "                                      verbosity=2, timeout=cal_db_timeout)\n",
-    "\n",
-    "            old_const[cap][qm][const] = data\n",
-    "\n",
-    "            if mdata is None or data is None:\n",
-    "                old_mdata[cap][qm][const] = {\n",
-    "                    \"timestamp\": \"Not found\",\n",
-    "                    \"filepath\": None,\n",
-    "                    \"h5path\": None\n",
-    "                }\n",
-    "            else:\n",
-    "                timestamp = mdata.calibration_constant_version.begin_at.isoformat()\n",
-    "                filepath = os.path.join(\n",
-    "                    mdata.calibration_constant_version.hdf5path,\n",
-    "                    mdata.calibration_constant_version.filename\n",
-    "                )\n",
-    "                h5path = mdata.calibration_constant_version.h5path\n",
-    "                old_mdata[cap][qm][const] = {\n",
-    "                    \"timestamp\": timestamp,\n",
-    "                    \"filepath\": filepath,\n",
-    "                    \"h5path\": h5path\n",
-    "                }\n",
-    "\n",
-    "        with open(f\"{out_folder}/module_metadata_{qm}.yml\",\"w\") as fd:\n",
-    "            yaml.safe_dump(\n",
-    "                {\n",
-    "                    \"module\": qm,\n",
-    "                    \"pdu\": qm_db[\"db_module\"],\n",
-    "                    \"old-constants\": old_mdata[cap][qm]\n",
-    "                }, fd)"
+    "old_const = {}\n",
+    "old_mdata = {}\n",
+    "for qm in offset_g.keys():\n",
+    "    old_const[qm] = {}\n",
+    "    old_mdata[qm] = {}\n",
+    "    qm_db = qm_dict[qm]\n",
+    "    karabo_da = qm_db[\"karabo_da\"]\n",
+    "    cellid_pattern = cellid_patterns_g[qm]\n",
+    "    if inject_cell_order:\n",
+    "        mem_cell_order = \",\".join([str(c) for c in cellid_pattern]) + \",\"\n",
+    "    else:\n",
+    "        mem_cell_order = None\n",
+    "\n",
+    "    condition = Conditions.Dark.LPD(memory_cells=max_cells,\n",
+    "                                    bias_voltage=bias_voltage,\n",
+    "                                    capacitor=capacitor_setting_s,\n",
+    "                                    memory_cell_order=mem_cell_order,\n",
+    "                                   )\n",
+    "    for const in clist:\n",
+    "        constant = getattr(Constants.LPD, const)()\n",
+    "        if not qm_db[\"db_module\"]:\n",
+    "            # This should be used in case of running notebook\n",
+    "            # by a different method other than myMDC which already\n",
+    "            # sends CalCat info.\n",
+    "            qm_db[\"db_module\"] = get_pdu_from_db(karabo_id, [karabo_da], constant,\n",
+    "                                                 condition, cal_db_interface,\n",
+    "                                                 snapshot_at=creation_time)[0]\n",
+    "\n",
+    "        data, mdata = get_from_db(karabo_id, karabo_da,\n",
+    "                                  constant,\n",
+    "                                  condition, None,\n",
+    "                                  cal_db_interface,\n",
+    "                                  creation_time=creation_time,\n",
+    "                                  verbosity=2, timeout=cal_db_timeout)\n",
+    "\n",
+    "        old_const[qm][const] = data\n",
+    "\n",
+    "        if mdata is None or data is None:\n",
+    "            old_mdata[qm][const] = {\n",
+    "                \"timestamp\": \"Not found\",\n",
+    "                \"filepath\": None,\n",
+    "                \"h5path\": None\n",
+    "            }\n",
+    "        else:\n",
+    "            timestamp = mdata.calibration_constant_version.begin_at.isoformat()\n",
+    "            filepath = Path(\n",
+    "                mdata.calibration_constant_version.hdf5path,\n",
+    "                mdata.calibration_constant_version.filename\n",
+    "            )\n",
+    "            h5path = mdata.calibration_constant_version.h5path\n",
+    "            old_mdata[qm][const] = {\n",
+    "                \"timestamp\": timestamp,\n",
+    "                \"filepath\": str(filepath),\n",
+    "                \"h5path\": h5path\n",
+    "            }\n",
+    "\n",
+    "    with open(f\"{out_folder}/module_metadata_{qm}.yml\",\"w\") as fd:\n",
+    "        yaml.safe_dump(\n",
+    "            {\n",
+    "                \"module\": qm,\n",
+    "                \"pdu\": qm_db[\"db_module\"],\n",
+    "                \"old-constants\": old_mdata[qm]\n",
+    "            }, fd)"
    ]
   },
   {
@@ -484,16 +430,14 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "res = OrderedDict()\n",
-    "for cap in capacitor_settings:\n",
-    "    res[cap] = OrderedDict()\n",
-    "    for i in modules:\n",
-    "        qm = module_index_to_qm(i)\n",
+    "res = {}\n",
+    "for i in modules:\n",
+    "    qm = module_index_to_qm(i)\n",
     "\n",
-    "        res[cap][qm] = {'Offset': offset_g[cap][qm],\n",
-    "                        'Noise': noise_g[cap][qm],\n",
-    "                        'BadPixelsDark': badpix_g[cap][qm]\n",
-    "                        }"
+    "    res[qm] = {'Offset': offset_g[qm],\n",
+    "               'Noise': noise_g[qm],\n",
+    "               'BadPixelsDark': badpix_g[qm]\n",
+    "              }"
    ]
   },
   {
@@ -504,54 +448,54 @@
    "source": [
     "# Save constants in the calibration DB\n",
     "md = None\n",
-    "for cap in capacitor_settings:\n",
-    "    for qm in res[cap]:\n",
-    "\n",
-    "        karabo_da = qm_dict[qm][\"karabo_da\"]\n",
-    "        db_module = qm_dict[qm][\"db_module\"]\n",
-    "        cellid_pattern = cellid_patterns_g[cap][qm]\n",
-    "        if inject_cell_order:\n",
-    "            mem_cell_order = \",\".join([str(c) for c in cellid_pattern]) + \",\"\n",
-    "        else:\n",
-    "            mem_cell_order = None\n",
-    "\n",
-    "        # Do not store empty constants\n",
-    "        # In case of 0 trains data_g is initiated with nans and never refilled.\n",
-    "        if np.count_nonzero(~np.isnan(data_g[cap][qm]))==0:\n",
-    "            print(f\"Constant ({cap}, {qm}) would be empty, skipping saving\")\n",
-    "            continue\n",
-    "\n",
-    "        for const in res[cap][qm]:\n",
-    "\n",
-    "            dconst = getattr(Constants.LPD, const)()\n",
-    "            dconst.data = res[cap][qm][const]\n",
-    "\n",
-    "            # set the operating condition\n",
-    "\n",
-    "            condition = Conditions.Dark.LPD(memory_cells=max_cells,\n",
-    "                                        bias_voltage=bias_voltage,\n",
-    "                                        capacitor=cap,\n",
-    "                                        memory_cell_order=mem_cell_order,\n",
-    "                                       )\n",
-    "\n",
-    "            if db_output:\n",
-    "                md = send_to_db(db_module, karabo_id, dconst, condition,\n",
-    "                                file_loc, report_path=report,\n",
-    "                                cal_db_interface=cal_db_interface,\n",
-    "                                creation_time=creation_time,\n",
-    "                                timeout=cal_db_timeout)\n",
-    "\n",
-    "            if local_output:\n",
-    "                md = save_const_to_h5(db_module, karabo_id, dconst, condition,\n",
-    "                                      dconst.data, file_loc, report, creation_time, out_folder)\n",
-    "                print(f\"Calibration constant {const} is stored locally.\\n\")\n",
-    "\n",
-    "        print(\"Constants parameter conditions are:\\n\")\n",
-    "        print(f\"• memory_cells: {max_cells}\\n\"\n",
-    "              f\"• bias_voltage: {bias_voltage}\\n\"\n",
-    "              f\"• capacitor: {cap}\\n\"\n",
-    "              f\"• memory cell order: {mem_cell_order}\\n\"\n",
-    "              f\"• creation_time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\\n\")"
+    "\n",
+    "for qm in res:\n",
+    "\n",
+    "    karabo_da = qm_dict[qm][\"karabo_da\"]\n",
+    "    db_module = qm_dict[qm][\"db_module\"]\n",
+    "    cellid_pattern = cellid_patterns_g[qm]\n",
+    "    if inject_cell_order:\n",
+    "        mem_cell_order = \",\".join([str(c) for c in cellid_pattern]) + \",\"\n",
+    "    else:\n",
+    "        mem_cell_order = None\n",
+    "\n",
+    "    # Do not store empty constants\n",
+    "    # In case of 0 trains data_g is initiated with nans and never refilled.\n",
+    "    if np.count_nonzero(~np.isnan(data_g[qm]))==0:\n",
+    "        print(f\"Constant ({qm}) would be empty, skipping saving\")\n",
+    "        continue\n",
+    "\n",
+    "    for const in res[qm]:\n",
+    "\n",
+    "        dconst = getattr(Constants.LPD, const)()\n",
+    "        dconst.data = res[qm][const]\n",
+    "\n",
+    "        # set the operating condition\n",
+    "\n",
+    "        condition = Conditions.Dark.LPD(memory_cells=max_cells,\n",
+    "                                    bias_voltage=bias_voltage,\n",
+    "                                    capacitor=capacitor_setting_s,\n",
+    "                                    memory_cell_order=mem_cell_order,\n",
+    "                                   )\n",
+    "\n",
+    "        if db_output:\n",
+    "            md = send_to_db(db_module, karabo_id, dconst, condition,\n",
+    "                            file_loc, report_path=report,\n",
+    "                            cal_db_interface=cal_db_interface,\n",
+    "                            creation_time=creation_time,\n",
+    "                            timeout=cal_db_timeout)\n",
+    "\n",
+    "        if local_output:\n",
+    "            md = save_const_to_h5(db_module, karabo_id, dconst, condition,\n",
+    "                                  dconst.data, file_loc, report, creation_time, out_folder)\n",
+    "            print(f\"Calibration constant {const} is stored locally.\\n\")\n",
+    "\n",
+    "    print(\"Constants parameter conditions are:\\n\")\n",
+    "    print(f\"• memory_cells: {max_cells}\\n\"\n",
+    "          f\"• bias_voltage: {bias_voltage}\\n\"\n",
+    "          f\"• capacitor: {capacitor_setting_s}\\n\"\n",
+    "          f\"• memory cell order: {mem_cell_order}\\n\"\n",
+    "          f\"• creation_time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\\n\")"
    ]
   },
   {
@@ -586,77 +530,76 @@
     "fig, grid = plt.subplots(3, 1, sharex=\"col\", sharey=\"row\", figsize=(10, 7))\n",
     "fig.subplots_adjust(wspace=0, hspace=0)\n",
     "\n",
-    "for cap in capacitor_settings:\n",
-    "    for i in modules:\n",
-    "        qm = module_index_to_qm(i)\n",
-    "        if np.count_nonzero(~np.isnan(data_g[cap][qm])) == 0:\n",
-    "            break\n",
-    "        for gain in range(3):\n",
-    "            data = data_g[cap][qm][:, gain]\n",
-    "            offset = np.nanmedian(data)\n",
-    "            noise = np.nanstd(data)\n",
-    "            xrange = [np.nanmin(data_g[cap][qm]), np.nanmax(data_g[cap][qm])]\n",
-    "            if xrange[1] == xrange[0]:\n",
-    "                xrange = [0, xrange[0]+xrange[0]//2]\n",
-    "                nbins = data_g[cap][qm].shape[0]\n",
-    "            else:\n",
-    "                nbins = int(xrange[1] - xrange[0])\n",
-    "\n",
-    "            hn, cn = np.histogram(data, bins=nbins, range=xrange)\n",
-    "\n",
-    "            grid[gain].hist(data, range=xrange, bins=nbins)\n",
-    "            grid[gain].plot([offset-noise, offset-noise], [0, np.nanmax(hn)], \n",
-    "                            linewidth=1.5, color='red',\n",
-    "                            label='1 $\\sigma$ deviation')\n",
-    "            grid[gain].plot([offset+noise, offset+noise],\n",
-    "                            [0, np.nanmax(hn)], linewidth=1.5, color='red')\n",
-    "            grid[gain].plot([offset, offset], [0, 0],\n",
-    "                            linewidth=1.5, color='y', label='median')\n",
-    "\n",
-    "            grid[gain].plot([np.nanmedian(offset_g[cap][qm][:, :, 12, gain]), \n",
-    "                             np.nanmedian(offset_g[cap][qm][:, :, 12, gain])],\n",
-    "                            [0, np.nanmax(hn)], linewidth=1.5, color='green', \n",
-    "                            label='average over pixels')\n",
-    "\n",
-    "            grid[gain].set_xlim(xrange)\n",
-    "            grid[gain].set_ylim(0, np.nanmax(hn)*1.1)\n",
-    "            grid[gain].set_xlabel(\"Offset value [ADU]\")\n",
-    "            grid[gain].set_ylabel(\"# of occurance\")\n",
-    "\n",
-    "            if gain == 0:\n",
-    "                leg = grid[gain].legend(\n",
-    "                    loc='upper center', ncol=3, \n",
-    "                    bbox_to_anchor=(0.1, 0.25, 0.7, 1.0))\n",
-    "\n",
-    "            grid[gain].text(820, np.nanmax(hn)*0.4,\n",
-    "                            \"{} gain\".format(gain_names[gain]), fontsize=20)\n",
-    "\n",
-    "            a = plt.axes([.125, .1, 0.775, .8], frame_on=False)\n",
-    "            a.patch.set_alpha(0.05)\n",
-    "            a.set_xlim(xrange)\n",
-    "            plt.plot([offset, offset], [0, 1], linewidth=1.5, color='y')\n",
-    "            plt.xticks([])\n",
-    "            plt.yticks([])\n",
-    "\n",
-    "        ypos = 0.9\n",
-    "        x1pos = (np.nanmedian(data_g[cap][qm][:, 0]) +\n",
-    "                 np.nanmedian(data_g[cap][qm][:, 2]))/2.\n",
-    "        x2pos = (np.nanmedian(data_g[cap][qm][:, 2]) +\n",
-    "                 np.nanmedian(data_g[cap][qm][:, 1]))/2.-10\n",
-    "\n",
-    "        plt.annotate(\"\", xy=(np.nanmedian(data_g[cap][qm][:, 0]), ypos), xycoords='data',\n",
-    "                     xytext=(np.nanmedian(data_g[cap][qm][:, 2]), ypos), textcoords='data',\n",
-    "                     arrowprops=dict(arrowstyle=\"<->\", connectionstyle=\"arc3\"))\n",
-    "\n",
-    "        plt.annotate('{}'.format(np.nanmedian(data_g[cap][qm][:, 0])-np.nanmedian(data_g[cap][qm][:, 2])),\n",
-    "                     xy=(x1pos, ypos), xycoords='data', xytext=(5, 5), textcoords='offset points')\n",
-    "\n",
-    "        plt.annotate(\"\", xy=(np.nanmedian(data_g[cap][qm][:, 2]), ypos), xycoords='data',\n",
-    "                     xytext=(np.nanmedian(data_g[cap][qm][:, 1]), ypos), textcoords='data',\n",
-    "                     arrowprops=dict(arrowstyle=\"<->\", connectionstyle=\"arc3\"))\n",
-    "\n",
-    "        plt.annotate('{}'.format(np.nanmedian(data_g[cap][qm][:, 2])-np.nanmedian(data_g[cap][qm][:, 1])),\n",
-    "                     xy=(x2pos, ypos), xycoords='data', xytext=(5, 5), textcoords='offset points')\n",
+    "for i in modules:\n",
+    "    qm = module_index_to_qm(i)\n",
+    "    if np.count_nonzero(~np.isnan(data_g[qm])) == 0:\n",
+    "        break\n",
+    "    for gain in range(3):\n",
+    "        data = data_g[qm][:, gain]\n",
+    "        offset = np.nanmedian(data)\n",
+    "        noise = np.nanstd(data)\n",
+    "        xrange = [np.nanmin(data_g[qm]), np.nanmax(data_g[qm])]\n",
+    "        if xrange[1] == xrange[0]:\n",
+    "            xrange = [0, xrange[0]+xrange[0]//2]\n",
+    "            nbins = data_g[qm].shape[0]\n",
+    "        else:\n",
+    "            nbins = int(xrange[1] - xrange[0])\n",
+    "\n",
+    "        hn, cn = np.histogram(data, bins=nbins, range=xrange)\n",
+    "\n",
+    "        grid[gain].hist(data, range=xrange, bins=nbins)\n",
+    "        grid[gain].plot([offset-noise, offset-noise], [0, np.nanmax(hn)], \n",
+    "                        linewidth=1.5, color='red',\n",
+    "                        label='1 $\\sigma$ deviation')\n",
+    "        grid[gain].plot([offset+noise, offset+noise],\n",
+    "                        [0, np.nanmax(hn)], linewidth=1.5, color='red')\n",
+    "        grid[gain].plot([offset, offset], [0, 0],\n",
+    "                        linewidth=1.5, color='y', label='median')\n",
+    "\n",
+    "        grid[gain].plot([np.nanmedian(offset_g[qm][:, :, 12, gain]), \n",
+    "                         np.nanmedian(offset_g[qm][:, :, 12, gain])],\n",
+    "                        [0, np.nanmax(hn)], linewidth=1.5, color='green', \n",
+    "                        label='average over pixels')\n",
+    "\n",
+    "        grid[gain].set_xlim(xrange)\n",
+    "        grid[gain].set_ylim(0, np.nanmax(hn)*1.1)\n",
+    "        grid[gain].set_xlabel(\"Offset value [ADU]\")\n",
+    "        grid[gain].set_ylabel(\"# of occurance\")\n",
+    "\n",
+    "        if gain == 0:\n",
+    "            leg = grid[gain].legend(\n",
+    "                loc='upper center', ncol=3, \n",
+    "                bbox_to_anchor=(0.1, 0.25, 0.7, 1.0))\n",
+    "\n",
+    "        grid[gain].text(820, np.nanmax(hn)*0.4,\n",
+    "                        \"{} gain\".format(gain_names[gain]), fontsize=20)\n",
+    "\n",
+    "        a = plt.axes([.125, .1, 0.775, .8], frame_on=False)\n",
+    "        a.patch.set_alpha(0.05)\n",
+    "        a.set_xlim(xrange)\n",
+    "        plt.plot([offset, offset], [0, 1], linewidth=1.5, color='y')\n",
+    "        plt.xticks([])\n",
+    "        plt.yticks([])\n",
+    "\n",
+    "    ypos = 0.9\n",
+    "    x1pos = (np.nanmedian(data_g[qm][:, 0]) +\n",
+    "             np.nanmedian(data_g[qm][:, 2]))/2.\n",
+    "    x2pos = (np.nanmedian(data_g[qm][:, 2]) +\n",
+    "             np.nanmedian(data_g[qm][:, 1]))/2.-10\n",
+    "\n",
+    "    plt.annotate(\"\", xy=(np.nanmedian(data_g[qm][:, 0]), ypos), xycoords='data',\n",
+    "                 xytext=(np.nanmedian(data_g[qm][:, 2]), ypos), textcoords='data',\n",
+    "                 arrowprops=dict(arrowstyle=\"<->\", connectionstyle=\"arc3\"))\n",
+    "\n",
+    "    plt.annotate('{}'.format(np.nanmedian(data_g[qm][:, 0])-np.nanmedian(data_g[qm][:, 2])),\n",
+    "                 xy=(x1pos, ypos), xycoords='data', xytext=(5, 5), textcoords='offset points')\n",
+    "\n",
+    "    plt.annotate(\"\", xy=(np.nanmedian(data_g[qm][:, 2]), ypos), xycoords='data',\n",
+    "                 xytext=(np.nanmedian(data_g[qm][:, 1]), ypos), textcoords='data',\n",
+    "                 arrowprops=dict(arrowstyle=\"<->\", connectionstyle=\"arc3\"))\n",
+    "\n",
+    "    plt.annotate('{}'.format(np.nanmedian(data_g[qm][:, 2])-np.nanmedian(data_g[qm][:, 1])),\n",
+    "                 xy=(x2pos, ypos), xycoords='data', xytext=(5, 5), textcoords='offset points')\n",
     "\n",
     "plt.show()"
    ]
@@ -676,16 +619,16 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# Loop over capacitor settings, modules, constants\n",
-    "for cap in capacitor_settings:\n",
-    "    if not test_for_normality:\n",
-    "        print('Normality test was not requested. Flag `test_for_normality` False')\n",
-    "        break\n",
+    "# Loop over modules, constants\n",
+    "\n",
+    "if not test_for_normality:\n",
+    "    print('Normality test was not requested. Flag `test_for_normality` False')\n",
+    "else:\n",
     "    for i in modules:\n",
     "        qm = module_index_to_qm(i)\n",
     "\n",
-    "        data = np.copy(ntest_g[cap][qm][:,:,:,:])\n",
-    "        data[badpix_g[cap][qm][:,:,:,:]>0] = 1.01\n",
+    "        data = np.copy(ntest_g[qm][:,:,:,:])\n",
+    "        data[badpix_g[qm][:,:,:,:]>0] = 1.01\n",
     "            \n",
     "        hn,cn = np.histogram(data[:,:,:,0], bins=100)\n",
     "       \n",
@@ -748,84 +691,86 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "scrolled": false
+   },
    "outputs": [],
    "source": [
     "cell = 12\n",
-    "for cap in capacitor_settings:\n",
-    "    for gain in range(3):\n",
-    "        display(\n",
-    "            Markdown('### Cell-12 overview - {} gain'.format(gain_names[gain])))\n",
-    "\n",
-    "        fig = plt.figure(figsize=(18, 22) , tight_layout={'pad': 0.1, 'w_pad': 0.1})\n",
-    "        for qm in res[cap]:\n",
-    "            for iconst, const in enumerate(['Offset', 'Noise', 'BadPixelsDark']):\n",
     "\n",
-    "                ax = fig.add_subplot(321+iconst)\n",
-    "\n",
-    "                data = res[cap][qm][const][:, :, 12, gain]\n",
-    "                vmax = 1.5 * np.nanmedian(res[cap][qm][const][:, :, 12, gain])\n",
-    "                title = const\n",
-    "                label = '{} value [ADU]'.format(const)\n",
-    "                title = '{} value'.format(const)\n",
-    "                if const == 'BadPixelsDark':\n",
-    "                    vmax = 4\n",
-    "                    bpix_code = data.astype(np.float32)\n",
-    "                    bpix_code[bpix_code == 0] = np.nan\n",
-    "                    title = 'Bad pixel code'\n",
-    "                    label = title\n",
-    "\n",
-    "                    cb_labels = ['1 {}'.format(BadPixels.NOISE_OUT_OF_THRESHOLD.name),\n",
-    "                                 '2 {}'.format(BadPixels.OFFSET_NOISE_EVAL_ERROR.name),\n",
-    "                                 '3 {}'.format(BadPixels.OFFSET_OUT_OF_THRESHOLD.name),\n",
-    "                                 '4 {}'.format('MIXED')]\n",
-    "\n",
-    "                    heatmapPlot(bpix_code, add_panels=False, cmap='viridis',\n",
-    "                                y_label='Rows', x_label='Columns',\n",
-    "                                lut_label='', vmax=vmax,\n",
-    "                                use_axis=ax, cb_ticklabels=cb_labels, cb_ticks = np.arange(4)+1,\n",
-    "                                title='{}'.format(title))\n",
-    "                    del bpix_code\n",
-    "                else:\n",
+    "for gain in range(3):\n",
+    "    display(\n",
+    "        Markdown('### Cell-12 overview - {} gain'.format(gain_names[gain])))\n",
+    "\n",
+    "    fig = plt.figure(figsize=(18, 22) , tight_layout={'pad': 0.1, 'w_pad': 0.1})\n",
+    "    for qm in res:\n",
+    "        for iconst, const in enumerate(['Offset', 'Noise', 'BadPixelsDark']):\n",
+    "\n",
+    "            ax = fig.add_subplot(321+iconst)\n",
+    "\n",
+    "            data = res[qm][const][:, :, 12, gain]\n",
+    "            vmax = 1.5 * np.nanmedian(res[qm][const][:, :, 12, gain])\n",
+    "            title = const\n",
+    "            label = '{} value [ADU]'.format(const)\n",
+    "            title = '{} value'.format(const)\n",
+    "            if const == 'BadPixelsDark':\n",
+    "                vmax = 4\n",
+    "                bpix_code = data.astype(np.float32)\n",
+    "                bpix_code[bpix_code == 0] = np.nan\n",
+    "                title = 'Bad pixel code'\n",
+    "                label = title\n",
+    "\n",
+    "                cb_labels = ['1 {}'.format(BadPixels.NOISE_OUT_OF_THRESHOLD.name),\n",
+    "                             '2 {}'.format(BadPixels.OFFSET_NOISE_EVAL_ERROR.name),\n",
+    "                             '3 {}'.format(BadPixels.OFFSET_OUT_OF_THRESHOLD.name),\n",
+    "                             '4 {}'.format('MIXED')]\n",
+    "\n",
+    "                heatmapPlot(bpix_code, add_panels=False, cmap='viridis',\n",
+    "                            y_label='Rows', x_label='Columns',\n",
+    "                            lut_label='', vmax=vmax,\n",
+    "                            use_axis=ax, cb_ticklabels=cb_labels, cb_ticks = np.arange(4)+1,\n",
+    "                            title='{}'.format(title))\n",
+    "                del bpix_code\n",
+    "            else:\n",
     "\n",
-    "                    heatmapPlot(data, add_panels=False, cmap='viridis',\n",
-    "                                y_label='Rows', x_label='Columns',\n",
-    "                                lut_label=label, vmax=vmax,\n",
+    "                heatmapPlot(data, add_panels=False, cmap='viridis',\n",
+    "                            y_label='Rows', x_label='Columns',\n",
+    "                            lut_label=label, vmax=vmax,\n",
+    "                            use_axis=ax,\n",
+    "                            title='{}'.format(title))\n",
+    "\n",
+    "    for qm in res:\n",
+    "        for iconst, const in enumerate(['Offset', 'Noise']):\n",
+    "            data = res[qm][const]\n",
+    "            dataBP = np.copy(data)\n",
+    "            dataBP[res[qm]['BadPixelsDark'] > 0] = -1\n",
+    "\n",
+    "            x_ranges = [[0, 1500], [0, 40]]\n",
+    "            hn, cn = np.histogram(\n",
+    "                data[:, :, :, gain], bins=100, range=x_ranges[iconst])\n",
+    "            hnBP, cnBP = np.histogram(dataBP[:, :, :, gain], bins=cn)\n",
+    "\n",
+    "            d = [{'x': cn[:-1],\n",
+    "                  'y': hn,\n",
+    "                  'drawstyle': 'steps-pre',\n",
+    "                  'label': 'All data',\n",
+    "                  },\n",
+    "                 {'x': cnBP[:-1],\n",
+    "                  'y': hnBP,\n",
+    "                  'drawstyle': 'steps-pre',\n",
+    "                  'label': 'Bad pixels masked',\n",
+    "                  },\n",
+    "                 ]\n",
+    "\n",
+    "            ax = fig.add_subplot(325+iconst)\n",
+    "            _ = simplePlot(d, figsize=(5, 7), aspect=1,\n",
+    "                                x_label=\"{} value [ADU]\".format(const),\n",
+    "                                y_label=\"# of occurance\",\n",
+    "                                title='', legend_pad=0.1, legend_size='10%',\n",
     "                                use_axis=ax,\n",
-    "                                title='{}'.format(title))\n",
-    "\n",
-    "        for qm in res[cap]:\n",
-    "            for iconst, const in enumerate(['Offset', 'Noise']):\n",
-    "                data = res[cap][qm][const]\n",
-    "                dataBP = np.copy(data)\n",
-    "                dataBP[res[cap][qm]['BadPixelsDark'] > 0] = -1\n",
-    "\n",
-    "                x_ranges = [[0, 1500], [0, 40]]\n",
-    "                hn, cn = np.histogram(\n",
-    "                    data[:, :, :, gain], bins=100, range=x_ranges[iconst])\n",
-    "                hnBP, cnBP = np.histogram(dataBP[:, :, :, gain], bins=cn)\n",
-    "\n",
-    "                d = [{'x': cn[:-1],\n",
-    "                      'y': hn,\n",
-    "                      'drawstyle': 'steps-pre',\n",
-    "                      'label': 'All data',\n",
-    "                      },\n",
-    "                     {'x': cnBP[:-1],\n",
-    "                      'y': hnBP,\n",
-    "                      'drawstyle': 'steps-pre',\n",
-    "                      'label': 'Bad pixels masked',\n",
-    "                      },\n",
-    "                     ]\n",
-    "\n",
-    "                ax = fig.add_subplot(325+iconst)\n",
-    "                _ = simplePlot(d, figsize=(5, 7), aspect=1,\n",
-    "                                    x_label=\"{} value [ADU]\".format(const),\n",
-    "                                    y_label=\"# of occurance\",\n",
-    "                                    title='', legend_pad=0.1, legend_size='10%',\n",
-    "                                    use_axis=ax,\n",
-    "                                    y_log=True, legend='outside-top-2col-frame')\n",
+    "                                y_log=True, legend='outside-top-2col-frame')\n",
     "\n",
-    "        plt.show()"
+    "    plt.show()"
    ]
   },
   {
@@ -870,12 +815,11 @@
     "    rebin = 2\n",
     "    for gain in range(3):\n",
     "        display(Markdown('### Bad pixel behaviour - {} gain ###'.format(gain_names[gain])))\n",
-    "        for cap in capacitor_settings:\n",
-    "            for mod, data in badpix_g[cap].items():\n",
-    "                plot_badpix_3d(data[...,gain], cols, title='', rebin_fac=rebin)\n",
-    "                ax = plt.gca()\n",
-    "                leg = ax.get_legend()\n",
-    "                leg.set(alpha=0.5)\n",
+    "        for mod, data in badpix_g.items():\n",
+    "            plot_badpix_3d(data[...,gain], cols, title='', rebin_fac=rebin)\n",
+    "            ax = plt.gca()\n",
+    "            leg = ax.get_legend()\n",
+    "            leg.set(alpha=0.5)\n",
     "        plt.show()"
    ]
   },
@@ -904,91 +848,91 @@
    "outputs": [],
    "source": [
     "time_summary = []\n",
-    "for cap, cap_data in old_mdata.items():\n",
-    "    time_summary.append(f\"The following pre-existing constants are used for comparison for capacitor setting **{cap}**:\")\n",
-    "    for qm, qm_data in cap_data.items():\n",
-    "        time_summary.append(f\"- Module {qm}\")\n",
-    "        for const, const_data in qm_data.items():\n",
-    "            time_summary.append(f\"    - {const} created at {const_data['timestamp']}\")\n",
+    "time_summary.append(f\"The following pre-existing constants are used for comparison:\")\n",
+    "for qm, qm_data in old_mdata.items():\n",
+    "    time_summary.append(f\"- Module {qm}\")\n",
+    "    for const, const_data in qm_data.items():\n",
+    "        time_summary.append(f\"    - {const} created at {const_data['timestamp']}\")\n",
     "display(Markdown(\"\\n\".join(time_summary)))"
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "scrolled": false
+   },
    "outputs": [],
    "source": [
-    "# Loop over capacitor settings, modules, constants\n",
-    "for cap in res:\n",
-    "    for qm in res[cap]:\n",
-    "        for gain in range(3):\n",
-    "            display(Markdown('### Summary across tiles - {} gain'.format(gain_names[gain])))\n",
+    "# Loop over modules, constants\n",
+    "for qm in res:\n",
+    "    for gain in range(3):\n",
+    "        display(Markdown('### Summary across tiles - {} gain'.format(gain_names[gain])))\n",
     "\n",
-    "            for const in res[cap][qm]:\n",
-    "                data = np.copy(res[cap][qm][const][:, :, :, gain])\n",
+    "        for const in res[qm]:\n",
+    "            data = np.copy(res[qm][const][:, :, :, gain])\n",
+    "\n",
+    "            label = 'Fraction of bad pixels'\n",
+    "\n",
+    "            if const != 'BadPixelsDark':\n",
+    "                data[badpix_g[qm][:, :, :, gain] > 0] = np.nan\n",
+    "                label = '{} value [ADU]'.format(const)\n",
+    "            else:\n",
+    "                data[data>0] = 1.0\n",
     "\n",
-    "                label = 'Fraction of bad pixels'\n",
+    "            data = data.reshape(\n",
+    "                int(data.shape[0] / 32),\n",
+    "                32,\n",
+    "                int(data.shape[1] / 128),\n",
+    "                128,\n",
+    "                data.shape[2])\n",
+    "            data = np.nanmean(data, axis=(1, 3)).swapaxes(\n",
+    "                0, 2).reshape(512, 16)\n",
+    "\n",
+    "            fig = plt.figure(figsize=(15, 6))\n",
+    "            ax = fig.add_subplot(121)\n",
+    "\n",
+    "            _ = heatmapPlot(data[:510, :], add_panels=True,\n",
+    "                            y_label='Momery Cell ID', x_label='Tile ID',\n",
+    "                            lut_label=label, use_axis=ax,\n",
+    "                            panel_y_label=label, panel_x_label=label,\n",
+    "                            cmap='viridis',  # cb_loc='right',cb_aspect=15,\n",
+    "                            x_ticklabels=np.arange(16)+1,\n",
+    "                            x_ticks=np.arange(16)+0.5)\n",
+    "\n",
+    "            if old_const[qm][const] is not None:\n",
+    "                ax = fig.add_subplot(122)\n",
+    "\n",
+    "                dataold = np.copy(old_const[qm][const][:, :, :, gain])\n",
+    "\n",
+    "                label = '$\\Delta$ {}'.format(label)\n",
     "\n",
     "                if const != 'BadPixelsDark':\n",
-    "                    data[badpix_g[cap][qm][:, :, :, gain] > 0] = np.nan\n",
-    "                    label = '{} value [ADU]'.format(const)\n",
+    "                    if old_const[qm]['BadPixelsDark'] is not None:\n",
+    "                        dataold[old_const[qm]['BadPixelsDark'][:, :, :, gain] > 0] = np.nan\n",
+    "                    else:\n",
+    "                        dataold[:] = np.nan\n",
     "                else:\n",
-    "                    data[data>0] = 1.0\n",
-    "                    \n",
-    "                data = data.reshape(\n",
-    "                    int(data.shape[0] / 32),\n",
+    "                    dataold[dataold>0]=1.0\n",
+    "\n",
+    "                dataold = dataold.reshape(\n",
+    "                    int(dataold.shape[0] / 32),\n",
     "                    32,\n",
-    "                    int(data.shape[1] / 128),\n",
+    "                    int(dataold.shape[1] / 128),\n",
     "                    128,\n",
-    "                    data.shape[2])\n",
-    "                data = np.nanmean(data, axis=(1, 3)).swapaxes(\n",
-    "                    0, 2).reshape(512, 16)\n",
+    "                    dataold.shape[2])\n",
+    "                dataold = np.nanmean(dataold, axis=(\n",
+    "                    1, 3)).swapaxes(0, 2).reshape(512, 16)\n",
+    "                dataold = dataold - data\n",
     "\n",
-    "                fig = plt.figure(figsize=(15, 6))\n",
-    "                ax = fig.add_subplot(121)\n",
-    "\n",
-    "                _ = heatmapPlot(data[:510, :], add_panels=True,\n",
+    "                _ = heatmapPlot(dataold[:510, :], add_panels=True,\n",
     "                                y_label='Momery Cell ID', x_label='Tile ID',\n",
     "                                lut_label=label, use_axis=ax,\n",
     "                                panel_y_label=label, panel_x_label=label,\n",
     "                                cmap='viridis',  # cb_loc='right',cb_aspect=15,\n",
     "                                x_ticklabels=np.arange(16)+1,\n",
     "                                x_ticks=np.arange(16)+0.5)\n",
-    "\n",
-    "                if old_const[cap][qm][const] is not None:\n",
-    "                    ax = fig.add_subplot(122)\n",
-    "\n",
-    "                    dataold = np.copy(old_const[cap][qm][const][:, :, :, gain])\n",
-    "                    \n",
-    "                    label = '$\\Delta$ {}'.format(label)\n",
-    "\n",
-    "                    if const != 'BadPixelsDark':\n",
-    "                        if old_const[cap][qm]['BadPixelsDark'] is not None:\n",
-    "                            dataold[old_const[cap][qm]['BadPixelsDark'][:, :, :, gain] > 0] = np.nan\n",
-    "                        else:\n",
-    "                            dataold[:] = np.nan\n",
-    "                    else:\n",
-    "                        dataold[dataold>0]=1.0\n",
-    "\n",
-    "                    dataold = dataold.reshape(\n",
-    "                        int(dataold.shape[0] / 32),\n",
-    "                        32,\n",
-    "                        int(dataold.shape[1] / 128),\n",
-    "                        128,\n",
-    "                        dataold.shape[2])\n",
-    "                    dataold = np.nanmean(dataold, axis=(\n",
-    "                        1, 3)).swapaxes(0, 2).reshape(512, 16)\n",
-    "                    dataold = dataold - data\n",
-    "\n",
-    "                    _ = heatmapPlot(dataold[:510, :], add_panels=True,\n",
-    "                                    y_label='Momery Cell ID', x_label='Tile ID',\n",
-    "                                    lut_label=label, use_axis=ax,\n",
-    "                                    panel_y_label=label, panel_x_label=label,\n",
-    "                                    cmap='viridis',  # cb_loc='right',cb_aspect=15,\n",
-    "                                    x_ticklabels=np.arange(16)+1,\n",
-    "                                    x_ticks=np.arange(16)+0.5)\n",
-    "            plt.show()"
+    "        plt.show()"
    ]
   },
   {
@@ -1015,31 +959,30 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# Loop over capacitor settings, modules, constants\n",
-    "for cap in res:\n",
-    "    for qm in res[cap]:\n",
-    "        for gain in range(3):\n",
-    "            display(Markdown('### Variation of offset and noise across ASICs - {} gain'.format(gain_names[gain])))\n",
-    "\n",
-    "            fig = plt.figure(figsize=(15, 6))\n",
-    "            for iconst, const in enumerate(['Offset', 'Noise']):\n",
-    "                data = np.copy(res[cap][qm][const][:, :, :, gain])\n",
-    "                data[badpix_g[cap][qm][:, :, :, gain] > 0] = np.nan\n",
-    "                label = '$\\sigma$ {} [ADU]'.format(const)\n",
-    "\n",
-    "                dataA = np.nanmean(data, axis=2)  # average over cells\n",
-    "                dataA = dataA.reshape(8, 32, 16, 16)\n",
-    "                dataA = np.nanstd(dataA, axis=(0, 2))  # average across ASICs\n",
-    "\n",
-    "                ax = fig.add_subplot(121+iconst)\n",
-    "                _ = heatmapPlot(dataA, add_panels=True,\n",
-    "                                y_label='rows', x_label='columns',\n",
-    "                                lut_label=label, use_axis=ax,\n",
-    "                                panel_y_label=label, panel_x_label=label,\n",
-    "                                cmap='viridis'\n",
-    "                                )\n",
+    "# Loop over modules, constants\n",
+    "for qm in res:\n",
+    "    for gain in range(3):\n",
+    "        display(Markdown('### Variation of offset and noise across ASICs - {} gain'.format(gain_names[gain])))\n",
+    "\n",
+    "        fig = plt.figure(figsize=(15, 6))\n",
+    "        for iconst, const in enumerate(['Offset', 'Noise']):\n",
+    "            data = np.copy(res[qm][const][:, :, :, gain])\n",
+    "            data[badpix_g[qm][:, :, :, gain] > 0] = np.nan\n",
+    "            label = '$\\sigma$ {} [ADU]'.format(const)\n",
+    "\n",
+    "            dataA = np.nanmean(data, axis=2)  # average over cells\n",
+    "            dataA = dataA.reshape(8, 32, 16, 16)\n",
+    "            dataA = np.nanstd(dataA, axis=(0, 2))  # average across ASICs\n",
+    "\n",
+    "            ax = fig.add_subplot(121+iconst)\n",
+    "            _ = heatmapPlot(dataA, add_panels=True,\n",
+    "                            y_label='rows', x_label='columns',\n",
+    "                            lut_label=label, use_axis=ax,\n",
+    "                            panel_y_label=label, panel_x_label=label,\n",
+    "                            cmap='viridis'\n",
+    "                            )\n",
     "\n",
-    "            plt.show()"
+    "        plt.show()"
    ]
   },
   {
@@ -1048,34 +991,33 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# Loop over capacitor settings, modules, constants\n",
-    "for cap in res:\n",
-    "    for qm in res[cap]:\n",
-    "        for gain in range(3):\n",
-    "            display(Markdown('### Variation of offset and noise across tiles - {} gain'.format(gain_names[gain])))\n",
-    "\n",
-    "            fig = plt.figure(figsize=(15, 6))\n",
-    "            for iconst, const in enumerate(['Offset', 'Noise']):\n",
-    "                data = np.copy(res[cap][qm][const][:, :, :, gain])\n",
-    "                data[badpix_g[cap][qm][:, :, :, gain] > 0] = np.nan\n",
-    "                label = '$\\sigma$ {} [ADU]'.format(const)\n",
-    "                    \n",
-    "                dataT = data.reshape(\n",
-    "                    int(data.shape[0] / 32),\n",
-    "                    32,\n",
-    "                    int(data.shape[1] / 128),\n",
-    "                    128,\n",
-    "                    data.shape[2])\n",
-    "                dataT = np.nanstd(dataT, axis=(0, 2))\n",
-    "                dataT = np.nanmean(dataT, axis=2)\n",
-    "                \n",
-    "                ax = fig.add_subplot(121+iconst)\n",
-    "                _ = heatmapPlot(dataT, add_panels=True,\n",
-    "                                y_label='rows', x_label='columns',\n",
-    "                                lut_label=label, use_axis=ax,\n",
-    "                                panel_y_label=label, panel_x_label=label,\n",
-    "                                cmap='viridis')\n",
-    "            plt.show()"
+    "# Loop over modules, constants\n",
+    "for qm in res:\n",
+    "    for gain in range(3):\n",
+    "        display(Markdown('### Variation of offset and noise across tiles - {} gain'.format(gain_names[gain])))\n",
+    "\n",
+    "        fig = plt.figure(figsize=(15, 6))\n",
+    "        for iconst, const in enumerate(['Offset', 'Noise']):\n",
+    "            data = np.copy(res[qm][const][:, :, :, gain])\n",
+    "            data[badpix_g[qm][:, :, :, gain] > 0] = np.nan\n",
+    "            label = '$\\sigma$ {} [ADU]'.format(const)\n",
+    "\n",
+    "            dataT = data.reshape(\n",
+    "                int(data.shape[0] / 32),\n",
+    "                32,\n",
+    "                int(data.shape[1] / 128),\n",
+    "                128,\n",
+    "                data.shape[2])\n",
+    "            dataT = np.nanstd(dataT, axis=(0, 2))\n",
+    "            dataT = np.nanmean(dataT, axis=2)\n",
+    "\n",
+    "            ax = fig.add_subplot(121+iconst)\n",
+    "            _ = heatmapPlot(dataT, add_panels=True,\n",
+    "                            y_label='rows', x_label='columns',\n",
+    "                            lut_label=label, use_axis=ax,\n",
+    "                            panel_y_label=label, panel_x_label=label,\n",
+    "                            cmap='viridis')\n",
+    "        plt.show()"
    ]
   },
   {
@@ -1099,73 +1041,74 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "scrolled": false
+   },
    "outputs": [],
    "source": [
-    "# Loop over capacitor settings, modules, constants\n",
-    "for cap in res:\n",
-    "    for qm in res[cap]:\n",
-    "        for gain in range(3):\n",
-    "            display(Markdown('### Mean over pixels - {} gain'.format(gain_names[gain])))\n",
-    "            \n",
-    "            fig = plt.figure(figsize=(9,11))\n",
-    "\n",
-    "            for iconst, const in enumerate(res[cap][qm]):\n",
-    "\n",
-    "                ax = fig.add_subplot(311+iconst)\n",
-    "                    \n",
-    "                data = res[cap][qm][const][:,:,:510,gain]\n",
-    "                if const == 'BadPixelsDark':\n",
-    "                    data[data>0] = 1.0\n",
-    "                    \n",
-    "                dataBP = np.copy(data)\n",
-    "                dataBP[badpix_g[cap][qm][:,:,:510,gain]>0] = -10\n",
-    "\n",
-    "                data = np.nanmean(data, axis=(0,1))\n",
-    "                dataBP = np.nanmean(dataBP, axis=(0,1))\n",
-    "                \n",
-    "                d = [{'y': data,\n",
-    "                      'x': np.arange(data.shape[0]),\n",
-    "                      'drawstyle': 'steps-mid',\n",
-    "                      'label' : 'All data'\n",
-    "                     }\n",
-    "                    ]\n",
-    "                \n",
-    "                if const != 'BadPixelsDark':\n",
-    "                    d.append({'y': dataBP,\n",
-    "                      'x': np.arange(data.shape[0]),\n",
-    "                      'drawstyle': 'steps-mid',\n",
-    "                      'label' : 'good pixels only'\n",
-    "                     })\n",
-    "                    y_title = \"{} value [ADU]\".format(const)\n",
-    "                    title = \"{} value, {} gain\".format(const, gain_names[gain])\n",
-    "                else:\n",
-    "                    y_title = \"Fraction of Bad Pixels\"\n",
-    "                    title = \"Fraction of Bad Pixels, {} gain\".format(gain_names[gain])\n",
-    "                \n",
-    "                data_min = np.min([data, dataBP])if const != 'BadPixelsDark' else np.min([data])\n",
-    "                data_max = np.max([data[20:], dataBP[20:]])\n",
-    "                data_dif = data_max - data_min\n",
-    "                \n",
-    "                local_max = np.max([data[200:300], dataBP[200:300]])\n",
-    "                frac = 0.35\n",
-    "                new_max = (local_max - data_min*(1-frac))/frac\n",
-    "                new_max = np.max([data_max, new_max])\n",
-    "               \n",
-    "                _ = simplePlot(d, figsize=(10,10), aspect=2, xrange=(-12, 510),\n",
-    "                                  x_label = 'Memory Cell ID', \n",
-    "                                  y_label=y_title, use_axis=ax,\n",
-    "                                  title=title,\n",
-    "                                  title_position=[0.5, 1.15],  \n",
-    "                                  inset='xy-coord-right', inset_x_range=(0,20), inset_indicated=True,\n",
-    "                                  inset_labeled=True, inset_coord=[0.2,0.5,0.6,0.95],\n",
-    "                                    inset_lw = 1.0, y_range = [data_min-data_dif*0.05, new_max+data_dif*0.05],\n",
-    "                                  y_log=False, legend='outside-top-ncol2-frame', legend_size='18%',\n",
-    "                                     legend_pad=0.00)\n",
-    "                \n",
-    "                plt.tight_layout(pad=1.08, h_pad=0.35)\n",
-    "                \n",
-    "            plt.show()"
+    "# Loop over modules, constants\n",
+    "for qm in res:\n",
+    "    for gain in range(3):\n",
+    "        display(Markdown('### Mean over pixels - {} gain'.format(gain_names[gain])))\n",
+    "\n",
+    "        fig = plt.figure(figsize=(9,11))\n",
+    "\n",
+    "        for iconst, const in enumerate(res[qm]):\n",
+    "\n",
+    "            ax = fig.add_subplot(311+iconst)\n",
+    "\n",
+    "            data = res[qm][const][:,:,:510,gain]\n",
+    "            if const == 'BadPixelsDark':\n",
+    "                data[data>0] = 1.0\n",
+    "\n",
+    "            dataBP = np.copy(data)\n",
+    "            dataBP[badpix_g[qm][:,:,:510,gain]>0] = -10\n",
+    "\n",
+    "            data = np.nanmean(data, axis=(0,1))\n",
+    "            dataBP = np.nanmean(dataBP, axis=(0,1))\n",
+    "\n",
+    "            d = [{'y': data,\n",
+    "                  'x': np.arange(data.shape[0]),\n",
+    "                  'drawstyle': 'steps-mid',\n",
+    "                  'label' : 'All data'\n",
+    "                 }\n",
+    "                ]\n",
+    "\n",
+    "            if const != 'BadPixelsDark':\n",
+    "                d.append({'y': dataBP,\n",
+    "                  'x': np.arange(data.shape[0]),\n",
+    "                  'drawstyle': 'steps-mid',\n",
+    "                  'label' : 'good pixels only'\n",
+    "                 })\n",
+    "                y_title = \"{} value [ADU]\".format(const)\n",
+    "                title = \"{} value, {} gain\".format(const, gain_names[gain])\n",
+    "            else:\n",
+    "                y_title = \"Fraction of Bad Pixels\"\n",
+    "                title = \"Fraction of Bad Pixels, {} gain\".format(gain_names[gain])\n",
+    "\n",
+    "            data_min = np.min([data, dataBP])if const != 'BadPixelsDark' else np.min([data])\n",
+    "            data_max = np.max([data[20:], dataBP[20:]])\n",
+    "            data_dif = data_max - data_min\n",
+    "\n",
+    "            local_max = np.max([data[200:300], dataBP[200:300]])\n",
+    "            frac = 0.35\n",
+    "            new_max = (local_max - data_min*(1-frac))/frac\n",
+    "            new_max = np.max([data_max, new_max])\n",
+    "\n",
+    "            _ = simplePlot(d, figsize=(10,10), aspect=2, xrange=(-12, 510),\n",
+    "                              x_label = 'Memory Cell ID', \n",
+    "                              y_label=y_title, use_axis=ax,\n",
+    "                              title=title,\n",
+    "                              title_position=[0.5, 1.15],  \n",
+    "                              inset='xy-coord-right', inset_x_range=(0,20), inset_indicated=True,\n",
+    "                              inset_labeled=True, inset_coord=[0.2,0.5,0.6,0.95],\n",
+    "                                inset_lw = 1.0, y_range = [data_min-data_dif*0.05, new_max+data_dif*0.05],\n",
+    "                              y_log=False, legend='outside-top-ncol2-frame', legend_size='18%',\n",
+    "                                 legend_pad=0.00)\n",
+    "\n",
+    "            plt.tight_layout(pad=1.08, h_pad=0.35)\n",
+    "\n",
+    "        plt.show()"
    ]
   },
   {
@@ -1193,42 +1136,46 @@
    "outputs": [],
    "source": [
     "table = []\n",
-    "bits = [BadPixels.NOISE_OUT_OF_THRESHOLD, BadPixels.OFFSET_OUT_OF_THRESHOLD, BadPixels.OFFSET_NOISE_EVAL_ERROR]\n",
-    "for cap in res:\n",
-    "    for qm in res[cap]:\n",
-    "        for gain in range(3):\n",
-    "            \n",
-    "            l_data = []\n",
-    "            l_data_old = []\n",
-    "            \n",
-    "            data = np.copy(res[cap][qm]['BadPixelsDark'][:,:,:,gain])\n",
-    "            l_data.append(len(data[data>0].flatten()))\n",
+    "bits = [\n",
+    "    BadPixels.NOISE_OUT_OF_THRESHOLD,\n",
+    "    BadPixels.OFFSET_OUT_OF_THRESHOLD,\n",
+    "    BadPixels.OFFSET_NOISE_EVAL_ERROR\n",
+    "]\n",
+    "\n",
+    "for qm in res:\n",
+    "    for gain in range(3):\n",
+    "\n",
+    "        l_data = []\n",
+    "        l_data_old = []\n",
+    "\n",
+    "        data = np.copy(res[qm]['BadPixelsDark'][:,:,:,gain])\n",
+    "        l_data.append(len(data[data>0].flatten()))\n",
+    "        for bit in bits:\n",
+    "            l_data.append(np.count_nonzero(badpix_g[qm][:,:,:,gain] & bit.value))\n",
+    "\n",
+    "        if old_const[qm]['BadPixelsDark'] is not None:\n",
+    "            old_const[qm]['BadPixelsDark'] = old_const[qm]['BadPixelsDark'].astype(np.uint32)\n",
+    "            dataold = np.copy(old_const[qm]['BadPixelsDark'][:, :, :, gain])\n",
+    "            l_data_old.append(len(dataold[dataold>0].flatten()))\n",
     "            for bit in bits:\n",
-    "                l_data.append(np.count_nonzero(badpix_g[cap][qm][:,:,:,gain] & bit.value))\n",
-    "            \n",
-    "            if old_const[cap][qm]['BadPixelsDark'] is not None:\n",
-    "                old_const[cap][qm]['BadPixelsDark'] = old_const[cap][qm]['BadPixelsDark'].astype(np.uint32)\n",
-    "                dataold = np.copy(old_const[cap][qm]['BadPixelsDark'][:, :, :, gain])\n",
-    "                l_data_old.append(len(dataold[dataold>0].flatten()))\n",
-    "                for bit in bits:\n",
-    "                    l_data_old.append(np.count_nonzero(old_const[cap][qm]['BadPixelsDark'][:, :, :, gain] & bit.value))\n",
-    "\n",
-    "            l_data_name = ['All bad pixels', 'NOISE_OUT_OF_THRESHOLD', \n",
-    "                           'OFFSET_OUT_OF_THRESHOLD', 'OFFSET_NOISE_EVAL_ERROR']\n",
-    "            \n",
-    "            l_threshold = ['', f'{thresholds_noise_sigma}', f'{thresholds_offset_sigma}',\n",
-    "                           f'{thresholds_offset_hard}/{thresholds_noise_hard}']\n",
-    "            \n",
-    "            for i in range(len(l_data)):\n",
-    "                line = [f'{l_data_name[i]}, gain {gain_names[gain]}', l_threshold[i], l_data[i]]\n",
-    "            \n",
-    "                if old_const[cap][qm]['BadPixelsDark'] is not None:\n",
-    "                    line += [l_data_old[i]]\n",
-    "                else:\n",
-    "                    line += ['-']\n",
-    "                    \n",
-    "                table.append(line)\n",
-    "            table.append(['', '', '', ''])\n",
+    "                l_data_old.append(np.count_nonzero(old_const[qm]['BadPixelsDark'][:, :, :, gain] & bit.value))\n",
+    "\n",
+    "        l_data_name = ['All bad pixels', 'NOISE_OUT_OF_THRESHOLD', \n",
+    "                       'OFFSET_OUT_OF_THRESHOLD', 'OFFSET_NOISE_EVAL_ERROR']\n",
+    "\n",
+    "        l_threshold = ['', f'{thresholds_noise_sigma}', f'{thresholds_offset_sigma}',\n",
+    "                       f'{thresholds_offset_hard}/{thresholds_noise_hard}']\n",
+    "\n",
+    "        for i in range(len(l_data)):\n",
+    "            line = [f'{l_data_name[i]}, gain {gain_names[gain]}', l_threshold[i], l_data[i]]\n",
+    "\n",
+    "            if old_const[qm]['BadPixelsDark'] is not None:\n",
+    "                line += [l_data_old[i]]\n",
+    "            else:\n",
+    "                line += ['-']\n",
+    "\n",
+    "            table.append(line)\n",
+    "        table.append(['', '', '', ''])\n",
     "\n",
     "display(Markdown('''\n",
     "\n",
@@ -1256,29 +1203,28 @@
     "\n",
     "for const in ['Offset', 'Noise']:\n",
     "    table = [['','High gain', 'High gain', 'Medium gain', 'Medium gain', 'Low gain', 'Low gain']]\n",
-    "    for cap in res:\n",
-    "        for qm in res[cap]:\n",
+    "    for qm in res:\n",
     "\n",
-    "            data = np.copy(res[cap][qm][const])\n",
-    "            data[res[cap][qm]['BadPixelsDark']>0] = np.nan\n",
-    "            \n",
-    "            if old_const[cap][qm][const] is not None and old_const[cap][qm]['BadPixelsDark'] is not None :\n",
-    "                dataold = np.copy(old_const[cap][qm][const])\n",
-    "                dataold[old_const[cap][qm]['BadPixelsDark']>0] = np.nan\n",
-    "\n",
-    "            f_list = [np.nanmedian, np.nanmean, np.nanstd, np.nanmin, np.nanmax]\n",
-    "            n_list = ['Median', 'Mean', 'Std', 'Min', 'Max']\n",
-    "\n",
-    "            for i, f in enumerate(f_list):\n",
-    "                line = [n_list[i]]\n",
-    "                for gain in range(3):\n",
-    "                    line.append('{:6.1f}'.format(f(data[...,gain])))\n",
-    "                    if old_const[cap][qm][const] is not None and old_const[cap][qm]['BadPixelsDark'] is not None:\n",
-    "                        line.append('{:6.1f}'.format(f(dataold[...,gain])))\n",
-    "                    else:\n",
-    "                        line.append('-')\n",
+    "        data = np.copy(res[qm][const])\n",
+    "        data[res[qm]['BadPixelsDark']>0] = np.nan\n",
+    "\n",
+    "        if old_const[qm][const] is not None and old_const[qm]['BadPixelsDark'] is not None :\n",
+    "            dataold = np.copy(old_const[qm][const])\n",
+    "            dataold[old_const[qm]['BadPixelsDark']>0] = np.nan\n",
+    "\n",
+    "        f_list = [np.nanmedian, np.nanmean, np.nanstd, np.nanmin, np.nanmax]\n",
+    "        n_list = ['Median', 'Mean', 'Std', 'Min', 'Max']\n",
+    "\n",
+    "        for i, f in enumerate(f_list):\n",
+    "            line = [n_list[i]]\n",
+    "            for gain in range(3):\n",
+    "                line.append('{:6.1f}'.format(f(data[...,gain])))\n",
+    "                if old_const[qm][const] is not None and old_const[qm]['BadPixelsDark'] is not None:\n",
+    "                    line.append('{:6.1f}'.format(f(dataold[...,gain])))\n",
+    "                else:\n",
+    "                    line.append('-')\n",
     "\n",
-    "                table.append(line)\n",
+    "            table.append(line)\n",
     "\n",
     "    display(Markdown('### {} [ADU], good pixels only ###'.format(const)))\n",
     "    md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=header)))  "
@@ -1287,9 +1233,9 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "Python 3",
+   "display_name": "Offline Cal",
    "language": "python",
-   "name": "python3"
+   "name": "offline-cal"
   },
   "language_info": {
    "codemirror_mode": {
@@ -1301,7 +1247,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.8.12"
+   "version": "3.8.10"
   }
  },
  "nbformat": 4,