diff --git a/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb b/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb
index cd968ed1f16d1109ae4175e64337aa8d30bcf9f0..4d3a6339d04cb0da994cdb6a3b86f40512a3a977 100644
--- a/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb
+++ b/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb
@@ -45,10 +45,11 @@
     "chunk_size_idim = 1  # chunking size of imaging dimension, adjust if user software is sensitive to this.\n",
     "integration_time = 4.96  # integration time in us, will be overwritten by value in file\n",
     "mem_cells = 0  # leave memory cells equal 0, as it is saved in control information starting 2019.\n",
-    "db_module = [\"Jungfrau_M275\"]  # ID of module in calibration database\n",
+    "db_module = \"\"  # ID of module in calibration database\n",
     "manual_slow_data = False  # if true, use manually entered bias_voltage and integration_time values\n",
     "chunk_size = 0\n",
     "\n",
+    "\n",
     "def balance_sequences(in_folder, run, sequences, sequences_per_node, karabo_da):\n",
     "    from xfel_calibrate.calibrate import balance_sequences as bs\n",
     "    return bs(in_folder, run, sequences, sequences_per_node, karabo_da)"
diff --git a/notebooks/Jungfrau/Jungfrau_dark_analysis_all_gains_burst_mode_NBC.ipynb b/notebooks/Jungfrau/Jungfrau_dark_analysis_all_gains_burst_mode_NBC.ipynb
index 5edd0344aa075d478e9c6820f92a66df4fe1b624..123191b50c6d955d6de1d47eabf025991d6d67d0 100644
--- a/notebooks/Jungfrau/Jungfrau_dark_analysis_all_gains_burst_mode_NBC.ipynb
+++ b/notebooks/Jungfrau/Jungfrau_dark_analysis_all_gains_burst_mode_NBC.ipynb
@@ -21,28 +21,28 @@
     "in_folder = '/gpfs/exfel/exp/SPB/201922/p002566/raw/'  # folder under which runs are located, required\n",
     "out_folder = '/gpfs/exfel/data/scratch/ahmedk/jftest_dark/' # path to place reports at, required\n",
     "sequences = 0  # number of sequence files in that run\n",
-    "run_high = 103 # run number for G0 dark run, required\n",
-    "run_med = 104 # run number for G1 dark run, required\n",
-    "run_low = 105 # run number for G2 dark run, required\n",
+    "run_high = 103  # run number for G0 dark run, required\n",
+    "run_med = 104  # run number for G1 dark run, required\n",
+    "run_low = 105  # run number for G2 dark run, required\n",
     "\n",
-    "karabo_da = ['JNGFR01', 'JNGFR02','JNGFR03','JNGFR06','JNGFR07','JNGFR08'] # list of data aggregators, which corresponds to different JF modules\n",
+    "karabo_da = ['JNGFR01', 'JNGFR02','JNGFR03','JNGFR06','JNGFR07','JNGFR08']  # list of data aggregators, which corresponds to different JF modules\n",
     "karabo_id = \"SPB_IRDA_JNGFR\"  # bla karabo prefix of Jungfrau devices\n",
     "karabo_id_control = \"\"  # if control is on a different ID, set to empty string if it is the same a karabo-id\n",
-    "receiver_id = 'MODULE_{}' # inset for receiver devices\n",
-    "receiver_control_id = \"CONTROL\" # inset for control devices\n",
+    "receiver_id = 'MODULE_{}'  # inset for receiver devices\n",
+    "receiver_control_id = \"CONTROL\"  # inset for control devices\n",
     "path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5'  # template to use for file name, double escape sequence number\n",
     "h5path = '/INSTRUMENT/{}/DET/{}:daqOutput/data'  # path in H5 file under which images are located\n",
-    "h5path_run = '/RUN/{}/DET/{}' # path to run data\n",
-    "h5path_cntrl = '/CONTROL/{}/DET/{}' # path to control data\n",
+    "h5path_run = '/RUN/{}/DET/{}'  # path to run data\n",
+    "h5path_cntrl = '/CONTROL/{}/DET/{}'  # path to control data\n",
     "karabo_da_control = \"JNGFR01\" # file inset for control data\n",
     "\n",
-    "use_dir_creation_date = True # use dir creation date\n",
+    "use_dir_creation_date = True  # use dir creation date\n",
     "cal_db_interface = 'tcp://max-exfl016:8016'  # calibrate db interface to connect to\n",
     "cal_db_timeout = 300000 # timeout on caldb requests\n",
-    "local_output = True # output constants locally\n",
-    "db_output = False # output constants to database\n",
+    "local_output = True  # output constants locally\n",
+    "db_output = False  # output constants to database\n",
     "\n",
-    "integration_time = 1000 # integration time in us, will be overwritten by value in file\n",
+    "integration_time = 1000  # integration time in us, will be overwritten by value in file\n",
     "bias_voltage = 90  # sensor bias voltage in V, will be overwritten by value in file\n",
     "badpixel_threshold_sigma = 5.  # bad pixels defined by values outside n times this std from median\n",
     "offset_abs_threshold_low = [1000, 10000, 10000]  # absolute bad pixel threshold in terms of offset, lower values\n",
@@ -50,9 +50,9 @@
     "chunkSize = 10  # iteration chunk size, needs to match or be less than number of images in a sequence file\n",
     "imageRange = [0, 500]  # image range in which to evaluate\n",
     "memoryCells = 16  # number of memory cells\n",
-    "db_module = ['Jungfrau_M275', \"Jungfrau_M035\", 'Jungfrau_M273','Jungfrau_M203','Jungfrau_M221','Jungfrau_M267'] # ID of module in calibration database\n",
+    "db_module = \"\"  # ID of module in calibration database\n",
     "manual_slow_data = False  # if true, use manually entered bias_voltage and integration_time values\n",
-    "time_limits = 0.025 #  to find calibration constants later on, the integration time is allowed to vary by 0.5 us\n",
+    "time_limits = 0.025  # to find calibration constants later on, the integration time is allowed to vary by 0.5 us\n",
     "operation_mode = ''  # Detector operation mode, optional"
    ]
   },
@@ -67,18 +67,28 @@
     "import glob\n",
     "import os\n",
     "import warnings\n",
+    "from collections import OrderedDict\n",
     "\n",
     "warnings.filterwarnings('ignore')\n",
     "\n",
     "import h5py\n",
     "import matplotlib\n",
+    "import matplotlib.pyplot as plt\n",
+    "import numpy as np\n",
     "from h5py import File as h5file\n",
     "\n",
     "matplotlib.use('agg')\n",
-    "import matplotlib.pyplot as plt\n",
-    "\n",
     "%matplotlib inline\n",
-    "import numpy as np\n",
+    "\n",
+    "from XFELDetAna.detectors.jungfrau.util import (\n",
+    "    rollout_data,\n",
+    "    sanitize_data_cellid,\n",
+    ")\n",
+    "from XFELDetAna.detectors.jungfrau import reader as jfreader\n",
+    "from XFELDetAna.detectors.jungfrau.jf_chunk_reader import JFChunkReader\n",
+    "from XFELDetAna.plotting.heatmap import heatmapPlot\n",
+    "from XFELDetAna.plotting.histogram import histPlot\n",
+    "from XFELDetAna.util import env\n",
     "from cal_tools.ana_tools import save_dict_to_hdf5\n",
     "from cal_tools.enums import BadPixels\n",
     "from cal_tools.tools import (\n",
@@ -89,20 +99,9 @@
     "    save_const_to_h5,\n",
     "    send_to_db,\n",
     ")\n",
-    "from iCalibrationDB import Conditions, Constants, Detectors, Versions\n",
-    "from XFELDetAna.util import env\n",
+    "from iCalibrationDB import Conditions, Constants\n",
     "\n",
-    "env.iprofile = cluster_profile\n",
-    "from XFELDetAna.detectors.jungfrau import reader as jfreader\n",
-    "from XFELDetAna.detectors.jungfrau import readerPSI as jfreaderPSI\n",
-    "from XFELDetAna.detectors.jungfrau.jf_chunk_reader import JFChunkReader\n",
-    "from XFELDetAna.detectors.jungfrau.util import (\n",
-    "    count_n_files,\n",
-    "    rollout_data,\n",
-    "    sanitize_data_cellid,\n",
-    ")\n",
-    "from XFELDetAna.plotting.heatmap import heatmapPlot\n",
-    "from XFELDetAna.plotting.histogram import histPlot"
+    "env.iprofile = cluster_profile"
    ]
   },
   {
@@ -119,17 +118,6 @@
     "report = get_report(out_folder)\n",
     "\n",
     "os.makedirs(out_folder, exist_ok=True)\n",
-    "\n",
-    "# TODO \n",
-    "# this trick is needed until proper mapping is introduced\n",
-    "if len(db_module)>1:\n",
-    "    # TODO: SPB JF Hack till using all modules.\n",
-    "    if karabo_id == \"SPB_IRDA_JNGFR\" and int(path_inset[-2:]) > 5:\n",
-    "        db_module = db_module[int(path_inset[-2:])-3]\n",
-    "    else:\n",
-    "        db_module = db_module[int(path_inset[-2:])-1]\n",
-    "else:\n",
-    "    db_module = db_module[0]\n",
     "    \n",
     "# Constants relevant for the analysis\n",
     "run_nums = [run_high, run_med, run_low] # run number for G0/HG0, G1, G2 \n",
@@ -138,7 +126,9 @@
     "xRange = [0, 0+sensorSize[0]]\n",
     "yRange = [0, 0+sensorSize[1]]\n",
     "gains = [0, 1, 2]\n",
+    "\n",
     "h5path = h5path.format(karabo_id, receiver_id)\n",
+    "\n",
     "creation_time = None\n",
     "if use_dir_creation_date:\n",
     "    creation_time = get_dir_creation_date(in_folder, run_high)\n",
@@ -183,32 +173,34 @@
    "source": [
     "chunkSize = 100\n",
     "filep_size = 1000\n",
-    "noiseCal = None\n",
-    "noise_map = None\n",
-    "offset_map = None\n",
     "memoryCells = None\n",
-    "for i, r_n in enumerate(run_nums):\n",
-    "    \n",
-    "    gain = i\n",
-    "    print(f\"Gain stage {gain}, run {r_n}\")\n",
-    "    valid_data = []\n",
-    "    valid_cellids = []\n",
-    "    if r_n is not None:\n",
+    "\n",
+    "noise_map = OrderedDict()\n",
+    "offset_map = OrderedDict()\n",
+    "\n",
+    "# TODO: parallelize with multiprocessing.\n",
+    "for mod in karabo_da:\n",
+    "    for gain, r_n in enumerate(run_nums):\n",
+    "\n",
+    "        print(f\"Gain stage {gain}, run {r_n}\")\n",
+    "        valid_data = []\n",
+    "        valid_cellids = []\n",
+    "\n",
     "        n_tr = 0\n",
     "        n_empty_trains = 0\n",
     "        n_empty_sc = 0\n",
-    "        \n",
+    "\n",
     "        ped_dir = \"{}/r{:04d}/\".format(in_folder, r_n)\n",
     "        fp_name = path_template.format(r_n, karabo_da_control)\n",
     "        fp_path = '{}/{}'.format(ped_dir, fp_name)\n",
-    "      \n",
+    "\n",
     "        n_files = len(glob.glob(\"{}/*{}*.h5\".format(ped_dir, path_inset)))\n",
     "        myRange = range(0, n_files)\n",
     "        control_path = h5path_cntrl.format(karabo_id_control, receiver_control_id)\n",
-    "        \n",
+    "\n",
     "        this_run_mcells, sc_start = check_memoryCells(fp_path.format(0).format(myRange[0]), control_path)\n",
-    "            \n",
-    "        if noise_map is None:\n",
+    "\n",
+    "        if mod not in noise_map.keys():\n",
     "            if not manual_slow_data:\n",
     "                with h5py.File(fp_path.format(0), 'r') as f:\n",
     "                    run_path = h5path_run.format(karabo_id_control, receiver_control_id)\n",
@@ -216,16 +208,17 @@
     "                    bias_voltage = int(np.squeeze(f[f'{run_path}/vHighVoltage/value'])[0])\n",
     "            print(\"Integration time is {} us\".format(integration_time))\n",
     "            print(\"Bias voltage is {} V\".format(bias_voltage))\n",
+    "\n",
     "            if this_run_mcells == 1:\n",
     "                memoryCells = 1\n",
     "                print('Dark runs in single cell mode\\n storage cell start: {:02d}'.format(sc_start))\n",
     "            else:\n",
     "                memoryCells = 16\n",
     "                print('Dark runs in burst mode\\n storage cell start: {:02d}'.format(sc_start))\n",
-    "            \n",
-    "            noise_map = np.zeros(sensorSize+[memoryCells, 3])\n",
-    "            offset_map = np.zeros(sensorSize+[memoryCells, 3])\n",
-    "            \n",
+    "\n",
+    "        noise_map[mod] = np.zeros(sensorSize+[memoryCells, 3])\n",
+    "        offset_map[mod] = np.zeros(sensorSize+[memoryCells, 3])\n",
+    "\n",
     "        fp_name = path_template.format(r_n, path_inset)\n",
     "        fp_path = '{}/{}'.format(ped_dir, fp_name)\n",
     "        myRange_P = range(0, sequences)\n",
@@ -234,7 +227,7 @@
     "        print(\"Reading data from {}\".format(fp_path))\n",
     "        print(\"Run is: {}\".format(r_n))\n",
     "        print(\"HDF5 path: {}\".format(h5path))\n",
-    "            \n",
+    "\n",
     "        imageRange = [0, filep_size*len(myRange)]\n",
     "        reader = JFChunkReader(filename = fp_path, readFun = jfreader.readData, size = filep_size, chunkSize = chunkSize,\n",
     "                               path = h5path, image_range=imageRange, pixels_x = sensorSize[0], pixels_y = sensorSize[1],\n",
@@ -242,7 +235,7 @@
     "                               memoryCells=this_run_mcells, blockSize=blockSize)\n",
     "\n",
     "        for data in reader.readChunks():\n",
-    "            \n",
+    "\n",
     "            images = np.array(data[0], dtype=np.float)\n",
     "            gainmaps = np.array(data[1], dtype=np.uint16)\n",
     "            trainId = np.array(data[2])\n",
@@ -250,50 +243,51 @@
     "            acelltable = np.array(data[4])\n",
     "            n_tr += acelltable.shape[-1]\n",
     "            this_tr = acelltable.shape[-1]\n",
-    "           \n",
-    "              \n",
-    "            \n",
+    "\n",
+    "\n",
+    "\n",
     "            idxs = np.nonzero(trainId)[0]\n",
     "            images = images[..., idxs]\n",
     "            gainmaps = gainmaps[..., idxs]\n",
     "            fr_num = fr_num[..., idxs]\n",
     "            acelltable = acelltable[..., idxs]\n",
-    "            \n",
+    "\n",
     "            if memoryCells == 1:\n",
     "                acelltable -= sc_start\n",
-    "           \n",
+    "\n",
     "            n_empty_trains += this_tr - acelltable.shape[-1]\n",
     "            n_empty_sc += len(acelltable[acelltable > 15])\n",
-    "            \n",
-    "            if i > 0 and memoryCells == 16: ## throwing away all the SC entries except the first for lower gains\n",
+    "\n",
+    "            # throwing away all the SC entries except\n",
+    "            # the first for lower gains.\n",
+    "            if gain > 0 and memoryCells == 16: \n",
     "                acelltable[1:] = 255\n",
     "\n",
     "            # makes 4-dim vecs into 3-dim\n",
     "            # makes 2-dim into 1-dim\n",
     "            # leaves  1-dim and 3-dim vecs\n",
     "\n",
-    "            images, gainmaps, acelltable = rollout_data([images, gainmaps, acelltable]) \n",
-    "            \n",
-    "            images, gainmaps, acelltable = sanitize_data_cellid([images, gainmaps], acelltable) # removes entries with cellID 255\n",
+    "            images, gainmaps, acelltable = rollout_data([images, gainmaps, acelltable])\n",
+    "\n",
+    "            # removes entries with cellID 255\n",
+    "            images, gainmaps, acelltable = sanitize_data_cellid([images, gainmaps], acelltable)\n",
     "            valid_data.append(images)\n",
     "            valid_cellids.append(acelltable)\n",
-    "        \n",
+    "\n",
     "        valid_data = np.concatenate(valid_data, axis=2)\n",
     "        valid_cellids = np.concatenate(valid_cellids, axis=0)\n",
     "\n",
     "        for cell in range(memoryCells):\n",
     "\n",
-    "            thiscell = valid_data[...,valid_cellids == cell]\n",
-    "            noise_map[...,cell,gain] = np.std(thiscell, axis=2)\n",
-    "            offset_map[...,cell,gain] = np.mean(thiscell, axis=2)\n",
-    "        \n",
-    "        \n",
-    "        print('G{:01d} dark calibration'.format(i))\n",
-    "        print('Missed {:d} out of {:d} trains'.format(n_empty_trains, n_tr))\n",
-    "        print('Lost {:d} images out of {:d}'.format(n_empty_sc, this_run_mcells * (n_tr - n_empty_trains)))\n",
+    "            thiscell = valid_data[..., valid_cellids == cell]\n",
+    "            noise_map[mod][..., cell, gain] = np.std(thiscell, axis=2)\n",
+    "            offset_map[mod][..., cell, gain] = np.mean(thiscell, axis=2)\n",
+    "\n",
     "\n",
-    "    else:\n",
-    "        print('missing G{:01d}'.format(i))"
+    "        print(f'G{gain:01d} dark calibration')\n",
+    "        print(f'Missed {n_empty_trains:d} out of {n_tr:d} trains')\n",
+    "        print(\n",
+    "            f'Lost {n_empty_sc:d} images out of {this_run_mcells*(n_tr-n_empty_trains):d}')  # noqa"
    ]
   },
   {
@@ -311,15 +305,6 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import matplotlib.pyplot as plt\n",
-    "from XFELDetAna.core.util import remove_nans\n",
-    "\n",
-    "%matplotlib inline\n",
-    "#%matplotlib notebook\n",
-    "\n",
-    "from XFELDetAna.plotting.heatmap import heatmapPlot\n",
-    "from XFELDetAna.plotting.histogram import histPlot\n",
-    "\n",
     "g_name = ['G0', 'G1', 'G2']\n",
     "g_range = [(0, 8000), (8000, 16000), (8000, 16000)]\n",
     "n_range = [(0., 50.), (0., 50.), (0., 50.)]\n",
@@ -335,53 +320,63 @@
    },
    "outputs": [],
    "source": [
-    "for g_idx in gains:\n",
-    "    for cell in range(0, memoryCells):\n",
-    "        f_o0 = heatmapPlot(np.swapaxes(offset_map[..., cell, g_idx], 0, 1), \n",
-    "                           y_label=\"Row\",\n",
-    "                           x_label=\"Column\",\n",
-    "                           lut_label=unit,\n",
-    "                           aspect=1.,\n",
-    "                           vmin=g_range[g_idx][0],\n",
-    "                           vmax=g_range[g_idx][1],\n",
-    "                           title=f'Pedestal {g_name[g_idx]} - Cell {cell:02d}')\n",
-    "\n",
-    "        fo0, ax_o0 = plt.subplots()\n",
-    "        res_o0 = histPlot(ax_o0, offset_map[..., cell, g_idx],\n",
-    "                          bins=800,\n",
-    "                          range=g_range[g_idx],\n",
-    "                          facecolor='b',\n",
-    "                          histotype='stepfilled')\n",
-    "\n",
-    "        ax_o0.tick_params(axis='both',which='major',labelsize=15)\n",
-    "        ax_o0.set_title(f'Module pedestal distribution - Cell {cell:02d}', fontsize=15)\n",
-    "        ax_o0.set_xlabel(f'Pedestal {g_name[g_idx]} {unit}',fontsize=15)\n",
-    "        ax_o0.set_yscale('log')\n",
-    "\n",
-    "        f_n0 = heatmapPlot(np.swapaxes(noise_map[..., cell, g_idx], 0, 1), \n",
-    "                           y_label=\"Row\",\n",
-    "                           x_label=\"Column\",\n",
-    "                           lut_label= unit,\n",
-    "                           aspect=1.,\n",
-    "                           vmin=n_range[g_idx][0],\n",
-    "                           vmax=n_range[g_idx][1],\n",
-    "                           title=f\"RMS noise {g_name[g_idx]} - Cell {cell:02d}\")\n",
-    "\n",
-    "        fn0, ax_n0 = plt.subplots()\n",
-    "        res_n0 = histPlot(ax_n0, noise_map[..., cell, g_idx],\n",
-    "                          bins=100,\n",
-    "                          range=n_range[g_idx],\n",
-    "                          facecolor='b',                          \n",
-    "                          histotype='stepfilled')\n",
-    "  \n",
-    "        ax_n0.tick_params(axis='both',which='major',labelsize=15)\n",
-    "        ax_n0.set_title(f'Module noise distribution - Cell {cell:02d}', fontsize=15)\n",
-    "        ax_n0.set_xlabel(f'RMS noise {g_name[g_idx]} ' + unit, fontsize=15)\n",
-    "        #ax_n0.set_yscale('log')\n",
-    "        \n",
-    "        plt.show()\n",
-    "  \n",
-    "   "
+    "for mod in karabo_da:\n",
+    "    for g_idx in gains:\n",
+    "        for cell in range(0, memoryCells):\n",
+    "            f_o0 = heatmapPlot(\n",
+    "                np.swapaxes(offset_map[mod][..., cell, g_idx], 0, 1), \n",
+    "                y_label=\"Row\",\n",
+    "                x_label=\"Column\",\n",
+    "                lut_label=unit,\n",
+    "                aspect=1.,\n",
+    "                vmin=g_range[g_idx][0],\n",
+    "                vmax=g_range[g_idx][1],\n",
+    "                title=f'Pedestal {g_name[g_idx]} - Cell {cell:02d} - Module {mod}')\n",
+    "\n",
+    "            fo0, ax_o0 = plt.subplots()\n",
+    "            res_o0 = histPlot(\n",
+    "                ax_o0, offset_map[mod][..., cell, g_idx],\n",
+    "                bins=800,\n",
+    "                range=g_range[g_idx],\n",
+    "                facecolor='b',\n",
+    "                histotype='stepfilled',\n",
+    "            )\n",
+    "\n",
+    "            ax_o0.tick_params(axis='both',which='major',labelsize=15)\n",
+    "            ax_o0.set_title(\n",
+    "                f'Module pedestal distribution - Cell {cell:02d} - Module {mod}',\n",
+    "                fontsize=15)\n",
+    "            ax_o0.set_xlabel(f'Pedestal {g_name[g_idx]} {unit}',fontsize=15)\n",
+    "            ax_o0.set_yscale('log')\n",
+    "\n",
+    "            f_n0 = heatmapPlot(\n",
+    "                np.swapaxes(noise_map[mod][..., cell, g_idx], 0, 1),\n",
+    "                y_label=\"Row\",\n",
+    "                x_label=\"Column\",\n",
+    "                lut_label= unit,\n",
+    "                aspect=1.,\n",
+    "                vmin=n_range[g_idx][0],\n",
+    "                vmax=n_range[g_idx][1],\n",
+    "                title=f\"RMS noise {g_name[g_idx]} - Cell {cell:02d} - Module {mod}\",\n",
+    "            )\n",
+    "\n",
+    "            fn0, ax_n0 = plt.subplots()\n",
+    "            res_n0 = histPlot(\n",
+    "                ax_n0,\n",
+    "                noise_map[mod][..., cell, g_idx],\n",
+    "                bins=100,\n",
+    "                range=n_range[g_idx],\n",
+    "                facecolor='b',\n",
+    "                histotype='stepfilled',\n",
+    "            )\n",
+    "\n",
+    "            ax_n0.tick_params(axis='both',which='major', labelsize=15)\n",
+    "            ax_n0.set_title(\n",
+    "                f'Module noise distribution - Cell {cell:02d} - Module {mod}',\n",
+    "                fontsize=15)\n",
+    "            ax_n0.set_xlabel(\n",
+    "                f'RMS noise {g_name[g_idx]} ' + unit, fontsize=15)\n",
+    "            plt.show()"
    ]
   },
   {
@@ -417,6 +412,24 @@
     "print_bp_entry(BadPixels.OFFSET_NOISE_EVAL_ERROR)"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def eval_bpidx(d):\n",
+    "\n",
+    "    mdn = np.nanmedian(d, axis=(0, 1))[None, None, :, :]\n",
+    "    std = np.nanstd(d, axis=(0, 1))[None, None, :, :]\n",
+    "    idx = (\n",
+    "        (d > badpixel_threshold_sigma * std + mdn) |\n",
+    "        (d < (-badpixel_threshold_sigma) * std + mdn)\n",
+    "    )\n",
+    "\n",
+    "    return idx"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,
@@ -425,32 +438,39 @@
    },
    "outputs": [],
    "source": [
-    "bad_pixels_map = np.zeros(noise_map.shape, np.uint32)\n",
-    "def eval_bpidx(d):\n",
+    "bad_pixels_map = OrderedDict()\n",
     "\n",
-    "    mdn = np.nanmedian(d, axis=(0, 1))[None, None, :, :]\n",
-    "    std = np.nanstd(d, axis=(0, 1))[None, None, :, :]    \n",
-    "    idx = (d > badpixel_threshold_sigma*std+mdn) | (d < (-badpixel_threshold_sigma)*std+mdn)\n",
-    "        \n",
-    "    return idx\n",
+    "for mod in karabo_da:\n",
+    "\n",
+    "    bad_pixels_map[mod] = np.zeros(noise_map[mod].shape, np.uint32)\n",
+    "    offset_abs_threshold = np.array(offset_abs_threshold)\n",
+    "\n",
+    "    bad_pixels_map[mod][\n",
+    "        eval_bpidx(offset_map[mod])] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n",
     "\n",
-    "offset_abs_threshold = np.array(offset_abs_threshold)\n",
+    "    bad_pixels_map[mod][\n",
+    "        ~np.isfinite(offset_map[mod])] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
     "\n",
-    "bad_pixels_map[eval_bpidx(offset_map)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n",
-    "bad_pixels_map[~np.isfinite(offset_map)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
-    "bad_pixels_map[eval_bpidx(noise_map)] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
-    "bad_pixels_map[~np.isfinite(noise_map)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
-    "bad_pixels_map[(offset_map < offset_abs_threshold[0][None, None, None, :]) | (offset_map > offset_abs_threshold[1][None, None, None, :])] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n",
+    "    bad_pixels_map[mod][\n",
+    "        eval_bpidx(noise_map[mod])] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
+    "\n",
+    "    bad_pixels_map[mod][\n",
+    "        ~np.isfinite(noise_map[mod])] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
+    "\n",
+    "    bad_pixels_map[mod][\n",
+    "        (offset_map[mod] < offset_abs_threshold[0][None, None, None, :]) |\n",
+    "        (offset_map[mod] > offset_abs_threshold[1][None, None, None, :])] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value  # noqa\n",
     "\n",
     "for g_idx in gains:\n",
     "    for cell in range(memoryCells):\n",
-    "        bad_pixels = bad_pixels_map[:, :, cell, g_idx]\n",
-    "        fn_0 = heatmapPlot(np.swapaxes(bad_pixels, 0, 1), \n",
-    "                           y_label=\"Row\",\n",
-    "                           x_label=\"Column\",\n",
-    "                           lut_label=f\"Badpixels {g_name[g_idx]} [ADCu]\",\n",
-    "                           aspect=1.,\n",
-    "                           vmin=0, title=f'G{g_idx} Bad pixel map - Cell {cell:02d}')"
+    "        bad_pixels = bad_pixels_map[mod][:, :, cell, g_idx]\n",
+    "        fn_0 = heatmapPlot(\n",
+    "            np.swapaxes(bad_pixels, 0, 1),\n",
+    "            y_label=\"Row\",\n",
+    "            x_label=\"Column\",\n",
+    "            lut_label=f\"Badpixels {g_name[g_idx]} [ADCu]\",\n",
+    "            aspect=1.,\n",
+    "            vmin=0, title=f'G{g_idx} Bad pixel map - Cell {cell:02d} - Module {mod}')"
    ]
   },
   {
@@ -459,50 +479,20 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# TODO: this cell in the notebook is not designed to run for more than one module\n",
-    "# Constants need to be able to have constant for each module as for big detectors\n",
-    "constants = {'Offset': np.moveaxis(offset_map, 0, 1),\n",
-    "             'Noise': np.moveaxis(noise_map, 0, 1), \n",
-    "             'BadPixelsDark': np.moveaxis(bad_pixels_map, 0, 1)}\n",
-    "md = None\n",
-    "for key, const_data in constants.items():\n",
-    "\n",
-    "    const =  getattr(Constants.jungfrau, key)()\n",
-    "    const.data = const_data\n",
-    "\n",
-    "    # set the operating condition\n",
-    "    condition = Conditions.Dark.jungfrau(memory_cells=memoryCells, bias_voltage=bias_voltage,\n",
-    "                                         integration_time=integration_time)\n",
-    "    \n",
-    "    for parm in condition.parameters:\n",
-    "        if parm.name == \"Integration Time\":\n",
-    "            parm.lower_deviation = time_limits\n",
-    "            parm.upper_deviation = time_limits\n",
-    "\n",
-    "    # This should be used in case of running notebook \n",
-    "    # by a different method other than myMDC which already\n",
-    "    # sends CalCat info.\n",
-    "    # TODO: Set db_module to \"\" by default in the first cell\n",
-    "    if not db_module:\n",
-    "        db_module = get_pdu_from_db(karabo_id, karabo_da, const,\n",
-    "                                    condition, cal_db_interface,\n",
-    "                                    snapshot_at=creation_time)[0]\n",
-    "    if db_output:\n",
-    "        md = send_to_db(db_module, karabo_id, const, condition,\n",
-    "                        file_loc=file_loc, report_path=report,\n",
-    "                        cal_db_interface=cal_db_interface,\n",
-    "                        creation_time=creation_time,\n",
-    "                        timeout=cal_db_timeout)\n",
-    "    if local_output:\n",
-    "        md = save_const_to_h5(db_module, karabo_id, const, condition,\n",
-    "                              const.data, file_loc, report,\n",
-    "                              creation_time, out_folder)\n",
-    "        print(f\"Calibration constant {key} is stored locally at {out_folder}.\\n\")\n",
-    "        \n",
-    "print(\"Constants parameter conditions are:\\n\")\n",
-    "print(f\"• Bias voltage: {bias_voltage}\\n• Memory cells: {memoryCells}\\n\"\n",
-    "      f\"• Integration time: {integration_time}\\n\"\n",
-    "      f\"• Creation time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\\n\")"
+    "# set the operating condition\n",
+    "condition = Conditions.Dark.jungfrau(\n",
+    "    memory_cells=memoryCells,\n",
+    "    bias_voltage=bias_voltage,\n",
+    "    integration_time=integration_time,\n",
+    ")\n",
+    "\n",
+    "db_modules = get_pdu_from_db(\n",
+    "    karabo_id=karabo_id,\n",
+    "    karabo_da=karabo_da,\n",
+    "    constant=Constants.jungfrau.Offset(),\n",
+    "    condition=condition,\n",
+    "    cal_db_interface=cal_db_interface,\n",
+    "    snapshot_at=creation_time)[0]"
    ]
   },
   {
@@ -510,7 +500,58 @@
    "execution_count": null,
    "metadata": {},
    "outputs": [],
-   "source": []
+   "source": [
+    "for mod, db_mod in zip(karabo_da, db_modules):\n",
+    "    constants = {\n",
+    "        'Offset': np.moveaxis(offset_map[mod], 0, 1),\n",
+    "        'Noise': np.moveaxis(noise_map[mod], 0, 1),\n",
+    "        'BadPixelsDark': np.moveaxis(bad_pixels_map[mod], 0, 1),\n",
+    "    }\n",
+    "\n",
+    "    md = None\n",
+    "\n",
+    "    for key, const_data in constants.items():\n",
+    "\n",
+    "        const =  getattr(Constants.jungfrau, key)()\n",
+    "        const.data = const_data\n",
+    "\n",
+    "        for parm in condition.parameters:\n",
+    "            if parm.name == \"Integration Time\":\n",
+    "                parm.lower_deviation = time_limits\n",
+    "                parm.upper_deviation = time_limits\n",
+    "\n",
+    "        if db_output:\n",
+    "            md = send_to_db(\n",
+    "                db_module=db_mod,\n",
+    "                karabo_id=karabo_id,\n",
+    "                constant=const,\n",
+    "                condition=condition,\n",
+    "                file_loc=file_loc,\n",
+    "                report_path=report,\n",
+    "                cal_db_interface=cal_db_interface,\n",
+    "                creation_time=creation_time,\n",
+    "                timeout=cal_db_timeout,\n",
+    "            )\n",
+    "        if local_output:\n",
+    "            md = save_const_to_h5(\n",
+    "                db_module=db_mod,\n",
+    "                karabo_id=karabo_id,\n",
+    "                constant=const,\n",
+    "                condition=condition,\n",
+    "                data=const.data,\n",
+    "                file_loc=file_loc,\n",
+    "                report=report,\n",
+    "                creation_time=creation_time,\n",
+    "                out_folder=out_folder,\n",
+    "            )\n",
+    "            print(f\"Calibration constant {key} is stored locally at {out_folder}.\\n\")\n",
+    "\n",
+    "print(\"Constants parameter conditions are:\\n\")\n",
+    "print(\n",
+    "    f\"• Bias voltage: {bias_voltage}\\n• Memory cells: {memoryCells}\\n\"\n",
+    "    f\"• Integration time: {integration_time}\\n\"\n",
+    "    f\"• Creation time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\\n\")  # noqa"
+   ]
   }
  ],
  "metadata": {