diff --git a/cal_tools/cal_tools/dssclib.py b/cal_tools/cal_tools/dssclib.py
new file mode 100644
index 0000000000000000000000000000000000000000..7e17f2011471f185e39f38fedcfcfab0cb25523d
--- /dev/null
+++ b/cal_tools/cal_tools/dssclib.py
@@ -0,0 +1,144 @@
+import binascii
+import h5py
+from hashlib import blake2b
+import numpy as np
+import os
+import re
+import struct
+from typing import Dict, Optional
+
+
+def get_pulseid_checksum(fname, h5path, h5path_idx):
+    """generates hash value from pulse pattern (veto defined)."""
+    with h5py.File(fname, "r") as infile:
+        count = np.squeeze(infile[f"{h5path_idx}/count"])
+        first = np.squeeze(infile[f"{h5path_idx}/first"])
+        first_index = int(first[count != 0][0])
+        pulseids = infile[f"{h5path}/pulseId"][first_index:
+                                               int(first[count != 0][1])]
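+        # The 8-byte BLAKE2b digest is reinterpreted as a float64 so the
+        # checksum can be used as a numeric operating-condition value.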
+        bveto = blake2b(pulseids.data, digest_size=8)
+        pulseid_checksum = struct.unpack(
+            'd', binascii.unhexlify(bveto.hexdigest()))[0]
+        return pulseid_checksum
+
+
+def _extr_gainparam_conffilename(fileName: str) -> Optional[str]:
+    """extracts target gain from config filename, if provided."""
+    vals = re.search(r".*_TG(?P<TG>\d+.?\d+)", fileName)
+    if vals:
+        return vals.group('TG')
+
+
+def _get_gain_encoded_val(gainSettingsMap: Dict[str, int]) -> int:
+    """The description of the paramters one can find in:
+    https://docs.xfel.eu/share/page/site/dssc/documentlibrary
+    Documents> DSSC - Documentation> DSSC - ASIC Documentation.
+    """
+    # Keep every term uint64: mixing uint64 and int64 scalars makes
+    # numpy upcast the sum to float64.
+    result = (np.uint64(gainSettingsMap['csaFbCap']) +
+              np.uint64(gainSettingsMap['csaResistor'] << 8) +
+              np.uint64(gainSettingsMap['fcfEnCap'] << 16) +
+              np.uint64(gainSettingsMap['trimmed'] << 32))
+    return result
+
+
+def get_dssc_ctrl_data(in_folder, slow_data_pattern,
+                       slow_data_aggregators, run_number):
+    """Obtaining dssc specific slow data like:
+    operating frequency, target gain and encoded gain code from filename, etc.
+    """
+
+    # returned dictionaries, keyed by module name Q1M1 .. Q4M4
+    targetGainAll = {}
+    encodedGainAll = {}
+    operatingFreqAll = {}
+    for i in range(16):
+        qm = 'Q{}M{}'.format(i//4+1, i % 4+1)
+        targetGainAll[qm] = None
+        encodedGainAll[qm] = None
+        operatingFreqAll[qm] = None
+
+    ctrlDataFiles = {}
+    for quadrant, aggregator in enumerate(slow_data_aggregators):
+        quad_sd_pattern = slow_data_pattern.format("{:04d}".format(run_number),
+                                                   "{:02d}".format(aggregator))
+
+        f = os.path.join(in_folder, quad_sd_pattern)
+        if os.path.exists(f):
+            ctrlDataFiles[quadrant+1] = f
+
+    if len(ctrlDataFiles) == 0:
+        print("No Control Slow Data found!")
+        return targetGainAll, encodedGainAll, operatingFreqAll
+
+    with h5py.File(next(iter(ctrlDataFiles.values())), 'r') as ctrlf:
+        ctrlloc = ctrlf['/METADATA/dataSources/deviceId'][0]
+    ctrlloc = ctrlloc.decode("utf-8")
+    ctrlloc = ctrlloc[:ctrlloc.find('/')]
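+    # Keep only the control device-ID prefix (text before the first '/');
+    # it forms the root of the /RUN/{ctrlloc}/... keys read below.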
+
+    tGain = {}
+    encodedGain = {}
+    operatingFreqs = {}
+    for quadrant, file in ctrlDataFiles.items():
+        if len(file):
+            h5file = h5py.File(file, 'r')
+            epc_key = f'/RUN/{ctrlloc}/FPGA/PPT_Q{quadrant}/epcRegisterFilePath/value'
+            if epc_key not in h5file:
+                print(f"Slow control data file {file} is not usable")
+                continue
+            epcConfig = h5file[epc_key][0].decode("utf-8")
+            epcConfig = epcConfig[epcConfig.rfind('/') + 1:]
+
+            print(f"EPC configuration: {epcConfig}")
+
+            targGain = _extr_gainparam_conffilename(epcConfig)
+
+            tGain[quadrant] = float(targGain) if targGain is not None else 0.0
+            # 0.0 is default value for TG
+
+            gainSettingsMap = {}
+            for coarseParam in ['fcfEnCap', 'csaFbCap', 'csaResistor']:
+                gainSettingsMap[coarseParam] = int(
+                    h5file[f'/RUN/{ctrlloc}/FPGA/PPT_Q{quadrant}/gain/{coarseParam}/value'][0])
+
+            irampSettings = h5file[f'/RUN/{ctrlloc}/FPGA/PPT_Q{quadrant}/gain/irampFineTrm/value'][0]\
+                .decode("utf-8")
+
+            gainSettingsMap['trimmed'] = np.int64(
+                irampSettings == "Various")
+
+            encodedGain[quadrant] = _get_gain_encoded_val(gainSettingsMap)
+
+            opFreq = h5file[f'/RUN/{ctrlloc}/FPGA/PPT_Q{quadrant}/sequencer/cycleLength/value'][0]
+            # The Operating Frequency of the detector should be in MHz.
+            # Here the karabo operation mode is converted to acquisition rate:
+            # 22 corresponds to 4.5 MHz, 44 to 2.25 MHz, etc.
+            operatingFreqs[quadrant] = 4.5 * (22.0 / opFreq)
+        else:
+            print(f"no slow data for quadrant {quadrant} is found")
+
+    for allmap, quadmap in [(targetGainAll, tGain), (encodedGainAll, encodedGain), (operatingFreqAll, operatingFreqs)]:
+        for quadrant, value in quadmap.items():
+            for module in range(1, 5):
+                qm = f'Q{quadrant}M{module}'
+                allmap[qm] = value
+
+    return targetGainAll, encodedGainAll, operatingFreqAll
diff --git a/cal_tools/cal_tools/plotting.py b/cal_tools/cal_tools/plotting.py
index 7329e7b7eec613497c8943ba290cfb7771ed9a90..7eff8f3ed4be174a5c384ba791ffb6a33a14c7e6 100644
--- a/cal_tools/cal_tools/plotting.py
+++ b/cal_tools/cal_tools/plotting.py
@@ -46,10 +46,15 @@ def show_overview(d, cell_to_preview, gain_to_preview, out_folder=None, infix=No
                                         gain_to_preview + cf])
             else:
                 med = np.nanmedian(item[..., cell_to_preview])
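+            # Guard against a zero median (all-zero preview): with med == 0
+            # the bound-widening loop below would never terminate.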
+            medscale = med
+            if med == 0:
+                medscale = 0.1
 
             bound = 0.2
-            while (np.count_nonzero((item[..., cell_to_preview, gain_to_preview + cf] < med - np.abs(bound * med)) |  # noqa
-                                    (item[..., cell_to_preview, gain_to_preview + cf] > med + np.abs(bound * med))) /  # noqa
+            while (np.count_nonzero((item[..., cell_to_preview, gain_to_preview + cf] < med - np.abs(bound * medscale)) |  # noqa
+                                    (item[..., cell_to_preview, gain_to_preview + cf] > med + np.abs(bound * medscale))) /  # noqa
                    item[..., cell_to_preview, gain_to_preview + cf].size > 0.01):  # noqa
                 bound *= 2
 
@@ -69,10 +74,10 @@
                     # on the output report.
                     if im_prev.shape[0] > im_prev.shape[1]:
                         im_prev = np.moveaxis(item[..., cell_to_preview], 0, 1)
-                    vmax = med + np.abs(bound * med)
+                    vmax = med + np.abs(bound * medscale)
 
                 im = grid[i].imshow(im_prev, interpolation="nearest",
-                                    vmin=med - np.abs(bound * med),
+                                    vmin=med - np.abs(bound * medscale),
                                     vmax=vmax, aspect='auto')
 
             cb = grid.cbar_axes[i].colorbar(im)
diff --git a/notebooks/DSSC/Characterize_DSSC_Darks_NBC.ipynb b/notebooks/DSSC/Characterize_DSSC_Darks_NBC.ipynb
index 921904a5cb0f2cd7eb35931158e708453f2e2ae0..e4ec8fd7182d69959f998256ecc34556139a3984 100644
--- a/notebooks/DSSC/Characterize_DSSC_Darks_NBC.ipynb
+++ b/notebooks/DSSC/Characterize_DSSC_Darks_NBC.ipynb
@@ -25,11 +25,11 @@
    "outputs": [],
    "source": [
     "cluster_profile = \"noDB\" # The ipcluster profile to use\n",
-    "in_folder = \"/gpfs/exfel/exp/SCS/202030/p900125/raw\" # path to input data, required\n",
-    "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/DSSC\" # path to output to, required\n",
+    "in_folder = \"/gpfs/exfel/exp/SCS/202031/p900170/raw\" # path to input data, required\n",
+    "out_folder = \"/gpfs/exfel/data/scratch/samartse/data/DSSC\" # path to output to, required\n",
     "sequences = [0] # sequence files to evaluate.\n",
     "modules = [-1]  # modules to run for\n",
-    "run = 222 # run numbr in which data was recorded, required\n",
+    "run = 223 #run number in which data was recorded, required\n",
     "\n",
     "karabo_id = \"SCS_DET_DSSC1M-1\" # karabo karabo_id\n",
     "karabo_da = ['-1']  # a list of data aggregators names, Default [-1] for selecting all data aggregators\n",
@@ -37,6 +37,7 @@
     "path_template = 'RAW-R{:04d}-{}-S{:05d}.h5' # the template to use to access data\n",
     "h5path = '/INSTRUMENT/{}/DET/{}:xtdf/image' # path in the HDF5 file to images\n",
     "h5path_idx = '/INDEX/{}/DET/{}:xtdf/image' # path in the HDF5 file to images\n",
+    "slow_data_pattern = 'RAW-R{}-DA{}-S00000.h5'\n",
     "\n",
     "use_dir_creation_date = True # use the dir creation date for determining the creation time\n",
     "cal_db_interface = \"tcp://max-exfl016:8020\" # the database interface to use\n",
@@ -45,17 +46,20 @@
     "db_output = False # output constants to database\n",
     "\n",
     "mem_cells = 0 # number of memory cells used, set to 0 to automatically infer\n",
-    "bias_voltage = 300 # detector bias voltage\n",
+    "bias_voltage = 100 # detector bias voltage\n",
     "rawversion = 2 # RAW file format version\n",
     "\n",
     "thresholds_offset_sigma = 3. # thresholds in terms of n sigma noise for offset deduced bad pixels\n",
-    "thresholds_offset_hard = [4000, 8500] # thresholds in absolute ADU terms for offset deduced bad pixels\n",
+    "thresholds_offset_hard = [4, 125] # thresholds in absolute ADU terms for offset deduced bad pixels,\n",
+    "# minimal threshold at 4 is set at hardware level, DSSC full range 0-511\n",
     "\n",
-    "thresholds_noise_sigma = 5. # thresholds in terms of n sigma noise for offset deduced bad pixels\n",
-    "thresholds_noise_hard = [4, 20] # thresholds in absolute ADU terms for offset deduced bad pixels\n",
+    "thresholds_noise_sigma = 3. # thresholds in terms of n sigma noise for offset deduced bad pixels\n",
+    "thresholds_noise_hard = [0.001, 3] # thresholds in absolute ADU terms for offset deduced bad pixels\n",
+    "offset_numpy_algorithm = \"mean\"\n",
     "\n",
     "instrument = \"SCS\" # the instrument\n",
-    "high_res_badpix_3d = False # set this to True if you need high-resolution 3d bad pixel plots. Runtime: ~ 1h"
+    "high_res_badpix_3d = False # set this to True if you need high-resolution 3d bad pixel plots. Runtime: ~ 1h\n",
+    "slow_data_aggregators = [1,2,3,4] #quadrant/aggregator\n"
    ]
   },
   {
@@ -95,6 +99,9 @@
     "                             map_gain_stages, parse_runs,\n",
     "                             run_prop_seq_from_path, \n",
     "                             save_const_to_h5, send_to_db)\n",
+    "from cal_tools.dssclib import (get_dssc_ctrl_data,\n",
+    "                               get_pulseid_checksum)\n",
+    "\n",
     "from iCalibrationDB import Constants, Conditions, Detectors, Versions\n",
     "view = Client(profile=cluster_profile)[:]\n",
     "view.use_dill()\n",
@@ -176,7 +183,7 @@
     "os.makedirs(out_folder, exist_ok=True)\n",
     "gmf = map_gain_stages(in_folder, offset_runs, path_template, karabo_da, sequences)\n",
     "gain_mapped_files, total_sequences, total_file_size = gmf\n",
-    "print(f\"Will process a total of {total_sequences} file.\")"
+    "print(f\"Will process a total of {total_sequences} file.\")\n"
    ]
   },
   {
@@ -206,15 +213,13 @@
     "    import copy\n",
     "    import h5py\n",
     "    from cal_tools.enums import BadPixels\n",
-    "    \n",
-    "    from hashlib import blake2b\n",
-    "    import struct\n",
-    "    import binascii\n",
-    "    \n",
+    " \n",
     "    def get_num_cells(fname, h5path):\n",
     "        with h5py.File(fname, \"r\") as f:\n",
     "\n",
     "            cells = f[f\"{h5path}/cellId\"][()]\n",
+    "            if cells == []:\n",
+    "                return\n",
     "            maxcell = np.max(cells)\n",
     "            options = [100, 200, 400, 500, 600, 700, 800]\n",
     "            dists = np.array([(o-maxcell) for o in options])\n",
@@ -225,9 +230,11 @@
     "    \n",
     "    h5path = h5path.format(channel)\n",
     "    h5path_idx = h5path_idx.format(channel)\n",
-    "    \n",
     "    if cells == 0:\n",
     "        cells = get_num_cells(filename, h5path)\n",
+    "        if cells is None:\n",
+    "            raise ValueError(f\"ERROR! Empty image data file for channel {channel}\")\n",
+    "        \n",
     "\n",
     "    print(f\"Using {cells} memory cells\")\n",
     "    \n",
@@ -241,9 +248,6 @@
     "        first = np.squeeze(infile[f\"{h5path_idx}/first\"])\n",
     "        last_index = int(first[count != 0][-1]+count[count != 0][-1])\n",
     "        first_index = int(first[count != 0][0])\n",
-    "        pulseids = infile[f\"{h5path}/pulseId\"][first_index:int(first[count != 0][1])]\n",
-    "        bveto = blake2b(pulseids.data, digest_size=8)\n",
-    "        pulseid_checksum = struct.unpack('d', binascii.unhexlify(bveto.hexdigest()))[0]\n",
     "    else:\n",
     "        status = np.squeeze(infile[f\"{h5path_idx}/status\"])\n",
     "        if np.count_nonzero(status != 0) == 0:\n",
@@ -254,9 +258,9 @@
     "        first_index = int(first[status != 0][0])\n",
     "    im = np.array(infile[f\"{h5path}/data\"][first_index:last_index,...])    \n",
     "    cellIds = np.squeeze(infile[f\"{h5path}/cellId\"][first_index:last_index,...]) \n",
-    "    \n",
     "    infile.close()\n",
-    "\n",
+    "    \n",
+    "    pulseid_checksum = get_pulseid_checksum(filename, h5path, h5path_idx)\n",
     "    \n",
     "    im = im[:, 0, ...].astype(np.float32)\n",
     "        \n",
@@ -264,12 +268,15 @@
     "    im = np.rollaxis(im, 2, 1)\n",
     "\n",
     "    mcells = cells\n",
-    "    offset = np.zeros((im.shape[0], im.shape[1], mcells))\n",
-    "    noise = np.zeros((im.shape[0], im.shape[1], mcells))\n",
+    "    offset = np.zeros((im.shape[0], im.shape[1], mcells), dtype = np.float64)\n",
+    "    noise = np.zeros((im.shape[0], im.shape[1], mcells), dtype = np.float64)\n",
     "    \n",
     "    for cc in np.unique(cellIds[cellIds < mcells]):\n",
     "        cellidx = cellIds == cc\n",
-    "        offset[...,cc] = np.median(im[..., cellidx], axis=2)\n",
+    "        if offset_numpy_algorithm == \"mean\":\n",
+    "            offset[...,cc] = np.mean(im[..., cellidx], axis=2)\n",
+    "        else:\n",
+    "            offset[...,cc] = np.median(im[..., cellidx], axis=2)\n",
     "        noise[...,cc] = np.std(im[..., cellidx], axis=2)\n",
     "        \n",
     "        \n",
@@ -307,6 +314,11 @@
     "all_cells = []\n",
     "checksums = {}\n",
     "\n",
+    "tGain, encodedGain, operatingFreq = get_dssc_ctrl_data(in_folder + f\"/r{:04d}/\".format(offset_runs[\"high\"]),\n",
+    "                                                       slow_data_pattern,\n",
+    "                                                       slow_data_aggregators,\n",
+    "                                                       offset_runs[\"high\"])\n",
+    "\n",
     "for gain, mapped_files in gain_mapped_files.items():\n",
     "    inp = []\n",
     "    dones = []\n",
@@ -323,6 +335,8 @@
     "    p = partial(characterize_module, max_cells,\n",
     "               (thresholds_offset_hard, thresholds_offset_sigma,\n",
     "                thresholds_noise_hard, thresholds_noise_sigma), rawversion, karabo_id, h5path, h5path_idx)\n",
+    "    \n",
+    "    \n",
     "    results = list(map(p, inp))\n",
     "    \n",
     "    for ii, r in enumerate(results):\n",
@@ -367,7 +381,10 @@
     "    for const in clist:\n",
     "        condition = Conditions.Dark.DSSC(memory_cells=max_cells,\n",
     "                                         bias_voltage=bias_voltage,\n",
-    "                                         pulseid_checksum=checksums[qm])\n",
+    "                                         pulseid_checksum=checksums[qm],\n",
+    "                                         acquisition_rate=operatingFreq[qm], \n",
+    "                                         target_gain=tGain[qm],\n",
+    "                                         encoded_gain=encodedGain[qm])\n",
     "\n",
     "        data, mdata = get_from_db(device,\n",
     "                                  getattr(Constants.DSSC, const)(),\n",
@@ -407,8 +424,6 @@
     "    try:\n",
     "        res[qm] = {'Offset': offset_g[qm],\n",
     "                   'Noise': noise_g[qm],\n",
-    "                   #TODO: No badpixelsdark, yet.\n",
-    "                   #'BadPixelsDark': badpix_g[qm]    \n",
     "                   }\n",
     "    except Exception as e:\n",
     "        print(f\"Error: No constants for {qm}: {e}\")"
@@ -438,28 +453,35 @@
     "            dconst = getattr(Constants.DSSC, const)()\n",
     "            dconst.data = res[qm][const]\n",
     "\n",
+    "            opfreq = None if dont_use_pulseIds else operatingFreq[qm]\n",
+    "            targetgain = None if dont_use_pulseIds else tGain[qm]\n",
+    "            encodedgain =  None if dont_use_pulseIds else encodedGain[qm]\n",
     "            pidsum = None if dont_use_pulseIds else checksums[qm]\n",
+    "            \n",
     "            # set the operating condition\n",
     "            condition = Conditions.Dark.DSSC(memory_cells=max_cells,\n",
     "                                             bias_voltage=bias_voltage,\n",
-    "                                             pulseid_checksum=pidsum)\n",
+    "                                             pulseid_checksum=pidsum,\n",
+    "                                             acquisition_rate=opfreq, \n",
+    "                                             target_gain=targetgain,\n",
+    "                                             encoded_gain=encodedgain)\n",
+    "            \n",
     "            if db_output:\n",
     "                md = send_to_db(device, dconst, condition, file_loc, \n",
     "                                cal_db_interface, creation_time=creation_time, timeout=cal_db_timeout)\n",
-    "\n",
-    "            if local_output:\n",
-    "                # Don't save constant localy two times.\n",
-    "                if dont_use_pulseIds:\n",
-    "                    md = save_const_to_h5(device, dconst, condition,\n",
-    "                                          dconst.data, file_loc,\n",
-    "                                          creation_time, out_folder)\n",
-    "                    print(f\"Calibration constant {const} is stored locally.\\n\")\n",
+    "                \n",
+    "            if local_output and dont_use_pulseIds: # Don't save constant localy two times.\n",
+    "                md = save_const_to_h5(device, dconst, condition,\n",
+    "                                      dconst.data, file_loc,\n",
+    "                                      creation_time, out_folder)\n",
+    "                print(f\"Calibration constant {const} is stored locally.\\n\")\n",
     "    \n",
     "        if not dont_use_pulseIds:\n",
     "            print(\"Constants parameter conditions are:\\n\")\n",
     "            print(f\"• memory_cells: {max_cells}\\n• bias_voltage: {bias_voltage}\\n\"\n",
-    "                  f\"• pulseid_checksum: {pidsum}\\n\"\n",
-    "                  f\"• creation_time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\\n\")"
+    "                  f\"• pulseid_checksum: {pidsum}\\n• acquisition_rate: {opfreq}\\n\"\n",
+    "                  f\"• target_gain: {targetgain}\\n• encoded_gain: {encodedgain}\\n\"\n",
+    "                  f\"• creation_time: {creation_time}\\n\")\n"
    ]
   },
   {
@@ -623,20 +645,6 @@
     "    display(Markdown('### {} [ADU], good and bad pixels ###'.format(const)))\n",
     "    md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=header)))  "
    ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
   }
  ],
  "metadata": {
diff --git a/notebooks/DSSC/DSSC_Correct_and_Verify.ipynb b/notebooks/DSSC/DSSC_Correct_and_Verify.ipynb
index 2fa40b72e8115213bee926ad07484dbf68cc126e..41f9f5555881109ce67b84b27d7e5b480e9d2773 100644
--- a/notebooks/DSSC/DSSC_Correct_and_Verify.ipynb
+++ b/notebooks/DSSC/DSSC_Correct_and_Verify.ipynb
@@ -23,11 +23,11 @@
    "outputs": [],
    "source": [
     "cluster_profile = \"noDB\" # The ipcluster profile to use\n",
-    "in_folder = \"/gpfs/exfel/exp/CALLAB/202031/p900113/raw\" # path to input data, required\n",
-    "out_folder = \"/gpfs/exfel/data/scratch/karnem/test/DSSC\" # path to output to, required\n",
+    "in_folder = \"/gpfs/exfel/exp/SCS/202031/p900170/raw\" # path to input data, required\n",
+    "out_folder = \"/gpfs/exfel/data/scratch/samartse/test/DSSC\" # path to output to, required\n",
     "sequences = [-1] # sequence files to evaluate.\n",
     "modules = [-1] # modules to correct, set to -1 for all, range allowed\n",
-    "run = 9987 #runs to process, required\n",
+    "run = 229 #runs to process, required\n",
     "\n",
     "karabo_id = \"SCS_DET_DSSC1M-1\" # karabo karabo_id\n",
     "karabo_da = ['-1']  # a list of data aggregators names, Default [-1] for selecting all data aggregators\n",
@@ -35,6 +35,7 @@
     "path_template = 'RAW-R{:04d}-{}-S{:05d}.h5' # the template to use to access data\n",
     "h5path = 'INSTRUMENT/{}/DET/{}:xtdf/image' # path in the HDF5 file to images\n",
     "h5path_idx = '/INDEX/{}/DET/{}:xtdf/image' # path in the HDF5 file to images\n",
+    "slow_data_pattern = 'RAW-R{}-DA{}-S00000.h5'\n",
     "\n",
     "use_dir_creation_date = True # use the creation data of the input dir for database queries\n",
     "cal_db_interface = \"tcp://max-exfl016:8020#8025\" # the database interface to use\n",
@@ -42,16 +43,16 @@
     "\n",
     "mem_cells = 0 # number of memory cells used, set to 0 to automatically infer\n",
     "overwrite = True # set to True if existing data should be overwritten\n",
-    "max_pulses = 500 # maximum number of pulses per train\n",
+    "max_pulses = 800 # maximum number of pulses per train\n",
     "bias_voltage = 100 # detector bias voltage\n",
     "sequences_per_node = 1 # number of sequence files per cluster node if run as slurm job, set to 0 to not run SLURM parallel\n",
     "chunk_size_idim = 1  # chunking size of imaging dimension, adjust if user software is sensitive to this.\n",
     "mask_noisy_asic = 0.25 # set to a value other than 0 and below 1 to mask entire ADC if fraction of noisy pixels is above\n",
-    "offset_image = \"PP\" # last one\n",
     "mask_cold_asic = 0.25 # mask cold ASICS if number of pixels with negligable standard deviation is larger than this fraction\n",
     "noisy_pix_threshold = 1. # threshold above which ap pixel is considered noisy.\n",
     "geo_file = \"/gpfs/exfel/data/scratch/xcal/dssc_geo_june19.h5\" # detector geometry file\n",
     "dinstance = \"DSSC1M1\"\n",
+    "slow_data_aggregators = [1,2,3,4] #quadrant/aggregator\n",
     "\n",
     "def balance_sequences(in_folder, run, sequences, sequences_per_node, karabo_da):\n",
     "    from xfel_calibrate.calibrate import balance_sequences as bs\n",
@@ -86,14 +87,24 @@
     "view = Client(profile=cluster_profile)[:]\n",
     "view.use_dill()\n",
     "\n",
-    "from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions\n",
-    "from cal_tools.tools import (map_modules_from_folder, parse_runs, run_prop_seq_from_path, get_notebook_name,\n",
+    "from iCalibrationDB import (ConstantMetaData, Constants, Conditions,\\\n",
+    "                            Detectors, Versions)\n",
+    "from cal_tools.tools import (map_modules_from_folder, parse_runs,\\\n",
+    "                             run_prop_seq_from_path, get_notebook_name,\n",
     "                                 get_dir_creation_date, get_constant_from_db)\n",
     "\n",
-    "from dateutil import parser\n",
-    "from datetime import timedelta\n",
-    "\n",
+    "from cal_tools.dssclib import (get_dssc_ctrl_data, get_pulseid_checksum)\n",
     "\n",
+    "from dateutil import parser\n",
+    "from datetime import timedelta"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
     "creation_time = None\n",
     "if use_dir_creation_date:\n",
     "    creation_time = get_dir_creation_date(in_folder, run)\n",
@@ -130,81 +141,7 @@
     "import warnings\n",
     "warnings.filterwarnings('ignore')\n",
     "\n",
-    "print(f\"Detector in use is {karabo_id}\")    \n",
-    "\n",
-    "if offset_image.upper() != \"PP\":\n",
-    "    offset_image = int(offset_image)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "ExecuteTime": {
-     "end_time": "2019-02-21T11:30:07.263445Z",
-     "start_time": "2019-02-21T11:30:07.217070Z"
-    }
-   },
-   "outputs": [],
-   "source": [
-    "def combine_stack(d, sdim):\n",
-    "    combined = np.zeros((sdim, 2048,2048))\n",
-    "    combined[...] = np.nan\n",
-    "    d = np.moveaxis(d, 2, 3)\n",
-    "    dy = 0\n",
-    "    quad_pos = [\n",
-    "        (0, 145),\n",
-    "        (-5, 25),\n",
-    "        (130, 15),\n",
-    "        (130, 140),\n",
-    "    ]\n",
-    "    \n",
-    "    px = 0.236\n",
-    "    py = 0.204\n",
-    "    with h5py.File(geo_file, \"r\") as gf:\n",
-    "        \n",
-    "        for i in range(16):\n",
-    "            t1 = gf[\"Q{}M{}\"]\n",
-    "\n",
-    "            try:\n",
-    "                if i < 8:\n",
-    "                    dx = -512\n",
-    "                    if i > 3:\n",
-    "                        dx -= 25\n",
-    "                    mx = 1\n",
-    "                    my = i % 8\n",
-    "                    combined[:, my*128+dy:(my+1)*128+dy,\n",
-    "                             mx*512-dx:(mx+1)*512-dx] = np.rollaxis(d[i],2,1)[:,:,::-1]\n",
-    "                    dy += 30\n",
-    "                    if i == 3:\n",
-    "                        dy += 30\n",
-    "\n",
-    "                elif i < 12:\n",
-    "                    dx = 80 - 50\n",
-    "                    if i == 8:\n",
-    "                        dy = 4*30 + 30 +50 -30\n",
-    "\n",
-    "                    mx = 1\n",
-    "                    my = i % 8 +4\n",
-    "                    combined[:, my*128+dy:(my+1)*128+dy,\n",
-    "                             mx*512-dx:(mx+1)*512-dx] = np.rollaxis(d[i],2,1)[:,::-1,:]\n",
-    "                    dy += 30\n",
-    "                else:\n",
-    "                    dx = 100 - 50\n",
-    "                    if i == 11:\n",
-    "                        dy = 20\n",
-    "\n",
-    "                    mx = 1\n",
-    "                    my = i - 14\n",
-    "\n",
-    "                    combined[:, my*128+dy:(my+1)*128+dy,\n",
-    "                             mx*512-dx:(mx+1)*512-dx] = np.rollaxis(d[i],2,1)[:,::-1,:]\n",
-    "                    dy += 30\n",
-    "            except:\n",
-    "                continue\n",
-    "\n",
-    "        \n",
-    "    return combined"
+    "print(f\"Detector in use is {karabo_id}\")"
    ]
   },
   {
@@ -279,22 +216,23 @@
    "source": [
     "import copy\n",
     "from functools import partial\n",
-    "def correct_module(total_sequences, sequences_qm, karabo_id, dinstance, offset_image,\n",
-    "                   mask_noisy_asic, mask_cold_asic, noisy_pix_threshold, chunksize,\n",
-    "                   mem_cells, bias_voltage, cal_db_timeout, creation_time, cal_db_interface,\n",
-    "                   h5path, h5path_idx, inp):\n",
+    "def correct_module(total_sequences, sequences_qm, karabo_id, dinstance, mask_noisy_asic, \n",
+    "                   mask_cold_asic, noisy_pix_threshold, chunksize, mem_cells, bias_voltage,\n",
+    "                   cal_db_timeout, creation_time, cal_db_interface, h5path, h5path_idx, inp):\n",
+    "   \n",
     "    import numpy as np\n",
     "    import copy\n",
     "    import h5py\n",
+    "    from cal_tools.dssclib import (get_dssc_ctrl_data, get_pulseid_checksum)\n",
     "    from cal_tools.enums import BadPixels\n",
-    "    from cal_tools.tools import get_dir_creation_date, get_constant_from_db_and_time, get_random_db_interface\n",
+    "    from cal_tools.tools import get_constant_from_db_and_time\n",
     "    from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions\n",
     "    \n",
     "    from hashlib import blake2b\n",
     "    import struct\n",
     "    import binascii\n",
     "    \n",
-    "    filename, filename_out, channel, qm = inp\n",
+    "    filename, filename_out, channel, qm, conditions = inp\n",
     "    \n",
     "    # DSSC correction requires path without the leading \"/\"\n",
     "    if h5path[0] == '/':\n",
@@ -322,27 +260,18 @@
     "            dists[dists<0] = 10000 # assure to always go higher\n",
     "            return options[np.argmin(dists)]\n",
     "        \n",
-    "    def get_checksum(fname, h5path, h5path_idx):\n",
-    "        with h5py.File(fname, \"r\") as infile:\n",
-    "            count = np.squeeze(infile[f\"{h5path_idx}/count\"])\n",
-    "            first = np.squeeze(infile[f\"{h5path_idx}/first\"])\n",
-    "            last_index = int(first[count != 0][-1]+count[count != 0][-1])\n",
-    "            first_index = int(first[count != 0][0])\n",
-    "            pulseids = infile[f\"{h5path}/pulseId\"][first_index:int(first[count != 0][1])]\n",
-    "            bveto = blake2b(pulseids.data, digest_size=8)\n",
-    "            pulseid_checksum = struct.unpack('d', binascii.unhexlify(bveto.hexdigest()))[0]\n",
-    "            return pulseid_checksum\n",
-    "    \n",
     "    if mem_cells == 0:\n",
     "        mem_cells = get_num_cells(filename, h5path)\n",
     "        \n",
-    "    pulseid_checksum = get_checksum(filename, h5path, h5path_idx)\n",
+    "    pulseid_checksum = get_pulseid_checksum(filename, h5path, h5path_idx)\n",
     "        \n",
     "    print(f\"Memcells: {mem_cells}\")\n",
     "    \n",
-    "    condition =  Conditions.Dark.DSSC(bias_voltage=bias_voltage, memory_cells=mem_cells,\n",
-    "                                      pulseid_checksum=pulseid_checksum)\n",
-    "    \n",
+    "    condition =  Conditions.Dark.DSSC(bias_voltage=bias_voltage, memory_cells=mem_cells,\\\n",
+    "                                      pulseid_checksum=pulseid_checksum,\\\n",
+    "                                      acquisition_rate=conditions['acquisition_rate'],\\\n",
+    "                                      target_gain=conditions['target_gain'],\\\n",
+    "                                      encoded_gain=conditions['encoded_gain'])\n",
     "    \n",
     "    detinst = getattr(Detectors, dinstance)\n",
     "    device = getattr(detinst, qm)\n",
@@ -353,18 +282,16 @@
     "                                                  Constants.DSSC.Offset(),\n",
     "                                                  condition,\n",
     "                                                  None,\n",
-    "                                                  get_random_db_interface(cal_db_interface),\n",
+    "                                                  cal_db_interface,\n",
     "                                                  creation_time=creation_time,\n",
     "                                                  timeout=cal_db_timeout)\n",
+    "    \n",
     "    if offset is not None:\n",
     "        offset = np.moveaxis(np.moveaxis(offset[...], 2, 0), 2, 1)\n",
     "    else:\n",
     "        offset_not_found = True\n",
-    "        offset = np.zeros((x, y, mem_cells))\n",
-    "        offset = np.moveaxis(np.moveaxis(offset[...], 2, 0), 2, 1)\n",
     "        print(\"No offset found in the database\")\n",
     "    \n",
-    "    \n",
     "    def copy_and_sanitize_non_cal_data(infile, outfile):\n",
     "        # these are touched in the correct function, do not copy them here\n",
     "        dont_copy = [\"data\"]\n",
@@ -412,21 +339,19 @@
     "                for train in range(first_arr.size):\n",
     "                    first = first_arr[train]\n",
     "                    last = last_arr[train]\n",
+    "                    if first == last:\n",
+    "                        continue\n",
     "                    data = np.squeeze(infile[f\"{h5path}/data\"][first:last, ...].astype(np.float32))\n",
     "                    cellId = np.squeeze(infile[f\"{h5path}/cellId\"][first:last, ...])\n",
-    "                    pulseId = np.squeeze(infile[f\"{h5path}/pulseId\"][first:last, ...])\n",
-    "\n",
-    "                    if offset_image != \"PP\" and offset_not_found:\n",
-    "                        data -= data[offset_image, ...]\n",
-    "                    else:\n",
+    "                    pulseId = np.squeeze(infile[f\"{h5path}/pulseId\"][first:last, ...])                   \n",
+    "                    if not offset_not_found:\n",
     "                        data[...] -= offset[cellId,...]\n",
-    "\n",
-    "                    if train == 0:\n",
+    "                        \n",
+    "                    if hists_signal_low is None:\n",
     "                        pulseId = np.repeat(pulseId[:, None], data.shape[1], axis=1)\n",
     "                        pulseId = np.repeat(pulseId[:,:,None], data.shape[2], axis=2)\n",
     "                        bins = (55, int(pulseId.max()))\n",
     "                        rnge = [[-5, 50], [0, int(pulseId.max())]]\n",
-    "\n",
     "                        hists_signal_low, low_edges, pulse_edges = np.histogram2d(data.flatten(),\n",
     "                                                                                  pulseId.flatten(),\n",
     "                                                                                  bins=bins,\n",
@@ -435,9 +360,9 @@
     "                        hists_signal_high, high_edges, _ = np.histogram2d(data.flatten(),\n",
     "                                                                          pulseId.flatten(),\n",
     "                                                                          bins=bins,\n",
-    "                                                                          range=rnge)                        \n",
-    "                    # data[data < 0] = 0\n",
+    "                                                                          range=rnge)\n",
     "                    ddset[first:last, ...] = data\n",
+    "                \n",
     "                # find static and noisy values in dark images\n",
     "                data = infile[f\"{h5path}/data\"][last, ...].astype(np.float32)\n",
     "                bpix = np.zeros(oshape[1:], np.uint32)\n",
@@ -457,13 +382,6 @@
     "                            if count_cold/(64*64) > mask_cold_asic:\n",
     "                                bpix[i*64:(i+1)*64, j*64:(j+1)*64] = BadPixels.ASIC_STD_BELOW_NOISE.value\n",
     "\n",
-    "# This was removed because last element in last is -1 and that produce errors\n",
-    "# Also because mdset is not clear why is it needed.\n",
-    "                \n",
-    "#                 for train in range(first_arr.size):\n",
-    "#                     first = first_arr[train]\n",
-    "#                     last = last_arr[train]\n",
-    "#                     mdset[first:last, ...] = np.repeat(bpix[None,...], last-first, axis=0)\n",
     "    except Exception as e:\n",
     "        print(e)\n",
     "        success = False\n",
@@ -476,7 +394,7 @@
     "    return (hists_signal_low, hists_signal_high, low_edges, high_edges, pulse_edges, when, qm, err)\n",
     "    \n",
     "done = False\n",
-    "first_files = []\n",
+    "first_files = {}\n",
     "inp = []\n",
     "left = total_sequences\n",
     "\n",
@@ -485,12 +403,16 @@
     "\n",
     "low_edges, high_edges, pulse_edges = None, None, None\n",
     "\n",
+    "tGain, encodedGain, operatingFreq = get_dssc_ctrl_data(in_folder\\\n",
+    "                                + \"/r{:04d}/\".format(run),\\\n",
+    "                                slow_data_pattern,slow_data_aggregators, run)\n",
+    "\n",
     "whens = []\n",
     "qms = []\n",
     "Errors = []\n",
     "while not done:\n",
-    "    \n",
     "    dones = []\n",
+    "    # TODO: add adaptive number of modules\n",
     "    for i in range(16):\n",
     "        qm = \"Q{}M{}\".format(i//4 +1, i % 4 + 1)\n",
     "\n",
@@ -505,15 +427,20 @@
     "            print(f\"Skipping {qm}\")\n",
     "            continue\n",
     "        fout = os.path.abspath(\"{}/{}\".format(out_folder, (os.path.split(fname_in)[-1]).replace(\"RAW\", \"CORR\")))\n",
-    "        first_files.append((i, fname_in, fout))\n",
-    "        inp.append((fname_in, fout, i,  qm))\n",
-    "\n",
+    "        \n",
+    "        first_files[i] = (fname_in, fout)\n",
+    "        conditions = {}\n",
+    "        conditions['acquisition_rate'] = operatingFreq[qm]\n",
+    "        conditions['target_gain'] = tGain[qm]\n",
+    "        conditions['encoded_gain'] = encodedGain[qm]\n",
+    "        inp.append((fname_in, fout, i,  qm, conditions))\n",
+    "        \n",
     "    if len(inp) >= min(MAX_PAR, left):\n",
     "        print(f\"Running {len(inp)} tasks parallel\")\n",
     "        p = partial(correct_module, total_sequences, sequences_qm,\n",
-    "                    karabo_id, dinstance, offset_image, mask_noisy_asic,\n",
-    "                    mask_cold_asic, noisy_pix_threshold, chunk_size_idim,\n",
-    "                    mem_cells, bias_voltage, cal_db_timeout, creation_time, cal_db_interface,\n",
+    "                    karabo_id, dinstance, mask_noisy_asic, mask_cold_asic,\n",
+    "                    noisy_pix_threshold, chunk_size_idim, mem_cells,\n",
+    "                    bias_voltage, cal_db_timeout, creation_time, cal_db_interface,\n",
     "                    h5path, h5path_idx)\n",
     "\n",
     "        r = view.map_sync(p, inp)\n",
@@ -524,7 +451,7 @@
     "        \n",
     "        for rr in r:\n",
     "            if rr is not None:\n",
-    "                hl, hh, low_edges, high_edges, pulse_edges, when, qm, err = rr  \n",
+    "                hl, hh, low_edges, high_edges, pulse_edges, when, qm, err = rr\n",
     "                whens.append(when)\n",
     "                qms.append(qm)\n",
     "                Errors.append(err)\n",
@@ -656,9 +583,9 @@
     "mask = []\n",
     "pulse_ids = []\n",
     "train_ids = [] \n",
-    "for ff in first_files:\n",
+    "for channel, ff in first_files.items():\n",
     "    try:\n",
-    "        channel, raw_file, corr_file = ff\n",
+    "        raw_file, corr_file = ff\n",
     "        data_path = h5path.format(channel)\n",
     "        index_path = h5path_idx.format(channel)\n",
     "        try:\n",
@@ -673,16 +600,16 @@
     "            else:\n",
     "                plt_im = d.shape[0]\n",
     "            last_idx = first_idx + plt_im\n",
-    "            raw.append(raw_d[first_idx:last_idx,0,...])\n",
+    "            raw.append((channel,raw_d[first_idx:last_idx,0,...]))\n",
     "        finally:\n",
     "            infile.close()\n",
     "        \n",
     "        infile = h5py.File(corr_file, \"r\")\n",
     "        try:\n",
-    "            corrected.append(np.array(infile[f\"{data_path}/data\"][first_idx:last_idx,...]))\n",
-    "            mask.append(np.array(infile[f\"{data_path}/mask\"][first_idx:last_idx,...]))\n",
-    "            pulse_ids.append(np.squeeze(infile[f\"{data_path}/pulseId\"][first_idx:last_idx,...]))\n",
-    "            train_ids.append(np.squeeze(infile[f\"{data_path}/trainId\"][first_idx:last_idx,...]))\n",
+    "            corrected.append((channel, np.array(infile[f\"{data_path}/data\"][first_idx:last_idx,...])))\n",
+    "            mask.append((channel, np.array(infile[f\"{data_path}/mask\"][first_idx:last_idx,...])))\n",
+    "            pulse_ids.append((channel, np.squeeze(infile[f\"{data_path}/pulseId\"][first_idx:last_idx,...])))\n",
+    "            train_ids.append((channel, np.squeeze(infile[f\"{data_path}/trainId\"][first_idx:last_idx,...])))\n",
     "        finally:\n",
     "            infile.close()\n",
     "        \n",
@@ -712,21 +639,22 @@
     "    px = 0.236\n",
     "    py = 0.204\n",
     "    with h5py.File(geo_file, \"r\") as gf:\n",
-    "        \n",
+    "        # TODO: refactor to -> for ch, f in d:\n",
     "        for i in range(len(d)):\n",
-    "            mi = i\n",
     "            \n",
-    "            mi = 3-(i%4)\n",
-    "            mp = gf[\"Q{}/M{}/Position\".format(i//4+1, mi%4+1)][()]\n",
-    "            t1 = gf[\"Q{}/M{}/T01/Position\".format(i//4+1, i%4+1)][()]\n",
-    "            t2 = gf[\"Q{}/M{}/T02/Position\".format(i//4+1, i%4+1)][()]\n",
-    "            if i//4 < 2:\n",
+    "            ch = d[i][0]\n",
+    "          \n",
+    "            mi = 3-(ch%4)\n",
+    "            mp = gf[\"Q{}/M{}/Position\".format(ch//4+1, mi%4+1)][()]\n",
+    "            t1 = gf[\"Q{}/M{}/T01/Position\".format(ch//4+1, ch%4+1)][()]\n",
+    "            t2 = gf[\"Q{}/M{}/T02/Position\".format(ch//4+1, ch%4+1)][()]\n",
+    "            if ch//4 < 2:\n",
     "                t1, t2 = t2, t1\n",
     "            \n",
-    "            if i // 4 == 0 or i // 4 == 1:\n",
-    "                td = d[i][:,::-1,:]\n",
+    "            if ch // 4 == 0 or ch // 4 == 1:\n",
+    "                td = d[i][1][:,::-1,:]\n",
     "            else:\n",
-    "                td = d[i][:,:,::-1]\n",
+    "                td = d[i][1][:,:,::-1]\n",
     "            \n",
     "            t1d = td[:,:,:256]\n",
     "            t2d = td[:,:,256:]\n",
@@ -743,9 +671,7 @@
     "            combined[:,y0t1:y0t1+128,x0t1:x0t1+256] = t1d\n",
     "            combined[:,y0t2:y0t2+128,x0t2:x0t2+256] = t2d\n",
     "\n",
-    "    return combined\n",
-    "\n",
-    "combined = combine_stack(corrected, last_idx-first_idx)"
+    "    return combined"
    ]
   },
   {