diff --git a/cal_tools/cal_tools/plotting.py b/cal_tools/cal_tools/plotting.py
index ca8580fcd218a5287d997eb0844f3399b2fa459f..f265a3e5b0809038e01f81e873d80aff7f530525 100644
--- a/cal_tools/cal_tools/plotting.py
+++ b/cal_tools/cal_tools/plotting.py
@@ -5,7 +5,7 @@ import numpy as np
 
 
 def show_overview(d, cell_to_preview, gain_to_preview, out_folder=None, infix=None):
-    
+
     for module, data in d.items():
         fig = plt.figure(figsize=(20,20))
         grid = AxesGrid(fig, 111,
@@ -54,9 +54,6 @@ def show_overview(d, cell_to_preview, gain_to_preview, out_folder=None, infix=No
             fig.savefig("{}/dark_analysis_{}_module_{}.png".format(out_folder,
                                                                    infix,
                                                                    module))
-            
-
-
 
 
 def rebin(a, *args):
@@ -107,22 +104,27 @@ def plot_badpix_3d(data, definitions, title=None, rebin_fac=2, azim=22.5):
     if title:
         t = ax.set_title(title)
 
-        
 
 from IPython.display import HTML, display, Markdown, Latex
 import tabulate
 
 
-def create_constant_overview(constant, name, cells, vmin, vmax, entries=3,
+def create_constant_overview(constant, name, cells, vmin=None, vmax=None, entries=3,
                             out_folder=None, infix=None):
     gmap = {0: 'High', 1: 'Medium', 2: 'Low'}
     for g in range(entries):
         fig = plt.figure(figsize=(10, 5))
         ax = fig.add_subplot(111)
         table = []
-        
+
         for qm in constant.keys():
-            d = constant[qm][...,g]
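+            # constants that carry a gain axis (e.g. AGIPD) are indexed with the current gain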
+            if len(constant[qm].shape) == 4:
+                d = constant[qm][..., g]
+            else:
+                # This case was introduced for the DSSC dark,
+                # which uses this function but has a constant
+                # of shape (x, y, z), unlike AGIPD.
+                d = constant[qm]
             #print("{} {}, gain {:0.2f}: mean: {:0.2f}, median: {:0.2f}, std: {:0.2f}".format(name, qm, g,
             #                                                          np.nanmean(d),
             #                                                          np.nanmedian(d),
@@ -132,7 +134,8 @@ def create_constant_overview(constant, name, cells, vmin, vmax, entries=3,
         ax.set_xlabel("Memory cell")
         ax.set_ylabel(name)
         ax.set_title("{} Gain Median per Cell".format(gmap[g]))
-        ax.set_ylim(vmin, vmax)
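+        # only apply explicit y-limits when both bounds are given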
+        if vmin is not None and vmax is not None:
+            ax.set_ylim(vmin, vmax)
         #if out_folder and infix:
         #    fig.savefig("{}/dark_analysis_{}_{}_per_cell_gain{}.png".format(out_folder,
         #                                                                    infix,
diff --git a/notebooks/DSSC/Characterize_DSSC_Darks_NBC.ipynb b/notebooks/DSSC/Characterize_DSSC_Darks_NBC.ipynb
index f08ef2fad6e21961ce4cfbd1a44e61e05a63ff6b..e867c8192ef55d0214f8bad0431bd3b96980b6e0 100644
--- a/notebooks/DSSC/Characterize_DSSC_Darks_NBC.ipynb
+++ b/notebooks/DSSC/Characterize_DSSC_Darks_NBC.ipynb
@@ -8,7 +8,7 @@
     "\n",
     "Author: S. Hauf, Version: 0.1\n",
     "\n",
-    "The following code analyzes a set of dark images taken with the AGIPD detector to deduce detector offsets and noise. Data for the detector's three gain stages needs to be present, separated into separate runs.\n",
+    "The following code analyzes a set of dark images taken with the DSSC detector to deduce detector offsets and noise. Data for the detector is presented in one run and don't acquire multiple gain stages.\n",
     "\n",
     "The notebook explicitely does what pyDetLib provides in its offset calculation method for streaming data."
    ]
@@ -20,17 +20,16 @@
     "ExecuteTime": {
      "end_time": "2019-02-20T12:42:51.255184Z",
      "start_time": "2019-02-20T12:42:51.225500Z"
-    },
-    "collapsed": true
+    }
    },
    "outputs": [],
    "source": [
     "cluster_profile = \"noDB\" # The ipcluster profile to use\n",
-    "in_folder = \"/gpfs/exfel/exp/SCS/201930/p900079/raw\" # path to input data, required\n",
-    "out_folder = \"/gpfs/exfel/data/scratch/haufs/test/\" # path to output to, required\n",
+    "in_folder = \"/gpfs/exfel/exp/SCS/201931/p900095/raw\" # path to input data, required\n",
+    "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/DSSC\" # path to output to, required\n",
     "sequences = [0] # sequence files to evaluate.\n",
     "\n",
-    "run = 20 # run number in which data was recorded, required\n",
+    "run = 1497 # run number in which data was recorded, required\n",
     "\n",
     "mem_cells = 0 # number of memory cells used, set to 0 to automatically infer\n",
     "local_output = False # output constants locally\n",
@@ -38,7 +37,7 @@
     "bias_voltage = 300 # detector bias voltage\n",
     "cal_db_interface = \"tcp://max-exfl016:8020\" # the database interface to use\n",
     "rawversion = 2 # RAW file format version\n",
-    "dont_use_dir_date = False # don't use the dir creation date for determining the creation time\n",
+    "dont_use_dir_date = True # don't use the dir creation date for determining the creation time\n",
     "\n",
     "thresholds_offset_sigma = 3. # thresholds in terms of n sigma noise for offset deduced bad pixels\n",
     "thresholds_offset_hard = [4000, 8500] # thresholds in absolute ADU terms for offset deduced bad pixels\n",
@@ -48,7 +47,7 @@
     "\n",
     "instrument = \"SCS\" # the instrument\n",
     "high_res_badpix_3d = False # set this to True if you need high-resolution 3d bad pixel plots. Runtime: ~ 1h\n",
-    "modules = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15]  # module to run for"
+    "modules = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15]  # modules to run for"
    ]
   },
   {
@@ -145,7 +144,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "The following lines will create a queue of files which will the be executed module-parallel. Distiguishing between different gains."
+    "The following lines will create a queue of files which will the be executed module-parallel. Distinguishing between different gains."
    ]
   },
   {
@@ -165,7 +164,6 @@
     "\n",
     "gmf = gain_map_files(in_folder, offset_runs, sequences, DET_FILE_INSET, QUADRANTS, MODULES_PER_QUAD)\n",
     "gain_mapped_files, total_sequences, total_file_size = gmf\n",
-    "\n",
     "print(\"Will process at total of {} sequences: {:0.2f} GB of data.\".format(total_sequences, total_file_size))"
    ]
   },
@@ -197,6 +195,10 @@
     "    import h5py\n",
     "    from cal_tools.enums import BadPixels\n",
     "    \n",
+    "    from hashlib import blake2b\n",
+    "    import struct\n",
+    "    import binascii\n",
+    "    \n",
     "    def get_num_cells(fname, loc, module):\n",
     "        with h5py.File(fname, \"r\") as f:\n",
     "\n",
@@ -212,7 +214,9 @@
     "\n",
     "    if cells == 0:\n",
     "        cells = get_num_cells(filename, loc, channel)\n",
-    "    \n",
+    "\n",
+    "    pulseid_checksum = None\n",
+    "\n",
     "    thresholds_offset_hard, thresholds_offset_sigma, thresholds_noise_hard, thresholds_noise_sigma = bp_thresh \n",
     "\n",
     "    infile = h5py.File(filename, \"r\", driver=\"core\")\n",
@@ -221,6 +225,9 @@
     "        first = np.squeeze(infile[\"/INDEX/{}/DET/{}CH0:xtdf/image/first\".format(loc, channel)])\n",
     "        last_index = int(first[count != 0][-1]+count[count != 0][-1])\n",
     "        first_index = int(first[count != 0][0])\n",
+    "        pulseids = infile[\"INSTRUMENT/{}/DET/{}CH0:xtdf/image/pulseId\".format(loc, channel)][first_index:int(first[count != 0][1])]\n",
+    "        bveto = blake2b(pulseids.data, digest_size=8)\n",
+    "        pulseid_checksum = struct.unpack('d', binascii.unhexlify(bveto.hexdigest()))[0]\n",
     "    else:\n",
     "        status = np.squeeze(infile[\"/INDEX/{}/DET/{}CH0:xtdf/image/status\".format(loc, channel)])\n",
     "        if np.count_nonzero(status != 0) == 0:\n",
@@ -271,7 +278,7 @@
     "    bp[~np.isfinite(noise)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
     "\n",
     "\n",
-    "    return offset, noise, bp, cells\n",
+    "    return offset, noise, bp, cells, pulseid_checksum\n",
     "        \n",
     "        \n",
     "offset_g = OrderedDict()\n",
@@ -282,6 +289,7 @@
     "\n",
     "start = datetime.now()\n",
     "all_cells = []\n",
+    "checksums = {}\n",
     "\n",
     "for gain, mapped_files in gain_mapped_files.items():\n",
     "    \n",
@@ -304,7 +312,7 @@
     "    \n",
     "    for ii, r in enumerate(results):\n",
     "        i = modules[ii]\n",
-    "        offset, noise,  bp, thiscell = r\n",
+    "        offset, noise,  bp, thiscell, pulseid_checksum = r\n",
     "        all_cells.append(thiscell)\n",
     "        qm = \"Q{}M{}\".format(i//4 +1, i % 4 + 1)\n",
     "        if qm not in offset_g:\n",
@@ -312,7 +320,8 @@
     "            noise_g[qm] = np.zeros_like(offset_g[qm])\n",
     "            \n",
     "            badpix_g[qm] = np.zeros_like(offset_g[qm], np.uint32)\n",
-    "        \n",
+    "            checksums[qm] = pulseid_checksum\n",
+    "            \n",
     "        offset_g[qm][...] = offset\n",
     "        noise_g[qm][...] = noise\n",
     "        badpix_g[qm][...] = bp\n",
@@ -334,8 +343,7 @@
     "ExecuteTime": {
      "end_time": "2018-12-06T09:38:18.234582Z",
      "start_time": "2018-12-06T09:38:18.222838Z"
-    },
-    "collapsed": true
+    }
    },
    "outputs": [],
    "source": [
@@ -364,66 +372,72 @@
     "ExecuteTime": {
      "end_time": "2018-12-06T09:49:32.449330Z",
      "start_time": "2018-12-06T09:49:20.231607Z"
-    },
-    "collapsed": true
+    }
    },
    "outputs": [],
    "source": [
     "if db_output:\n",
-    "    for qm in offset_g.keys():\n",
-    "        metadata = ConstantMetaData()\n",
-    "        offset = Constants.DSSC.Offset()\n",
-    "        offset.data = offset_g[qm]\n",
-    "        metadata.calibration_constant = offset\n",
-    "\n",
-    "        # set the operating condition\n",
-    "        condition = Conditions.Dark.DSSC(memory_cells=max_cells, bias_voltage=bias_voltage)\n",
-    "        detinst = getattr(Detectors, dinstance)\n",
-    "\n",
-    "        device = getattr(detinst, qm)\n",
-    "        \n",
-    "        metadata.detector_condition = condition\n",
-    "        \n",
-    "        # specify the a version for this constant\n",
-    "        if creation_time is None:\n",
-    "            metadata.calibration_constant_version = Versions.Now(device=device)\n",
-    "        else:\n",
-    "            metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)\n",
-    "        metadata.send(cal_db_interface, timeout=3000000)\n",
-    "        \n",
-    "        \n",
-    "        metadata = ConstantMetaData()\n",
-    "        noise = Constants.DSSC.Noise()\n",
-    "        noise.data = noise_g[qm]\n",
-    "        metadata.calibration_constant = noise\n",
-    "\n",
-    "        # set the operating condition\n",
-    "        condition = Conditions.Dark.DSSC(memory_cells=max_cells, bias_voltage=bias_voltage)\n",
-    "        metadata.detector_condition = condition\n",
-    "\n",
-    "        # specify the a version for this constant\n",
-    "        if creation_time is None:\n",
-    "            metadata.calibration_constant_version = Versions.Now(device=device)\n",
-    "        else:\n",
-    "            metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)\n",
-    "        metadata.send(cal_db_interface, timeout=3000000)\n",
-    "        \n",
-    "        continue  # no bad pixels yet\n",
-    "        metadata = ConstantMetaData()\n",
-    "        badpixels = Constants.DSSC.BadPixelsDark()\n",
-    "        badpixels.data = badpix_g[qm]\n",
-    "        metadata.calibration_constant = badpixels\n",
-    "\n",
-    "        # set the operating condition\n",
-    "        condition = Conditions.Dark.DSSC(memory_cells=max_cells, bias_voltage=bias_voltage)\n",
-    "        metadata.detector_condition = condition\n",
-    "\n",
-    "        # specify the a version for this constant\n",
-    "        if creation_time is None:\n",
-    "            metadata.calibration_constant_version = Versions.Now(device=device)\n",
-    "        else:\n",
-    "            metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)\n",
-    "        metadata.send(cal_db_interface, timeout=3000000)"
+    "    for dont_use_pulseIds in [True, False]:\n",
+    "        for qm in offset_g.keys():\n",
+    "            try:\n",
+    "                metadata = ConstantMetaData()\n",
+    "                offset = Constants.DSSC.Offset()\n",
+    "                offset.data = offset_g[qm]\n",
+    "                metadata.calibration_constant = offset\n",
+    "                pidsum = None if dont_use_pulseIds else checksums[qm]\n",
+    "                # set the operating condition\n",
+    "                condition = Conditions.Dark.DSSC(memory_cells=max_cells, bias_voltage=bias_voltage,\n",
+    "                                                 pulseid_checksum=pidsum)\n",
+    "                detinst = getattr(Detectors, dinstance)\n",
+    "\n",
+    "                device = getattr(detinst, qm)\n",
+    "\n",
+    "                metadata.detector_condition = condition\n",
+    "\n",
+    "                # specify the a version for this constant\n",
+    "                if creation_time is None:\n",
+    "                    metadata.calibration_constant_version = Versions.Now(device=device)\n",
+    "                else:\n",
+    "                    metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)\n",
+    "                metadata.send(cal_db_interface, timeout=3000000)\n",
+    "\n",
+    "\n",
+    "                metadata = ConstantMetaData()\n",
+    "                noise = Constants.DSSC.Noise()\n",
+    "                noise.data = noise_g[qm]\n",
+    "                metadata.calibration_constant = noise\n",
+    "\n",
+    "                # set the operating condition\n",
+    "                condition = Conditions.Dark.DSSC(memory_cells=max_cells, bias_voltage=bias_voltage,\n",
+    "                                                 pulseid_checksum=pidsum)\n",
+    "                metadata.detector_condition = condition\n",
+    "\n",
+    "                # specify the a version for this constant\n",
+    "                if creation_time is None:\n",
+    "                    metadata.calibration_constant_version = Versions.Now(device=device)\n",
+    "                else:\n",
+    "                    metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)\n",
+    "                metadata.send(cal_db_interface, timeout=3000000)\n",
+    "\n",
+    "                continue  # no bad pixels yet\n",
+    "                metadata = ConstantMetaData()\n",
+    "                badpixels = Constants.DSSC.BadPixelsDark()\n",
+    "                badpixels.data = badpix_g[qm]\n",
+    "                metadata.calibration_constant = badpixels\n",
+    "\n",
+    "                # set the operating condition\n",
+    "                condition = Conditions.Dark.DSSC(memory_cells=max_cells, bias_voltage=bias_voltage,\n",
+    "                                                 pulseid_checksum=pidsum)\n",
+    "                metadata.detector_condition = condition\n",
+    "\n",
+    "                # specify the a version for this constant\n",
+    "                if creation_time is None:\n",
+    "                    metadata.calibration_constant_version = Versions.Now(device=device)\n",
+    "                else:\n",
+    "                    metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)\n",
+    "                metadata.send(cal_db_interface, timeout=3000000)\n",
+    "            except Exception as e:\n",
+    "                print(e)"
    ]
   },
   {
@@ -435,6 +449,16 @@
     "Single cell overviews allow to identify potential effects on all memory cells, e.g. on sensor level. Additionally, they should serve as a first sanity check on expected behaviour, e.g. if structuring on the ASIC level is visible in the offsets, but otherwise no immediate artifacts are visible."
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "for r in res.values():\n",
+    "    del r[\"BadPixels\"]"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,
@@ -447,7 +471,7 @@
    },
    "outputs": [],
    "source": [
-    "cell = 3\n",
+    "cell = 9\n",
     "gain = 0\n",
     "out_folder = None\n",
     "show_overview(res, cell, gain, out_folder=out_folder, infix=\"_\".join(offset_runs.values()))"
@@ -466,7 +490,6 @@
    "cell_type": "code",
    "execution_count": null,
    "metadata": {
-    "collapsed": true,
     "scrolled": false
    },
    "outputs": [],
@@ -480,7 +503,7 @@
     "\n",
     "gain = 0\n",
     "for mod, data in badpix_g.items():\n",
-    "    plot_badpix_3d(data[...,gain], cols, title=mod, rebin_fac=rebin)"
+    "    plot_badpix_3d(data, cols, title=mod, rebin_fac=rebin)"
    ]
   },
   {
@@ -496,49 +519,50 @@
    "cell_type": "code",
    "execution_count": null,
    "metadata": {
-    "collapsed": true,
     "scrolled": false
    },
    "outputs": [],
    "source": [
-    "create_constant_overview(offset_g, \"Offset (ADU)\", max_cells, 4000, 8000,\n",
-    "                         out_folder=out_folder, infix=\"_\".join(offset_runs.values()))"
+    "create_constant_overview(offset_g, \"Offset (ADU)\", max_cells,\n",
+    "                         out_folder=out_folder, infix=\"_\".join(offset_runs.values()), entries=1)"
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {
-    "collapsed": true,
     "scrolled": false
    },
    "outputs": [],
    "source": [
     "create_constant_overview(noise_g, \"Noise (ADU)\", max_cells, 0, 100,\n",
-    "                         out_folder=out_folder, infix=\"_\".join(offset_runs.values()))"
+    "                         out_folder=out_folder, infix=\"_\".join(offset_runs.values()), entries=1)"
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "bad_pixel_aggregate_g = OrderedDict()\n",
     "for m, d in badpix_g.items():\n",
     "    bad_pixel_aggregate_g[m] = d.astype(np.bool).astype(np.float)\n",
-    "create_constant_overview(bad_pixel_aggregate_g, \"Bad pixel fraction\", max_cells, 0, 0.10, 3,\n",
+    "create_constant_overview(bad_pixel_aggregate_g, \"Bad pixel fraction\", max_cells, entries=1,\n",
     "                         out_folder=out_folder, infix=\"_\".join(offset_runs.values()))"
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
    "outputs": [],
    "source": []
   }