diff --git a/notebooks/DSSC/DSSC_Correct_and_Verify.ipynb b/notebooks/DSSC/DSSC_Correct_and_Verify.ipynb
index ca00f530e8598db04398f04676ba34bfcc7ea9ec..e8c29b356fce8f32432c76bb457753eea5e29ef9 100644
--- a/notebooks/DSSC/DSSC_Correct_and_Verify.ipynb
+++ b/notebooks/DSSC/DSSC_Correct_and_Verify.ipynb
@@ -22,17 +22,17 @@
    },
    "outputs": [],
    "source": [
-    "in_folder = \"/gpfs/exfel/exp/SCS/201901/p002212/raw/\" # the folder to read data from, required\n",
-    "run = 29 # runs to process, required\n",
-    "out_folder =  \"/gpfs/exfel/data/scratch/xcal/test/\"  # the folder to output to, required\n",
-    "sequences =  [-1] # sequences to correct, set to -1 for all, range allowed\n",
+    "in_folder = \"/gpfs/exfel/exp/SCS/201802/p002222/raw/\" # the folder to read data from, required\n",
+    "run = 144 # runs to process, required\n",
+    "out_folder =  \"/gpfs/exfel/data/scratch/ahmedk/test/\"  # the folder to output to, required\n",
+    "sequences =  [0] # sequences to correct, set to -1 for all, range allowed\n",
     "mem_cells = 0 # number of memory cells used, set to 0 to automatically infer\n",
     "overwrite = True # set to True if existing data should be overwritten\n",
     "cluster_profile = \"noDB\" # cluster profile to use\n",
     "max_pulses = 500 # maximum number of pulses per train\n",
     "bias_voltage = 100 # detector bias voltage\n",
     "cal_db_interface = \"tcp://max-exfl016:8020#8025\" # the database interface to use\n",
-    "use_dir_creation_date = True # use the creation data of the input dir for database queries\n",
+    "use_dir_creation_date = False # use the creation data of the input dir for database queries\n",
     "sequences_per_node = 1 # number of sequence files per cluster node if run as slurm job, set to 0 to not run SLURM parallel\n",
     "cal_db_timeout = 300000 # in milli seconds\n",
     "chunk_size_idim = 1  # chunking size of imaging dimension, adjust if user software is sensitive to this.\n",
@@ -336,6 +336,10 @@
     "    from cal_tools.tools import get_dir_creation_date, get_constant_from_db_and_time, get_random_db_interface\n",
     "    from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions\n",
     "    \n",
+    "    from hashlib import blake2b\n",
+    "    import struct\n",
+    "    import binascii\n",
+    "    \n",
     "    filename, filename_out, channel, qm = inp\n",
     "    h5path = \"INSTRUMENT/{}/DET/{}CH0:xtdf/\".format(loc, channel)\n",
     "    h5path_idx = \"INDEX/{}/DET/{}CH0:xtdf/\".format(loc, channel)\n",
@@ -345,7 +349,7 @@
     "    high_edges = None\n",
     "    hists_signal_high = None\n",
     "    pulse_edges = None\n",
-    "    \n",
+    "    err = None\n",
     "    def get_num_cells(fname, loc, module):\n",
     "        with h5py.File(fname, \"r\") as f:\n",
     "\n",
@@ -355,14 +359,28 @@
     "            dists = np.array([(o-maxcell) for o in options])\n",
     "            dists[dists<0] = 10000 # assure to always go higher\n",
     "            return options[np.argmin(dists)]\n",
+    "        \n",
+    "    def get_checksum(fname, loc, module):\n",
+    "        with h5py.File(fname, \"r\") as infile:\n",
+    "            count = np.squeeze(infile[\"/INDEX/{}/DET/{}CH0:xtdf/image/count\".format(loc, channel)])\n",
+    "            first = np.squeeze(infile[\"/INDEX/{}/DET/{}CH0:xtdf/image/first\".format(loc, channel)])\n",
+    "            last_index = int(first[count != 0][-1]+count[count != 0][-1])\n",
+    "            first_index = int(first[count != 0][0])\n",
+    "            pulseids = infile[\"INSTRUMENT/{}/DET/{}CH0:xtdf/image/pulseId\".format(loc, channel)][first_index:int(first[count != 0][1])]\n",
+    "            bveto = blake2b(pulseids.data, digest_size=8)\n",
+    "            pulseid_checksum = struct.unpack('d', binascii.unhexlify(bveto.hexdigest()))[0]\n",
+    "            return pulseid_checksum\n",
     "    \n",
     "\n",
     "    if mem_cells == 0:\n",
     "        mem_cells = get_num_cells(filename, loc, channel)\n",
     "        \n",
+    "    pulseid_checksum = get_checksum(filename, loc, channel)\n",
+    "        \n",
     "    print(\"Memcells: {}\".format(mem_cells))\n",
     "    \n",
-    "    condition =  Conditions.Dark.DSSC(bias_voltage=bias_voltage, memory_cells=mem_cells)\n",
+    "    condition =  Conditions.Dark.DSSC(bias_voltage=bias_voltage, memory_cells=mem_cells,\n",
+    "                                      pulseid_checksum=pulseid_checksum)\n",
     "    \n",
     "    \n",
     "    detinst = getattr(Detectors, dinstance)\n",
@@ -378,6 +396,8 @@
     "        \n",
     "        offset = np.moveaxis(np.moveaxis(offset[...], 2, 0), 2, 1)\n",
     "        print(offset.shape)\n",
+    "    else:\n",
+    "        print(\"No offset found\")\n",
     "    \n",
     "    \n",
     "    def copy_and_sanitize_non_cal_data(infile, outfile):\n",
@@ -433,7 +453,7 @@
     "                    if offset_image != \"PP\" or offset is None:\n",
     "                        data -= data[offset_image, ...]\n",
     "                    else:\n",
-    "                        data -= offset[cellId,...]\n",
+    "                        data[...] -= offset[cellId,...]\n",
     "                    \n",
     "                    if train == 0:\n",
     "                        pulseId = np.repeat(pulseId[:, None], data.shape[1], axis=1)\n",
@@ -481,10 +501,10 @@
     "        print(e)\n",
     "        success = False\n",
     "        reason = \"Error\"\n",
-    "        \n",
+    "        err = e\n",
     "   \n",
     "        \n",
-    "    return (hists_signal_low, hists_signal_high, low_edges, high_edges, pulse_edges, when, qm)\n",
+    "    return (hists_signal_low, hists_signal_high, low_edges, high_edges, pulse_edges, when, qm, err)\n",
     "    \n",
     "done = False\n",
     "first_files = []\n",
@@ -498,6 +518,7 @@
     "\n",
     "whens = []\n",
     "qms = []\n",
+    "Errors = []\n",
     "while not done:\n",
     "    \n",
     "    dones = []\n",
@@ -530,9 +551,10 @@
     "        \n",
     "        for rr in r:\n",
     "            if rr is not None:\n",
-    "                hl, hh, low_edges, high_edges, pulse_edges, when, qm = rr  \n",
+    "                hl, hh, low_edges, high_edges, pulse_edges, when, qm, err = rr  \n",
     "                whens.append(when)\n",
     "                qms.append(qm)\n",
+    "                Errors.append(err)\n",
     "                if hl is not None:  # any one being None will also make the others None\n",
     "                    hists_signal_low += hl.astype(np.float64)\n",
     "                    hists_signal_high += hh.astype(np.float64)                \n",
@@ -542,7 +564,10 @@
     "whens = [x for _,x in sorted(zip(qms,whens))]\n",
     "qms = sorted(qms)\n",
     "for i, qm in enumerate(qms):\n",
-    "    print(\"Offset for {} was injected on {}\".format(qm, whens[i].isoformat()))"
+    "    if Errors[i] is not None:\n",
+    "        print(\"Offset for {} was injected on {}, Error: {}\".format(qm, whens[i].isoformat(), Errors[i]))\n",
+    "    else:\n",
+    "        print(\"Offset for {} was injected on {}\".format(qm, whens[i].isoformat()))"
    ]
   },
   {