diff --git a/notebooks/DSSC/DSSC_Correct_and_Verify.ipynb b/notebooks/DSSC/DSSC_Correct_and_Verify.ipynb
index ca00f530e8598db04398f04676ba34bfcc7ea9ec..fb365ff161347a65bc4257a4ce2218e940cecdbc 100644
--- a/notebooks/DSSC/DSSC_Correct_and_Verify.ipynb
+++ b/notebooks/DSSC/DSSC_Correct_and_Verify.ipynb
@@ -22,9 +22,9 @@
    },
    "outputs": [],
    "source": [
-    "in_folder = \"/gpfs/exfel/exp/SCS/201901/p002212/raw/\" # the folder to read data from, required\n",
-    "run = 29 # runs to process, required\n",
-    "out_folder =  \"/gpfs/exfel/data/scratch/xcal/test/\"  # the folder to output to, required\n",
+    "in_folder = \"/gpfs/exfel/exp/SCS/201931/p900095/raw/\" # the folder to read data from, required\n",
+    "run = 1515 # runs to process, required\n",
+    "out_folder =  \"/gpfs/exfel/data/scratch/ahmedk/test/\"  # the folder to output to, required\n",
     "sequences =  [-1] # sequences to correct, set to -1 for all, range allowed\n",
     "mem_cells = 0 # number of memory cells used, set to 0 to automatically infer\n",
     "overwrite = True # set to True if existing data should be overwritten\n",
@@ -336,6 +336,11 @@
     "    from cal_tools.tools import get_dir_creation_date, get_constant_from_db_and_time, get_random_db_interface\n",
     "    from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions\n",
     "    \n",
+    "    from hashlib import blake2b\n",
+    "    import struct\n",
+    "    import binascii\n",
+    "    \n",
     "    filename, filename_out, channel, qm = inp\n",
     "    h5path = \"INSTRUMENT/{}/DET/{}CH0:xtdf/\".format(loc, channel)\n",
     "    h5path_idx = \"INDEX/{}/DET/{}CH0:xtdf/\".format(loc, channel)\n",
@@ -345,7 +350,8 @@
     "    high_edges = None\n",
     "    hists_signal_high = None\n",
     "    pulse_edges = None\n",
-    "    \n",
+    "    err = None\n",
+    "    offset_not_found = False\n",
     "    def get_num_cells(fname, loc, module):\n",
     "        with h5py.File(fname, \"r\") as f:\n",
     "\n",
@@ -355,18 +361,39 @@
     "            dists = np.array([(o-maxcell) for o in options])\n",
     "            dists[dists<0] = 10000 # assure to always go higher\n",
     "            return options[np.argmin(dists)]\n",
+    "        \n",
+    "    def get_checksum(fname, loc, module):\n",
+    "        with h5py.File(fname, \"r\") as infile:\n",
+    "            count = np.squeeze(infile[\"/INDEX/{}/DET/{}CH0:xtdf/image/count\".format(loc, channel)])\n",
+    "            first = np.squeeze(infile[\"/INDEX/{}/DET/{}CH0:xtdf/image/first\".format(loc, channel)])\n",
+    "            last_index = int(first[count != 0][-1]+count[count != 0][-1])\n",
+    "            first_index = int(first[count != 0][0])\n",
+    "            pulseids = infile[\"INSTRUMENT/{}/DET/{}CH0:xtdf/image/pulseId\".format(loc, channel)][first_index:int(first[count != 0][1])]\n",
+    "            bveto = blake2b(pulseids.data, digest_size=8)\n",
+    "            pulseid_checksum = struct.unpack('d', binascii.unhexlify(bveto.hexdigest()))[0]\n",
+    "            return pulseid_checksum\n",
     "    \n",
     "\n",
     "    if mem_cells == 0:\n",
     "        mem_cells = get_num_cells(filename, loc, channel)\n",
     "        \n",
+    "    pulseid_checksum = get_checksum(filename, loc, channel)\n",
+    "        \n",
     "    print(\"Memcells: {}\".format(mem_cells))\n",
     "    \n",
-    "    condition =  Conditions.Dark.DSSC(bias_voltage=bias_voltage, memory_cells=mem_cells)\n",
+    "    condition =  Conditions.Dark.DSSC(bias_voltage=bias_voltage, memory_cells=mem_cells,\n",
+    "                                      pulseid_checksum=pulseid_checksum)\n",
     "    \n",
     "    \n",
     "    detinst = getattr(Detectors, dinstance)\n",
     "    device = getattr(detinst, qm)\n",
+    "    with h5py.File(filename, \"r\", driver=\"core\") as infile:\n",
+    "        y = infile[h5path+\"image/data\"].shape[2]\n",
+    "        x = infile[h5path+\"image/data\"].shape[3]\n",
     "    offset, when = get_constant_from_db_and_time(device,\n",
     "                                                  Constants.DSSC.Offset(),\n",
     "                                                  condition,\n",
@@ -375,9 +402,14 @@
     "                                                  creation_time=creation_time,\n",
     "                                                  timeout=cal_db_timeout)\n",
     "    if offset is not None:\n",
-    "        \n",
     "        offset = np.moveaxis(np.moveaxis(offset[...], 2, 0), 2, 1)\n",
-    "        print(offset.shape)\n",
+    "    else:\n",
+    "        offset_not_found = True\n",
+    "        offset = np.zeros((x, y, mem_cells))\n",
+    "        offset = np.moveaxis(np.moveaxis(offset[...], 2, 0), 2, 1)\n",
+    "        print(\"No offset found in the database\")\n",
     "    \n",
     "    \n",
     "    def copy_and_sanitize_non_cal_data(infile, outfile):\n",
@@ -415,7 +447,7 @@
     "                                               oshape, chunks=chunks,\n",
     "                                               dtype=np.float32,\n",
     "                                               fletcher32=True)\n",
-    "                \n",
+    "\n",
     "                mdset = outfile.create_dataset(h5path + \"image/mask\",\n",
     "                                               oshape, chunks=chunks,\n",
     "                                               dtype=np.uint32,\n",
@@ -423,24 +455,28 @@
     "                                               compression_opts=1,\n",
     "                                               shuffle=True,\n",
     "                                               fletcher32=True)\n",
-    "                \n",
+    "\n",
     "                for train in range(first_arr.size):\n",
     "                    first = first_arr[train]\n",
     "                    last = last_arr[train]\n",
     "                    data = np.squeeze(infile[h5path+\"image/data\"][first:last, ...].astype(np.float32))\n",
     "                    cellId = np.squeeze(infile[h5path+\"image/cellId\"][first:last, ...])\n",
     "                    pulseId = np.squeeze(infile[h5path+\"image/pulseId\"][first:last, ...])\n",
-    "                    if offset_image != \"PP\" or offset is None:\n",
+    "\n",
+    "                    if offset_image != \"PP\" and offset_not_found:\n",
     "                        data -= data[offset_image, ...]\n",
     "                    else:\n",
-    "                        data -= offset[cellId,...]\n",
-    "                    \n",
+    "                        data[...] -= offset[cellId,...]\n",
+    "\n",
     "                    if train == 0:\n",
     "                        pulseId = np.repeat(pulseId[:, None], data.shape[1], axis=1)\n",
     "                        pulseId = np.repeat(pulseId[:,:,None], data.shape[2], axis=2)\n",
     "                        bins = (55, pulseId.max())\n",
     "                        rnge = [[-5, 50], [0, pulseId.max()]]\n",
-    "                        \n",
+    "\n",
     "                        hists_signal_low, low_edges, pulse_edges = np.histogram2d(data.flatten(),\n",
     "                                                                                  pulseId.flatten(),\n",
     "                                                                                  bins=bins,\n",
@@ -452,13 +488,12 @@
     "                                                                          range=rnge)                        \n",
     "                    # data[data < 0] = 0\n",
     "                    ddset[first:last, ...] = data\n",
-    "                \n",
     "                # find static and noisy values in dark images\n",
     "                data = infile[h5path+\"image/data\"][last, ...].astype(np.float32)\n",
     "                bpix = np.zeros(oshape[1:], np.uint32)\n",
     "                dark_std = np.std(data, axis=0)\n",
     "                bpix[dark_std > noisy_pix_threshold] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
-    "                \n",
+    "\n",
     "                for i in range(8):\n",
     "                    for j in range(2):\n",
     "                        count_noise = np.count_nonzero(bpix[i*64:(i+1)*64, j*64:(j+1)*64])\n",
@@ -466,25 +501,30 @@
     "                        if mask_noisy_asic:\n",
     "                            if count_noise/(64*64) > mask_noisy_asic:\n",
     "                                bpix[i*64:(i+1)*64, j*64:(j+1)*64] = BadPixels.NOISY_ADC.value\n",
-    "\n",
+    "                    \n",
     "                        if mask_cold_asic:\n",
     "                            count_cold = np.count_nonzero(asic_std < 0.5)\n",
     "                            if count_cold/(64*64) > mask_cold_asic:\n",
     "                                bpix[i*64:(i+1)*64, j*64:(j+1)*64] = BadPixels.ASIC_STD_BELOW_NOISE.value\n",
-    "                                \n",
-    "                for train in range(first_arr.size):\n",
-    "                    first = first_arr[train]\n",
-    "                    last = last_arr[train]\n",
-    "                    mdset[first:last, ...] = np.repeat(bpix[None,...], last-first, axis=0)\n",
+    "\n",
+    "# This was removed because last element in last is -1 and that produce errors\n",
+    "# Also because mdset is not clear why is it needed.\n",
     "                \n",
+    "#                 for train in range(first_arr.size):\n",
+    "#                     first = first_arr[train]\n",
+    "#                     last = last_arr[train]\n",
+    "#                     mdset[first:last, ...] = np.repeat(bpix[None,...], last-first, axis=0)\n",
     "    except Exception as e:\n",
     "        print(e)\n",
     "        success = False\n",
     "        reason = \"Error\"\n",
-    "        \n",
+    "        err = e\n",
     "   \n",
+    "    if err is None and offset_not_found:\n",
+    "        err = \"Offset was not found in the database!\"\n",
     "        \n",
-    "    return (hists_signal_low, hists_signal_high, low_edges, high_edges, pulse_edges, when, qm)\n",
+    "    return (hists_signal_low, hists_signal_high, low_edges, high_edges, pulse_edges, when, qm, err)\n",
     "    \n",
     "done = False\n",
     "first_files = []\n",
@@ -498,6 +538,7 @@
     "\n",
     "whens = []\n",
     "qms = []\n",
+    "Errors = []\n",
     "while not done:\n",
     "    \n",
     "    dones = []\n",
@@ -530,9 +571,11 @@
     "        \n",
     "        for rr in r:\n",
     "            if rr is not None:\n",
-    "                hl, hh, low_edges, high_edges, pulse_edges, when, qm = rr  \n",
+    "                hl, hh, low_edges, high_edges, pulse_edges, when, qm, err = rr  \n",
     "                whens.append(when)\n",
     "                qms.append(qm)\n",
+    "                Errors.append(err)\n",
     "                if hl is not None:  # any one being None will also make the others None\n",
     "                    hists_signal_low += hl.astype(np.float64)\n",
     "                    hists_signal_high += hh.astype(np.float64)                \n",
@@ -542,7 +585,15 @@
     "whens = [x for _,x in sorted(zip(qms,whens))]\n",
     "qms = sorted(qms)\n",
     "for i, qm in enumerate(qms):\n",
-    "    print(\"Offset for {} was injected on {}\".format(qm, whens[i].isoformat()))"
+    "    try:\n",
+    "        when = whens[i].isoformat()\n",
+    "    except:\n",
+    "        when = whens[i]\n",
+    "    if Errors[i] is not None:\n",
+    "        print(\"Offset for {} was injected on {}, Error: {}\".format(qm, when, Errors[i]))\n",
+    "    else:\n",
+    "        print(\"Offset for {} was injected on {}\".format(qm, when))"
    ]
   },
   {