diff --git a/notebooks/Timepix/Compute_Timepix_Event_Centroids.ipynb b/notebooks/Timepix/Compute_Timepix_Event_Centroids.ipynb
index de4b54ecc0d7324cabf85f3f8e598ec3d1fc31c5..c84fcbb313e9b243dccce3aa4e6ee3df3c26a0f6 100755
--- a/notebooks/Timepix/Compute_Timepix_Event_Centroids.ipynb
+++ b/notebooks/Timepix/Compute_Timepix_Event_Centroids.ipynb
@@ -141,44 +141,6 @@
     "    return tpx_data\n",
     "\n",
     "\n",
-    "def pre_clustering_filter(tpx_data, tot_threshold=0):\n",
-    "    \"\"\"\n",
-    "    Collection of filters directly applied before clustering.\n",
-    "    Note: at no point a copy of the dictionary is made, as they are mutable, the input array is changed in memory!\n",
-    "\n",
-    "    Parameters\n",
-    "    ----------\n",
-    "    tpx_data:      Dictionary with timepix data, all arrays behind each key must be of same length\n",
-    "    tot_threshold: minimum ToT required for a pixel to contain valid data\n",
-    "\n",
-    "    Returns\n",
-    "    -------\n",
-    "    tpx_data: like input tpx_data but with applied filters\n",
-    "    \"\"\"\n",
-    "    if tot_threshold > 0:\n",
-    "        tpx_data = apply_single_filter(tpx_data, tpx_data[\"tot\"] >= tot_threshold)\n",
-    "\n",
-    "    return tpx_data\n",
-    "\n",
-    "\n",
-    "def post_clustering_filter(tpx_data):\n",
-    "    \"\"\"\n",
-    "    Collection of filters directly applied after clustering.\n",
-    "    Note: at no point a copy of the dictionary is made, as they are mutable, the input array is changed in memory!\n",
-    "\n",
-    "    Parameters\n",
-    "    ----------\n",
-    "    tpx_data:    Dictionary with timepix data, all arrays behind each key must be of same length, now with key labels\n",
-    "\n",
-    "    Returns\n",
-    "    -------\n",
-    "    tpx_data: like input tpx_data but with applied filters\n",
-    "    \"\"\"\n",
-    "    if tpx_data[\"labels\"] is not None:\n",
-    "        tpx_data = apply_single_filter(tpx_data, tpx_data[\"labels\"] != 0)\n",
-    "\n",
-    "    return tpx_data\n",
-    "\n",
     "\n",
     "def clustering(tpx_data, epsilon=2, tof_scale=1e7, min_samples=3, n_jobs=1):\n",
     "    \"\"\"\n",
@@ -203,7 +165,7 @@
     "    \"\"\"\n",
     "    coords = np.column_stack((tpx_data[\"x\"], tpx_data[\"y\"], tpx_data[\"toa\"]*tof_scale))\n",
     "    dist = DBSCAN(eps=epsilon, min_samples=min_samples, metric=\"euclidean\", n_jobs=n_jobs).fit(coords)\n",
-    "    return dist.labels_ + 1\n",
+    "    return dist.labels_\n",
     "\n",
     "def empty_centroid_data():\n",
     "    return {\n",
@@ -268,15 +230,20 @@
     "\n",
     "    # clustering (identify clusters in 2d data (x,y,tof) that belong to a single hit,\n",
     "    # each sample belonging to a cluster is labeled with an integer cluster id no)\n",
-    "    _tpx_data = pre_clustering_filter(_tpx_data, tot_threshold=threshold_tot)\n",
-    "    _tpx_data[\"labels\"] = clustering(_tpx_data, epsilon=clustering_epsilon, tof_scale=clustering_tof_scale, min_samples=clustering_min_samples)\n",
-    "    _tpx_data = post_clustering_filter(_tpx_data)\n",
+    "    if threshold_tot > 0:\n",
+    "        _tpx_data = apply_single_filter(_tpx_data, _tpx_data[\"tot\"] >= threshold_tot)    \n",
+    "\n",
+    "    labels = clustering(_tpx_data, epsilon=clustering_epsilon, tof_scale=clustering_tof_scale, min_samples=clustering_min_samples)\n",
+    "    _tpx_data[\"labels\"] = labels\n",
+    "    \n",
+    "    if labels is not None:\n",
+    "        _tpx_data = apply_single_filter(_tpx_data, labels >= 0)\n",
     "    \n",
     "    # compute centroid data (reduce cluster of samples to a single point with properties)\n",
-    "    if _tpx_data[\"labels\"] is None or _tpx_data[\"labels\"].size == 0:\n",
+    "    if labels is None or len(_tpx_data['x']) == 0:\n",
     "        # handle case of no identified clusters, return empty dictionary with expected keys\n",
     "        return np.array([]), empty_centroid_data()\n",
-    "    return _tpx_data['labels'], get_centroids(_tpx_data, timewalk_lut=centroiding_timewalk_lut)\n",
+    "    return labels, get_centroids(_tpx_data, timewalk_lut=centroiding_timewalk_lut)\n",
     "\n",
     "\n",
     "def process_train(worker_id, index, train_id, data):\n",