From 955da35a95b9e627d08ceae4d2f7b53e979429d8 Mon Sep 17 00:00:00 2001
From: Philipp Schmidt <philipp.schmidt@xfel.eu>
Date: Thu, 22 Feb 2024 10:39:11 +0100
Subject: [PATCH] Remove inner parallelization in Timepix centroiding

---
 notebooks/Timepix/Compute_Timepix_Event_Centroids.ipynb | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/notebooks/Timepix/Compute_Timepix_Event_Centroids.ipynb b/notebooks/Timepix/Compute_Timepix_Event_Centroids.ipynb
index 50a985dfd..036bb35f7 100755
--- a/notebooks/Timepix/Compute_Timepix_Event_Centroids.ipynb
+++ b/notebooks/Timepix/Compute_Timepix_Event_Centroids.ipynb
@@ -45,7 +45,6 @@
 "clustering_epsilon = 2.0 # centroiding: The maximum distance between two samples for one to be considered as in the neighborhood of the other\n",
 "clustering_tof_scale = 1e7 # centroiding: Scaling factor for the ToA axis so that the epsilon parameter in DB scan works in all 3 dimensions\n",
 "clustering_min_samples = 2 # centroiding: minimum number of samples necessary for a cluster\n",
- "clustering_n_jobs = 5 # centroiding: (DBSCAN) The number of parallel jobs to run.\n",
 "threshold_tot = 0 # raw data: minimum ToT necessary for a pixel to contain valid data\n",
 "\n",
 "raw_timewalk_lut_filepath = '' # fpath to look up table for timewalk correction relative to proposal path or empty string,\n",
@@ -249,7 +248,6 @@
 " clustering_epsilon=2,\n",
 " clustering_tof_scale=1e7,\n",
 " clustering_min_samples=3,\n",
- " clustering_n_jobs=1,\n",
 " centroiding_timewalk_lut=None):\n",
 " # format input data\n",
 " _tpx_data = {\n",
@@ -271,7 +269,7 @@
 " # clustering (identify clusters in 2d data (x,y,tof) that belong to a single hit,\n",
 " # each sample belonging to a cluster is labeled with an integer cluster id no)\n",
 " _tpx_data = pre_clustering_filter(_tpx_data, tot_threshold=threshold_tot)\n",
- " _tpx_data[\"labels\"] = clustering(_tpx_data, epsilon=clustering_epsilon, tof_scale=clustering_tof_scale, min_samples=clustering_min_samples, n_jobs=clustering_n_jobs)\n",
+ " _tpx_data[\"labels\"] = clustering(_tpx_data, epsilon=clustering_epsilon, tof_scale=clustering_tof_scale, min_samples=clustering_min_samples)\n",
 " _tpx_data = post_clustering_filter(_tpx_data)\n",
 " # compute centroid data (reduce cluster of samples to a single point with properties)\n",
 " if _tpx_data[\"labels\"] is None or _tpx_data[\"labels\"].size == 0:\n",
@@ -301,7 +299,7 @@
 " missing_centroids = num_centroids > max_num_centroids\n",
 "\n",
 " if num_centroids > max_num_centroids:\n",
- " warn('number of centroids larger than definde maximum, some data cannot be written to disk')\n",
+ " warn('Number of centroids is larger than the defined maximum, some data cannot be written to disk')\n",
 "\n",
 " for key in centroid_dt.names:\n",
 " out_data[index, :num_centroids][key] = centroids[key]\n",
@@ -385,7 +383,6 @@
 " clustering_epsilon=clustering_epsilon,\n",
 " clustering_tof_scale=clustering_tof_scale,\n",
 " clustering_min_samples=clustering_min_samples,\n",
- " clustering_n_jobs=clustering_n_jobs,\n",
 " centroiding_timewalk_lut=centroiding_timewalk_lut)\n",
 "\n",
 "\n",
--
GitLab
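
Note (not part of the patch): the clustering() helper touched above wraps scikit-learn's DBSCAN over the (x, y, ToA) samples. The sketch below illustrates what such a call looks like once the n_jobs argument is dropped and left at its default. It is an assumption-based illustration, not the notebook's exact code: the dictionary keys "x", "y" and "tof" and the wrapper signature are inferred from the parameter names visible in the diff.

    import numpy as np
    from sklearn.cluster import DBSCAN

    def clustering(tpx_data, epsilon=2.0, tof_scale=1e7, min_samples=2):
        # Stack x, y and the scaled time axis so that epsilon acts in all three
        # dimensions (keys assumed; the notebook builds _tpx_data from raw event arrays).
        coords = np.column_stack((tpx_data["x"],
                                  tpx_data["y"],
                                  tpx_data["tof"] * tof_scale))
        # With n_jobs left at its default, DBSCAN runs single-threaded inside each
        # worker process, avoiding nested parallelism with the outer parallelization.
        return DBSCAN(eps=epsilon, min_samples=min_samples).fit_predict(coords)

The returned array labels each sample with an integer cluster id (-1 for noise), matching how _tpx_data["labels"] is used in the diff above.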