diff --git a/notebooks/ePix100/Characterize_Darks_ePix100_NBC.ipynb b/notebooks/ePix100/Characterize_Darks_ePix100_NBC.ipynb
index 7a174bdd77310197f6da7ba559a3eb700af70136..6776037feeb2cd71da04303042bbb2902f2ff6b0 100644
--- a/notebooks/ePix100/Characterize_Darks_ePix100_NBC.ipynb
+++ b/notebooks/ePix100/Characterize_Darks_ePix100_NBC.ipynb
@@ -27,11 +27,11 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "in_folder = '/gpfs/exfel/exp/HED/202201/p002804/raw' # input folder, required\n",
+    "in_folder = '/gpfs/exfel/exp/HED/202330/p900338/raw' # input folder, required\n",
     "out_folder = '' # output folder, required\n",
     "metadata_folder = ''  # Directory containing calibration_metadata.yml when run by xfel-calibrate\n",
     "sequence = 0 # sequence file to use\n",
-    "run = 281 # which run to read data from, required\n",
+    "run = 176 # which run to read data from, required\n",
     "\n",
     "# Parameters for accessing the raw data.\n",
     "karabo_id = \"HED_IA1_EPX100-1\" # karabo karabo_id\n",
@@ -53,7 +53,8 @@
     "fix_integration_time = -1 # Integration time. Set to -1 to read from .h5 file\n",
     "fix_temperature = -1 # Fixed temperature in Kelvin. Set to -1 to read from .h5 file\n",
     "temp_limits = 5 # Limit for parameter Operational temperature\n",
-    "badpixel_threshold_sigma = 5.  # Bad pixels defined by values outside n times this std from median. Default: 5\n",
+    "badpixel_noise_sigma = 5  # Bad pixels defined by noise value outside n * std from median. Default: 5\n",
+    "badpixel_offset_sigma = 2  # Bad pixels defined by offset value outside n * std from median. Default: 2\n",
     "CM_N_iterations = 2  # Number of iterations for common mode correction. Set to 0 to skip it\n",
     "\n",
     "# Parameters used during selecting raw data trains.\n",
@@ -359,15 +360,15 @@
     "    lut_label='[ADU]',\n",
     "    x_label='Column', \n",
     "    y_label='Row',\n",
-    "    vmin=max(0, np.round((stats['median'] - badpixel_threshold_sigma*stats['std']))), \n",
-    "    vmax=np.round(stats['median'] + badpixel_threshold_sigma*stats['std'])\n",
+    "    vmin=max(0, np.round((stats['median'] - badpixel_noise_sigma*stats['std']))), \n",
+    "    vmax=np.round(stats['median'] + badpixel_noise_sigma*stats['std'])\n",
     ")\n",
     "fig.suptitle('Noise Map', x=.5, y=.9, fontsize=16)\n",
     "fig.set_size_inches(h=15, w=15)\n",
     "\n",
     "# Calculate overall noise histogram\n",
-    "bins = np.arange(max(0, stats['mean'] - badpixel_threshold_sigma*stats['std']),\n",
-    "                 stats['mean'] + badpixel_threshold_sigma*stats['std'], \n",
+    "bins = np.arange(max(0, stats['mean'] - badpixel_noise_sigma*stats['std']),\n",
+    "                 stats['mean'] + badpixel_noise_sigma*stats['std'], \n",
     "                 stats['std']/100)\n",
     "\n",
     "h, c = np.histogram(\n",
@@ -415,8 +416,8 @@
     "    aspect=1.5,\n",
     "    x_label='Noise [ADU]',\n",
     "    y_label='Counts',\n",
-    "    x_range=(max(0, stats['median'] - badpixel_threshold_sigma*stats['std']),\n",
-    "             stats['median'] + badpixel_threshold_sigma*stats['std']),\n",
+    "    x_range=(max(0, stats['median'] - badpixel_noise_sigma*stats['std']),\n",
+    "             stats['median'] + badpixel_noise_sigma*stats['std']),\n",
     "    y_range=(0, max(d[0]['y'])*1.1),\n",
     ")\n",
     "plt.grid(linestyle = ':')\n",
@@ -571,11 +572,11 @@
     "constant_maps['BadPixelsDark'] = np.zeros(constant_maps['Offset'].shape, np.uint32)\n",
     "\n",
     "# Find noise related bad pixels\n",
-    "constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Noise'], badpixel_threshold_sigma, sensor_size//2)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
+    "constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Noise'], badpixel_noise_sigma, sensor_size//2)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
     "constant_maps['BadPixelsDark'][~np.isfinite(constant_maps['Noise'])] = BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
     "\n",
     "# Find offset related bad pixels\n",
-    "constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Offset'], badpixel_threshold_sigma, sensor_size//2)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n",
+    "constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Offset'], badpixel_offset_sigma, sensor_size//2)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n",
     "constant_maps['BadPixelsDark'][~np.isfinite(constant_maps['Offset'])] = BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
     "\n",
     "# Plot Bad Pixels Map\n",
@@ -620,7 +621,7 @@
     "    print('Common mode correction not applied.')\n",
     "else:\n",
     "    \n",
-    "    commonModeBlockSize = sensor_size//2\n",
+    "    commonModeBlockSize = (sensor_size//[8,2]).astype(int) # bank size (x=96,y=354) pixels\n",
     "\n",
     "    # Instantiate common mode calculators for column and row CM correction\n",
     "    cmCorrection_col = xcal.CommonModeCorrection(\n",
@@ -672,7 +673,7 @@
     "        noise_map_corrected = np.nanstd(data, axis=0)[..., np.newaxis]\n",
     "\n",
     "        # Update bad pixels map \n",
-    "        constant_maps['BadPixelsDark'][eval_bpidx(noise_map_corrected, badpixel_threshold_sigma, sensor_size//2)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
+    "        constant_maps['BadPixelsDark'][eval_bpidx(noise_map_corrected, badpixel_noise_sigma, sensor_size//2)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
     "        bp_offset.append(np.sum(constant_maps['BadPixelsDark']==1))\n",
     "        bp_noise.append(np.sum(constant_maps['BadPixelsDark']==2))\n",
     "\n",