From 905d2b4af56f603d4c6f55677a0d382bdbdb001d Mon Sep 17 00:00:00 2001
From: Nuno Duarte <duarten@max-display002.desy.de>
Date: Thu, 17 Mar 2022 19:28:09 +0100
Subject: [PATCH] reviewed version 2

---
 .../Characterize_Darks_ePix100_NBC.ipynb      | 218 +++++++++---------
 1 file changed, 112 insertions(+), 106 deletions(-)

diff --git a/notebooks/ePix100/Characterize_Darks_ePix100_NBC.ipynb b/notebooks/ePix100/Characterize_Darks_ePix100_NBC.ipynb
index 37ec926c6..18d7085a9 100644
--- a/notebooks/ePix100/Characterize_Darks_ePix100_NBC.ipynb
+++ b/notebooks/ePix100/Characterize_Darks_ePix100_NBC.ipynb
@@ -41,11 +41,12 @@
     "local_output = True # output constants locally\n",
     "\n",
     "# Conditions used for injected calibration constants.\n",
-    "bias_voltage = 200 # bias voltage\n",
-    "in_vacuum = False # detector operated in vacuum\n",
-    "fix_temperature = 0. # Fixed temperature in Kelvin. Set to 0 to read from .h5 file\n",
-    "temp_limits = 5 # limit for parameter Operational temperature\n",
-    "badpixel_threshold_sigma = 5.  # bad pixels defined by values outside n times this std from median\n",
+    "bias_voltage = 200 # Bias voltage\n",
+    "in_vacuum = False # Detector operated in vacuum\n",
+    "integration_time = -1 # Integration time. Set to -1 to read from .h5 file\n",
+    "fix_temperature = -1 # Fixed temperature in Kelvin. Set to -1 to read from .h5 file\n",
+    "temp_limits = 5 # Limit for parameter Operational temperature\n",
+    "badpixel_threshold_sigma = 5.  # Bad pixels defined by values outside n times this std from median\n",
     "\n",
     "# Parameters used during selecting raw data trains.\n",
     "min_trains = 1 # Minimum number of trains that should be available to process dark constants. Default 1.\n",
@@ -73,7 +74,6 @@
     "\n",
     "import XFELDetAna.xfelprofiler as xprof\n",
     "from XFELDetAna import xfelpyanatools as xana\n",
-    "from XFELDetAna.detectors.fastccd import readerh5 as fastccdreaderh5\n",
     "from XFELDetAna.plotting.util import prettyPlotting\n",
     "from cal_tools.enums import BadPixels\n",
     "from cal_tools.tools import (\n",
@@ -118,7 +118,6 @@
     "\n",
     "ped_dir = os.path.join(in_folder, f\"r{run:04d}\")\n",
     "fp_name = path_template.format(run, karabo_da[0]).format(sequence)\n",
-    "filename = os.path.join(ped_dir, fp_name)\n",
     "run_dir = RunDirectory(ped_dir)\n",
     "\n",
     "print(f\"Run number: {run}\")\n",
@@ -136,15 +135,14 @@
    "outputs": [],
    "source": [
     "# Read sensor size\n",
-    "sensor_size = np.array(run_dir.get_array( \n",
-    "    instrument_src,\n",
-    "    \"data.image.dims\")[0,:2]) # (x,y)\n",
+    "sensor_size = run_dir[instrument_src, 'data.image.dims'].as_single_value(reduce_by='first') # (x=768, y=708) expected\n",
+    "assert np.allclose(sensor_size, np.array([768, 708])), 'Unexpected sensor dimensions.'\n",
     "\n",
-    "# Path to pixel ADC values\n",
-    "pixel_data_dir = (instrument_src, \"data.image.pixels\")\n",
+    "# Path to pixel ADC values\n",
+    "pixels_src = (instrument_src, \"data.image.pixels\")\n",
     "\n",
     "# Specifies total number of images to proceed\n",
-    "n_trains = run_dir.get_data_counts(*pixel_data_dir).shape[0]\n",
+    "n_trains = run_dir.get_data_counts(*pixels_src).shape[0]\n",
     "\n",
     "# Modify n_trains to process based on given maximum\n",
     "# and minimun number of trains.\n",
@@ -158,23 +156,27 @@
     "\n",
     "print(f\"Number of dark images to analyze: {n_trains}\")\n",
     "\n",
-    "integration_time = int(run_dir.get_array(\n",
-    "    f\"{karabo_id}/DET/CONTROL\",\n",
-    "    \"expTime.value\")[0])\n",
-    "\n",
-    "if fix_temperature:\n",
-    "    temperature_k = fix_temperature\n",
-    "    temperature = fix_temperature - 273.15\n",
-    "    print(\"Temperature is fixed!\")\n",
+    "# Read integration time\n",
+    "if integration_time == -1:\n",
+    "    integration_time = run_dir[f\"{karabo_id}/DET/CONTROL\", 'expTime.value'].as_single_value(reduce_by='first')\n",
+    "    integration_time_str_add = ''\n",
     "else:\n",
-    "    temperature = np.mean(run_dir.get_array(\n",
-    "        instrument_src,\n",
-    "        \"data.backTemp\").values) / 100.\n",
+    "    integration_time_str_add = '(manual input)'\n",
+    "    \n",
+    "# Read temperature    \n",
+    "if fix_temperature == -1:\n",
+    "    temperature = np.mean(run_dir.get_array(instrument_src,\"data.backTemp\").values)/100.\n",
     "    temperature_k = temperature + 273.15\n",
-    "\n",
-    "print(f\"Bias voltage is {bias_voltage} V\")\n",
-    "print(f\"Detector integration time is set to {integration_time} \\u03BCs\")\n",
-    "print(f\"Mean temperature was {temperature:0.1f}°C / {temperature_k:0.1f} K\")\n",
+    "    temp_str_add = ''\n",
+    "else:\n",
+    "    temperature_k = fix_temperature\n",
+    "    temperature = fix_temperature - 273.15\n",
+    "    temp_str_add = '(manual input)'\n",
+    "    \n",
+    "# Print operating conditions\n",
+    "print(f\"Bias voltage: {bias_voltage} V\")\n",
+    "print(f\"Detector integration time: {integration_time} \\u03BCs {integration_time_str_add}\")\n",
+    "print(f\"Mean temperature: {temperature:0.2f}°C / {temperature_k:0.2f} K {temp_str_add}\")\n",
     "print(f\"Operated in vacuum: {in_vacuum}\")"
    ]
   },
@@ -188,10 +190,10 @@
    "source": [
     "# Calculate noise and offset per pixel and global average, std and median\n",
     "data_dc = run_dir.select(\n",
-    "    *pixel_data_dir, require_all=True).select_trains(np.s_[:n_trains])\n",
+    "    *pixels_src, require_all=True).select_trains(np.s_[:n_trains])\n",
     "print(f\"Reading data from: {data_dc.files}\\n\")\n",
     "\n",
-    "data = data_dc[pixel_data_dir].ndarray()\n",
+    "data = data_dc[pixels_src].ndarray()\n",
     "\n",
     "noise_data = np.std(data, axis=0)\n",
     "offset_data = np.mean(data, axis=0)\n",
@@ -238,12 +240,12 @@
     "fig = xana.heatmapPlot(\n",
     "    constant_maps['Offset'][:, :, 0],\n",
     "    lut_label='[ADU]',\n",
-    "    x_label = 'Column',\n",
-    "    y_label = 'Row',\n",
-    "    x_range = (0, sensor_size[0]),\n",
-    "    y_range = (0, sensor_size[1]), \n",
-    "    vmin = max(0, offset_median - badpixel_threshold_sigma*offset_sigma), \n",
-    "    vmax = min(np.power(2,14)-1, offset_median + badpixel_threshold_sigma*offset_sigma)\n",
+    "    x_label='Column',\n",
+    "    y_label='Row',\n",
+    "    x_range=(0, sensor_size[0]),\n",
+    "    y_range=(0, sensor_size[1]), \n",
+    "    vmin=max(0, offset_median - badpixel_threshold_sigma*offset_sigma), \n",
+    "    vmax=min(np.power(2,14)-1, offset_median + badpixel_threshold_sigma*offset_sigma)\n",
     ")\n",
     "fig.suptitle('Offset Map', x=.48, y=.9, fontsize=16)\n",
     "fig.set_size_inches(h=15, w=15)\n",
@@ -262,12 +264,12 @@
     "\n",
     "fig = xana.simplePlot(\n",
     "    do, \n",
-    "    aspect = 1.5,\n",
-    "    x_label = 'Offset [ADU]',\n",
-    "    y_label = 'Counts',\n",
-    "    x_range = (0, np.power(2,14)-1),\n",
-    "    y_range = (0, max(ho)*1.1),\n",
-    "    y_log = True\n",
+    "    aspect=1.5,\n",
+    "    x_label='Offset [ADU]',\n",
+    "    y_label='Counts',\n",
+    "    x_range=(0, np.power(2,14)-1),\n",
+    "    y_range=(0, max(ho)*1.1),\n",
+    "    y_log=True\n",
     ")\n",
     "fig.suptitle('Offset Distribution', x=.5,y =.92, fontsize=16)\n",
     "\n",
@@ -279,11 +281,11 @@
     "    f'max: {np.round(offset_max,2)}'\n",
     ")\n",
     "fig.text(\n",
-    "    s = stats_str,\n",
-    "    x = .7,\n",
-    "    y = .7,\n",
-    "    fontsize = 14,\n",
-    "    bbox = dict(facecolor='yellow', edgecolor='black', alpha=.1));"
+    "    s=stats_str,\n",
+    "    x=.7,\n",
+    "    y=.7,\n",
+    "    fontsize=14,\n",
+    "    bbox=dict(facecolor='yellow', edgecolor='black', alpha=.1));"
    ]
   },
   {
@@ -304,13 +306,13 @@
     "#************** NOISE HEAT MAP **************#\n",
     "fig = xana.heatmapPlot(\n",
     "    constant_maps['Noise'][:, :, 0],\n",
-    "    lut_label = '[ADU]',\n",
-    "    x_label = 'Column', \n",
-    "    y_label = 'Row',\n",
-    "    x_range = (0, sensor_size[0]),\n",
-    "    y_range = (0, sensor_size[1]),\n",
-    "    vmin = max(0, noise_median - badpixel_threshold_sigma*noise_sigma), \n",
-    "    vmax = noise_median + badpixel_threshold_sigma*noise_sigma\n",
+    "    lut_label='[ADU]',\n",
+    "    x_label='Column', \n",
+    "    y_label='Row',\n",
+    "    x_range=(0, sensor_size[0]),\n",
+    "    y_range=(0, sensor_size[1]),\n",
+    "    vmin=max(0, noise_median - badpixel_threshold_sigma*noise_sigma), \n",
+    "    vmax=noise_median + badpixel_threshold_sigma*noise_sigma\n",
     ")\n",
     "fig.suptitle('Noise Map', x=.5, y=.9, fontsize=16)\n",
     "fig.set_size_inches(h=15, w=15)\n",
@@ -329,13 +331,13 @@
     "\n",
     "fig = xana.simplePlot(\n",
     "    dn,\n",
-    "    aspect = 1.5,\n",
-    "    x_label = 'Noise [ADU]',\n",
-    "    y_label = 'Counts',\n",
-    "    x_range = (max(0, noise_median - badpixel_threshold_sigma*noise_sigma), \n",
-    "               noise_median + badpixel_threshold_sigma*noise_sigma),\n",
-    "    y_range = (0, max(hn)*1.1),\n",
-    "    y_log = True\n",
+    "    aspect=1.5,\n",
+    "    x_label='Noise [ADU]',\n",
+    "    y_label='Counts',\n",
+    "    x_range=(max(0, noise_median - badpixel_threshold_sigma*noise_sigma),\n",
+    "             noise_median + badpixel_threshold_sigma*noise_sigma),\n",
+    "    y_range=(0, max(hn)*1.1),\n",
+    "    y_log=True\n",
     ")\n",
     "fig.suptitle('Noise Distribution',x=.5,y=.92,fontsize=16);\n",
     "\n",
@@ -347,18 +349,18 @@
     "    f'max: {np.round(noise_max,2)}'\n",
     ")\n",
     "fig.text(\n",
-    "    s = stats_str,\n",
-    "    x = .7,\n",
-    "    y = .7,\n",
-    "    fontsize = 14,\n",
-    "    bbox = dict(facecolor='yellow', edgecolor='black', alpha=.1));"
+    "    s=stats_str,\n",
+    "    x=.7,\n",
+    "    y=.7,\n",
+    "    fontsize=14,\n",
+    "    bbox=dict(facecolor='yellow', edgecolor='black', alpha=.1));"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## Bad Pixels Map ###\n",
+    "## Bad Pixels\n",
     "\n",
     "The bad pixel map is deduced by comparing offset and noise of each pixel ($v_i$) against the median value of the respective maps ($v_k$):\n",
     "\n",
@@ -433,17 +435,17 @@
    "outputs": [],
    "source": [
     "# Add BadPixels to constant_maps\n",
-    "constant_maps['BadPixels'] = np.zeros(constant_maps['Offset'].shape, np.uint32)\n",
+    "constant_maps['BadPixelsDark'] = np.zeros(constant_maps['Offset'].shape, np.uint32)\n",
     "\n",
     "# Noise related bad pixels\n",
-    "constant_maps['BadPixels'][eval_bpidx(constant_maps['Noise'], badpixel_threshold_sigma)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
-    "constant_maps['BadPixels'][~np.isfinite(constant_maps['Noise'])] = BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
+    "constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Noise'], badpixel_threshold_sigma)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
+    "constant_maps['BadPixelsDark'][~np.isfinite(constant_maps['Noise'])] = BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
     "\n",
     "# Offset related bad pixels\n",
-    "constant_maps['BadPixels'][eval_bpidx(constant_maps['Offset'], badpixel_threshold_sigma)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n",
-    "constant_maps['BadPixels'][~np.isfinite(constant_maps['Offset'])] = BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
+    "constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Offset'], badpixel_threshold_sigma)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n",
+    "constant_maps['BadPixelsDark'][~np.isfinite(constant_maps['Offset'])] = BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
     "\n",
-    "num_bad_pixels = np.sum(constant_maps['BadPixels']!=0)\n",
+    "num_bad_pixels = np.sum(constant_maps['BadPixelsDark']!=0)\n",
     "\n",
     "print('Number of Bad Pixels: ' + str(num_bad_pixels) + ' (' + str(np.round(100*num_bad_pixels/(sensor_size[0]*sensor_size[1]),2)) + '%)')"
    ]
@@ -456,12 +458,12 @@
    "source": [
     "#************** BAD PIXELS HEAT MAP **************#\n",
     "fig = xana.heatmapPlot(\n",
-    "    np.log2(constant_maps['BadPixels'][:, :, 0]),\n",
+    "    np.log2(constant_maps['BadPixelsDark'][:, :, 0]),\n",
     "    lut_label='Bad pixel bit assinged',\n",
-    "    x_label = 'Column',\n",
-    "    y_label = 'Row',\n",
-    "    x_range = (0, sensor_size[0]),\n",
-    "    y_range = (0, sensor_size[1])\n",
+    "    x_label='Column',\n",
+    "    y_label='Row',\n",
+    "    x_range=(0, sensor_size[0]),\n",
+    "    y_range=(0, sensor_size[1])\n",
     ")\n",
     "fig.suptitle('Bad Pixels Map', x=.5, y=.9, fontsize=16)\n",
     "fig.set_size_inches(h=15, w=15)"
@@ -482,10 +484,10 @@
     "\n",
     "    # Set the operating condition\n",
     "    condition = Conditions.Dark.ePix100(\n",
-    "        bias_voltage = bias_voltage,\n",
-    "        integration_time = integration_time,\n",
-    "        temperature = temperature_k,\n",
-    "        in_vacuum = in_vacuum)\n",
+    "        bias_voltage=bias_voltage,\n",
+    "        integration_time=integration_time,\n",
+    "        temperature=temperature_k,\n",
+    "        in_vacuum=in_vacuum)\n",
     " \n",
     "    for parm in condition.parameters:\n",
     "        if parm.name == \"Sensor Temperature\":\n",
@@ -494,37 +496,37 @@
     "\n",
     "    # Get physical detector unit\n",
     "    db_module = get_pdu_from_db(\n",
-    "        karabo_id = karabo_id,\n",
-    "        karabo_da = karabo_da,\n",
-    "        constant = const,\n",
-    "        condition = condition,\n",
-    "        cal_db_interface = cal_db_interface,\n",
+    "        karabo_id=karabo_id,\n",
+    "        karabo_da=karabo_da,\n",
+    "        constant=const,\n",
+    "        condition=condition,\n",
+    "        cal_db_interface=cal_db_interface,\n",
     "        snapshot_at=creation_time)[0]\n",
     "\n",
     "    # Inject or save calibration constants\n",
     "    if db_output:\n",
     "        md = send_to_db(\n",
-    "            db_module = db_module,\n",
-    "            karabo_id = karabo_id,\n",
-    "            constant = const,\n",
-    "            condition = condition,\n",
-    "            file_loc = file_loc,\n",
-    "            report_path = report,\n",
-    "            cal_db_interface = cal_db_interface,\n",
-    "            creation_time = creation_time,\n",
-    "            timeout = cal_db_timeout\n",
+    "            db_module=db_module,\n",
+    "            karabo_id=karabo_id,\n",
+    "            constant=const,\n",
+    "            condition=condition,\n",
+    "            file_loc=file_loc,\n",
+    "            report_path=report,\n",
+    "            cal_db_interface=cal_db_interface,\n",
+    "            creation_time=creation_time,\n",
+    "            timeout=cal_db_timeout\n",
     "        )\n",
     "    if local_output:\n",
     "        md = save_const_to_h5(\n",
-    "            db_module = db_module,\n",
-    "            karabo_id = karabo_id,\n",
-    "            constant = const,\n",
-    "            condition = condition,\n",
-    "            data = const.data,\n",
-    "            file_loc = file_loc,\n",
-    "            report = report,\n",
-    "            creation_time = creation_time,\n",
-    "            out_folder = out_folder\n",
+    "            db_module=db_module,\n",
+    "            karabo_id=karabo_id,\n",
+    "            constant=const,\n",
+    "            condition=condition,\n",
+    "            data=const.data,\n",
+    "            file_loc=file_loc,\n",
+    "            report=report,\n",
+    "            creation_time=creation_time,\n",
+    "            out_folder=out_folder\n",
     "        )\n",
     "        print(f\"Calibration constant {const_name} is stored locally at {out_folder} \\n\")\n",
     "\n",
@@ -571,7 +573,11 @@
    "latex_user_defs": false,
    "report_style_numbering": false,
    "user_envs_cfg": false
-  }
+  },
+  "toc-autonumbering": false,
+  "toc-showcode": false,
+  "toc-showmarkdowntxt": false,
+  "toc-showtags": false
  },
  "nbformat": 4,
  "nbformat_minor": 4
-- 
GitLab