diff --git a/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb b/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb
index eacdcffe2503561803508141281afbeefa4b9514..5e64c6ae03dce1fb33e93e75bae681ee2b2fb6a2 100644
--- a/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb
+++ b/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb
@@ -29,6 +29,7 @@
     "receiver_template = \"JNGFR{:02d}\" # Detector receiver template for accessing raw data files. e.g. \"JNGFR{:02d}\"\n",
     "path_template = 'RAW-R{:04d}-{}-S{:05d}.h5' # template to use for file name\n",
     "instrument_source_template = '{}/DET/{}:daqOutput' # template for source name (filled with karabo_id & receiver_id). e.g. 'SPB_IRDA_JF4M/DET/JNGFR01:daqOutput'\n",
+    "ctrl_source_template = '{}/DET/CONTROL' # template for control source name (filled with karabo_id).\n",
     "karabo_id_control = \"\" # if control is on a different ID, set to empty string if it is the same a karabo-id\n",
     "karabo_da_control = \"JNGFRCTRL00\" # file inset for control data\n",
     "\n",
@@ -81,7 +82,8 @@
     "from extra_data import H5File, RunDirectory\n",
     "from matplotlib.colors import LogNorm\n",
     "\n",
-    "from cal_tools import h5_copy_except, jungfraulib\n",
+    "from cal_tools import h5_copy_except\n",
+    "from cal_tools.jungfraulib import JungfrauCtrl\n",
     "from cal_tools.enums import BadPixels\n",
     "from cal_tools.step_timing import StepTimer\n",
     "from cal_tools.tools import (\n",
@@ -170,16 +172,11 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# TODO: do we still need to avoid raising error and exiting here?\n",
+    "ctrl_src = ctrl_source_template.format(karabo_id_control)\n",
+    "ctrl_data = JungfrauCtrl(run_dc, ctrl_src)\n",
     "try:\n",
-    "    ctrl_data = jungfraulib.JFCtrl(run_dc, karabo_id_control)\n",
     "    this_run_mcells, sc_start = ctrl_data.get_memory_cells()\n",
     "\n",
-    "    if not manual_slow_data:\n",
-    "        integration_time = ctrl_data.get_integration_time()\n",
-    "        bias_voltage = ctrl_data.get_bias_voltage()\n",
-    "        gain_str, gain_setting = ctrl_data.get_gain_setting()\n",
-    "\n",
     "    if this_run_mcells == 1:\n",
     "        memory_cells = 1\n",
     "        print(\"Run is in single cell mode.\\n\"\n",
@@ -188,20 +185,19 @@
     "        memory_cells = 16\n",
     "        print(f\"Run is in burst mode.\\n\"\n",
     "              f\"Storage cell start: {sc_start:02d}\")\n",
-    "\n",
-    "except Exception as e:\n",
-    "    if \"Unable to open object\" in str(e):\n",
-    "        if mem_cells == 0:\n",
-    "            memory_cells = 1\n",
-    "        else:\n",
-    "            memory_cells = mem_cells\n",
-    "        print(f\"Set memory cells to {memory_cells}, as \"\n",
-    "              \"it is not saved in control information.\")\n",
+    "except KeyError as e:\n",
+    "    print(\"WARNING: KeyError while reading number of memory cells.\")\n",
+    "    if mem_cells == 0:\n",
+    "        memory_cells = 1\n",
     "    else:\n",
-    "        display(Markdown(\n",
-    "            \"### <span style='color: #ff0000'>ERROR: </span> \"\n",
-    "            \"Accessing control information \"\n",
-    "            f\": {e}\"))\n",
+    "        memory_cells = mem_cells\n",
+    "    print(f\"WARNING: Set memory cells to {memory_cells}, as \"\n",
+    "          \"it is not saved in control information.\")\n",
+    "\n",
+    "if not manual_slow_data:\n",
+    "    integration_time = ctrl_data.get_integration_time()\n",
+    "    bias_voltage = ctrl_data.get_bias_voltage()\n",
+    "    gain_str, gain_setting = ctrl_data.get_gain_setting()\n",
     "\n",
     "print(f\"Integration time is {integration_time} us\")\n",
     "print(f\"Gain setting is {gain_setting} ({gain_str})\")\n",
@@ -314,7 +310,7 @@
    "source": [
     "# Correct a chunk of images for offset and gain\n",
     "def correct_train(wid, index, d):\n",
-    "    d = d.astype(np.float32)\n",
+    "    d = d.astype(np.float32) # [2, x, y]\n",
     "    g = gain[index]\n",
     "    m = memcells[index]\n",
     "\n",
@@ -329,7 +325,9 @@
     "    # As this result in copying data to a new array on every train,\n",
     "    # even when there's the same pattern of memory cells on every train.\n",
     "    if memory_cells > 1:\n",
-    "        m[m>16] = 0\n",
+    "        m[m>16] = 0 # TODO: this is wrong and needs to be updated with burst mode.\n",
+    "        # For an invalid image a memory cell of 255 is set.\n",
+    "        # These images doesn't need to be processed.\n",
     "        offset_map_cell = offset_map[m, ...]\n",
     "        mask_cell = mask[m, ...]\n",
     "    else:\n",
@@ -407,14 +405,15 @@
     "\n",
     "    # Save corrected data in an output file with name\n",
     "    # of corresponding raw sequence file.\n",
-    "    out_file = out_folder / sequence_file.name.replace(\"RAW\", \"CORR\")\n",
+    "    ofile_name = sequence_file.name.replace(\"RAW\", \"CORR\")\n",
+    "    out_file = out_folder / ofile_name\n",
     "\n",
     "    # load shape of data for memory cells, and detector size (imgs, cells, x, y)\n",
     "    # dshape[0] = number of available images to correct.\n",
     "    dshape = seq_dc[instrument_src_kda, \"data.adc\"].shape\n",
     "\n",
     "    if dshape[0] == 0:\n",
-    "        print(f\"\\t- WARNING: No image data for {out_file}: data shape is {dshape}\")\n",
+    "        print(f\"\\t- WARNING: No image data for {ofile_name}: data shape is {dshape}\")\n",
     "        continue\n",
     "\n",
     "    sensor_size = dshape[1:]\n",
@@ -427,9 +426,9 @@
     "    if limit_images > 0:\n",
     "        n_imgs = min(n_imgs, limit_images)\n",
     "\n",
-    "    print(f\"\\nNumber of images to correct: {n_imgs} for {out_file}\")\n",
+    "    print(f\"\\nNumber of images to correct: {n_imgs} for {ofile_name}\")\n",
     "    if n_trains - dshape[0] != 0:\n",
-    "        print(f\"\\t- WARNING: {sequence_file} has {n_trains - dshape[0]} \"\n",
+    "        print(f\"\\t- WARNING: {sequence_file.name} has {n_trains - dshape[0]} \"\n",
     "              \"trains with empty data.\")\n",
     "\n",
     "    # Just in case if n_imgs is less than the chosen plt_images.\n",
diff --git a/notebooks/Jungfrau/Jungfrau_dark_analysis_all_gains_burst_mode_NBC.ipynb b/notebooks/Jungfrau/Jungfrau_dark_analysis_all_gains_burst_mode_NBC.ipynb
index 06ff820f0633bdf0b8d0848314e670c7fb42fec3..e24b0cd43edbfd6f85d67691fe780861aabeebb0 100644
--- a/notebooks/Jungfrau/Jungfrau_dark_analysis_all_gains_burst_mode_NBC.ipynb
+++ b/notebooks/Jungfrau/Jungfrau_dark_analysis_all_gains_burst_mode_NBC.ipynb
@@ -25,13 +25,14 @@
     "\n",
     "# Parameters used to access raw data.\n",
     "karabo_da = ['JNGFR01', 'JNGFR02','JNGFR03','JNGFR04', 'JNGFR05', 'JNGFR06','JNGFR07','JNGFR08'] # list of data aggregators, which corresponds to different JF modules\n",
-    "karabo_id = \"SPB_IRDA_JF4M\" # karabo_id (detector identifier) prefix of Jungfrau detector to process.\n",
-    "karabo_id_control = \"\" # if control is on a different ID, set to empty string if it is the same a karabo-id\n",
+    "karabo_id = 'SPB_IRDA_JF4M' # karabo_id (detector identifier) prefix of Jungfrau detector to process.\n",
+    "karabo_id_control = '' # if control is on a different ID, set to empty string if it is the same a karabo-id\n",
     "receiver_template = 'JNGFR{:02}' # inset for receiver devices\n",
     "receiver_control_id = \"CONTROL\" # inset for control devices\n",
     "path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # template to use for file name, double escape sequence number\n",
-    "instrument_source_template = '{}/DET/{}:daqOutput' # template for source name (filled with karabo_id & receiver_id). e.g. 'SPB_IRDA_JF4M/DET/JNGFR01:daqOutput'\n",
-    "karabo_da_control = \"JNGFRCTRL00\" # file inset for control data\n",
+    "instrument_source_template = '{}/DET/{}:daqOutput' # template for instrument source name (filled with karabo_id & receiver_id). e.g. 'SPB_IRDA_JF4M/DET/JNGFR01:daqOutput'\n",
+    "ctrl_source_template = '{}/DET/CONTROL' # template for control source name (filled with karabo_id).\n",
+    "karabo_da_control = 'JNGFRCTRL00' # file inset for control data\n",
     "\n",
     "# Parameters for calibration database and storing constants.\n",
     "use_dir_creation_date = True # use dir creation date\n",
@@ -73,6 +74,7 @@
     "import glob\n",
     "import os\n",
     "import warnings\n",
+    "from pathlib import Path\n",
     "\n",
     "warnings.filterwarnings('ignore')\n",
     "\n",
@@ -84,13 +86,9 @@
     "matplotlib.use('agg')\n",
     "%matplotlib inline\n",
     "\n",
-    "from XFELDetAna.detectors.jungfrau.util import (\n",
-    "    rollout_data,\n",
-    "    sanitize_data_cellid,\n",
-    ")\n",
     "from XFELDetAna.plotting.heatmap import heatmapPlot\n",
     "from XFELDetAna.plotting.histogram import histPlot\n",
-    "from cal_tools import jungfraulib\n",
+    "from cal_tools import jungfraulib, step_timing\n",
     "from cal_tools.ana_tools import save_dict_to_hdf5\n",
     "from cal_tools.enums import BadPixels\n",
     "from cal_tools.tools import (\n",
@@ -139,7 +137,9 @@
     "proposal = list(filter(None, in_folder.strip('/').split('/')))[-2]\n",
     "file_loc = f\"proposal:{proposal} runs:{run_high} {run_med} {run_low}\"\n",
     "\n",
-    "report = get_report(out_folder)"
+    "report = get_report(out_folder)\n",
+    "\n",
+    "step_timer = step_timing.StepTimer()"
    ]
   },
   {
@@ -155,19 +155,20 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "step_timer.start()\n",
     "gain_runs = dict()\n",
     "noise_map = dict()\n",
     "offset_map = dict()\n",
     "gain_str = None\n",
+    "ctrl_src = ctrl_source_template.format(karabo_id_control)\n",
     "\n",
     "for gain, run_n in enumerate(run_nums):\n",
     "    run_dc = RunDirectory(f\"{in_folder}/r{run_n:04d}/\")\n",
     "    gain_runs[run_n] = [gain, run_dc]\n",
     "\n",
-    "    # Read control data for the first gain only.\n",
+    "    # Read control data for the high gain run only.\n",
     "    if run_n == run_high:\n",
-    "\n",
-    "        ctrl_data = jungfraulib.JFCtrl(run_dc, karabo_id_control)\n",
+    "        ctrl_data = jungfraulib.JungfrauCtrl(run_dc, ctrl_src)\n",
     "        run_mcells, sc_start = ctrl_data.get_memory_cells()\n",
     "\n",
     "        if not manual_slow_data:\n",
@@ -187,7 +188,7 @@
     "        memory_cells = 16\n",
     "        print('Dark runs in burst mode, '\n",
     "              f'storage cell start: {sc_start:02d}')\n",
-    "\n",
+    "step_timer.done_step(f'Reading control data.')\n",
     "# Initialize noise_map and offset_map module arrays.\n",
     "for mod in karabo_da:\n",
     "    noise_map[mod] = np.zeros(sensor_size+(memory_cells, 3))\n",
@@ -203,7 +204,7 @@
    "outputs": [],
    "source": [
     "for mod in karabo_da:\n",
-    "\n",
+    "    step_timer.start()\n",
     "    instrument_src = instrument_source_template.format(\n",
     "        karabo_id, receiver_template.format(int(mod[-2:])))\n",
     "\n",
@@ -215,52 +216,41 @@
     "\n",
     "    # load shape of data for memory cells, and detector size (imgs, cells, x, y)\n",
     "    n_imgs = run_dc[instrument_src, \"data.adc\"].shape[0]\n",
-    "    \n",
-    "    if max_trains > 0:\n",
-    "        n_imgs = min(n_imgs, max_trains)\n",
     "    # load number of data available, including trains with empty data.\n",
     "    n_trains = run_dc.get_data_counts(instrument_src, \"data.adc\").shape[0]\n",
-    "\n",
-    "    instr_dc = run_dc.select(instrument_src, require_all=True).select_trains(np.s_[:n_imgs])\n",
-    "\n",
+    "    instr_dc = run_dc.select(instrument_src, require_all=True)\n",
     "    if n_trains-n_imgs != 0:\n",
-    "        print(f\"\\tWARNING: {instr_dc.files[0].filename} has {n_trains-n_imgs} \"\n",
-    "              f\"trains with empty data out of {n_trains} trains.\")\n",
+    "        print(\n",
+    "            f\"\\tWARNING: {Path(run_dc.files[0].filename).name} has {n_trains-n_imgs} \" # noqa\n",
+    "            f\"trains with empty data out of {n_trains} trains.\")\n",
+    "\n",
+    "    if max_trains > 0:\n",
+    "        n_imgs = min(n_imgs, max_trains)\n",
+    "        print(f\"Processing {n_imgs} images based on the given max_trains: {max_trains}.\") # noqa\n",
+    "    # Select only requested number of images to process darks.\n",
+    "    instr_dc = instr_dc.select_trains(np.s_[:n_imgs])\n",
     "\n",
     "    if n_imgs < min_trains:\n",
     "        raise ValueError(\n",
     "            f\"Less than {min_trains} trains are available in RAW data.\"\n",
-    "             \" Not enough data to process darks.\")\n",
+    "            \" Not enough data to process darks.\")\n",
     "\n",
     "    images = np.transpose(\n",
    "        instr_dc[instrument_src, \"data.adc\"].ndarray(), (3, 2, 1, 0))\n",
-    "\n",
-    "    roi=np.s_[:1]\n",
-    "    if gain > 0 and memory_cells == 16:\n",
-    "        roi=np.s_[:]\n",
-    "\n",
-    "    acelltable = np.transpose(\n",
-    "        instr_dc[instrument_src, \"data.memoryCell\"].ndarray(roi=roi), (1, 0))\n",
+    "    acelltable = np.transpose(instr_dc[instrument_src, \"data.memoryCell\"].ndarray())\n",
     "\n",
     "    if memory_cells == 1:\n",
     "        acelltable -= sc_start\n",
-    "    # throwing away all the SC entries except\n",
-    "    # the first for lower gains.\n",
-    "    if gain > 0 and memory_cells == 16: \n",
-    "        acelltable[1:] = 255\n",
-    "    # makes 4-dim vecs into 3-dim\n",
-    "    # makes 2-dim into 1-dim\n",
-    "    # leaves 1-dim and 3-dim vecs\n",
-    "\n",
-    "    images, acelltable = rollout_data([images, acelltable])\n",
-    "\n",
-    "    # removes entries with cellID 255\n",
-    "    images, acelltable = sanitize_data_cellid([images], acelltable)\n",
+    "    if gain > 0 and memory_cells == 16:\n",
+    "        # 255 is used as the detector sets 255 as well for\n",
+    "        # cell images identified as bad.\n",
+    "        acelltable[1:] = 255 \n",
     "\n",
     "    for cell in range(memory_cells):\n",
     "        thiscell = images[..., acelltable == cell]\n",
     "        noise_map[mod][..., cell, gain] = np.std(thiscell, axis=2)\n",
-    "        offset_map[mod][..., cell, gain] = np.mean(thiscell, axis=2)"
+    "        offset_map[mod][..., cell, gain] = np.mean(thiscell, axis=2)\n",
+    "    step_timer.done_step(f'Creating Offset and noise constants for a module.')"
    ]
   },
   {
@@ -294,6 +284,7 @@
    "outputs": [],
    "source": [
     "# TODO: Fix plots arrangment and speed for Jungfrau burst mode.\n",
+    "step_timer.start()\n",
     "for mod in karabo_da:\n",
     "    for g_idx in gains:\n",
     "        for cell in range(0, memory_cells):\n",
@@ -350,7 +341,8 @@
     "                fontsize=15)\n",
     "            ax_n0.set_xlabel(\n",
     "                f'RMS noise {g_name[g_idx]} ' + unit, fontsize=15)\n",
-    "            plt.show()"
+    "            plt.show()\n",
+    "step_timer.done_step(f'Plotting offset and noise maps.')"
    ]
   },
   {
@@ -397,13 +389,15 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "tags": []
+   },
    "outputs": [],
    "source": [
+    "step_timer.start()\n",
     "bad_pixels_map = dict()\n",
     "\n",
     "for mod in karabo_da:\n",
-    "\n",
     "    bad_pixels_map[mod] = np.zeros(noise_map[mod].shape, np.uint32)\n",
     "    offset_abs_threshold = np.array(offset_abs_threshold)\n",
     "\n",
@@ -417,16 +411,17 @@
     "\n",
     "    bad_pixels_map[mod][(offset_map[mod] < offset_abs_threshold[0][None, None, None, :]) | (offset_map[mod] > offset_abs_threshold[1][None, None, None, :])] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value # noqa\n",
     "\n",
-    "for g_idx in gains:\n",
-    "    for cell in range(memory_cells):\n",
-    "        bad_pixels = bad_pixels_map[mod][:, :, cell, g_idx]\n",
-    "        fn_0 = heatmapPlot(\n",
-    "            np.swapaxes(bad_pixels, 0, 1),\n",
-    "            y_label=\"Row\",\n",
-    "            x_label=\"Column\",\n",
-    "            lut_label=f\"Badpixels {g_name[g_idx]} [ADCu]\",\n",
-    "            aspect=1.,\n",
-    "            vmin=0, title=f'G{g_idx} Bad pixel map - Cell {cell:02d} - Module {mod}')"
+    "    for g_idx in gains:\n",
+    "        for cell in range(memory_cells):\n",
+    "            bad_pixels = bad_pixels_map[mod][:, :, cell, g_idx]\n",
+    "            fn_0 = heatmapPlot(\n",
+    "                np.swapaxes(bad_pixels, 0, 1),\n",
+    "                y_label=\"Row\",\n",
+    "                x_label=\"Column\",\n",
+    "                lut_label=f\"Badpixels {g_name[g_idx]} [ADCu]\",\n",
+    "                aspect=1.,\n",
+    "                vmin=0, title=f'G{g_idx} Bad pixel map - Cell {cell:02d} - Module {mod}')\n",
+    "step_timer.done_step(f'Creating bad pixels constant and plotting it for a module.')"
    ]
   },
   {
@@ -464,6 +459,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "step_timer.start()\n",
     "for mod, db_mod in zip(karabo_da, db_modules):\n",
     "    constants = {\n",
     "        'Offset': np.moveaxis(offset_map[mod], 0, 1),\n",
diff --git a/src/cal_tools/jungfraulib.py b/src/cal_tools/jungfraulib.py
index 4be66f301644fa3c740fe67006a24e9eaafb1db9..ec04f2414b8fad34482e05b78e7ff2e0b2e9b96e 100644
--- a/src/cal_tools/jungfraulib.py
+++ b/src/cal_tools/jungfraulib.py
@@ -1,18 +1,16 @@
-from typing import Tuple
-
-
-class JFCtrl():
+class JungfrauCtrl():
     def __init__(
         self,
-        run_dc: "extra_data.DataCollection",
-        karabo_id_control: str):
+        run_dc: "extra_data.DataCollection", # noqa
+        ctrl_src: str,
+    ):
         """Read slow data from RUN source.
 
        :param run_dir: EXtra-data RunDirectory DataCollection object.
        :param karabo_id_control: Karabo ID for control h5file with slow data.
        """
         self.run_dc = run_dc
-        self.ctrl_src = f"{karabo_id_control}/DET/CONTROL"
+        self.ctrl_src = ctrl_src
 
     def get_memory_cells(self):
         n_storage_cells = int(self.run_dc.get_run_value(