From 7dbe440031b03518102f40e04cf8fa28cba93cdc Mon Sep 17 00:00:00 2001 From: ahmedk <karim.ahmed@xfel.eu> Date: Fri, 18 Nov 2022 10:24:13 +0100 Subject: [PATCH] calcat_interface_lpd_epix_jf --- ...Jungfrau_Gain_Correct_and_Verify_NBC.ipynb | 157 ++- ...retrieve_constants_precorrection_NBC.ipynb | 128 +- notebooks/LPD/LPD_Correct_Fast.ipynb | 27 +- .../ePix100/Correction_ePix100_NBC.ipynb | 4 +- ...100_retrieve_constants_precorrection.ipynb | 43 +- src/cal_tools/calcat_interface.py | 1047 +++++++++++++++++ 6 files changed, 1194 insertions(+), 212 deletions(-) create mode 100644 src/cal_tools/calcat_interface.py diff --git a/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb b/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb index 89391efcd..497bb4d1b 100644 --- a/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb +++ b/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb @@ -72,10 +72,10 @@ "metadata": {}, "outputs": [], "source": [ + "import gc\n", "import multiprocessing\n", "import sys\n", "import warnings\n", - "from functools import partial\n", "from logging import warning\n", "from pathlib import Path\n", "\n", @@ -90,19 +90,16 @@ "from extra_geom import JUNGFRAUGeometry\n", "from matplotlib.colors import LogNorm\n", "\n", - "from cal_tools import h5_copy_except\n", + "from cal_tools.calcat_interface import JUNGFRAU_CalibrationData\n", "from cal_tools.jungfraulib import JungfrauCtrl\n", "from cal_tools.enums import BadPixels\n", "from cal_tools.files import DataFile\n", "from cal_tools.step_timing import StepTimer\n", "from cal_tools.tools import (\n", - " get_constant_from_db_and_time,\n", " get_dir_creation_date,\n", - " get_pdu_from_db,\n", " map_seq_files,\n", " CalibrationMetadata,\n", ")\n", - "from iCalibrationDB import Conditions, Constants\n", "\n", "warnings.filterwarnings('ignore')\n", "\n", @@ -122,6 +119,10 @@ "run_dc = RunDirectory(run_folder)\n", "instrument_src = instrument_source_template.format(karabo_id, receiver_template)\n", "\n", + "metadata = CalibrationMetadata(metadata_folder or out_folder)\n", + "# NOTE: this notebook will not overwrite calibration metadata file\n", + "const_yaml = metadata.get(\"retrieved-constants\", {})\n", + "\n", "out_folder.mkdir(parents=True, exist_ok=True)\n", "\n", "print(f\"Run is: {run}\")\n", @@ -189,7 +190,6 @@ "\n", "if mem_cells < 0:\n", " memory_cells, sc_start = ctrl_data.get_memory_cells()\n", - "\n", " mem_cells_name = \"single cell\" if memory_cells == 1 else \"burst\"\n", " print(f\"Run is in {mem_cells_name} mode.\\nStorage cell start: {sc_start:02d}\")\n", "else:\n", @@ -203,6 +203,10 @@ " gain_setting = ctrl_data.get_gain_setting()\n", " gain_mode = ctrl_data.get_gain_mode()\n", "\n", + "# A workound for correcting runs of forceswitchg1 and forceswitchg2\n", + "# of single cell mode.\n", + "memory_cells = 1 if memory_cells == 2 and \"forceswitch\" in ctrl_data.run_settings else 16\n", + "\n", "print(f\"Integration time is {integration_time} us\")\n", "print(f\"Gain setting is {gain_setting} (run settings: {ctrl_data.run_settings})\")\n", "print(f\"Gain mode is {gain_mode} ({ctrl_data.run_mode})\")\n", @@ -235,70 +239,65 @@ "metadata": {}, "outputs": [], "source": [ - "condition = Conditions.Dark.jungfrau(\n", - " memory_cells=memory_cells,\n", - " bias_voltage=bias_voltage,\n", - " integration_time=integration_time,\n", - " gain_setting=gain_setting,\n", - " gain_mode=gain_mode,\n", - ")\n", - "\n", - "empty_constants = {\n", - " \"Offset\": np.zeros((512, 1024, memory_cells, 
3), dtype=np.float32),\n", - " \"BadPixelsDark\": np.zeros((512, 1024, memory_cells, 3), dtype=np.uint32),\n", - " \"RelativeGain\": None,\n", - " \"BadPixelsFF\": None,\n", - "}\n", - "metadata = CalibrationMetadata(metadata_folder or out_folder)\n", - "# NOTE: this notebook will not overwrite calibration metadata file\n", - "const_yaml = metadata.get(\"retrieved-constants\", {})\n", + "da_to_pdu = {}\n", "\n", - "def get_constants_for_module(karabo_da: str):\n", - " \"\"\" Get calibration constants for given module of Jungfrau\n", + "if const_yaml:\n", + " const_data = dict()\n", + " for mod in karabo_da:\n", + " const_data[mod] = dict()\n", + " for cname, mdata in const_yaml[mod][\"constants\"].items():\n", + " const_data[mod][cname] = dict()\n", + " if mdata[\"creation-time\"]:\n", + " with h5py.File(mdata[\"path\"], \"r\") as cf:\n", + " const_data[mod][cname] = np.copy(\n", + " cf[f\"{mdata['dataset']}/data\"])\n", + "else:\n", + " jf_cal = JUNGFRAU_CalibrationData(\n", + " detector_name=karabo_id,\n", + " sensor_bias_voltage=bias_voltage,\n", + " event_at=creation_time,\n", + " modules=[int(x[-2:]) for x in karabo_da],\n", + " memory_cells=memory_cells,\n", + " integration_time=integration_time,\n", + " gain_setting=gain_setting,\n", + " gain_mode=gain_mode,\n", + " )\n", + " constant_names = [\n", + " \"Offset10Hz\", \"BadPixelsDark10Hz\",\n", + " \"BadPixelsFF10Hz\", \"RelativeGain10Hz\",\n", + " ]\n", + " const_data = jf_cal.ndarray_map(calibrations=constant_names)\n", + " \n", + " for mod_info in jf_cal.physical_detector_units.values():\n", + " da_to_pdu[mod_info[\"karabo_da\"]] = mod_info[\"physical_name\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def prepare_constants(module: str):\n", + " \"\"\"Prepare constant arrays.\n", "\n", + " :param module: The module name (karabo_da)\n", " :return:\n", " offset_map (offset map),\n", " mask (mask of bad pixels),\n", " gain_map (map of relative gain factors),\n", - " db_module (name of DB module),\n", - " when (dictionary: constant - creation time)\n", + " module (name of module),\n", " \"\"\"\n", + " constant_arrays = const_data[module]\n", + " try:\n", + " offset_map = constant_arrays[\"Offset10Hz\"]\n", + " mask = constant_arrays[\"BadPixelsDark10Hz\"]\n", + " except KeyError as e:\n", + " # Fail processing if dark constants are not available.\n", + " raise KeyError(f\"Dark constant is not available for correction for {module}. 
{e}\")\n", "\n", - " when = dict()\n", - " const_data = dict()\n", - "\n", - " if const_yaml:\n", - " for cname, mdata in const_yaml[karabo_da][\"constants\"].items():\n", - " const_data[cname] = dict()\n", - " when[cname] = mdata[\"creation-time\"]\n", - " if when[cname]:\n", - " with h5py.File(mdata[\"file-path\"], \"r\") as cf:\n", - " const_data[cname] = np.copy(\n", - " cf[f\"{mdata['dataset-name']}/data\"])\n", - " else:\n", - " const_data[cname] = empty_constants[cname]\n", - " else:\n", - " retrieval_function = partial(\n", - " get_constant_from_db_and_time,\n", - " karabo_id=karabo_id,\n", - " karabo_da=karabo_da,\n", - " cal_db_interface=cal_db_interface,\n", - " creation_time=creation_time,\n", - " timeout=cal_db_timeout,\n", - " print_once=False,\n", - " )\n", - " \n", - " for cname, cempty in empty_constants.items():\n", - " const_data[cname], when[cname] = retrieval_function(\n", - " condition=condition,\n", - " constant=getattr(Constants.jungfrau, cname)(),\n", - " empty_constant=cempty,\n", - " )\n", - "\n", - " offset_map = const_data[\"Offset\"]\n", - " mask = const_data[\"BadPixelsDark\"]\n", - " gain_map = const_data[\"RelativeGain\"]\n", - " mask_ff = const_data[\"BadPixelsFF\"]\n", + " gain_map = constant_arrays[\"RelativeGain10Hz\"]\n", + " mask_ff = constant_arrays[\"BadPixelsFF10Hz\"]\n", "\n", " # Combine masks\n", " if mask_ff is not None:\n", @@ -311,7 +310,7 @@ " else:\n", " offset_map = np.squeeze(offset_map)\n", " mask = np.squeeze(mask)\n", - " \n", + "\n", " # masking double size pixels\n", " mask[..., [255, 256], :, :] |= BadPixels.NON_STANDARD_SIZE\n", " mask[..., [255, 256, 511, 512, 767, 768], :] |= BadPixels.NON_STANDARD_SIZE\n", @@ -325,23 +324,23 @@ " else:\n", " gain_map = np.moveaxis(np.squeeze(gain_map), 1, 0)\n", "\n", - " return offset_map, mask, gain_map, karabo_da, when\n", + " return offset_map, mask, gain_map, module\n", "\n", "with multiprocessing.Pool() as pool:\n", - " r = pool.map(get_constants_for_module, karabo_da)\n", + " r = pool.map(prepare_constants, karabo_da)\n", "\n", "# Print timestamps for the retrieved constants.\n", "constants = {}\n", - "for offset_map, mask, gain_map, k_da, when in r:\n", - " print(f'Constants for module {k_da}:')\n", - " for const in when:\n", - " print(f' {const} injected at {when[const]}')\n", + "for offset_map, mask, gain_map, k_da in r:\n", "\n", " if gain_map is None:\n", - " print(\"No gain map found\")\n", + " warning(\"No gain map found. 
Relative gain correction is disabled.\")\n", " relative_gain = False\n", "\n", - " constants[k_da] = (offset_map, mask, gain_map)" + " constants[k_da] = (offset_map, mask, gain_map)\n", + "\n", + "const_data.clear()\n", + "gc.collect()" ] }, { @@ -714,22 +713,6 @@ ")" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "db_modules = get_pdu_from_db(\n", - " karabo_id=karabo_id,\n", - " karabo_da=karabo_da,\n", - " constant=Constants.jungfrau.Offset(),\n", - " condition=condition,\n", - " cal_db_interface=cal_db_interface,\n", - " snapshot_at=creation_time,\n", - ")" - ] - }, { "cell_type": "markdown", "metadata": {}, diff --git a/notebooks/Jungfrau/Jungfrau_retrieve_constants_precorrection_NBC.ipynb b/notebooks/Jungfrau/Jungfrau_retrieve_constants_precorrection_NBC.ipynb index 50115ad43..808ff3229 100644 --- a/notebooks/Jungfrau/Jungfrau_retrieve_constants_precorrection_NBC.ipynb +++ b/notebooks/Jungfrau/Jungfrau_retrieve_constants_precorrection_NBC.ipynb @@ -55,20 +55,16 @@ "metadata": {}, "outputs": [], "source": [ - "import datetime\n", - "from functools import partial\n", - "\n", - "import multiprocessing\n", "from extra_data import RunDirectory\n", "from pathlib import Path\n", "\n", + "from cal_tools.calcat_interface import JUNGFRAU_CalibrationData\n", "from cal_tools.jungfraulib import JungfrauCtrl\n", + "from cal_tools.step_timing import StepTimer\n", "from cal_tools.tools import (\n", " get_dir_creation_date,\n", - " get_from_db,\n", " CalibrationMetadata,\n", - ")\n", - "from iCalibrationDB import Conditions, Constants" + ")" ] }, { @@ -79,7 +75,11 @@ "source": [ "in_folder = Path(in_folder)\n", "out_folder = Path(out_folder)\n", + "\n", "metadata = CalibrationMetadata(metadata_folder or out_folder)\n", + "# Constant paths & timestamps are saved under retrieved-constants in calibration_metadata.yml\n", + "retrieved_constants = metadata[\"retrieved-constants\"] = dict()\n", + "\n", "run_folder = in_folder / f'r{run:04d}'\n", "run_dc = RunDirectory(run_folder)\n", "\n", @@ -105,7 +105,6 @@ "\n", "if mem_cells < 0:\n", " memory_cells, sc_start = ctrl_data.get_memory_cells()\n", - "\n", " mem_cells_name = \"single cell\" if memory_cells == 1 else \"burst\"\n", " print(f\"Run is in {mem_cells_name} mode.\\nStorage cell start: {sc_start:02d}\")\n", "else:\n", @@ -132,57 +131,7 @@ "metadata": {}, "outputs": [], "source": [ - "condition = Conditions.Dark.jungfrau(\n", - " memory_cells=memory_cells,\n", - " bias_voltage=bias_voltage,\n", - " integration_time=integration_time,\n", - " gain_setting=gain_setting,\n", - " gain_mode=gain_mode,\n", - ")\n", - "\n", - "def get_constants_for_module(mod: str):\n", - " \"\"\"Get calibration constants for given module for Jungfrau.\"\"\"\n", - " retrieval_function = partial(\n", - " get_from_db,\n", - " karabo_id=karabo_id,\n", - " karabo_da=mod,\n", - " cal_db_interface=cal_db_interface,\n", - " creation_time=creation_time,\n", - " timeout=cal_db_timeout,\n", - " verbosity=0,\n", - " meta_only=True,\n", - " load_data=False,\n", - " empty_constant=None\n", - " )\n", - "\n", - " mdata_dict = dict()\n", - " mdata_dict[\"constants\"] = dict()\n", - " constants = [\n", - " \"Offset\", \"BadPixelsDark\",\n", - " \"RelativeGain\", \"BadPixelsFF\",\n", - " ]\n", - " for cname in constants:\n", - " mdata_dict[\"constants\"][cname] = dict()\n", - " if not relative_gain and cname in [\"BadPixelsFF\", \"RelativeGain\"]:\n", - " continue\n", - " _, mdata = retrieval_function(\n", - " 
condition=condition,\n", - " constant=getattr(Constants.jungfrau, cname)(),\n", - " )\n", - " mdata_const = mdata.calibration_constant_version\n", - " const_mdata = mdata_dict[\"constants\"][cname]\n", - " # check if constant was successfully retrieved.\n", - " if mdata.comm_db_success:\n", - " const_mdata[\"file-path\"] = (\n", - " f\"{mdata_const.hdf5path}\" f\"{mdata_const.filename}\"\n", - " )\n", - " const_mdata[\"dataset-name\"] = mdata_const.h5path\n", - " const_mdata[\"creation-time\"] = f\"{mdata_const.begin_at}\"\n", - " mdata_dict[\"physical-detector-unit\"] = mdata_const.device_name\n", - " else:\n", - " const_mdata[\"file-path\"] = None\n", - " const_mdata[\"creation-time\"] = None\n", - " return mdata_dict, mod" + "step_timer = StepTimer()" ] }, { @@ -191,20 +140,42 @@ "metadata": {}, "outputs": [], "source": [ - "# Constant paths are saved under retrieved-constants in calibration_metadata.yml\n", - "retrieved_constants = metadata.setdefault(\"retrieved-constants\", {})\n", - "# Avoid retrieving constants for available modules in calibration_metadata.yml\n", - "# This is used during reproducability.\n", - "query_karabo_da = []\n", - "for mod in karabo_da:\n", - " if mod in retrieved_constants.keys():\n", - " print(f\"Constant for {mod} already in \"\n", - " \"calibration_metadata.yml, won't query again.\")\n", - " continue\n", - " query_karabo_da.append(mod)\n", - "\n", - "with multiprocessing.Pool() as pool:\n", - " results = pool.map(get_constants_for_module, query_karabo_da)" + "step_timer.start()\n", + "jf_cal = JUNGFRAU_CalibrationData(\n", + " detector_name=karabo_id,\n", + " sensor_bias_voltage=bias_voltage,\n", + " event_at=creation_time,\n", + " modules=None,\n", + " memory_cells=memory_cells,\n", + " integration_time=integration_time,\n", + " gain_setting=gain_setting,\n", + " gain_mode=gain_mode,\n", + ")\n", + "constant_names = [\n", + " \"Offset10Hz\", \"BadPixelsDark10Hz\",\n", + " \"BadPixelsFF10Hz\", \"RelativeGain10Hz\",\n", + "]\n", + "\n", + "mdata_dict = {\"constants\": dict()}\n", + "\n", + "# Don't raise errors for now if relative_gain is on\n", + "# and constant was not retrieved.\n", + "raise_error = False if relative_gain else True\n", + "jf_metadata = jf_cal.metadata(constant_names, raise_error=raise_error)\n", + "\n", + "for mod, ccv_dict in jf_metadata.items():\n", + " mod_dict = retrieved_constants.setdefault(mod, dict())\n", + " const_dict = mod_dict.setdefault(\"constants\", dict())\n", + " if [\"Offset10Hz\", \"BadPixelsDark10Hz\"] not in ccv_dict.keys():\n", + " # Fail processing if dark constants are not available.\n", + " raise KeyError(f\"Dark constant is not available for correction for {module}. 
{e}\")\n", + " for cname, ccv_metadata in ccv_dict.items():\n", + " const_dict[cname] = {\n", + " \"path\": str(jf_cal.caldb_root / ccv_metadata[\"path\"]),\n", + " \"dataset\": ccv_metadata[\"dataset\"],\n", + " \"creation-time\": ccv_metadata[\"begin_validity_at\"],\n", + " }\n", + " mod_dict[\"physical-name\"] = ccv_metadata[\"physical_name\"]" ] }, { @@ -214,19 +185,18 @@ "outputs": [], "source": [ "timestamps = dict()\n", - "for md_dict, mod in results:\n", - " retrieved_constants[mod] = md_dict\n", "\n", + "for mod in karabo_da:\n", " module_timestamps = timestamps[mod] = dict()\n", " module_constants = retrieved_constants[mod]\n", "\n", " print(f\"Module: {mod}:\")\n", " for cname, mdata in module_constants[\"constants\"].items():\n", - " if hasattr(mdata[\"creation-time\"], 'strftime'):\n", - " mdata[\"creation-time\"] = mdata[\"creation-time\"].strftime('%y-%m-%d %H:%M')\n", - " print(f'{cname:.<12s}', mdata[\"creation-time\"])\n", + " if hasattr(mdata[\"creation-time\"], \"strftime\"):\n", + " mdata[\"creation-time\"] = mdata[\"creation-time\"].strftime(\"%y-%m-%d %H:%M\")\n", + " print(f\"\\t{cname:.<12s}\", mdata[\"creation-time\"])\n", "\n", - " for cname in [\"Offset\", \"BadPixelsDark\", \"RelativeGain\", \"BadPixelsFF\"]:\n", + " for cname in constant_names:\n", " if cname in module_constants[\"constants\"]:\n", " module_timestamps[cname] = module_constants[\"constants\"][cname][\"creation-time\"]\n", " else:\n", diff --git a/notebooks/LPD/LPD_Correct_Fast.ipynb b/notebooks/LPD/LPD_Correct_Fast.ipynb index be56e6b66..fec39227a 100644 --- a/notebooks/LPD/LPD_Correct_Fast.ipynb +++ b/notebooks/LPD/LPD_Correct_Fast.ipynb @@ -198,26 +198,6 @@ "# Obtain and prepare calibration constants" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Connect to CalCat.\n", - "calcat_config = restful_config['calcat']\n", - "client = CalibrationClient(\n", - " base_api_url=calcat_config['base-api-url'],\n", - " use_oauth2=calcat_config['use-oauth2'],\n", - " client_id=calcat_config['user-id'],\n", - " client_secret=calcat_config['user-secret'],\n", - " user_email=calcat_config['user-email'],\n", - " token_url=calcat_config['token-url'],\n", - " refresh_url=calcat_config['refresh-url'],\n", - " auth_url=calcat_config['auth-url'],\n", - " scope='')" - ] - }, { "cell_type": "code", "execution_count": null, @@ -232,7 +212,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "const_data = {}\n", @@ -554,8 +536,7 @@ "ExecuteTime": { "end_time": "2018-11-13T18:24:57.547563Z", "start_time": "2018-11-13T18:24:56.995005Z" - }, - "scrolled": false + } }, "outputs": [], "source": [ diff --git a/notebooks/ePix100/Correction_ePix100_NBC.ipynb b/notebooks/ePix100/Correction_ePix100_NBC.ipynb index ca19f1bdb..829a24b3d 100644 --- a/notebooks/ePix100/Correction_ePix100_NBC.ipynb +++ b/notebooks/ePix100/Correction_ePix100_NBC.ipynb @@ -278,7 +278,7 @@ " in_vacuum=in_vacuum,\n", " source_energy=gain_photon_energy,\n", " event_at=creation_time,\n", - " snapshot_at=None, # creation_time, # TODO:.. 
why None works\n", + " snapshot_at=None,\n", " )\n", "constant_names = [\"OffsetEPix100\", \"NoiseEPix100\"]\n", "if relative_gain:\n", @@ -291,7 +291,7 @@ " const_data = dict()\n", " for cname in constant_names:\n", " try:\n", - " const_data[cname] = epix_cal.ndarray(module=1, calibration=cname) # TODO: what is this module number?\n", + " const_data[cname] = epix_cal.ndarray(module=1, calibration=cname)\n", " except CalCatError as e:\n", " if cname != \"RelativeGainEPix100\":\n", " raise\n", diff --git a/notebooks/ePix100/ePix100_retrieve_constants_precorrection.ipynb b/notebooks/ePix100/ePix100_retrieve_constants_precorrection.ipynb index 4c118dc56..67ce2eb00 100644 --- a/notebooks/ePix100/ePix100_retrieve_constants_precorrection.ipynb +++ b/notebooks/ePix100/ePix100_retrieve_constants_precorrection.ipynb @@ -81,6 +81,7 @@ "# if it already contains details about which constants to use.\n", "retrieved_constants = metadata.setdefault(\"retrieved-constants\", {})\n", "\n", + "# TODO decide if this is required or not!\n", "if karabo_da in retrieved_constants:\n", " print(\n", " f\"Constant for {karabo_da} already in {metadata.filename}, won't query again.\"\n", @@ -140,7 +141,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "epix_cal = EPIX100_CalibrationData(\n", @@ -154,31 +157,29 @@ " snapshot_at=None, # creation_time, # TODO:.. why None works\n", " )\n", "\n", - "const_data = dict()\n", - "mdata_dict = dict()\n", - "mdata_dict[\"constants\"] = dict()\n", + "mdata_dict = {\"constants\": dict()}\n", "\n", "constant_names = [\"OffsetEPix100\", \"NoiseEPix100\"]\n", "if relative_gain:\n", " constant_names += [\"RelativeGainEPix100\"]\n", "\n", - "for cname in constant_names:\n", - " # Retrieve metadata for all epix100 constants.\n", - " try:\n", - " epix_metadata = epix_cal.metadata([cname])\n", - " for cname, ccv_metadata in list(epix_metadata.values())[0].items():\n", - " mdata_dict[\"constants\"][cname] = {\n", - " \"path\": str(epix_cal.caldb_root / ccv_metadata[\"path\"]),\n", - " \"dataset\": ccv_metadata[\"dataset\"],\n", - " \"creation-time\": ccv_metadata[\"begin_validity_at\"],\n", - " }\n", - " print(f\"Retrieved {cname} with creation-time: {ccv_metadata['begin_validity_at']}\")\n", - " except CalCatError:\n", - " if cname == \"RelativeGainEPix100\":\n", - " warning(\"RelativeGainEPix100 is not found in CALCAT.\")\n", - " else:\n", - " raise\n", - "mdata_dict[\"physical-detector-unit\"] = ccv_metadata[\"physical_name\"]\n", + "# Retrieve metadata for all epix100 constants.\n", + "# Error out if dark constants are not retrieved.\n", + "raise_error = False if relative_gain else True\n", + "epix_metadata = epix_cal.metadata(constant_names, raise_error=raise_error)\n", + "\n", + "for cname, ccv_metadata in list(epix_metadata.values())[0].items():\n", + " mdata_dict[\"constants\"][cname] = {\n", + " \"path\": str(epix_cal.caldb_root / ccv_metadata[\"path\"]),\n", + " \"dataset\": ccv_metadata[\"dataset\"],\n", + " \"creation-time\": ccv_metadata[\"begin_validity_at\"],\n", + " }\n", + " print(f\"Retrieved {cname} with creation-time: {ccv_metadata['begin_validity_at']}\")\n", + "\n", + "if relative_gain and \"RelativeGainEPix100\" not in mdata_dict[\"constants\"]:\n", + " warning(\"RelativeGainEPix100 is not found in CALCAT.\")\n", + "\n", + "mdata_dict[\"physical-name\"] = ccv_metadata[\"physical_name\"]\n", "retrieved_constants[karabo_da] = mdata_dict\n", "metadata.save()\n", "print(f\"Stored retrieved 
constants in {metadata.filename}\")" diff --git a/src/cal_tools/calcat_interface.py b/src/cal_tools/calcat_interface.py new file mode 100644 index 000000000..d72e0a2ec --- /dev/null +++ b/src/cal_tools/calcat_interface.py @@ -0,0 +1,1047 @@ + +"""Interfaces to calibration constant data.""" + +import multiprocessing +import re +import socket +from datetime import date, datetime, time, timezone +from functools import lru_cache +from os import getenv +from pathlib import Path +#from sys import maxsize +from weakref import WeakKeyDictionary +import pasha as psh + +import h5py +import numpy as np +from calibration_client import CalibrationClient +from calibration_client.modules import ( + Calibration, + CalibrationConstantVersion, + Detector, + Parameter, + PhysicalDetectorUnit, +) +from logging import warning + +__all__ = [ + 'CalCatError', + 'CalibrationData', + 'AGIPD_CalibrationData', + 'LPD_CalibrationData', + 'DSSC_CalibrationData', + 'JUNGFRAU_CalibrationData', + 'PNCCD_CalibrationData', + 'EPIX100_CalibrationData', + 'Gotthard2_CalibrationData' +] + + +class CCVMetadata(dict): + """Dictionary for CCV metadata. + + Identical to a regular dict, but with a custom pandas-based + string representation to be easier to read. + """ + + def __str__(self): + """Pretty-print CCV metadata using pandas.""" + + import pandas as pd + + res = {pdu_idx: {calibration: ccv_data['ccv_name'] + for calibration, ccv_data in pdu_data.items()} + for pdu_idx, pdu_data in self.items()} + + return str(pd.DataFrame.from_dict(res, orient='index')) + + +class CalCatError(Exception): + """CalCat API error.""" + + def __init__(self, response): + super().__init__(response['info']) + + +class ClientWrapper(type): + """Metaclass to wrap each calibration_client exactly once.""" + + _clients = WeakKeyDictionary() + + def __call__(cls, client): + instance = cls._clients.get(client, None) + + if instance is None: + instance = cls._clients[client] = type.__call__(cls, client) + + return instance + + +class CalCatApi(metaclass=ClientWrapper): + """Internal calibration_client wrapper.""" + + get_detector_keys = [ + 'id', 'name', 'identifier', 'karabo_name', + 'karabo_id_control', 'description'] + get_pdu_keys = [ + 'id', 'physical_name', 'karabo_da', 'virtual_device_name', + 'detector_type_id', 'detector_id', 'description'] + + def __init__(self, client): + self.client = client + + @classmethod + def format_time(cls, dt): + """Parse different ways to specify time to CalCat.""" + + if isinstance(dt, datetime): + return dt.astimezone(timezone.utc).strftime('%Y%m%dT%H%M%S%Z') + elif isinstance(dt, date): + return cls.format_time(datetime.combine(dt, time())) + + return dt + + def format_cond(self, condition): + """Encode operating condition to CalCat API format. + + Args: + caldata (CalibrationData): Calibration data instance used to + interface with database. + + Returns: + (dict) Operating condition for use in CalCat API. 
+ """ + + return {'parameters_conditions_attributes': [ + {'parameter_id': self.parameter_id(k), 'value': str(v)} + for k, v in condition.items()]} + + @lru_cache() + def detector(self, detector_name): + """Detector metadata.""" + + resp_detector = Detector.get_by_identifier( + self.client, detector_name) + + if not resp_detector['success']: + raise CalCatError(resp_detector) + + return {k: resp_detector['data'][k] for k in self.get_detector_keys} + + @lru_cache() + def physical_detector_units(self, detector_id, snapshot_at): + """Physical detector unit metadata.""" + + resp_pdus = PhysicalDetectorUnit.get_all_by_detector( + self.client, detector_id, self.format_time(snapshot_at)) + + if not resp_pdus['success']: + raise CalCatError(resp_pdus) + return { + pdu['karabo_da']: { + k: pdu[k] for k in self.get_pdu_keys} + for pdu in resp_pdus['data'] + } + + @lru_cache() + def calibration_id(self, calibration_name): + """ID for a calibration in CalCat.""" + + resp_calibration = Calibration.get_by_name( + self.client, calibration_name) + + if not resp_calibration['success']: + raise CalCatError(resp_calibration) + + return resp_calibration['data']['id'] + + @lru_cache() + def parameter_id(self, param_name): + """ID for an operating condition parameter in CalCat.""" + + resp_parameter = Parameter.get_by_name(self.client, param_name) + + if not resp_parameter['success']: + raise CalCatError(resp_parameter) + + return resp_parameter['data']['id'] + + def closest_ccv_by_time_by_condition( + self, detector_name, calibrations, condition, + modules=None, event_at=None, snapshot_at=None, metadata=None, + ): + """Query bulk CCV metadata from CalCat. + + This method uses the /get_closest_version_by_detector API + to query matching CCVs for PDUs connected to a detector instance + in one go. In particular, it automatically includes the PDU as + an operating condition parameter to allow for a single global + condition rather than PDU-specific ones. + + Args: + detector_name (str): Detector instance name. + calibrations (Iterable of str): Calibrations to query + metadata for. + condition (dict): Mapping of parameter name to value. + modules (Collection of int or None): List of module numbers + or None for all (default). + event_at (datetime, date, str or None): Time at which the + CCVs should have been valid or None for now (default). + snapshot_at (datetime, date, str or None): Time of database + state to look at or None for now (default). + metadata (dict or None): Mapping to fill for results or + None for a new dictionary (default). + + Returns: + (dict) Nested mapping of module number to calibrations to + CCV metadata. Identical to passed metadata argument if + passed. + """ + event_at = self.format_time(event_at) + snapshot_at = self.format_time(snapshot_at) + + # Map aggregator to module number. + # da_to_modno = { + # data['karabo_da']: modno for modno, data in + # self.physical_detector_units( + # self.detector(detector_name)['id'], snapshot_at).items() + # if not modules or modno in modules} + + if metadata is None: + metadata = CCVMetadata() + + if not calibrations: + # Make sure there are at least empty dictionaries for each + # module. + for mod in modules.values(): + metadata.setdefault(mod, dict()) + return metadata + + # Map calibration ID to calibratio name. 
+ cal_id_map = {self.calibration_id(calibration): calibration + for calibration in calibrations} + calibration_ids = list(cal_id_map.keys()) + + # The API call supports a single module or all modules, as the + # performance increase is only minor in between. Hence, all + # modules are queried if more than one is selected and filtered + # afterwards, if necessary. + karabo_da = next(iter(modules)) if modules is not None and len(modules) == 1 else '', # noqa + resp_versions = CalibrationConstantVersion.get_closest_by_time_by_detector_conditions( # noqa + self.client, detector_name, calibration_ids, + self.format_cond(condition), + karabo_da=karabo_da, + event_at=event_at, snapshot_at=snapshot_at) + + if not resp_versions['success']: + raise CalCatError(resp_versions) + + for ccv in resp_versions['data']: + try: + mod = ccv['physical_detector_unit']['karabo_da'] + except KeyError: + # Not included in our modules + continue + + cc = ccv['calibration_constant'] + metadata.setdefault(mod, dict())[ + cal_id_map[cc['calibration_id']]] = dict( + cc_id=cc['id'], + cc_name=cc['name'], + condition_id=cc['condition_id'], + ccv_id=ccv['id'], + ccv_name=ccv['name'], + path=Path(ccv['path_to_file']) / ccv['file_name'], + dataset=ccv['data_set_name'], + begin_validity_at=ccv['begin_validity_at'], + end_validity_at=ccv['end_validity_at'], + raw_data_location=ccv['raw_data_location'], + start_idx=ccv['start_idx'], + end_idx=ccv['end_idx'], + physical_name=ccv['physical_detector_unit']['physical_name'], + ) + + return metadata + + +class CalibrationData: + """Calibration constants data for detectors. + + European XFEL uses a web app and database to store records about the + characterization of detectors and the data necessary to their + correction and analysis, collectively called CalCat. The default + installation is available at https://in.xfel.eu/calibration. + + A detector is identified by a name (e.g. SPB_DET_AGIPD1M-1) and + consists of one or more detector modules. The modules are a virtual + concept and may be identified by their number (e.g. 3), the Karabo + data aggregator in EuXFEL's DAQ system they're connected to + (e.g. AGIPD05) or a virtual device name describing their relative + location (e.g. Q3M2). + + A detector module is mapped to an actual physical detector unit + (PDU), which may be changed in case of a physical replacement. When + characterization data is inserted into the database, it is attached + to the PDU currently mapped to a module and not the virtual module + itself. + + Characterization data is organized by its type just called + calibration (e.g. Offset or SlopesFF) and the operating condition it + was taken in, which is a mapping of parameter keys to their values + (e.g. Sensor bias voltage or integration time). Any unique + combination of calibration (type) and operating condition is a + calibration constant (CC). Any individual measurement of a CC is + called a calibration constant version (CCV). There may be many CCVs + for any given CC. + + Note that while an authenticated connection to CalCat is possible + from anywhere, the actual calibration data referred to is only + available on the European XFEL computing infrastructure. If no + explicit credentials are supplied, an anonymous read-only connection + is established that is also only available from there. + """ + + calibrations = set() + default_client = None + + def __init__(self, detector_name, modules=None, client=None, event_at=None, + snapshot_at=None): + """Initialize a new CalibrationData object. 
+ + If no calibration-client object is passed or has been created + using Calibration.new_client, an anonymous read-only connection + is established automatically. + + Args: + detector_name (str): Name of detector in CalCat. + modules (Iterable of int, optional): Module numbers to + query for or None for all available (default). + client (CalibrationClient, optional): Client for CalCat + communication, global one by default. + event_at (datetime, date, str or None): Default time at which the + CCVs should have been valid, now if omitted + snapshot_at (datetime, date, str or None): Default time of + database state to look at, now if omitted. + **condition_params: Operating condition parameters defined + on an instance level. + """ + + self.detector_name = detector_name + self.modules = modules + self.event_at = event_at + self.snapshot_at = snapshot_at + + if client is None: + client = self.__class__.default_client or \ + self.__class__.new_anonymous_client() + + self._api = CalCatApi(client) + + @staticmethod + def new_anonymous_client(): + """Create an anonymous calibration-client object. + + This connection allows read-only access to CalCat using a + facility-proveded OAuth reverse proxy. This is only accessible + on the European XFEL computing infrastructure. + """ + + print('Access to CalCat via the XFEL OAuth proxy is currently ' + 'considered in testing, please report any issues to ' + 'da-support@xfel.eu') + return CalibrationData.new_client( + None, None, None, use_oauth2=False, + base_url='http://exflcalproxy:8080/') + + @staticmethod + def new_client( + client_id, client_secret, user_email, installation='', + base_url='https://in.xfel.eu/{}calibration', **kwargs, + ): + """Create a new calibration-client object. + + The client object is saved as a class property and is + automatically to any future CalibrationData objects created, if + no other client is passed explicitly. + + Arguments: + client_id (str): Client ID. + client_secret (str): Client secret. + user_email (str): LDAP user email. + installation (str, optional): Prefix for CalCat + installation, production system by default. + base_url (str, optional): URL template for CalCat + installation, public European XFEL by default. + Any further keyword arguments are passed on to + CalibrationClient.__init__(). + + Returns: + (CalibrationClient) CalCat client. + """ + + base_url = base_url.format(f'{installation}_' if installation else '') + + # Note this is not a classmethod and we're modifying + # CalibrationData directly to use the same object across all + # detector-specific implementations. + CalibrationData.default_client = CalibrationClient( + client_id=client_id, + client_secret=client_secret, + user_email=user_email, + base_api_url=f'{base_url}/api/', + token_url=f'{base_url}/oauth/token', + refresh_url=f'{base_url}/oauth/token', + auth_url=f'{base_url}/oauth/authorize', + scope='', + **kwargs + ) + return CalibrationData.default_client + + @property + def caldb_root(self): + """Root directory for calibration constant data. + + Returns: + (Path or None) Location of caldb store or + None if not available. 
+ """ + + if not hasattr(CalibrationData, '_caldb_root'): + if getenv('SASE'): + # ONC + CalibrationData._caldb_root = Path('/common/cal/caldb_store') + elif re.match(r'^max-(.+)\.desy\.de$', socket.getfqdn()): + # Maxwell + CalibrationData._caldb_root = Path( + '/gpfs/exfel/d/cal/caldb_store') + else: + # Probably unavailable + CalibrationData._caldb_root = None + + return CalibrationData._caldb_root + + @property + def client(self): + return self._api.client + + @property + def detector(self): + return self._api.detector(self.detector_name) + + @property + def physical_detector_units(self): + return self._api.physical_detector_units( + self.detector['id'], self.snapshot_at) + + @property + def condition(self): + return self._build_condition(self.parameters) + + def replace(self, **new_kwargs): + """Create a new CalibrationData object with altered values.""" + + keys = { + 'detector_name', 'modules', 'client', 'event_at', 'snapshot_at' + } | { + self._simplify_parameter_name(name)for name in self.parameters + } + + kwargs = {key: getattr(self, key) for key in keys} + kwargs.update(new_kwargs) + + return self.__class__(**kwargs) + + def metadata( + self, calibrations=None, event_at=None, snapshot_at=None, + raise_error=True, + ): + """Query CCV metadata for calibrations, conditions and time. + + Args: + calibrations (Iterable of str, optional): Calibrations to + query metadata for, may be None to retrieve all. + event_at (datetime, date, str or None): Time at which the + CCVs should have been valid, now or default value passed at + initialization time if omitted. + snapshot_at (datetime, date, str or None): Time of database + state to look at, now or default value passed at + initialization time if omitted. + + Returns: + (CCVMetadata) CCV metadata result. + """ + + metadata = CCVMetadata() + try: + self._api.closest_ccv_by_time_by_condition( + self.detector_name, calibrations or self.calibrations, + self.condition, self.modules, + event_at or self.event_at, snapshot_at or self.snapshot_at, + metadata) + except CalCatError as e: + if not raise_error: + warning(str(e)) + else: + raise + return metadata + + def ndarray( + self, module, calibration, metadata=None, raise_error=True, empty_array=None, + ): + """Load CCV data as ndarray. + + Args: + module (int): Module number + calibration (str): Calibration constant. + metadata (CCVMetadata, optional): CCV metadata to load + constant data for, may be None to query metadata. + + Returns: + (ndarray): CCV data + """ + if self.caldb_root is None: + raise RuntimeError('calibration database store unavailable') + + if self.modules and module not in self.modules: + raise ValueError('module not part of this calibration data') + + if metadata is None: + metadata = self.metadata([calibration], raise_error=raise_error) + if metadata: # In case a constant was not found, empty dict. + return self._load_ccv_data(metadata, module, calibration) + else: + return empty_array + + def _load_ccv_data(self, metadata, module, calibration): + row = metadata[module][calibration] + + with h5py.File(self.caldb_root / row['path'], 'r') as f: + return f[row['dataset'] + '/data'][()] + + def ndarray_map( + self, calibrations=None, metadata=None, processes=None, + ): + """Load all CCV data in a nested map of ndarrays. + + Args: + calibrations (Iterable of str, optional): Calibration constants + or None for all available (default). + metadata (CCVMetadata, optional): CCV metadata to load constant + for or None to query metadata automatically (default). 
+ processes (Int): + Returns: + (dict of dict of ndarray): CCV data by module number and + calibration constant name. + {module: {calibration: ndarray}} + """ + from functools import partial + if self.caldb_root is None: + raise RuntimeError('calibration database store unavailable') + + if metadata is None: + metadata = self.metadata(calibrations) + + modno_cname = [ + (modno, cname) for modno, md in metadata.items() for cname in md.keys()] + + load_ccv_data = partial(self._load_ccv_data, metadata) + with multiprocessing.pool.ThreadPool() as pool: + r = pool.starmap(load_ccv_data, modno_cname) + + arr_map = {} + for (mod, cname), val in zip(modno_cname, r): + arr_map.setdefault(mod, {})[cname] = val + + return arr_map + + def _build_condition(self, parameters): + cond = dict() + + for db_name in parameters: + value = getattr(self, self._simplify_parameter_name(db_name), None) + + if value is not None: + cond[db_name] = value + + return cond + + @classmethod + def _from_multimod_detector_data( + cls, component_cls, data, detector, + modules, client, + ): + if isinstance(detector, component_cls): + detector_name = detector.detector_name + elif detector is None: + detector_name = component_cls._find_detector_name(data) + elif isinstance(detector, str): + detector_name = detector + else: + raise ValueError(f'detector may be an object of type ' + f'{type(cls)}, a string or None') + + source_to_modno = dict(component_cls._source_matches( + data, detector_name)) + detector_sources = [data[source] for source in source_to_modno.keys()] + + if modules is None: + modules = sorted(source_to_modno.values()) + + creation_date = cls._determine_data_creation_date(data) + + # Create new CalibrationData object. + caldata = cls( + detector_name, modules, client, + creation_date, creation_date, + ) + + caldata.memory_cells = component_cls._get_memory_cell_count( + detector_sources[0]) + caldata.pixels_x = component_cls.module_shape[1] + caldata.pixels_y = component_cls.module_shape[0] + + return caldata, detector_sources + + @staticmethod + def _simplify_parameter_name(name): + """Convert parameter names to valid Python symbols.""" + + return name.lower().replace(' ', '_') + + @staticmethod + def _determine_data_creation_date(data): + """Determine data creation date.""" + + assert data.files, 'data contains no files' + + try: + creation_date = data.files[0].metadata()['creationDate'] + except KeyError: + from warnings import warn + warn('Last file modification time used as creation date for old ' + 'DAQ file format may be unreliable') + + return datetime.fromtimestamp( + Path(data.files[0].filename).lstat().st_mtime) + else: + if not data.is_single_run: + from warnings import warn + warn('Sample file used to determine creation date for multi ' + 'run data') + + return creation_date + + +class SplitConditionCalibrationData(CalibrationData): + """Calibration data with dark and illuminated conditions. + + Some detectors of this kind distinguish between two different + operating conditions depending on whether photons illuminate the + detector or not, correspondingly called the illuminated and dark + conditions. Typically the illuminated condition is a superset of the + dark condition. + + Not all implementations for semiconductor detectors inherit from + this type, but only those that make this distinction such as AGIPD + and LPD. 
+ """ + + dark_calibrations = set() + illuminated_calibrations = set() + dark_parameters = list() + illuminated_parameters = list() + + @property + def calibrations(self): + """Compatibility with CalibrationData.""" + + return self.dark_calibrations | self.illuminated_calibrations + + @property + def parameters(self): + """Compatibility with CalibrationData.""" + + # Removes likely duplicates while preserving order. + return list(dict.fromkeys( + self.dark_parameters + self.illuminated_parameters)) + + @property + def condition(self): + """Compatibility with CalibrationData.""" + + cond = dict() + cond.update(self.dark_condition) + cond.update(self.illuminated_condition) + + return cond + + @property + def dark_condition(self): + return self._build_condition(self.dark_parameters) + + @property + def illuminated_condition(self): + return self._build_condition(self.illuminated_parameters) + + def metadata( + self, calibrations=None, event_at=None, snapshot_at=None, raise_error=True, + ): + """Query CCV metadata for calibrations, conditions and time. + + Args: + calibrations (Iterable of str, optional): Calibrations to + query metadata for, may be None to retrieve all. + event_at (datetime, date, str or None): Time at which the + CCVs should have been valid, now or default value passed at + initialization time if omitted. + snapshot_at (datetime, date, str or None): Time of database + state to look at, now or default value passed at + initialization time if omitted. + + Returns: + (CCVMetadata) CCV metadata result. + """ + + if calibrations is None: + calibrations = ( + self.dark_calibrations | self.illuminated_calibrations) + + metadata = CCVMetadata() + + dark_calibrations = self.dark_calibrations & set(calibrations) + try: + if dark_calibrations: + self._api.closest_ccv_by_time_by_condition( + self.detector_name, dark_calibrations, + self.dark_condition, self.modules, + event_at or self.event_at, snapshot_at or self.snapshot_at, + metadata) + + illum_calibrations = self.illuminated_calibrations & set(calibrations) + if illum_calibrations: + self._api.closest_ccv_by_time_by_condition( + self.detector_name, illum_calibrations, + self.illuminated_condition, self.modules, + event_at or self.event_at, snapshot_at or self.snapshot_at, + metadata) + except CalCatError as e: + print(e) + if not raise_error: + warning(str(e)) + else: + raise + return metadata + + +class AGIPD_CalibrationData(SplitConditionCalibrationData): + """Calibration data for the AGIPD detector.""" + + dark_calibrations = { + 'Offset', + 'Noise', + 'ThresholdsDark', + 'BadPixelsDark', + 'BadPixelsPC', + 'SlopesPC', + } + illuminated_calibrations = { + 'BadPixelsFF', + 'SlopesFF', + } + + dark_parameters = [ + 'Sensor Bias Voltage', + 'Pixels X', + 'Pixels Y', + 'Memory cells', + 'Acquisition rate', + 'Gain setting', + 'Gain mode', + 'Integration time', + ] + illuminated_parameters = dark_parameters + ['Source energy'] + + def __init__( + self, detector_name, sensor_bias_voltage, + memory_cells, acquisition_rate, + modules=None, client=None, + event_at=None, snapshot_at=None, + gain_setting=None, gain_mode=None, + integration_time=12, source_energy=9.2, + pixels_x=512, pixels_y=128, + ): + super().__init__( + detector_name, modules, client, event_at, snapshot_at, + ) + + self.sensor_bias_voltage = sensor_bias_voltage + self.memory_cells = memory_cells + self.pixels_x = pixels_x + self.pixels_y = pixels_y + self.acquisition_rate = acquisition_rate + self.gain_setting = gain_setting + self.gain_mode = gain_mode + 
self.integration_time = integration_time + self.source_energy = source_energy + + def _build_condition(self, parameters): + cond = super()._build_condition(parameters) + + # Fix-up some database quirks. + if int(cond.get('Gain mode', -1)) == 0: + del cond['Gain mode'] + + if int(cond.get('Integration time', -1)) == 12: + del cond['Integration time'] + + return cond + + +class LPD_CalibrationData(SplitConditionCalibrationData): + """Calibration data for the LPD detector.""" + + dark_calibrations = { + 'Offset', + 'Noise', + 'BadPixelsDark', + } + illuminated_calibrations = { + 'RelativeGain', + 'GainAmpMap', + 'FFMap', + 'BadPixelsFF', + } + + dark_parameters = [ + 'Sensor Bias Voltage', + 'Memory cells', + 'Pixels X', + 'Pixels Y', + 'Feedback capacitor', + ] + illuminated_parameters = dark_parameters + ['Source Energy', 'category'] + + def __init__( + self, detector_name, sensor_bias_voltage, + memory_cells, feedback_capacitor=5.0, + pixels_x=256, pixels_y=256, + source_energy=9.2, category=1, + modules=None, client=None, + event_at=None, snapshot_at=None, + ): + super().__init__( + detector_name, modules, client, event_at, snapshot_at, + ) + + self.sensor_bias_voltage = sensor_bias_voltage + self.memory_cells = memory_cells + self.pixels_x = pixels_x + self.pixels_y = pixels_y + self.feedback_capacitor = feedback_capacitor + self.source_energy = source_energy + self.category = category + + +class DSSC_CalibrationData(CalibrationData): + """Calibration data for the DSSC detetor.""" + + calibrations = { + 'Offset', + 'Noise', + } + parameters = [ + 'Sensor Bias Voltage', + 'Memory cells', + 'Pixels X', + 'Pixels Y', + 'Pulse id checksum', + 'Acquisition rate', + 'Target gain', + 'Encoded gain', + ] + + def __init__( + self, detector_name, + sensor_bias_voltage, memory_cells, + pulse_id_checksum=None, acquisition_rate=None, + target_gain=None, encoded_gain=None, + pixels_x=512, pixels_y=128, + modules=None, client=None, + event_at=None, snapshot_at=None, + ): + super().__init__( + detector_name, modules, client, event_at, snapshot_at, + ) + + self.sensor_bias_voltage = sensor_bias_voltage + self.memory_cells = memory_cells + self.pixels_x = pixels_x + self.pixels_y = pixels_y + self.pulse_id_checksum = pulse_id_checksum + self.acquisition_rate = acquisition_rate + self.target_gain = target_gain + self.encoded_gain = encoded_gain + + +class JUNGFRAU_CalibrationData(CalibrationData): + """Calibration data for the JUNGFRAU detector.""" + + calibrations = { + 'Offset10Hz', + 'Noise10Hz', + 'BadPixelsDark10Hz', + 'RelativeGain10Hz', + 'BadPixelsFF10Hz', + } + parameters = [ + 'Sensor Bias Voltage', + 'Memory Cells', + 'Pixels X', + 'Pixels Y', + 'Integration Time', + 'Sensor temperature', + 'Gain Setting', + ] + + def __init__( + self, detector_name, sensor_bias_voltage, + memory_cells, integration_time, + gain_setting, + gain_mode=None, sensor_temperature=291, + pixels_x=1024, pixels_y=512, + modules=None, client=None, + event_at=None, snapshot_at=None, + ): + super().__init__( + detector_name, modules, client, event_at, snapshot_at, + ) + + self.sensor_bias_voltage = sensor_bias_voltage + self.memory_cells = memory_cells + self.pixels_x = pixels_x + self.pixels_y = pixels_y + self.integration_time = integration_time + self.sensor_temperature = sensor_temperature + self.gain_setting = gain_setting + self.gain_mode = None if gain_mode == 0 else 1 + + +class PNCCD_CalibrationData(CalibrationData): + calibrations = { + 'OffsetCCD', + 'BadPixelsDarkCCD', + 'NoiseCCD', + 'RelativeGainCCD', + 
'CTECCD', + } + parameters = [ + 'Sensor Bias Voltage', + 'Memory cells', + 'Pixels X', + 'Pixels Y', + 'Integration Time', + 'Sensor Temperature', + 'Gain Setting', + ] + + def __init__( + self, detector_name, sensor_bias_voltage, + integration_time, sensor_temperature, + gain_setting, pixels_x=1024, + pixels_y=1024, client=None, + event_at=None, snapshot_at=None, + ): + # Ignore modules for this detector. + super().__init__( + detector_name, None, client, event_at, snapshot_at, + ) + + self.sensor_bias_voltage = sensor_bias_voltage + self.memory_cells = 1 # Ignore memory_cells for this detector + self.pixels_x = pixels_x + self.pixels_y = pixels_y + self.integration_time = integration_time + self.sensor_temperature = sensor_temperature + self.gain_setting = gain_setting + + +class EPIX100_CalibrationData(SplitConditionCalibrationData): + dark_calibrations = { + 'OffsetEPix100', + 'NoiseEPix100', + 'BadPixelsDarkEPix100', + } + illuminated_calibrations = { + 'RelativeGainEPix100', + #'BadPixelsFFEPix100', + } + dark_parameters = [ + 'Sensor Bias Voltage', + 'Memory cells', + 'Pixels X', + 'Pixels Y', + 'Integration time', + 'Sensor temperature', + 'In vacuum', + ] + illuminated_parameters = dark_parameters + ['Source energy'] + + def __init__( + self, detector_name, + sensor_bias_voltage, integration_time, + in_vacuum=0, sensor_temperature=288, + pixels_x=708, pixels_y=768, + source_energy=9.2, client=None, + event_at=None, snapshot_at=None, + ): + # Ignore modules for this detector. + super().__init__( + detector_name, None, client, event_at, snapshot_at, + ) + + self.sensor_bias_voltage = sensor_bias_voltage + self.integration_time = integration_time + self.memory_cells = 1 # Ignore memory_cells for this detector + self.pixels_x = pixels_x + self.pixels_y = pixels_y + self.in_vacuum = in_vacuum + self.sensor_temperature = sensor_temperature + self.source_energy = source_energy + + +class GOTTHARD2_CalibrationData(CalibrationData): + calibrations = { + 'LUTGotthard2' + 'OffsetGotthard2', + 'NoiseGotthard2', + 'BadPixelsDarkGotthard2', + 'RelativeGainGotthard2', + 'BadPixelsFFGotthard2', + } + parameters = [ + 'Sensor Bias Voltage', + 'Memory cells', + 'Pixels X', + 'Pixels Y', + 'Integration time', + 'Sensor temperature', + 'Gain setting', + ] + + def __init__( + self, detector_name, + sensor_bias_voltage, exposure_time, + exposure_period, acquisition_rate, + single_photon, client=None, + event_at=None, snapshot_at=None, + ): + # Ignore modules for this detector. + super().__init__( + detector_name, None, client, event_at, snapshot_at, + ) + + self.sensor_bias_voltage = sensor_bias_voltage + self.exposure_time = exposure_time + self.exposure_period = exposure_period + self.acquisition_rate = acquisition_rate + self.single_photon = single_photon -- GitLab
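
For review purposes, here is a minimal usage sketch of the retrieval flow that the patched Jungfrau notebooks now follow with the new calcat_interface module. It is illustrative only: the detector name, operating-condition values and timestamp are placeholders, and it assumes the code runs on EuXFEL infrastructure where the anonymous CalCat proxy and the caldb store are reachable.

    from datetime import datetime, timezone

    from cal_tools.calcat_interface import JUNGFRAU_CalibrationData

    jf_cal = JUNGFRAU_CalibrationData(
        detector_name="FXE_XAD_JF1M",    # placeholder detector name
        sensor_bias_voltage=180,         # placeholder operating conditions
        memory_cells=1,
        integration_time=350,
        gain_setting=0,
        gain_mode=None,
        modules=None,                    # None = all modules; the notebook passes module numbers
        event_at=datetime(2022, 11, 18, tzinfo=timezone.utc),
    )

    # Metadata only, as stored by the precorrection notebook in
    # calibration_metadata.yml; raise_error=False downgrades missing
    # constants to warnings.
    ccv_metadata = jf_cal.metadata(
        ["Offset10Hz", "BadPixelsDark10Hz", "RelativeGain10Hz", "BadPixelsFF10Hz"],
        raise_error=False,
    )

    # Constant arrays, as loaded by the correction notebook when no
    # precorrection metadata is available. The result is keyed by
    # karabo_da, e.g. const_data["JNGFR01"]["Offset10Hz"].
    const_data = jf_cal.ndarray_map(
        calibrations=["Offset10Hz", "BadPixelsDark10Hz"])

    # Mapping of data aggregators to physical detector units, as used to
    # build da_to_pdu in the correction notebook.
    for mod_info in jf_cal.physical_detector_units.values():
        print(mod_info["karabo_da"], "->", mod_info["physical_name"])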
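
The ePix100 notebooks use the same pattern through the split-condition class; a similarly hedged sketch under the same assumptions (detector name and condition values are again placeholders):

    from cal_tools.calcat_interface import EPIX100_CalibrationData

    epix_cal = EPIX100_CalibrationData(
        detector_name="MID_EXP_EPIX-1",   # placeholder detector name
        sensor_bias_voltage=200,          # placeholder operating conditions
        integration_time=10,
        in_vacuum=0,
        sensor_temperature=288,
        source_energy=9.2,
        event_at=None,                    # None means "now"
        snapshot_at=None,
    )

    # With raise_error=False, a missing RelativeGainEPix100 is reported
    # as a warning instead of raising CalCatError, matching the
    # precorrection notebook's behaviour when relative_gain is enabled.
    epix_metadata = epix_cal.metadata(
        ["OffsetEPix100", "NoiseEPix100", "RelativeGainEPix100"],
        raise_error=False,
    )

    # Print validity times and on-disk paths of the retrieved CCVs.
    for mod, ccvs in epix_metadata.items():
        for cname, ccv in ccvs.items():
            print(mod, cname, ccv["begin_validity_at"],
                  epix_cal.caldb_root / ccv["path"])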