{ "cells": [ { "cell_type": "markdown", "id": "49b6577f-96a5-4dd2-bdd9-da661b2c4619", "metadata": {}, "source": [ "# Gotthard2 Dark Image Characterization #\n", "\n", "Author: European XFEL Detector Group, Version: 1.0\n", "\n", "The following is a processing for the dark constants (`Offset`, `Noise`, and `BadPixelsDark`) maps using dark images taken with Gotthard2 detector (GH2 50um or 25um).\n", "All constants are evaluated per strip, per pulse, and per memory cell. The maps are calculated for each gain stage that is acquired in 3 separate runs.\n", "\n", "The three maps are of shape (stripes, cells, gains): (1280, 2, 3). They can be injected to the database (`db_output`) and/or stored locally (`local_output`)." ] }, { "cell_type": "code", "execution_count": null, "id": "818e24e8", "metadata": {}, "outputs": [], "source": [ "in_folder = \"/gpfs/exfel/exp/FXE/202231/p900298/raw\" # the folder to read data from, required\n", "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/gotthard2/darks\" # the folder to output to, required\n", "metadata_folder = '' # Directory containing calibration_metadata.yml when run by xfel-calibrate\n", "run_high = 7 # run number for G0 dark run, required\n", "run_med = 8 # run number for G1 dark run, required\n", "run_low = 9 # run number for G2 dark run, required\n", "\n", "# Parameters used to access raw data.\n", "karabo_id = \"FXE_XAD_G2XES\" # karabo prefix of Gotthard-II devices\n", "karabo_da = [\"GH201\"] # data aggregators\n", "receiver_template = \"RECEIVER\" # receiver template used to read INSTRUMENT keys.\n", "control_template = \"CONTROL\" # control template used to read CONTROL keys.\n", "ctrl_source_template = '{}/DET/{}' # template for control source name (filled with karabo_id_control)\n", "karabo_id_control = \"\" # Control karabo ID. Set to empty string to use the karabo-id\n", "\n", "# Parameters for the calibration database.\n", "cal_db_interface = \"tcp://max-exfl-cal001:8020\" # calibration DB interface to use\n", "cal_db_timeout = 300000 # timeout on caldb requests\n", "creation_time = \"\" # To overwrite the measured creation_time. Required Format: YYYY-MM-DD HR:MN:SC e.g. \"2022-06-28 13:00:00\"\n", "db_output = False # Output constants to the calibration database\n", "local_output = True # Output constants locally\n", "\n", "# Conditions used for injected calibration constants.\n", "bias_voltage = -1 # Detector bias voltage, set to -1 to use value in raw file.\n", "exposure_time = -1. # Detector exposure time, set to -1 to use value in raw file.\n", "exposure_period = -1. # Detector exposure period, set to -1 to use value in raw file.\n", "acquisition_rate = -1. # Detector acquisition rate (1.1/4.5), set to -1 to use value in raw file.\n", "single_photon = -1 # Detector single photon mode (High/Low CDS), set to -1 to use value in raw file.\n", "\n", "# Parameters used during selecting raw data trains.\n", "min_trains = 1 # Minimum number of trains that should be available to process dark constants. Default 1.\n", "max_trains = 1000 # Maximum number of trains to use for processing dark constants. Set to 0 to use all available trains.\n", "badpixel_threshold_sigma = 5. # bad pixels defined by values outside n times this std from median\n", "\n", "# Don't delete! 
myMDC sends this by default.\n", "operation_mode = '' # Detector dark run acquiring operation mode, optional" ] }, { "cell_type": "code", "execution_count": null, "id": "8085f9aa", "metadata": {}, "outputs": [], "source": [ "import numpy as np\n", "import matplotlib.pyplot as plt\n", "import pasha as psh\n", "from IPython.display import Markdown, display\n", "from extra_data import RunDirectory\n", "from pathlib import Path\n", "\n", "import yaml\n", "from cal_tools.calcat_interface import CalCatApi\n", "from cal_tools.enums import BadPixels\n", "from cal_tools.gotthard2 import gotthard2algs, gotthard2lib\n", "from cal_tools.step_timing import StepTimer\n", "from cal_tools.restful_config import calibration_client\n", "from cal_tools.tools import (\n", " calcat_creation_time,\n", " get_constant_from_db_and_time,\n", " get_report,\n", " save_const_to_h5,\n", " send_to_db,\n", ")\n", "\n", "from iCalibrationDB import Conditions, Constants\n", "\n", "%matplotlib inline" ] }, { "cell_type": "code", "execution_count": null, "id": "18fe4379", "metadata": {}, "outputs": [], "source": [ "run_nums = [run_high, run_med, run_low]\n", "in_folder = Path(in_folder)\n", "out_folder = Path(out_folder)\n", "out_folder.mkdir(parents=True, exist_ok=True)\n", "\n", "ctrl_src = ctrl_source_template.format(karabo_id_control, control_template)\n", "\n", "# Read report path to associate it later with injected constants.\n", "report = get_report(metadata_folder)\n", "\n", "# Run's creation time:\n", "creation_time = calcat_creation_time(in_folder, run_high, creation_time)\n", "print(f\"Creation time: {creation_time}\")\n", "\n", "if not karabo_id_control:\n", " karabo_id_control = karabo_id" ] }, { "cell_type": "code", "execution_count": null, "id": "c176a86f", "metadata": {}, "outputs": [], "source": [ "run_dc = RunDirectory(in_folder / f\"r{run_high:04d}\")\n", "file_loc = f\"proposal:{run_dc.run_metadata()['proposalNumber']} runs:{run_high} {run_med} {run_low}\" # noqa\n", "\n", "receivers = sorted(list(run_dc.select(f'{karabo_id}/DET/{receiver_template}*').all_sources))" ] }, { "cell_type": "code", "execution_count": null, "id": "108be688", "metadata": {}, "outputs": [], "source": [ "step_timer = StepTimer()" ] }, { "cell_type": "code", "execution_count": null, "id": "ff9149fc", "metadata": {}, "outputs": [], "source": [ "# Read parameter conditions and validate the values for the three runs.\n", "\n", "step_timer.start()\n", "run_dcs_dict = dict()\n", "\n", "ctrl_src = ctrl_source_template.format(karabo_id_control, control_template)\n", "conditions = {\n", " \"bias_voltage\": set(),\n", " \"exposure_time\": set(),\n", " \"exposure_period\": set(),\n", " \"acquisition_rate\": set(),\n", " \"single_photon\": set(),\n", "}\n", "for gain, run in enumerate(run_nums):\n", " run_dc = RunDirectory(in_folder / f\"r{run:04d}/\")\n", " run_dcs_dict[run] = [gain, run_dc]\n", "\n", " # Read slow data.\n", " g2ctrl = gotthard2lib.Gotthard2Ctrl(run_dc=run_dc, ctrl_src=ctrl_src)\n", " conditions[\"bias_voltage\"].add(\n", " g2ctrl.get_bias_voltage() if bias_voltage == -1 else bias_voltage\n", " )\n", " conditions[\"exposure_time\"].add(\n", " g2ctrl.get_exposure_time() if exposure_time == -1 else exposure_time\n", " )\n", " conditions[\"exposure_period\"].add(\n", " g2ctrl.get_exposure_period() if exposure_period == -1 else exposure_period\n", " )\n", " conditions[\"single_photon\"].add(\n", " g2ctrl.get_single_photon() if single_photon == -1 else single_photon\n", " )\n", " conditions[\"acquisition_rate\"].add(\n", " 
g2ctrl.get_acquisition_rate() if acquisition_rate == -1 else acquisition_rate\n", " )\n", "\n", "for c, v in conditions.items():\n", " assert len(v) == 1, f\"{c} value is not the same for the three runs.\"\n", "\n", "bias_voltage = conditions[\"bias_voltage\"].pop()\n", "print(\"Bias voltage: \", bias_voltage)\n", "exposure_time = conditions[\"exposure_time\"].pop()\n", "print(\"Exposure time: \", exposure_time)\n", "exposure_period = conditions[\"exposure_period\"].pop()\n", "print(\"Exposure period: \", exposure_period)\n", "single_photon = conditions[\"single_photon\"].pop()\n", "print(\"Single photon: \", single_photon)\n", "acquisition_rate = conditions[\"acquisition_rate\"].pop()\n", "print(\"Acquisition rate: \", acquisition_rate)\n", "\n", "gh2_detector = g2ctrl.get_det_type()\n", "print(f\"Processing {gh2_detector} Gotthard2.\")\n" ] }, { "cell_type": "code", "execution_count": null, "id": "f64bc150-cfcd-4f98-83f9-a982fdacedd7", "metadata": {}, "outputs": [], "source": [ "calcat = CalCatApi(client=calibration_client())\n", "detector_id = calcat.detector(karabo_id)['id']\n", "pdus_by_da = calcat.physical_detector_units(detector_id, pdu_snapshot_at=creation_time)\n", "da_to_pdu = {da: p['physical_name'] for (da, p) in pdus_by_da.items()}\n", "\n", "if karabo_da != [\"\"]:\n", " # Filter DA connected to detector in CALCAT\n", " karabo_da = [da for da in karabo_da if da in da_to_pdu]\n", " # Exclude non selected DA from processing.\n", " da_to_pdu = {da: da_to_pdu[da] for da in karabo_da}\n", "else:\n", " karabo_da = sorted(da_to_pdu.keys())\n", "\n", "print(f\"Processing {karabo_da}\")" ] }, { "cell_type": "code", "execution_count": null, "id": "ac9c5dc3-bc66-4e7e-b6a1-360259be535c", "metadata": {}, "outputs": [], "source": [ "def specify_trains_to_process(\n", " img_key_data: \"extra_data.KeyData\",\n", "):\n", " \"\"\"Specify total number of trains to process.\n", " Based on given min_trains and max_trains, if given.\n", "\n", " Print number of trains to process and number of empty trains.\n", " Raise ValueError if specified trains are less than min_trains.\n", " \"\"\"\n", " # Specifies total number of trains to process.\n", " n_trains = img_key_data.shape[0]\n", " all_trains = len(img_key_data.train_ids)\n", " print(\n", " f\"{receiver} has {all_trains - n_trains} \"\n", " f\"trains with empty frames out of {all_trains} trains\"\n", " )\n", "\n", " if n_trains < min_trains:\n", " raise ValueError(\n", " f\"Less than {min_trains} trains are available in RAW data.\"\n", " \" Not enough data to process darks.\"\n", " )\n", "\n", " if max_trains > 0:\n", " n_trains = min(n_trains, max_trains)\n", "\n", " print(f\"Processing {n_trains} trains.\")\n", "\n", " return n_trains" ] }, { "cell_type": "code", "execution_count": null, "id": "3c59c11d", "metadata": {}, "outputs": [], "source": [ "# set the operating condition\n", "condition = Conditions.Dark.Gotthard2(\n", " bias_voltage=bias_voltage,\n", " exposure_time=exposure_time,\n", " exposure_period=exposure_period,\n", " acquisition_rate=acquisition_rate,\n", " single_photon=single_photon,\n", ")" ] }, { "cell_type": "code", "execution_count": null, "id": "e2eb2fc0-df9c-4887-9691-f81474f8c131", "metadata": {}, "outputs": [], "source": [ "def convert_train(wid, index, tid, d):\n", " \"\"\"Convert a Gotthard2 train from 12bit to 10bit.\"\"\"\n", " gotthard2algs.convert_to_10bit(\n", " d[receiver][\"data.adc\"], lut, data_10bit[index, ...]\n", " )" ] }, { "cell_type": "code", "execution_count": null, "id": "4e8ffeae", "metadata": {}, 
"outputs": [], "source": [ "# Calculate noise and offset per pixel and global average, std and median\n", "noise_map = dict()\n", "offset_map = dict()\n", "badpixels_map = dict()\n", "context = psh.ProcessContext(num_workers=25)\n", "\n", "empty_lut = (np.arange(2 ** 12).astype(np.float64) * 2 ** 10 / 2 ** 12).astype(\n", " np.uint16\n", ")\n", "empty_lut = np.stack(1280 * [np.stack([empty_lut] * 2)], axis=0)\n", "for mod, receiver in zip(karabo_da, receivers):\n", "\n", " # Retrieve LUT constant\n", " lut, time = get_constant_from_db_and_time(\n", " constant=Constants.Gotthard2.LUT(),\n", " condition=condition,\n", " empty_constant=empty_lut,\n", " karabo_id=karabo_id,\n", " karabo_da=mod,\n", " cal_db_interface=cal_db_interface,\n", " creation_time=creation_time,\n", " timeout=cal_db_timeout,\n", " print_once=False,\n", " )\n", " print(f\"Retrieved LUT constant with creation-time {time}\")\n", "\n", " cshape = (1280, 2, 3)\n", "\n", " offset_map[mod] = context.alloc(shape=cshape, dtype=np.float32)\n", " noise_map[mod] = context.alloc(like=offset_map[mod])\n", " badpixels_map[mod] = context.alloc(like=offset_map[mod], dtype=np.uint32)\n", "\n", " for run_num, [gain, run_dc] in run_dcs_dict.items():\n", " step_timer.start()\n", " n_trains = specify_trains_to_process(run_dc[receiver, \"data.adc\"])\n", "\n", " # Select requested number of trains to process.\n", " dc = run_dc.select(receiver, require_all=True).select_trains(\n", " np.s_[:n_trains]\n", " )\n", "\n", " step_timer.done_step(\"preparing raw data\")\n", "\n", " step_timer.start()\n", " # Convert 12bit data to 10bit\n", " data_10bit = context.alloc(\n", " shape=dc[receiver, \"data.adc\"].shape, dtype=np.float32\n", " )\n", " context.map(convert_train, dc)\n", " step_timer.done_step(\"convert to 10bit\")\n", "\n", " step_timer.start()\n", "\n", " # The first ~20 frames of each train are excluded.\n", " # The electronics needs some time to reach stable operational conditions\n", " # at the beginning of each acquisition cycle,\n", " # hence the first ~20 images have lower quality and should not be used.\n", " # The rough number of 20 is correct at 4.5 MHz acquisition rate,\n", " # 5 should be enough at 1.1 MHz. 
We stick with 20 excluded frames, as the difference\n", " # is not expected to be significant.\n", "\n", " # Split even and odd data to calculate the two storage cell constants.\n", " # Detector always operates in burst mode.\n", " even_data = data_10bit[:, 20::2, :]\n", " odd_data = data_10bit[:, 21::2, :]\n", "\n", " def offset_noise_cell(wid, index, d):\n", " offset_map[mod][:, index, gain] = np.mean(d, axis=(0, 1))\n", " noise_map[mod][:, index, gain] = np.std(d, axis=(0, 1))\n", "\n", " # Calculate Offset and Noise Maps.\n", " context.map(offset_noise_cell, (even_data, odd_data))\n", "\n", " # Split even and odd gain data.\n", " data_gain = dc[receiver, \"data.gain\"].ndarray()\n", " even_gain = data_gain[:, 20::2, :]\n", " odd_gain = data_gain[:, 21::2, :]\n", " # The raw gain values are 0, 1, and 3 for G0, G1, and G2, respectively.\n", " raw_g = 3 if gain == 2 else gain\n", "\n", " def badpixels_cell(wid, index, g):\n", " \"\"\"Check for wrong gain values.\n", " Flag stripes whose mean gain across all trains differs from the expected value for each cell.\"\"\"\n", " badpixels_map[mod][\n", " np.mean(g, axis=(0, 1)) != raw_g, index, gain\n", " ] |= BadPixels.WRONG_GAIN_VALUE.value\n", "\n", " context.map(badpixels_cell, (even_gain, odd_gain))\n", "\n", " step_timer.done_step(\"Processing darks\")" ] }, { "cell_type": "code", "execution_count": null, "id": "8e410ca2", "metadata": {}, "outputs": [], "source": [ "print(f\"Total processing time {step_timer.timespan():.01f} s\")\n", "step_timer.print_summary()" ] }, { "cell_type": "code", "execution_count": null, "id": "3fc17e05-17ab-4ac4-9e79-c95399278bb9", "metadata": {}, "outputs": [], "source": [ "def print_bp_entry(bp):\n", " print(f\"{bp.name:<30s} {bp.value:032b} -> {int(bp.value)}\")\n", "\n", "\n", "print_bp_entry(BadPixels.NOISE_OUT_OF_THRESHOLD)\n", "print_bp_entry(BadPixels.OFFSET_NOISE_EVAL_ERROR)\n", "print_bp_entry(BadPixels.WRONG_GAIN_VALUE)\n", "\n", "\n", "def eval_bpidx(d):\n", " mdn = np.nanmedian(d, axis=(0))[None, :, :]\n", " std = np.nanstd(d, axis=(0))[None, :, :]\n", " idx = (d > badpixel_threshold_sigma * std + mdn) | (\n", " d < (-badpixel_threshold_sigma) * std + mdn\n", " )\n", " return idx" ] }, { "cell_type": "code", "execution_count": null, "id": "40c34cc5-fe93-4b83-bf39-f465f37c40b4", "metadata": {}, "outputs": [], "source": [ "step_timer.start()\n", "g_name = [\"G0\", \"G1\", \"G2\"]\n", "\n", "for mod, pdu in da_to_pdu.items():\n", " display(Markdown(f\"### Badpixels for module {mod}:\"))\n", "\n", " badpixels_map[mod][\n", " ~np.isfinite(offset_map[mod])\n", " ] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n", " badpixels_map[mod][\n", " eval_bpidx(noise_map[mod])\n", " ] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value\n", "\n", " badpixels_map[mod][\n", " ~np.isfinite(noise_map[mod])\n", " ] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n", " if not local_output:\n", " for cell in [0, 1]:\n", " fig, ax = plt.subplots(figsize=(10, 5))\n", " for g_idx in [0, 1, 2]:\n", " ax.plot(badpixels_map[mod][:, cell, g_idx], label=f\"G{g_idx} Bad pixel map\")\n", " ax.set_xticks(np.arange(0, 1281, 80))\n", " ax.set_xlabel(\"Stripes #\")\n", " ax.set_ylabel(\"BadPixels\")\n", " ax.set_title(f\"BadPixels map - Cell {cell} - Module {mod} ({pdu})\")\n", " ax.set_ylim([0, 5])\n", " ax.legend()\n", " plt.show()\n", "step_timer.done_step(\"Creating bad pixels constant.\")" ] }, { "cell_type": "code", "execution_count": null, "id": "c8777cfe", "metadata": {}, "outputs": [], "source": [ "if not local_output:\n", " for cons, cname in zip([offset_map, noise_map], [\"Offset\", \"Noise\"]):\n", " for mod, pdu in 
da_to_pdu.items():\n", " display(Markdown(f\"### {cname} for module {mod}:\"))\n", "\n", " for cell in [0, 1]:\n", " fig, ax = plt.subplots(figsize=(10, 5))\n", " for g_idx in [0, 1, 2]:\n", " ax.plot(cons[mod][:, cell, g_idx], label=f\"G{g_idx} {cname} map\")\n", "\n", " ax.set_xlabel(\"Stripes #\")\n", " ax.set_ylabel(cname)\n", " ax.set_title(f\"{cname} map - Cell {cell} - Module {mod} ({pdu})\")\n", " ax.legend()\n", " plt.show()" ] }, { "cell_type": "code", "execution_count": null, "id": "1c4eddf7-7d6e-49f4-8cbb-12d2bc496a8f", "metadata": {}, "outputs": [], "source": [ "step_timer.start()\n", "for mod, db_mod in da_to_pdu.items():\n", " constants = {\n", " \"Offset\": offset_map[mod],\n", " \"Noise\": noise_map[mod],\n", " \"BadPixelsDark\": badpixels_map[mod],\n", " }\n", "\n", " md = None\n", "\n", " for key, const_data in constants.items():\n", "\n", " const = getattr(Constants.Gotthard2, key)()\n", " const.data = const_data\n", "\n", " if db_output:\n", " md = send_to_db(\n", " db_module=db_mod,\n", " karabo_id=karabo_id,\n", " constant=const,\n", " condition=condition,\n", " file_loc=file_loc,\n", " report_path=report,\n", " cal_db_interface=cal_db_interface,\n", " creation_time=creation_time,\n", " timeout=cal_db_timeout,\n", " )\n", " if local_output:\n", " md = save_const_to_h5(\n", " db_module=db_mod,\n", " karabo_id=karabo_id,\n", " constant=const,\n", " condition=condition,\n", " data=const.data,\n", " file_loc=file_loc,\n", " report=report,\n", " creation_time=creation_time,\n", " out_folder=out_folder,\n", " )\n", " print(f\"Calibration constant {key} is stored locally at {out_folder}.\\n\")\n", "\n", "print(\"Constants parameter conditions are:\\n\")\n", "print(\n", " f\"• Bias voltage: {bias_voltage}\\n\"\n", " f\"• Exposure time: {exposure_time}\\n\"\n", " f\"• Exposure period: {exposure_period}\\n\"\n", " f\"• Acquisition rate: {acquisition_rate}\\n\"\n", " f\"• Single photon: {single_photon}\\n\"\n", " f\"• Creation time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\\n\"\n", ")\n", "step_timer.done_step(\"Injecting constants.\")" ] }, { "cell_type": "code", "execution_count": null, "id": "98ca9486", "metadata": {}, "outputs": [], "source": [ "# TODO: store old constants for comparison.\n", "for mod, pdu in da_to_pdu.items():\n", " mod_file = mod.replace(\"/\", \"-\")\n", " with open(f\"{metadata_folder or out_folder}/module_metadata_{mod_file}.yml\", \"w\") as fd:\n", " yaml.safe_dump(\n", " {\n", " \"module\": mod,\n", " \"pdu\": pdu,\n", " },\n", " fd,\n", " )" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3.8.11 ('.cal4_venv': venv)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.11" }, "vscode": { "interpreter": { "hash": "25ceec0b6126c0ccf883616a02d86b8eaec8ca3fe33700925044adbe0a704e39" } } }, "nbformat": 4, "nbformat_minor": 5 }