From 8ce9f7771ac5ce982c20d9e7702d8973f6fd50a3 Mon Sep 17 00:00:00 2001 From: Karim Ahmed <karim.ahmed@xfel.eu> Date: Thu, 23 Jul 2020 01:11:53 +0200 Subject: [PATCH] update AGIPD64K --- .../playground/AGIPD_SingleM_test_Dark.ipynb | 483 +++++++++++++----- 1 file changed, 342 insertions(+), 141 deletions(-) diff --git a/notebooks/AGIPD/playground/AGIPD_SingleM_test_Dark.ipynb b/notebooks/AGIPD/playground/AGIPD_SingleM_test_Dark.ipynb index eeffc84fe..2f76d945b 100644 --- a/notebooks/AGIPD/playground/AGIPD_SingleM_test_Dark.ipynb +++ b/notebooks/AGIPD/playground/AGIPD_SingleM_test_Dark.ipynb @@ -20,13 +20,13 @@ "# Inputs exposed to xfel-calibrate package should be in this first cell.\n", "\n", "# Parameters for accessing files.\n", - "in_folder = \"/gpfs/exfel/exp/SPB/202030/p900138/raw\" # path to input data, required\n", - "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/SPB/AGIPD/DARK/202030/p900138/\" # path to output to, required\n", + "in_folder = \"/gpfs/exfel/exp/SPB/202031/p900146/raw\" # path to input data, required\n", + "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/SPB2\" # path to output to, required\n", "sequences = [0] # sequence files to evaluate.\n", "\n", - "run_high = 33 # run number in which high gain data was recorded, required\n", - "run_med = 34 # run number in which medium gain data was recorded, required\n", - "run_low = 35 # run number in which low gain data was recorded, required\n", + "run_high = 67 # run number in which high gain data was recorded, required\n", + "run_med = 68 # run number in which medium gain data was recorded, required\n", + "run_low = 69 # run number in which low gain data was recorded, required\n", "\n", "local_output = True # output constants locally\n", "db_output = False # output constants to database\n", @@ -38,8 +38,12 @@ "module_name = 'AGIPD64K'\n", "\n", "channel = 16\n", + "karabo_da = [\"AGIPD16\"]\n", "\n", "path_template = \"RAW-R{:04d}-{}-S{:05d}\"\n", + "karabo_id_control = \"SPB_IRU_AGIPD1M1\" # karabo-id for control device\n", + "karabo_da_control = \"AGIPD1MCTRL01\" # karabo DA for control information\n", + "h5path_ctrl = '/CONTROL/{}/MDL/FPGA_COMP' # path to control information\n", "\n", "# Parameters for taking dark.\n", "mem_cells = 0 # number of memory cells used, set to 0 to automatically infer\n", @@ -49,11 +53,19 @@ "\n", "dont_use_dir_date = False # don't use the dir creation date for determining the creation time\n", "\n", - "thresholds_offset_sigma = 3. # thresholds in terms of n sigma noise for offset deduced bad pixels\n", - "thresholds_offset_hard = [4000, 8500] # thresholds in absolute ADU terms for offset deduced bad pixels\n", + "thresholds_offset_sigma = 3. # offset sigma thresholds for offset deduced bad pixels\n", + "thresholds_offset_hard = [0, 0] # Sets the same offset thresholds for all 3 gains. Kept for backward compatibility. Leave as [0, 0] to use the per-gain parameters below.\n", + "thresholds_offset_hard_hg = [3000, 7000] # High-gain thresholds in absolute ADU terms for offset deduced bad pixels\n", + "thresholds_offset_hard_mg = [6000, 10000] # Medium-gain thresholds in absolute ADU terms for offset deduced bad pixels\n", + "thresholds_offset_hard_lg = [6000, 10000] # Low-gain thresholds in absolute ADU terms for offset deduced bad pixels\n", "\n", - "thresholds_noise_sigma = 5. # thresholds in terms of n sigma noise for offset deduced bad pixels\n", - "thresholds_noise_hard = [4, 20] # thresholds in absolute ADU terms for offset deduced bad pixels\n", + "thresholds_noise_sigma = 5. 
# noise sigma thresholds for noise deduced bad pixels\n", + "thresholds_noise_hard = [0, 0] # Sets the same noise thresholds for all 3 gains. Kept for backward compatibility. Leave as [0, 0] to use the per-gain parameters below.\n", + "thresholds_noise_hard_hg = [4, 20] # High-gain thresholds in absolute ADU terms for noise deduced bad pixels\n", + "thresholds_noise_hard_mg = [4, 20] # Medium-gain thresholds in absolute ADU terms for noise deduced bad pixels\n", + "thresholds_noise_hard_lg = [4, 20] # Low-gain thresholds in absolute ADU terms for noise deduced bad pixels\n", + "\n", + "thresholds_gain_sigma = 5. # Gain separation sigma threshold\n", "\n", "# Plotting parameters\n", "high_res_badpix_3d = False # set this to True if you need high-resolution 3d bad pixel plots. Runtime: ~ 1h\n", @@ -76,11 +88,16 @@ "import os\n", "import h5py\n", "import numpy as np\n", + "import dateutil.parser\n", + "\n", "import traceback\n", "import matplotlib\n", "matplotlib.use('agg')\n", "import matplotlib.pyplot as plt\n", "%matplotlib inline\n", + "from IPython.display import display, Markdown, Latex\n", + "import tabulate\n", + "\n", "\n", "from cal_tools.tools import (gain_map_files, parse_runs, \n", " run_prop_seq_from_path, get_notebook_name, \n", @@ -97,6 +114,7 @@ "view.use_dill()\n", "\n", "from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions\n", + "from cal_tools.agipdlib import get_gain_setting\n", "\n", "gains = np.arange(3)\n", "\n", @@ -107,6 +125,9 @@ "offset_runs[\"med\"] = parse_runs(run_med)[0]\n", "offset_runs[\"low\"] = parse_runs(run_low)[0]\n", "\n", + "gain_names = ['High', 'Medium', 'Low']\n", + "runs = [run_high, run_med, run_low]\n", + "\n", "creation_time=None\n", "if not dont_use_dir_date:\n", " creation_time = get_dir_creation_date(in_folder, run_high)\n", @@ -116,21 +137,41 @@ "logger = InfluxLogger(detector=\"AGIPD\", instrument=instrument, mem_cells=mem_cells,\n", " notebook=get_notebook_name(), proposal=prop)\n", "\n", - "print(\"Using {} as creation time of constant.\".format(creation_time))\n", + "print(f\"Using {creation_time} as creation time of constant.\")\n", "\n", "cal_db_interface = get_random_db_interface(cal_db_interface)\n", - "print('Calibration database interface: {}'.format(cal_db_interface))\n", + "print(f'Calibration database interface: {cal_db_interface}')\n", "\n", "# Same used for testing(Temporary) the Single Module\n", "loc = \"SPB_DET_AGIPD1M-1\"\n", "\n", "# Same used for testing(Temporary) the Single Module\n", "dinstance = \"AGIPD1M1\"\n", - "print(\"Detector in use is {}\".format(loc))\n", - "\n", + "print(f\"Detector in use is {loc}\")\n", + "if \"{\" in h5path_ctrl:\n", + " h5path_ctrl = h5path_ctrl.format(karabo_id_control)\n", + "print(h5path_ctrl)\n", "# Convert gain-setting in case of still being 0.1\n", "if gain_setting == 0.1:\n", - " gain_setting = None" + " if creation_time.replace(tzinfo=None) < dateutil.parser.parse('2020-01-31'):\n", + " print(\"Set gain-setting to None for runs taken before 2020-01-31\")\n", + " gain_setting = None\n", + " else:\n", + " try:\n", + " # extract gain setting and validate that all runs have the same setting\n", + " gsettings = []\n", + " for r in runs:\n", + " control_fname = f'{in_folder}/r{r:04d}/RAW-R{r:04d}-{karabo_da_control}-S00000.h5'\n", + " gsettings.append(get_gain_setting(control_fname, h5path_ctrl))\n", + " if not all(g == gsettings[0] for g in gsettings):\n", + " raise ValueError(f\"Different gain settings for the 3 input runs {gsettings}\")\n", + " 
gain_setting = gsettings[0] \n", + " except Exception as e:\n", + " print(f'Error while reading gain setting from: \\n{control_fname}')\n", + " print(f'Error: {e}')\n", + " if \"component not found\" in str(e):\n", + " print(\"Gain setting is not found in the control information\")\n", + " gain_setting = None" ] }, { @@ -165,8 +206,7 @@ "outputs": [], "source": [ "# set everything up filewise\n", - "if not os.path.exists(out_folder):\n", - " os.makedirs(out_folder)\n", + "os.makedirs(out_folder, exist_ok=True)\n", "\n", "path_inset = \"AGIPD{}\".format(channel)\n", "raw_files = []\n", @@ -185,9 +225,9 @@ "if len(raw_files) < 1:\n", " print(\"WARNING: NO FILES TO CREATE THE DARK!\")\n", "else:\n", - " total_file_size = total_file_size / 1e9\n", + " total_file_size = total_file_size/1e9\n", " total_sequences = len(raw_files)\n", - " print(\"The total size of the processed data: {}GB\".format(total_file_size))" + " print(f\"The total size of the processed data: {total_file_size}GB\")" ] }, { @@ -207,7 +247,7 @@ "source": [ "import copy\n", "from functools import partial\n", - "def characterize_module(cells, bp_thresh, loc, acq_rate, inp):\n", + "def characterize_module(cells, bp_thresh, loc, acq_rate, channel, inp):\n", " import numpy as np\n", " import copy\n", " import h5py\n", @@ -215,7 +255,7 @@ " from cal_tools.enums import BadPixels\n", " from cal_tools.agipdlib import get_num_cells, get_acq_rate\n", " \n", - " filename, filename_out, channel = inp\n", + " filename, gg = inp\n", " \n", " if cells == 0:\n", " cells = get_num_cells(filename, loc, channel)\n", @@ -223,15 +263,20 @@ " if acq_rate == 0.:\n", " acq_rate = get_acq_rate(filename, loc, channel)\n", " \n", - " thresholds_offset_hard, thresholds_offset_sigma, thresholds_noise_hard, thresholds_noise_sigma = bp_thresh \n", + " thresholds_offset, thresholds_offset_sigma, thresholds_noise, thresholds_noise_sigma = bp_thresh \n", + " thresholds_offset_hard = thresholds_offset[gg]\n", + " thresholds_noise_hard = thresholds_noise[gg]\n", + " \n", " infile = h5py.File(filename, \"r\", driver=\"core\")\n", - " count = np.squeeze(infile[\"/INDEX/{}/DET/{}CH0:xtdf/image/count\".format(loc, channel)])\n", - " first = np.squeeze(infile[\"/INDEX/{}/DET/{}CH0:xtdf/image/first\".format(loc, channel)])\n", + " count = np.squeeze(infile[f\"/INDEX/{loc}/DET/{channel}CH0:xtdf/image/count\"])\n", + " first = np.squeeze(infile[f\"/INDEX/{loc}/DET/{channel}CH0:xtdf/image/first\"])\n", + "\n", " last_index = int(first[count != 0][-1]+count[count != 0][-1])\n", " first_index = int(first[count != 0][0])\n", - " im = np.array(infile[\"/INSTRUMENT/{}/DET/{}CH0:xtdf/image/data\".format(loc, channel)][first_index:last_index,...]) \n", - " cellIds = np.squeeze(infile[\"/INSTRUMENT/{}/DET/{}CH0:xtdf/image/cellId\".format(loc, channel)][first_index:last_index,...]) \n", - " \n", + "\n", + " im = np.array(infile[f\"/INSTRUMENT/{loc}/DET/{channel}CH0:xtdf/image/data\"][first_index:last_index,...]) \n", + " cellIds = np.squeeze(infile[f\"/INSTRUMENT/{loc}/DET/{channel}CH0:xtdf/image/cellId\"][first_index:last_index,...]) \n", + "\n", " infile.close()\n", "\n", " ga = im[:, 1, ...]\n", @@ -246,6 +291,7 @@ " mcells = cells\n", " offset = np.zeros((im.shape[0], im.shape[1], mcells))\n", " gains = np.zeros((im.shape[0], im.shape[1], mcells))\n", + " gains_std = np.zeros((im.shape[0], im.shape[1], mcells))\n", " noise = np.zeros((im.shape[0], im.shape[1], mcells))\n", " \n", " for cc in np.unique(cellIds[cellIds < mcells]):\n", @@ -253,6 +299,7 @@ " offset[...,cc] = 
np.median(im[..., cellidx], axis=2)\n", " noise[...,cc] = np.std(im[..., cellidx], axis=2)\n", " gains[...,cc] = np.median(ga[..., cellidx], axis=2)\n", + " gains_std[...,cc] = np.std(ga[..., cellidx], axis=2)\n", " \n", " # bad pixels\n", " bp = np.zeros(offset.shape, np.uint32)\n", @@ -275,61 +322,80 @@ " bp[(noise < thresholds_noise_hard[0]) | (noise > thresholds_noise_hard[1])] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value\n", " bp[~np.isfinite(noise)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n", "\n", - "\n", - " return offset, noise, gains, bp, cells, acq_rate\n", - "\n", - "gg = 0\n", + " return offset, noise, gains, gains_std, gg, bp, cells, acq_rate\n", "\n", "start = datetime.now()\n", "all_cells = []\n", "all_acq_rate = []\n", "\n", - "for gain, fname_in in enumerate(raw_files):\n", + "if thresholds_offset_hard == [0, 0]:\n", + " thresholds_offset_hard = [thresholds_offset_hard_hg, thresholds_offset_hard_mg, thresholds_offset_hard_lg]\n", + "else:\n", + " thresholds_offset_hard = [thresholds_offset_hard] * 3\n", "\n", - " inp = []\n", + "if thresholds_noise_hard == [0, 0]:\n", + " thresholds_noise_hard = [thresholds_noise_hard_hg, thresholds_noise_hard_mg, thresholds_noise_hard_lg]\n", + "else:\n", + " thresholds_noise_hard = [thresholds_noise_hard] * 3\n", "\n", - " #replace RAW with CORR in .hf5 file name.\n", - " fout = os.path.abspath(\"{}/{}\".format(out_folder, (os.path.split(fname_in)[-1]).replace(\"RAW\", \"CORR\")))\n", - " inp.append((fname_in, fout, channel))\n", + "inp = []\n", "\n", - " p = partial(characterize_module, max_cells,\n", - " (thresholds_offset_hard, thresholds_offset_sigma,\n", - " thresholds_noise_hard, thresholds_noise_sigma), loc, acq_rate)\n", + "for gain, fname_in in enumerate(raw_files):\n", "\n", - " results = list(map(p, inp))\n", - " #results = view.map_sync(p, inp)\n", + " \n", + " inp.append((fname_in, gain))\n", "\n", - " for ii, r in enumerate(results):\n", - " i = 0\n", - " offset, noise, gain, bp, thiscell, thisacq = r\n", - " all_cells.append(thiscell)\n", - " all_acq_rate.append(thisacq)\n", - " \n", - " # only at the first gain.\n", - " if gg == 0:\n", - " offset_g = np.zeros((offset.shape[0], offset.shape[1], offset.shape[2], 3))\n", - " noise_g = np.zeros_like(offset_g)\n", - " gain_g = np.zeros_like(offset_g)\n", - " badpix_g = np.zeros_like(offset_g, np.uint32)\n", - " first = False\n", - "\n", - " offset_g[...,gg] = offset\n", - " noise_g[...,gg] = noise\n", - " gain_g[...,gg] = gain\n", - " badpix_g[...,gg] = bp\n", - " gg +=1\n", + " p = partial(characterize_module, max_cells,\n", + " (thresholds_offset_hard, thresholds_offset_sigma,\n", + " thresholds_noise_hard, thresholds_noise_sigma),\n", + " loc, acq_rate, channel)\n", + "\n", + "#results = list(map(p, inp))\n", + "results = view.map_sync(p, inp)\n", + "\n", + "for ii, r in enumerate(results):\n", + " offset, noise, gain, gains_std, gg, bp, thiscell, thisacq = r\n", + " all_cells.append(thiscell)\n", + " all_acq_rate.append(thisacq)\n", + "\n", + " if ii == 0:\n", + " offset_g = np.zeros((offset.shape[0], offset.shape[1], offset.shape[2], 3))\n", + " noise_g = np.zeros_like(offset_g)\n", + " gain_g = np.zeros_like(offset_g)\n", + " gainstd_g = np.zeros_like(offset_g)\n", + " badpix_g = np.zeros_like(offset_g, np.uint32)\n", + "\n", + " offset_g[...,gg] = offset\n", + " noise_g[...,gg] = noise\n", + " gain_g[...,gg] = gain\n", + " gainstd_g[..., gg] = gains_std\n", + " badpix_g[...,gg] = bp\n", "\n", "duration = (datetime.now()-start).total_seconds()\n", 
"logger.runtime_summary_entry(success=True, runtime=duration,\n", - " total_sequences=total_sequences,\n", - " filesize=total_file_size)\n", + " total_sequences=total_sequences,\n", + " filesize=total_file_size)\n", "logger.send()\n", "\n", "max_cells = np.max(all_cells)\n", - "print(\"Using {} memory cells\".format(max_cells))\n", + "print(f\"Using {max_cells} memory cells\")\n", "\n", "acq_rate = np.max(all_acq_rate)\n", - "print(\"Using {} MHz acquisition rate\".format(acq_rate))" + "print(f\"Using {acq_rate} MHz acquisition rate\".format())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Add a badpixel due to bad gain separation\n", + "for g in range(2):\n", + " # Bad pixels during bad gain separation.\n", + " # Fraction of pixels in the module with separation lower than \"thresholds_gain_sigma\".\n", + " bad_sep = (gain_g[..., g+1] - gain_g[..., g]) / np.sqrt(gainstd_g[..., g+1]**2 + gainstd_g[..., g]**2)\n", + " badpix_g[...,g+1][(bad_sep)<thresholds_gain_sigma]|= BadPixels.GAIN_THRESHOLDING_ERROR.value" ] }, { @@ -362,20 +428,18 @@ "outputs": [], "source": [ "res = OrderedDict()\n", - "\n", "res = {'Offset': offset_g,\n", " 'Noise': noise_g,\n", " 'ThresholdsDark': thresholds_g,\n", - " 'BadPixelsDark': badpix_g \n", - " }\n", + " 'BadPixelsDark': badpix_g}\n", "\n", "if local_output:\n", - " ofile = \"{}/agipd_offset_store_{}_{}.h5\".format(out_folder, \"_\".join(offset_runs.values()), module_name)\n", + " ofile = f\"{out_folder}/agipd_offset_store_{'_'.join(offset_runs.values())}_{module_name}.h5\"\n", " store_file = h5py.File(ofile, \"w\")\n", - " store_file[\"{}/Offset/0/data\".format(module_name)] = offset_g\n", - " store_file[\"{}/Noise/0/data\".format(module_name)] = noise_g\n", - " store_file[\"{}/Threshold/0/data\".format(module_name)] = thresholds_g\n", - " store_file[\"{}/BadPixels/0/data\".format(module_name)] = badpix_g\n", + " store_file[f\"{module_name}/Offset/0/data\"] = offset_g\n", + " store_file[f\"{module_name}/Noise/0/data\"] = noise_g\n", + " store_file[f\"{module_name}/Threshold/0/data\"] = thresholds_g\n", + " store_file[f\"{module_name}/BadPixels/0/data\"] = badpix_g\n", " store_file.close()" ] }, @@ -386,7 +450,7 @@ "outputs": [], "source": [ "proposal = list(filter(None, in_folder.strip('/').split('/')))[-2]\n", - "file_loc = 'proposal:{} runs:{} {} {}'.format(proposal, run_low, run_med, run_high)" + "file_loc = f\"proposal:{proposal} runs:{run_low} {run_med} {run_high}\"" ] }, { @@ -406,7 +470,6 @@ " bias_voltage=bias_voltage,\n", " acquisition_rate=acq_rate,\n", " gain_setting=gain_setting)\n", - " detinst = getattr(Detectors, dinstance)\n", "\n", " # AGIPD_SIV1_AGIPDV11_M001Test\n", " device = Detectors.AGIPD.AGIPD_SIV1_AGIPDV11_M001Test\n", @@ -459,12 +522,11 @@ "source": [ "cell = 3\n", "gain = 0\n", - "out_folder = None\n", - "# attach module name for plotting.\n", + "\n", "res_da = {}\n", "res_da[module_name] = res\n", "\n", - "show_overview(res_da, cell, gain, out_folder=out_folder, infix=\"_\".join(offset_runs.values()))" + "show_overview(res_da, cell, gain, out_folder=None, infix=\"_\".join(offset_runs.values()))" ] }, { @@ -482,7 +544,7 @@ "source": [ "cell = 3\n", "gain = 1\n", - "show_overview(res_da, cell, gain, out_folder=out_folder, infix=\"_\".join(offset_runs.values()))" + "show_overview(res_da, cell, gain, out_folder=None, infix=\"_\".join(offset_runs.values()))" ] }, { @@ -500,7 +562,7 @@ "source": [ "cell = 3\n", "gain = 2\n", - "show_overview(res_da, cell, gain, 
out_folder=out_folder, infix=\"_\".join(offset_runs.values()))" + "show_overview(res_da, cell, gain, out_folder=None, infix=\"_\".join(offset_runs.values()))" ] }, { @@ -512,13 +574,6 @@ "The following plots show the results of bad pixel evaluation for all evaluated memory cells. Cells are stacked in the Z-dimension, while pixels values in x/y are rebinned with a factor of 2. This excludes single bad pixels present only in disconnected pixels. Hence, any bad pixels spanning at least 4 pixels in the x/y-plane, or across at least two memory cells are indicated. Colors encode the bad pixel type, or mixed type." ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### High Gain ###" - ] - }, { "cell_type": "code", "execution_count": null, @@ -528,22 +583,39 @@ "cols = {BadPixels.NOISE_OUT_OF_THRESHOLD.value: (BadPixels.NOISE_OUT_OF_THRESHOLD.name, '#FF000080'),\n", " BadPixels.OFFSET_NOISE_EVAL_ERROR.value: (BadPixels.OFFSET_NOISE_EVAL_ERROR.name, '#0000FF80'),\n", " BadPixels.OFFSET_OUT_OF_THRESHOLD.value: (BadPixels.OFFSET_OUT_OF_THRESHOLD.name, '#00FF0080'),\n", - " BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value: ('MIXED', '#DD00DD80')}\n", + " BadPixels.GAIN_THRESHOLDING_ERROR.value: (BadPixels.GAIN_THRESHOLDING_ERROR.name, '#FF40FF40'),\n", + " BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value: ('OFFSET_OUT_OF_THRESHOLD + NOISE_OUT_OF_THRESHOLD', '#DD00DD80'),\n", + " BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value | \n", + " BadPixels.GAIN_THRESHOLDING_ERROR.value: ('MIXED', '#BFDF009F')}\n", "\n", - "rebin = 8 if not high_res_badpix_3d else 2\n", + "if high_res_badpix_3d:\n", + " display(Markdown(\"\"\"\n", + " \n", + " ## Global Bad Pixel Behaviour ##\n", "\n", - "gain = 0\n", - "badpix_g_da = {}\n", - "badpix_g_da[module_name] = badpix_g\n", - "for mod, data in badpix_g_da.items():\n", - " plot_badpix_3d(data[...,gain], cols, title=mod, rebin_fac=rebin)" + " The following plots show the results of bad pixel evaluation for all evaluated memory cells. \n", + " Cells are stacked in the Z-dimension, while pixels values in x/y are rebinned with a factor of 2. \n", + " This excludes single bad pixels present only in disconnected pixels. \n", + " Hence, any bad pixels spanning at least 4 pixels in the x/y-plane, or across at least two memory cells are indicated. \n", + " Colors encode the bad pixel type, or mixed type.\n", + "\n", + " \"\"\"))\n", + "\n", + " gnames = ['High Gain', 'Medium Gain', 'Low Gain']\n", + " for gain in range(3):\n", + " display(Markdown(f'### {gnames[gain]} ###'))\n", + " for mod, data in {module_name: badpix_g}.items():\n", + " plot_badpix_3d(data[...,gain], cols, title=mod, rebin_fac=1)\n", + " plt.show()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### Medium Gain ###" + "## Aggregate values, and per Cell behaviour ##\n", + "\n", + "The following tables and plots give an overview of statistical aggregates for each constant, as well as per cell behavior." 
] }, { @@ -552,16 +624,12 @@ "metadata": {}, "outputs": [], "source": [ - "gain = 1\n", - "for mod, data in badpix_g_da.items():\n", - " plot_badpix_3d(data[...,gain], cols, title=mod, rebin_fac=rebin)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Low Gain ###" + "offset_g_dict = {}\n", + "badpix_g_dict = {}\n", + "offset_g_dict[module_name] = offset_g\n", + "badpix_g_dict[module_name] = badpix_g\n", + "create_constant_overview(offset_g_dict, \"Offset (ADU)\", max_cells, 4000, 8000,\n", + " badpixels=[badpix_g_dict, np.nan])" ] }, { @@ -570,18 +638,11 @@ "metadata": {}, "outputs": [], "source": [ - "gain = 2\n", - "for mod, data in badpix_g_da.items():\n", - " plot_badpix_3d(data[...,gain], cols, title=mod, rebin_fac=rebin)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Aggregate values, and per Cell behaviour ##\n", + "noise_g_dict = {}\n", + "noise_g_dict[module_name] = noise_g\n", "\n", - "The following tables and plots give an overview of statistical aggregates for each constant, as well as per cell behavior." + "create_constant_overview(noise_g_dict, \"Noise (ADU)\", max_cells, 0, 100,\n", + " badpixels=[badpix_g_dict, np.nan])" ] }, { @@ -590,11 +651,22 @@ "metadata": {}, "outputs": [], "source": [ - "offset_g_da = {}\n", - "offset_g_da[module_name] = offset_g\n", + "thresholds_g_dict = {}\n", + "thresholds_g_dict[module_name] = thresholds_g\n", "\n", - "create_constant_overview(offset_g_da, \"Offset (ADU)\", max_cells, 4000, 8000,\n", - " out_folder=out_folder, infix=\"_\".join(offset_runs.values()))" + "# Plot only three gain threshold maps.\n", + "bp_thresh = OrderedDict()\n", + "for mod, con in badpix_g_dict.items():\n", + " bp_thresh[mod] = np.zeros((con.shape[0], con.shape[1], con.shape[2], 5), dtype=con.dtype)\n", + " bp_thresh[mod][...,:2] = con[...,:2]\n", + " bp_thresh[mod][...,2:] = con\n", + "\n", + "\n", + "create_constant_overview(thresholds_g_dict, \"Threshold (ADU)\", max_cells, 3000, 10000, 5,\n", + " badpixels=[bp_thresh, np.nan],\n", + " gmap=['HG-MG Threshold', 'MG-LG Threshold', 'High gain', 'Medium gain', 'Low gain'],\n", + " marker=['d','d','','','']\n", + " )" ] }, { @@ -603,10 +675,11 @@ "metadata": {}, "outputs": [], "source": [ - "noise_g_da = {}\n", - "noise_g_da[module_name] = noise_g\n", - "create_constant_overview(noise_g_da, \"Noise (ADU)\", max_cells, 0, 100,\n", - " out_folder=out_folder, infix=\"_\".join(offset_runs.values()))" + "bad_pixel_aggregate_g = OrderedDict()\n", + "\n", + "for m, d in badpix_g_dict.items():\n", + " bad_pixel_aggregate_g[m] = d.astype(np.bool).astype(np.float)\n", + "create_constant_overview(bad_pixel_aggregate_g, \"Bad pixel fraction\", max_cells, 0, 0.10, 3)" ] }, { @@ -615,48 +688,176 @@ "metadata": {}, "outputs": [], "source": [ - "thresholds_g_da = {}\n", - "thresholds_g_da[module_name] = thresholds_g\n", - "create_constant_overview(thresholds_g_da, \"Threshold (ADU)\", max_cells, 3000, 8000, 2,\n", - " out_folder=out_folder, infix=\"_\".join(offset_runs.values()))" + "# Retrieve existing constants for comparison\n", + "clist = [\"Offset\", \"Noise\", \"ThresholdsDark\", \"BadPixelsDark\"]\n", + "old_const = {}\n", + "old_mdata = {}\n", + "\n", + "print('Retrieve pre-existing constants for comparison.')\n", + "\n", + "for const in res:\n", + " metadata = ConstantMetaData()\n", + " dconst = getattr(Constants.AGIPD, const)()\n", + " dconst.data = res[const]\n", + " metadata.calibration_constant = dconst\n", + "\n", + " # Setting conditions\n", + " 
condition = Conditions.Dark.AGIPD(memory_cells=max_cells,\n", + " bias_voltage=bias_voltage,\n", + " acquisition_rate=acq_rate,\n", + " gain_setting=gain_setting)\n", + "\n", + " metadata.detector_condition = condition\n", + "\n", + " # specify a version for this constant\n", + " if creation_time is None:\n", + " metadata.calibration_constant_version = Versions.Now(device=device)\n", + " else:\n", + " metadata.calibration_constant_version = Versions.Timespan(device=device,\n", + " start=creation_time)\n", + "\n", + " metadata.retrieve(cal_db_interface, timeout=cal_db_timeout)\n", + "\n", + " old_const[const] = metadata.calibration_constant.data\n", + "\n", + " if metadata.comm_db_success:\n", + " time = metadata.calibration_constant_version.begin_at\n", + " old_mdata[const] = time.isoformat()\n", + " os.makedirs(os.path.join(f'{out_folder}','old/'), exist_ok=True)\n", + " save_const_to_h5(metadata, os.path.join(f'{out_folder}','old/'))\n", + " else:\n", + " old_mdata[const] = \"Not found\"\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Summary tables ##\n", "\n", "The following tables show summary information for the evaluated module. Values for currently evaluated constants are compared with values for pre-existing constants retrieved from the calibration database." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "table = []\n", "gain_names = ['High', 'Medium', 'Low']\n", "bits = [BadPixels.NOISE_OUT_OF_THRESHOLD, BadPixels.OFFSET_OUT_OF_THRESHOLD, BadPixels.OFFSET_NOISE_EVAL_ERROR, BadPixels.GAIN_THRESHOLDING_ERROR]\n", "for gain in range(3):\n", "\n", " l_data = []\n", " l_data_old = []\n", "\n", " data = np.copy(badpix_g[:,:,:,gain])\n", " datau32 = data.astype(np.uint32)\n", " l_data.append(len(datau32[datau32>0].flatten()))\n", " for bit in bits:\n", " l_data.append(np.count_nonzero(badpix_g[:,:,:,gain] & bit.value))\n", "\n", " if old_const['BadPixelsDark'] is not None:\n", " dataold = np.copy(old_const['BadPixelsDark'][:, :, :, gain])\n", " datau32old = dataold.astype(np.uint32)\n", " l_data_old.append(len(datau32old[datau32old>0].flatten()))\n", " for bit in bits:\n", " l_data_old.append(np.count_nonzero(old_const['BadPixelsDark'][:, :, :, gain] & bit.value))\n", "\n", " l_data_name = ['All bad pixels', 'NOISE_OUT_OF_THRESHOLD',\n", " 'OFFSET_OUT_OF_THRESHOLD', 'OFFSET_NOISE_EVAL_ERROR', 'GAIN_THRESHOLDING_ERROR']\n", "\n", " l_threshold = ['', f'{thresholds_noise_sigma} {thresholds_noise_hard[gain]}',\n", " f'{thresholds_offset_sigma} {thresholds_offset_hard[gain]}',\n", " '', f'{thresholds_gain_sigma}']\n", "\n", " for i in range(len(l_data)):\n", " line = [f'{l_data_name[i]}, {gain_names[gain]} gain', l_threshold[i], l_data[i]]\n", "\n", " if old_const['BadPixelsDark'] is not None:\n", " line += [l_data_old[i]]\n", " else:\n", " line += ['-']\n", "\n", " table.append(line)\n", " table.append(['', '', '', ''])\n", "\n", "display(Markdown('''\n", "\n", "### Number of bad 
pixels ###\n", + "\n", + "One pixel can be bad for different reasons, therefore, the sum of all types of bad pixels can be more than the number of all bad pixels.\n", + "\n", + "'''))\n", + "if len(table)>0:\n", + " md = display(Latex(tabulate.tabulate(table, tablefmt='latex',\n", + " headers=[\"Pixel type\", \"Threshold\",\n", + " \"New constant\", \"Old constant\"])))" + ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], - "source": [] + "source": [ + "import tabulate\n", + "\n", + "header = ['Parameter', \n", + " \"New constant\", \"Old constant \", \n", + " \"New constant\", \"Old constant \", \n", + " \"New constant\", \"Old constant \",\n", + " \"New constant\", \"Old constant \"]\n", + "\n", + "for const in ['Offset', 'Noise', 'ThresholdsDark']:\n", + " if const != 'ThresholdsDark':\n", + " table = [['','High gain', 'High gain', 'Medium gain', 'Medium gain', 'Low gain', 'Low gain']]\n", + " else:\n", + " table = [['','HG-MG threshold', 'HG-MG threshold', 'MG-LG threshold', 'MG-LG threshold']]\n", + "\n", + " data = np.copy(res[const])\n", + " if const == 'ThresholdsDark':\n", + " data[...,0][res['BadPixelsDark'][...,0]>0] = np.nan\n", + " data[...,1][res['BadPixelsDark'][...,1]>0] = np.nan\n", + " else:\n", + " data[res['BadPixelsDark']>0] = np.nan\n", + "\n", + " if old_const[const] is not None and old_const['BadPixelsDark'] is not None:\n", + " dataold = np.copy(old_const[const])\n", + " if const == 'ThresholdsDark':\n", + " dataold[...,0][old_const['BadPixelsDark'][...,0]>0] = np.nan\n", + " dataold[...,1][old_const['BadPixelsDark'][...,1]>0] = np.nan\n", + " else:\n", + " dataold[old_const['BadPixelsDark']>0] = np.nan\n", + "\n", + " f_list = [np.nanmedian, np.nanmean, np.nanstd, np.nanmin, np.nanmax]\n", + " n_list = ['Median', 'Mean', 'Std', 'Min', 'Max']\n", + "\n", + " for i, f in enumerate(f_list):\n", + " line = [n_list[i]]\n", + " for gain in range(3):\n", + " # Compare only 3 threshold gain-maps\n", + " if gain == 2 and const == 'ThresholdsDark':\n", + " continue\n", + " line.append('{:6.1f}'.format(f(data[...,gain])))\n", + " if old_const[const] is not None and old_const['BadPixelsDark'] is not None:\n", + " line.append('{:6.1f}'.format(f(dataold[...,gain])))\n", + " else:\n", + " line.append('-')\n", + "\n", + " table.append(line)\n", + "\n", + " display(Markdown('### {} [ADU], good pixels only ###'.format(const)))\n", + " md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=header))) " + ] } ], "metadata": { "kernelspec": { - "display_name": "Calibration_VENV", + "display_name": "Python 3", "language": "python", - "name": "calibration_venv" + "name": "python3" }, "language_info": { "codemirror_mode": { -- GitLab
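
Note on the gain-separation bad-pixel criterion added by this patch: a pixel/memory cell is flagged with GAIN_THRESHOLDING_ERROR when the distance between the mean gain-channel values of two neighbouring gain stages, measured in units of their combined standard deviation, falls below thresholds_gain_sigma. The short Python sketch below restates that criterion outside the notebook; the function name, the input shapes and the toy data are illustrative assumptions and are not part of the patch.

import numpy as np

def bad_gain_separation(gain_mean, gain_std, sigma_threshold=5.0):
    # gain_mean, gain_std: arrays of shape (..., 3) holding the per-gain-stage mean and
    # standard deviation of the gain channel (hypothetical inputs, one entry per gain stage).
    # Returns a boolean mask of shape (..., 2): one entry for the HG->MG transition and one
    # for the MG->LG transition, True where the separation is too small.
    separation = gain_mean[..., 1:] - gain_mean[..., :-1]
    combined_std = np.sqrt(gain_std[..., 1:] ** 2 + gain_std[..., :-1] ** 2)
    return separation / combined_std < sigma_threshold

# Toy usage, only to illustrate the shapes involved.
rng = np.random.default_rng(0)
means = np.stack([rng.normal(5000, 10, (8, 8)),
                  rng.normal(6500, 10, (8, 8)),
                  rng.normal(8500, 10, (8, 8))], axis=-1)
stds = np.full_like(means, 50.0)
print(bad_gain_separation(means, stds).shape)  # (8, 8, 2)

Transition g of the returned mask corresponds to the patch marking gain stage g+1 in badpix_g with BadPixels.GAIN_THRESHOLDING_ERROR.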