From 53f56cc56cf2521203f9199147cbd127dae6f993 Mon Sep 17 00:00:00 2001
From: Nuno Duarte <duarten@max-exfl014.desy.de>
Date: Fri, 28 Oct 2022 13:50:42 +0200
Subject: [PATCH] initial commit

---
 .../Characterize_FlatFields_ePix100_NBC.ipynb | 1418 +++++++++++++++++
 1 file changed, 1418 insertions(+)
 create mode 100644 notebooks/ePix100/Characterize_FlatFields_ePix100_NBC.ipynb

diff --git a/notebooks/ePix100/Characterize_FlatFields_ePix100_NBC.ipynb b/notebooks/ePix100/Characterize_FlatFields_ePix100_NBC.ipynb
new file mode 100644
index 000000000..893e4cf56
--- /dev/null
+++ b/notebooks/ePix100/Characterize_FlatFields_ePix100_NBC.ipynb
@@ -0,0 +1,1418 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "7cf33cf7",
+   "metadata": {},
+   "source": [
+    "# ePix100 Flat Field Characterization\n",
+    "\n",
+    "Author: European XFEL Detector Group, Version 1.0\n",
+    "\n",
+    "Generate gain maps from flat-field runs."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c96be80c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "in_folder = '/gpfs/exfel/exp/MID/202231/p900310/raw' # input folder, required\n",
+    "out_folder = '' # output folder, required\n",
+    "metadata_folder = '' # Directory containing calibration_metadata.yml when run by xfel-calibrate\n",
+    "sequences = [-1] # sequences to process, set to -1 for all, range allowed\n",
+    "run = 33 # which run to read data from, required\n",
+    "\n",
+    "# Parameters for accessing the raw data.\n",
+    "karabo_id = \"MID_EXP_EPIX-1\" # Karabo ID\n",
+    "karabo_da = \"EPIX01\" # data aggregators\n",
+    "receiver_template = \"RECEIVER\" # detector receiver template for accessing raw data files\n",
+    "path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # the template to use to access data\n",
+    "instrument_source_template = '{}/DET/{}:daqOutput' # instrument detector data source in h5files\n",
+    "constants_path = '' # Use constants in given constant file path\n",
+    "\n",
+    "# Fit parameters\n",
+    "peak_fitting = 'gauss' # method to find the peak position per pixel: 'median' or 'gauss'\n",
+    "N_sigma_interval = 5 # sigma interval to find the singles peak in each pixel\n",
+    "peak_energy = 8.048 # [keV] Cu Kα1\n",
+    "\n",
+    "# ADU range\n",
+    "ADU_range = [-50,500] # expected range that encloses the raw signal from the FF run\n",
+    "\n",
+    "# Cluster calculators (given in N times sigma noise)\n",
+    "split_evt_primary_threshold = 7 # Split event primary threshold\n",
+    "split_evt_secondary_threshold = 3 # Split event secondary threshold\n",
+    "split_evt_mip_threshold = 1000 # Threshold for rejection of MIP events (e.g. cosmic rays)\n",
+    "\n",
+    "# Parameters for the calibration database.\n",
+    "use_dir_creation_date = True\n",
+    "cal_db_interface = \"tcp://max-exfl016:8020\" # calibration DB interface to use\n",
+    "cal_db_timeout = 300000 # timeout on caldb requests\n",
+    "db_output = False # Output constants to the calibration database\n",
+    "local_output = True # Output constants locally\n",
+    "\n",
+    "# Conditions used for injected calibration constants.\n",
+    "bias_voltage = 200 # Bias voltage\n",
+    "in_vacuum = False # Detector operated in vacuum\n",
+    "fix_integration_time = -1 # Integration time. Set to -1 to read from .h5 file\n",
+    "fix_temperature = -1 # Fixed temperature in Kelvin. Set to -1 to read from .h5 file\n",
+    "temp_limits = 5 # Limit for parameter Operational temperature\n",
+    "\n",
+    "# Parameters used during selecting raw data trains.\n",
+    "min_trains = 1 # Minimum number of trains that should be available. Default 1.\n",
+    "max_trains = 0 # Maximum number of trains to use for processing. Set to 0 to use all available trains.\n",
+    "\n",
+    "# Don't delete! myMDC sends this by default.\n",
+    "operation_mode = '' # Detector operation mode, optional\n",
+    "\n",
+    "# TODO: delete after removing from calibration_configurations\n",
+    "db_module = '' # ID of module in calibration database, this parameter is ignored in the notebook. TODO: remove from calibration_configurations."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "6e89ae94",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "import time\n",
+    "import warnings\n",
+    "\n",
+    "import matplotlib.pyplot as plt\n",
+    "from matplotlib.colors import LogNorm\n",
+    "import numpy as np\n",
+    "from extra_data import RunDirectory, H5File\n",
+    "from pathlib import Path\n",
+    "from prettytable import PrettyTable\n",
+    "from scipy.optimize import curve_fit\n",
+    "\n",
+    "import XFELDetAna.xfelprofiler as xprof\n",
+    "from XFELDetAna import xfelpyanatools as xana\n",
+    "from XFELDetAna import xfelpycaltools as xcal\n",
+    "from XFELDetAna.plotting.util import prettyPlotting\n",
+    "\n",
+    "from cal_tools.enums import BadPixels\n",
+    "from cal_tools.step_timing import StepTimer\n",
+    "from cal_tools.epix100 import epix100lib\n",
+    "from cal_tools.tools import (\n",
+    "    get_dir_creation_date,\n",
+    "    get_pdu_from_db,\n",
+    "    get_constant_from_db,\n",
+    "    get_report,\n",
+    "    save_const_to_h5,\n",
+    "    send_to_db,\n",
+    ")\n",
+    "from iCalibrationDB import Conditions,Constants,Detectors"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "b410e39c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "%matplotlib inline\n",
+    "\n",
+    "warnings.filterwarnings('ignore')\n",
+    "\n",
+    "prettyPlotting = True\n",
+    "\n",
+    "profiler = xprof.Profiler()\n",
+    "profiler.disable()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "7f2f9f6c",
+   "metadata": {},
+   "source": [
+    "## Load Data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f93b80e6",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "instrument_src = instrument_source_template.format(karabo_id, receiver_template)\n",
+    "\n",
+    "# Run directory\n",
+    "proposal = list(filter(None, in_folder.strip('/').split('/')))[-2]\n",
+    "file_loc = f'proposal:{proposal} runs:{run}'\n",
+    "report = get_report(metadata_folder)\n",
+    "\n",
+    "ped_dir = Path(in_folder) / f'r{run:04d}'\n",
+    "run_dir = RunDirectory(ped_dir, _use_voview=False)\n",
+    "\n",
+    "print(f\"Run is: {run}\")\n",
+    "print(f\"Instrument H5File source: {instrument_src}\")\n",
+    "\n",
+    "creation_time = None\n",
+    "if use_dir_creation_date:\n",
+    "    creation_time = get_dir_creation_date(in_folder, run)\n",
+    "if creation_time:\n",
+    "    print(f\"Using {creation_time.isoformat()} as creation time\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ccf801b4",
+   "metadata": {
+    "slideshow": {
+     "slide_type": "-"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "# Path to pixels ADC values\n",
+    "pixels_src = (instrument_src, \"data.image.pixels\")\n",
+    "\n",
+    "# Specify total number of images to process\n",
+    "n_trains = run_dir.get_data_counts(*pixels_src).shape[0]\n",
+    "\n",
+    "# Modify n_trains to process based on given maximum and minimum number of trains.\n",
+    "if max_trains:\n",
+    "    n_trains = min(max_trains, n_trains)\n",
+    "\n",
+    "if n_trains < min_trains:\n",
+    "    raise ValueError(\n",
+    "        f\"Less than {min_trains} trains are available in RAW data.\"\n",
+    "        \" Not enough data to process flat-fields.\")\n",
+    "\n",
+    "# Sequences to read\n",
+    "seq_files = [Path(f.filename) for f in run_dir.select(f'*{karabo_id}*').files]\n",
+    "seq_files = sorted(seq_files)\n",
+    "seq0_size = H5File(seq_files[0]).get_data_counts(*pixels_src).size\n",
+    "\n",
+    "if sequences != [-1]:\n",
+    "    seq_files = [f for f in seq_files if any(f.match(f\"*-S{s:05d}.h5\") for s in sequences)]\n",
+    "\n",
+    "seq_files = seq_files[:(n_trains-1)//seq0_size+1]\n",
+    "\n",
+    "if not len(seq_files):\n",
+    "    raise IndexError(\"No sequence files available for the selected sequences.\")\n",
+    "\n",
+    "# Trains to be processed\n",
+    "trains = np.ndarray(0,dtype=int)\n",
+    "for seq in seq_files:\n",
+    "    seq_str = str(seq)\n",
+    "    n = int(seq_str[seq_str.rfind('-S0')+len('-S0'):seq_str.rfind('.h5')])\n",
+    "    seq_size = H5File(seq).get_data_counts(*pixels_src).size # last sequence might be smaller than seq0_size\n",
+    "    t = np.arange(n*seq0_size,n*seq0_size+seq_size)\n",
+    "    trains = np.append(trains,t)\n",
+    "\n",
+    "trains = trains[:n_trains]\n",
+    "\n",
+    "print(\"Reading from:\")\n",
+    "[print(f'\\t{seq}') for seq in seq_files]\n",
+    "print('\\nAvailable sequence files: ' + str(len(run_dir.select(f'*{karabo_id}*').files)))\n",
+    "print(f'Sequence files used for processing: {len(seq_files)}')\n",
+    "print(f'Images to analyze: {trains.size}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "aa4b9002",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Read sensor size\n",
+    "sensor_size = run_dir[instrument_src, 'data.image.dims'].as_single_value(reduce_by='first') # (x=768, y=708) expected\n",
+    "sensor_size = sensor_size[sensor_size != 1] # data.image.dims for old data is [768, 708, 1]\n",
+    "assert np.array_equal(sensor_size, [768, 708]), 'Unexpected sensor dimensions.'\n",
+    "\n",
+    "ctrl_data = epix100lib.epix100Ctrl(\n",
+    "    run_dc=run_dir,\n",
+    "    instrument_src=instrument_src,\n",
+    "    ctrl_src=f\"{karabo_id}/DET/CONTROL\",\n",
+    "    )\n",
+    "# Read integration time\n",
+    "if fix_integration_time == -1:\n",
+    "    integration_time = ctrl_data.get_integration_time()\n",
+    "    integration_time_str_add = ''\n",
+    "else:\n",
+    "    integration_time = fix_integration_time\n",
+    "    integration_time_str_add = '(manual input)'\n",
+    "\n",
+    "# Read temperature\n",
+    "if fix_temperature == -1:\n",
+    "    temperature = ctrl_data.get_temprature()\n",
+    "    temperature_k = temperature + 273.15\n",
+    "    temp_str_add = ''\n",
+    "else:\n",
+    "    temperature_k = fix_temperature\n",
+    "    temperature = fix_temperature - 273.15\n",
+    "    temp_str_add = '(manual input)'\n",
+    "\n",
+    "# Print operating conditions\n",
+    "print(f\"Bias voltage: {bias_voltage} V\")\n",
+    "print(f\"Detector integration time: {integration_time} \\u03BCs {integration_time_str_add}\")\n",
+    "print(f\"Mean temperature: {temperature:0.2f}°C / {temperature_k:0.2f} K {temp_str_add}\")\n",
+    "print(f\"Operated in vacuum: {in_vacuum}\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "400b0e3d",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "step_timer = StepTimer()\n",
+    "step_timer.start()\n",
+    "\n",
+    "# Read data\n",
+    "data_dc = run_dir.select(*pixels_src, require_all=True).select_trains(trains)\n",
+    "data = data_dc[pixels_src].ndarray().astype(np.float16)\n",
+    "\n",
+    "step_timer.done_step('Flat-fields loaded. Elapsed Time')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "7985697d",
+   "metadata": {
+    "tags": []
+   },
+   "source": [
+    "## Retrieve Necessary Calibration Constants"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "15bb48da",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "const_data = dict()\n",
+    "constants = ['Offset','Noise', 'BadPixelsDark']\n",
+    "\n",
+    "condition = Conditions.Dark.ePix100(bias_voltage=bias_voltage,\n",
+    "                                    integration_time=integration_time,\n",
+    "                                    temperature=temperature_k,\n",
+    "                                    in_vacuum=in_vacuum)\n",
+    "\n",
+    "for cname in constants:\n",
+    "    const_data[cname] = get_constant_from_db(\n",
+    "        karabo_id=karabo_id,\n",
+    "        karabo_da=karabo_da,\n",
+    "        constant=getattr(Constants.ePix100, cname)(),\n",
+    "        condition=condition,\n",
+    "        empty_constant=None,\n",
+    "        cal_db_interface=cal_db_interface,\n",
+    "        creation_time=creation_time,\n",
+    "        print_once=2,\n",
+    "        timeout=cal_db_timeout\n",
+    "    )"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "0be1ba68",
+   "metadata": {},
+   "source": [
+    "## Instantiate calculators"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d1913cc2",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "block_size = sensor_size//2\n",
+    "noiseSigma = 5\n",
+    "\n",
+    "cmCorrection_block = xcal.CommonModeCorrection(\n",
+    "    sensor_size.tolist(),\n",
+    "    block_size.tolist(),\n",
+    "    'block',\n",
+    "    noiseMap=const_data['Noise'].swapaxes(0,1),\n",
+    "    noiseSigma=noiseSigma,\n",
+    "    parallel=False)\n",
+    "cmCorrection_col = xcal.CommonModeCorrection(\n",
+    "    sensor_size.tolist(),\n",
+    "    block_size.tolist(),\n",
+    "    'col',\n",
+    "    noiseMap=const_data['Noise'].swapaxes(0,1),\n",
+    "    noiseSigma=noiseSigma,\n",
+    "    parallel=False)\n",
+    "cmCorrection_row = xcal.CommonModeCorrection(\n",
+    "    sensor_size.tolist(),\n",
+    "    block_size.tolist(),\n",
+    "    'row',\n",
+    "    noiseMap=const_data['Noise'].swapaxes(0,1),\n",
+    "    noiseSigma=noiseSigma,\n",
+    "    parallel=False)\n",
+    "\n",
+    "patternClassifier = xcal.PatternClassifier(\n",
+    "    shape=sensor_size.tolist(),\n",
+    "    noisemap=const_data['Noise'].swapaxes(0,1),\n",
+    "    primaryThreshold=split_evt_primary_threshold,\n",
+    "    secondaryThreshold=split_evt_secondary_threshold,\n",
+    "    upperThreshold=split_evt_mip_threshold,\n",
+    "    blockSize=block_size.tolist(),\n",
+    "    setPixelMask = const_data['BadPixelsDark'].flatten(),\n",
+    "    parallel=False\n",
+    ")\n",
+    "\n",
+    "patternSelector = xcal.PatternSelector(\n",
+    "    sensor_size.tolist(),\n",
+    "    selectionList = [100, 101], # singles patterns\n",
+    "    blockSize=block_size.tolist(),\n",
+    "    parallel=False)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "4681991f",
+   "metadata": {},
+   "source": [
+    "## Correct data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "85f34676",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "step_timer.start()\n",
+    "bins = np.arange(ADU_range[0],ADU_range[1],1)\n",
+    "\n",
+    "hist_O=hist_CM=hist_CS=0\n",
+    "timer_O=timer_CM=timer_CS=0\n",
+    "data_singles = np.empty(data.swapaxes(0,-1).shape,dtype=int)\n",
+    "\n",
+    "chunk_size = 100 # Data is processed by chunks to avoid memory overload\n",
+    "chunk = 0\n",
+    "while chunk < data.shape[0]:\n",
+    "\n",
+    "    prev_chunk = chunk\n",
+    "    chunk+=chunk_size\n",
+    "    if chunk > data.shape[0]: # last chunk may have different size\n",
+    "        chunk = data.shape[0]\n",
+    "\n",
+    "    d = data[prev_chunk:chunk]\n",
+    "\n",
+    "    # Offset correction\n",
+    "    t = time.time()\n",
+    "    d -= 
const_data['Offset'].squeeze()\n", + " hist_O += np.histogram(d.flatten(),bins=bins)[0]\n", + " timer_O += time.time()-t\n", + "\n", + " # Common Mode correction\n", + " t = time.time()\n", + " d = d.swapaxes(0,-1)\n", + " d = cmCorrection_block.correct(d)\n", + " d = cmCorrection_col.correct(d)\n", + " d = cmCorrection_row.correct(d)\n", + " hist_CM += np.histogram(d.flatten(),bins=bins)[0]\n", + " timer_CM += time.time()-t\n", + " \n", + " # Charge Sharing correction\n", + " t = time.time()\n", + " d, patterns = patternClassifier.classify(d)\n", + " data_singles[:,:,prev_chunk:chunk],fs = patternSelector.select(d,patterns)\n", + " hist_CS += np.histogram(d[d>0].flatten(),bins=bins)[0]\n", + " timer_CS += time.time()-t\n", + " data[prev_chunk:chunk] = d.swapaxes(0,-1)\n", + "\n", + " print(f'Corrected trains: {chunk} ({int(chunk/data.shape[0]*100)}%)',end='\\r')\n", + "\n", + "data_singles = data_singles.swapaxes(0,-1)\n", + "hist_S = np.histogram(data_singles[data_singles>0].flatten(),bins=bins)[0] # histogram of single events\n", + "\n", + "print('',end='\\r')\n", + "print(f'\\nCorrections applied:')\n", + "print(f' Offset: {int(timer_O)} s')\n", + "print(f' Common Mode: {int(timer_CM)} s')\n", + "print(f' Charge Sharing: {int(timer_CS)} s')\n", + "\n", + "step_timer.done_step('Corrected data. Elapsed Time')" + ] + }, + { + "cell_type": "markdown", + "id": "1e7cb152", + "metadata": {}, + "source": [ + "## Plot histograms" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "14648d1a", + "metadata": {}, + "outputs": [], + "source": [ + "step_timer.start()\n", + "\n", + "bins_c = bins[:-1]+np.diff(bins)[0]/2 # center of bins\n", + "\n", + "plt.figure(figsize=(12,8))\n", + "plt.plot(bins_c,hist_O, label='Offset corrected')\n", + "plt.plot(bins_c,hist_CM, label='Common Mode corrected')\n", + "plt.plot(bins_c,hist_CS, label='Charge Sharing corrected')\n", + "plt.plot(bins_c,hist_S, label='Singles')\n", + "plt.xlim(ADU_range)\n", + "plt.yscale('log')\n", + "plt.xlabel('ADU',fontsize=12)\n", + "plt.title(f'{karabo_id} | {proposal} - r{run}', fontsize=14)\n", + "plt.legend(fontsize=12);\n", + "plt.grid(ls=':')\n", + "\n", + "step_timer.done_step('Calculated histograms. 
Elapsed Time')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "996ec108", + "metadata": {}, + "outputs": [], + "source": [ + "patternStats = patternClassifier.getPatternStats()\n", + "\n", + "n_singles = np.sum(patternStats['singles'])\n", + "n_doubles = np.sum(patternStats['doubles'])\n", + "n_triples = np.sum(patternStats['triples'])\n", + "n_quads = np.sum(patternStats['quads'])\n", + "n_clusters= np.sum(patternStats['clusters'])\n", + "known_patterns = np.sum((n_singles,n_doubles,n_triples,n_quads))\n", + "\n", + "t1,t2 = PrettyTable(),PrettyTable()\n", + "t1.field_names = ['Photon Hits','Frequency']\n", + "t1.add_row(['Big Clusters',f'{n_clusters/(known_patterns+n_clusters)*100:,.2f} %'])\n", + "t1.add_row(['Listed Patterns',f'{known_patterns/(known_patterns+n_clusters)*100:,.2f} %'])\n", + "t2.field_names = ['Listed Patterns','Frequency']\n", + "t2.add_row(['Singles',f'{n_singles/known_patterns*100:,.2f} %'])\n", + "t2.add_row(['Doubles',f'{n_doubles/known_patterns*100:,.2f} %'])\n", + "t2.add_row(['Triples',f'{n_triples/known_patterns*100:,.2f} %'])\n", + "t2.add_row(['Quadruplets',f'{n_quads/known_patterns*100:,.2f} %'])\n", + "print(f' Primary threshold: {split_evt_primary_threshold}')\n", + "print(f'Secondary threshold: {split_evt_secondary_threshold}')\n", + "print(t1)\n", + "print(t2)" + ] + }, + { + "cell_type": "markdown", + "id": "e5beb142", + "metadata": {}, + "source": [ + "## Flat-Field Statistics" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f6dd62a0", + "metadata": {}, + "outputs": [], + "source": [ + "# Definition of gaussian function for fitting\n", + "def gauss(x, *p):\n", + " A, mu, sigma = p\n", + " return A*np.exp(-(x-mu)**2/(2.*sigma**2))\n", + "\n", + "# rough initial estimate of fit parameters\n", + "fit_estimates = [np.max(hist_S), # amplitude\n", + " bins[np.argmax(hist_S)], # centroid\n", + " 10] # sigma" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e23abd09", + "metadata": {}, + "outputs": [], + "source": [ + "coeff, var_matrix = curve_fit(gauss, bins_c, hist_S, p0=fit_estimates)\n", + "singles_mu = coeff[1]\n", + "singles_sig = abs(coeff[2])\n", + "ROI = np.round([singles_mu-N_sigma_interval*singles_sig, # region of interest to find first photopeak per pixel\n", + " singles_mu+N_sigma_interval*singles_sig]).astype(int)\n", + "y_fit = gauss(bins_c, *coeff)\n", + "\n", + "plt.figure(figsize=(9,6))\n", + "plt.plot(bins_c,hist_S, 'k' , label = 'singles')\n", + "plt.plot(bins_c,y_fit, 'g--', label = 'gauss fit') \n", + "plt.ylim(1,max(hist_S)*1.5);\n", + "plt.xlim(ADU_range)\n", + "plt.vlines(coeff[1],0,plt.gca().get_ylim()[1],color='g',ls=':')\n", + "\n", + "plt.axvspan(ROI[0],\n", + " ROI[1],\n", + " alpha = .2,\n", + " color = 'green',\n", + " label = f'μ ± {N_sigma_interval}σ')\n", + "\n", + "plt.legend(fontsize=12);\n", + "plt.xlabel('ADU',fontsize=12)\n", + "plt.yscale('log')\n", + "plt.grid(ls=':')\n", + "plt.show()\n", + "\n", + "print('--------------------')\n", + "print('Fit parameters:')\n", + "print(f' centroid = {np.round(singles_mu,3)}')\n", + "print(f' sigma = {np.round(singles_sig,3)}')\n", + "print('---------------------')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ba77bc13", + "metadata": {}, + "outputs": [], + "source": [ + "# Calculate singles per pixel\n", + "step_timer.start()\n", + "\n", + "singles_per_pixel = np.empty(np.flip(sensor_size))\n", + "\n", + "for py in range(0,int(sensor_size[1])):\n", + " for px in 
range(0,int(sensor_size[0])):\n", + " singles_per_pixel[py,px] = np.sum((data_singles[:,py,px]>=ROI[0]) & (data_singles[:,py,px]<ROI[1]))\n", + "\n", + "step_timer.done_step('Calculated singles per pixel. Elapsed Time')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "888adca9", + "metadata": { + "scrolled": false + }, + "outputs": [], + "source": [ + "# Ignore bins that have less than 1% of counts compared to the bin with more counts\n", + "mask_bins = np.unique(singles_per_pixel,return_counts=True)[1] > np.max(np.unique(singles_per_pixel,return_counts=True)[1])*.01\n", + "last_bin = np.max(np.unique(singles_per_pixel)[mask_bins])\n", + "\n", + "# Plot singles distribution\n", + "fig = xana.heatmapPlot(\n", + " singles_per_pixel,\n", + " lut_label='# singles',\n", + " x_label='Column',\n", + " y_label='Row',\n", + " vmax = last_bin\n", + ")\n", + "fig.suptitle(f'Singles Distribution', x=.48, y=.9, fontsize=14)\n", + "fig.set_size_inches(h=10, w=10);\n", + "\n", + "plt.figure(figsize=(7,5))\n", + "plt.hist(singles_per_pixel.flatten(),bins=np.arange(0,last_bin,1),\n", + " align = 'left',\n", + " histtype = 'bar',\n", + " edgecolor='black', \n", + " linewidth=1.2)\n", + "plt.xlabel('Singles per pixel',fontsize=12)\n", + "plt.grid(ls='--',axis='y',color='b',alpha=.5)\n", + "plt.show()\n", + "\n", + "print(f'Average number of singles per pixel: {np.round(np.sum(data_singles>0)/np.prod(sensor_size),2)}')" + ] + }, + { + "cell_type": "markdown", + "id": "e8a591cf", + "metadata": {}, + "source": [ + "## Plot random sample pixels " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e02c7217", + "metadata": {}, + "outputs": [], + "source": [ + "N_sample_pixels = 16\n", + "\n", + "# Plot some random pixels, avoiding bad ones\n", + "np.random.seed(0)\n", + "sample_pixels = np.transpose([np.random.randint(0, sensor_size[0], N_sample_pixels),\n", + " np.random.randint(0, sensor_size[1], N_sample_pixels)])\n", + "while np.sum(const_data['BadPixelsDark'][sample_pixels[:,1],sample_pixels[:,0]]):\n", + " sample_pixels = np.transpose([np.random.randint(0, sensor_size[0], N_sample_pixels),\n", + " np.random.randint(0, sensor_size[1], N_sample_pixels)])\n", + "\n", + "fig = plt.figure(figsize=(20,20))\n", + "it_counter = 0\n", + "for px,py in sample_pixels:\n", + " it_counter+=1 \n", + " \n", + " plt.subplot(int(np.sqrt(N_sample_pixels)),int(np.sqrt(N_sample_pixels)),it_counter)\n", + " \n", + " h,ADU = np.histogram(data_singles[:,py,px],bins=np.arange(ROI[0],ROI[1]))\n", + " ADU_c = ADU[:-1] + np.diff(ADU)[0]/2 # center of bins\n", + " \n", + " p1 = plt.plot([],[],' ',label = f'({px},{py})')\n", + " p2 = plt.scatter(ADU_c[h>0], h[h>0],marker = 'x',c = 'k', label = 'singles')\n", + "\n", + " mdn = np.median(ADU_c[h>0])\n", + " if ~np.isnan(mdn):\n", + " p3 = plt.plot([mdn, mdn],[0,plt.gca().get_ylim()[1]],color='g', label = f'median={int(mdn)}')\n", + " else:\n", + " p3 = plt.plot([],[],' ', label = 'empty')\n", + " \n", + " try:\n", + " coeff, var_matrix = curve_fit(gauss, ADU_c, h, p0=[0, np.median(ADU_c[h>0]), singles_sig]) \n", + " y_fit = gauss(ADU_c, *coeff)\n", + " p4 = plt.plot(ADU_c, y_fit, label = f'fit: μ={int(np.round(coeff[1]))}')\n", + "\n", + " except (RuntimeError,ValueError):\n", + " p4 = plt.plot([],[],' ', label = 'fit error')\n", + " \n", + " plt.grid(ls=':')\n", + " plt.xlabel('ADU')\n", + " plt.xlim(ROI)\n", + " plt.ylim(bottom=0)\n", + " plt.legend()\n" + ] + }, + { + "cell_type": "markdown", + "id": "7ed693b6", + "metadata": {}, + "source": 
[
+    "## Fit single photon peaks per pixel"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4ec68114",
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
+   "source": [
+    "step_timer.start()\n",
+    "peak_map = np.zeros(np.flip(sensor_size))[...,np.newaxis]\n",
+    "\n",
+    "for py in range(0,int(sensor_size[1])):\n",
+    "    for px in range(0,int(sensor_size[0])):\n",
+    "        h,ADU = np.histogram(data_singles[:,py,px],bins=np.arange(ROI[0],ROI[1]))\n",
+    "        ADU_c = ADU[:-1] + np.diff(ADU)[0]/2 # center of bins\n",
+    "\n",
+    "        if np.sum(h):\n",
+    "            if peak_fitting=='median':\n",
+    "                peak_map[py,px] = np.median(ADU_c[h>0])\n",
+    "            elif peak_fitting=='gauss':\n",
+    "                try:\n",
+    "                    coeff, var_matrix = curve_fit(gauss, ADU_c, h, p0=[0, np.median(ADU_c[h>0]), singles_sig])\n",
+    "                    peak_map[py,px] = coeff[1]\n",
+    "                except RuntimeError:\n",
+    "                    pass # Failed fits remain 0\n",
+    "        else:\n",
+    "            peak_map[py,px] = -1 # Assign -1 to empty pixels\n",
+    "\n",
+    "peak_map[np.isnan(peak_map)] = 0 # Failed fits can return nan coefficients without raising an exception\n",
+    "step_timer.done_step(f'Calculated relative gain map using {peak_fitting} fit. Elapsed Time')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "aaedd476",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "plt.figure(figsize=(7,5))\n",
+    "plt.hist(peak_map.flatten(),bins=np.arange(ROI[0],ROI[1]),\n",
+    "         histtype = 'bar',\n",
+    "         edgecolor='black',\n",
+    "         alpha = .5,\n",
+    "         linewidth=1.2);\n",
+    "\n",
+    "h,ADU = np.histogram(peak_map.flatten(),bins=np.arange(ROI[0],ROI[1]))\n",
+    "ADU_c = ADU[:-1] + np.diff(ADU)[0]/2 # center of bins\n",
+    "\n",
+    "coeff, var_matrix = curve_fit(gauss, ADU_c, h, p0=[h.max()/2, singles_mu, singles_sig])\n",
+    "BP_fit_threshold = [coeff[1]-N_sigma_interval*abs(coeff[2]),\n",
+    "                    coeff[1]+N_sigma_interval*abs(coeff[2])]\n",
+    "y_fit = gauss(ADU_c, *coeff)\n",
+    "plt.plot(ADU_c,y_fit, label = f'fit: μ={int(np.round(coeff[1]))}')\n",
+    "plt.vlines(coeff[1],0,plt.gca().get_ylim()[1],color='orange',ls=':')\n",
+    "plt.axvspan(BP_fit_threshold[0],\n",
+    "            BP_fit_threshold[1],\n",
+    "            alpha = .3,\n",
+    "            color = 'orange',\n",
+    "            label = f'μ ± {N_sigma_interval}σ')\n",
+    "\n",
+    "plt.grid(ls=':')\n",
+    "plt.xlim(np.array(BP_fit_threshold)*[.9,1.1])\n",
+    "plt.xlabel('Peak position [ADU]',fontsize=12);\n",
+    "plt.legend(fontsize=12)\n",
+    "plt.title(f'{karabo_id} | {proposal} - r{run}', fontsize=12)\n",
+    "plt.ylim((1, coeff[0]*1.2))\n",
+    "plt.show()\n",
+    "\n",
+    "\n",
+    "print('--------------------')\n",
+    "print('Fit parameters:')\n",
+    "print(f' centroid = {np.round(coeff[1],3)}')\n",
+    "print(f' sigma = {np.round(abs(coeff[2]),3)}')\n",
+    "print('---------------------')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "97a62e5a",
+   "metadata": {},
+   "source": [
+    "## Flat-Field Bad Pixels"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ba92087e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "const_data['BadPixelsFF'] = np.zeros(np.flip(sensor_size))[...,np.newaxis]\n",
+    "\n",
+    "# Empty Pixels\n",
+    "const_data['BadPixelsFF'][peak_map==-1] = BadPixels.FF_NO_ENTRIES.value\n",
+    "\n",
+    "# Failed Fits\n",
+    "const_data['BadPixelsFF'][peak_map==0] = BadPixels.FF_GAIN_EVAL_ERROR.value\n",
+    "\n",
+    "# Gain out of range\n",
+    "const_data['BadPixelsFF'][(peak_map!=0) & (peak_map!=-1) & ((peak_map<BP_fit_threshold[0]) | (peak_map>BP_fit_threshold[1]))] = BadPixels.FF_GAIN_DEVIATION.value\n",
+    "\n",
+    "# Plot Bad Pixels Map\n",
+    "fig = 
xana.heatmapPlot(\n", + " np.nan_to_num(np.log2(const_data['BadPixelsFF'].squeeze())+1, neginf=np.nan),\n", + " cb_label='Bad pixel bit',\n", + " x_label='Column',\n", + " y_label='Row',\n", + ")\n", + "fig.suptitle(f'FF Bad Pixels Map({karabo_id} | {proposal} - r{run})', x=.5, y=.9, fontsize=16)\n", + "fig.set_size_inches(h=12, w=12)\n", + "\n", + "t = PrettyTable()\n", + "t.title = 'Flat-Field Bad Pixel Analysis'\n", + "t.field_names = ['Bit', 'Value', 'Type ', 'Counts', '%']\n", + "t.align['Type '] = 'r'\n", + "\n", + "for BP_type in [BadPixels.FF_GAIN_DEVIATION, BadPixels.FF_GAIN_EVAL_ERROR, BadPixels.FF_NO_ENTRIES]:\n", + " t.add_row([BP_type.bit_length(),\n", + " BP_type.value,\n", + " BP_type.name,\n", + " np.sum(const_data['BadPixelsFF']==BP_type.value),\n", + " np.round(100*np.sum(const_data['BadPixelsFF']==BP_type.value)/np.prod(sensor_size),2)\n", + " ])\n", + "t.add_row(['-','-',\n", + " 'Total',\n", + " np.sum(const_data['BadPixelsFF']>0),\n", + " np.round(100*np.sum(const_data['BadPixelsFF']>0)/np.prod(sensor_size),2)\n", + " ])\n", + "print(t)" + ] + }, + { + "cell_type": "markdown", + "id": "f4efeefe", + "metadata": {}, + "source": [ + "## Relative Gain Map" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "24bdbef2", + "metadata": {}, + "outputs": [], + "source": [ + "# Replace FF bad pixels with mean peak value\n", + "peak_map[const_data['BadPixelsFF']>0] = np.nanmean(peak_map[const_data['BadPixelsFF']==0])\n", + "\n", + "# Calculate relative gain\n", + "rel_gain_map = 1/(peak_map.squeeze()/np.mean(peak_map))\n", + "\n", + "fig = xana.heatmapPlot(\n", + " rel_gain_map,\n", + " cb_label='Relative gain',\n", + " x_label='Column',\n", + " y_label='Row',\n", + " vmin=np.floor(np.min(rel_gain_map)/.2)*.2, # force cb limits to be multiples of 0.2 \n", + " vmax=np.ceil(np.max(rel_gain_map)/.2)*.2\n", + ")\n", + "fig.suptitle(f'Relative Gain Map ({karabo_id} | {proposal} - r{run})', x=.48, y=.9, fontsize=16)\n", + "fig.set_size_inches(h=12, w=12)" + ] + }, + { + "cell_type": "markdown", + "id": "6485378b", + "metadata": {}, + "source": [ + "## Absolute Gain Conversion Constant" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ee55bccc", + "metadata": {}, + "outputs": [], + "source": [ + "step_timer.start()\n", + "\n", + "# Correct data with calculated gain map\n", + "data_gain_corrected = data*rel_gain_map\n", + "\n", + "h,ADU = np.histogram(data_gain_corrected.flatten(),\n", + " bins=np.arange(BP_fit_threshold[0],BP_fit_threshold[1]).astype(int))\n", + "ADU_c = ADU[:-1] + np.diff(ADU)[0]/2 # center of bins\n", + "\n", + "coeff, var_matrix = curve_fit(gauss, ADU_c, h, p0=[h.max()/2, singles_mu, singles_sig])\n", + "y_fit = gauss(ADU_c, *coeff)\n", + "\n", + "gain_conv_const = coeff[1]/peak_energy\n", + "\n", + "abs_gain_map = rel_gain_map/gain_conv_const\n", + "\n", + "step_timer.done_step('Calculated Gain Conversion Constant. 
Elapsed Time')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ef97ebf2", + "metadata": {}, + "outputs": [], + "source": [ + "plt.figure(figsize=(7,5))\n", + "\n", + "plt.scatter(ADU_c/gain_conv_const, h, color='k', marker='x', label='Gain Corrected')\n", + "plt.plot(ADU_c/gain_conv_const, y_fit, color='orange', label = f'fit: μ={(np.round(coeff[1],2))} ADU');\n", + "\n", + "plt.ylim(bottom=0)\n", + "plt.legend()\n", + "plt.grid(ls=':')\n", + "\n", + "plt.plot([peak_energy, peak_energy],[0,plt.gca().get_ylim()[1]],color='orange', ls = '--')\n", + "\n", + "ax1 = plt.gca()\n", + "ax2 = ax1.twiny()\n", + "ax2.set_xticks(ax1.get_xticks())\n", + "ax2.set_xbound(ax1.get_xbound())\n", + "ax2.set_xticklabels((ax1.get_xticks()*gain_conv_const).astype(int))\n", + "ax2.set_xlabel('ADU',fontsize=12)\n", + "ax1.set_xlabel('keV',fontsize=12)\n", + "\n", + "ax1.xaxis.label.set_color('red')\n", + "ax1.tick_params(axis='x', colors='red')\n", + "ax2.xaxis.label.set_color('blue')\n", + "ax2.tick_params(axis='x', colors='blue')\n", + "\n", + "plt.suptitle(f'Absolute Gain Conversion ({karabo_id} | {proposal} - r{run})',y =1.02,fontsize = 12)\n", + "plt.show()\n", + "\n", + "print(f'Gain conversion constant: {np.round(gain_conv_const,4)} ADU/keV')" + ] + }, + { + "cell_type": "markdown", + "id": "cf95ebad", + "metadata": {}, + "source": [ + "## Gain Map Validation\n", + "\n", + "Validation tests:\n", + "1. Inspect correlation between calculated gain map and gain map loaded from DB\n", + "2. Perform gain correction of current FF with calculated gain map and DB gain map and compare energy resolution and linearity" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "13a0408f", + "metadata": {}, + "outputs": [], + "source": [ + "# Retrieve DB RelativeGain Map\n", + "illum_condition_db = Conditions.Illuminated.ePix100(\n", + " bias_voltage=bias_voltage,\n", + " integration_time=integration_time,\n", + " temperature=temperature_k,\n", + " in_vacuum=in_vacuum,\n", + " photon_energy=peak_energy\n", + ")\n", + "\n", + "const_data_db = dict()\n", + "db_gain_map = get_constant_from_db(\n", + " karabo_id=karabo_id,\n", + " karabo_da=karabo_da,\n", + " constant=getattr(Constants.ePix100, 'RelativeGain')(),\n", + " condition=illum_condition_db,\n", + " empty_constant=None,\n", + " cal_db_interface=cal_db_interface,\n", + " creation_time=creation_time,\n", + " print_once=2,\n", + " timeout=cal_db_timeout\n", + ")\n", + "\n", + "if db_gain_map is None:\n", + " print('Waring: No previous RelativeGain map was found for this detector conditions.')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "16383412", + "metadata": {}, + "outputs": [], + "source": [ + "if db_gain_map is not None:\n", + " # Correlate new and DB gain maps\n", + " plt.figure(figsize=(7,7))\n", + "\n", + " plt.hist2d(db_gain_map.flatten(),\n", + " abs_gain_map.flatten(),\n", + " bins = 200,\n", + " norm=LogNorm(),\n", + " );\n", + " plt.xlabel('DB noise map',fontsize=12)\n", + " plt.ylabel('New noise map',fontsize=12)\n", + "\n", + " plt.xlim(np.min([db_gain_map,abs_gain_map]),np.max([db_gain_map,abs_gain_map]))\n", + " plt.ylim(np.min([db_gain_map,abs_gain_map]),np.max([db_gain_map,abs_gain_map]))\n", + " plt.grid(ls=':')\n", + "\n", + " rel_change = np.mean(abs(abs_gain_map-db_gain_map)/abs_gain_map)\n", + " print(f'Average relative change of new gain map: {np.round(rel_change*100,3)} %')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a2c1e6cb", + "metadata": {}, + 
"outputs": [], + "source": [ + "step_timer.start()\n", + "\n", + "N_validation_trains = 1000\n", + "data_dc = run_dir.select(*pixels_src, require_all=True).select_trains(trains[:N_validation_trains])\n", + "FF_data = data_dc[pixels_src].ndarray().astype(np.float16)\n", + "\n", + "# Offset_correction\n", + "FF_data -= const_data['Offset'].squeeze()\n", + "\n", + "# Common Mode correction\n", + "FF_data = FF_data.swapaxes(0,-1)\n", + "FF_data = cmCorrection_block.correct(FF_data)\n", + "FF_data = cmCorrection_col.correct(FF_data)\n", + "FF_data = cmCorrection_row.correct(FF_data)\n", + "FF_data = FF_data.swapaxes(0,-1)\n", + "\n", + "# Relative Gain & Charge Sharing correction\n", + "FF_data_new_map = FF_data*abs_gain_map*gain_conv_const\n", + "FF_data_new_map,pat = patternClassifier.classify(FF_data_new_map.swapaxes(0,-1))\n", + "if db_gain_map is not None:\n", + " gain_conv_const_db = 1/np.median(db_gain_map[const_data['BadPixelsDark'].squeeze()>0])\n", + " FF_data_db_map = FF_data*db_gain_map*gain_conv_const_db\n", + " FF_data_db_map,pat = patternClassifier.classify(FF_data_db_map.swapaxes(0,-1))\n", + " \n", + "# Charge Sharing correction (without gain correction) \n", + "FF_data,pat = patternClassifier.classify(FF_data.swapaxes(0,-1))\n", + "\n", + "step_timer.done_step('Corrected evaluation data. Elapsed Time')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bc812883", + "metadata": {}, + "outputs": [], + "source": [ + "bins_FF = np.arange(-50,800)\n", + "FF_hist_CS = np.histogram(FF_data[FF_data>0].flatten(),bins=bins_FF)[0]\n", + "\n", + "bins_keV_new = bins_FF/gain_conv_const\n", + "FF_hist_GC_new_map = np.histogram(FF_data_new_map/gain_conv_const,bins=bins_keV_new)[0]\n", + "\n", + "plt.figure(figsize=(12,8))\n", + "bins_ADU = bins_FF[:-1] + np.diff(bins_FF)[0]/2 # center of bins\n", + "bins_keV_new = bins_keV_new[:-1] + np.diff(bins_keV_new)[0]/2 # center of bins\n", + "plt.plot(bins_ADU,FF_hist_CS, color='black', label='Before gain correction')\n", + "plt.plot(bins_keV_new*gain_conv_const, FF_hist_GC_new_map, color='b', label='Gain correction with new map')\n", + "\n", + "if db_gain_map is not None:\n", + " bins_keV_db = bins_FF/gain_conv_const_db\n", + " FF_hist_GC_db_map = np.histogram(FF_data_db_map/gain_conv_const_db,bins=bins_keV_db)[0]\n", + " bins_keV_db = bins_keV_db[:-1] + np.diff(bins_keV_db)[0]/2 # center of bins\n", + " plt.plot(bins_keV_db*gain_conv_const_db, FF_hist_GC_db_map, color='r', label='Gain correction with DB map')\n", + "\n", + "plt.yscale('log')\n", + "plt.xlim(1,bins_FF[-1]+1)\n", + "\n", + "plt.xlabel('ADU',fontsize=12)\n", + "plt.legend(fontsize=12)\n", + "plt.title(f'{karabo_id} | {proposal} - r{run}', fontsize=14)\n", + "plt.grid(ls=':')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "73fe5ba3", + "metadata": { + "scrolled": false + }, + "outputs": [], + "source": [ + "N_peaks = 6\n", + "sigma_tol = 2 # sigma tolerance to show in gauss fit\n", + "\n", + "# Ignore split events below primary energy threshold\n", + "E_cut = np.mean(const_data['Noise'])*split_evt_primary_threshold/gain_conv_const\n", + "\n", + "too_many_peaks = True\n", + "while too_many_peaks: # Iterate backwards on number of peaks until no exception is thrown\n", + " try:\n", + " FF_hist_AGC_new = FF_hist_GC_new_map/gain_conv_const\n", + " if db_gain_map is not None:\n", + " FF_hist_AGC_db = FF_hist_GC_db_map/gain_conv_const_db\n", + " else:\n", + " FF_hist_AGC_db=None\n", + " bins_keV_db=None\n", + "\n", + " E_res,rel_dev,abs_dev = 
[],[],[]\n", + " colors = ['blue','red']\n", + "\n", + " for FF_hist_AGC,bins_keV,leg in zip([FF_hist_AGC_new,FF_hist_AGC_db],[bins_keV_new,bins_keV_db],['new','DB']):\n", + "\n", + " if FF_hist_AGC is None:\n", + " continue\n", + "\n", + " FF_hist_AGC = FF_hist_AGC[bins_keV>E_cut]\n", + " FF_hist_AGC[0]=0\n", + "\n", + " bins_keV = bins_keV[bins_keV>E_cut]\n", + " c = colors[0]\n", + " colors.pop(0)\n", + "\n", + " fig = plt.figure(figsize=(12,6))\n", + " plt.suptitle('Correction with '+leg+' gain map',fontsize = 15)\n", + "\n", + " plt.fill(bins_keV,FF_hist_AGC, color='k',alpha=.2,label='Corrected data')\n", + " plt.title(f'{karabo_id} | {proposal} - r{run}', fontsize=14)\n", + "\n", + " ylim_top = plt.gca().get_ylim()[1]\n", + "\n", + " ROI_shift = 0\n", + " for p in range(1,N_peaks+1):\n", + "\n", + " peak_ROI = np.array([p*peak_energy-peak_energy/2, p*peak_energy+peak_energy/2]) + ROI_shift\n", + " xx = (bins_keV>peak_ROI[0]) & (bins_keV<peak_ROI[1])\n", + "\n", + " coeff, var_matrix = curve_fit(gauss, bins_keV[xx], FF_hist_AGC[xx], p0=[FF_hist_AGC[xx].max(), p*peak_energy, 1])\n", + " y_fit = gauss(bins_keV[xx], *coeff)\n", + "\n", + " xx_sigma_lim = (bins_keV>coeff[1]-abs(coeff[2])*sigma_tol) & (bins_keV<coeff[1]+abs(coeff[2])*sigma_tol)\n", + "\n", + " plt.vlines(p*peak_energy,0,ylim_top,ls='-',color='grey',label=f'expected peaks')\n", + " plt.fill_between(bins_keV[xx_sigma_lim],\n", + " FF_hist_AGC[xx_sigma_lim],\n", + " color='orange',\n", + " alpha=.5,\n", + " label=f'μ ± {sigma_tol} σ')\n", + " plt.plot(bins_keV[xx],y_fit,color=c)\n", + " plt.vlines(coeff[1],0,ylim_top,ls='--',color=c,label=f'peak {p}: {coeff[1]:,.2f} keV')\n", + "\n", + " ROI_shift = coeff[1] - p*peak_energy \n", + "\n", + " E_res.append(abs(2*np.sqrt(2*np.log(2))*coeff[2]/coeff[1])*100)\n", + " abs_dev.append(coeff[1]-peak_energy*p)\n", + " rel_dev.append(abs(abs_dev[-1])/(peak_energy*p)*100)\n", + "\n", + " plt.yscale('log') \n", + " plt.xlabel('keV',fontsize=12)\n", + " plt.xlim(left=0)\n", + " plt.ylim(1,ylim_top)\n", + "\n", + " # Remove repeated entries from legend\n", + " handles, labels = plt.gca().get_legend_handles_labels()\n", + " by_label = dict(zip(labels, handles))\n", + " plt.legend(by_label.values(), by_label.keys())\n", + " plt.grid(ls=':')\n", + "\n", + " t = PrettyTable()\n", + " t.field_names = ['Peak','Energy Resolution','Rel. Deviation','Abs. 
Deviation']\n", + " t.title = f'{leg} gain map'\n", + " for p in range(-N_peaks,0):\n", + " t.add_row([f'#{p+N_peaks+1}: {peak_energy*(p+N_peaks+1):,.3f} keV',\n", + " f'{E_res[p]:,.2f} %', \n", + " f'{rel_dev[p]:,.2f} %',\n", + " f'{abs_dev[p]:,.2f} keV']) \n", + " print(t)\n", + " \n", + " too_many_peaks = False\n", + " plt.show()\n", + "\n", + " # throw exception if fit fails due to wrong estimate of number of peaks\n", + " except RuntimeError: \n", + " N_peaks -= 1\n", + " plt.close(fig) # delete plots if exception was found due to wrong number of peaks" + ] + }, + { + "cell_type": "markdown", + "id": "efade482", + "metadata": {}, + "source": [ + "## Linearity Analysis" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d0098fd3", + "metadata": {}, + "outputs": [], + "source": [ + "peaks = np.arange(1,N_peaks+1)\n", + "plt.figure(figsize=(15,6))\n", + "plt.subplot(1,2,1)\n", + "plt.plot(peaks,[peak_energy*p for p in peaks], '-', c='k', label='expected')\n", + "plt.plot(peaks,[peak_energy*p for p in peaks]+np.array(abs_dev[:N_peaks]), 'o', c='b')\n", + "new_fit= np.polyfit(peaks,[peak_energy*p for p in peaks]+np.array(abs_dev[:N_peaks]),1)\n", + "\n", + "plt.plot(peaks,new_fit[0]*peaks+new_fit[1], '--', c='b', label='New gain map')\n", + "str_theo = f'$a_1$={peak_energy :,.4f}, $a_0$=0'\n", + "str_new = f'$a_1$={new_fit[0]:,.4f}, $a_0$={new_fit[1]:,.4f}'\n", + "plt.annotate(s=str_theo,xy=(.36,.94),xycoords='axes fraction',fontsize=11,bbox=dict(facecolor='k',alpha=.2,pad=1))\n", + "plt.annotate(s=str_new ,xy=(.36,.88),xycoords='axes fraction',fontsize=11,bbox=dict(facecolor='b',alpha=.2,pad=1))\n", + "\n", + "xx = np.arange(0,101)\n", + "y_fit_new = new_fit[0]*xx+new_fit[1] # extrapolation for 100 photons\n", + "\n", + "plt.xticks(peaks)\n", + "plt.title(f'Linearity ({karabo_id} | {proposal} - r{run})')\n", + "plt.xlabel('# Photons')\n", + "plt.ylabel('Energy (keV)')\n", + "plt.legend(fontsize=12)\n", + "plt.grid(ls=':')\n", + "\n", + "plt.subplot(1,2,2)\n", + "dev_new = abs(y_fit_new-(peak_energy*xx))/(peak_energy*xx)*100\n", + "plt.plot(xx,dev_new,c='b', label='New gain map')\n", + "plt.xscale('log')\n", + "plt.xlim(1,100)\n", + "plt.xlabel('# Photons')\n", + "plt.ylabel('Linearity Deviation (%)')\n", + "plt.title(f'Linearity extrapolation ({karabo_id} | {proposal} - r{run})')\n", + "plt.grid(ls=':',which='both')\n", + "\n", + "if db_gain_map is not None:\n", + " plt.subplot(1,2,1)\n", + " \n", + " db_fit = np.polyfit(peaks,[peak_energy*p for p in peaks]+np.array(abs_dev[N_peaks:]),1)\n", + " plt.plot(peaks,[peak_energy*p for p in peaks]+np.array(abs_dev[N_peaks:]), 'o', c='r')\n", + " plt.plot(peaks,db_fit[0]*peaks+db_fit[1], '--', c='r', label='DB gain map')\n", + " \n", + " str_db = f'$a_1$={db_fit[0] :,.4f}, $a_0$={db_fit[1] :,.4f}'\n", + " y_fit_db = db_fit[0]*xx+db_fit[1] # extrapolation for 100 photons\n", + " plt.annotate(s=str_db ,xy=(.36,.82),xycoords='axes fraction',fontsize=11,bbox=dict(facecolor='r',alpha=.2,pad=1))\n", + "\n", + " plt.subplot(1,2,2)\n", + " dev_db = abs(y_fit_db-(peak_energy*xx))/(peak_energy*xx)*100\n", + " plt.plot(xx,dev_db,c='r', label='DB gain map')\n", + "\n", + "plt.subplot(1,2,1)\n", + "leg = plt.legend(fontsize=12)\n", + "plt.subplot(1,2,2)\n", + "leg = plt.legend(fontsize=12)" + ] + }, + { + "cell_type": "markdown", + "id": "39554980", + "metadata": {}, + "source": [ + "## Energy Resolution Analysis" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cc661463", + "metadata": {}, + "outputs": [], + 
"source": [ + "def power_function(x,*p):\n", + " a,b,c = p\n", + " return a*x**b + c\n", + "\n", + "# rough initial estimate of fit parameters\n", + "fit_estimates = [20,-.5,0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f29ff449", + "metadata": {}, + "outputs": [], + "source": [ + "# Linearity of the visualized peaks\n", + "plt.figure(figsize=(8,6))\n", + "plt.plot(peaks,E_res[:N_peaks], 'o', c='b', label='New gain map')\n", + "\n", + "xx = np.arange(0,10,.1)\n", + "coeff,param = curve_fit(power_function,peaks,E_res[:N_peaks],p0=fit_estimates)\n", + "new_fit = power_function(xx,*coeff)\n", + "\n", + "plt.plot(xx,new_fit, '--', c='b')\n", + "\n", + "if db_gain_map is not None:\n", + " plt.plot(peaks,E_res[N_peaks:], 'o', c='r', label='DB gain map')\n", + "\n", + "plt.title(f'Energy Resolution ({karabo_id} | {proposal} - r{run})')\n", + "plt.xlabel('# Photons')\n", + "plt.ylabel('Energy Resolution (%)')\n", + "plt.legend(fontsize=12)\n", + "plt.xticks(xx*10)\n", + "plt.xlim(1,10)\n", + "plt.ylim(0,30)\n", + "plt.grid(ls=':')" + ] + }, + { + "cell_type": "markdown", + "id": "43336fd9", + "metadata": {}, + "source": [ + "## Calibration Constants DB\n", + "Send the flat-field constants (RelativeGain and BadPixelsFF) to the database and/or save them locally." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "78f37290", + "metadata": {}, + "outputs": [], + "source": [ + "# Save constants to DB\n", + "\n", + "md = None\n", + "\n", + "constant_maps = {'RelativeGain': abs_gain_map,\n", + " 'BadPixelsFF': const_data['BadPixelsFF']\n", + " } \n", + "\n", + "for const_name in constant_maps.keys():\n", + " const = getattr(Constants.ePix100, const_name)()\n", + " const.data = constant_maps[const_name].data\n", + "\n", + " # Set the operating condition\n", + " condition = Conditions.Illuminated.ePix100(\n", + " bias_voltage=bias_voltage,\n", + " photon_energy=peak_energy,\n", + " integration_time=integration_time,\n", + " temperature=temperature_k,\n", + " in_vacuum=in_vacuum,\n", + " )\n", + " \n", + " for parm in condition.parameters:\n", + " if parm.name == \"Sensor Temperature\":\n", + " parm.lower_deviation = temp_limits\n", + " parm.upper_deviation = temp_limits\n", + "\n", + " # Get physical detector unit\n", + " db_module = get_pdu_from_db(\n", + " karabo_id=karabo_id,\n", + " karabo_da=karabo_da,\n", + " constant=const,\n", + " condition=condition,\n", + " cal_db_interface=cal_db_interface,\n", + " snapshot_at=creation_time)[0]\n", + "\n", + " # Inject or save calibration constants\n", + " if db_output:\n", + " md = send_to_db(\n", + " db_module=db_module,\n", + " karabo_id=karabo_id,\n", + " constant=const,\n", + " condition=condition,\n", + " file_loc=file_loc,\n", + " report_path=report,\n", + " cal_db_interface=cal_db_interface,\n", + " creation_time=creation_time,\n", + " timeout=cal_db_timeout\n", + " )\n", + "\n", + " if local_output:\n", + " md = save_const_to_h5(\n", + " db_module=db_module,\n", + " karabo_id=karabo_id,\n", + " constant=const,\n", + " condition=condition,\n", + " data=const.data,\n", + " file_loc=file_loc,\n", + " report=report,\n", + " creation_time=creation_time,\n", + " out_folder=out_folder\n", + " )\n", + " print(f\"Calibration constant {const_name} is stored locally at {out_folder} \\n\")\n", + "\n", + "print(\"Constants parameter conditions are:\\n\"\n", + " f\"• Bias voltage: {bias_voltage}\\n\"\n", + " f\"• Integration time: {integration_time}\\n\"\n", + " f\"• Temperature: {temperature_k}\\n\"\n", + " f\"• 
Source Energy: {peak_energy} keV\\n\"\n",
+    "      f\"• In Vacuum: {in_vacuum}\\n\"\n",
+    "      f\"• Creation time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\\n\")"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "cal_venv",
+   "language": "python",
+   "name": "cal_venv"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.8.11"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
-- 
GitLab