diff --git a/notebooks/ePix100/Correction_ePix100_NBC.ipynb b/notebooks/ePix100/Correction_ePix100_NBC.ipynb
index 083e048249c03e15a6ea821b4e0601766dcc361a..878edb18e2346902e08fe6d2d56be7176afff197 100644
--- a/notebooks/ePix100/Correction_ePix100_NBC.ipynb
+++ b/notebooks/ePix100/Correction_ePix100_NBC.ipynb
@@ -14,48 +14,47 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "ExecuteTime": {
-     "end_time": "2018-12-06T15:54:23.218849Z",
-     "start_time": "2018-12-06T15:54:23.166497Z"
-    }
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
-    "cluster_profile = \"noDB\" # ipcluster profile to use\n",
-    "in_folder = \"/gpfs/exfel/exp/MID/201930/p900071/raw\" # input folder, required\n",
-    "out_folder = '/gpfs/exfel/data/scratch/karnem/test/' # output folder, required\n",
-    "sequences = [-1] # sequences to correct, set to -1 for all, range allowed\n",
-    "run = 466 # which run to read data from, required\n",
-    "\n",
-    "karabo_id = \"MID_EXP_EPIX-1\" # karabo karabo_id\n",
-    "karabo_da = \"DA01\" # data aggregators\n",
-    "receiver_id = \"RECEIVER\" # inset for receiver devices\n",
-    "path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # the template to use to access data\n",
-    "h5path = '/INSTRUMENT/{}/DET/{}:daqOutput/data/image' # path in the HDF5 file to images\n",
+    "cluster_profile = \"noDB\" # ipcluster profile to use\n",
+    "in_folder = \"/gpfs/exfel/exp/CALLAB/202031/p900113/raw\" # input folder, required\n",
+    "out_folder = \"\" # output folder, required\n",
+    "sequences = [-1] # sequences to correct, set to -1 for all, range allowed\n",
+    "run = 9988 # which run to read data from, required\n",
+    "\n",
+    "karabo_id = \"MID_EXP_EPIX-1\" # karabo id\n",
+    "karabo_da = \"EPIX01\" # data aggregators\n",
+    "db_module = \"ePix100_M15\" # module id in the database\n",
+    "receiver_id = \"RECEIVER\" # inset for receiver devices\n",
+    "path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # the template to use to access data\n",
+    "h5path = '/INSTRUMENT/{}/DET/{}:daqOutput/data/image' # path in the HDF5 file to images\n",
     "h5path_t = '/INSTRUMENT/{}/DET/{}:daqOutput/data/backTemp' # path to find temperature at\n",
     "h5path_cntrl = '/CONTROL/{}/DET' # path to control data\n",
     "\n",
-    "use_dir_creation_date = True # date constants injected before directory creation time\n",
-    "cal_db_interface = \"tcp://max-exfl016:8015#8025\" # calibration DB interface to use\n",
-    "cal_db_timeout = 300000 # timeout on caldb requests\n",
-    "\n",
-    "cpuCores = 4 # Specifies the number of running cpu cores\n",
-    "chunk_size_idim = 1 # H5 chunking size of output data\n",
-    "overwrite = True # overwrite output folder\n",
-    "limit_images = 0 # process only first N images, 0 - process all\n",
-    "db_module = \"ePix100_M15\" # module id in the database\n",
-    "sequences_per_node = 1 # number of sequence files per cluster node if run as slurm job, set to 0 to not run SLURM parallel\n",
-    "bias_voltage = 200 # bias voltage\n",
-    "in_vacuum = False # detector operated in vacuum\n",
-    "fix_temperature = 290. # fix temperature to this value\n",
-    "gain_photon_energy = 9.0 # Photon energy used for gain calibration\n",
-    "photon_energy = 8.0 # Photon energy to calibrate in number of photons, 0 for calibration in keV \n",
-    "no_relative_gain = False # do not do gain correction\n",
-    "split_evt_primary_threshold = 7. # primary threshold for split event correction\n",
-    "split_evt_secondary_threshold = 5. # secondary threshold for split event correction\n",
-    "split_evt_mip_threshold = 1000. # minimum ionizing particle threshold\n",
-    " \n",
+    "use_dir_creation_date = True # date constants injected before directory creation time\n",
+    "cal_db_interface = \"tcp://max-exfl016:8015#8025\" # calibration DB interface to use\n",
+    "cal_db_timeout = 300000 # timeout on caldb requests\n",
+    "\n",
+    "cpuCores = 4 # Specifies the number of running cpu cores\n",
+    "chunk_size_idim = 1 # H5 chunking size of output data\n",
+    "overwrite = True # overwrite output folder\n",
+    "limit_images = 0 # process only first N images, 0 - process all\n",
+    "sequences_per_node = 1 # number of sequence files per cluster node if run as slurm job, set to 0 to not run SLURM parallel\n",
+    "\n",
+    "bias_voltage = 200 # bias voltage\n",
+    "in_vacuum = False # detector operated in vacuum\n",
+    "fix_temperature = 290. # fix temperature to this value\n",
+    "gain_photon_energy = 9.0 # Photon energy used for gain calibration\n",
+    "photon_energy = 8.0 # Photon energy to calibrate in number of photons, 0 for calibration in keV\n",
+    "\n",
+    "relative_gain = False # Apply relative gain correction.\n",
+    "\n",
+    "split_evt_primary_threshold = 7. # primary threshold for split event correction\n",
+    "split_evt_secondary_threshold = 5. # secondary threshold for split event correction\n",
+    "split_evt_mip_threshold = 1000. # minimum ionizing particle threshold\n",
+    "\n",
+    "\n",
     "def balance_sequences(in_folder, run, sequences, sequences_per_node, karabo_da):\n",
     "    from xfel_calibrate.calibrate import balance_sequences as bs\n",
     "    return bs(in_folder, run, sequences, sequences_per_node, karabo_da)"
@@ -64,190 +63,157 @@
   {
    "cell_type": "code",
"execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# TODO: expose to first cell after fixing common mode correction.\n", + "common_mode = False # Apply common mode correction.\n", "\n", "h5path = h5path.format(karabo_id, receiver_id)\n", "h5path_t = h5path_t.format(karabo_id, receiver_id)\n", "h5path_cntrl = h5path_cntrl.format(karabo_id)\n", - "\n", "plot_unit = 'ADU'\n", - "if not no_relative_gain:\n", + "\n", + "if relative_gain:\n", " plot_unit = 'keV'\n", - " if photon_energy>0:\n", - " plot_unit = '$\\gamma$'" + " if photon_energy > 0:\n", + " plot_unit = '$\\gamma$'" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T15:54:23.679069Z", - "start_time": "2018-12-06T15:54:23.662821Z" - } - }, + "metadata": {}, "outputs": [], "source": [ "x = 708 # rows of the ePix100\n", "y = 768 # columns of the ePix100\n", - " \n", - "ped_dir = \"{}/r{:04d}\".format(in_folder, run)\n", - "fp_name = path_template.format(run, karabo_da)\n", - "fp_path = '{}/{}'.format(ped_dir, fp_name)\n", "\n", - "print(\"Reading from: {}\".format(fp_path))\n", - "print(\"Run is: {}\".format(run))\n", - "print(\"HDF5 path: {}\".format(h5path))\n", - "print(\"Data is output to: {}\".format(out_folder))\n", + "in_folder = Path(in_folder)\n", + "ped_dir = in_folder / f\"r{run:04d}\"\n", + "fp_name = path_template.format(run, karabo_da)\n", "\n", - "import datetime\n", + "print(f\"Reading from: {ped_dir / fp_name}\")\n", + "print(f\"Run is: {run}\")\n", + "print(f\"HDF5 path: {h5path}\")\n", + "print(f\"Data is output to: {out_folder}\")\n", "\n", "creation_time = None\n", "if use_dir_creation_date:\n", " creation_time = get_dir_creation_date(in_folder, run)\n", "if creation_time:\n", - " print(\"Using {} as creation time\".format(creation_time.isoformat())) " + " print(f\"Using {creation_time.isoformat()} as creation time\")" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T15:54:23.913269Z", - "start_time": "2018-12-06T15:54:23.868910Z" - }, - "scrolled": true - }, + "metadata": {}, "outputs": [], "source": [ "sensorSize = [x, y]\n", - "chunkSize = 100 #Number of images to read per chunk\n", - "blockSize = [sensorSize[0]//2, sensorSize[1]//2] # Sensor area will be analysed according to blocksize\n", + "chunkSize = 100 # Number of images to read per chunk\n", + "# Sensor area will be analysed according to blocksize\n", + "blockSize = [sensorSize[0]//2, sensorSize[1]//2]\n", "xcal.defaultBlockSize = blockSize\n", - "memoryCells = 1 # ePIX has no memory cell\n", + "memoryCells = 1 # ePIX has no memory cells\n", "run_parallel = True\n", "\n", - "\n", - "filename = fp_path.format(sequences[0] if sequences else 0)\n", + "# Read slow data from the first available sequence file.\n", + "filename = ped_dir / fp_name.format(sequences[0] if sequences[0] != -1 else 0)\n", "with h5py.File(filename, 'r') as f:\n", - " integration_time = int(f['{}/CONTROL/expTime/value'.format(h5path_cntrl)][0])\n", - " temperature = np.mean(f[h5path_t])/100.\n", + " integration_time = int(f[f\"{h5path_cntrl}/CONTROL/expTime/value\"][0])\n", + " temperature = np.mean(f[h5path_t]) / 100.\n", " temperature_k = temperature + 273.15\n", " if fix_temperature != 0:\n", " temperature_k = fix_temperature\n", " print(\"Temperature is fixed!\")\n", - " print(\"Bias voltage is {} V\".format(bias_voltage))\n", - " print(\"Detector integration time is set to {}\".format(integration_time))\n", - " print(\"Mean 
temperature was {:0.2f} °C / {:0.2f} K at beginning of run\".format(temperature, temperature_k))\n", - " print(\"Operated in vacuum: {} \".format(in_vacuum))\n", - "\n", - "if not os.path.exists(out_folder):\n", - " os.makedirs(out_folder)\n", - "elif not overwrite:\n", - " raise AttributeError(\"Output path exists! Exiting\") " + " print(f\"Bias voltage is {bias_voltage} V\")\n", + " print(f\"Detector integration time is set to {integration_time}\")\n", + " print(\n", + " f\"Mean temperature was {temperature:0.2f} °C \"\n", + " f\"/ {temperature_k:0.2f} K at beginning of run\"\n", + " )\n", + " print(f\"Operated in vacuum: {in_vacuum} \")\n", + "\n", + "out_folder = Path(out_folder)\n", + "if out_folder.is_dir() and not overwrite:\n", + " raise AttributeError(\"Output path exists! Exiting\")\n", + "out_folder.mkdir(parents=True, exist_ok=True)" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T15:54:24.088948Z", - "start_time": "2018-12-06T15:54:24.059925Z" - } - }, + "metadata": {}, "outputs": [], "source": [ - "dirlist = sorted(os.listdir(ped_dir))\n", - "file_list = []\n", - "total_sequences = 0\n", - "fsequences = []\n", - "for entry in dirlist:\n", - "\n", - " #only h5 file\n", - " abs_entry = \"{}/{}\".format(ped_dir, entry)\n", - " if os.path.isfile(abs_entry) and os.path.splitext(abs_entry)[1] == \".h5\":\n", - " \n", - " if sequences is None:\n", - " for seq in range(len(dirlist)):\n", - " \n", - " if path_template.format(run, karabo_da).format(seq) in abs_entry:\n", - " file_list.append(abs_entry)\n", - " total_sequences += 1\n", - " fsequences.append(seq)\n", - " else:\n", - " for seq in sequences:\n", - " \n", - " if path_template.format(run, karabo_da).format(seq) in abs_entry:\n", - " file_list.append(os.path.abspath(abs_entry))\n", - " total_sequences += 1\n", - " fsequences.append(seq)\n", - "sequences = fsequences" + "# Glob the right *.h5 fast data files.\n", + "seq_files = sorted(ped_dir.glob(f\"*{karabo_da}*.h5\"))\n", + "\n", + "# If a set of sequences requested to correct,\n", + "# adapt seq_files list.\n", + "if sequences != [-1]:\n", + " seq_files = [f for f in seq_files\n", + " if any(f.match(f\"*-S{s:05d}.h5\") for s in sequences)]\n", + "\n", + "print(f\"Processing a total of {len(seq_files)} sequence files\")" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T18:43:39.776018Z", - "start_time": "2018-12-06T18:43:39.759185Z" - } - }, + "metadata": {}, "outputs": [], "source": [ - "import tabulate\n", - "from IPython.display import HTML, Latex, Markdown, display\n", - "\n", - "print(\"Processing a total of {} sequence files\".format(total_sequences))\n", - "table = []\n", - "\n", - "\n", - "for k, f in enumerate(file_list):\n", - " table.append((k, f))\n", - "if len(table): \n", - " md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=[\"#\", \"file\"]))) " + "# Table of sequence files to process\n", + "table = [(k, f) for k, f in enumerate(seq_files)]\n", + "\n", + "if len(table):\n", + " md = display(Latex(tabulate.tabulate(\n", + " table,\n", + " tablefmt='latex',\n", + " headers=[\"#\", \"file\"]\n", + " )))" ] }, { @@ -260,138 +226,110 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T15:54:28.254544Z", - "start_time": "2018-12-06T15:54:24.709521Z" - } - }, + "metadata": {}, "outputs": [], "source": [ - "dclass=\"ePix100\"\n", - "const_name = 
\"Offset\"\n", - "offsetMap = None\n", "temp_limits = 5.\n", "\n", - "# Offset\n", - "det = getattr(Detectors, db_module)\n", - "dconstants = getattr(Constants, dclass)\n", + "cond_dict = {\n", + " \"bias_voltage\": bias_voltage,\n", + " \"integration_time\": integration_time,\n", + " \"temperature\": temperature_k,\n", + " \"in_vacuum\": in_vacuum,\n", + "}\n", + "\n", + "dark_condition = Conditions.Dark.ePix100(**cond_dict)\n", + "\n", + "# update conditions with illuminated conditins.\n", + "cond_dict.update({\n", + " \"photon_energy\": gain_photon_energy\n", + " })\n", "\n", - "condition = Conditions.Dark.ePix100(bias_voltage=bias_voltage,\n", - " integration_time=integration_time,\n", - " temperature=temperature_k,\n", - " in_vacuum=in_vacuum)\n", + "illum_condition = Conditions.Illuminated.ePix100(**cond_dict)\n", "\n", - "for parm in condition.parameters:\n", + "const_cond = {\n", + " \"Offset\": dark_condition,\n", + " \"Noise\": dark_condition,\n", + " \"RelativeGain\": illum_condition,\n", + "}\n", + "\n", + "const_data = dict()\n", + "\n", + "for cname, condition in const_cond.items():\n", + " if cname == \"RelativeGain\" and not relative_gain:\n", + " continue\n", + " # TODO: Fix this logic.\n", + " for parm in condition.parameters:\n", " if parm.name == \"Sensor Temperature\":\n", " parm.lower_deviation = temp_limits\n", " parm.upper_deviation = temp_limits\n", "\n", - "\n", - "\n", - "offsetMap = get_constant_from_db(karabo_id, karabo_da,\n", - " getattr(dconstants, const_name)(),\n", - " condition,\n", - " None,\n", - " cal_db_interface,\n", - " creation_time=creation_time,\n", - " print_once=2,\n", - " timeout=cal_db_timeout)\n", - "\n", - "# Noise\n", - "const_name = \"Noise\"\n", - "condition = Conditions.Dark.ePix100(bias_voltage=bias_voltage,\n", - " integration_time=integration_time,\n", - " temperature=temperature_k,\n", - " in_vacuum=in_vacuum)\n", - "\n", - "noiseMap = get_constant_from_db(karabo_id, karabo_da,\n", - " getattr(dconstants, const_name)(),\n", - " condition,\n", - " None,\n", - " cal_db_interface,\n", - " creation_time=creation_time,\n", - " print_once=2,\n", - " timeout=cal_db_timeout)\n", - "\n", - "# Gain\n", - "if not no_relative_gain:\n", - " const_name = \"RelativeGain\"\n", - " condition = Conditions.Illuminated.ePix100(photon_energy=gain_photon_energy,\n", - " bias_voltage=bias_voltage,\n", - " integration_time=integration_time,\n", - " temperature=temperature_k,\n", - " in_vacuum=in_vacuum)\n", - "\n", - " gainMap = get_constant_from_db(karabo_id, karabo_da,\n", - " getattr(dconstants, const_name)(),\n", - " condition,\n", - " None,\n", - " cal_db_interface,\n", - " creation_time=creation_time,\n", - " print_once=2,\n", - " timeout=cal_db_timeout)\n", - " \n", - " if gainMap is None:\n", - " print(\"Gain map requested, but not found\")\n", - " print(\"No gain correction will be applied\")\n", - " no_relative_gain = True\n", - " plot_unit = 'ADU'\n", - " gainMap = np.ones(sensorSize, np.float32)\n", - "\n", - "else:\n", - " gainMap = np.ones(sensorSize, np.float32)" + " const_data[cname] = get_constant_from_db(\n", + " karabo_id=karabo_id,\n", + " karabo_da=karabo_da,\n", + " constant=getattr(Constants.ePix100, cname)(),\n", + " condition=condition,\n", + " empty_constant=None,\n", + " cal_db_interface=cal_db_interface,\n", + " creation_time=creation_time,\n", + " print_once=2,\n", + " timeout=cal_db_timeout\n", + " )\n", + "\n", + "if relative_gain and const_data[\"RelativeGain\"] is None:\n", + " print(\n", + " \"WARNING: RelativeGain map is 
requested, but not found./n\"\n", + " \"No gain correction will be applied\"\n", + " )\n", + " relative_gain = False\n", + " plot_unit = 'ADU'" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T15:54:28.771629Z", - "start_time": "2018-12-06T15:54:28.346051Z" - } - }, + "metadata": {}, "outputs": [], "source": [ - "#************************Calculators************************#\n", - "\n", - "offsetCorrection = xcal.OffsetCorrection(sensorSize, \n", - " offsetMap, \n", - " nCells = memoryCells, \n", - " cores=cpuCores, gains=None,\n", - " blockSize=blockSize,\n", - " parallel=run_parallel)\n", - "\n", - "gainCorrection = xcal.RelativeGainCorrection(\n", - " sensorSize, \n", - " 1. / gainMap[..., None],\n", - " nCells=memoryCells,\n", - " parallel=run_parallel,\n", - " cores=cpuCores,\n", - " blockSize=blockSize,\n", - " gains=None)" + "# ************************Calculators************************ #\n", + "offsetCorrection = xcal.OffsetCorrection(\n", + " sensorSize,\n", + " const_data[\"Offset\"],\n", + " nCells=memoryCells,\n", + " cores=cpuCores,\n", + " gains=None,\n", + " blockSize=blockSize,\n", + " parallel=run_parallel\n", + ")\n", + "\n", + "if relative_gain:\n", + " gainCorrection = xcal.RelativeGainCorrection(\n", + " sensorSize,\n", + " 1./const_data[\"RelativeGain\"][..., None],\n", + " nCells=memoryCells,\n", + " parallel=run_parallel,\n", + " cores=cpuCores,\n", + " blockSize=blockSize,\n", + " gains=None,\n", + " )" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T16:08:51.886343Z", - "start_time": "2018-12-06T16:08:51.842837Z" - } - }, + "metadata": {}, "outputs": [], "source": [ - "#*****************Histogram Calculators******************#\n", - "\n", - "histCalOffsetCor = xcal.HistogramCalculator(sensorSize, \n", - " bins=1050, \n", - " range=[-50, 1000], parallel=run_parallel,\n", - " nCells=memoryCells, \n", - " cores=cpuCores,\n", - " blockSize=blockSize)" + "# *****************Histogram Calculators****************** #\n", + "histCalOffsetCor = xcal.HistogramCalculator(\n", + " sensorSize,\n", + " bins=1050,\n", + " range=[-50, 1000],\n", + " parallel=run_parallel,\n", + " nCells=memoryCells,\n", + " cores=cpuCores,\n", + " blockSize=blockSize\n", + ")" ] }, { @@ -404,17 +342,13 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T16:08:53.042555Z", - "start_time": "2018-12-06T16:08:53.034522Z" - } - }, + "metadata": {}, "outputs": [], "source": [ "histCalOffsetCor.debug()\n", "offsetCorrection.debug()\n", - "gainCorrection.debug()" + "if relative_gain:\n", + " gainCorrection.debug()" ] }, { @@ -423,44 +357,53 @@ "metadata": {}, "outputs": [], "source": [ - "#************************Calculators************************#\n", - "\n", - "commonModeBlockSize = [x//2, y//2]\n", - "commonModeAxisR = 'row'\n", - "cmCorrection = xcal.CommonModeCorrection([x, y], \n", - " commonModeBlockSize, \n", - " commonModeAxisR,\n", - " nCells = memoryCells, \n", - " noiseMap = noiseMap,\n", - " runParallel=True,\n", - " stats=True)\n", - "\n", - "patternClassifier = xcal.PatternClassifier([x, y], \n", - " noiseMap, \n", - " split_evt_primary_threshold, \n", - " split_evt_secondary_threshold,\n", - " split_evt_mip_threshold,\n", - " tagFirstSingles = 0, \n", - " nCells=memoryCells, \n", - " cores=cpuCores, \n", - " allowElongated = False,\n", - " blockSize=[x, y],\n", - " runParallel=True)\n", - "\n", - 
"\n", - "histCalCMCor = xcal.HistogramCalculator(sensorSize, \n", - " bins=1050, \n", - " range=[-50, 1000], parallel=run_parallel,\n", - " nCells=memoryCells, \n", - " cores=cpuCores,\n", - " blockSize=blockSize)\n", - "\n", - "histCalSECor = xcal.HistogramCalculator(sensorSize, \n", - " bins=1050, \n", - " range=[-50, 1000], parallel=run_parallel,\n", - " nCells=memoryCells, \n", - " cores=cpuCores,\n", - " blockSize=blockSize)" + "# ************************Calculators************************ #\n", + "if common_mode:\n", + " commonModeBlockSize = [x//2, y//2]\n", + " commonModeAxisR = 'row'\n", + " cmCorrection = xcal.CommonModeCorrection(\n", + " [x, y],\n", + " commonModeBlockSize,\n", + " commonModeAxisR,\n", + " nCells=memoryCells,\n", + " noiseMap=const_data[\"Noise\"],\n", + " runParallel=run_parallel,\n", + " stats=True,\n", + " )\n", + "\n", + " histCalCMCor = xcal.HistogramCalculator(\n", + " sensorSize,\n", + " bins=1050,\n", + " range=[-50, 1000],\n", + " parallel=run_parallel,\n", + " nCells=memoryCells,\n", + " cores=cpuCores,\n", + " blockSize=blockSize,\n", + " )\n", + "\n", + "patternClassifier = xcal.PatternClassifier(\n", + " [x, y],\n", + " const_data[\"Noise\"],\n", + " split_evt_primary_threshold,\n", + " split_evt_secondary_threshold,\n", + " split_evt_mip_threshold,\n", + " tagFirstSingles=0,\n", + " nCells=memoryCells,\n", + " cores=cpuCores,\n", + " allowElongated=False,\n", + " blockSize=[x, y],\n", + " runParallel=run_parallel,\n", + ")\n", + "\n", + "histCalSECor = xcal.HistogramCalculator(\n", + " sensorSize,\n", + " bins=1050,\n", + " range=[-50, 1000],\n", + " parallel=run_parallel,\n", + " nCells=memoryCells,\n", + " cores=cpuCores,\n", + " blockSize=blockSize,\n", + ")" ] }, { @@ -469,32 +412,30 @@ "metadata": {}, "outputs": [], "source": [ - "cmCorrection.debug()\n", + "if common_mode:\n", + " cmCorrection.debug()\n", + " histCalCMCor.debug()\n", "patternClassifier.debug()\n", - "histCalCMCor.debug()\n", "histCalSECor.debug()" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T16:08:53.551111Z", - "start_time": "2018-12-06T16:08:53.531064Z" - } - }, + "metadata": {}, "outputs": [], "source": [ - "def copy_and_sanitize_non_cal_data(infile, outfile, h5base):\n", - " \"\"\" Copy and sanitize data in `infile` that is not touched by `correctEPIX`\n", - " \"\"\"\n", - " \n", + "def copy_and_sanitize_non_cal_data(\n", + " infile: h5py,\n", + " outfile: h5py,\n", + " h5base: str\n", + "):\n", + " \"\"\" Copy and sanitize data in `infile`\n", + " that is not touched by `correctEPIX`. 
\"\"\"\n", + "\n", " if h5base.startswith(\"/\"):\n", " h5base = h5base[1:]\n", - " dont_copy = ['pixels']\n", - " dont_copy = [h5base+\"/{}\".format(do)\n", - " for do in dont_copy]\n", + " dont_copy = [h5base+\"/pixels\"]\n", "\n", " def visitor(k, item):\n", " if k not in dont_copy:\n", @@ -504,140 +445,144 @@ " group = str(k).split(\"/\")\n", " group = \"/\".join(group[:-1])\n", " infile.copy(k, outfile[group])\n", - " \n", + "\n", " infile.visititems(visitor)" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T16:10:55.917179Z", - "start_time": "2018-12-06T16:09:01.603633Z" - } - }, + "metadata": {}, "outputs": [], "source": [ - "for k, f in enumerate(file_list):\n", - " with h5py.File(f, 'r') as infile:\n", - " out_fileb = \"{}/{}\".format(out_folder, f.split(\"/\")[-1])\n", - " out_file = out_fileb.replace(\"RAW\", \"CORR\")\n", - " #out_filed = out_fileb.replace(\"RAW\", \"CORR-SC\")\n", - " data = None\n", - " with h5py.File(out_file, \"w\") as ofile:\n", - " try:\n", - " copy_and_sanitize_non_cal_data(infile, ofile, h5path)\n", - " data = infile[h5path+\"/pixels\"][()]\n", - " data = np.compress(np.any(data>0, axis=(1,2)), data, axis=0)\n", - " if limit_images > 0:\n", - " data = data[:limit_images,...]\n", - " \n", - " oshape = data.shape\n", - " data = np.moveaxis(data, 0, 2)\n", - " ddset = ofile.create_dataset(h5path+\"/pixels\",\n", - " oshape,\n", - " chunks=(chunk_size_idim, oshape[1], oshape[2]),\n", - " dtype=np.float32)\n", - "\n", - " data = offsetCorrection.correct(data.astype(np.float32)) #correct for the offset\n", - " if not no_relative_gain:\n", - " data = gainCorrection.correct(data.astype(np.float32)) #correct for the gain\n", - " if photon_energy>0:\n", - " data /= photon_energy\n", - " \n", - " histCalOffsetCor.fill(data)\n", - " ddset[...] = np.moveaxis(data, 2, 0)\n", - " except Exception as e:\n", - " print(\"Couldn't calibrate data in {}: {}\".format(f, e))\n", - " \n", - " if False:\n", - " with h5py.File(out_file, \"a\") as ofiled:\n", - " try:\n", - " #copy_and_sanitize_non_cal_data(infile, ofiled, h5path)\n", - "\n", - " ddsetcm = ofiled.create_dataset(h5path+\"/pixels_cm\",\n", - " oshape,\n", - " chunks=(chunk_size_idim, oshape[1], oshape[2]),\n", - " dtype=np.float32)\n", - "\n", - " ddsetc = ofiled.create_dataset(h5path+\"/pixels_classified\",\n", - " oshape,\n", - " chunks=(chunk_size_idim, oshape[1], oshape[2]),\n", - " dtype=np.float32, compression=\"gzip\")\n", - "\n", - " ddsetp = ofiled.create_dataset(h5path+\"/patterns\",\n", - " oshape,\n", - " chunks=(chunk_size_idim, oshape[1], oshape[2]),\n", - " dtype=np.int32, compression=\"gzip\")\n", - "\n", - "\n", - " data = cmCorrection.correct(data) #correct for the row common mode\n", - " histCalCMCor.fill(data)\n", - " ddsetcm[...] = np.moveaxis(data, 2, 0)\n", - "\n", - " data, patterns = patternClassifier.classify(data)\n", - "\n", - "\n", - " data[data < (split_evt_primary_threshold*noiseMap)] = 0\n", - " ddsetc[...] = np.moveaxis(data, 2, 0)\n", - " ddsetp[...] 
= np.moveaxis(patterns, 2, 0)\n", - "\n", - " data[patterns != 100] = np.nan\n", - " histCalSECor.fill(data)\n", - " except Exception as e:\n", - " print(\"Couldn't calibrate data in {}: {}\".format(f, e))" + "for f in seq_files:\n", + " data = None\n", + " out_file = out_folder / f.name.replace(\"RAW\", \"CORR\")\n", + " with h5py.File(f, \"r\") as infile, h5py.File(out_file, \"w\") as ofile:\n", + " try:\n", + " copy_and_sanitize_non_cal_data(infile, ofile, h5path)\n", + " data = infile[h5path+\"/pixels\"][()]\n", + " data = np.compress(np.any(data > 0, axis=(1, 2)), data, axis=0)\n", + " if limit_images > 0:\n", + " data = data[:limit_images, ...]\n", + "\n", + " oshape = data.shape\n", + " data = np.moveaxis(data, 0, 2)\n", + " ddset = ofile.create_dataset(\n", + " h5path+\"/pixels\",\n", + " oshape,\n", + " chunks=(chunk_size_idim, oshape[1], oshape[2]),\n", + " dtype=np.float32)\n", + "\n", + " # Offset correction.\n", + " data = offsetCorrection.correct(data.astype(np.float32))\n", + "\n", + " # relative gain correction.\n", + " if relative_gain:\n", + " data = gainCorrection.correct(data.astype(np.float32))\n", + " if photon_energy > 0:\n", + " data /= photon_energy\n", + "\n", + " histCalOffsetCor.fill(data)\n", + " ddset[...] = np.moveaxis(data, 2, 0)\n", + "\n", + " # Common mode correction\n", + " # TODO: Fix conflict between common mode and relative gain.\n", + "\n", + " \"\"\"The gain correction is currently applying an absolute correction\n", + " (not a relative correction as the implied by the name);\n", + " it changes the scale (the unit of measurement) of the data from ADU\n", + " to either keV or n_of_photons. But the common mode correction\n", + " relies on comparing data with the noise map, which is still in ADU.\n", + "\n", + " The best solution is probably to do a relative gain correction first\n", + " (correct) and apply the global absolute gain to the data at the end,\n", + " after common mode and clustering.\n", + " \"\"\"\n", + " if common_mode:\n", + " ddsetcm = ofile.create_dataset(\n", + " h5path+\"/pixels_cm\",\n", + " oshape,\n", + " chunks=(chunk_size_idim, oshape[1], oshape[2]),\n", + " dtype=np.float32)\n", + "\n", + " ddsetc = ofile.create_dataset(\n", + " h5path+\"/pixels_classified\",\n", + " oshape,\n", + " chunks=(chunk_size_idim, oshape[1], oshape[2]),\n", + " dtype=np.float32, compression=\"gzip\")\n", + "\n", + " ddsetp = ofile.create_dataset(\n", + " h5path+\"/patterns\",\n", + " oshape,\n", + " chunks=(chunk_size_idim, oshape[1], oshape[2]),\n", + " dtype=np.int32, compression=\"gzip\")\n", + "\n", + " # row common mode correction.\n", + " data = cmCorrection.correct(data)\n", + " histCalCMCor.fill(data)\n", + " ddsetcm[...] = np.moveaxis(data, 2, 0)\n", + "\n", + " data, patterns = patternClassifier.classify(data)\n", + "\n", + " data[data < (split_evt_primary_threshold*const_data[\"Noise\"])] = 0 # noqa\n", + " ddsetc[...] = np.moveaxis(data, 2, 0)\n", + " ddsetp[...] 
= np.moveaxis(patterns, 2, 0)\n", + "\n", + " data[patterns != 100] = np.nan\n", + " histCalSECor.fill(data)\n", + " except Exception as e:\n", + " print(f\"ERROR applying common mode correction for {f}: {e}\")" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T16:13:12.889583Z", - "start_time": "2018-12-06T16:13:11.122653Z" - } - }, + "metadata": {}, "outputs": [], "source": [ - "ho,eo,co,so = histCalOffsetCor.get()\n", - "\n", - "d = [{'x': co,\n", - " 'y': ho,\n", - " 'y_err': np.sqrt(ho[:]),\n", - " 'drawstyle': 'steps-mid',\n", - " 'errorstyle': 'bars',\n", - " 'errorcoarsing': 2,\n", - " 'label': 'Offset corr.'\n", - " },\n", - " \n", - " ]\n", - "if False:\n", - " ho,eo,co,so = histCalCMCor.get()\n", - " d.append({'x': co,\n", - " 'y': ho,\n", - " 'y_err': np.sqrt(ho[:]),\n", - " 'drawstyle': 'steps-mid',\n", - " 'errorstyle': 'bars',\n", - " 'errorcoarsing': 2,\n", - " 'label': 'CM corr.'\n", - " })\n", - "\n", - " ho,eo,co,so = histCalSECor.get()\n", - " d.append({'x': co,\n", - " 'y': ho,\n", - " 'y_err': np.sqrt(ho[:]),\n", - " 'drawstyle': 'steps-mid',\n", - " 'errorstyle': 'bars',\n", - " 'errorcoarsing': 2,\n", - " 'label': 'Single split events'\n", - " })\n", - "\n", - "\n", - "fig = xana.simplePlot(d, aspect=1, x_label='Energy({})'.format(plot_unit), \n", - " y_label='Number of occurrences', figsize='2col',\n", - " y_log=True, x_range=(-50,500),\n", - " legend='top-center-frame-2col')" + "ho, eo, co, so = histCalOffsetCor.get()\n", + "\n", + "d = [{\n", + " 'x': co,\n", + " 'y': ho,\n", + " 'y_err': np.sqrt(ho[:]),\n", + " 'drawstyle': 'steps-mid',\n", + " 'errorstyle': 'bars',\n", + " 'errorcoarsing': 2,\n", + " 'label': 'Offset corr.'\n", + "}]\n", + "\n", + "if common_mode:\n", + " ho, eo, co, so = histCalCMCor.get()\n", + " d.append({\n", + " 'x': co,\n", + " 'y': ho,\n", + " 'y_err': np.sqrt(ho[:]),\n", + " 'drawstyle': 'steps-mid',\n", + " 'errorstyle': 'bars',\n", + " 'errorcoarsing': 2,\n", + " 'label': 'CM corr.'\n", + " })\n", + "\n", + " ho, eo, co, so = histCalSECor.get()\n", + " d.append({\n", + " 'x': co,\n", + " 'y': ho,\n", + " 'y_err': np.sqrt(ho[:]),\n", + " 'drawstyle': 'steps-mid',\n", + " 'errorstyle': 'bars',\n", + " 'errorcoarsing': 2,\n", + " 'label': 'Single split events'\n", + " })\n", + "\n", + "\n", + "fig = xana.simplePlot(\n", + " d, aspect=1, x_label=f'Energy({plot_unit})',\n", + " y_label='Number of occurrences', figsize='2col',\n", + " y_log=True, x_range=(-50, 500),\n", + " legend='top-center-frame-2col'\n", + ")" ] }, { @@ -650,26 +595,21 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T16:11:08.317130Z", - "start_time": "2018-12-06T16:11:05.788655Z" - } - }, + "metadata": {}, "outputs": [], "source": [ - "fig = xana.heatmapPlot(np.nanmedian(data, axis=2),\n", - " x_label='Columns', y_label='Rows',\n", - " lut_label='Signal ({})'.format(plot_unit),\n", - " x_range=(0,y),\n", - " y_range=(0,x), vmin=-50, vmax=50)" + "fig = xana.heatmapPlot(\n", + " np.nanmedian(data, axis=2),\n", + " x_label='Columns', y_label='Rows',\n", + " lut_label=f'Signal ({plot_unit})',\n", + " x_range=(0, y),\n", + " y_range=(0, x),\n", + " vmin=-50, vmax=50)" ] }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "## Single Shot of last Sequence ##" ] @@ -677,27 +617,17 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": 
"2018-12-06T16:11:10.908912Z", - "start_time": "2018-12-06T16:11:08.318486Z" - } - }, + "metadata": {}, "outputs": [], "source": [ - "fig = xana.heatmapPlot(data[...,0],\n", - " x_label='Columns', y_label='Rows',\n", - " lut_label='Signal ({})'.format(plot_unit),\n", - " x_range=(0,y),\n", - " y_range=(0,x), vmin=-50, vmax=50)" + "fig = xana.heatmapPlot(\n", + " data[..., 0],\n", + " x_label='Columns', y_label='Rows',\n", + " lut_label=f'Signal ({plot_unit})',\n", + " x_range=(0, y),\n", + " y_range=(0, x),\n", + " vmin=-50, vmax=50)" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": {