diff --git a/cal_tools/cal_tools/ana_tools.py b/cal_tools/cal_tools/ana_tools.py index 5ac6680742f1232ac7496315214616d90ea27a6e..a262cf8bb6e6be746e0cbb7dc26f53b131467d06 100644 --- a/cal_tools/cal_tools/ana_tools.py +++ b/cal_tools/cal_tools/ana_tools.py @@ -138,7 +138,7 @@ def recursively_save_dict_contents_to_group(h5file, path, dic): def list_runner(*args, **kwargs): """ Combine several lists to list of dictionary or list of lists - Each dictionary contain set of elements, one from each list + Each dictionary contains a set of elements, one from each list """ params = list(map(tuple, args)) * kwargs.get('repeat', 1) diff --git a/notebooks/AGIPD/PlotFromCalDB_AGIPD_NBC.ipynb b/notebooks/AGIPD/PlotFromCalDB_AGIPD_NBC.ipynb index 1d5907b72009f076284701bd46ac33f0051d3a54..3ad153403b313692ba4f39355a805ba2972f1e46 100644 --- a/notebooks/AGIPD/PlotFromCalDB_AGIPD_NBC.ipynb +++ b/notebooks/AGIPD/PlotFromCalDB_AGIPD_NBC.ipynb @@ -26,8 +26,8 @@ "outputs": [], "source": [ "cluster_profile = \"noDB\" # The ipcluster profile to use\n", - "start_date = \"2018-01-30\" # Date to start investigation interval from\n", - "end_date = \"2018-12-12\" # Date to end investigation interval at, can be \"now\"\n", + "start_date = \"2019-01-01\" # Date to start investigation interval from\n", + "end_date = \"2019-12-12\" # Date to end investigation interval at, can be \"now\"\n", "constants = [\"Noise\", \"SlopesFF\", \"SlopesPC\", \"Offset\"] # Constants to plot\n", "modules = [1] # Modules, set to -1 for all, range allowed\n", "bias_voltages = [300, 500] # Bias voltage\n", @@ -69,7 +69,7 @@ "\n", "import h5py\n", "import matplotlib\n", - "%matplotlib inline\n", + "# %matplotlib inline\n", "\n", "from iCalibrationDB import Constants, Conditions, Detectors, ConstantMetaData\n", "from cal_tools.tools import get_from_db, get_random_db_interface\n", @@ -175,6 +175,7 @@ " meta_only=True,\n", " version_info=True)\n", "\n", + " print(data)\n", " # Request BP constant versions\n", " print('constantDark:', constantsDark[const], ) \n", " dataBP = get_from_db(getattr(det, pars['module']),\n", @@ -188,6 +189,8 @@ " meta_only=True,\n", " version_info=True)\n", " \n", + " print('BP!!!!!', dataBP)\n", + " \n", " if not isinstance(data, list) or not isinstance(dataBP, list):\n", " continue\n", " \n", diff --git a/notebooks/LPD/PlotFromCalDB_LPD_NBC.ipynb b/notebooks/LPD/PlotFromCalDB_LPD_NBC.ipynb index 8046e5f1ae7f4aabe842bd2425d2fe16f35150ea..854c2b85e9961e2a43bf8f3011f42ee2bd50f0b1 100644 --- a/notebooks/LPD/PlotFromCalDB_LPD_NBC.ipynb +++ b/notebooks/LPD/PlotFromCalDB_LPD_NBC.ipynb @@ -8,7 +8,7 @@ "\n", "Author: Mikhail Karnevskiy, Steffen Hauf, Version 0.2\n", "\n", - "Calibration constants for LPD1M1 detector from the data base with injection time between start_date and end_date are considered.\n", + "Calibration constants for LPD1M detector from the data base with injection time between start_date and end_date are considered.\n", "\n", "To be visualized, calibration constants are averaged per ASICs. Plots shows calibration constant over time for each constant and for each module. 
Summary plots overall modules are created.\n", "\n", @@ -31,18 +31,19 @@ "start_date = \"2018-01-30\" # Date to start investigation interval from\n", "end_date = \"2018-12-12\" # Date to end investigation interval at, can be \"now\"\n", "constants = [\"Offset\", \"Noise\", \"SlopesFF\", \"SlopesCI\"] # constants to plot\n", - "modules = [2] # Modules, range allowed\n", + "modules = [2] # Modules, set to -1 for all, range allowed\n", "bias_voltages = [250, 500] # Bias voltage\n", "mem_cells = [1, 128, 256, 512] # Number of used memory cells. Typically: 4,32,64,128,176.\n", "photon_energy = 9.2 # Photon energy of the beam\n", "out_folder = \"/gpfs/exfel/data/scratch/karnem/testLPD_11/\" # Output folder, required\n", - "use_existing = \"/gpfs/exfel/data/scratch/karnem/testLPD_10/\" # Input folder\n", + "use_existing = \"\" # If not empty, constants stored in given folder will be used\n", "cal_db_timeout = 180000 # timeout on caldb requests\",\n", "adu_to_photon = 33.17 # ADU to photon conversion factor (8000 / 3.6 / 67.)\n", "nMemToShow = 32 # Number of memory cells to be shown in plots over ASICs\n", "db_module = \"LPD1M1\" # detector entry in the DB to investigate\n", "dclass = \"LPD\" # Detector class\n", "cal_db_interface = \"tcp://max-exfl016:8015#8025\" # the database interface to use\n", + "max_time = 15 # the max margin in min. for the matching closest bad pixels\n", "range_offset = [800., 1500, 600, 900] # plotting range for offset: high gain l, r, medium gain l, r \n", "range_noise = [2.0, 16, 1.0, 7.0] # plotting range for noise: high gain l, r, medium gain l, r \n", "range_gain = [20, 30, 20, 30] # plotting range for gain: high gain l, r, medium gain l, r \n", @@ -55,11 +56,11 @@ "cell_type": "code", "execution_count": null, "metadata": { - "collapsed": false, "scrolled": true }, "outputs": [], "source": [ + "import copy\n", "import datetime\n", "import dateutil.parser\n", "import numpy as np\n", @@ -68,36 +69,34 @@ "import warnings\n", "warnings.filterwarnings('ignore')\n", "\n", + "import h5py\n", "import matplotlib\n", - "% matplotlib inline\n", + "# %matplotlib inline\n", "\n", - "from iCalibrationDB import Constants, Conditions, Detectors\n", + "from iCalibrationDB import Constants, Conditions, Detectors, ConstantMetaData\n", "from cal_tools.tools import get_from_db, get_random_db_interface\n", "from cal_tools.ana_tools import (save_dict_to_hdf5, load_data_from_hdf5, \n", " combine_constants, HMType,\n", - " hm_combine)" + " hm_combine, list_runner)" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false - }, + "metadata": {}, "outputs": [], "source": [ "# Prepare variables\n", - "interval = 1 # interval for evaluation in days\n", - "\n", "nMem = max(mem_cells) # Number of mem Cells to store\n", "spShape = (64,64) # Shape of superpixel\n", "\n", - "in_mod_names = []\n", - "for mod in modules:\n", - " in_mod_names.append(\"Q{}M{}\".format(mod // 4 + 1, mod % 4 + 1))\n", + "if modules[0] == -1:\n", + " modules = range(16)\n", + " \n", + "modules = [\"Q{}M{}\".format(x // 4 + 1, x % 4 + 1) for x in modules]\n", "\n", "constantsDark = {\"SlopesFF\": 'BadPixelsFF',\n", - " 'SlopesCI': 'BadPixelsCI',\n", + " 'SlopesPC': 'BadPixelsPC',\n", " 'Noise': 'BadPixelsDark',\n", " 'Offset': 'BadPixelsDark'}\n", "print('Bad pixels data: ', constantsDark)\n", @@ -107,8 +106,6 @@ " start_date)\n", "end = datetime.datetime.now() if end_date.upper() == \"NOW\" else dateutil.parser.parse(\n", " end_date)\n", - "step = datetime.timedelta(hours=interval)\n", - "dt 
= end\n", "\n", "# Create output folder\n", "os.makedirs(out_folder, exist_ok=True)\n", @@ -117,21 +114,150 @@ "det = getattr(Detectors, db_module)\n", "dconstants = getattr(Constants, dclass)\n", "\n", - "cal_db_interface = get_random_db_interface(cal_db_interface)\n", - "print('CalDB Interface {}'.format(cal_db_interface))" + "print('CalDB Interface: {}'.format(cal_db_interface))\n", + "print('Start time at: ', start)\n", + "print('End time at: ', end)\n", + "print('Modules: ', modules)" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false - }, + "metadata": {}, "outputs": [], "source": [ - "import copy\n", + "parameter_list = list_runner(bias_voltages, modules, mem_cells, names = ['bias_voltage', 'module', 'mem_cells'])\n", + "print(parameter_list)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Retrieve list of meta-data\n", + "constant_versions = []\n", + "constant_parameters = []\n", + "constantBP_versions = []\n", "\n", + "# Loop over constants\n", + "for c, const in enumerate(constants):\n", + " \n", + " if use_existing != \"\":\n", + " break\n", + " \n", + " # Loop over parameters\n", + " for pars in parameter_list:\n", + " \n", + " if (const in [\"Offset\", \"Noise\", \"SlopesPC\"] or \"DARK\" in const.upper()):\n", + " dcond = Conditions.Dark\n", + " mcond = getattr(dcond, dclass)(\n", + " memory_cells=pars['mem_cells'],\n", + " bias_voltage=pars['bias_voltage'])\n", + " else:\n", + " dcond = Conditions.Illuminated\n", + " mcond = getattr(dcond, dclass)(\n", + " memory_cells=pars['mem_cells'],\n", + " bias_voltage=pars['bias_voltage'],\n", + " photon_energy=photon_energy)\n", + "\n", + " print('Request: ', const, 'with paramters:', pars)\n", + " # Request Constant versions for given parameters and module\n", + " data = get_from_db(getattr(det, pars['module']),\n", + " getattr(dconstants,\n", + " const)(),\n", + " copy.deepcopy(mcond), None,\n", + " cal_db_interface,\n", + " creation_time=start,\n", + " verbosity=0,\n", + " timeout=cal_db_timeout,\n", + " meta_only=True,\n", + " version_info=True)\n", + "\n", + " print(data)\n", + " # Request BP constant versions\n", + " print('constantDark:', constantsDark[const], ) \n", + " dataBP = get_from_db(getattr(det, pars['module']),\n", + " getattr(dconstants, \n", + " constantsDark[const])(),\n", + " copy.deepcopy(mcond), None,\n", + " cal_db_interface,\n", + " creation_time=start,\n", + " verbosity=0,\n", + " timeout=cal_db_timeout,\n", + " meta_only=True,\n", + " version_info=True)\n", + " \n", + " print('BP!!!!!', dataBP)\n", + " \n", + " if not isinstance(data, list) or not isinstance(dataBP, list):\n", + " continue\n", + " \n", + " found_BPmatch = False\n", + " for d in data:\n", + " # Match proper BP constant version\n", + " # and get constant version within\n", + " # requested time range\n", + " if d is None:\n", + " print('Time or data is not found!')\n", + " continue\n", + "\n", + " dt = dateutil.parser.parse(d['begin_at'])\n", + "\n", + " if dt.replace(tzinfo=None) > end or dt.replace(tzinfo=None) < start:\n", + " continue\n", + " \n", + " closest_BP = None\n", + " closest_BPtime = None\n", + " \n", + " for dBP in dataBP:\n", + " if dBP is None:\n", + " print(\"Bad pixels are not found!\")\n", + " continue\n", + " \n", + " dt = dateutil.parser.parse(d['begin_at'])\n", + " dBPt = dateutil.parser.parse(dBP['begin_at'])\n", + " \n", + " if dt == dBPt:\n", + " found_BPmatch = True\n", + " else:\n", "\n", + " if 
np.abs(dBPt-dt).seconds < (max_time*60):\n", + " if closest_BP is None:\n", + " closest_BP = dBP\n", + " closest_BPtime = dBPt\n", + " else:\n", + " if np.abs(dBPt-dt) < np.abs(closest_BPtime-dt):\n", + " closest_BP = dBP\n", + " closest_BPtime = dBPt\n", + " \n", + " if dataBP.index(dBP) == len(dataBP)-1:\n", + " if closest_BP:\n", + " dBP = closest_BP\n", + " dBPt = closest_BPtime\n", + " found_BPmatch = True\n", + " else:\n", + " print('Bad pixels are not found!')\n", + " \n", + " if found_BPmatch:\n", + " print(\"Found constant {}: begin at {}\".format(const, dt))\n", + " print(\"Found bad pixels at {}\".format(dBPt))\n", + " constantBP_versions.append(dBP)\n", + " constant_versions.append(d)\n", + " constant_parameters.append(copy.deepcopy(pars))\n", + " found_BPmatch = False\n", + " break\n", + " \n", + "print('Number of retrieved constants with a bad pixel match is {}'.format(len(constant_versions)))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "def prepare_to_store(a, nMem):\n", " shape = list(a.shape[:2])+[nMem, 2]\n", " b = np.full(shape, np.nan)\n", @@ -167,151 +293,68 @@ "\n", "\n", "ret_constants = {}\n", + "constand_data = ConstantMetaData()\n", + "constant_BP = ConstantMetaData()\n", + "for i, constant_version in enumerate(constant_versions):\n", "\n", - "if use_existing != '':\n", - " mem_cells = []\n", - "\n", - "# Loop over max_memory cells\n", - "for the_mem_cells in mem_cells:\n", - "\n", - " # Loop over bias voltages\n", - " for bias_voltage in bias_voltages:\n", - "\n", - " # Loop over constants\n", - " for c, const in enumerate(constants):\n", - "\n", - " if not const in ret_constants:\n", - " ret_constants[const] = {}\n", - "\n", - " # Loop over modules\n", - " for module in modules:\n", - "\n", - " dt = end\n", - "\n", - " qm = \"Q{}M{}\".format(module // 4 + 1, module % 4 + 1)\n", - " if not qm in ret_constants[const]:\n", - " ret_constants[const][qm] = []\n", - "\n", - " # Loop over time stamps\n", - " while dt > start:\n", - " creation_time = dt\n", - "\n", - " if (const in [\"Offset\", \"Noise\",\n", - " \"SlopesCI\"] or \"DARK\" in const.upper()):\n", - " dcond = Conditions.Dark\n", - " mcond = getattr(dcond, dclass)(\n", - " memory_cells=the_mem_cells,\n", - " bias_voltage=bias_voltage)\n", - " else:\n", - " dcond = Conditions.Illuminated\n", - " mcond = getattr(dcond, dclass)(\n", - " memory_cells=the_mem_cells,\n", - " bias_voltage=bias_voltage,\n", - " photon_energy=photon_energy)\n", - "\n", - " print('Request: ', const, qm, the_mem_cells, bias_voltage,\n", - " creation_time)\n", - " cdata, ctime = get_from_db(getattr(det, qm),\n", - " getattr(dconstants, const)(),\n", - " copy.deepcopy(mcond),\n", - " None,\n", - " cal_db_interface,\n", - " creation_time=creation_time,\n", - " verbosity=0,\n", - " timeout=cal_db_timeout,\n", - " meta_only=True )\n", - " \n", - " if ctime is None or cdata is None:\n", - " print('Time or Data is None')\n", - " break\n", - "\n", - " ctime = ctime.calibration_constant_version.begin_at\n", - "\n", - " print(\"Found constant {}: begin at {}\".format(const,\n", - " cdata is not None),\n", - " ctime)\n", - " print(cdata.shape)\n", - " cdata = modify_const(const, cdata)\n", - " print(cdata.shape)\n", - "\n", - " # Request bad pixel mask\n", - " if const in constantsDark:\n", - " print('Request bad pixels ',\n", - " (creation_time + step + step + step + step))\n", - " cdataBP, ctimeBP = get_from_db(getattr(det, qm),\n", - " getattr(dconstants,\n", - " 
constantsDark[const])(),\n", - " copy.deepcopy(mcond),\n", - " None,\n", - " cal_db_interface,\n", - " creation_time=(\n", - " creation_time + step + step + step + step),\n", - " verbosity=0,\n", - " timeout=cal_db_timeout,\n", - " meta_only=True)\n", - " ctimeBP = ctimeBP.calibration_constant_version.begin_at\n", - "\n", - " if cdataBP is None:\n", - " print(\"Bad pixels are not found!\")\n", - " ctime = ctime.replace(tzinfo=None)\n", - " dt = ctime - step\n", - " continue\n", - "\n", - " print(\"Found bad pixels {}\".format(ctimeBP))\n", - " print(cdataBP.shape)\n", - " cdataBP = modify_const(const, cdataBP)\n", - " print(cdataBP.shape)\n", - "\n", - " if cdataBP.shape != cdata.shape:\n", - " print('Wrong bad pixel shape!')\n", - " ctime = ctime.replace(tzinfo=None)\n", - " dt = ctime - step\n", - " continue\n", - "\n", - " # Apply bad pixel mask\n", - " cdataABP = np.copy(cdata)\n", - " cdataABP[cdataBP > 0] = np.nan\n", - "\n", - " # Create superpixels for constants with BP applied\n", - " cdataABP = get_rebined(cdataABP, spShape)\n", - " toStoreBP = prepare_to_store(np.nanmean(cdataABP, axis=(1, 3)), nMem)\n", - " toStoreBPStd = prepare_to_store(np.nanstd(cdataABP, axis=(1, 3)), nMem)\n", - " \n", - " # Prepare number of bad pixels per superpixels\n", - " cdataBP = get_rebined(cdataBP, spShape)\n", - " cdataNBP = prepare_to_store(np.nansum(cdataBP > 0, axis=(1, 3)), nMem)\n", - " else:\n", - " toStoreExtBP = 0\n", - " cdataBPExt = 0\n", - "\n", - " # Create superpixels for constants without BP applied\n", - " cdata = get_rebined(cdata, spShape)\n", - " toStoreStd = prepare_to_store(np.nanstd(cdata, axis=(1, 3)), nMem)\n", - " toStore = prepare_to_store(np.nanmean(cdata, axis=(1, 3)), nMem)\n", - "\n", - " # Convert parameters to dict\n", - " dpar = {}\n", - " for par in mcond.parameters:\n", - " dpar[par.name] = par.value\n", - "\n", - " print(\"Store values in dict\", const, qm, ctime)\n", - " ret_constants[const][qm].append({'ctime': ctime,\n", - " 'nBP': cdataNBP,\n", - " 'dataBP': toStoreBP,\n", - " 'dataBPStd': toStoreBPStd,\n", - " 'data': toStore,\n", - " 'dataStd': toStoreStd,\n", - " 'mdata': dpar})\n", - " \n", - " ctime = ctime.replace(tzinfo=None)\n", - " dt = ctime - step" + " const = constant_version['data_set_name'].split('/')[-2]\n", + " qm = constant_parameters[i]['module']\n", + " \n", + " constand_data.retrieve_from_version_info(constant_version)\n", + " constant_BP.retrieve_from_version_info(constantBP_versions[i])\n", + " \n", + " cdata = constand_data.calibration_constant.data\n", + " cdataBP = constant_BP.calibration_constant.data\n", + " ctime = constand_data.calibration_constant_version.begin_at \n", + " \n", + " print(\"constant: {}, module {}, begin_at {}\".format(const, qm, ctime))\n", + " \n", + " if not const in ret_constants:\n", + " ret_constants[const] = {}\n", + " if not qm in ret_constants[const]:\n", + " ret_constants[const][qm] = []\n", + " \n", + " cdata = modify_const(const, cdata)\n", + " cdataBP = modify_const(const, cdataBP)\n", + "\n", + " if cdataBP.shape != cdata.shape:\n", + " print('Wrong bad pixel shape! 
{}, expected {}'.format(cdataBP.shape, cdata.shape))\n", + " continue\n", + "\n", + " # Apply bad pixel mask\n", + " cdataABP = np.copy(cdata)\n", + " cdataABP[cdataBP > 0] = np.nan\n", + "\n", + " # Create superpixels for constants with BP applied\n", + " cdataABP = get_rebined(cdataABP, spShape)\n", + " toStoreBP = prepare_to_store(np.nanmean(cdataABP, axis=(1, 3)), nMem)\n", + " toStoreBPStd = prepare_to_store(np.nanstd(cdataABP, axis=(1, 3)), nMem)\n", + "\n", + " # Prepare number of bad pixels per superpixels\n", + " cdataBP = get_rebined(cdataBP, spShape)\n", + " cdataNBP = prepare_to_store(np.nansum(cdataBP > 0, axis=(1, 3)), nMem)\n", + "\n", + " # Create superpixels for constants without BP applied\n", + " cdata = get_rebined(cdata, spShape)\n", + " toStoreStd = prepare_to_store(np.nanstd(cdata, axis=(1, 3)), nMem)\n", + " toStore = prepare_to_store(np.nanmean(cdata, axis=(1, 3)), nMem)\n", + " \n", + " dpar = {p.name: p.value for p in constand_data.detector_condition.parameters}\n", + "\n", + " print(\"Store values in dict\", const, qm, ctime)\n", + " ret_constants[const][qm].append({'ctime': ctime,\n", + " 'nBP': cdataNBP,\n", + " 'dataBP': toStoreBP,\n", + " 'dataBPStd': toStoreBPStd,\n", + " 'data': toStore,\n", + " 'dataStd': toStoreStd,\n", + " 'mdata': dpar}) \n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { - "collapsed": false, "scrolled": true }, "outputs": [], @@ -326,7 +369,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "collapsed": false, "scrolled": true }, "outputs": [], @@ -343,9 +385,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": false - }, + "metadata": {}, "outputs": [], "source": [ "# Combine FF and PC data to calculate Gain\n", @@ -354,12 +394,12 @@ "\n", "ret_constants[\"Gain\"] = {}\n", "ret_constants[\"Noise-e\"] = {}\n", - "for module in list(range(16)):\n", + "for mod in list(range(16)):\n", " if (\"SlopesFF\" not in ret_constants or\n", " \"SlopesCI\" not in ret_constants):\n", " break\n", "\n", - " qm = \"Q{}M{}\".format(module // 4 + 1, module % 4 + 1)\n", + " qm = \"Q{}M{}\".format(mod // 4 + 1, mod % 4 + 1)\n", " print(qm)\n", "\n", " if (qm not in ret_constants[\"SlopesFF\"] or\n", @@ -477,7 +517,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "collapsed": false, "scrolled": true }, "outputs": [], @@ -485,7 +524,7 @@ "print('Plot calibration constants')\n", "\n", "# loop over constat type\n", - "for const, modules in ret_constants.items():\n", + "for const, mods in ret_constants.items():\n", "\n", " # Loop over gain\n", " for gain in range(2):\n", @@ -504,15 +543,15 @@ " mod_times = []\n", "\n", " # Loop over modules\n", - " for mod, data in modules.items():\n", + " for mod, data in mods.items():\n", " \n", - " if mod not in in_mod_names:\n", + " if mod not in modules:\n", " continue\n", "\n", " print(mod)\n", "\n", " ctimes = np.array(data[\"ctime\"])\n", - " ctimes_ticks = [x.strftime('%m-%d') for x in ctimes]\n", + " ctimes_ticks = [x.strftime('%y-%m-%d') for x in ctimes]\n", "\n", " if (\"mdata\" in data):\n", " cmdata = np.array(data[\"mdata\"])\n", @@ -633,7 +672,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.4.3" + "version": "3.6.7" } }, "nbformat": 4, diff --git a/notebooks/ePix/PlotFromCalDB_ePix100_NBC.ipynb b/notebooks/ePix/PlotFromCalDB_ePix100_NBC.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..42c278d6c8206e8ed9232ffc6ebcf4ea5f524bae --- /dev/null +++ 
b/notebooks/ePix/PlotFromCalDB_ePix100_NBC.ipynb @@ -0,0 +1,501 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Statistical analysis of calibration factors\n", + "\n", + "Author: Mikhail Karnevskiy, Steffen Hauf, Version 0.1\n", + "\n", + "Calibration constants for ePix100 detector from the data base with injection time between start_date and end_date are considered.\n", + "\n", + "To be visualized, calibration constants are averaged per ASICs. Plots show the calibration constant over time for each constant.\n", + "\n", + "Values shown in plots are saved in h5 files." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cluster_profile = \"noDB\" # The ipcluster profile to use\n", + "start_date = \"2019-01-30\" # date to start investigation interval from\n", + "end_date = \"2019-05-01\" # date to end investigation interval at, can be \"now\"\n", + "dclass=\"ePix100\" # Detector class\n", + "db_module = \"ePix100_M15\" # detector entry in the DB to investigate\n", + "constants = [\"Noise\", \"Offset\"] # constants to plot\n", + "bias_voltage = [200] # Bias voltage\n", + "temperature = [288] # Operation temperature\n", + "integration_time = [1, 50] # Integration time\n", + "in_vacuum = [0] # 0 if detector is operated in room pressure\n", + "parameter_names = ['bias_voltage', 'integration_time', 'temperature', 'in_vacuum'] # names of parameters\n", + "photon_energy = 9.2 # Photon energy of the beam\n", + "out_folder = \"/gpfs/exfel/data/scratch/karnem/test_ePix/\" # output folder\n", + "use_existing = \"\" # If not empty, constants stored in given folder will be used\n", + "cal_db_interface = \"tcp://max-exfl016:8016\" # the database interface to use\n", + "cal_db_timeout = 180000 # timeout on caldb requests\n", + "range_offset = [1000., 2200] # plotting range for offset: high gain l, r, medium gain l, r \n", + "range_noise = [1.5, 3.3] # plotting range for noise: high gain l, r, medium gain l, r " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "import copy\n", + "import datetime\n", + "import dateutil.parser\n", + "import numpy as np\n", + "import os\n", + "import sys\n", + "import warnings\n", + "warnings.filterwarnings('ignore')\n", + "\n", + "import h5py\n", + "import matplotlib\n", + "%matplotlib inline\n", + "\n", + "from iCalibrationDB import Constants, Conditions, Detectors, ConstantMetaData\n", + "from cal_tools.tools import get_from_db, get_random_db_interface\n", + "from cal_tools.ana_tools import (save_dict_to_hdf5, load_data_from_hdf5, \n", + " combine_constants, HMType,\n", + " hm_combine, list_runner)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Prepare variables\n", + "spShape = (354, 96) # Shape of superpixel\n", + "\n", + "parameters = [globals()[x] for x in parameter_names]\n", + "\n", + "constantsDark = {'Noise_': 'BadPixelsDark',\n", + " 'Offset_': 'BadPixelsDark'}\n", + "print('Bad pixels data: ', constantsDark)\n", + "\n", + "# Define parameters in order to perform loop over time stamps\n", + "start = datetime.datetime.now() if start_date.upper() == \"NOW\" else dateutil.parser.parse(\n", + " start_date)\n", + "end = datetime.datetime.now() if end_date.upper() == \"NOW\" else dateutil.parser.parse(\n", + " end_date)\n", + "\n", + "# Create output folder\n", + "os.makedirs(out_folder, exist_ok=True)\n", + "\n", 
+ "# Get getector conditions\n", + "det = getattr(Detectors, db_module)\n", + "dconstants = getattr(Constants, dclass)\n", + "\n", + "print('CalDB Interface: {}'.format(cal_db_interface))\n", + "print('Start time at: ', start)\n", + "print('End time at: ', end)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "parameter_list = list_runner(*parameters, names = parameter_names)\n", + "print(parameter_list)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Retrieve list of meta-data\n", + "constant_versions = []\n", + "constant_parameters = []\n", + "constantBP_versions = []\n", + "\n", + "# Loop over constants\n", + "for c, const in enumerate(constants):\n", + " \n", + " if use_existing != \"\":\n", + " break\n", + " \n", + " # Loop over parameters\n", + " for pars in parameter_list:\n", + " \n", + " if (const in [\"Offset\", \"Noise\", \"SlopesPC\"] or \"DARK\" in const.upper()):\n", + " dcond = Conditions.Dark\n", + " mcond = getattr(dcond, dclass)(**pars)\n", + " else:\n", + " dcond = Conditions.Illuminated\n", + " mcond = getattr(dcond, dclass)(**pars,\n", + " photon_energy=photon_energy)\n", + "\n", + " \n", + " \n", + " print('Request: ', const, 'with paramters:', pars)\n", + " # Request Constant versions for given parameters and module\n", + " data = get_from_db(det,\n", + " getattr(dconstants,\n", + " const)(),\n", + " copy.deepcopy(mcond), None,\n", + " cal_db_interface,\n", + " creation_time=start,\n", + " verbosity=0,\n", + " timeout=cal_db_timeout,\n", + " meta_only=True,\n", + " version_info=True)\n", + " \n", + " if not isinstance(data, list):\n", + " continue\n", + " \n", + " if const in constantsDark:\n", + " # Request BP constant versions\n", + " print('constantDark:', constantsDark[const], ) \n", + " dataBP = get_from_db(det,\n", + " getattr(dconstants, \n", + " constantsDark[const])(),\n", + " copy.deepcopy(mcond), None,\n", + " cal_db_interface,\n", + " creation_time=start,\n", + " verbosity=0,\n", + " timeout=cal_db_timeout,\n", + " meta_only=True,\n", + " version_info=True)\n", + " \n", + " print(dataBP)\n", + " \n", + " if not isinstance(data, list) or not isinstance(dataBP, list):\n", + " continue\n", + " \n", + " found_BPmatch = False\n", + " for d in data:\n", + " # Match proper BP constant version\n", + " # and get constant version within\n", + " # requested time range\n", + " if d is None:\n", + " print('Time or data is not found!')\n", + " continue\n", + "\n", + " dt = dateutil.parser.parse(d['begin_at'])\n", + "\n", + " if dt.replace(tzinfo=None) > end or dt.replace(tzinfo=None) < start:\n", + " continue\n", + "\n", + " closest_BP = None\n", + " closest_BPtime = None\n", + "\n", + " for dBP in dataBP:\n", + " if dBP is None:\n", + " print(\"Bad pixels are not found!\")\n", + " continue\n", + "\n", + " dt = dateutil.parser.parse(d['begin_at'])\n", + " dBPt = dateutil.parser.parse(dBP['begin_at'])\n", + "\n", + " if dt == dBPt:\n", + " found_BPmatch = True\n", + " else:\n", + "\n", + " if np.abs(dBPt-dt).seconds < (max_time*60):\n", + " if closest_BP is None:\n", + " closest_BP = dBP\n", + " closest_BPtime = dBPt\n", + " else:\n", + " if np.abs(dBPt-dt) < np.abs(closest_BPtime-dt):\n", + " closest_BP = dBP\n", + " closest_BPtime = dBPt\n", + "\n", + " if dataBP.index(dBP) == len(dataBP)-1:\n", + " if closest_BP:\n", + " dBP = closest_BP\n", + " dBPt = closest_BPtime\n", + " found_BPmatch = True\n", + " else:\n", + " print('Bad 
pixels are not found!')\n", + "\n", + " if found_BPmatch:\n", + " print(\"Found constant {}: begin at {}\".format(const, dt))\n", + " print(\"Found bad pixels at {}\".format(dBPt))\n", + " constantBP_versions.append(dBP)\n", + " constant_versions.append(d)\n", + " constant_parameters.append(copy.deepcopy(pars))\n", + " found_BPmatch = False\n", + " break\n", + " else:\n", + " constant_versions += data\n", + " constant_parameters += [copy.deepcopy(pars)]*len(data)\n", + "\n", + "# Remove dublications\n", + "constant_versions_tmp = []\n", + "constant_parameters_tmp = []\n", + "for i, x in enumerate(constant_versions):\n", + " if x not in constant_versions_tmp:\n", + " constant_versions_tmp.append(x)\n", + " constant_parameters_tmp.append(constant_parameters[i])\n", + " \n", + "constant_versions=constant_versions_tmp\n", + "constant_parameters=constant_parameters_tmp\n", + "\n", + "print('Number of stored constant versions is {}'.format(len(constant_versions)))\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def get_rebined(a, rebin):\n", + " return a[:,:,0].reshape(\n", + " int(a.shape[0] / rebin[0]),\n", + " rebin[0],\n", + " int(a.shape[1] / rebin[1]),\n", + " rebin[1])\n", + " \n", + "def modify_const(const, data, isBP = False):\n", + " return data\n", + "\n", + "ret_constants = {}\n", + "constand_data = ConstantMetaData()\n", + "constant_BP = ConstantMetaData()\n", + "for i, constant_version in enumerate(constant_versions):\n", + "\n", + " const = constant_version['data_set_name'].split('/')[-2]\n", + " qm = db_module\n", + " \n", + " print(\"constant: {}, module {}\".format(const,qm))\n", + " \n", + " constand_data.retrieve_from_version_info(constant_version)\n", + " \n", + " if not const in ret_constants:\n", + " ret_constants[const] = {}\n", + " if not qm in ret_constants[const]:\n", + " ret_constants[const][qm] = []\n", + " \n", + " cdata = constand_data.calibration_constant.data\n", + " ctime = constand_data.calibration_constant_version.begin_at\n", + " \n", + " cdata = modify_const(const, cdata)\n", + " \n", + " # Create superpixels for constants without BP applied\n", + " cdata = get_rebined(cdata, spShape)\n", + " toStoreStd = np.nanstd(cdata, axis=(1, 3))\n", + " toStore = np.nanmean(cdata, axis=(1, 3))\n", + " \n", + " # Convert parameters to dict\n", + " dpar = {p.name: p.value for p in constand_data.detector_condition.parameters}\n", + " \n", + " print(\"Store values in dict\", const, qm, ctime)\n", + " ret_constants[const][qm].append({'ctime': ctime,\n", + " 'nBP': 0,\n", + " 'dataBP': 0,\n", + " 'dataBPStd': 0,\n", + " 'data': toStore,\n", + " 'dataStd': toStoreStd,\n", + " 'mdata': dpar}) \n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "if use_existing == \"\":\n", + " print('Save data to /CalDBAna_{}_{}.h5'.format(dclass, db_module))\n", + " save_dict_to_hdf5(ret_constants,\n", + " '{}/CalDBAna_{}_{}.h5'.format(out_folder, dclass, db_module))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "if use_existing == \"\":\n", + " fpath = '{}/CalDBAna_{}_*.h5'.format(out_folder, dclass)\n", + "else:\n", + " fpath = '{}/CalDBAna_{}_*.h5'.format(use_existing, dclass)\n", + "\n", + "print('Load data from {}'.format(fpath))\n", + "ret_constants = load_data_from_hdf5(fpath)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + 
"outputs": [], + "source": [ + "# Parameters for plotting\n", + "\n", + "# Define range for plotting\n", + "rangevals = {\n", + " \"OffsetEPix100\": [range_offset[0:2], range_offset[2:4]],\n", + " \"NoiseEPix100\": [range_noise[0:2], range_noise[2:4]],\n", + "}\n", + "\n", + "keys = {\n", + " 'Mean': ['data', '', 'Mean over pixels'],\n", + " 'std': ['dataStd', '', '$\\sigma$ over pixels'],\n", + " 'MeanBP': ['dataBP', 'Good pixels only', 'Mean over pixels'],\n", + " 'NBP': ['nBP', 'Fraction of BP', 'Fraction of BP'],\n", + " 'stdBP': ['dataBPStd', 'Good pixels only', '$\\sigma$ over pixels'],\n", + " 'stdASIC': ['', '', '$\\sigma$ over ASICs'],\n", + " 'stdCell': ['', '', '$\\sigma$ over Cells'],\n", + "}\n", + "\n", + "gain_name = ['High', 'Medium', 'Low']" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": false + }, + "outputs": [], + "source": [ + "print('Plot calibration constants')\n", + "\n", + "# loop over constat type\n", + "for const, modules in ret_constants.items():\n", + "\n", + " print('Const: {}'.format(const))\n", + "\n", + " # loop over modules\n", + " mod_data = {}\n", + " mod_data['stdASIC'] = []\n", + " mod_data['stdCell'] = []\n", + " mod_names = []\n", + " mod_times = []\n", + "\n", + " # Loop over modules\n", + " for mod, data in modules.items():\n", + " print(mod)\n", + "\n", + " ctimes = np.array(data[\"ctime\"])\n", + " ctimes_ticks = [x.strftime('%y-%m-%d') for x in ctimes]\n", + "\n", + " if (\"mdata\" in data):\n", + " cmdata = np.array(data[\"mdata\"])\n", + " for i, tick in enumerate(ctimes_ticks):\n", + " ctimes_ticks[i] = ctimes_ticks[i] + \\\n", + " ', V={:1.0f}'.format(cmdata[i]['Sensor Temperature']) + \\\n", + " ', T={:1.0f}'.format(\n", + " cmdata[i]['Integration Time'])\n", + "\n", + " sort_ind = np.argsort(ctimes_ticks)\n", + " ctimes_ticks = list(np.array(ctimes_ticks)[sort_ind])\n", + "\n", + " # Create sorted by data dataset\n", + " rdata = {}\n", + " for key, item in keys.items():\n", + " if item[0] in data:\n", + " rdata[key] = np.array(data[item[0]])[sort_ind]\n", + "\n", + " nTimes = rdata['Mean'].shape[0]\n", + " nPixels = rdata['Mean'].shape[1] * rdata['Mean'].shape[2]\n", + " nBins = nPixels\n", + "\n", + " # Avoid to low values\n", + " if const in [\"Noise\", \"Offset\", \"Noise-e\"]:\n", + " rdata['Mean'][rdata['Mean'] < 0.1] = np.nan\n", + " if 'MeanBP' in rdata:\n", + " rdata['MeanBP'][rdata['MeanBP'] < 0.1] = np.nan\n", + "\n", + " # Reshape: ASICs over cells for plotting\n", + " pdata = {}\n", + " for key in rdata:\n", + " if key not in ['Mean', 'std']:\n", + " continue\n", + " pdata[key] = rdata[key][:, :, :].reshape(nTimes, nBins).swapaxes(0, 1)\n", + "\n", + " # Summary over ASICs\n", + " adata = {}\n", + " for key in rdata:\n", + " if key not in ['Mean', 'std']:\n", + " continue\n", + " adata[key] = np.nanmean(rdata[key], axis=(1, 2))\n", + "\n", + " # Summary information over modules\n", + " for key in pdata:\n", + " if key not in mod_data:\n", + " mod_data[key] = []\n", + " mod_data[key].append(np.nanmean(pdata[key], axis=0))\n", + "\n", + " mod_data['stdASIC'].append(np.nanstd(rdata['Mean'], axis=(1, 2)))\n", + "\n", + " mod_names.append(mod)\n", + " mod_times.append(ctimes_ticks)\n", + "\n", + " # Plotting\n", + " for key in pdata:\n", + " \n", + " if key not in ['Mean', 'std']:\n", + " continue\n", + " \n", + " vmin = None\n", + " vmax = None\n", + " if const in rangevals and key in ['Mean', 'MeanBP']:\n", + " vmin = rangevals[const][0][0]\n", + " vmax = rangevals[const][0][1]\n", + 
"\n", + " if key == 'NBP':\n", + " unit = '[%]'\n", + " else:\n", + " unit = '[ADU]'\n", + " if const == 'Noise-e':\n", + " unit = '[$e^-$]'\n", + "\n", + " title = '{}, module {}, {}'.format(\n", + " const, mod, keys[key][1])\n", + " cb_label = '{}, {} {}'.format(const, keys[key][2], unit)\n", + "\n", + " hm_combine(pdata[key][::-1], htype=HMType.mro,\n", + " x_label='Creation Time', y_label='ASIC ID',\n", + " x_ticklabels=ctimes_ticks,\n", + " x_ticks=np.arange(len(ctimes_ticks))+0.3,\n", + " title=title, cb_label=cb_label,\n", + " vmin=vmin, vmax=vmax,\n", + " fname='{}/{}_{}_g{}_ASIC_{}.png'.format(\n", + " out_folder, const, mod.replace('_', ''), 0, key),\n", + " pad=[0.125, 0.125, 0.12, 0.185])\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/xfel_calibrate/notebooks.py b/xfel_calibrate/notebooks.py index 5e7badc9f57e37c9b7ef279d60ba8f9da0bbf469..cfc9cf55ed83733035e77d0ea3d4cd239c16a5a5 100644 --- a/xfel_calibrate/notebooks.py +++ b/xfel_calibrate/notebooks.py @@ -161,6 +161,12 @@ notebooks = { "use function": "balance_sequences", "cluster cores": 4}, }, + "STATS_FROM_DB": { + "notebook": "notebooks/ePix/PlotFromCalDB_ePix100_NBC.ipynb", + "concurrency": {"parameter": None, + "default concurrency": None, + "cluster cores": 1}, + }, }, "EPIX10K": { "DARK": {