diff --git a/notebooks/AGIPD/PlotFromCalDB_AGIPD_NBC.ipynb b/notebooks/AGIPD/PlotFromCalDB_AGIPD_NBC.ipynb deleted file mode 100644 index 842f9db5bc79439ceb893585e25adc621ba96613..0000000000000000000000000000000000000000 --- a/notebooks/AGIPD/PlotFromCalDB_AGIPD_NBC.ipynb +++ /dev/null @@ -1,694 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Statistical analysis of calibration factors#\n", - "\n", - "Author: Mikhail Karnevskiy, Steffen Hauf, Version 0.2\n", - "\n", - "Calibration constants for AGIPD1M detector from the data base with injection time between start_date and end_date are considered.\n", - "\n", - "To be visualized, calibration constants are averaged per ASICs. Plots shows calibration constant over time for each constant and for each module. Summary plots overall modules are created.\n", - "\n", - "In additional gain-slopes flat-field and pulse-capacitor are combined to relative-gain constant and presented as well. Noise in electron units is derived using gain factors and presented.\n", - "\n", - "Values shown in plots are saved in h5 files.\n", - "\n", - "All presented values corresponds to high and medium gain stages." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "cluster_profile = \"noDB\" # The ipcluster profile to use\n", - "start_date = \"2019-01-01\" # Date to start investigation interval from\n", - "end_date = \"NOW\" # Date to end investigation interval at, can be \"now\"\n", - "nconstants = 20 # Number of time stamps to plot. If not 0, overcome start_date.\n", - "constants = [\"Noise\", \"Offset\", \"SlopesFF\", \"SlopesPC\"] # Constants to plot\n", - "modules = [1] # Modules, set to -1 for all, range allowed\n", - "bias_voltages = [300] # Bias voltage\n", - "mem_cells = [250] # Number of used memory cells. Typically: 4,32,64,128,176.\n", - "acquisition_rate = [0.0, 1.1, 2.2, 4.5]\n", - "photon_energy = 9.2 # Photon energy of the beam\n", - "out_folder = \"/gpfs/exfel/data/scratch/karnem/test_AGIPD55/\" # Output folder, required\n", - "use_existing = \"\" # If not empty, constants stored in given folder will be used\n", - "cal_db_timeout = 120000 # timeout on caldb requests\",\n", - "adu_to_photon = 33.17 # ADU to photon conversion factor (8000 / 3.6 / 67.)\n", - "nMemToShow = 32 # Number of memory cells to be shown in plots over ASICs\n", - "db_module = \"AGIPD1M1\" # detector entry in the DB to investigate\n", - "dclass = \"AGIPD\" # Detector class\n", - "cal_db_interface = \"tcp://max-exfl016:8015#8025\" # the database interface to use\n", - "max_time = 15 # the max margin in min. for the matching closest bad pixels\n", - "range_offset = [4000., 5500, 6500, 8500] # plotting range for offset: high gain l, r, medium gain l, r \n", - "range_noise = [2.5, 15, 7.5, 17.0] # plotting range for noise: high gain l, r, medium gain l, r \n", - "range_gain = [0.8, 1.2, 0.8, 1.2] # plotting range for gain: high gain l, r, medium gain l, r \n", - "range_noise_e = [85., 500., 85., 500.] 
# plotting range for noise in [e-]: high gain l, r, medium gain l, r \n", - "range_slopesPC = [22.0, 27.0, -0.5, 1.5] # plotting range for slope PC: high gain l, r, medium gain l, r \n", - "range_slopesFF = [0.8, 1.2, 0.6, 1.2] # plotting range for slope FF: high gain l, r, medium gain l, r \n", - "plot_range = 3 # range for plotting in units of median absolute deviations\n", - "x_labels = ['Acquisition rate', 'Memory cells'] # parameters to be shown on X axis" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "import copy\n", - "import datetime\n", - "import dateutil.parser\n", - "import numpy as np\n", - "import os\n", - "import sys\n", - "import warnings\n", - "warnings.filterwarnings('ignore')\n", - "\n", - "from iCalibrationDB import Constants, Conditions, Detectors, ConstantMetaData\n", - "from cal_tools.tools import get_from_db, get_random_db_interface\n", - "from cal_tools.ana_tools import (save_dict_to_hdf5, load_data_from_hdf5, \n", - " combine_constants, HMType, IMType,\n", - " hm_combine, combine_lists, get_range)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Prepare variables\n", - "nMem = max(mem_cells) # Number of mem Cells to store\n", - "spShape = (64,64) # Shape of superpixel\n", - "\n", - "if modules[0] == -1:\n", - " modules = range(16)\n", - " \n", - "modules = [\"Q{}M{}\".format(x // 4 + 1, x % 4 + 1) for x in modules]\n", - "\n", - "acquisition_rate[acquisition_rate==0] = None\n", - "\n", - "constantsDark = {\"SlopesFF\": 'BadPixelsFF',\n", - " 'SlopesPC': 'BadPixelsPC',\n", - " 'Noise': 'BadPixelsDark',\n", - " 'Offset': 'BadPixelsDark'}\n", - "print('Bad pixels data: ', constantsDark)\n", - "\n", - "# Define parameters in order to perform loop over time stamps\n", - "start = datetime.datetime.now() if start_date.upper() == \"NOW\" else dateutil.parser.parse(\n", - " start_date)\n", - "end = datetime.datetime.now() if end_date.upper() == \"NOW\" else dateutil.parser.parse(\n", - " end_date)\n", - "\n", - "# Create output folder\n", - "os.makedirs(out_folder, exist_ok=True)\n", - "\n", - "# Get getector conditions\n", - "det = getattr(Detectors, db_module)\n", - "dconstants = getattr(Constants, dclass)\n", - "\n", - "print('CalDB Interface: {}'.format(cal_db_interface))\n", - "print('Start time at: ', start)\n", - "print('End time at: ', end)\n", - "print('Modules: ', modules)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "parameter_list = combine_lists(bias_voltages, modules, mem_cells, acquisition_rate,\n", - " names = ['bias_voltage', 'module', 'mem_cells', 'acquisition_rate'])\n", - "print(parameter_list)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": false - }, - "outputs": [], - "source": [ - "# Retrieve list of meta-data\n", - "constant_versions = []\n", - "constant_parameters = []\n", - "constantBP_versions = []\n", - "\n", - "# Loop over constants\n", - "for c, const in enumerate(constants):\n", - " \n", - " if use_existing != \"\":\n", - " break\n", - " \n", - " # Loop over parameters\n", - " for pars in parameter_list:\n", - " \n", - " if (const in [\"Offset\", \"Noise\", \"SlopesPC\"] or \"DARK\" in const.upper()):\n", - " dcond = Conditions.Dark\n", - " mcond = getattr(dcond, dclass)(\n", - " memory_cells=pars['mem_cells'],\n", - " bias_voltage=pars['bias_voltage'],\n", - " 
acquisition_rate=pars['acquisition_rate'])\n", - " else:\n", - " dcond = Conditions.Illuminated\n", - " mcond = getattr(dcond, dclass)(\n", - " memory_cells=pars['mem_cells'],\n", - " bias_voltage=pars['bias_voltage'],\n", - " acquisition_rate=pars['acquisition_rate'],\n", - " photon_energy=photon_energy)\n", - "\n", - " print('Request: ', const, 'with paramters:', pars)\n", - " # Request Constant versions for given parameters and module\n", - " data = get_from_db(getattr(det, pars['module']),\n", - " getattr(dconstants,\n", - " const)(),\n", - " copy.deepcopy(mcond), None,\n", - " cal_db_interface,\n", - " creation_time=start,\n", - " verbosity=2,\n", - " timeout=cal_db_timeout,\n", - " meta_only=True,\n", - " version_info=True)\n", - "\n", - " if not isinstance(data, list):\n", - " continue\n", - " \n", - " # Request BP constant versions\n", - " print('constantDark:', constantsDark[const], ) \n", - " dataBP = get_from_db(getattr(det, pars['module']),\n", - " getattr(dconstants, \n", - " constantsDark[const])(),\n", - " copy.deepcopy(mcond), None,\n", - " cal_db_interface,\n", - " creation_time=start,\n", - " verbosity=2,\n", - " timeout=cal_db_timeout,\n", - " meta_only=True,\n", - " version_info=True)\n", - " \n", - " for d in data:\n", - " # print('Item: ', d)\n", - " # Match proper BP constant version\n", - " # and get constant version within\n", - " # requested time range\n", - " if d is None:\n", - " print('Time or data is not found!')\n", - " continue\n", - "\n", - " dt = dateutil.parser.parse(d['begin_at'])\n", - "\n", - " if (dt.replace(tzinfo=None) > end or \n", - " (nconstants==0 and dt.replace(tzinfo=None) < start)):\n", - " continue\n", - " \n", - " closest_BP = None\n", - " closest_BPtime = None\n", - " found_BPmatch = False\n", - " \n", - " if not isinstance(dataBP, list):\n", - " dataBP = []\n", - " \n", - " for dBP in dataBP:\n", - " if dBP is None:\n", - " print(\"Bad pixels are not found!\")\n", - " continue\n", - " \n", - " dt = dateutil.parser.parse(d['begin_at'])\n", - " dBPt = dateutil.parser.parse(dBP['begin_at'])\n", - " \n", - " if dt == dBPt:\n", - " found_BPmatch = True\n", - " else:\n", - "\n", - " if np.abs(dBPt-dt).seconds < (max_time*60):\n", - " if closest_BP is None:\n", - " closest_BP = dBP\n", - " closest_BPtime = dBPt\n", - " else:\n", - " if np.abs(dBPt-dt) < np.abs(closest_BPtime-dt):\n", - " closest_BP = dBP\n", - " closest_BPtime = dBPt\n", - " \n", - " if dataBP.index(dBP) == len(dataBP)-1:\n", - " if closest_BP:\n", - " dBP = closest_BP\n", - " dBPt = closest_BPtime\n", - " found_BPmatch = True\n", - " else:\n", - " print('Bad pixels are not found!')\n", - " \n", - " if found_BPmatch:\n", - " print(\"Found constant {}: begin at {}\".format(const, dt))\n", - " print(\"Found bad pixels at {}\".format(dBPt))\n", - " constantBP_versions.append(dBP)\n", - " constant_versions.append(d)\n", - " constant_parameters.append(copy.deepcopy(pars))\n", - " break\n", - " \n", - " if not found_BPmatch:\n", - " print('Bad pixels are not matched')\n", - " constantBP_versions.append(None)\n", - " constant_versions.append(d)\n", - " constant_parameters.append(copy.deepcopy(pars))\n", - " \n", - "print('Number of retrieved constants {}'.format(len(constant_versions)))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "def prepare_to_store(a, nMem):\n", - " shape = list(a.shape[:2])+[nMem, 2]\n", - " b = np.full(shape, np.nan)\n", - " b[:, :, :a.shape[2]] = a[:, :, :, :2]\n", - " return b\n", - "\n", - 
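The retrieval loop above pairs each constant version with a bad-pixel map by comparing `begin_at` timestamps: an exact match is taken directly, otherwise the closest map within `max_time` minutes is kept. A minimal sketch of that selection rule on plain metadata dicts (the helper name `match_bad_pixel_version` and the sample timestamps are illustrative, not part of the notebook); it uses `total_seconds()` instead of the notebook's `timedelta.seconds`, which silently drops whole days from the difference:

```python
import dateutil.parser

def match_bad_pixel_version(constant, bp_versions, max_time_min=15):
    """Return the bad-pixel version whose begin_at lies closest to the
    constant's begin_at, or None if no candidate is within max_time_min."""
    t_const = dateutil.parser.parse(constant['begin_at'])
    closest, closest_delta = None, None
    for bp in bp_versions:
        if bp is None:
            continue
        delta = abs(dateutil.parser.parse(bp['begin_at']) - t_const)
        # total_seconds() avoids the day wrap-around of timedelta.seconds
        if delta.total_seconds() > max_time_min * 60:
            continue
        if closest is None or delta < closest_delta:
            closest, closest_delta = bp, delta
    return closest

# Illustrative metadata dictionaries (only begin_at matters here)
const = {'begin_at': '2019-03-01T10:00:00'}
bps = [{'begin_at': '2019-03-01T10:05:00'},
       {'begin_at': '2019-03-01T12:00:00'}]
print(match_bad_pixel_version(const, bps)['begin_at'])  # 2019-03-01T10:05:00
```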
"def get_rebined(a, rebin):\n", - " return a.reshape(\n", - " int(a.shape[0] / rebin[0]),\n", - " rebin[0],\n", - " int(a.shape[1] / rebin[1]),\n", - " rebin[1],\n", - " a.shape[2],\n", - " a.shape[3])\n", - " \n", - "def modify_const(const, data, isBP = False):\n", - " if const in ['SlopesFF']:\n", - " if (len(data.shape) == 4):\n", - " data = data[:, :, :, 0][..., None]\n", - " else:\n", - " data = data[..., None]\n", - " \n", - " if data.shape[2]<3:\n", - " data = data[:,:,0,None]\n", - "\n", - " if not isBP:\n", - " if data.shape[0] != 128:\n", - " data = data.swapaxes(0, 2).swapaxes(1, 3).swapaxes(2, 3)\n", - "\n", - " # Copy slope medium to be saved later\n", - " if const in ['SlopesPC']:\n", - " data[:, :, :, 1] = data[:, :, :, 3]\n", - " else:\n", - " if const in ['SlopesPC']:\n", - " if len(data.shape) == 3:\n", - " data = data[:, :, :, None].repeat(10, axis=3)\n", - "\n", - " if data.shape[0] != 128:\n", - " data = data.swapaxes(0, 1).swapaxes(1, 2)\n", - " \n", - " if len(data.shape) < 4:\n", - " print(data.shape, \"Unexpected shape!\")\n", - " return data\n", - "\n", - "\n", - "\n", - "ret_constants = {}\n", - "constant_data = ConstantMetaData()\n", - "constant_BP = ConstantMetaData()\n", - "# sort over begin_at\n", - "idxs, _ = zip(*sorted(enumerate(constant_versions), \n", - " key=lambda x: x[1]['begin_at'], reverse=True))\n", - "\n", - "for i in idxs:\n", - " const = constant_versions[i]['data_set_name'].split('/')[-2]\n", - " qm = constant_parameters[i]['module']\n", - " \n", - " if not const in ret_constants:\n", - " ret_constants[const] = {}\n", - " if not qm in ret_constants[const]:\n", - " ret_constants[const][qm] = []\n", - " \n", - " if nconstants>0 and len(ret_constants[const][qm])>=nconstants:\n", - " continue\n", - " \n", - "\n", - " constant_data.retrieve_from_version_info(constant_versions[i])\n", - " cdata = constant_data.calibration_constant.data\n", - " ctime = constant_data.calibration_constant_version.begin_at \n", - " cdata = modify_const(const, cdata)\n", - " print(\"constant: {}, module {}, begin_at {}\".format(const, qm, ctime))\n", - "\n", - " if constantBP_versions[i]:\n", - " constant_BP.retrieve_from_version_info(constantBP_versions[i])\n", - " cdataBP = constant_BP.calibration_constant.data\n", - " cdataBP = modify_const(const, cdataBP, True)\n", - "\n", - " if cdataBP.shape != cdata.shape:\n", - " print('Wrong bad pixel shape! 
{}, expected {}'.format(cdataBP.shape, cdata.shape))\n", - " cdataBP = np.full_like(cdata, -1)\n", - "\n", - " # Apply bad pixel mask\n", - " cdataABP = np.copy(cdata)\n", - " cdataABP[cdataBP > 0] = np.nan\n", - "\n", - " # Create superpixels for constants with BP applied\n", - " cdataABP = get_rebined(cdataABP, spShape)\n", - " toStoreBP = prepare_to_store(np.nanmean(cdataABP, axis=(1, 3)), nMem)\n", - " toStoreBPStd = prepare_to_store(np.nanstd(cdataABP, axis=(1, 3)), nMem)\n", - "\n", - " # Prepare number of bad pixels per superpixels\n", - " cdataBP = get_rebined(cdataBP, spShape)\n", - " cdataNBP = prepare_to_store(np.nansum(cdataBP > 0, axis=(1, 3)), nMem)\n", - "\n", - " # Create superpixels for constants without BP applied\n", - " cdata = get_rebined(cdata, spShape)\n", - " toStoreStd = prepare_to_store(np.nanstd(cdata, axis=(1, 3)), nMem)\n", - " toStore = prepare_to_store(np.nanmean(cdata, axis=(1, 3)), nMem)\n", - " \n", - " if not constantBP_versions[i]:\n", - " toStoreBP = np.full_like(toStore, IMType.NO_BPMAP.value)\n", - " toStoreBPStd = np.full_like(toStore, IMType.NO_BPMAP.value)\n", - " cdataNBP = np.full_like(toStore, IMType.NO_BPMAP.value)\n", - " \n", - " dpar = {p.name: p.value for p in constant_data.detector_condition.parameters}\n", - "\n", - " print(\"Store values in dict\", const, qm, ctime)\n", - " ret_constants[const][qm].append({'ctime': ctime,\n", - " 'nBP': cdataNBP,\n", - " 'dataBP': toStoreBP,\n", - " 'dataBPStd': toStoreBPStd,\n", - " 'data': toStore,\n", - " 'dataStd': toStoreStd,\n", - " 'mdata': dpar}) \n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "if use_existing == \"\":\n", - " print('Save data to /CalDBAna_{}_{}.h5'.format(dclass, modules[0]))\n", - " save_dict_to_hdf5(ret_constants,\n", - " '{}/CalDBAna_{}_{}.h5'.format(out_folder, dclass, modules[0]))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "if use_existing == \"\":\n", - " fpath = '{}/CalDBAna_{}_*.h5'.format(out_folder, dclass)\n", - "else:\n", - " fpath = '{}/CalDBAna_{}_*.h5'.format(use_existing, dclass)\n", - "\n", - "print('Load data from {}'.format(fpath))\n", - "ret_constants = load_data_from_hdf5(fpath)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Combine FF and PC data to calculate Gain\n", - "# Estimate Noise in units of electrons\n", - "print ('Calculate Gain and Noise in electron units')\n", - "\n", - "ret_constants[\"Gain\"] = {}\n", - "ret_constants[\"Noise-e\"] = {}\n", - "for mod in list(range(16)):\n", - " if (\"SlopesFF\" not in ret_constants or\n", - " \"SlopesPC\" not in ret_constants):\n", - " break\n", - "\n", - " qm = \"Q{}M{}\".format(mod // 4 + 1, mod % 4 + 1)\n", - " print(qm)\n", - "\n", - " if (qm not in ret_constants[\"SlopesFF\"] or\n", - " qm not in ret_constants[\"SlopesPC\"]):\n", - " continue\n", - "\n", - " ret_constants[\"Gain\"][qm] = {}\n", - "\n", - " dataFF = ret_constants[\"SlopesFF\"][qm]\n", - " dataPC = ret_constants[\"SlopesPC\"][qm]\n", - "\n", - " if (len(dataFF) == 0 or len(dataPC) == 0):\n", - " continue\n", - "\n", - " ctimesFF = np.array(dataFF[\"ctime\"])\n", - " ctimesPC = np.array(dataPC[\"ctime\"])\n", - "\n", - " ctime, icomb = combine_constants(ctimesFF, ctimesPC)\n", - "\n", - " cdataPC_vs_time = np.array(dataPC[\"data\"])[..., 0]\n", - " cdataFF_vs_time = 
np.array(dataFF[\"data\"])[..., 0]\n", - "\n", - " cdataFF_vs_time = np.nanmedian(cdataFF_vs_time, axis=3)[..., None]\n", - "\n", - " cdataFF_vs_time /= np.nanmedian(cdataFF_vs_time, axis=(1, 2, 3))[:, None,\n", - " None, None]\n", - " cdataPC_vs_time /= np.nanmedian(cdataPC_vs_time, axis=(1, 2, 3))[:, None,\n", - " None, None]\n", - "\n", - " gain_vs_time = []\n", - " for iFF, iPC in icomb:\n", - " gain_vs_time.append(cdataFF_vs_time[iFF] * cdataPC_vs_time[iPC])\n", - "\n", - " print(np.array(gain_vs_time).shape)\n", - " \n", - " ctime_ts = [t.timestamp() for t in ctime]\n", - " \n", - " ret_constants[\"Gain\"][qm][\"ctime\"] = ctime\n", - " ret_constants[\"Gain\"][qm][\"data\"] = np.array(gain_vs_time)\n", - " # Fill missing data for compatibility with plotting code\n", - " ret_constants[\"Gain\"][qm][\"dataBP\"] = np.array(gain_vs_time)\n", - " ret_constants[\"Gain\"][qm][\"nBP\"] = np.array(gain_vs_time)\n", - "\n", - " if \"Noise\" not in ret_constants:\n", - " continue\n", - "\n", - " if qm not in ret_constants[\"Noise\"]:\n", - " continue\n", - "\n", - " dataN = ret_constants[\"Noise\"][qm]\n", - " if len(dataN) == 0:\n", - " continue\n", - "\n", - " ret_constants[\"Noise-e\"][qm] = {}\n", - " \n", - " ctimesG = np.array(ctime)\n", - " ctimesN = np.array(dataN[\"ctime\"])\n", - "\n", - " ctime, icomb = combine_constants(ctimesG, ctimesN)\n", - "\n", - " cdataG_vs_time = np.array(gain_vs_time)\n", - " cdataN_vs_time = np.array(dataN[\"data\"])[..., 0]\n", - "\n", - " data_vs_time = []\n", - " for iG, iN in icomb:\n", - " data_vs_time.append(\n", - " cdataN_vs_time[iN] * adu_to_photon / cdataG_vs_time[iG])\n", - "\n", - " print(np.array(gain_vs_time).shape)\n", - " ctime_ts = [t.timestamp() for t in ctime]\n", - " ret_constants[\"Noise-e\"][qm][\"ctime\"] = ctime\n", - " ret_constants[\"Noise-e\"][qm][\"data\"] = np.array(data_vs_time)\n", - " # Fill missing data for compatibility with plotting code\n", - " ret_constants[\"Noise-e\"][qm][\"dataBP\"] = np.array(data_vs_time)\n", - " ret_constants[\"Noise-e\"][qm][\"nBP\"] = np.array(data_vs_time)\n", - " \n", - "save_dict_to_hdf5({k:v for k,v in ret_constants.items() if k in ['Gain', 'Noise-e']},\n", - " '{}/CalDBAna_{}_Gain.h5'.format(out_folder, dclass))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Parameters for plotting\n", - "\n", - "# Define range for plotting\n", - "rangevals = {\n", - " \"Offset\": [range_offset[0:2], range_offset[2:4]],\n", - " \"Noise\": [range_noise[0:2], range_noise[2:4]],\n", - " \"Gain\": [range_gain[0:2], range_gain[2:4]],\n", - " \"Noise-e\": [range_noise_e[0:2], range_noise_e[2:4]],\n", - " \"SlopesPC\": [range_slopesPC[0:2], range_slopesPC[2:4]],\n", - " \"SlopesFF\": [range_slopesFF[0:2], range_slopesFF[2:4]]\n", - "}\n", - "\n", - "keys = {\n", - " 'Mean': ['data', '', 'Mean over pixels'],\n", - " 'std': ['dataStd', '', '$\\sigma$ over pixels'],\n", - " 'MeanBP': ['dataBP', 'Good pixels only', 'Mean over pixels'],\n", - " 'NBP': ['nBP', 'Fraction of BP', 'Fraction of BP'],\n", - " 'stdBP': ['dataBPStd', 'Good pixels only', '$\\sigma$ over pixels'],\n", - " 'stdASIC': ['', '', '$\\sigma$ over ASICs'],\n", - " 'stdCell': ['', '', '$\\sigma$ over Cells'],\n", - "}\n", - "\n", - "gain_name = ['High', 'Medium', 'Low']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": false - }, - "outputs": [], - "source": [ - "print('Plot calibration constants')\n", - "\n", - "# loop over constat type\n", 
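The cell above normalises the flat-field and pulse-capacitor slopes by their medians, multiplies them into a relative gain per matched timestamp, and converts the dark noise from ADU to electrons with `adu_to_photon` (8000 eV / 3.6 eV per e-h pair / 67 ADU per photon). A condensed sketch of that arithmetic on synthetic arrays, assuming the time axes are already aligned (the real cell pairs them with `combine_constants`); shapes and values are made up:

```python
import numpy as np

rng = np.random.default_rng(0)
# Illustrative shape: (n_times, ny_superpixels, nx_superpixels, n_cells)
slopes_ff = rng.normal(1.0, 0.05, (3, 8, 16, 32))   # flat-field slopes
slopes_pc = rng.normal(25.0, 1.0, (3, 8, 16, 32))   # pulse-capacitor slopes
noise_adu = rng.normal(10.0, 1.0, (3, 8, 16, 32))   # dark noise in ADU
adu_to_electrons = 33.17                             # 8000 / 3.6 / 67

# Median-normalise each slope set so both become dimensionless relative gains
ff_rel = slopes_ff / np.nanmedian(slopes_ff, axis=(1, 2, 3), keepdims=True)
pc_rel = slopes_pc / np.nanmedian(slopes_pc, axis=(1, 2, 3), keepdims=True)

gain = ff_rel * pc_rel                         # combined relative gain
noise_e = noise_adu * adu_to_electrons / gain  # noise expressed in electrons
print(gain.shape, float(np.nanmean(noise_e)))
```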
- "for const, mods in ret_constants.items():\n", - "\n", - " # Loop over gain\n", - " for gain in range(2):\n", - " print('Const: {}, gain {}'.format(const, gain))\n", - "\n", - " if const in [\"Gain\", \"Noise-e\"] and gain == 1:\n", - " continue\n", - " else:\n", - " pass\n", - "\n", - " # Loop over modules\n", - " for mod, data in mods.items():\n", - " if mod not in modules:\n", - " continue\n", - "\n", - " print(mod)\n", - " ctimes = np.array(data[\"ctime\"])\n", - " ctimes_ticks = [x.strftime('%y-%m-%d') for x in ctimes]\n", - "\n", - " if (\"mdata\" in data):\n", - " cmdata = np.array(data[\"mdata\"])\n", - " for i, tick in enumerate(ctimes_ticks):\n", - " ctimes_ticks[i] = ctimes_ticks[i] + \\\n", - " ', A={}'.format(cmdata[i].get('Acquisition rate', None)) + \\\n", - " ', M={:1.0f}'.format(\n", - " cmdata[i]['Memory cells'])\n", - "\n", - " sort_ind = np.argsort(ctimes_ticks)\n", - " ctimes_ticks = list(np.array(ctimes_ticks)[sort_ind])\n", - "\n", - " # Create sorted by data dataset\n", - " rdata = {}\n", - " for key, item in keys.items():\n", - " if item[0] in data:\n", - " rdata[key] = np.array(data[item[0]])[sort_ind]\n", - "\n", - " nTimes = rdata['Mean'].shape[0]\n", - " nPixels = rdata['Mean'].shape[1] * rdata['Mean'].shape[2]\n", - " nBins = nMemToShow * nPixels\n", - "\n", - " # Select gain\n", - " if const not in [\"Gain\", \"Noise-e\"]:\n", - " for key in rdata:\n", - " rdata[key] = rdata[key][..., gain]\n", - "\n", - " # Avoid to low values\n", - " if const in [\"Noise\", \"Offset\", \"Noise-e\"]:\n", - " rdata['Mean'][rdata['Mean'] < 0.1] = np.nan\n", - " if 'MeanBP' in rdata:\n", - " rdata['MeanBP'][rdata['MeanBP'] < 0.1] = np.nan\n", - "\n", - " if 'NBP' in rdata:\n", - " rdata['NBP'] = rdata['NBP'].astype(float)\n", - " rdata[\"NBP\"][rdata[\"NBP\"] == (spShape[0] * spShape[1])] = np.nan\n", - " rdata[\"NBP\"] = rdata[\"NBP\"] / (spShape[0] * spShape[1]) * 100\n", - "\n", - " # Reshape: ASICs over cells for plotting\n", - " pdata = {}\n", - " for key in rdata:\n", - " if len(rdata[key].shape)<3:\n", - " continue\n", - " pdata[key] = rdata[key][:, :, :, :nMemToShow].reshape(\n", - " nTimes, nBins).swapaxes(0, 1)\n", - "\n", - " # Summary over ASICs\n", - " adata = {}\n", - " for key in rdata:\n", - " if len(rdata[key].shape)<3:\n", - " continue\n", - " adata[key] = np.nanmean(rdata[key], axis=(1, 2)).swapaxes(0, 1)\n", - "\n", - " # Plotting\n", - " for key in pdata:\n", - " vmin,vmax = get_range(pdata[key][::-1], plot_range)\n", - " if const in rangevals and key in ['Mean', 'MeanBP']:\n", - " vmin = rangevals[const][gain][0]\n", - " vmax = rangevals[const][gain][1]\n", - "\n", - " if key == 'NBP':\n", - " unit = '[%]'\n", - " else:\n", - " unit = '[ADU]'\n", - " if const == 'Noise-e':\n", - " unit = '[$e^-$]'\n", - "\n", - " title = '{}, module {}, {} gain, {}'.format(\n", - " const, mod, gain_name[gain], keys[key][1])\n", - " cb_label = '{}, {} {}'.format(const, keys[key][2], unit)\n", - "\n", - " hm_combine(pdata[key][::-1], htype=HMType.INSET_AXIS,\n", - " x_label='Creation Time', y_label='ASIC ID',\n", - " x_ticklabels=ctimes_ticks,\n", - " x_ticks=np.arange(len(ctimes_ticks))+0.3,\n", - " title=title, cb_label=cb_label,\n", - " vmin=vmin, vmax=vmax,\n", - " fname='{}/{}_{}_g{}_ASIC_{}.png'.format(\n", - " out_folder, const, mod, gain, key),\n", - " y_ticks=np.arange(nBins, step=nMemToShow)+16,\n", - " y_ticklabels=np.arange(nPixels)[::-1]+1,\n", - " pad=[0.125, 0.125, 0.12, 0.185])\n", - "\n", - " hm_combine(adata[key],\n", - " x_label='Creation Time', 
y_label='Memory cell ID',\n", - " x_ticklabels=ctimes_ticks,\n", - " x_ticks=np.arange(len(ctimes_ticks))+0.3,\n", - " title=title, cb_label=cb_label,\n", - " fname='{}/{}_{}_g{}_MEM_{}.png'.format(\n", - " out_folder, const, mod, gain, key),\n", - " vmin=vmin, vmax=vmax)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.7" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/notebooks/FastCCD/Characterize_Darks_NewDAQ_FastCCD_NBC.ipynb b/notebooks/FastCCD/Characterize_Darks_NewDAQ_FastCCD_NBC.ipynb deleted file mode 100644 index 6ee85833c3b5a5bad96148f38525a4b54c138b56..0000000000000000000000000000000000000000 --- a/notebooks/FastCCD/Characterize_Darks_NewDAQ_FastCCD_NBC.ipynb +++ /dev/null @@ -1,574 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# FastCCD Dark Characterization\n", - "\n", - "Author: I. KlaÄková, S. Hauf, Version 1.0\n", - "\n", - "The following notebook provides dark image analysis of the FastCCD detector.\n", - "\n", - "Dark characterization evaluates offset and noise of the detector and gives information about bad pixels. Resulting maps are saved as .h5 files for a latter use." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T10:54:38.999974Z", - "start_time": "2018-12-06T10:54:38.983406Z" - } - }, - "outputs": [], - "source": [ - "in_folder = \"/gpfs/exfel/exp/SCS/201930/p900074/raw/\" # input folder, required\n", - "out_folder = 'gpfs/exfel/data/scratch/haufs/test/' # output folder, required\n", - "path_template = 'RAW-R{:04d}-DA05-S{{:05d}}.h5' # the template to use to access data\n", - "run = 321 # which run to read data from, required\n", - "number_dark_frames = 0 # number of images to be used, if set to 0 all available images are used\n", - "cluster_profile = \"noDB\" # ipcluster profile to use\n", - "operation_mode = \"FF\" #o r \"FF\". FS stands for frame-store and FF for full-frame opeartion\n", - "sigma_noise = 10. # Pixel exceeding 'sigmaNoise' * noise value in that pixel will be masked\n", - "h5path = '/INSTRUMENT/SCS_CDIDET_FCCD2M/DAQ/FCCD:daqOutput/data/image/pixels' # path in the HDF5 file the data is at\n", - "h5path_t = '/CONTROL/SCS_CDIDET_FCCD2M/CTRL/LSLAN/inputA/crdg/value' # path to find temperature at\n", - "h5path_cntrl = '/RUN/SCS_CDIDET_FCCD2M/DET/FCCD' # path to control data\n", - "cal_db_interface = \"tcp://max-exfl016:8020\" # calibration DB interface to use\n", - "local_output = False # output also in as H5 files\n", - "temp_limits = 5 # limits within which temperature is considered the same\n", - "sequence = 0 # sequence file to use\n", - "multi_iteration = False # use multiple iterations\n", - "use_dir_creation_date = True # use dir creation date\n", - "bad_pixel_offset_sigma = 5. # offset standard deviations above which to consider pixel bad \n", - "bad_pixel_noise_sigma = 5. # noise standard deviations above which to consider pixel bad \n", - "fix_temperature = 0. 
# fix temperature to this value, set to 0 to use slow control value" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T10:54:39.190907Z", - "start_time": "2018-12-06T10:54:39.186154Z" - } - }, - "outputs": [], - "source": [ - "from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions\n", - "from iCalibrationDB.detectors import DetectorTypes" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T10:54:39.467334Z", - "start_time": "2018-12-06T10:54:39.427784Z" - } - }, - "outputs": [], - "source": [ - "import XFELDetAna.xfelprofiler as xprof\n", - "\n", - "profiler = xprof.Profiler()\n", - "profiler.disable()\n", - "from XFELDetAna.util import env\n", - "env.iprofile = cluster_profile\n", - "\n", - "import warnings\n", - "warnings.filterwarnings('ignore')\n", - "\n", - "from XFELDetAna import xfelpycaltools as xcal\n", - "from XFELDetAna import xfelpyanatools as xana\n", - "from XFELDetAna.plotting.util import prettyPlotting\n", - "prettyPlotting=True\n", - "from XFELDetAna.xfelreaders import ChunkReader\n", - "from XFELDetAna.detectors.fastccd import readerh5 as fastccdreaderh5\n", - "from cal_tools.tools import get_dir_creation_date\n", - "\n", - "import numpy as np\n", - "import h5py\n", - "import matplotlib.pyplot as plt\n", - "from iminuit import Minuit\n", - "\n", - "import time\n", - "import copy\n", - "\n", - "from prettytable import PrettyTable\n", - "\n", - "%matplotlib inline\n", - "\n", - "def nImagesOrLimit(nImages, limit):\n", - " if limit == 0:\n", - " return nImages\n", - " else:\n", - " return min(nImages, limit)\n", - " \n", - "sigmaNoise = sigma_noise" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "proposal = list(filter(None, in_folder.strip('/').split('/')))[-2]\n", - "file_loc = 'proposal:{} runs:{}'.format(proposal, run)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T10:54:40.058101Z", - "start_time": "2018-12-06T10:54:40.042615Z" - } - }, - "outputs": [], - "source": [ - "if operation_mode == \"FS\":\n", - " x = 960 # rows of the FastCCD to analyze in FS mode \n", - " y = 960 # columns of the FastCCD to analyze in FS mode \n", - " print('\\nYou are analyzing data in FS mode.')\n", - "else:\n", - " x = 1934 # rows of the FastCCD to analyze in FF mode \n", - " y = 960 # columns of the FastCCD to analyze in FF mode\n", - " print('\\nYou are analyzing data in FF mode.\\n')\n", - " \n", - "ped_dir = \"{}/r{:04d}\".format(in_folder, run)\n", - "fp_name = path_template.format(run)\n", - "\n", - "import datetime\n", - "creation_time = None\n", - "if use_dir_creation_date:\n", - " creation_time = get_dir_creation_date(in_folder, run)\n", - "\n", - "fp_path = '{}/{}'.format(ped_dir, fp_name)\n", - "\n", - "print(\"Reading data from: {}\\n\".format(fp_path))\n", - "print(\"Run is: {}\".format(run))\n", - "print(\"HDF5 path: {}\".format(h5path))\n", - "if creation_time:\n", - " print(\"Using {} as creation time\".format(creation_time.isoformat()))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T10:54:40.555804Z", - "start_time": "2018-12-06T10:54:40.452978Z" - } - }, - "outputs": [], - "source": [ - "filename = fp_path.format(sequence)\n", - "sensorSize = [x, y]\n", - "chunkSize = 100 
#Number of images to read per chunk\n", - "#Sensor area will be analysed according to blocksize\n", - "blockSize = [sensorSize[0]//2, sensorSize[1]//4] \n", - "xcal.defaultBlockSize = blockSize\n", - "cpuCores = 8 #Specifies the number of running cpu cores\n", - "memoryCells = 1 #FastCCD has 1 memory cell\n", - "#Specifies total number of images to proceed\n", - "nImages = fastccdreaderh5.getDataSize(filename, h5path)[0] \n", - "nImages = nImagesOrLimit(nImages, number_dark_frames)\n", - "print(\"\\nNumber of dark images to analyze: \",nImages)\n", - "commonModeBlockSize = blockSize\n", - "commonModeAxisR = 'row'#Axis along which common mode will be calculated\n", - "run_parallel = True\n", - "profile = False\n", - "\n", - "with h5py.File(filename, 'r') as f:\n", - " bias_voltage = int(f['{}/biasclock/bias/value'.format(h5path_cntrl)][0])\n", - " det_gain = int(f['{}/exposure/gain/value'.format(h5path_cntrl)][0])\n", - " integration_time = int(f['{}/acquisitionTime/value'.format(h5path_cntrl)][0])\n", - " temperature = np.mean(f[h5path_t])\n", - " temperature_k = temperature + 273.15\n", - " \n", - " if fix_temperature != 0.:\n", - " temperature_k = fix_temperature\n", - " print(\"Using fixed temperature\")\n", - " print(\"Bias voltage is {} V\".format(bias_voltage))\n", - " print(\"Detector gain is set to x{}\".format(det_gain))\n", - " print(\"Detector integration time is set to {}\".format(integration_time))\n", - " print(\"Mean temperature was {:0.2f} °C / {:0.2f} K\".format(temperature, temperature_k))\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T10:54:41.584031Z", - "start_time": "2018-12-06T10:54:41.578462Z" - } - }, - "outputs": [], - "source": [ - "reader = ChunkReader(filename, fastccdreaderh5.readData, \n", - " nImages, chunkSize, \n", - " path = h5path, \n", - " pixels_x = sensorSize[0],\n", - " pixels_y = sensorSize[1],)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T10:54:41.899511Z", - "start_time": "2018-12-06T10:54:41.864816Z" - } - }, - "outputs": [], - "source": [ - "noiseCal = xcal.NoiseCalculator(sensorSize, memoryCells, \n", - " cores=cpuCores, blockSize=blockSize,\n", - " runParallel=run_parallel)\n", - "histCalRaw = xcal.HistogramCalculator(sensorSize, bins=1000, \n", - " range=[0, 10000], parallel=False, \n", - " memoryCells=memoryCells, \n", - " cores=cpuCores, blockSize=blockSize)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### First Iteration" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Characterization of dark images with purpose to create dark maps (offset, noise and bad pixel maps) is an iterative process. Firstly, initial offset and noise maps are produced from raw dark data." 
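The markdown above describes the first pass: initial offset and noise maps are produced directly from raw dark frames. Conceptually this is a per-pixel mean and standard deviation after masking the raw 16-bit words to 14 bits, a step the notebook delegates to `xcal.NoiseCalculator`. A numpy-only sketch of that idea (the array shape is a small stand-in for the 1934×960 sensor, and treating the two masked-off bits as non-signal follows the notebook's `0b0011111111111111` mask):

```python
import numpy as np

def dark_maps(frames):
    """Per-pixel offset (mean) and noise (std) from a stack of raw dark frames
    of shape (n_images, rows, cols), masked to the lower 14 bits."""
    data = np.bitwise_and(frames.astype(np.uint16), 0b0011111111111111)
    data = data.astype(np.float32)
    return data.mean(axis=0), data.std(axis=0)

# Small synthetic stand-in for one chunk of dark images
rng = np.random.default_rng(1)
raw = rng.integers(3500, 3700, size=(100, 128, 96), dtype=np.uint16)
offset_map, noise_map = dark_maps(raw)
print(offset_map.shape, float(noise_map.mean()))
```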
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T10:55:21.238009Z", - "start_time": "2018-12-06T10:54:54.586435Z" - } - }, - "outputs": [], - "source": [ - "for data in reader.readChunks():\n", - " data = np.bitwise_and(data.astype(np.uint16), 0b0011111111111111).astype(np.float32)\n", - " dx = np.count_nonzero(data, axis=(0, 1))\n", - " data = data[:,:,dx != 0]\n", - " histCalRaw.fill(data)\n", - " #Filling calculators with data\n", - " noiseCal.fill(data)\n", - " \n", - "offsetMap = noiseCal.getOffset() #Produce offset map\n", - "noiseMap = noiseCal.get() #Produce noise map\n", - "noiseCal.reset() #Reset noise calculator\n", - "print(\"Initial maps were created\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T10:56:20.686534Z", - "start_time": "2018-12-06T10:56:11.721829Z" - } - }, - "outputs": [], - "source": [ - "#**************OFFSET MAP HISTOGRAM***********#\n", - "ho,co = np.histogram(offsetMap.flatten(), bins=700)\n", - "\n", - "do = {'x': co[:-1],\n", - " 'y': ho,\n", - " 'y_err': np.sqrt(ho[:]),\n", - " 'drawstyle': 'bars',\n", - " 'color': 'cornflowerblue',\n", - " }\n", - "\n", - "fig = xana.simplePlot(do, figsize='1col', aspect=2, \n", - " x_label = 'Offset (ADU)', \n", - " y_label=\"Counts\", y_log=True,\n", - " )\n", - " \n", - "\n", - "#*****NOISE MAP HISTOGRAM FROM THE OFFSET CORRECTED DATA*******#\n", - "hn,cn = np.histogram(noiseMap.flatten(), bins=200)\n", - "\n", - "dn = {'x': cn[:-1],\n", - " 'y': hn,\n", - " 'y_err': np.sqrt(hn[:]),\n", - " 'drawstyle': 'bars',\n", - " 'color': 'cornflowerblue',\n", - " }\n", - "\n", - "fig = xana.simplePlot(dn, figsize='1col', aspect=2, \n", - " x_label = 'Noise (ADU)', \n", - " y_label=\"Counts\", \n", - " y_log=True)\n", - "\n", - "\n", - "#**************HEAT MAPS*******************#\n", - "fig = xana.heatmapPlot(offsetMap[:,:,0],\n", - " x_label='Columns', y_label='Rows',\n", - " lut_label='Offset (ADU)',\n", - " x_range=(0,y),\n", - " y_range=(0,x), vmin=3000, vmax=4500)\n", - "\n", - "fig = xana.heatmapPlot(noiseMap[:,:,0],\n", - " x_label='Columns', y_label='Rows',\n", - " lut_label='Noise (ADU)',\n", - " x_range=(0,y),\n", - " y_range=(0,x), vmax=2*np.mean(noiseMap))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T10:56:22.741284Z", - "start_time": "2018-12-06T10:56:20.688393Z" - } - }, - "outputs": [], - "source": [ - "\n", - "## offset\n", - "\n", - "metadata = ConstantMetaData()\n", - "offset = Constants.CCD(DetectorTypes.fastCCD).Offset()\n", - "offset.data = offsetMap.data\n", - "metadata.calibration_constant = offset\n", - "\n", - "# set the operating condition\n", - "condition = Conditions.Dark.CCD(bias_voltage=bias_voltage,\n", - " integration_time=integration_time,\n", - " gain_setting=det_gain,\n", - " temperature=temperature_k,\n", - " pixels_x=1934,\n", - " pixels_y=960)\n", - "for parm in condition.parameters:\n", - " if parm.name == \"Sensor Temperature\":\n", - " parm.lower_deviation = temp_limits\n", - " parm.upper_deviation = temp_limits\n", - "\n", - "device = Detectors.fastCCD1\n", - "\n", - "\n", - "metadata.detector_condition = condition\n", - "\n", - "# specify the version for this constant\n", - "if creation_time is None:\n", - " metadata.calibration_constant_version = Versions.Now(device=device)\n", - "else:\n", - " metadata.calibration_constant_version = 
Versions.Timespan(device=device, start=creation_time)\n", - "metadata.calibration_constant_version.raw_data_location = file_loc\n", - "metadata.send(cal_db_interface)\n", - "\n", - "## noise\n", - "\n", - "metadata = ConstantMetaData()\n", - "noise = Constants.CCD(DetectorTypes.fastCCD).Noise()\n", - "noise.data = noiseMap.data\n", - "metadata.calibration_constant = noise\n", - "\n", - "# set the operating condition\n", - "condition = Conditions.Dark.CCD(bias_voltage=bias_voltage,\n", - " integration_time=integration_time,\n", - " gain_setting=det_gain,\n", - " temperature=temperature_k,\n", - " pixels_x=1934,\n", - " pixels_y=960)\n", - "\n", - "for parm in condition.parameters:\n", - " if parm.name == \"Sensor Temperature\":\n", - " parm.lower_deviation = temp_limits\n", - " parm.upper_deviation = temp_limits\n", - "\n", - "\n", - "device = Detectors.fastCCD1\n", - "\n", - "\n", - "metadata.detector_condition = condition\n", - "\n", - "# specify the a version for this constant\n", - "if creation_time is None:\n", - " metadata.calibration_constant_version = Versions.Now(device=device)\n", - "else:\n", - " metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)\n", - "metadata.calibration_constant_version.raw_data_location = file_loc\n", - "metadata.send(cal_db_interface)\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from cal_tools.enums import BadPixels\n", - "bad_pixels = np.zeros(offsetMap.shape, np.uint32)\n", - "mnoffset = np.nanmedian(offsetMap)\n", - "stdoffset = np.nanstd(offsetMap)\n", - "bad_pixels[(offsetMap < mnoffset-bad_pixel_offset_sigma*stdoffset) | \n", - " (offsetMap > mnoffset+bad_pixel_offset_sigma*stdoffset)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n", - "\n", - "mnnoise = np.nanmedian(noiseMap)\n", - "stdnoise = np.nanstd(noiseMap)\n", - "bad_pixels[(noiseMap < mnnoise-bad_pixel_noise_sigma*stdnoise) | \n", - " (noiseMap > mnnoise+bad_pixel_noise_sigma*stdnoise)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n", - "\n", - "fig = xana.heatmapPlot(np.log2(bad_pixels[:,:,0]),\n", - " x_label='Columns', y_label='Rows',\n", - " lut_label='Bad Pixel Value (ADU)',\n", - " x_range=(0,y),\n", - " y_range=(0,x), vmin=0, vmax=32)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "metadata = ConstantMetaData()\n", - "badpix = Constants.CCD(DetectorTypes.fastCCD).BadPixelsDark()\n", - "badpix.data = bad_pixels.data\n", - "metadata.calibration_constant = badpix\n", - "\n", - "# set the operating condition\n", - "condition = Conditions.Dark.CCD(bias_voltage=bias_voltage,\n", - " integration_time=integration_time,\n", - " gain_setting=det_gain,\n", - " temperature=temperature_k,\n", - " pixels_x=1934,\n", - " pixels_y=960)\n", - "\n", - "for parm in condition.parameters:\n", - " if parm.name == \"Sensor Temperature\":\n", - " parm.lower_deviation = temp_limits\n", - " parm.upper_deviation = temp_limits\n", - "\n", - "\n", - "device = Detectors.fastCCD1\n", - "\n", - "\n", - "metadata.detector_condition = condition\n", - "\n", - "# specify the a version for this constant\n", - "if creation_time is None:\n", - " metadata.calibration_constant_version = Versions.Now(device=device)\n", - "else:\n", - " metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)\n", - "metadata.calibration_constant_version.raw_data_location = file_loc\n", - "metadata.send(cal_db_interface)" - ] - 
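The bad-pixel cell above flags pixels whose offset or noise deviates from the map median by more than `bad_pixel_*_sigma` standard deviations, using the `BadPixels` flags from `cal_tools.enums`. A minimal sketch of that thresholding; the flag values here are placeholders rather than the real enum, and unlike the notebook (where the noise assignment overwrites the offset flag) the sketch ORs the flags so a pixel can carry both:

```python
import numpy as np

# Placeholder bit flags standing in for cal_tools.enums.BadPixels
OFFSET_OUT_OF_THRESHOLD = 1 << 0
NOISE_OUT_OF_THRESHOLD = 1 << 1

def threshold_bad_pixels(offset_map, noise_map,
                         offset_sigma=5.0, noise_sigma=5.0):
    """Flag pixels whose offset or noise is more than n sigma from the median."""
    bad = np.zeros(offset_map.shape, np.uint32)
    med, std = np.nanmedian(offset_map), np.nanstd(offset_map)
    bad[np.abs(offset_map - med) > offset_sigma * std] |= OFFSET_OUT_OF_THRESHOLD
    med, std = np.nanmedian(noise_map), np.nanstd(noise_map)
    bad[np.abs(noise_map - med) > noise_sigma * std] |= NOISE_OUT_OF_THRESHOLD
    return bad

# Illustrative maps with a planted outlier
rng = np.random.default_rng(2)
offset_map = rng.normal(3800, 40, (128, 96))
noise_map = rng.normal(6, 0.5, (128, 96))
offset_map[0, 0] = 10000  # obvious outlier
print(np.count_nonzero(threshold_bad_pixels(offset_map, noise_map)))
```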
}, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "histCalCorr = xcal.HistogramCalculator(sensorSize, bins=200, \n", - " range=[-200, 200], parallel=False, \n", - " memoryCells=memoryCells, \n", - " cores=cpuCores, blockSize=blockSize)\n", - "\n", - "\n", - "for data in reader.readChunks():\n", - " data = np.bitwise_and(data.astype(np.uint16), 0b0011111111111111).astype(np.float32)\n", - " data -= offsetMap.data\n", - " histCalCorr.fill(data)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ho,eo,co,so = histCalCorr.get()\n", - "\n", - "\n", - "d = [{'x': co,\n", - " 'y': ho,\n", - " 'y_err': np.sqrt(ho[:]),\n", - " 'drawstyle': 'steps-mid',\n", - " 'errorstyle': 'bars',\n", - " 'errorcoarsing': 2,\n", - " 'label': 'Offset corr.'\n", - " },\n", - " \n", - " ]\n", - " \n", - "\n", - "fig = xana.simplePlot(d, aspect=1, x_label='Energy(ADU)', \n", - " y_label='Number of occurrences', figsize='2col',\n", - " y_log=True, x_range=(-50,500),\n", - " legend='top-center-frame-2col')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.7" - }, - "latex_envs": { - "LaTeX_envs_menu_present": true, - "autocomplete": true, - "bibliofile": "biblio.bib", - "cite_by": "apalike", - "current_citInitial": 1, - "eqLabelWithNumbers": true, - "eqNumInitial": 1, - "hotkeys": { - "equation": "Ctrl-E", - "itemize": "Ctrl-I" - }, - "labels_anchors": false, - "latex_user_defs": false, - "report_style_numbering": false, - "user_envs_cfg": false - } - }, - "nbformat": 4, - "nbformat_minor": 1 -} diff --git a/notebooks/FastCCD/PlotFromCalDB_FastCCD_NBC.ipynb b/notebooks/FastCCD/PlotFromCalDB_FastCCD_NBC.ipynb deleted file mode 100644 index 908d75feac24609e6aa28157f92efa219b076dba..0000000000000000000000000000000000000000 --- a/notebooks/FastCCD/PlotFromCalDB_FastCCD_NBC.ipynb +++ /dev/null @@ -1,505 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Statistical analysis of calibration factors#\n", - "\n", - "Author: Mikhail Karnevskiy, Steffen Hauf, Version 0.1\n", - "\n", - "A description of the notebook." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "cluster_profile = \"noDB\" # The ipcluster profile to use\n", - "start_date = \"2019-01-30\" # date to start investigation interval from\n", - "end_date = \"2019-08-30\" # date to end investigation interval at, can be \"now\"\n", - "nconstants = 10 # Number of time stamps to plot. 
If not 0, overcome start_date.\n", - "dclass=\"CCD\" # Detector class\n", - "db_module = \"fastCCD1\" # detector entry in the DB to investigate\n", - "constants = [\"Noise\", \"Offset\"] # constants to plot\n", - "\n", - "gain_setting = [0,1,2,8] # gain stages\n", - "bias_voltage = [79] # Bias voltage\n", - "temperature = [235, 216, 245] # Operation temperature\n", - "integration_time = [1, 50] # Integration time\n", - "pixels_x=[1934] # number of pixels along X axis\n", - "pixels_y=[960] # number of pixels along Y axis\n", - "max_time = 15 # max time margin in minutes to match bad pixels\n", - "parameter_names = ['bias_voltage', 'integration_time', 'temperature', \n", - " 'gain_setting', 'pixels_x', 'pixels_y'] # names of parameters\n", - "\n", - "separate_plot = ['integration_time', 'gain_setting', 'temperature'] # Plot on separate plots\n", - "photon_energy = 9.2 # Photon energy of the beam\n", - "out_folder = \"/gpfs/exfel/data/scratch/karnem/test_FCCD/\" # output folder\n", - "use_existing = \"\" # If not empty, constants stored in given folder will be used\n", - "cal_db_interface = \"tcp://max-exfl016:8015#8025\" # the database interface to use\n", - "cal_db_timeout = 180000 # timeout on caldb requests\",\n", - "plot_range = 3 # range for plotting in units of median absolute deviations" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "import copy\n", - "import datetime\n", - "import dateutil.parser\n", - "import numpy as np\n", - "from operator import itemgetter\n", - "import os\n", - "import sys\n", - "import warnings\n", - "warnings.filterwarnings('ignore')\n", - "\n", - "from iCalibrationDB import Constants, Conditions, Detectors, ConstantMetaData\n", - "from cal_tools.tools import get_from_db\n", - "from cal_tools.ana_tools import (save_dict_to_hdf5, load_data_from_hdf5, \n", - " HMType, hm_combine,\n", - " combine_lists, get_range)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Prepare variables\n", - "spShape = (967, 10) # Shape of superpixel\n", - "\n", - "parameters = [globals()[x] for x in parameter_names]\n", - "\n", - "constantsDark = {'Noise': 'BadPixelsDark',\n", - " 'Offset': 'BadPixelsDark'}\n", - "print('Bad pixels data: ', constantsDark)\n", - "\n", - "# Define parameters in order to perform loop over time stamps\n", - "start = datetime.datetime.now() if start_date.upper() == \"NOW\" else dateutil.parser.parse(\n", - " start_date)\n", - "end = datetime.datetime.now() if end_date.upper() == \"NOW\" else dateutil.parser.parse(\n", - " end_date)\n", - "\n", - "# Create output folder\n", - "os.makedirs(out_folder, exist_ok=True)\n", - "\n", - "# Get getector conditions\n", - "det = getattr(Detectors, db_module)\n", - "dconstants = getattr(Constants, dclass)(det.detector_type)\n", - "\n", - "print('CalDB Interface: {}'.format(cal_db_interface))\n", - "print('Start time at: ', start)\n", - "print('End time at: ', end)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "parameter_list = combine_lists(*parameters, names = parameter_names)\n", - "print(parameter_list)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": false - }, - "outputs": [], - "source": [ - "# Retrieve list of meta-data\n", - "constant_versions = []\n", - "constant_parameters = []\n", - "constantBP_versions = []\n", - "\n", - "# Loop over 
constants\n", - "for c, const in enumerate(constants):\n", - " \n", - " if use_existing != \"\":\n", - " break\n", - " \n", - " # Loop over parameters\n", - " for pars in parameter_list:\n", - " \n", - " if (const in [\"Offset\", \"Noise\", \"SlopesPC\"] or \"DARK\" in const.upper()):\n", - " dcond = Conditions.Dark\n", - " mcond = getattr(dcond, dclass)(**pars)\n", - " else:\n", - " dcond = Conditions.Illuminated\n", - " mcond = getattr(dcond, dclass)(**pars,\n", - " photon_energy=photon_energy)\n", - "\n", - " \n", - " \n", - " print('Request: ', const, 'with paramters:', pars)\n", - " # Request Constant versions for given parameters and module\n", - " data = get_from_db(det,\n", - " getattr(dconstants,\n", - " const)(),\n", - " copy.deepcopy(mcond), None,\n", - " cal_db_interface,\n", - " creation_time=start,\n", - " verbosity=0,\n", - " timeout=cal_db_timeout,\n", - " meta_only=True,\n", - " version_info=True)\n", - " \n", - " if not isinstance(data, list):\n", - " continue\n", - " \n", - " data = sorted(data, key=itemgetter('begin_at'), reverse=True)\n", - " print('Number of retrieved constants: {}'.format(len(data)) )\n", - " \n", - " if const in constantsDark:\n", - " # Request BP constant versions\n", - " dataBP = get_from_db(det,\n", - " getattr(dconstants, \n", - " constantsDark[const])(),\n", - " copy.deepcopy(mcond), None,\n", - " cal_db_interface,\n", - " creation_time=start,\n", - " verbosity=0,\n", - " timeout=cal_db_timeout,\n", - " meta_only=True,\n", - " version_info=True)\n", - " \n", - " if not isinstance(data, list) or not isinstance(dataBP, list):\n", - " continue\n", - " print('Number of retrieved darks: {}'.format(len(dataBP)) )\n", - " found_BPmatch = False\n", - " for d in data:\n", - " # Match proper BP constant version\n", - " # and get constant version within\n", - " # requested time range\n", - " if d is None:\n", - " print('Time or data is not found!')\n", - " continue\n", - "\n", - " dt = dateutil.parser.parse(d['begin_at'])\n", - "\n", - " if (dt.replace(tzinfo=None) > end or \n", - " (nconstants==0 and dt.replace(tzinfo=None) < start)):\n", - " continue\n", - " \n", - " if nconstants>0 and constant_parameters.count(pars)>nconstants-1:\n", - " break\n", - "\n", - " closest_BP = None\n", - " closest_BPtime = None\n", - "\n", - " for dBP in dataBP:\n", - " if dBP is None:\n", - " print(\"Bad pixels are not found!\")\n", - " continue\n", - "\n", - " dt = dateutil.parser.parse(d['begin_at'])\n", - " dBPt = dateutil.parser.parse(dBP['begin_at'])\n", - "\n", - " if dt == dBPt:\n", - " found_BPmatch = True\n", - " else:\n", - "\n", - " if np.abs(dBPt-dt).seconds < (max_time*60):\n", - " if closest_BP is None:\n", - " closest_BP = dBP\n", - " closest_BPtime = dBPt\n", - " else:\n", - " if np.abs(dBPt-dt) < np.abs(closest_BPtime-dt):\n", - " closest_BP = dBP\n", - " closest_BPtime = dBPt\n", - "\n", - " if dataBP.index(dBP) == len(dataBP)-1:\n", - " if closest_BP:\n", - " dBP = closest_BP\n", - " dBPt = closest_BPtime\n", - " found_BPmatch = True\n", - " else:\n", - " print('Bad pixels are not found!')\n", - "\n", - " if found_BPmatch:\n", - " print(\"Found constant {}: begin at {}\".format(const, dt))\n", - " print(\"Found bad pixels at {}\".format(dBPt))\n", - " constantBP_versions.append(dBP)\n", - " constant_versions.append(d)\n", - " constant_parameters.append(copy.deepcopy(pars))\n", - " found_BPmatch = False\n", - " break\n", - " else:\n", - " constant_versions += data\n", - " constant_parameters += [copy.deepcopy(pars)]*len(data)\n", - "\n", - "# Remove 
dublications\n", - "constant_versions_tmp = []\n", - "constant_parameters_tmp = []\n", - "constantBP_versions_tmp = []\n", - "for i, x in enumerate(constant_versions):\n", - " if x not in constant_versions_tmp:\n", - " constant_versions_tmp.append(x)\n", - " constant_parameters_tmp.append(constant_parameters[i])\n", - " if i<len(constantBP_versions)-1:\n", - " constantBP_versions_tmp.append(constantBP_versions[i])\n", - "constant_versions=constant_versions_tmp\n", - "constantBP_versions=constantBP_versions_tmp\n", - "constant_parameters=constant_parameters_tmp\n", - "\n", - "print('Number of stored constant versions is {}'.format(len(constant_versions)))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "def get_rebined(a, rebin):\n", - " return a[:,:,0].reshape(\n", - " int(a.shape[0] / rebin[0]),\n", - " rebin[0],\n", - " int(a.shape[1] / rebin[1]),\n", - " rebin[1])\n", - " \n", - "def modify_const(const, data, isBP = False):\n", - " return data\n", - "\n", - "ret_constants = {}\n", - "constant_data = ConstantMetaData()\n", - "constant_BP = ConstantMetaData()\n", - "for i, constant_version in enumerate(constant_versions):\n", - "\n", - " const = constant_version['data_set_name'].split('/')[-2]\n", - " qm = db_module\n", - " \n", - " print(\"constant: {}, module {}\".format(const,qm))\n", - " constant_data.retrieve_from_version_info(constant_version)\n", - " \n", - " for key in separate_plot:\n", - " const = '{}_{}'.format(const, constant_parameters[i][key])\n", - " \n", - " if not const in ret_constants:\n", - " ret_constants[const] = {}\n", - " if not qm in ret_constants[const]:\n", - " ret_constants[const][qm] = []\n", - " \n", - " cdata = constant_data.calibration_constant.data\n", - " ctime = constant_data.calibration_constant_version.begin_at\n", - " \n", - " cdata = modify_const(const, cdata)\n", - " \n", - " if len(constantBP_versions)>0:\n", - " constant_BP.retrieve_from_version_info(constantBP_versions[i])\n", - " cdataBP = constant_BP.calibration_constant.data\n", - " cdataBP = modify_const(const, cdataBP, True)\n", - " \n", - " if cdataBP.shape != cdata.shape:\n", - " print('Wrong bad pixel shape! 
{}, expected {}'.format(cdataBP.shape, cdata.shape))\n", - " continue\n", - " \n", - " # Apply bad pixel mask\n", - " cdataABP = np.copy(cdata)\n", - " cdataABP[cdataBP > 0] = np.nan\n", - " \n", - " # Create superpixels for constants with BP applied\n", - " cdataABP = get_rebined(cdataABP, spShape)\n", - " toStoreBP = np.nanmean(cdataABP, axis=(1, 3))\n", - " toStoreBPStd = np.nanstd(cdataABP, axis=(1, 3))\n", - "\n", - " # Prepare number of bad pixels per superpixels\n", - " cdataBP = get_rebined(cdataBP, spShape)\n", - " cdataNBP = np.nansum(cdataBP > 0, axis=(1, 3))\n", - " else:\n", - " toStoreBP = 0\n", - " toStoreBPStd = 0\n", - " cdataNBP = 0\n", - "\n", - " # Create superpixels for constants without BP applied\n", - " cdata = get_rebined(cdata, spShape)\n", - " toStoreStd = np.nanstd(cdata, axis=(1, 3))\n", - " toStore = np.nanmean(cdata, axis=(1, 3))\n", - " \n", - " # Convert parameters to dict\n", - " dpar = {p.name: p.value for p in constant_data.detector_condition.parameters}\n", - " \n", - " print(\"Store values in dict\", const, qm, ctime)\n", - " ret_constants[const][qm].append({'ctime': ctime,\n", - " 'nBP': cdataNBP,\n", - " 'dataBP': toStoreBP,\n", - " 'dataBPStd': toStoreBPStd,\n", - " 'data': toStore,\n", - " 'dataStd': toStoreStd,\n", - " 'mdata': dpar}) \n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "if use_existing == \"\":\n", - " print('Save data to {}/CalDBAna_{}_{}.h5'.format(out_folder, dclass, db_module))\n", - " save_dict_to_hdf5(ret_constants,\n", - " '{}/CalDBAna_{}_{}.h5'.format(out_folder, dclass, db_module))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "if use_existing == \"\":\n", - " fpath = '{}/CalDBAna_{}_*.h5'.format(out_folder, dclass)\n", - "else:\n", - " fpath = '{}/CalDBAna_{}_*.h5'.format(use_existing, dclass)\n", - "\n", - "print('Load data from {}'.format(fpath))\n", - "ret_constants = load_data_from_hdf5(fpath)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Parameters for plotting\n", - "\n", - "keys = {\n", - " 'Mean': ['data', '', 'Mean over pixels'],\n", - " 'std': ['dataStd', '', '$\\sigma$ over pixels'],\n", - " 'MeanBP': ['dataBP', 'Good pixels only', 'Mean over pixels'],\n", - " 'NBP': ['nBP', 'Fraction of BP', 'Fraction of BP'],\n", - " 'stdBP': ['dataBPStd', 'Good pixels only', '$\\sigma$ over pixels'],\n", - " 'stdASIC': ['', '', '$\\sigma$ over ASICs'],\n", - " 'stdCell': ['', '', '$\\sigma$ over Cells'],\n", - "}\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "print('Plot calibration constants')\n", - "\n", - "# loop over constat type\n", - "for const, modules in ret_constants.items():\n", - "\n", - " const = const.split(\"_\")\n", - " print('Const: {}'.format(const))\n", - "\n", - " # Loop over modules\n", - " for mod, data in modules.items():\n", - " print(mod)\n", - "\n", - " ctimes = np.array(data[\"ctime\"])\n", - " ctimes_ticks = [x.strftime('%y-%m-%d') for x in ctimes]\n", - "\n", - " if (\"mdata\" in data):\n", - " cmdata = np.array(data[\"mdata\"])\n", - " for i, tick in enumerate(ctimes_ticks):\n", - " ctimes_ticks[i] = ctimes_ticks[i] + \\\n", - " ', V={:1.0f}'.format(cmdata[i]['Sensor Temperature']) + \\\n", - " ', T={:1.0f}'.format(\n", - " cmdata[i]['Integration Time'])\n", - "\n", - " sort_ind = 
np.argsort(ctimes_ticks)\n", - " ctimes_ticks = list(np.array(ctimes_ticks)[sort_ind])\n", - "\n", - " # Create sorted by data dataset\n", - " rdata = {}\n", - " for key, item in keys.items():\n", - " if item[0] in data:\n", - " rdata[key] = np.array(data[item[0]])[sort_ind]\n", - "\n", - " nTimes = rdata['Mean'].shape[0]\n", - " nPixels = rdata['Mean'].shape[1] * rdata['Mean'].shape[2]\n", - " nBins = nPixels\n", - "\n", - " # Avoid too low values\n", - " if const[0] in [\"Noise\", \"Offset\"]:\n", - " rdata['Mean'][rdata['Mean'] < 0.1] = np.nan\n", - " if 'MeanBP' in rdata:\n", - " rdata['MeanBP'][rdata['MeanBP'] < 0.1] = np.nan\n", - " \n", - " if 'NBP' in rdata:\n", - " rdata['NBP'] = rdata['NBP'].astype(float)\n", - " rdata[\"NBP\"][rdata[\"NBP\"] == (spShape[0] * spShape[1])] = np.nan\n", - " rdata[\"NBP\"] = rdata[\"NBP\"] / spShape[0] / spShape[1] * 100\n", - "\n", - " # Reshape: ASICs over cells for plotting\n", - " pdata = {}\n", - " for key in rdata:\n", - " if len(rdata[key].shape)<3:\n", - " continue\n", - " pdata[key] = rdata[key][:, :, :].reshape(nTimes, nBins).swapaxes(0, 1)\n", - "\n", - " # Plotting\n", - " for key in pdata:\n", - " if len(pdata[key].shape)<2:\n", - " continue\n", - "\n", - " if key == 'NBP':\n", - " unit = '[%]'\n", - " else:\n", - " unit = '[ADU]'\n", - "\n", - " title = '{}, module {}, {}'.format(\n", - " const[0], mod, keys[key][1])\n", - " cb_label = '{}, {} {}'.format(const[0], keys[key][2], unit)\n", - "\n", - " fname = '{}/{}_{}'.format(out_folder, const[0], mod.replace('_', ''))\n", - " for item in const[1:]:\n", - " fname = '{}_{}'.format(fname, item)\n", - " fname = '{}_ASIC_{}.png'.format(fname, key)\n", - " \n", - " vmin,vmax = get_range(pdata[key][::-1].flatten(), plot_range)\n", - " hm_combine(pdata[key][::-1], htype=HMType.mro,\n", - " x_label='Creation Time', y_label='ASIC ID',\n", - " x_ticklabels=ctimes_ticks,\n", - " x_ticks=np.arange(len(ctimes_ticks))+0.3,\n", - " title=title, cb_label=cb_label,\n", - " vmin=vmin, vmax=vmax,\n", - " fname=fname,\n", - " pad=[0.125, 0.125, 0.12, 0.185])\n" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.7" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/notebooks/Jungfrau/PlotFromCalDB_Jungfrau_NBC.ipynb b/notebooks/Jungfrau/PlotFromCalDB_Jungfrau_NBC.ipynb deleted file mode 100644 index 1b428701c5d70bdf46a1e64b2a90444e0352fba2..0000000000000000000000000000000000000000 --- a/notebooks/Jungfrau/PlotFromCalDB_Jungfrau_NBC.ipynb +++ /dev/null @@ -1,574 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Statistical analysis of calibration factors#\n", - "\n", - "Author: Mikhail Karnevskiy, Steffen Hauf, Version 0.1\n", - "\n", - "Calibration constants for JungFrau detector from the data base with injection time between start_date and end_date are considered.\n", - "\n", - "To be visualized, calibration constants are averaged per group of pixels. Plots shows calibration constant over time for each constant.\n", - "\n", - "Values shown in plots are saved in h5 files." 
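The introduction above states that the Jungfrau constants are averaged per group of pixels before plotting; all three notebooks implement this with a reshape-then-reduce `get_rebined` helper. A standalone sketch of that superpixel statistic for a 2-D constant map, assuming the map divides evenly into the superpixel shape (here the Jungfrau `spShape = [256, 64]`); the data are synthetic:

```python
import numpy as np

def superpixel_stats(cdata, sp_shape):
    """Mean and std of a 2-D map over non-overlapping superpixels of sp_shape."""
    ny, nx = cdata.shape
    sy, sx = sp_shape
    blocks = cdata.reshape(ny // sy, sy, nx // sx, sx)
    return np.nanmean(blocks, axis=(1, 3)), np.nanstd(blocks, axis=(1, 3))

# One 512 x 1024 module reduced over 256 x 64 superpixels -> a 2 x 16 grid
rng = np.random.default_rng(3)
const_map = rng.normal(100, 5, (512, 1024))
mean_sp, std_sp = superpixel_stats(const_map, (256, 64))
print(mean_sp.shape)  # (2, 16)
```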
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "cluster_profile = \"noDB\" # The ipcluster profile to use\n", - "start_date = \"2019-06-30\" # date to start investigation interval from\n", - "end_date = \"2019-09-01\" # date to end investigation interval at, can be \"now\"\n", - "dclass=\"jungfrau\" # Detector class\n", - "modules = [\"Jungfrau_M125\", \"Jungfrau_M260\"] # detector entry in the DB to investigate\n", - "constants = [\"Noise\", \"Offset\"] # constants to plot\n", - "nconstants = 10 # Number of time stamps to plot. If not 0, overcome start_date.\n", - "bias_voltage = [90, 180] # bias voltage\n", - "memory_cells = [1] # number of memory cells\n", - "pixels_x = [1024] # number of pixels along X axis\n", - "pixels_y = [512, 1024] # number of pixels along Y axis\n", - "temperature = [291] # operational temperature\n", - "integration_time = [50, 250] # integration time\n", - "gain_setting = [0] # gain stage\n", - "\n", - "parameter_names = ['bias_voltage', 'integration_time', 'pixels_x', 'pixels_y', 'gain_setting',\n", - " 'temperature', 'memory_cells'] # names of parameters\n", - "\n", - "separate_plot = ['integration_time'] # Plot on separate plots\n", - "max_time = 15 # max time margin in minutes to match bad pixels\n", - "photon_energy = 9.2 # Photon energy of the beam\n", - "out_folder = \"/gpfs/exfel/data/scratch/karnem/test_JF/\" # output folder\n", - "use_existing = \"\" # If not empty, constants stored in given folder will be used\n", - "cal_db_interface = \"tcp://max-exfl016:8016\" # the database interface to use\n", - "cal_db_timeout = 180000 # timeout on caldb requests\",\n", - "plot_range = 3 # range for plotting in units of median absolute deviations\n", - "spShape = [256, 64] # Shape of superpixel" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "import copy\n", - "import datetime\n", - "import dateutil.parser\n", - "import numpy as np\n", - "import os\n", - "import sys\n", - "import warnings\n", - "warnings.filterwarnings('ignore')\n", - "\n", - "import matplotlib.pyplot as plt\n", - "%matplotlib inline\n", - "\n", - "from iCalibrationDB import Constants, Conditions, Detectors, ConstantMetaData\n", - "from cal_tools.tools import get_from_db, get_random_db_interface\n", - "from cal_tools.ana_tools import (save_dict_to_hdf5, load_data_from_hdf5, \n", - " HMType, hm_combine,\n", - " combine_lists, get_range)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Prepare variables\n", - "parameters = [globals()[x] for x in parameter_names]\n", - "\n", - "constantsDark = {'Noise': 'BadPixelsDark',\n", - " 'Offset': 'BadPixelsDark'}\n", - "print('Bad pixels data: ', constantsDark)\n", - "\n", - "# Define parameters in order to perform loop over time stamps\n", - "start = datetime.datetime.now() if start_date.upper() == \"NOW\" else dateutil.parser.parse(\n", - " start_date)\n", - "end = datetime.datetime.now() if end_date.upper() == \"NOW\" else dateutil.parser.parse(\n", - " end_date)\n", - "\n", - "# Create output folder\n", - "os.makedirs(out_folder, exist_ok=True)\n", - "\n", - "# Get getector conditions\n", - "dconstants = getattr(Constants, dclass)\n", - "\n", - "print('CalDB Interface: {}'.format(cal_db_interface))\n", - "print('Start time at: ', start)\n", - "print('End time at: ', end)" - ] - }, - { - "cell_type": "code", - "execution_count": null, 
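Editor's note — `combine_lists` is imported from `cal_tools.ana_tools` and its implementation is not part of this diff. The sketch below only illustrates the behaviour the notebooks appear to rely on (a Cartesian product of the parameter lists, returned as one dict per combination); it is deliberately named `combine_lists_sketch` to make clear it is not the real function.

```python
# Hypothetical stand-in for cal_tools.ana_tools.combine_lists (assumed behaviour).
import itertools

def combine_lists_sketch(*lists, names):
    # One dict per combination of the input lists, keyed by `names`.
    return [dict(zip(names, combo)) for combo in itertools.product(*lists)]

# Example:
# combine_lists_sketch([90, 180], [1], names=['bias_voltage', 'memory_cells'])
# -> [{'bias_voltage': 90, 'memory_cells': 1},
#     {'bias_voltage': 180, 'memory_cells': 1}]
```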
- "metadata": {}, - "outputs": [], - "source": [ - "parameter_list = combine_lists(*parameters, names = parameter_names)\n", - "print(parameter_list)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": false - }, - "outputs": [], - "source": [ - "# Retrieve list of meta-data\n", - "constant_versions = []\n", - "constant_parameters = []\n", - "constantBP_versions = []\n", - "\n", - "# Loop over constants\n", - "for c, const in enumerate(constants):\n", - " \n", - " for db_module in modules:\n", - " det = getattr(Detectors, db_module)\n", - " \n", - " if use_existing != \"\":\n", - " break\n", - "\n", - " # Loop over parameters\n", - " for pars in parameter_list:\n", - "\n", - " if (const in [\"Offset\", \"Noise\", \"SlopesPC\"] or \"DARK\" in const.upper()):\n", - " dcond = Conditions.Dark\n", - " mcond = getattr(dcond, dclass)(**pars)\n", - " else:\n", - " dcond = Conditions.Illuminated\n", - " mcond = getattr(dcond, dclass)(**pars,\n", - " photon_energy=photon_energy)\n", - "\n", - "\n", - "\n", - " print('Request: ', const, 'with paramters:', pars)\n", - " # Request Constant versions for given parameters and module\n", - " data = get_from_db(det,\n", - " getattr(dconstants,\n", - " const)(),\n", - " copy.deepcopy(mcond), None,\n", - " cal_db_interface,\n", - " creation_time=start,\n", - " verbosity=0,\n", - " timeout=cal_db_timeout,\n", - " meta_only=True,\n", - " version_info=True)\n", - "\n", - " if not isinstance(data, list):\n", - " continue\n", - "\n", - " if const in constantsDark:\n", - " # Request BP constant versions\n", - " print('constantDark:', constantsDark[const], ) \n", - " dataBP = get_from_db(det,\n", - " getattr(dconstants, \n", - " constantsDark[const])(),\n", - " copy.deepcopy(mcond), None,\n", - " cal_db_interface,\n", - " creation_time=start,\n", - " verbosity=0,\n", - " timeout=cal_db_timeout,\n", - " meta_only=True,\n", - " version_info=True)\n", - "\n", - " if not isinstance(data, list) or not isinstance(dataBP, list):\n", - " continue\n", - "\n", - " found_BPmatch = False\n", - " for d in data:\n", - " # Match proper BP constant version\n", - " # and get constant version within\n", - " # requested time range\n", - " if d is None:\n", - " print('Time or data is not found!')\n", - " continue\n", - "\n", - " dt = dateutil.parser.parse(d['begin_at'])\n", - "\n", - " if dt.replace(tzinfo=None) > end or dt.replace(tzinfo=None) < start:\n", - " continue\n", - "\n", - " closest_BP = None\n", - " closest_BPtime = None\n", - "\n", - " for dBP in dataBP:\n", - " if dBP is None:\n", - " print(\"Bad pixels are not found!\")\n", - " continue\n", - "\n", - " dt = dateutil.parser.parse(d['begin_at'])\n", - " dBPt = dateutil.parser.parse(dBP['begin_at'])\n", - "\n", - " if dt == dBPt:\n", - " found_BPmatch = True\n", - " else:\n", - "\n", - " if np.abs(dBPt-dt).seconds < (max_time*60):\n", - " if closest_BP is None:\n", - " closest_BP = dBP\n", - " closest_BPtime = dBPt\n", - " else:\n", - " if np.abs(dBPt-dt) < np.abs(closest_BPtime-dt):\n", - " closest_BP = dBP\n", - " closest_BPtime = dBPt\n", - "\n", - " if dataBP.index(dBP) == len(dataBP)-1:\n", - " if closest_BP:\n", - " dBP = closest_BP\n", - " dBPt = closest_BPtime\n", - " found_BPmatch = True\n", - " else:\n", - " print('Bad pixels are not found!')\n", - "\n", - " if found_BPmatch:\n", - " print(\"Found constant {}: begin at {}\".format(const, dt))\n", - " print(\"Found bad pixels at {}\".format(dBPt))\n", - " constantBP_versions.append(dBP)\n", - " 
constant_versions.append(d)\n", - " constant_parameters.append(copy.deepcopy(pars))\n", - " found_BPmatch = False\n", - " break\n", - " else:\n", - " constant_versions += data\n", - " constant_parameters += [copy.deepcopy(pars)]*len(data)\n", - "\n", - "# Remove dublications\n", - "constant_versions_tmp = []\n", - "constant_parameters_tmp = []\n", - "for i, x in enumerate(constant_versions):\n", - " if x not in constant_versions_tmp:\n", - " constant_versions_tmp.append(x)\n", - " constant_parameters_tmp.append(constant_parameters[i])\n", - " \n", - "constant_versions=constant_versions_tmp\n", - "constant_parameters=constant_parameters_tmp\n", - "\n", - "print('Number of stored constant versions is {}'.format(len(constant_versions)))\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "def get_rebined(a, rebin):\n", - " return a.reshape(\n", - " int(a.shape[0] / rebin[0]),\n", - " rebin[0],\n", - " int(a.shape[1] / rebin[1]),\n", - " rebin[1],\n", - " a.shape[2],\n", - " a.shape[3])\n", - "\n", - "def modify_const(const, data, isBP = False):\n", - " return data\n", - "\n", - "ret_constants = {}\n", - "constant_data = ConstantMetaData()\n", - "constant_BP = ConstantMetaData()\n", - "\n", - "# sort over begin_at\n", - "idxs, _ = zip(*sorted(enumerate(constant_versions), \n", - " key=lambda x: x[1]['begin_at'], reverse=True))\n", - "\n", - "for i in idxs:\n", - " const = constant_versions[i]['data_set_name'].split('/')[-2]\n", - " qm = constant_versions[i]['physical_device']['name']\n", - " \n", - " for key in separate_plot:\n", - " const = '{}_{}'.format(const, constant_parameters[i][key])\n", - " \n", - " if not const in ret_constants:\n", - " ret_constants[const] = {}\n", - " if not qm in ret_constants[const]:\n", - " ret_constants[const][qm] = []\n", - " \n", - " if nconstants>0 and len(ret_constants[const][qm])>=nconstants:\n", - " continue\n", - " \n", - " print(\"constant: {}, module {}\".format(const,qm))\n", - " constant_data.retrieve_from_version_info(constant_versions[i])\n", - " \n", - " cdata = constant_data.calibration_constant.data\n", - " ctime = constant_data.calibration_constant_version.begin_at\n", - " cdata = modify_const(const, cdata)\n", - " \n", - " if len(constantBP_versions)>0:\n", - " constant_BP.retrieve_from_version_info(constantBP_versions[i])\n", - " cdataBP = constant_BP.calibration_constant.data\n", - " cdataBP = modify_const(const, cdataBP, True)\n", - " \n", - " if cdataBP.shape != cdata.shape:\n", - " print('Wrong bad pixel shape! 
{}, expected {}'.format(cdataBP.shape, cdata.shape))\n", - " continue\n", - " \n", - " # Apply bad pixel mask\n", - " cdataABP = np.copy(cdata)\n", - " cdataABP[cdataBP > 0] = np.nan\n", - " \n", - " # Create superpixels for constants with BP applied\n", - " cdataABP = get_rebined(cdataABP, spShape)\n", - " toStoreBP = np.nanmean(cdataABP, axis=(1, 3))\n", - " toStoreBPStd = np.nanstd(cdataABP, axis=(1, 3))\n", - "\n", - " # Prepare number of bad pixels per superpixels\n", - " cdataBP = get_rebined(cdataBP, spShape)\n", - " cdataNBP = np.nansum(cdataBP > 0, axis=(1, 3))\n", - " else:\n", - " toStoreBP = 0\n", - " toStoreBPStd = 0\n", - " cdataNBP = 0\n", - "\n", - " # Create superpixels for constants without BP applied\n", - " cdata = get_rebined(cdata, spShape)\n", - " toStoreStd = np.nanstd(cdata, axis=(1, 3))\n", - " toStore = np.nanmean(cdata, axis=(1, 3))\n", - " \n", - " # Convert parameters to dict\n", - " dpar = {p.name: p.value for p in constant_data.detector_condition.parameters}\n", - " \n", - " print(\"Store values in dict\", const, qm, ctime)\n", - " ret_constants[const][qm].append({'ctime': ctime,\n", - " 'nBP': cdataNBP,\n", - " 'dataBP': toStoreBP,\n", - " 'dataBPStd': toStoreBPStd,\n", - " 'data': toStore,\n", - " 'dataStd': toStoreStd,\n", - " 'mdata': dpar}) \n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "if use_existing == \"\":\n", - " print('Save data to /CalDBAna_{}_{}.h5'.format(dclass, db_module))\n", - " save_dict_to_hdf5(ret_constants,\n", - " '{}/CalDBAna_{}_{}.h5'.format(out_folder, dclass, db_module))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "if use_existing == \"\":\n", - " fpath = '{}/CalDBAna_{}_*.h5'.format(out_folder, dclass)\n", - "else:\n", - " fpath = '{}/CalDBAna_{}_*.h5'.format(use_existing, dclass)\n", - "\n", - "print('Load data from {}'.format(fpath))\n", - "ret_constants = load_data_from_hdf5(fpath)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Parameters for plotting\n", - "\n", - "keys = {\n", - " 'Mean': ['data', '', 'Mean over pixels'],\n", - " 'std': ['dataStd', '', '$\\sigma$ over pixels'],\n", - " 'MeanBP': ['dataBP', 'Good pixels only', 'Mean over pixels'],\n", - " 'NBP': ['nBP', 'Fraction of BP', 'Number of BP'],\n", - " 'stdBP': ['dataBPStd', 'Good pixels only', '$\\sigma$ over pixels'],\n", - "}\n", - "\n", - "gain_name = ['High', 'Medium', 'Low']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "print('Plot calibration constants')\n", - "\n", - "# loop over constat type\n", - "for const, modules in ret_constants.items():\n", - " \n", - " const = const.split(\"_\")\n", - " for gain in range(3):\n", - "\n", - " print('Const: {}'.format(const))\n", - "\n", - " # summary over modules\n", - " mod_data = {}\n", - " mod_names = []\n", - " mod_times = []\n", - " \n", - " # Loop over modules\n", - " for mod, data in modules.items():\n", - " print(mod)\n", - "\n", - " ctimes = np.array(data[\"ctime\"])\n", - " ctimes_ticks = [x.strftime('%y-%m-%d') for x in ctimes]\n", - "\n", - " if (\"mdata\" in data):\n", - " cmdata = np.array(data[\"mdata\"])\n", - " for i, tick in enumerate(ctimes_ticks):\n", - " ctimes_ticks[i] = ctimes_ticks[i] + \\\n", - " ', V={:1.0f}'.format(cmdata[i]['Sensor Temperature']) + \\\n", - " ', 
T={:1.0f}'.format(\n", - " cmdata[i]['Integration Time'])\n", - "\n", - " sort_ind = np.argsort(ctimes_ticks)\n", - " ctimes_ticks = list(np.array(ctimes_ticks)[sort_ind])\n", - "\n", - " # Create sorted by data dataset\n", - " rdata = {}\n", - " for key, item in keys.items():\n", - " if item[0] in data:\n", - " rdata[key] = np.array(data[item[0]])[sort_ind]\n", - "\n", - " nTimes = rdata['Mean'].shape[0]\n", - " nPixels = rdata['Mean'].shape[1] * rdata['Mean'].shape[2]\n", - " nBins = nPixels\n", - " \n", - " # Select gain\n", - " if const[0] not in [\"Gain\", \"Noise-e\"]:\n", - " for key in rdata:\n", - " if len(rdata[key].shape)<5:\n", - " continue\n", - " rdata[key] = rdata[key][..., 0, gain]\n", - "\n", - " # Avoid to low values\n", - " if const[0] in [\"Noise10Hz\", \"Offset10Hz\"]:\n", - " rdata['Mean'][rdata['Mean'] < 0.1] = np.nan\n", - " if 'MeanBP' in rdata:\n", - " rdata['MeanBP'][rdata['MeanBP'] < 0.1] = np.nan\n", - " if 'NBP' in rdata:\n", - " rdata['NBP'] = rdata['NBP'].astype(float)\n", - " rdata['NBP'][rdata['NBP'] == spShape[0]*spShape[1]] = np.nan\n", - "\n", - " # Reshape: ASICs over cells for plotting\n", - " pdata = {}\n", - " for key in rdata:\n", - " if len(rdata[key].shape)<3:\n", - " continue\n", - " pdata[key] = rdata[key].reshape(nTimes, nBins).swapaxes(0, 1)\n", - "\n", - " # Summary over ASICs\n", - " adata = {}\n", - " for key in rdata:\n", - " if len(rdata[key].shape)<3:\n", - " continue\n", - " adata[key] = np.nansum(rdata[key], axis=(1, 2))\n", - "\n", - " # Summary information over modules\n", - " for key in pdata:\n", - " if key not in mod_data:\n", - " mod_data[key] = []\n", - " if key == 'NBP':\n", - " mod_data[key].append(np.nansum(pdata[key], axis=0))\n", - " else:\n", - " mod_data[key].append(np.nanmean(pdata[key], axis=0))\n", - "\n", - " mod_names.append(mod)\n", - " mod_times.append(ctimes[sort_ind])\n", - " \n", - " # Plotting\n", - " for key in pdata:\n", - " \n", - " if len(pdata[key].shape)<2:\n", - " continue\n", - " \n", - " vmin,vmax = get_range(pdata[key][::-1].flatten(), plot_range)\n", - " if key == 'NBP':\n", - " unit = '[%]'\n", - " else:\n", - " unit = '[ADU]'\n", - "\n", - " title = '{}, module {}, {}'.format(\n", - " const[0], mod, keys[key][1])\n", - " cb_label = '{}, {} {}'.format(const[0], keys[key][2], unit)\n", - "\n", - " fname = '{}/{}_{}'.format(out_folder, const[0], mod.replace('_', ''))\n", - " for item in const[1:]:\n", - " fname = '{}_{}'.format(fname, item)\n", - " fname = '{}_ASIC_{}.png'.format(fname, key)\n", - " \n", - " hm_combine(pdata[key][::-1], htype=HMType.mro,\n", - " x_label='Creation Time', y_label='ASIC ID',\n", - " x_ticklabels=ctimes_ticks,\n", - " x_ticks=np.arange(len(ctimes_ticks))+0.3,\n", - " title=title, cb_label=cb_label,\n", - " vmin=vmin, vmax=vmax,\n", - " fname=fname,\n", - " pad=[0.125, 0.125, 0.12, 0.185])\n", - "\n", - " \n", - " # Summary over modules\n", - " for key in mod_data:\n", - " \n", - " if key == 'NBP':\n", - " unit = ''\n", - " else:\n", - " unit = '[ADU]'\n", - "\n", - " title = '{}, All modules, {} gain, {}'.format(\n", - " const[0], gain_name[gain], keys[key][1])\n", - " \n", - " fname = '{}/{}_{}'.format(out_folder, const[0], 'All')\n", - " for item in const[1:]:\n", - " fname = '{}_{}'.format(fname, item)\n", - " fname = '{}_ASIC_{}.png'.format(fname, key)\n", - " \n", - " fig = plt.figure(figsize=(12,12) )\n", - " for i in range(len(mod_data[key])):\n", - " plt.scatter(mod_times[i], mod_data[key][i], label=mod_names[i])\n", - " plt.grid()\n", - " plt.xlabel('Creation 
Time')\n", - " plt.ylabel('{}, {} {}'.format(const[0], keys[key][2], unit)) \n", - " plt.legend(loc='best guess')\n", - " plt.title(title)\n", - " fig.savefig(fname)\n" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.7" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/notebooks/LPD/PlotFromCalDB_LPD_NBC.ipynb b/notebooks/LPD/PlotFromCalDB_LPD_NBC.ipynb deleted file mode 100644 index 6f797cae32338fb12d06bba753296416093114a8..0000000000000000000000000000000000000000 --- a/notebooks/LPD/PlotFromCalDB_LPD_NBC.ipynb +++ /dev/null @@ -1,673 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Statistical analysis of calibration factors#\n", - "\n", - "Author: Mikhail Karnevskiy, Steffen Hauf, Version 0.2\n", - "\n", - "Calibration constants for LPD1M detector from the data base with injection time between start_date and end_date are considered.\n", - "\n", - "To be visualized, calibration constants are averaged per ASICs. Plots shows calibration constant over time for each constant and for each module. Summary plots overall modules are created.\n", - "\n", - "In additional gain-slopes flat-field and pulse-capacitor are combined to relative-gain constant and presented as well. Noise in electron units is derived using gain factors and presented.\n", - "\n", - "Values shown in plots are saved in h5 files.\n", - "\n", - "All presented values corresponds to high and medium gain stages." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "cluster_profile = \"noDB\" # The ipcluster profile to use\n", - "start_date = \"2019-01-30\" # Date to start investigation interval from\n", - "end_date = \"2019-12-12\" # Date to end investigation interval at, can be \"now\"\n", - "nconstants = 20 # Number of time stamps to plot. If not 0, overcome start_date.\n", - "constants = [\"Noise\", \"Offset\", \"SlopesFF\", \"SlopesCI\"] # constants to plot\n", - "modules = [2] # Modules, set to -1 for all, range allowed\n", - "bias_voltages = [250, 500] # Bias voltage\n", - "mem_cells = [1, 128, 512] # Number of used memory cells.\n", - "photon_energy = 9.2 # Photon energy of the beam\n", - "out_folder = \"/gpfs/exfel/data/scratch/karnem/test_LPD2\" # Output folder, required\n", - "use_existing = \"\" # If not empty, constants stored in given folder will be used\n", - "cal_db_timeout = 180000 # timeout on caldb requests\",\n", - "adu_to_photon = 33.17 # ADU to photon conversion factor (8000 / 3.6 / 67.)\n", - "nMemToShow = 32 # Number of memory cells to be shown in plots over ASICs\n", - "db_module = \"LPD1M1\" # detector entry in the DB to investigate\n", - "dclass = \"LPD\" # Detector class\n", - "cal_db_interface = \"tcp://max-exfl016:8015#8025\" # the database interface to use\n", - "max_time = 15 # the max margin in min. 
for the matching closest bad pixels\n", - "range_offset = [800., 1500, 600, 900] # plotting range for offset: high gain l, r, medium gain l, r \n", - "range_noise = [2.0, 16, 1.0, 7.0] # plotting range for noise: high gain l, r, medium gain l, r \n", - "range_gain = [20, 30, 20, 30] # plotting range for gain: high gain l, r, medium gain l, r \n", - "range_noise_e = [100., 600., 100., 600.] # plotting range for noise in [e-]: high gain l, r, medium gain l, r \n", - "range_slopesCI = [0.95, 1.05, 0.0, 0.5] # plotting range for slope CI: high gain l, r, medium gain l, r \n", - "range_slopesFF = [0.8, 1.2, 0.8, 1.2] # plotting range for slope FF: high gain l, r, medium gain l, r \n", - "plot_range = 3 # range for plotting in units of median absolute deviations\n", - "x_labels = ['Sensor Bias Voltage', 'Memory cells'] # parameters to be shown on X axis" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "import copy\n", - "import datetime\n", - "import dateutil.parser\n", - "import numpy as np\n", - "import os\n", - "import sys\n", - "import warnings\n", - "warnings.filterwarnings('ignore')\n", - "\n", - "from iCalibrationDB import Constants, Conditions, Detectors, ConstantMetaData\n", - "from cal_tools.tools import get_from_db, get_random_db_interface\n", - "from cal_tools.ana_tools import (save_dict_to_hdf5, load_data_from_hdf5, \n", - " combine_constants, HMType, IMType,\n", - " hm_combine, combine_lists, get_range)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Prepare variables\n", - "nMem = max(mem_cells) # Number of mem Cells to store\n", - "spShape = (64,64) # Shape of superpixel\n", - "\n", - "if modules[0] == -1:\n", - " modules = range(16)\n", - " \n", - "modules = [\"Q{}M{}\".format(x // 4 + 1, x % 4 + 1) for x in modules]\n", - "\n", - "constantsDark = {\"SlopesFF\": 'BadPixelsFF',\n", - " 'SlopesCI': 'BadPixelsCI',\n", - " 'Noise': 'BadPixelsDark',\n", - " 'Offset': 'BadPixelsDark'}\n", - "print('Bad pixels data: ', constantsDark)\n", - "\n", - "# Define parameters in order to perform loop over time stamps\n", - "start = datetime.datetime.now() if start_date.upper() == \"NOW\" else dateutil.parser.parse(\n", - " start_date)\n", - "end = datetime.datetime.now() if end_date.upper() == \"NOW\" else dateutil.parser.parse(\n", - " end_date)\n", - "\n", - "# Create output folder\n", - "os.makedirs(out_folder, exist_ok=True)\n", - "\n", - "# Get getector conditions\n", - "det = getattr(Detectors, db_module)\n", - "dconstants = getattr(Constants, dclass)\n", - "\n", - "print('CalDB Interface: {}'.format(cal_db_interface))\n", - "print('Start time at: ', start)\n", - "print('End time at: ', end)\n", - "print('Modules: ', modules)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "parameter_list = combine_lists(bias_voltages, modules, mem_cells, names = ['bias_voltage', 'module', 'mem_cells'])\n", - "print(parameter_list)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Retrieve list of meta-data\n", - "constant_versions = []\n", - "constant_parameters = []\n", - "constantBP_versions = []\n", - "\n", - "# Loop over constants\n", - "for c, const in enumerate(constants):\n", - " \n", - " if use_existing != \"\":\n", - " break\n", - " \n", - " # Loop over parameters\n", - " for pars in parameter_list:\n", - " 
\n", - " if (const in [\"Offset\", \"Noise\", \"SlopesCI\"] or \"DARK\" in const.upper()):\n", - " dcond = Conditions.Dark\n", - " mcond = getattr(dcond, dclass)(\n", - " memory_cells=pars['mem_cells'],\n", - " bias_voltage=pars['bias_voltage'])\n", - " else:\n", - " dcond = Conditions.Illuminated\n", - " mcond = getattr(dcond, dclass)(\n", - " memory_cells=pars['mem_cells'],\n", - " bias_voltage=pars['bias_voltage'],\n", - " photon_energy=photon_energy)\n", - "\n", - " print('Request: ', const, 'with paramters:', pars)\n", - " # Request Constant versions for given parameters and module\n", - " data = get_from_db(getattr(det, pars['module']),\n", - " getattr(dconstants,\n", - " const)(),\n", - " copy.deepcopy(mcond), None,\n", - " cal_db_interface,\n", - " creation_time=start,\n", - " verbosity=0,\n", - " timeout=cal_db_timeout,\n", - " meta_only=True,\n", - " version_info=True)\n", - "\n", - " if not isinstance(data, list):\n", - " continue\n", - " \n", - " # Request BP constant versions\n", - " print('constantDark:', constantsDark[const], ) \n", - " dataBP = get_from_db(getattr(det, pars['module']),\n", - " getattr(dconstants, \n", - " constantsDark[const])(),\n", - " copy.deepcopy(mcond), None,\n", - " cal_db_interface,\n", - " creation_time=start,\n", - " verbosity=0,\n", - " timeout=cal_db_timeout,\n", - " meta_only=True,\n", - " version_info=True)\n", - " \n", - " \n", - " for d in data:\n", - " # print('Item: ', d)\n", - " # Match proper BP constant version\n", - " # and get constant version within\n", - " # requested time range\n", - " if d is None:\n", - " print('Time or data is not found!')\n", - " continue\n", - "\n", - " dt = dateutil.parser.parse(d['begin_at'])\n", - "\n", - " if (dt.replace(tzinfo=None) > end or \n", - " (nconstants==0 and dt.replace(tzinfo=None) < start)):\n", - " continue\n", - " \n", - " closest_BP = None\n", - " closest_BPtime = None\n", - " found_BPmatch = False\n", - " \n", - " if not isinstance(dataBP, list):\n", - " dataBP = []\n", - " \n", - " for dBP in dataBP:\n", - " if dBP is None:\n", - " print(\"Bad pixels are not found!\")\n", - " continue\n", - " \n", - " dt = dateutil.parser.parse(d['begin_at'])\n", - " dBPt = dateutil.parser.parse(dBP['begin_at'])\n", - " \n", - " if dt == dBPt:\n", - " found_BPmatch = True\n", - " else:\n", - "\n", - " if np.abs(dBPt-dt).seconds < (max_time*60):\n", - " if closest_BP is None:\n", - " closest_BP = dBP\n", - " closest_BPtime = dBPt\n", - " else:\n", - " if np.abs(dBPt-dt) < np.abs(closest_BPtime-dt):\n", - " closest_BP = dBP\n", - " closest_BPtime = dBPt\n", - " \n", - " if dataBP.index(dBP) == len(dataBP)-1:\n", - " if closest_BP:\n", - " dBP = closest_BP\n", - " dBPt = closest_BPtime\n", - " found_BPmatch = True\n", - " else:\n", - " print('Bad pixels are not found!')\n", - " \n", - " if found_BPmatch:\n", - " print(\"Found constant {}: begin at {}\".format(const, dt))\n", - " print(\"Found bad pixels at {}\".format(dBPt))\n", - " constantBP_versions.append(dBP)\n", - " constant_versions.append(d)\n", - " constant_parameters.append(copy.deepcopy(pars))\n", - " break\n", - " \n", - " if not found_BPmatch:\n", - " print('Bad pixels are not matched')\n", - " constantBP_versions.append(None)\n", - " constant_versions.append(d)\n", - " constant_parameters.append(copy.deepcopy(pars))\n", - " \n", - "print('Number of retrieved constants {}'.format(len(constant_versions)))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "def prepare_to_store(a, 
nMem):\n", - " shape = list(a.shape[:2])+[nMem, 2]\n", - " b = np.full(shape, np.nan)\n", - " b[:, :, :a.shape[2]] = a[:, :, :, :2]\n", - " return b\n", - "\n", - "\n", - "def get_rebined(a, rebin):\n", - " return a.reshape(\n", - " int(a.shape[0] / rebin[0]),\n", - " rebin[0],\n", - " int(a.shape[1] / rebin[1]),\n", - " rebin[1],\n", - " a.shape[2],\n", - " a.shape[3])\n", - "\n", - "\n", - "def modify_const(const, data):\n", - "\n", - " if const in ['SlopesFF']:\n", - " data = data[..., None, None]\n", - "\n", - " if(len(data.shape)==5):\n", - " data = data[:,:,:,:,0]\n", - "\n", - " if len(data.shape) < 4:\n", - " print(data.shape, \"Unexpected shape!\")\n", - "\n", - " if data.shape[0] != 256:\n", - " data = data.swapaxes(0, 2).swapaxes(1,3).swapaxes(2,3) \n", - " \n", - " return data\n", - "\n", - "\n", - "ret_constants = {}\n", - "constant_data = ConstantMetaData()\n", - "constant_BP = ConstantMetaData()\n", - "\n", - "# sort over begin_at\n", - "idxs, _ = zip(*sorted(enumerate(constant_versions), \n", - " key=lambda x: x[1]['begin_at'], reverse=True))\n", - "\n", - "for i in idxs:\n", - " const = constant_versions[i]['data_set_name'].split('/')[-2]\n", - " qm = constant_parameters[i]['module']\n", - " \n", - " if not const in ret_constants:\n", - " ret_constants[const] = {}\n", - " if not qm in ret_constants[const]:\n", - " ret_constants[const][qm] = []\n", - " \n", - " if nconstants>0 and len(ret_constants[const][qm])>=nconstants:\n", - " continue\n", - " \n", - " constant_data.retrieve_from_version_info(constant_versions[i])\n", - " cdata = constant_data.calibration_constant.data\n", - " ctime = constant_data.calibration_constant_version.begin_at \n", - " cdata = modify_const(const, cdata)\n", - " print(\"constant: {}, module {}, begin_at {}\".format(const, qm, ctime))\n", - "\n", - " if constantBP_versions[i]:\n", - " constant_BP.retrieve_from_version_info(constantBP_versions[i])\n", - " cdataBP = constant_BP.calibration_constant.data\n", - " cdataBP = modify_const(const, cdataBP)\n", - "\n", - " if cdataBP.shape != cdata.shape:\n", - " print('Wrong bad pixel shape! 
{}, expected {}'.format(cdataBP.shape, cdata.shape))\n", - " cdataBP = np.full_like(cdata, -1)\n", - "\n", - " # Apply bad pixel mask\n", - " cdataABP = np.copy(cdata)\n", - " cdataABP[cdataBP > 0] = np.nan\n", - "\n", - " # Create superpixels for constants with BP applied\n", - " cdataABP = get_rebined(cdataABP, spShape)\n", - " toStoreBP = prepare_to_store(np.nanmean(cdataABP, axis=(1, 3)), nMem)\n", - " toStoreBPStd = prepare_to_store(np.nanstd(cdataABP, axis=(1, 3)), nMem)\n", - "\n", - " # Prepare number of bad pixels per superpixels\n", - " cdataBP = get_rebined(cdataBP, spShape)\n", - " cdataNBP = prepare_to_store(np.nansum(cdataBP > 0, axis=(1, 3)), nMem)\n", - "\n", - " # Create superpixels for constants without BP applied\n", - " cdata = get_rebined(cdata, spShape)\n", - " toStoreStd = prepare_to_store(np.nanstd(cdata, axis=(1, 3)), nMem)\n", - " toStore = prepare_to_store(np.nanmean(cdata, axis=(1, 3)), nMem)\n", - " \n", - " if not constantBP_versions[i]:\n", - " toStoreBP = np.full_like(toStore, IMType.NO_BPMAP.value)\n", - " toStoreBPStd = np.full_like(toStore, IMType.NO_BPMAP.value)\n", - " cdataNBP = np.full_like(toStore, IMType.NO_BPMAP.value)\n", - " \n", - " dpar = {p.name: p.value for p in constant_data.detector_condition.parameters}\n", - "\n", - " print(\"Store values in dict\", const, qm, ctime)\n", - " ret_constants[const][qm].append({'ctime': ctime,\n", - " 'nBP': cdataNBP,\n", - " 'dataBP': toStoreBP,\n", - " 'dataBPStd': toStoreBPStd,\n", - " 'data': toStore,\n", - " 'dataStd': toStoreStd,\n", - " 'mdata': dpar}) \n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "if use_existing == \"\":\n", - " print('Save data to /CalDBAna_{}_{}.h5'.format(dclass, modules[0]))\n", - " save_dict_to_hdf5(ret_constants,\n", - " '{}/CalDBAna_{}_{}.h5'.format(out_folder, dclass, modules[0]))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "if use_existing == \"\":\n", - " fpath = '{}/CalDBAna_{}_*.h5'.format(out_folder, dclass)\n", - "else:\n", - " fpath = '{}/CalDBAna_{}_*.h5'.format(use_existing, dclass)\n", - "\n", - "print('Load data from {}'.format(fpath))\n", - "ret_constants = load_data_from_hdf5(fpath)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Combine FF and PC data to calculate Gain\n", - "# Estimate Noise in units of electrons\n", - "print ('Calculate Gain and Noise in electron units')\n", - "\n", - "ret_constants[\"Gain\"] = {}\n", - "ret_constants[\"Noise-e\"] = {}\n", - "for mod in list(range(16)):\n", - " if (\"SlopesFF\" not in ret_constants or\n", - " \"SlopesCI\" not in ret_constants):\n", - " break\n", - "\n", - " qm = \"Q{}M{}\".format(mod // 4 + 1, mod % 4 + 1)\n", - " print(qm)\n", - "\n", - " if (qm not in ret_constants[\"SlopesFF\"] or\n", - " qm not in ret_constants[\"SlopesCI\"]):\n", - " continue\n", - "\n", - " ret_constants[\"Gain\"][qm] = {}\n", - "\n", - " dataFF = ret_constants[\"SlopesFF\"][qm]\n", - " dataPC = ret_constants[\"SlopesCI\"][qm]\n", - "\n", - " if (len(dataFF) == 0 or len(dataPC) == 0):\n", - " continue\n", - "\n", - " ctimesFF = np.array(dataFF[\"ctime\"])\n", - " ctimesPC = np.array(dataPC[\"ctime\"])\n", - "\n", - " ctime, icomb = combine_constants(ctimesFF, ctimesPC)\n", - "\n", - " cdataPC_vs_time = np.array(dataPC[\"data\"])[..., 0]\n", - " cdataFF_vs_time = 
np.array(dataFF[\"data\"])[..., 0]\n", - "\n", - " cdataFF_vs_time = np.nanmedian(cdataFF_vs_time, axis=3)[..., None]\n", - "\n", - " cdataFF_vs_time /= np.nanmedian(cdataFF_vs_time, axis=(1, 2, 3))[:, None,\n", - " None, None]\n", - " cdataPC_vs_time /= np.nanmedian(cdataPC_vs_time, axis=(1, 2, 3))[:, None,\n", - " None, None]\n", - "\n", - " gain_vs_time = []\n", - " for iFF, iPC in icomb:\n", - " gain_vs_time.append(cdataFF_vs_time[iFF] * cdataPC_vs_time[iPC])\n", - "\n", - " print(np.array(gain_vs_time).shape)\n", - " \n", - " ctime_ts = [t.timestamp() for t in ctime]\n", - " \n", - " ret_constants[\"Gain\"][qm][\"ctime\"] = ctime\n", - " ret_constants[\"Gain\"][qm][\"data\"] = np.array(gain_vs_time)\n", - " # Fill missing data for compatibility with plotting code\n", - " ret_constants[\"Gain\"][qm][\"dataBP\"] = np.array(gain_vs_time)\n", - " ret_constants[\"Gain\"][qm][\"nBP\"] = np.array(gain_vs_time)\n", - "\n", - " if \"Noise\" not in ret_constants:\n", - " continue\n", - "\n", - " if qm not in ret_constants[\"Noise\"]:\n", - " continue\n", - "\n", - " dataN = ret_constants[\"Noise\"][qm]\n", - " if len(dataN) == 0:\n", - " continue\n", - "\n", - " ret_constants[\"Noise-e\"][qm] = {}\n", - " \n", - " ctimesG = np.array(ctime)\n", - " ctimesN = np.array(dataN[\"ctime\"])\n", - "\n", - " ctime, icomb = combine_constants(ctimesG, ctimesN)\n", - "\n", - " cdataG_vs_time = np.array(gain_vs_time)\n", - " cdataN_vs_time = np.array(dataN[\"data\"])[..., 0]\n", - "\n", - " data_vs_time = []\n", - " for iG, iN in icomb:\n", - " data_vs_time.append(\n", - " cdataN_vs_time[iN] * adu_to_photon / cdataG_vs_time[iG])\n", - "\n", - " print(np.array(gain_vs_time).shape)\n", - " ctime_ts = [t.timestamp() for t in ctime]\n", - " ret_constants[\"Noise-e\"][qm][\"ctime\"] = ctime\n", - " ret_constants[\"Noise-e\"][qm][\"data\"] = np.array(data_vs_time)\n", - " # Fill missing data for compatibility with plotting code\n", - " ret_constants[\"Noise-e\"][qm][\"dataBP\"] = np.array(data_vs_time)\n", - " ret_constants[\"Noise-e\"][qm][\"nBP\"] = np.array(data_vs_time)\n", - " \n", - "save_dict_to_hdf5({k:v for k,v in ret_constants.items() if k in ['Gain', 'Noise-e']},\n", - " '{}/CalDBAna_{}_Gain.h5'.format(out_folder, dclass))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Parameters for plotting\n", - "\n", - "# Define range for plotting\n", - "rangevals = {\n", - " \"Offset\": [range_offset[0:2], range_offset[2:4]],\n", - " \"Noise\": [range_noise[0:2], range_noise[2:4]],\n", - " \"Gain\": [range_gain[0:2], range_gain[2:4]],\n", - " \"Noise-e\": [range_noise_e[0:2], range_noise_e[2:4]],\n", - " \"SlopesCI\": [range_slopesCI[0:2], range_slopesCI[2:4]],\n", - " \"SlopesFF\": [range_slopesFF[0:2], range_slopesFF[2:4]]\n", - "}\n", - "\n", - "keys = {\n", - " 'Mean': ['data', '', 'Mean over pixels'],\n", - " 'std': ['dataStd', '', '$\\sigma$ over pixels'],\n", - " 'MeanBP': ['dataBP', 'Good pixels only', 'Mean over pixels'],\n", - " 'NBP': ['nBP', 'Fraction of BP', 'Fraction of BP'],\n", - " 'stdBP': ['dataBPStd', 'Good pixels only', '$\\sigma$ over pixels'],\n", - " 'stdASIC': ['', '', '$\\sigma$ over ASICs'],\n", - " 'stdCell': ['', '', '$\\sigma$ over Cells'],\n", - "}\n", - "\n", - "gain_name = ['High', 'Medium', 'Low']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": false - }, - "outputs": [], - "source": [ - "print('Plot calibration constants')\n", - "\n", - "# loop over constat type\n", 
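Editor's note — stripped of the timestamp matching done by `combine_constants`, the Gain / Noise-e cell above reduces to the arithmetic below: both slope constants are normalised to their own median, multiplied into a relative gain, and the dark noise in ADU is converted to electron units with the `adu_to_photon` factor. The arrays here are dummies for illustration only.

```python
# Condensed sketch of the LPD Gain / Noise-e combination (dummy data).
import numpy as np

adu_to_photon = 33.17                            # 8000 / 3.6 / 67., as in the parameters cell

slopes_ff = np.random.rand(10, 8, 8, 1) + 0.5    # flat-field slopes vs. time
slopes_ci = np.random.rand(10, 8, 8, 1) + 0.5    # charge-injection slopes vs. time
noise_adu = np.random.rand(10, 8, 8, 1) * 10.0   # dark noise in ADU vs. time

# Normalise each constant to its per-timestamp median, then combine.
rel_ff = slopes_ff / np.nanmedian(slopes_ff, axis=(1, 2, 3))[:, None, None, None]
rel_ci = slopes_ci / np.nanmedian(slopes_ci, axis=(1, 2, 3))[:, None, None, None]
gain = rel_ff * rel_ci

# Noise in electron units, as in the notebook cell.
noise_e = noise_adu * adu_to_photon / gain
```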
- "for const, mods in ret_constants.items():\n", - "\n", - " # Loop over gain\n", - " for gain in range(2):\n", - " print('Const: {}, gain {}'.format(const, gain))\n", - "\n", - " if const in [\"Gain\", \"Noise-e\"] and gain == 1:\n", - " continue\n", - " else:\n", - " pass\n", - "\n", - " # Loop over modules\n", - " for mod, data in mods.items():\n", - " \n", - " if mod not in modules:\n", - " continue\n", - "\n", - " print(mod)\n", - "\n", - " ctimes = np.array(data[\"ctime\"])\n", - " ctimes_ticks = [x.strftime('%y-%m-%d') for x in ctimes]\n", - "\n", - " if (\"mdata\" in data):\n", - " cmdata = np.array(data[\"mdata\"])\n", - " for i, tick in enumerate(ctimes_ticks):\n", - " ctimes_ticks[i] = ctimes_ticks[i] + \\\n", - " ', V={:1.0f}'.format(cmdata[i]['Sensor Bias Voltage']) + \\\n", - " ', M={:1.0f}'.format(\n", - " cmdata[i]['Memory cells'])\n", - "\n", - " sort_ind = np.argsort(ctimes_ticks)\n", - " ctimes_ticks = list(np.array(ctimes_ticks)[sort_ind])\n", - "\n", - " # Create sorted by data dataset\n", - " rdata = {}\n", - " for key, item in keys.items():\n", - " if item[0] in data:\n", - " rdata[key] = np.array(data[item[0]])[sort_ind]\n", - "\n", - " nTimes = rdata['Mean'].shape[0]\n", - " nPixels = rdata['Mean'].shape[1] * rdata['Mean'].shape[2]\n", - " nBins = nMemToShow * nPixels\n", - "\n", - " # Select gain\n", - " if const not in [\"Gain\", \"Noise-e\"]:\n", - " for key in rdata:\n", - " rdata[key] = rdata[key][..., gain]\n", - "\n", - " # Avoid to low values\n", - " if const in [\"Noise\", \"Offset\", \"Noise-e\"]:\n", - " rdata['Mean'][rdata['Mean'] < 0.1] = np.nan\n", - " if 'MeanBP' in rdata:\n", - " rdata['MeanBP'][rdata['MeanBP'] < 0.1] = np.nan\n", - "\n", - " if 'NBP' in rdata:\n", - " rdata['NBP'] = rdata['NBP'].astype(float)\n", - " rdata[\"NBP\"][rdata[\"NBP\"] == (spShape[0] * spShape[1])] = np.nan\n", - " rdata[\"NBP\"] = rdata[\"NBP\"] / (spShape[0] * spShape[1]) * 100\n", - "\n", - " # Reshape: ASICs over cells for plotting\n", - " pdata = {}\n", - " for key in rdata:\n", - " pdata[key] = rdata[key][:, :, :, :nMemToShow].reshape(\n", - " nTimes, nBins).swapaxes(0, 1)\n", - "\n", - " # Summary over ASICs\n", - " adata = {}\n", - " for key in rdata:\n", - " adata[key] = np.nanmean(rdata[key], axis=(1, 2)).swapaxes(0, 1)\n", - "\n", - " # Plotting\n", - " for key in pdata:\n", - " vmin,vmax = get_range(pdata[key][::-1].flatten(), plot_range)\n", - " if const in rangevals and key in ['Mean', 'MeanBP']:\n", - " vmin = rangevals[const][gain][0]\n", - " vmax = rangevals[const][gain][1]\n", - "\n", - " if key == 'NBP':\n", - " unit = '[%]'\n", - " else:\n", - " unit = '[ADU]'\n", - " if const == 'Noise-e':\n", - " unit = '[$e^-$]'\n", - "\n", - " title = '{}, module {}, {} gain, {}'.format(\n", - " const, mod, gain_name[gain], keys[key][1])\n", - " cb_label = '{}, {} {}'.format(const, keys[key][2], unit)\n", - "\n", - " hm_combine(pdata[key][::-1], htype=HMType.INSET_AXIS,\n", - " x_label='Creation Time', y_label='ASIC ID',\n", - " x_ticklabels=ctimes_ticks,\n", - " x_ticks=np.arange(len(ctimes_ticks))+0.3,\n", - " title=title, cb_label=cb_label,\n", - " vmin=vmin, vmax=vmax,\n", - " fname='{}/{}_{}_g{}_ASIC_{}.png'.format(\n", - " out_folder, const, mod, gain, key),\n", - " y_ticks=np.arange(nBins, step=nMemToShow)+16,\n", - " y_ticklabels=np.arange(nPixels)[::-1]+1,\n", - " pad=[0.125, 0.125, 0.12, 0.185])\n", - "\n", - " hm_combine(adata[key],\n", - " x_label='Creation Time', y_label='Memory cell ID',\n", - " x_ticklabels=ctimes_ticks,\n", - " 
x_ticks=np.arange(len(ctimes_ticks))+0.3,\n", - " title=title, cb_label=cb_label,\n", - " fname='{}/{}_{}_g{}_MEM_{}.png'.format(\n", - " out_folder, const, mod, gain, key),\n", - " vmin=vmin, vmax=vmax)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.7" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/notebooks/ePix/PlotFromCalDB_ePix100_NBC.ipynb b/notebooks/ePix/PlotFromCalDB_ePix100_NBC.ipynb deleted file mode 100644 index e6163d8d494d6a60b374dbb2b6b7ea9e35a2ec01..0000000000000000000000000000000000000000 --- a/notebooks/ePix/PlotFromCalDB_ePix100_NBC.ipynb +++ /dev/null @@ -1,481 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Statistical analysis of calibration factors#\n", - "\n", - "Author: Mikhail Karnevskiy, Steffen Hauf, Version 0.1\n", - "\n", - "Calibration constants for ePix100 detector from the data base with injection time between start_date and end_date are considered.\n", - "\n", - "To be visualized, calibration constants are averaged per ASICs. Plots shows calibration constant over time for each constant.\n", - "\n", - "Values shown in plots are saved in h5 files." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "cluster_profile = \"noDB\" # The ipcluster profile to use\n", - "start_date = \"2019-01-30\" # date to start investigation interval from\n", - "end_date = \"2019-05-01\" # date to end investigation interval at, can be \"now\"\n", - "nconstants = 10 # Number of time stamps to plot. 
If not 0, overcome start_date.\n", - "dclass=\"ePix100\" # Detector class\n", - "db_module = \"ePix100_M15\" # detector entry in the DB to investigate\n", - "constants = [\"Noise\", \"Offset\"] # constants to plot\n", - "bias_voltage = [200] # Bias voltage\n", - "temperature = [288] # Operation temperature\n", - "integration_time = [1, 50] # Integration time\n", - "in_vacuum = [0] # 0 if detector is operated in room pressure\n", - "parameter_names = ['bias_voltage', 'integration_time', 'temperature', 'in_vacuum'] # names of parameters\n", - "photon_energy = 9.2 # Photon energy of the beam\n", - "out_folder = \"/gpfs/exfel/data/scratch/karnem/test_ePix/\" # output folder\n", - "use_existing = \"\" # If not empty, constants stored in given folder will be used\n", - "cal_db_interface = \"tcp://max-exfl016:8015#8025\" # the database interface to use\n", - "cal_db_timeout = 180000 # timeout on caldb requests\",\n", - "range_offset = [1000., 2200] # plotting range for offset\n", - "range_noise = [1.5, 3.3] # plotting range for noise\n", - "plot_range = 3 # range for plotting in units of median absolute deviations" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "import copy\n", - "import datetime\n", - "import dateutil.parser\n", - "import numpy as np\n", - "import os\n", - "import sys\n", - "import warnings\n", - "warnings.filterwarnings('ignore')\n", - "\n", - "from iCalibrationDB import Constants, Conditions, Detectors, ConstantMetaData\n", - "from cal_tools.tools import get_from_db, get_random_db_interface\n", - "from cal_tools.ana_tools import (save_dict_to_hdf5, load_data_from_hdf5, \n", - " HMType, hm_combine, \n", - " combine_lists, get_range)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Prepare variables\n", - "spShape = (354, 96) # Shape of superpixel\n", - "\n", - "parameters = [globals()[x] for x in parameter_names]\n", - "\n", - "constantsDark = {'Noise_': 'BadPixelsDark',\n", - " 'Offset_': 'BadPixelsDark'}\n", - "print('Bad pixels data: ', constantsDark)\n", - "\n", - "# Define parameters in order to perform loop over time stamps\n", - "start = datetime.datetime.now() if start_date.upper() == \"NOW\" else dateutil.parser.parse(\n", - " start_date)\n", - "end = datetime.datetime.now() if end_date.upper() == \"NOW\" else dateutil.parser.parse(\n", - " end_date)\n", - "\n", - "# Create output folder\n", - "os.makedirs(out_folder, exist_ok=True)\n", - "\n", - "# Get getector conditions\n", - "det = getattr(Detectors, db_module)\n", - "dconstants = getattr(Constants, dclass)\n", - "\n", - "print('CalDB Interface: {}'.format(cal_db_interface))\n", - "print('Start time at: ', start)\n", - "print('End time at: ', end)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "parameter_list = combine_lists(*parameters, names = parameter_names)\n", - "print(parameter_list)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Retrieve list of meta-data\n", - "constant_versions = []\n", - "constant_parameters = []\n", - "constantBP_versions = []\n", - "\n", - "# Loop over constants\n", - "for c, const in enumerate(constants):\n", - " \n", - " if use_existing != \"\":\n", - " break\n", - " \n", - " # Loop over parameters\n", - " for pars in parameter_list:\n", - " \n", - " if (const in [\"Offset\", \"Noise\", 
\"SlopesPC\"] or \"DARK\" in const.upper()):\n", - " dcond = Conditions.Dark\n", - " mcond = getattr(dcond, dclass)(**pars)\n", - " else:\n", - " dcond = Conditions.Illuminated\n", - " mcond = getattr(dcond, dclass)(**pars,\n", - " photon_energy=photon_energy)\n", - "\n", - " \n", - " \n", - " print('Request: ', const, 'with paramters:', pars)\n", - " # Request Constant versions for given parameters and module\n", - " data = get_from_db(det,\n", - " getattr(dconstants,\n", - " const)(),\n", - " copy.deepcopy(mcond), None,\n", - " cal_db_interface,\n", - " creation_time=start,\n", - " verbosity=0,\n", - " timeout=cal_db_timeout,\n", - " meta_only=True,\n", - " version_info=True)\n", - " \n", - " if not isinstance(data, list):\n", - " continue\n", - " \n", - " if const in constantsDark:\n", - " # Request BP constant versions\n", - " print('constantDark:', constantsDark[const], ) \n", - " dataBP = get_from_db(det,\n", - " getattr(dconstants, \n", - " constantsDark[const])(),\n", - " copy.deepcopy(mcond), None,\n", - " cal_db_interface,\n", - " creation_time=start,\n", - " verbosity=0,\n", - " timeout=cal_db_timeout,\n", - " meta_only=True,\n", - " version_info=True)\n", - " \n", - " if not isinstance(data, list) or not isinstance(dataBP, list):\n", - " continue\n", - " \n", - " found_BPmatch = False\n", - " for d in data:\n", - " # Match proper BP constant version\n", - " # and get constant version within\n", - " # requested time range\n", - " if d is None:\n", - " print('Time or data is not found!')\n", - " continue\n", - "\n", - " dt = dateutil.parser.parse(d['begin_at'])\n", - "\n", - " if (dt.replace(tzinfo=None) > end or \n", - " (nconstants==0 and dt.replace(tzinfo=None) < start)):\n", - " continue\n", - "\n", - " closest_BP = None\n", - " closest_BPtime = None\n", - "\n", - " for dBP in dataBP:\n", - " if dBP is None:\n", - " print(\"Bad pixels are not found!\")\n", - " continue\n", - "\n", - " dt = dateutil.parser.parse(d['begin_at'])\n", - " dBPt = dateutil.parser.parse(dBP['begin_at'])\n", - "\n", - " if dt == dBPt:\n", - " found_BPmatch = True\n", - " else:\n", - "\n", - " if np.abs(dBPt-dt).seconds < (max_time*60):\n", - " if closest_BP is None:\n", - " closest_BP = dBP\n", - " closest_BPtime = dBPt\n", - " else:\n", - " if np.abs(dBPt-dt) < np.abs(closest_BPtime-dt):\n", - " closest_BP = dBP\n", - " closest_BPtime = dBPt\n", - "\n", - " if dataBP.index(dBP) == len(dataBP)-1:\n", - " if closest_BP:\n", - " dBP = closest_BP\n", - " dBPt = closest_BPtime\n", - " found_BPmatch = True\n", - " else:\n", - " print('Bad pixels are not found!')\n", - "\n", - " if found_BPmatch:\n", - " print(\"Found constant {}: begin at {}\".format(const, dt))\n", - " print(\"Found bad pixels at {}\".format(dBPt))\n", - " constantBP_versions.append(dBP)\n", - " constant_versions.append(d)\n", - " constant_parameters.append(copy.deepcopy(pars))\n", - " found_BPmatch = False\n", - " break\n", - " else:\n", - " constant_versions += data\n", - " constant_parameters += [copy.deepcopy(pars)]*len(data)\n", - "\n", - "# Remove dublications\n", - "constant_versions_tmp = []\n", - "constant_parameters_tmp = []\n", - "constantBP_versions_tmp = []\n", - "for i, x in enumerate(constant_versions):\n", - " if x not in constant_versions_tmp:\n", - " constant_versions_tmp.append(x)\n", - " constant_parameters_tmp.append(constant_parameters[i])\n", - " if i<len(constantBP_versions)-1:\n", - " constantBP_versions_tmp.append(constantBP_versions[i])\n", - "constant_versions=constant_versions_tmp\n", - 
"constantBP_versions=constantBP_versions_tmp\n", - "constant_parameters=constant_parameters_tmp\n", - "\n", - "print('Number of stored constant versions is {}'.format(len(constant_versions)))\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "def get_rebined(a, rebin):\n", - " return a[:,:,0].reshape(\n", - " int(a.shape[0] / rebin[0]),\n", - " rebin[0],\n", - " int(a.shape[1] / rebin[1]),\n", - " rebin[1])\n", - " \n", - "def modify_const(const, data, isBP = False):\n", - " return data\n", - "\n", - "ret_constants = {}\n", - "constand_data = ConstantMetaData()\n", - "constant_BP = ConstantMetaData()\n", - "\n", - "# sort over begin_at\n", - "idxs, _ = zip(*sorted(enumerate(constant_versions), \n", - " key=lambda x: x[1]['begin_at'], reverse=True))\n", - "\n", - "for i in idxs:\n", - " const = constant_versions[i]['data_set_name'].split('/')[-2]\n", - " qm = db_module\n", - " \n", - " if not const in ret_constants:\n", - " ret_constants[const] = {}\n", - " if not qm in ret_constants[const]:\n", - " ret_constants[const][qm] = []\n", - " \n", - " if nconstants>0 and len(ret_constants[const][qm])>=nconstants:\n", - " continue\n", - " \n", - " print(\"constant: {}, module {}\".format(const,qm))\n", - " constand_data.retrieve_from_version_info(constant_versions[i])\n", - " \n", - " cdata = constand_data.calibration_constant.data\n", - " ctime = constand_data.calibration_constant_version.begin_at\n", - " \n", - " cdata = modify_const(const, cdata)\n", - " \n", - " # Create superpixels for constants without BP applied\n", - " cdata = get_rebined(cdata, spShape)\n", - " toStoreStd = np.nanstd(cdata, axis=(1, 3))\n", - " toStore = np.nanmean(cdata, axis=(1, 3))\n", - " \n", - " # Convert parameters to dict\n", - " dpar = {p.name: p.value for p in constand_data.detector_condition.parameters}\n", - " \n", - " print(\"Store values in dict\", const, qm, ctime)\n", - " ret_constants[const][qm].append({'ctime': ctime,\n", - " 'nBP': 0,\n", - " 'dataBP': 0,\n", - " 'dataBPStd': 0,\n", - " 'data': toStore,\n", - " 'dataStd': toStoreStd,\n", - " 'mdata': dpar}) \n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "if use_existing == \"\":\n", - " print('Save data to /CalDBAna_{}_{}.h5'.format(dclass, db_module))\n", - " save_dict_to_hdf5(ret_constants,\n", - " '{}/CalDBAna_{}_{}.h5'.format(out_folder, dclass, db_module))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "if use_existing == \"\":\n", - " fpath = '{}/CalDBAna_{}_*.h5'.format(out_folder, dclass)\n", - "else:\n", - " fpath = '{}/CalDBAna_{}_*.h5'.format(use_existing, dclass)\n", - "\n", - "print('Load data from {}'.format(fpath))\n", - "ret_constants = load_data_from_hdf5(fpath)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Parameters for plotting\n", - "\n", - "# Define range for plotting\n", - "rangevals = {\n", - " \"OffsetEPix100\": [range_offset[0:2], range_offset[2:4]],\n", - " \"NoiseEPix100\": [range_noise[0:2], range_noise[2:4]],\n", - "}\n", - "\n", - "keys = {\n", - " 'Mean': ['data', '', 'Mean over pixels'],\n", - " 'std': ['dataStd', '', '$\\sigma$ over pixels'],\n", - " 'MeanBP': ['dataBP', 'Good pixels only', 'Mean over pixels'],\n", - " 'NBP': ['nBP', 'Fraction of BP', 'Fraction of BP'],\n", - " 'stdBP': ['dataBPStd', 'Good pixels only', 
'$\\sigma$ over pixels'],\n", - " 'stdASIC': ['', '', '$\\sigma$ over ASICs'],\n", - " 'stdCell': ['', '', '$\\sigma$ over Cells'],\n", - "}\n", - "\n", - "gain_name = ['High', 'Medium', 'Low']" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": false - }, - "outputs": [], - "source": [ - "print('Plot calibration constants')\n", - "\n", - "# loop over constat type\n", - "for const, modules in ret_constants.items():\n", - "\n", - " print('Const: {}'.format(const))\n", - "\n", - " # Loop over modules\n", - " for mod, data in modules.items():\n", - " print(mod)\n", - "\n", - " ctimes = np.array(data[\"ctime\"])\n", - " ctimes_ticks = [x.strftime('%y-%m-%d') for x in ctimes]\n", - "\n", - " if (\"mdata\" in data):\n", - " cmdata = np.array(data[\"mdata\"])\n", - " for i, tick in enumerate(ctimes_ticks):\n", - " ctimes_ticks[i] = ctimes_ticks[i] + \\\n", - " ', V={:1.0f}'.format(cmdata[i]['Sensor Temperature']) + \\\n", - " ', T={:1.0f}'.format(\n", - " cmdata[i]['Integration Time'])\n", - "\n", - " sort_ind = np.argsort(ctimes_ticks)\n", - " ctimes_ticks = list(np.array(ctimes_ticks)[sort_ind])\n", - "\n", - " # Create sorted by data dataset\n", - " rdata = {}\n", - " for key, item in keys.items():\n", - " if item[0] in data:\n", - " rdata[key] = np.array(data[item[0]])[sort_ind]\n", - "\n", - " nTimes = rdata['Mean'].shape[0]\n", - " nPixels = rdata['Mean'].shape[1] * rdata['Mean'].shape[2]\n", - " nBins = nPixels\n", - "\n", - " # Avoid to low values\n", - " if const in [\"Noise\", \"Offset\", \"Noise-e\"]:\n", - " rdata['Mean'][rdata['Mean'] < 0.1] = np.nan\n", - " if 'MeanBP' in rdata:\n", - " rdata['MeanBP'][rdata['MeanBP'] < 0.1] = np.nan\n", - "\n", - " # Reshape: ASICs over cells for plotting\n", - " pdata = {}\n", - " for key in rdata:\n", - " if key not in ['Mean', 'std']:\n", - " continue\n", - " pdata[key] = rdata[key][:, :, :].reshape(nTimes, nBins).swapaxes(0, 1)\n", - "\n", - " # Plotting\n", - " for key in pdata:\n", - " \n", - " if key not in ['Mean', 'std']:\n", - " continue\n", - " \n", - " vmin,vmax = get_range(pdata[key][::-1].flatten(), plot_range)\n", - " if const in rangevals and key in ['Mean', 'MeanBP']:\n", - " vmin = rangevals[const][0][0]\n", - " vmax = rangevals[const][0][1]\n", - "\n", - " if key == 'NBP':\n", - " unit = '[%]'\n", - " else:\n", - " unit = '[ADU]'\n", - " if const == 'Noise-e':\n", - " unit = '[$e^-$]'\n", - "\n", - " title = '{}, module {}, {}'.format(\n", - " const, mod, keys[key][1])\n", - " cb_label = '{}, {} {}'.format(const, keys[key][2], unit)\n", - "\n", - " hm_combine(pdata[key][::-1], htype=HMType.mro,\n", - " x_label='Creation Time', y_label='ASIC ID',\n", - " x_ticklabels=ctimes_ticks,\n", - " x_ticks=np.arange(len(ctimes_ticks))+0.3,\n", - " title=title, cb_label=cb_label,\n", - " vmin=vmin, vmax=vmax,\n", - " fname='{}/{}_{}_g{}_ASIC_{}.png'.format(\n", - " out_folder, const, mod.replace('_', ''), 0, key),\n", - " pad=[0.125, 0.125, 0.12, 0.185])\n" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.7" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -}
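Editor's note — all four deleted notebooks pass `plot_range` (described as "range for plotting in units of median absolute deviations") to `get_range` from `cal_tools.ana_tools`, whose implementation is not shown in this diff. The sketch below shows the behaviour that description suggests: clipping the colour scale to a few median absolute deviations around the median so outliers do not wash out the heat maps. It is an assumption about the helper, not its actual code.

```python
# Hypothetical sketch of get_range(data, plot_range) as described in the parameter comments.
import numpy as np

def get_range_sketch(data, scale):
    # Robust plotting limits: median +/- scale * MAD.
    med = np.nanmedian(data)
    mad = np.nanmedian(np.abs(data - med))
    return med - scale * mad, med + scale * mad

vmin, vmax = get_range_sketch(np.random.normal(1000.0, 5.0, 10000), 3)
```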