From 1058d075d1a967af6473a812a1533f92bb9b93a0 Mon Sep 17 00:00:00 2001
From: karnem <mikhail.karnevskiy@desy.de>
Date: Wed, 17 Jul 2019 13:56:12 +0200
Subject: [PATCH] Refactoring

---
 notebooks/LPD/LPDChar_Darks_NBC.ipynb         | 457 ++++++++----------
 notebooks/LPD/LPDChar_Darks_Summary_NBC.ipynb |  90 +---
 2 files changed, 241 insertions(+), 306 deletions(-)

diff --git a/notebooks/LPD/LPDChar_Darks_NBC.ipynb b/notebooks/LPD/LPDChar_Darks_NBC.ipynb
index 00fcd83e1..960adc960 100644
--- a/notebooks/LPD/LPDChar_Darks_NBC.ipynb
+++ b/notebooks/LPD/LPDChar_Darks_NBC.ipynb
@@ -6,47 +6,47 @@
     "source": [
     "# Offset, Noise and Dead Pixels Characterization #\n",
     "\n",
-    "Author: S. Hauf\n",
+    "Author: M. Karnevskiy, S. Hauf\n",
     "\n",
-    "This notebook performs recharacterize of dark images to get offset, noise and bad-pixel maps. All 3 type of constants are evaluated per pixel and per memory cell.\n",
+    "This notebook performs a re-characterization of dark images to derive offset, noise and bad-pixel maps. All three types of constants are evaluated per pixel and per memory cell.\n",
     "\n",
-    "Notebook will correctly handle veto settings, but note that if you veto cells you will not be able to use these offsets for runs with different veto settings - vetoed cells will have zero offset.\n",
+    "The notebook will correctly handle veto settings, but note that if you veto cells you will not be able to use these offsets for runs with different veto settings - vetoed cells will have zero offset.\n",
     "\n",
-    "Evaluated calibration constants are stored locally and injected in the calibration data base.\n",
+    "The evaluated calibration constants are stored locally and injected into the calibration database.\n",
     "\n",
-    "**Offset** ($O$) is defined as median ($M$) of dark signal ($Ds$) over trains ($t$) for a given pixel ($x,y$) and memory cell ($c$). \n",
+    "**The offset** ($O$) is defined as the median ($M$) of the dark signal ($Ds$) over trains ($t$) for a given pixel ($x,y$) and memory cell ($c$). 
\n", "\n", - "**Noise** $N$ is a standard deviation $\\delta$ of dark signal.\n", + "**The noise** $N$ is the standard deviation $\\sigma$ of the dark signal.\n", "\n", - "$$ O_{x,y,c} = M(Ds)_{t} ,\\,\\,\\,\\,\\,\\, N_{x,y,c} = \\delta(Ds)_{t}$$\n", + "$$ O_{x,y,c} = M(Ds)_{t} ,\\,\\,\\,\\,\\,\\, N_{x,y,c} = \\sigma(Ds)_{t}$$\n", "\n", - "**Bad-pixel** mask is evaluated bitweise.\n", + "**The bad pixel** mask is encoded as a bit mask.\n", "\n", - "**Bit: \"OFFSET_OUT_OF_THRESHOLD\":**\n", + "**\"OFFSET_OUT_OF_THRESHOLD\":**\n", "\n", "Offset outside of bounds:\n", "\n", - "$$M(O)_{x,y} - \\delta(O)_{x,y} * thresholds\\_offset\\_sigma < O < M(O)_{x,y} + \\delta(O)_{x,y} * thresholds\\_offset\\_sigma $$\n", + "$$M(O)_{x,y} - \\sigma(O)_{x,y} * \\mathrm{thresholds\\_offset\\_sigma} < O < M(O)_{x,y} + \\sigma(O)_{x,y} * \\mathrm{thresholds\\_offset\\_sigma} $$\n", "\n", - "Or offset outside of hard limits\n", + "or offset outside of hard limits\n", "\n", - "$$thresholds\\_offset\\_hard\\_low < O < thresholds\\_offset\\_hard\\_high $$\n", + "$$ \\mathrm{thresholds\\_offset\\_hard}_\\mathrm{low} < O < \\mathrm{thresholds\\_offset\\_hard}_\\mathrm{high} $$\n", "\n", - "**Bit: \"NOISE_OUT_OF_THRESHOLD\":**\n", + "**\"NOISE_OUT_OF_THRESHOLD\":**\n", "\n", "Noise outside of bounds:\n", "\n", - "$$M(N)_{x,y} - \\delta(N)_{x,y} * thresholds\\_noise\\_sigma < N < M(N)_{x,y} + \\delta(N)_{x,y} * thresholds\\_noise\\_sigma $$\n", + "$$M(N)_{x,y} - \\sigma(N)_{x,y} * \\mathrm{thresholds\\_noise\\_sigma} < N < M(N)_{x,y} + \\sigma(N)_{x,y} * \\mathrm{thresholds\\_noise\\_sigma} $$\n", "\n", - "Or noise outside of hard limits\n", + "or noise outside of hard limits\n", "\n", - "$$thresholds\\_noise\\_hard\\_low < N < thresholds\\_noise\\_hard\\_high $$\n", + "$$\\mathrm{thresholds\\_noise\\_hard}_\\mathrm{low} < N < \\mathrm{thresholds\\_noise\\_hard}_\\mathrm{high} $$\n", "\n", - "**Bit: \"OFFSET_NOISE_EVAL_ERROR\":**\n", + "**\"OFFSET_NOISE_EVAL_ERROR\":**\n", "\n", "Offset and Noise both not $nan$ values \n", "\n", - "Values: $thresholds\\_offset\\_sigma$, $thresholds\\_offset\\_hard$, $thresholds\\_noise\\_sigma$, $thresholds\\_noise\\_hard$ are given as parameters." + "Values: $\\mathrm{thresholds\\_offset\\_sigma}$, $\\mathrm{thresholds\\_offset\\_hard}$, $\\mathrm{thresholds\\_noise\\_sigma}$, $\\mathrm{thresholds\\_noise\\_hard}$ are given as parameters." ] }, { @@ -61,7 +61,7 @@ "in_folder = \"/gpfs/exfel/exp/FXE/201930/p900063/raw\" # path to input data, required\n", "out_folder = \"/gpfs/exfel/data/scratch/karnem/LPD/\" # path to output to, required\n", "sequences = [0] # sequence files to evaluate\n", - "modules = [7] # list of modules to evaluate, RANGE ALLOWED\n", + "modules = [-1] # list of modules to evaluate, RANGE ALLOWED\n", "\n", "capacitor_setting = 5 # capacitor_setting for which data was taken, required\n", "run_high = 358 # run number in which high gain data was recorded, required\n", @@ -74,9 +74,9 @@ "bias_voltage = 250 # detector bias voltage\n", "cal_db_interface = \"tcp://max-exfl016:8017\" # the database interface to use\n", "\n", - "thresholds_offset_sigma = 3. # bad pixel offset-sigma threshold\n", + "thresholds_offset_sigma = 3. # bad pixel relative threshold in terms of n sigma offset\n", "thresholds_offset_hard = [400, 1500] # bad pixel hard threshold\n", - "thresholds_noise_sigma = 7. # bad pixel noise-sigma threshold\n", + "thresholds_noise_sigma = 7. 
# bad pixel relative threshold in terms of n sigma noise\n", "thresholds_noise_hard = [1, 35] # bad pixel hard threshold\n", "skip_first_ntrains = 10 # Number of first trains to skip\n", "use_dir_creation_date = False # use the creation date of the directory for database time derivation\n", @@ -93,30 +93,49 @@ }, "outputs": [], "source": [ - "import warnings\n", - "warnings.filterwarnings('ignore')\n", "from collections import OrderedDict\n", + "import copy\n", "from datetime import datetime\n", + "from functools import partial\n", "import os\n", + "import warnings\n", + "warnings.filterwarnings('ignore')\n", + "\n", + "import dateutil.parser\n", "import h5py\n", + "from ipyparallel import Client\n", + "from IPython.display import display, Markdown, Latex\n", "import numpy as np\n", "import matplotlib\n", "matplotlib.use(\"agg\")\n", + "import matplotlib.patches as patches\n", "import matplotlib.pyplot as plt\n", "%matplotlib inline\n", - "#%config InlineBackend.figure_format = 'svg'\n", - "\n", - "from ipyparallel import Client\n", - "\n", - "from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions\n", - "from cal_tools.tools import (gain_map_files, parse_runs, run_prop_seq_from_path, get_notebook_name, get_dir_creation_date, get_from_db)\n", + "import tabulate\n", + "from XFELDetAna.plotting.heatmap import heatmapPlot\n", + "from XFELDetAna.plotting.simpleplot import simplePlot\n", + "\n", + "from iCalibrationDB import (ConstantMetaData, Constants, \n", + " Conditions, Detectors, \n", + " Versions)\n", + "from cal_tools.tools import (gain_map_files, parse_runs, \n", + " run_prop_seq_from_path, \n", + " get_notebook_name, \n", + " get_dir_creation_date, get_from_db)\n", "from cal_tools.influx import InfluxLogger\n", "from cal_tools.enums import BadPixels\n", - "from cal_tools.plotting import show_overview, plot_badpix_3d, create_constant_overview\n", - "\n", - "from XFELDetAna import xfelpyanatools as xana\n", - "from IPython.display import display, Markdown, Latex\n", - "\n", + "from cal_tools.plotting import (show_overview, plot_badpix_3d, \n", + " create_constant_overview)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ "client = Client(profile=cluster_profile)\n", "view = client[:]\n", "view.use_dill()\n", @@ -128,7 +147,6 @@ "if modules[0] == -1:\n", " modules = list(range(16))\n", "\n", - "from collections import OrderedDict\n", "gain_runs = OrderedDict()\n", "if capacitor_setting == 5:\n", " gain_runs[\"high_5pf\"] = \"r{:04d}\".format(run_high)\n", @@ -166,74 +184,6 @@ "print(\"Bias voltage: {}V\".format(bias_voltage))" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "import matplotlib.patches as patches\n", - "\n", - "qm = \"Q{}M{}\".format(modules[0]//4+1, modules[0]%4+1)\n", - "display(Markdown('## Position of the module {}, it tiles and ASICs of tile ##'.format(qm)))\n", - "\n", - "fig, ax = plt.subplots(1,figsize=(10,10))\n", - "ax.set_axis_off()\n", - "\n", - "ax.set_xlim(0, 97)\n", - "ax.set_ylim(0, 97)\n", - "\n", - "q_poses = np.array([[51,47], [47,1], [1,5], [5,51]])\n", - "m_poses= np.array([[22.5,20.5], [22.5,0.5], [0.5,0.5], [0.5,20.5]])\n", - "\n", - "for iq, q_pos in enumerate(q_poses):\n", - " ax.add_patch(patches.Rectangle(q_pos,45,45,linewidth=2,edgecolor='r',\n", - " facecolor='y', fill=True))\n", - "\n", - " ax.text(q_pos[0]+20, q_pos[1]+41.5, 
'Q{}'.format(iq+1), fontsize=22)\n", - " for im, m_pos in enumerate(m_poses):\n", - " ax.add_patch(patches.Rectangle(q_pos+m_pos,22,20,linewidth=3,edgecolor='r',\n", - " facecolor='g', fill=True))\n", - "\n", - " if iq*4+im==modules[0]:\n", - " for a_posx in range(2):\n", - " for a_posy in range(8):\n", - " a_pos = np.array([a_posx*11., a_posy*20/8.])\n", - " pos = q_pos+m_pos+a_pos\n", - " \n", - " ax.add_patch(patches.Rectangle(q_pos+m_pos+a_pos,11,20/8.,linewidth=1,edgecolor='black',\n", - " facecolor='r', fill=True))\n", - " \n", - " if a_posx == 0:\n", - " label = str(a_posy+9)\n", - " else:\n", - " label = str(-a_posy+(a_posx*8))\n", - " \n", - " ax.text(pos[0]+4, pos[1]+0.3, label, fontsize=14)\n", - " else:\n", - " #pass\n", - " #print(q_pos[0]+4, m_pos[0]+0.3, 'Q{}M{}'.format(iq+1, im+1))\n", - " pos = q_pos+m_pos+np.array([5,8])\n", - " ax.text(pos[0], pos[1], 'Q{}M{}'.format(iq+1, im+1), fontsize=22, color='y')\n", - " \n", - "ax.add_patch(patches.Rectangle([65,93],30, 4,linewidth=1,edgecolor='black',\n", - " facecolor='r', fill=True))\n", - "\n", - "ax.text(52, 94, 'ASICs:', fontsize=22, color='black')\n", - "\n", - "for i_pos in range(8):\n", - " \n", - " pos = np.array([65,93]) + np.array([i_pos*30/8.+0.3, 0.3])\n", - " \n", - " ax.add_patch(patches.Rectangle(pos,24/8., 3.4,linewidth=1,edgecolor='black',\n", - " facecolor='deepskyblue', fill=True))\n", - " \n", - " \n", - " ax.text(pos[0]+0.5, pos[1]+0.5, '{}'.format(i_pos + 1), fontsize=18, color='black')" - ] - }, { "cell_type": "code", "execution_count": null, @@ -247,9 +197,7 @@ " os.makedirs(out_folder)\n", "\n", "gmf = gain_map_files(in_folder, gain_runs, sequences, DET_FILE_INSET, QUADRANTS, MODULES_PER_QUAD)\n", - "gain_mapped_files, total_sequences, total_file_size = gmf\n", - "\n", - "#print(\"Will process at total of {} sequences: {:0.2f} GB of data.\".format(total_sequences, total_file_size))" + "gain_mapped_files, total_sequences, total_file_size = gmf" ] }, { @@ -268,10 +216,6 @@ "outputs": [], "source": [ "# the actual characterization - to not eded this without consultation\n", - "import copy\n", - "from functools import partial\n", - "\n", - "\n", "def characterize_module(cells, bp_thresh, skip_first_ntrains, ntrains, inp):\n", " import numpy as np\n", " import copy\n", @@ -425,14 +369,18 @@ "if local_output:\n", " for cap in capacitor_settings:\n", " runs = [v for k, v in gain_runs.items() if cap in k]\n", - " ofile = \"{}/lpd_offset_store_{}_{}_{}.h5\".format(out_folder, \"_\".join(runs), cap, \"_\".join([str(m) for m in modules]))\n", + " ofile = \"{}/lpd_offset_store_{}_{}_{}.h5\".format(out_folder, \n", + " \"_\".join(runs), \n", + " cap, \n", + " \"_\".join([str(m) for m in modules]))\n", " store_file = h5py.File(ofile, \"w\")\n", " for qm in offset_g[cap].keys():\n", " store_file[\"{}/Offset/0/data\".format(qm)] = offset_g[cap][qm]\n", " store_file[\"{}/Noise/0/data\".format(qm)] = noise_g[cap][qm]\n", - " store_file[\"{}/BadPixelsDark/0/data\".format(qm)] = badpix_g[cap][qm]\n", + " store_file[\"{}/BadPixelsDark/0/data\".format(qm)\n", + " ] = badpix_g[cap][qm]\n", " store_file.close()\n", - " print('Constants are stored to {}'.format(ofile))\n" + " print('Constants are stored to {}'.format(ofile))" ] }, { @@ -447,7 +395,6 @@ "clist = [\"Offset\", \"Noise\", \"BadPixelsDark\"]\n", "old_const = {}\n", "old_mdata = {}\n", - "import dateutil.parser\n", "creation_time = dateutil.parser.parse(\"2019-02-14\")\n", "print('Retrieve pre-existing constants for comparison.')\n", "for cap in 
capacitor_settings:\n", @@ -458,19 +405,39 @@ " capacitor=cap)\n", "\n", " data, mdata = get_from_db(getattr(Detectors.LPD1M1, qm),\n", - " getattr(Constants.LPD, const)(),\n", - " condition,\n", - " None,\n", - " cal_db_interface, creation_time=creation_time,\n", - " verbosity=2, timeout=30000)\n", - " \n", + " getattr(Constants.LPD, const)(),\n", + " condition,\n", + " None,\n", + " cal_db_interface, creation_time=creation_time,\n", + " verbosity=2, timeout=30000)\n", + "\n", " old_const[const] = data\n", - " \n", + "\n", " if mdata is not None and data is not None:\n", " time = mdata.calibration_constant_version.begin_at\n", " old_mdata[const] = time.isoformat()\n", " else:\n", - " old_mdata[const] = \"Not found\"\n" + " old_mdata[const] = \"Not found\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "res = OrderedDict()\n", + "for cap in capacitor_settings:\n", + " res[cap] = OrderedDict()\n", + " for i in modules:\n", + " qm = \"Q{}M{}\".format(i//4+1, i % 4+1)\n", + "\n", + " res[cap][qm] = {'Offset': offset_g[cap][qm],\n", + " 'Noise': noise_g[cap][qm],\n", + " 'BadPixelsDark': badpix_g[cap][qm]\n", + " }" ] }, { @@ -484,67 +451,33 @@ "# Save constants in the calibration DB\n", "if db_output:\n", " for cap in capacitor_settings:\n", - " for qm in offset_g[cap].keys():\n", - " metadata = ConstantMetaData()\n", - " offset = Constants.LPD.Offset()\n", - " offset.data = offset_g[cap][qm]\n", - " metadata.calibration_constant = offset\n", - "\n", - " # set the operating condition\n", - " condition = Conditions.Dark.LPD(memory_cells=max_cells, bias_voltage=bias_voltage,\n", - " capacitor=cap)\n", - " device = getattr(Detectors.LPD1M1, qm)\n", - " if device:\n", - "\n", - " metadata.detector_condition = condition\n", + " for qm in res[cap]:\n", + " for const in res[cap][qm]:\n", "\n", - " # specify the a version for this constant\n", - " if creation_time is None:\n", - " metadata.calibration_constant_version = Versions.Now(device=device)\n", - " else:\n", - " metadata.calibration_constant_version = Versions.Timespan(device=device,\n", - " start=creation_time)\n", - " # metadata.send(cal_db_interface)\n", - " \n", - " metadata = ConstantMetaData()\n", - " noise = Constants.LPD.Noise()\n", - " noise.data = noise_g[cap][qm]\n", - " metadata.calibration_constant = noise\n", + " metadata = ConstantMetaData()\n", + " dconst = getattr(Constants.LPD, const)()\n", + " dconst.data = res[cap][qm][const]\n", + " metadata.calibration_constant = dconst\n", "\n", - " # set the operating condition\n", - " condition = Conditions.Dark.LPD(memory_cells=max_cells, bias_voltage=bias_voltage,\n", - " capacitor=cap)\n", - " device = getattr(Detectors.LPD1M1, qm)\n", - " \n", - " if device:\n", - "\n", - " metadata.detector_condition = condition\n", - " \n", - " # specify the a version for this constant\n", - " if creation_time is None:\n", - " metadata.calibration_constant_version = Versions.Now(device=device)\n", - " else:\n", - " metadata.calibration_constant_version = Versions.Timespan(device=device,\n", - " start=creation_time)\n", - " # metadata.send(cal_db_interface)\n", - " \n", - " metadata = ConstantMetaData()\n", - " badpixels = Constants.LPD.BadPixelsDark()\n", - " badpixels.data = badpix_g[cap][qm]\n", - " metadata.calibration_constant = badpixels\n", - "\n", - " # set the operating condition\n", - " condition = Conditions.Dark.LPD(memory_cells=max_cells, bias_voltage=bias_voltage,\n", - " capacitor=cap)\n", - 
" device = getattr(Detectors.LPD1M1, qm)\n", - " if device:\n", + " # set the operating condition\n", + " condition = Conditions.Dark.LPD(memory_cells=max_cells,\n", + " bias_voltage=bias_voltage,\n", + " capacitor=cap)\n", + " device = getattr(Detectors.LPD1M1, qm)\n", + " if device:\n", "\n", - " metadata.detector_condition = condition\n", + " metadata.detector_condition = condition\n", "\n", - " # specify the a version for this constant\n", - " metadata.calibration_constant_version = Versions.Now(device=device)\n", - " # metadata.send(cal_db_interface)\n", - " print(\"Injected to the calibration DB. Begin at: {}\".format(metadata.calibration_constant_version.begin_at))" + " # specify the a version for this constant\n", + " if creation_time is None:\n", + " metadata.calibration_constant_version = Versions.Now(device=device)\n", + " else:\n", + " metadata.calibration_constant_version = Versions.Timespan(device=device,\n", + " start=creation_time)\n", + " # metadata.send(cal_db_interface)\n", + " msg = 'Const {} for module {} was injected to the calibration DB. Begin at: {}'\n", + " print(msg.format(const, qm,\n", + " metadata.calibration_constant_version.begin_at))" ] }, { @@ -555,16 +488,61 @@ }, "outputs": [], "source": [ - "res = OrderedDict()\n", - "for cap in capacitor_settings:\n", - " res[cap] = OrderedDict()\n", - " for i in modules:\n", - " qm = \"Q{}M{}\".format(i//4+1, i%4+1)\n", + "qm = \"Q{}M{}\".format(modules[0]//4+1, modules[0] % 4+1)\n", + "display(Markdown('## Position of the module {}, it tiles and ASICs of tile ##'.format(qm)))\n", "\n", - " res[cap][qm] = {'Offset': offset_g[cap][qm],\n", - " 'Noise': noise_g[cap][qm],\n", - " 'BadPixelsDark': badpix_g[cap][qm] \n", - " }\n" + "fig, ax = plt.subplots(1, figsize=(10, 10))\n", + "ax.set_axis_off()\n", + "\n", + "ax.set_xlim(0, 97)\n", + "ax.set_ylim(0, 97)\n", + "\n", + "q_poses = np.array([[51, 47], [47, 1], [1, 5], [5, 51]])\n", + "m_poses = np.array([[22.5, 20.5], [22.5, 0.5], [0.5, 0.5], [0.5, 20.5]])\n", + "\n", + "for iq, q_pos in enumerate(q_poses):\n", + " ax.add_patch(patches.Rectangle(q_pos, 45, 45, linewidth=2, edgecolor='r',\n", + " facecolor='y', fill=True))\n", + "\n", + " ax.text(q_pos[0]+20, q_pos[1]+41.5, 'Q{}'.format(iq+1), fontsize=22)\n", + " for im, m_pos in enumerate(m_poses):\n", + " ax.add_patch(patches.Rectangle(q_pos+m_pos, 22, 20, linewidth=3, edgecolor='r',\n", + " facecolor='g', fill=True))\n", + "\n", + " if iq*4+im == modules[0]:\n", + " for a_posx in range(2):\n", + " for a_posy in range(8):\n", + " a_pos = np.array([a_posx*11., a_posy*20/8.])\n", + " pos = q_pos+m_pos+a_pos\n", + "\n", + " ax.add_patch(patches.Rectangle(q_pos+m_pos+a_pos, 11, 20/8., \n", + " linewidth=1, edgecolor='black',\n", + " facecolor='r', fill=True))\n", + "\n", + " if a_posx == 0:\n", + " label = str(a_posy+9)\n", + " else:\n", + " label = str(-a_posy+(a_posx*8))\n", + "\n", + " ax.text(pos[0]+4, pos[1]+0.3, label, fontsize=14)\n", + " else:\n", + " pos = q_pos+m_pos+np.array([5, 8])\n", + " ax.text(pos[0], pos[1], 'Q{}M{}'.format(\n", + " iq+1, im+1), fontsize=22, color='y')\n", + "\n", + "ax.add_patch(patches.Rectangle([65, 93], 30, 4, linewidth=1, edgecolor='black',\n", + " facecolor='r', fill=True))\n", + "\n", + "ax.text(52, 94, 'ASICs:', fontsize=22, color='black')\n", + "\n", + "for i_pos in range(8):\n", + " pos = np.array([65, 93]) + np.array([i_pos*30/8.+0.3, 0.3])\n", + "\n", + " ax.add_patch(patches.Rectangle(pos, 24/8., 3.4, linewidth=1, edgecolor='black',\n", + " facecolor='deepskyblue', 
fill=True))\n", + "\n", + " ax.text(pos[0]+0.5, pos[1]+0.5, '{}'.format(i_pos + 1),\n", + " fontsize=18, color='black')" ] }, { @@ -584,74 +562,78 @@ }, "outputs": [], "source": [ - "import matplotlib.pyplot as plt\n", - "\n", - "#plt.rcParams.update({'font.size': 20})\n", - "fig, grid = plt.subplots(3,1,sharex=\"col\", sharey=\"row\",figsize=(10,7))\n", + "fig, grid = plt.subplots(3, 1, sharex=\"col\", sharey=\"row\", figsize=(10, 7))\n", "fig.subplots_adjust(wspace=0, hspace=0)\n", "\n", "for cap in capacitor_settings:\n", " for i in modules:\n", - " qm = \"Q{}M{}\".format(i//4+1, i%4+1)\n", + " qm = \"Q{}M{}\".format(i//4+1, i % 4+1)\n", " if data_g[cap][qm].shape[0] == 0:\n", " break\n", " for gain in range(3):\n", - " data = data_g[cap][qm][:,gain]\n", + " data = data_g[cap][qm][:, gain]\n", " offset = np.nanmedian(data)\n", " noise = np.nanstd(data)\n", " xrange = [np.nanmin(data_g[cap][qm]), np.nanmax(data_g[cap][qm])]\n", - " nbins = int( xrange[1] - xrange[0])\n", - " \n", - " hn,cn = np.histogram(data, bins=nbins, range=xrange )\n", + " nbins = int(xrange[1] - xrange[0])\n", + "\n", + " hn, cn = np.histogram(data, bins=nbins, range=xrange)\n", "\n", " grid[gain].hist(data, range=xrange, bins=nbins)\n", - " grid[gain].plot([offset-noise,offset-noise], [0,np.nanmax(hn)], linewidth=1.5, color='red', \n", + " grid[gain].plot([offset-noise, offset-noise], [0, np.nanmax(hn)], \n", + " linewidth=1.5, color='red',\n", " label='1 $\\sigma$ deviation')\n", - " grid[gain].plot([offset+noise,offset+noise], [0,np.nanmax(hn)], linewidth=1.5, color='red')\n", - " grid[gain].plot([offset,offset], [0,0], linewidth=1.5, color='y', label='median')\n", - " \n", - " grid[gain].plot([np.nanmedian(offset_g[cap][qm][:,:,12,gain]),np.nanmedian(offset_g[cap][qm][:,:,12,gain])], \n", - " [0,np.nanmax(hn)], linewidth=1.5, color='green', label='average over pixels')\n", - " \n", + " grid[gain].plot([offset+noise, offset+noise],\n", + " [0, np.nanmax(hn)], linewidth=1.5, color='red')\n", + " grid[gain].plot([offset, offset], [0, 0],\n", + " linewidth=1.5, color='y', label='median')\n", + "\n", + " grid[gain].plot([np.nanmedian(offset_g[cap][qm][:, :, 12, gain]), \n", + " np.nanmedian(offset_g[cap][qm][:, :, 12, gain])],\n", + " [0, np.nanmax(hn)], linewidth=1.5, color='green', \n", + " label='average over pixels')\n", + "\n", " grid[gain].set_xlim(xrange)\n", " grid[gain].set_ylim(0, np.nanmax(hn)*1.1)\n", " grid[gain].set_xlabel(\"Offset value [ADU]\")\n", " grid[gain].set_ylabel(\"# of occurance\")\n", - " \n", + "\n", " if gain == 0:\n", - " leg = grid[gain].legend(loc='outside-top', ncol=3, bbox_to_anchor=(0.1, 0.25, 0.7, 1.0))\n", - " \n", - " \n", - " grid[gain].text(820, np.nanmax(hn)*0.4, \"{} gain\".format(gain_names[gain]), fontsize=20)\n", - " \n", + " leg = grid[gain].legend(\n", + " loc='outside-top', ncol=3, \n", + " bbox_to_anchor=(0.1, 0.25, 0.7, 1.0))\n", + "\n", + " grid[gain].text(820, np.nanmax(hn)*0.4,\n", + " \"{} gain\".format(gain_names[gain]), fontsize=20)\n", + "\n", " a = plt.axes([.125, .1, 0.775, .8], frame_on=False)\n", " a.patch.set_alpha(0.05)\n", " a.set_xlim(xrange)\n", - " plt.plot([offset,offset], [0,1], linewidth=1.5, color='y')\n", - " #plt.title('Probability')\n", + " plt.plot([offset, offset], [0, 1], linewidth=1.5, color='y')\n", " plt.xticks([])\n", " plt.yticks([])\n", - " \n", - " \n", + "\n", " ypos = 0.9\n", - " x1pos = (np.nanmedian(data_g[cap][qm][:,0])+np.nanmedian(data_g[cap][qm][:,2]))/2.\n", - " x2pos = 
(np.nanmedian(data_g[cap][qm][:,2])+np.nanmedian(data_g[cap][qm][:,1]))/2.-10\n", - " \n", - " plt.annotate(\"\", xy=(np.nanmedian(data_g[cap][qm][:,0]), ypos), xycoords='data',\n", - " xytext=(np.nanmedian(data_g[cap][qm][:,2]), ypos), textcoords='data',\n", - " arrowprops=dict(arrowstyle=\"<->\", connectionstyle=\"arc3\"))\n", - " \n", - " plt.annotate('{}'.format(np.nanmedian(data_g[cap][qm][:,0])-np.nanmedian(data_g[cap][qm][:,2])), \n", + " x1pos = (np.nanmedian(data_g[cap][qm][:, 0]) +\n", + " np.nanmedian(data_g[cap][qm][:, 2]))/2.\n", + " x2pos = (np.nanmedian(data_g[cap][qm][:, 2]) +\n", + " np.nanmedian(data_g[cap][qm][:, 1]))/2.-10\n", + "\n", + " plt.annotate(\"\", xy=(np.nanmedian(data_g[cap][qm][:, 0]), ypos), xycoords='data',\n", + " xytext=(np.nanmedian(data_g[cap][qm][:, 2]), ypos), textcoords='data',\n", + " arrowprops=dict(arrowstyle=\"<->\", connectionstyle=\"arc3\"))\n", + "\n", + " plt.annotate('{}'.format(np.nanmedian(data_g[cap][qm][:, 0])-np.nanmedian(data_g[cap][qm][:, 2])),\n", " xy=(x1pos, ypos), xycoords='data', xytext=(5, 5), textcoords='offset points')\n", - " \n", - " plt.annotate(\"\", xy=(np.nanmedian(data_g[cap][qm][:,2]), ypos), xycoords='data',\n", - " xytext=(np.nanmedian(data_g[cap][qm][:,1]), ypos), textcoords='data',\n", - " arrowprops=dict(arrowstyle=\"<->\", connectionstyle=\"arc3\"))\n", - " \n", - " plt.annotate('{}'.format(np.nanmedian(data_g[cap][qm][:,2])-np.nanmedian(data_g[cap][qm][:,1])), \n", + "\n", + " plt.annotate(\"\", xy=(np.nanmedian(data_g[cap][qm][:, 2]), ypos), xycoords='data',\n", + " xytext=(np.nanmedian(data_g[cap][qm][:, 1]), ypos), textcoords='data',\n", + " arrowprops=dict(arrowstyle=\"<->\", connectionstyle=\"arc3\"))\n", + "\n", + " plt.annotate('{}'.format(np.nanmedian(data_g[cap][qm][:, 2])-np.nanmedian(data_g[cap][qm][:, 1])),\n", " xy=(x2pos, ypos), xycoords='data', xytext=(5, 5), textcoords='offset points')\n", - " \n", - "plt.show()\n" + "\n", + "plt.show()" ] }, { @@ -671,8 +653,6 @@ }, "outputs": [], "source": [ - "from XFELDetAna.plotting.heatmap import *\n", - "\n", "# Loop over capacitor settings, modules, constants\n", "for cap in capacitor_settings:\n", " for i in modules:\n", @@ -712,7 +692,7 @@ " title='p-Value for cell 12, {} gain'.format(gain_names[gain]) )\n", " \n", " ax = fig.add_subplot(224)\n", - " _ = xana.simplePlot(d, #aspect=1.6, \n", + " _ = simplePlot(d, #aspect=1.6, \n", " x_label = \"p-Value\".format(gain), \n", " y_label=\"# of occurance\",\n", " use_axis=ax,\n", @@ -817,7 +797,7 @@ " ]\n", "\n", " ax = fig.add_subplot(121+iconst)\n", - " _ = xana.simplePlot(d, figsize=(5, 7), aspect=1,\n", + " _ = simplePlot(d, figsize=(5, 7), aspect=1,\n", " x_label=\"{} value [ADU]\".format(const),\n", " y_label=\"# of occurance\",\n", " title='', legend_pad=0.1, legend_size='10%',\n", @@ -910,9 +890,6 @@ }, "outputs": [], "source": [ - "from XFELDetAna.plotting.heatmap import *\n", - "#plt.rcParams.update({'font.size': 14})\n", - "\n", "# Loop over capacitor settings, modules, constants\n", "for cap in res:\n", " for qm in res[cap]:\n", @@ -1153,7 +1130,7 @@ " new_max = (local_max - data_min*(1-frac))/frac\n", " new_max = np.max([data_max, new_max])\n", " \n", - " _ = xana.simplePlot(d, figsize=(10,10), aspect=2, xrange=(-12, 510),\n", + " _ = simplePlot(d, figsize=(10,10), aspect=2, xrange=(-12, 510),\n", " x_label = 'Memory Cell ID', \n", " y_label=y_title, use_axis=ax,\n", " title=title,\n", @@ -1164,9 +1141,6 @@ " y_log=False, legend='outside-top-ncol2-frame', legend_size='18%',\n", " 
legend_pad=0.00)\n", " \n", - " #old_min, old_max = ax.get_ylim()\n", - "\n", - " \n", " plt.tight_layout(pad=1.08, h_pad=0.35)\n", " \n", " plt.show()" @@ -1241,7 +1215,6 @@ " \n", " table.append(line)\n", "\n", - "import tabulate\n", "display(Markdown('### Number of bad pixels ###'.format(qm)))\n", "md = display(Latex(tabulate.tabulate(table, tablefmt='latex', \n", " headers=[\"Pixel type\", \"Threshold\", \"New constant\", \"Old constant \"]))) " diff --git a/notebooks/LPD/LPDChar_Darks_Summary_NBC.ipynb b/notebooks/LPD/LPDChar_Darks_Summary_NBC.ipynb index 975eeff63..136a8e324 100644 --- a/notebooks/LPD/LPDChar_Darks_Summary_NBC.ipynb +++ b/notebooks/LPD/LPDChar_Darks_Summary_NBC.ipynb @@ -16,7 +16,7 @@ "outputs": [], "source": [ "cluster_profile = \"noDB\" # The ipcluster profile to use\n", - "out_folder = \"/gpfs/exfel/data/scratch/karnem/LPD/\" # path to output to, required\n" + "out_folder = \"/gpfs/exfel/data/scratch/karnem/LPD/\" # path to output to, required" ] }, { @@ -27,27 +27,26 @@ }, "outputs": [], "source": [ - "import warnings\n", - "warnings.filterwarnings('ignore')\n", "from collections import OrderedDict\n", + "import copy\n", "from datetime import datetime\n", "import os\n", + "import warnings\n", + "warnings.filterwarnings('ignore')\n", + "\n", "import h5py\n", + "from IPython.display import display, Markdown, Latex\n", "import numpy as np\n", - "import glob\n", "import matplotlib\n", "matplotlib.use(\"agg\")\n", + "import matplotlib.patches as patches\n", "import matplotlib.pyplot as plt\n", "%matplotlib inline\n", - "#%config InlineBackend.figure_format = ['svg', 'pdf']\n", - "from cal_tools.enums import BadPixels\n", - "\n", - "from XFELDetAna import xfelpyanatools as xana\n", - "from XFELDetAna.plotting.heatmap import *\n", - "\n", - "from IPython.display import display, Markdown, Latex\n", "import tabulate\n", - "gain_names = ['High', 'Medium', 'Low']\n" + "from XFELDetAna.plotting.heatmap import heatmapPlot\n", + "from XFELDetAna.plotting.simpleplot import simplePlot\n", + "\n", + "gain_names = ['High', 'Medium', 'Low']" ] }, { @@ -59,7 +58,6 @@ "outputs": [], "source": [ "# Load constants from local files\n", - "\n", "files = glob.glob('{}/*h5'.format(out_folder))\n", "\n", "data = OrderedDict()\n", @@ -90,14 +88,14 @@ "source": [ "mod_idx = np.argsort(mod_names)\n", "\n", - "constants = {'Offset': np.zeros((len(mod_names), 256, 256,512,3)),\n", - " 'Noise': np.zeros((len(mod_names), 256, 256,512,3)),\n", - " 'BadPixelsDark': np.zeros((len(mod_names), 256, 256,512,3))}\n", + "constants = {'Offset': np.zeros((len(mod_names), 256, 256, 512, 3)),\n", + " 'Noise': np.zeros((len(mod_names), 256, 256, 512, 3)),\n", + " 'BadPixelsDark': np.zeros((len(mod_names), 256, 256, 512, 3))}\n", "\n", "for i, idx in enumerate(mod_idx):\n", " for key, item in constants.items():\n", - " item[i] = data[mod_names[idx]][key] \n", - " \n", + " item[i] = data[mod_names[idx]][key]\n", + "\n", "mod_names = np.array(mod_names)[mod_idx]" ] }, @@ -109,8 +107,6 @@ }, "outputs": [], "source": [ - "import matplotlib.patches as patches\n", - "\n", "display(Markdown('## Processed modules ##'))\n", "\n", "fig, ax = plt.subplots(1, figsize=(10, 10))\n", @@ -196,18 +192,14 @@ " if 'Q{}M{}'.format(iq+1, im+1) in mod_names:\n", " values = np.nanmean(const[counter, :, :, :, gain], axis=2)\n", " values[values == 0] = np.nan\n", - " #print(iq, im, 'x ', q_pos[1]+m_pos[1], q_pos[1]+m_pos[1]+m_size)\n", - " #print(iq, im, 'y ', q_pos[0]+m_pos[0], q_pos[0]+m_pos[0] + m_size)\n", " 
image[q_pos[1]+m_pos[1]: q_pos[1]+m_pos[1]+m_size,\n", " q_pos[0]+m_pos[0]: q_pos[0]+m_pos[0] + m_size] = values\n", " counter += 1\n", - " # break\n", - " # break\n", "\n", " std = np.nanstd(image)\n", " mean = np.nanmedian(image)\n", " if const_name == 'Noise':\n", - " std=mean/4.\n", + " std = mean/4.\n", " _ = heatmapPlot(image, add_panels=False, figsize=(20, 20),\n", " vmin=mean-std*2, vmax=mean+std*2,\n", " x_label='columns', y_label='rows',\n", @@ -215,8 +207,7 @@ " const_name),\n", " cmap='viridis',\n", " title='{}. {} gain'.format(const_name, gain_names[gain]))\n", - " plt.show()\n", - " #break" + " plt.show()" ] }, { @@ -228,8 +219,6 @@ }, "outputs": [], "source": [ - "# plt.rcParams.update({'font.size': 14})\n", - "\n", "# Loop over capacitor settings, modules, constants\n", "for const_name, const in constants.items():\n", "\n", @@ -255,32 +244,6 @@ " 'pad': 0.2, 'w_pad': 1.3, 'h_pad': 1.3})\n", " ax = fig.add_subplot(111)\n", "\n", - " '''\n", - " _ = heatmapPlot(datamean, \n", - " add_panels=False,\n", - " y_label='Module ID', x_label='Memory Cell ID',\n", - " lut_label=label, use_axis=ax,\n", - " panel_y_label=label, panel_x_label=label,\n", - " cmap='viridis',\n", - " y_ticklabels=mod_names,\n", - " y_ticks=np.arange(len(mod_names))+0.5,\n", - " title = '{} gain'.format(gain_names[gain])\n", - " )\n", - " \n", - " if const_name != 'BadPixelsDark':\n", - " ax = fig.add_subplot(122)\n", - " label = '$\\sigma$ {} [ADU]'.format(const_name)\n", - " _ = heatmapPlot(np.nanstd(data, axis=(1, 2)), \n", - " add_panels=False,\n", - " y_label='Module ID', x_label='Memory Cell ID',\n", - " lut_label=label, use_axis=ax,\n", - " panel_y_label=label, panel_x_label=label,\n", - " cmap='viridis',\n", - " y_ticklabels=mod_names,\n", - " y_ticks=np.arange(len(mod_names))+0.5,\n", - " title = '{} gain'.format(gain_names[gain])\n", - " )\n", - " '''\n", " d = []\n", " for im, mod in enumerate(datamean):\n", " d.append({'x': np.arange(mod.shape[0]),\n", @@ -289,7 +252,7 @@ " 'label': mod_names[im],\n", " })\n", "\n", - " _ = xana.simplePlot(d, figsize=(10, 10), xrange=(-12, 510),\n", + " _ = simplePlot(d, figsize=(10, 10), xrange=(-12, 510),\n", " x_label='Memory Cell ID',\n", " y_label=label,\n", " use_axis=ax,\n", @@ -309,7 +272,7 @@ " 'label': mod_names[im],\n", " })\n", "\n", - " _ = xana.simplePlot(d, figsize=(10, 10), xrange=(-12, 510),\n", + " _ = simplePlot(d, figsize=(10, 10), xrange=(-12, 510),\n", " x_label='Memory Cell ID',\n", " y_label=label,\n", " use_axis=ax,\n", @@ -318,8 +281,7 @@ " legend='outside-top-ncol6-frame', legend_size='18%',\n", " legend_pad=0.00)\n", "\n", - " plt.show()\n", - " # break" + " plt.show()" ] }, { @@ -375,7 +337,7 @@ "\n", " t_line.append('{:6.0f} ({:6.3f}) '.format(\n", " datasum, datamean))\n", - " \n", + "\n", " label = '## Number (fraction) of bad pixels'\n", " else:\n", "\n", @@ -384,14 +346,14 @@ "\n", " t_line.append('{:6.1f} $\\\\pm$ {:6.1f}'.format(\n", " np.nanmean(data), np.nanstd(data)))\n", - " \n", + "\n", " label = '## Average {} [ADU], good pixels only ##'.format(const_name)\n", - " \n", - " \n", + "\n", " table.append(t_line)\n", "\n", " display(Markdown(label))\n", - " md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=header)))" + " md = display(Latex(tabulate.tabulate(\n", + " table, tablefmt='latex', headers=header)))" ] } ], -- GitLab
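
For readers skimming the patch, the dark-characterization math that the refactored notebook describes — offset as the median of the dark signal over trains, noise as its standard deviation, and a bitwise bad-pixel mask built from sigma thresholds and hard limits — can be summarised in a short, self-contained numpy sketch. This is not code from the patch: the function name, the assumed array layout `(trains, x, y, memory_cells)` and the literal bit values are illustrative; the notebook itself additionally works per gain stage and per capacitor setting and takes the bit definitions from `cal_tools.enums.BadPixels`.

```python
import numpy as np

# Assumed bit values for illustration only; the notebook uses cal_tools.enums.BadPixels.
OFFSET_OUT_OF_THRESHOLD = 1 << 0
NOISE_OUT_OF_THRESHOLD = 1 << 1
OFFSET_NOISE_EVAL_ERROR = 1 << 2


def characterize_darks(darks,
                       thresholds_offset_sigma=3.,
                       thresholds_offset_hard=(400, 1500),
                       thresholds_noise_sigma=7.,
                       thresholds_noise_hard=(1, 35)):
    """Return per-pixel, per-memory-cell offset, noise and bad-pixel maps.

    `darks` is assumed to be an array of dark images shaped
    (trains, x, y, memory_cells).
    """
    # Offset: median of the dark signal over trains; noise: its standard deviation.
    offset = np.nanmedian(darks, axis=0)   # shape (x, y, memory_cells)
    noise = np.nanstd(darks, axis=0)

    bad_pixels = np.zeros(offset.shape, dtype=np.uint32)

    # Offset outside median +/- n-sigma spread (evaluated per memory cell),
    # or outside the hard limits.
    off_med = np.nanmedian(offset, axis=(0, 1))
    off_std = np.nanstd(offset, axis=(0, 1))
    bad_offset = ((np.abs(offset - off_med) > thresholds_offset_sigma * off_std)
                  | (offset < thresholds_offset_hard[0])
                  | (offset > thresholds_offset_hard[1]))
    bad_pixels[bad_offset] |= OFFSET_OUT_OF_THRESHOLD

    # Same logic applied to the noise map.
    noi_med = np.nanmedian(noise, axis=(0, 1))
    noi_std = np.nanstd(noise, axis=(0, 1))
    bad_noise = ((np.abs(noise - noi_med) > thresholds_noise_sigma * noi_std)
                 | (noise < thresholds_noise_hard[0])
                 | (noise > thresholds_noise_hard[1]))
    bad_pixels[bad_noise] |= NOISE_OUT_OF_THRESHOLD

    # Pixels for which offset or noise could not be evaluated at all.
    bad_pixels[~np.isfinite(offset) | ~np.isfinite(noise)] |= OFFSET_NOISE_EVAL_ERROR

    return offset, noise, bad_pixels
```

In the patched notebook the same statistics are accumulated separately for each gain stage and capacitor setting, stored to a local HDF5 file when `local_output` is set, and injected into the calibration database when `db_output` is set.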