From d799645259653a43d77d9cb1c471f859c9c54474 Mon Sep 17 00:00:00 2001 From: Karim Ahmed <karim.ahmed@xfel.eu> Date: Mon, 11 May 2020 09:51:38 +0200 Subject: [PATCH] fix plotting errors and creation time --- .../AGIPD/Chracterize_AGIPD_Gain_PC_NBC.ipynb | 280 ++++++++++-------- 1 file changed, 156 insertions(+), 124 deletions(-) diff --git a/notebooks/AGIPD/Chracterize_AGIPD_Gain_PC_NBC.ipynb b/notebooks/AGIPD/Chracterize_AGIPD_Gain_PC_NBC.ipynb index b8e1f6ccd..2542e9d50 100644 --- a/notebooks/AGIPD/Chracterize_AGIPD_Gain_PC_NBC.ipynb +++ b/notebooks/AGIPD/Chracterize_AGIPD_Gain_PC_NBC.ipynb @@ -37,11 +37,11 @@ "outputs": [], "source": [ "in_folder = '/gpfs/exfel/exp/SPB/202030/p900138/raw/' # path to input data, required\n", - "modules = [1,] # modules to work on, required, range allowed\n", + "modules = [14,] # modules to work on, required, range allowed\n", "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/pc\" # path to output to, required\n", "runs = [466, 467, 468, 469, 470, 471, 472, 473] # runs to use, required, range allowed\n", - "n_sequences = 1 # number of sequence files, starting for 0 to evaluate\n", - "cluster_profile = \"noDB\" # The ipcluster profile to use\n", + "n_sequences = 3 # number of sequence files, starting for 0 to evaluate\n", + "cluster_profile = \"noDB\" # The ipcluster profile to use#\n", "local_output = True # output constants locally\n", "db_output = False # output constants to database\n", "bias_voltage = 300 # detector bias voltage\n", @@ -77,7 +77,6 @@ "import h5py\n", "import os\n", "\n", - "\n", "import numpy as np\n", "import matplotlib\n", "matplotlib.use(\"Qt4Agg\")\n", @@ -101,8 +100,6 @@ "from cal_tools.plotting import show_overview, plot_badpix_3d\n", "from cal_tools.agipdlib import get_acq_rate, get_num_cells, get_gain_setting\n", "\n", - "\n", - "\n", "IL_MODE = interlaced \n", "maxcells = mem_cells if not interlaced else mem_cells*2\n", "cells = mem_cells\n", @@ -116,7 +113,6 @@ "print(\"Sequences: {}\".format(seqs))\n", "print(\"Interlaced mode: {}\".format(IL_MODE))\n", "\n", - "\n", "run, prop, seq = run_prop_seq_from_path(in_folder)\n", "logger = InfluxLogger(detector=\"AGIPD\", instrument=instrument, mem_cells=mem_cells,\n", " notebook=get_notebook_name(), proposal=prop)\n", @@ -165,22 +161,7 @@ " if acq_rate == 0.:\n", " acq_rate = get_acq_rate(fname, loc, channel)\n", " print(\"Acquisition rate set from file: {} MHz\".format(acq_rate))\n", - " \n", - " # Define constant creation time.\n", - " if creation_time:\n", - " try:\n", - " creation_time = datetime.strptime(creation_time, '%Y-%m-%d %H:%M:%S.%f')\n", - " except Exception as e:\n", - " print(f\"creation_time value error: {e}.\" \n", - " \"Use same format as YYYY-MM-DD HR:MN:SC.ms e.g. 
2019-07-04 11:02:41.00/n\")\n", - " creation_time = None\n", - " print(\"Given creation time wont be used.\")\n", - " else:\n", - " creation_time = None\n", "\n", - " if not creation_time and use_dir_creation_date:\n", - " creation_time = get_dir_creation_date(in_folder, run)\n", - " \n", " if mem_cells == 0:\n", " cells = get_num_cells(fname, loc, channel)\n", " maxcells = cells\n", @@ -197,6 +178,23 @@ " f.close()\n", "bursts_per_file = np.array(bursts_per_file)\n", "print(\"Bursts per sequence file are: {}\".format(bursts_per_file))\n", + "\n", + "\n", + "# Define constant creation time.\n", + "if creation_time.strip() is not \"\":\n", + " try:\n", + " creation_time = datetime.strptime(creation_time, '%Y-%m-%d %H:%M:%S.%f')\n", + " except Exception as e:\n", + " print(f\"creation_time value error: {e}.\" \n", + " \"Use same format as YYYY-MM-DD HR:MN:SC.ms e.g. 2019-07-04 11:02:41.00/n\")\n", + " creation_time = None\n", + " print(\"Given creation time wont be used.\")\n", + "else:\n", + " creation_time = None\n", + "\n", + "if not creation_time and use_dir_creation_date:\n", + " creation_time = get_dir_creation_date(in_folder, run)\n", + "\n", "if creation_time:\n", " print(\"Using {} as creation time\".format(creation_time.isoformat()))" ] @@ -239,7 +237,6 @@ }, "outputs": [], "source": [ - "\n", "def read_and_merge_module_data(cells, path_temp, image_name_temp,\n", " runs, seqs, il_mode, rawversion, instrument, channel):\n", " import h5py\n", @@ -371,8 +368,8 @@ " pc_data_merged['analog'][:,:,64 + row_i * 8 + run_idx ,:] = pc_data['analog'][:bursts_total,:cells//cfac, 64 + row_i * 8 + run_idx,:]\n", " pc_data_merged['digital'][:,:,row_i * 8 + (7 - run_idx),:] = pc_data['digital'][:bursts_total,:cells//cfac,row_i * 8 + (7 - run_idx),:]\n", " pc_data_merged['digital'][:,:,64 + row_i * 8 + run_idx ,:] = pc_data['digital'][:bursts_total,:cells//cfac, 64 + row_i * 8 + run_idx,:] \n", - " except:\n", - " pass\n", + " except Exception as e:\n", + " print(e)\n", " #Check cellIDs\n", " #Copy cellIDs of first run\n", " if run_idx == 0:\n", @@ -608,7 +605,8 @@ "ExecuteTime": { "end_time": "2019-07-27T23:34:22.590279Z", "start_time": "2019-07-27T23:33:59.257776Z" - } + }, + "scrolled": false }, "outputs": [], "source": [ @@ -646,15 +644,16 @@ " vidx = (y > 1000) & np.isfinite(y)\n", " x = x[vidx]\n", " y = y[vidx]\n", - " \n", + " if x.shape[0] == 0:\n", + " continue\n", + " \n", " ms, labels, centers = calc_m_cluster2(x, y)\n", " bound = None\n", " bound_m = None\n", " markers = ['o','.','x','v']\n", " colors = ['b', 'r', 'g', 'k']\n", " ymin = y.min()\n", - " \n", - " \n", + "\n", " for i, lbl in enumerate(labels):\n", " if np.any(lbl):\n", " #ym = y[lbl]-y[lbl].min()\n", @@ -667,7 +666,7 @@ " # ym += y[labels[0]].max()-y[labels[0]].min()\n", " h, ex, ey = np.histogram2d(x[lbl], ym, range=((0, 600), (-500, 6000)), bins=(300, 650))\n", " H[i] += h\n", - " \n", + "\n", " \n", " \n", " fig = plt.figure(figsize=(10,10))\n", @@ -763,12 +762,14 @@ " #if ms[i] < 0: # slope separating two regions\n", " # bound = np.min(x[lbl])\n", " # bound_m = ms[i]\n", - " bound = np.min(x[labels[1]])\n", - " bound_m = ms[1]\n", + " if labels[1].any():\n", + " bound = np.min(x[labels[1]])\n", + " bound_m = ms[1]\n", " if bound is None or bound < 20 and False:\n", " ya = ana[:,cell, pix[0], pix[1]][vidx]\n", " msa, labels, centers = calc_m_cluster2(x, ya, 25, -10, 25)\n", " if np.count_nonzero(labels[0]) > 0:\n", + " \n", " bound = np.min(x[labels[0]])\n", " bound_m = ms[3]\n", " else:\n", @@ -778,27 +779,29 @@ "\n", 
" #print(bound)\n", " # fit linear slope\n", - " xl = x[(x<bound)]\n", - " yl = y[(x<bound)] - offset[pix[0], pix[1], cell, 0]\n", - " parms = {'m': bound_m, 'b': np.min(yl)}\n", - " \n", - " errors = np.ones(xl.shape)*noise[pix[0], pix[1], cell, 0]\n", - " fitted = fit_data(lin_fun, xl, yl, errors , parms)\n", - " yf = lin_fun(xl, fitted['m'], fitted['b'])\n", - " max_devl = np.max(np.abs((yl-yf)/yl))\n", - " \n", - " d3.append({'x': xl,\n", - " 'y': yf,\n", - " 'color': 'k',\n", - " 'linewidth': 1,\n", - " 'y2': (yf-yl)/errors\n", - " })\n", - " \n", + " if not np.isnan(bound_m):\n", + " xl = x[(x<bound)]\n", + " yl = y[(x<bound)] - offset[pix[0], pix[1], cell, 0]\n", + " parms = {'m': bound_m, 'b': np.min(yl)}\n", + "\n", + " errors = np.ones(xl.shape)*noise[pix[0], pix[1], cell, 0]\n", + " fitted = fit_data(lin_fun, xl, yl, errors , parms)\n", + " yf = lin_fun(xl, fitted['m'], fitted['b'])\n", + " max_devl = np.max(np.abs((yl-yf)/yl))\n", + "\n", + " d3.append({'x': xl,\n", + " 'y': yf,\n", + " 'color': 'k',\n", + " 'linewidth': 1,\n", + " 'y2': (yf-yl)/errors\n", + " })\n", " # fit hook slope\n", " if fit_hook:\n", " idx = (x >= bound) & (y > 0) & np.isfinite(x) & np.isfinite(y)\n", " xh = x[idx]\n", " yh = y[idx] - offset[pix[0], pix[1], cell, 1]\n", + " if len(yh[yh > 0]) == 0:\n", + " break\n", " parms = {'m': bound_m/10 if bound_m/10>0.3 else 0.5, 'b': np.min(yh[yh > 0]), 'a': np.max(yh), 'c': 5, 'o': bound-1}\n", " parms[\"limit_m\"] = [0.3, 1.0]\n", " parms[\"limit_c\"] = [1., 1000]\n", @@ -823,7 +826,8 @@ " y = y[vidx]\n", " \n", " #ms, labels, centers = calc_m_cluster2(x, y, 25, -10, 25)\n", - " threshold = (np.mean(y[labels[0]])+np.mean(y[labels[2]]))/2\n", + " if len(y[labels[0]]) != 0 and len(y[labels[2]]) != 0: \n", + " threshold = (np.mean(y[labels[0]])+np.mean(y[labels[2]]))/2\n", " \n", " for i, lbl in enumerate(labels):\n", " \n", @@ -863,11 +867,7 @@ "cell_type": "code", "execution_count": null, "metadata": { - "ExecuteTime": { - "end_time": "2019-07-27T23:34:23.910956Z", - "start_time": "2019-07-27T23:10:42.639Z" - }, - "scrolled": false + "scrolled": true }, "outputs": [], "source": [ @@ -877,7 +877,6 @@ " for j in range(*tpix_range2[1]):\n", " test_pixels.append((j,i))\n", "\n", - "\n", "for mod, r in enumerate(res):\n", " dig, ana, cellId = r\n", " d = []\n", @@ -888,14 +887,14 @@ " for pix in test_pixels:\n", " for cell in test_cells:\n", " color = np.random.rand(3,1)\n", - " \n", + "\n", " x = np.arange(dig.shape[0])\n", " y = dig[:,cell, pix[0], pix[1]]\n", - " \n", + "\n", " vidx = (y > 1000) & np.isfinite(y)\n", " x = x[vidx]\n", " y = y[vidx]\n", - " \n", + "\n", " ms, labels, centers = calc_m_cluster2(x, y)\n", " bound = None\n", " bound_m = None\n", @@ -915,92 +914,114 @@ " #if ms[i] < 0: # slope separating two regions\n", " # bound = np.min(x[lbl])\n", " # bound_m = ms[i]\n", - " bound = np.min(x[labels[1]])\n", - " bound_m = ms[1]\n", - " \n", - " # fit linear slope\n", - " xl = x[(x<bound)]\n", - " yl = y[(x<bound)] - offset[pix[0], pix[1], cell, 0]\n", - " errors = np.ones(xl.shape)*noise[pix[0], pix[1], cell, 0]\n", - " parms = {'m': bound_m, 'b': np.min(yl)}\n", - " fitted = fit_data(lin_fun, xl, yl, errors, parms)\n", - " \n", - " yf = lin_fun(xl, fitted['m'], fitted['b'])\n", - " max_devl = np.max(np.abs((yl-yf)/yl))\n", - " \n", - " xtt = np.arange(ana.shape[0])\n", - " ytt = ana[:,cell, pix[0], pix[1]]\n", - " \n", - " vidx = (ytt > 1000) & np.isfinite(ytt)\n", - " xtt = xtt[vidx]\n", - " ytt = ytt[vidx]\n", - " \n", - " #ms, labels, centers = 
calc_m_cluster2(x, y, 25, -10, 25)\n", - " threshold = (np.mean(ytt[labels[0]])+np.mean(ytt[labels[2]]))/2\n", - " \n", - " if threshold > 10000 or threshold < 4000:\n", - " d3.append({'x': xl,\n", - " 'y': yf,\n", - " 'color': 'k',\n", - " 'linewidth': 1,\n", - " 'y2': (yf-yl)/errors\n", - " })\n", - " \n", - " # fit hook slope\n", - " if fit_hook:\n", + " if len(x[labels[1]]):\n", + " bound = np.min(x[labels[1]])\n", + " bound_m = ms[1]\n", + "\n", + " # fit linear slope\n", " idx = (x >= bound) & (y > 0) & np.isfinite(x) & np.isfinite(y)\n", - " xh = x[idx]\n", - " yh = y[idx] - offset[pix[0], pix[1], cell, 1]\n", - " errors = np.ones(xh.shape)*noise[pix[0], pix[1], cell, 1]\n", - " parms = {'m': np.abs(bound_m/10), 'b': np.min(yh[yh > 0]), 'a': np.max(yh), 'c': 5., 'o': bound-1}\n", - " parms[\"limit_m\"] = [0.3, 1.0]\n", - " parms[\"limit_c\"] = [1., 1000]\n", - " fitted = fit_data(hook_fun, xh, yh, errors, parms)\n", - " yf = hook_fun(xh, fitted['a'], fitted['c'], fitted['o'], fitted['m'], fitted['b'])\n", - " max_devh = np.max(np.abs((yh-yf)/yh))\n", - " #print(fitted)\n", - " if threshold > 10000 or threshold < 4000 or fitted['m'] < 0.2:\n", - " d3.append({'x': xh,\n", - " 'y': yf,\n", - " 'color': 'red',\n", - " 'linewidth': 1,\n", - " 'y2': (yf-yh)/errors\n", + " xl = x[(x<bound)]\n", + " yl = y[(x<bound)] - offset[pix[0], pix[1], cell, 0]\n", + " errors = np.ones(xl.shape)*noise[pix[0], pix[1], cell, 0]\n", + " parms = {'m': bound_m, 'b': np.min(yl)}\n", + " fitted = fit_data(lin_fun, xl, yl, errors, parms)\n", + "\n", + " yf = lin_fun(xl, fitted['m'], fitted['b'])\n", + " max_devl = np.max(np.abs((yl-yf)/yl))\n", + "\n", + " xtt = np.arange(ana.shape[0])\n", + " ytt = ana[:,cell, pix[0], pix[1]]\n", + "\n", + " vidx = (ytt > 1000) & np.isfinite(ytt)\n", + " xtt = xtt[vidx]\n", + " ytt = ytt[vidx]\n", + "\n", + " #ms, labels, centers = calc_m_cluster2(x, y, 25, -10, 25)\n", + " if len(y[labels[0]]) != 0 and len(y[labels[2]]) != 0: \n", + " threshold = (np.mean(ytt[labels[0]])+np.mean(ytt[labels[2]]))/2\n", + "\n", + " if threshold > 10000 or threshold < 4000:\n", + " d3.append({\n", + " 'x': xl,\n", + " 'y': yf,\n", + " 'color': 'k',\n", + " 'linewidth': 1,\n", + " 'y2': (yf-yl)/errors\n", " })\n", "\n", - " \n", - " if threshold > 10000 or threshold < 4000:\n", - " for i, lbl in enumerate(labels):\n", + " if bound is None:\n", + " ya = ana[:,cell, pix[0], pix[1]][vidx]\n", + " msa, labels, centers = calc_m_cluster2(x, ya, 25, -10, 25)\n", + " if np.count_nonzero(labels[0]) > 0:\n", + " bound = np.min(x[labels[0]])\n", + " bound_m = ms[3]\n", + " else:\n", + " avg_g = np.nanmean(ya)\n", + " bound = np.max(x[y < avg_g])\n", + " bound_m = ms[3] \n", "\n", - " d2.append({'x': xtt[lbl],\n", - " 'y': ytt[lbl],\n", - " 'marker': markers[i],\n", - " 'color': colors[i],\n", - " 'lw': None\n", + " # fit hook slope\n", + " try:\n", + " if fit_hook and len(yh[yh > 0]) !=0:\n", + " idx = (x >= bound) & (y > 0) & np.isfinite(x) & np.isfinite(y)\n", + " xh = x[idx]\n", + " yh = y[idx] - offset[pix[0], pix[1], cell, 1]\n", + " errors = np.ones(xh.shape)*noise[pix[0], pix[1], cell, 1]\n", + " parms = {\n", + " 'm': np.abs(bound_m/10),\n", + " 'b': np.min(yh[yh > 0]), \n", + " 'a': np.max(yh),\n", + " 'c': 5.,\n", + " 'o': bound-1\n", + " }\n", + " parms[\"limit_m\"] = [0.3, 1.0]\n", + " parms[\"limit_c\"] = [1., 1000]\n", + " fitted = fit_data(hook_fun, xh, yh, errors, parms)\n", + " yf = hook_fun(xh, fitted['a'], fitted['c'], fitted['o'], fitted['m'], fitted['b'])\n", + " max_devh = 
np.max(np.abs((yh-yf)/yh))\n", + " #print(fitted)\n", + " if threshold > 10000 or threshold < 4000 or fitted['m'] < 0.2:\n", + " d3.append({\n", + " 'x': xh,\n", + " 'y': yf,\n", + " 'color': 'red',\n", + " 'linewidth': 1,\n", + " 'y2': (yf-yh)/errors\n", + " })\n", + " except Exception as e:\n", + " if \"zero-size array\" in str(e):\n", + " pass\n", + " else:\n", + " print(e)\n", "\n", + " if threshold > 10000 or threshold < 4000:\n", + " for i, lbl in enumerate(labels):\n", + " d2.append({\n", + " 'x': xtt[lbl],\n", + " 'y': ytt[lbl],\n", + " 'marker': markers[i],\n", + " 'color': colors[i],\n", + " 'lw': None\n", " })\n", "\n", " d2.append({'x': np.array([xtt[0], xtt[-1]]),\n", " 'y': np.ones(2)*threshold,\n", - "\n", " 'color': 'k',\n", " 'lw': 1\n", - "\n", " })\n", - " \n", + "\n", " #threshold = (np.min(y[x<bound]) + np.max(y[x>=bound]))/2\n", - " \n", - " \n", " fig = xana.simplePlot(d, y_label=\"PC pixel signal (ADU)\", figsize='2col', aspect=2,\n", " x_label=\"step #\")\n", " fig.savefig(\"{}/module_{}_pixel_plot_fail.png\".format(out_folder, modules[mod]))\n", - " \n", + "\n", " fig = xana.simplePlot(d2, y_label=\"PC gain signal (ADU)\", figsize='2col', aspect=2,\n", " x_label=\"step #\")\n", " fig.savefig(\"{}/module_{}_pixel_plot_gain_fail.png\".format(out_folder, modules[mod]))\n", - " \n", + "\n", " fig = xana.simplePlot(d3, secondpanel=True, y_label=\"PC signal (ADU)\", figsize='2col', aspect=2,\n", " x_label=\"step #\", y2_label=\"Residuals ($\\sigma$)\", y2_range=(-5,5))\n", - " fig.savefig(\"{}/module_{}_pixel_plot_fits_fail.png\".format(out_folder, modules[mod]))" + " fig.savefig(\"{}/module_{}_pixel_plot_fits_fail.png\".format(out_folder, modules[mod]))\n" ] }, { @@ -1437,6 +1458,8 @@ " \n", " metadata.calibration_constant_version.raw_data_location = file_loc\n", " metadata.send(cal_db_interface)\n", + " print(f'Constant SlopesPC for module {qm} was injected to the calibration DB. '\n", + " f'Begin at: {metadata.calibration_constant_version.begin_at}')\n", " \n", " # bad pixels\n", " \n", @@ -1457,7 +1480,9 @@ " metadata.calibration_constant_version = Versions.Timespan(device=getattr(Detectors.AGIPD1M1, qm),\n", " start=creation_time)\n", " metadata.calibration_constant_version.raw_data_location = file_loc\n", - " metadata.send(cal_db_interface)" + " metadata.send(cal_db_interface)\n", + " print(f'Constant BadPixelsPC for module {qm} was injected to the calibration DB. '\n", + " f'Begin at: {metadata.calibration_constant_version.begin_at}')" ] }, { @@ -1857,6 +1882,13 @@ " ax.set_xlabel(\"PC scan point (#)\")\n", " " ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { -- GitLab
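Note on the relocated creation-time hunk: the new guard `if creation_time.strip() is not "":` tests identity rather than equality, and the error message keeps the `/n` typo and "wont". A minimal sketch of the intended logic, assuming the caller wires up the directory-date fallback (`get_dir_creation_date(in_folder, run)` as used elsewhere in the notebook); the helper name `parse_creation_time` is illustrative and not part of the notebook:

from datetime import datetime
from typing import Callable, Optional


def parse_creation_time(creation_time: str,
                        fallback: Optional[Callable[[], Optional[datetime]]] = None
                        ) -> Optional[datetime]:
    """Parse an explicit timestamp; fall back (e.g. to the run
    directory's creation date) when it is empty or malformed."""
    if creation_time.strip():  # truthiness check instead of `is not ""`
        try:
            return datetime.strptime(creation_time, '%Y-%m-%d %H:%M:%S.%f')
        except ValueError as e:
            print(f"creation_time value error: {e}. "
                  "Use the format YYYY-MM-DD HH:MM:SS.ms, "
                  "e.g. 2019-07-04 11:02:41.00")
            print("The given creation time won't be used.")
    return fallback() if fallback is not None else None


# usage in the notebook's terms (use_dir_creation_date, in_folder and run
# come from the parameter cell):
# creation_time = parse_creation_time(
#     creation_time,
#     fallback=(lambda: get_dir_creation_date(in_folder, run))
#              if use_dir_creation_date else None)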
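The plotting fixes follow one pattern: guard against empty selections (an empty `x` after the validity mask, empty cluster labels, empty `yh[yh > 0]`) before taking means or fitting, rather than letting zero-size-array errors escape. A small sketch of that guard for the threshold computation, assuming NumPy arrays shaped as in the notebook; `gain_threshold` is a hypothetical helper, not notebook code:

import numpy as np
from typing import Optional, Sequence


def gain_threshold(ytt: np.ndarray,
                   labels: Sequence[np.ndarray]) -> Optional[float]:
    """Midpoint between the means of the two clusters used for the
    threshold; None when either cluster is empty (the zero-size-array
    case the patch guards against)."""
    lo = ytt[labels[0]]
    hi = ytt[labels[2]]
    if lo.size == 0 or hi.size == 0:
        return None
    return float((lo.mean() + hi.mean()) / 2.0)


# in the per-pixel/per-cell loop one would then skip early:
# threshold = gain_threshold(ytt, labels)
# if threshold is None:
#     continue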