diff --git a/notebooks/AGIPD/Characterize_AGIPD_Gain_Darks_NBC.ipynb b/notebooks/AGIPD/Characterize_AGIPD_Gain_Darks_NBC.ipynb index ca345b5b104b1a46c06b9d4a01ccd3f0ccede587..0e05035d434af2710418b0db025d1fdcbbb6e032 100644 --- a/notebooks/AGIPD/Characterize_AGIPD_Gain_Darks_NBC.ipynb +++ b/notebooks/AGIPD/Characterize_AGIPD_Gain_Darks_NBC.ipynb @@ -26,7 +26,7 @@ "source": [ "cluster_profile = \"noDB\" # The ipcluster profile to use\n", "in_folder = \"/gpfs/exfel/d/raw/SPB/202030/p900138/\" # path to input data, required\n", - "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/AGIPD3\" # path to output to, required\n", + "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/AGIPDb1\" # path to output to, required\n", "sequences = [0] # sequence files to evaluate.\n", "modules = [-1] # list of modules to evaluate, RANGE ALLOWED\n", "run_high = 264 # run number in which high gain data was recorded, required\n", @@ -57,10 +57,18 @@ "rawversion = 2 # RAW file format version\n", "\n", "thresholds_offset_sigma = 3. # thresholds in terms of n sigma noise for offset deduced bad pixels\n", - "thresholds_offset_hard = [4000, 8500] # thresholds in absolute ADU terms for offset deduced bad pixels\n", + "thresholds_offset_hard = [0, 0] # Legacy offset threshold: leave at [0, 0] to use the per-gain thresholds below, or give one [low, high] pair applied to all 3 gains\n", + "thresholds_offset_hard_hg = [4000, 8500] # High-gain thresholds in absolute ADU terms for offset deduced bad pixels\n", + "thresholds_offset_hard_mg = [4000, 8500] # Medium-gain thresholds in absolute ADU terms for offset deduced bad pixels\n", + "thresholds_offset_hard_lg = [4000, 8500] # Low-gain thresholds in absolute ADU terms for offset deduced bad pixels\n", "\n", "thresholds_noise_sigma = 5. # thresholds in terms of n sigma noise for offset deduced bad pixels\n", - "thresholds_noise_hard = [4, 20] # thresholds in absolute ADU terms for offset deduced bad pixels\n", + "thresholds_noise_hard = [0, 0] # Legacy noise threshold: leave at [0, 0] to use the per-gain thresholds below, or give one [low, high] pair applied to all 3 gains\n", + "thresholds_noise_hard_hg = [4, 20] # High-gain thresholds in absolute ADU terms for noise deduced bad pixels\n", + "thresholds_noise_hard_mg = [4, 20] # Medium-gain thresholds in absolute ADU terms for noise deduced bad pixels\n", + "thresholds_noise_hard_lg = [4, 20] # Low-gain thresholds in absolute ADU terms for noise deduced bad pixels\n", + "\n", + "gain_sep_sigma = 5. # minimum offset separation between consecutive gain stages, in units of combined noise sigma\n", "\n", "high_res_badpix_3d = False # set this to True if you need high-resolution 3d bad pixel plots. 
Runtime: ~ 1h" ] @@ -251,13 +259,13 @@ "import copy\n", "from functools import partial\n", "def characterize_module(il_mode, cells, bp_thresh, rawversion, loc, acq_rate,\n", - " h5path, h5path_idx, inp):\n", + " prev_offset, prev_noise, gain_sep_sigma, h5path, h5path_idx, inp):\n", " import numpy as np\n", " import copy\n", " import h5py\n", " from cal_tools.enums import BadPixels\n", " from cal_tools.agipdlib import get_num_cells, get_acq_rate\n", - " \n", + "\n", " filename, filename_out, channel = inp\n", " \n", " if cells == 0:\n", @@ -267,7 +275,7 @@ " \n", " if acq_rate == 0.:\n", " acq_rate = get_acq_rate(filename, loc, channel)\n", - " \n", + "\n", " thresholds_offset_hard, thresholds_offset_sigma, thresholds_noise_hard, thresholds_noise_sigma = bp_thresh \n", "\n", " infile = h5py.File(filename, \"r\", driver=\"core\")\n", @@ -300,7 +308,7 @@ " else:\n", " ga = im[:, 1, ...]\n", " im = im[:, 0, ...].astype(np.float32)\n", - " \n", + "\n", " im = np.rollaxis(im, 2)\n", " im = np.rollaxis(im, 2, 1)\n", "\n", @@ -311,29 +319,33 @@ " offset = np.zeros((im.shape[0], im.shape[1], mcells))\n", " gains = np.zeros((im.shape[0], im.shape[1], mcells))\n", " noise = np.zeros((im.shape[0], im.shape[1], mcells))\n", - " \n", + "\n", " for cc in np.unique(cellIds[cellIds < mcells]):\n", " cellidx = cellIds == cc\n", " offset[...,cc] = np.median(im[..., cellidx], axis=2)\n", " noise[...,cc] = np.std(im[..., cellidx], axis=2)\n", " gains[...,cc] = np.median(ga[..., cellidx], axis=2)\n", - " \n", + "\n", " # bad pixels\n", " bp = np.zeros(offset.shape, np.uint32)\n", " # offset related bad pixels\n", " offset_mn = np.nanmedian(offset, axis=(0,1))\n", - " offset_std = np.nanstd(offset, axis=(0,1)) \n", - " \n", + " offset_std = np.nanstd(offset, axis=(0,1))\n", + "\n", + " # Flag pixels whose offset is not well separated from the previous (higher) gain stage\n", + " if prev_offset is not None:\n", + " gain_sep = (offset - prev_offset) / np.sqrt(noise**2 + prev_noise**2)\n", + " bp[gain_sep < gain_sep_sigma] |= BadPixels.GAIN_THRESHOLDING_ERROR.value\n", + "\n", " bp[(offset < offset_mn-thresholds_offset_sigma*offset_std) |\n", " (offset > offset_mn+thresholds_offset_sigma*offset_std)] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n", " bp[(offset < thresholds_offset_hard[0]) | (\n", " offset > thresholds_offset_hard[1])] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n", " bp[~np.isfinite(offset)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n", - " \n", + "\n", " # noise related bad pixels\n", " noise_mn = np.nanmedian(noise, axis=(0,1))\n", " noise_std = np.nanstd(noise, axis=(0,1)) \n", - " \n", " bp[(noise < noise_mn-thresholds_noise_sigma*noise_std) |\n", " (noise > noise_mn+thresholds_noise_sigma*noise_std)] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value\n", " bp[(noise < thresholds_noise_hard[0]) | (noise > thresholds_noise_hard[1])] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value\n", @@ -341,8 +353,7 @@ "\n", "\n", " return offset, noise, gains, bp, cells, acq_rate\n", - " \n", - " \n", + "\n", "offset_g = OrderedDict()\n", "noise_g = OrderedDict()\n", "gain_g = OrderedDict()\n", @@ -352,8 +363,18 @@ "start = datetime.now()\n", "all_cells = []\n", "all_acq_rate = []\n", + "offset = None # offset/noise of the previously characterized gain stage, used for the gain-separation check\n", + "noise = None\n", + "if thresholds_offset_hard == [0, 0]:\n", + " thresholds_offset_hard = [thresholds_offset_hard_hg, thresholds_offset_hard_mg, thresholds_offset_hard_lg]\n", + "else: # backward compatibility: apply one [low, high] offset threshold to all 3 gains\n", + " thresholds_offset_hard = [thresholds_offset_hard] * 3\n", + "if thresholds_noise_hard == [0, 0]:\n", + " thresholds_noise_hard = [thresholds_noise_hard_hg, thresholds_noise_hard_mg, thresholds_noise_hard_lg]\n", + "else: # backward compatibility: apply one [low, high] noise threshold to all 3 gains\n", + " thresholds_noise_hard = [thresholds_noise_hard] * 3\n", "for gain, mapped_files in gain_mapped_files.items():\n", - " \n", + "\n",
 " inp = []\n", " dones = []\n", " for i in modules:\n", @@ -367,9 +388,9 @@ " inp.append((fname_in, fout, i))\n", " first = False\n", " p = partial(characterize_module, IL_MODE, max_cells,\n", - " (thresholds_offset_hard, thresholds_offset_sigma,\n", - " thresholds_noise_hard, thresholds_noise_sigma), \n", - " rawversion, karabo_id, acq_rate, h5path, h5path_idx)\n", + " (thresholds_offset_hard[gg], thresholds_offset_sigma,\n", + " thresholds_noise_hard[gg], thresholds_noise_sigma), \n", + " rawversion, karabo_id, acq_rate, offset, noise, gain_sep_sigma, h5path, h5path_idx)\n", " #results = list(map(p, inp))\n", " results = view.map_sync(p, inp)\n", " for ii, r in enumerate(results):\n", @@ -383,7 +404,7 @@ " noise_g[qm] = np.zeros_like(offset_g[qm])\n", " gain_g[qm] = np.zeros_like(offset_g[qm])\n", " badpix_g[qm] = np.zeros_like(offset_g[qm], np.uint32)\n", - " \n", + "\n", " offset_g[qm][...,gg] = offset\n", " noise_g[qm][...,gg] = noise\n", " gain_g[qm][...,gg] = gain\n", @@ -447,7 +468,7 @@ " 'ThresholdsDark': thresholds_g[qm],\n", " 'BadPixelsDark': badpix_g[qm] \n", " }\n", - " \n", + "\n", "if local_output:\n", " for qm in offset_g.keys():\n", " ofile = \"{}/agipd_offset_store_{}_{}.h5\".format(out_folder,\n", @@ -566,7 +587,7 @@ " print(msg.format(const, qm,\n", " metadata.calibration_constant_version.begin_at))\n", " except Exception as e:\n", - " print(e)\n", + " print(\"Error:\", e)\n", "\n", " if local_output:\n", " save_const_to_h5(metadata, out_folder)\n", @@ -610,7 +631,7 @@ " ax.add_patch(matplotlib.patches.Rectangle((x,(q_x[1]-q_st*im+3)), l_x, l_y/2, linewidth=2,edgecolor='c',\n", " facecolor='sandybrown', fill=True))\n", " x += asic_pos\n", - " \n", + "\n", " if iq*4+im == modules[0]:\n", " # Change Text for current processed module.\n", " ax.text(q_x[0]+13, q_x[1]-q_st*im+1.5, 'Q{}M{}'.format(\n", @@ -720,7 +741,8 @@ "source": [ "cols = {BadPixels.NOISE_OUT_OF_THRESHOLD.value: (BadPixels.NOISE_OUT_OF_THRESHOLD.name, '#FF000080'),\n", " BadPixels.OFFSET_NOISE_EVAL_ERROR.value: (BadPixels.OFFSET_NOISE_EVAL_ERROR.name, '#0000FF80'),\n", - " BadPixels.OFFSET_OUT_OF_THRESHOLD.value: (BadPixels.OFFSET_OUT_OF_THRESHOLD.name, '#00FF0080'),\n", + " BadPixels.OFFSET_OUT_OF_THRESHOLD.value: (BadPixels.OFFSET_OUT_OF_THRESHOLD.name, '#FFDF0080'),\n", + " BadPixels.GAIN_THRESHOLDING_ERROR.value: (BadPixels.GAIN_THRESHOLDING_ERROR.name, '#BF208080'),\n", " BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value: ('MIXED', '#DD00DD80')}\n", "\n", "rebin = 8 if not high_res_badpix_3d else 2\n", @@ -815,8 +837,8 @@ " bp_thresh[mod] = np.zeros((con.shape[0], con.shape[1], con.shape[2], 5), dtype=con.dtype)\n", " bp_thresh[mod][...,:2] = con[...,:2]\n", " bp_thresh[mod][...,2:] = con\n", - " \n", - " \n", + "\n", + "\n", "create_constant_overview(thresholds_g, \"Threshold (ADU)\", max_cells, 4000, 10000, 5,\n", " out_folder=out_folder, infix=\"{}-{}-{}\".format(*offset_runs.values()),\n", " badpixels=[bp_thresh, np.nan],\n", @@ -867,6 +889,7 @@ " l_data.append(datau32 - np.bitwise_or(datau32,BadPixels.NOISE_OUT_OF_THRESHOLD.value))\n", " l_data.append(datau32 - np.bitwise_or(datau32,BadPixels.OFFSET_OUT_OF_THRESHOLD.value))\n", " l_data.append(datau32 - np.bitwise_or(datau32,BadPixels.OFFSET_NOISE_EVAL_ERROR.value))\n", + " l_data.append(datau32 - np.bitwise_or(datau32,BadPixels.GAIN_THRESHOLDING_ERROR.value))\n", "\n", " if old_const['BadPixelsDark'] is not None:\n", " dataold = np.copy(old_const['BadPixelsDark'][:, :, :, gain])\n", @@ -875,12 +898,13 @@ " 
l_data_old.append(datau32old - np.bitwise_or(datau32old,BadPixels.NOISE_OUT_OF_THRESHOLD.value))\n", " l_data_old.append(datau32old - np.bitwise_or(datau32old,BadPixels.OFFSET_OUT_OF_THRESHOLD.value))\n", " l_data_old.append(datau32old - np.bitwise_or(datau32old,BadPixels.OFFSET_NOISE_EVAL_ERROR.value))\n", + " l_data_old.append(datau32old - np.bitwise_or(datau32old,BadPixels.GAIN_THRESHOLDING_ERROR.value))\n", "\n", " l_data_name = ['All bad pixels', 'NOISE_OUT_OF_THRESHOLD', \n", - " 'OFFSET_OUT_OF_THRESHOLD', 'OFFSET_NOISE_EVAL_ERROR']\n", + " 'OFFSET_OUT_OF_THRESHOLD', 'OFFSET_NOISE_EVAL_ERROR', 'GAIN_THRESHOLDING_ERROR']\n", "\n", " l_threshold = ['', '{}'.format(thresholds_noise_sigma), '{}'.format(thresholds_offset_sigma),\n", - " '{}/{}'.format(thresholds_offset_hard, thresholds_noise_hard)]\n", + " '{}/{}'.format(thresholds_offset_hard[gain], thresholds_noise_hard[gain]), '{}'.format(gain_sep_sigma)]\n", "\n", " for i in range(len(l_data)):\n", " line = ['{}, {} gain'.format(l_data_name[i], gain_names[gain]),\n",