diff --git a/notebooks/LPD/LPDChar_Darks_NBC.ipynb b/notebooks/LPD/LPDChar_Darks_NBC.ipynb index 8f793f13bf319d7d5ed59f78c92f1ed489b2afae..efad5f62810d0d84b7cc2a2f654ab3f20b90f2b2 100644 --- a/notebooks/LPD/LPDChar_Darks_NBC.ipynb +++ b/notebooks/LPD/LPDChar_Darks_NBC.ipynb @@ -59,7 +59,7 @@ "in_folder = \"/gpfs/exfel/exp/FXE/201931/p900088/raw\" # path to input data, required\n", "out_folder = \"/gpfs/exfel/data/scratch/karnem/LPD/\" # path to output to, required\n", "sequences = [0] # sequence files to evaluate\n", - "modules = [-1] # list of modules to evaluate, RANGE ALLOWED\n", + "modules = [2] # list of modules to evaluate, RANGE ALLOWED\n", "\n", "capacitor_setting = 5 # capacitor_setting for which data was taken, required\n", "run_high = 112 # run number in which high gain data was recorded, required\n", @@ -80,8 +80,9 @@ "skip_first_ntrains = 10 # Number of first trains to skip\n", "not_use_dir_creation_date = False # do not use the creation date of the directory for database time derivation\n", "instrument = \"FXE\" # instrument name\n", - "ntrains = 300 # number of trains to use\n", - "high_res_badpix_3d = False # plot bad-pixel summary in high resolution" + "ntrains = 30 # number of trains to use\n", + "high_res_badpix_3d = False # plot bad-pixel summary in high resolution\n", + "do_norm_test = False # perform normality test" ] }, { @@ -211,7 +212,7 @@ "outputs": [], "source": [ "# the actual characterization - to not eded this without consultation\n", - "def characterize_module(cells, bp_thresh, skip_first_ntrains, ntrains, inp):\n", + "def characterize_module(cells, bp_thresh, skip_first_ntrains, ntrains, do_norm_test, inp):\n", " import numpy as np\n", " import copy\n", " import h5py\n", @@ -231,10 +232,17 @@ " thresholds_offset_hard, thresholds_offset_sigma, thresholds_noise_hard, thresholds_noise_sigma = bp_thresh\n", "\n", " infile = h5py.File(filename, \"r\", driver=\"core\")\n", + " \n", + " first = 
np.array(infile[\"/INDEX/FXE_DET_LPD1M-1/DET/{}CH0:xtdf/image/first\".format(channel)])\n", + " count = np.array(infile[\"/INDEX/FXE_DET_LPD1M-1/DET/{}CH0:xtdf/image/count\".format(channel)])\n", + " n_images = count[-1]+first[-1]\n", + " first_image = skip_first_ntrains*cells\n", + " last_image = min(skip_first_ntrains*cells+ntrains*cells, n_images)\n", + " \n", " im = np.array(infile[\"/INSTRUMENT/FXE_DET_LPD1M-1/DET/{}CH0:xtdf/image/data\".format(\n", - " channel)][skip_first_ntrains*cells:skip_first_ntrains*cells+ntrains*cells, ...])\n", + " channel)][first_image:last_image, ...])\n", " cellid = np.squeeze(np.array(infile[\"/INSTRUMENT/FXE_DET_LPD1M-1/DET/{}CH0:xtdf/image/cellId\".format(\n", - " channel)][skip_first_ntrains*cells:skip_first_ntrains*cells+ntrains*cells, ...]))\n", + " channel)][first_image:last_image, ...]))\n", " infile.close()\n", "\n", " im, g = splitOffGainLPD(im[:, 0, ...])\n", @@ -252,8 +260,9 @@ "\n", " offset[..., cc] = np.median(im[:, :, idx], axis=2)\n", " noise[..., cc] = np.std(im[:, :, idx], axis=2)\n", - " _, normal_test[..., cc] = scipy.stats.normaltest(\n", - " im[:, :, idx], axis=2)\n", + " if do_norm_test:\n", + " _, normal_test[..., cc] = scipy.stats.normaltest(\n", + " im[:, :, idx], axis=2)\n", "\n", " # bad pixels\n", " bp = np.zeros(offset.shape, np.uint32)\n", @@ -288,8 +297,11 @@ "ntest_g = OrderedDict()\n", "\n", "gg = 0\n", + "pars = []\n", "old_cap = None\n", "start = datetime.now()\n", + "inp = []\n", + " \n", "for gain, mapped_files in gain_mapped_files.items():\n", " cap = gain.split(\"_\")[1]\n", " if cap != old_cap:\n", @@ -301,49 +313,46 @@ " data_g[cap] = OrderedDict()\n", " ntest_g[cap] = OrderedDict()\n", "\n", - " dones = []\n", - " inp = []\n", - "\n", " for i in modules:\n", " qm = \"Q{}M{}\".format(i//4 + 1, i % 4 + 1)\n", " if qm in mapped_files and not mapped_files[qm].empty():\n", " fname_in = mapped_files[qm].get()\n", - " dones.append(mapped_files[qm].empty())\n", + " fout = os.path.abspath(\n", + " 
\"{}/{}\".format(out_folder, (os.path.split(fname_in)[-1]).replace(\"RAW\", \"CORR\")))\n", + " print(\"Process file: \", fout)\n", + " inp.append((fname_in, fout, i))\n", + " pars.append((gg, cap))\n", "\n", - " else:\n", - " continue\n", - " fout = os.path.abspath(\n", - " \"{}/{}\".format(out_folder, (os.path.split(fname_in)[-1]).replace(\"RAW\", \"CORR\")))\n", - " print(\"Process file: \", fout)\n", - " inp.append((fname_in, fout, i))\n", - " first = False\n", - " p = partial(characterize_module, max_cells,\n", + " gg+=1\n", + "\n", + "\n", + "p = partial(characterize_module, max_cells,\n", " (thresholds_offset_hard, thresholds_offset_sigma,\n", " thresholds_noise_hard, thresholds_noise_sigma),\n", - " skip_first_ntrains, ntrains)\n", - " results = view.map_sync(p, inp)\n", - " for r in results:\n", - " offset, noise, i, bp, data, normal = r\n", - " qm = \"Q{}M{}\".format(i//4 + 1, i % 4 + 1)\n", - " if qm not in offset_g[cap]:\n", - " offset_g[cap][qm] = np.zeros(\n", - " (offset.shape[0], offset.shape[1], offset.shape[2], 3))\n", - " noise_g[cap][qm] = np.zeros_like(offset_g[cap][qm])\n", - " badpix_g[cap][qm] = np.zeros_like(offset_g[cap][qm])\n", - " data_g[cap][qm] = np.zeros((ntrains, 3))\n", - " ntest_g[cap][qm] = np.zeros_like(offset_g[cap][qm])\n", - "\n", - " offset_g[cap][qm][..., gg] = offset\n", - " noise_g[cap][qm][..., gg] = noise\n", - " badpix_g[cap][qm][..., gg] = bp\n", - " data_g[cap][qm][:data.shape[0], gg] = data\n", - " ntest_g[cap][qm][..., gg] = normal\n", - "\n", - " hn, cn = np.histogram(data, bins=20)\n", - " print(\"{} gain. Module: {}. 
Number of processed trains per cell: {}.\\n\".format(\n", - " gain_names[gg], qm, data.shape[0]))\n", - " gg += 1\n", - " plt.show()\n", + " skip_first_ntrains, ntrains, do_norm_test)\n", + "results = view.map_sync(p, inp)\n", + "\n", + "for ir, r in enumerate(results):\n", + " offset, noise, i, bp, data, normal = r\n", + " qm = \"Q{}M{}\".format(i//4 + 1, i % 4 + 1)\n", + " gg, cap = pars[ir]\n", + " if qm not in offset_g[cap]:\n", + " offset_g[cap][qm] = np.zeros(\n", + " (offset.shape[0], offset.shape[1], offset.shape[2], 3))\n", + " noise_g[cap][qm] = np.zeros_like(offset_g[cap][qm])\n", + " badpix_g[cap][qm] = np.zeros_like(offset_g[cap][qm])\n", + " data_g[cap][qm] = np.full((ntrains, 3), np.nan)\n", + " ntest_g[cap][qm] = np.zeros_like(offset_g[cap][qm])\n", + "\n", + " offset_g[cap][qm][..., gg] = offset\n", + " noise_g[cap][qm][..., gg] = noise\n", + " badpix_g[cap][qm][..., gg] = bp\n", + " data_g[cap][qm][:data.shape[0], gg] = data\n", + " ntest_g[cap][qm][..., gg] = normal\n", + "\n", + " hn, cn = np.histogram(data, bins=20)\n", + " print(\"{} gain, Capacitor {}, Module: {}. Number of processed trains per cell: {}.\\n\".format(\n", + " gain_names[gg], cap, qm, data.shape[0]))\n", "\n", "duration = (datetime.now()-start).total_seconds()\n", "logger.runtime_summary_entry(success=True, runtime=duration,\n", @@ -636,6 +645,9 @@ "source": [ "# Loop over capacitor settings, modules, constants\n", "for cap in capacitor_settings:\n", + " if not do_norm_test:\n", + " print('Normality test was not requested. Flag `do_norm_test` False')\n", + " break\n", " for i in modules:\n", " qm = \"Q{}M{}\".format(i//4+1, i%4+1)\n", "\n",