From d410ad36972b1349e960c1f4d8237fbacdc7cead Mon Sep 17 00:00:00 2001
From: Kiana Setoodehnia <setoodeh@Kianas-MBP.fritz.box>
Date: Fri, 3 Apr 2020 15:15:02 +0200
Subject: [PATCH] Dark notebook is revised for masking cosmic ray events and
 bad pixels.

---
 .../pnCCD/Characterize_pnCCD_Dark_NBC.ipynb   | 452 +++++++++++++++---
 1 file changed, 374 insertions(+), 78 deletions(-)

diff --git a/notebooks/pnCCD/Characterize_pnCCD_Dark_NBC.ipynb b/notebooks/pnCCD/Characterize_pnCCD_Dark_NBC.ipynb
index 8cc5d8762..2f0106a80 100644
--- a/notebooks/pnCCD/Characterize_pnCCD_Dark_NBC.ipynb
+++ b/notebooks/pnCCD/Characterize_pnCCD_Dark_NBC.ipynb
@@ -1,16 +1,27 @@
 {
  "cells": [
+  {
+   "cell_type": "raw",
+   "metadata": {},
+   "source": []
+  },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
     "# pnCCD Dark Characterization\n",
     "\n",
-    "Author: Kiana Setoodehnia, DET Group, Version 2.0\n",
+    "Author: DET Group, modified by Kiana Setoodehnia, Version 2.0\n",
+    "\n",
+    "The following notebook provides dark image analysis of the pnCCD detector. Dark characterization evaluates offset and noise of the detector and gives information about bad pixels. \n",
     "\n",
-    "The following notebook provides dark image analysis of the pnCCD detector.\n",
+    "On the first iteration, the offset and noise maps are generated. Initial bad pixels map is obtained based on the offset and initial noise maps. \n",
     "\n",
-    "Dark characterization evaluates offset and noise of the detector and gives information about bad pixels. The noise map is corrected for common mode. The resulting map together with the offset and bad pixels maps are sent to the database and are also saved as .h5 files to a local path for a later use."
+    "On the second iteration, the noise map is corrected for common mode. A second bad pixel map is generated based on the offset map and offset-and-common-mode-corrected noise map. Then, the hole in the center of the CCD is added to the second bad pixel map.\n",
+    "\n",
+    "On the third and final iteration, the pixels that show up on the above-mentioned bad pixels map are masked. Possible events due to cosmic rays are found and masked. The data are then again offset and common mode corrected, and new final noise and bad pixel maps are generated.\n",
+    "\n",
+    "These latter resulting maps together with the offset map are saved as .h5 files to a local path for later use. These dark constants are not automatically sent to the database."
    ]
   },
   {
@@ -24,41 +35,32 @@
    },
    "outputs": [],
    "source": [
-    "cluster_profile = \"noDB\"  # ipcluster profile to use\n",
-    "in_folder = \"/gpfs/exfel/exp/SQS/201921/p002430/raw/\"  # input folder, required\n",
-    "out_folder = 'gpfs/exfel/data/scratch/karnem/test/'  # output folder, required\n",
+    "in_folder = \"/gpfs/exfel/exp/SQS/201930/p900075/raw\"  # input folder, required\n",
+    "out_folder = '/gpfs/exfel/exp/SQS/201930/p900075/scratch'  # output folder, required\n",
+    "path_template = 'RAW-R{:04d}-PNCCD01-S{{:05d}}.h5'  # the template to use to access data\n",
+    "h5path = '/INSTRUMENT/SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output/data/image/' # path to the data in the HDF5 file\n",
+    "run = 364 # which run to read data from, required\n",
     "sequence = 0  # sequence file to use\n",
-    "run = 745  # which run to read data from, required\n",
-    "\n",
-    "karabo_da = 'PNCCD01' # data aggregators\n",
-    "karabo_id = \"SQS_NQS_PNCCD1MP\" # karabo prefix of PNCCD devices\n",
-    "receiver_id = \"PNCCD_FMT-0\" # inset for receiver devices\n",
-    "path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5'  # the template to use to access data\n",
-    "h5path = '/INSTRUMENT/{}/CAL/{}:output/data/image/' # path in the HDF5 file the data is at\n",
-    "\n",
-    "use_dir_creation_date = True  # use dir creation date as data production reference date\n",
-    "cal_db_interface = \"tcp://max-exfl016:8021\"  # calibration DB interface to use\n",
-    "cal_db_timeout = 300000 # timeout on caldb requests\n",
-    "db_output = False # if True, the notebook sends dark constants to the calibration database\n",
-    "local_output = True # if True, the notebook saves dark constants locally\n",
-    "\n",
-    "\n",
     "number_dark_frames = 0  # number of images to be used, if set to 0 all available images are used\n",
     "chunkSize = 100 # number of images to read per chunk\n",
     "fix_temperature = 233.  # fix temperature in K, set to -1 to use value from slow data\n",
     "gain = 0  # the detector's gain setting, only 0 is currently implemented\n",
     "bias_voltage = 300  # detector's bias voltage\n",
     "integration_time = 70  # detector's integration time\n",
-    "commonModeAxis = 0 # axis along which common mode will be calculated (0: along rows, 1: along columns)\n",
-    "sigmaNoise = 10.  # pixels whose signal value exceeds sigmaNoise*noise value in that pixel will be masked\n",
+    "commonModeAxis = 'row' # axis along which common mode will be calculated ('row': along rows, 'col': along columns)\n",
+    "commonModeBlockSize = [512, 512] # size of the detector in pixels for common mode calculations\n",
+    "sigmaNoise = 10.  # pixels whose signal value exceeds sigmaNoise*noise will be considered as cosmics and are masked\n",
     "bad_pixel_offset_sigma = 5.  # any pixel whose offset is beyond 5 standard deviations, is a bad pixel\n",
     "bad_pixel_noise_sigma = 5.  # any pixel whose noise is beyond 5 standard deviations, is a bad pixel\n",
+    "cluster_profile = \"noDB\"  # ipcluster profile to use\n",
     "run_parallel = True # for parallel computation \n",
     "cpuCores = 40 # specifies the number of running cpu cores\n",
-    "\n",
+    "cal_db_interface = \"tcp://max-exfl016:8021\"  # calibration DB interface to use\n",
+    "temp_limits = 5  # temperature limits in which calibration parameters are considered equal\n",
+    "db_output = False # if True, the notebook sends dark constants to the calibration database\n",
+    "local_output = True # if True, the notebook saves dark constants locally\n",
     "# for database time derivation:\n",
-    "temp_limits = 5  # temperature limits in which to consider calibration parameters equal\n",
-    "multi_iteration = False  # use multiple iterations"
+    "use_dir_creation_date = True  # To be used to retrieve calibration constants later on"
    ]
   },
   {
@@ -157,12 +159,11 @@
     "x = 1024 # rows of the FastCCD to analyze in FS mode \n",
     "y = 1024 # columns of the FastCCD to analyze in FS mode \n",
     "print(\"pnCCD size is: {}x{} pixels.\".format(x, y))\n",
-    "\n",
+    "    \n",
     "ped_dir = \"{}/r{:04d}\".format(in_folder, run)\n",
-    "fp_name = path_template.format(run, karabo_da)\n",
+    "fp_name = path_template.format(run)\n",
     "fp_path = '{}/{}'.format(ped_dir, fp_name)\n",
     "filename = fp_path.format(sequence)\n",
-    "h5path = h5path.format(karabo_id, receiver_id)\n",
     "\n",
     "creation_time = None\n",
     "if use_dir_creation_date:\n",
@@ -175,7 +176,7 @@
     "print(\"Reading data from: {}\".format(filename))\n",
     "print(\"Run number: {}\".format(run))\n",
     "if creation_time:\n",
-    "    print(\"Creation time: {}\".format(creation_time.isoformat()))"
+    "    print(\"Creation time: {}\".format(creation_time))"
    ]
   },
   {
@@ -264,7 +265,7 @@
    "source": [
     "### First Iteration\n",
     "\n",
-    "Characterization of dark images with purpose to create dark maps (offset, noise and bad pixel maps) is an iterative process. Firstly, initial offset and noise maps are produced from the raw dark data."
+    "Characterization of dark images with the purpose of creating dark maps (offset, noise and bad pixel maps) is an iterative process. The initial offset and noise maps are produced from the raw dark data."
    ]
   },
   {
@@ -327,7 +328,7 @@
    "source": [
     "### Offset and Noise Maps prior to Common Mode Correction\n",
     "\n",
-    "In the following, the histogram of the pnCCD offset, pnCCD offset map, as well as the initial uncorrected noise map are plotted:"
+    "In the following, the histograms of the pnCCD offset and initial noise, and the heat maps of pnCCD offset, as well as the initial uncorrected noise are plotted."
    ]
   },
   {
@@ -343,17 +344,17 @@
    "outputs": [],
    "source": [
     "#************** OFFSET MAP HISTOGRAM ***********#\n",
-    "ho, co = np.histogram(offsetMap.flatten(), bins=700) # ho = offset histogram; co = offset bin centers\n",
+    "ho, co = np.histogram(offsetMap.flatten(), bins=2000) # ho = offset histogram; co = offset bin centers\n",
     "\n",
     "do = {'x': co[:-1],\n",
     "     'y': ho,\n",
-    "     'y_err': np.sqrt(ho[:]),\n",
+    "     #'y_err': np.sqrt(ho[:]), Markus thinks these errors bars are not correctly calculated!\n",
     "     'drawstyle': 'bars',\n",
     "     'color': 'cornflowerblue',\n",
-    "     'label': 'Raw Signal (ADU)'\n",
+    "     'label': 'Raw Pedestal (ADU)'\n",
     "     }\n",
     "                      \n",
-    "fig = xana.simplePlot(do, figsize='1col', aspect=1, x_label = 'Raw Signal (ADU)', y_label=\"Counts\", \n",
+    "fig = xana.simplePlot(do, figsize='1col', aspect=1, x_label = 'Raw Pedestal (ADU)', y_label=\"Counts\", \n",
     "                      title = 'Offset Histogram', x_range=(0, np.nanmax(offsetMap)), y_log=True)\n",
     "\n",
     "# Calculating mean, median and standard deviation for the above histogram:\n",
@@ -364,7 +365,7 @@
     "\n",
     "# Table of statistics on raw signal:\n",
     "t0 = PrettyTable()\n",
-    "t0.title = \"Statistics on Raw Signal (Offset)\"\n",
+    "t0.title = \"Statistics on Raw Pedestal (Offset)\"\n",
     "t0.field_names = [\"Mean\", \"Standard Deviation\"]\n",
     "t0.add_row([\"{:0.3f} (ADU)\".format(mean), \"{:0.3f} (ADU)\".format(std)])\n",
     "print(t0,'\\n')\n",
@@ -374,25 +375,25 @@
     "\n",
     "dn = {'x': cn[:-1],\n",
     "     'y': hn,\n",
-    "     'y_err': np.sqrt(hn[:]),\n",
+    "     #'y_err': np.sqrt(hn[:]),\n",
     "     'drawstyle': 'bars',\n",
     "     'color': 'cornflowerblue',\n",
     "     'label': 'Noise (ADU)'\n",
     "     }\n",
     "\n",
-    "fig = xana.simplePlot(dn, figsize='1col', aspect=1, x_label = 'Noise (ADU)', y_label=\"Counts\", \n",
+    "fig = xana.simplePlot(dn, figsize='1col', aspect=1, x_label = 'Raw Noise (ADU)', y_label=\"Counts\", \n",
     "                      title = 'Noise Histogram', y_log=True, x_range=(0, 600))\n",
     "\n",
     "\n",
     "#************** HEAT MAPS *******************#\n",
     "fig = xana.heatmapPlot(offsetMap[:,:,0], x_label='Column Number', y_label='Row Number',  aspect=1,\n",
     "                       x_range=(0, y), y_range=(0, x), vmin=6000, vmax=14500, lut_label='Offset (ADU)', \n",
-    "                       panel_x_label='Columns Stat (ADU)', panel_y_label='Rows Stat (ADU)', title = 'OffsetMap')\n",
+    "                       panel_x_label='Row Stat (ADU)', panel_y_label='Column Stat (ADU)', title = 'Offset Map')\n",
     "\n",
     "fig = xana.heatmapPlot(noiseMap[:,:,0], x_label='Column Number', y_label='Row Number', aspect=1,\n",
-    "                       lut_label='Uncorrected Noise (ADU)', x_range=(0, y),\n",
-    "                       y_range=(0, x), vmax=2*np.mean(noiseMap), panel_x_label='Columns Stat (ADU)', \n",
-    "                       panel_y_label='Rows Stat (ADU)', title = 'Uncorrected NoiseMap')"
+    "                       lut_label='Noise (ADU)', x_range=(0, y),\n",
+    "                       y_range=(0, x), vmax=2*np.mean(noiseMap), panel_x_label='Row Stat (ADU)', \n",
+    "                       panel_y_label='Column Stat (ADU)', title = 'Uncorrected NoiseMap')"
    ]
   },
   {
@@ -401,7 +402,7 @@
    "source": [
     "### Initial BadPixelMap\n",
     "\n",
-    "This is generated based on the offset and uncorrected noise maps:"
+    "This is generated based on the offset and uncorrected noise maps."
    ]
   },
   {
@@ -425,7 +426,9 @@
     "                       x_label='Columns', y_label='Rows',\n",
     "                       lut_label='Bad Pixel Value (ADU)',\n",
     "                       x_range=(0, y),\n",
-    "                       y_range=(0, x), vmin=0, vmax=32)"
+    "                       y_range=(0, x), vmin=0, vmax=32,\n",
+    "                       panel_x_label='Row Stat (ADU)', panel_y_label='Column Stat (ADU)', \n",
+    "                       title = 'Initial Bad Pixels Map')"
    ]
   },
   {
@@ -434,10 +437,17 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "# Offset Correction:\n",
+    "\n",
+    "offsetCorrection = xcal.OffsetCorrection(sensorSize, offsetMap, nCells = memoryCells, cores=cpuCores, gains=None,\n",
+    "                                         runParallel=run_parallel, blockSize=blockSize)\n",
+    "\n",
+    "\n",
     "# Common Mode Correction:\n",
     "# In this method, the median of all pixels that are read out at the same time along a row is subtracted from the\n",
     "# signal in each pixel:\n",
-    "cmCorrection = xcal.CommonModeCorrection(sensorSize, [data.shape[0]//2, data.shape[1]//2],\n",
+    "cmCorrection = xcal.CommonModeCorrection([x, y],\n",
+    "                                         commonModeBlockSize,\n",
     "                                         commonModeAxis, parallel=run_parallel, dType=np.float32, stride=1,\n",
     "                                         noiseMap=noiseMap.astype(np.float32), minFrac=0)\n",
     "\n",
@@ -451,16 +461,15 @@
    "outputs": [],
    "source": [
     "# Histogram Calculators:\n",
+    "# The number of bins and bin ranges are based on the offset histogram. We set 1 ADU per bin and we consider some\n",
+    "# negative bins to ensure the offset and common mode corrections actually move the signal to zero:\n",
     "\n",
     "# For offset corrected data:\n",
-    "histCalCorrected = xcal.HistogramCalculator(sensorSize, bins=400, range=[-200, 200], memoryCells=memoryCells,\n",
+    "histCalCorrected = xcal.HistogramCalculator(sensorSize, bins=1100, range=[-50, 1050], memoryCells=memoryCells,\n",
     "                                            cores=cpuCores, gains=None, blockSize=blockSize)\n",
     "# For common mode corrected data:\n",
-    "histCalCMCorrected = xcal.HistogramCalculator(sensorSize, bins=400, range=[-200, 200], memoryCells=memoryCells,\n",
-    "                                              cores=cpuCores, gains=None, blockSize=blockSize)\n",
-    "\n",
-    "noiseCalCM = xcal.NoiseCalculator(sensorSize, memoryCells, cores=cpuCores, blockSize=blockSize,\n",
-    "                                  runParallel=run_parallel)"
+    "histCalCMCorrected = xcal.HistogramCalculator(sensorSize, bins=1100, range=[-50, 1050], memoryCells=memoryCells,\n",
+    "                                              cores=cpuCores, gains=None, blockSize=blockSize)"
    ]
   },
   {
@@ -469,7 +478,7 @@
    "source": [
     "### Second Iteration\n",
     "\n",
-    "During the second iteration, the data are offset and common mode corrected to produce a common mode corrected noise map. The offset and common mode corrections are calculated by subtracting out the median of all pixels that are read out at the same time along a row."
+    "During the second iteration, the data are offset and common mode corrected to produce an offset-and-common-mode-corrected noise map. The common mode correction is calculated by subtracting out the median of all pixels that are read out at the same time along a row."
    ]
   },
   {
@@ -505,7 +514,7 @@
     "    cellTable=np.zeros(data.shape[2], np.int32) # Common mode correction\n",
     "    data = cmCorrection.correct(data.astype(np.float32), cellTable=cellTable) \n",
     "    histCalCMCorrected.fill(data)\n",
-    "    noiseCalCM.fill(data)  # Filling calculators with data\n",
+    "    noiseCal.fill(data)  # Filling calculators with data\n",
     "    chunkSize = 100 # resetting the chunkSize to its default value for the next sequence or data-chunk\n",
     "\n",
     "print('A total number of {} images are processed.'.format(images))\n",
@@ -518,7 +527,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "noiseMapCM = noiseCalCM.get()  # Producing common mode corrected noise map\n",
+    "noiseMapCM = noiseCal.get() # Producing common mode corrected noise map\n",
     "ho, eo, co, so = histCalCorrected.get()\n",
     "hCM, eCM, cCM, sCM = histCalCMCorrected.get()"
    ]
@@ -542,7 +551,7 @@
    "source": [
     "### Signal after Offset and Common Mode Corrections\n",
     "\n",
-    "Here, the offset corrected signal is compared to the common-mode corrected signal (in the form of binned histograms):  "
+    "Here, the offset corrected signal is compared to the common-mode corrected signal (in the form of binned histograms)."
    ]
   },
   {
@@ -553,27 +562,29 @@
    "source": [
     "do = [{'x': co,\n",
     "     'y': ho,\n",
-    "     'y_err': np.sqrt(ho[:]),\n",
-    "     'drawstyle': 'steps-mid',\n",
+    "     #'y_err': np.sqrt(ho[:]),\n",
+    "     #'drawstyle': 'steps-mid',\n",
+    "     'drawstyle': 'steps-post',\n",
     "     'color': 'cornflowerblue',\n",
     "     'label': 'Offset Corrected Signal'\n",
     "     },\n",
     "     {'x': cCM,\n",
     "     'y': hCM,\n",
-    "     'y_err': np.sqrt(hCM[:]),\n",
-    "     'drawstyle': 'steps-mid',\n",
+    "     #'y_err': np.sqrt(hCM[:]),\n",
+    "     'drawstyle': 'steps-post',\n",
     "     'color': 'red',\n",
     "     'label': 'Common Mode Corrected Signal'\n",
     "     }]\n",
     "      \n",
-    "fig = xana.simplePlot(do, figsize='2col', aspect=1, x_label = 'Corrected Signal (ADU)', y_label=\"Counts\", \n",
-    "                      x_range = (-200, 200), legend='top-right-frame-1col', \n",
-    "                      title = 'Corrected Signal - 2nd Iteration') \n",
+    "fig = xana.simplePlot(do, figsize='2col', aspect=1, x_label = 'Corrected Pedestal (ADU)', y_label=\"Counts\", \n",
+    "                      x_range = (-50, 1050), y_log=True, legend='top-right-frame-1col', \n",
+    "                      title = 'Corrected Pedestal - 2nd Iteration') \n",
     "\n",
     "t0 = PrettyTable()\n",
     "t0.title = \"Comparison of the First Round of Corrections - Bad Pixels Not Excluded\"\n",
     "t0.field_names = [\"After Offset Correction\", \"After Common Mode Correction\"]\n",
-    "t0.add_row([\"Mean: {:0.3f} (ADU)\".format(np.mean(offset_corr_data)), \"Mean: {:0.3f} (ADU)\".format(np.mean(data))])\n",
+    "t0.add_row([\"Mean: {:0.3f} (ADU)\".format(np.mean(offset_corr_data)), \"Mean: {:0.3f} (ADU)\"\n",
+    "            .format(np.mean(data))])\n",
     "t0.add_row([\"Median: {:0.3f} (ADU)\".format(np.median(offset_corr_data)), \"Median: {:0.3f} (ADU)\"\n",
     "            .format(np.median(data))])\n",
     "t0.add_row([\"Standard Deviation: {:0.3f} (ADU)\".format(np.std(offset_corr_data)), \n",
@@ -588,7 +599,7 @@
    "source": [
     "### Noise Map after Common Mode Correction\n",
     "\n",
-    "In the following, the effect of common mode correction on the noise is shown. Finally common mode corrected noise map (noiseMapCM) is displayed and compared to the initial uncorrected noise map:"
+    "In the following, the effect of common mode correction on the noise is shown. Finally common mode corrected noise map (noiseMapCM) is displayed and compared to the initial uncorrected noise map."
    ]
   },
   {
@@ -604,17 +615,17 @@
     "dn = [{'x': cn[:-1],\n",
     "       'y': hn,\n",
     "       # 'y_err': np.sqrt(hn[:]),\n",
-    "       'drawstyle': 'steps-mid',  # 'bars',\n",
+    "       'drawstyle': 'steps-post',  # 'bars' or 'steps-mid',\n",
     "       'color': 'blue',  # 'cornflowerblue',\n",
-    "       'label': 'Uncorrected Noise (ADU)'\n",
+    "       'label': 'Uncorrected'\n",
     "       },\n",
     "      {'x': cn_CM[:-1],\n",
     "       'y': hn_CM,\n",
     "       # 'y_err': np.sqrt(hn_CM[:]),\n",
-    "       'drawstyle': 'steps-mid',  # 'bars',\n",
+    "       'drawstyle': 'steps-post',  # 'bars',\n",
     "       'color': 'crimson',  # 'red',#'cornflowerblue',\n",
     "       # 'ecolor': 'crimson',\n",
-    "       'label': 'Common Mode Corrected Noise (ADU)'\n",
+    "       'label': 'Common Mode Corrected'\n",
     "       }]\n",
     "fig = xana.simplePlot(dn, figsize='1col', aspect=1, x_label='Noise (ADU)', y_label=\"Counts\",\n",
     "                      x_range=(0, 600), y_log=True, legend='top-center-frame-1col',\n",
@@ -625,7 +636,8 @@
     "                       x_label='Columns', y_label='Rows',\n",
     "                       lut_label='Common Mode Corrected Noise (ADU)',\n",
     "                       x_range=(0, y),\n",
-    "                       y_range=(0, x), vmax=2*np.mean(noiseMapCM), title='Common Mode Corrected Noise Map')"
+    "                       y_range=(0, x), vmax=2*np.mean(noiseMapCM), panel_x_label='Row Stat (ADU)', \n",
+    "                       panel_y_label='Column Stat (ADU)', title='Common Mode Corrected Noise Map')"
    ]
   },
   {
@@ -635,7 +647,7 @@
    "outputs": [],
    "source": [
     "# Resetting the calculators:\n",
-    "noiseCalCM.reset() # resetting noise calculator\n",
+    "noiseCal.reset() # resetting noise calculator\n",
     "histCalCorrected.reset() # resetting histogram calculator\n",
     "histCalCMCorrected.reset() # resetting histogram calculator\n",
     "cmCorrection.reset()"
@@ -645,9 +657,9 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "### Final BadPixelMap\n",
+    "### Second BadPixelMap\n",
     "\n",
-    "This is generated based on the offset and common mode corrected noise maps:"
+    "This is generated based on the offset map and offset-and-common-mode-corrected noise map."
    ]
   },
   {
@@ -656,7 +668,6 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "bad_pixels = np.zeros(offsetMap.shape, np.uint32)\n",
     "mnoffset = np.nanmedian(offsetMap)\n",
     "stdoffset = np.nanstd(offsetMap)\n",
     "bad_pixels[(offsetMap < mnoffset-bad_pixel_offset_sigma*stdoffset) |\n",
@@ -671,14 +682,292 @@
     "                       x_label='Columns', y_label='Rows',\n",
     "                       lut_label='Bad Pixel Value (ADU)',\n",
     "                       x_range=(0, y),\n",
-    "                       y_range=(0, x), vmin=0, vmax=32)"
+    "                       y_range=(0, x), vmin=0, vmax=32,\n",
+    "                       panel_x_label='Row Stat (ADU)', panel_y_label='Column Stat (ADU)', \n",
+    "                       title = 'Second Bad Pixels Map')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Here, we are masking the pixels in the hole (center of the pnCCD)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
+   "source": [
+    "hole_mask = np.zeros(bad_pixels.shape, np.uint32) \n",
+    "#hole_mask[485:539,481:543,:] = BadPixels.NON_SENSITIVE.value # This is from Markus estimate\n",
+    "hole_mask[483:539,477:543,:] = BadPixels.NON_SENSITIVE.value\n",
+    "\n",
+    "# Assigning this masked area as bad pixels:\n",
+    "bad_pixels = np.bitwise_or(bad_pixels, hole_mask)\n",
+    "\n",
+    "fig = xana.heatmapPlot(np.log2(bad_pixels[:, :, 0]),\n",
+    "                       x_label='Columns', y_label='Rows',\n",
+    "                       lut_label='Bad Pixel Value (ADU)',\n",
+    "                       x_range=(0, y),\n",
+    "                       y_range=(0, x), vmin=0, vmax=32, panel_x_label='Row Stat (ADU)', \n",
+    "                       panel_y_label='Column Stat (ADU)', title = 'Second Bad Pixels Map with the Hole in Center')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Third Iteration\n",
+    "\n",
+    "During the third iteration, the last bad pixel map is applied to the data. Bad pixels are masked. Possible cosmic ray events are also masked. Offset and common mode corrections are applied once again to the data, which now have bad pixels excluded, to produce a newly corrected noise map."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "event_threshold = sigmaNoise*np.median(noiseMapCM) # for exclusion of possible cosmic ray events\n",
+    "noiseCal.setBadPixelMask(bad_pixels != 0) # setting bad pixels map for the noise calculator"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "counter1 = 1 # To count how many \"if data.shape[2] >= chunkSize\" instances are there.\n",
+    "counter2 = 0 # To count how many \"if data.shape[2] < chunkSize\" instances are there.\n",
+    "chunkSize_new = 0 # See below\n",
+    "\n",
+    "for data in reader.readChunks():\n",
+    "    data = data.astype(np.float32)\n",
+    "    dx = np.count_nonzero(data, axis=(0, 1))\n",
+    "    data = data[:,:,dx != 0]\n",
+    "    data_mask = np.repeat(bad_pixels, data.shape[2], axis=2) # Converting bad_pixels to the same shape as the data\n",
+    "    data[data_mask != 0] = np.nan # masking data for bad pixels and equating the values to np.nan\n",
+    "    \n",
+    "    # Some sequences may have less than 500 frames in them. To find out how many images there are, we will \n",
+    "    # temporarily change chunkSize to be the same as whatever number of frames the last chunk of data has:\n",
+    "    if data.shape[2] < chunkSize:\n",
+    "        chunkSize_new = data.shape[2]\n",
+    "        print(\"Number of images are less than chunkSize. chunkSize is temporarily changed to {}.\"\n",
+    "              .format(chunkSize_new))\n",
+    "        images = images + chunkSize_new\n",
+    "        counter2 += 1 \n",
+    "    else:\n",
+    "        images = counter1*chunkSize + counter2*chunkSize_new\n",
+    "        counter1 += 1\n",
+    "    \n",
+    "    data_copy = offsetCorrection.correct(copy.copy(data))\n",
+    "    cellTable=np.zeros(data_copy.shape[2], np.int32)\n",
+    "    data_copy = cmCorrection.correct(data_copy.astype(np.float32), cellTable=cellTable)\n",
+    "    data[data_copy > event_threshold] = np.nan # discarding events caused by cosmic rays\n",
+    "    data = np.ma.MaskedArray(data, np.isnan(data), fill_value=0) # masking cosmics, default fill_value = 1e+20 \n",
+    "\n",
+    "    data -= offsetMap.data # Offset correction\n",
+    "    offset_corr_data2 = copy.copy(data) # I am copying this so that I can have access to it in the table below\n",
+    "    histCalCorrected.fill(data)\n",
+    "    cellTable=np.zeros(data.shape[2], np.int32) # Common mode correction\n",
+    "    data = cmCorrection.correct(data.astype(np.float32), cellTable=cellTable) \n",
+    "    histCalCMCorrected.fill(data)\n",
+    "    noiseCal.fill(data)  # Filling calculators with data\n",
+    "    chunkSize = 100 # resetting the chunkSize to its default value for the next sequence or data-chunk\n",
+    "\n",
+    "print(\"Final iteration is Performed.\")\n",
+    "print('A total number of {} images are processed.'.format(images))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "noiseMapCM_2nd = noiseCal.get().filled(0) # the masked pixels are filled with 0\n",
+    "ho2, eo2, co2, so2 = histCalCorrected.get()\n",
+    "hCM2, eCM2, cCM2, sCM2 = histCalCMCorrected.get()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Plots of the Final Results\n",
+    "\n",
+    "The following plot and table compare the offset and common mode corrected signal with and without the bad pixels."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "do_Final = [{'x': co_second_trial,\n",
+    "     'y': ho_second_trial,\n",
+    "     #'y_err': np.sqrt(ho_second_trial[:]),\n",
+    "     'drawstyle': 'steps-post',\n",
+    "     'color': 'blue',#'cornflowerblue',\n",
+    "     #'errorstyle': 'bars',\n",
+    "     'label': 'Offset Correction, Bad Pixels Included - 2nd Trial'\n",
+    "     },\n",
+    "    {'x': cCM_second_trial,\n",
+    "     'y': hCM_second_trial,\n",
+    "     #'y_err': np.sqrt(hCM_second_trial[:]),\n",
+    "     'drawstyle': 'steps-post',\n",
+    "     'color': 'red',\n",
+    "     #'errorstyle': 'bars',\n",
+    "     'ecolor': 'crimson',\n",
+    "     'label': 'Common Mode Correction, Bad Pixels Included - 2nd Trial' \n",
+    "     },\n",
+    "    {'x': co2,\n",
+    "     'y': ho2,\n",
+    "     #'y_err': np.sqrt(ho2[:]),\n",
+    "     'drawstyle': 'steps-post',\n",
+    "     'color': 'black', #'cornflowerblue',\n",
+    "     #'errorstyle': 'bars',\n",
+    "     'label': 'Offset Correction, Bad Pixels Excluded - 3rd Trial'\n",
+    "     },\n",
+    "    {'x': cCM2,\n",
+    "     'y': hCM2,\n",
+    "     #'y_err': np.sqrt(hCM2[:]),\n",
+    "     'drawstyle': 'steps-post',\n",
+    "     'color': 'orange', #'cornflowerblue',\n",
+    "     #'errorstyle': 'bars',\n",
+    "     'label': 'Common Mode Correction, Bad Pixels Excluded - 3rd Trial'\n",
+    "     }]\n",
+    "\n",
+    "fig = xana.simplePlot(do_Final, figsize='2col', aspect=1, x_label = 'Corrected Signal (ADU)', \n",
+    "                      y_label=\"Counts (Logarithmic Scale)\", y_log=True, x_range = (0, 1100),\n",
+    "                      legend='top-right-frame-1col', title = 'Comparison of Corrections')\n",
+    "\n",
+    "t0 = PrettyTable()\n",
+    "t0.title = \"Comparison of the Second Round of Corrections - Bad Pixels Excluded\"\n",
+    "t0.field_names = [\"After Offset Correction\", \"After Common Mode Correction\"]\n",
+    "t0.add_row([\"Mean: {:0.3f} (ADU)\".format(np.nanmean(offset_corr_data2)), \"Mean: {:0.3f} (ADU)\"\n",
+    "            .format(np.nanmean(data))])\n",
+    "t0.add_row([\"Median: {:0.3f} (ADU)\".format(np.nanmedian(offset_corr_data2)), \"Median: {:0.3f} (ADU)\"\n",
+    "            .format(np.nanmedian(data))])\n",
+    "t0.add_row([\"Standard Deviation: {:0.3f} (ADU)\".format(np.nanstd(offset_corr_data2)), \n",
+    "            \"Standard Deviation: {:0.3f} (ADU)\".format(np.nanstd(data))])\n",
+    "print(t0,'\\n')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Final NoiseMap\n",
+    "\n",
+    "Finally offset and common mode corrected noise map with bad pixels and possible cosmic events excluded (noiseMapCM_2nd) is displayed."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#*****NOISE MAP HISTOGRAM FROM THE COMMON MODE CORRECTED DATA*******#\n",
+    "hn_CM2, cn_CM2 = np.histogram(noiseMapCM_2nd.flatten(), bins=2000, range=(0, 600))\n",
+    "\n",
+    "dn2 = [{'x': cn[:-1],\n",
+    "     'y': hn,\n",
+    "     #'y_err': np.sqrt(hn[:]),\n",
+    "     'drawstyle': 'steps-post',#'bars',\n",
+    "     'color': 'blue', #'cornflowerblue',\n",
+    "     'label': 'Uncorrected'\n",
+    "     },\n",
+    "    {'x': cn_CM[:-1],\n",
+    "     'y': hn_CM,\n",
+    "     #'y_err': np.sqrt(hn_CM[:]),\n",
+    "     'drawstyle': 'steps-post',\n",
+    "     'color': 'red',\n",
+    "     #'ecolor': 'crimson',\n",
+    "     'label': 'Common Mode Corrected prior to Bad Pixels Exclusion'\n",
+    "     },\n",
+    "    {'x': cn_CM2[:-1],\n",
+    "     'y': hn_CM2,\n",
+    "     #'y_err': np.sqrt(hn_CM2[:]),\n",
+    "     'drawstyle': 'steps-post',\n",
+    "     'color': 'black', #'cornflowerblue',\n",
+    "     'label': 'Common Mode Corrected after Bad Pixels Exclusion'\n",
+    "     }]\n",
+    "\n",
+    "fig = xana.simplePlot(dn2, figsize='1col', aspect = 1, x_label = 'Noise (ADU)', y_label=\"Counts\", y_log=True, \n",
+    "                      legend='top-right-frame-1col', \n",
+    "                      title = 'Final Noise Comparison')\n",
+    "\n",
+    "fig = xana.heatmapPlot(noiseMapCM_2nd[:,:,0], aspect=1, x_label='Column Number', y_label='Row Number',\n",
+    "                       lut_label='Noise (ADU)', x_range=(0, y), y_range=(0, x),\n",
+    "                       title = 'Final Common Mode Corrected Noise\\n (Bad Pixels Excluded)', \n",
+    "                       panel_x_label='Row Stat (ADU)', panel_y_label='Column Stat (ADU)')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Final Bad Pixel Map\n",
+    "\n",
+    "Lastly, the final bad pixel map is generated based on the OffsetMap and the noiseMapCM_2nd (offset and common mode corrected noise after exclusion of the bad pixels)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
+   "source": [
+    "mnoffset = np.nanmedian(offsetMap)\n",
+    "stdoffset = np.nanstd(offsetMap)\n",
+    "bad_pixels[(offsetMap < mnoffset-bad_pixel_offset_sigma*stdoffset) |\n",
+    "           (offsetMap > mnoffset+bad_pixel_offset_sigma*stdoffset)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n",
+    "\n",
+    "mnnoise = np.nanmedian(noiseMapCM_2nd)\n",
+    "stdnoise = np.nanstd(noiseMapCM_2nd)\n",
+    "bad_pixels[(noiseMapCM_2nd < mnnoise-bad_pixel_noise_sigma*stdnoise) | \n",
+    "           (noiseMapCM_2nd > mnnoise+bad_pixel_noise_sigma*stdnoise)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
+    "\n",
+    "bad_pixels = np.bitwise_or(bad_pixels, hole_mask)\n",
+    "\n",
+    "fig = xana.heatmapPlot(np.log2(bad_pixels[:, :, 0]),\n",
+    "                       x_label='Columns', y_label='Rows',\n",
+    "                       lut_label='Bad Pixel Value (ADU)',\n",
+    "                       x_range=(0, y),\n",
+    "                       y_range=(0, x), vmin=0, vmax=32, panel_x_label='Row Stat (ADU)', \n",
+    "                       panel_y_label='Column Stat (ADU)', title = 'Final Bad Pixels Map')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "display(Markdown('### Statistics on the Bad Pixels'))\n",
+    "num_bad_pixels = np.count_nonzero(bad_pixels)\n",
+    "num_all_pixels = x*y\n",
+    "percentage_bad_pixels = num_bad_pixels*100/num_all_pixels\n",
+    "print(\"Number of bad pixels: {:0.0f}, i.e. {:0.2f}% of all pixels\".format(num_bad_pixels, percentage_bad_pixels))"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "### Calibration Constants"
+    "### Calibration Constants\n",
+    "\n",
+    "Here, we send the dark constants to the database and/or save them locally."
    ]
   },
   {
@@ -694,7 +983,7 @@
    "source": [
     "constant_maps = {\n",
     "    'Offset': offsetMap,\n",
-    "    'Noise': noiseMapCM,\n",
+    "    'Noise': noiseMapCM_2nd,\n",
     "    'BadPixelsDark': bad_pixels\n",
     "}\n",
     "\n",
@@ -728,7 +1017,7 @@
     "\n",
     "    if db_output:\n",
     "        try:\n",
-    "            metadata.send(cal_db_interface, timeout=cal_db_timeout)\n",
+    "            metadata.send(cal_db_interface)\n",
     "            print(\"Inject {} constants from {}\".format(const_name,\n",
     "                                                       metadata.calibration_constant_version.begin_at))\n",
     "        except Exception as e:\n",
@@ -738,6 +1027,13 @@
     "        save_const_to_h5(metadata, out_folder)\n",
     "        print(\"Calibration constant {} is stored to {}.\".format(const, out_folder))"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
   }
  ],
  "metadata": {
-- 
GitLab