diff --git a/notebooks/pnCCD/Characterize_pnCCD_Dark_NBC.ipynb b/notebooks/pnCCD/Characterize_pnCCD_Dark_NBC.ipynb index 9a9d68b109caf95f260f375c7da57b96d9e51035..77af078b9ae981a516f3dc3d6f4b64a15f057e22 100644 --- a/notebooks/pnCCD/Characterize_pnCCD_Dark_NBC.ipynb +++ b/notebooks/pnCCD/Characterize_pnCCD_Dark_NBC.ipynb @@ -6,11 +6,11 @@ "source": [ "# pnCCD Dark Characterization\n", "\n", - "Author: DET Group, Version 1.0\n", + "Author: Kiana Setoodehnia, DET Group, Version 2.0\n", "\n", "The following notebook provides dark image analysis of the pnCCD detector.\n", "\n", - "Dark characterization evaluates offset and noise of the detector and gives information about bad pixels. Resulting maps are saved as .h5 files for a latter use." + "Dark characterization evaluates the offset and noise of the detector and gives information about bad pixels. The noise map is corrected for common mode. The resulting noise map, together with the offset and bad pixel maps, is sent to the calibration database and also saved as .h5 files to a local path for later use." ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2018-12-06T10:54:38.999974Z", "start_time": "2018-12-06T10:54:38.983406Z" - }, - "collapsed": true + } }, "outputs": [], "source": [ - "in_folder = \"/gpfs/exfel/exp/SQS/201921/p002430/raw/\" # input folder, required\n", - "out_folder = 'gpfs/exfel/data/scratch/haufs/test/' # output folder, required\n", + "in_folder = \"/gpfs/exfel/exp/SQS/201921/p002430/raw\" # input folder, required\n", + "out_folder = '/gpfs/exfel/exp/SQS/201921/p002430/scratch' # output folder, required\n", "path_template = 'RAW-R{:04d}-PNCCD01-S{{:05d}}.h5' # the template to use to access data\n", + "h5path = '/INSTRUMENT/SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output/data/image/' # path to the data in the HDF5 file\n", "run = 745 # which run to read data from, required\n", + "sequence = 0 # sequence file to use\n", "number_dark_frames = 0 # number of images to be used, if set to 0 all available images are used\n", + "chunkSize = 100 # number of images to read per chunk\n", + "fix_temperature = 233. # fix temperature in K, set to -1 to use value from slow data\n", + "gain = 0 # the detector's gain setting, only 0 is currently implemented\n", + "bias_voltage = 300 # detector's bias voltage\n", + "integration_time = 70 # detector's integration time\n", + "commonModeAxis = 0 # axis along which common mode will be calculated (0: along rows, 1: along columns)\n", + "sigmaNoise = 10. # pixels whose signal exceeds sigmaNoise times the noise value in that pixel will be masked\n", + "bad_pixel_offset_sigma = 5. # any pixel whose offset lies beyond this many standard deviations is a bad pixel\n", + "bad_pixel_noise_sigma = 5. # any pixel whose noise lies beyond this many standard deviations is a bad pixel\n", "cluster_profile = \"noDB\" # ipcluster profile to use\n", - "sigma_noise = 10. 
# Pixel exceeding 'sigmaNoise' * noise value in that pixel will be masked\n", - "h5path = '/INSTRUMENT/SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output/data/image/' # path in the HDF5 file the data is at\n", + "run_parallel = True # for parallel computation \n", + "cpuCores = 40 # specifies the number of running cpu cores\n", "cal_db_interface = \"tcp://max-exfl016:8021\" # calibration DB interface to use\n", - "temp_limits = 5 # temperature limits in which to consider calibration parameters equal\n", - "sequence = 0 # sequence file to use\n", - "multi_iteration = False # use multiple iterations\n", - "use_dir_creation_date = True # use dir creation date as data production reference date\n", - "bad_pixel_offset_sigma = 5. # deviation in standard deviations from mean offset to consider pixel bad\n", - "bad_pixel_noise_sigma = 5. # deviation in standard deviations from mean noise to consider pixel bad\n", - "fix_temperature = 233. # fix temperature to this value in K, set to -1 to use value from slow data\n", - "gain = 0 # the detector gain setting, only 0 is currently implemented\n", - "bias_voltage = 300 # detector bias voltage\n", - "integration_time = 70 # detector integration time\n", - "db_output = False # Output constants to the calibration database\n", - "local_output = True # output constants locally" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "ExecuteTime": { - "end_time": "2018-12-06T10:54:39.190907Z", - "start_time": "2018-12-06T10:54:39.186154Z" - }, - "collapsed": true - }, - "outputs": [], - "source": [ - "from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions\n", - "from iCalibrationDB.detectors import DetectorTypes" + "temp_limits = 5 # temperature limits in which calibration parameters are considered equal\n", + "db_output = False # if True, the notebook sends dark constants to the calibration database\n", + "local_output = True # if True, the notebook saves dark constants locally\n", + "# for database time derivation:\n", + "use_dir_creation_date = True # To be used to retrieve calibration constants later on" ] }, { @@ -75,16 +62,24 @@ }, "outputs": [], "source": [ - "import XFELDetAna.xfelprofiler as xprof\n", + "import os\n", + "import copy\n", + "import warnings\n", + "warnings.filterwarnings('ignore')\n", "\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "%matplotlib inline\n", + "from prettytable import PrettyTable\n", + "from IPython.display import display, Markdown\n", + "\n", + "from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions\n", + "from iCalibrationDB.detectors import DetectorTypes\n", + "import XFELDetAna.xfelprofiler as xprof\n", "profiler = xprof.Profiler()\n", "profiler.disable()\n", "from XFELDetAna.util import env\n", "env.iprofile = cluster_profile\n", - "\n", - "import warnings\n", - "warnings.filterwarnings('ignore')\n", - "\n", "from XFELDetAna import xfelpycaltools as xcal\n", "from XFELDetAna import xfelpyanatools as xana\n", "from XFELDetAna.plotting.util import prettyPlotting\n", @@ -92,27 +87,36 @@ "from XFELDetAna.xfelreaders import ChunkReader\n", "from XFELDetAna.detectors.fastccd import readerh5 as fastccdreaderh5\n", "from cal_tools.tools import get_dir_creation_date, save_const_to_h5\n", - "\n", - "import numpy as np\n", - "import h5py\n", - "import matplotlib.pyplot as plt\n", - "from iminuit import Minuit\n", - "\n", - "import time\n", - "import copy\n", - "\n", - "from prettytable import PrettyTable\n", - "\n", - 
"%matplotlib inline\n", - "\n", + "from cal_tools.enums import BadPixels" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Output Folder Creation:\n", + "if not os.path.exists(out_folder):\n", + " os.makedirs(out_folder)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T10:54:39.467334Z", + "start_time": "2018-12-06T10:54:39.427784Z" + } + }, + "outputs": [], + "source": [ "def nImagesOrLimit(nImages, limit):\n", " if limit == 0:\n", " return nImages\n", " else:\n", - " return min(nImages, limit)\n", - " \n", - "sigmaNoise = sigma_noise\n", - "temperature_k = fix_temperature" + " return min(nImages, limit)" ] }, { @@ -122,7 +126,8 @@ "outputs": [], "source": [ "proposal = list(filter(None, in_folder.strip('/').split('/')))[-2]\n", - "file_loc = 'proposal:{} runs:{}'.format(proposal, run)" + "file_loc = 'Proposal: {}, Run: {}'.format(proposal, run)\n", + "print(file_loc)" ] }, { @@ -136,26 +141,29 @@ }, "outputs": [], "source": [ + "# Calibration Database Settings, and Some Initial Run Parameters & Paths:\n", "\n", + "display(Markdown('### Initial Settings'))\n", "x = 1024 # rows of the FastCCD to analyze in FS mode \n", "y = 1024 # columns of the FastCCD to analyze in FS mode \n", - "\n", + "print(\"pnCCD size is: {}x{} pixels.\".format(x, y))\n", " \n", "ped_dir = \"{}/r{:04d}\".format(in_folder, run)\n", "fp_name = path_template.format(run)\n", + "fp_path = '{}/{}'.format(ped_dir, fp_name)\n", + "filename = fp_path.format(sequence)\n", "\n", - "import datetime\n", "creation_time = None\n", "if use_dir_creation_date:\n", " creation_time = get_dir_creation_date(in_folder, run)\n", - "\n", - "fp_path = '{}/{}'.format(ped_dir, fp_name)\n", - "\n", - "print(\"Reading data from: {}\\n\".format(fp_path))\n", - "print(\"Run is: {}\".format(run))\n", - "print(\"HDF5 path: {}\".format(h5path))\n", + " \n", + "print('Calibration database interface: {}'.format(cal_db_interface))\n", + "print(\"Sending constants to the calibration database: {}\".format(db_output))\n", + "print(\"HDF5 path to data: {}\".format(h5path))\n", + "print(\"Reading data from: {}\".format(filename))\n", + "print(\"Run number: {}\".format(run))\n", "if creation_time:\n", - " print(\"Using {} as creation time\".format(creation_time.isoformat()))" + " print(\"Creation time: {}\".format(creation_time.isoformat()))" ] }, { @@ -169,22 +177,37 @@ }, "outputs": [], "source": [ - "filename = fp_path.format(sequence)\n", + "# Reading Parameters such as Detector Bias, Gain, etc. 
from the Data:\n", + "\n", + "memoryCells = 1 # pnCCD has 1 memory cell\n", "sensorSize = [x, y]\n", - "chunkSize = 100 #Number of images to read per chunk\n", - "#Sensor area will be analysed according to blocksize\n", - "blockSize = [sensorSize[0]//2, sensorSize[1]//4] \n", + "blockSize = [sensorSize[0]//2, sensorSize[1]//2] # sensor area will be analysed according to blockSize\n", "xcal.defaultBlockSize = blockSize\n", - "cpuCores = 8 #Specifies the number of running cpu cores\n", - "memoryCells = 1 #FastCCD has 1 memory cell\n", - "#Specifies total number of images to proceed\n", - "nImages = fastccdreaderh5.getDataSize(filename, h5path)[0] \n", + "nImages = fastccdreaderh5.getDataSize(filename, h5path)[0] # specifies total number of images to proceed\n", "nImages = nImagesOrLimit(nImages, number_dark_frames)\n", - "print(\"\\nNumber of dark images to analyze: \",nImages)\n", - "commonModeBlockSize = blockSize\n", - "commonModeAxisR = 'row'#Axis along which common mode will be calculated\n", - "run_parallel = True\n", - "profile = False\n" + "profile = False" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ + "# Printing the Detector Parameters:\n", + "\n", + "display(Markdown('### Detector Parameters'))\n", + "print(\"Bias voltage is {} V.\".format(bias_voltage))\n", + "print(\"Detector gain is set to {}.\".format(gain))\n", + "print(\"Detector integration time is set to {} ms.\".format(integration_time)) \n", + "    \n", + "if fix_temperature != -1:\n", + "    print(\"Using a fixed temperature of {} K.\".format(fix_temperature))\n", + "    temperature_k = fix_temperature\n", + "else:\n", + "    print(\"Temperature is not fixed.\")\n", + "    \n", + "print(\"Number of dark images to analyze:\", nImages) " ] }, { @@ -194,16 +217,15 @@ "ExecuteTime": { "end_time": "2018-12-06T10:54:41.584031Z", "start_time": "2018-12-06T10:54:41.578462Z" - }, - "collapsed": true + } }, "outputs": [], "source": [ - "reader = ChunkReader(filename, fastccdreaderh5.readData, \n", - "                     nImages, chunkSize, \n", - "                     path = h5path, \n", - "                     pixels_x = sensorSize[0],\n", - "                     pixels_y = sensorSize[1],)" + "# Reading Files in Chunks:\n", + "\n", + "# Chunk reader returns an iterator to access the data in the file within the ranges:\n", + "reader = ChunkReader(filename, fastccdreaderh5.readData, nImages, chunkSize, path = h5path, \n", + "                     pixels_x = sensorSize[0], pixels_y = sensorSize[1],)" ] }, { @@ -213,33 +235,24 @@ "ExecuteTime": { "end_time": "2018-12-06T10:54:41.899511Z", "start_time": "2018-12-06T10:54:41.864816Z" - }, - "collapsed": true + } }, "outputs": [], "source": [ - "noiseCal = xcal.NoiseCalculator(sensorSize, memoryCells, \n", - "                                cores=cpuCores, blockSize=blockSize,\n", - "                                runParallel=run_parallel)\n", - "histCalRaw = xcal.HistogramCalculator(sensorSize, bins=1000, \n", - "                                      range=[0, 10000], parallel=False, \n", - "                                      memoryCells=memoryCells, \n", - "                                      cores=cpuCores, blockSize=blockSize)\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### First Iteration" + "# Calculators:\n", + "\n", + "# noiseCal is a noise map calculator, which internally also produces a per-pixel mean map, i.e. 
an offset map:\n", + "noiseCal = xcal.NoiseCalculator(sensorSize, memoryCells, cores=cpuCores, blockSize=blockSize,\n", + "                                runParallel=run_parallel)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Characterization of dark images with purpose to create dark maps (offset, noise and bad pixel maps) is an iterative process. Firstly, initial offset and noise maps are produced from raw dark data." + "### First Iteration\n", + "\n", + "Characterization of dark images, with the purpose of creating dark maps (offset, noise and bad pixel maps), is an iterative process. First, initial offset and noise maps are produced from the raw dark data." ] }, { @@ -253,18 +266,56 @@ }, "outputs": [], "source": [ + "counter1 = 1 # To count how many chunks contain the full chunkSize of images.\n", + "counter2 = 0 # To count how many chunks contain fewer than chunkSize images.\n", + "chunkSize_new = 0 # See below\n", + "images = 0 # Total number of images processed so far\n", + "\n", "for data in reader.readChunks():\n", "    data = data.astype(np.float32)\n", "    dx = np.count_nonzero(data, axis=(0, 1))\n", "    data = data[:,:,dx != 0]\n", - "    histCalRaw.fill(data)\n", - "    #Filling calculators with data\n", - "    noiseCal.fill(data)\n", - "    \n", - "offsetMap = noiseCal.getOffset() #Produce offset map\n", - "noiseMap = noiseCal.get() #Produce noise map\n", - "noiseCal.reset() #Reset noise calculator\n", - "print(\"Initial maps were created\")" + "    # Some sequences may have fewer than 500 frames in them. To find out how many images there are, we will \n", + "    # temporarily change chunkSize to be the same as the number of frames the last chunk of data has:\n", + "    if data.shape[2] < chunkSize:\n", + "        chunkSize_new = data.shape[2]\n", + "        print(\"Number of images is less than chunkSize. chunkSize is temporarily changed to {}.\"\n", + "              .format(chunkSize_new))\n", + "        images = images + chunkSize_new\n", + "        counter2 += 1 \n", + "    else:\n", + "        images = counter1*chunkSize + counter2*chunkSize_new\n", + "        counter1 += 1\n", + "\n", + "    noiseCal.fill(data) # Filling the noise calculator with data\n", + "    chunkSize = 100 # resetting chunkSize to its default value for the next sequence or data chunk\n", + "\n", + "print('A total of {} images were processed.'.format(images))" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2018-12-06T10:55:21.238009Z", "start_time": "2018-12-06T10:54:54.586435Z" } }, "outputs": [], "source": [ "offsetMap = noiseCal.getOffset() # Producing offset map\n", "noiseMap = noiseCal.get() # Producing noise map\n", "noiseCal.reset() # Resetting noise calculator\n", "print(\"Initial maps are created.\")" ] },
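{ "cell_type": "markdown", "metadata": {}, "source": [ "Conceptually, the offset map is the per-pixel mean and the noise map the per-pixel standard deviation over all dark frames. The next cell is only an illustrative sketch of what noiseCal computes internally, not the xcal implementation; frames stands for a hypothetical array of shape (rows, columns, frames):" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Illustrative sketch only; the maps above actually come from xcal.NoiseCalculator:\n", "def offset_and_noise(frames):\n", "    # per-pixel mean over all frames = offset map; per-pixel standard deviation = noise map\n", "    return np.mean(frames, axis=2), np.std(frames, axis=2)" ] },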
{ "cell_type": "markdown", "metadata": {}, "source": [ "### Offset and Noise Maps prior to Common Mode Correction\n", "\n", "In the following, the histogram of the pnCCD offset, the offset map, and the initial uncorrected noise map are plotted:" ] }, { @@ -274,99 +325,179 @@ "ExecuteTime": { "end_time": "2018-12-06T10:56:20.686534Z", "start_time": "2018-12-06T10:56:11.721829Z" - } + }, + "scrolled": false }, "outputs": [], "source": [ - "#**************OFFSET MAP HISTOGRAM***********#\n", - "ho,co = np.histogram(offsetMap.flatten(), bins=700)\n", + "#************** OFFSET MAP HISTOGRAM ***********#\n", + "ho, co = np.histogram(offsetMap.flatten(), bins=700) # ho = offset histogram; co = offset bin edges\n", "\n", "do = {'x': co[:-1],\n", "      'y': ho,\n", "      'y_err': np.sqrt(ho[:]),\n", "      'drawstyle': 'bars',\n", "      'color': 'cornflowerblue',\n", + "      'label': 'Raw Signal (ADU)'\n", "      }\n", - "\n", - "fig = xana.simplePlot(do, figsize='1col', aspect=2, \n", - "                      x_label = 'Offset (ADU)', \n", - "                      y_label=\"Counts\", y_log=True,\n", - "                      )\n", "      \n", + "fig = xana.simplePlot(do, figsize='1col', aspect=1, x_label = 'Raw Signal (ADU)', y_label=\"Counts\", \n", + "                      title = 'Offset Histogram', x_range=(0, np.nanmax(offsetMap)), y_log=True)\n", + "\n", + "# Calculating the mean and standard deviation for the above histogram:\n", + "mids = 0.5*(co[1:] + co[:-1])\n", + "mean = np.average(mids, weights=ho)\n", + "variance = np.average((mids - mean)**2, weights=ho)\n", + "std = np.sqrt(variance)\n", "\n", + "# Table of statistics on raw signal:\n", + "t0 = PrettyTable()\n", + "t0.title = \"Statistics on Raw Signal (Offset)\"\n", + "t0.field_names = [\"Mean\", \"Standard Deviation\"]\n", + "t0.add_row([\"{:0.3f} (ADU)\".format(mean), \"{:0.3f} (ADU)\".format(std)])\n", + "print(t0,'\\n')\n", + "\n", - "#*****NOISE MAP HISTOGRAM FROM THE OFFSET CORRECTED DATA*******#\n", - "hn,cn = np.histogram(noiseMap.flatten(), bins=2000)\n", + "#***** NOISE MAP HISTOGRAM FROM THE OFFSET CORRECTED DATA *******#\n", + "hn, cn = np.histogram(noiseMap.flatten(), bins=2000) # hn = noise histogram; cn = noise bin edges\n", "\n", "dn = {'x': cn[:-1],\n", "      'y': hn,\n", "      'y_err': np.sqrt(hn[:]),\n", "      'drawstyle': 'bars',\n", "      'color': 'cornflowerblue',\n", + "      'label': 'Noise (ADU)'\n", "      }\n", "\n", - "fig = xana.simplePlot(dn, figsize='1col', aspect=2, \n", - "                      x_label = 'Noise (ADU)', \n", - "                      y_label=\"Counts\", \n", - "                      y_log=True,\n", - "                      x_range=(0, 400))\n", + "fig = xana.simplePlot(dn, figsize='1col', aspect=1, x_label = 'Noise (ADU)', y_label=\"Counts\", \n", + "                      title = 'Noise Histogram', y_log=True, x_range=(0, 600))\n", "\n", "\n", - "#**************HEAT MAPS*******************#\n", - "fig = xana.heatmapPlot(offsetMap[:,:,0],\n", - "                       x_label='Columns', y_label='Rows',\n", - "                       lut_label='Offset (ADU)',\n", - "                       x_range=(0,y),\n", - "                       y_range=(0,x), vmin=6000, vmax=14500)\n", + "#************** HEAT MAPS *******************#\n", + "fig = xana.heatmapPlot(offsetMap[:,:,0], x_label='Column Number', y_label='Row Number', aspect=1,\n", + "                       x_range=(0, y), y_range=(0, x), vmin=6000, vmax=14500, lut_label='Offset (ADU)', \n", + "                       panel_x_label='Columns Stat (ADU)', panel_y_label='Rows Stat (ADU)', title = 'OffsetMap')\n", "\n", - "fig = xana.heatmapPlot(noiseMap[:,:,0],\n", - "                       x_label='Columns', y_label='Rows',\n", - "                       lut_label='Noise (ADU)',\n", - "                       x_range=(0,y),\n", - "                       y_range=(0,x), vmax=2*np.mean(noiseMap))" + "fig = xana.heatmapPlot(noiseMap[:,:,0], x_label='Column Number', y_label='Row Number', aspect=1,\n", + "                       lut_label='Uncorrected Noise (ADU)', x_range=(0, y),\n", + "                       y_range=(0, x), vmax=2*np.mean(noiseMap), panel_x_label='Columns Stat (ADU)', \n", + "                       panel_y_label='Rows Stat (ADU)', title = 'Uncorrected NoiseMap')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Initial BadPixelMap\n", "\n", "This is generated based on the offset and uncorrected noise maps:" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "bad_pixels = np.zeros(offsetMap.shape, np.uint32)\n", "mnoffset = np.nanmedian(offsetMap)\n", "stdoffset = np.nanstd(offsetMap)\n", "bad_pixels[(offsetMap < mnoffset-bad_pixel_offset_sigma*stdoffset) |\n", "           (offsetMap > mnoffset+bad_pixel_offset_sigma*stdoffset)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n", "\n", "mnnoise = np.nanmedian(noiseMap)\n", "stdnoise = np.nanstd(noiseMap)\n", "bad_pixels[(noiseMap < 
mnnoise-bad_pixel_noise_sigma*stdnoise) |\n", "           (noiseMap > mnnoise+bad_pixel_noise_sigma*stdnoise)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n", "\n", "fig = xana.heatmapPlot(np.log2(bad_pixels[:, :, 0]),\n", "                       x_label='Columns', y_label='Rows',\n", "                       lut_label='Bad Pixel Value (ADU)',\n", "                       x_range=(0, y),\n", "                       y_range=(0, x), vmin=0, vmax=32)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ - "cmCorrection = xcal.CommonModeCorrection(sensorSize, \n", - "                                         [512, 512], \n", - "                                         'row',\n", - "                                         nCells = memoryCells, \n", - "                                         dType=np.float32,\n", - "                                         noiseMap = noiseMap.astype(np.float32),\n", - "                                         runParallel=run_parallel,\n", - "                                         stats=True)\n", - "cmCorrection.debug()\n", - "noiseCalCM = xcal.NoiseCalculator(sensorSize, memoryCells, \n", - "                                  cores=cpuCores, blockSize=blockSize,\n", - "                                  runParallel=run_parallel)" + "# Common Mode Correction:\n", + "# In this method, the median of all pixels that are read out at the same time along a row is subtracted from the\n", + "# signal in each pixel:\n", + "cmCorrection = xcal.CommonModeCorrection(sensorSize, [data.shape[0]//2, data.shape[1]//2],\n", + "                                         commonModeAxis, parallel=run_parallel, dType=np.float32, stride=1,\n", + "                                         noiseMap=noiseMap.astype(np.float32), minFrac=0)\n", + "\n", + "cmCorrection.debug()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ + "# Histogram Calculators:\n", + "\n", + "# For offset corrected data:\n", + "histCalCorrected = xcal.HistogramCalculator(sensorSize, bins=400, range=[-200, 200], memoryCells=memoryCells,\n", + "                                            cores=cpuCores, gains=None, blockSize=blockSize)\n", + "# For common mode corrected data:\n", + "histCalCMCorrected = xcal.HistogramCalculator(sensorSize, bins=400, range=[-200, 200], memoryCells=memoryCells,\n", + "                                              cores=cpuCores, gains=None, blockSize=blockSize)\n", + "\n", + "noiseCalCM = xcal.NoiseCalculator(sensorSize, memoryCells, cores=cpuCores, blockSize=blockSize,\n", + "                                  runParallel=run_parallel)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Second Iteration\n", "\n", "During the second iteration, the data are offset and common mode corrected to produce a common mode corrected noise map. The offset correction subtracts the offset map obtained above from each frame; the common mode correction then subtracts the median of all pixels that are read out at the same time along a row." ] },
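{ "cell_type": "markdown", "metadata": {}, "source": [ "The row-wise common mode principle can be sketched in plain NumPy as follows. This is an illustration only, not the xcal.CommonModeCorrection implementation used below; frames stands for a hypothetical array of shape (rows, columns, frames):" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Illustrative sketch only; the actual correction below uses xcal.CommonModeCorrection:\n", "def common_mode_rows(frames):\n", "    # subtract, per frame, the median of each row from every pixel in that row\n", "    return frames - np.median(frames, axis=1, keepdims=True)" ] },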
{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ + "counter1 = 1 # To count how many chunks contain the full chunkSize of images.\n", + "counter2 = 0 # To count how many chunks contain fewer than chunkSize images.\n", + "chunkSize_new = 0 # See below\n", + "images = 0 # Total number of images processed so far\n", + "\n", "for data in reader.readChunks():\n", "    data = data.astype(np.float32)\n", "    dx = np.count_nonzero(data, axis=(0, 1))\n", "    data = data[:,:,dx != 0]\n", + "    \n", + "    # Some sequences may have fewer than 500 frames in them. To find out how many images there are, we will \n", + "    # temporarily change chunkSize to be the same as the number of frames the last chunk of data has:\n", + "    if data.shape[2] < chunkSize:\n", + "        chunkSize_new = data.shape[2]\n", + "        print(\"Number of images is less than chunkSize. chunkSize is temporarily changed to {}.\"\n", + "              .format(chunkSize_new))\n", + "        images = images + chunkSize_new\n", + "        counter2 += 1 \n", + "    else:\n", + "        images = counter1*chunkSize + counter2*chunkSize_new\n", + "        counter1 += 1\n", + "    \n", + "    data -= offsetMap.data # Offset correction\n", + "    offset_corr_data = copy.copy(data) # copied so the offset corrected signal can be used in the table below\n", + "    histCalCorrected.fill(data)\n", "    cellTable=np.zeros(data.shape[2], np.int32) # Common mode correction\n", "    data = cmCorrection.correct(data.astype(np.float32), cellTable=cellTable) \n", - "    #Filling calculators with data\n", - "    noiseCalCM.fill(data)\n", - "    \n", - "offsetMapCM = noiseCalCM.getOffset() #Produce offset map\n", - "noiseMapCM = noiseCalCM.get() #Produce noise map\n", - "noiseCalCM.reset() #Reset noise calculator\n" + "    histCalCMCorrected.fill(data)\n", + "    noiseCalCM.fill(data) # Filling calculators with data\n", + "    chunkSize = 100 # resetting chunkSize to its default value for the next sequence or data chunk\n", + "\n", + "print('A total of {} images were processed.'.format(images))\n", + "print(\"Offset and common mode corrections were applied.\")" ] }, { @@ -375,28 +506,136 @@ "metadata": {}, "outputs": [], "source": [ - "#*****NOISE MAP HISTOGRAM FROM THE OFFSET CORRECTED DATA*******#\n", - "hn,cn = np.histogram(noiseMapCM.flatten(), bins=2000)\n", + "noiseMapCM = noiseCalCM.get() # Producing common mode corrected noise map\n", + "ho, eo, co, so = histCalCorrected.get()\n", + "hCM, eCM, cCM, sCM = histCalCMCorrected.get()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# We are copying these so that we can replot them later after the calculators are reset:\n", "ho_second_trial = copy.copy(ho)\n", "co_second_trial = copy.copy(co)\n", "hCM_second_trial = copy.copy(hCM)\n", "cCM_second_trial = copy.copy(cCM)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Signal after Offset and Common Mode Corrections\n", "\n", "Here, the offset corrected signal is compared to the common mode corrected signal (in the form of binned histograms): " ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "do = [{'x': co,\n", "      'y': ho,\n", "      'y_err': np.sqrt(ho[:]),\n", "      'drawstyle': 'steps-mid',\n", "      'color': 'cornflowerblue',\n", "      'label': 'Offset Corrected Signal'\n", "      },\n", "      {'x': cCM,\n", "      'y': hCM,\n", "      'y_err': np.sqrt(hCM[:]),\n", "      'drawstyle': 'steps-mid',\n", "      'color': 'red',\n", "      'label': 'Common Mode Corrected Signal'\n", "      }]\n", "      \n", "fig = xana.simplePlot(do, figsize='2col', aspect=1, x_label = 'Corrected Signal (ADU)', y_label=\"Counts\", \n", "                      x_range = (-200, 200), legend='top-right-frame-1col', \n", "                      title = 'Corrected Signal - 2nd Iteration') \n", "\n", "t0 = PrettyTable()\n", "t0.title = \"Comparison of the First Round of Corrections - Bad Pixels Not Excluded\"\n", "t0.field_names = [\"After Offset Correction\", \"After Common Mode Correction\"]\n", "t0.add_row([\"Mean: {:0.3f} (ADU)\".format(np.mean(offset_corr_data)), \"Mean: {:0.3f} 
(ADU)\".format(np.mean(data))])\n", + "t0.add_row([\"Median: {:0.3f} (ADU)\".format(np.median(offset_corr_data)), \"Median: {:0.3f} (ADU)\"\n", + " .format(np.median(data))])\n", + "t0.add_row([\"Standard Deviation: {:0.3f} (ADU)\".format(np.std(offset_corr_data)), \n", + " \"Standard Deviation: {:0.3f} (ADU)\"\n", + " .format(np.std(data))])\n", + "print(t0,'\\n')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Noise Map after Common Mode Correction\n", "\n", - "fig = xana.heatmapPlot(noiseMapCM[:,:,0],\n", + "In the following, the effect of common mode correction on the noise is shown. Finally common mode corrected noise map (noiseMapCM) is displayed and compared to the initial uncorrected noise map:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#***** NOISE MAP HISTOGRAM FROM THE COMMON MODE CORRECTED DATA *******#\n", + "hn, cn = np.histogram(noiseMap.flatten(), bins=2000, range=(0, 600))\n", + "hn_CM, cn_CM = np.histogram(noiseMapCM.flatten(), bins=2000, range=(0, 600))\n", + "\n", + "dn = [{'x': cn[:-1],\n", + " 'y': hn,\n", + " # 'y_err': np.sqrt(hn[:]),\n", + " 'drawstyle': 'steps-mid', # 'bars',\n", + " 'color': 'blue', # 'cornflowerblue',\n", + " 'label': 'Uncorrected Noise (ADU)'\n", + " },\n", + " {'x': cn_CM[:-1],\n", + " 'y': hn_CM,\n", + " # 'y_err': np.sqrt(hn_CM[:]),\n", + " 'drawstyle': 'steps-mid', # 'bars',\n", + " 'color': 'crimson', # 'red',#'cornflowerblue',\n", + " # 'ecolor': 'crimson',\n", + " 'label': 'Common Mode Corrected Noise (ADU)'\n", + " }]\n", + "fig = xana.simplePlot(dn, figsize='1col', aspect=1, x_label='Noise (ADU)', y_label=\"Counts\",\n", + " x_range=(0, 600), y_log=True, legend='top-center-frame-1col',\n", + " title='Noise Comparison')\n", + "\n", + "\n", + "fig = xana.heatmapPlot(noiseMapCM[:, :, 0],\n", " x_label='Columns', y_label='Rows',\n", - " lut_label='Noise (ADU)',\n", - " x_range=(0,y),\n", - " y_range=(0,x), vmax=2*np.mean(noiseMapCM))" + " lut_label='Common Mode Corrected Noise (ADU)',\n", + " x_range=(0, y),\n", + " y_range=(0, x), vmax=2*np.mean(noiseMapCM), title='Common Mode Corrected Noise Map')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Resetting the calculators:\n", + "noiseCalCM.reset() # resetting noise calculator\n", + "histCalCorrected.reset() # resetting histogram calculator\n", + "histCalCMCorrected.reset() # resetting histogram calculator\n", + "cmCorrection.reset()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Final BadPixelMap\n", + "\n", + "This is generated based on the offset and common mode corrected noise maps:" ] }, { @@ -405,23 +644,29 @@ "metadata": {}, "outputs": [], "source": [ - "from cal_tools.enums import BadPixels\n", "bad_pixels = np.zeros(offsetMap.shape, np.uint32)\n", "mnoffset = np.nanmedian(offsetMap)\n", "stdoffset = np.nanstd(offsetMap)\n", - "bad_pixels[(offsetMap < mnoffset-bad_pixel_offset_sigma*stdoffset) | \n", + "bad_pixels[(offsetMap < mnoffset-bad_pixel_offset_sigma*stdoffset) |\n", " (offsetMap > mnoffset+bad_pixel_offset_sigma*stdoffset)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n", "\n", - "mnnoise = np.nanmedian(noiseMap)\n", - "stdnoise = np.nanstd(noiseMap)\n", - "bad_pixels[(noiseMap < mnnoise-bad_pixel_noise_sigma*stdnoise) | \n", - " (noiseMap > mnnoise+bad_pixel_noise_sigma*stdnoise)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n", + "mnnoise = np.nanmedian(noiseMapCM)\n", + 
"stdnoise = np.nanstd(noiseMapCM)\n", + "bad_pixels[(noiseMapCM < mnnoise-bad_pixel_noise_sigma*stdnoise) |\n", + " (noiseMapCM > mnnoise+bad_pixel_noise_sigma*stdnoise)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n", "\n", - "fig = xana.heatmapPlot(np.log2(bad_pixels[:,:,0]),\n", + "fig = xana.heatmapPlot(np.log2(bad_pixels[:, :, 0]),\n", " x_label='Columns', y_label='Rows',\n", " lut_label='Bad Pixel Value (ADU)',\n", - " x_range=(0,y),\n", - " y_range=(0,x), vmin=0, vmax=32)" + " x_range=(0, y),\n", + " y_range=(0, x), vmin=0, vmax=32)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Calibration Constants" ] }, { @@ -431,13 +676,12 @@ "ExecuteTime": { "end_time": "2018-12-06T10:56:22.741284Z", "start_time": "2018-12-06T10:56:20.688393Z" - }, - "collapsed": true + } }, "outputs": [], "source": [ "constant_maps = {\n", - " 'Offset': offsetMapCM,\n", + " 'Offset': offsetMap,\n", " 'Noise': noiseMapCM,\n", " 'BadPixelsDark': bad_pixels\n", "}\n", @@ -458,7 +702,6 @@ " pixels_x=1024,\n", " pixels_y=1024)\n", "\n", - "\n", " device = Detectors.PnCCD1\n", " metadata.detector_condition = condition\n", "\n", @@ -468,67 +711,20 @@ " else:\n", " metadata.calibration_constant_version = Versions.Timespan(device=device,\n", " start=creation_time)\n", - " \n", + "\n", " metadata.calibration_constant_version.raw_data_location = file_loc\n", - " \n", + "\n", " if db_output:\n", " try:\n", " metadata.send(cal_db_interface)\n", - " print(\"Inject {} constants from {}\".format(const_name, \n", - " metadata.calibration_constant_version.begin_at))\n", + " print(\"Inject {} constants from {}\".format(const_name,\n", + " metadata.calibration_constant_version.begin_at))\n", " except Exception as e:\n", " print(e)\n", - " \n", + "\n", " if local_output:\n", " save_const_to_h5(metadata, out_folder)\n", - " print(\"Calibration constant {} is stored locally.\".format(const)) \n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "histCalCorr = xcal.HistogramCalculator(sensorSize, bins=200, \n", - " range=[-200, 200], parallel=False, \n", - " memoryCells=memoryCells, \n", - " cores=cpuCores, blockSize=blockSize)\n", - "\n", - "\n", - "for data in reader.readChunks():\n", - " data = data.astype(np.float32)\n", - " data -= offsetMap.data\n", - " histCalCorr.fill(data)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ho,eo,co,so = histCalCorr.get()\n", - "\n", - "\n", - "d = [{'x': co,\n", - " 'y': ho,\n", - " 'y_err': np.sqrt(ho[:]),\n", - " 'drawstyle': 'steps-mid',\n", - " 'errorstyle': 'bars',\n", - " 'errorcoarsing': 2,\n", - " 'label': 'Offset corr.'\n", - " },\n", - " \n", - " ]\n", - " \n", - "\n", - "fig = xana.simplePlot(d, aspect=1, x_label='Energy(ADU)', \n", - " y_label='Number of occurrences', figsize='2col',\n", - " y_log=True, x_range=(-50,500),\n", - " legend='top-center-frame-2col')" + " print(\"Calibration constant {} is stored to {}.\".format(const, out_folder))" ] } ], diff --git a/xfel_calibrate/calibrate.py b/xfel_calibrate/calibrate.py index ee157996979ccbdc85b2f7f9d8288afc2980f176..9f77be0a99c4002ba87aea63ab742e4e8ebe6cbe 100755 --- a/xfel_calibrate/calibrate.py +++ b/xfel_calibrate/calibrate.py @@ -788,7 +788,7 @@ def run(): title, author, version = extract_title_author_version(nb) if not title: - title = "{}: {} Calibration".format(detector, caltype) + title = "{} {} Calibration".format(detector, caltype) if not 
author: author = "anonymous" if not version: