diff --git a/notebooks/pnCCD/Characterize_NBC.ipynb b/notebooks/pnCCD/Characterize_NBC.ipynb deleted file mode 100644 index 4da1f55a899b0240bf8f67bf624d5fb93dd5979b..0000000000000000000000000000000000000000 --- a/notebooks/pnCCD/Characterize_NBC.ipynb +++ /dev/null @@ -1,1365 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# CCD Characterization ##\n", - "\n", - "Authors: S. Hauf and T. Rüter, Version: 1.0\n", - "\n", - "This notebook implements read-out line wise CCD characterization. It requires a dark image data set and an X-ray image data set with a prominent line feature to work. " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "dark_images = \"/gpfs/exfel/data/scratch/ruetert/esrf18/c12_id13_180425_003_001.frms6\" # file(s) that contain dark data, required\n", - "xray_images = \"/gpfs/exfel/data/scratch/ruetert/esrf18/c12_id13_180425_003_00[1-2].frms6\" # file(s) that contain x-ray data, required\n", - "out_folder = \"/gpfs/exfel/data/scratch/haufs/test/pnccd\" # output folder, required\n", - "file_format = \"frms6\" # file format to use. Currently frms6 is supported\n", - "split_evt_primary_threshold = 7. # primary threshold for split event classification in terms of n sigma noise\n", - "split_evt_secondary_threshold = 4. # secondary threshold for split event classification in terms of n sigma noise\n", - "split_evt_mip_threshold = 6000. # MIP threshold for event classification\n", - "line_energy_ev = 5898.8010 # energy of the line to fit for gain derivation\n", - "line_position_adu = 2300. # position estimate in adu for line, required\n", - "line_fit_limits_adu = 100. # +- limits for line position in fit estimate.\n", - "tag_first_singles = 0 # whether to treat first singles separately\n", - "limit_dark_images = 2000 # limit to this number of images (per file), set to 0 to use available images\n", - "limit_xray_images = 0 # limit to this number of images (per file), set to 0 to use available images\n", - "cti_limit_low = 2000 # lower bound in ADU of data to use for CTI evaluation\n", - "cti_limit_high = 2450 # upper bound in ADU of data to use for CTI evaluation\n", - "detector_instance = \"PNCCD-Siegen\" # detector instance\n", - "cluster_profile = \"noDB\" # The ipcluster profile to use" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\"\"\"\n", - "First start an ipython cluster with\n", - "\n", - "ipcluster start -n NUM_CORES\n", - "\"\"\"\n", - "import warnings\n", - "warnings.filterwarnings('ignore')\n", - "\n", - "import matplotlib\n", - "#matplotlib.use(\"agg\")\n", - "%matplotlib inline\n", - "\n", - "\n", - "#profiler has to be the first import\n", - "import XFELDetAna.xfelprofiler as xprof\n", - "\n", - "profiler = xprof.Profiler()\n", - "profiler.disable()\n", - "from XFELDetAna.util import env\n", - "env.iprofile = cluster_profile\n", - "\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "\n", - "import XFELDetAna.xfelpyanatools as xana\n", - "import XFELDetAna.xfelpycaltools as xcal\n", - "from XFELDetAna.core.reader import ChunkReader\n", - "\n", - "xcal.enableGPU=False\n", - "from XFELDetAna.detectors.pnccd.frm6reader import Frms6Reader\n", - "\n", - "import h5py\n", - "\n", - "import glob\n", - "import os\n", - "\n", - "dark_files = []\n", - "for f in glob.glob(dark_images):\n", - " dark_files.append(f)\n", - " \n", - 
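"# collect the X-ray data files matching the glob pattern\n", -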
"xray_files = []\n", - "for f in glob.glob(xray_images):\n", - " xray_files.append(f)\n", - " \n", - "def nImagesOrLimit(nImages, limit):\n", - " if limit == 0:\n", - " return nImages\n", - " else:\n", - " return min(nImages, limit)\n", - " \n", - "print(\"Using the following files for dark image analysis: \")\n", - "for f in dark_files:\n", - " dshape = list(Frms6Reader.getDataShape(f))\n", - " dshape, nImages = dshape[:-1], dshape[-1]\n", - " print(\"{} ({} images)\".format(f, nImagesOrLimit(nImages, limit_dark_images)))\n", - "\n", - "print(\"\")\n", - "\n", - "print(\"Using the following files for X-ray image analysis: \")\n", - "for f in xray_files:\n", - " dshape = list(Frms6Reader.getDataShape(f))\n", - " dshape, nImages = dshape[:-1], dshape[-1]\n", - " print(\"{} ({} images)\".format(f, nImagesOrLimit(nImages, limit_xray_images)))\n", - "\n", - "print(\"\")\n", - "\n", - "try:\n", - " os.makedirs(out_folder)\n", - "except:\n", - " pass\n", - "print(\"Outputting to: {}\".format(out_folder))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Dark Image Evaluation ##\n", - "\n", - "Dark images are evaluated in an iterative fashion to deduce offset and noise maps. In the first iteration, all data is used and no events are excluded." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n", - "filename = dark_files[0]\n", - "\n", - "\n", - "# the sensor-size which corresponds to your data in the file, for the data generator this specifies the image size\n", - "dshape = list(Frms6Reader.getDataShape(filename))\n", - "dshape, nImages = dshape[:-1], dshape[-1]\n", - "nImages = min(nImages, limit_dark_images)\n", - "sensorSize = dshape\n", - "blockSize = [dshape[0]//2,dshape[0]//4]\n", - "xcal.defaultBlockSize = blockSize\n", - "\n", - "\n", - "# the number of images to read per chunk, for the data generator this specifies the number of images to generate on \n", - "# each call\n", - "chunkSize = 100\n", - "\n", - "cpuCores = 8\n", - "memoryCells = 1\n", - "\n", - "\n", - "#nImages = pnccd.getDataSize(filename, '/dark/images/image')[2]\n", - "#nImages = pnccd.getDataSize(filename, '/dark/images/image')[0]\n", - "sigmaNoise = 5.\n", - "commonModeBlockSize = [dshape[0],dshape[1]//8]\n", - "commonModeAxis = 'row'\n", - "profile = False\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n", - "noiseCal = xcal.NoiseCalculator(sensorSize, memoryCells, cores=cpuCores, parallel=True)\n", - "histCalRaw = xcal.HistogramCalculator(sensorSize, bins=1000, range=[0, 10000], parallel=False, memoryCells=memoryCells, cores=cpuCores)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "# we can use the noise calculator to also retreive the offset\n", - "for fl in dark_files:\n", - " dshape = list(Frms6Reader.getDataShape(fl))\n", - " dshape, nImages = dshape[:-1], dshape[-1]\n", - " nImages = nImagesOrLimit(nImages, limit_dark_images)\n", - " reader = ChunkReader(fl, Frms6Reader.readData, nImages, chunkSize, path = \"/frames\",\n", - " pixels_x = sensorSize[0],\n", - " pixels_y = sensorSize[1],\n", - " simulated=False)\n", - " for data in reader.readChunks():\n", - " data = data.astype(np.float32)\n", - " noiseCal.fill(data)\n", - " histCalRaw.fill(data)\n", - " \n", - "#get noise and offset maps\n", - 
"offsetMapInitial = noiseCal.getOffset()\n", - "noiseMapInitial = noiseCal.get()\n", - "noiseCal.reset()\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n", - "#fig = xana.heatmapPlot(noiseMapInitial[:,:,0], lut_label=\"Noise (ADU)\", vmax=3*np.nanmedian(noiseMapInitial))\n", - "#fig = xana.heatmapPlot(offsetMapInitial[:,:,0], lut_label=\"Offset (ADU)\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "rawHist = histCalRaw.get()\n", - "\n", - "#fig = plt.figure(figsize=(8,8))\n", - "#ax = fig.add_subplot(111)\n", - "#xana.histPlot(ax, rawHist, plot_errors=True, alpha=0.3)\n", - "#t = ax.set_title(\"xfelpyanatools.plotHist\")\n", - "#t = ax.set_xlabel(\"Intensity (ADU)\")\n", - "#t = ax.set_ylabel(\"Counts\")\n", - "#r = ax.set_xlim(2000,3700)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "offsetCorrection = xcal.OffsetCorrection(sensorSize, offsetMapInitial, nCells = memoryCells, runOnGPU=False, gpuZDim=4, keepOnGPU=False, gains=None, parallel=True)\n", - "cmCorrection = xcal.CommonModeCorrection(sensorSize, commonModeBlockSize, commonModeAxis, nCells = memoryCells, stats=True, runOnGPU=False, noiseMap = noiseMapInitial, gpuZDim=2, gpuStripeSize=2, gains=None, parallel=True)\n", - "histCalCorrected = xcal.HistogramCalculator(sensorSize, bins=400, range=[-500, 500], memoryCells=memoryCells, cores=cpuCores, parallel=True)\n", - "for fl in dark_files:\n", - " dshape = list(Frms6Reader.getDataShape(fl))\n", - " dshape, nImages = dshape[:-1], dshape[-1]\n", - " nImages = nImagesOrLimit(nImages, limit_dark_images)\n", - " reader = ChunkReader(fl, Frms6Reader.readData, nImages, chunkSize, path = \"/frames\",\n", - " pixels_x = sensorSize[0],\n", - " pixels_y = sensorSize[1],\n", - " simulated=False)\n", - " for data in reader.readChunks():\n", - " data = data.astype(np.float32)\n", - " data = offsetCorrection.correct(data)\n", - " data = cmCorrection.correct(data)\n", - " histCalCorrected.fill(data)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "def do_fit(calCMCorrectedHist, mu, sigma, limit, output=True, xlim=None, ylim=None):\n", - " from iminuit import Minuit\n", - " import inspect\n", - " pplt = plt\n", - "\n", - " hist, e, bin_centres, _ = calCMCorrectedHist\n", - "\n", - "\n", - " hnonzeros = hist != 0 \n", - "\n", - "\n", - " def multiGauss(x, *p):\n", - " r = 0.\n", - " for i in range(1):\n", - " A, mu, sigma = p[i*3:(i+1)*3]\n", - " r += A*np.exp(-(x-mu)**2/(2.*sigma**2))\n", - " return r\n", - "\n", - " def multiGaussWrap(A1, mu1, sigma1):\n", - " return np.sum(((multiGauss(bin_centres[hnonzeros],A1, mu1, sigma1)\n", - " - hist[hnonzeros]) / np.sqrt(hist[hnonzeros])) ** 2)\n", - "\n", - " pparm = dict(throw_nan=False, pedantic=False, print_level=1 if output else 0)\n", - "\n", - "\n", - " pparm['A1'] = np.max(hist)\n", - " pparm['mu1'] = mu\n", - " pparm['limit_mu1'] = [mu-limit, mu+limit]\n", - " pparm['sigma1'] = sigma\n", - " pparm['limit_sigma1'] = [0, 5*sigma]\n", - "\n", - "\n", - "\n", - " m = Minuit(multiGaussWrap, **pparm)\n", - " fmin = m.migrad()\n", - " ferr = m.hesse()\n", - " #ferr = m.minos()\n", - " res = m.values\n", - "\n", - "\n", - " x = np.arange(mu-5*limit-5*sigma, mu+5*limit+5*sigma)\n", - " p = 
[res['A1'], res['mu1'], res['sigma1']]\n", - " y = multiGauss(x, *p)\n", - "\n", - " if output:\n", - " fig = plt.figure(figsize=(10,5))\n", - " ax = fig.add_subplot(111)\n", - " ax.plot(bin_centres, hist, label='data (CTI+RARC)', drawstyle='steps-mid')\n", - " ax.plot(x, y, label='fit: $\chi^2/\mathrm{d.o.f.}=%0.1f$'%(getattr(fmin[0], 'fval')/float(np.count_nonzero(hnonzeros)-3)), color='red', linewidth=2)\n", - "\n", - "\n", - " def gauss(x, *p):\n", - " import numpy as np\n", - " A, mu, sigma = p\n", - " return A*np.exp(-(x-mu)**2/(2.*sigma**2))\n", - "\n", - "\n", - "\n", - " ax.set_xlabel('Energy (ADU)')\n", - " ax.set_ylabel('counts')\n", - "\n", - " #ax.text(110, 5000, '$\chi^2/\mathrm{d.o.f.}=%0.1f$'%(getattr(fmin[0], 'fval')/float(np.count_nonzero(hnonzeros)-3)))\n", - " plt.legend(loc=0)\n", - " if ylim is None:\n", - " ax.set_ylim(1, np.max(hist))\n", - " else:\n", - " ax.set_ylim(ylim)\n", - " if xlim is not None:\n", - " ax.set_xlim(xlim)\n", - " ax.semilogy()\n", - " return res" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The initial noise and offset maps are then used to correct the data in a second iteration, in which common mode corrections are also applied. The corrected data is then fitted, assuming a noise peak centered around 0. The width of this peak yields an event exclusion threshold for the final iteration at $5\sigma$ noise." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "res = do_fit(histCalCorrected.get(), 0, np.median(noiseMapInitial), np.median(noiseMapInitial))\n", - "event_threshold = 5.*res[\"sigma1\"]\n", - "print(\"Using an event exclusion threshold of {:0.2f} ADU\".format(event_threshold))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false, - "scrolled": true - }, - "outputs": [], - "source": [ - "\n", - "\n", - "offsetCorrection = xcal.OffsetCorrection(sensorSize, offsetMapInitial, nCells = memoryCells, runOnGPU=False, gpuZDim=4, keepOnGPU=False, gains=None, parallel=True)\n", - "cmCorrection = xcal.CommonModeCorrection(sensorSize, commonModeBlockSize, commonModeAxis, nCells = memoryCells, stats=True, runOnGPU=False, noiseMap = noiseMapInitial, gpuZDim=2, gpuStripeSize=2, gains=None, parallel=True)\n", - "cmCorrectionD2 = xcal.CommonModeCorrection(sensorSize, commonModeBlockSize, commonModeAxis, nCells = memoryCells, stats=True, runOnGPU=False, noiseMap = noiseMapInitial, gpuZDim=2, gpuStripeSize=2, gains=None, parallel=True)\n", - "offsetCal = xcal.NoiseCalculator(sensorSize, memoryCells, cores=cpuCores, parallel=True)\n", - "\n", - "histCalCorrected = xcal.HistogramCalculator(sensorSize, bins=400, range=[-100, 300], memoryCells=memoryCells, cores=cpuCores, parallel=True)\n", - "histCalCMCorrected = xcal.HistogramCalculator(sensorSize, bins=400, range=[-100, 300], memoryCells=memoryCells, cores=cpuCores, parallel=True)\n", - "\n", - "#statistics can be calculated either individually\n", - "meanCal = xcal.MeanCalculator(sensorSize, memoryCells, cores=cpuCores, parallel=True)\n", - "minCal = xcal.MinCalculator(sensorSize, memoryCells, cores=cpuCores, parallel=True)\n", - "maxCal = xcal.MaxCalculator(sensorSize, memoryCells, cores=cpuCores, parallel=True)\n", - "stdCal = xcal.StdCalculator(sensorSize, memoryCells, cores=cpuCores, parallel=True)\n", - "noiseCal.reset()\n", - "\n", - "\n", - "\n", - "import copy\n", - "\n", - "for fl in dark_files:\n", - " dshape = 
list(Frms6Reader.getDataShape(fl))\n", - " dshape, nImages = dshape[:-1], dshape[-1]\n", - " nImages = nImagesOrLimit(nImages, limit_dark_images)\n", - " reader = ChunkReader(fl, Frms6Reader.readData, nImages, chunkSize, path = \"/frames\",\n", - " pixels_x = sensorSize[0],\n", - " pixels_y = sensorSize[1],\n", - " simulated=False)\n", - " \n", - " for data in reader.readChunks():\n", - " data = data.astype(np.float32)\n", - " d2 = offsetCorrection.correct(copy.copy(data))\n", - " d2 = cmCorrectionD2.correct(d2)\n", - " data[d2 > event_threshold] = np.nan\n", - "\n", - " offsetCal.fill(data)\n", - " data = offsetCorrection.correct(data)\n", - "\n", - " histCalCorrected.fill(data)\n", - " data = np.ma.MaskedArray(data, np.isnan(data))\n", - " data = cmCorrection.correct(data)\n", - " histCalCMCorrected.fill(data)\n", - " noiseCal.fill(data)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "noiseMap = noiseCal.get()\n", - "offsetMap = offsetCal.getOffset()\n", - "cmStats = cmCorrection.get()\n", - "#corStats = statsCalCorrected.get" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Dark image Characterization Results ###" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "print(\"Median noise is: {:0.2f} ADU\".format(np.nanmedian(noiseMap.data)))\n", - "print(\"Mean noise is: {:0.2f} ADU\".format(np.nanmean(noiseMap.data)))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n", - "# meas = os.path.splitext(os.path.basename(dark_files[0]))[0]\n", - "# with h5py.File(\"{}/offset_store_{}.h5\".format(out_folder, meas), \"w\") as f:\n", - "# f[\"{}/Offset/0/data\".format(detector_instance)] = offsetMap.data\n", - "# f[\"{}/Noise/0/data\".format(detector_instance)] = noiseMap.data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n", - "fig = xana.heatmapPlot(noiseMap.data[:,:,0], lut_label=\"Noise (ADU)\", vmax=10, vmin=3)\n", - "fig = xana.heatmapPlot(offsetMap.data[:,:,0], lut_label=\"Offset (ADU)\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "commonMode = cmCorrection.get()\n", - "fig = plt.figure(figsize=(12,4))\n", - "ax = fig.add_subplot(121)\n", - "xana.histPlot(ax, commonMode, bins=50, plot_errors=True)\n", - "t = ax.set_title(\"Common mode distribution\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n", - "for i in range(0, cmStats.shape[0]//(sensorSize[1]//cmStats.shape[1])):\n", - " cmStatConcat = cmStats[i*(sensorSize[1]//cmStats.shape[1]):(i+1)*(sensorSize[1]//cmStats.shape[1]),:,:].reshape(sensorSize[1]//cmStats.shape[1]*cmStats.shape[1], cmStats.shape[2])\n", - " fig = xana.heatmapPlot(cmStatConcat, figsize=(8,8), lut_label=\"Common mode(ADU)\", x_title = \"frame\", y_title = \"row\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n", - "rawHist = histCalRaw.get(cumulatative = True)\n", - "corHist = histCalCorrected.get(cumulatative = True)\n", - "cmCorHist = histCalCMCorrected.get(cumulatative = True)\n", - "\n", - 
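"# compare the raw, offset-corrected and common-mode-corrected histograms side by side\n", -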
"fig = plt.figure(figsize=(20,5))\n", - "ax = fig.add_subplot(131)\n", - "xana.histPlot(ax, rawHist, plot_errors=True)\n", - "t = ax.set_title(\"Raw data histogram\")\n", - "t = ax.set_xlabel(\"Intensity (ADU)\")\n", - "t = ax.set_ylabel(\"Counts\")\n", - "l = ax.set_xlim(np.nanmin(offsetMap),np.nanmax(offsetMap))\n", - "\n", - "ax = fig.add_subplot(132)\n", - "xana.histPlot(ax, corHist, plot_errors=True)\n", - "t = ax.set_title(\"Offset corrected data histogram\")\n", - "t = ax.set_xlabel(\"Intensity (ADU)\")\n", - "t = ax.set_ylabel(\"Counts\")\n", - "l = ax.set_xlim(-10*np.median(noiseMap),10*np.median(noiseMap))\n", - "\n", - "ax = fig.add_subplot(133)\n", - "xana.histPlot(ax, cmCorHist, plot_errors=True)\n", - "t = ax.set_title(\"Common mode corrected data histogram\")\n", - "t = ax.set_xlabel(\"Intensity (ADU)\")\n", - "t = ax.set_ylabel(\"Counts\")\n", - "l = ax.set_xlim(-10*np.median(noiseMap),10*np.median(noiseMap))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## X-ray Image Characterization ##\n", - "\n", - "Gain and energy resolution, as well as CTI and relative gain correciton information is derived from X-ray illuminated images." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "filename = xray_files[0]\n", - "dshape = list(Frms6Reader.getDataShape(filename))\n", - "dshape, nImages = dshape[:-1], dshape[-1]\n", - "chunkSize = 1000\n", - "reader = ChunkReader(filename, Frms6Reader.readData, nImages, chunkSize, path = \"/frames\", pixels_x = sensorSize[0],\n", - " pixels_y = sensorSize[1], simulated=False)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "patternClassifier = xcal.PatternClassifier(sensorSize, noiseMap, split_evt_primary_threshold,\n", - " split_evt_secondary_threshold, split_evt_mip_threshold,\n", - " tagFirstSingles = tag_first_singles, nCells=memoryCells,\n", - " cores=10, runOnGPU=False, parallel=True )\n", - "patternClassifier._imagesPerChunk = 50\n", - "patternSelector = xcal.PatternSelector(sensorSize, selectionList = [100, 101, 200, 201, 202, 203] , parallel=True)#, 300, 301, 302, 303, 400, 401, 402, 403], parallel=False)\n", - "cmCorrection = xcal.CommonModeCorrection(sensorSize, commonModeBlockSize, commonModeAxis, nCells = memoryCells, stats=True, runOnGPU=False, noiseMap = noiseMap, gpuZDim=2, keepOnGPU=True, gpuStripeSize=2, parallel=True)\n", - "histCalCorrected = xcal.HistogramCalculator(sensorSize, bins=400, range=[-100, 10000], memoryCells=memoryCells, cores=cpuCores, parallel=True)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "#from ipywidgets import FloatProgress\n", - "from IPython.core.display import display_html\n", - "#f = FloatProgress(min=0, max=nImages//chunkSize)\n", - "row_coords = []\n", - "col_coords = []\n", - "vals = []\n", - "\n", - "\n", - "for fl in xray_files:\n", - " dshape = list(Frms6Reader.getDataShape(fl))\n", - " dshape, nImages = dshape[:-1], dshape[-1]\n", - " nImages = nImagesOrLimit(nImages, limit_xray_images)\n", - " reader = ChunkReader(fl, Frms6Reader.readData, nImages, chunkSize, path = \"/frames\",\n", - " pixels_x = sensorSize[0],\n", - " pixels_y = sensorSize[1],\n", - " simulated=False)\n", - " \n", - " for i,data in enumerate(reader.readChunks()):\n", - " data = data.astype(np.float32)\n", - " data = 
offsetCorrection.correct(data)\n", - "\n", - " data = cmCorrection.correct(data)\n", - "\n", - " data, patterns = patternClassifier.classify(data)\n", - " singles, firstsingles, up, right, down, left = patternSelector.select(data, patterns)\n", - " asingles = firstsingles+singles\n", - " \n", - " \n", - " xv, yv = np.meshgrid(np.arange(data.shape[0]), np.arange(data.shape[1]))\n", - " xv = np.repeat(xv[...,None], data.shape[2], axis=2)\n", - " yv = np.repeat(yv[...,None], data.shape[2], axis=2)\n", - "\n", - " col_coords.append(xv[asingles > 0].flatten())\n", - " row_coords.append(yv[asingles > 0].flatten())\n", - " vals.append(asingles[asingles > 0].flatten())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "rcoords = np.concatenate(row_coords)\n", - "ccoords = np.concatenate(col_coords)\n", - "avals = np.concatenate(vals)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Offset + CM Corrected Signal along Columns/Rows ###\n", - "\n", - "CTI effects will show along the charge-shifting direction, relative gain effects for each readout line." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "#lim = 10000\n", - "import matplotlib\n", - "fig = plt.figure(figsize=(10,10))\n", - "ax = fig.add_subplot(111)\n", - "idx = (avals > cti_limit_low) & (avals < cti_limit_high)\n", - "\n", - "\n", - "xax = rcoords[~idx]\n", - "yax = ccoords[~idx]\n", - "ax.scatter(xax, avals[~idx], 1, color=\"grey\", alpha=0.2)\n", - "\n", - "\n", - "xax = rcoords[idx]\n", - "yax = ccoords[idx]\n", - "pvals = avals[idx]\n", - "\n", - "unique_coords = np.unique(xax)\n", - "mean_signal = np.asarray([np.mean(pvals[xax==jdx]) for jdx in unique_coords])\n", - "\n", - "cmap = matplotlib.cm.get_cmap('summer')\n", - "colors = cmap(yax/np.max(yax))\n", - "ax.scatter(xax, pvals, 1, color=colors)\n", - "ax.scatter(unique_coords, mean_signal, 3, color='tomato')\n", - "ax.set_ylabel(\"Signal value (ADU)\")\n", - "ax.set_xlabel(\"Column coordinate\")\n", - "l = ax.set_ylim(0, 1.25*cti_limit_high)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n", - "\n", - "fig = plt.figure(figsize=(10,10))\n", - "ax = fig.add_subplot(111)\n", - "\n", - "yax = rcoords[~idx]\n", - "xax = ccoords[~idx]\n", - "ax.scatter(xax, avals[~idx], 1, color=\"grey\", alpha=0.2)\n", - "\n", - "yax = rcoords[idx]\n", - "xax = ccoords[idx]\n", - "pvals = avals[idx]\n", - "\n", - "unique_coords = np.unique(xax)\n", - "mean_signal = np.asarray([np.mean(pvals[xax==jdx]) for jdx in unique_coords])\n", - "\n", - "cmap = matplotlib.cm.get_cmap('summer')\n", - "colors = cmap(yax/np.max(yax))\n", - "ax.scatter(xax, pvals, 1, color=colors)\n", - "ax.scatter(unique_coords, mean_signal, 3, color='tomato')\n", - "ax.set_ylabel(\"Signal value (ADU)\")\n", - "ax.set_xlabel(\"Row coordinate\")\n", - "l = ax.set_ylim(0, 1.25*cti_limit_high)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "CTI and relative gain are evaluated by performing a linear fit to pixel values along each read-out line:\n", - "\n", - "$$ y = m*x +b $$\n", - "\n", - "The CTI for each readout line $i$ is then defined as $CTI_{i}:=m_{i}/b_{i}$ and the relative gain as $G_{r}:=b_{i}/\\overline{b}$ where $\\overline{b}$ is the average $b$ value of all readout lines." 
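, - "\n", - "As a minimal illustration of these definitions (a sketch using a plain least-squares fit instead of the Minuit-based fit in the next cell; `positions` and `signals` are assumed to hold the column coordinates and single-event values of one readout line):\n", - "\n", - "```python\n", - "import numpy as np\n", - "slope, intercept = np.polyfit(positions, signals, 1)  # y = slope*x + intercept\n", - "cti_i = -slope / intercept  # charge loss per transfer; the slope is negative for CTI\n", - "b_i = intercept             # divide by the mean intercept of all lines for the relative gain\n", - "```"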
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from functools import partial\n", - "from iminuit import Minuit\n", - "\n", - "\n", - "xax = rcoords[idx]\n", - "yax = ccoords[idx]\n", - "avalsf = avals[idx]\n", - "min_data = 5\n", - "\n", - "ms = np.zeros(sensorSize[0], np.float)\n", - "bs = np.zeros(sensorSize[0], np.float)\n", - "mErr = np.zeros(sensorSize[0], np.float)\n", - "bErr = np.zeros(sensorSize[0], np.float)\n", - "\n", - "fig = plt.figure(figsize=(10,10))\n", - "ax = fig.add_subplot(111)\n", - "\n", - "\n", - "\n", - "def linFunc(x, *p):\n", - " return p[1] * x + p[0]\n", - "for i in range(sensorSize[0]):\n", - " idx = (xax == i)\n", - " zm = avalsf[idx]\n", - " zx = yax[idx]\n", - " def linWrap(m, b):\n", - " return np.sum(((linFunc(zx, b, -m) - zm)) ** 2)\n", - " pparm = dict(throw_nan=False, pedantic=False,\n", - " print_level=0)\n", - " pparm[\"m\"] = 0\n", - " pparm[\"limit_m\"] = (0, 1)\n", - " pparm[\"b\"] = np.nanmean(zm)\n", - "\n", - " mini = Minuit(linWrap, **pparm)\n", - " fmin = mini.migrad()\n", - "\n", - " \n", - " if fmin[0]['is_valid'] and fmin[0]['has_covariance'] and (\n", - " min_data is None or zx.size > min_data):\n", - " \n", - " ferr = mini.minos()\n", - " res = mini.values\n", - "\n", - " ms[i] = res[\"m\"]\n", - " bs[i] = res[\"b\"]\n", - "\n", - " mErr[i] = max(-ferr['m']['lower'], ferr['m']['upper'])\n", - " bErr[i] = max(-ferr['b']['lower'], ferr['b']['upper'])\n", - "\n", - " \n", - " \n", - " colors = cmap(xax[idx]/np.max(yax))\n", - " ax.scatter(yax[idx], zm, 1, color=colors)\n", - "\n", - " x = np.arange(np.max(xax)+1)\n", - " y = linFunc(x, res[\"b\"], -res[\"m\"])\n", - " ax.plot(x, y, color=\"grey\", linewidth=0.5, alpha=0.5)\n", - "\n", - "ctiErr = np.sqrt((1. / bs * mErr) ** 2 + (ms / bs ** 2 * bErr) ** 2)\n", - "cti = ms / bs\n", - "relGain = bs/np.nanmedian(bs)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n", - "cti[(cti < 1e-8) | (cti > 1e-3)] = np.nan\n", - "cti[np.isnan(cti)] = np.nan\n", - "cti[~np.isfinite(cti)] = np.nan\n", - "print(\"Median CTI: {:0.2}\".format(np.nanmedian(cti)))\n", - "cti[np.isnan(cti)] = np.nanmedian(cti)\n", - "relGain[(relGain < 0.8) | (relGain > 1.2)] = 1\n", - "relGain[np.isnan(relGain)] = 1\n", - "relGain[~np.isfinite(relGain)] = 1" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "fig = plt.figure(figsize=(10,5))\n", - "ax = fig.add_subplot(111)\n", - "l = plt.plot( cti, drawstyle='steps-mid')\n", - "#ax.semilogy()\n", - "\n", - "ax.set_xlabel('column number')\n", - "l = ax.set_ylabel('CTI')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "fig = plt.figure(figsize=(10, 5))\n", - "ax = fig.add_subplot(111)\n", - "l = plt.plot( relGain, drawstyle='steps-mid')\n", - "#ax.semilogy()\n", - "\n", - "ax.set_xlabel('column number')\n", - "l = ax.set_ylabel('Rel. 
Gain')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Per-Pixel Relative Gain ###\n", - "\n", - "Relative gain and CTI can be encoded into a single map, by expanding them along the readout direction" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "ctimap = relGain[:, None]*(1-cti[:, None])**np.repeat(np.arange(sensorSize[1])[None, :], sensorSize[0], axis=0)\n", - "fig = xana.heatmapPlot(ctimap, figsize=(8,8), lut_label=\"Relative gain\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n", - "#ctiCorr = xcal.CTICorrection(sensorSize, cti, 1, nCells=memoryCells,\n", - "# cores=10, runOnGPU=False, parallel=True,\n", - "# gains=None, blockSize=blockSize,)\n", - "\n", - "kernel = {}\n", - "\n", - "kernel[203] = (-1, 0)\n", - " \n", - "\n", - "kernel[202] = (1, 0)\n", - " \n", - "\n", - "kernel[201] = (0, 1)\n", - "\n", - "\n", - "kernel[200] = (0, -1)\n", - "\n", - "def separateDouble(pdat, alldat, p):\n", - " \n", - " \n", - " \n", - " kstack = kernel[p]\n", - " \n", - " \n", - " pdat2 = np.roll(np.roll(pdat, kstack[0], axis=0), kstack[1], axis=1)\n", - " alldat2 = np.roll(np.roll(alldat, kstack[0], axis=0), kstack[1], axis=1)\n", - " idx = (pdat == -pdat2) & (pdat == p)\n", - " p1 = alldat[idx]\n", - " p2 = alldat2[idx]\n", - " \n", - " \n", - " r = np.zeros((p1.shape[0], 2), np.float)\n", - " r[:,0] = np.abs(p1)-np.abs(p2)\n", - " r[:,1] = np.abs(p2)\n", - " \n", - " return r\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "patternClassifier = xcal.PatternClassifier(sensorSize, noiseMap, split_evt_primary_threshold,\n", - " split_evt_secondary_threshold, split_evt_mip_threshold,\n", - " tagFirstSingles = tag_first_singles, nCells=memoryCells,\n", - " cores=10, runOnGPU=False, parallel=True )\n", - "patternClassifier._imagesPerChunk = 50\n", - "histCalCorrectedAll = xcal.HistogramCalculator(sensorSize, bins=4000, range=[0, 4000], memoryCells=memoryCells, cores=cpuCores, parallel=True)\n", - "histCalCorrectedSingles = xcal.HistogramCalculator(sensorSize, bins=4000, range=[0, 4000], memoryCells=memoryCells, cores=cpuCores, parallel=True)\n", - "histCalCorrectedDoubles = xcal.HistogramCalculator(sensorSize, bins=4000, range=[0, 4000], memoryCells=memoryCells, cores=cpuCores, parallel=True)\n", - "histCalCorrectedTriples = xcal.HistogramCalculator(sensorSize, bins=4000, range=[0, 4000], memoryCells=memoryCells, cores=cpuCores, parallel=True)\n", - "histCalCorrectedQuads = xcal.HistogramCalculator(sensorSize, bins=4000, range=[0, 4000], memoryCells=memoryCells, cores=cpuCores, parallel=True)\n", - "patternSelector = xcal.PatternSelector(sensorSize, selectionList = [100, 101, 200, 201, 202, 203, 300, 301, 302, 303, 400, 401, 402, 403], parallel=False)\n", - "\n", - "col_coords = []\n", - "row_coords = []\n", - "vals = []\n", - "\n", - "up_doubles = None\n", - "down_doubles = None\n", - "right_doubles = None\n", - "left_doubles = None\n", - "\n", - "for fl in xray_files:\n", - " dshape = list(Frms6Reader.getDataShape(fl))\n", - " dshape, nImages = dshape[:-1], dshape[-1]\n", - " nImages = nImagesOrLimit(nImages, limit_xray_images)\n", - " reader = ChunkReader(fl, Frms6Reader.readData, nImages, chunkSize, path = \"/frames\",\n", - " pixels_x = sensorSize[0],\n", - " pixels_y = sensorSize[1],\n", 
- " simulated=False)\n", - " \n", - " for i,data in enumerate(reader.readChunks()):\n", - " data = data.astype(np.float32)\n", - " data = offsetCorrection.correct(data)\n", - " \n", - "\n", - " data = cmCorrection.correct(data)\n", - "\n", - " data /= ctimap[...,None]\n", - "\n", - " data, patterns = patternClassifier.classify(data)\n", - "\n", - "\n", - " #singles, firstsingles, doubles_ud, doubles_lr, triples_0, triples_1 = patternSelector.select(data, patterns)\n", - " #classPatt = patternSelector.select(data, patterns)\n", - " #data /= relGain[:, None, None]\n", - "\n", - " #data = ctiCorr.correct(data)\n", - " singles, firstsingles, up, right, down, left, t1, t2, t3, t4, q1, q2, q3, q4 = patternSelector.select(data, patterns)\n", - " histCalCorrectedAll.fill(data)\n", - " histCalCorrectedSingles.fill(singles+firstsingles)\n", - " histCalCorrectedDoubles.fill(up+right+down+left)\n", - " histCalCorrectedTriples.fill(t1+t2+t3+t4)\n", - " histCalCorrectedQuads.fill(q1+q2+q3+q4)\n", - "\n", - " asingles = singles + firstsingles\n", - " xv, yv = np.meshgrid(np.arange(data.shape[0]), np.arange(data.shape[1]))\n", - " xv = np.repeat(xv[...,None], data.shape[2], axis=2)\n", - " yv = np.repeat(yv[...,None], data.shape[2], axis=2)\n", - "\n", - " col_coords.append(xv[asingles > 0].flatten())\n", - " row_coords.append(yv[asingles > 0].flatten())\n", - " vals.append(asingles[asingles > 0].flatten())\n", - "\n", - " if i == 0:\n", - " up_doubles = separateDouble(patterns, data, 200)\n", - " right_doubles = separateDouble(patterns, data, 201)\n", - " down_doubles = separateDouble(patterns, data, 202)\n", - " left_doubles = separateDouble(patterns, data, 203)\n", - " else:\n", - " up_doubles = np.concatenate((up_doubles, separateDouble(patterns, data, 200)))\n", - " right_doubles = np.concatenate((right_doubles, separateDouble(patterns, data, 201)))\n", - " down_doubles = np.concatenate((down_doubles, separateDouble(patterns, data, 202)))\n", - " left_doubles = np.concatenate((left_doubles, separateDouble(patterns, data, 203)))\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Offset + CM Corrected Signal along Columns/Rows - Corrected ###\n", - "\n", - "CTI effects will show along the charge-shifting direction, relative gain effects for each readout line.\n", - "\n", - "If evaluation converged to proper values, per-column/per-row artifacts should now be much less pronounced in these plots" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "rcoords = np.concatenate(row_coords)\n", - "ccoords = np.concatenate(col_coords)\n", - "avals = np.concatenate(vals)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "#lim = 10000\n", - "import matplotlib\n", - "xax = rcoords#[:lim]\n", - "yax = ccoords#[:lim]\n", - "fig = plt.figure(figsize=(10,10))\n", - "ax = fig.add_subplot(111)\n", - "cmap = matplotlib.cm.get_cmap('Paired')\n", - "colors = cmap(yax/np.max(yax))\n", - "ax.scatter(xax, avals, 1, color=colors)\n", - "ax.set_ylabel(\"Signal value (ADU)\")\n", - "ax.set_xlabel(\"Column coordinate\")\n", - "l = ax.set_ylim(200, 3000)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "yax = rcoords#[:lim]\n", - "xax = ccoords#[:lim]\n", - "fig = plt.figure(figsize=(10,10))\n", - "ax = fig.add_subplot(111)\n", - "cmap = 
matplotlib.cm.get_cmap('Paired')\n", - "colors = cmap(yax/np.max(yax))\n", - "ax.scatter(xax, avals, 1, color=colors)\n", - "ax.set_ylabel(\"Signal value (ADU)\")\n", - "ax.set_xlabel(\"Row coordinate\")\n", - "l = ax.set_ylim(0, 3000)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "allHists = [histCalCorrectedAll.get(), histCalCorrectedSingles.get(),\n", - " histCalCorrectedDoubles.get(), histCalCorrectedTriples.get(),\n", - " histCalCorrectedQuads.get()]\n", - "\n", - "\n", - "labels = [\"all\", \"singles\", \"doubles\", \"triples\", \"quads\"]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Corrected Spectra for Split Event Types ###\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "fig = plt.figure(figsize=(10,5))\n", - "ax = fig.add_subplot(111)\n", - "for i, h in enumerate(allHists):\n", - " H, e, c, _ = h\n", - " ax.plot(c, H, label=labels[i])\n", - " #xana.histPlot(ax, h, plot_errors=False, alpha=0.3)\n", - "\n", - "\n", - "t = ax.set_xlabel(\"Intensity (ADU)\")\n", - "t = ax.set_ylabel(\"Counts\")\n", - "r = ax.set_xlim(0.5*line_position_adu,1.25*line_position_adu)#\n", - "ch = allHists[0][0]\n", - "ce = allHists[0][2]\n", - "r = ax.set_ylim(1, 1.25*np.max(ch[ce > event_threshold]))\n", - "r = ax.semilogy()\n", - "l = ax.legend()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "fig = plt.figure(figsize=(10,5))\n", - "ax = fig.add_subplot(111)\n", - "for i, h in enumerate(allHists):\n", - " H, e, c, _ = h\n", - " ax.plot(c, H, label=labels[i])\n", - " #xana.histPlot(ax, h, plot_errors=False, alpha=0.3)\n", - "\n", - "\n", - "t = ax.set_xlabel(\"Intensity (ADU)\")\n", - "t = ax.set_ylabel(\"Counts\")\n", - "ch = allHists[0][0]\n", - "ce = allHists[0][2]\n", - "r = ax.set_ylim(1, 1.25*np.max(ch[ce > event_threshold]))\n", - "r = ax.semilogy()\n", - "l = ax.legend()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Gain and Energy Resolution ##\n", - "\n", - "The gain is evaluated after all corrections, by fitting a prominent line and comparing it to a known line energy. After proper corrections the gain should be very similar for all split event types. Additionally, there should not be a large difference in energy resolution." 
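, - "\n", - "In sketch form, the conversion used in the following cells (with `res` the fit-result dictionary returned by `do_fit`, holding `mu1` and `sigma1`):\n", - "\n", - "```python\n", - "gain = line_energy_ev / res['mu1']      # eV per ADU\n", - "de_fwhm = 2.35 * res['sigma1'] * gain   # energy resolution (FWHM) in eV\n", - "```"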
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false, - "scrolled": false - }, - "outputs": [], - "source": [ - "h = histCalCorrectedSingles.get()\n", - "ch = h[0]\n", - "ce = h[2]\n", - "\n", - "res = do_fit(h, line_position_adu, line_fit_limits_adu/2,\n", - " line_fit_limits_adu, xlim=[0.5*line_position_adu,1.25*line_position_adu],\n", - " ylim=[1, 1.25*np.max(ch[ce > event_threshold])])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "gains = np.zeros(len(allHists))\n", - "dEs = np.zeros((len(allHists), 2))\n", - "for i, h in enumerate(allHists):\n", - " print(labels[i])\n", - " print(\"-\"*len(labels[i]))\n", - " res = do_fit(h, line_position_adu, line_fit_limits_adu/2, line_fit_limits_adu, False)\n", - " gain = line_energy_ev/res['mu1']\n", - " dE = res['sigma1']*gain\n", - " print(\"dE FWHM: {:0.2f} eV\".format(dE*2.35))\n", - " print(\"Gain: {:0.2f} eV/ADU; {:0.2f} ADU/eV\".format(gain, 1./gain))\n", - " print(\"\")\n", - " gains[i] = gain\n", - " dEs[i, 0] = line_energy_ev\n", - " dEs[i, 1] = dE" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "# meas = os.path.splitext(os.path.basename(xray_files[0]))[0]\n", - "# with h5py.File(\"{}/base_store_{}.h5\".format(out_folder, meas), \"w\") as f:\n", - "# f[\"{}/RelativeGain/0/data\".format(detector_instance)] = ctimap[...,None]\n", - "# f[\"{}/Gain/0/data\".format(detector_instance)] = gains\n", - "# f[\"{}/EnergyResolution/0/data\".format(detector_instance)] = dEs" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Pattern Statistics and Event Distribution ##" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "patternStats = patternClassifier.getPatternStats()\n", - "fig = plt.figure(figsize=(15,15))\n", - "ax = fig.add_subplot(4,4,1)\n", - "sfields = [\"singles\", \"first singles\", \"clusters\"]\n", - "mfields = [\"doubles\", \"triples\", \"quads\"]\n", - "relativeOccurrences = []\n", - "labels = []\n", - "for i, f in enumerate(sfields):\n", - " relativeOccurrences.append(patternStats[f])\n", - " labels.append(f)\n", - "for i, f in enumerate(mfields):\n", - " for k in range(len(patternStats[f])):\n", - " relativeOccurrences.append(patternStats[f][k])\n", - " labels.append(f+\"(\"+str(k)+\")\")\n", - "relativeOccurrences = np.array(relativeOccurrences, float)\n", - "relativeOccurrences/=np.sum(relativeOccurrences)\n", - "pie = ax.pie(relativeOccurrences, labels=labels, autopct='%1.1f%%', shadow=True)\n", - "ax.set_title(\"Pattern occurrence\")\n", - "# Set aspect ratio to be equal so that pie is drawn as a circle.\n", - "a = ax.axis('equal')\n", - "\n", - "smaps = [\"singlemap\", \"firstsinglemap\", \"clustermap\"]\n", - "for i, m in enumerate(smaps):\n", - "\n", - " ax = fig.add_subplot(4,4,2+i)\n", - "\n", - " pmap = ax.imshow(patternStats[m], interpolation=\"nearest\", vmax=2*np.nanmedian(patternStats[m]))\n", - " ax.set_title(m)\n", - " cb = fig.colorbar(pmap)\n", - "\n", - "mmaps = [\"doublemap\", \"triplemap\", \"quadmap\"]\n", - "k = 0\n", - "for i, m in enumerate(mmaps):\n", - "\n", - " for j in range(4):\n", - " ax = fig.add_subplot(4,4,2+len(smaps)+k)\n", - " pmap = ax.imshow(patternStats[m][j], interpolation=\"nearest\", vmax=2*np.median(patternStats[m][j]))\n", - " 
ax.set_title(m+\"(\"+str(j)+\")\")\n", - " cb = fig.colorbar(pmap)\n", - " k+=1" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Double Split Event Ratios - Left/Right Splits ##" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "fig = plt.figure(figsize=(7,7))\n", - "ax = fig.add_subplot(111)\n", - "ax.scatter(np.abs(right_doubles[:,1]), np.abs(right_doubles[:, 0]), 1, label=\"Right partner major\")\n", - "ax.scatter(np.abs(left_doubles[:,0]), np.abs(left_doubles[:, 1]), 1, label=\"Left partner major\")\n", - "ax.semilogx()\n", - "ax.semilogy()\n", - "ax.set_xlim(10, 100000)\n", - "ax.set_ylim(10, 100000)\n", - "ax.set_xlabel(\"Right partner energy (ADU)\")\n", - "ax.set_ylabel(\"Left partner energy (ADU)\")\n", - "l = ax.legend()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Double Split Event Ratios - Top/Down Splits ##" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "fig = plt.figure(figsize=(7,7))\n", - "ax = fig.add_subplot(111)\n", - "ax.scatter(np.abs(up_doubles[:,1]), np.abs(up_doubles[:, 0]), 1, label=\"Top partner major\")\n", - "ax.scatter(np.abs(down_doubles[:,0]), np.abs(down_doubles[:, 1]), 1, label=\"Down partner major\")\n", - "ax.semilogx()\n", - "ax.semilogy()\n", - "ax.set_xlim(10, 100000)\n", - "ax.set_ylim(10, 100000)\n", - "ax.set_xlabel(\"Top partner energy (ADU)\")\n", - "ax.set_ylabel(\"Down partner energy (ADU)\")\n", - "l = ax.legend()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.6" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/notebooks/pnCCD/Characterize_pnCCD_Dark_NBC.ipynb b/notebooks/pnCCD/Characterize_pnCCD_Dark_NBC.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..4cf0a0a8cc2753ad4dc0492bbab3dd35cddfa387 --- /dev/null +++ b/notebooks/pnCCD/Characterize_pnCCD_Dark_NBC.ipynb @@ -0,0 +1,636 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# pnCCD Dark Characterization\n", + "\n", + "Author: DET Group, Version 1.0\n", + "\n", + "The following notebook provides dark image analysis of the pnCCD detector.\n", + "\n", + "Dark characterization evaluates offset and noise of the detector and gives information about bad pixels. Resulting maps are saved as .h5 files for a latter use." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T10:54:38.999974Z", + "start_time": "2018-12-06T10:54:38.983406Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "in_folder = \"/gpfs/exfel/exp/SQS/201921/p002430/raw/\" # input folder, required\n", + "out_folder = '/gpfs/exfel/data/scratch/haufs/test/' # output folder, required\n", + "path_template = 'RAW-R{:04d}-PNCCD01-S{{:05d}}.h5' # the template to use to access data\n", + "run = 745 # which run to read data from, required\n", + "number_dark_frames = 0 # number of images to be used, if set to 0 all available images are used\n", + "cluster_profile = \"noDB\" # ipcluster profile to use\n", + "sigma_noise = 10. # pixels exceeding sigma_noise * the noise value in that pixel will be masked\n", + "h5path = '/INSTRUMENT/SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output/data/image/' # path to the data in the HDF5 file\n", + "cal_db_interface = \"tcp://max-exfl016:8021\" # calibration DB interface to use\n", + "local_output = False # also output as H5 files\n", + "temp_limits = 5 # temperature limits in which to consider calibration parameters equal\n", + "sequence = 0 # sequence file to use\n", + "multi_iteration = False # use multiple iterations\n", + "use_dir_creation_date = True # use dir creation date as data production reference date\n", + "bad_pixel_offset_sigma = 5. # deviation in standard deviations from mean offset to consider pixel bad\n", + "bad_pixel_noise_sigma = 5. # deviation in standard deviations from mean noise to consider pixel bad\n", + "fix_temperature = 233. # fix temperature to this value in K, set to -1 to use value from slow data\n", + "gain = 0 # the detector gain setting, only 0 is currently implemented\n", + "bias_voltage = 300 # detector bias voltage\n", + "integration_time = 70 # detector integration time" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T10:54:39.190907Z", + "start_time": "2018-12-06T10:54:39.186154Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions\n", + "from iCalibrationDB.detectors import DetectorTypes" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T10:54:39.467334Z", + "start_time": "2018-12-06T10:54:39.427784Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "import XFELDetAna.xfelprofiler as xprof\n", + "\n", + "profiler = xprof.Profiler()\n", + "profiler.disable()\n", + "from XFELDetAna.util import env\n", + "env.iprofile = cluster_profile\n", + "\n", + "import warnings\n", + "warnings.filterwarnings('ignore')\n", + "\n", + "from XFELDetAna import xfelpycaltools as xcal\n", + "from XFELDetAna import xfelpyanatools as xana\n", + "from XFELDetAna.plotting.util import prettyPlotting\n", + "prettyPlotting=True\n", + "from XFELDetAna.xfelreaders import ChunkReader\n", + "from XFELDetAna.detectors.fastccd import readerh5 as fastccdreaderh5\n", + "from cal_tools.tools import get_dir_creation_date\n", + "\n", + "import numpy as np\n", + "import h5py\n", + "import matplotlib.pyplot as plt\n", + "from iminuit import Minuit\n", + "\n", + "import time\n", + "import copy\n", + "\n", + "from prettytable import PrettyTable\n", + "\n", + "%matplotlib inline\n", + "\n", + "def nImagesOrLimit(nImages, limit):\n", + " if limit == 0:\n", + " return nImages\n", + " 
else:\n", + " return min(nImages, limit)\n", + " \n", + "sigmaNoise = sigma_noise\n", + "temperature_k = fix_temperature" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T10:54:40.058101Z", + "start_time": "2018-12-06T10:54:40.042615Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "\n", + "x = 1024 # rows of the pnCCD sensor to analyze\n", + "y = 1024 # columns of the pnCCD sensor to analyze\n", + "\n", + " \n", + "ped_dir = \"{}/r{:04d}\".format(in_folder, run)\n", + "fp_name = path_template.format(run)\n", + "\n", + "import datetime\n", + "creation_time = None\n", + "if use_dir_creation_date:\n", + " creation_time = get_dir_creation_date(in_folder, run)\n", + "\n", + "fp_path = '{}/{}'.format(ped_dir, fp_name)\n", + "\n", + "print(\"Reading data from: {}\\n\".format(fp_path))\n", + "print(\"Run is: {}\".format(run))\n", + "print(\"HDF5 path: {}\".format(h5path))\n", + "if creation_time:\n", + " print(\"Using {} as creation time\".format(creation_time.isoformat()))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T10:54:40.555804Z", + "start_time": "2018-12-06T10:54:40.452978Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "filename = fp_path.format(sequence)\n", + "sensorSize = [x, y]\n", + "chunkSize = 100 # number of images to read per chunk\n", + "# the sensor area will be analysed according to blockSize\n", + "blockSize = [sensorSize[0]//2, sensorSize[1]//4] \n", + "xcal.defaultBlockSize = blockSize\n", + "cpuCores = 8 # number of CPU cores to use\n", + "memoryCells = 1 # the pnCCD has 1 memory cell\n", + "# total number of images to process\n", + "nImages = fastccdreaderh5.getDataSize(filename, h5path)[0] \n", + "nImages = nImagesOrLimit(nImages, number_dark_frames)\n", + "print(\"\\nNumber of dark images to analyze: \", nImages)\n", + "commonModeBlockSize = blockSize\n", + "commonModeAxisR = 'row' # axis along which the common mode will be calculated\n", + "run_parallel = True\n", + "profile = False\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T10:54:41.584031Z", + "start_time": "2018-12-06T10:54:41.578462Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "reader = ChunkReader(filename, fastccdreaderh5.readData, \n", + " nImages, chunkSize, \n", + " path = h5path, \n", + " pixels_x = sensorSize[0],\n", + " pixels_y = sensorSize[1],)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T10:54:41.899511Z", + "start_time": "2018-12-06T10:54:41.864816Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "noiseCal = xcal.NoiseCalculator(sensorSize, memoryCells, \n", + " cores=cpuCores, blockSize=blockSize,\n", + " runParallel=run_parallel)\n", + "histCalRaw = xcal.HistogramCalculator(sensorSize, bins=1000, \n", + " range=[0, 10000], parallel=False, \n", + " memoryCells=memoryCells, \n", + " cores=cpuCores, blockSize=blockSize)\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### First Iteration" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Characterizing dark images to create the dark maps (offset, noise and bad pixel maps) is an iterative process. First, initial offset and noise maps are produced from the raw dark data."
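, + "\n", + "What the calculator computes in this first pass amounts to per-pixel statistics over the frame stack; a minimal sketch, with `darks` assumed to be an array of shape `(rows, columns, frames)`:\n", + "\n", + "```python\n", + "import numpy as np\n", + "offset_map = np.mean(darks, axis=2)  # per-pixel baseline in ADU\n", + "noise_map = np.std(darks, axis=2)    # per-pixel noise in ADU\n", + "```"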
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T10:55:21.238009Z", + "start_time": "2018-12-06T10:54:54.586435Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "for data in reader.readChunks():\n", + " data = data.astype(np.float32)\n", + " dx = np.count_nonzero(data, axis=(0, 1))\n", + " data = data[:,:,dx != 0]\n", + " histCalRaw.fill(data)\n", + " #Filling calculators with data\n", + " noiseCal.fill(data)\n", + " \n", + "offsetMap = noiseCal.getOffset() #Produce offset map\n", + "noiseMap = noiseCal.get() #Produce noise map\n", + "noiseCal.reset() #Reset noise calculator\n", + "print(\"Initial maps were created\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T10:56:20.686534Z", + "start_time": "2018-12-06T10:56:11.721829Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "#**************OFFSET MAP HISTOGRAM***********#\n", + "ho,co = np.histogram(offsetMap.flatten(), bins=700)\n", + "\n", + "do = {'x': co[:-1],\n", + " 'y': ho,\n", + " 'y_err': np.sqrt(ho[:]),\n", + " 'drawstyle': 'bars',\n", + " 'color': 'cornflowerblue',\n", + " }\n", + "\n", + "fig = xana.simplePlot(do, figsize='1col', aspect=2, \n", + " x_label = 'Offset (ADU)', \n", + " y_label=\"Counts\", y_log=True,\n", + " )\n", + " \n", + "\n", + "#*****NOISE MAP HISTOGRAM FROM THE OFFSET CORRECTED DATA*******#\n", + "hn,cn = np.histogram(noiseMap.flatten(), bins=2000)\n", + "\n", + "dn = {'x': cn[:-1],\n", + " 'y': hn,\n", + " 'y_err': np.sqrt(hn[:]),\n", + " 'drawstyle': 'bars',\n", + " 'color': 'cornflowerblue',\n", + " }\n", + "\n", + "fig = xana.simplePlot(dn, figsize='1col', aspect=2, \n", + " x_label = 'Noise (ADU)', \n", + " y_label=\"Counts\", \n", + " y_log=True,\n", + " x_range=(0, 400))\n", + "\n", + "\n", + "#**************HEAT MAPS*******************#\n", + "fig = xana.heatmapPlot(offsetMap[:,:,0],\n", + " x_label='Columns', y_label='Rows',\n", + " lut_label='Offset (ADU)',\n", + " x_range=(0,y),\n", + " y_range=(0,x), vmin=6000, vmax=14500)\n", + "\n", + "fig = xana.heatmapPlot(noiseMap[:,:,0],\n", + " x_label='Columns', y_label='Rows',\n", + " lut_label='Noise (ADU)',\n", + " x_range=(0,y),\n", + " y_range=(0,x), vmax=2*np.mean(noiseMap))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "cmCorrection = xcal.CommonModeCorrection(sensorSize, \n", + " [512, 512], \n", + " 'row',\n", + " nCells = memoryCells, \n", + " noiseMap = noiseMap,\n", + " runParallel=False,\n", + " stats=True)\n", + "cmCorrection.debug()\n", + "noiseCalCM = xcal.NoiseCalculator(sensorSize, memoryCells, \n", + " cores=cpuCores, blockSize=blockSize,\n", + " runParallel=run_parallel)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "for data in reader.readChunks():\n", + " data = data.astype(np.float32)\n", + " dx = np.count_nonzero(data, axis=(0, 1))\n", + " data = data[:,:,dx != 0]\n", + " data = cmCorrection.correct(data)\n", + " #Filling calculators with data\n", + " noiseCalCM.fill(data)\n", + " \n", + "offsetMapCM = noiseCalCM.getOffset() #Produce offset map\n", + "noiseMapCM = noiseCalCM.get() #Produce noise map\n", + "noiseCalCM.reset() #Reset noise calculator\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": 
[], + "source": [ + "#*****NOISE MAP HISTOGRAM FROM THE OFFSET CORRECTED DATA*******#\n", + "hn,cn = np.histogram(noiseMapCM.flatten(), bins=2000)\n", + "\n", + "dn = {'x': cn[:-1],\n", + " 'y': hn,\n", + " 'y_err': np.sqrt(hn[:]),\n", + " 'drawstyle': 'bars',\n", + " 'color': 'cornflowerblue',\n", + " }\n", + "\n", + "fig = xana.simplePlot(dn, figsize='1col', aspect=2, \n", + " x_label = 'Noise (ADU)', \n", + " y_label=\"Counts\", \n", + " y_log=True,\n", + " x_range=(0, 400))\n", + "\n", + "\n", + "fig = xana.heatmapPlot(noiseMapCM[:,:,0],\n", + " x_label='Columns', y_label='Rows',\n", + " lut_label='Noise (ADU)',\n", + " x_range=(0,y),\n", + " y_range=(0,x), vmax=2*np.mean(noiseMapCM))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T10:56:22.741284Z", + "start_time": "2018-12-06T10:56:20.688393Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "\n", + "## offset\n", + "\n", + "metadata = ConstantMetaData()\n", + "offset = Constants.CCD(DetectorTypes.pnCCD).Offset()\n", + "offset.data = offsetMapCM.data\n", + "metadata.calibration_constant = offset\n", + "\n", + "# set the operating condition\n", + "condition = Conditions.Dark.CCD(bias_voltage=bias_voltage,\n", + " integration_time=integration_time,\n", + " gain_setting=gain,\n", + " temperature=temperature_k,\n", + " pixels_x=1024,\n", + " pixels_y=1024)\n", + "\n", + "device = Detectors.PnCCD1\n", + "\n", + "\n", + "\n", + "metadata.detector_condition = condition\n", + "\n", + "# specify the a version for this constant\n", + "if creation_time is None:\n", + " metadata.calibration_constant_version = Versions.Now(device=device)\n", + "else:\n", + " metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)\n", + "metadata.send(cal_db_interface, timeout=300000)\n", + "\n", + "## noise\n", + "\n", + "metadata = ConstantMetaData()\n", + "noise = Constants.CCD(DetectorTypes.pnCCD).Noise()\n", + "noise.data = noiseMapCM.data\n", + "metadata.calibration_constant = noise\n", + "\n", + "# set the operating condition\n", + "condition = Conditions.Dark.CCD(bias_voltage=bias_voltage,\n", + " integration_time=integration_time,\n", + " gain_setting=gain,\n", + " temperature=temperature_k,\n", + " pixels_x=1024,\n", + " pixels_y=1024)\n", + "\n", + "\n", + "\n", + "device = Detectors.PnCCD1\n", + "\n", + "\n", + "metadata.detector_condition = condition\n", + "\n", + "# specify the a version for this constant\n", + "if creation_time is None:\n", + " metadata.calibration_constant_version = Versions.Now(device=device)\n", + "else:\n", + " metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)\n", + "metadata.send(cal_db_interface, timeout=300000)\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "from cal_tools.enums import BadPixels\n", + "bad_pixels = np.zeros(offsetMap.shape, np.uint32)\n", + "mnoffset = np.nanmedian(offsetMap)\n", + "stdoffset = np.nanstd(offsetMap)\n", + "bad_pixels[(offsetMap < mnoffset-bad_pixel_offset_sigma*stdoffset) | \n", + " (offsetMap > mnoffset+bad_pixel_offset_sigma*stdoffset)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n", + "\n", + "mnnoise = np.nanmedian(noiseMap)\n", + "stdnoise = np.nanstd(noiseMap)\n", + "bad_pixels[(noiseMap < mnnoise-bad_pixel_noise_sigma*stdnoise) | \n", + " (noiseMap > mnnoise+bad_pixel_noise_sigma*stdnoise)] = 
BadPixels.NOISE_OUT_OF_THRESHOLD.value\n", + "\n", + "fig = xana.heatmapPlot(np.log2(bad_pixels[:,:,0]),\n", + " x_label='Columns', y_label='Rows',\n", + " lut_label='Bad Pixel Value (ADU)',\n", + " x_range=(0,y),\n", + " y_range=(0,x), vmin=0, vmax=32)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "metadata = ConstantMetaData()\n", + "badpix = Constants.CCD(DetectorTypes.pnCCD).BadPixelsDark()\n", + "badpix.data = bad_pixels.data\n", + "metadata.calibration_constant = badpix\n", + "\n", + "# set the operating condition\n", + "condition = Conditions.Dark.CCD(bias_voltage=bias_voltage,\n", + " integration_time=integration_time,\n", + " gain_setting=gain,\n", + " temperature=temperature_k,\n", + " pixels_x=1024,\n", + " pixels_y=1024)\n", + "\n", + "\n", + "device = Detectors.PnCCD1\n", + "\n", + "\n", + "metadata.detector_condition = condition\n", + "\n", + "# specify the a version for this constant\n", + "if creation_time is None:\n", + " metadata.calibration_constant_version = Versions.Now(device=device)\n", + "else:\n", + " metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)\n", + "metadata.send(cal_db_interface, timeout=300000)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "histCalCorr = xcal.HistogramCalculator(sensorSize, bins=200, \n", + " range=[-200, 200], parallel=False, \n", + " memoryCells=memoryCells, \n", + " cores=cpuCores, blockSize=blockSize)\n", + "\n", + "\n", + "for data in reader.readChunks():\n", + " data = data.astype(np.float32)\n", + " data -= offsetMap.data\n", + " histCalCorr.fill(data)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "ho,eo,co,so = histCalCorr.get()\n", + "\n", + "\n", + "d = [{'x': co,\n", + " 'y': ho,\n", + " 'y_err': np.sqrt(ho[:]),\n", + " 'drawstyle': 'steps-mid',\n", + " 'errorstyle': 'bars',\n", + " 'errorcoarsing': 2,\n", + " 'label': 'Offset corr.'\n", + " },\n", + " \n", + " ]\n", + " \n", + "\n", + "fig = xana.simplePlot(d, aspect=1, x_label='Energy(ADU)', \n", + " y_label='Number of occurrences', figsize='2col',\n", + " y_log=True, x_range=(-50,500),\n", + " legend='top-center-frame-2col')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.6" + }, + "latex_envs": { + "LaTeX_envs_menu_present": true, + "autocomplete": true, + "bibliofile": "biblio.bib", + "cite_by": "apalike", + "current_citInitial": 1, + "eqLabelWithNumbers": true, + "eqNumInitial": 1, + "hotkeys": { + "equation": "Ctrl-E", + "itemize": "Ctrl-I" + }, + "labels_anchors": false, + "latex_user_defs": false, + "report_style_numbering": false, + "user_envs_cfg": false + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/notebooks/pnCCD/Characterize_pnCCD_Gain.ipynb 
b/notebooks/pnCCD/Characterize_pnCCD_Gain.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..8ed501a08c822df9e915bc9d300727b8422c1fd0 --- /dev/null +++ b/notebooks/pnCCD/Characterize_pnCCD_Gain.ipynb @@ -0,0 +1,923 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# pnCCD Gain Characterization ##\n", + "\n", + "Authors: DET Group, Version 1.0\n", + "\n", + "The following notebook provides gain characterization for the pnCCD. It relies on data previously having been corrected using the MDC interface" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T15:54:23.218849Z", + "start_time": "2018-12-06T15:54:23.166497Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "\n", + "in_folder = \"/gpfs/exfel/exp/SQS/201921/p002430/proc/\" # input folder, required\n", + "out_folder = '/gpfs/exfel/data/scratch/xcal/test/' # output folder, required\n", + "path_template = 'CORR-R{:04d}-PNCCD01-S{{:05d}}.h5' # the template to use to access data\n", + "path_template_seqs = \"{}/r{:04d}/*PNCCD01-S*.h5\"\n", + "run = 747 # which run to read data from, required\n", + "cluster_profile = \"noDB\" # ipcluster profile to use\n", + "sigma_noise = 10. # Pixel exceeding 'sigmaNoise' * noise value in that pixel will be masked\n", + "h5path = '/INSTRUMENT/SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output/data/' # path in the HDF5 file the data is at\n", + "multi_iteration = False # use multiple iterations\n", + "use_dir_creation_date = True\n", + "fix_temperature = 233.\n", + "gain = 0\n", + "bias_voltage = 300\n", + "integration_time = 70\n", + "cal_db_interface = \"tcp://max-exfl016:8015\" # calibration DB interface to use\n", + "cal_db_timeout = 300000000 # timeout on caldb requests\n", + "sequences = [-1] # sequences to correct, set to -1 for all, range allowed\n", + "chunk_size_idim = 1 # H5 chunking size of output data\n", + "overwrite = True\n", + "sequences_per_node = 1\n", + "limit_images = 0\n", + "time_offset_days = 0\n", + "cpuCores = 8\n", + "\n", + "\n", + "def balance_sequences(in_folder, run, sequences, sequences_per_node, path_template_seqs):\n", + " import glob\n", + " import re\n", + " import numpy as np\n", + " if sequences[0] == -1:\n", + " sequence_files = glob.glob(path_template_seqs.format(in_folder, run))\n", + " seq_nums = set()\n", + " for sf in sequence_files:\n", + " seqnum = re.findall(r\".*-S([0-9]*).h5\", sf)[0]\n", + " seq_nums.add(int(seqnum))\n", + " seq_nums -= set(sequences)\n", + " nsplits = len(seq_nums)//sequences_per_node+1\n", + " while nsplits > 8:\n", + " sequences_per_node += 1\n", + " nsplits = len(seq_nums)//sequences_per_node+1\n", + " print(\"Changed to {} sequences per node to have a maximum of 8 concurrent jobs\".format(sequences_per_node))\n", + " return [l.tolist() for l in np.array_split(list(seq_nums), nsplits)]\n", + " else:\n", + " return sequences" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T15:54:23.455376Z", + "start_time": "2018-12-06T15:54:23.413579Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "import XFELDetAna.xfelprofiler as xprof\n", + "\n", + "profiler = xprof.Profiler()\n", + "profiler.disable()\n", + "from XFELDetAna.util import env\n", + "env.iprofile = cluster_profile\n", + "\n", + "import warnings\n", + "warnings.filterwarnings('ignore')\n", + "\n", + "from XFELDetAna import xfelpycaltools as xcal\n", + "from 
XFELDetAna import xfelpyanatools as xana\n", + "from XFELDetAna.plotting.util import prettyPlotting\n", + "prettyPlotting=True\n", + "from XFELDetAna.xfelreaders import ChunkReader\n", + "\n", + "import numpy as np\n", + "import h5py\n", + "import matplotlib.pyplot as plt\n", + "from iminuit import Minuit\n", + "import copy\n", + "import os\n", + "\n", + "from prettytable import PrettyTable\n", + "\n", + "from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions\n", + "from cal_tools.tools import get_dir_creation_date\n", + "\n", + "import datetime\n", + "from datetime import timedelta\n", + "\n", + "%matplotlib inline\n", + "\n", + "if sequences[0] == -1:\n", + " sequences = None\n", + "\n", + "\n", + "if \"#\" in cal_db_interface:\n", + " prot, serv, ran = cal_db_interface.split(\":\")\n", + " r1, r2 = ran.split(\"#\")\n", + " cal_db_interface = \":\".join(\n", + " [prot, serv, str(np.random.randint(int(r1), int(r2)))])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T15:54:23.679069Z", + "start_time": "2018-12-06T15:54:23.662821Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "\n", + "x = 1024 # rows of the FastCCD to analyze in FS mode \n", + "y = 1024 # columns of the FastCCD to analyze in FS mode \n", + "sensorSize = [x, y]\n", + " \n", + "ped_dir = \"{}/r{:04d}\".format(in_folder, run)\n", + "fp_name = path_template.format(run)\n", + "\n", + "\n", + "creation_time = None\n", + "if use_dir_creation_date:\n", + " creation_time = get_dir_creation_date(in_folder, run)\n", + "\n", + "fp_path = '{}/{}'.format(ped_dir, fp_name)\n", + "\n", + "print(\"Reading data from: {}\\n\".format(fp_path))\n", + "print(\"Run is: {}\".format(run))\n", + "print(\"HDF5 path: {}\".format(h5path))\n", + "if creation_time:\n", + " print(\"Using {} as creation time\".format(creation_time.isoformat()))\n", + " \n", + "out_folder = \"{}/r{:04d}\".format(out_folder, run)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T15:54:23.913269Z", + "start_time": "2018-12-06T15:54:23.868910Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "\n", + "sensorSize = [x, y]\n", + "chunkSize = 100 #Number of images to read per chunk\n", + "blockSize = [sensorSize[0]//2, sensorSize[1]//4] #Sensor area will be analysed according to blocksize\n", + "xcal.defaultBlockSize = blockSize\n", + "memoryCells = 1 #FastCCD has 1 memory cell\n", + "#Specifies total number of images to proceed\n", + "\n", + "commonModeBlockSize = [512, 512]\n", + "commonModeAxisR = 'row'#Axis along which common mode will be calculated\n", + "run_parallel = True\n", + "profile = False\n", + " \n", + "\n", + "if not os.path.exists(out_folder):\n", + " os.makedirs(out_folder)\n", + "elif not overwrite:\n", + " raise AttributeError(\"Output path exists! 
Exiting\") \n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T15:54:24.088948Z", + "start_time": "2018-12-06T15:54:24.059925Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "dirlist = sorted(os.listdir(ped_dir))\n", + "file_list = []\n", + "total_sequences = 0\n", + "fsequences = []\n", + "for entry in dirlist:\n", + "\n", + " #only h5 file\n", + " abs_entry = \"{}/{}\".format(ped_dir, entry)\n", + " if os.path.isfile(abs_entry) and os.path.splitext(abs_entry)[1] == \".h5\":\n", + " \n", + " if sequences is None:\n", + " for seq in range(len(dirlist)):\n", + " \n", + " if path_template.format(run).format(seq) in abs_entry:\n", + " file_list.append(abs_entry)\n", + " total_sequences += 1\n", + " fsequences.append(seq)\n", + " else:\n", + " for seq in sequences:\n", + " \n", + " if path_template.format(run).format(seq) in abs_entry:\n", + " file_list.append(os.path.abspath(abs_entry))\n", + " total_sequences += 1\n", + " fsequences.append(seq)\n", + "sequences = fsequences" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### List of files to be processed ###" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T18:43:39.776018Z", + "start_time": "2018-12-06T18:43:39.759185Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "import copy\n", + "from IPython.display import HTML, display, Markdown, Latex\n", + "import tabulate\n", + "print(\"Processing a total of {} sequence files\".format(total_sequences))\n", + "table = []\n", + "\n", + "\n", + "for k, f in enumerate(file_list):\n", + " table.append((k, f))\n", + "if len(table): \n", + " md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=[\"#\", \"file\"]))) " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Reading in already corrected data ###\n", + "\n", + "In the following we read in already corrected data and separte double event back out\n", + "such as to track split event ratios.\n", + "\n", + "Additionally, data structures for CTI and relative gain determination are created" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "## pattern kernels as defined in pyDetLib\n", + "\n", + "kernel = {}\n", + "\n", + "kernel[\"203\"] = np.array([[0, 1, 0],\n", + " [0, 1, 0],\n", + " [0, 0, 0]], dtype=np.float)[:,:,np.newaxis]\n", + "\n", + "kernel[\"201\"] = np.array([[0, 0, 0],\n", + " [0, 1, 1],\n", + " [0, 0, 0]], dtype=np.float)[:,:,np.newaxis]\n", + "\n", + "kernel[\"202\"] = np.array([[0, 0, 0],\n", + " [0, 1, 0 ],\n", + " [0, 1, 0]], dtype=np.float)[:,:,np.newaxis]\n", + "\n", + "\n", + "kernel[\"200\"] = np.array([[0, 0, 0],\n", + " [1, 1, 0 ],\n", + " [0, 0, 0]], dtype=np.float)[:,:,np.newaxis]\n", + "\n", + "def separateDouble(pdat, alldat, p):\n", + " \"\"\"\n", + " Just a helper function to separate doubles back out\n", + " \n", + " :param pdat: the pattern indices\n", + " :param alldat: the corrected data\n", + " :param p: the pattern indice to work on\n", + " \"\"\"\n", + " \n", + " cdat = np.zeros_like(alldat)\n", + " \n", + " kstack = kernel[str(p)]\n", + " idx = pdat == p\n", + " \n", + " p1 = alldat[idx]\n", + " p2 = None\n", + "\n", + " if np.roll(kstack, 1, axis=0)[1,1] == 1:\n", + " idx = np.roll(pdat, -1, axis = 0) == p\n", + " p2 = alldat[idx]\n", + " if np.roll(kstack, -1, axis=0)[1,1] 
== 1:\n", + " idx = np.roll(pdat, 1, axis = 0) == p\n", + " p2 = alldat[idx]\n", + " if np.roll(kstack, 1, axis=1)[1,1] == 1:\n", + " idx = np.roll(pdat, -1, axis = 1) == p\n", + " p2 = alldat[idx]\n", + " if np.roll(kstack, -1, axis=1)[1,1] == 1:\n", + " idx = np.roll(pdat, 1, axis = 1) == p\n", + " p2 = alldat[idx]\n", + " \n", + " \n", + " \n", + " r = np.zeros((p1.shape[0], 2), np.float)\n", + " r[:,0] = np.abs(p1)-np.abs(p2)\n", + " r[:,1] = np.abs(p2)\n", + " \n", + " return r" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T16:10:55.917179Z", + "start_time": "2018-12-06T16:09:01.603633Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "# actual data reading happens here\n", + "\n", + "# separated doubles will be held in these lists\n", + "up_doubles = [] # up major partner\n", + "down_doubles = [] # down major partner\n", + "left_doubles = [] # left major partner\n", + "right_doubles = [] # right major partner\n", + "\n", + "x = 1024\n", + "y = 1024\n", + "\n", + "# structs for CTI and gain evaluation\n", + "# we work by quadrant\n", + "dstats = {'upper left': {'coords': [0, x//2, 0, y//2], # coordinates of this quadrant\n", + " 'row_coords': [], # will hold our data's row coordinates\n", + " 'col_coords': [], # will hold our data's column coordinates\n", + " 'col_dir': 1, # 1 if columns read out to left, -1 if read out to right\n", + " 'vals': [], # will hold our data values\n", + " },\n", + " 'lower left': {'coords':[0, x//2, y//2, y],\n", + " 'row_coords': [],\n", + " 'col_coords': [],\n", + " 'col_dir': 1,\n", + " 'vals': [],\n", + " },\n", + " 'upper right': {'coords':[x//2, x, 0, y//2],\n", + " 'row_coords': [],\n", + " 'col_coords': [],\n", + " 'col_dir': -1,\n", + " 'vals': [],\n", + " },\n", + " 'lower right': {'coords': [x//2, x, y//2, y],\n", + " 'row_coords': [],\n", + " 'col_coords': [],\n", + " 'col_dir': -1,\n", + " 'vals': [],\n", + " },\n", + " \n", + "}\n", + "\n", + "# structure that identifies and holds pattern statistics\n", + "pstats = {'singles': {'p': 100,},\n", + " 'first singles': {'p': 101,},\n", + " 'clusters': {'p': 1000,},\n", + " 'left major doubles': {'p': 200,},\n", + " 'right major doubles': {'p': 201,},\n", + " 'up major doubles': {'p': 202,},\n", + " 'down major doubles': {'p': 203,},\n", + " 'left major triples': {'p': 300,},\n", + " 'right major triples': {'p': 301,},\n", + " 'up major triples': {'p': 302,},\n", + " 'down major triples': {'p': 303,},\n", + " 'left major quads': {'p': 400,},\n", + " 'right major quads': {'p': 401,},\n", + " 'up major quads': {'p': 402,},\n", + " 'down major quads': {'p': 403,},\n", + "}\n", + "\n", + "# total number of patterns\n", + "tpats = 0\n", + "\n", + "for k, f in enumerate(file_list):\n", + " with h5py.File(f, 'r', driver='core') as infile:\n", + " data = infile[h5path+\"/pixels_classified\"][()]\n", + " patterns = infile[h5path+\"/patterns\"][()]\n", + " bpix = infile[h5path+\"/mask\"][()]\n", + " \n", + " # separate out doubles\n", + " up_doubles.append(separateDouble(patterns, data, 202))\n", + " down_doubles.append(separateDouble(patterns, data, 203))\n", + " right_doubles.append(separateDouble(patterns, data, 201))\n", + " left_doubles.append(separateDouble(patterns, data, 200))\n", + " \n", + " # create pattern statistics\n", + " for pstr, entry in pstats.items():\n", + " cpat = patterns == entry[\"p\"]\n", + " entry[\"counts\"] = np.count_nonzero(cpat)\n", + " hist = entry.get(\"hist\", 0)\n", + " hist += 
np.sum(cpat, axis=0)\n", + " entry[\"hist\"] = hist\n", + " \n", + " # for relative norms\n", + " tpats += np.count_nonzero((patterns > 0) & (patterns < 1000))\n", + " \n", + " # create coordinates needed for CTI and relative gain fitting\n", + " xv, yv = np.meshgrid(np.arange(data.shape[0]), np.arange(data.shape[1]))\n", + " xv = np.repeat(xv[...,None], data.shape[2], axis=2)\n", + " yv = np.repeat(yv[...,None], data.shape[2], axis=2)\n", + "\n", + " # we take all single values\n", + " all_singles = np.zeros_like(data)\n", + " all_singles[(patterns == 100) | (patterns == 101)] = data[(patterns == 100) | (patterns == 101)]\n", + " \n", + " # and then save data and coordinates quadrant-wise\n", + " for key, entry in dstats.items():\n", + " co = entry[\"coords\"]\n", + " cd = entry[\"col_dir\"]\n", + " asd = all_singles[:, co[2]:co[3], co[0]:co[1]]\n", + " # take column direction into account - right hemisphere reads to right, left to left\n", + " if cd == 1:\n", + " yv, xv = np.meshgrid(np.arange(asd.shape[1]), np.arange(asd.shape[2])+co[2]) # row counting downwards\n", + " else:\n", + " yv, xv = np.meshgrid(np.arange(asd.shape[1], 0, -1), np.arange(asd.shape[2])+co[2]) # row counting downwards\n", + " xv = np.repeat(xv[None,...], data.shape[0], axis=0)\n", + " yv = np.repeat(yv[None,...], data.shape[0], axis=0)\n", + " entry['row_coords'].append(xv[asd > 0].flatten())\n", + " entry['col_coords'].append(yv[asd > 0].flatten())\n", + " entry['vals'].append(asd[asd > 0].flatten())\n", + " \n", + " break\n", + "\n", + "up_doubles = np.concatenate(up_doubles, axis=0)\n", + "down_doubles = np.concatenate(down_doubles, axis=0)\n", + "left_doubles = np.concatenate(left_doubles, axis=0)\n", + "right_doubles = np.concatenate(right_doubles, axis=0)\n", + "\n", + "for key, entry in dstats.items():\n", + " entry['row_coords'] = np.concatenate(entry['row_coords'])\n", + " entry['col_coords'] = np.concatenate(entry['col_coords'])\n", + " entry['vals'] = np.concatenate(entry['vals'])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Pattern Statistics ##\n", + "\n", + "Relative occurrences are normalized to non-cluster events" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "table = PrettyTable()\n", + "table.field_names = [\"Pattern type\", \"Absolute #\", \"Relative Occurence\"]\n", + "for key, entry in pstats.items():\n", + " table.add_row([key, entry[\"counts\"], \"{:0.2f}\".format(float(entry[\"counts\"])/tpats)])\n", + " \n", + "print(table)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "from mpl_toolkits.axes_grid1 import ImageGrid\n", + "fig = plt.figure(1, (15., 15.))\n", + "grid = ImageGrid(fig, 111, # similar to subplot(111)\n", + " nrows_ncols=(5, 4), # creates 2x2 grid of axes\n", + " axes_pad=0.3, # pad between axes in inch.\n", + " )\n", + "\n", + "i = 0\n", + "for key, entry in pstats.items():\n", + " d = entry[\"hist\"]\n", + " \n", + " grid[i].imshow(d, vmin=0, vmax=2.*np.mean(d[d != 0]), cmap=\"gray_r\") # The AxesGrid object work as a list of axes.\n", + " grid[i].annotate(key, (20, -30), color=\"red\", annotation_clip=False)\n", + " i += 1\n", + " if i == 3:\n", + " i += 1\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + 
"metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "fig = plt.figure(figsize=(8,8))\n", + "ax = fig.add_subplot(111)\n", + "ax.scatter(np.abs(up_doubles[:,0]), np.abs(up_doubles[:,1]), 0.5, alpha=0.1, color='b', label=\"up doubles\")\n", + "ax.scatter(np.abs(down_doubles[:,1]), np.abs(down_doubles[:,0]), 0.5, alpha=0.1, color='g', label=\"down doubles\")\n", + "ax.semilogx()\n", + "ax.semilogy()\n", + "ax.set_xlim(1000, 50000)\n", + "ax.set_ylim(1000, 50000)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "fig = plt.figure(figsize=(8,8))\n", + "ax = fig.add_subplot(111)\n", + "ax.scatter(np.abs(left_doubles[:,0]), np.abs(left_doubles[:,1]), 0.5, alpha=0.1, color='b', label=\"left doubles\")\n", + "ax.scatter(np.abs(right_doubles[:,1]), np.abs(right_doubles[:,0]), 0.5, alpha=0.1, color='g', label=\"right doubles\")\n", + "ax.semilogx()\n", + "ax.semilogy()\n", + "ax.set_xlim(1000, 50000)\n", + "ax.set_ylim(1000, 50000)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import matplotlib\n", + "from mpl_toolkits.axes_grid1 import AxesGrid\n", + "cti_limit_low = 3000\n", + "cti_limit_high = 10000\n", + "max_points = 100000\n", + "\n", + "fig = plt.figure(figsize=(15., 15.))\n", + "grid = AxesGrid(fig, 111, # similar to subplot(111)\n", + " nrows_ncols=(2, 2), # creates 2x2 grid of axes\n", + " axes_pad=0.3, # pad between axes in inch.\n", + " aspect=False,\n", + " )\n", + "\n", + "ais = [0, 2, 1, 3]\n", + "ai = 0\n", + "for key, entry in dstats.items():\n", + " i = ais[ai]\n", + " rcoords = entry[\"row_coords\"][:max_points]\n", + " ccoords = entry[\"col_coords\"][:max_points]\n", + " avals = entry[\"vals\"][:max_points]\n", + " \n", + " idx = (avals > cti_limit_low) & (avals < cti_limit_high)\n", + "\n", + "\n", + " xax = ccoords[~idx]\n", + " yax = rcoords[~idx]\n", + " grid[i].scatter(xax, avals[~idx], 1, color=\"grey\", alpha=0.2)\n", + "\n", + "\n", + " xax = ccoords[idx]\n", + " yax = rcoords[idx]\n", + " pvals = avals[idx]\n", + "\n", + " unique_coords = np.unique(xax)\n", + " mean_signal = np.asarray([np.mean(pvals[xax==jdx]) for jdx in unique_coords])\n", + "\n", + " cmap = matplotlib.cm.get_cmap('summer')\n", + " colors = cmap(yax/np.max(yax))\n", + " grid[i].scatter(xax, pvals, 1, color=colors)\n", + " grid[i].scatter(unique_coords, mean_signal, 3, color='tomato')\n", + " grid[i].set_ylabel(\"Signal value (ADU)\")\n", + " grid[i].set_xlabel(\"Column coordinate\")\n", + " grid[i].annotate(key, (0.1, 1.02), xycoords=\"axes fraction\", color=\"red\", annotation_clip=False)\n", + " if entry[\"col_dir\"] == -1: # columns counting from the right\n", + " grid[i].set_xlim(512, 0)\n", + " l = grid[i].set_ylim(0, 1.25*cti_limit_high)\n", + " ai += 1" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "\n", + "fig = plt.figure(figsize=(15., 15.))\n", + "grid = AxesGrid(fig, 111, # similar to subplot(111)\n", + " nrows_ncols=(2, 2), # creates 2x2 grid of axes\n", + " axes_pad=0.3, # pad between axes in inch.\n", + " aspect=False,\n", + " )\n", + "\n", + "\n", + "ai = 0\n", + "for key, entry in dstats.items():\n", + " i = ais[ai]\n", + " rcoords = entry[\"row_coords\"][:max_points]\n", + " ccoords = entry[\"col_coords\"][:max_points]\n", + " avals = entry[\"vals\"][:max_points]\n", + " co = 
entry[\"coords\"]\n", + "\n", + " \n", + " yax = ccoords[~idx]\n", + " xax = rcoords[~idx]\n", + " grid[i].scatter(avals[~idx], xax, 1, color=\"grey\", alpha=0.2)\n", + "\n", + " yax = ccoords[idx]\n", + " xax = rcoords[idx]\n", + " pvals = avals[idx]\n", + "\n", + " unique_coords = np.unique(xax)\n", + " mean_signal = np.asarray([np.mean(pvals[xax==jdx]) for jdx in unique_coords])\n", + "\n", + " cmap = matplotlib.cm.get_cmap('summer')\n", + " colors = cmap(yax/np.max(yax))\n", + " grid[i].scatter(pvals, xax, 1, color=colors)\n", + " grid[i].scatter(mean_signal, unique_coords, 3, color='tomato')\n", + " grid[i].set_xlabel(\"Signal value (ADU)\")\n", + " grid[i].set_ylabel(\"Row coordinate\")\n", + " grid[i].annotate(key, (0.1, 1.02), xycoords=\"axes fraction\", color=\"red\", annotation_clip=False)\n", + " grid[i].set_ylim(co[3], co[2])\n", + " l = grid[i].set_xlim(0, 1.25*cti_limit_high)\n", + " ai += 1" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true, + "scrolled": false + }, + "outputs": [], + "source": [ + "from functools import partial\n", + "from iminuit import Minuit\n", + "\n", + "\n", + "\n", + "max_points = -1\n", + "limit_cols = [0, 512]\n", + "\n", + "\n", + "for key, entry in dstats.items():\n", + " \n", + " rcoords = entry[\"row_coords\"][:max_points]\n", + " ccoords = entry[\"col_coords\"][:max_points]\n", + " avals = entry[\"vals\"][:max_points]\n", + " co = entry[\"coords\"]\n", + " idx = (avals > cti_limit_low) & (avals < cti_limit_high) & (ccoords >= limit_cols[0]) & (ccoords <= limit_cols[1])\n", + " xax = rcoords[idx]-co[2]\n", + " yax = ccoords[idx]\n", + " avalsf = avals[idx]\n", + " min_data = 5\n", + "\n", + " ms = np.zeros(sensorSize[0]//2, np.float)\n", + " bs = np.zeros(sensorSize[0]//2, np.float)\n", + " mErr = np.zeros(sensorSize[0]//2, np.float)\n", + " bErr = np.zeros(sensorSize[0]//2, np.float)\n", + " residuals = np.zeros((sensorSize[0]//2, sensorSize[0]//2), np.float)\n", + " \n", + " rtest = np.arange(sensorSize[0]//2)\n", + "\n", + " def linFunc(x, *p):\n", + " return p[1] * x + p[0]\n", + " \n", + " \n", + " \n", + " for i in range(sensorSize[0]//2):\n", + " idx = (xax == i)\n", + " zm = avalsf[idx]\n", + " zx = yax[idx]\n", + "\n", + " \n", + " def linWrap(m, b):\n", + " return np.sum(((linFunc(zx, b, -m) - zm)) ** 2)\n", + " pparm = dict(throw_nan=False, pedantic=False,\n", + " print_level=0)\n", + " pparm[\"m\"] = 0\n", + " pparm[\"limit_m\"] = (0.9, 1)\n", + " pparm[\"b\"] = np.nanmean(zm)\n", + "\n", + " mini = Minuit(linWrap, **pparm)\n", + " fmin = mini.migrad()\n", + "\n", + "\n", + " if fmin[0]['is_valid'] and fmin[0]['has_covariance'] and (\n", + " min_data is None or zx.size > min_data):\n", + "\n", + " ferr = mini.minos()\n", + " res = mini.values\n", + "\n", + " ms[i] = res[\"m\"]\n", + " bs[i] = res[\"b\"]\n", + "\n", + " mErr[i] = max(-ferr['m']['lower'], ferr['m']['upper'])\n", + " bErr[i] = max(-ferr['b']['lower'], ferr['b']['upper'])\n", + "\n", + " \n", + " zx, uidx = np.unique(zx, return_index=True)\n", + " zm = zm[uidx]\n", + " \n", + " yt = linFunc(zx, res[\"b\"], res[\"m\"])\n", + " \n", + " sidx = np.in1d(rtest, zx)\n", + " \n", + " entry['residuals'] = residuals\n", + " \n", + " ctiErr = np.sqrt((1. 
/ bs * mErr) ** 2 + (ms / bs ** 2 * bErr) ** 2)\n", + " cti = ms / bs\n", + " \n", + " relGain = bs/np.nanmedian(bs)\n", + " entry['cti'] = cti\n", + " entry['cti_err'] = ctiErr\n", + " entry['rel_gain'] = relGain\n", + " \n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "\n", + "fig = plt.figure(figsize=(15., 15.))\n", + "grid = AxesGrid(fig, 111, # similar to subplot(111)\n", + " nrows_ncols=(2, 2), # creates 2x2 grid of axes\n", + " axes_pad=0.3, # pad between axes in inch.\n", + " aspect=False,\n", + " )\n", + "\n", + "ais = [0, 2, 1, 3]\n", + "ai = 0\n", + "for key, entry in dstats.items():\n", + " i = ais[ai]\n", + " co = entry[\"coords\"]\n", + " cti = entry[\"cti\"]\n", + " x = np.arange(cti.size)+co[2]\n", + " grid[i].scatter(cti, x, 1., color='k')\n", + " grid[i].set_xlim(1e-4, 1e-3)\n", + " grid[i].semilogx()\n", + " grid[i].set_xlabel(\"CTI\")\n", + " grid[i].set_ylabel(\"Row coordinate\")\n", + " grid[i].annotate(key, (0.1, 1.02), xycoords=\"axes fraction\", color=\"red\", annotation_clip=False)\n", + " grid[i].set_ylim(co[3], co[2])\n", + " ai += 1" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "\n", + "fig = plt.figure(figsize=(15., 15.))\n", + "grid = AxesGrid(fig, 111, # similar to subplot(111)\n", + " nrows_ncols=(2, 2), # creates 2x2 grid of axes\n", + " axes_pad=0.3, # pad between axes in inch.\n", + " aspect=False,\n", + " )\n", + "\n", + "ais = [0, 2, 1, 3]\n", + "ai = 0\n", + "for key, entry in dstats.items():\n", + " i = ais[ai]\n", + " co = entry[\"coords\"]\n", + " rgain = entry[\"rel_gain\"]\n", + " x = np.arange(rgain.size)+co[2]\n", + " grid[i].scatter(rgain, x, 1., color='k')\n", + " grid[i].set_xlim(0.5, 1.5)\n", + " grid[i].set_xlabel(\"Relative gain\")\n", + " grid[i].set_ylabel(\"Row coordinate\")\n", + " grid[i].annotate(key, (0.1, 1.02), xycoords=\"axes fraction\", color=\"red\", annotation_clip=False)\n", + " grid[i].set_ylim(co[3], co[2])\n", + " ai += 1" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "gainmap = np.zeros((sensorSize[0], sensorSize[1]))\n", + "for key, entry in dstats.items():\n", + "\n", + " co = entry[\"coords\"]\n", + " rel_gain = entry[\"rel_gain\"]\n", + " cti = entry[\"cti\"]\n", + " if entry[\"col_dir\"] == 1:\n", + " quadgain = rel_gain[:, None]*(1-cti[:, None])**np.repeat(np.arange(512)[None, :], 512, axis=0)\n", + " else:\n", + " quadgain = rel_gain[:, None]*(1-cti[:, None])**np.repeat(np.arange(512, 0, -1)[None, :], 512, axis=0)\n", + " gainmap[co[2]:co[3], co[0]:co[1]] = quadgain\n", + "\n", + "fig = xana.heatmapPlot(gainmap, figsize=(8,8), lut_label=\"Relative gain\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.6" + }, + "latex_envs": { + "LaTeX_envs_menu_present": 
true, + "autocomplete": true, + "bibliofile": "biblio.bib", + "cite_by": "apalike", + "current_citInitial": 1, + "eqLabelWithNumbers": true, + "eqNumInitial": 1, + "hotkeys": { + "equation": "Ctrl-E", + "itemize": "Ctrl-I" + }, + "labels_anchors": false, + "latex_user_defs": false, + "report_style_numbering": false, + "user_envs_cfg": false + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/notebooks/pnCCD/Correct_pnCCD_NBC.ipynb b/notebooks/pnCCD/Correct_pnCCD_NBC.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..1af1d0ca1743f1a6c1d5fee347bcd2c30104712b --- /dev/null +++ b/notebooks/pnCCD/Correct_pnCCD_NBC.ipynb @@ -0,0 +1,1176 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# pnCCD Data Correction ##\n", + "\n", + "Authors: DET Group, Version 1.0\n", + "\n", + "The following notebook provides correction of images acquired with the pnCCD." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T15:54:23.218849Z", + "start_time": "2018-12-06T15:54:23.166497Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "\n", + "in_folder = \"/gpfs/exfel/exp/SQS/201921/p002430/raw/\" # input folder, required\n", + "out_folder = '/gpfs/exfel/data/scratch/xcal/test/' # output folder, required\n", + "path_template = 'RAW-R{:04d}-PNCCD01-S{{:05d}}.h5' # the template to use to access data\n", + "path_template_seqs = \"{}/r{:04d}/*PNCCD01-S*.h5\"\n", + "run = 746 # which run to read data from, required\n", + "number_dark_frames = 0 # number of images to be used, if set to 0 all available images are used\n", + "cluster_profile = \"noDB\" # ipcluster profile to use\n", + "sigma_noise = 10. # Pixel exceeding 'sigmaNoise' * noise value in that pixel will be masked\n", + "h5path = '/INSTRUMENT/SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output/data/' # path in the HDF5 file the data is at\n", + "multi_iteration = False # use multiple iterations\n", + "use_dir_creation_date = True\n", + "fix_temperature = 233.\n", + "gain = 0\n", + "bias_voltage = 300\n", + "integration_time = 70\n", + "\n", + "split_evt_primary_threshold = 5. # primary threshold for split event classification in terms of n sigma noise\n", + "split_evt_secondary_threshold = 3. # secondary threshold for split event classification in terms of n sigma noise\n", + "split_evt_mip_threshold = 1000. # MIP threshold for event classification\n", + "cal_db_interface = \"tcp://max-exfl016:8015\" # calibration DB interface to use\n", + "cal_db_timeout = 300000000 # timeout on caldb requests\n", + "sequences = [-1] # sequences to correct, set to -1 for all, range allowed\n", + "chunk_size_idim = 1 # H5 chunking size of output data\n", + "overwrite = True\n", + "do_pattern_classification = True # classify split events\n", + "sequences_per_node = 1\n", + "limit_images = 0\n", + "time_offset_days = 0\n", + "photon_energy_gain_map = 2. 
# energy in keV\n", + "cpuCores = 8\n", + "\n", + "\n", + "def balance_sequences(in_folder, run, sequences, sequences_per_node, path_template_seqs):\n", + " import glob\n", + " import re\n", + " import numpy as np\n", + " if sequences[0] == -1:\n", + " sequence_files = glob.glob(path_template_seqs.format(in_folder, run))\n", + " seq_nums = set()\n", + " for sf in sequence_files:\n", + " seqnum = re.findall(r\".*-S([0-9]*).h5\", sf)[0]\n", + " seq_nums.add(int(seqnum))\n", + " seq_nums -= set(sequences)\n", + " nsplits = len(seq_nums)//sequences_per_node+1\n", + " while nsplits > 8:\n", + " sequences_per_node += 1\n", + " nsplits = len(seq_nums)//sequences_per_node+1\n", + " print(\"Changed to {} sequences per node to have a maximum of 8 concurrent jobs\".format(sequences_per_node))\n", + " return [l.tolist() for l in np.array_split(list(seq_nums), nsplits)]\n", + " else:\n", + " return sequences" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T15:54:23.455376Z", + "start_time": "2018-12-06T15:54:23.413579Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "import XFELDetAna.xfelprofiler as xprof\n", + "\n", + "profiler = xprof.Profiler()\n", + "profiler.disable()\n", + "from XFELDetAna.util import env\n", + "env.iprofile = cluster_profile\n", + "\n", + "import warnings\n", + "warnings.filterwarnings('ignore')\n", + "\n", + "from XFELDetAna import xfelpycaltools as xcal\n", + "from XFELDetAna import xfelpyanatools as xana\n", + "from XFELDetAna.plotting.util import prettyPlotting\n", + "prettyPlotting=True\n", + "from XFELDetAna.xfelreaders import ChunkReader\n", + "from XFELDetAna.detectors.fastccd import readerh5 as fastccdreaderh5\n", + "\n", + "import numpy as np\n", + "import h5py\n", + "import matplotlib.pyplot as plt\n", + "from iminuit import Minuit\n", + "\n", + "import time\n", + "import copy\n", + "import os\n", + "\n", + "from prettytable import PrettyTable\n", + "\n", + "from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions\n", + "from iCalibrationDB.detectors import DetectorTypes\n", + "from cal_tools.tools import get_dir_creation_date\n", + "\n", + "from datetime import timedelta\n", + "\n", + "%matplotlib inline\n", + "\n", + "if sequences[0] == -1:\n", + " sequences = None\n", + "\n", + "\n", + "if \"#\" in cal_db_interface:\n", + " prot, serv, ran = cal_db_interface.split(\":\")\n", + " r1, r2 = ran.split(\"#\")\n", + " cal_db_interface = \":\".join(\n", + " [prot, serv, str(np.random.randint(int(r1), int(r2)))])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T15:54:23.679069Z", + "start_time": "2018-12-06T15:54:23.662821Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "\n", + "x = 1024 # rows of the FastCCD to analyze in FS mode \n", + "y = 1024 # columns of the FastCCD to analyze in FS mode \n", + "\n", + " \n", + "ped_dir = \"{}/r{:04d}\".format(in_folder, run)\n", + "fp_name = path_template.format(run)\n", + "\n", + "import datetime\n", + "creation_time = None\n", + "if use_dir_creation_date:\n", + " creation_time = get_dir_creation_date(in_folder, run)\n", + "\n", + "fp_path = '{}/{}'.format(ped_dir, fp_name)\n", + "\n", + "print(\"Reading data from: {}\\n\".format(fp_path))\n", + "print(\"Run is: {}\".format(run))\n", + "print(\"HDF5 path: {}\".format(h5path))\n", + "if creation_time:\n", + " print(\"Using {} as creation 
time\".format(creation_time.isoformat()))\n", + " \n", + "out_folder = \"{}/r{:04d}\".format(out_folder, run)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T15:54:23.913269Z", + "start_time": "2018-12-06T15:54:23.868910Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "\n", + "sensorSize = [x, y]\n", + "chunkSize = 100 #Number of images to read per chunk\n", + "blockSize = [sensorSize[0]//2, sensorSize[1]//4] #Sensor area will be analysed according to blocksize\n", + "xcal.defaultBlockSize = blockSize\n", + "memoryCells = 1 #FastCCD has 1 memory cell\n", + "#Specifies total number of images to proceed\n", + "\n", + "commonModeBlockSize = [512, 512]\n", + "commonModeAxisR = 'row'#Axis along which common mode will be calculated\n", + "run_parallel = True\n", + "profile = False\n", + " \n", + "\n", + "if not os.path.exists(out_folder):\n", + " os.makedirs(out_folder)\n", + "elif not overwrite:\n", + " raise AttributeError(\"Output path exists! Exiting\") \n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T15:54:24.088948Z", + "start_time": "2018-12-06T15:54:24.059925Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "dirlist = sorted(os.listdir(ped_dir))\n", + "file_list = []\n", + "total_sequences = 0\n", + "fsequences = []\n", + "for entry in dirlist:\n", + "\n", + " #only h5 file\n", + " abs_entry = \"{}/{}\".format(ped_dir, entry)\n", + " if os.path.isfile(abs_entry) and os.path.splitext(abs_entry)[1] == \".h5\":\n", + " \n", + " if sequences is None:\n", + " for seq in range(len(dirlist)):\n", + " \n", + " if path_template.format(run).format(seq) in abs_entry:\n", + " file_list.append(abs_entry)\n", + " total_sequences += 1\n", + " fsequences.append(seq)\n", + " else:\n", + " for seq in sequences:\n", + " \n", + " if path_template.format(run).format(seq) in abs_entry:\n", + " file_list.append(os.path.abspath(abs_entry))\n", + " total_sequences += 1\n", + " fsequences.append(seq)\n", + "sequences = fsequences" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T18:43:39.776018Z", + "start_time": "2018-12-06T18:43:39.759185Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "import copy\n", + "from IPython.display import HTML, display, Markdown, Latex\n", + "import tabulate\n", + "print(\"Processing a total of {} sequence files\".format(total_sequences))\n", + "table = []\n", + "\n", + "\n", + "for k, f in enumerate(file_list):\n", + " table.append((k, f))\n", + "if len(table): \n", + " md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=[\"#\", \"file\"]))) " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As a first step, dark maps have to be loaded." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T15:54:28.254544Z", + "start_time": "2018-12-06T15:54:24.709521Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "offsetMap = None\n", + "offset_temperature = None\n", + "badPixelMap = None\n", + "noiseMap = None\n", + "\n", + "## offset\n", + "metadata = ConstantMetaData()\n", + "offset = Constants.CCD(DetectorTypes.pnCCD).Offset()\n", + "metadata.calibration_constant = offset\n", + "\n", + "# set the operating condition\n", + "condition = Conditions.Dark.CCD(bias_voltage=bias_voltage,\n", + " integration_time=integration_time,\n", + " gain_setting=gain,\n", + " temperature=fix_temperature,\n", + " pixels_x=1024,\n", + " pixels_y=1024)\n", + "\n", + "\n", + "device = Detectors.PnCCD1\n", + "\n", + "\n", + "metadata.detector_condition = condition\n", + "\n", + "\n", + "\n", + "# specify the a version for this constant\n", + "if creation_time is None:\n", + " metadata.calibration_constant_version = Versions.Now(device=device)\n", + " metadata.retrieve(cal_db_interface)\n", + "else:\n", + " metadata.calibration_constant_version = Versions.Timespan(device=device,\n", + " start=creation_time)\n", + " metadata.retrieve(cal_db_interface, when=creation_time.isoformat(), timeout=3000000)\n", + "\n", + "\n", + "if offsetMap is None:\n", + " offsetMap = np.zeros(list(offset.data.shape)+[3], np.float32)\n", + "offsetMap = offset.data\n", + "\n", + "offset_temperature = None\n", + "for parm in condition.parameters:\n", + "\n", + " if parm.name == \"Sensor Temperature\":\n", + " offset_temperature = parm.value\n", + "\n", + "print(\"Temperature dark images for offset calculation \" +\n", + " \"were taken at: {:0.2f} K @ {}\".format(offset_temperature,\n", + " metadata.calibration_constant_version.begin_at))\n", + "\n", + "## noise\n", + "metadata = ConstantMetaData()\n", + "noise = Constants.CCD(DetectorTypes.pnCCD).Noise()\n", + "metadata.calibration_constant = noise\n", + "\n", + "# set the operating condition\n", + "condition = Conditions.Dark.CCD(bias_voltage=bias_voltage,\n", + " integration_time=integration_time,\n", + " gain_setting=gain,\n", + " temperature=fix_temperature,\n", + " pixels_x=1024,\n", + " pixels_y=1024)\n", + "\n", + "\n", + "device = Detectors.PnCCD1\n", + "\n", + "\n", + "metadata.detector_condition = condition\n", + "\n", + "# specify the a version for this constant\n", + "if creation_time is None:\n", + " metadata.calibration_constant_version = Versions.Now(device=device)\n", + " metadata.retrieve(cal_db_interface)\n", + "else:\n", + " metadata.calibration_constant_version = Versions.Timespan(device=device,\n", + " start=creation_time)\n", + " metadata.retrieve(cal_db_interface, when=creation_time.isoformat(), timeout=3000000)\n", + "\n", + "noiseMap = noise.data\n", + "\n", + "## bad pixels \n", + "\n", + "metadata = ConstantMetaData()\n", + "bpix = Constants.CCD(DetectorTypes.pnCCD).BadPixelsDark()\n", + "metadata.calibration_constant = bpix\n", + "\n", + "# set the operating condition\n", + "condition = Conditions.Dark.CCD(bias_voltage=bias_voltage,\n", + " integration_time=integration_time,\n", + " gain_setting=gain,\n", + " temperature=fix_temperature,\n", + " pixels_x=1024,\n", + " pixels_y=1024)\n", + "device = Detectors.PnCCD1\n", + "\n", + "\n", + "metadata.detector_condition = condition\n", + "\n", + "# specify the a version for this constant\n", + "metadata.calibration_constant_version = Versions.Now(device=device)\n", + 
"metadata.retrieve(cal_db_interface, timeout=cal_db_timeout)\n", + "\n", + " \n", + "badPixelMap = bpix.data" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Loading cti and relative gain values" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T15:54:28.343869Z", + "start_time": "2018-12-06T15:54:28.271344Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "## relative gain\n", + "\n", + "metadata = ConstantMetaData()\n", + "relgain = Constants.CCD(DetectorTypes.pnCCD).RelativeGain()\n", + "metadata.calibration_constant = relgain\n", + "\n", + "# set the operating condition\n", + "condition = Conditions.Illuminated.CCD(bias_voltage=bias_voltage,\n", + " integration_time=integration_time,\n", + " gain_setting=gain,\n", + " temperature=fix_temperature,\n", + " pixels_x=1024,\n", + " pixels_y=1024, photon_energy=photon_energy_gain_map)\n", + "device = Detectors.PnCCD1\n", + "\n", + "\n", + "metadata.detector_condition = condition\n", + "\n", + "# specify the a version for this constant\n", + "metadata.calibration_constant_version = Versions.Now(device=device)\n", + "#metadata.retrieve(cal_db_interface)\n", + "\n", + "relGain = np.ones_like(offsetMap) #relgain.data\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T15:54:28.771629Z", + "start_time": "2018-12-06T15:54:28.346051Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "#************************Calculators************************#\n", + "\n", + "\n", + "cmCorrection = xcal.CommonModeCorrection([x, y], \n", + " commonModeBlockSize, \n", + " commonModeAxisR,\n", + " nCells = memoryCells, \n", + " noiseMap = noiseMap,\n", + " runParallel=True,\n", + " stats=True)\n", + "#cmCorrection.debug()\n", + "\n", + "patternClassifierLH = xcal.PatternClassifier([x, y//2], \n", + " noiseMap[:,:y//2], \n", + " split_evt_primary_threshold, \n", + " split_evt_secondary_threshold,\n", + " split_evt_mip_threshold,\n", + " tagFirstSingles = 3, \n", + " nCells=memoryCells, \n", + " cores=cpuCores, \n", + " allowElongated = False,\n", + " blockSize=[x, y//2],\n", + " runParallel=True)\n", + "\n", + "\n", + "\n", + "patternClassifierUH = xcal.PatternClassifier([x, y//2], \n", + " noiseMap[:, y//2:], \n", + " split_evt_primary_threshold, \n", + " split_evt_secondary_threshold,\n", + " split_evt_mip_threshold,\n", + " tagFirstSingles = 4, \n", + " nCells=memoryCells, \n", + " cores=cpuCores, \n", + " allowElongated = False,\n", + " blockSize=[x, y//2],\n", + " runParallel=True)\n", + "\n", + " \n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T16:08:51.886343Z", + "start_time": "2018-12-06T16:08:51.842837Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "#*****************Histogram Calculators******************#\n", + "\n", + "histCalOffsetCor = xcal.HistogramCalculator([x, y], \n", + " bins=500, \n", + " range=[-50, 1000],\n", + " nCells=memoryCells, \n", + " cores=cpuCores,\n", + " blockSize=blockSize)\n", + "\n", + "histCalPcorr = xcal.HistogramCalculator([x, y], \n", + " bins=500, \n", + " range=[-50, 1000],\n", + " nCells=memoryCells, \n", + " cores=cpuCores,\n", + " blockSize=blockSize)\n", + "\n", + "histCalPcorrS = xcal.HistogramCalculator([x, y], \n", + " bins=500, \n", + " range=[-50, 1000],\n", + " nCells=memoryCells, \n", + " 
cores=cpuCores,\n", + " blockSize=blockSize)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Applying corrections" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T16:08:52.441784Z", + "start_time": "2018-12-06T16:08:52.437284Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "patternClassifierLH._imagesPerChunk = 500\n", + "patternClassifierUH._imagesPerChunk = 500\n", + "#patternClassifierLH.debug()\n", + "#patternClassifierUH.debug()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T16:08:53.042555Z", + "start_time": "2018-12-06T16:08:53.034522Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "histCalOffsetCor.debug()\n", + "histCalPcorr.debug()\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T16:08:53.551111Z", + "start_time": "2018-12-06T16:08:53.531064Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "def copy_and_sanitize_non_cal_data(infile, outfile, h5base):\n", + " \n", + " if h5base.startswith(\"/\"):\n", + " h5base = h5base[1:]\n", + " dont_copy = ['image']\n", + " dont_copy = [h5base+\"/{}\".format(do)\n", + " for do in dont_copy]\n", + "\n", + " def visitor(k, item):\n", + " if k not in dont_copy:\n", + " if isinstance(item, h5py.Group):\n", + " outfile.create_group(k)\n", + " elif isinstance(item, h5py.Dataset):\n", + " group = str(k).split(\"/\")\n", + " group = \"/\".join(group[:-1])\n", + " infile.copy(k, outfile[group])\n", + " \n", + " infile.visititems(visitor)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T16:10:55.917179Z", + "start_time": "2018-12-06T16:09:01.603633Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "mean_im = None\n", + "single_im = None\n", + "mean_im_cc = None\n", + "single_im_cc = None\n", + "drift_lh = []\n", + "drift_uh = []\n", + "offsetMap = np.squeeze(offsetMap)\n", + "noiseMap = np.squeeze(noiseMap)\n", + "badPixelMap = np.squeeze(badPixelMap)\n", + "relGain = np.squeeze(relGain)\n", + "for k, f in enumerate(file_list):\n", + " with h5py.File(f, 'r', driver='core') as infile:\n", + " out_fileb = \"{}/{}\".format(out_folder, f.split(\"/\")[-1])\n", + " out_file = out_fileb.replace(\"RAW\", \"CORR\")\n", + " #out_filed = out_fileb.replace(\"RAW\", \"CORR-SC\")\n", + "\n", + " data = None\n", + " noise = None\n", + " try:\n", + " with h5py.File(out_file, \"w\") as ofile:\n", + " \n", + " copy_and_sanitize_non_cal_data(infile, ofile, h5path)\n", + " data = infile[h5path+\"/image\"][()]\n", + " nzidx = np.count_nonzero(data, axis=(1,2))\n", + " data = data[nzidx != 0, ...]\n", + " if limit_images > 0:\n", + " data = data[:limit_images,...]\n", + " oshape = data.shape\n", + " data = np.moveaxis(data, 0, 2)\n", + " ddset = ofile.create_dataset(h5path+\"/pixels\",\n", + " oshape,\n", + " chunks=(chunk_size_idim, oshape[1], oshape[2]),\n", + " dtype=np.float32)\n", + " \n", + " ddsetm = ofile.create_dataset(h5path+\"/mask\",\n", + " oshape,\n", + " chunks=(chunk_size_idim, oshape[1], oshape[2]),\n", + " dtype=np.uint32, compression=\"gzip\")\n", + " \n", + " \n", + " \n", + " data = data.astype(np.float32) \n", + " offset = np.repeat(offsetMap[...,None], data.shape[2], axis=2)\n", + " rg = np.repeat(relGain[:,:,None], data.shape[2], axis=2)\n", + " noise = 
np.repeat(noiseMap[...,None], data.shape[2], axis=2)\n", + " bpix = np.repeat(badPixelMap[...,None], data.shape[2], axis=2)\n", + " \n", + " data -= offset\n", + " data *= rg\n", + " \n", + " \n", + " histCalOffsetCor.fill(data)\n", + "\n", + " \n", + " ddset[...] = np.moveaxis(data, 2, 0)\n", + " ddsetm[...] = np.moveaxis(bpix, 2, 0)\n", + " ofile.flush()\n", + " \n", + " if mean_im is None:\n", + " mean_im = np.nanmean(data, axis=2)\n", + " single_im = data[...,0]\n", + " \n", + " if do_pattern_classification:\n", + " #with h5py.File(out_file, \"a\") as ofiled:\n", + " #copy_and_sanitize_non_cal_data(infile, ofiled, h5path)\n", + "\n", + " ddsetcm = ofile.create_dataset(h5path+\"/pixels_cm\",\n", + " oshape,\n", + " chunks=(chunk_size_idim, oshape[1], oshape[2]),\n", + " dtype=np.float32)\n", + "\n", + " ddsetc = ofile.create_dataset(h5path+\"/pixels_classified\",\n", + " oshape,\n", + " chunks=(chunk_size_idim, oshape[1], oshape[2]),\n", + " dtype=np.float32, compression=\"gzip\")\n", + "\n", + " ddsetp = ofile.create_dataset(h5path+\"/patterns\",\n", + " oshape,\n", + " chunks=(chunk_size_idim, oshape[1], oshape[2]),\n", + " dtype=np.int32, compression=\"gzip\")\n", + " \n", + "\n", + " patternClassifierLH._noisemap = noise[:, :x//2, :]\n", + " patternClassifierUH._noisemap = noise[:, x//2:, :]\n", + " data = cmCorrection.correct(data)\n", + " ddsetcm[...] = np.moveaxis(data, 2, 0)\n", + "\n", + " dataLH = data[:, :x//2, :]\n", + " dataUH = data[:, x//2:, :]\n", + "\n", + " dataLH, patternsLH = patternClassifierLH.classify(dataLH)\n", + " dataUH, patternsUH = patternClassifierUH.classify(dataUH)\n", + "\n", + " data[:, :x//2, :] = dataLH\n", + " data[:, x//2:, :] = dataUH\n", + "\n", + " patterns = np.zeros(data.shape, patternsLH.dtype)\n", + " patterns[:, :x//2, :] = patternsLH\n", + " patterns[:, x//2:, :] = patternsUH\n", + "\n", + " data[data < split_evt_primary_threshold*noise] = 0\n", + " ddsetc[...] = np.moveaxis(data, 2, 0)\n", + " ddsetp[...] = np.moveaxis(patterns, 2, 0)\n", + "\n", + " histCalPcorr.fill(data)\n", + " data[patterns != 100] = np.nan\n", + " histCalPcorrS.fill(data)\n", + "\n", + " if mean_im_cc is None:\n", + " mean_im_cc = np.nanmean(data, axis=2)\n", + " single_im_cc = data[...,0]\n", + " \n", + " except Exception as e:\n", + " print(\"Couldn't calibrate data in {}: {}\".format(f, e))\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Common mode statistics ##\n", + "\n", + "Common mode statistics of at most the first 500 frames. Frames increase upwards on the y-axis, columns in the image are on the x-axis. 
The four quadrants are shown individually-" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "cmStats = cmCorrection.get()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false, + "scrolled": false + }, + "outputs": [], + "source": [ + "for i in range(0, cmStats.shape[0]):\n", + " cmStatConcat = cmStats[i, :, :500]\n", + " fig = xana.heatmapPlot(cmStatConcat.T, figsize=(8,14), lut_label=\"Common mode (ADU)\",\n", + " y_label = \"frame\",\n", + " x_label = \"column\",\n", + " vmin=-5000, vmax=5000)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T16:10:56.126409Z", + "start_time": "2018-12-06T16:10:56.096242Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "if do_pattern_classification:\n", + " print(\"******************LOWER HEMISPHERE******************\\n\")\n", + "\n", + " patternStatsLH = patternClassifierLH.getPatternStats()\n", + " fig = plt.figure(figsize=(15,15))\n", + " ax = fig.add_subplot(4,4,1)\n", + " sfields = [\"singles\", \"first singles\", \"clusters\"]\n", + " mfields = [\"doubles\", \"triples\", \"quads\"]\n", + " relativeOccurances = []\n", + " labels = []\n", + " for i, f in enumerate(sfields):\n", + " relativeOccurances.append(patternStatsLH[f])\n", + " labels.append(f)\n", + " for i, f in enumerate(mfields):\n", + " for k in range(len(patternStatsLH[f])):\n", + " relativeOccurances.append(patternStatsLH[f][k])\n", + " labels.append(f+\"(\"+str(k)+\")\")\n", + " relativeOccurances = np.array(relativeOccurances, np.float)\n", + " relativeOccurances/=np.sum(relativeOccurances)\n", + " pie = ax.pie(relativeOccurances, labels=labels, autopct='%1.1f%%', shadow=True)\n", + " ax.set_title(\"Pattern occurrence\")\n", + " # Set aspect ratio to be equal so that pie is drawn as a circle.\n", + " a = ax.axis('equal')\n", + "\n", + " smaps = [\"singlemap\", \"firstsinglemap\", \"clustermap\"]\n", + " for i, m in enumerate(smaps):\n", + "\n", + " ax = fig.add_subplot(4,4,2+i)\n", + "\n", + " pmap = ax.imshow(patternStatsLH[m], interpolation=\"nearest\", vmax=2*np.nanmedian(patternStatsLH[m]))\n", + " ax.set_title(m)\n", + " cb = fig.colorbar(pmap)\n", + "\n", + " mmaps = [\"doublemap\", \"triplemap\", \"quadmap\"]\n", + " k = 0\n", + " for i, m in enumerate(mmaps):\n", + "\n", + " for j in range(4):\n", + " ax = fig.add_subplot(4,4,2+len(smaps)+k)\n", + " pmap = ax.imshow(patternStatsLH[m][j], interpolation=\"nearest\", vmax=2*np.median(patternStatsLH[m][j]))\n", + " ax.set_title(m+\"(\"+str(j)+\")\")\n", + " cb = fig.colorbar(pmap)\n", + " k+=1" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T16:10:56.176160Z", + "start_time": "2018-12-06T16:10:56.127853Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "if do_pattern_classification:\n", + " patternStatsUH = patternClassifierUH.getPatternStats()\n", + " fig = plt.figure(figsize=(15,15))\n", + " ax = fig.add_subplot(4,4,1)\n", + " sfields = [\"singles\", \"first singles\", \"clusters\"]\n", + " mfields = [\"doubles\", \"triples\", \"quads\"]\n", + " relativeOccurances = []\n", + " labels = []\n", + " for i, f in enumerate(sfields):\n", + " relativeOccurances.append(patternStatsUH[f])\n", + " labels.append(f)\n", + " for i, f in enumerate(mfields):\n", + " for k in range(len(patternStatsUH[f])):\n", + " 
relativeOccurances.append(patternStatsUH[f][k])\n", + " labels.append(f+\"(\"+str(k)+\")\")\n", + " relativeOccurances = np.array(relativeOccurances, np.float)\n", + " relativeOccurances/=np.sum(relativeOccurances)\n", + " pie = ax.pie(relativeOccurances, labels=labels, autopct='%1.1f%%', shadow=True)\n", + " ax.set_title(\"Pattern occurrence\")\n", + " # Set aspect ratio to be equal so that pie is drawn as a circle.\n", + " a = ax.axis('equal')\n", + "\n", + " smaps = [\"singlemap\", \"firstsinglemap\", \"clustermap\"]\n", + " for i, m in enumerate(smaps):\n", + "\n", + " ax = fig.add_subplot(4,4,2+i)\n", + "\n", + " pmap = ax.imshow(patternStatsUH[m], interpolation=\"nearest\", vmax=2*np.nanmedian(patternStatsUH[m]))\n", + " ax.set_title(m)\n", + " cb = fig.colorbar(pmap)\n", + "\n", + " mmaps = [\"doublemap\", \"triplemap\", \"quadmap\"]\n", + " k = 0\n", + " for i, m in enumerate(mmaps):\n", + "\n", + " for j in range(4):\n", + " ax = fig.add_subplot(4,4,2+len(smaps)+k)\n", + " pmap = ax.imshow(patternStatsUH[m][j], interpolation=\"nearest\", vmax=np.median(patternStatsUH[m][j]))\n", + " ax.set_title(m+\"(\"+str(j)+\")\")\n", + " cb = fig.colorbar(pmap)\n", + " k+=1\n", + "\n", + " print(\"******************UPPER HEMISPHERE******************\\n\") " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T16:10:56.190150Z", + "start_time": "2018-12-06T16:10:56.177570Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "if do_pattern_classification:\n", + " t0 = PrettyTable()\n", + " t0.title = \"Total number of Counts after all corrections\"\n", + " t0.field_names = [\"Hemisphere\",\"Singles\", \"First-singles\", \"Clusters\"]\n", + " t0.add_row([\"LH\", patternStatsLH['singles'], patternStatsLH['first singles'], patternStatsLH['clusters']])\n", + " t0.add_row([\"UH\", patternStatsUH['singles'], patternStatsUH['first singles'], patternStatsUH['clusters']])\n", + "\n", + " print(t0)\n", + "\n", + " t1 = PrettyTable()\n", + "\n", + " t1.field_names = [\"Index\",\"D-LH\", \"D-UH\", \"T-LH\", \"T-UH\", \"Q-LH\", \"Q-UH\"]\n", + "\n", + " t1.add_row([0, patternStatsLH['doubles'][0], patternStatsUH['doubles'][0], patternStatsLH['triples'][0], patternStatsUH['triples'][0], patternStatsLH['quads'][0], patternStatsUH['quads'][0]])\n", + " t1.add_row([1, patternStatsLH['doubles'][1], patternStatsUH['doubles'][1], patternStatsLH['triples'][1], patternStatsUH['triples'][1], patternStatsLH['quads'][1], patternStatsUH['quads'][1]])\n", + " t1.add_row([2, patternStatsLH['doubles'][2], patternStatsUH['doubles'][2], patternStatsLH['triples'][2], patternStatsUH['triples'][2], patternStatsLH['quads'][2], patternStatsUH['quads'][2]])\n", + " t1.add_row([3, patternStatsLH['doubles'][3], patternStatsUH['doubles'][3], patternStatsLH['triples'][3], patternStatsUH['triples'][3], patternStatsLH['quads'][3], patternStatsUH['quads'][3]])\n", + "\n", + " print(t1)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T16:10:56.203219Z", + "start_time": "2018-12-06T16:10:56.191509Z" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "if do_pattern_classification:\n", + " doublesLH = patternStatsLH['doubles'][0] + patternStatsLH['doubles'][1] + patternStatsLH['doubles'][2] + patternStatsLH['doubles'][3]\n", + " triplesLH = patternStatsLH['triples'][0] + patternStatsLH['triples'][1] + patternStatsLH['triples'][2] + patternStatsLH['triples'][3]\n", + " 
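# sum the four orientations of each multi-pixel pattern, then totals per hemisphere\n", + "    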
quadsLH = patternStatsLH['quads'][0] + patternStatsLH['quads'][1] + patternStatsLH['quads'][2] + patternStatsLH['quads'][3]\n", + "    allsinglesLH = patternStatsLH['singles'] + patternStatsLH['first singles']\n", + "    eventsLH = allsinglesLH + doublesLH + triplesLH + quadsLH\n", + "\n", + "    doublesUH = patternStatsUH['doubles'][0] + patternStatsUH['doubles'][1] + patternStatsUH['doubles'][2] + patternStatsUH['doubles'][3]\n", + "    triplesUH = patternStatsUH['triples'][0] + patternStatsUH['triples'][1] + patternStatsUH['triples'][2] + patternStatsUH['triples'][3]\n", + "    quadsUH = patternStatsUH['quads'][0] + patternStatsUH['quads'][1] + patternStatsUH['quads'][2] + patternStatsUH['quads'][3]\n", + "    allsinglesUH = patternStatsUH['singles'] + patternStatsUH['first singles']\n", + "    eventsUH = allsinglesUH + doublesUH + triplesUH + quadsUH\n", + "\n", + "    if eventsLH > 0.:\n", + "        reloccurLH = np.array([allsinglesLH/eventsLH, doublesLH/eventsLH, triplesLH/eventsLH, quadsLH/eventsLH])\n", + "    else:\n", + "        reloccurLH = np.array([0]*4)\n", + "    if eventsUH > 0.:\n", + "        reloccurUH = np.array([allsinglesUH/eventsUH, doublesUH/eventsUH, triplesUH/eventsUH, quadsUH/eventsUH])\n", + "    else:\n", + "        reloccurUH = np.array([0]*4)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T16:10:56.212586Z", + "start_time": "2018-12-06T16:10:56.204731Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "if do_pattern_classification:\n", + "    fig = plt.figure(figsize=(10,5))\n", + "    ax = fig.add_subplot(1,2,1)\n", + "    labels = ['singles', 'doubles', 'triples', 'quads']\n", + "    pie = ax.pie(reloccurLH, labels=labels, autopct='%1.1f%%', shadow=True)\n", + "    ax.set_title(\"Pattern occurrence LH\")\n", + "    # Set aspect ratio to be equal so that pie is drawn as a circle.\n", + "    a = ax.axis('equal')\n", + "    ax = fig.add_subplot(1,2,2)\n", + "    pie = ax.pie(reloccurUH, labels=labels, autopct='%1.1f%%', shadow=True)\n", + "    ax.set_title(\"Pattern occurrence UH\")\n", + "    # Set aspect ratio to be equal so that pie is drawn as a circle.\n", + "    a = ax.axis('equal')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Mean Image of first Sequence ##" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T16:11:08.317130Z", + "start_time": "2018-12-06T16:11:05.788655Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "fig = xana.heatmapPlot(mean_im,\n", + "                       x_label='Columns', y_label='Rows',\n", + "                       lut_label='Signal (ADU)',\n", + "                       x_range=(0,y),\n", + "                       y_range=(0,x), vmin=-50, vmax=70000)\n", + "\n", + "if do_pattern_classification:\n", + "    fig = xana.heatmapPlot(mean_im_cc,\n", + "                           x_label='Columns', y_label='Rows',\n", + "                           lut_label='Signal (ADU)',\n", + "                           x_range=(0,y),\n", + "                           y_range=(0,x), vmin=-50, vmax=70000)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "## Single Shot of first Sequence ##" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "ExecuteTime": { + "end_time": "2018-12-06T16:11:10.908912Z", + "start_time": "2018-12-06T16:11:08.318486Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "fig = xana.heatmapPlot(single_im,\n", + "                       x_label='Columns', y_label='Rows',\n", + "                       lut_label='Signal (ADU)',\n", + "                       x_range=(0,y),\n", + "                       y_range=(0,x), vmin=-50, vmax=70000)\n", + "\n", + "if do_pattern_classification:\n", + "    fig = 
xana.heatmapPlot(single_im_cc,\n", + " x_label='Columns', y_label='Rows',\n", + " lut_label='Signal (ADU)',\n", + " x_range=(0,y),\n", + " y_range=(0,x), vmin=-50, vmax=70000)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true, + "scrolled": false + }, + "outputs": [], + "source": [ + "single = None\n", + "doubles = []\n", + "triples = []\n", + "quads = []\n", + "import copy\n", + "with h5py.File(\"{}/CORR-R{:04d}-PNCCD01-S{:05d}.h5\".format(out_folder, run, sequences[0]), 'r', driver='core') as infile:\n", + " data = infile[h5path+\"/pixels_classified\"][()].astype(np.float)\n", + " patterns = infile[h5path+\"/patterns\"][()].astype(np.float)\n", + "\n", + " i = 4\n", + " single = copy.copy(data[...])\n", + " single[patterns != 100] = np.nan\n", + " for d in range(200,204):\n", + " double = copy.copy(data[...])\n", + " double[patterns != d] = np.nan\n", + " doubles.append(double)\n", + " for d in range(300,304):\n", + " triple = copy.copy(data[...])\n", + " triple[patterns != d] = np.nan\n", + " triples.append(triple)\n", + " \n", + " for d in range(400,404):\n", + " quad = copy.copy(data[...])\n", + " quad[patterns != d] = np.nan\n", + " quads.append(quad)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false, + "scrolled": false + }, + "outputs": [], + "source": [ + "hA = 0\n", + "h, e = np.histogram(single.flatten(), bins=1000, range=(-500, 50000))\n", + "fig = plt.figure(figsize=(10,7))\n", + "ax = fig.add_subplot(111)\n", + "ax.step(e[:-1], h, color='blue', label='single pixel events')\n", + "ax.semilogy()\n", + "ax.set_xlabel(\"Energy (ADU)\")\n", + "ax.set_ylabel(\"Events\")\n", + "hA += h\n", + "h = 0\n", + "for double in doubles:\n", + " hd, e = np.histogram(double.flatten(), bins=1000, range=(-500, 50000))\n", + " h += hd\n", + " hA += hd\n", + " \n", + "ax.step(e[:-1], h, color='red', label='double split events')\n", + "\n", + "h = 0\n", + "for triple in triples:\n", + " hd, e = np.histogram(triple.flatten(), bins=1000, range=(-500, 50000))\n", + " h += hd\n", + " hA += hd\n", + " \n", + "ax.step(e[:-1], h, color='green', label='triple split events')\n", + "\n", + "h = 0\n", + "for quad in quads:\n", + " hd, e = np.histogram(quad.flatten(), bins=1000, range=(-500, 50000))\n", + " h += hd\n", + " hA += hd\n", + " \n", + "ax.step(e[:-1], h, color='purple', label='quadruple split events')\n", + "ax.step(e[:-1], hA, color='grey', label='all events')\n", + "\n", + "l = ax.legend()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.6" + }, + "latex_envs": { + "LaTeX_envs_menu_present": true, + "autocomplete": true, + "bibliofile": "biblio.bib", + "cite_by": "apalike", + "current_citInitial": 1, + "eqLabelWithNumbers": true, + "eqNumInitial": 1, + "hotkeys": { + "equation": "Ctrl-E", + "itemize": "Ctrl-I" + }, + "labels_anchors": false, + "latex_user_defs": false, + "report_style_numbering": false, + "user_envs_cfg": false + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/notebooks/pnCCD/pnCCDChar.ipynb 
b/notebooks/pnCCD/pnCCDChar.ipynb deleted file mode 100644 index f39d07f678d0edfbf175da1b542afd0a53db8b28..0000000000000000000000000000000000000000 --- a/notebooks/pnCCD/pnCCDChar.ipynb +++ /dev/null @@ -1,1675 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\"\"\"\n", - "First start an ipython cluster with\n", - "\n", - "ipcluster start -n NUM_CORES\n", - "\"\"\"\n", - "\n", - "%matplotlib inline\n", - "\n", - "\n", - "#profiler has to be the first import\n", - "\n", - "\n", - "\n", - "\n", - "import XFELDetAna.xfelprofiler as xprof\n", - "\n", - "profiler = xprof.Profiler()\n", - "profiler.disable()\n", - "\n", - "\n", - "#profiler.enable('OffsetCorrection')\n", - "\n", - "\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "\n", - "import XFELDetAna.xfelpyanatools as xana\n", - "import XFELDetAna.xfelpycaltools as xcal\n", - "\n", - "xcal.enableGPU=False\n", - "import reader as pnccd  # local reader module, aliased so the pnccd.getDataSize/readData calls below resolve" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n", - "filename = \"/gpfs/exfel/data/scratch/ruetert/esrf18/c12_id13_180425_003_000.darkcal\"\n", - "\n", - "\n", - "# the sensor-size which corresponds to your data in the file, for the data generator this specifies the image size\n", - "sensorSize = [128, 200] \n", - "blockSize = [32,8]\n", - "xcal.defaultBlockSize = blockSize\n", - "\n", - "\n", - "# the number of images to read per chunk, for the data generator this specifies the number of images to generate on \n", - "# each call\n", - "chunkSize = 100\n", - "\n", - "cpuCores = 8\n", - "memoryCells = 1\n", - "\n", - "nImages = pnccd.getDataSize(filename, '/frames')[2]\n", - "#nImages = pnccd.getDataSize(filename, '/dark/images/image')[2]\n", - "#nImages = pnccd.getDataSize(filename, '/dark/images/image')[0]\n", - "sigmaNoise = 5.\n", - "commonModeBlockSize = [32,50]\n", - "commonModeAxis = 'row'\n", - "profile = False" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "#pnccd.getDataSize(filename, '/dark/images/image')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "reader = xcal.ChunkReader(filename, pnccd.readData, nImages, chunkSize, path = \"/frames\", pixels_x = sensorSize[0],\n", - "                          pixels_y = sensorSize[1], simulated=False)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "offsetCal = xcal.OffsetCalculator(sensorSize, memoryCells, cores=cpuCores)\n", - "noiseCal = xcal.NoiseCalculator(sensorSize, memoryCells, cores=cpuCores, parallel=False)\n", - "histCalRaw = xcal.HistogramCalculator(sensorSize, bins=1000, range=[0, 10000], memoryCells=memoryCells, cores=cpuCores)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "# we can use the noise calculator to also retrieve the offset\n", - "for data in reader.readChunks():\n", - "    print(data.shape)\n", - "    noiseCal.fill(data)\n", - "    histCalRaw.fill(data)\n", - "    \n", - "#get noise and offset maps\n", - "offsetMap = noiseCal.getOffset()\n", - "noiseMap = noiseCal.get()\n", - "noiseCal.reset()\n", - "\n", - "#produce thresholds out of this\n", - "thresholds = 
xcal.thresholdsFromNoiseAndOffset(offsetMap, noiseMap, sigmaNoise)\n", - "thresholdsNoise = xcal.thresholdsFromNoiseAndOffset(np.zeros_like(offsetMap), noiseMap, sigmaNoise)\n", - "\n", - "offsetCal.setThresholds(thresholds = thresholds)\n", - "noiseCal.setThresholds(thresholds = thresholdsNoise)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "rawHist = histCalRaw.get()\n", - "\n", - "fig = plt.figure(figsize=(8,8))\n", - "ax = fig.add_subplot(111)\n", - "xana.histPlot(ax, rawHist, plot_errors=True, alpha=0.3)\n", - "t = ax.set_title(\"xfelpyanatools.plotHist\")\n", - "t = ax.set_xlabel(\"Intensity (ADU)\")\n", - "t = ax.set_ylabel(\"Counts\")\n", - "r = ax.set_xlim(2000,3700)\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "offsetCorrection = xcal.OffsetCorrection(sensorSize, offsetMap, nCells = memoryCells, runOnGPU=False, gpuZDim=4, keepOnGPU=False)\n", - "cmCorrection = xcal.CommonModeCorrection(sensorSize, commonModeBlockSize, commonModeAxis, nCells = memoryCells, stats=True, runOnGPU=False, noiseMap = noiseMap, gpuZDim=2, gpuStripeSize=2)\n", - "#cmCorrection.setThresholds(thresholds = thresholdsNoise)\n", - "\n", - "histCalCorrected = xcal.HistogramCalculator(sensorSize, bins=400, range=[-100, 300], memoryCells=memoryCells, cores=cpuCores)\n", - "histCalCMCorrected = xcal.HistogramCalculator(sensorSize, bins=400, range=[-100, 300], memoryCells=memoryCells, cores=cpuCores)\n", - "\n", - "\n", - "#statistics can be calculated either individually\n", - "meanCal = xcal.MeanCalculator(sensorSize, memoryCells, cores=cpuCores)\n", - "minCal = xcal.MinCalculator(sensorSize, memoryCells, cores=cpuCores)\n", - "maxCal = xcal.MaxCalculator(sensorSize, memoryCells, cores=cpuCores)\n", - "stdCal = xcal.StdCalculator(sensorSize, memoryCells, cores=cpuCores)\n", - "noiseCal.reset()\n", - "#or in one calculator\n", - "statsCalCorrected = xcal.StatCalculator(sensorSize, memoryCells, cores=cpuCores, runOnGPU=True, keepOnGPU=True)\n", - "dp = None\n", - "for data in reader.readChunks():\n", - " \n", - " offsetCal.fill(data)\n", - " meanCal.fill(data)\n", - " minCal.fill(data)\n", - " maxCal.fill(data)\n", - " stdCal.fill(data)\n", - " data = offsetCorrection.correct(data)\n", - " \n", - " histCalCorrected.fill(data)\n", - " data = cmCorrection.correct(data)\n", - " dp = data\n", - " histCalCMCorrected.fill(data)\n", - " noiseCal.fill(data)\n", - " #statsCalCorrected.fill(data)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "noiseMap = noiseCal.get()\n", - "offsetMap = offsetCal.get()\n", - "meanRaw = meanCal.get()\n", - "minRaw = minCal.get()\n", - "maxRaw = maxCal.get()\n", - "stdRaw = stdCal.get()\n", - "cmStats = cmCorrection.get()\n", - "#corStats = statsCalCorrected.get()\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n", - "fig = xana.heatmapPlot(noiseMap[:,:,0], lut_label=\"Noise (ADU)\", vmax=15)\n", - "fig = xana.heatmapPlot(offsetMap[:,:,0], lut_label=\"Offset (ADU)\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - 
"source": [ - "fig = xana.heatmapPlot(maxRaw[:,:,0], figsize=(7,7), lut_label=\"Max. pixel value (ADU)\")\n", - "\n", - "commonMode = cmCorrection.get()\n", - "\n", - "fig = plt.figure(figsize=(12,4))\n", - "ax = fig.add_subplot(121)\n", - "xana.histPlot(ax, commonMode, bins=50, plot_errors=True)\n", - "t = ax.set_title(\"Common mode distribution\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "cmStats.size\n", - "cmStats.shape[0]*cmStats.shape[1]*nImages\n", - "#sensorSize[0]/commonModeBlockSize[1]*sensorSize[1]*nImages" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "#cmStatConcat = cmStats.reshape(cmStats.shape[0]*cmStats.shape[1], cmStats.shape[2])#cmStats[0,:,:]\n", - "for i in range(0, cmStats.shape[0]//(sensorSize[1]//cmStats.shape[1])):\n", - " cmStatConcat = cmStats[i*(sensorSize[1]//cmStats.shape[1]):(i+1)*(sensorSize[1]//cmStats.shape[1]),:,:].reshape(sensorSize[1]//cmStats.shape[1]*cmStats.shape[1], cmStats.shape[2])\n", - " #cmStatConcat = np.concatenate((cmStatConcat, cmStats[i,:,:]) , axis=0)\n", - " fig = xana.heatmapPlot(cmStatConcat, figsize=(8,8), lut_label=\"Common mode(ADU)\", x_title = \"frame\", y_title = \"row\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n", - "rawHist = histCalRaw.get(cumulatative = True)\n", - "corHist = histCalCorrected.get(cumulatative = True)\n", - "cmCorHist = histCalCMCorrected.get(cumulatative = True)\n", - "\n", - "fig = plt.figure(figsize=(20,5))\n", - "ax = fig.add_subplot(131)\n", - "xana.histPlot(ax, rawHist, plot_errors=True)\n", - "t = ax.set_title(\"Raw data histogram\")\n", - "t = ax.set_xlabel(\"Intensity (ADU)\")\n", - "t = ax.set_ylabel(\"Counts\")\n", - "l = ax.set_xlim(2000,3700)\n", - "\n", - "ax = fig.add_subplot(132)\n", - "xana.histPlot(ax, corHist, plot_errors=True)\n", - "t = ax.set_title(\"Offset corrected data histogram\")\n", - "t = ax.set_xlabel(\"Intensity (ADU)\")\n", - "t = ax.set_ylabel(\"Counts\")\n", - "l = ax.set_xlim(-50,50)\n", - "\n", - "ax = fig.add_subplot(133)\n", - "xana.histPlot(ax, cmCorHist, plot_errors=True)\n", - "t = ax.set_title(\"Common mode corrected data histogram\")\n", - "t = ax.set_xlabel(\"Intensity (ADU)\")\n", - "t = ax.set_ylabel(\"Counts\")\n", - "l = ax.set_xlim(-50,50)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "fig = xana.heatmapPlot(corStats[\"max_map\"][:,:,0], fig_size=(7,7), lut_label=\"Max. pixel value (ADU)\", vmax=100)\n", - "fig = xana.heatmapPlot(corStats[\"min_map\"][:,:,0], fig_size=(7,7), lut_label=\"Max. 
pixel value (ADU)\", vmin=-50, vmax= 5)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "filename = \"/media/haufs/data1/frame2Hdf5/C14_07_23_131108_02_001.h5\"\n", - "#filename= \"/media/haufs/VERBATIM HD/pnCCD_forThesis/3/final.h5\"\n", - "\n", - "nImages = 5000#pnccd.getDataSize(filename, '/illuminated/images/image')[0]\n", - "#nImages = pnccd.getDataSize(filename, '/illuminated/images/image')[0]\n", - "\n", - "chunkSize = 500" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "reader = xcal.ChunkReader(filename, pnccd.readData, nImages, chunkSize, path = \"/frames\", pixels_x = sensorSize[0],\n", - " pixels_y = sensorSize[1], simulated=False)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "peakEstimates = [7500.,8250]\n", - "allowedSigmas =[100,100]\n", - "\n", - "#gainCal = xcal.GainMapCalculator(sensorSize, peakEstimates, allowedSigmas, noiseMap, memoryCells, cores=cpuCores, peakHeights = [100, 10],\n", - "# photonEnergy=5.9, ADUperPhoton=0.8)\n", - "cmCorrection = xcal.CommonModeCorrection(sensorSize, commonModeBlockSize, commonModeAxis, nCells = memoryCells, stats=True, runOnGPU=False, noiseMap = noiseMap, gpuZDim=2, keepOnGPU=True, gpuStripeSize=2)\n", - "patternClassifier = xcal.PatternClassifier(sensorSize, noiseMap, 150., 10., 3., tagFirstSingles = 4, nCells=memoryCells, cores=10, runOnGPU=True, )\n", - "#patternSelector = xcal.PatternSelecter(sensorSize, selectionList = [100, 101, 200, 201, 202, 203, 300, 301, 302, 303, 400, 401, 402, 403])\n", - "patternSelector = xcal.PatternSelecter(sensorSize, selectionList = [200, 201, 202, 203])\n", - "\n", - "#histCalNoCMCorrected = xcal.HistogramCalculator(sensorSize, bins=1000, range=[0, 10000], memoryCells=memoryCells, cores=cpuCores)\n", - "#histCalCMCorrected = xcal.HistogramCalculator(sensorSize, bins=1000, range=[0, 10000], memoryCells=memoryCells, cores=cpuCores)\n", - "#histCalCMCorrectedDoubles = xcal.HistogramCalculator(sensorSize, bins=1000, range=[0, 10000], memoryCells=memoryCells, cores=cpuCores)\n", - "#histCalCMCorrectedTriples = xcal.HistogramCalculator(sensorSize, bins=1000, range=[0, 10000], memoryCells=memoryCells, cores=cpuCores)\n", - "#histCalCMCorrectedQuads = xcal.HistogramCalculator(sensorSize, bins=1000, range=[0, 10000], memoryCells=memoryCells, cores=cpuCores)\n", - "\n", - "#histCalCMCorrectedDoublesUn = xcal.HistogramCalculator(sensorSize, bins=1000, range=[0, 10000], memoryCells=memoryCells, cores=cpuCores)\n", - "#histCalCMCorrectedTriplesUn = xcal.HistogramCalculator(sensorSize, bins=1000, range=[0, 10000], memoryCells=memoryCells, cores=cpuCores)\n", - "#histCalCMCorrectedQuadsUn = xcal.HistogramCalculator(sensorSize, bins=1000, range=[0, 10000], memoryCells=memoryCells, cores=cpuCores)\n", - "\n", - "\n", - "meanCal.reset()\n", - "histCalCorrected.reset()\n", - "histCalCMCorrected.reset()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from scipy.ndimage.filters import convolve\n", - "import copy\n", - "\n", - "kernel = {}\n", - "\n", - "kernel[\"203\"] = np.array([[0, 1, 0],\n", - " [0, 1, 0],\n", - " [0, 0, 0]], dtype=np.float)[:,:,np.newaxis]\n", - "\n", - "kernel[\"201\"] = np.array([[0, 0, 0],\n", - " [0, 1, 1],\n", - " [0, 0, 0]], 
dtype=np.float)[:,:,np.newaxis]\n", - "kernel[\"202\"] = np.array([[0, 0, 0],\n", - " [0, 1, 0 ],\n", - " [0, 1, 0]], dtype=np.float)[:,:,np.newaxis]\n", - "\n", - "\n", - "kernel[\"200\"] = np.array([[0, 0, 0],\n", - " [1, 1, 0 ],\n", - " [0, 0, 0]], dtype=np.float)[:,:,np.newaxis]\n", - "\n", - "\n", - "kernel[\"300\"] = np.array([[0, 0, 0],\n", - " [1, 1,0],\n", - " [0, 1, 0]], dtype=np.float)[:,:,np.newaxis]\n", - "\n", - "\n", - "kernel[\"301\"] = np.array([[0, 1, 0],\n", - " [1, 1,0],\n", - " [0, 0, 0]], dtype=np.float)[:,:,np.newaxis]\n", - "kernel[\"302\"] = np.array([[0, 1, 0],\n", - " [0, 1, 1 ],\n", - " [0, 0, 0]], dtype=np.float)[:,:,np.newaxis]\n", - "\n", - "\n", - "kernel[\"303\"] = np.array([[0, 0, 0],\n", - " [0, 1, 1 ],\n", - " [0, 1, 0]], dtype=np.float)[:,:,np.newaxis]\n", - "\n", - "\n", - "kernel[\"400\"] = np.array([[0, 0, 0],\n", - " [1, 1,0],\n", - " [1, 1, 0]], dtype=np.float)[:,:,np.newaxis]\n", - "\n", - "\n", - "kernel[\"401\"] = np.array([[1, 1, 0],\n", - " [1, 1,0],\n", - " [0, 0, 0]], dtype=np.float)[:,:,np.newaxis]\n", - "kernel[\"402\"] = np.array([[0, 1, 1],\n", - " [0, 1, 1 ],\n", - " [0, 0, 0]], dtype=np.float)[:,:,np.newaxis]\n", - "\n", - "\n", - "kernel[\"403\"] = np.array([[0, 0, 0],\n", - " [0, 1, 1 ],\n", - " [0, 1, 1]], dtype=np.float)[:,:,np.newaxis]\n", - "\n", - "\n", - "def separateDouble(pdat, alldat, p):\n", - " \n", - " cdat = np.zeros_like(alldat)\n", - " \n", - " kstack = kernel[str(p)]\n", - " idx = pdat == p\n", - " \n", - " #alldat[~idx]*=-1.\n", - " #r = convolve(alldat, kstack, mode = 'constant', cval=0)\n", - " \n", - " p1 = alldat[idx]\n", - " p2 = None\n", - "\n", - " if np.roll(kstack, 1, axis=0)[1,1] == 1:\n", - " idx = np.roll(pdat, -1, axis = 0) == p\n", - " p2 = alldat[idx]\n", - " if np.roll(kstack, -1, axis=0)[1,1] == 1:\n", - " idx = np.roll(pdat, 1, axis = 0) == p\n", - " p2 = alldat[idx]\n", - " if np.roll(kstack, 1, axis=1)[1,1] == 1:\n", - " idx = np.roll(pdat, -1, axis = 1) == p\n", - " p2 = alldat[idx]\n", - " if np.roll(kstack, -1, axis=1)[1,1] == 1:\n", - " idx = np.roll(pdat, 1, axis = 1) == p\n", - " p2 = alldat[idx]\n", - " \n", - " \n", - " \n", - " r = np.zeros((p1.shape[0], 2), np.float)\n", - " r[:,0] = np.abs(p1)-np.abs(p2)\n", - " r[:,1] = np.abs(p2)\n", - " \n", - " return r\n", - " #return cdat\n", - "\n", - "\n", - "\n", - "def uncorrectPattern(pdat, alldat, p):\n", - " \n", - " cdat = np.zeros_like(alldat)\n", - " \n", - " kstack = kernel[str(p)]\n", - " idx = pdat == p\n", - " \n", - " r = convolve(alldat, kstack, mode = 'constant', cval=0)\n", - " cdat[idx] = r[idx]\n", - " \n", - "\n", - " if np.roll(kstack, 1, axis=0)[1,1] == 1:\n", - " idx = np.roll(pdat, -1, axis = 0) == p\n", - " cdat[idx] = alldat[idx]\n", - " if np.roll(kstack, -1, axis=0)[1,1] == 1:\n", - " idx = np.roll(pdat, 1, axis = 0) == p\n", - " cdat[idx] = alldat[idx]\n", - " if np.roll(kstack, 1, axis=1)[1,1] == 1:\n", - " idx = np.roll(pdat, -1, axis = 1) == p\n", - " cdat[idx] = alldat[idx]\n", - " if np.roll(kstack, -1, axis=1)[1,1] == 1:\n", - " idx = np.roll(pdat, 1, axis = 1) == p\n", - " cdat[idx] = alldat[idx]\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " return cdat" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "#firstSinglesAll = np.zeros((sensorSize[0],sensorSize[1], nImages))\n", - 
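"# Orientation note for the loop below: each chunk is offset- and common-mode-corrected,\n", - "# split events are classified, and separateDouble() collects per-orientation charge\n", - "# pairs (|p1|-|p2|, |p2|) that are scatter-plotted and histogrammed further down.\n", - 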
"\n", - "dc = None\n", - "dnc = None\n", - "pp = None\n", - "up_doubles = None\n", - "down_doubles = None\n", - "right_doubles = None\n", - "left_doubles = None\n", - "for i,data in enumerate(reader.readChunks()):\n", - " \n", - " data = offsetCorrection.correct(data)\n", - " print(data.shape)\n", - " \n", - " meanCal.fill(data)\n", - " data = cmCorrection.correct(data)\n", - " \n", - " dnc = data\n", - " \n", - " data, patterns = patternClassifier.classify(data)\n", - " pp = patterns\n", - " \n", - " dc = data\n", - " histCalCorrected.fill(data)\n", - " \n", - " #singles, firstsingles, doubles_ud, doubles_lr, triples_0, triples_1 = patternSelector.select(data, patterns)\n", - " #classPatt = patternSelector.select(data, patterns)\n", - " up, right, down, left = patternSelector.select(data, patterns)\n", - " \n", - " if i == 0:\n", - " up_doubles = separateDouble(patterns, data, 200)\n", - " right_doubles = separateDouble(patterns, data, 201)\n", - " down_doubles = separateDouble(patterns, data, 202)\n", - " left_doubles = separateDouble(patterns, data, 203)\n", - " else:\n", - " up_doubles = np.concatenate((up_doubles, separateDouble(patterns, data, 200)))\n", - " right_doubles = np.concatenate((right_doubles, separateDouble(patterns, data, 201)))\n", - " down_doubles = np.concatenate((down_doubles, separateDouble(patterns, data, 202)))\n", - " left_doubles = np.concatenate((left_doubles, separateDouble(patterns, data, 203)))\n", - " \n", - " \n", - " #firstSinglesAll[:, :,i*chunkSize:(i+1)*chunkSize] = firstsingles[:,:,:]\n", - " #histCalCMCorrected.fill(classPatt[0])\n", - " #histCalCMCorrected.fill(classPatt[1])\n", - " #histCalCMCorrectedDoubles.fill(classPatt[2])\n", - " #histCalCMCorrectedDoubles.fill(classPatt[3])\n", - " #histCalCMCorrectedDoubles.fill(classPatt[4])\n", - " #histCalCMCorrectedDoubles.fill(classPatt[5])\n", - " \n", - " #histCalCMCorrectedDoublesUn.fill(uncorrectPattern(patterns, data, 200))\n", - " #histCalCMCorrectedDoublesUn.fill(uncorrectPattern(patterns, data, 201))\n", - " #histCalCMCorrectedDoublesUn.fill(uncorrectPattern(patterns, data, 202))\n", - " #histCalCMCorrectedDoublesUn.fill(uncorrectPattern(patterns, data, 203))\n", - " \n", - " #histCalCMCorrectedTriples.fill(classPatt[6])\n", - " #histCalCMCorrectedTriples.fill(classPatt[7])\n", - " #histCalCMCorrectedTriples.fill(classPatt[8])\n", - " #histCalCMCorrectedTriples.fill(classPatt[9])\n", - " \n", - " #histCalCMCorrectedTriplesUn.fill(uncorrectPattern(patterns, data, 300))\n", - " #histCalCMCorrectedTriplesUn.fill(uncorrectPattern(patterns, data, 301))\n", - " #histCalCMCorrectedTriplesUn.fill(uncorrectPattern(patterns, data, 302))\n", - " #histCalCMCorrectedTriplesUn.fill(uncorrectPattern(patterns, data, 303))\n", - " \n", - " #histCalCMCorrectedQuads.fill(classPatt[10])\n", - " #histCalCMCorrectedQuads.fill(classPatt[11])\n", - " #histCalCMCorrectedQuads.fill(classPatt[12])\n", - " #histCalCMCorrectedQuads.fill(classPatt[13])\n", - " \n", - " #histCalCMCorrectedQuadsUn.fill(uncorrectPattern(patterns, data, 400))\n", - " #histCalCMCorrectedQuadsUn.fill(uncorrectPattern(patterns, data, 401))\n", - " #histCalCMCorrectedQuadsUn.fill(uncorrectPattern(patterns, data, 402))\n", - " #histCalCMCorrectedQuadsUn.fill(uncorrectPattern(patterns, data, 403))\n", - " \n", - " \n", - " #gainCal.fill(firstsingles)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "plt.scatter(np.abs(right_doubles[:,1]), np.abs(right_doubles[:, 
0]))\n", - "plt.scatter(np.abs(left_doubles[:,0]), np.abs(left_doubles[:, 1]))\n", - "plt.semilogx()\n", - "plt.semilogy()\n", - "plt.xlim(10, 100000)\n", - "plt.ylim(10, 100000)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "h = np.histogram2d(np.concatenate((np.abs(right_doubles[:,1]), np.abs(left_doubles[:,0]))), np.concatenate((np.abs(right_doubles[:,0]), np.abs(left_doubles[:,1]))), bins=50)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "plt.imshow(np.log10(h[0]), interpolation=\"nearest\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "calCMCorrectedHist = histCalCMCorrected.get()\n", - "\n", - "fig = plt.figure(figsize=(8,8))\n", - "ax = fig.add_subplot(111)\n", - "xana.histPlot(ax, calCMCorrectedHist, plot_errors=True, alpha=0.3)\n", - "t = ax.set_title(\"xfelpyanatools.plotHist\")\n", - "t = ax.set_xlabel(\"Intensity (ADU)\")\n", - "t = ax.set_ylabel(\"Counts\")\n", - "r = ax.set_xlim(5000,8500)\n", - "r = ax.set_ylim(1, 1e5)\n", - "r = ax.semilogy()\n", - "\n", - "\n", - "#fig.savefig('/home/haufs/calPaper/pnccd_single_spec.png', dpi=600)\n", - "#fig.savefig('/home/haufs/calPaper/pnccd_single_spec.eps')\n", - "#fig.savefig('/home/haufs/calPaper/pnccd_single_spec.pdf')\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "calCMCorrectedHist = histCalCMCorrectedDoubles.get()\n", - "calCMCorrectedHistUn = histCalCMCorrectedDoublesUn.get()\n", - "fig = plt.figure(figsize=(8,8))\n", - "ax = fig.add_subplot(111)\n", - "xana.histPlot(ax, calCMCorrectedHist, plot_errors=True, alpha=0.3)\n", - "xana.histPlot(ax, calCMCorrectedHistUn, plot_errors=True, alpha=0.3)\n", - "t = ax.set_title(\"xfelpyanatools.plotHist\")\n", - "t = ax.set_xlabel(\"Intensity (ADU)\")\n", - "t = ax.set_ylabel(\"Counts\")\n", - "r = ax.set_xlim(5000,8500)\n", - "r = ax.set_ylim(1, 1e5)\n", - "r = ax.semilogy()\n", - "#fig.savefig('/home/haufs/calPaper/pnccd_double_spec.png', dpi=600)\n", - "#fig.savefig('/home/haufs/calPaper/pnccd_double_spec.eps')\n", - "#fig.savefig('/home/haufs/calPaper/pnccd_double_spec.pdf')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "calCMCorrectedHist = histCalCMCorrectedTriples.get()\n", - "calCMCorrectedHistUn = histCalCMCorrectedTriplesUn.get()\n", - "fig = plt.figure(figsize=(8,8))\n", - "ax = fig.add_subplot(111)\n", - "xana.histPlot(ax, calCMCorrectedHist, plot_errors=True, alpha=0.3)\n", - "xana.histPlot(ax, calCMCorrectedHistUn, plot_errors=True, 
alpha=0.3)\n", - "t = ax.set_title(\"xfelpyanatools.plotHist\")\n", - "t = ax.set_xlabel(\"Intensity (ADU)\")\n", - "t = ax.set_ylabel(\"Counts\")\n", - "r = ax.set_xlim(5000,8500)\n", - "r = ax.set_ylim(1, 1e5)\n", - "r = ax.semilogy()\n", - "#fig.savefig('/home/haufs/calPaper/pnccd_triple_spec.png', dpi=600)\n", - "#fig.savefig('/home/haufs/calPaper/pnccd_triple_spec.eps')\n", - "#\n", - "fig.savefig('/home/haufs/calPaper/pnccd_triple_spec.pdf')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "import seaborn as sb\n", - "from seaborn import plt as pplt\n", - "sb.set(style=\"ticks\")\n", - "\n", - "fig = pplt.figure(figsize = (10, 4.5))\n", - "ax = fig.add_subplot(111)\n", - "\n", - "h,e,c,s = histCalCMCorrected.get()\n", - "\n", - "np.savez('/home/haufs/TonnSaves/sim3/singles', h, e, c)\n", - "\n", - "yerr=np.sqrt(h)\n", - "ax.errorbar(c, h, yerr=yerr, elinewidth=1, barsabove=True, drawstyle='steps-mid', label='singles')\n", - "\n", - "\n", - "\n", - "h,e,c,s = histCalCMCorrectedDoubles.get()\n", - "\n", - "np.savez('/home/haufs/TonnSaves/sim3/doubles', h, e, c)\n", - "yerr=np.sqrt(h)\n", - "ax.errorbar(c, h/2, yerr=yerr, elinewidth=1, barsabove=True, drawstyle='steps-mid', label='doubles (corrected)')\n", - "\n", - "h,e,c,s = histCalCMCorrectedDoublesUn.get()\n", - "\n", - "np.savez('/home/haufs/TonnSaves/sim3/uncor_doubles', h, e, c)\n", - "\n", - "yerr=np.sqrt(h)\n", - "ax.errorbar(c, h, yerr=yerr, elinewidth=1, barsabove=True, drawstyle='steps-mid', label='doubles (uncorrected)')\n", - "\n", - "h,e,c,s = histCalCMCorrectedTriples.get()\n", - "\n", - "np.savez('/home/haufs/TonnSaves/sim3/triples', h, e, c)\n", - "\n", - "yerr=np.sqrt(h)\n", - "ax.errorbar(c, h/3, yerr=yerr, elinewidth=1, barsabove=True, drawstyle='steps-mid', label='triples (corrected)')\n", - "\n", - "h,e,c,s = histCalCMCorrectedTriplesUn.get()\n", - "\n", - "np.savez('/home/haufs/TonnSaves/sim3/uncor_triples', h, e, c)\n", - "\n", - "yerr=np.sqrt(h)\n", - "ax.errorbar(c, h, yerr=yerr, elinewidth=1, barsabove=True, drawstyle='steps-mid', label='triples (uncorrected)')\n", - "\n", - "\n", - "h,e,c,s = histCalCMCorrectedQuads.get()\n", - "\n", - "np.savez('/home/haufs/TonnSaves/sim3/quads', h, e, c)\n", - "\n", - "yerr=np.sqrt(h)\n", - "ax.errorbar(c, h/3, yerr=yerr, elinewidth=1, barsabove=True, drawstyle='steps-mid', label='quads (corrected)')\n", - "\n", - "h,e,c,s = histCalCMCorrectedQuadsUn.get()\n", - "\n", - "np.savez('/home/haufs/TonnSaves/sim3/uncor_quads', h, e, c)\n", - "\n", - "yerr=np.sqrt(h)\n", - "ax.errorbar(c, h, yerr=yerr, elinewidth=1, barsabove=True, drawstyle='steps-mid', label='quads (uncorrected)')\n", - "\n", - "\n", - "ax.legend(loc=0)\n", - "\n", - "plt.xlim(5000, 9000)\n", - "plt.ylim(1, 100000)\n", - "plt.semilogy()\n", - "\n", - "ax.set_xlabel('E (ADU)')\n", - "ax.set_ylabel('counts')\n", - "\n", - "#fig.savefig('/home/haufs/calPaper/pat_correct_pnCCD.png', dpi=600)\n", - "#fig.savefig('/home/haufs/calPaper/pat_correct_pnCCD.pdf')\n", - "#fig.savefig('/home/haufs/calPaper/pat_correct_pnCCD.eps')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from matplotlib.patches import Rectangle\n", - "\n", - "fig = pplt.figure(figsize = (10, 4.5))\n", - "dnct = np.zeros_like(dnc)\n", - "dnct[dnc > 6500] = dnc[dnc > 6500]\n", - "\n", - "\n", - "dct = np.zeros_like(dc)\n", - "dct[dc > 6500] = dc[dc > 6500]\n", - "\n", - "ax 
= fig.add_subplot(121)\n", - "\n", - "ax.imshow(dnct[16:64,0:48,0], interpolation='nearest', vmin=6500, vmax =9000, cmap='OrRd' )\n", - "r = Rectangle((2,2), 20, 5, facecolor='white', edgecolor='black')\n", - "ax.add_patch(r)\n", - "ax.text(4,5, \"%i photons total\"%np.count_nonzero(dnct[:,:,0]))\n", - "\n", - "ax = fig.add_subplot(122)\n", - "ax.imshow(dct[16:64,0:48,0], interpolation='nearest', vmin=6500, vmax =9000, cmap='OrRd' )\n", - "r = Rectangle((2,2), 20, 5, facecolor='white', edgecolor='black')\n", - "ax.add_patch(r)\n", - "ax.text(3.5,5, \"%i photons total\"%np.count_nonzero(dct[:,:,0]))\n", - "fig.savefig('/home/haufs/calPaper/pat_correct_pnCCD_count.png', dpi=600)\n", - "fig.savefig('/home/haufs/calPaper/pat_correct_pnCCD_count.pdf')\n", - "fig.savefig('/home/haufs/calPaper/pat_correct_pnCCD_count.eps')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "patternStats = patternClassifier.getPatternStats()\n", - "fig = plt.figure(figsize=(15,15))\n", - "ax = fig.add_subplot(4,4,1)\n", - "sfields = [\"singles\", \"first singles\", \"clusters\"]\n", - "mfields = [\"doubles\", \"triples\", \"quads\"]\n", - "relativeOccurances = []\n", - "labels = []\n", - "for i, f in enumerate(sfields):\n", - "    relativeOccurances.append(patternStats[f])\n", - "    labels.append(f)\n", - "for i, f in enumerate(mfields):\n", - "    for k in range(len(patternStats[f])):\n", - "        relativeOccurances.append(patternStats[f][k])\n", - "        labels.append(f+\"(\"+str(k)+\")\")\n", - "relativeOccurances = np.array(relativeOccurances, np.float)\n", - "relativeOccurances/=np.sum(relativeOccurances)\n", - "pie = ax.pie(relativeOccurances, labels=labels, autopct='%1.1f%%', shadow=True)\n", - "ax.set_title(\"Pattern occurrence\")\n", - "# Set aspect ratio to be equal so that pie is drawn as a circle.\n", - "a = ax.axis('equal')\n", - "\n", - "smaps = [\"singlemap\", \"firstsinglemap\", \"clustermap\"]\n", - "for i, m in enumerate(smaps):\n", - "\n", - "    ax = fig.add_subplot(4,4,2+i)\n", - "\n", - "    pmap = ax.imshow(patternStats[m], interpolation=\"nearest\")\n", - "    ax.set_title(m)\n", - "    cb = fig.colorbar(pmap)\n", - "\n", - "mmaps = [\"doublemap\", \"triplemap\", \"quadmap\"]\n", - "k = 0\n", - "for i, m in enumerate(mmaps):\n", - "\n", - "    for j in range(4):\n", - "        ax = fig.add_subplot(4,4,2+len(smaps)+k)\n", - "        pmap = ax.imshow(patternStats[m][j], interpolation=\"nearest\")\n", - "        ax.set_title(m+\"(\"+str(j)+\")\")\n", - "        cb = fig.colorbar(pmap)\n", - "        k+=1\n", - "\n", - "relativeOccurances" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "relGainMap, summedWeights = gainCal.get()\n", - "\n", - "fig = xana.heatmapPlot(np.rot90(np.mean(relGainMap, axis=2)), figsize=(20,10), lut_label = \"Relative gain\")\n", - "fig = xana.heatmapPlot(np.rot90(np.mean(summedWeights, axis=2)), figsize=(20,10), lut_label = \"Summed weights\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "import prettyplotlib as pplt\n", - "import copy\n", - "from brewer2mpl import sequential\n", - "fig = plt.figure(figsize=(5,5))\n", - "ax = fig.add_subplot(111)\n", - "from scipy.stats import linregress\n", - "indices = np.arange(0,50,5)\n", - "for r in indices:\n", - "    xcoords1 = []\n", - "    ycoords1 = 
[]\n", - " xcoords2 = []\n", - " ycoords2 = []\n", - " for i in range(firstSinglesAll.shape[1]):\n", - " idx = firstSinglesAll[r, i, :] > 7000. \n", - " idx2 = firstSinglesAll[r, i, :] < 7800.\n", - " idx = idx & idx2\n", - " \n", - " [xcoords1.append(k) for k in (np.ones(np.count_nonzero(idx))*i).tolist()]\n", - " [ycoords1.append(k) for k in firstSinglesAll[r, i, idx].tolist()]\n", - " \n", - " \n", - " pplt.scatter(ax, np.ones(np.count_nonzero(idx))*i, firstSinglesAll[r, i, idx], color='grey', alpha=0.3)\n", - " \n", - " idx = firstSinglesAll[r, i, :] > 7800. \n", - " idx2 = firstSinglesAll[r, i, :] < 9000.\n", - " idx = idx & idx2\n", - " [xcoords2.append(k) for k in (np.ones(np.count_nonzero(idx))*i).tolist()]\n", - " [ycoords2.append(k) for k in firstSinglesAll[r, i, idx].tolist()]\n", - " \n", - " pplt.scatter(ax, np.ones(np.count_nonzero(idx))*i, firstSinglesAll[r, i, idx], color='grey', alpha=0.3)\n", - " \n", - " \n", - " \n", - " m,b,r,p,e = linregress(np.array(xcoords1), np.array(ycoords1))\n", - " x = np.arange(sensorSize[1])\n", - " y = m*x+b\n", - " l, = pplt.plot(ax, x, y)\n", - " \n", - " m,b,r,p,e = linregress(np.array(xcoords2), np.array(ycoords2))\n", - " x = np.arange(sensorSize[1])\n", - " y = m*x+b\n", - " pplt.plot(ax, x, y, color = l.get_color())\n", - "ax.set_xlim(0, 200)\n", - "ax.set_ylim(7000, 8800)\n", - "ax.set_xlabel('row number')\n", - "ax.set_ylabel('pixel value (ADU)')\n", - "fig.savefig('/home/haufs/calPaper/pnccd_rel_gain_scatter.png', dpi=600)\n", - "fig.savefig('/home/haufs/calPaper/pnccd_rel_gain_scatter.eps')\n", - "fig.savefig('/home/haufs/calPaper/pnccd_rel_gain_scatter.pdf')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "import seaborn as sb\n", - "from seaborn import plt as pplt\n", - "sb.set(style=\"ticks\")\n", - "import copy\n", - "from brewer2mpl import sequential\n", - "from scipy.stats import linregress\n", - "ctis = np.zeros(firstSinglesAll.shape[0])\n", - "relGain = np.zeros(firstSinglesAll.shape[0])\n", - "pvals = np.zeros(firstSinglesAll.shape[0])\n", - "for r in range(firstSinglesAll.shape[0]):\n", - " xcoords1 = []\n", - " ycoords1 = []\n", - " xcoords2 = []\n", - " ycoords2 = []\n", - " for i in range(firstSinglesAll.shape[1]):\n", - " idx = firstSinglesAll[r, i, :] > 7000. 
\n", - " idx2 = firstSinglesAll[r, i, :] < 7800.\n", - " idx = idx & idx2\n", - " \n", - " [xcoords1.append(k) for k in (np.ones(np.count_nonzero(idx))*i).tolist()]\n", - " [ycoords1.append(k) for k in firstSinglesAll[r, i, idx].tolist()]\n", - " \n", - " \n", - " \n", - " \n", - " m,b,r2,p,e = linregress(np.array(xcoords1), np.array(ycoords1))\n", - " ctis[r] = -1*m/b\n", - " relGain[r] = b\n", - " pvals[r] = p\n", - " \n", - "relGain/=np.mean(relGain[np.isfinite(relGain)])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "fig = plt.figure(figsize=(5,2.5))\n", - "ax = fig.add_subplot(111)\n", - "l = pplt.plot(np.arange(126)+1, ctis[1:-1], drawstyle='steps-mid')\n", - "ax.semilogy()\n", - "ax.set_xlim(0,128)\n", - "ax.set_xlabel('column number')\n", - "ax.set_ylabel('CTI')\n", - "fig.savefig('/home/haufs/calPaper/pnccd_cti.png', dpi=600)\n", - "fig.savefig('/home/haufs/calPaper/pnccd_cti.eps')\n", - "fig.savefig('/home/haufs/calPaper/pnccd_cti.pdf')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "fig = plt.figure(figsize=(5,3))\n", - "ax = fig.add_subplot(111)\n", - "l = pplt.plot(np.arange(128)+1, relGain, drawstyle='steps-mid')\n", - "#ax.semilogy()\n", - "ax.set_xlim(0,128)\n", - "ax.set_xlabel('column number')\n", - "ax.set_ylabel('relative gain')\n", - "fig.savefig('/home/haufs/calPaper/pnccd_relGain.png', dpi=600)\n", - "fig.savefig('/home/haufs/calPaper/pnccd_relGain.eps')\n", - "fig.savefig('/home/haufs/calPaper/pnccd_relGain.pdf')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "ctiMap = np.zeros((firstSinglesAll.shape[0], firstSinglesAll.shape[1]))\n", - "for k in range(firstSinglesAll.shape[1]):\n", - " ctiMap[:,k] = (1-ctis)**k" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "h = plt.hist(ctiMap.flatten(), 100, range=(0.95, 1.0))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "firstSinglesAllCTICorr = firstSinglesAll/ctiMap[:,:,np.newaxis]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "#cti correction with trap saturation\n", - "from numba import double\n", - "from numba.decorators import jit\n", - "\n", - "firstSinglesAllCTICorr2 = np.zeros_like(firstSinglesAllCTICorr)\n", - "res = np.zeros(firstSinglesAll.shape[1], dtype=np.double)\n", - "@jit('void(double[:,:,:], double[:,:,:], double[:])',nopython=True)\n", - "def ctiWithSat(firstSinglesAll, firstSinglesAllCTICorr2, res):\n", - " for im in range(firstSinglesAll.shape[2]):\n", - " for c in range(firstSinglesAll.shape[0]):\n", - " res[:] = 0.\n", - " for r in range(firstSinglesAll.shape[1]):\n", - " \n", - " val = firstSinglesAll[c,r,im]\n", - " for rr in range(r,0,-1):\n", - " \n", - " v2 = val/(1.-ctis[c]) - res[rr]\n", - " res[rr] += val*ctis[c]\n", - " val = v2\n", - " firstSinglesAllCTICorr2[c,r,im] = val\n", - " \n", - "ctiWithSat(firstSinglesAll, firstSinglesAllCTICorr2, res)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - 
"metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "firstSinglesAllRelGainCorr = firstSinglesAllCTICorr/relGain[:,np.newaxis,np.newaxis]\n", - "firstSinglesAllM = meanCal.get()\n", - "firstSinglesFlatfieldCorr = np.mean(firstSinglesAllM)*firstSinglesAll/firstSinglesAllM\n", - "#firstSinglesAllRelGainCorr2 = firstSinglesAllCTICorr2/relGain[:,np.newaxis,np.newaxis]\n", - "firstSinglesAllM.shape" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "fig = plt.figure(figsize=(5,5))\n", - "ax = fig.add_subplot(111)\n", - "\n", - "h,e = np.histogram(firstSinglesAllRelGainCorr.flatten(), 500, range=(5000, 8500))\n", - "c = (e[:-1]+e[1:])/2.\n", - "pplt.plot(c, h, label='uncorrected', color='green', drawstyle='steps-mid', linewidth=1)\n", - "\n", - "h,e = np.histogram(firstSinglesFlatfieldCorr.flatten(), 500, range=(5000, 8500))\n", - "c = (e[:-1]+e[1:])/2.\n", - "pplt.plot(c, h, label='uncorrected', color='red', drawstyle='steps-mid', linewidth=1)\n", - "ax.semilogy()\n", - "\n", - "h,e = np.histogram(firstSinglesAll.flatten(), 500, range=(5000, 8500))\n", - "c = (e[:-1]+e[1:])/2.\n", - "pplt.plot(c, h, label='uncorrected', color='blue', drawstyle='steps-mid', linewidth=1)\n", - "ax.semilogy()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "fig = plt.figure(figsize=(5,5))\n", - "ax = fig.add_subplot(111)\n", - "\n", - "h,e = np.histogram(firstSinglesAll.flatten(), 500, range=(7000, 8500))\n", - "c = (e[:-1]+e[1:])/2.\n", - "pplt.plot(ax, c, h, label='uncorrected', color='red', drawstyle='steps-mid', linewidth=1)\n", - "\n", - "h,e = np.histogram((firstSinglesAllCTICorr).flatten(), 500, range=(7000, 8500))\n", - "c = (e[:-1]+e[1:])/2.\n", - "pplt.plot(ax, c, h, label='CTI', color='orange', drawstyle='steps-mid', linewidth=1)\n", - "#h,e = np.histogram((firstSinglesAll/relGain[:,np.newaxis,np.newaxis]).flatten(), 500, range=(0, 1000))\n", - "#c = (e[:-1]+e[1:])/2.\n", - "#pplt.plot(ax, c, h, label='RARC', color='blue', drawstyle='steps-mid', linewidth=5)\n", - "\n", - "\n", - "h,e = np.histogram(firstSinglesAllRelGainCorr.flatten(), 500, range=(7000, 8500))\n", - "c = (e[:-1]+e[1:])/2.\n", - "pplt.plot(ax, c, h, label='CTI + RARC', color='green', drawstyle='steps-mid', linewidth=3)\n", - "\n", - "h,e = np.histogram(firstSinglesAllRelGainCorr2.flatten(), 500, range=(7000, 8500))\n", - "c = (e[:-1]+e[1:])/2.\n", - "pplt.plot(ax, c, h, label='CTI (SAT) + RARC', color='black', drawstyle='steps-mid',linewidth=1)\n", - "\n", - "\n", - "\n", - "#h = pplt.hist(ax, firstSinglesAll.flatten(), 100, range=(7000, 8500), log=True, alpha=0.5, label='uncorrected', color='red')\n", - "#h = pplt.hist(ax, firstSinglesAllRelGainCorr.flatten(), 100, range=(7000, 8500), log=True, alpha=0.8, label='CTI + RARC', color='green')\n", - "#h = pplt.hist(ax, firstSinglesAllRelGainCorr2.flatten(), 100, range=(7000, 8500), log=True, alpha=0.8, label='CTI (TSAT) + RARC', color='blue')\n", - "ax.set_xlabel('energy (ADU)')\n", - "ax.set_ylabel('counts')\n", - "plt.legend(loc=0)\n", - "ax.set_ylim(10, 1500)\n", - "#ax.set_xlim(100, 350)\n", - "ax.semilogy()\n", - "fig.savefig('/home/haufs/calPaper/pnccd_relGainCor.png', dpi=600)\n", - "fig.savefig('/home/haufs/calPaper/pnccd_relGainCor.eps')\n", - 
"fig.savefig('/home/haufs/calPaper/pnccd_relGainCor.pdf')\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "\n", - "from iminuit import Minuit\n", - "import inspect\n", - "\n", - "\n", - "hist ,e = np.histogram(firstSinglesAll.flatten(), 500, range=(7000, 8500))\n", - "bin_centres = (e[:-1]+e[1:])/2.\n", - "\n", - "hnonzeros = hist != 0 \n", - "\n", - "\n", - "def multiGauss(x, *p):\n", - " r = 0.\n", - " for i in range(2):\n", - " A, mu, sigma = p[i*3:(i+1)*3]\n", - " r += A*np.exp(-(x-mu)**2/(2.*sigma**2))\n", - " return r\n", - " \n", - "def multiGaussWrap(A1, mu1, sigma1, A2, mu2, sigma2):\n", - " return np.sum(((multiGauss(bin_centres[hnonzeros],A1, mu1, sigma1, A2, mu2, sigma2)\n", - " - hist[hnonzeros]) / np.sqrt(hist[hnonzeros])) ** 2)\n", - "\n", - "\n", - "pparm = dict(throw_nan=False, pedantic=False)\n", - " \n", - "\n", - "pparm['A1'] = 1000.\n", - "pparm['mu1'] = 7400.\n", - "pparm['sigma1'] = 100.\n", - "\n", - "pparm['A2'] = 120.\n", - "pparm['mu2'] = 8200.\n", - "pparm['sigma2'] = 100.\n", - "\n", - "\n", - "\n", - " \n", - " \n", - "m = Minuit(multiGaussWrap, **pparm)\n", - "fmin = m.migrad()\n", - "#ferr = m.hesse()\n", - "res = m.values\n", - " \n", - "\n", - "\n", - "x = np.arange(7000, 8500)\n", - "p = [res['A1'], res['mu1'], res['sigma1'], res['A2'], res['mu2'], res['sigma2']]\n", - "y = multiGauss(x, *p)\n", - "\n", - "fig = plt.figure(figsize=(5,5))\n", - "ax = fig.add_subplot(111)\n", - "pplt.plot(ax, bin_centres, hist, label='data (uncorrected)', drawstyle='steps-mid')\n", - "pplt.plot(ax, x, y, label='fit: $\\chi^2/\\mathrm{d.o.f.}=%0.1f$'%(getattr(fmin[0], 'fval')/float(np.count_nonzero(hnonzeros)-3)), color='red', linewidth=2)\n", - "\n", - "\n", - "def gauss(x, *p):\n", - " import numpy as np\n", - " A, mu, sigma = p\n", - " return A*np.exp(-(x-mu)**2/(2.*sigma**2))\n", - "\n", - "\n", - "\n", - "ax.set_xlabel('energy (ADU)')\n", - "ax.set_ylabel('counts')\n", - "\n", - "#ax.text(110, 5000, '$\\chi^2/\\mathrm{d.o.f.}=%0.1f$'%(getattr(fmin[0], 'fval')/float(np.count_nonzero(hnonzeros)-3)))\n", - "plt.legend(loc=0)\n", - "ax.set_ylim(1, 1000)\n", - "#ax.set_xlim(100, 350)\n", - "ax.semilogy()\n", - "\n", - "fig.savefig('/home/haufs/calPaper/pnccd_fit_uncor.png', dpi=600)\n", - "fig.savefig('/home/haufs/calPaper/pnccd_fit_uncor.eps')\n", - "fig.savefig('/home/haufs/calPaper/pnccd_fit_uncor.pdf')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from iminuit import Minuit\n", - "import inspect\n", - "\n", - "\n", - "hist ,e = np.histogram(firstSinglesAllRelGainCorr.flatten(), 500, range=(7000, 8500))\n", - "bin_centres = (e[:-1]+e[1:])/2.\n", - "\n", - "hnonzeros = hist != 0 \n", - "\n", - "\n", - "def multiGauss(x, *p):\n", - " r = 0.\n", - " for i in range(2):\n", - " A, mu, sigma = p[i*3:(i+1)*3]\n", - " r += A*np.exp(-(x-mu)**2/(2.*sigma**2))\n", - " return r\n", - " \n", - "def multiGaussWrap(A1, mu1, sigma1, A2, mu2, sigma2):\n", - " return np.sum(((multiGauss(bin_centres[hnonzeros],A1, mu1, sigma1, A2, mu2, sigma2)\n", - " - hist[hnonzeros]) / np.sqrt(hist[hnonzeros])) ** 2)\n", - "\n", - "\n", - "pparm = dict(throw_nan=False, pedantic=False)\n", - " \n", - "\n", - "pparm['A1'] = 1000.\n", - "pparm['mu1'] = 7400.\n", - "pparm['sigma1'] = 100.\n", - "\n", - "pparm['A2'] = 120.\n", - "pparm['mu2'] = 8200.\n", - "pparm['sigma2'] = 100.\n", - "\n", - "\n", - "\n", - " \n", - " \n", 
- "m = Minuit(multiGaussWrap, **pparm)\n", - "fmin = m.migrad()\n", - "#ferr = m.hesse()\n", - "res = m.values\n", - " \n", - "\n", - "\n", - "x = np.arange(7000, 8500)\n", - "p = [res['A1'], res['mu1'], res['sigma1'], res['A2'], res['mu2'], res['sigma2']]\n", - "y = multiGauss(x, *p)\n", - "\n", - "fig = plt.figure(figsize=(5,5))\n", - "ax = fig.add_subplot(111)\n", - "pplt.plot(ax, bin_centres, hist, label='data (CTI+RARC)', drawstyle='steps-mid')\n", - "pplt.plot(ax, x, y, label='fit: $\\chi^2/\\mathrm{d.o.f.}=%0.1f$'%(getattr(fmin[0], 'fval')/float(np.count_nonzero(hnonzeros)-3)), color='red', linewidth=2)\n", - "\n", - "\n", - "def gauss(x, *p):\n", - " import numpy as np\n", - " A, mu, sigma = p\n", - " return A*np.exp(-(x-mu)**2/(2.*sigma**2))\n", - "\n", - "\n", - "\n", - "ax.set_xlabel('energy (ADU)')\n", - "ax.set_ylabel('counts')\n", - "\n", - "#ax.text(110, 5000, '$\\chi^2/\\mathrm{d.o.f.}=%0.1f$'%(getattr(fmin[0], 'fval')/float(np.count_nonzero(hnonzeros)-3)))\n", - "plt.legend(loc=0)\n", - "ax.set_ylim(1, 1000)\n", - "#ax.set_xlim(100, 350)\n", - "ax.semilogy()\n", - "\n", - "fig.savefig('/home/haufs/calPaper/pnccd_fit_cor.png', dpi=600)\n", - "fig.savefig('/home/haufs/calPaper/pnccd_fit_cor.eps')\n", - "fig.savefig('/home/haufs/calPaper/pnccd_fit_cor.pdf')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from iminuit import Minuit\n", - "import inspect\n", - "\n", - "\n", - "hist ,e = np.histogram(firstSinglesAllRelGainCorr2.flatten(), 500, range=(7000, 8500))\n", - "bin_centres = (e[:-1]+e[1:])/2.\n", - "\n", - "hnonzeros = hist != 0 \n", - "\n", - "\n", - "def multiGauss(x, *p):\n", - " r = 0.\n", - " for i in range(2):\n", - " A, mu, sigma = p[i*3:(i+1)*3]\n", - " r += A*np.exp(-(x-mu)**2/(2.*sigma**2))\n", - " return r\n", - " \n", - "def multiGaussWrap(A1, mu1, sigma1, A2, mu2, sigma2):\n", - " return np.sum(((multiGauss(bin_centres[hnonzeros],A1, mu1, sigma1, A2, mu2, sigma2)\n", - " - hist[hnonzeros]) / np.sqrt(hist[hnonzeros])) ** 2)\n", - "\n", - "\n", - "pparm = dict(throw_nan=False, pedantic=False)\n", - " \n", - "\n", - "pparm['A1'] = 1000.\n", - "pparm['mu1'] = 7400.\n", - "pparm['sigma1'] = 100.\n", - "\n", - "pparm['A2'] = 120.\n", - "pparm['mu2'] = 8200.\n", - "pparm['sigma2'] = 100.\n", - "\n", - "\n", - "\n", - " \n", - " \n", - "m = Minuit(multiGaussWrap, **pparm)\n", - "fmin = m.migrad()\n", - "ferr = m.hesse()\n", - "ferr = m.minos()\n", - "res = m.values\n", - " \n", - "\n", - "\n", - "x = np.arange(7000, 8500)\n", - "p = [res['A1'], res['mu1'], res['sigma1'], res['A2'], res['mu2'], res['sigma2']]\n", - "y = multiGauss(x, *p)\n", - "\n", - "fig = plt.figure(figsize=(5,5))\n", - "ax = fig.add_subplot(111)\n", - "pplt.plot(ax, bin_centres, hist, label='data (CTI+RARC)', drawstyle='steps-mid')\n", - "pplt.plot(ax, x, y, label='fit: $\\chi^2/\\mathrm{d.o.f.}=%0.1f$'%(getattr(fmin[0], 'fval')/float(np.count_nonzero(hnonzeros)-3)), color='red', linewidth=2)\n", - "\n", - "\n", - "def gauss(x, *p):\n", - " import numpy as np\n", - " A, mu, sigma = p\n", - " return A*np.exp(-(x-mu)**2/(2.*sigma**2))\n", - "\n", - "\n", - "\n", - "ax.set_xlabel('energy (ADU)')\n", - "ax.set_ylabel('counts')\n", - "\n", - "#ax.text(110, 5000, '$\\chi^2/\\mathrm{d.o.f.}=%0.1f$'%(getattr(fmin[0], 'fval')/float(np.count_nonzero(hnonzeros)-3)))\n", - "plt.legend(loc=0)\n", - "ax.set_ylim(1, 1000)\n", - "#ax.set_xlim(100, 350)\n", - "ax.semilogy()\n", - "\n", - 
"fig.savefig('/home/haufs/calPaper/pnccd_fit_cor2.png', dpi=600)\n", - "fig.savefig('/home/haufs/calPaper/pnccd_fit_cor2.eps')\n", - "fig.savefig('/home/haufs/calPaper/pnccd_fit_cor2.pdf')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "F > eps/E\n", - "F > 3.6eV" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "1./(0.115/3.6)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "3.6/500" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [] - } - ], - "metadata": { - "celltoolbar": "Edit Metadata", - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.6" - } - }, - "nbformat": 4, - "nbformat_minor": 0 -}