{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# ePIX Data Correction ##\n",
    "\n",
    "Authors: Q. Tian S. Hauf, Version 1.0\n",
    "\n",
    "The following notebook provides Offset correction of images acquired with the ePix100 detector."
   ]
  },
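  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Schematically (a summary of the correction steps implemented below, with symbols chosen here for illustration), each raw pixel value $d$ is corrected as\n",
    "\n",
    "$$d' = \\frac{d - O}{g}, \\qquad d'' = \\frac{d'}{E_\\gamma} \\quad \\text{if } E_\\gamma > 0,$$\n",
    "\n",
    "where $O$ is the dark offset map, $g$ the relative gain map (unity if gain correction is disabled) and $E_\\gamma$ the photon energy used to express the result in photon counts."
   ]
  },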
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-12-06T15:54:23.218849Z",
     "start_time": "2018-12-06T15:54:23.166497Z"
    }
   },
   "outputs": [],
   "source": [
    "cluster_profile = \"noDB\" # ipcluster profile to use\n",
    "in_folder = \"/gpfs/exfel/exp/MID/201930/p900071/raw\" # input folder, required\n",
    "out_folder = '/gpfs/exfel/data/scratch/karnem/test/' # output folder, required\n",
    "sequences = [-1] # sequences to correct, set to -1 for all, range allowed\n",
    "run = 466 # which run to read data from, required\n",
    "\n",
    "karabo_id = \"MID_EXP_EPIX-1\" # karabo karabo_id\n",
    "karabo_da = \"DA01\"  # data aggregators\n",
    "receiver_id = \"RECEIVER\" # inset for receiver devices\n",
    "path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # the template to use to access data\n",
    "h5path = '/INSTRUMENT/{}/DET/{}:daqOutput/data/image' # path in the HDF5 file to images\n",
    "h5path_t = '/INSTRUMENT/{}/DET/{}:daqOutput/data/backTemp'  # path to find temperature at\n",
    "h5path_cntrl = '/CONTROL/{}/DET'  # path to control data\n",
    "\n",
    "use_dir_creation_date = True # date constants injected before directory creation time\n",
    "cal_db_interface = \"tcp://max-exfl016:8015#8025\" # calibration DB interface to use\n",
    "cal_db_timeout = 300000 # timeout on caldb requests\n",
    "\n",
    "cpuCores = 4 # Specifies the number of running cpu cores\n",
    "chunk_size_idim = 1 # H5 chunking size of output data\n",
    "overwrite = True # overwrite output folder\n",
    "limit_images = 0 # process only first N images, 0 - process all\n",
    "db_module = \"ePix100_M15\" # module id in the database\n",
    "sequences_per_node = 1 # number of sequence files per cluster node if run as slurm job, set to 0 to not run SLURM parallel\n",
    "bias_voltage = 200 # bias voltage\n",
    "in_vacuum = False # detector operated in vacuum\n",
    "fix_temperature = 290. # fix temperature to this value\n",
    "gain_photon_energy = 9.0 # Photon energy used for gain calibration\n",
    "photon_energy = 8.0 # Photon energy to calibrate in number of photons, 0 for calibration in keV  \n",
    "no_relative_gain = False # do not do gain correction\n",
    "split_evt_primary_threshold = 7. # primary threshold for split event correction\n",
    "split_evt_secondary_threshold = 5. # secondary threshold for split event correction\n",
    "split_evt_mip_threshold = 1000. # minimum ionizing particle threshold\n",
    "    \n",
    "def balance_sequences(in_folder, run, sequences, sequences_per_node, karabo_da):\n",
    "    from xfel_calibrate.calibrate import balance_sequences as bs\n",
    "    return bs(in_folder, run, sequences, sequences_per_node, karabo_da)"
   ]
  },
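  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The code cell above holds the notebook parameters; as is the convention for notebooks run through the `xfel_calibrate` framework, these are exposed as command-line arguments, and `balance_sequences` is used to distribute the sequence files over SLURM nodes (`sequences_per_node` files at a time)."
   ]
  },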
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-12-06T15:54:23.455376Z",
     "start_time": "2018-12-06T15:54:23.413579Z"
    }
   },
   "outputs": [],
   "source": [
    "import XFELDetAna.xfelprofiler as xprof\n",
    "\n",
    "profiler = xprof.Profiler()\n",
    "profiler.disable()\n",
    "from XFELDetAna.util import env\n",
    "\n",
    "env.iprofile = cluster_profile\n",
    "\n",
    "import warnings\n",
    "\n",
    "warnings.filterwarnings('ignore')\n",
    "\n",
    "from XFELDetAna import xfelpyanatools as xana\n",
    "from XFELDetAna import xfelpycaltools as xcal\n",
    "from XFELDetAna.plotting.util import prettyPlotting\n",
    "\n",
    "prettyPlotting=True\n",
    "import copy\n",
    "import os\n",
    "import time\n",
    "\n",
    "import h5py\n",
    "import numpy as np\n",
    "from cal_tools.tools import get_constant_from_db, get_dir_creation_date\n",
    "from iCalibrationDB import Conditions, ConstantMetaData, Constants, Detectors, Versions\n",
    "from iCalibrationDB.detectors import DetectorTypes\n",
    "from XFELDetAna.detectors.fastccd import readerh5 as fastccdreaderh5\n",
    "from XFELDetAna.xfelreaders import ChunkReader\n",
    "\n",
    "%matplotlib inline\n",
    "\n",
    "if sequences[0] == -1:\n",
    "    sequences = None\n",
    "\n",
    "h5path = h5path.format(karabo_id, receiver_id)\n",
    "h5path_t = h5path_t.format(karabo_id, receiver_id)\n",
    "h5path_cntrl = h5path_cntrl.format(karabo_id)\n",
    "\n",
    "plot_unit = 'ADU'\n",
    "if not no_relative_gain:\n",
    "    plot_unit = 'keV'\n",
    "    if photon_energy>0:\n",
    "            plot_unit = '$\\gamma$'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-12-06T15:54:23.679069Z",
     "start_time": "2018-12-06T15:54:23.662821Z"
    }
   },
   "outputs": [],
   "source": [
    "x = 708  # rows of the ePix100\n",
    "y = 768  # columns of the ePix100\n",
    "    \n",
    "ped_dir = \"{}/r{:04d}\".format(in_folder, run)\n",
    "fp_name = path_template.format(run, karabo_da)\n",
    "fp_path = '{}/{}'.format(ped_dir, fp_name)\n",
    "\n",
    "print(\"Reading from: {}\".format(fp_path))\n",
    "print(\"Run is: {}\".format(run))\n",
    "print(\"HDF5 path: {}\".format(h5path))\n",
    "print(\"Data is output to: {}\".format(out_folder))\n",
    "\n",
    "import datetime\n",
    "\n",
    "creation_time = None\n",
    "if use_dir_creation_date:\n",
    "    creation_time = get_dir_creation_date(in_folder, run)\n",
    "if creation_time:\n",
    "    print(\"Using {} as creation time\".format(creation_time.isoformat()))    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-12-06T15:54:23.913269Z",
     "start_time": "2018-12-06T15:54:23.868910Z"
    },
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "sensorSize = [x, y]\n",
    "chunkSize = 100 #Number of images to read per chunk\n",
    "blockSize = [sensorSize[0]//2, sensorSize[1]//2] # Sensor area will be analysed according to blocksize\n",
    "xcal.defaultBlockSize = blockSize\n",
    "memoryCells = 1 # ePIX has no memory cell\n",
    "run_parallel = True\n",
    "\n",
    "\n",
    "filename = fp_path.format(sequences[0] if sequences else 0)\n",
    "with h5py.File(filename, 'r') as f:\n",
    "    integration_time = int(f['{}/CONTROL/expTime/value'.format(h5path_cntrl)][0])\n",
    "    temperature = np.mean(f[h5path_t])/100.\n",
    "    temperature_k = temperature + 273.15\n",
    "    if fix_temperature != 0:\n",
    "        temperature_k = fix_temperature\n",
    "        print(\"Temperature is fixed!\")\n",
    "    print(\"Bias voltage is {} V\".format(bias_voltage))\n",
    "    print(\"Detector integration time is set to {}\".format(integration_time))\n",
    "    print(\"Mean temperature was {:0.2f} °C / {:0.2f} K at beginning of run\".format(temperature, temperature_k))\n",
    "    print(\"Operated in vacuum: {} \".format(in_vacuum))\n",
    "\n",
    "if not os.path.exists(out_folder):\n",
    "    os.makedirs(out_folder)\n",
    "elif not overwrite:\n",
    "    raise AttributeError(\"Output path exists! Exiting\")    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-12-06T15:54:24.088948Z",
     "start_time": "2018-12-06T15:54:24.059925Z"
    }
   },
   "outputs": [],
   "source": [
    "dirlist = sorted(os.listdir(ped_dir))\n",
    "file_list = []\n",
    "total_sequences = 0\n",
    "fsequences = []\n",
    "for entry in dirlist:\n",
    "\n",
    "    #only h5 file\n",
    "    abs_entry = \"{}/{}\".format(ped_dir, entry)\n",
    "    if os.path.isfile(abs_entry) and os.path.splitext(abs_entry)[1] == \".h5\":\n",
    "        \n",
    "        if sequences is None:\n",
    "            for seq in range(len(dirlist)):\n",
    "                \n",
    "                if path_template.format(run, karabo_da).format(seq) in abs_entry:\n",
    "                    file_list.append(abs_entry)\n",
    "                    total_sequences += 1\n",
    "                    fsequences.append(seq)\n",
    "        else:\n",
    "            for seq in sequences:\n",
    "                \n",
    "                if path_template.format(run, karabo_da).format(seq) in abs_entry:\n",
    "                    file_list.append(os.path.abspath(abs_entry))\n",
    "                    total_sequences += 1\n",
    "                    fsequences.append(seq)\n",
    "sequences = fsequences"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-12-06T18:43:39.776018Z",
     "start_time": "2018-12-06T18:43:39.759185Z"
    }
   },
   "outputs": [],
   "source": [
    "import tabulate\n",
    "from IPython.display import HTML, Latex, Markdown, display\n",
    "\n",
    "print(\"Processing a total of {} sequence files\".format(total_sequences))\n",
    "table = []\n",
    "\n",
    "\n",
    "for k, f in enumerate(file_list):\n",
    "    table.append((k, f))\n",
    "if len(table):  \n",
    "    md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=[\"#\", \"file\"])))      "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "As a first step, dark maps have to be loaded."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-12-06T15:54:28.254544Z",
     "start_time": "2018-12-06T15:54:24.709521Z"
    }
   },
   "outputs": [],
   "source": [
    "dclass=\"ePix100\"\n",
    "const_name = \"Offset\"\n",
    "offsetMap = None\n",
    "temp_limits = 5.\n",
    "\n",
    "# Offset\n",
    "det = getattr(Detectors, db_module)\n",
    "dconstants = getattr(Constants, dclass)\n",
    "\n",
    "condition =  Conditions.Dark.ePix100(bias_voltage=bias_voltage,\n",
    "                                     integration_time=integration_time,\n",
    "                                     temperature=temperature_k,\n",
    "                                     in_vacuum=in_vacuum)\n",
    "\n",
    "for parm in condition.parameters:\n",
    "        if parm.name == \"Sensor Temperature\":\n",
    "            parm.lower_deviation = temp_limits\n",
    "            parm.upper_deviation = temp_limits\n",
    "\n",
    "\n",
    "\n",
    "offsetMap = get_constant_from_db(karabo_id, karabo_da,\n",
    "                                 getattr(dconstants, const_name)(),\n",
    "                                 condition,\n",
    "                                 None,\n",
    "                                 cal_db_interface,\n",
    "                                 creation_time=creation_time,\n",
    "                                 print_once=2,\n",
    "                                 timeout=cal_db_timeout)\n",
    "\n",
    "# Noise\n",
    "const_name = \"Noise\"\n",
    "condition =  Conditions.Dark.ePix100(bias_voltage=bias_voltage,\n",
    "                                     integration_time=integration_time,\n",
    "                                     temperature=temperature_k,\n",
    "                                     in_vacuum=in_vacuum)\n",
    "\n",
    "noiseMap = get_constant_from_db(karabo_id, karabo_da,\n",
    "                                 getattr(dconstants, const_name)(),\n",
    "                                 condition,\n",
    "                                 None,\n",
    "                                 cal_db_interface,\n",
    "                                 creation_time=creation_time,\n",
    "                                 print_once=2,\n",
    "                                 timeout=cal_db_timeout)\n",
    "\n",
    "# Gain\n",
    "if not no_relative_gain:\n",
    "    const_name = \"RelativeGain\"\n",
    "    condition =  Conditions.Illuminated.ePix100(photon_energy=gain_photon_energy,\n",
    "                                         bias_voltage=bias_voltage,\n",
    "                                         integration_time=integration_time,\n",
    "                                         temperature=temperature_k,\n",
    "                                         in_vacuum=in_vacuum)\n",
    "\n",
    "    gainMap = get_constant_from_db(karabo_id, karabo_da,\n",
    "                                     getattr(dconstants, const_name)(),\n",
    "                                     condition,\n",
    "                                     None,\n",
    "                                     cal_db_interface,\n",
    "                                     creation_time=creation_time,\n",
    "                                     print_once=2,\n",
    "                                     timeout=cal_db_timeout)\n",
    "    \n",
    "    if gainMap is None:\n",
    "        print(\"Gain map requested, but not found\")\n",
    "        print(\"No gain correction will be applied\")\n",
    "        no_relative_gain = True\n",
    "        plot_unit = 'ADU'\n",
    "        gainMap = np.ones(sensorSize, np.float32)\n",
    "\n",
    "else:\n",
    "    gainMap = np.ones(sensorSize, np.float32)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-12-06T15:54:28.771629Z",
     "start_time": "2018-12-06T15:54:28.346051Z"
    }
   },
   "outputs": [],
   "source": [
    "#************************Calculators************************#\n",
    "\n",
    "offsetCorrection = xcal.OffsetCorrection(sensorSize, \n",
    "                                          offsetMap, \n",
    "                                          nCells = memoryCells, \n",
    "                                          cores=cpuCores, gains=None,\n",
    "                                          blockSize=blockSize,\n",
    "                                          parallel=run_parallel)\n",
    "\n",
    "gainCorrection =  xcal.RelativeGainCorrection(\n",
    "            sensorSize, \n",
    "            1. / gainMap[..., None],\n",
    "            nCells=memoryCells,\n",
    "            parallel=run_parallel,\n",
    "            cores=cpuCores,\n",
    "            blockSize=blockSize,\n",
    "            gains=None)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-12-06T16:08:51.886343Z",
     "start_time": "2018-12-06T16:08:51.842837Z"
    }
   },
   "outputs": [],
   "source": [
    "#*****************Histogram Calculators******************#\n",
    "\n",
    "histCalOffsetCor = xcal.HistogramCalculator(sensorSize, \n",
    "                                             bins=1050, \n",
    "                                             range=[-50, 1000], parallel=run_parallel,\n",
    "                                             nCells=memoryCells, \n",
    "                                             cores=cpuCores,\n",
    "                                             blockSize=blockSize)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Applying corrections"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-12-06T16:08:53.042555Z",
     "start_time": "2018-12-06T16:08:53.034522Z"
    }
   },
   "outputs": [],
   "source": [
    "histCalOffsetCor.debug()\n",
    "offsetCorrection.debug()\n",
    "gainCorrection.debug()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "#************************Calculators************************#\n",
    "\n",
    "commonModeBlockSize = [x//2, y//2]\n",
    "commonModeAxisR = 'row'\n",
    "cmCorrection = xcal.CommonModeCorrection([x, y], \n",
    "                                         commonModeBlockSize, \n",
    "                                         commonModeAxisR,\n",
    "                                         nCells = memoryCells, \n",
    "                                         noiseMap = noiseMap,\n",
    "                                        runParallel=True,\n",
    "                                        stats=True)\n",
    "\n",
    "patternClassifier = xcal.PatternClassifier([x, y], \n",
    "                                            noiseMap, \n",
    "                                            split_evt_primary_threshold, \n",
    "                                            split_evt_secondary_threshold,\n",
    "                                            split_evt_mip_threshold,\n",
    "                                            tagFirstSingles = 0, \n",
    "                                            nCells=memoryCells, \n",
    "                                            cores=cpuCores, \n",
    "                                            allowElongated = False,\n",
    "                                            blockSize=[x, y],\n",
    "                                            runParallel=True)\n",
    "\n",
    "\n",
    "histCalCMCor = xcal.HistogramCalculator(sensorSize, \n",
    "                                        bins=1050, \n",
    "                                        range=[-50, 1000], parallel=run_parallel,\n",
    "                                        nCells=memoryCells, \n",
    "                                        cores=cpuCores,\n",
    "                                        blockSize=blockSize)\n",
    "\n",
    "histCalSECor = xcal.HistogramCalculator(sensorSize, \n",
    "                                        bins=1050, \n",
    "                                        range=[-50, 1000], parallel=run_parallel,\n",
    "                                        nCells=memoryCells, \n",
    "                                        cores=cpuCores,\n",
    "                                        blockSize=blockSize)"
   ]
  },
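  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Note that the split-event thresholds are expressed in multiples of the per-pixel noise: in the (currently disabled) classification branch of the correction loop below, pixels with $d < t_{\\mathrm{primary}} \\cdot \\sigma_{\\mathrm{noise}}$ are zeroed before the classified data is written out."
   ]
  },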
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "cmCorrection.debug()\n",
    "patternClassifier.debug()\n",
    "histCalCMCor.debug()\n",
    "histCalSECor.debug()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-12-06T16:08:53.551111Z",
     "start_time": "2018-12-06T16:08:53.531064Z"
    }
   },
   "outputs": [],
   "source": [
    "def copy_and_sanitize_non_cal_data(infile, outfile, h5base):\n",
    "    \"\"\" Copy and sanitize data in `infile` that is not touched by `correctEPIX`\n",
    "    \"\"\"\n",
    "   \n",
    "    if h5base.startswith(\"/\"):\n",
    "        h5base = h5base[1:]\n",
    "    dont_copy = ['pixels']\n",
    "    dont_copy = [h5base+\"/{}\".format(do)\n",
    "                for do in dont_copy]\n",
    "\n",
    "    def visitor(k, item):\n",
    "        if k not in dont_copy:\n",
    "            if isinstance(item, h5py.Group):\n",
    "                outfile.create_group(k)\n",
    "            elif isinstance(item, h5py.Dataset):\n",
    "                group = str(k).split(\"/\")\n",
    "                group = \"/\".join(group[:-1])\n",
    "                infile.copy(k, outfile[group])\n",
    "        \n",
    "    infile.visititems(visitor)"
   ]
  },
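  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A minimal, self-contained sketch (purely illustrative, using hypothetical in-memory files that are not part of the correction pipeline) of what the function skips: every node under `<h5base>/pixels` is omitted, everything else is copied verbatim."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Hypothetical demo data, kept entirely in memory (backing_store=False)\n",
    "with h5py.File('demo_in.h5', 'w', driver='core', backing_store=False) as fin, \\\n",
    "     h5py.File('demo_out.h5', 'w', driver='core', backing_store=False) as fout:\n",
    "    base = 'INSTRUMENT/DEMO/DET/data/image'\n",
    "    fin.create_dataset(base + '/pixels', data=np.zeros((2, 4, 4)))\n",
    "    fin.create_dataset(base + '/trainId', data=np.arange(2))\n",
    "    copy_and_sanitize_non_cal_data(fin, fout, base)\n",
    "    print(list(fout[base]))  # 'pixels' is skipped, only 'trainId' survives"
   ]
  },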
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-12-06T16:10:55.917179Z",
     "start_time": "2018-12-06T16:09:01.603633Z"
    }
   },
   "outputs": [],
   "source": [
    "for k, f in enumerate(file_list):\n",
    "    with h5py.File(f, 'r') as infile:\n",
    "        out_fileb = \"{}/{}\".format(out_folder, f.split(\"/\")[-1])\n",
    "        out_file = out_fileb.replace(\"RAW\", \"CORR\")\n",
    "        #out_filed = out_fileb.replace(\"RAW\", \"CORR-SC\")\n",
    "        data = None\n",
    "        with h5py.File(out_file, \"w\") as ofile:\n",
    "            try:\n",
    "                copy_and_sanitize_non_cal_data(infile, ofile, h5path)\n",
    "                data = infile[h5path+\"/pixels\"][()]\n",
    "                data = np.compress(np.any(data>0, axis=(1,2)), data, axis=0)\n",
    "                if limit_images > 0:\n",
    "                    data = data[:limit_images,...]\n",
    "                    \n",
    "                oshape = data.shape\n",
    "                data = np.moveaxis(data, 0, 2)\n",
    "                ddset = ofile.create_dataset(h5path+\"/pixels\",\n",
    "                                             oshape,\n",
    "                                             chunks=(chunk_size_idim, oshape[1], oshape[2]),\n",
    "                                             dtype=np.float32)\n",
    "\n",
    "                data = offsetCorrection.correct(data.astype(np.float32)) #correct for the offset\n",
    "                if not no_relative_gain:\n",
    "                    data = gainCorrection.correct(data.astype(np.float32)) #correct for the gain\n",
    "                    if photon_energy>0:\n",
    "                        data /= photon_energy\n",
    "                \n",
    "                histCalOffsetCor.fill(data)\n",
    "                ddset[...] = np.moveaxis(data, 2, 0)\n",
    "            except Exception as e:\n",
    "                print(\"Couldn't calibrate data in {}: {}\".format(f, e))\n",
    "        \n",
    "        if False:\n",
    "            with h5py.File(out_file, \"a\") as ofiled:\n",
    "                try:\n",
    "                    #copy_and_sanitize_non_cal_data(infile, ofiled, h5path)\n",
    "\n",
    "                    ddsetcm = ofiled.create_dataset(h5path+\"/pixels_cm\",\n",
    "                                         oshape,\n",
    "                                         chunks=(chunk_size_idim, oshape[1], oshape[2]),\n",
    "                                         dtype=np.float32)\n",
    "\n",
    "                    ddsetc = ofiled.create_dataset(h5path+\"/pixels_classified\",\n",
    "                                         oshape,\n",
    "                                         chunks=(chunk_size_idim, oshape[1], oshape[2]),\n",
    "                                         dtype=np.float32, compression=\"gzip\")\n",
    "\n",
    "                    ddsetp = ofiled.create_dataset(h5path+\"/patterns\",\n",
    "                                                 oshape,\n",
    "                                                 chunks=(chunk_size_idim, oshape[1], oshape[2]),\n",
    "                                                 dtype=np.int32, compression=\"gzip\")\n",
    "\n",
    "\n",
    "                    data = cmCorrection.correct(data) #correct for the row common mode\n",
    "                    histCalCMCor.fill(data)\n",
    "                    ddsetcm[...] = np.moveaxis(data, 2, 0)\n",
    "\n",
    "                    data, patterns = patternClassifier.classify(data)\n",
    "\n",
    "\n",
    "                    data[data < (split_evt_primary_threshold*noiseMap)] = 0\n",
    "                    ddsetc[...] = np.moveaxis(data, 2, 0)\n",
    "                    ddsetp[...] = np.moveaxis(patterns, 2, 0)\n",
    "\n",
    "                    data[patterns != 100] = np.nan\n",
    "                    histCalSECor.fill(data)\n",
    "                except Exception as e:\n",
    "                    print(\"Couldn't calibrate data in {}: {}\".format(f, e))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-12-06T16:13:12.889583Z",
     "start_time": "2018-12-06T16:13:11.122653Z"
    }
   },
   "outputs": [],
   "source": [
    "ho,eo,co,so = histCalOffsetCor.get()\n",
    "\n",
    "d = [{'x': co,\n",
    "      'y': ho,\n",
    "      'y_err': np.sqrt(ho[:]),\n",
    "      'drawstyle': 'steps-mid',\n",
    "      'errorstyle': 'bars',\n",
    "      'errorcoarsing': 2,\n",
    "      'label': 'Offset corr.'\n",
    "     },\n",
    "    \n",
    "     ]\n",
    "if False:\n",
    "    ho,eo,co,so = histCalCMCor.get()\n",
    "    d.append({'x': co,\n",
    "          'y': ho,\n",
    "          'y_err': np.sqrt(ho[:]),\n",
    "          'drawstyle': 'steps-mid',\n",
    "          'errorstyle': 'bars',\n",
    "          'errorcoarsing': 2,\n",
    "          'label': 'CM corr.'\n",
    "           })\n",
    "\n",
    "    ho,eo,co,so = histCalSECor.get()\n",
    "    d.append({'x': co,\n",
    "          'y': ho,\n",
    "          'y_err': np.sqrt(ho[:]),\n",
    "          'drawstyle': 'steps-mid',\n",
    "          'errorstyle': 'bars',\n",
    "          'errorcoarsing': 2,\n",
    "          'label': 'Single split events'\n",
    "           })\n",
    "\n",
    "\n",
    "fig = xana.simplePlot(d, aspect=1, x_label='Energy({})'.format(plot_unit), \n",
    "                      y_label='Number of occurrences', figsize='2col',\n",
    "                      y_log=True, x_range=(-50,500),\n",
    "                      legend='top-center-frame-2col')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Mean Image of last Sequence ##"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-12-06T16:11:08.317130Z",
     "start_time": "2018-12-06T16:11:05.788655Z"
    }
   },
   "outputs": [],
   "source": [
    "fig = xana.heatmapPlot(np.nanmedian(data, axis=2),\n",
    "                       x_label='Columns', y_label='Rows',\n",
    "                       lut_label='Signal ({})'.format(plot_unit),\n",
    "                       x_range=(0,y),\n",
    "                       y_range=(0,x), vmin=-50, vmax=50)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": true
   },
   "source": [
    "## Single Shot of last Sequence ##"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-12-06T16:11:10.908912Z",
     "start_time": "2018-12-06T16:11:08.318486Z"
    }
   },
   "outputs": [],
   "source": [
    "fig = xana.heatmapPlot(data[...,0],\n",
    "                       x_label='Columns', y_label='Rows',\n",
    "                       lut_label='Signal ({})'.format(plot_unit),\n",
    "                       x_range=(0,y),\n",
    "                       y_range=(0,x), vmin=-50, vmax=50)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.7"
  },
  "latex_envs": {
   "LaTeX_envs_menu_present": true,
   "autocomplete": true,
   "bibliofile": "biblio.bib",
   "cite_by": "apalike",
   "current_citInitial": 1,
   "eqLabelWithNumbers": true,
   "eqNumInitial": 1,
   "hotkeys": {
    "equation": "Ctrl-E",
    "itemize": "Ctrl-I"
   },
   "labels_anchors": false,
   "latex_user_defs": false,
   "report_style_numbering": false,
   "user_envs_cfg": false
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}