{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# AGIPD Offline Correction #\n",
    "\n",
    "Author: European XFEL Detector Group, Version: 1.0\n",
    "\n",
    "Offline Calibration for the AGIPD Detector"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-21T11:30:06.730220Z",
     "start_time": "2019-02-21T11:30:06.658286Z"
    }
   },
   "outputs": [],
   "source": [
    "cluster_profile = \"noDB\"\n",
    "in_folder = \"/gpfs/exfel/exp/MID/201931/p900107/raw\" # the folder to read data from, required\n",
    "out_folder =  \"/gpfs/exfel/data/scratch/ahmedk/test/AGIPD_Corr\"  # the folder to output to, required\n",
    "sequences =  [-1] # sequences to correct, set to -1 for all, range allowed\n",
    "modules = [-1] # modules to correct, set to -1 for all, range allowed\n",
    "run = 11 # runs to process, required\n",
    "\n",
    "karabo_id = \"MID_DET_AGIPD1M-1\" # karabo karabo_id\n",
    "karabo_da = [-1]  # data aggregators\n",
    "receiver_id = \"{}CH0\" # inset for receiver devices\n",
    "path_template = 'RAW-R{:04d}-{}-S{:05d}.h5' # the template to use to access data\n",
    "h5path = 'INSTRUMENT/{}/DET/{}:xtdf/' # path in the HDF5 file to images\n",
    "h5path_idx = 'INDEX/{}/DET/{}:xtdf/' # path in the HDF5 file to images\n",
    "h5path_ctrl = '/CONTROL/{}/MDL/FPGA_COMP_TEST' # path to control information\n",
    "karabo_id_control = \"SPB_IRU_AGIPD1M1\" # karabo-id for control device\n",
    "karabo_da_control = 'DA02' # karabo DA for control infromation\n",
    "\n",
    "use_dir_creation_date = True # use the creation data of the input dir for database queries\n",
    "cal_db_interface = \"tcp://max-exfl016:8015#8045\" # the database interface to use\n",
    "cal_db_timeout = 30000 # in milli seconds\n",
    "creation_date_offset = \"00:00:00\" # add an offset to creation date, e.g. to get different constants\n",
    "\n",
    "calfile =  \"\" # path to calibration file. Leave empty if all data should come from DB\n",
    "nodb = False # if set only file-based constants will be used\n",
    "mem_cells = 0 # number of memory cells used, set to 0 to automatically infer\n",
    "bias_voltage = 300\n",
    "acq_rate = 0. # the detector acquisition rate, use 0 to try to auto-determine\n",
    "gain_setting = 0.1 # the gain setting, use 0.1 to try to auto-determine\n",
    "photon_energy = 9.2 # photon energy in keV\n",
    "interlaced = False # whether data is in interlaced layout\n",
    "overwrite = True # set to True if existing data should be overwritten\n",
    "max_pulses = [0, 500, 1] # range list [st, end, step] of maximum pulse indices. 3 allowed maximum list input elements.   \n",
    "local_input = False\n",
    "sequences_per_node = 2 # number of sequence files per cluster node if run as slurm job, set to 0 to not run SLURM parallel\n",
    "index_v = 2 # version of RAW index type\n",
    "blc_noise_threshold = 5000 # above this mean signal intensity now baseline correction via noise is attempted\n",
    "melt_snow = \"\" # if set to \"none\" snowy pixels are identified and resolved to NaN, if set to \"interpolate\", the value is interpolated from neighbouring pixels\n",
    "max_cells_db_dark = 0  # set to a value different than 0 to use this value for dark data DB queries\n",
    "max_cells_db = 0 # set to a value different than 0 to use this value for DB queries\n",
    "chunk_size_idim = 1  # chunking size of imaging dimension, adjust if user software is sensitive to this.\n",
    "force_hg_if_below = 1000 # set to a value other than 0 to force a pixel into high gain if it's high gain offset subtracted value is below this threshold\n",
    "force_mg_if_below = 1000 # set to a value other than 0 to force a pixel into medium gain if it's medium gain offset subtracted value is below this threshold\n",
    "mask_noisy_adc = 0.25 # set to a value other than 0 and below 1 to mask entire ADC if fraction of noisy pixels is above\n",
    "\n",
    "# Correction Booleans\n",
    "only_offset = False # Apply only Offset correction. if False, Offset is applied by Default. if True, Offset is only applied.\n",
    "rel_gain = False # do relative gain correction based on PC data\n",
    "xray_gain = False # do relative gain correction based on xray data\n",
    "blc_noise = False # if set, baseline correction via noise peak location is attempted\n",
    "blc_stripes = False # if set, baseline corrected via stripes\n",
    "blc_hmatch = False # if set, base line correction via histogram matching is attempted \n",
    "match_asics = False # if set, inner ASIC borders are matched to the same signal level\n",
    "adjust_mg_baseline = False # adjust medium gain baseline to match highest high gain value\n",
    "dont_zero_nans = False # do not zero NaN values in corrected data\n",
    "dont_zero_orange = False # do not zero very negative and very large values\n",
    "blc_set_min = False # Shift to 0 negative medium gain pixels after offset corr\n",
    "corr_asic_diag = False # if set, diagonal drop offs on ASICs are correted \n",
    "\n",
    "def balance_sequences(in_folder, run, sequences, sequences_per_node, karabo_da):\n",
    "    from xfel_calibrate.calibrate import balance_sequences as bs\n",
    "    return bs(in_folder, run, sequences, sequences_per_node, karabo_da)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Fill dictionaries comprising bools and arguments for correction and data analysis\n",
    "\n",
    "# Here the herarichy and dependability for correction booleans are defined \n",
    "corr_bools = {}\n",
    "\n",
    "# offset is at the bottom of AGIPD correction pyramid.\n",
    "corr_bools[\"only_offset\"] = only_offset\n",
    "\n",
    "# Dont apply any corrections if only_offset is requested \n",
    "if not only_offset:\n",
    "    corr_bools[\"adjust_mg_baseline\"] = adjust_mg_baseline\n",
    "    corr_bools[\"rel_gain\"] = rel_gain\n",
    "    corr_bools[\"xray_corr\"] = xray_gain\n",
    "    corr_bools[\"blc_noise\"] = blc_noise\n",
    "    corr_bools[\"blc_stripes\"] = blc_stripes\n",
    "    corr_bools[\"blc_hmatch\"] = blc_hmatch\n",
    "    corr_bools[\"blc_set_min\"] = blc_set_min\n",
    "    corr_bools[\"match_asics\"] = match_asics\n",
    "    corr_bools[\"corr_asic_diag\"] = corr_asic_diag\n",
    "    corr_bools[\"dont_zero_nans\"] = dont_zero_nans\n",
    "    corr_bools[\"dont_zero_orange\"] = dont_zero_orange\n",
    "\n",
    "# Here the herarichy and dependability for data analysis booleans and arguments are defined \n",
    "data_analysis_parms = {}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-21T11:30:07.086286Z",
     "start_time": "2019-02-21T11:30:06.929722Z"
    }
   },
   "outputs": [],
   "source": [
    "import sys\n",
    "from collections import OrderedDict\n",
    "\n",
    "# make sure a cluster is running with ipcluster start --n=32, give it a while to start\n",
    "import os\n",
    "import h5py\n",
    "import numpy as np\n",
    "import matplotlib\n",
    "matplotlib.use(\"agg\")\n",
    "import matplotlib.pyplot as plt\n",
    "from ipyparallel import Client\n",
    "print(f\"Connecting to profile {cluster_profile}\")\n",
    "view = Client(profile=cluster_profile)[:]\n",
    "view.use_dill()\n",
    "\n",
    "from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions\n",
    "from cal_tools.tools import (map_modules_from_folder, parse_runs, run_prop_seq_from_path, get_notebook_name,\n",
    "                                 get_dir_creation_date, get_constant_from_db)\n",
    "from cal_tools.agipdlib import get_gain_setting\n",
    "from dateutil import parser\n",
    "from datetime import timedelta\n",
    "\n",
    "il_mode = interlaced\n",
    "max_cells = mem_cells\n",
    "gains = np.arange(3)\n",
    "cells = np.arange(max_cells)\n",
    "\n",
    "creation_time = None\n",
    "if use_dir_creation_date:\n",
    "    creation_time = get_dir_creation_date(in_folder, run)\n",
    "    offset = parser.parse(creation_date_offset)\n",
    "    delta = timedelta(hours=offset.hour, minutes=offset.minute, seconds=offset.second)\n",
    "    creation_time += delta\n",
    "    print(f\"Using {creation_time} as creation time\")\n",
    "\n",
    "print(f\"Working in IL Mode: {il_mode}. Actual cells in use are: {max_cells}\")\n",
    "\n",
    "if sequences[0] == -1:\n",
    "    sequences = None\n",
    "\n",
    "CHUNK_SIZE = 250\n",
    "MAX_PAR = 32\n",
    "\n",
    "if in_folder[-1] == \"/\":\n",
    "    in_folder = in_folder[:-1]\n",
    "print(\"Outputting to {}\".format(out_folder))\n",
    "\n",
    "if not os.path.exists(out_folder):\n",
    "    os.makedirs(out_folder)\n",
    "elif not overwrite:\n",
    "    raise AttributeError(\"Output path exists! Exiting\")\n",
    "\n",
    "import warnings\n",
    "warnings.filterwarnings('ignore')\n",
    "\n",
    "from cal_tools.agipdlib import SnowResolution\n",
    "melt_snow = False if melt_snow == \"\" else SnowResolution(melt_snow)\n",
    "\n",
    "special_opts = blc_noise_threshold, blc_hmatch, melt_snow\n",
    "\n",
    "instrument = karabo_id.split(\"_\")[0]\n",
    "if instrument == \"SPB\":\n",
    "    dinstance = \"AGIPD1M1\"\n",
    "else:\n",
    "    dinstance = \"AGIPD1M2\"\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "control_fname = '{}/RAW-R{:04d}-{}-S00000.h5'.format(in_folder, run, karabo_da_control)\n",
    "\n",
    "if gain_setting == 0.1:\n",
    "    if creation_time.replace(tzinfo=None) < parser.parse('2020-01-31'):\n",
    "        print(\"Set gain-setting to None for runs taken before 2020-01-31\")\n",
    "        gain_setting = None\n",
    "    else:\n",
    "        try:\n",
    "            gain_setting = get_gain_setting(control_fname, h5path_ctrl)\n",
    "        except Exception as e:\n",
    "            print(f'Error while reading gain setting from: \\n{control_fname}')\n",
    "            print(e)\n",
    "            print(\"Set gain settion to 0\")\n",
    "            gain_setting = 0\n",
    "            \n",
    "print(f\"Gain setting: {gain_setting}\")\n",
    "print(f\"Detector in use is {karabo_id}\")\n",
    "print(f\"Instrument {instrument}\")\n",
    "print(f\"Detector instance {dinstance}\")\n",
    "\n",
    "if karabo_da[0] == -1:\n",
    "    if modules[0] == -1:\n",
    "        modules = list(range(16))\n",
    "    karabo_da = [\"AGIPD{:02d}\".format(i) for i in modules]\n",
    "else:\n",
    "    modules = [int(x[-2:]) for x in karabo_da]\n",
    "print(\"Process modules: \",\n",
    "      ', '.join([f\"Q{x // 4 + 1}M{x % 4 + 1}\" for x in modules]))\n",
    "\n",
    "h5path = h5path.format(karabo_id, receiver_id)\n",
    "h5path_idx = h5path_idx.format(karabo_id, receiver_id)\n",
    "h5path_ctrl = h5path_ctrl.format(karabo_id_control)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-21T11:30:07.263445Z",
     "start_time": "2019-02-21T11:30:07.217070Z"
    }
   },
   "outputs": [],
   "source": [
    "def combine_stack(d, sdim):\n",
    "    combined = np.zeros((sdim, 2048, 2048))\n",
    "    combined[...] = np.nan\n",
    "    \n",
    "    dy = 0\n",
    "    for i in range(16):\n",
    "        \n",
    "        try:\n",
    "            if i < 8:\n",
    "                dx = -512\n",
    "                if i > 3:\n",
    "                    dx -= 25\n",
    "                mx = 1\n",
    "                my = i % 8\n",
    "                combined[:, my*128+dy:(my+1)*128+dy,\n",
    "                         mx*512-dx:(mx+1)*512-dx] = np.rollaxis(d[i],2,1)[:,:,::-1]\n",
    "                dy += 30\n",
    "                if i == 3:\n",
    "                    dy += 30\n",
    "\n",
    "            elif i < 12:\n",
    "                dx = 80 - 50\n",
    "                if i == 8:\n",
    "                    dy = 4*30 + 30 +50 -30\n",
    "\n",
    "                mx = 1\n",
    "                my = i % 8 +4\n",
    "                combined[:, my*128+dy:(my+1)*128+dy,\n",
    "                         mx*512-dx:(mx+1)*512-dx] = np.rollaxis(d[i],2,1)[:,::-1,:]\n",
    "                dy += 30\n",
    "            else:\n",
    "                dx = 100 - 50\n",
    "                if i == 11:\n",
    "                    dy = 20\n",
    "\n",
    "                mx = 1\n",
    "                my = i - 14\n",
    "\n",
    "                combined[:, my*128+dy:(my+1)*128+dy,\n",
    "                         mx*512-dx:(mx+1)*512-dx] = np.rollaxis(d[i],2,1)[:,::-1,:]\n",
    "                dy += 30\n",
    "        except:\n",
    "            continue\n",
    "        \n",
    "    return combined"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-21T11:30:07.974174Z",
     "start_time": "2019-02-21T11:30:07.914832Z"
    }
   },
   "outputs": [],
   "source": [
    "# set everything up filewise\n",
    "mmf = map_modules_from_folder(in_folder, run, path_template, karabo_da, sequences)\n",
    "mapped_files, mod_ids, total_sequences, sequences_qm, _ = mmf\n",
    "MAX_PAR = min(MAX_PAR, total_sequences)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Processed Files ##"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-21T11:30:08.870802Z",
     "start_time": "2019-02-21T11:30:08.826285Z"
    }
   },
   "outputs": [],
   "source": [
    "import copy\n",
    "from IPython.display import HTML, display, Markdown, Latex\n",
    "import tabulate\n",
    "print(f\"Processing a total of {total_sequences} sequence files in chunks of {MAX_PAR}\")\n",
    "table = []\n",
    "mfc = copy.copy(mapped_files)\n",
    "ti = 0\n",
    "for k, files in mfc.items():\n",
    "    i = 0\n",
    "    while not files.empty():\n",
    "        f = files.get()\n",
    "        if i == 0:\n",
    "            table.append((ti, k, i, f))\n",
    "        else:\n",
    "            table.append((ti, \"\", i,  f))\n",
    "        i += 1\n",
    "        ti += 1\n",
    "md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=[\"#\", \"module\", \"# module\", \"file\"])))      \n",
    "# restore the queue\n",
    "mmf = map_modules_from_folder(in_folder, run, path_template, karabo_da, sequences)\n",
    "mapped_files, mod_ids, total_sequences, sequences_qm, _ = mmf"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-21T11:30:16.057429Z",
     "start_time": "2019-02-21T11:30:10.082114Z"
    },
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "import copy\n",
    "from functools import partial\n",
    "def correct_module(max_cells, index_v, CHUNK_SIZE, total_sequences, sequences_qm, \n",
    "                   bins_gain_vs_signal, bins_signal_low_range, bins_signal_high_range,\n",
    "                   bins_dig_gain_vs_signal, max_pulses, dbparms, fileparms, nodb, chunk_size_idim,\n",
    "                   special_opts, il_mode, loc, dinstance, force_hg_if_below, force_mg_if_below,\n",
    "                   mask_noisy_adc, acq_rate, gain_setting, corr_bools, h5path, h5path_idx, inp):\n",
    "    print(\"foo\")\n",
    "    import numpy as np\n",
    "    import copy\n",
    "    import h5py\n",
    "    import socket\n",
    "    from datetime import datetime\n",
    "    import re\n",
    "    import os\n",
    "    from influxdb import InfluxDBClient\n",
    "    import subprocess\n",
    "    from cal_tools.enums import BadPixels\n",
    "    from cal_tools.agipdlib import AgipdCorrections, SnowResolution\n",
    "    from cal_tools.agipdlib import get_num_cells, get_acq_rate\n",
    "    \n",
    "  \n",
    "    #client = InfluxDBClient('exflqr18318', 8086, 'root', 'root', 'calstats')\n",
    "\n",
    "    def create_influx_entry(run, proposal, qm, sequence, filesize, chunksize,\n",
    "                            total_sequences, success, runtime, reason=\"\"):\n",
    "        return {\n",
    "            \"measurement\": \"run_correction\",\n",
    "            \"tags\": {\n",
    "                \"host\": socket.gethostname(),\n",
    "                \"run\": run,\n",
    "                \"proposal\": proposal,\n",
    "                \"mem_cells\": max_cells,\n",
    "                \"sequence\": sequence,\n",
    "                \"module\": qm,\n",
    "                \"filesize\": filesize,\n",
    "                \"chunksize\": chunksize,\n",
    "                \"total_sequences\": total_sequences,\n",
    "                \"sequences_module\": sequences_qm[qm],\n",
    "                \"detector\": \"AGIPD\",\n",
    "                \"instrument\": \"SPB\",\n",
    "                \n",
    "            },\n",
    "            \"time\": datetime.utcnow().isoformat(),\n",
    "            \"fields\": {\n",
    "                \"success\": success,\n",
    "                \"reason\": reason,\n",
    "                \"runtime\": runtime,                \n",
    "            }\n",
    "        }\n",
    "    \n",
    "    hists_signal_low = None\n",
    "    hists_signal_high = None\n",
    "    hists_gain_vs_signal = None\n",
    "    hists_dig_gain_vs_signal = None\n",
    "    hist_pulses = None\n",
    "    low_edges = None\n",
    "    high_edges = None\n",
    "    signal_edges = None\n",
    "    dig_signal_edges = None\n",
    "    gain_stats = 0\n",
    "    when = None\n",
    "    err = None\n",
    "    \n",
    "   \n",
    "    try:\n",
    "        start = datetime.now()\n",
    "        success = True\n",
    "        reason = \"\"\n",
    "        filename, filename_out, channel, qm = inp\n",
    "        print(\"Have input\")\n",
    "        \n",
    "        if max_cells == 0:\n",
    "            max_cells = get_num_cells(filename, loc, channel)\n",
    "            if max_cells is None:\n",
    "                raise ValueError(\"No raw images found for {}\".format(qm))\n",
    "            else:\n",
    "                cells = np.arange(max_cells)\n",
    "            \n",
    "        if acq_rate == 0.:\n",
    "            acq_rate = get_acq_rate(filename, loc, channel)\n",
    "        else:\n",
    "            acq_rate = None\n",
    "\n",
    "        if dbparms[2] == 0:\n",
    "            dbparms[2] = max_cells\n",
    "        if dbparms[5] == 0:\n",
    "            dbparms[5] = dbparms[2]\n",
    "\n",
    "        print(\"Set memory cells to {}\".format(max_cells))\n",
    "        print(\"Set acquistion rate cells to {} MHz\".format(acq_rate))\n",
    "\n",
    "        # AGIPD correction requires path without the leading \"/\"\n",
    "        if h5path[0] == '/':\n",
    "            h5path = h5path[1:]\n",
    "        if h5path_idx[0] == '/':\n",
    "            h5path_idx = h5path_idx[1:]\n",
    "            \n",
    "\n",
    "        infile = h5py.File(filename, \"r\", driver=\"core\")\n",
    "        outfile = h5py.File(filename_out, \"w\")\n",
    "        try:\n",
    "            agipd_corr = AgipdCorrections(infile, outfile, max_cells, channel, max_pulses,\n",
    "                                          bins_gain_vs_signal, bins_signal_low_range,\n",
    "                                          bins_signal_high_range, bins_dig_gain_vs_signal,\n",
    "                                          chunk_size_idim=chunk_size_idim,\n",
    "                                          il_mode=il_mode, raw_fmt_version=index_v, \n",
    "                                          h5_data_path=h5path,\n",
    "                                          h5_index_path=h5path_idx,\n",
    "                                          cal_det_instance=dinstance, force_hg_if_below=force_hg_if_below,\n",
    "                                          force_mg_if_below=force_mg_if_below, mask_noisy_adc=mask_noisy_adc,\n",
    "                                          acquisition_rate=acq_rate, gain_setting=gain_setting,\n",
    "                                          corr_bools=corr_bools)\n",
    "\n",
    "            blc_noise_threshold, blc_hmatch, melt_snow = special_opts\n",
    "            if not corr_bools[\"only_offset\"]:\n",
    "                blc_hmatch = False\n",
    "                melt_snow = False\n",
    "            agipd_corr.baseline_corr_noise_threshold = blc_noise_threshold\n",
    "            agipd_corr.melt_snow = melt_snow\n",
    "            try:\n",
    "                agipd_corr.get_valid_image_idx()\n",
    "            except IOError:\n",
    "                return\n",
    "            if not nodb:\n",
    "                when = agipd_corr.initialize_from_db(dbparms, qm, only_dark=(fileparms != \"\"))\n",
    "            if fileparms != \"\" and not corr_bools[\"only_offset\"]:\n",
    "                agipd_corr.initialize_from_file(fileparms, qm, with_dark=nodb)\n",
    "            print(\"Initialized constants\")\n",
    "\n",
    "            for irange in agipd_corr.get_iteration_range():\n",
    "                agipd_corr.correct_agipd(irange)\n",
    "                print(\"Iterated\")\n",
    "\n",
    "            print(\"All iterations are finished\")\n",
    "            hists, edges = agipd_corr.get_histograms()\n",
    "            hists_signal_low, hists_signal_high, hists_gain_vs_signal, hists_dig_gain_vs_signal, hist_pulses = hists\n",
    "            low_edges, high_edges, signal_edges, dig_signal_edges = edges\n",
    "            gain_stats = np.array(agipd_corr.gain_stats)\n",
    "            \n",
    "        finally:\n",
    "            outfile.close()\n",
    "            infile.close()\n",
    "            print(\"Closed files\")\n",
    "        \n",
    "    except Exception as e:\n",
    "        print(e)\n",
    "        err = e\n",
    "        success = False\n",
    "        reason = \"Error\"\n",
    "        \n",
    "    finally:\n",
    "        run = re.findall(r'.*r([0-9]{4}).*', filename)[0]\n",
    "        proposal = re.findall(r'.*p([0-9]{6}).*', filename)[0]\n",
    "        sequence = re.findall(r'.*S([0-9]{5}).*', filename)[0]\n",
    "        filesize = os.path.getsize(filename)\n",
    "        duration = (datetime.now()-start).total_seconds()\n",
    "        #influx = create_influx_entry(run, proposal, qm, sequence, filesize, CHUNK_SIZE, total_sequences, success, duration, reason)\n",
    "        #client.write_points([influx])\n",
    "    return (hists_signal_low, hists_signal_high, hists_gain_vs_signal, hists_dig_gain_vs_signal, hist_pulses,\n",
    "            low_edges, high_edges, signal_edges, dig_signal_edges, gain_stats, max_cells, when, qm, err)\n",
    "    \n",
    "done = False\n",
    "first_files = []\n",
    "inp = []\n",
    "left = total_sequences\n",
    "\n",
    "# Display Information about the selected pulses indices for correction.\n",
    "pulses_lst = list(range(*max_pulses)) if not (len(max_pulses)==1 and max_pulses[0]==0) else max_pulses  \n",
    "\n",
    "try:\n",
    "    if len(pulses_lst) > 1:        \n",
    "        print(\"A range of {} pulse indices is selected: from {} to {} with a step of {}\"\n",
    "               .format(len(pulses_lst), pulses_lst[0] , pulses_lst[-1] + (pulses_lst[1] - pulses_lst[0]),\n",
    "                       pulses_lst[1] - pulses_lst[0]))\n",
    "    else:\n",
    "        print(\"one pulse is selected: a pulse of idx {}\".format(pulses_lst[0]))\n",
    "except Exception as e:\n",
    "    raise ValueError('max_pulses input Error: {}'.format(e))\n",
    "    \n",
    "bins_gain_vs_signal = (100, 100)\n",
    "bins_signal_low_range = 100\n",
    "bins_signal_high_range = 100\n",
    "bins_dig_gain_vs_signal = (100, 4)\n",
    "\n",
    "hists_gain_vs_signal =  np.zeros((bins_gain_vs_signal), np.float64)\n",
    "hists_dig_gain_vs_signal =  np.zeros((bins_dig_gain_vs_signal), np.float64)\n",
    "gain_stats = 0\n",
    "\n",
    "low_edges, high_edges, signal_edges, dig_signal_edges = None, None, None, None\n",
    "dbparms = [cal_db_interface, creation_time, max_cells_db, bias_voltage,\n",
    "           photon_energy, max_cells_db_dark, cal_db_timeout]\n",
    "\n",
    "fileparms = calfile\n",
    "\n",
    "all_cells = []\n",
    "whens = []\n",
    "errors = []\n",
    "while not done:\n",
    "    \n",
    "    dones = []\n",
    "    first = True\n",
    "    for i in range(16):\n",
    "        qm = \"Q{}M{}\".format(i//4 +1, i % 4 + 1)\n",
    "        if qm in mapped_files and not mapped_files[qm].empty():\n",
    "            fname_in = str(mapped_files[qm].get())\n",
    "            dones.append(mapped_files[qm].empty())\n",
    "        else:\n",
    "            print(\"Skipping {}\".format(qm))\n",
    "            first_files.append((None, None))\n",
    "            continue\n",
    "        fout = os.path.abspath(\"{}/{}\".format(out_folder, (os.path.split(fname_in)[-1]).replace(\"RAW\", \"CORR\")))\n",
    "        if first:\n",
    "            first_files.append((fname_in, fout))\n",
    "        inp.append((fname_in, fout, i,  qm))\n",
    "    first = False\n",
    "    if len(inp) >= min(MAX_PAR, left):\n",
    "        print(\"Running {} tasks parallel\".format(len(inp)))\n",
    "        p = partial(correct_module, max_cells, index_v, CHUNK_SIZE, total_sequences,\n",
    "                    sequences_qm, bins_gain_vs_signal, bins_signal_low_range, bins_signal_high_range,\n",
    "                    bins_dig_gain_vs_signal, max_pulses, dbparms, fileparms, nodb, chunk_size_idim,\n",
    "                    special_opts, il_mode, karabo_id, dinstance, force_hg_if_below, force_mg_if_below,\n",
    "                    mask_noisy_adc, acq_rate, gain_setting, corr_bools, h5path, h5path_idx)\n",
    "\n",
    "        r = view.map_sync(p, inp)\n",
    "\n",
    "        #r = list(map(p, inp))\n",
    "\n",
    "        inp = []\n",
    "        left -= MAX_PAR\n",
    "\n",
    "        init_hist = False\n",
    "        for rr in r:\n",
    "            if rr is not None:\n",
    "                hl, hh, hg, hdg, hp, low_edges, high_edges, signal_edges, dig_signal_edges, gs, cells, when, qm, err = rr\n",
    "                all_cells.append(cells)\n",
    "                whens.append((qm, when))\n",
    "                errors.append(err)\n",
    "                # Validate hp to be int not None.\n",
    "                if not init_hist and hp is not None:\n",
    "                    hists_signal_low =  np.zeros((bins_signal_low_range, hp), np.float64)\n",
    "                    hists_signal_high =  np.zeros((bins_signal_low_range, hp), np.float64)\n",
    "                    init_hist = True\n",
    "                if hl is not None:  # any one being None will also make the others None\n",
    "                    hists_signal_low += hl.astype(np.float64)\n",
    "                    hists_signal_high += hh.astype(np.float64)\n",
    "                    hists_gain_vs_signal += hg.astype(np.float64)\n",
    "                    hists_dig_gain_vs_signal += hdg.astype(np.float64)\n",
    "                    gain_stats += gs\n",
    "    \n",
    "    done = all(dones)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "print(\"Constants were injected on: \")\n",
    "to_store = []\n",
    "for i, (qm, when) in enumerate(whens):\n",
    "    print(qm)\n",
    "    line = [qm]\n",
    "    # If correction is crashed\n",
    "    if errors[i] is not None:\n",
    "        print(\"Error: {}\".format(errors[i]) )\n",
    "    else:\n",
    "        for key, item in when.items():\n",
    "            if hasattr(item, 'strftime'):\n",
    "                item = item.strftime('%y-%m-%d %H:%M')\n",
    "            when[key] = item\n",
    "            print('{:.<12s}'.format(key), item)\n",
    "            \n",
    "    # Store few time stamps if exists\n",
    "    # Add NA to keep array structure\n",
    "    for key in ['offset', 'slopesPC', 'slopesFF']:\n",
    "        if when and key in when:\n",
    "            line.append(when[key])\n",
    "        else:\n",
    "            if errors[i] is not None:\n",
    "                line.append('Err')\n",
    "            else:\n",
    "                line.append('NA')\n",
    "    to_store.append(line)\n",
    "\n",
    "np.savetxt(\"{}/tmp_const_beginat_S{:05d}.csv\".format(out_folder, sequences[0]), \n",
    "           np.array(to_store).astype(str), fmt='%s', delimiter = ',')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:28:51.765030Z",
     "start_time": "2019-02-18T17:28:51.714783Z"
    }
   },
   "outputs": [],
   "source": [
    "from mpl_toolkits.mplot3d import Axes3D\n",
    "import matplotlib.pyplot as plt\n",
    "from matplotlib import cm\n",
    "from matplotlib.ticker import LinearLocator, FormatStrFormatter\n",
    "import numpy as np\n",
    "%matplotlib inline\n",
    "def do_3d_plot(data, edges, x_axis, y_axis):\n",
    "    fig = plt.figure(figsize=(10,10))\n",
    "    ax = fig.gca(projection='3d')\n",
    "\n",
    "    # Make data.\n",
    "    X = edges[0][:-1]\n",
    "    Y = edges[1][:-1]\n",
    "    X, Y = np.meshgrid(X, Y)\n",
    "    \n",
    "    Z = data.T\n",
    "\n",
    "    # Plot the surface.\n",
    "    surf = ax.plot_surface(X, Y, Z, cmap=cm.coolwarm,\n",
    "                           linewidth=0, antialiased=False)\n",
    "    ax.set_xlabel(x_axis)\n",
    "    ax.set_ylabel(y_axis)\n",
    "    ax.set_zlabel(\"Counts\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Signal vs. Analogue Gain ##\n",
    "\n",
    "The following plot shows plots signal vs. gain for the first 128 images."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:28:52.857960Z",
     "start_time": "2019-02-18T17:28:51.767217Z"
    }
   },
   "outputs": [],
   "source": [
    "if signal_edges is not None:\n",
    "    do_3d_plot(hists_gain_vs_signal, signal_edges, \"Signal (ADU)\", \"Analogue gain (ADU)\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:28:53.690522Z",
     "start_time": "2019-02-18T17:28:52.860143Z"
    }
   },
   "outputs": [],
   "source": [
    "def do_2d_plot(data, edges, y_axis, x_axis):\n",
    "    from matplotlib.colors import LogNorm\n",
    "    fig = plt.figure(figsize=(10,10))\n",
    "    ax = fig.add_subplot(111)\n",
    "    extent = [np.min(edges[1]), np.max(edges[1]),np.min(edges[0]), np.max(edges[0])]\n",
    "    im = ax.imshow(data[::-1,:], extent=extent, aspect=\"auto\", norm=LogNorm(vmin=1, vmax=np.max(data)))\n",
    "    ax.set_xlabel(x_axis)\n",
    "    ax.set_ylabel(y_axis)\n",
    "    cb = fig.colorbar(im)\n",
    "    cb.set_label(\"Counts\")\n",
    "    \n",
    "if signal_edges is not None:\n",
    "    do_2d_plot(hists_gain_vs_signal, signal_edges, \"Signal (ADU)\", \"Gain Value (ADU)\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Signal vs. Digitized Gain ##\n",
    "\n",
    "The following plot shows plots signal vs. digitized gain for the first 128 images."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:28:54.370559Z",
     "start_time": "2019-02-18T17:28:53.691959Z"
    }
   },
   "outputs": [],
   "source": [
    "if dig_signal_edges is not None:\n",
    "    do_2d_plot(hists_dig_gain_vs_signal, dig_signal_edges, \"Signal (ADU)\", \"Gain Bit Value\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:31:51.668096Z",
     "start_time": "2019-02-18T17:31:51.529158Z"
    }
   },
   "outputs": [],
   "source": [
    "fig, ax = plt.subplots()\n",
    "if isinstance(gain_stats, np.ndarray):\n",
    "    ax.pie(gain_stats, labels=[\"high\", \"medium\", \"low\"], autopct='%1.2f%%',\n",
    "            shadow=True, startangle=27)\n",
    "    a = ax.axis('equal')  # Equal aspect ratio ensures that pie is drawn as a circle."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Mean Intensity per Pulse ##\n",
    "\n",
    "The following plots show the mean signal for each pulse in a detailed and expanded intensity region."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:28:57.327702Z",
     "start_time": "2019-02-18T17:28:54.377061Z"
    },
    "scrolled": false
   },
   "outputs": [],
   "source": [
    "if low_edges is not None:\n",
    "    do_3d_plot(hists_signal_low, low_edges, \"Signal (ADU)\", \"Pulse id\")\n",
    "    do_2d_plot(hists_signal_low, low_edges, \"Signal (ADU)\", \"Pulse id\")\n",
    "if high_edges is not None:\n",
    "    do_3d_plot(hists_signal_high, high_edges, \"Signal (ADU)\", \"Pulse id\")\n",
    "    do_2d_plot(hists_signal_high, high_edges, \"Signal (ADU)\", \"Pulse id\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:29:20.634480Z",
     "start_time": "2019-02-18T17:28:57.329231Z"
    }
   },
   "outputs": [],
   "source": [
    "\n",
    "corrected = []\n",
    "raw = []\n",
    "gains = []\n",
    "mask = []\n",
    "cell_fac = 1\n",
    "first_idx = 0\n",
    "last_idx = cell_fac*176+first_idx\n",
    "pulse_ids = []\n",
    "train_ids = []\n",
    "for i, ff in enumerate(first_files[:16]):\n",
    "    try:\n",
    "\n",
    "        rf, cf = ff\n",
    "        if rf is None:\n",
    "            \n",
    "            raise Exception(\"File not present\")\n",
    "        infile = h5py.File(rf, \"r\")\n",
    "        datapath = h5path.format(i)\n",
    "        raw.append(np.array(infile[f\"{datapath}/image/data\"][first_idx:last_idx,0,...]))\n",
    "        infile.close()\n",
    "        \n",
    "        infile = h5py.File(cf, \"r\")\n",
    "        corrected.append(np.array(infile[f\"{datapath}/image/data\"][first_idx:last_idx,...]))\n",
    "        gains.append(np.array(infile[f\"{datapath}/image/gain\"][first_idx:last_idx,...]))\n",
    "        mask.append(np.array(infile[f\"{datapath}/image/mask\"][first_idx:last_idx,...]))\n",
    "        pulse_ids.append(np.squeeze(infile[f\"{datapath}/image/pulseId\"][first_idx:last_idx,...]))\n",
    "        train_ids.append(np.squeeze(infile[f\"{datapath}/image/trainId\"][first_idx:last_idx,...]))\n",
    "        infile.close()\n",
    "        \n",
    "    except Exception as e:\n",
    "        print(e)\n",
    "        corrected.append(np.zeros((last_idx-first_idx, 512, 128)))\n",
    "        gains.append(np.zeros((last_idx-first_idx, 512, 128)))\n",
    "        mask.append(np.zeros((last_idx-first_idx, 512, 128)))\n",
    "        raw.append(np.zeros((last_idx-first_idx, 512, 128)))\n",
    "        "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:29:27.025667Z",
     "start_time": "2019-02-18T17:29:20.642029Z"
    }
   },
   "outputs": [],
   "source": [
    "domask = False\n",
    "if domask:\n",
    "    for i, c in enumerate(corrected):\n",
    "        c[mask[i] != 0] = 0\n",
    "        #c[pats[i] < 200]  = 0\n",
    "        #c[np.abs(pats[i]) == 1000] = np.abs(c[np.abs(pats[i]) == 1000])\n",
    "combined = combine_stack(corrected, last_idx-first_idx)\n",
    "combined_raw = combine_stack(raw, last_idx-first_idx)\n",
    "combined_g = combine_stack(gains, last_idx-first_idx)\n",
    "combined_mask = combine_stack(mask, last_idx-first_idx)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Mean RAW Preview ###\n",
    "\n",
    "The per pixel mean of the first 128 images of the RAW data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:29:33.226396Z",
     "start_time": "2019-02-18T17:29:27.027758Z"
    }
   },
   "outputs": [],
   "source": [
    "%matplotlib inline\n",
    "fig = plt.figure(figsize=(20,10))\n",
    "ax = fig.add_subplot(111)\n",
    "im = ax.imshow(np.mean(combined_raw[:,:1300,400:1600],axis=0),\n",
    "               vmin=min(0.75*np.median(combined_raw[combined_raw > 0]), 4000),\n",
    "               vmax=max(1.5*np.median(combined_raw[combined_raw > 0]), 7000), cmap=\"jet\")\n",
    "cb = fig.colorbar(im, ax=ax)\n",
    "\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Single Shot Preview ###\n",
    "\n",
    "A single shot image from cell 12 of the first train"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:29:33.761015Z",
     "start_time": "2019-02-18T17:29:33.227922Z"
    }
   },
   "outputs": [],
   "source": [
    "\n",
    "fig = plt.figure(figsize=(20,10))\n",
    "ax = fig.add_subplot(111)\n",
    "dim = combined[1,:1300,400:1600]\n",
    "\n",
    "im = ax.imshow(dim, vmin=-0, vmax=max(20*np.median(dim[dim > 0]), 100), cmap=\"jet\", interpolation=\"nearest\")\n",
    "cb = fig.colorbar(im, ax=ax)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:29:35.903487Z",
     "start_time": "2019-02-18T17:29:33.762568Z"
    }
   },
   "outputs": [],
   "source": [
    "fig = plt.figure(figsize=(20,10))\n",
    "ax = fig.add_subplot(111)\n",
    "h = ax.hist(dim.flatten(), bins=1000, range=(0, 2000))\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Mean CORRECTED Preview ###\n",
    "\n",
    "The per pixel mean of the first 128 images of the CORRECTED data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:29:39.369686Z",
     "start_time": "2019-02-18T17:29:35.905152Z"
    }
   },
   "outputs": [],
   "source": [
    "\n",
    "fig = plt.figure(figsize=(20,10))\n",
    "ax = fig.add_subplot(111)\n",
    "im = ax.imshow(np.mean(combined[:,:1300,400:1600], axis=0), vmin=-50,\n",
    "               vmax=max(100*np.median(combined[combined > 0]), 100), cmap=\"jet\", interpolation=\"nearest\")\n",
    "cb = fig.colorbar(im, ax=ax)\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:29:49.217848Z",
     "start_time": "2019-02-18T17:29:39.371232Z"
    }
   },
   "outputs": [],
   "source": [
    "fig = plt.figure(figsize=(20,10))\n",
    "ax = fig.add_subplot(111)\n",
    "combined[combined <= 0] = 0\n",
    "h = ax.hist(combined.flatten(), bins=1000, range=(-50, 1000), log=True)\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Maximum GAIN Preview ###\n",
    "\n",
    "The per pixel maximum of the first 128 images of the digitized GAIN data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:29:49.641675Z",
     "start_time": "2019-02-18T17:29:49.224167Z"
    }
   },
   "outputs": [],
   "source": [
    "\n",
    "fig = plt.figure(figsize=(20,10))\n",
    "ax = fig.add_subplot(111)\n",
    "im = ax.imshow(np.max(combined_g[1,:1300,400:1600][None,...], axis=0), vmin=0,\n",
    "               vmax=3, cmap=\"jet\")\n",
    "cb = fig.colorbar(im, ax=ax)\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": true
   },
   "source": [
    "## Bad Pixels ##\n",
    "The mask contains dedicated entries for all pixels and memory cells as well as all three gains stages. Each mask entry is encoded in 32 bits as:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:29:49.651913Z",
     "start_time": "2019-02-18T17:29:49.643556Z"
    }
   },
   "outputs": [],
   "source": [
    "from cal_tools.enums import BadPixels\n",
    "from IPython.display import HTML, display, Markdown, Latex\n",
    "import tabulate\n",
    "table = []\n",
    "for item in BadPixels:\n",
    "    table.append((item.name, \"{:016b}\".format(item.value)))\n",
    "md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=[\"Bad pixel type\", \"Bit mask\"])))"
   ]
  },
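  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "As a minimal sketch (illustration only, not part of the correction itself), a single mask entry can be decoded back into its individual `BadPixels` flags with a bitwise AND. The example value below is arbitrary and simply combines two flags used elsewhere in this notebook."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Minimal sketch: decode a 32-bit mask entry into its BadPixels flags.\n",
    "# The example value is arbitrary; in practice it would come from\n",
    "# combined_mask or the image/mask dataset of a CORR file.\n",
    "example_mask_value = BadPixels.NOISY_ADC.value | BadPixels.NO_DARK_DATA.value\n",
    "flags = [item.name for item in BadPixels if example_mask_value & item.value]\n",
    "print(\"Mask value {:032b} decodes to: {}\".format(example_mask_value, \", \".join(flags)))"
   ]
  },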
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Single Shot Bad Pixels ###\n",
    "\n",
    "A single shot bad pixel map from cell 4 of the first train"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:29:50.086169Z",
     "start_time": "2019-02-18T17:29:49.653391Z"
    }
   },
   "outputs": [],
   "source": [
    "fig = plt.figure(figsize=(20,10))\n",
    "ax = fig.add_subplot(111)\n",
    "im = ax.imshow(np.log2(combined_mask[10,:1300,400:1600]), vmin=0,\n",
    "               vmax=32, cmap=\"jet\")\n",
    "cb = fig.colorbar(im, ax=ax)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": true
   },
   "source": [
    "### Full Train Bad Pixels ###"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:29:51.686562Z",
     "start_time": "2019-02-18T17:29:50.088883Z"
    }
   },
   "outputs": [],
   "source": [
    "fig = plt.figure(figsize=(20,10))\n",
    "ax = fig.add_subplot(111)\n",
    "im = ax.imshow(np.log2(np.max(combined_mask[:,:1300,400:1600], axis=0)), vmin=0,\n",
    "               vmax=32, cmap=\"jet\")\n",
    "cb = fig.colorbar(im, ax=ax)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Full Train Bad Pixels - Only Dark Char. Related ###"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:29:53.662423Z",
     "start_time": "2019-02-18T17:29:51.688376Z"
    }
   },
   "outputs": [],
   "source": [
    "fig = plt.figure(figsize=(20,10))\n",
    "ax = fig.add_subplot(111)\n",
    "im = ax.imshow(np.max((combined_mask.astype(np.uint32)[:,:1300,400:1600] & BadPixels.NOISY_ADC.value) != 0, axis=0), vmin=0,\n",
    "               vmax=1, cmap=\"jet\")\n",
    "cb = fig.colorbar(im, ax=ax)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-02-18T17:29:55.483270Z",
     "start_time": "2019-02-18T17:29:53.664226Z"
    }
   },
   "outputs": [],
   "source": [
    "from cal_tools.enums import BadPixels\n",
    "fig = plt.figure(figsize=(20,10))\n",
    "ax = fig.add_subplot(111)\n",
    "cm = combined_mask[:,:1300,400:1600]\n",
    "cm[cm > BadPixels.NO_DARK_DATA.value] = 0\n",
    "im = ax.imshow(np.log2(np.max(cm, axis=0)), vmin=0,\n",
    "               vmax=32, cmap=\"jet\")\n",
    "cb = fig.colorbar(im, ax=ax)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}