diff --git a/notebooks/ePix100/Characterize_FlatFields_ePix100_NBC.ipynb b/notebooks/ePix100/Characterize_FlatFields_ePix100_NBC.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..948ec45f46349254e5cbe7084843dcd62dfc1935
--- /dev/null
+++ b/notebooks/ePix100/Characterize_FlatFields_ePix100_NBC.ipynb
@@ -0,0 +1,1423 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "826b869a",
+   "metadata": {},
+   "source": [
+    "#  ePix100 Flat Field Characterization\n",
+    "\n",
+    "Author: European XFEL Detector Group, Version 1.0\n",
+    "\n",
+    "Generate gain maps from flat-field runs."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7439b810",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "in_folder = '/gpfs/exfel/exp/MID/202231/p900310/raw' # input folder, required\n",
+    "out_folder = '' # output folder, required\n",
+    "metadata_folder = ''  # Directory containing calibration_metadata.yml when run by xfel-calibrate\n",
+    "run = 29 # which run to read data from, required\n",
+    "\n",
+    "# Parameters for accessing the raw data.\n",
+    "karabo_id = \"MID_EXP_EPIX-2\"  # karabo ID\n",
+    "karabo_da = \"EPIX02\"  # data aggregator\n",
+    "receiver_template = \"RECEIVER\" # detector receiver template for accessing raw data files\n",
+    "instrument_source_template = '{}/DET/{}:daqOutput' # instrument detector data source in h5files\n",
+    "\n",
+    "# Fit parameters\n",
+    "peak_fitting = 'gauss' # method to find the peak position per pixel: 'median' or 'gauss'\n",
+    "N_sigma_interval = 5   # sigma interval to find singles peak in each per pixel \n",
+    "peak_energy = 8.048    # [keV] Cu K$\\alpha$1\n",
+    "\n",
+    "# ADU range\n",
+    "ADU_range = [-50,500] # expected range that encloses the raw signal from the FF run \n",
+    "\n",
+    "# Cluster calculators (given in N times sigma noise)\n",
+    "split_evt_primary_threshold = 7   # Split event primary threshold \n",
+    "split_evt_secondary_threshold = 3  # Split event secondary threshold\n",
+    "split_evt_mip_threshold = 1000     # Threshold for rejection of MIP events (e.g, cosmic-rays)\n",
+    "\n",
+    "# Parameters for the calibration database.\n",
+    "cal_db_interface = \"tcp://max-exfl-cal001:8020\" # calibration DB interface to use\n",
+    "cal_db_timeout = 300000 # timeout on caldb requests\n",
+    "creation_time = \"\"  # The timestamp to use with Calibration DB. Required Format: \"YYYY-MM-DD hh:mm:ss\" e.g. 2019-07-04 11:02:41\n",
+    "db_output = False # Output constants to the calibration database\n",
+    "local_output = True # Output constants locally\n",
+    "\n",
+    "# Conditions used for injected calibration constants.\n",
+    "bias_voltage = 200 # Bias voltage\n",
+    "in_vacuum = False # Detector operated in vacuum\n",
+    "fix_integration_time = -1 # Integration time. Set to -1 to read from .h5 file\n",
+    "fix_temperature = -1 # Fixed temperature in Kelvin. Set to -1 to read from .h5 file\n",
+    "temp_limits = 5 # Limit for parameter Operational temperature\n",
+    "\n",
+    "# Parameters used during selecting raw data trains.\n",
+    "min_trains = 1 # Minimum number of trains that should be available. Default 1.\n",
+    "max_trains = 0 # Maximum number of trains to use for processing. Set to 0 to use all available trains."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "43791d97",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import warnings\n",
+    "\n",
+    "import matplotlib.pyplot as plt\n",
+    "from matplotlib.colors import LogNorm\n",
+    "import numpy as np\n",
+    "import pasha as psh\n",
+    "from extra_data import RunDirectory\n",
+    "from pathlib import Path\n",
+    "from prettytable import PrettyTable\n",
+    "from scipy.optimize import curve_fit\n",
+    "\n",
+    "import XFELDetAna.xfelprofiler as xprof\n",
+    "from XFELDetAna import xfelpyanatools as xana\n",
+    "from XFELDetAna import xfelpycaltools as xcal\n",
+    "from XFELDetAna.plotting.util import prettyPlotting\n",
+    "\n",
+    "from cal_tools.enums import BadPixels\n",
+    "from cal_tools.step_timing import StepTimer\n",
+    "from cal_tools.epix100 import epix100lib\n",
+    "from cal_tools.tools import (\n",
+    "    calcat_creation_time,\n",
+    "    get_pdu_from_db,\n",
+    "    get_constant_from_db,\n",
+    "    get_report,\n",
+    "    save_const_to_h5,\n",
+    "    send_to_db,\n",
+    ")\n",
+    "from iCalibrationDB import Conditions, Constants"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4f4d9f62",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "%matplotlib inline\n",
+    "\n",
+    "warnings.filterwarnings('ignore')\n",
+    "\n",
+    "prettyPlotting = True\n",
+    "\n",
+    "profiler = xprof.Profiler()\n",
+    "profiler.disable()\n",
+    "\n",
+    "step_timer = StepTimer()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "6571ae1c",
+   "metadata": {},
+   "source": [
+    "## Load Data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "9c93190b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "instrument_src = instrument_source_template.format(karabo_id, receiver_template)\n",
+    "\n",
+    "# Run directory\n",
+    "proposal = list(filter(None, in_folder.strip('/').split('/')))[-2]\n",
+    "file_loc = f'proposal:{proposal} runs:{run}'\n",
+    "report = get_report(metadata_folder)\n",
+    "\n",
+    "ped_dir = Path(in_folder) / f'r{run:04d}'\n",
+    "run_dc = RunDirectory(ped_dir)\n",
+    "\n",
+    "print(f\"Run is: {run}\")\n",
+    "print(f\"Instrument H5File source: {instrument_src}\")\n",
+    "\n",
+    "creation_time = calcat_creation_time(in_folder, run, creation_time)\n",
+    "print(f\"Using {creation_time.isoformat()} as creation time\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "08d6fef2",
+   "metadata": {
+    "slideshow": {
+     "slide_type": "-"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "# Path to pixels ADC values\n",
+    "pixels_src = (instrument_src, \"data.image.pixels\")\n",
+    "\n",
+    "# Specify the total number of images to process\n",
+    "n_trains = run_dc.get_data_counts(*pixels_src).shape[0]\n",
+    "\n",
+    "# Modify n_trains to process based on the given maximum and minimum number of trains.\n",
+    "if max_trains:\n",
+    "    n_trains = min(max_trains, n_trains)\n",
+    "    \n",
+    "if n_trains < min_trains:\n",
+    "    raise ValueError(\n",
+    "        f\"Less than {min_trains} trains are available in RAW data.\"\n",
+    "         \" Not enough data to process flat fields.\")\n",
+    "\n",
+    "all_trains = len(run_dc.select(instrument_src).train_ids)\n",
+    "if n_trains != all_trains:\n",
+    "    print(f\"Warning: {all_trains - n_trains} trains with empty data.\")\n",
+    "\n",
+    "print(f'Images to analyze: {n_trains}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "9fdf1715",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Read sensor size\n",
+    "sensor_size = run_dc[instrument_src, 'data.image.dims'].as_single_value(reduce_by='first') # (x=768, y=708) expected\n",
+    "sensor_size = sensor_size[sensor_size != 1].tolist()  # data.image.dims for old data is [768, 708, 1]\n",
+    "assert sensor_size == [768,708], 'Unexpected sensor dimensions.' \n",
+    "\n",
+    "ctrl_data = epix100lib.epix100Ctrl(\n",
+    "    run_dc=run_dc,\n",
+    "    instrument_src=instrument_src,\n",
+    "    ctrl_src=f\"{karabo_id}/DET/CONTROL\",\n",
+    "    )\n",
+    "# Read integration time\n",
+    "if fix_integration_time == -1:\n",
+    "    integration_time = ctrl_data.get_integration_time()\n",
+    "    integration_time_str_add = ''\n",
+    "else:\n",
+    "    integration_time = fix_integration_time\n",
+    "    integration_time_str_add = '(manual input)'\n",
+    "    \n",
+    "# Read temperature    \n",
+    "if fix_temperature == -1:\n",
+    "    temperature = ctrl_data.get_temprature()\n",
+    "    temperature_k = temperature + 273.15\n",
+    "    temp_str_add = ''\n",
+    "else:\n",
+    "    temperature_k = fix_temperature\n",
+    "    temperature = fix_temperature - 273.15\n",
+    "    temp_str_add = '(manual input)'\n",
+    "    \n",
+    "# Print operating conditions\n",
+    "print(f\"Bias voltage: {bias_voltage} V\")\n",
+    "print(f\"Detector integration time: {integration_time} \\u03BCs {integration_time_str_add}\")\n",
+    "print(f\"Mean temperature: {temperature:0.2f}\\u00B0C / {temperature_k:0.2f} K {temp_str_add}\")\n",
+    "print(f\"Operated in vacuum: {in_vacuum}\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a4dd3d8d",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "step_timer = StepTimer()\n",
+    "step_timer.start()\n",
+    "\n",
+    "# Read data\n",
+    "data_dc = run_dc.select(*pixels_src, require_all=True).select_trains(np.s_[:n_trains])\n",
+    "dshape = data_dc[pixels_src].shape\n",
+    "\n",
+    "step_timer.done_step('Flat-fields loaded. Elapsed Time')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "7920cb0b",
+   "metadata": {
+    "tags": []
+   },
+   "source": [
+    "## Retrieve Necessary Calibration Constants"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "593964be",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "const_data = dict()\n",
+    "constants = ['Offset', 'Noise', 'BadPixelsDark']\n",
+    "\n",
+    "condition =  Conditions.Dark.ePix100(bias_voltage=bias_voltage,\n",
+    "                                     integration_time=integration_time,\n",
+    "                                     temperature=temperature_k,\n",
+    "                                     in_vacuum=in_vacuum)\n",
+    "\n",
+    "for cname in constants:        \n",
+    "    const_data[cname] = get_constant_from_db(\n",
+    "        karabo_id=karabo_id,\n",
+    "        karabo_da=karabo_da,\n",
+    "        constant=getattr(Constants.ePix100, cname)(),\n",
+    "        condition=condition,\n",
+    "        empty_constant=None,\n",
+    "        cal_db_interface=cal_db_interface,\n",
+    "        creation_time=creation_time,\n",
+    "        timeout=cal_db_timeout\n",
+    "    )"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "ea05e961",
+   "metadata": {},
+   "source": [
+    "## Instantiate calculators"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f05e8297",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "block_size = [sensor_size[0]//2, sensor_size[1]//2]\n",
+    "noiseSigma = 5\n",
+    "\n",
+    "cmCorrection_block = xcal.CommonModeCorrection(\n",
+    "    sensor_size,\n",
+    "    block_size,\n",
+    "    'block',\n",
+    "    noiseMap=const_data['Noise'].swapaxes(0,1),\n",
+    "    noiseSigma=noiseSigma,\n",
+    "    parallel=False)\n",
+    "cmCorrection_col = xcal.CommonModeCorrection(\n",
+    "    sensor_size,\n",
+    "    block_size,\n",
+    "    'col',\n",
+    "    noiseMap=const_data['Noise'].swapaxes(0,1),\n",
+    "    noiseSigma=noiseSigma,\n",
+    "    parallel=False)\n",
+    "cmCorrection_row = xcal.CommonModeCorrection(\n",
+    "    sensor_size,\n",
+    "    block_size,\n",
+    "    'row',\n",
+    "    noiseMap=const_data['Noise'].swapaxes(0,1),\n",
+    "    noiseSigma=noiseSigma,\n",
+    "    parallel=False)\n",
+    "  \n",
+    "patternClassifier = xcal.PatternClassifier(\n",
+    "    shape=sensor_size,\n",
+    "    noisemap=const_data['Noise'].swapaxes(0,1),\n",
+    "    primaryThreshold=split_evt_primary_threshold,\n",
+    "    secondaryThreshold=split_evt_secondary_threshold,\n",
+    "    upperThreshold=split_evt_mip_threshold,\n",
+    "    blockSize=block_size,\n",
+    "    setPixelMask = const_data['BadPixelsDark'].flatten(),\n",
+    "    parallel=False\n",
+    ")\n",
+    "\n",
+    "patternSelector = xcal.PatternSelector(\n",
+    "    sensor_size, \n",
+    "    selectionList = [100, 101], # singles patterns\n",
+    "    blockSize=block_size, \n",
+    "    parallel=False)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "8977c9ff",
+   "metadata": {},
+   "source": [
+    "## Correct data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "de145b05",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "bin_min = ADU_range[0]\n",
+    "bin_max = ADU_range[1]\n",
+    "bin_width = 1\n",
+    "\n",
+    "bins = np.arange(bin_min,bin_max,bin_width)\n",
+    "hist = {'O': 0,'CM': 0,'CS': 0, 'S': 0}"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "1765d3f8",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def correct_train(worker_id, index, train_id, dc):\n",
+    "\n",
+    "    d = dc[pixels_src[0]][pixels_src[1]].astype(np.float32)\n",
+    "\n",
+    "    # Offset correction\n",
+    "    d -= const_data['Offset'].squeeze()\n",
+    "    hist['O'] += np.histogram(d.flatten(),bins=bins)[0]\n",
+    "     \n",
+    "    # Common Mode correction\n",
+    "    d = d.swapaxes(0,-1)\n",
+    "    d = cmCorrection_block.correct(d)\n",
+    "    d = cmCorrection_col.correct(d)\n",
+    "    d = cmCorrection_row.correct(d)\n",
+    "    d = d.swapaxes(0,-1)\n",
+    "    hist['CM'] += np.histogram(d.flatten(),bins=bins)[0]\n",
+    "    \n",
+    "    # Charge Sharing correction\n",
+    "    d = d.swapaxes(0,-1)\n",
+    "    d, patterns = patternClassifier.classify(d)\n",
+    "    sing,fs = patternSelector.select(d,patterns)\n",
+    "    d = d.swapaxes(0,-1)\n",
+    "    hist['CS'] += np.histogram(d[d>0].flatten(),bins=bins)[0]\n",
+    "    hist['S'] += np.histogram(sing[sing>0].flatten(),bins=bins)[0]\n",
+    "    \n",
+    "    data_corr[index+prev_chunk] = d\n",
+    "    data_singles[index+prev_chunk] = sing.swapaxes(0,-1)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7c4dcd5b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "step_timer.start()\n",
+    "\n",
+    "chunk_size = 1000\n",
+    "\n",
+    "psh.set_default_context('threads', num_workers=35) # num_workers=35 was found to be optimal\n",
+    "data_corr = psh.alloc(shape=dshape, dtype=np.float32)\n",
+    "data_singles = psh.alloc(shape=dshape, dtype=int)\n",
+    "\n",
+    "chunk = 0\n",
+    "while chunk < dshape[0]-1:\n",
+    "    \n",
+    "    prev_chunk = chunk\n",
+    "    chunk+=chunk_size\n",
+    "    if chunk > dshape[0]: # last chunk may have different size\n",
+    "        chunk = dshape[0]-1\n",
+    "        \n",
+    "    psh.map(correct_train, data_dc.select_trains(np.arange(prev_chunk,chunk)))\n",
+    "        \n",
+    "    print(f'Corrected trains: {chunk} ({round(chunk/dshape[0]*100)}%)',end='\\r')\n",
+    "\n",
+    "step_timer.done_step('Corrected data. Elapsed Time')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "923a5ac4",
+   "metadata": {},
+   "source": [
+    "## Plot histograms"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c43ae1dd",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "bins_c = bins[:-1]+np.diff(bins)[0]/2 # center of bins\n",
+    "\n",
+    "plt.figure(figsize=(12,8))\n",
+    "plt.plot(bins_c,hist['O'], label='Offset corrected')\n",
+    "plt.plot(bins_c,hist['CM'], label='Common Mode corrected')\n",
+    "plt.plot(bins_c,hist['CS'], label='Charge Sharing corrected')\n",
+    "plt.plot(bins_c,hist['S'], label='Singles')\n",
+    "plt.xlim(ADU_range)\n",
+    "plt.yscale('log')\n",
+    "plt.xlabel('ADU',fontsize=12)\n",
+    "plt.title(f'{karabo_id} | {proposal} - r{run}', fontsize=14)\n",
+    "plt.legend(fontsize=12);\n",
+    "plt.grid(ls=':')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "46d9fe80",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(f'Primary threshold: {split_evt_primary_threshold}')\n",
+    "print(f'Secondary threshold: {split_evt_secondary_threshold}')\n",
+    "\n",
+    "patternStats = patternClassifier.getPatternStats()\n",
+    "\n",
+    "n_singles = np.sum(patternStats['singles'])\n",
+    "n_doubles = np.sum(patternStats['doubles'])\n",
+    "n_triples = np.sum(patternStats['triples'])\n",
+    "n_quads = np.sum(patternStats['quads'])\n",
+    "n_clusters = np.sum(patternStats['clusters'])\n",
+    "known_patterns = np.sum((n_singles, n_doubles, n_triples, n_quads))\n",
+    "\n",
+    "t1,t2 = PrettyTable(),PrettyTable()\n",
+    "t1.field_names = ['Photon Hits', 'Frequency']\n",
+    "t1.add_row(['Big Clusters', f'{n_clusters/(known_patterns+n_clusters)*100: .2f} %'])\n",
+    "t1.add_row(['Listed Patterns', f'{known_patterns/(known_patterns+n_clusters)*100: .2f} %'])\n",
+    "\n",
+    "print(t1)\n",
+    "\n",
+    "t2.field_names = ['Listed Patterns', 'Frequency']\n",
+    "t2.add_row(['Singles', f'{n_singles/known_patterns*100: .2f} %'])\n",
+    "t2.add_row(['Doubles', f'{n_doubles/known_patterns*100: .2f} %'])\n",
+    "t2.add_row(['Triples', f'{n_triples/known_patterns*100: .2f} %'])\n",
+    "t2.add_row(['Quadruplets', f'{n_quads/known_patterns*100: .2f} %'])\n",
+    "\n",
+    "print(t2)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "7739d666",
+   "metadata": {},
+   "source": [
+    "## Flat-Field Statistics"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ed100e6a",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Definition of gaussian function for fitting\n",
+    "def gauss(x, *p):\n",
+    "    A, mu, sigma = p\n",
+    "    return A*np.exp(-(x-mu)**2/(2.*sigma**2))\n",
+    "\n",
+    "# rough initial estimate of fit parameters\n",
+    "fit_estimates = [np.max(hist['S']),           # amplitude\n",
+    "                 bins[np.argmax(hist['S'])],  # centroid\n",
+    "                 10]                          # sigma"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a649666b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "coeff, _ = curve_fit(gauss, bins_c, hist['S'], p0=fit_estimates)\n",
+    "singles_mu = coeff[1]\n",
+    "singles_sig = abs(coeff[2])\n",
+    "ROI = np.round([singles_mu-N_sigma_interval*singles_sig, # region of interest to find first photopeak per pixel\n",
+    "                singles_mu+N_sigma_interval*singles_sig]).astype(int)\n",
+    "y_fit = gauss(bins_c, *coeff)\n",
+    "\n",
+    "plt.figure(figsize=(9,6))\n",
+    "plt.plot(bins_c,hist['S'],'k',label = 'singles')\n",
+    "plt.plot(bins_c,y_fit,'g--',label = 'gauss fit') \n",
+    "plt.ylim(1,max(hist['S'])*1.5);\n",
+    "plt.xlim(ADU_range)\n",
+    "plt.vlines(coeff[1],0,plt.gca().get_ylim()[1],color='g',ls=':')\n",
+    "\n",
+    "plt.axvspan(ROI[0],\n",
+    "            ROI[1],\n",
+    "            alpha = .2,\n",
+    "            color = 'green',\n",
+    "            label = f'\\u03BC ± {N_sigma_interval}\\u03c3')\n",
+    "\n",
+    "plt.legend(fontsize=12);\n",
+    "plt.xlabel('ADU',fontsize=12)\n",
+    "plt.yscale('log')\n",
+    "plt.grid(ls=':')\n",
+    "plt.show()\n",
+    "\n",
+    "print('--------------------')\n",
+    "print('Fit parameters:')\n",
+    "print(f'  centroid = {np.round(singles_mu,3)}')\n",
+    "print(f'     sigma = {np.round(singles_sig,3)}')\n",
+    "print('---------------------')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d4bba07d",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Calculate singles per pixel\n",
+    "step_timer.start()\n",
+    "\n",
+    "singles_per_pixel = np.empty(np.flip(sensor_size))\n",
+    "\n",
+    "for py in range(0,int(sensor_size[1])):\n",
+    "    for px in range(0,int(sensor_size[0])):\n",
+    "        singles_per_pixel[py,px] = np.sum((data_singles[:,py,px]>=ROI[0]) & (data_singles[:,py,px]<ROI[1]))\n",
+    "\n",
+    "step_timer.done_step('Calculated singles per pixel. Elapsed Time')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c5986694",
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
+   "source": [
+    "mask_bins = np.unique(singles_per_pixel,return_counts=True)[1] > np.max(np.unique(singles_per_pixel,return_counts=True)[1])*.01\n",
+    "last_bin = np.max(np.unique(singles_per_pixel)[mask_bins]) # xlim on bin that has less than 1% of max counts\n",
+    "\n",
+    "# Plot singles distribution\n",
+    "fig = xana.heatmapPlot(\n",
+    "    singles_per_pixel,\n",
+    "    lut_label='# singles',\n",
+    "    x_label='Column',\n",
+    "    y_label='Row',\n",
+    "    vmax = last_bin\n",
+    ")\n",
+    "fig.suptitle(f'Singles Distribution', x=.48, y=.9, fontsize=14)\n",
+    "fig.set_size_inches(h=10, w=10);\n",
+    "\n",
+    "plt.figure(figsize=(7,5))\n",
+    "plt.hist(singles_per_pixel.flatten(),bins=np.arange(0,last_bin,1),\n",
+    "         align = 'left',\n",
+    "         histtype = 'bar',\n",
+    "         edgecolor='black', \n",
+    "         linewidth=1.2)\n",
+    "plt.xlabel('Singles per pixel',fontsize=12)\n",
+    "plt.grid(ls='--',axis='y',color='b',alpha=.5)\n",
+    "plt.show()\n",
+    "\n",
+    "print(f'Average number of singles per pixel: {np.round(np.sum(data_singles>0)/np.prod(sensor_size),2)}')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "3d58fc82",
+   "metadata": {},
+   "source": [
+    "## Plot random sample pixels "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "62b2650e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "N_sample_pixels = 16\n",
+    "\n",
+    "# Plot some random pixels, avoiding bad ones\n",
+    "np.random.seed(0)\n",
+    "sample_pixels = np.transpose([np.random.randint(0, sensor_size[0], N_sample_pixels),\n",
+    "                              np.random.randint(0, sensor_size[1], N_sample_pixels)])\n",
+    "while np.sum(const_data['BadPixelsDark'][sample_pixels[:,1],sample_pixels[:,0]]):\n",
+    "    sample_pixels = np.transpose([np.random.randint(0, sensor_size[0], N_sample_pixels),\n",
+    "                                  np.random.randint(0, sensor_size[1], N_sample_pixels)])\n",
+    "\n",
+    "fig = plt.figure(figsize=(20,20))\n",
+    "roi_bins = np.arange(ROI[0], ROI[1])\n",
+    "it_counter = 0\n",
+    "for px,py in sample_pixels:\n",
+    "    it_counter+=1    \n",
+    "    \n",
+    "    plt.subplot(int(np.sqrt(N_sample_pixels)),int(np.sqrt(N_sample_pixels)),it_counter)\n",
+    "    \n",
+    "    h,ADU = np.histogram(data_singles[:,py,px],bins=roi_bins)\n",
+    "    ADU_c = ADU[:-1] + np.diff(ADU)[0]/2 # center of bins\n",
+    "    \n",
+    "    p1 = plt.plot([],[],' ',label = f'({px},{py})')\n",
+    "    p2 = plt.scatter(ADU_c[h>0], h[h>0],marker = 'x',c = 'k', label = 'singles')\n",
+    "\n",
+    "    mdn = np.median(ADU_c[h>0])\n",
+    "    if ~np.isnan(mdn):\n",
+    "        p3 = plt.plot([mdn, mdn],[0,plt.gca().get_ylim()[1]],color='g', label = f'median={int(mdn)}')\n",
+    "    else:\n",
+    "        p3 = plt.plot([],[],' ', label = 'empty')\n",
+    "        \n",
+    "    try:\n",
+    "        coeff, _ = curve_fit(gauss, ADU_c, h, p0=[0, np.median(ADU_c[h>0]), singles_sig]) \n",
+    "        y_fit = gauss(ADU_c, *coeff)\n",
+    "        p4 = plt.plot(ADU_c, y_fit, label = f'fit: \\u03BC={int(np.round(coeff[1]))}')\n",
+    "\n",
+    "    except (RuntimeError, ValueError):\n",
+    "        p4 = plt.plot([],[],' ', label = 'fit error')\n",
+    "    \n",
+    "    plt.grid(ls=':')\n",
+    "    plt.xlabel('ADU')\n",
+    "    plt.xlim(ROI)\n",
+    "    plt.ylim(bottom=0)\n",
+    "    plt.legend()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a968c8df",
+   "metadata": {},
+   "source": [
+    "## Fit single photon peaks per pixel"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "49d52f2b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "step_timer.start()\n",
+    "peak_map = np.zeros(np.flip(sensor_size))[...,np.newaxis]\n",
+    "\n",
+    "for py in range(0,int(sensor_size[1])):\n",
+    "    for px in range(0,int(sensor_size[0])):            \n",
+    "        h,ADU = np.histogram(data_singles[:,py,px],bins=np.arange(ROI[0],ROI[1]))\n",
+    "        ADU_c = ADU[:-1] + np.diff(ADU)[0]/2 # center of bins\n",
+    "        \n",
+    "        if np.sum(h):\n",
+    "            if peak_fitting=='median':\n",
+    "                peak_map[py,px] = np.median(ADU_c[h>0])\n",
+    "            elif peak_fitting=='gauss':\n",
+    "                try:\n",
+    "                    coeff, _ = curve_fit(gauss, ADU_c, h, p0=[0, np.median(ADU_c[h>0]), singles_sig]) \n",
+    "                    peak_map[py,px] = coeff[1]\n",
+    "                except RuntimeError:\n",
+    "                    pass         # Failed fits remain 0 \n",
+    "        else:\n",
+    "            peak_map[py,px] = -1 # Assign -1 to empty pixels\n",
+    "\n",
+    "peak_map[np.isnan(peak_map)] = 0 # Failed fits can throw no expection but return nan coeffs\n",
+    "step_timer.done_step(f'Calculated relative gain map using {peak_fitting} fit. Elapsed Time')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "8891bcd4",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "plt.figure(figsize=(7,5))\n",
+    "plt.hist(peak_map.flatten(),bins=np.arange(ROI[0],ROI[1]),\n",
+    "         histtype = 'bar',\n",
+    "         edgecolor='black',\n",
+    "         alpha = .5,\n",
+    "         linewidth=1.2);\n",
+    "\n",
+    "h,ADU = np.histogram(peak_map.flatten(),bins=np.arange(ROI[0],ROI[1]))\n",
+    "ADU_c = ADU[:-1] + np.diff(ADU)[0]/2 # center of bins\n",
+    "\n",
+    "coeff, _ = curve_fit(gauss, ADU_c, h, p0=[h.max()/2, singles_mu, singles_sig])\n",
+    "BP_fit_threshold = [coeff[1]-N_sigma_interval*abs(coeff[2]),\n",
+    "                    coeff[1]+N_sigma_interval*abs(coeff[2])]\n",
+    "y_fit = gauss(ADU_c, *coeff)\n",
+    "plt.plot(ADU_c,y_fit, label = f'fit: \\u03BC={int(np.round(coeff[1]))}')\n",
+    "plt.vlines(coeff[1],0,plt.gca().get_ylim()[1],color='orange',ls=':')\n",
+    "plt.axvspan(BP_fit_threshold[0],\n",
+    "            BP_fit_threshold[1],\n",
+    "            alpha = .3,\n",
+    "            color = 'orange',\n",
+    "            label = f'\\u03BC ± {N_sigma_interval}\\u03c3')\n",
+    "\n",
+    "plt.grid(ls=':')\n",
+    "plt.xlim(np.array(BP_fit_threshold)*[.9,1.1])\n",
+    "plt.xlabel('Peak position [ADU]',fontsize=12);\n",
+    "plt.legend(fontsize=12)\n",
+    "plt.title(f'{karabo_id} | {proposal} - r{run}', fontsize=12)\n",
+    "plt.ylim((1, coeff[0]*1.2))\n",
+    "plt.show()\n",
+    "\n",
+    "print('--------------------')\n",
+    "print('Fit parameters:')\n",
+    "print(f'  centroid = {np.round(coeff[1],3)}')\n",
+    "print(f'     sigma = {np.round(abs(coeff[2]),3)}')\n",
+    "print('---------------------')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "e17b27ef",
+   "metadata": {},
+   "source": [
+    "## Flat-Field Bad Pixels"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "0816af0f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "const_data['BadPixelsFF'] = np.zeros(np.flip(sensor_size))[...,np.newaxis]\n",
+    "\n",
+    "# Empty Pixels\n",
+    "const_data['BadPixelsFF'][peak_map==-1] = BadPixels.FF_NO_ENTRIES.value\n",
+    "\n",
+    "# Failed Fits\n",
+    "const_data['BadPixelsFF'][peak_map==0] = BadPixels.FF_GAIN_EVAL_ERROR.value\n",
+    "\n",
+    "# Gain out of range\n",
+    "const_data['BadPixelsFF'][(peak_map!=0) & (peak_map!=-1) & ((peak_map<BP_fit_threshold[0]) | (peak_map>BP_fit_threshold[1]))] = BadPixels.FF_GAIN_DEVIATION.value\n",
+    "\n",
+    "# Plot Bad Pixels Map\n",
+    "fig = xana.heatmapPlot(\n",
+    "    np.nan_to_num(np.log2(const_data['BadPixelsFF'].squeeze())+1, neginf=np.nan),\n",
+    "    cb_label='Bad pixel bit',\n",
+    "    x_label='Column',\n",
+    "    y_label='Row',\n",
+    ")\n",
+    "fig.suptitle(f'FF Bad Pixels Map({karabo_id} | {proposal} - r{run})', x=.5, y=.9, fontsize=16)\n",
+    "fig.set_size_inches(h=12, w=12)\n",
+    "\n",
+    "t = PrettyTable()\n",
+    "t.title = 'Flat-Field Bad Pixel Analysis'\n",
+    "t.field_names = ['Bit', 'Value', 'Type       ', 'Counts', '%']\n",
+    "t.align['Type       '] = 'r'\n",
+    "\n",
+    "for BP_type in [BadPixels.FF_GAIN_DEVIATION, BadPixels.FF_GAIN_EVAL_ERROR, BadPixels.FF_NO_ENTRIES]:\n",
+    "    t.add_row([BP_type.bit_length(),\n",
+    "               BP_type.value,\n",
+    "               BP_type.name,\n",
+    "               np.sum(const_data['BadPixelsFF']==BP_type.value),\n",
+    "               np.round(100*np.sum(const_data['BadPixelsFF']==BP_type.value)/np.prod(sensor_size),2)\n",
+    "              ])\n",
+    "t.add_row(['-','-',\n",
+    "           'Total',\n",
+    "           np.sum(const_data['BadPixelsFF']>0),\n",
+    "           np.round(100*np.sum(const_data['BadPixelsFF']>0)/np.prod(sensor_size),2)\n",
+    "          ])\n",
+    "print(t)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "e97610e2",
+   "metadata": {},
+   "source": [
+    "## Relative Gain Map"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "1ea03d36",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Replace FF bad pixels with mean peak value\n",
+    "peak_map[const_data['BadPixelsFF']>0] = np.nanmean(peak_map[const_data['BadPixelsFF']==0])\n",
+    "\n",
+    "# Calculate relative gain\n",
+    "rel_gain_map = 1/(peak_map.squeeze()/np.mean(peak_map))\n",
+    "\n",
+    "fig = xana.heatmapPlot(\n",
+    "    rel_gain_map,\n",
+    "    cb_label='Relative gain',\n",
+    "    x_label='Column',\n",
+    "    y_label='Row',\n",
+    "    vmin=np.floor(np.min(rel_gain_map)/.2)*.2, # force cb limits to be multiples of 0.2 \n",
+    "    vmax=np.ceil(np.max(rel_gain_map)/.2)*.2\n",
+    ")\n",
+    "fig.suptitle(f'Relative Gain Map ({karabo_id} | {proposal} - r{run})', x=.48, y=.9, fontsize=16)\n",
+    "fig.set_size_inches(h=12, w=12)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "c2870edc",
+   "metadata": {},
+   "source": [
+    "## Absolute Gain Conversion Constant"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "282ad58a",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "step_timer.start()\n",
+    "\n",
+    "# Correct data with calculated gain map\n",
+    "data_gain_corrected = data_corr*rel_gain_map\n",
+    "\n",
+    "h,ADU = np.histogram(data_gain_corrected.flatten(),\n",
+    "                     bins=np.arange(BP_fit_threshold[0],BP_fit_threshold[1]).astype(int))\n",
+    "ADU_c = ADU[:-1] + np.diff(ADU)[0]/2 # center of bins\n",
+    "\n",
+    "coeff, _ = curve_fit(gauss, ADU_c, h, p0=[h.max()/2, singles_mu, singles_sig])\n",
+    "y_fit = gauss(ADU_c, *coeff)\n",
+    "\n",
+    "gain_conv_const = coeff[1] / peak_energy\n",
+    "\n",
+    "abs_gain_map = rel_gain_map / gain_conv_const\n",
+    "\n",
+    "step_timer.done_step('Calculated Gain Conversion Constant. Elapsed Time')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "3a0daabf",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "plt.figure(figsize=(7,5))\n",
+    "\n",
+    "plt.scatter(ADU_c/gain_conv_const, h, color='k', marker='x', label='Gain Corrected')\n",
+    "plt.plot(ADU_c/gain_conv_const, y_fit, color='orange', label = f'fit: \\u03BC={(np.round(coeff[1],2))} ADU');\n",
+    "\n",
+    "plt.ylim(bottom=0)\n",
+    "plt.legend()\n",
+    "plt.grid(ls=':')\n",
+    "\n",
+    "plt.plot([peak_energy, peak_energy],[0,plt.gca().get_ylim()[1]],color='orange', ls = '--')\n",
+    "\n",
+    "ax1 = plt.gca()\n",
+    "ax2 = ax1.twiny()\n",
+    "ax2.set_xticks(ax1.get_xticks())\n",
+    "ax2.set_xbound(ax1.get_xbound())\n",
+    "ax2.set_xticklabels((ax1.get_xticks()*gain_conv_const).astype(int))\n",
+    "ax2.set_xlabel('ADU',fontsize=12)\n",
+    "ax1.set_xlabel('keV',fontsize=12)\n",
+    "\n",
+    "ax1.xaxis.label.set_color('red')\n",
+    "ax1.tick_params(axis='x', colors='red')\n",
+    "ax2.xaxis.label.set_color('blue')\n",
+    "ax2.tick_params(axis='x', colors='blue')\n",
+    "\n",
+    "plt.suptitle(f'Absolute Gain Conversion ({karabo_id} | {proposal} - r{run})',y =1.02,fontsize = 12)\n",
+    "plt.show()\n",
+    "\n",
+    "print(f'Gain conversion constant: {np.round(gain_conv_const,4)} ADU/keV')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "c93fb9ac",
+   "metadata": {},
+   "source": [
+    "## Gain Map Validation\n",
+    "\n",
+    "Validation tests:\n",
+    "1. Inspect correlation between calculated gain map and gain map loaded from DB\n",
+    "2. Perform gain correction of current FF with calculated gain map and DB gain map and compare energy resolution and linearity"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "8792ff72",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Retrieve DB RelativeGain Map\n",
+    "illum_condition_db = Conditions.Illuminated.ePix100(\n",
+    "    bias_voltage=bias_voltage,\n",
+    "    integration_time=integration_time,\n",
+    "    temperature=temperature_k,\n",
+    "    in_vacuum=in_vacuum,\n",
+    "    photon_energy=peak_energy\n",
+    ")\n",
+    "\n",
+    "db_gain_map = get_constant_from_db(\n",
+    "    karabo_id=karabo_id,\n",
+    "    karabo_da=karabo_da,\n",
+    "    constant=getattr(Constants.ePix100, 'RelativeGain')(),\n",
+    "    condition=illum_condition_db,\n",
+    "    empty_constant=None,\n",
+    "    cal_db_interface=cal_db_interface,\n",
+    "    creation_time=creation_time,\n",
+    "    timeout=cal_db_timeout\n",
+    ")\n",
+    "\n",
+    "if db_gain_map is None:\n",
+    "    print('Waring: No previous RelativeGain map was found for this detector conditions.')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "1150be55",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if db_gain_map is not None:\n",
+    "    \n",
+    "    # Calculate gain conversion constant of DB gain map\n",
+    "    gain_conv_const_db = 1/np.median(db_gain_map[const_data['BadPixelsDark'].squeeze()>0])\n",
+    "    \n",
+    "    # Correlate new and DB gain maps\n",
+    "    plt.figure(figsize=(7,7))\n",
+    "\n",
+    "    plt.hist2d(db_gain_map.flatten(),\n",
+    "               abs_gain_map.flatten(),\n",
+    "               bins = 200,\n",
+    "               norm=LogNorm(),\n",
+    "              );\n",
+    "    plt.xlabel('DB noise map',fontsize=12)\n",
+    "    plt.ylabel('New noise map',fontsize=12)\n",
+    "\n",
+    "    plt.xlim(np.min([db_gain_map,abs_gain_map]),np.max([db_gain_map,abs_gain_map]))\n",
+    "    plt.ylim(np.min([db_gain_map,abs_gain_map]),np.max([db_gain_map,abs_gain_map]))\n",
+    "    plt.grid(ls=':')\n",
+    "\n",
+    "    rel_change = np.mean(abs(abs_gain_map-db_gain_map)/abs_gain_map)\n",
+    "    print(f'Average relative change of new gain map: {np.round(rel_change*100,3)} %')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e55aa651",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def correct_validation_train(worker_id, index, train_id, dc):\n",
+    "\n",
+    "    d = dc[pixels_src[0]][pixels_src[1]].astype(np.float32)\n",
+    "\n",
+    "    # Offset correction\n",
+    "    d -= const_data['Offset'].squeeze()\n",
+    "\n",
+    "    # Common Mode correction\n",
+    "    d = d.swapaxes(0,-1)\n",
+    "    d = cmCorrection_block.correct(d)\n",
+    "    d = cmCorrection_col.correct(d)\n",
+    "    d = cmCorrection_row.correct(d)\n",
+    "    d = d.swapaxes(0,-1)\n",
+    "\n",
+    "    # Relative Gain correction\n",
+    "    d_new_map = d*rel_gain_map\n",
+    "    if db_gain_map is not None:\n",
+    "        d_db_map  = d*db_gain_map*gain_conv_const_db\n",
+    "\n",
+    "    # Charge Sharing correction\n",
+    "    d, patterns = patternClassifier.classify(d.swapaxes(0,-1))\n",
+    "    FF_data[index] = d.swapaxes(0,-1) # no gain correction\n",
+    "    \n",
+    "    d_new_map, patterns = patternClassifier.classify(d_new_map.swapaxes(0,-1))\n",
+    "    FF_data_new_map[index] = d_new_map.swapaxes(0,-1) # gain correction with new gain map\n",
+    "    \n",
+    "    if db_gain_map is not None:\n",
+    "        d_db_map, patterns = patternClassifier.classify(d_db_map.swapaxes(0,-1))\n",
+    "        FF_data_db_map[index] = d_db_map.swapaxes(0,-1) # gain correction with DB gain map"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a1319015",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Correct validation trains\n",
+    "step_timer.start()\n",
+    "\n",
+    "N_validation_trains = 1000\n",
+    "\n",
+    "FF_data = psh.alloc(shape=(N_validation_trains,dshape[1],dshape[2]), dtype=np.float32)\n",
+    "FF_data_new_map = psh.alloc(shape=(N_validation_trains,dshape[1],dshape[2]), dtype=np.float32)\n",
+    "if db_gain_map is not None:\n",
+    "    FF_data_db_map = psh.alloc(shape=(N_validation_trains,dshape[1],dshape[2]), dtype=np.float32)\n",
+    "\n",
+    "psh.map(correct_validation_train, data_dc.select_trains(np.s_[:N_validation_trains]))\n",
+    "\n",
+    "step_timer.done_step('Corrected evaluation data. Elapsed Time')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "20f9faa5",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Calculate histograms\n",
+    "bins_FF = np.arange(-50,800)\n",
+    "FF_hist_CS = np.histogram(FF_data[FF_data>0].flatten(),bins=bins_FF)[0]\n",
+    "\n",
+    "bins_keV_new = bins_FF/gain_conv_const\n",
+    "FF_hist_GC_new_map = np.histogram(FF_data_new_map/gain_conv_const,bins=bins_keV_new)[0]\n",
+    "\n",
+    "plt.figure(figsize=(12,8))\n",
+    "bins_ADU = bins_FF[:-1] + np.diff(bins_FF)[0]/2 # center of bins\n",
+    "bins_keV_new = bins_keV_new[:-1] + np.diff(bins_keV_new)[0]/2 # center of bins\n",
+    "plt.plot(bins_ADU,FF_hist_CS, color='black', label='Before gain correction')\n",
+    "plt.plot(bins_keV_new*gain_conv_const, FF_hist_GC_new_map, color='b', label='Gain correction with new map')\n",
+    "\n",
+    "if db_gain_map is not None:\n",
+    "    bins_keV_db = bins_FF/gain_conv_const_db\n",
+    "    FF_hist_GC_db_map = np.histogram(FF_data_db_map/gain_conv_const_db,bins=bins_keV_db)[0]\n",
+    "    bins_keV_db = bins_keV_db[:-1] + np.diff(bins_keV_db)[0]/2 # center of bins\n",
+    "    plt.plot(bins_keV_db*gain_conv_const_db, FF_hist_GC_db_map, color='r', label='Gain correction with DB map')\n",
+    "\n",
+    "plt.yscale('log')\n",
+    "plt.xlim(1,bins_FF[-1]+1)\n",
+    "\n",
+    "plt.xlabel('ADU',fontsize=12)\n",
+    "plt.legend(fontsize=12)\n",
+    "plt.title(f'{karabo_id} | {proposal} - r{run}', fontsize=14)\n",
+    "plt.grid(ls=':')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c35bddec",
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
+   "source": [
+    "N_peaks = 4\n",
+    "sigma_tol = 2 # sigma tolerance to show in gauss fit\n",
+    "\n",
+    "# Ignore split events below primary energy threshold\n",
+    "E_cut = np.mean(const_data['Noise'])*split_evt_primary_threshold/gain_conv_const\n",
+    "\n",
+    "too_many_peaks = True\n",
+    "while too_many_peaks: # Iterate backwards on number of peaks until no exception is thrown\n",
+    "    try:\n",
+    "        FF_hist_AGC_new = FF_hist_GC_new_map/gain_conv_const\n",
+    "        if db_gain_map is not None:\n",
+    "            FF_hist_AGC_db = FF_hist_GC_db_map/gain_conv_const_db\n",
+    "        else:\n",
+    "            FF_hist_AGC_db=None\n",
+    "            bins_keV_db=None\n",
+    "\n",
+    "        E_res,rel_dev,abs_dev = [],[],[]\n",
+    "        colors = ['blue','red']\n",
+    "\n",
+    "        for FF_hist_AGC,bins_keV,leg in zip([FF_hist_AGC_new,FF_hist_AGC_db],[bins_keV_new,bins_keV_db],['new','DB']):\n",
+    "\n",
+    "            if FF_hist_AGC is None:\n",
+    "                continue\n",
+    "\n",
+    "            FF_hist_AGC = FF_hist_AGC[bins_keV>E_cut]\n",
+    "            FF_hist_AGC[0]=0\n",
+    "\n",
+    "            bins_keV = bins_keV[bins_keV>E_cut]\n",
+    "            c = colors[0]\n",
+    "            colors.pop(0)\n",
+    "\n",
+    "            fig = plt.figure(figsize=(12,6))\n",
+    "            plt.suptitle('Correction with '+leg+' gain map',fontsize = 15)\n",
+    "\n",
+    "            plt.fill(bins_keV,FF_hist_AGC, color='k',alpha=.2,label='Corrected data')\n",
+    "            plt.title(f'{karabo_id} | {proposal} - r{run}', fontsize=14)\n",
+    "\n",
+    "            ylim_top = plt.gca().get_ylim()[1]\n",
+    "\n",
+    "            ROI_shift = 0\n",
+    "            for p in range(1,N_peaks+1):\n",
+    "\n",
+    "                peak_ROI = np.array([p*peak_energy-peak_energy/2, p*peak_energy+peak_energy/2]) + ROI_shift\n",
+    "                xx = (bins_keV>peak_ROI[0]) & (bins_keV<peak_ROI[1])\n",
+    "\n",
+    "                coeff, _ = curve_fit(gauss, bins_keV[xx], FF_hist_AGC[xx], p0=[FF_hist_AGC[xx].max(), p*peak_energy, 1])\n",
+    "                y_fit = gauss(bins_keV[xx], *coeff)\n",
+    "\n",
+    "                xx_sigma_lim = (bins_keV>coeff[1]-abs(coeff[2])*sigma_tol) & (bins_keV<coeff[1]+abs(coeff[2])*sigma_tol)\n",
+    "\n",
+    "                plt.vlines(p*peak_energy,0,ylim_top,ls='-',color='grey',label=f'expected peaks')\n",
+    "                plt.fill_between(bins_keV[xx_sigma_lim],\n",
+    "                                 FF_hist_AGC[xx_sigma_lim],\n",
+    "                                 color='orange',\n",
+    "                                 alpha=.5,\n",
+    "                                 label=f'\\u03BC ± {sigma_tol}\\u03c3')\n",
+    "                plt.plot(bins_keV[xx],y_fit,color=c)\n",
+    "                plt.vlines(coeff[1],0,ylim_top,ls='--',color=c,label=f'peak {p}: {coeff[1]:,.2f} keV')\n",
+    "\n",
+    "                ROI_shift = coeff[1] - p*peak_energy   \n",
+    "\n",
+    "                E_res.append(abs(2*np.sqrt(2*np.log(2))*coeff[2]/coeff[1])*100)\n",
+    "                abs_dev.append(coeff[1]-peak_energy*p)\n",
+    "                rel_dev.append(abs(abs_dev[-1])/(peak_energy*p)*100)\n",
+    "\n",
+    "            plt.yscale('log')    \n",
+    "            plt.xlabel('keV',fontsize=12)\n",
+    "            plt.xlim(left=0)\n",
+    "            plt.ylim(.1,ylim_top)\n",
+    "\n",
+    "            # Remove repeated entries from legend\n",
+    "            handles, labels = plt.gca().get_legend_handles_labels()\n",
+    "            by_label = dict(zip(labels, handles))\n",
+    "            plt.legend(by_label.values(), by_label.keys())\n",
+    "            plt.grid(ls=':')\n",
+    "\n",
+    "            t = PrettyTable()\n",
+    "            t.field_names = ['Peak','Energy Resolution','Rel. Deviation','Abs. Deviation']\n",
+    "            t.title = f'{leg} gain map'\n",
+    "            for p in range(-N_peaks,0):\n",
+    "                t.add_row([f'#{p+N_peaks+1}: {peak_energy*(p+N_peaks+1):,.3f} keV',\n",
+    "                            f'{E_res[p]:,.2f} %', \n",
+    "                            f'{rel_dev[p]:,.2f} %',\n",
+    "                            f'{abs_dev[p]:,.2f} keV'])        \n",
+    "            print(t)\n",
+    "            \n",
+    "            too_many_peaks = False\n",
+    "            plt.show()\n",
+    "\n",
+    "    # throw exception if fit fails due to wrong estimate of number of peaks\n",
+    "    except RuntimeError: \n",
+    "        N_peaks -= 1\n",
+    "        plt.close(fig) # delete plots if exception was found due to wrong number of peaks"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "205f27b9",
+   "metadata": {},
+   "source": [
+    "## Linearity Analysis"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d6cf1264",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "peaks = np.arange(1,N_peaks+1)\n",
+    "plt.figure(figsize=(15,6))\n",
+    "plt.subplot(1,2,1)\n",
+    "plt.plot(peaks,[peak_energy*p for p in peaks], '-', c='k', label='expected')\n",
+    "plt.plot(peaks,[peak_energy*p for p in peaks]+np.array(abs_dev[:N_peaks]), 'o', c='b')\n",
+    "fit_coeffs= np.polyfit(peaks,[peak_energy*p for p in peaks]+np.array(abs_dev[:N_peaks]),1)\n",
+    "\n",
+    "plt.plot(peaks,fit_coeffs[0]*peaks+fit_coeffs[1], '--', c='b', label='New gain map')\n",
+    "str_theo  = f'$a_1$={peak_energy :,.4f}, $a_0$=0'\n",
+    "str_new = f'$a_1$={fit_coeffs[0]:,.4f}, $a_0$={fit_coeffs[1]:,.4f}'\n",
+    "plt.annotate(s=str_theo,xy=(.36,.94),xycoords='axes fraction',fontsize=11,bbox=dict(facecolor='k',alpha=.2,pad=1))\n",
+    "plt.annotate(s=str_new ,xy=(.36,.88),xycoords='axes fraction',fontsize=11,bbox=dict(facecolor='b',alpha=.2,pad=1))\n",
+    "\n",
+    "xx = np.arange(1,100,.1) # in photons\n",
+    "y_fit_new = fit_coeffs[0]*xx+fit_coeffs[1] # extrapolation for 100 photons\n",
+    "\n",
+    "plt.xticks(peaks)\n",
+    "plt.title(f'Linearity ({karabo_id} | {proposal} - r{run})')\n",
+    "plt.xlabel('# Photons')\n",
+    "plt.ylabel('Energy (keV)')\n",
+    "plt.legend(fontsize=12)\n",
+    "plt.grid(ls=':')\n",
+    "\n",
+    "plt.subplot(1,2,2)\n",
+    "dev_new = (y_fit_new-(peak_energy*xx))/(peak_energy*xx)*100\n",
+    "plt.plot(xx*peak_energy,dev_new,c='b', label='New gain map')\n",
+    "plt.xscale('log')\n",
+    "plt.xlim(right=100)\n",
+    "plt.xlabel('Energy (keV)')\n",
+    "plt.ylabel('Linearity Deviation (%)')\n",
+    "plt.title(f'Linearity extrapolation ({karabo_id} | {proposal} - r{run})')\n",
+    "plt.grid(ls=':',which='both')\n",
+    "\n",
+    "if db_gain_map is not None:\n",
+    "    plt.subplot(1,2,1)\n",
+    "    \n",
+    "    db_fit = np.polyfit(peaks,[peak_energy*p for p in peaks]+np.array(abs_dev[N_peaks:]),1)\n",
+    "    plt.plot(peaks,[peak_energy*p for p in peaks]+np.array(abs_dev[N_peaks:]), 'o', c='r')\n",
+    "    plt.plot(peaks,db_fit[0]*peaks+db_fit[1], '--', c='r', label='DB gain map')\n",
+    "    \n",
+    "    str_db  = f'$a_1$={db_fit[0] :,.4f}, $a_0$={db_fit[1] :,.4f}'\n",
+    "    y_fit_db = db_fit[0]*xx+db_fit[1] # extrapolation for 100 photons\n",
+    "    plt.annotate(s=str_db  ,xy=(.36,.82),xycoords='axes fraction',fontsize=11,bbox=dict(facecolor='r',alpha=.2,pad=1))\n",
+    "\n",
+    "    plt.subplot(1,2,2)\n",
+    "    dev_db = (y_fit_db-(peak_energy*xx))/(peak_energy*xx)*100\n",
+    "    plt.plot(xx*peak_energy,dev_db,c='r', label='DB gain map')\n",
+    "    plt.legend(fontsize=12)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "441e426a",
+   "metadata": {},
+   "source": [
+    "## Energy Resolution Analysis"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "feb7a5bf",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def power_function(x,*p):\n",
+    "    a,b,c = p\n",
+    "    return a*x**b + c\n",
+    "# rough initial estimate of fit parameters\n",
+    "fit_estimates = [20,-.5,0]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "25b3f89a",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Linearity of the visualized peaks\n",
+    "plt.figure(figsize=(8,6))\n",
+    "\n",
+    "xx = np.arange(0,50,.1)\n",
+    "if db_gain_map is not None:\n",
+    "    plt.plot(peaks*peak_energy,E_res[N_peaks:], 'o', c='r', label='DB gain map')\n",
+    "    coeff,_ = curve_fit(power_function,peaks*peak_energy,E_res[N_peaks:],p0=fit_estimates)\n",
+    "    power_fit = power_function(xx,*coeff)\n",
+    "    plt.plot(xx,power_fit, '--', c='r')\n",
+    "\n",
+    "plt.plot(peaks*peak_energy,E_res[:N_peaks], 'o', c='b', label='New gain map')\n",
+    "coeff,_ = curve_fit(power_function,peaks*peak_energy,E_res[:N_peaks],p0=fit_estimates)\n",
+    "power_fit = power_function(xx,*coeff)\n",
+    "plt.plot(xx,power_fit, '--', c='b')\n",
+    "\n",
+    "plt.title(f'Energy Resolution ({karabo_id} | {proposal} - r{run})')\n",
+    "plt.xlabel('Energy (keV)')\n",
+    "plt.ylabel('Energy Resolution (%)')\n",
+    "plt.legend(fontsize=12)\n",
+    "plt.xlim(1,np.ceil(xx[-1]))\n",
+    "plt.ylim(0,30)\n",
+    "plt.grid(ls=':')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "f85f601d",
+   "metadata": {},
+   "source": [
+    "## Calibration Constants DB\n",
+    "Send the flat-field constants (RelativeGain and BadPixelsIlluminated) to the database and/or save them locally."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "b898799f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Save constants to DB\n",
+    "\n",
+    "md = None\n",
+    "\n",
+    "constant_maps = {'RelativeGain': abs_gain_map,\n",
+    "                 'BadPixelsIlluminated': const_data['BadPixelsFF']\n",
+    "                } \n",
+    "\n",
+    "for const_name in constant_maps.keys():\n",
+    "    const = getattr(Constants.ePix100, const_name)()\n",
+    "    const.data = constant_maps[const_name].data\n",
+    "\n",
+    "    for parm in illum_condition_db.parameters:\n",
+    "        if parm.name == \"Sensor Temperature\":\n",
+    "            parm.lower_deviation = temp_limits\n",
+    "            parm.upper_deviation = temp_limits\n",
+    "\n",
+    "    # Get physical detector unit\n",
+    "    db_module = get_pdu_from_db(\n",
+    "        karabo_id=karabo_id,\n",
+    "        karabo_da=karabo_da,\n",
+    "        constant=const,\n",
+    "        condition=illum_condition_db,\n",
+    "        cal_db_interface=cal_db_interface,\n",
+    "        snapshot_at=creation_time)[0]\n",
+    "\n",
+    "    # Inject or save calibration constants\n",
+    "    if db_output:\n",
+    "        md = send_to_db(\n",
+    "            db_module=db_module,\n",
+    "            karabo_id=karabo_id,\n",
+    "            constant=const,\n",
+    "            condition=illum_condition_db,\n",
+    "            file_loc=file_loc,\n",
+    "            report_path=report,\n",
+    "            cal_db_interface=cal_db_interface,\n",
+    "            creation_time=creation_time,\n",
+    "            timeout=cal_db_timeout\n",
+    "        )\n",
+    "\n",
+    "    if local_output:\n",
+    "        Path(out_folder).mkdir(parents=True, exist_ok=True)\n",
+    "        md = save_const_to_h5(\n",
+    "            db_module=db_module,\n",
+    "            karabo_id=karabo_id,\n",
+    "            constant=const,\n",
+    "            condition=illum_condition_db,\n",
+    "            data=const.data,\n",
+    "            file_loc=file_loc,\n",
+    "            report=report,\n",
+    "            creation_time=creation_time,\n",
+    "            out_folder=out_folder\n",
+    "        )\n",
+    "        print(f\"Calibration constant {const_name} is stored locally at {out_folder} \\n\")\n",
+    "\n",
+    "print(\"Constants parameter conditions are:\\n\"\n",
+    "      f\"• Bias voltage: {bias_voltage}\\n\"\n",
+    "      f\"• Integration time: {integration_time}\\n\"\n",
+    "      f\"• Temperature: {temperature_k}\\n\"\n",
+    "      f\"• Source Energy: {peak_energy}\\n\"      \n",
+    "      f\"• In Vacuum: {in_vacuum}\\n\"\n",
+    "      f\"• Creation time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\\n\")"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "cal_venv",
+   "language": "python",
+   "name": "cal_venv"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.8.11"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/src/xfel_calibrate/notebooks.py b/src/xfel_calibrate/notebooks.py
index fef99624c513b4f7087c7d2a1767717b97d9f1cc..3d241b27e7f1d41050ce49b28be9823611d9040d 100644
--- a/src/xfel_calibrate/notebooks.py
+++ b/src/xfel_calibrate/notebooks.py
@@ -234,6 +234,12 @@ notebooks = {
                             "use function": "balance_sequences",
                             "cluster cores": 4},
         },
+        "FF": {
+            "notebook": "notebooks/ePix100/Characterize_FlatFields_ePix100_NBC.ipynb",
+            "concurrency": {"parameter": None,
+                            "default concurrency": None,
+                            "cluster cores": 4},
+        },
     },
     "EPIX10K": {
         "DARK": {