diff --git a/cal_tools/cal_tools/ana_tools.py b/cal_tools/cal_tools/ana_tools.py
index 83ee3a30fd65d15bc5d48432cd7fabe39de53f2a..0b36016b8d18f7a069a9c3bd9995ce2e8fbfd256 100644
--- a/cal_tools/cal_tools/ana_tools.py
+++ b/cal_tools/cal_tools/ana_tools.py
@@ -352,6 +352,13 @@ class HMType(Enum):
     INSET_AXIS = 2
 
 
+class IMType(Enum):
+    NO_CONST = -1e+4
+    ALL_NAN = -2e+4
+    STRANGE_VAL = -3e+4
+    ALL_BAD = -4e+4
+    NO_BPMAP = -5e+4
+
 def get_range(data, scale):
     """
     Return a range calculated by median absolute deviations
@@ -386,10 +393,24 @@ def hm_combine(data, fname=None, htype=None, **kwargs):
     fig = plt.figure(figsize=(10, 12))
     ax = fig.add_subplot(111)
 
+    data_txt = np.full(data.shape, '').astype(str)
+    data_txt[data == IMType.NO_CONST.value] = 'X'
+    data_txt[data == IMType.ALL_NAN.value] = 'n'
+    data_txt[data == IMType.ALL_BAD.value] = 'B'
+    data_txt[data == IMType.STRANGE_VAL.value] = '0'
+    data_txt[data == IMType.NO_BPMAP.value] = 'N'
+
+    # Mask sentinel codes on a float copy so the caller's array is not
+    # mutated and an integer-typed input cannot break the NaN assignment.
+    data = data.astype(float)
+    for im_type in IMType:
+        data[data == im_type.value] = np.nan
+
     xana.heatmapPlot(data, add_panels=False, cmap='viridis',
                      cb_pad=0.6 if htype == HMType.INSET_AXIS else 0.1,
                      use_axis=ax,
                      **kwargs)
+
     plt.setp(ax.yaxis.get_majorticklabels(), rotation=90)
 
     pad = kwargs.get('pad', [0.125, 0.125, 0.1, 0.18])
@@ -406,6 +427,14 @@ def hm_combine(data, fname=None, htype=None, **kwargs):
     h_frame = 1 - pad_b - pad_t
     w_frame = 1 - pad_l - pad_r
 
+    # Loop over data dimensions and create text annotations.
+    for i in range(data.shape[0]):
+        for j in range(data.shape[1]):
+            _ = ax.text(j+0.5, i+0.5, data_txt[i, j],
+                        horizontalalignment="center",
+                        verticalalignment="center",
+                        color="black")
+
     if htype == HMType.INSET_1D:
         ax.tick_params(axis='y', which='major', pad=50)
         for y in range(data.shape[0]):
diff --git a/notebooks/AGIPD/PlotFromCalDB_AGIPD_NBC.ipynb b/notebooks/AGIPD/PlotFromCalDB_AGIPD_NBC.ipynb
index 3d9032582b7d2da7f2e1fc33c9cefa49eb675ebe..842f9db5bc79439ceb893585e25adc621ba96613 100644
--- a/notebooks/AGIPD/PlotFromCalDB_AGIPD_NBC.ipynb
+++ b/notebooks/AGIPD/PlotFromCalDB_AGIPD_NBC.ipynb
@@ -74,7 +74,7 @@
     "from iCalibrationDB import Constants, Conditions, Detectors, ConstantMetaData\n",
     "from cal_tools.tools import get_from_db, get_random_db_interface\n",
     "from cal_tools.ana_tools import (save_dict_to_hdf5, load_data_from_hdf5, \n",
-    "                                 combine_constants, HMType,\n",
+    "                                 combine_constants, HMType, IMType,\n",
     "                                 hm_combine, combine_lists, get_range)"
    ]
   },
@@ -370,9 +370,9 @@
     "    toStore = prepare_to_store(np.nanmean(cdata, axis=(1, 3)), nMem)\n",
     "    \n",
     "    if not constantBP_versions[i]:\n",
-    "        toStoreBP = np.full_like(toStore, np.nan)\n",
-    "        toStoreBPStd = np.full_like(toStore, np.nan)\n",
-    "        cdataNBP = np.full_like(toStore, np.nan)\n",
+    "        toStoreBP = np.full_like(toStore, IMType.NO_BPMAP.value)\n",
+    "        toStoreBPStd = np.full_like(toStore, IMType.NO_BPMAP.value)\n",
+    "        cdataNBP = np.full_like(toStore, IMType.NO_BPMAP.value)\n",
     "    \n",
     "    dpar = {p.name: p.value for p in constant_data.detector_condition.parameters}\n",
     "\n",
diff --git a/notebooks/LPD/PlotFromCalDB_LPD_NBC.ipynb b/notebooks/LPD/PlotFromCalDB_LPD_NBC.ipynb
index 00c1b59cda883ddd574b7d8484473580a275bc7d..6f797cae32338fb12d06bba753296416093114a8 100644
--- a/notebooks/LPD/PlotFromCalDB_LPD_NBC.ipynb
+++ b/notebooks/LPD/PlotFromCalDB_LPD_NBC.ipynb
@@ -28,13 +28,13 @@
     "cluster_profile = \"noDB\"  # The ipcluster profile to use\n",
     "start_date = \"2019-01-30\"  # Date to start investigation interval from\n",
     "end_date = \"2019-12-12\"  # Date to end investigation interval at, can be \"now\"\n",
-    "nconstants = 10 # Number of time stamps to plot. If not 0, overcome start_date.\n",
+    "nconstants = 20 # Number of time stamps to plot. If not 0, override start_date.\n",
     "constants = [\"Noise\", \"Offset\", \"SlopesFF\", \"SlopesCI\"] # constants to plot\n",
     "modules = [2]  # Modules, set to -1 for all, range allowed\n",
-    "bias_voltages = [250]  # Bias voltage\n",
-    "mem_cells = [512]  # Number of used memory cells.\n",
+    "bias_voltages = [250, 500]  # Bias voltage\n",
+    "mem_cells = [1, 128, 512]  # Number of used memory cells.\n",
     "photon_energy = 9.2  # Photon energy of the beam\n",
-    "out_folder = \"/gpfs/exfel/data/scratch/karnem/test_LPD/\"  # Output folder, required\n",
+    "out_folder = \"/gpfs/exfel/data/scratch/karnem/test_LPD2\"  # Output folder, required\n",
     "use_existing = \"\" # If not empty, constants stored in given folder will be used\n",
     "cal_db_timeout = 180000 # timeout on caldb requests\",\n",
     "adu_to_photon = 33.17 # ADU to photon conversion factor (8000 / 3.6 / 67.)\n",
@@ -73,7 +73,7 @@
     "from iCalibrationDB import Constants, Conditions, Detectors, ConstantMetaData\n",
     "from cal_tools.tools import get_from_db, get_random_db_interface\n",
     "from cal_tools.ana_tools import (save_dict_to_hdf5, load_data_from_hdf5, \n",
-    "                                 combine_constants, HMType,\n",
+    "                                 combine_constants, HMType, IMType,\n",
     "                                 hm_combine, combine_lists, get_range)"
    ]
   },
@@ -351,9 +351,9 @@
     "    toStore = prepare_to_store(np.nanmean(cdata, axis=(1, 3)), nMem)\n",
     "    \n",
     "    if not constantBP_versions[i]:\n",
-    "        toStoreBP = np.full_like(toStore, np.nan)\n",
-    "        toStoreBPStd = np.full_like(toStore, np.nan)\n",
-    "        cdataNBP = np.full_like(toStore, np.nan)\n",
+    "        toStoreBP = np.full_like(toStore, IMType.NO_BPMAP.value)\n",
+    "        toStoreBPStd = np.full_like(toStore, IMType.NO_BPMAP.value)\n",
+    "        cdataNBP = np.full_like(toStore, IMType.NO_BPMAP.value)\n",
     "    \n",
     "    dpar = {p.name: p.value for p in constant_data.detector_condition.parameters}\n",
     "\n",
diff --git a/notebooks/generic/PlotFromCalDB_NBC.ipynb b/notebooks/generic/PlotFromCalDB_NBC.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..88472fbbedf8d9b4c8468f4662e578c18144ff17
--- /dev/null
+++ b/notebooks/generic/PlotFromCalDB_NBC.ipynb
@@ -0,0 +1,625 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Statistical analysis of calibration factors\n",
+    "\n",
+    "Author: Mikhail Karnevskiy, Steffen Hauf, Version 0.1\n",
+    "\n",
+    "Calibration constants for the selected detector from the database with injection time between start_date and end_date are considered.\n",
+    "\n",
+    "To be visualized, calibration constants are averaged per group of pixels. Plots shows calibration constant over time for each constant.\n",
+    "\n",
+    "Values shown in plots are saved in h5 files."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "cluster_profile = \"noDB\" # The ipcluster profile to use\n",
+    "start_date = \"2019-06-30\" # date to start investigation interval from\n",
+    "end_date = \"NOW\" # date to end investigation interval at, can be \"now\"\n",
+    "dclass=\"jungfrau\" # Detector class\n",
+    "modules = [\"Jungfrau_M039\"] # detector entry in the DB to investigate\n",
+    "constants = [\"Noise\", \"Offset\"] # constants to plot\n",
+    "nconstants = 20 # Number of time stamps to plot. If not 0, override start_date.\n",
+    "max_time = 15 # max time margin in minutes to match bad pixels\n",
+    "\n",
+    "gain_setting = [0] # gain stages\n",
+    "bias_voltage = [90, 180] # Bias voltage\n",
+    "temperature = [291] # Operation temperature\n",
+    "integration_time = [250, 50] # Integration time\n",
+    "pixels_x=[1024] # number of pixels along X axis\n",
+    "pixels_y=[512, 1024] # number of pixels along Y axis\n",
+    "in_vacuum = [0] # 0 if detector is operated in room pressure\n",
+    "memory_cells = [1] # number of memory cells\n",
+    "parameter_names = ['bias_voltage', 'integration_time', 'temperature', \n",
+    "                   'gain_setting', 'memory_cells', 'pixels_x', 'pixels_y'] # names of parameters\n",
+    "\n",
+    "separate_plot = ['integration_time'] # Plot on separate plots\n",
+    "x_labels = ['Sensor Temperature', 'Integration Time'] # parameters to be shown on X axis: Acquisition rate, Memory cells, Sensor Temperature, Integration Time\n",
+    "photon_energy = 9.2 # Photon energy of the beam\n",
+    "out_folder = \"/gpfs/exfel/data/scratch/karnem/test_bla2/\" # output folder\n",
+    "use_existing = \"\" # If not empty, constants stored in given folder will be used\n",
+    "cal_db_interface = \"tcp://max-exfl016:8016\" # the database interface to use\n",
+    "cal_db_timeout = 180000 # timeout on caldb requests\n",
+    "plot_range = 3 # range for plotting in units of median absolute deviations\n",
+    "spShape = [256, 256] # Shape of superpixel"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "import copy\n",
+    "import datetime\n",
+    "import dateutil.parser\n",
+    "import numpy as np\n",
+    "import os\n",
+    "import sys\n",
+    "import warnings\n",
+    "warnings.filterwarnings('ignore')\n",
+    "\n",
+    "import matplotlib.pyplot as plt\n",
+    "\n",
+    "from iCalibrationDB import Constants, Conditions, Detectors, ConstantMetaData\n",
+    "from cal_tools.tools import get_from_db, get_random_db_interface\n",
+    "from cal_tools.ana_tools import (save_dict_to_hdf5, load_data_from_hdf5, \n",
+    "                                 HMType, IMType, hm_combine,\n",
+    "                                 combine_lists, get_range)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Prepare variables\n",
+    "parameters = [globals()[x] for x in parameter_names]\n",
+    "\n",
+    "constantsDark = {'Noise': 'BadPixelsDark',\n",
+    "                 'Offset': 'BadPixelsDark'}\n",
+    "print('Bad pixels data: ', constantsDark)\n",
+    "\n",
+    "# Define parameters in order to perform loop over time stamps\n",
+    "start = datetime.datetime.now() if start_date.upper() == \"NOW\" else dateutil.parser.parse(\n",
+    "    start_date)\n",
+    "end = datetime.datetime.now() if end_date.upper() == \"NOW\" else dateutil.parser.parse(\n",
+    "    end_date)\n",
+    "\n",
+    "# Create output folder\n",
+    "os.makedirs(out_folder, exist_ok=True)\n",
+    "\n",
+    "print('CalDB Interface: {}'.format(cal_db_interface))\n",
+    "print('Start time at: ', start)\n",
+    "print('End time at: ', end)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "parameter_list = combine_lists(*parameters, names = parameter_names)\n",
+    "print(parameter_list)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "# Retrieve list of meta-data\n",
+    "constant_versions = []\n",
+    "constant_parameters = []\n",
+    "constantBP_versions = []\n",
+    "\n",
+    "# Loop over constants\n",
+    "for c, const in enumerate(constants):\n",
+    "    \n",
+    "    for db_module in modules:\n",
+    "        det = getattr(Detectors, db_module)\n",
+    "        \n",
+    "        # Get detector conditions\n",
+    "        if dclass=='CCD':\n",
+    "            dconstants = getattr(Constants, dclass)(det.detector_type)\n",
+    "        else:\n",
+    "            dconstants = getattr(Constants, dclass)\n",
+    "\n",
+    "        if use_existing != \"\":\n",
+    "            break\n",
+    "\n",
+    "        # Loop over parameters\n",
+    "        for pars in parameter_list:\n",
+    "\n",
+    "            if (const in [\"Offset\", \"Noise\", \"SlopesPC\"] or \"DARK\" in const.upper()):\n",
+    "                dcond = Conditions.Dark\n",
+    "                mcond = getattr(dcond, dclass)(**pars)\n",
+    "            else:\n",
+    "                dcond = Conditions.Illuminated\n",
+    "                mcond = getattr(dcond, dclass)(**pars,\n",
+    "                                    photon_energy=photon_energy)\n",
+    "\n",
+    "\n",
+    "\n",
+    "            print('Request: ', const, 'with parameters:', pars)\n",
+    "            # Request Constant versions for given parameters and module\n",
+    "            data = get_from_db(det,\n",
+    "                               getattr(dconstants,\n",
+    "                                       const)(),\n",
+    "                               copy.deepcopy(mcond), None,\n",
+    "                               cal_db_interface,\n",
+    "                               creation_time=start,\n",
+    "                               verbosity=0,\n",
+    "                               timeout=cal_db_timeout,\n",
+    "                               meta_only=True,\n",
+    "                               version_info=True)\n",
+    "\n",
+    "            if not isinstance(data, list):\n",
+    "                    continue\n",
+    "\n",
+    "            if const in constantsDark:\n",
+    "                # Request BP constant versions\n",
+    "                print('constantDark:', constantsDark[const], )        \n",
+    "                dataBP = get_from_db(det,\n",
+    "                                     getattr(dconstants, \n",
+    "                                             constantsDark[const])(),\n",
+    "                                     copy.deepcopy(mcond), None,\n",
+    "                                     cal_db_interface,\n",
+    "                                     creation_time=start,\n",
+    "                                     verbosity=0,\n",
+    "                                     timeout=cal_db_timeout,\n",
+    "                                     meta_only=True,\n",
+    "                                     version_info=True)\n",
+    "\n",
+    "                if not isinstance(dataBP, list):\n",
+    "                    constant_versions += data\n",
+    "                    constant_parameters += [copy.deepcopy(pars)]*len(data)\n",
+    "                    constantBP_versions += [None]*len(data)\n",
+    "                    continue\n",
+    "\n",
+    "                for d in data:\n",
+    "                    # Match proper BP constant version\n",
+    "                    # and get constant version within\n",
+    "                    # requested time range\n",
+    "                    if d is None:\n",
+    "                        print('Time or data is not found!')\n",
+    "                        continue\n",
+    "\n",
+    "                    dt = dateutil.parser.parse(d['begin_at'])\n",
+    "\n",
+    "                    if (dt.replace(tzinfo=None) > end or \n",
+    "                        (nconstants==0 and dt.replace(tzinfo=None) < start)):\n",
+    "                        continue\n",
+    " \n",
+    "                    closest_BP = None\n",
+    "                    closest_BPtime = None\n",
+    "                    found_BPmatch = False\n",
+    "            \n",
+    "                    for dBP in dataBP:\n",
+    "                        if dBP is None:\n",
+    "                            constantBP_versions.append(None)\n",
+    "                            constant_versions.append(d)\n",
+    "                            constant_parameters.append(copy.deepcopy(pars))\n",
+    "                            print(\"Bad pixels are not found!\")\n",
+    "                            continue\n",
+    "\n",
+    "                        dt = dateutil.parser.parse(d['begin_at'])\n",
+    "                        dBPt = dateutil.parser.parse(dBP['begin_at'])\n",
+    "\n",
+    "                        if dt == dBPt:\n",
+    "                            found_BPmatch = True\n",
+    "                        else:\n",
+    "\n",
+    "                            if np.abs(dBPt-dt).total_seconds() < (max_time*60):\n",
+    "                                if closest_BP is None:\n",
+    "                                    closest_BP = dBP\n",
+    "                                    closest_BPtime = dBPt\n",
+    "                                else:\n",
+    "                                    if np.abs(dBPt-dt) < np.abs(closest_BPtime-dt):\n",
+    "                                        closest_BP = dBP\n",
+    "                                        closest_BPtime = dBPt\n",
+    "\n",
+    "                            if dataBP.index(dBP) ==  len(dataBP)-1:\n",
+    "                                if closest_BP:\n",
+    "                                    dBP = closest_BP\n",
+    "                                    dBPt = closest_BPtime\n",
+    "                                    found_BPmatch = True\n",
+    "                                else:\n",
+    "                                    print('Bad pixels are not found!')\n",
+    "\n",
+    "                        if found_BPmatch:\n",
+    "                            print(\"Found constant {}: begin at {}\".format(const, dt))\n",
+    "                            print(\"Found bad pixels at {}\".format(dBPt))\n",
+    "                            constantBP_versions.append(dBP)\n",
+    "                            constant_versions.append(d)\n",
+    "                            constant_parameters.append(copy.deepcopy(pars))\n",
+    "                            found_BPmatch = False\n",
+    "                            break\n",
+    "                if not found_BPmatch:\n",
+    "                    print('Bad pixels are not matched')\n",
+    "                    constantBP_versions.append(None)\n",
+    "                    constant_versions.append(d)\n",
+    "                    constant_parameters.append(copy.deepcopy(pars))\n",
+    "                \n",
+    "            else:\n",
+    "                constant_versions += data\n",
+    "                constant_parameters += [copy.deepcopy(pars)]*len(data)\n",
+    "                constantBP_versions += [None]*len(data)\n",
+    "\n",
+    "# Remove duplications\n",
+    "constant_versions_tmp = []\n",
+    "constant_parameters_tmp = []\n",
+    "constantBP_versions_tmp = []\n",
+    "for i, x in enumerate(constant_versions):\n",
+    "    if x not in constant_versions_tmp:\n",
+    "        constant_versions_tmp.append(x)\n",
+    "        constant_parameters_tmp.append(constant_parameters[i])\n",
+    "        if i<len(constantBP_versions):\n",
+    "            constantBP_versions_tmp.append(constantBP_versions[i])\n",
+    "constant_versions=constant_versions_tmp\n",
+    "constantBP_versions=constantBP_versions_tmp\n",
+    "constant_parameters=constant_parameters_tmp\n",
+    "\n",
+    "print('Number of stored constant versions is {}'.format(len(constant_versions)))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [],
+   "source": [
+    "def get_rebined(a, rebin):\n",
+    "    if dclass==\"jungfrau\":\n",
+    "        return a.reshape(\n",
+    "                int(a.shape[0] / rebin[0]),\n",
+    "                rebin[0],\n",
+    "                int(a.shape[1] / rebin[1]),\n",
+    "                rebin[1],\n",
+    "                a.shape[2],\n",
+    "                a.shape[3])\n",
+    "        \n",
+    "    else:\n",
+    "        return a[:,:,0].reshape(\n",
+    "                int(a.shape[0] / rebin[0]),\n",
+    "                rebin[0],\n",
+    "                int(a.shape[1] / rebin[1]),\n",
+    "                rebin[1])\n",
+    "        \n",
+    "\n",
+    "def modify_const(const, data, isBP = False):\n",
+    "    if dclass==\"jungfrau\" and data.shape[1] == 512:\n",
+    "        data = data.swapaxes(0, 1)\n",
+    "    return data\n",
+    "\n",
+    "ret_constants = {}\n",
+    "constant_data = ConstantMetaData()\n",
+    "constant_BP = ConstantMetaData()\n",
+    "\n",
+    "# sort over begin_at\n",
+    "idxs, _ = zip(*sorted(enumerate(constant_versions), \n",
+    "                     key=lambda x: x[1]['begin_at'], reverse=True))\n",
+    "\n",
+    "for i in idxs:\n",
+    "    const = constant_versions[i]['data_set_name'].split('/')[-2]\n",
+    "    qm = constant_versions[i]['physical_device']['name']\n",
+    "    # fix naming for Jungfrau039\n",
+    "    if qm == 'Jungfrau1':\n",
+    "        qm = 'JungfrauM039'\n",
+    "    \n",
+    "    for key in separate_plot:\n",
+    "        const = '{}_{}{}'.format(const, key[0], constant_parameters[i][key])\n",
+    "    # Constant for jungfrau already contains gain stages\n",
+    "    if dclass == \"jungfrau\":\n",
+    "        const += '_g0'\n",
+    "        \n",
+    "    if not const in ret_constants:\n",
+    "        ret_constants[const] = {}\n",
+    "    if not qm in ret_constants[const]:\n",
+    "            ret_constants[const][qm] = []\n",
+    "            \n",
+    "    if nconstants>0 and len(ret_constants[const][qm])>=nconstants:\n",
+    "        continue\n",
+    "        \n",
+    "    constant_data.retrieve_from_version_info(constant_versions[i])\n",
+    "    cdata = constant_data.calibration_constant.data\n",
+    "    ctime = constant_data.calibration_constant_version.begin_at\n",
+    "    cdata = modify_const(const, cdata)\n",
+    "    print(\"constant: {}, module {}, begin_at {}\".format(const, qm, ctime))\n",
+    "    \n",
+    "    if constantBP_versions[i]:\n",
+    "        constant_BP.retrieve_from_version_info(constantBP_versions[i])\n",
+    "        cdataBP = constant_BP.calibration_constant.data\n",
+    "        cdataBP = modify_const(const, cdataBP, True)\n",
+    "        \n",
+    "        if cdataBP.shape != cdata.shape:\n",
+    "            print('Wrong bad pixel shape! {}, expected {}'.format(cdataBP.shape, cdata.shape))\n",
+    "            continue\n",
+    "        \n",
+    "        # Apply bad pixel mask\n",
+    "        cdataABP = np.copy(cdata)\n",
+    "        cdataABP[cdataBP > 0] = np.nan\n",
+    "    \n",
+    "        # Create superpixels for constants with BP applied\n",
+    "        cdataABP = get_rebined(cdataABP, spShape)\n",
+    "        toStoreBP = np.nanmean(cdataABP, axis=(1, 3))\n",
+    "        toStoreBPStd = np.nanstd(cdataABP, axis=(1, 3))\n",
+    "\n",
+    "        # Prepare number of bad pixels per superpixels\n",
+    "        cdataBP = get_rebined(cdataBP, spShape)\n",
+    "        cdataNBP = np.nansum(cdataBP > 0, axis=(1, 3))\n",
+    "\n",
+    "    # Create superpixels for constants without BP applied\n",
+    "    cdata = get_rebined(cdata, spShape)\n",
+    "    toStoreStd = np.nanstd(cdata, axis=(1, 3))\n",
+    "    toStore = np.nanmean(cdata, axis=(1, 3))\n",
+    "    \n",
+    "    if not constantBP_versions[i]:\n",
+    "        toStoreBP = np.full_like(toStore, IMType.NO_BPMAP.value)\n",
+    "        toStoreBPStd = np.full_like(toStore, IMType.NO_BPMAP.value)\n",
+    "        cdataNBP = np.full_like(toStore, IMType.NO_BPMAP.value)\n",
+    "    \n",
+    "    # Convert parameters to dict\n",
+    "    dpar = {p.name: p.value for p in constant_data.detector_condition.parameters}\n",
+    "    \n",
+    "    if len(toStore.shape)==4:\n",
+    "        for i in range(3):\n",
+    "            if i>0:\n",
+    "                const = const.replace('_g{}'.format(i-1), '_g{}'.format(i))\n",
+    "                \n",
+    "            if not const in ret_constants:\n",
+    "                ret_constants[const] = {}\n",
+    "            if not qm in ret_constants[const]:\n",
+    "                ret_constants[const][qm] = []\n",
+    "            print(\"Store values in dict\", const, qm, ctime)\n",
+    "            ret_constants[const][qm].append({'ctime': ctime,\n",
+    "                                     'nBP': cdataNBP[:,:,0,i],\n",
+    "                                     'dataBP': toStoreBP[:,:,0,i],\n",
+    "                                     'dataBPStd': toStoreBPStd[:,:,0,i],\n",
+    "                                     'data': toStore[:,:,0,i],\n",
+    "                                     'dataStd': toStoreStd[:,:,0,i],\n",
+    "                                     'mdata': dpar}) \n",
+    "        \n",
+    "        \n",
+    "        \n",
+    "    else:\n",
+    "        print(\"Store values in dict\", const, qm, ctime)\n",
+    "        ret_constants[const][qm].append({'ctime': ctime,\n",
+    "                                     'nBP': cdataNBP,\n",
+    "                                     'dataBP': toStoreBP,\n",
+    "                                     'dataBPStd': toStoreBPStd,\n",
+    "                                     'data': toStore,\n",
+    "                                     'dataStd': toStoreStd,\n",
+    "                                     'mdata': dpar})  \n",
+    "    "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "if use_existing == \"\":\n",
+    "    print('Save data to /CalDBAna_{}_{}.h5'.format(dclass, db_module))\n",
+    "    save_dict_to_hdf5(ret_constants,\n",
+    "                      '{}/CalDBAna_{}_{}.h5'.format(out_folder, dclass, db_module))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if use_existing == \"\":\n",
+    "    fpath = '{}/CalDBAna_{}_*.h5'.format(out_folder, dclass)\n",
+    "else:\n",
+    "    fpath = '{}/CalDBAna_{}_*.h5'.format(use_existing, dclass)\n",
+    "\n",
+    "print('Load data from {}'.format(fpath))\n",
+    "ret_constants = load_data_from_hdf5(fpath)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Parameters for plotting\n",
+    "\n",
+    "keys = {\n",
+    "    'Mean': ['data', '', 'Mean over pixels'],\n",
+    "    'std': ['dataStd', '', '$\\sigma$ over pixels'],\n",
+    "    'MeanBP': ['dataBP', 'Good pixels only', 'Mean over pixels'],\n",
+    "    'NBP': ['nBP', 'Fraction of BP', 'Number of BP'],\n",
+    "    'stdBP': ['dataBPStd', 'Good pixels only', '$\\sigma$ over pixels'],\n",
+    "}\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "print('Plot calibration constants')\n",
+    "\n",
+    "# loop over constant type\n",
+    "for const, modules in ret_constants.items():\n",
+    "    \n",
+    "    const = const.split(\"_\")\n",
+    "    if True:\n",
+    "\n",
+    "        print('Const: {}'.format(const))\n",
+    "\n",
+    "        # summary over modules\n",
+    "        mod_data = {}\n",
+    "        mod_names = []\n",
+    "        mod_times = []\n",
+    "        \n",
+    "        # Loop over modules\n",
+    "        for mod, data in modules.items():\n",
+    "            print('Module: {}'.format(mod))\n",
+    "\n",
+    "            ctimes = np.array(data[\"ctime\"])\n",
+    "            ctimes_ticks = [x.strftime('%y-%m-%d') for x in ctimes]\n",
+    "\n",
+    "            if (\"mdata\" in data):\n",
+    "                cmdata = np.array(data[\"mdata\"])\n",
+    "                for i, tick in enumerate(ctimes_ticks):\n",
+    "                    for entr in x_labels:\n",
+    "                        ctimes_ticks[i] += ', {}={}'.format(entr[0].upper(), \n",
+    "                                                           cmdata[i].get(entr, None))\n",
+    "\n",
+    "            sort_ind = np.argsort(ctimes_ticks)\n",
+    "            ctimes_ticks = list(np.array(ctimes_ticks)[sort_ind])\n",
+    "\n",
+    "            # Create sorted by data dataset\n",
+    "            rdata = {}\n",
+    "            for key, item in keys.items():\n",
+    "                if item[0] in data:\n",
+    "                    rdata[key] = np.array(data[item[0]])[sort_ind]\n",
+    "\n",
+    "            nTimes = rdata['Mean'].shape[0]\n",
+    "            nPixels = rdata['Mean'].shape[1] * rdata['Mean'].shape[2]\n",
+    "            nBins = nPixels\n",
+    "            \n",
+    "            # Avoid too low values\n",
+    "            if const[0] in [\"Noise10Hz\", \"Offset10Hz\"]:\n",
+    "                rdata['Mean'][rdata['Mean'] < 0.1] = np.nan\n",
+    "                if 'MeanBP' in rdata:\n",
+    "                    rdata['MeanBP'][rdata['MeanBP'] < 0.1] = np.nan\n",
+    "                if 'NBP' in rdata:\n",
+    "                    rdata['NBP'] = rdata['NBP'].astype(float)\n",
+    "                    rdata['NBP'][rdata['NBP'] == spShape[0]*spShape[1]] = np.nan\n",
+    "\n",
+    "            # Reshape: ASICs over cells for plotting\n",
+    "            pdata = {}\n",
+    "            for key in rdata:\n",
+    "                if len(rdata[key].shape)<3:\n",
+    "                    continue\n",
+    "                pdata[key] = rdata[key].reshape(nTimes, nBins).swapaxes(0, 1)\n",
+    "\n",
+    "            # Summary over ASICs\n",
+    "            adata = {}\n",
+    "            for key in rdata:\n",
+    "                if len(rdata[key].shape)<3:\n",
+    "                    continue\n",
+    "                adata[key] = np.nansum(rdata[key], axis=(1, 2))\n",
+    "\n",
+    "            # Summary information over modules\n",
+    "            for key in pdata:\n",
+    "                if key not in mod_data:\n",
+    "                    mod_data[key] = []\n",
+    "                if key == 'NBP':\n",
+    "                    mod_data[key].append(np.nansum(pdata[key], axis=0))\n",
+    "                else:\n",
+    "                    mod_data[key].append(np.nanmean(pdata[key], axis=0))\n",
+    "\n",
+    "            mod_names.append(mod)\n",
+    "            mod_times.append(ctimes[sort_ind])\n",
+    "            \n",
+    "            # Plotting\n",
+    "            for key in pdata:\n",
+    "                \n",
+    "                if len(pdata[key].shape)<2:\n",
+    "                    continue\n",
+    "                    \n",
+    "                vmin,vmax = get_range(pdata[key][::-1].flatten(), plot_range)\n",
+    "                if key == 'NBP':\n",
+    "                    unit = '[%]'\n",
+    "                else:\n",
+    "                    unit = '[ADU]'\n",
+    "\n",
+    "                title = '{}, module {}, {}'.format(\n",
+    "                    const[0], mod,  keys[key][1])\n",
+    "                cb_label = '{}, {} {}'.format(const[0], keys[key][2], unit)\n",
+    "\n",
+    "                fname = '{}/{}_{}'.format(out_folder, const[0], mod.replace('_', ''))\n",
+    "                for item in const[1:]:\n",
+    "                    fname = '{}_{}'.format(fname, item)\n",
+    "                fname = '{}_ASIC_{}.png'.format(fname, key)\n",
+    "               \n",
+    "                hm_combine(pdata[key][::-1].astype(float), htype=HMType.mro,\n",
+    "                          x_label='Creation Time', y_label='ASIC ID',\n",
+    "                          x_ticklabels=ctimes_ticks,\n",
+    "                          x_ticks=np.arange(len(ctimes_ticks))+0.3,\n",
+    "                          title=title, cb_label=cb_label,\n",
+    "                          vmin=vmin, vmax=vmax,\n",
+    "                          fname=fname,\n",
+    "                          pad=[0.125, 0.125, 0.12, 0.185])\n",
+    "\n",
+    "                \n",
+    "        # Summary over modules\n",
+    "        for key in mod_data:\n",
+    "            \n",
+    "            if key == 'NBP':\n",
+    "                unit = ''\n",
+    "            else:\n",
+    "                unit = '[ADU]'\n",
+    "\n",
+    "            title = '{}, All modules, {}'.format(\n",
+    "                    const[0], keys[key][1])\n",
+    "            \n",
+    "            fname = '{}/{}_{}'.format(out_folder, const[0], 'all')\n",
+    "            for item in const[1:]:\n",
+    "                fname = '{}_{}'.format(fname, item)\n",
+    "            fname = '{}_ASIC_{}.png'.format(fname, key)\n",
+    "                \n",
+    "            fig = plt.figure(figsize=(12,12) )\n",
+    "            for i in range(len(mod_data[key])):\n",
+    "                plt.scatter(mod_times[i], mod_data[key][i], label=mod_names[i])\n",
+    "            plt.grid()\n",
+    "            plt.xlabel('Creation Time')\n",
+    "            plt.ylabel('{}, {} {}'.format(const[0], keys[key][2], unit))  \n",
+    "            plt.legend(loc='best')\n",
+    "            plt.title(title)\n",
+    "            fig.savefig(fname)\n"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.6.7"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/notebooks/generic/PlotFromCalDB_Summary_NBC.ipynb b/notebooks/generic/PlotFromCalDB_Summary_NBC.ipynb
index e1a45eb7228dd58394bcd425e0cc316b0b78129e..a071734e74d3b3b25ec13daaf605b21889ce312d 100644
--- a/notebooks/generic/PlotFromCalDB_Summary_NBC.ipynb
+++ b/notebooks/generic/PlotFromCalDB_Summary_NBC.ipynb
@@ -18,9 +18,9 @@
    "outputs": [],
    "source": [
     "cluster_profile = \"noDB\"  # The ipcluster profile to use\n",
-    "out_folder = \"/gpfs/exfel/data/scratch/karnem/test_LPD/\"  # Output folder, required\n",
-    "use_existing = \"/gpfs/exfel/data/scratch/karnem/test_LPD/\" # Input folder\n",
-    "dclass = \"LPD\"  # Detector class\n",
+    "out_folder = \"/gpfs/exfel/data/scratch/karnem/PlotCalDB/MID/AGIPD1M2/\"  # Output folder, required\n",
+    "use_existing = \"/gpfs/exfel/data/scratch/karnem/PlotCalDB/MID/AGIPD1M2/\" # Input folder\n",
+    "dclass = \"AGIPD\"  # Detector class\n",
     "nMemToShow = 32 # Number of memory cells to be shown in plots over ASICs\n",
     "range_offset = [4000., 5500, 6500, 8500] # plotting range for offset: high gain l, r, medium gain l, r \n",
     "range_noise = [2.5, 15, 7.5, 17.0] # plotting range for noise: high gain l, r, medium gain l, r \n",
@@ -30,7 +30,7 @@
     "range_slopesCI = [22.0, 27.0, -0.5, 1.5] # plotting range for slope CI: high gain l, r, medium gain l, r \n",
     "range_slopesFF = [0.8, 1.2, 0.6, 1.2] # plotting range for slope FF: high gain l, r, medium gain l, r \n",
     "plot_range = 3 # range for plotting in units of median absolute deviations\n",
-    "x_labels = ['Acquisition rate', 'Memory cells'] # parameters to be shown on X axis"
+    "x_labels = ['Sensor Bias Voltage', 'Memory cells'] # parameters to be shown on X axis"
    ]
   },
   {
@@ -43,11 +43,10 @@
    "source": [
     "import warnings\n",
     "warnings.filterwarnings('ignore')\n",
-    "\n",
     "import numpy as np\n",
     "\n",
     "from cal_tools.ana_tools import (load_data_from_hdf5, \n",
-    "                                 HMType, multi_union,\n",
+    "                                 HMType, IMType, multi_union,\n",
     "                                 hm_combine, get_range)"
    ]
   },
@@ -69,7 +68,7 @@
    },
    "outputs": [],
    "source": [
-    "print('Load data from {}/CalDBAna_{}_*.h5'.format(use_existing, dclass))\n",
+    "print('Load data from {}/CalDBAna_{}_*.h5'.format(use_existing, dclass))\n",
     "ret_constants = load_data_from_hdf5(\n",
     "    '{}/CalDBAna_{}_*.h5'.format(use_existing, dclass))\n",
     "\n",
@@ -167,14 +166,7 @@
     "                for key in rdata:\n",
     "                    rdata[key] = rdata[key][..., gain]\n",
     "\n",
-    "            # Avoid to low values\n",
-    "            if const in [\"Noise\", \"Offset\", \"Noise-e\"]:\n",
-    "                rdata['Mean'][rdata['Mean'] < 0.1] = np.nan\n",
-    "                if 'MeanBP' in rdata:\n",
-    "                    rdata['MeanBP'][rdata['MeanBP'] < 0.1] = np.nan\n",
-    "\n",
     "            if 'NBP' in rdata:\n",
-    "                rdata[\"NBP\"][rdata[\"NBP\"] == 4096] = np.nan\n",
     "                rdata[\"NBP\"] = rdata[\"NBP\"] / (64 * 64) * 100\n",
     "\n",
     "            # Reshape: ASICs over cells for plotting\n",
@@ -187,7 +179,18 @@
     "            for key in pdata:\n",
     "                if key not in mod_data:\n",
     "                    mod_data[key] = []\n",
+    "                    \n",
     "                mod_data[key].append(np.nanmean(pdata[key], axis=0))\n",
+    "                # Avoid too low values\n",
+    "                if const in [\"Noise\", \"Offset\", \"Noise-e\"] and key in ['Mean', 'MeanBP']:\n",
+    "                    mod_data[key][-1][mod_data[key][-1] == 0.0] = IMType.STRANGE_VAL.value\n",
+    "                if key == 'NBP':\n",
+    "                    if 'Mean' in mod_data:\n",
+    "                        mod_data['Mean'][-1][mod_data[key][-1] == 100] = IMType.ALL_BAD.value\n",
+    "                    if 'MeanBP' in mod_data:\n",
+    "                        mod_data['MeanBP'][-1][mod_data[key][-1] == 100] = IMType.ALL_BAD.value\n",
+    "                    mod_data[key][-1][mod_data[key][-1] == 100] = IMType.ALL_BAD.value\n",
+    "                \n",
     "\n",
     "            mod_data['stdASIC'].append(np.nanstd(\n",
     "                np.nanmean(rdata['Mean'][:, :, :, :nMemToShow], axis=(1, 2)), axis=1))\n",
@@ -208,7 +211,7 @@
     "                    tlist.insert(t, time)\n",
     "                    for key in mod_data:\n",
     "                        mod_data[key][i] = np.insert(\n",
-    "                            mod_data[key][i], t, np.nan)\n",
+    "                            mod_data[key][i], t, IMType.NO_CONST.value)\n",
     "\n",
     "        # Plotting\n",
     "        nModules = len(mod_names)\n",
@@ -244,7 +247,9 @@
     "                      fname='{}/{}_all_g{}_{}.png'.format(\n",
     "                out_folder, const, gain, key),\n",
     "                vmin=vmin, vmax=vmax,\n",
-    "                pad=[0.125, 0.151, 0.12, 0.17], htype=htype)"
+    "                pad=[0.125, 0.151, 0.12, 0.17], htype=htype)\n",
+    "        #break\n",
+    "    #break"
    ]
   }
  ],
diff --git a/xfel_calibrate/notebooks.py b/xfel_calibrate/notebooks.py
index d9b14de843db5de4c04d84e4f11c187d9d8e44b7..6baf79bd43d40cd5cafe3bb4763779f4a892b073 100644
--- a/xfel_calibrate/notebooks.py
+++ b/xfel_calibrate/notebooks.py
@@ -107,6 +107,12 @@ notebooks = {
                                                "cluster cores": 32},
                                "extend parms": "extend_parms",
                                },
+                       "STATS_FROM_DB":   {
+                               "notebook": "notebooks/generic/PlotFromCalDB_NBC.ipynb",
+                               "concurrency": {"parameter": None,
+                                                "default concurrency": None,
+                                                "cluster cores": 1},
+                               },
                        },
             "TUTORIAL": {
                        "TEST": {