diff --git a/notebooks/LPD/LPD_Correct_Fast.ipynb b/notebooks/LPD/LPD_Correct_Fast.ipynb
index 184d6ce8689670217f7ca7e831804c14bceeea92..f702ac2741c3d2bea4020e2589613ab00838bc8b 100644
--- a/notebooks/LPD/LPD_Correct_Fast.ipynb
+++ b/notebooks/LPD/LPD_Correct_Fast.ipynb
@@ -40,7 +40,7 @@
     "creation_time = \"\"  # The timestamp to use with Calibration DB. Required Format: \"YYYY-MM-DD hh:mm:ss\" e.g. 2019-07-04 11:02:41\n",
     "cal_db_interface = ''  # Not needed, compatibility with current webservice.\n",
     "cal_db_timeout = 0  # Not needed, compatbility with current webservice.\n",
-    "cal_db_root = '/gpfs/exfel/d/cal/caldb_store'\n",
+    "cal_db_root = '/gpfs/exfel/d/cal/caldb_store'  # Root path of the calibration database file store, e.g. when accessing constants from the test database.\n",
     "\n",
     "# Operating conditions\n",
     "mem_cells = 512  # Memory cells, LPD constants are always taken with 512 cells.\n",
@@ -85,7 +85,6 @@
    },
    "outputs": [],
    "source": [
-    "from collections import OrderedDict\n",
     "from logging import warning\n",
     "from pathlib import Path\n",
     "from time import perf_counter\n",
@@ -100,19 +99,21 @@
     "import matplotlib.pyplot as plt\n",
     "%matplotlib inline\n",
     "\n",
-    "from calibration_client import CalibrationClient\n",
-    "from calibration_client.modules import CalibrationConstantVersion\n",
     "import extra_data as xd\n",
     "import extra_geom as xg\n",
     "import pasha as psh\n",
-    "\n",
     "from extra_data.components import LPD1M\n",
     "\n",
+    "import cal_tools.restful_config as rest_cfg\n",
+    "from cal_tools.calcat_interface import CalCatError, LPD_CalibrationData\n",
     "from cal_tools.lpdalgs import correct_lpd_frames\n",
     "from cal_tools.lpdlib import get_mem_cell_pattern, make_cell_order_condition\n",
-    "from cal_tools.tools import CalibrationMetadata, calcat_creation_time\n",
-    "from cal_tools.files import DataFile\n",
-    "from cal_tools.restful_config import restful_config"
+    "from cal_tools.tools import (\n",
+    "    CalibrationMetadata,\n",
+    "    calcat_creation_time,\n",
+    "    write_constants_fragment,\n",
+    ")\n",
+    "from cal_tools.files import DataFile"
    ]
   },
   {
@@ -136,19 +137,16 @@
     "\n",
     "output_source = output_source or input_source\n",
     "\n",
-    "cal_db_root = Path(cal_db_root)\n",
-    "\n",
-    "metadata = CalibrationMetadata(metadata_folder or out_folder)\n",
-    "\n",
     "creation_time = calcat_creation_time(in_folder, run, creation_time)\n",
     "print(f'Using {creation_time.isoformat()} as creation time')\n",
     "\n",
     "# Pick all modules/aggregators or those selected.\n",
-    "if not karabo_da or karabo_da == ['']:\n",
-    "    if not modules or modules == [-1]:\n",
+    "if karabo_da == ['']:\n",
+    "    if modules == [-1]:\n",
     "        modules = list(range(16))\n",
-    "\n",
     "    karabo_da = [f'LPD{i:02d}' for i in modules]\n",
+    "else:\n",
+    "    modules = [int(x[-2:]) for x in karabo_da]\n",
     "    \n",
     "# Pick all sequences or those selected.\n",
     "if not sequences or sequences == [-1]:\n",
@@ -239,21 +237,46 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "tags": []
+   },
    "outputs": [],
    "source": [
-    "# Connect to CalCat.\n",
-    "calcat_config = restful_config['calcat']\n",
-    "client = CalibrationClient(\n",
-    "    base_api_url=calcat_config['base-api-url'],\n",
-    "    use_oauth2=calcat_config['use-oauth2'],\n",
-    "    client_id=calcat_config['user-id'],\n",
-    "    client_secret=calcat_config['user-secret'],\n",
-    "    user_email=calcat_config['user-email'],\n",
-    "    token_url=calcat_config['token-url'],\n",
-    "    refresh_url=calcat_config['refresh-url'],\n",
-    "    auth_url=calcat_config['auth-url'],\n",
-    "    scope='')"
+    "start = perf_counter()\n",
+    "\n",
+    "cell_ids_pattern_s = None\n",
+    "if use_cell_order != 'never':\n",
+    "    # Read the order of memory cells used\n",
+    "    raw_data = xd.DataCollection.from_paths([e[1] for e in data_to_process])\n",
+    "    cell_ids_pattern_s = make_cell_order_condition(\n",
+    "        use_cell_order, get_mem_cell_pattern(raw_data, det_inp_sources)\n",
+    "    )\n",
+    "print(\"Memory cells order:\", cell_ids_pattern_s)\n",
+    "\n",
+    "lpd_cal = LPD_CalibrationData(\n",
+    "    detector_name=karabo_id,\n",
+    "    modules=karabo_da,\n",
+    "    sensor_bias_voltage=bias_voltage,\n",
+    "    memory_cells=mem_cells,\n",
+    "    feedback_capacitor=capacitor,\n",
+    "    source_energy=photon_energy,\n",
+    "    memory_cell_order=cell_ids_pattern_s,\n",
+    "    category=category,\n",
+    "    event_at=creation_time,\n",
+    "    client=rest_cfg.calibration_client(),\n",
+    "    caldb_root=Path(cal_db_root),\n",
+    ")\n",
+    "\n",
+    "lpd_metadata = lpd_cal.metadata([\"Offset\", \"BadPixelsDark\"])\n",
+    "try:\n",
+    "    illum_metadata = lpd_cal.metadata(lpd_cal.illuminated_calibrations)\n",
+    "    for key, value in illum_metadata.items():\n",
+    "        lpd_metadata.setdefault(key, {}).update(value)\n",
+    "except CalCatError as e:  # TODO: replace when API errors are improved.\n",
+    "    warning(f\"CalCatError: {e}\")\n",
+    "\n",
+    "total_time = perf_counter() - start\n",
+    "print(f'Looking up constants {total_time:.1f}s')"
    ]
   },
   {
@@ -262,9 +285,22 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "metadata = CalibrationMetadata(metadata_folder or out_folder)\n",
-    "# Constant paths & timestamps are saved under retrieved-constants in calibration_metadata.yml\n",
-    "const_yaml = metadata.setdefault(\"retrieved-constants\", {})"
+    "# Validate the constants availability and raise/warn accordingly.\n",
+    "for mod, calibrations in lpd_metadata.items():\n",
+    "    missing_offset = {\"Offset\"} - set(calibrations)\n",
+    "    warn_missing_constants = {\n",
+    "        \"BadPixelsDark\", \"BadPixelsFF\", \"GainAmpMap\",\n",
+    "        \"FFMap\", \"RelativeGain\"} - set(calibrations)\n",
+    "    if missing_offset:\n",
+    "        warning(f\"Offset constant is not available to correct {mod}.\")\n",
+    "        karabo_da.remove(mod)\n",
+    "    if warn_missing_constants:\n",
+    "        warning(f\"Constants {warn_missing_constants} were not retrieved for {mod}.\")\n",
+    "if not karabo_da:  # Offsets are missing for all modules.\n",
+    "    raise Exception(\"Could not find offset constants for any modules, will not correct data.\")\n",
+    "\n",
+    "# Remove skipped correction modules from data_to_process\n",
+    "data_to_process = [(mod, in_f, out_f) for mod, in_f, out_f in data_to_process if mod in karabo_da]"
    ]
   },
   {
@@ -273,119 +309,15 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "const_data = {}\n",
-    "const_load_mp = psh.ProcessContext(num_workers=24)\n",
-    "\n",
-    "if const_yaml:  # Read constants from YAML file.\n",
-    "    start = perf_counter()\n",
-    "    for da, ccvs in const_yaml.items():\n",
-    "\n",
-    "        for calibration_name, ccv in ccvs['constants'].items():\n",
-    "            if ccv['file-path'] is None:\n",
-    "                warning(f\"Missing {calibration_name} for {da}\")\n",
-    "                continue\n",
-    "\n",
-    "            dtype = np.uint32 if calibration_name.startswith('BadPixels') else np.float32\n",
-    "\n",
-    "            const_data[(da, calibration_name)] = dict(\n",
-    "                path=Path(ccv['file-path']),\n",
-    "                dataset=ccv['dataset-name'],\n",
-    "                data=const_load_mp.alloc(shape=(256, 256, mem_cells, 3), dtype=dtype)\n",
-    "            )\n",
-    "else:  # Retrieve constants from CALCAT.\n",
-    "    dark_calibrations = {\n",
-    "        1: 'Offset',  # np.float32\n",
-    "        14: 'BadPixelsDark'  # should be np.uint32, but is np.float64\n",
-    "    }\n",
-    "\n",
-    "    base_condition = [\n",
-    "        dict(parameter_name='Sensor Bias Voltage', value=bias_voltage),\n",
-    "        dict(parameter_name='Memory cells', value=mem_cells),\n",
-    "        dict(parameter_name='Feedback capacitor', value=capacitor),\n",
-    "        dict(parameter_name='Pixels X', value=256),\n",
-    "        dict(parameter_name='Pixels Y', value=256),\n",
-    "    ]\n",
-    "    cell_ids_pattern_s = None\n",
-    "    if use_cell_order != 'never':\n",
-    "        # Read the order of memory cells used\n",
-    "        raw_data = xd.DataCollection.from_paths([e[1] for e in data_to_process])\n",
-    "        cell_ids_pattern_s = make_cell_order_condition(\n",
-    "            use_cell_order, get_mem_cell_pattern(raw_data, det_inp_sources)\n",
-    "        )\n",
-    "        print(\"Memory cells order:\", cell_ids_pattern_s)\n",
-    "\n",
-    "    if cell_ids_pattern_s is not None:\n",
-    "        dark_condition = base_condition + [\n",
-    "            dict(parameter_name='Memory cell order', value=cell_ids_pattern_s),\n",
-    "        ]\n",
-    "    else:\n",
-    "        dark_condition = base_condition.copy()\n",
-    "\n",
-    "    illuminated_calibrations = {\n",
-    "        20: 'BadPixelsFF',  # np.uint32\n",
-    "        42: 'GainAmpMap',  # np.float32\n",
-    "        43: 'FFMap',  # np.float32\n",
-    "        44: 'RelativeGain'  # np.float32\n",
-    "    }\n",
-    "\n",
-    "    illuminated_condition = base_condition + [\n",
-    "        dict(parameter_name='Source Energy', value=photon_energy),\n",
-    "        dict(parameter_name='category', value=category)\n",
-    "    ]\n",
-    "\n",
-    "    print('Querying calibration database', end='', flush=True)\n",
-    "    start = perf_counter()\n",
-    "    for calibrations, condition in [\n",
-    "        (dark_calibrations, dark_condition),\n",
-    "        (illuminated_calibrations, illuminated_condition)\n",
-    "    ]:\n",
-    "        resp = CalibrationConstantVersion.get_closest_by_time_by_detector_conditions(\n",
-    "            client, karabo_id, list(calibrations.keys()),\n",
-    "            {'parameters_conditions_attributes': condition},\n",
-    "            karabo_da='', event_at=creation_time.isoformat()\n",
-    "        )\n",
-    "\n",
-    "        if not resp['success']:\n",
-    "            raise RuntimeError(resp)\n",
-    "\n",
-    "        for ccv in resp['data']:\n",
-    "            cc = ccv['calibration_constant']\n",
-    "            da = ccv['physical_detector_unit']['karabo_da']\n",
-    "            calibration_name = calibrations[cc['calibration_id']]\n",
-    "            \n",
-    "            dtype = np.uint32 if calibration_name.startswith('BadPixels') else np.float32\n",
-    "            \n",
-    "            const_data[(da, calibration_name)] = dict(\n",
-    "                path=Path(ccv['path_to_file']) / ccv['file_name'],\n",
-    "                dataset=ccv['data_set_name'],\n",
-    "                data=const_load_mp.alloc(shape=(256, 256, mem_cells, 3), dtype=dtype)\n",
-    "            )\n",
-    "        print('.', end='', flush=True)\n",
-    "            \n",
-    "total_time = perf_counter() - start\n",
-    "print(f'{total_time:.1f}s')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def load_constant_dataset(wid, index, const_descr):\n",
-    "    ccv_entry = const_data[const_descr]\n",
-    "    \n",
-    "    with h5py.File(cal_db_root / ccv_entry['path'], 'r') as fp:\n",
-    "        fp[ccv_entry['dataset'] + '/data'].read_direct(ccv_entry['data'])\n",
-    "        \n",
-    "    print('.', end='', flush=True)\n",
-    "\n",
-    "print('Loading calibration data', end='', flush=True)\n",
-    "start = perf_counter()\n",
-    "const_load_mp.map(load_constant_dataset, list(const_data.keys()))\n",
-    "total_time = perf_counter() - start\n",
-    "\n",
-    "print(f'{total_time:.1f}s')"
+    "# write constants metadata to fragment YAML\n",
+    "write_constants_fragment(\n",
+    "    out_folder=(metadata_folder or out_folder),\n",
+    "    det_metadata=lpd_metadata,\n",
+    "    caldb_root=lpd_cal.caldb_root,\n",
+    ")\n",
+    "\n",
+    "# Load constants data for all constants\n",
+    "const_data = lpd_cal.ndarray_map(metadata=lpd_metadata)"
    ]
   },
   {
@@ -411,12 +343,14 @@
     "}\n",
     "\n",
     "def prepare_constants(wid, index, aggregator):\n",
-    "    consts = {calibration_name: entry['data']\n",
-    "              for (aggregator_, calibration_name), entry\n",
-    "              in const_data.items()\n",
-    "              if aggregator == aggregator_}\n",
-    "    \n",
+    "    consts = const_data.get(aggregator, {})\n",
     "    def _prepare_data(calibration_name, dtype):\n",
+    "        # Some old BadPixels constants have <f8 dtype.\n",
+    "        # Convert nan to float 0 to avoid having 2147483648 after\n",
+    "        # converting float64 to uint32.\n",
+    "        if \"BadPixels\" in calibration_name and consts[calibration_name].dtype != np.uint32:\n",
+    "            consts[calibration_name] = np.nan_to_num(\n",
+    "                consts[calibration_name], nan=0.0)\n",
     "        return consts[calibration_name] \\\n",
     "            .transpose(constant_order[calibration_name]) \\\n",
     "            .astype(dtype, copy=True)  # Make sure array is contiguous.\n",
diff --git a/notebooks/LPD/LPD_retrieve_constants_precorrection.ipynb b/notebooks/LPD/LPD_retrieve_constants_precorrection.ipynb
deleted file mode 100644
index 419a9a43f18cfbb5c1ded27e4adcba718cce0e39..0000000000000000000000000000000000000000
--- a/notebooks/LPD/LPD_retrieve_constants_precorrection.ipynb
+++ /dev/null
@@ -1,243 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# LPD Retrieving Constants Pre-correction #\n",
-    "\n",
-    "Author: European XFEL Detector Group, Version: 1.0\n",
-    "\n",
-    "The following notebook provides a constants metadata in a YAML file to use while correcting LPD images."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Input parameters\n",
-    "in_folder = \"/gpfs/exfel/exp/FXE/202201/p003073/raw/\"  # the folder to read data from, required\n",
-    "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/remove/LPD_test\"  # the folder to output to, required\n",
-    "metadata_folder = ''  # Directory containing calibration_metadata.yml when run by xfel-calibrate.\n",
-    "modules = [-1]  # Modules indices to correct, use [-1] for all, only used when karabo_da is empty\n",
-    "karabo_da = ['']  # Data aggregators names to correct, use [''] for all\n",
-    "run = 10  # run to process, required\n",
-    "\n",
-    "# Source parameters\n",
-    "karabo_id = 'FXE_DET_LPD1M-1'  # Karabo domain for detector.\n",
-    "input_source = '{karabo_id}/DET/{module_index}CH0:xtdf'  # Input fast data source.\n",
-    "\n",
-    "# CalCat parameters\n",
-    "creation_time = \"\"  # The timestamp to use with Calibration DB. Required Format: \"YYYY-MM-DD hh:mm:ss\" e.g. 2019-07-04 11:02:41\n",
-    "\n",
-    "# Operating conditions\n",
-    "mem_cells = 512  # Memory cells, LPD constants are always taken with 512 cells.\n",
-    "bias_voltage = 250.0  # Detector bias voltage.\n",
-    "capacitor = '5pF'  # Capacitor setting: 5pF or 50pF\n",
-    "photon_energy = 9.2  # Photon energy in keV.\n",
-    "category = 0  # Whom to blame.\n",
-    "use_cell_order = 'auto'  # Whether to use memory cell order as a detector condition (not stored for older constants)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from pathlib import Path\n",
-    "from time import perf_counter\n",
-    "\n",
-    "import numpy as np\n",
-    "\n",
-    "from calibration_client import CalibrationClient\n",
-    "from calibration_client.modules import CalibrationConstantVersion\n",
-    "import extra_data as xd\n",
-    "\n",
-    "from cal_tools.lpdlib import get_mem_cell_pattern, make_cell_order_condition\n",
-    "from cal_tools.tools import (\n",
-    "    CalibrationMetadata,\n",
-    "    calcat_creation_time,\n",
-    "    save_constant_metadata,\n",
-    ")\n",
-    "from cal_tools.restful_config import restful_config"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "out_folder = Path(out_folder)\n",
-    "out_folder.mkdir(exist_ok=True)\n",
-    "\n",
-    "metadata = CalibrationMetadata(metadata_folder or out_folder)\n",
-    "# Constant paths & timestamps are saved under retrieved-constants in calibration_metadata.yml\n",
-    "retrieved_constants = metadata.setdefault(\"retrieved-constants\", {})\n",
-    "\n",
-    "creation_time = calcat_creation_time(in_folder, run, creation_time)\n",
-    "print(f'Using {creation_time.isoformat()} as creation time')\n",
-    "\n",
-    "# Pick all modules/aggregators or those selected.\n",
-    "if not karabo_da or karabo_da == ['']:\n",
-    "    if not modules or modules == [-1]:\n",
-    "        modules = list(range(16))\n",
-    "\n",
-    "    karabo_da = [f'LPD{i:02d}' for i in modules]\n",
-    "\n",
-    "# List of detector sources.\n",
-    "det_inp_sources = [input_source.format(karabo_id=karabo_id, module_index=int(da[-2:])) for da in karabo_da]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Connect to CalCat.\n",
-    "calcat_config = restful_config['calcat']\n",
-    "client = CalibrationClient(\n",
-    "    base_api_url=calcat_config['base-api-url'],\n",
-    "    use_oauth2=calcat_config['use-oauth2'],\n",
-    "    client_id=calcat_config['user-id'],\n",
-    "    client_secret=calcat_config['user-secret'],\n",
-    "    user_email=calcat_config['user-email'],\n",
-    "    token_url=calcat_config['token-url'],\n",
-    "    refresh_url=calcat_config['refresh-url'],\n",
-    "    auth_url=calcat_config['auth-url'],\n",
-    "    scope='')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "dark_calibrations = {\n",
-    "    1: 'Offset',\n",
-    "    14: 'BadPixelsDark',\n",
-    "}\n",
-    "\n",
-    "base_condition = [\n",
-    "    dict(parameter_name='Sensor Bias Voltage', value=bias_voltage),\n",
-    "    dict(parameter_name='Memory cells', value=mem_cells),\n",
-    "    dict(parameter_name='Feedback capacitor', value=capacitor),\n",
-    "    dict(parameter_name='Pixels X', value=256),\n",
-    "    dict(parameter_name='Pixels Y', value=256),\n",
-    "]\n",
-    "cell_ids_pattern_s = None\n",
-    "if use_cell_order != 'never':\n",
-    "    # Read the order of memory cells used\n",
-    "    raw_data = xd.RunDirectory(Path(in_folder, f'r{run:04d}'))\n",
-    "    cell_ids_pattern_s = make_cell_order_condition(\n",
-    "        use_cell_order, get_mem_cell_pattern(raw_data, det_inp_sources)\n",
-    "    )\n",
-    "    print(\"Memory cells order:\", cell_ids_pattern_s)\n",
-    "\n",
-    "if cell_ids_pattern_s is not None:\n",
-    "    dark_condition = base_condition + [\n",
-    "        dict(parameter_name='Memory cell order', value=cell_ids_pattern_s),\n",
-    "    ]\n",
-    "else:\n",
-    "    dark_condition = base_condition.copy()\n",
-    "\n",
-    "illuminated_calibrations = {\n",
-    "    20: 'BadPixelsFF',\n",
-    "    42: 'GainAmpMap',\n",
-    "    43: 'FFMap',\n",
-    "    44: 'RelativeGain',\n",
-    "}\n",
-    "\n",
-    "illuminated_condition = base_condition + [\n",
-    "    dict(parameter_name='Source Energy', value=photon_energy),\n",
-    "    dict(parameter_name='category', value=category)\n",
-    "]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "const_data = {}\n",
-    "\n",
-    "print('Querying calibration database', end='', flush=True)\n",
-    "start = perf_counter()\n",
-    "for k_da in karabo_da:\n",
-    "    pdu = None\n",
-    "    retrieved_constants[k_da] = dict()\n",
-    "    const_mdata = retrieved_constants[k_da][\"constants\"] = dict()\n",
-    "    for calibrations, condition in [\n",
-    "        (dark_calibrations, dark_condition),\n",
-    "        (illuminated_calibrations, illuminated_condition)\n",
-    "    ]:\n",
-    "        resp = CalibrationConstantVersion.get_closest_by_time_by_detector_conditions(\n",
-    "            client, karabo_id, list(calibrations.keys()),\n",
-    "            {'parameters_conditions_attributes': condition},\n",
-    "            karabo_da=k_da, event_at=creation_time.isoformat())\n",
-    "\n",
-    "        if not resp[\"success\"]:\n",
-    "            print(f\"ERROR: Constants {list(calibrations.values())} \"\n",
-    "            f\"were not retrieved, {resp['app_info']}\")\n",
-    "            for cname in calibrations.values():\n",
-    "                if cname == 'Offset':\n",
-    "                    raise Exception(\"Could not find offset constant, will not correct data\")\n",
-    "                const_mdata[cname] = dict()\n",
-    "                const_mdata[cname][\"file-path\"] = None\n",
-    "                const_mdata[cname][\"dataset-name\"] = None\n",
-    "                const_mdata[cname][\"creation-time\"] = None     \n",
-    "            continue\n",
-    "\n",
-    "        for ccv in resp[\"data\"]:\n",
-    "            cc = ccv['calibration_constant']\n",
-    "            cname = calibrations[cc['calibration_id']]\n",
-    "            const_mdata[cname] = dict()\n",
-    "            const_mdata[cname][\"file-path\"] = str(Path(ccv['path_to_file']) / ccv['file_name'])\n",
-    "            const_mdata[cname][\"dataset-name\"] = ccv['data_set_name']\n",
-    "            const_mdata[cname][\"creation-time\"] = ccv['begin_at']\n",
-    "            pdu = ccv['physical_detector_unit']['physical_name']\n",
-    "\n",
-    "        print('.', end='', flush=True)\n",
-    "    retrieved_constants[k_da][\"physical-detector-unit\"] = pdu\n",
-    "metadata.save()\n",
-    "\n",
-    "total_time = perf_counter() - start\n",
-    "print(f'{total_time:.1f}s')\n",
-    "print(f\"Stored retrieved constants in {metadata.filename}\")"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3.8.11 ('.cal4_venv')",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.8.11"
-  },
-  "orig_nbformat": 4,
-  "vscode": {
-   "interpreter": {
-    "hash": "ccde353e8822f411c1c49844e1cbe3edf63293a69efd975d1b44f5e852832668"
-   }
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/notebooks/ePix100/Characterize_Darks_ePix100_NBC.ipynb b/notebooks/ePix100/Characterize_Darks_ePix100_NBC.ipynb
index 8a2e065f8eb02892ceff593bb548d736ca74cedb..4b22d84b093aaeb25c778a6babe8da414a45167b 100644
--- a/notebooks/ePix100/Characterize_Darks_ePix100_NBC.ipynb
+++ b/notebooks/ePix100/Characterize_Darks_ePix100_NBC.ipynb
@@ -53,7 +53,8 @@
     "fix_integration_time = -1 # Integration time. Set to -1 to read from .h5 file\n",
     "fix_temperature = -1 # Fixed temperature in Kelvin. Set to -1 to read from .h5 file\n",
     "temp_limits = 5 # Limit for parameter Operational temperature\n",
-    "badpixel_threshold_sigma = 5.  # Bad pixels defined by values outside n times this std from median. Default: 5\n",
+    "badpixel_noise_sigma = 5  # Bad pixels defined by noise value outside n * std from median. Default: 5\n",
+    "badpixel_offset_sigma = 2  # Bad pixels defined by offset value outside n * std from median. Default: 2\n",
     "CM_N_iterations = 2  # Number of iterations for common mode correction. Set to 0 to skip it\n",
     "\n",
     "# Parameters used during selecting raw data trains.\n",
@@ -380,15 +381,15 @@
     "    lut_label='[ADU]',\n",
     "    x_label='Column', \n",
     "    y_label='Row',\n",
-    "    vmin=max(0, np.round((stats['median'] - badpixel_threshold_sigma*stats['std']))), \n",
-    "    vmax=np.round(stats['median'] + badpixel_threshold_sigma*stats['std'])\n",
+    "    vmin=max(0, np.round((stats['median'] - badpixel_noise_sigma*stats['std']))), \n",
+    "    vmax=np.round(stats['median'] + badpixel_noise_sigma*stats['std'])\n",
     ")\n",
     "fig.suptitle('Noise Map', x=.5, y=.9, fontsize=16)\n",
     "fig.set_size_inches(h=15, w=15)\n",
     "\n",
     "# Calculate overall noise histogram\n",
-    "bins = np.arange(max(0, stats['mean'] - badpixel_threshold_sigma*stats['std']),\n",
-    "                 stats['mean'] + badpixel_threshold_sigma*stats['std'], \n",
+    "bins = np.arange(max(0, stats['mean'] - badpixel_noise_sigma*stats['std']),\n",
+    "                 stats['mean'] + badpixel_noise_sigma*stats['std'], \n",
     "                 stats['std']/100)\n",
     "\n",
     "h, c = np.histogram(\n",
@@ -436,8 +437,8 @@
     "    aspect=1.5,\n",
     "    x_label='Noise [ADU]',\n",
     "    y_label='Counts',\n",
-    "    x_range=(max(0, stats['median'] - badpixel_threshold_sigma*stats['std']),\n",
-    "             stats['median'] + badpixel_threshold_sigma*stats['std']),\n",
+    "    x_range=(max(0, stats['median'] - badpixel_noise_sigma*stats['std']),\n",
+    "             stats['median'] + badpixel_noise_sigma*stats['std']),\n",
     "    y_range=(0, max(d[0]['y'])*1.1),\n",
     ")\n",
     "plt.grid(linestyle = ':')\n",
@@ -592,11 +593,11 @@
     "constant_maps['BadPixelsDark'] = np.zeros(constant_maps['Offset'].shape, np.uint32)\n",
     "\n",
     "# Find noise related bad pixels\n",
-    "constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Noise'], badpixel_threshold_sigma, sensor_size//2)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
+    "constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Noise'], badpixel_noise_sigma, sensor_size//2)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
     "constant_maps['BadPixelsDark'][~np.isfinite(constant_maps['Noise'])] = BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
     "\n",
     "# Find offset related bad pixels\n",
-    "constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Offset'], badpixel_threshold_sigma, sensor_size//2)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n",
+    "constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Offset'], badpixel_offset_sigma, sensor_size//2)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n",
     "constant_maps['BadPixelsDark'][~np.isfinite(constant_maps['Offset'])] = BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
     "\n",
     "# Plot Bad Pixels Map\n",
@@ -641,7 +642,7 @@
     "    print('Common mode correction not applied.')\n",
     "else:\n",
     "    \n",
-    "    commonModeBlockSize = sensor_size//2\n",
+    "    commonModeBlockSize = (sensor_size//[8,2]).astype(int) # bank size (x=96,y=354) pixels\n",
     "\n",
     "    # Instantiate common mode calculators for column and row CM correction\n",
     "    cmCorrection_col = xcal.CommonModeCorrection(\n",
@@ -693,7 +694,7 @@
     "        noise_map_corrected = np.nanstd(data, axis=0)[..., np.newaxis]\n",
     "\n",
     "        # Update bad pixels map \n",
-    "        constant_maps['BadPixelsDark'][eval_bpidx(noise_map_corrected, badpixel_threshold_sigma, sensor_size//2)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
+    "        constant_maps['BadPixelsDark'][eval_bpidx(noise_map_corrected, badpixel_noise_sigma, sensor_size//2)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
     "        bp_offset.append(np.sum(constant_maps['BadPixelsDark']==1))\n",
     "        bp_noise.append(np.sum(constant_maps['BadPixelsDark']==2))\n",
     "\n",
diff --git a/src/cal_tools/calcat_interface.py b/src/cal_tools/calcat_interface.py
index 1ad313f0a97f51ed2fc9f2b4e8eacb36a7d96598..bca8e5a3f08f75050537aac0d89c8e165bf373c4 100644
--- a/src/cal_tools/calcat_interface.py
+++ b/src/cal_tools/calcat_interface.py
@@ -1001,8 +1001,18 @@ class LPD_CalibrationData(SplitConditionCalibrationData):
         "Pixels X",
         "Pixels Y",
         "Feedback capacitor",
+        "Memory cell order",
+    ]
+
+    illuminated_parameters = [
+        "Sensor Bias Voltage",
+        "Memory cells",
+        "Pixels X",
+        "Pixels Y",
+        "Feedback capacitor",
+        "Source Energy",
+        "category"
     ]
-    illuminated_parameters = dark_parameters + ["Source Energy", "category"]
 
     def __init__(
         self,
@@ -1013,6 +1023,7 @@ class LPD_CalibrationData(SplitConditionCalibrationData):
         pixels_x=256,
         pixels_y=256,
         source_energy=9.2,
+        memory_cell_order=None,
         category=1,
         modules=None,
         client=None,
@@ -1034,6 +1045,7 @@ class LPD_CalibrationData(SplitConditionCalibrationData):
         self.pixels_x = pixels_x
         self.pixels_y = pixels_y
         self.feedback_capacitor = feedback_capacitor
+        self.memory_cell_order = memory_cell_order
         self.source_energy = source_energy
         self.category = category
 
diff --git a/src/xfel_calibrate/notebooks.py b/src/xfel_calibrate/notebooks.py
index a35bdaacf97d371abdf06ed5fbbe3d8f4f21a3da..b505e7530d4daf3fe438be191fc0e1743dfca0b9 100644
--- a/src/xfel_calibrate/notebooks.py
+++ b/src/xfel_calibrate/notebooks.py
@@ -76,8 +76,6 @@ notebooks = {
                             "cluster cores": 8},
         },
         "CORRECT": {
-            "pre_notebooks": [
-                "notebooks/LPD/LPD_retrieve_constants_precorrection.ipynb"],
             "notebook": "notebooks/LPD/LPD_Correct_Fast.ipynb",
             "concurrency": {"parameter": "sequences",
                             "default concurrency": [-1],
diff --git a/webservice/job_monitor.py b/webservice/job_monitor.py
index f86a6c7711eb7f82f9a2ea8b24611e00affb2ac3..b447488772cd090cd63fc346da6969ba24759f2d 100644
--- a/webservice/job_monitor.py
+++ b/webservice/job_monitor.py
@@ -23,6 +23,11 @@ except ImportError:
 
 log = logging.getLogger(__name__)
 
+STATES_FINISHED = {  # https://slurm.schedmd.com/squeue.html#lbAG
+    'BOOT_FAIL',  'CANCELLED', 'COMPLETED',  'DEADLINE', 'FAILED',
+    'OUT_OF_MEMORY', 'SPECIAL_EXIT', 'TIMEOUT',
+}
+
 
 class NoOpProducer:
     """Fills in for Kafka producer object when setting that up fails"""
@@ -50,10 +55,10 @@ def slurm_status(filter_user=True):
     :return: a dictionary indexed by slurm jobid and containing a tuple
              of (status, run time) as values.
     """
-    cmd = ["squeue"]
+    cmd = ["squeue", "--states=all"]
     if filter_user:
         cmd += ["--me"]
-    res = run(cmd, stdout=PIPE)
+    res = run(cmd, stdout=PIPE, stderr=PIPE)
     if res.returncode == 0:
         rlines = res.stdout.decode().split("\n")
         statii = {}
@@ -65,6 +70,10 @@ def slurm_status(filter_user=True):
             except ValueError:  # not enough values to unpack in split
                 pass
         return statii
+    else:
+        log.error("Running squeue failed. stdout: %r, stderr: %r",
+                  res.stdout.decode(), res.stderr.decode())
+        return None
 
 
 def slurm_job_status(jobid):
@@ -148,15 +157,19 @@ class JobsMonitor:
 
         Newly completed executions are present with an empty list.
         """
+        jobs_to_check = self.job_db.execute(
+            "SELECT job_id, exec_id FROM slurm_jobs WHERE finished = 0"
+        ).fetchall()
+        if not jobs_to_check:
+            log.debug("No unfinished jobs to check")
+            return {}
+
         statii = slurm_status()
         # Check that slurm is giving proper feedback
         if statii is None:
             return {}
         log.debug(f"SLURM info {statii}")
 
-        jobs_to_check = self.job_db.execute(
-            "SELECT job_id, exec_id FROM slurm_jobs WHERE finished = 0"
-        ).fetchall()
         ongoing_jobs_by_exn = {}
         updates = []
         for r in jobs_to_check:
@@ -166,13 +179,13 @@ class JobsMonitor:
             if str(r['job_id']) in statii:
                 # statii contains jobs which are still going (from squeue)
                 slstatus, runtime = statii[str(r['job_id'])]
-                finished = False
                 execn_ongoing_jobs.append(f"{slstatus}-{runtime}")
 
             else:
                 # These jobs have finished (successfully or otherwise)
                 _, runtime, slstatus = slurm_job_status(r['job_id'])
-                finished = True
+
+            finished = slstatus in STATES_FINISHED
 
             updates.append((finished, runtime, slstatus, r['job_id']))