diff --git a/notebooks/Jungfrau/Jungfrau_dark_analysis_all_gains_burst_mode_NBC.ipynb b/notebooks/Jungfrau/Jungfrau_dark_analysis_all_gains_burst_mode_NBC.ipynb
index 14358ef6fd28396300539ce3e8a2d4a26a82bc28..0fd3cc3dbfab40289ad5c1765bbc36bb86cd117e 100644
--- a/notebooks/Jungfrau/Jungfrau_dark_analysis_all_gains_burst_mode_NBC.ipynb
+++ b/notebooks/Jungfrau/Jungfrau_dark_analysis_all_gains_burst_mode_NBC.ipynb
@@ -33,7 +33,7 @@
     "ctrl_source_template = '{}/DET/CONTROL'  # template for control source name (filled with karabo_id_control)\n",
     "\n",
     "# Parameters for calibration database and storing constants.\n",
-    "cal_db_interface = 'tcp://max-exfl-cal001:8016#8045'  # calibrate db interface to connect to\n",
+    "cal_db_interface = ''  # calibrate db interface to connect to  # KEEP FOR THE WEBSERVICE\n",
     "cal_db_timeout = 300000 # timeout on caldb requests\n",
     "local_output = True  # output constants locally\n",
     "db_output = False  # output constants to database\n",
@@ -72,10 +72,9 @@
    "outputs": [],
    "source": [
     "import os\n",
-    "import warnings\n",
     "from datetime import timedelta\n",
     "from logging import warning\n",
-    "warnings.filterwarnings('ignore')\n",
+    "from tempfile import NamedTemporaryFile\n",
     "\n",
     "import matplotlib\n",
     "import matplotlib.pyplot as plt\n",
@@ -85,8 +84,6 @@
     "from IPython.display import Markdown, display\n",
     "from extra_data import RunDirectory\n",
     "\n",
-    "matplotlib.use('agg')\n",
-    "%matplotlib inline\n",
     "\n",
     "from XFELDetAna.plotting.heatmap import heatmapPlot\n",
     "from XFELDetAna.plotting.histogram import histPlot\n",
@@ -95,17 +92,20 @@
     "    CalibrationData,\n",
     "    JUNGFRAUConditions,\n",
     ")\n",
-    "from cal_tools.jungfrau import jungfraulib\n",
+    "from cal_tools.constants import (\n",
+    "    CCVAlreadyInjectedError,\n",
+    "    inject_ccv,\n",
+    "    write_ccv,\n",
+    ")\n",
     "from cal_tools.enums import BadPixels, JungfrauGainMode\n",
-    "from cal_tools.tools import (\n",
-    "    calcat_creation_time,\n",
-    "    get_pdu_from_db,\n",
-    "    get_random_db_interface,\n",
-    "    get_report,\n",
-    "    save_const_to_h5,\n",
-    "    send_to_db,\n",
+    "from cal_tools.jungfrau import jungfraulib\n",
+    "from cal_tools.restful_config import (\n",
+    "    extra_calibration_client,\n",
     ")\n",
-    "from iCalibrationDB import Conditions, Constants"
+    "from cal_tools.tools import calcat_creation_time, pdus_by_detector_id\n",
+    "\n",
+    "matplotlib.use('agg')\n",
+    "%matplotlib inline"
    ]
   },
   {
@@ -129,9 +129,6 @@
     "print(f\"Using {creation_time} as creation time\")\n",
     "os.makedirs(out_folder, exist_ok=True)\n",
     "\n",
-    "cal_db_interface = get_random_db_interface(cal_db_interface)\n",
-    "print(f'Calibration database interface: {cal_db_interface}')\n",
-    "\n",
     "if karabo_id_control == \"\":\n",
     "    karabo_id_control = karabo_id"
    ]
@@ -143,9 +140,6 @@
    "outputs": [],
    "source": [
     "proposal = list(filter(None, in_folder.strip('/').split('/')))[-2]\n",
-    "file_loc = f\"proposal:{proposal} runs:{run_high} {run_med} {run_low}\"\n",
-    "\n",
-    "report = get_report(metadata_folder)\n",
     "\n",
     "step_timer = step_timing.StepTimer()"
    ]
@@ -233,22 +227,31 @@
    "outputs": [],
    "source": [
     "step_timer.start()\n",
+    "\n",
     "# set the operating condition\n",
-    "condition = Conditions.Dark.jungfrau(\n",
+    "conditions = JUNGFRAUConditions(\n",
+    "    sensor_bias_voltage=bias_voltage,\n",
     "    memory_cells=memory_cells,\n",
-    "    bias_voltage=bias_voltage,\n",
     "    integration_time=integration_time,\n",
     "    gain_setting=gain_setting,\n",
     "    gain_mode=gain_mode,\n",
     ")\n",
     "\n",
-    "db_modules = get_pdu_from_db(\n",
-    "    karabo_id=karabo_id,\n",
-    "    karabo_da=karabo_da,\n",
-    "    constant=Constants.jungfrau.Offset(),\n",
-    "    condition=condition,\n",
-    "    cal_db_interface=cal_db_interface,\n",
-    "    snapshot_at=creation_time)\n",
+    "cc = extra_calibration_client()\n",
+    "det_id = cc.detector_by_identifier(karabo_id)['id']\n",
+    "pdus = pdus_by_detector_id(cc, det_id, snapshot_at=creation_time)\n",
+    "\n",
+    "da_to_pdu = dict()\n",
+    "pdu_to_uuid = dict()\n",
+    "for pdu in pdus:\n",
+    "    if pdu['karabo_da'] in karabo_da:  # exclude unselected das\n",
+    "        da_to_pdu[pdu['karabo_da']] = pdu['physical_name']\n",
+    "        pdu_to_uuid[pdu['physical_name']] = pdu['uuid']\n",
+    "\n",
+    "first_pdu = pdus[0]\n",
+    "detector_info = first_pdu['detector']\n",
+    "detector_info['detector_type'] = first_pdu['detector_type']['name']\n",
+    "\n",
     "step_timer.done_step('Set conditions and get PDU names from CalCat.')"
    ]
   },
@@ -428,7 +431,7 @@
     "    unit = '[ADCu]'\n",
     "    # TODO: Fix plots arrangment and speed for Jungfrau burst mode.\n",
     "    step_timer.start()\n",
-    "    for pdu, mod in zip(db_modules, karabo_da):\n",
+    "    for mod, pdu in da_to_pdu.items():\n",
     "        for g_idx in gains:\n",
     "            for cell in range(0, memory_cells):\n",
     "                f_o0 = heatmapPlot(\n",
@@ -541,7 +544,7 @@
    "source": [
     "step_timer.start()\n",
     "\n",
-    "for pdu, mod in zip(db_modules, karabo_da):\n",
+    "for mod, pdu in da_to_pdu.items():\n",
     "    display(Markdown(f\"### Badpixels for module {mod} ({pdu}):\"))\n",
     "    offset_abs_threshold = np.array(offset_abs_threshold)\n",
     "\n",
@@ -585,50 +588,48 @@
    "outputs": [],
    "source": [
     "step_timer.start()\n",
-    "for mod, db_mod in zip(karabo_da, db_modules):\n",
-    "    constants = {\n",
-    "        'Offset': np.moveaxis(offset_map[mod], 0, 1),\n",
-    "        'Noise': np.moveaxis(noise_map[mod], 0, 1),\n",
-    "        'BadPixelsDark': np.moveaxis(bad_pixels_map[mod], 0, 1),\n",
-    "    }\n",
     "\n",
-    "    md = None\n",
+    "constants = {}\n",
+    "for mod, pdu in da_to_pdu.items():\n",
+    "    constants['Offset10Hz'] = np.moveaxis(offset_map[mod], 0, 1)\n",
+    "    constants['Noise10Hz'] = np.moveaxis(noise_map[mod], 0, 1)\n",
+    "    constants['BadPixelsDark10Hz'] = np.moveaxis(bad_pixels_map[mod], 0, 1)\n",
     "\n",
-    "    for key, const_data in constants.items():\n",
-    "\n",
-    "        const =  getattr(Constants.jungfrau, key)()\n",
-    "        const.data = const_data\n",
-    "\n",
-    "        for parm in condition.parameters:\n",
-    "            if parm.name == \"Integration Time\":\n",
-    "                parm.lower_deviation = time_limits\n",
-    "                parm.upper_deviation = time_limits\n",
-    "\n",
-    "        if db_output:\n",
-    "            md = send_to_db(\n",
-    "                db_module=db_mod,\n",
-    "                karabo_id=karabo_id,\n",
-    "                constant=const,\n",
-    "                condition=condition,\n",
-    "                file_loc=file_loc,\n",
-    "                report_path=report,\n",
-    "                cal_db_interface=cal_db_interface,\n",
-    "                creation_time=creation_time,\n",
-    "                timeout=cal_db_timeout,\n",
-    "            )\n",
-    "        if local_output:\n",
-    "            md = save_const_to_h5(\n",
-    "                db_module=db_mod,\n",
-    "                karabo_id=karabo_id,\n",
-    "                constant=const,\n",
-    "                condition=condition,\n",
-    "                data=const.data,\n",
-    "                file_loc=file_loc,\n",
-    "                report=report,\n",
-    "                creation_time=creation_time,\n",
-    "                out_folder=out_folder,\n",
+    "    md = None\n",
+    "    for const_name, const_data in constants.items():\n",
+    "        with NamedTemporaryFile(dir=out_folder) as tempf:\n",
+    "            ccv_root = write_ccv(\n",
+    "                tempf.name,\n",
+    "                pdu,\n",
+    "                pdu_to_uuid[pdu],\n",
+    "                detector_info[\"detector_type\"],\n",
+    "                const_name,\n",
+    "                conditions,\n",
+    "                creation_time,\n",
+    "                proposal,[run_high, run_med, run_low],\n",
+    "                const_data,\n",
+    "                dims=[\"fast_scan\", \"slow_scan\", \"cell\", \"gain\"],\n",
+    "                deviations={\"integration_time\": time_limits},\n",
     "            )\n",
-    "            print(f\"Calibration constant {key} is stored locally at {out_folder}.\\n\")\n",
+    "\n",
+    "            if db_output:\n",
+    "                try:\n",
+    "                    inject_ccv(tempf.name, ccv_root, metadata_folder)\n",
+    "                    print(f\"{const_name} for {mod}({pdu}) has been injected to the database.\")\n",
+    "                except CCVAlreadyInjectedError:\n",
+    "                    warning(\n",
+    "                        f\"{const_name} calibration constant version for {pdu}\"\n",
+    "                        \" has been already injected.\\n\")\n",
+    "\n",
+    "            if local_output:\n",
+    "                ofile = f\"{out_folder}/const_{const_name}_{pdu}.h5\"\n",
+    "                \n",
+    "                if os.path.isfile(ofile):\n",
+    "                    print(f'File {ofile} already exists and will be overwritten\\n')\n",
+    "\n",
+    "                from shutil import copyfile\n",
+    "                copyfile(tempf.name, ofile)\n",
+    "                print(f\"Calibration constant {const_name} is stored locally at {out_folder}.\\n\")\n",
     "\n",
     "print(\"Constants parameter conditions are:\\n\")\n",
     "print(\n",
@@ -679,17 +680,11 @@
     "    begin_at_strategy=\"prior\",\n",
     ")\n",
     "\n",
-    "# TODO: remove when injection part is updated.\n",
-    "_cnames_mapping = {\n",
-    "    \"Offset\": \"Offset10Hz\",\n",
-    "    \"Noise\": \"Noise10Hz\",\n",
-    "    \"BadPixelsDark\": \"BadPixelsDark10Hz\",\n",
-    "}\n",
     "for mod in karabo_da:\n",
     "    old_const[mod] = {}\n",
     "    old_mdata[mod] = {}\n",
     "    for cname in constants.keys():\n",
-    "        cmdata = jf_caldata.get(_cnames_mapping[cname], None)\n",
+    "        cmdata = jf_caldata.get(cname, None)\n",
     "        data_found = cmdata and mod in cmdata.aggregator_names\n",
     "        if data_found:\n",
     "            old_const[mod][cname] = cmdata[mod].ndarray()\n",
@@ -717,10 +712,10 @@
     "display(Markdown(\"## The following pre-existing constants are used for comparison:\"))\n",
     "\n",
     "for mod, consts in old_mdata.items():\n",
-    "    pdu = db_modules[karabo_da.index(mod)]\n",
+    "    pdu = da_to_pdu[mod]\n",
     "    display(Markdown(f\"- {mod} ({pdu})\"))\n",
     "    for const in consts:\n",
-    "        display(Markdown(f\"    - {const} at {consts[const]['timestamp']}\"))\n",
+    "        display(Markdown(f\"    - {const}: {consts[const]['timestamp']}\"))\n",
     "    # saving locations of old constants for summary notebook\n",
     "    with open(f\"{metadata_folder or out_folder}/module_metadata_{mod}.yml\", \"w\") as fd:\n",
     "        yaml.safe_dump(\n",
diff --git a/notebooks/Jungfrau/Jungfrau_darks_Summary_NBC.ipynb b/notebooks/Jungfrau/Jungfrau_darks_Summary_NBC.ipynb
index 607f15be1f61fca8639abe9eb1efdd6ae121c6b5..9f9df90247da77070bd792f57d9fd504ec890924 100644
--- a/notebooks/Jungfrau/Jungfrau_darks_Summary_NBC.ipynb
+++ b/notebooks/Jungfrau/Jungfrau_darks_Summary_NBC.ipynb
@@ -75,7 +75,7 @@
     "out_folder = Path(out_folder)\n",
     "metadata = CalibrationMetadata(metadata_folder or out_folder)\n",
     "mod_mapping = metadata.setdefault(\"modules-mapping\", {})\n",
-    "dark_constants = [\"Offset\", \"Noise\", \"BadPixelsDark\"]\n",
+    "cnames = [\"Offset10Hz\", \"Noise10Hz\", \"BadPixelsDark10Hz\"]\n",
     "\n",
     "prev_const_metadata = {}\n",
     "for fn in Path(metadata_folder or out_folder).glob(\"module_metadata_*.yml\"):\n",
@@ -113,25 +113,38 @@
    "outputs": [],
    "source": [
     "fixed_gain = False  # constant is adaptive by default.\n",
-    "# Get the constant shape from one of the local constants.\n",
-    "# This is one way to realize the number of memory cells.\n",
-    "with h5py.File(list(out_folder.glob(\"const_Offset_*\"))[0], 'r') as f:\n",
-    "    const_shape = f[\"data\"][()].shape\n",
+    "\n",
+    "# TODO: replace this shape and mode finding part\n",
+    "# by storing detector conditions in the metadata files.\n",
+    "try:\n",
+    "    # Get constant shape and gain mode from an old offset ccv.\n",
+    "    cname = cnames[0]\n",
+    "    old_offset_mod = next(\n",
+    "        mod for mod, v in prev_const_metadata.items() if v[cname][\"filepath\"] is not None)  # noqa\n",
+    "except StopIteration:  # no old ccvs\n",
+    "    print('No old constants to compare against. Skipping summary plots')\n",
+    "    import sys\n",
+    "    sys.exit(0)\n",
+    "\n",
+    "with h5py.File(\n",
+    "    prev_const_metadata[old_offset_mod][cname][\"filepath\"], 'r') as f:\n",
+    "    dataset = prev_const_metadata[old_offset_mod][cname][\"dataset\"]\n",
+    "    const_shape = f[f\"{dataset}/data\"][()].shape\n",
     "    # Get fixed gain value to decide offset vmin, vmax\n",
     "    # for later constant map plots.\n",
-    "    gain_mode = \"condition/Gain mode/value\"\n",
+    "    gain_mode = f\"{dataset}/operating_conditions/gain_mode\"\n",
     "    if gain_mode in f:\n",
     "        fixed_gain = f[gain_mode][()]\n",
     "        \n",
     "\n",
     "initial_stacked_constants = np.full(((nmods,)+const_shape), np.nan)\n",
-    "curr_constants = { c: initial_stacked_constants.copy() for c in dark_constants}\n",
-    "prev_constants = { c: initial_stacked_constants.copy() for c in dark_constants}\n",
+    "curr_constants = { c: initial_stacked_constants.copy() for c in cnames}\n",
+    "prev_constants = { c: initial_stacked_constants.copy() for c in cnames}\n",
     "\n",
-    "exculded_constants = []  # constants excluded from comparison plots.\n",
+    "excluded_constants = []  # constants excluded from comparison plots.\n",
     "\n",
     "# Loop over modules\n",
-    "for cname in dark_constants:\n",
+    "for cname in cnames:\n",
     "    excluded_modules = []  # modules with no previous constants.\n",
     "    for i, mod in enumerate(sorted(expected_modules)):\n",
     "        # Loop over expected dark constants in out_folder.\n",
@@ -139,29 +152,31 @@
     "        pdu = mod_mapping[mod]\n",
     "    \n",
     "        # first load new constant\n",
+    "        # TODO: Loose dependency on local stored ccvs files.\n",
     "        fpath = out_folder / f\"const_{cname}_{pdu}.h5\"\n",
+    "        dataset = f\"{pdu}/{cname}/0/data\"\n",
     "        with h5py.File(fpath, 'r') as f:\n",
-    "            curr_constants[cname][i, ...] = f[\"data\"][()]\n",
+    "            curr_constants[cname][i, ...] = f[dataset][()]\n",
     "\n",
     "        # Load previous constants.\n",
     "        old_mod_mdata = prev_const_metadata[mod]\n",
     "\n",
     "        if cname in old_mod_mdata:  # a module can be missing from detector dark processing.\n",
     "            filepath = old_mod_mdata[cname][\"filepath\"]\n",
-    "            h5path = old_mod_mdata[cname][\"dataset\"]\n",
-    "            if not filepath or not h5path:\n",
+    "            dataset = old_mod_mdata[cname][\"dataset\"]\n",
+    "            if not filepath or not dataset:\n",
     "                excluded_modules.append(mod)\n",
     "                prev_constants[cname][i, ...].fill(np.nan)\n",
     "            else:\n",
     "                with h5py.File(filepath, \"r\") as fd:\n",
-    "                    prev_constants[cname][i, ...] = fd[f\"{h5path}/data\"][()]\n",
+    "                    prev_constants[cname][i, ...] = fd[f\"{dataset}/data\"][()]\n",
     "\n",
     "    if excluded_modules:\n",
     "        print(f\"Previous {cname} constants for {excluded_modules} are not available.\\n.\")\n",
     "    # Exclude constants from comparison plots, if the corresponding\n",
     "    # previous constants are not available for all modules.\n",
     "    if len(excluded_modules) == nmods:\n",
-    "        exculded_constants.append(cname)\n",
+    "        excluded_constants.append(cname)\n",
     "        print(f\"No comparison plots for {cname}.\\n\")"
    ]
   },
@@ -307,7 +322,7 @@
     "\n",
     "            # Avoid difference plots if previous constants\n",
     "            # are missing for the detector.\n",
-    "            if cname in exculded_constants and axname != \"map\":\n",
+    "            if cname in excluded_constants and axname != \"map\":\n",
     "                break\n",
     "            ax = fig.add_subplot(axv[\"gs\"])\n",
     "\n",
@@ -353,7 +368,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "if curr_constants[\"Offset\"].shape[-2] > 1:\n",
+    "if const_shape[-2] > 1:\n",
     "    display(Markdown(\"## Summary across pixels per memory cells\"))\n",
     "\n",
     "    # Plot mean and std of memcells for each module, gain, and constant\n",
diff --git a/src/cal_tools/constants.py b/src/cal_tools/constants.py
index 21b047c003b5c8c0541479e2b75aeefc9c3f85e3..b8cb345a3855fb6b271ffa7722233f77cef9e9bb 100644
--- a/src/cal_tools/constants.py
+++ b/src/cal_tools/constants.py
@@ -1,26 +1,37 @@
-
-from datetime import datetime, timezone
-from struct import pack, unpack
-from pathlib import Path
-from shutil import copyfile
-from hashlib import md5
 import binascii
 import time
+from hashlib import md5
+from pathlib import Path
+from shutil import copyfile
+from struct import pack, unpack
+from typing import List, Optional, Union
 
-import numpy as np
 import h5py
-
+import numpy as np
 from calibration_client import CalibrationClient
-from cal_tools.calcat_interface2 import get_default_caldb_root, get_client
-from cal_tools.tools import run_prop_seq_from_path
+
+from cal_tools.calcat_interface2 import (
+    CalCatAPIError,
+    get_default_caldb_root,
+)
 from cal_tools.restful_config import calibration_client
 
+CONDITION_NAME_MAX_LENGTH = 60
+
+
+class InjectAPIError(CalCatAPIError):
+    """Raised when a CCV injection request to CalCat fails."""
+
+
+class CCVAlreadyInjectedError(InjectAPIError):
+    """Raised when the CCV being injected already exists in CalCat."""
+
 
 def write_ccv(
     const_path,
     pdu_name, pdu_uuid, detector_type,
     calibration, conditions, created_at, proposal, runs,
-    data, dims, key='0'
+    data, dims, key='0', deviations=None,
 ):
     """Write CCV data file.
 
@@ -37,8 +48,13 @@ def write_ccv(
         runs (Iterable of int): Raw data runs the calibration data is
             generated from
         data (ndarray): Calibration constant data
-        dims (Iterable of str):
-        key (str, optional):
+        dims (Iterable of str): Dimension names for the constant data.
+        key (str, optional): Name of the HDF5 group the CCV data is
+            stored under within the constant file. Defaults to '0'.
+        deviations (dict, optional): Allowed deviations per operating
+            condition, e.g. {"integration_time": 0.025}. Each value is
+            either a (lower, upper) tuple or a single value applied
+            symmetrically. Defaults to None, i.e. no deviations.
 
     Returns:
         (str) CCV HDF group name.
@@ -70,11 +86,18 @@ def write_ccv(
         opcond_dict = conditions.make_dict(
             conditions.calibration_types[calibration])
         for db_name, value in opcond_dict.items():
-            key = db_name.lower().replace(' ', '_')
-            dset = opcond_group.create_dataset(key, data=value,
-                                               dtype=np.float64)
-            dset.attrs['lower_deviation'] = 0.0
-            dset.attrs['upper_deviation'] = 0.0
+            cond_name = db_name.lower().replace(' ', '_')
+            dset = opcond_group.create_dataset(
+                cond_name, data=value, dtype=np.float64)
+
+            deviation = (deviations or {}).get(cond_name, (0.0, 0.0))
+            if isinstance(deviation, (int, float)):
+                lower_dev = upper_dev = deviation
+            else:
+                lower_dev, upper_dev = deviation
+
+            dset.attrs['lower_deviation'] = lower_dev
+            dset.attrs['upper_deviation'] = upper_dev
             dset.attrs['database_name'] = db_name
 
         dset = ccv_group.create_dataset('data', data=data)
@@ -83,7 +106,79 @@ def write_ccv(
     return ccv_group_name
 
 
-def inject_ccv(const_src, ccv_root, report_to=None):
+def get_condition_dict(
+    name: str,
+    value: Union[float, str, int, bool],
+    lower_deviation: float = 0.0,
+    upper_deviation: float = 0.0,
+):
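+    """Build a single CalCat condition-parameter dict.
+
+    Args:
+        name (str): Database name of the condition parameter.
+        value (Union[float, str, int, bool]): Parameter value; numeric
+            and boolean values are stored as floats, everything else
+            as strings.
+        lower_deviation (float, optional): Lower deviation. Defaults to 0.0.
+        upper_deviation (float, optional): Upper deviation. Defaults to 0.0.
+
+    Returns:
+        dict: Condition parameter in the format expected by CalCat.
+    """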
+
+    def to_float_or_string(value):
+        """CALCAT expects data to either be float or a string.
+        """
+        try:  # Any digit or boolean
+            return float(value)
+        except:
+            return str(value)
+
+    return {
+        'parameter_name': name,
+        'value': to_float_or_string(value),
+        'lower_deviation_value': lower_deviation,
+        'upper_deviation_value': upper_deviation,
+        'flg_available': True
+    }
+
+
+def generate_unique_condition_name(
+    detector_type: str,
+    pdu_name: str,
+    pdu_uuid: float,
+    cond_params: List[dict],
+):
+    """Generate a unique condition using UUID and timestamp.
+
+    Args:
+        detector_type (str): detector type.
+        pdu_name (str): Physical detector unit db name.
+        pdu_uuid (float): Physical detector unit db id.
+        cond_params (List[dict]): A list of condition dictionaries as
+            returned by get_condition_dict, e.g. [{
+                "parameter_name": "Memory Cells",
+                "value": 352.0,
+                "lower_deviation_value": 0.0,
+                "upper_deviation_value": 0.0,
+                "flg_available": True
+            }]
+
+    Returns:
+        str: A unique name used for the conditions table.
+    """
+    unique_name = detector_type[:detector_type.index('-Type')] + ' Def'
+    cond_hash = md5(pdu_name.encode())
+    cond_hash.update(int(pdu_uuid).to_bytes(
+        length=8, byteorder='little', signed=False))
+
+    for param_dict in cond_params:
+        cond_hash.update(str(param_dict['parameter_name']).encode())
+        cond_hash.update(str(param_dict['value']).encode())
+
+    unique_name += binascii.b2a_base64(cond_hash.digest()).decode()
+    return unique_name[:CONDITION_NAME_MAX_LENGTH]
+
+
+def get_raw_data_location(proposal: str, runs: list):
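+    """Build the raw-data-location string stored with a CCV.
+
+    Args:
+        proposal (str): Proposal number the constant was generated from.
+        runs (list): Run numbers of the raw data used.
+
+    Returns:
+        str: e.g. "proposal:900113 runs: 9008 9009 9010", or an empty
+            string as a fallback for constants not based on runs.
+    """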
+    if proposal and len(runs) > 0:
+        return (
+            f'proposal:{proposal} runs: {" ".join([str(x) for x in runs])}')
+    else:
+        return ""  # Fallback for non-run based constants
+
+
+def inject_ccv(
+    const_src: Union[Path, str],
+    ccv_root: str,
+    report_to: Optional[str] = None,
+):
     """Inject new CCV into CalCat.
 
     Args:
@@ -91,69 +186,51 @@ def inject_ccv(const_src, ccv_root, report_to=None):
         ccv_root (str): CCV HDF group name.
         report_to (str): Metadata location.
 
-    Returns:
-        None
-
     Raises:
         RuntimeError: If CalCat POST request fails.
     """
-
-    pdu_name, calibration, key = ccv_root.lstrip('/').split('/')
+    pdu_name, calibration, _ = ccv_root.lstrip('/').split('/')
 
     with h5py.File(const_src, 'r') as const_file:
+        if ccv_root not in const_file:
+            raise ValueError(
+                f"Invalid HDF5 structure: {ccv_root} not found in file.")
+
         pdu_group = const_file[pdu_name]
         pdu_uuid = pdu_group.attrs['uuid']
         detector_type = pdu_group.attrs['detector_type']
 
         ccv_group = const_file[ccv_root]
+
         proposal, runs = ccv_group.attrs['proposal'], ccv_group.attrs['runs']
         begin_at_str = ccv_group.attrs['begin_at']
 
         condition_group = ccv_group['operating_condition']
 
         cond_params = []
-
         # It's really not ideal we're mixing conditionS and condition now.
         for parameter in condition_group:
             param_dset = condition_group[parameter]
-            cond_params.append({
-                'parameter_name': param_dset.attrs['database_name'],
-                'value': float(param_dset[()]),
-                'lower_deviation_value': param_dset.attrs['lower_deviation'],
-                'upper_deviation_value': param_dset.attrs['upper_deviation'],
-                'flg_available': True
-            })
+            cond_params.append(get_condition_dict(
+                param_dset.attrs['database_name'],
+                param_dset[()],
+                param_dset.attrs['lower_deviation'],
+                param_dset.attrs['upper_deviation'],
+            ))
 
     const_rel_path = f'xfel/cal/{detector_type.lower()}/{pdu_name.lower()}'
     const_filename = f'cal.{time.time()}.h5'
 
-    if proposal and len(runs) > 0:
-        raw_data_location = 'proposal:{} runs: {}'.format(
-            proposal, ' '.join([str(x) for x in runs]))
-    else:
-        pass  # Fallback for non-run based constants
-
-    # Generate condition name.
-    unique_name = detector_type[:detector_type.index('-Type')] + ' Def'
-    cond_hash = md5(pdu_name.encode())
-    cond_hash.update(int(pdu_uuid).to_bytes(
-        length=8, byteorder='little', signed=False))
-
-    for param_dict in cond_params:
-        cond_hash.update(str(param_dict['parameter_name']).encode())
-        cond_hash.update(str(param_dict['value']).encode())
+    unique_name = generate_unique_condition_name(
+        detector_type, pdu_name, pdu_uuid, cond_params)
 
-    unique_name += binascii.b2a_base64(cond_hash.digest()).decode()
-    unique_name = unique_name[:60]
+    raw_data_location = get_raw_data_location(proposal, runs)
 
     # Add PDU "UUID" to parameters.
-    cond_params.append({
-        'parameter_name': 'Detector UUID',
-        'value': unpack('d', pack('q', pdu_uuid))[0],
-        'lower_deviation_value': 0.0,
-        'upper_deviation_value': 0.0,
-        'flg_available': True
-    })
+    cond_params.append(get_condition_dict(
+        'Detector UUID',
+        unpack('d', pack('q', pdu_uuid))[0]
+    ))
 
     inject_h = {
         'detector_condition': {
@@ -191,9 +268,18 @@ def inject_ccv(const_src, ccv_root, report_to=None):
     const_dest.parent.mkdir(parents=True, exist_ok=True)
     copyfile(const_src, const_dest)
 
+    # TODO: Consider catching `RequestException`s
+    # when bypassing calibration_client
     resp = CalibrationClient.inject_new_calibration_constant_version(
         calibration_client(), inject_h)
 
     if not resp['success']:
         const_dest.unlink()  # Delete already copied CCV file.
-        raise RuntimeError(resp)
+        # TODO: Remove this when the new injection code is added.
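+        # A 422 response with "taken" in the begin_at error indicates
+        # that this CCV has already been injected, so surface it as
+        # CCVAlreadyInjectedError instead of a generic RuntimeError.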
+        if (
+            resp['status_code'] == 422 and
+            "taken" in resp['app_info'].get("begin_at", [""])[0]
+        ):
+            raise CCVAlreadyInjectedError
+        else:
+            raise RuntimeError(resp)
diff --git a/src/cal_tools/plotting.py b/src/cal_tools/plotting.py
index 575346fe45ff15889a1f65ebecee98eb1c4fceef..09323f583663c935d6d003da41f0bbb730cfd219 100644
--- a/src/cal_tools/plotting.py
+++ b/src/cal_tools/plotting.py
@@ -409,8 +409,8 @@ def show_processed_modules_jungfrau(
         if module in processed_modules:
             color = 'green'
             if (
-                'Noise' not in constants.keys() or
-                np.nanmean(constants['Noise'][counter, ..., 0]) == 0
+                'Noise10Hz' not in constants.keys() or
+                np.nanmean(constants['Noise10Hz'][counter, ..., 0]) == 0
                 ):
                 color = 'red'
             counter += 1
diff --git a/src/cal_tools/tools.py b/src/cal_tools/tools.py
index 48c780dd43dc7c4c783dd0b6646218686a522faa..89b1daa38b6b8256c1f0ed03663a55c2ddfcbae8 100644
--- a/src/cal_tools/tools.py
+++ b/src/cal_tools/tools.py
@@ -13,7 +13,7 @@ from pathlib import Path
 from queue import Queue
 from tempfile import NamedTemporaryFile
 from time import sleep
-from typing import List, Optional, Tuple, Union
+from typing import Dict, List, Optional, Tuple, Union
 from urllib.parse import urljoin
 
 import h5py
@@ -1194,3 +1194,28 @@ def latex_warning(message):
     """Show latex warning custom command parsed correctly
     when reports are generated."""
     display(Latex("\warningbox{" + message + "}"))
+
+
+def pdus_by_detector_id(
+    client: "CalCatAPIClient",
+    det_id: int,
+    snapshot_at: str = "",
+) -> List[Dict]:
+    """
+    Retrieve physical detector units (PDUs) for a given detector ID.
+
+    Args:
+        client: The CalCatAPIClient instance.
+        det_id (int): The detector ID.
+        snapshot_at (str, optional): The timestamp for the PDU snapshot.
+
+    Returns:
+        List[Dict]: A list of PDUs associated with the given detector ID.
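+
+    Example (illustrative; assumes a client such as the one returned by
+    cal_tools.restful_config.extra_calibration_client, as used in the
+    Jungfrau dark notebooks):
+
+        cc = extra_calibration_client()
+        det_id = cc.detector_by_identifier(karabo_id)["id"]
+        pdus = pdus_by_detector_id(cc, det_id, snapshot_at=creation_time)
+        da_to_pdu = {p["karabo_da"]: p["physical_name"] for p in pdus}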
+    """
+    return client.get(
+        "physical_detector_units/get_all_by_detector",
+        {
+            "detector_id": det_id,
+            "pdu_snapshot_at": client.format_time(snapshot_at),
+        },
+    )
diff --git a/tests/test_constants.py b/tests/test_constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..b78d9e194a38cabbf60fc18ae77d5cfc6dc778c0
--- /dev/null
+++ b/tests/test_constants.py
@@ -0,0 +1,61 @@
+from datetime import datetime
+from pathlib import Path
+from tempfile import TemporaryDirectory
+
+import h5py
+import numpy as np
+import pytest
+
+from cal_tools.calcat_interface2 import JUNGFRAUConditions
+from cal_tools.constants import write_ccv
+
+
+def test_write_ccv():
+    # Create a temporary CCV file for testing
+    with TemporaryDirectory() as temp_dir:
+        ccv_path = Path(temp_dir) / "test.ccv"
+        conditions = JUNGFRAUConditions(
+            sensor_bias_voltage=180,
+            memory_cells=1,
+            integration_time=10,
+            gain_setting=0,
+            gain_mode=0,
+        )
+        # Create test data
+        data = np.random.rand(10, 10)
+        dims = ["x", "y"]
+        pdu_name = "Jungfrau_M275"
+        pdu_uuid = 140
+        detector_type = "test-detector"
+        calibration = "Offset10Hz"
+        created_at = datetime.strptime(
+            "2022-10-08T00:07:06.000+02:00", "%Y-%m-%dT%H:%M:%S.%f%z")
+        proposal = 1234
+        runs = [1, 2, 3]
+
+        write_ccv(
+            ccv_path, pdu_name, pdu_uuid, detector_type,
+            calibration, conditions, created_at, proposal, runs,
+            data, dims)
+        # Check the CCV file was created with the expected data
+        with h5py.File(ccv_path, "r") as f:
+            assert f.attrs["version"] == 0
+            pdu_group = f[pdu_name]
+            assert pdu_group.attrs["uuid"] == pdu_uuid
+            assert pdu_group.attrs["detector_type"] == detector_type
+            ccv_group = pdu_group[calibration]["0"]
+            assert ccv_group.attrs["begin_at"] == created_at.isoformat()
+            assert ccv_group.attrs["proposal"] == proposal
+            np.testing.assert_array_equal(ccv_group.attrs["runs"], runs)
+            np.testing.assert_array_equal(ccv_group["data"], data)
+            np.testing.assert_array_equal(
+                ccv_group["data"].attrs["dims"], dims)
+
+
+def test_write_ccv_invalid_dims():
+    # Test that ValueError is raised if data.ndim != len(dims)
+    with pytest.raises(ValueError):
+        write_ccv(
+            None, None, None, None, None, None, None, None, None,
+            np.array([1, 2, 3]), ["x", "y"])