diff --git a/pages/2_Constant_Timeline.py b/pages/2_Constant_Timeline.py
index 0063ae7d96021afcb06438f6edd37f19628fc426..ccde648c04b8a178156f0ad381d81f1b90430676 100644
--- a/pages/2_Constant_Timeline.py
+++ b/pages/2_Constant_Timeline.py
@@ -8,6 +8,7 @@ from streamlit_plotly_events import plotly_events
 
 import utils.calcat_bits as calcat_bits
 from utils import logger, utils
+from widgets.custom_widgets import CustomComponents
 
 logging = logger.setup_logger(st)
 
@@ -21,6 +22,7 @@ st.set_page_config(
     initial_sidebar_state="expanded"
 )
 
+# Preserve session-state values for all keys when navigating between pages.
 for k, v in st.session_state.items():
     st.session_state[k] = v
 
@@ -57,14 +59,11 @@ calibration_id = calibration_name_to_id[calibration]
 _pdu_name_to_dict = {
     pdu["physical_name"]: pdu for pdu in cw.get_pdus() if pdu["id"] > 0
 }
-pdus = cw.get_pdu_names_current_in_detector(detector_id, snapshot_at=None)
-if pdus:
-    with st.expander("TODO"):
-        st.info(
-            "These are the current PDUs mapped to the detector "
-            "(TODO: allow snapshot_at to query old mappings"
-        )
-else:
+components = CustomComponents()
+snapshot = components.set_snapshot(sidebar=False)
+pdus = cw.get_pdu_names_current_in_detector(detector_id, snapshot_at=snapshot)
+
+if not pdus:
     st.info("No PDUs in the selected detector.")
     st.stop()
 
@@ -88,17 +87,32 @@ by_condition = utils.partition_ccvs_by_conditions(
     itertools.chain.from_iterable(pdu_to_ccvs.values()),
     cw,
 )
-with st.expander("TODO"):
-    st.info(
-        "These are the current (exact match; TODO: handle deviation)"
-        " conditions in recent constants."
-    )
-to_print = []
+
+# These are the current (exact match; TODO: handle deviation)
+# conditions in recent constants.
+
 for i, (condition, ccvs) in enumerate(sorted(by_condition.items())):
-    to_print.append(f"- {len(ccvs)} CCVs with condition [{i}]:")
-    for param, value in sorted(condition):
-        to_print.append(f"    - {param}: {value}")
-st.markdown("\n".join(to_print))
+    st.markdown(f"- {len(ccvs)} CCVs with condition [{i}]:\n")
+    cond_table = []
+    for (param, value, low_dev, up_dev) in sorted(condition):
+        cond_table.append(
+            {
+                "parameter": param,
+                "value": value,
+                "lower_deviation": low_dev,
+                "upper_deviation": up_dev,
+            })
+    pd.options.display.float_format = '{:.10f}'.format
+    df = pd.DataFrame(cond_table)[
+        [
+            'parameter',
+            'value',
+            'lower_deviation',
+            'upper_deviation',
+        ]]
+
+    # Convert all values to strings to avoid automatic display rounding.
+    st.dataframe(df.astype(str))
 condition_index = st.selectbox("Condition", range(len(by_condition)))
 condition = list(sorted(by_condition.keys()))[condition_index]
 
diff --git a/setup.py b/setup.py
index fb16c0a3275808dd51a2e7f66c097fe44e4a33ad..3976178d4144693222fcf4f2030170183a7c45ee 100644
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,3 @@
-from subprocess import check_output
 from setuptools import setup, find_packages
 from setuptools.command.build_py import build_py
 
@@ -6,14 +5,16 @@ from setuptools.command.build_py import build_py
 class PreInstallCommand(build_py):
     """Pre-installation for installation mode."""
 
-    def run(self):        
+    def run(self):
         super().run()
 
+
 install_requires = [
     "altair",
     "calibration_client==11.2.0",
     "dynaconf==3.1.4",
     "extra_geom==1.10.0",
+    "flake8",
     "h5py==3.8.0",
     "humanfriendly==10.0",
     "matplotlib==3.7.1",
@@ -65,5 +66,6 @@ setup(
     ],
     url="",
     # TODO: find license, assuming this will be open sourced eventually
-    # "License :: OSI Approved :: BSD License",  # Update this when the license is decided
+    # "License :: OSI Approved :: BSD License",
+    # Update this when the license is decided
 )
diff --git a/tests/test_utils/test_utils.py b/tests/test_utils/test_utils.py
index e4cbe7e0727ddd1ecb1d2f5acbbf50ac6ccd1bcd..b1e491f66f0bfaa9a1cc0f87ce231d4b7fcebe6c 100644
--- a/tests/test_utils/test_utils.py
+++ b/tests/test_utils/test_utils.py
@@ -1,7 +1,11 @@
 import numpy as np
-import pytest
 
-from utils.utils import pickle_loading_ccv, pickle_loading_badpixel_ccv
+from utils import calcat_bits
+from utils.utils import (
+    partition_ccvs_by_conditions,
+    pickle_loading_ccv,
+    pickle_loading_badpixel_ccv,
+)
 
 MODULE_1 = 'AGIPD_SIV1_AGIPDV12_M435'
 CCV_1 = {  # for MODULE_1
@@ -103,6 +107,7 @@ CCV_1 = {  # for MODULE_1
     },
 }
 
+
 def test_pickle_loading_badpixel_ccv():
     mod, timestamp, bp_dict = pickle_loading_badpixel_ccv(
         MODULE_1, CCV_1["BadPixelsDark"])
@@ -135,10 +140,86 @@ def test_pickle_loading_badpixel_ccv():
         'total_badpixels': 2.7592196609034683,
     }
 
-    
+
 def test_pickle_loading_ccv():
     mod, timestamp, data = pickle_loading_ccv(
         np.nanmean, MODULE_1, CCV_1["Offset"])
     assert mod == MODULE_1
     assert timestamp == CCV_1["Offset"]["begin_validity_at"]
-    assert data == 6888.537884957863
\ No newline at end of file
+    assert data == 6888.537884957863
+
+
+class ConditionWrapper:
+    def __init__(self, conditions):
+        self.conditions = conditions
+
+    def get_condition(self, condition_id):
+        return self.conditions.get(condition_id, {})
+
+
+def test_partition_ccvs_by_conditions():
+    cw = ConditionWrapper({
+        1: {
+            "parameters_conditions": [
+                {
+                    "parameter_name": "Acquisition rate",
+                    "value": 1.1,
+                    "lower_deviation_value": 0,
+                    "upper_deviation_value": 0},
+                {
+                    "parameter_name": "Gain Setting",
+                    "value": 0,
+                    "lower_deviation_value": 0,
+                    "upper_deviation_value": 0},
+                {"parameter_name": "Detector UUID", "value": 00000},
+            ]
+        },
+        2: {
+            "parameters_conditions": [
+                {
+                    "parameter_name": "Acquisition rate",
+                    "value": 1.1,
+                    "lower_deviation_value": 0,
+                    "upper_deviation_value": 0},
+                {
+                    "parameter_name": "Memory cells",
+                    "value": 352,
+                    "lower_deviation_value": -352,
+                    "upper_deviation_value": 0},
+                {
+                    "parameter_name": "Integration Time",
+                    "value": 13.99,
+                    "lower_deviation_value": 0.25,
+                    "upper_deviation_value": 0.25
+                },
+                {"parameter_name": "Detector UUID", "value": 10000},
+            ]
+        }
+    })
+
+    ccvs = [
+        {"calibration_constant": {"condition_id": 1}},
+        {"calibration_constant": {"condition_id": 2}},
+        {"calibration_constant": {"condition_id": 1}}
+    ]
+
+    res = partition_ccvs_by_conditions(ccvs, cw)
+
+    expected_result = {
+        frozenset({
+            ('Gain Setting', 0, 0, 0),
+            ('Acquisition rate', 1.1, 0, 0)
+        }): [
+            {"calibration_constant": {"condition_id": 1}},
+            {"calibration_constant": {"condition_id": 1}}
+        ],
+        frozenset({
+            ('Memory cells', 352, -352, 0),
+            ('Integration Time', 13.99, 0.25, 0.25),
+            ('Acquisition rate', 1.1, 0, 0),
+        }): [
+            {"calibration_constant": {"condition_id": 2}}
+        ]
+    }
+
+    assert res == expected_result
diff --git a/utils/config/detector_config.json b/utils/config/detector_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4d433903f67ddd88c558c0629b18ee9aca5e1271
--- /dev/null
+++ b/utils/config/detector_config.json
@@ -0,0 +1,16 @@
+{
+    "contains": {
+        "AGIPD": "AGIPD-Type",
+        "DSSC": "DSSC-Type",
+        "_JF": "jungfrau-Type",
+        "JNGFR": "jungfrau-Type",
+        "EPX100": "ePix100-Type",
+        "EPIX10K": "ePix10K-Type",
+        "EPIX-": "ePix100-Type",
+        "_G2": "Gotthard2-Type",
+        "HIREX": "Gotthard2-Type",
+        "_LPD": "LPD-Type",
+        "_PNCCD": "pnCCD-Type"
+    },
+    "exact": {}
+}
diff --git a/utils/utils.py b/utils/utils.py
index 2b1eb4c5388d987bf495268890692cd3556b496d..ba6b84e71f1f9a482f9f394e017f850fa7b9a1e5 100644
--- a/utils/utils.py
+++ b/utils/utils.py
@@ -55,7 +55,8 @@ def get_data(fn):
         with h5py.File(fn, "r") as fd:
             pdu, const = navigate_constant(fd)
             data = fd[pdu][const]["0"]["data"][:]
-            logging.debug(f"Opened data with shape {data.shape} of {data.dtype}")
+            logging.debug(
+                f"Opened data with shape {data.shape} of {data.dtype}")
         return data
     except OSError as e:
         logging.error(f"Failed to read file {fn}: {e}")
@@ -272,9 +273,13 @@ class LPDType(detectorType):
             geom=extra_geom.LPD_1MGeometry,
             constant_shapes={
                 # unsure about x vs y
-                "BadPixelsDark": ("pixels_y", "pixels_x", "cells", "gains"),
-                "Noise": ("pixels_y", "pixels_x", "cells", "gains"),
-                "Offset": ("pixels_y", "pixels_x", "cells", "gains",),
+                "BadPixelsDark": ("pixels_x", "pixels_y", "cells", "gains"),
+                "Noise": ("pixels_x", "pixels_y", "cells", "gains"),
+                "Offset": ("pixels_x", "pixels_y", "cells", "gains"),
+                "RelativeGain": ("pixels_x", "pixels_y", "cells", "gains"),
+                "GainAmpMap": ("pixels_x", "pixels_y", "cells", "gains"),
+                "FFMap": ("pixels_x", "pixels_y", "cells", "gains"),
+                "BadPixelsFF": ("pixels_x", "pixels_y", "cells", "gains"),
                 }
             )
 
@@ -391,7 +396,8 @@ class DSSCType(detectorType):
             detector_size=(512, 128),
             constant_shapes={
                 "Offset": ("pixels_x", "pixels_y", "cells"),
-                "Noise": ("pixels_x", "pixels_y", "cells")
+                "Noise": ("pixels_x", "pixels_y", "cells"),
+                "BadPixelsDark": ("pixels_x", "pixels_y", "cells")
                 },
             geom=extra_geom.DSSC_1MGeometry,
             )
@@ -441,6 +447,7 @@ class GH2Type(detectorType):
                 "NoiseGotthard2": ("pixels_x", "cells", "gains"),
                 "BadPixelsDarkGotthard2": ("pixels_x", "cells", "gains"),
                 "LUTGotthard2": ("pixels_x", "cells", "gains"),
+                "RelativeGainGotthard2": ("pixels_x", "cells", "gains"),
                 },
             )
 
@@ -481,6 +488,7 @@ class jungfrauType(detectorType):
                     "pixels_x", "pixels_y", "cells", "gains"),
                 "Noise10Hz": ("pixels_y", "pixels_x", "cells", "gains"),
                 "Offset10Hz": ("pixels_y", "pixels_x", "cells", "gains"),
+                "RelativeGain10Hz": ("pixels_y", "pixels_x", "cells", "gains")
                 },
             geom=extra_geom.JUNGFRAUGeometry,
             )
@@ -556,28 +564,25 @@ def detector_type_class(detector_type):
         return jungfrauType
 
 
+def load_detector_config():
+    """Load detector-type name patterns from config/detector_config.json."""
+    config_path = (
+        pathlib.Path(__file__).parent / 'config' / 'detector_config.json')
+    with open(config_path, 'r') as file:
+        return json.load(file)
+
+
+# Load the configuration at the module level
+config = load_detector_config()
+
+
 def guess_detector_type(name):
-    if "AGIPD" in name:
-        return "AGIPD-Type"
-    elif "DSSC" in name:
-        return "DSSC-Type"
-    elif "_JF" in name or "JNGFR" in name:
-        return "jungfrau-Type"
-    elif "EPX100" in name:
-        return "ePix100-Type"
-    elif "EPIX10K" in name:
-        return "ePix10K-Type"
-    elif "_G2" in name:
-        return "Gotthard2-Type"
-    elif "_LPD" in name:
-        return "LPD-Type"
-    elif "_PNCCD" in name:
-        return "pnCCD-Type"
-    else:
-        return {
-            "MID_EXP_EPIX-1": "ePix100-Type",
-            "MID_EXP_EPIX-2": "ePix100-Type",
-        }.get(name, None)
+    # Check 'contains' patterns
+    for key, value in config.get("contains", {}).items():
+        if key in name:
+            return value
+    # Check 'exact' matches
+    return config.get("exact", {}).get(name, None)
 
 
 def match_shape(detector_type, constant_type, shape):
@@ -781,17 +786,21 @@ def shmem_empty(shape, dtype):
 
 
 def partition_ccvs_by_conditions(ccvs, cw):
-    res = {}
+    res = dict()
     # TODO: allow deviations?
     for ccv in ccvs:
-        parameters = frozenset(
-            (parameter["parameter_name"], parameter["value"])
-            for parameter in cw.get_condition(
-                ccv["calibration_constant"]["condition_id"]
-            )["parameters_conditions"]
-            if parameter["parameter_name"] != "Detector UUID"
-        )
-        res.setdefault(parameters, []).append(ccv)
+        parameters = set()
+        conditions = cw.get_condition(
+            ccv["calibration_constant"]["condition_id"])
+        for parameter in conditions["parameters_conditions"]:
+            if parameter["parameter_name"] != "Detector UUID":
+                parameters.add((
+                    parameter["parameter_name"],
+                    parameter["value"],
+                    parameter["lower_deviation_value"],
+                    parameter["upper_deviation_value"],
+                ))
+        res.setdefault(frozenset(parameters), []).append(ccv)
     return res
 
 
@@ -828,14 +837,16 @@ def count_bad_pixels(data, axis_order=None, per_axis=None):
     # Auxiliary function to count bad pixels
     def count_bad_pixels(bp_value, per_axis=None):
         if axis_order and per_axis:
-            non_cell_axes = tuple(i for i, axis in enumerate(axis_order) if axis != per_axis)
+            non_cell_axes = tuple(
+                i for i, axis in enumerate(axis_order) if axis != per_axis)
             return np.count_nonzero(bp_value & data, axis=non_cell_axes)
         else:
             return np.count_nonzero(bp_value & data)
     # Use ThreadPoolExecutor to process each bad pixel type concurrently
     with concurrent.futures.ThreadPoolExecutor() as pool:
-        bpixs_by_type = np.array(
-            list(pool.map(count_bad_pixels, [bp.value for bp in BadPixelValues])))
+        bpixs_by_type = np.array(list(
+            pool.map(count_bad_pixels, [bp.value for bp in BadPixelValues])))
     return bpixs_by_type
 
+
 project_root = pathlib.Path(__file__).parent
diff --git a/widgets/custom_widgets.py b/widgets/custom_widgets.py
index 934cbb4d0f375870a422dcd9871f08d8c000989e..82a7108de4db2b8ad9a3a4930afff157ed1e2a29 100644
--- a/widgets/custom_widgets.py
+++ b/widgets/custom_widgets.py
@@ -129,6 +129,14 @@ class CustomComponents:
         logging.debug(f"selected data range - start: {start}, end: {end}")
         return start, end
 
+    def set_snapshot(
+        self, label="CalCat Snapshot", key="snapshot", sidebar=True):
+        if sidebar:
+            return st.sidebar.date_input(
+                label, key=key, value=datetime.datetime.today())
+        else:
+            return st.date_input(
+                label, key=key, value=datetime.datetime.today())
 
 class DetectorConfiguration(CustomComponents):
 
@@ -158,11 +166,7 @@ class DetectorConfiguration(CustomComponents):
             else "detector_id")
 
         if id_key == "detector_id":
-            snapshot = st.sidebar.date_input(
-                "CalCat Snapshot",
-                key="snapshot",
-                value=datetime.datetime.today(),
-            )
+            snapshot = self.set_snapshot()
 
         with self.metric_man["perf.calcat_time"]:
             pdus = (