From 954d2c5632e913f30b49a3ea775a5c901e6ad03d Mon Sep 17 00:00:00 2001
From: Nuno Duarte <nuno.duarte@xfel.eu>
Date: Fri, 8 Sep 2023 15:28:41 +0200
Subject: [PATCH] Address first round of review comments

---
 .../Characterize_FlatFields_ePix100_NBC.ipynb | 206 +++++++-----------
 1 file changed, 81 insertions(+), 125 deletions(-)

diff --git a/notebooks/ePix100/Characterize_FlatFields_ePix100_NBC.ipynb b/notebooks/ePix100/Characterize_FlatFields_ePix100_NBC.ipynb
index 12b279e6b..bc2ccf32d 100644
--- a/notebooks/ePix100/Characterize_FlatFields_ePix100_NBC.ipynb
+++ b/notebooks/ePix100/Characterize_FlatFields_ePix100_NBC.ipynb
@@ -22,12 +22,11 @@
     "in_folder = '/gpfs/exfel/exp/MID/202231/p900310/raw' # input folder, required\n",
     "out_folder = '' # output folder, required\n",
     "metadata_folder = ''  # Directory containing calibration_metadata.yml when run by xfel-calibrate\n",
-    "sequences = [-1] # sequences to process, set to -1 for all, range allowed\n",
     "run = 29 # which run to read data from, required\n",
     "\n",
     "# Parameters for accessing the raw data.\n",
-    "karabo_id = \"MID_EXP_EPIX-2\" # karabo karabo_id\n",
-    "karabo_da = \"EPIX02\"  # data aggregators\n",
+    "karabo_id = \"MID_EXP_EPIX-2\"  # karabo ID\n",
+    "karabo_da = \"EPIX02\"  # data aggregator\n",
     "receiver_template = \"RECEIVER\" # detector receiver template for accessing raw data files\n",
     "instrument_source_template = '{}/DET/{}:daqOutput' # instrument detector data source in h5files\n",
     "\n",
@@ -45,7 +44,7 @@
     "split_evt_mip_threshold = 1000     # Threshold for rejection of MIP events (e.g, cosmic-rays)\n",
     "\n",
     "# Parameters for the calibration database.\n",
-    "cal_db_interface = \"tcp://max-exfl016:8020\" # calibration DB interface to use\n",
+    "cal_db_interface = \"tcp://max-exfl-cal001:8020\" # calibration DB interface to use\n",
     "cal_db_timeout = 300000 # timeout on caldb requests\n",
     "creation_time = \"\"  # The timestamp to use with Calibration DB. Required Format: \"YYYY-MM-DD hh:mm:ss\" e.g. 2019-07-04 11:02:41\n",
     "db_output = False # Output constants to the calibration database\n",
@@ -76,7 +75,7 @@
     "from matplotlib.colors import LogNorm\n",
     "import numpy as np\n",
     "import pasha as psh\n",
-    "from extra_data import RunDirectory, H5File\n",
+    "from extra_data import RunDirectory\n",
     "from pathlib import Path\n",
     "from prettytable import PrettyTable\n",
     "from scipy.optimize import curve_fit\n",
@@ -91,14 +90,13 @@
     "from cal_tools.epix100 import epix100lib\n",
     "from cal_tools.tools import (\n",
     "    calcat_creation_time,\n",
-    "    get_dir_creation_date,\n",
     "    get_pdu_from_db,\n",
     "    get_constant_from_db,\n",
     "    get_report,\n",
     "    save_const_to_h5,\n",
     "    send_to_db,\n",
     ")\n",
-    "from iCalibrationDB import Conditions, Constants, Detectors"
+    "from iCalibrationDB import Conditions, Constants"
    ]
   },
   {
@@ -143,7 +141,7 @@
     "report = get_report(metadata_folder)\n",
     "\n",
     "ped_dir = Path(in_folder) / f'r{run:04d}'\n",
-    "run_dir = RunDirectory(ped_dir, _use_voview=False)\n",
+    "run_dc = RunDirectory(ped_dir)\n",
     "\n",
     "print(f\"Run is: {run}\")\n",
     "print(f\"Instrument H5File source: {instrument_src}\")\n",
@@ -166,53 +164,22 @@
     "# Path to pixels ADC values\n",
     "pixels_src = (instrument_src, \"data.image.pixels\")\n",
     "\n",
-    "# Specify total number of images to process\n",
-    "n_trains = run_dir.get_data_counts(*pixels_src).shape[0]\n",
+    "# Specify the total number of images to process\n",
+    "n_trains = run_dc.get_data_counts(*pixels_src).shape[0]\n",
     "\n",
-    "# Modify n_trains to process based on given maximum and minimun number of trains.\n",
+    "# Modify n_trains to process based on the given maximum and minimum number of trains.\n",
     "if max_trains:\n",
     "    n_trains = min(max_trains, n_trains)\n",
     "    \n",
     "if n_trains < min_trains:\n",
     "    raise ValueError(\n",
     "        f\"Less than {min_trains} trains are available in RAW data.\"\n",
-    "         \" Not enough data to process flat-fields.\")\n",
+    "         \" Not enough data to process flat fields.\")\n",
     "\n",
-    "# Sequences to read\n",
-    "seq_files = sorted([Path(f.filename) for f in run_dir.select(f'*{karabo_id}*').files])\n",
-    "seq0_size = H5File(seq_files[0]).get_data_counts(*pixels_src).size\n",
+    "all_trains = len(run_dc.select(instrument_src).train_ids)\n",
+    "if n_trains != all_trains:\n",
+    "    print(f\"Warning: {all_trains - n_trains} trains with empty data.\")\n",
     "\n",
-    "if sequences != [-1]:\n",
-    "    seq_files = [f for f in seq_files if any(f.match(f\"*-S{s:05d}.h5\") for s in sequences)]\n",
-    "\n",
-    "seq_files = seq_files[:(n_trains-1)//seq0_size+1]\n",
-    "\n",
-    "if not len(seq_files):\n",
-    "    raise IndexError(\"No sequence files available for the selected sequences.\")\n",
-    "\n",
-    "# Trains to be processed\n",
-    "trains = np.ndarray(0,dtype=int)\n",
-    "for seq in seq_files:\n",
-    "    seq_str = str(seq)\n",
-    "    H5File(seq).get_data_counts(*pixels_src).size\n",
-    "    seq_size = H5File(seq).get_data_counts(*pixels_src).size\n",
-    "    n = int(seq_str[seq_str.rfind('-S0')+len('-S0'):seq_str.rfind('.h5')])\n",
-    "    t = np.arange(n*seq0_size,n*seq0_size+seq_size)\n",
-    "    trains = np.append(trains,t)\n",
-    "    \n",
-    "n_trains = run_dir.select_trains(trains).get_data_counts(*pixels_src).shape[0]\n",
-    "dshape = run_dir.select_trains(trains).select(*pixels_src)[pixels_src].shape\n",
-    "\n",
-    "if n_trains != dshape[0]:\n",
-    "    print(f\"Warning: {n_trains - dshape[0]} trains with empty data.\")\n",
-    "    n_trains = dshape[0]\n",
-    "\n",
-    "trains = trains[:n_trains]\n",
-    "\n",
-    "print(f\"Reading from: \")\n",
-    "[print(f'\\t{seq}') for seq in seq_files]\n",
-    "print('\\nAvailable sequece files: ' + str(len(run_dir.select(f'*{karabo_id}*').files)))\n",
-    "print(f'Sequence files used for processing: {len(seq_files)}')\n",
     "print(f'Images to analyze: {n_trains}')"
    ]
   },
@@ -224,12 +191,12 @@
    "outputs": [],
    "source": [
     "# Read sensor size\n",
-    "sensor_size = run_dir[instrument_src, 'data.image.dims'].as_single_value(reduce_by='first') # (x=768, y=708) expected\n",
-    "sensor_size = sensor_size[sensor_size != 1] # data.image.dims for old data is [768, 708, 1]\n",
-    "assert np.array_equal(sensor_size, [768, 708]), 'Unexpected sensor dimensions.' \n",
+    "sensor_size = run_dc[instrument_src, 'data.image.dims'].as_single_value(reduce_by='first') # (x=768, y=708) expected\n",
+    "sensor_size = sensor_size[sensor_size != 1].tolist()  # data.image.dims for old data is [768, 708, 1]\n",
+    "assert sensor_size == [768,708], 'Unexpected sensor dimensions.' \n",
     "\n",
     "ctrl_data = epix100lib.epix100Ctrl(\n",
-    "    run_dc=run_dir,\n",
+    "    run_dc=run_dc,\n",
     "    instrument_src=instrument_src,\n",
     "    ctrl_src=f\"{karabo_id}/DET/CONTROL\",\n",
     "    )\n",
@@ -254,7 +221,7 @@
     "# Print operating conditions\n",
     "print(f\"Bias voltage: {bias_voltage} V\")\n",
     "print(f\"Detector integration time: {integration_time} \\u03BCs {integration_time_str_add}\")\n",
-    "print(f\"Mean temperature: {temperature:0.2f}°C / {temperature_k:0.2f} K {temp_str_add}\")\n",
+    "print(f\"Mean temperature: {temperature:0.2f}\\u00B0C / {temperature_k:0.2f} K {temp_str_add}\")\n",
     "print(f\"Operated in vacuum: {in_vacuum}\")"
    ]
   },
@@ -269,8 +236,8 @@
     "step_timer.start()\n",
     "\n",
     "# Read data\n",
-    "data_dc = run_dir.select(*pixels_src, require_all=True).select_trains(trains)\n",
-    "data = data_dc[pixels_src].ndarray().astype(np.float16)\n",
+    "data_dc = run_dc.select(*pixels_src, require_all=True).select_trains(np.s_[:n_trains])\n",
+    "dshape = data_dc[pixels_src].shape\n",
     "\n",
     "step_timer.done_step('Flat-fields loaded. Elapsed Time')"
    ]
@@ -296,9 +263,9 @@
     "constants = ['Offset', 'Noise', 'BadPixelsDark']\n",
     "\n",
     "condition =  Conditions.Dark.ePix100(bias_voltage=bias_voltage,\n",
-    "                                 integration_time=integration_time,\n",
-    "                                 temperature=temperature_k,\n",
-    "                                 in_vacuum=in_vacuum)\n",
+    "                                     integration_time=integration_time,\n",
+    "                                     temperature=temperature_k,\n",
+    "                                     in_vacuum=in_vacuum)\n",
     "\n",
     "for cname in constants:        \n",
     "    const_data[cname] = get_constant_from_db(\n",
@@ -309,7 +276,6 @@
     "        empty_constant=None,\n",
     "        cal_db_interface=cal_db_interface,\n",
     "        creation_time=creation_time,\n",
-    "        print_once=2,\n",
     "        timeout=cal_db_timeout\n",
     "    )"
    ]
@@ -329,46 +295,46 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "block_size = sensor_size//2\n",
+    "block_size = [sensor_size[0]//2, sensor_size[1]//2]\n",
     "noiseSigma = 5\n",
     "\n",
     "cmCorrection_block = xcal.CommonModeCorrection(\n",
-    "    sensor_size.tolist(),\n",
-    "    block_size.tolist(),\n",
+    "    sensor_size,\n",
+    "    block_size,\n",
     "    'block',\n",
     "    noiseMap=const_data['Noise'].swapaxes(0,1),\n",
     "    noiseSigma=noiseSigma,\n",
     "    parallel=False)\n",
     "cmCorrection_col = xcal.CommonModeCorrection(\n",
-    "    sensor_size.tolist(),\n",
-    "    block_size.tolist(),\n",
+    "    sensor_size,\n",
+    "    block_size,\n",
     "    'col',\n",
     "    noiseMap=const_data['Noise'].swapaxes(0,1),\n",
     "    noiseSigma=noiseSigma,\n",
     "    parallel=False)\n",
     "cmCorrection_row = xcal.CommonModeCorrection(\n",
-    "    sensor_size.tolist(),\n",
-    "    block_size.tolist(),\n",
+    "    sensor_size,\n",
+    "    block_size,\n",
     "    'row',\n",
     "    noiseMap=const_data['Noise'].swapaxes(0,1),\n",
     "    noiseSigma=noiseSigma,\n",
     "    parallel=False)\n",
     "  \n",
     "patternClassifier = xcal.PatternClassifier(\n",
-    "    shape=sensor_size.tolist(),\n",
+    "    shape=sensor_size,\n",
     "    noisemap=const_data['Noise'].swapaxes(0,1),\n",
     "    primaryThreshold=split_evt_primary_threshold,\n",
     "    secondaryThreshold=split_evt_secondary_threshold,\n",
     "    upperThreshold=split_evt_mip_threshold,\n",
-    "    blockSize=block_size.tolist(),\n",
+    "    blockSize=block_size,\n",
     "    setPixelMask = const_data['BadPixelsDark'].flatten(),\n",
     "    parallel=False\n",
     ")\n",
     "\n",
     "patternSelector = xcal.PatternSelector(\n",
-    "    sensor_size.tolist(), \n",
+    "    sensor_size, \n",
     "    selectionList = [100, 101], # singles patterns\n",
-    "    blockSize=block_size.tolist(), \n",
+    "    blockSize=block_size, \n",
     "    parallel=False)"
    ]
   },
@@ -426,7 +392,7 @@
     "    hist['CS'] += np.histogram(d[d>0].flatten(),bins=bins)[0]\n",
     "    hist['S'] += np.histogram(sing[sing>0].flatten(),bins=bins)[0]\n",
     "    \n",
-    "    [index+prev_chunk] = d\n",
+    "    data_corr[index+prev_chunk] = d\n",
     "    data_singles[index+prev_chunk] = sing.swapaxes(0,-1)"
    ]
   },
@@ -497,27 +463,31 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "print(f'Primary threshold: {split_evt_primary_threshold}')\n",
+    "print(f'Secondary threshold: {split_evt_secondary_threshold}')\n",
+    "\n",
     "patternStats = patternClassifier.getPatternStats()\n",
     "\n",
     "n_singles = np.sum(patternStats['singles'])\n",
     "n_doubles = np.sum(patternStats['doubles'])\n",
     "n_triples = np.sum(patternStats['triples'])\n",
-    "n_quads   = np.sum(patternStats['quads'])\n",
-    "n_clusters= np.sum(patternStats['clusters'])\n",
-    "known_patterns = np.sum((n_singles,n_doubles,n_triples,n_quads))\n",
+    "n_quads = np.sum(patternStats['quads'])\n",
+    "n_clusters = np.sum(patternStats['clusters'])\n",
+    "known_patterns = np.sum((n_singles, n_doubles, n_triples, n_quads))\n",
     "\n",
     "t1,t2 = PrettyTable(),PrettyTable()\n",
-    "t1.field_names = ['Photon Hits','Frequency']\n",
-    "t1.add_row(['Big Clusters',f'{n_clusters/(known_patterns+n_clusters)*100:,.2f} %'])\n",
-    "t1.add_row(['Listed Patterns',f'{known_patterns/(known_patterns+n_clusters)*100:,.2f} %'])\n",
-    "t2.field_names = ['Listed Patterns','Frequency']\n",
-    "t2.add_row(['Singles',f'{n_singles/known_patterns*100:,.2f} %'])\n",
-    "t2.add_row(['Doubles',f'{n_doubles/known_patterns*100:,.2f} %'])\n",
-    "t2.add_row(['Triples',f'{n_triples/known_patterns*100:,.2f} %'])\n",
-    "t2.add_row(['Quadruplets',f'{n_quads/known_patterns*100:,.2f} %'])\n",
-    "print(f'  Primary threshold: {split_evt_primary_threshold}')\n",
-    "print(f'Secondary threshold: {split_evt_secondary_threshold}')\n",
+    "t1.field_names = ['Photon Hits', 'Frequency']\n",
+    "t1.add_row(['Big Clusters', f'{n_clusters/(known_patterns+n_clusters)*100: .2f} %'])\n",
+    "t1.add_row(['Listed Patterns', f'{known_patterns/(known_patterns+n_clusters)*100: .2f} %'])\n",
+    "\n",
     "print(t1)\n",
+    "\n",
+    "t2.field_names = ['Listed Patterns', 'Frequency']\n",
+    "t2.add_row(['Singles', f'{n_singles/known_patterns*100: .2f} %'])\n",
+    "t2.add_row(['Doubles', f'{n_doubles/known_patterns*100: .2f} %'])\n",
+    "t2.add_row(['Triples', f'{n_triples/known_patterns*100: .2f} %'])\n",
+    "t2.add_row(['Quadruplets', f'{n_quads/known_patterns*100: .2f} %'])\n",
+    "\n",
     "print(t2)"
    ]
   },
@@ -554,7 +524,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "coeff, var_matrix = curve_fit(gauss, bins_c, hist['S'], p0=fit_estimates)\n",
+    "coeff, _ = curve_fit(gauss, bins_c, hist['S'], p0=fit_estimates)\n",
     "singles_mu = coeff[1]\n",
     "singles_sig = abs(coeff[2])\n",
     "ROI = np.round([singles_mu-N_sigma_interval*singles_sig, # region of interest to find first photopeak per pixel\n",
@@ -572,7 +542,7 @@
     "            ROI[1],\n",
     "            alpha = .2,\n",
     "            color = 'green',\n",
-    "            label = f'μ ± {N_sigma_interval}σ')\n",
+    "            label = f'\\u03BC ± {N_sigma_interval}\\u03c3')\n",
     "\n",
     "plt.legend(fontsize=12);\n",
     "plt.xlabel('ADU',fontsize=12)\n",
@@ -668,13 +638,14 @@
     "                                  np.random.randint(0, sensor_size[1], N_sample_pixels)])\n",
     "\n",
     "fig = plt.figure(figsize=(20,20))\n",
+    "roi_bins = np.arange(ROI[0], ROI[1])\n",
     "it_counter = 0\n",
     "for px,py in sample_pixels:\n",
     "    it_counter+=1    \n",
     "    \n",
     "    plt.subplot(int(np.sqrt(N_sample_pixels)),int(np.sqrt(N_sample_pixels)),it_counter)\n",
     "    \n",
-    "    h,ADU = np.histogram(data_singles[:,py,px],bins=np.arange(ROI[0],ROI[1]))\n",
+    "    h,ADU = np.histogram(data_singles[:,py,px],bins=roi_bins)\n",
     "    ADU_c = ADU[:-1] + np.diff(ADU)[0]/2 # center of bins\n",
     "    \n",
     "    p1 = plt.plot([],[],' ',label = f'({px},{py})')\n",
@@ -687,18 +658,18 @@
     "        p3 = plt.plot([],[],' ', label = 'empty')\n",
     "        \n",
     "    try:\n",
-    "        coeff, var_matrix = curve_fit(gauss, ADU_c, h, p0=[0, np.median(ADU_c[h>0]), singles_sig]) \n",
+    "        coeff, _ = curve_fit(gauss, ADU_c, h, p0=[0, np.median(ADU_c[h>0]), singles_sig]) \n",
     "        y_fit = gauss(ADU_c, *coeff)\n",
-    "        p4 = plt.plot(ADU_c, y_fit, label = f'fit: μ={int(np.round(coeff[1]))}')\n",
+    "        p4 = plt.plot(ADU_c, y_fit, label = f'fit: \\u03BC={int(np.round(coeff[1]))}')\n",
     "\n",
-    "    except (RuntimeError,ValueError):\n",
+    "    except (RuntimeError, ValueError):\n",
     "        p4 = plt.plot([],[],' ', label = 'fit error')\n",
     "    \n",
     "    plt.grid(ls=':')\n",
     "    plt.xlabel('ADU')\n",
     "    plt.xlim(ROI)\n",
     "    plt.ylim(bottom=0)\n",
-    "    plt.legend()\n"
+    "    plt.legend()"
    ]
   },
   {
@@ -729,9 +700,9 @@
     "                peak_map[py,px] = np.median(ADU_c[h>0])\n",
     "            elif peak_fitting=='gauss':\n",
     "                try:\n",
-    "                    coeff, var_matrix = curve_fit(gauss, ADU_c, h, p0=[0, np.median(ADU_c[h>0]), singles_sig]) \n",
+    "                    coeff, _ = curve_fit(gauss, ADU_c, h, p0=[0, np.median(ADU_c[h>0]), singles_sig]) \n",
     "                    peak_map[py,px] = coeff[1]\n",
-    "                except (RuntimeError):\n",
+    "                except RuntimeError:\n",
     "                    pass         # Failed fits remain 0 \n",
     "        else:\n",
     "            peak_map[py,px] = -1 # Assign -1 to empty pixels\n",
@@ -757,17 +728,17 @@
     "h,ADU = np.histogram(peak_map.flatten(),bins=np.arange(ROI[0],ROI[1]))\n",
     "ADU_c = ADU[:-1] + np.diff(ADU)[0]/2 # center of bins\n",
     "\n",
-    "coeff, var_matrix = curve_fit(gauss, ADU_c, h, p0=[h.max()/2, singles_mu, singles_sig])\n",
+    "coeff, _ = curve_fit(gauss, ADU_c, h, p0=[h.max()/2, singles_mu, singles_sig])\n",
     "BP_fit_threshold = [coeff[1]-N_sigma_interval*abs(coeff[2]),\n",
     "                    coeff[1]+N_sigma_interval*abs(coeff[2])]\n",
     "y_fit = gauss(ADU_c, *coeff)\n",
-    "plt.plot(ADU_c,y_fit, label = f'fit: μ={int(np.round(coeff[1]))}')\n",
+    "plt.plot(ADU_c,y_fit, label = f'fit: \\u03BC={int(np.round(coeff[1]))}')\n",
     "plt.vlines(coeff[1],0,plt.gca().get_ylim()[1],color='orange',ls=':')\n",
     "plt.axvspan(BP_fit_threshold[0],\n",
     "            BP_fit_threshold[1],\n",
     "            alpha = .3,\n",
     "            color = 'orange',\n",
-    "            label = f'μ ± {N_sigma_interval}σ')\n",
+    "            label = f'\\u03BC ± {N_sigma_interval}\\u03c3')\n",
     "\n",
     "plt.grid(ls=':')\n",
     "plt.xlim(np.array(BP_fit_threshold)*[.9,1.1])\n",
@@ -897,12 +868,12 @@
     "                     bins=np.arange(BP_fit_threshold[0],BP_fit_threshold[1]).astype(int))\n",
     "ADU_c = ADU[:-1] + np.diff(ADU)[0]/2 # center of bins\n",
     "\n",
-    "coeff, var_matrix = curve_fit(gauss, ADU_c, h, p0=[h.max()/2, singles_mu, singles_sig])\n",
+    "coeff, _ = curve_fit(gauss, ADU_c, h, p0=[h.max()/2, singles_mu, singles_sig])\n",
     "y_fit = gauss(ADU_c, *coeff)\n",
     "\n",
-    "gain_conv_const = coeff[1]/peak_energy\n",
+    "gain_conv_const = coeff[1] / peak_energy\n",
     "\n",
-    "abs_gain_map = rel_gain_map/gain_conv_const\n",
+    "abs_gain_map = rel_gain_map / gain_conv_const\n",
     "\n",
     "step_timer.done_step('Calculated Gain Conversion Constant. Elapsed Time')"
    ]
@@ -917,7 +888,7 @@
     "plt.figure(figsize=(7,5))\n",
     "\n",
     "plt.scatter(ADU_c/gain_conv_const, h, color='k', marker='x', label='Gain Corrected')\n",
-    "plt.plot(ADU_c/gain_conv_const, y_fit, color='orange', label = f'fit: μ={(np.round(coeff[1],2))} ADU');\n",
+    "plt.plot(ADU_c/gain_conv_const, y_fit, color='orange', label = f'fit: \\u03BC={(np.round(coeff[1],2))} ADU');\n",
     "\n",
     "plt.ylim(bottom=0)\n",
     "plt.legend()\n",
@@ -972,7 +943,6 @@
     "    photon_energy=peak_energy\n",
     ")\n",
     "\n",
-    "const_data_db = dict()\n",
     "db_gain_map = get_constant_from_db(\n",
     "    karabo_id=karabo_id,\n",
     "    karabo_da=karabo_da,\n",
@@ -981,7 +951,6 @@
     "    empty_constant=None,\n",
     "    cal_db_interface=cal_db_interface,\n",
     "    creation_time=creation_time,\n",
-    "    print_once=2,\n",
     "    timeout=cal_db_timeout\n",
     ")\n",
     "\n",
@@ -1075,7 +1044,7 @@
     "if db_gain_map is not None:\n",
     "    FF_data_db_map = psh.alloc(shape=(N_validation_trains,dshape[1],dshape[2]), dtype=np.float32)\n",
     "\n",
-    "psh.map(correct_validation_train, data_dc.select_trains(trains[:N_validation_trains]))\n",
+    "psh.map(correct_validation_train, data_dc.select_trains(np.s_[:N_validation_trains]))\n",
     "\n",
     "step_timer.done_step('Corrected evaluation data. Elapsed Time')"
    ]
@@ -1169,7 +1138,7 @@
     "                peak_ROI = np.array([p*peak_energy-peak_energy/2, p*peak_energy+peak_energy/2]) + ROI_shift\n",
     "                xx = (bins_keV>peak_ROI[0]) & (bins_keV<peak_ROI[1])\n",
     "\n",
-    "                coeff, var_matrix = curve_fit(gauss, bins_keV[xx], FF_hist_AGC[xx], p0=[FF_hist_AGC[xx].max(), p*peak_energy, 1])\n",
+    "                coeff, _ = curve_fit(gauss, bins_keV[xx], FF_hist_AGC[xx], p0=[FF_hist_AGC[xx].max(), p*peak_energy, 1])\n",
     "                y_fit = gauss(bins_keV[xx], *coeff)\n",
     "\n",
     "                xx_sigma_lim = (bins_keV>coeff[1]-abs(coeff[2])*sigma_tol) & (bins_keV<coeff[1]+abs(coeff[2])*sigma_tol)\n",
@@ -1179,7 +1148,7 @@
     "                                 FF_hist_AGC[xx_sigma_lim],\n",
     "                                 color='orange',\n",
     "                                 alpha=.5,\n",
-    "                                 label=f'μ ± {sigma_tol} σ')\n",
+    "                                 label=f'\\u03BC ± {sigma_tol}\\u03c3')\n",
     "                plt.plot(bins_keV[xx],y_fit,color=c)\n",
     "                plt.vlines(coeff[1],0,ylim_top,ls='--',color=c,label=f'peak {p}: {coeff[1]:,.2f} keV')\n",
     "\n",
@@ -1281,11 +1250,7 @@
     "    plt.subplot(1,2,2)\n",
     "    dev_db = (y_fit_db-(peak_energy*xx))/(peak_energy*xx)*100\n",
     "    plt.plot(xx*peak_energy,dev_db,c='r', label='DB gain map')\n",
-    "\n",
-    "plt.subplot(1,2,1)\n",
-    "leg = plt.legend(fontsize=12)\n",
-    "plt.subplot(1,2,2)\n",
-    "leg = plt.legend(fontsize=12)"
+    "    plt.legend(fontsize=12)"
    ]
   },
   {
@@ -1323,12 +1288,12 @@
     "xx = np.arange(0,50,.1)\n",
     "if db_gain_map is not None:\n",
     "    plt.plot(peaks*peak_energy,E_res[N_peaks:], 'o', c='r', label='DB gain map')\n",
-    "    coeff,param = curve_fit(power_function,peaks*peak_energy,E_res[N_peaks:],p0=fit_estimates)\n",
+    "    coeff,_ = curve_fit(power_function,peaks*peak_energy,E_res[N_peaks:],p0=fit_estimates)\n",
     "    power_fit = power_function(xx,*coeff)\n",
     "    plt.plot(xx,power_fit, '--', c='r')\n",
     "\n",
     "plt.plot(peaks*peak_energy,E_res[:N_peaks], 'o', c='b', label='New gain map')\n",
-    "coeff,param = curve_fit(power_function,peaks*peak_energy,E_res[:N_peaks],p0=fit_estimates)\n",
+    "coeff,_ = curve_fit(power_function,peaks*peak_energy,E_res[:N_peaks],p0=fit_estimates)\n",
     "power_fit = power_function(xx,*coeff)\n",
     "plt.plot(xx,power_fit, '--', c='b')\n",
     "\n",
@@ -1347,7 +1312,7 @@
    "metadata": {},
    "source": [
     "## Calibration Constants DB\n",
-    "Send the flat-field constants (RelativeGain and BadPixelsFF) to the database and/or save them locally."
+    "Send the flat-field constants (RelativeGain and BadPixelsIlluminated) to the database and/or save them locally."
    ]
   },
   {
@@ -1362,23 +1327,14 @@
     "md = None\n",
     "\n",
     "constant_maps = {'RelativeGain': abs_gain_map,\n",
-    "                 'BadPixelsFF': const_data['BadPixelsFF']\n",
+    "                 'BadPixelsIlluminated': const_data['BadPixelsFF']\n",
     "                } \n",
     "\n",
     "for const_name in constant_maps.keys():\n",
     "    const = getattr(Constants.ePix100, const_name)()\n",
     "    const.data = constant_maps[const_name].data\n",
     "\n",
-    "    # Set the operating condition\n",
-    "    condition = Conditions.Illuminated.ePix100(\n",
-    "        bias_voltage=bias_voltage,\n",
-    "        photon_energy=peak_energy,\n",
-    "        integration_time=integration_time,\n",
-    "        temperature=temperature_k,\n",
-    "        in_vacuum=in_vacuum,\n",
-    "    )\n",
-    " \n",
-    "    for parm in condition.parameters:\n",
+    "    for parm in illum_condition_db.parameters:\n",
     "        if parm.name == \"Sensor Temperature\":\n",
     "            parm.lower_deviation = temp_limits\n",
     "            parm.upper_deviation = temp_limits\n",
@@ -1388,7 +1344,7 @@
     "        karabo_id=karabo_id,\n",
     "        karabo_da=karabo_da,\n",
     "        constant=const,\n",
-    "        condition=condition,\n",
+    "        condition=illum_condition_db,\n",
     "        cal_db_interface=cal_db_interface,\n",
     "        snapshot_at=creation_time)[0]\n",
     "\n",
@@ -1398,7 +1354,7 @@
     "            db_module=db_module,\n",
     "            karabo_id=karabo_id,\n",
     "            constant=const,\n",
-    "            condition=condition,\n",
+    "            condition=illum_condition_db,\n",
     "            file_loc=file_loc,\n",
     "            report_path=report,\n",
     "            cal_db_interface=cal_db_interface,\n",
@@ -1411,7 +1367,7 @@
     "            db_module=db_module,\n",
     "            karabo_id=karabo_id,\n",
     "            constant=const,\n",
-    "            condition=condition,\n",
+    "            condition=illum_condition_db,\n",
     "            data=const.data,\n",
     "            file_loc=file_loc,\n",
     "            report=report,\n",
-- 
GitLab