diff --git a/bin/slurm_calibrate.sh b/bin/slurm_calibrate.sh
index 6b43b5065f23f4fdf0c4cbc0617a70de52cb1285..44be0e93e511c491fe6171badb0e0f49c708d1f2 100755
--- a/bin/slurm_calibrate.sh
+++ b/bin/slurm_calibrate.sh
@@ -52,7 +52,7 @@ fi
 
 
 echo "Running script"
-${jupyter_path} nbconvert --to rst --ExecutePreprocessor.timeout=86400 --ExecutePreprocessor.allow_errors=True --TemplateExporter.exclude_input=True --execute ${nb_path}
+${jupyter_path} nbconvert --to rst --ExecutePreprocessor.timeout=36000 --ExecutePreprocessor.allow_errors=True --TemplateExporter.exclude_input=True --execute ${nb_path}
 
 # stop the cluster if requested
 if [ "${uuid}" != "NO_CLUSTER" ]
diff --git a/cal_tools/cal_tools/agipdlib.py b/cal_tools/cal_tools/agipdlib.py
index 638dad7c1825f3a063d1424b2962c27355f71549..0b5ba63060d81833ded5fc64383d08860fb29b73 100644
--- a/cal_tools/cal_tools/agipdlib.py
+++ b/cal_tools/cal_tools/agipdlib.py
@@ -908,7 +908,6 @@ class AgipdCorrections:
         """
         if not self.initialized:
             raise RuntimeError("Must call initialize() first!")
-
         if not self.karabo_data_mode:
             agipd_base = self.agipd_base
             cidx = self.cidx
@@ -969,7 +968,6 @@ class AgipdCorrections:
         if self.force_mg_if_below is not None and self.force_mg_if_below > 0:
             gain[(gain == 2) & ((im - self.offset[
                 cellid, ..., 1]) < self.force_mg_if_below)] = 1
-
         if self.force_hg_if_below is not None and self.force_hg_if_below > 0:
             gain[(gain > 0) & ((im - self.offset[
                 cellid, ..., 0]) < self.force_hg_if_below)] = 0
@@ -987,7 +985,6 @@ class AgipdCorrections:
                 std = np.nanstd(im[cellid == c, ...], axis=0)
                 self.sig_zero_mask[
                     c, std == 0] = BadPixels.DATA_STD_IS_ZERO.value
-
         # for feedback we produced histograms for the first chunk
         if cidx == 0:
             H, xe, ye = np.histogram2d(im.flatten(), ga.flatten(),
@@ -1001,31 +998,25 @@ class AgipdCorrections:
                                        range=[[4000, 8192], [0, 4]])
             self.hists_dig_gain_vs_signal += H
             self.dig_signal_edges = (xe, ye)
-
         # now get the correct constants depending on cell id
         offsetb = self.offset[cellid, ...]
         tmask = self.mask[cellid, ...]
-
         # choose constants according to gain setting
         off = np.choose(gain,
                         (offsetb[..., 0], offsetb[..., 1], offsetb[..., 2]))
 
         msk = np.choose(gain, (tmask[..., 0], tmask[..., 1], tmask[..., 2]))
-
         # same for relative gain and then bad pixel mask
         if hasattr(self, "rel_gain"):
             # get the correct rel_gain depending on cell-id
             rc = self.rel_gain[cellid, ...]
             rel_cor = np.choose(gain, (rc[..., 0], rc[..., 1], rc[..., 2]))
-
         # scale raw gain for use in the identifying snowy pixels
         rgain = None
         if self.melt_snow is not False:
             rgain = ga / t0[cellid, ...]
-
         # subtract offset
         im -= off
-
         # before doing relative gain correction we need to evaluate any
         # baseline shifts
         # as they are effectively and additional offset in the data
@@ -1123,7 +1114,6 @@ class AgipdCorrections:
         msk |= self.sig_zero_mask[cellid, ...]
         if self.melt_snow is not False:
             msk |= snowmask
-
         # for the first chunk output some statistics
         if cidx == 0:
             copim = copy.copy(im)
@@ -1169,7 +1159,6 @@ class AgipdCorrections:
             self.ddset[cidx // sd:nidx // sd, ...] = im
             self.gdset[cidx // sd:nidx // sd, ...] = gain
             self.mdset[cidx // sd:nidx // sd, ...] = msk
-
             self.outfile[agipd_base + "image/cellId"][cidx:nidx] = cellid
             self.outfile[agipd_base + "image/trainId"][cidx:nidx] = trainId
             self.outfile[agipd_base + "image/pulseId"][cidx:nidx] = pulseId
@@ -1655,8 +1644,9 @@ class AgipdCorrections:
         for cname, mdata in const_yaml[dname].items():
             when[cname] = mdata["creation-time"]
             if when[cname]:
-                with h5py.File(mdata["file-path"], "r") as cf:
-                    cons_data[cname] = np.copy(cf[f"{dname}/{cname}/0/data"])
+                with h5py.File(mdata["file-path"], "r") as cf:
+                    cons_data[cname] = np.copy(
+                        cf[f"{dname}/{cname}/0/data"])
             else:
                 # Create empty constant using the list elements
                 cons_data[cname] = \
diff --git a/notebooks/AGIPD/AGIPD_Correct_and_Verify.ipynb b/notebooks/AGIPD/AGIPD_Correct_and_Verify.ipynb
index d3832232a0956c87e676e2909479aefdd36b7552..afcd3daba756e66c480d60c6350631cfcc015290 100644
--- a/notebooks/AGIPD/AGIPD_Correct_and_Verify.ipynb
+++ b/notebooks/AGIPD/AGIPD_Correct_and_Verify.ipynb
@@ -24,11 +24,10 @@
    "source": [
     "cluster_profile = \"noDB\"\n",
     "in_folder = \"/gpfs/exfel/exp/SPB/202030//p900119/raw\" # the folder to read data from, required\n",
-    "out_folder =  \"/gpfs/exfel/data/scratch/ahmedk/test/AGIPD_SPB4\"  # the folder to output to, required\n",
+    "out_folder =  \"/gpfs/exfel/data/scratch/ahmedk/test/AGIPD_SPB0\"  # the folder to output to, required\n",
     "sequences =  [-1] # sequences to correct, set to -1 for all, range allowed\n",
     "modules = [-1] # modules to correct, set to -1 for all, range allowed\n",
     "run = 80 # runs to process, required\n",
-    "\n",
     "karabo_id = \"SPB_DET_AGIPD1M-1\" # karabo karabo_id\n",
     "karabo_da = ['-1']  # a list of data aggregators names, Default [-1] for selecting all data aggregators\n",
     "receiver_id = \"{}CH0\" # inset for receiver devices\n",
@@ -172,7 +171,7 @@
     "    sequences = None\n",
     "\n",
     "CHUNK_SIZE = 250\n",
-    "MAX_PAR = 32\n",
+    "MAX_PAR = 16\n",
     "\n",
     "if in_folder[-1] == \"/\":\n",
     "    in_folder = in_folder[:-1]\n",
@@ -382,16 +381,15 @@
     "    from datetime import datetime\n",
     "    import re\n",
     "    import os\n",
-    "    from influxdb import InfluxDBClient\n",
+    "    # from influxdb import InfluxDBClient\n",
     "    import subprocess\n",
     "    from iCalibrationDB import Constants, Conditions, Detectors\n",
     "    from cal_tools.enums import BadPixels\n",
     "    from cal_tools.agipdlib import AgipdCorrections, SnowResolution\n",
     "    from cal_tools.agipdlib import get_num_cells, get_acq_rate\n",
-    "    \n",
-    "  \n",
-    "    #client = InfluxDBClient('exflqr18318', 8086, 'root', 'root', 'calstats')\n",
     "\n",
+    "    #client = InfluxDBClient('exflqr18318', 8086, 'root', 'root', 'calstats')\n",
+    "    \"\"\"\n",
     "    def create_influx_entry(run, proposal, qm, sequence, filesize, chunksize,\n",
     "                            total_sequences, success, runtime, reason=\"\"):\n",
     "        return {\n",
@@ -418,6 +416,7 @@
     "                \"runtime\": runtime,                \n",
     "            }\n",
     "        }\n",
+    "    \"\"\"\n",
     "    \n",
     "    hists_signal_low = None\n",
     "    hists_signal_high = None\n",
@@ -431,26 +430,23 @@
     "    gain_stats = 0\n",
     "    when = None\n",
     "    err = None\n",
-    "    \n",
-    "   \n",
+    "\n",
     "    try:\n",
     "        start = datetime.now()\n",
     "        success = True\n",
     "        reason = \"\"\n",
     "        filename, filename_out, channel, qm = inp\n",
-    "        print(\"Have input\")\n",
+    "\n",
     "        if max_cells == 0:\n",
     "            max_cells = get_num_cells(filename, loc, channel)\n",
     "            if max_cells is None:\n",
     "                raise ValueError(f\"No raw images found for {qm}\")\n",
     "            else:\n",
     "                cells = np.arange(max_cells)\n",
-    "            \n",
     "        if acq_rate == 0.:\n",
     "            acq_rate = get_acq_rate(filename, loc, channel)\n",
     "        else:\n",
     "            acq_rate = None\n",
-    "\n",
     "        if dbparms[2] == 0:\n",
     "            dbparms[2] = max_cells\n",
     "        if dbparms[5] == 0:\n",
@@ -464,10 +460,10 @@
     "            h5path = h5path[1:]\n",
     "        if h5path_idx[0] == '/':\n",
     "            h5path_idx = h5path_idx[1:]\n",
-    "            \n",
     "\n",
     "        infile = h5py.File(filename, \"r\", driver=\"core\")\n",
     "        outfile = h5py.File(filename_out, \"w\")\n",
+    "\n",
     "        try:\n",
     "            agipd_corr = AgipdCorrections(infile, outfile, max_cells, channel, max_pulses,\n",
     "                                          bins_gain_vs_signal, bins_signal_low_range,\n",
@@ -480,7 +476,6 @@
     "                                          force_mg_if_below=force_mg_if_below, mask_noisy_adc=mask_noisy_adc,\n",
     "                                          acquisition_rate=acq_rate, gain_setting=gain_setting,\n",
     "                                          corr_bools=corr_bools)\n",
-    "\n",
     "            blc_noise_threshold, blc_hmatch, melt_snow = special_opts\n",
     "            if not corr_bools[\"only_offset\"]:\n",
     "                blc_hmatch = False\n",
@@ -491,9 +486,9 @@
     "                agipd_corr.get_valid_image_idx()\n",
     "            except IOError:\n",
     "                return\n",
+    "\n",
     "            device = getattr(getattr(Detectors, dinstance), qm)\n",
-    "            \n",
-    "            # check if there is a yaml file in out_folder that has the device constants.\n",
+    "\n",
     "            if not nodb:\n",
     "                if const_yaml and device.device_name in const_yaml:\n",
     "                    print(fileparms != \"\")\n",
@@ -510,17 +505,14 @@
     "            for irange in agipd_corr.get_iteration_range():\n",
     "                agipd_corr.correct_agipd(irange)\n",
     "                print(\"Iterated\")\n",
-    "            print(\"All iterations are finished\")\n",
     "            hists, edges = agipd_corr.get_histograms()\n",
     "            hists_signal_low, hists_signal_high, hists_gain_vs_signal, hists_dig_gain_vs_signal, hist_pulses = hists\n",
     "            low_edges, high_edges, signal_edges, dig_signal_edges = edges\n",
     "            gain_stats = np.array(agipd_corr.gain_stats)\n",
-    "            \n",
     "        finally:\n",
     "            outfile.close()\n",
     "            infile.close()\n",
     "            print(\"Closed files\")\n",
-    "        \n",
     "    except Exception as e:\n",
     "        err = f\"Error: {e}\\nError traceback: {traceback.format_exc()}\"\n",
     "        print(err)\n",
@@ -598,9 +590,9 @@
     "        fout = os.path.abspath(\"{}/{}\".format(out_folder, (os.path.split(fname_in)[-1]).replace(\"RAW\", \"CORR\")))\n",
     "        if first:\n",
     "            first_files.append((fname_in, fout))\n",
+    "        \n",
     "        inp.append((fname_in, fout, i,  qm))\n",
     "    first = False\n",
-    "    \n",
     "\n",
     "    if len(inp) >= min(MAX_PAR, left):\n",
     "        print(f\"Running {len(inp)} tasks parallel\")\n",
@@ -609,7 +601,6 @@
     "                    bins_dig_gain_vs_signal, max_pulses, dbparms, fileparms, nodb, chunk_size_idim,\n",
     "                    special_opts, il_mode, karabo_id, dinstance, force_hg_if_below, force_mg_if_below,\n",
     "                    mask_noisy_adc, acq_rate, gain_setting, corr_bools, h5path, h5path_idx, const_yaml)\n",
-    "\n",
     "        r = view.map_sync(p, inp)\n",
     "        #r = list(map(p, inp))\n",
     "\n",
@@ -634,7 +625,6 @@
     "                    hists_gain_vs_signal += hg.astype(np.float64)\n",
     "                    hists_dig_gain_vs_signal += hdg.astype(np.float64)\n",
     "                    gain_stats += gs\n",
-    "    \n",
     "    done = all(dones)\n",
     "\n",
     "print(f\"Corrected raw data of {cells} memory cells and {acq_rate} MHz acquisition rate\")"
diff --git a/notebooks/AGIPD/AGIPD_Retrieve_Constants_Precorrection.ipynb b/notebooks/AGIPD/AGIPD_Retrieve_Constants_Precorrection.ipynb
index 655182d2d13574fb14c4da0b1fe17889d4ef0b33..8f3df25d24065fb556f678d4e27dc4618fa06532 100644
--- a/notebooks/AGIPD/AGIPD_Retrieve_Constants_Precorrection.ipynb
+++ b/notebooks/AGIPD/AGIPD_Retrieve_Constants_Precorrection.ipynb
@@ -338,7 +338,7 @@
     "\n",
     "# A dict to connect virtual device\n",
     "# to actual device name.\n",
-    "for i in range(len(modules)):\n",
+    "for i in modules:\n",
     "    qm = f\"Q{i//4+1}M{i%4+1}\"\n",
     "    if qm in mapped_files and not mapped_files[qm].empty():\n",
     "        device = getattr(getattr(Detectors, dinstance), qm)\n",
diff --git a/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb b/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb
index d88429fc776133a6ae582ec132eca76e3a2a4730..ee72412367ee4535ebb68c046c377db51f415d82 100644
--- a/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb
+++ b/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb
@@ -18,14 +18,14 @@
    "outputs": [],
    "source": [
     "cluster_profile = \"noDB\" # cluster profile to use\n",
-    "in_folder = \"/gpfs/exfel/exp/SPB/201922/p002566/raw/\" # the folder to read data from, required\n",
-    "out_folder =  \"/gpfs/exfel/data/scratch/karnem/test/006\"  # the folder to output to, required\n",
+    "in_folder = \"/gpfs/exfel/exp/FXE/201901/p002210/raw\" # the folder to read data from, required\n",
+    "out_folder =  \"/gpfs/exfel/data/scratch/ahmedk/test/jf\"  # the folder to output to, required\n",
     "sequences = [-1] # sequences to correct, set to -1 for all, range allowed\n",
-    "run = 189 # runs to process, required\n",
+    "run = 249 # runs to process, required\n",
     "\n",
-    "karabo_id = \"SPB_IRDA_JNGFR\" # karabo prefix of Jungfrau devices\n",
-    "karabo_da = ['JNGFR02', 'JNGFR03'] # data aggregators\n",
-    "receiver_id = \"MODULE_{}\" # inset for receiver devices\n",
+    "karabo_id = \"FXE_XAD_JF1M\" # karabo prefix of Jungfrau devices\n",
+    "karabo_da = ['JNGFR01'] # data aggregators\n",
+    "receiver_id = \"RECEIVER-{}\" # inset for receiver devices\n",
     "receiver_control_id = \"CONTROL\" # inset for control devices\n",
     "path_template = 'RAW-R{:04d}-{}-S{:05d}.h5'  # template to use for file name, double escape sequence number\n",
     "h5path = '/INSTRUMENT/{}/DET/{}:daqOutput/data'  # path in H5 file under which images are located\n",
@@ -35,7 +35,7 @@
     "karabo_da_control = \"JNGFR01\" # file inset for control data\n",
     "\n",
     "use_dir_creation_date = True # use the creation data of the input dir for database queries\n",
-    "cal_db_interface = \"tcp://max-exfl017:8017#8025\" #\"tcp://max-exfl016:8015#8025\" # the database interface to use\n",
+    "cal_db_interface = \"tcp://max-exfl016:8017#8025\" #\"tcp://max-exfl016:8015#8025\" # the database interface to use\n",
     "cal_db_timeout = 180000 # timeout on caldb requests\",\n",
     "\n",
     "overwrite = True # set to True if existing data should be overwritten\n",
@@ -45,8 +45,9 @@
     "photon_energy = 9.2 # photon energy in keV\n",
     "chunk_size_idim = 1  # chunking size of imaging dimension, adjust if user software is sensitive to this.\n",
     "integration_time = 4.96 # integration time in us, will be overwritten by value in file\n",
+    "mem_cells = 0. # leave memory cells equal 0, as it is saved in control information starting 2019.\n",
     "gmapfile = \"\" # variable is not used but left here for back compatibility\n",
-    "db_module = [\"Jungfrau_M035\", \"Jungfrau_M273\"] # ID of module in calibration database\n",
+    "db_module = [\"Jungfrau_M233\"] # ID of module in calibration database\n",
     "manual_slow_data = False  # if true, use manually entered bias_voltage and integration_time values\n",
     "chunk_size = 0\n",
     "\n",
@@ -185,13 +186,23 @@
     "\n",
     "\n",
     "control_path = h5path_cntrl.format(karabo_id_control, receiver_control_id)\n",
-    "this_run_mcells, sc_start = check_memoryCells(fp_path_contr.format(0), control_path)\n",
-    "if this_run_mcells == 1:\n",
-    "    memoryCells = 1\n",
-    "    print(f'Dark runs in single cell mode\\n storage cell start: {sc_start:02d}')\n",
-    "else:\n",
-    "    memoryCells = 16\n",
-    "    print(f'Dark runs in burst mode\\n storage cell start: {sc_start:02d}')\n",
+    "try:\n",
+    "    this_run_mcells, sc_start = check_memoryCells(fp_path_contr.format(0), control_path)\n",
+    "    if this_run_mcells == 1:\n",
+    "        memoryCells = 1\n",
+    "        print(f'Dark runs in single cell mode\\n storage cell start: {sc_start:02d}')\n",
+    "    else:\n",
+    "        memoryCells = 16\n",
+    "        print(f'Dark runs in burst mode\\n storage cell start: {sc_start:02d}')\n",
+    "except Exception as e:\n",
+    "    if \"Unable to open object\" in str(e):\n",
+    "        if mem_cells==0:\n",
+    "            memoryCells = 1\n",
+    "        else:\n",
+    "            memoryCells = mem_cells\n",
+    "        print(f'Set memory cells to {memoryCells} as it is not saved in control information.')\n",
+    "    else:\n",
+    "        print(f\"Error trying to access memory cell from control information: {e}\")\n",
     "\n",
     "print(f\"Integration time is {integration_time} us\")\n",
     "print(f\"Bias voltage is {bias_voltage} V\")\n",
@@ -448,7 +459,10 @@
     "                    while ind<max_ind:\n",
     "                        d = infile[h5path_f+\"/adc\"][ind:ind+chunk_size,...].astype(np.float32)\n",
     "                        g = infile[h5path_f+\"/gain\"][ind:ind+chunk_size,...]\n",
-    "                        m = infile[h5path_f+\"/memoryCell\"][ind:ind+chunk_size,...]\n",
+    "                        if h5path_f+\"/memoryCell\" in infile:\n",
+    "                            m = infile[h5path_f+\"/memoryCell\"][ind:ind+chunk_size,...]\n",
+    "                        else:\n",
+    "                            m = None\n",
     "                        print(f'To process: {d.shape}')\n",
     "                        inp.append((d,g,m, ind, k==0))\n",
     "                        ind += chunk_size\n",
diff --git a/notebooks/generic/overallmodules_Darks_Summary_NBC.ipynb b/notebooks/generic/overallmodules_Darks_Summary_NBC.ipynb
index 0cec5017a78fa113f6c79e7ffbabd555b1cb55d2..bacf630eaf9b0c4b61d0773ddd8c33d0d7310150 100644
--- a/notebooks/generic/overallmodules_Darks_Summary_NBC.ipynb
+++ b/notebooks/generic/overallmodules_Darks_Summary_NBC.ipynb
@@ -309,6 +309,7 @@
     "        Mod_data[dname.format(const_name)] = OrderedDict()\n",
     "    \n",
     "    display(Markdown(f'##### {const_name}'))\n",
+    "    print_once = True\n",
     "    for gain in range(gainstages):\n",
     "        if const_name == 'ThresholdsDark':\n",
     "            if gain > 1:\n",
@@ -348,6 +349,9 @@
     "                                                                         v), axis=0)\n",
     "                except:\n",
     "                    Mod_data[k.format(const_name)][gain_names[gain]] = v\n",
+    "        if np.count_nonzero(dval) == 0 and print_once:\n",
+    "            display(Markdown(f'New and previous {const_name} are the same, hence there is no difference.'))\n",
+    "            print_once = False\n",
     "        # Plotting constant overall modules.\n",
     "        display(Markdown(f'###### {glabel[gain]} ######'))\n",
     "\n",
diff --git a/requirements.txt b/requirements.txt
index ab85322866f0a07b0d2fa96d3ec77eb0204d3362..7f24418b5e9adec5f0716c7588f36c9f1942a700 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -8,11 +8,16 @@ extra_geom == 0.8.0
 fabio == 0.9.0
 gitpython == 3.1.0
 h5py == 2.10.0
-influxdb == 5.2.3
 iminuit == 1.3.8
+influxdb == 5.2.3
 ipyparallel == 6.2.4
+ipykernel == 5.1.4
+ipython == 7.12.0
+ipython_genutils == 0.2.0
 jupyter == 1.0.0
+jupyter_client == 5.3.4
 jupyter_console == 6.1.0
+jupyter-core == 4.6.1
 karabo_data == 0.7.0
 lxml == 4.5.0
 metadata_client == 3.0.5
diff --git a/setup.py b/setup.py
index e8a538d6a6c60d1436fe1a383355e8caf15ad33f..b93537e269853e03be6300828022df7f61c1efbd 100644
--- a/setup.py
+++ b/setup.py
@@ -43,6 +43,7 @@ for ctypes in notebooks.values():
     for nb in ctypes.values():
         data_files.append(nb["notebook"])
         data_files += nb.get("dep_notebooks", [])
+        data_files += nb.get("pre_notebooks", [])
 
 setup(
     name='European XFEL Offline Calibration',
diff --git a/webservice/webservice.py b/webservice/webservice.py
index 4053e32aea8ae78dd155823da43698248842c48c..f417bdf02c53553e89cbf6af60e3226b118ff9a5 100644
--- a/webservice/webservice.py
+++ b/webservice/webservice.py
@@ -794,7 +794,7 @@ async def server_runner(config, mode):
                         sched_prio=str(config[action]['sched-prio']),
                         action=action, instrument=instrument,
                         cycle=cycle, proposal=proposal,
-                        runs="_r".join(wait_runs),
+                        runs="_".join([f"r{r}" for r in wait_runs]),
                         time_stamp=ts,
                         det_instance=karabo_id,
                         request_time=request_time
diff --git a/webservice/webservice.yaml b/webservice/webservice.yaml
index b87d9b7d01ddbaa8904936c0b416d73cd5301246..ff2a6cca82d9ad8b173c2510a4446516c580f690 100644
--- a/webservice/webservice.yaml
+++ b/webservice/webservice.yaml
@@ -29,10 +29,12 @@ correct:
     cmd : >
         python -m xfel_calibrate.calibrate {detector} CORRECT
         --slurm-scheduling {sched_prio}
+        --slurm-mem 750
         --request-time {request_time}
         --slurm-name {action}_{instrument}_{detector}_{cycle}_p{proposal}_r{runs}
-        --report-to {action}_{det_instance}_{time_stamp}
+        --report-to /gpfs/exfel/exp/{instrument}/{cycle}/p{proposal}/usr/Reports/{runs}/{det_instance}_{action}_{proposal}_{runs}_{time_stamp}
         --cal-db-timeout 300000
+        --cal-db-interface tcp://max-exfl016:8015#8044
 
 dark:
     in-folder: /gpfs/exfel/exp/{instrument}/{cycle}/p{proposal}/raw
@@ -41,8 +43,10 @@ dark:
     cmd: >
         python -m xfel_calibrate.calibrate {detector} DARK
         --priority {priority}
+        --concurrency-par karabo_da
         --slurm-scheduling {sched_prio}
         --request-time {request_time}
         --slurm-name {action}_{instrument}_{detector}_{cycle}_p{proposal}_r{runs}
-        --report-to /gpfs/exfel/d/cal/caldb_store/xfel/reports/{detector}/{instrument}/{det_instance}/{action}/{action}_{proposal}_{runs}_{time_stamp}
+        --report-to /gpfs/exfel/d/cal/caldb_store/xfel/reports/{instrument}/{det_instance}/{action}/{action}_{proposal}_{runs}_{time_stamp}
+        --cal-db-interface tcp://max-exfl016:8015#8044
         --db-output
diff --git a/xfel_calibrate/calibrate.py b/xfel_calibrate/calibrate.py
index 6a9e40efde73e4ccb8c0beb115ad856a292cf0b3..c745c0f4c62598978547b3eefdb5f6525709dc0d 100755
--- a/xfel_calibrate/calibrate.py
+++ b/xfel_calibrate/calibrate.py
@@ -291,7 +291,7 @@ def balance_sequences(in_folder, run, sequences, sequences_per_node,
         sequences_per_node += 1
         nsplits = len(seq_nums) // sequences_per_node + 1
         print("Changed to {} sequences per node".format(sequences_per_node))
-        print("to have a maximum of 8 concurrent jobs")
+        print(f"to have a maximum of {max_nodes} concurrent jobs")
     return [l.tolist() for l in np.array_split(list(seq_nums), nsplits) if
             l.size > 0]