diff --git a/notebooks/pnCCD/Characterize_pnCCD_Dark_NBC.ipynb b/notebooks/pnCCD/Characterize_pnCCD_Dark_NBC.ipynb
index 124039ed8ae4d1a2949260a6c29398fae854c451..8cc5d8762890f38c27dc5e14dcc9c3cf24880d19 100644
--- a/notebooks/pnCCD/Characterize_pnCCD_Dark_NBC.ipynb
+++ b/notebooks/pnCCD/Characterize_pnCCD_Dark_NBC.ipynb
@@ -24,12 +24,25 @@
    },
    "outputs": [],
    "source": [
-    "in_folder = \"/gpfs/exfel/exp/SQS/201921/p002430/raw\"  # input folder, required\n",
-    "out_folder = '/gpfs/exfel/exp/SQS/201921/p002430/scratch'  # output folder, required\n",
-    "path_template = 'RAW-R{:04d}-PNCCD01-S{{:05d}}.h5'  # the template to use to access data\n",
-    "h5path = '/INSTRUMENT/SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output/data/image/' # path to the data in the HDF5 file\n",
-    "run = 745  # which run to read data from, required\n",
+    "cluster_profile = \"noDB\"  # ipcluster profile to use\n",
+    "in_folder = \"/gpfs/exfel/exp/SQS/201921/p002430/raw/\"  # input folder, required\n",
+    "out_folder = 'gpfs/exfel/data/scratch/karnem/test/'  # output folder, required\n",
     "sequence = 0  # sequence file to use\n",
+    "run = 745  # which run to read data from, required\n",
+    "\n",
+    "karabo_da = 'PNCCD01' # data aggregators\n",
+    "karabo_id = \"SQS_NQS_PNCCD1MP\" # karabo prefix of PNCCD devices\n",
+    "receiver_id = \"PNCCD_FMT-0\" # inset for receiver devices\n",
+    "path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5'  # the template to use to access data\n",
+    "h5path = '/INSTRUMENT/{}/CAL/{}:output/data/image/' # path in the HDF5 file the data is at\n",
+    "\n",
+    "use_dir_creation_date = True  # use dir creation date as data production reference date\n",
+    "cal_db_interface = \"tcp://max-exfl016:8021\"  # calibration DB interface to use\n",
+    "cal_db_timeout = 300000 # timeout on caldb requests\n",
+    "db_output = False # if True, the notebook sends dark constants to the calibration database\n",
+    "local_output = True # if True, the notebook saves dark constants locally\n",
+    "\n",
+    "\n",
     "number_dark_frames = 0  # number of images to be used, if set to 0 all available images are used\n",
     "chunkSize = 100 # number of images to read per chunk\n",
     "fix_temperature = 233.  # fix temperature in K, set to -1 to use value from slow data\n",
@@ -40,15 +53,12 @@
     "sigmaNoise = 10.  # pixels whose signal value exceeds sigmaNoise*noise value in that pixel will be masked\n",
     "bad_pixel_offset_sigma = 5.  # any pixel whose offset is beyond 5 standard deviations, is a bad pixel\n",
     "bad_pixel_noise_sigma = 5.  # any pixel whose noise is beyond 5 standard deviations, is a bad pixel\n",
-    "cluster_profile = \"noDB\"  # ipcluster profile to use\n",
     "run_parallel = True # for parallel computation \n",
     "cpuCores = 40 # specifies the number of running cpu cores\n",
-    "cal_db_interface = \"tcp://max-exfl016:8021\"  # calibration DB interface to use\n",
-    "temp_limits = 5  # temperature limits in which calibration parameters are considered equal\n",
-    "db_output = False # if True, the notebook sends dark constants to the calibration database\n",
-    "local_output = True # if True, the notebook saves dark constants locally\n",
+    "\n",
     "# for database time derivation:\n",
-    "use_dir_creation_date = True  # To be used to retrieve calibration constants later on"
+    "temp_limits = 5  # temperature limits in which to consider calibration parameters equal\n",
+    "multi_iteration = False  # use multiple iterations"
    ]
   },
   {
@@ -147,11 +157,12 @@
     "x = 1024 # rows of the FastCCD to analyze in FS mode \n",
     "y = 1024 # columns of the FastCCD to analyze in FS mode \n",
     "print(\"pnCCD size is: {}x{} pixels.\".format(x, y))\n",
-    "    \n",
+    "\n",
     "ped_dir = \"{}/r{:04d}\".format(in_folder, run)\n",
-    "fp_name = path_template.format(run)\n",
+    "fp_name = path_template.format(run, karabo_da)\n",
     "fp_path = '{}/{}'.format(ped_dir, fp_name)\n",
     "filename = fp_path.format(sequence)\n",
+    "h5path = h5path.format(karabo_id, receiver_id)\n",
     "\n",
     "creation_time = None\n",
     "if use_dir_creation_date:\n",
@@ -717,7 +728,7 @@
     "\n",
     "    if db_output:\n",
     "        try:\n",
-    "            metadata.send(cal_db_interface)\n",
+    "            metadata.send(cal_db_interface, timeout=cal_db_timeout)\n",
     "            print(\"Inject {} constants from {}\".format(const_name,\n",
     "                                                       metadata.calibration_constant_version.begin_at))\n",
     "        except Exception as e:\n",
diff --git a/notebooks/pnCCD/Correct_pnCCD_NBC.ipynb b/notebooks/pnCCD/Correct_pnCCD_NBC.ipynb
index 1031c988f38aa63655041150a0c747e6df8c464c..b4ddf1567da81f05fbcc87fa9562fd1467074816 100644
--- a/notebooks/pnCCD/Correct_pnCCD_NBC.ipynb
+++ b/notebooks/pnCCD/Correct_pnCCD_NBC.ipynb
@@ -22,29 +22,32 @@
    },
    "outputs": [],
    "source": [
-    "\n",
+    "cluster_profile = \"noDB\" # ipcluster profile to use\n",
     "in_folder = \"/gpfs/exfel/exp/SQS/201921/p002430/raw/\" # input folder, required\n",
     "out_folder = '/gpfs/exfel/data/scratch/karnem/test/' # output folder, required\n",
-    "path_template = 'RAW-R{:04d}-PNCCD01-S{{:05d}}.h5' # the template to use to access data\n",
-    "path_template_seqs = \"{}/r{:04d}/*PNCCD01-S*.h5\"\n",
+    "sequences = [-1] # sequences to correct, set to -1 for all, range allowed\n",
     "run = 746 # which run to read data from, required\n",
+    "\n",
+    "karabo_da = 'PNCCD01' # data aggregators\n",
+    "karabo_id = \"SQS_NQS_PNCCD1MP\" # karabo prefix of PNCCD devices\n",
+    "receiver_id = \"PNCCD_FMT-0\" # inset for receiver devices\n",
+    "path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # the template to use to access data\n",
+    "h5path = '/INSTRUMENT/{}/CAL/{}:output/data/' # path in the HDF5 file the data is at\n",
+    "\n",
+    "use_dir_creation_date = True\n",
+    "cal_db_interface = \"tcp://max-exfl016:8015\" # calibration DB interface to use\n",
+    "cal_db_timeout = 300000000 # timeout on caldb requests\n",
+    "\n",
     "number_dark_frames = 0 # number of images to be used, if set to 0 all available images are used\n",
-    "cluster_profile = \"noDB\" # ipcluster profile to use\n",
     "sigma_noise = 10. # Pixel exceeding 'sigmaNoise' * noise value in that pixel will be masked\n",
-    "h5path = '/INSTRUMENT/SQS_NQS_PNCCD1MP/CAL/PNCCD_FMT-0:output/data/' # path in the HDF5 file the data is at\n",
     "multi_iteration = False # use multiple iterations\n",
-    "use_dir_creation_date = True\n",
     "fix_temperature = 233.\n",
     "gain = 0\n",
     "bias_voltage = 300\n",
     "integration_time = 70\n",
-    "\n",
     "split_evt_primary_threshold = 5. # primary threshold for split event classification in terms of n sigma noise\n",
     "split_evt_secondary_threshold = 3. # secondary threshold for split event classification in terms of n sigma noise\n",
     "split_evt_mip_threshold = 1000. # MIP threshold for event classification\n",
-    "cal_db_interface = \"tcp://max-exfl016:8015\" # calibration DB interface to use\n",
-    "cal_db_timeout = 300000000 # timeout on caldb requests\n",
-    "sequences = [-1] # sequences to correct, set to -1 for all, range allowed\n",
     "chunk_size_idim = 1 # H5 chunking size of output data\n",
     "overwrite = True\n",
     "do_pattern_classification = True # classify split events\n",
@@ -54,26 +57,9 @@
     "photon_energy_gain_map = 2. # energy in keV\n",
     "cpuCores = 8\n",
     "\n",
-    "\n",
-    "def balance_sequences(in_folder, run, sequences, sequences_per_node, path_template_seqs):\n",
-    "    import glob\n",
-    "    import re\n",
-    "    import numpy as np\n",
-    "    if sequences[0] == -1:\n",
-    "        sequence_files = glob.glob(path_template_seqs.format(in_folder, run))\n",
-    "        seq_nums = set()\n",
-    "        for sf in sequence_files:\n",
-    "            seqnum = re.findall(r\".*-S([0-9]*).h5\", sf)[0]\n",
-    "            seq_nums.add(int(seqnum))\n",
-    "        seq_nums -= set(sequences)\n",
-    "        nsplits = len(seq_nums)//sequences_per_node+1\n",
-    "        while nsplits > 8:\n",
-    "            sequences_per_node += 1\n",
-    "            nsplits = len(seq_nums)//sequences_per_node+1\n",
-    "            print(\"Changed to {} sequences per node to have a maximum of 8 concurrent jobs\".format(sequences_per_node))\n",
-    "        return [l.tolist() for l in np.array_split(list(seq_nums), nsplits)]\n",
-    "    else:\n",
-    "        return sequences"
+    "def balance_sequences(in_folder, run, sequences, sequences_per_node, karabo_da):\n",
+    "    from xfel_calibrate.calibrate import balance_sequences as bs\n",
+    "    return bs(in_folder, run, sequences, sequences_per_node, karabo_da)"
    ]
   },
   {
@@ -106,6 +92,7 @@
     "\n",
     "import numpy as np\n",
     "import h5py\n",
+    "import glob\n",
     "import matplotlib.pyplot as plt\n",
     "from iminuit import Minuit\n",
     "\n",
@@ -151,7 +138,8 @@
     "\n",
     "    \n",
     "ped_dir = \"{}/r{:04d}\".format(in_folder, run)\n",
-    "fp_name = path_template.format(run)\n",
+    "fp_name = path_template.format(run, karabo_da)\n",
+    "h5path = h5path.format(karabo_id, receiver_id)\n",
     "\n",
     "import datetime\n",
     "creation_time = None\n",
@@ -209,30 +197,22 @@
    },
    "outputs": [],
    "source": [
-    "dirlist = sorted(os.listdir(ped_dir))\n",
     "file_list = []\n",
     "total_sequences = 0\n",
     "fsequences = []\n",
-    "for entry in dirlist:\n",
     "\n",
-    "    #only h5 file\n",
-    "    abs_entry = \"{}/{}\".format(ped_dir, entry)\n",
-    "    if os.path.isfile(abs_entry) and os.path.splitext(abs_entry)[1] == \".h5\":\n",
-    "        \n",
-    "        if sequences is None:\n",
-    "            for seq in range(len(dirlist)):\n",
-    "                \n",
-    "                if path_template.format(run).format(seq) in abs_entry:\n",
-    "                    file_list.append(abs_entry)\n",
-    "                    total_sequences += 1\n",
-    "                    fsequences.append(seq)\n",
-    "        else:\n",
-    "            for seq in sequences:\n",
-    "                \n",
-    "                if path_template.format(run).format(seq) in abs_entry:\n",
-    "                    file_list.append(os.path.abspath(abs_entry))\n",
-    "                    total_sequences += 1\n",
-    "                    fsequences.append(seq)\n",
+    "if sequences is None:\n",
+    "    file_list = glob.glob(fp_path.format(0).replace('00000', '*'))\n",
+    "    total_sequences = len(file_list)\n",
+    "    fsequences = range(total_sequences)\n",
+    "else:\n",
+    "    for seq in sequences:\n",
+    "        abs_entry = fp_path.format(seq)\n",
+    "        if os.path.isfile(abs_entry):\n",
+    "            file_list.append(abs_entry)\n",
+    "            total_sequences += 1\n",
+    "            fsequences.append(seq)\n",
+    "\n",
     "sequences = fsequences"
    ]
   },
@@ -603,7 +583,6 @@
     "    with h5py.File(f, 'r', driver='core') as infile:\n",
     "        out_fileb = \"{}/{}\".format(out_folder, f.split(\"/\")[-1])\n",
     "        out_file = out_fileb.replace(\"RAW\", \"CORR\")\n",
-    "        #out_filed = out_fileb.replace(\"RAW\", \"CORR-SC\")\n",
     "\n",
     "        data = None\n",
     "        noise = None\n",
@@ -652,8 +631,6 @@
     "                        single_im = data[...,0]\n",
     "                        \n",
     "                if do_pattern_classification:\n",
-    "                #with h5py.File(out_file, \"a\") as ofiled:\n",
-    "                    #copy_and_sanitize_non_cal_data(infile, ofiled, h5path)\n",
     "\n",
     "                    ddsetcm = ofile.create_dataset(h5path+\"/pixels_cm\",\n",
     "                                         oshape,\n",