diff --git a/cal_tools/cal_tools/agipdlib.py b/cal_tools/cal_tools/agipdlib.py
index 35aedb6fd12300c74675cfdc58248b4ba87adc7a..598ab81be175bc6a2d62bb719a5a3be99ea3192d 100644
--- a/cal_tools/cal_tools/agipdlib.py
+++ b/cal_tools/cal_tools/agipdlib.py
@@ -173,8 +173,8 @@ class AgipdCorrections:
 
             agipd_corr.allocate_constants(modules, (3, mem_cells_db, 512, 128))
 
-            with open(f'{out_folder}/retrieved_constants.yml', "r") as f:
-                const_yaml = yaml.load(f.read(), Loader=yaml.FullLoader)
+            metadata = cal_tools.tools.CalibrationMetadata(out_folder)
+            const_yaml = metadata["retrieved-constants"]
 
             for mod in modules:
                 qm = f"Q{mod // 4 + 1}M{mod % 4 + 1}"
@@ -1213,9 +1213,10 @@ class AgipdCorrections:
         """
         Initialize calibration constants from a yaml file
 
-        :param const_yaml: (Dict) fromed from a yaml file in pre-notebook,
-        which consists of metadata of either the constant file path or the
-        empty constant shape, and the creation-time of the retrieved constants
+        :param const_yaml: (Dict) the "retrieved-constants" section of the
+        metadata YAML file written by the pre-correction notebook. For each
+        constant it holds either the constant file path or the empty constant
+        shape, plus the creation time of the retrieved constants.
         :param module_idx: Index of module
         :return:
         """
diff --git a/cal_tools/cal_tools/tools.py b/cal_tools/cal_tools/tools.py
index e4e3f44b5aefac4dd7f6aa92f7a08db46c0d305e..d88ca857ef35583728c7881ee8dbbd2ee877f82a 100644
--- a/cal_tools/cal_tools/tools.py
+++ b/cal_tools/cal_tools/tools.py
@@ -8,7 +8,7 @@ from os.path import isfile
 from pathlib import Path
 from queue import Queue
 from time import sleep
-from typing import Optional
+from typing import Optional, Union
 from urllib.parse import urljoin
 
 import dateutil.parser
@@ -16,6 +16,7 @@ import h5py
 import ipykernel
 import numpy as np
 import requests
+import yaml
 import zmq
 from iCalibrationDB import ConstantMetaData, Versions
 from metadata_client.metadata_client import MetadataClient
@@ -231,8 +232,8 @@ def get_run_info(proposal, run):
     return resp.json()
 
 
-def get_dir_creation_date(directory: str, run: int,
-                          verbosity: Optional[int] = 0) -> datetime.datetime:
+def get_dir_creation_date(directory: Union[str, Path], run: int,
+                          verbosity: int = 0) -> datetime.datetime:
     """
     Return run start time from MyDC.
     If not available from MyMDC, retrieve the data from the dataset's metadata
@@ -550,3 +551,32 @@ def get_constant_from_db_and_time(device, constant, condition, empty_constant,
             return data, None
     else:
         return data, None
+
+
+class CalibrationMetadata(dict):
+    """Convenience class: dictionary stored in metadata YAML file
+
+    If metadata file already exists, it will be loaded (this may override
+    additional constructor parameters given to this class).
+    """
+
+    def __init__(self, output_dir: Union[Path, str], *args):
+        super().__init__(*args)
+        self._yaml_fn = Path(output_dir) / "calibration_metadata.yml"
+        if self._yaml_fn.exists():
+            with self._yaml_fn.open("r") as fd:
+                data = yaml.safe_load(fd)
+            if isinstance(data, dict):
+                self.update(data)
+            else:
+                print(f"Warning: existing {self._yaml_fn} is malformed, will be overwritten")
+
+    def save(self):
+        with self._yaml_fn.open("w") as fd:
+            yaml.safe_dump(dict(self), fd)
+
+    def save_copy(self, copy_dir: Path):
+        with (copy_dir / self._yaml_fn.name).open("w") as fd:
+            yaml.safe_dump(dict(self), fd)
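
Not part of the patch: a minimal usage sketch of the new CalibrationMetadata helper, assuming only the API added above (the output folder, device name and constant entry below are illustrative).

    from pathlib import Path
    from cal_tools.tools import CalibrationMetadata

    out_folder = Path("/tmp/calibration_out")   # illustrative output folder
    out_folder.mkdir(parents=True, exist_ok=True)

    # Loads <out_folder>/calibration_metadata.yml if it already exists
    metadata = CalibrationMetadata(out_folder)
    consts = metadata.setdefault("retrieved-constants", {})   # plain dict access
    consts["SOME_AGIPD_DEVICE"] = {"Offset": {"creation-time": "2020-01-01T00:00:00"}}

    metadata.save()                     # writes <out_folder>/calibration_metadata.yml
    metadata.save_copy(Path("/tmp"))    # duplicate into another existing directory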
diff --git a/notebooks/AGIPD/AGIPD_Correct_and_Verify.ipynb b/notebooks/AGIPD/AGIPD_Correct_and_Verify.ipynb
index ec243d1799650f5d5ee9621a55bf92f98c0540e6..12326abe6dc0583d5a54d15da341f4678291ee7d 100644
--- a/notebooks/AGIPD/AGIPD_Correct_and_Verify.ipynb
+++ b/notebooks/AGIPD/AGIPD_Correct_and_Verify.ipynb
@@ -95,7 +95,7 @@
     "\n",
     "def balance_sequences(in_folder, run, sequences, sequences_per_node, karabo_da):\n",
     "    from xfel_calibrate.calibrate import balance_sequences as bs\n",
-    "    return bs(in_folder, run, sequences, sequences_per_node, karabo_da)\n"
+    "    return bs(in_folder, run, sequences, sequences_per_node, karabo_da)"
    ]
   },
   {
@@ -105,55 +105,67 @@
    "outputs": [],
    "source": [
     "import copy\n",
-    "from datetime import timedelta\n",
-    "from dateutil import parser\n",
     "import gc\n",
-    "import glob\n",
     "import itertools\n",
-    "from IPython.display import HTML, display, Markdown, Latex\n",
     "import math\n",
-    "from multiprocessing import Pool\n",
-    "import os\n",
     "import re\n",
-    "import sys\n",
     "import traceback\n",
-    "from time import time, sleep, perf_counter\n",
-    "import tabulate\n",
     "import warnings\n",
+    "from datetime import timedelta\n",
+    "from multiprocessing import Pool\n",
+    "from pathlib import Path\n",
+    "from time import perf_counter, sleep, time\n",
+    "\n",
+    "import tabulate\n",
+    "from dateutil import parser\n",
+    "from IPython.display import HTML, Latex, Markdown, display\n",
+    "\n",
     "warnings.filterwarnings('ignore')\n",
+    "import matplotlib\n",
+    "import matplotlib.pyplot as plt\n",
     "import yaml\n",
-    "\n",
-    "from extra_geom import AGIPD_1MGeometry, AGIPD_500K2GGeometry\n",
     "from extra_data import RunDirectory, stack_detector_data\n",
+    "from extra_geom import AGIPD_1MGeometry, AGIPD_500K2GGeometry\n",
     "from iCalibrationDB import Detectors\n",
-    "from mpl_toolkits.mplot3d import Axes3D\n",
-    "from matplotlib.ticker import LinearLocator, FormatStrFormatter\n",
-    "from matplotlib.colors import LogNorm\n",
     "from matplotlib import cm as colormap\n",
-    "import matplotlib.pyplot as plt\n",
-    "import matplotlib\n",
+    "from matplotlib.colors import LogNorm\n",
+    "from matplotlib.ticker import FormatStrFormatter, LinearLocator\n",
+    "from mpl_toolkits.mplot3d import Axes3D\n",
+    "\n",
     "matplotlib.use(\"agg\")\n",
     "%matplotlib inline\n",
     "import numpy as np\n",
     "import seaborn as sns\n",
+    "\n",
     "sns.set()\n",
     "sns.set_context(\"paper\", font_scale=1.4)\n",
     "sns.set_style(\"ticks\")\n",
     "\n",
+    "import seaborn as sns\n",
     "from cal_tools.agipdlib import (AgipdCorrections, get_acq_rate,\n",
     "                                get_gain_setting, get_num_cells)\n",
-    "from cal_tools.cython import agipdalgs as calgs\n",
     "from cal_tools.ana_tools import get_range\n",
+    "from cal_tools.cython import agipdalgs as calgs\n",
     "from cal_tools.enums import BadPixels\n",
-    "from cal_tools.tools import get_dir_creation_date, map_modules_from_folder\n",
     "from cal_tools.step_timing import StepTimer\n",
+    "from cal_tools.tools import (CalibrationMetadata, get_dir_creation_date,\n",
+    "                             map_modules_from_folder)\n",
     "\n",
-    "import seaborn as sns\n",
     "sns.set()\n",
     "sns.set_context(\"paper\", font_scale=1.4)\n",
     "sns.set_style(\"ticks\")"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "in_folder = Path(in_folder)\n",
+    "out_folder = Path(out_folder)"
+   ]
+  },
   {
    "cell_type": "markdown",
    "metadata": {},
@@ -204,32 +216,25 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "if in_folder[-1] == \"/\":\n",
-    "    in_folder = in_folder[:-1]\n",
     "if sequences[0] == -1:\n",
     "    sequences = None\n",
     "\n",
-    "control_fname = f'{in_folder}/r{run:04d}/RAW-R{run:04d}-{karabo_da_control}-S00000.h5'\n",
+    "control_fn = in_folder / f'r{run:04d}' / f'RAW-R{run:04d}-{karabo_da_control}-S00000.h5'\n",
     "h5path_ctrl = h5path_ctrl.format(karabo_id_control)\n",
     "h5path = h5path.format(karabo_id, receiver_id)\n",
     "h5path_idx = h5path_idx.format(karabo_id, receiver_id)\n",
     "\n",
-    "print(f'Path to control file {control_fname}')"
+    "print(f'Path to control file {control_fn}')"
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "ExecuteTime": {
-     "end_time": "2019-02-21T11:30:07.086286Z",
-     "start_time": "2019-02-21T11:30:06.929722Z"
-    }
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "# Create output folder\n",
-    "os.makedirs(out_folder, exist_ok=overwrite)\n",
+    "out_folder.mkdir(parents=True, exist_ok=True)\n",
     "\n",
     "# Evaluate detector instance for mapping\n",
     "instrument = karabo_id.split(\"_\")[0]\n",
@@ -288,7 +293,7 @@
    "outputs": [],
    "source": [
     "# set everything up filewise\n",
-    "mmf = map_modules_from_folder(in_folder, run, path_template,\n",
+    "mmf = map_modules_from_folder(str(in_folder), run, path_template,\n",
     "                              karabo_da, sequences)\n",
     "mapped_files, mod_ids, total_sequences, sequences_qm, _ = mmf\n",
     "file_list = []\n",
@@ -345,7 +350,7 @@
     "# Evaluate creation time\n",
     "creation_time = None\n",
     "if use_dir_creation_date:\n",
-    "    creation_time = get_dir_creation_date(in_folder, run)\n",
+    "    creation_time = get_dir_creation_date(str(in_folder), run)\n",
     "    offset = parser.parse(creation_date_offset)\n",
     "    delta = timedelta(hours=offset.hour,\n",
     "                      minutes=offset.minute, seconds=offset.second)\n",
@@ -358,9 +363,9 @@
     "        gain_setting = None\n",
     "    else:\n",
     "        try:\n",
-    "            gain_setting = get_gain_setting(control_fname, h5path_ctrl)\n",
+    "            gain_setting = get_gain_setting(str(control_fn), h5path_ctrl)\n",
     "        except Exception as e:\n",
-    "            print(f'ERROR: while reading gain setting from: \\n{control_fname}')\n",
+    "            print(f'ERROR: while reading gain setting from: \\n{control_fn}')\n",
     "            print(e)\n",
     "            print(\"Set gain setting to 0\")\n",
     "            gain_setting = 0\n",
@@ -417,10 +422,9 @@
     "# Retrieve calibration constants to RAM\n",
     "agipd_corr.allocate_constants(modules, (3, mem_cells_db, 512, 128))\n",
     "\n",
-    "const_yaml = None\n",
-    "if os.path.isfile(f'{out_folder}/retrieved_constants.yml'):\n",
-    "    with open(f'{out_folder}/retrieved_constants.yml', \"r\") as f:\n",
-    "        const_yaml = yaml.safe_load(f.read())\n",
+    "metadata = CalibrationMetadata(out_folder)\n",
+    "# NOTE: this notebook will not overwrite calibration metadata file\n",
+    "const_yaml = metadata.get(\"retrieved-constants\", {})\n",
     "\n",
     "# retrive constants\n",
     "def retrieve_constants(mod):\n",
@@ -433,9 +437,10 @@
     "    err = ''\n",
     "    try:\n",
     "        # check if there is a yaml file in out_folder that has the device constants.\n",
-    "        if const_yaml and device.device_name in const_yaml:\n",
+    "        if device.device_name in const_yaml:\n",
     "            when = agipd_corr.initialize_from_yaml(const_yaml, mod, device)\n",
     "        else:\n",
+    "            # TODO: should we save what is found here in metadata?\n",
     "            when = agipd_corr.initialize_from_db(cal_db_interface, creation_time, mem_cells_db, bias_voltage,\n",
     "                                                 photon_energy, gain_setting, acq_rate, mod, device, False)\n",
     "    except Exception as e:\n",
@@ -553,7 +558,7 @@
     "\n",
     "        # Save corrected data\n",
     "        pool.starmap(agipd_corr.write_file, [\n",
-    "            (i_proc, file_name, os.path.join(out_folder, os.path.basename(file_name).replace(\"RAW\", \"CORR\")))\n",
+    "            (i_proc, file_name, str(out_folder / Path(file_name).name.replace(\"RAW\", \"CORR\")))\n",
     "            for i_proc, file_name in enumerate(file_batch)\n",
     "        ])\n",
     "        step_timer.done_step(\"Save\")"
@@ -579,24 +584,23 @@
    },
    "outputs": [],
    "source": [
-    "# if there is a yml file that means a leading notebook got processed\n",
-    "# and the reporting would be generated from it.\n",
+    "# if the yml file contains \"retrieved-constants\", that means a leading\n",
+    "# notebook got processed and the reporting would be generated from it.\n",
     "fst_print = True\n",
+    "timestamps = {}\n",
     "\n",
-    "to_store = []\n",
-    "line = []\n",
     "for i, (error, modno, when, mod_dev) in enumerate(const_out):\n",
     "    qm = mod_name(modno)\n",
     "    # expose errors while applying correction\n",
     "    if error:\n",
     "        print(\"Error: {}\".format(error) )\n",
     "\n",
-    "    if not const_yaml or mod_dev not in const_yaml:\n",
+    "    if mod_dev not in const_yaml:\n",
     "        if fst_print:\n",
     "            print(\"Constants are retrieved with creation time: \")\n",
     "            fst_print = False\n",
     "    \n",
-    "        line = [qm]\n",
+    "        module_timestamps = {}\n",
     "\n",
     "        # If correction is crashed\n",
     "        if not error:\n",
@@ -611,32 +615,25 @@
     "        # Add NA to keep array structure\n",
     "        for key in ['Offset', 'SlopesPC', 'SlopesFF']:\n",
     "            if when and key in when and when[key]:\n",
-    "                line.append(when[key])\n",
+    "                module_timestamps[key] = when[key]\n",
     "            else:\n",
     "                if error is not None:\n",
-    "                    line.append('Err')\n",
+    "                    module_timestamps[key] = \"Err\"\n",
     "                else:\n",
-    "                    line.append('NA')\n",
-    "\n",
-    "        if len(line) > 0:\n",
-    "            to_store.append(line)\n",
+    "                    module_timestamps[key] = \"NA\"\n",
+    "        timestamps[qm] = module_timestamps\n",
     "\n",
     "seq = sequences[0] if sequences else 0\n",
     "\n",
-    "if len(to_store) > 0:\n",
-    "    with open(f\"{out_folder}/retrieved_constants_s{seq}.yml\",\"w\") as fyml:\n",
-    "        yaml.safe_dump({\"time-summary\": {f\"S{seq}\":to_store}}, fyml)"
+    "if timestamps:\n",
+    "    with open(f\"{out_folder}/retrieved_constants_s{seq}.yml\",\"w\") as fd:\n",
+    "        yaml.safe_dump({\"time-summary\": {f\"S{seq}\": timestamps}}, fd)"
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "ExecuteTime": {
-     "end_time": "2019-02-18T17:28:51.765030Z",
-     "start_time": "2019-02-18T17:28:51.714783Z"
-    }
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "def do_3d_plot(data, edges, x_axis, y_axis):\n",
@@ -1016,7 +1013,7 @@
     "_ = ax.legend()\n",
     "_ = ax.grid()\n",
     "_ = plt.xlabel('[ADU]')\n",
-    "_ = plt.ylabel('Counts')\n"
+    "_ = plt.ylabel('Counts')"
    ]
   },
   {
diff --git a/notebooks/AGIPD/AGIPD_Correct_and_Verify_Summary_NBC.ipynb b/notebooks/AGIPD/AGIPD_Correct_and_Verify_Summary_NBC.ipynb
index 9158cccf8d15986fdda54af1e492094ac21bc8ee..5ba5cf50a1ed14dc7309c05959f8cd8c4a37ce17 100644
--- a/notebooks/AGIPD/AGIPD_Correct_and_Verify_Summary_NBC.ipynb
+++ b/notebooks/AGIPD/AGIPD_Correct_and_Verify_Summary_NBC.ipynb
@@ -26,20 +26,22 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import os\n",
+    "import re\n",
+    "import warnings\n",
+    "from pathlib import Path\n",
     "\n",
     "import dateutil.parser\n",
-    "import glob\n",
     "import numpy as np\n",
-    "import re\n",
     "import yaml\n",
-    "import warnings\n",
+    "\n",
     "warnings.filterwarnings('ignore')\n",
     "\n",
     "import matplotlib.pyplot as plt\n",
+    "\n",
     "%matplotlib inline\n",
     "import tabulate\n",
-    "from IPython.display import display, Markdown, Latex"
+    "from cal_tools.tools import CalibrationMetadata\n",
+    "from IPython.display import Latex, Markdown, display"
    ]
   },
   {
@@ -48,11 +50,10 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "if os.path.isfile(f'{out_folder}/retrieved_constants.yml'):\n",
-    "    with open(f\"{out_folder}/retrieved_constants.yml\",\"r\") as fyml:\n",
-    "        main_dict = yaml.load(fyml)\n",
-    "else:\n",
-    "    main_dict = {\"time-summary\":dict()}\n",
+    "out_folder = Path(out_folder)\n",
+    "metadata = CalibrationMetadata(out_folder)\n",
+    "const_dict = metadata.setdefault(\"retrieved-constants\", {})\n",
+    "time_dict = const_dict.setdefault(\"time-summary\", {})\n",
     "\n",
     "# Extracting Instrument string\n",
     "instrument = karabo_id.split(\"_\")[0]\n",
@@ -75,18 +76,15 @@
     "    modules = [int(x[-2:]) for x in karabo_da]\n",
     "\n",
     "# This is needed only if AGIPD Correction notebook had no precorrection notebooks for retrieving constants\n",
-    "# gather all generated sequence yml files for time summary of retrieved constant in retrieved_constants.yml\n",
-    "fnames = sorted(glob.glob(f'{out_folder}/retrieved_constants_*yml'))  \n",
-    "for f in fnames:\n",
-    "    with open(f,\"r\") as fyml:\n",
-    "        fdict = yaml.load(fyml)\n",
-    "    # append different sequences's time summary to the main yaml\n",
-    "    for k, v in fdict[\"time-summary\"].items():\n",
-    "        main_dict[\"time-summary\"][k] = v\n",
-    "    os.remove(f)\n",
+    "# gather all generated sequence yml files for time summary of retrieved constant under retrieved-constants in metadata.yml\n",
+    "for fn in sorted(out_folder.glob(\"retrieved_constants_*.yml\")):\n",
+    "    with fn.open(\"r\") as fd:\n",
+    "        fdict = yaml.safe_load(fd)\n",
+    "    # append different sequences' time summary to the main yaml\n",
+    "    time_dict.update(fdict[\"time-summary\"])\n",
+    "    fn.unlink()\n",
     "\n",
-    "with open(f\"{out_folder}/retrieved_constants.yml\",\"w\") as fyml:\n",
-    "        yaml.safe_dump(main_dict, fyml)"
+    "metadata.save()"
    ]
   },
   {
@@ -95,59 +93,27 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "with open(f\"{out_folder}/retrieved_constants.yml\",\"r\") as fyml:\n",
-    "    time_summary = yaml.load(fyml)\n",
-    "# check if pre-notebook has retrieved constants for all modules.\n",
-    "const_times = []\n",
-    "seq = []\n",
-    "for k, v  in sorted(time_summary[\"time-summary\"].items()):\n",
-    "    arr = np.array(v)\n",
-    "    arr = arr.reshape(arr.shape[0]//len(modules), len(modules), arr.shape[1])\n",
-    "    const_times = const_times + list(arr)\n",
-    "    seq.append(k)\n",
-    "      \n",
-    "const_times = np.array(const_times)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Function print summary of constant injection time\n",
-    "# To reduce printouts only unique entries are shown.\n",
-    "def const_table(const, pos):\n",
-    "    \"\"\"\n",
-    "    Create a summary table for the creation time differences for\n",
-    "    the retrieved constants (Offset, SlopesPC, SlopesFF).\n",
-    "    \"\"\"\n",
-    "    print(f\"{const} were injected on: \")\n",
-    "\n",
-    "    # catch timing difference in retrieve constants\n",
-    "    unique, idx, counts = np.unique(const_times[:,:,pos], return_inverse=True, return_counts=True)\n",
-    "    idx = idx.reshape((const_times.shape[0], len(modules)))\n",
-    "    \n",
+    "def print_const_table(const):\n",
+    "    print(f\"{const} constants were injected on:\")\n",
+    "    table_data = {}\n",
+    "    for seq, mod_data in time_dict.items():\n",
+    "        for mod, const_data in mod_data.items():\n",
+    "            timestamp = const_data[const]\n",
+    "            table_data.setdefault(timestamp, []).append(f\"{seq}:{mod}\")\n",
     "    table = []\n",
-    "    for i in range(0, counts.shape[0]):\n",
-    "        line = [ const_times[:,:,pos][idx==i][0]  ]\n",
-    "        mods = ''\n",
-    "        for i_s, s in enumerate(seq):\n",
-    "            if(const_times[i_s,:,0][idx[i_s]==i].shape[0] > 0):\n",
-    "                mods = mods+ '{}: {}, '.format(s, const_times[i_s,:,0][idx[i_s]==i])\n",
-    "        line.append(mods)\n",
-    "        table.append(line)\n",
-    "\n",
-    "    if counts.shape[0] == 1:\n",
-    "        table[0][1] = 'All modules'\n",
+    "    if len(table_data) == 1:\n",
+    "        table.append([[*table_data][0], \"All modules\"])\n",
     "    else:\n",
-    "        table[np.argmax(counts)][1] = 'Rest of the modules'\n",
+    "        for timestamp, seqmod in table_data.items():\n",
+    "            table.append([timestamp, seqmod[0]])\n",
+    "            for further_module in seqmod[1:]:\n",
+    "                table.append([\"\", further_module])\n",
+    "    display(Latex(tabulate.tabulate(table,\n",
+    "                 tablefmt=\"latex\",\n",
+    "                 headers=[\"Timestamps\", \"Modules and sequences\"])))\n",
     "\n",
-    "    md = display(Latex(tabulate.tabulate(table, tablefmt='latex',\n",
-    "                                         headers=[\"Time stamps\", \"Modules and sequences\"])))\n",
-    "for i_key, key in enumerate(['Offset', 'SlopesPC', 'SlopesFF']):\n",
-    "    if const_times.shape[2] > i_key+1:\n",
-    "        const_table(key, i_key+1)"
+    "for const in ['Offset', 'SlopesPC', 'SlopesFF']:\n",
+    "    print_const_table(const)"
    ]
   },
   {
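
For orientation, print_const_table above walks a structure shaped roughly like the sketch below; it corresponds to the "time-summary" section under "retrieved-constants" in calibration_metadata.yml after the per-sequence files are merged in (module names and timestamps are illustrative).

    # Illustrative layout of metadata["retrieved-constants"]["time-summary"]
    time_dict = {
        "SAll": {   # written by the pre-correction notebook
            "Q1M1": {"Offset": "2020-01-01T00:00:00",
                     "SlopesPC": "NA", "SlopesFF": "NA"},
        },
        "S0": {     # merged from a retrieved_constants_s0.yml sequence file
            "Q1M2": {"Offset": "Err", "SlopesPC": "Err", "SlopesFF": "Err"},
        },
    }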
diff --git a/notebooks/AGIPD/AGIPD_Retrieve_Constants_Precorrection.ipynb b/notebooks/AGIPD/AGIPD_Retrieve_Constants_Precorrection.ipynb
index 2c29e05c2e72965cd0ac9ef9929ffd742b585bfa..e33f11761de1207d6dffb25133e3af0b15dc6d2c 100644
--- a/notebooks/AGIPD/AGIPD_Retrieve_Constants_Precorrection.ipynb
+++ b/notebooks/AGIPD/AGIPD_Retrieve_Constants_Precorrection.ipynb
@@ -94,37 +94,46 @@
     "import sys\n",
     "from collections import OrderedDict\n",
     "\n",
-    "import os\n",
     "import h5py\n",
-    "import numpy as np\n",
     "import matplotlib\n",
+    "import numpy as np\n",
+    "\n",
     "matplotlib.use(\"agg\")\n",
-    "import matplotlib.pyplot as plt\n",
     "import multiprocessing as mp\n",
+    "from datetime import timedelta\n",
+    "from pathlib import Path\n",
     "\n",
-    "from iCalibrationDB import Constants, Conditions, Detectors\n",
-    "from cal_tools.tools import (map_modules_from_folder, get_dir_creation_date)\n",
+    "import matplotlib.pyplot as plt\n",
     "from cal_tools.agipdlib import get_gain_setting\n",
+    "from cal_tools.tools import (CalibrationMetadata, get_dir_creation_date,\n",
+    "                             map_modules_from_folder)\n",
     "from dateutil import parser\n",
-    "from datetime import timedelta"
+    "from iCalibrationDB import Conditions, Constants, Detectors"
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "ExecuteTime": {
-     "end_time": "2019-02-21T11:30:07.086286Z",
-     "start_time": "2019-02-21T11:30:06.929722Z"
-    }
-   },
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# slopes_ff_from_files left as str for now\n",
+    "in_folder = Path(in_folder)\n",
+    "out_folder = Path(out_folder)\n",
+    "metadata = CalibrationMetadata(out_folder)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
    "outputs": [],
    "source": [
     "max_cells = mem_cells\n",
     "\n",
     "creation_time = None\n",
     "if use_dir_creation_date:\n",
-    "    creation_time = get_dir_creation_date(in_folder, run)\n",
+    "    creation_time = get_dir_creation_date(str(in_folder), run)\n",
     "    offset = parser.parse(creation_date_offset)\n",
     "    delta = timedelta(hours=offset.hour, minutes=offset.minute, seconds=offset.second)\n",
     "    creation_time += delta\n",
@@ -132,14 +141,12 @@
     "\n",
     "if sequences[0] == -1:\n",
     "    sequences = None\n",
-    "\n",
-    "if in_folder[-1] == \"/\":\n",
-    "    in_folder = in_folder[:-1]\n",
+    "    \n",
     "print(f\"Outputting to {out_folder}\")\n",
-    "\n",
-    "os.makedirs(out_folder, exist_ok=True)\n",
+    "out_folder.mkdir(parents=True, exist_ok=True)\n",
     "\n",
     "import warnings\n",
+    "\n",
     "warnings.filterwarnings('ignore')\n",
     "\n",
     "from cal_tools.agipdlib import SnowResolution\n",
@@ -153,7 +160,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "control_fname = f'{in_folder}/r{run:04d}/RAW-R{run:04d}-{karabo_da_control}-S00000.h5'\n",
+    "control_fn = in_folder / f'r{run:04d}' / f'RAW-R{run:04d}-{karabo_da_control}-S00000.h5'\n",
     "h5path_ctrl = h5path_ctrl.format(karabo_id_control)\n",
     "\n",
     "if gain_setting == 0.1:\n",
@@ -162,9 +169,9 @@
     "        gain_setting = None\n",
     "    else:\n",
     "        try:\n",
-    "            gain_setting = get_gain_setting(control_fname, h5path_ctrl)\n",
+    "            gain_setting = get_gain_setting(str(control_fn), h5path_ctrl)\n",
     "        except Exception as e:\n",
-    "            print(f'ERROR: while reading gain setting from: \\n{control_fname}')\n",
+    "            print(f'ERROR: while reading gain setting from: \\n{control_fn}')\n",
     "            print(e)\n",
     "            print(\"Set gain setting to 0\")\n",
     "            gain_setting = 0\n",
@@ -211,7 +218,7 @@
    "source": [
     "# set everything up filewise\n",
     "print(f\"Checking the files before retrieving constants\")\n",
-    "mmf = map_modules_from_folder(in_folder, run, path_template, karabo_da, sequences)\n",
+    "mmf = map_modules_from_folder(str(in_folder), run, path_template, karabo_da, sequences)\n",
     "\n",
     "mapped_files, mod_ids, total_sequences, sequences_qm, _ = mmf"
    ]
@@ -230,7 +237,6 @@
    "outputs": [],
    "source": [
     "from functools import partial\n",
-    "import yaml\n",
     "\n",
     "\n",
     "def retrieve_constants(karabo_id, bias_voltage, max_cells, acq_rate, \n",
@@ -259,15 +265,14 @@
     "            dev.device_name: (STR) device name\n",
     "    \"\"\"\n",
     "\n",
-    "    import numpy as np\n",
     "    import sys\n",
     "    import traceback\n",
-    "    \n",
-    "    from cal_tools.agipdlib import get_num_cells, get_acq_rate\n",
+    "\n",
+    "    import numpy as np\n",
+    "    from cal_tools.agipdlib import get_acq_rate, get_num_cells\n",
     "    from cal_tools.agipdutils import assemble_constant_dict\n",
     "    from cal_tools.tools import get_from_db\n",
-    "\n",
-    "    from iCalibrationDB import Constants, Conditions, Detectors\n",
+    "    from iCalibrationDB import Conditions, Constants, Detectors\n",
     "\n",
     "    err = None\n",
     "\n",
@@ -375,15 +380,16 @@
     "        if err:\n",
     "            print(f\"Error for module {qm}: {err}\")\n",
     "        mdata_dict[dname] = md_dict\n",
+    "\n",
     "# check if it is requested not to retrieve any constants from the database\n",
     "if not nodb_with_dark:\n",
-    "    with open(f\"{out_folder}/retrieved_constants.yml\", \"w\") as outfile:\n",
-    "        yaml.safe_dump(mdata_dict, outfile)\n",
+    "    metadata.update({\"retrieved-constants\": mdata_dict})\n",
+    "        \n",
     "    print(\"\\nRetrieved constants for modules: \",\n",
     "          f\"{[', '.join([f'Q{x//4+1}M{x%4+1}' for x in modules])]}\")\n",
     "    print(f\"Operating conditions are:\\n• Bias voltage: {bias_voltage}\\n• Memory cells: {max_cells}\\n\"\n",
     "          f\"• Acquisition rate: {acq_rate}\\n• Gain setting: {gain_setting}\\n• Photon Energy: {photon_energy}\\n\")\n",
-    "    print(f\"Constant metadata is saved in retrieved_constants.yml\\n\")\n",
+    "    print(f\"Constant metadata is saved under \\\"retrieved-constants\\\" in metadata.yml\\n\")\n",
     "else:\n",
     "    print(\"No constants were retrieved as calibrated files will be used.\")"
    ]
@@ -396,12 +402,12 @@
    "source": [
     "print(\"Constants are retrieved with creation time: \")\n",
     "i = 0\n",
-    "when = dict()\n",
-    "to_store = []\n",
+    "timestamps = {}\n",
     "\n",
     "for dname, dinfo in mod_dev.items():\n",
     "    print(dinfo[\"mod\"], \":\")\n",
-    "    line = [dinfo[\"mod\"]]\n",
+    "    module_timestamps = {}\n",
+    "    module_name = dinfo[\"mod\"]\n",
     "    if dname in mdata_dict:\n",
     "        for cname, mdata in mdata_dict[dname].items():\n",
     "            if hasattr(mdata[\"creation-time\"], 'strftime'):\n",
@@ -411,16 +417,16 @@
     "        # Add NA to keep array structure\n",
     "    for cname in ['Offset', 'SlopesPC', 'SlopesFF']:\n",
     "        if not dname in mdata_dict or dinfo[\"err\"]:\n",
-    "            line.append('Err')\n",
+    "            module_timestamps[cname] = \"Err\"\n",
     "        else:\n",
     "            if cname in mdata_dict[dname]:\n",
     "                if mdata_dict[dname][cname][\"creation-time\"]:\n",
-    "                    line.append(mdata_dict[dname][cname][\"creation-time\"])\n",
+    "                    module_timestamps[cname] = mdata_dict[dname][cname][\"creation-time\"]\n",
     "                else:\n",
-    "                    line.append('NA')\n",
+    "                    module_timestamps[cname] = \"NA\"\n",
     "            else:\n",
-    "                line.append('NA')\n",
-    "    to_store.append(line)\n",
+    "                module_timestamps[cname] = \"NA\"\n",
+    "    timestamps[module_name] = module_timestamps\n",
     "\n",
     "    i += 1\n",
     "    if sequences:\n",
@@ -428,14 +434,11 @@
     "    else:\n",
     "        # if sequences[0] changed to None as it was -1\n",
     "        seq_num = 0\n",
-    "        \n",
-    "with open(f\"{out_folder}/retrieved_constants.yml\",\"r\") as fyml:\n",
-    "    time_summary = yaml.safe_load(fyml)\n",
-    "    time_summary.update({\"time-summary\": {\n",
-    "                                          \"SAll\":to_store\n",
-    "                                        }})\n",
-    "with open(f\"{out_folder}/retrieved_constants.yml\",\"w\") as fyml:\n",
-    "        yaml.safe_dump(time_summary, fyml)"
+    "\n",
+    "time_summary = metadata.setdefault(\"retrieved-constants\", {}).setdefault(\"time-summary\", {})\n",
+    "time_summary[\"SAll\"] = timestamps\n",
+    "\n",
+    "metadata.save()"
    ]
   }
  ],
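
For orientation, the per-device part of "retrieved-constants" written by this pre-correction notebook looks roughly like the sketch below. Only the "creation-time" key is visible in this patch; other per-constant entries (e.g. the constant file path or an empty-constant shape) are filled in elsewhere in the notebook. Device name and timestamp are illustrative.

    # Illustrative per-device layout under metadata["retrieved-constants"]
    retrieved = {
        "AGIPD_EXAMPLE_DEVICE": {                     # hypothetical device name
            "Offset":   {"creation-time": "2020-01-01T00:00:00"},
            "SlopesPC": {"creation-time": None},      # falsy -> reported as "NA"
        },
    }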
diff --git a/xfel_calibrate/calibrate.py b/xfel_calibrate/calibrate.py
index 03242466e9d9dbb6bf4a9d95a3de67db53cac8fd..6289bdd514a72cc80493577665a28803e35b90f3 100755
--- a/xfel_calibrate/calibrate.py
+++ b/xfel_calibrate/calibrate.py
@@ -10,8 +10,10 @@ import sys
 import textwrap
 import warnings
 from datetime import datetime
+from pathlib import Path
 from subprocess import DEVNULL, check_output
 
+import cal_tools.tools
 import nbconvert
 import nbformat
 from jinja2 import Template
@@ -187,7 +189,7 @@ def get_cell_n(nb, cell_type, cell_n):
         if cell.cell_type == cell_type:
             if counter == cell_n:
                 return cell
-            counter=+1
+            counter += 1
 
 
 def first_code_cell(nb):
@@ -743,6 +745,22 @@ def make_par_table(parms, run_tmp_path):
         finfile.write(textwrap.dedent(tmpl.render(p=col_type, lines=l_parms)))
 
 
+def make_pipeline_yaml(parms, version, report_path, output_dir):
+    """Adds information from arguments to metadata file"""
+
+    metadata = cal_tools.tools.CalibrationMetadata(output_dir)
+
+    parm_subdict = metadata.setdefault("calibration-configurations", {})
+    for p in parms:
+        name = consolize_name(p.name)
+        parm_subdict[name] = p.value
+
+    metadata["pycalibration-version"] = version
+    metadata["report-path"] = f"{report_path}.pdf"
+
+    metadata.save()
+
+
 def run():
     """ Run a calibration task with parser arguments """
     parser = make_extended_parser()
@@ -808,24 +826,34 @@ def run():
     # Write all input parameters to rst file to be included to final report
     parms = parameter_values(parms, **args)
     make_par_table(parms, run_tmp_path)
+    # And save the invocation of this script itself
     save_executed_command(run_tmp_path, version)
 
     # wait on all jobs to run and then finalize the run by creating a report from the notebooks
-    out_path = os.path.join(
-        report_path, detector.upper(), caltype.upper(), datetime.now().isoformat()
-    )
+    out_path = (Path(default_report_path) / detector.upper() / caltype.upper()
+                / datetime.now().isoformat())
     if try_report_to_output:
         if "out_folder" in args:
-            out_path = os.path.abspath(args["out_folder"])
+            out_path = Path(args["out_folder"]).absolute()
         else:
-            print("No 'out_folder' defined as argument, outputting to '{}' instead.".format(
-                out_path))
+            print(f"No 'out_folder' given, outputting to '{out_path}' instead.")
 
-    os.makedirs(out_path, exist_ok=True)
+    out_path.mkdir(parents=True, exist_ok=True)
 
-    report_to = title.replace(" ", "")
-    if args["report_to"] is not None:
-        report_to = args["report_to"]
+    # Use given report name, falling back to notebook title
+    if args["report_to"] is None:
+        report_to = out_path / title.replace(" ", "")
+        print(f"report_to not specified, will use {report_to}")
+    else:
+        report_to = Path(args["report_to"])
+        if report_to.is_dir():
+            print(f"report_to is directory, will use title '{title}' for filename")
+            report_to = report_to / title.replace(" ", "")
+        elif len(report_to.parts) == 1:
+            print(f"report_to path contained no path, saving report in '{out_path}'")
+            report_to = out_path / report_to
+
+    # Write metadata about calibration job to output folder
+    make_pipeline_yaml(parms, version, report_to, out_path)
 
     folder = get_par_attr(parms, 'in_folder', 'value', '')
 
diff --git a/xfel_calibrate/finalize.py b/xfel_calibrate/finalize.py
index 1990246790cb892841713a2de509b0aa8dc76432..f3b3ae552d161882305087bcae42f70b8f7db64f 100644
--- a/xfel_calibrate/finalize.py
+++ b/xfel_calibrate/finalize.py
@@ -4,12 +4,15 @@ from glob import glob
 from importlib.machinery import SourceFileLoader
 from os import chdir, listdir, makedirs, path, remove, stat
 from os.path import isdir, isfile, splitext
+from pathlib import Path
 from shutil import copy, copytree, move, rmtree
 from subprocess import CalledProcessError, check_call, check_output
 from textwrap import dedent
 from time import sleep
 
+import cal_tools.tools
 import tabulate
+import yaml
 from jinja2 import Template
 
 from .settings import *
@@ -155,17 +158,17 @@ def make_timing_summary(run_path, joblist, request_time, submission_time):
     tmpl = Template('''
                     Runtime summary
                     ==============
-                    
+
                     .. math::
                         {% for line in time_table %}
                         {{ line }}
                         {%- endfor %}
-                        
+
                     .. math::
                         {% for line in job_table %}
                         {{ line }}
                         {%- endfor %}
-                        
+
                     ''')
 
     time_vals = [['Time of Request', request_time],
@@ -287,7 +290,7 @@ def make_report(run_path: str, tmp_path: str, out_path: str, project: str,
     index_tmp = Template('''
                         Calibration report
                         ==================
-                        
+
                         .. toctree::
                            :maxdepth: 2
                            {% for k in keys %}
@@ -321,9 +324,11 @@ def make_report(run_path: str, tmp_path: str, out_path: str, project: str,
         rmtree(f'{dtmp}/')
 
     # Archiving files in slurm_tmp
-    if os.path.isfile(f'{out_path}/retrieved_constants.yml'):
-        copy(f'{out_path}/retrieved_constants.yml',
-             f"{tmp_path}")
+    out_path = Path(out_path)
+    tmp_path = Path(tmp_path)
+    metadata = cal_tools.tools.CalibrationMetadata(out_path)
+    # TODO: add runtime summary
+    metadata.save_copy(tmp_path)
 
     # Moving temporary files to out-folder after successful execution
     # This helps in keeping elements needed for re-producibility.
diff --git a/xfel_calibrate/settings.py b/xfel_calibrate/settings.py
index a6bbc597940d3b902394461ea13e372cd776d091..709fdd5ec7c61fa01b3dca68112fd31e36409c1c 100644
--- a/xfel_calibrate/settings.py
+++ b/xfel_calibrate/settings.py
@@ -8,7 +8,7 @@ temp_path = os.path.abspath("temp/")
 python_path = sys.executable
 
 # Path to store reports in
-report_path = "{}/calibration_reports/".format(os.getcwd())
+default_report_path = "{}/calibration_reports/".format(os.getcwd())
 
 # Also try to output the report to an out_folder defined by the notebook
 try_report_to_output = True