diff --git a/notebooks/ePix100/Correction_ePix100_NBC.ipynb b/notebooks/ePix100/Correction_ePix100_NBC.ipynb
index 8c229d36e30483b2e05c362c977ea11ff1a91ea7..9c0ca8939e297f8c0c573bcf07570aeeb7b5a527 100644
--- a/notebooks/ePix100/Correction_ePix100_NBC.ipynb
+++ b/notebooks/ePix100/Correction_ePix100_NBC.ipynb
@@ -37,7 +37,8 @@
     "db_module = \"\"  # module id in the database\n",
     "receiver_template = \"RECEIVER\"  # detector receiver template for accessing raw data files\n",
     "path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5'  # the template to use to access data\n",
-    "instrument_source_template = '{}/DET/{}:daqOutput'  # instrument detector data source in h5files\n",
+    "input_source_template = '{karabo_id}/DET/{receiver}:daqOutput'  # input (raw) detector data source in h5files\n",
+    "output_source_template = '{karabo_id}/CORR/{receiver}:daqOutput'  # output (corrected) detector data source in h5files\n",
     "\n",
     "# Parameters affecting writing corrected data.\n",
     "chunk_size_idim = 1  # H5 chunking size of output data\n",
@@ -145,11 +146,14 @@
     "\n",
     "run_folder = in_folder / f\"r{run:04d}\"\n",
     "\n",
-    "instrument_src = instrument_source_template.format(\n",
-    "    karabo_id, receiver_template)\n",
+    "output_source_template = output_source_template or input_source_template\n",
+    "\n",
+    "input_src = input_source_template.format(\n",
+    "    karabo_id=karabo_id, receiver=receiver_template)\n",
+    "output_src = output_source_template.format(\n",
+    "    karabo_id=karabo_id, receiver=receiver_template)\n",
     "\n",
     "print(f\"Correcting run: {run_folder}\")\n",
-    "print(f\"Instrument H5File source: {instrument_src}\")\n",
     "print(f\"Data corrected files are stored at: {out_folder}\")"
    ]
   },
@@ -211,7 +215,7 @@
     "# Read control data.\n",
     "ctrl_data = epix100lib.epix100Ctrl(\n",
     "    run_dc=run_dc,\n",
-    "    instrument_src=instrument_src,\n",
+    "    instrument_src=input_src,\n",
     "    ctrl_src=f\"{karabo_id}/DET/CONTROL\",\n",
     "    )\n",
     "\n",
@@ -557,7 +561,7 @@
     "    out_file = out_folder / f.name.replace(\"RAW\", \"CORR\")\n",
     "\n",
     "    # Data shape in seq_dc excluding trains with empty images. \n",
-    "    ishape = seq_dc[instrument_src, \"data.image.pixels\"].shape\n",
+    "    ishape = seq_dc[input_src, \"data.image.pixels\"].shape\n",
     "    corr_ntrains = ishape[0]\n",
     "    all_train_ids = seq_dc.train_ids\n",
     "\n",
@@ -587,9 +591,9 @@
     "\n",
     "    # Overwrite seq_dc after eliminating empty trains or/and applying limited images.\n",
     "    seq_dc = seq_dc.select(\n",
-    "        instrument_src, \"*\", require_all=True).select_trains(np.s_[:corr_ntrains])\n",
+    "        input_src, \"*\", require_all=True).select_trains(np.s_[:corr_ntrains])\n",
     "\n",
-    "    pixel_data = seq_dc[instrument_src, \"data.image.pixels\"]\n",
+    "    pixel_data = seq_dc[input_src, \"data.image.pixels\"]\n",
     "    context.map(correct_train, pixel_data)\n",
     "\n",
     "    step_timer.done_step(f'Correcting {corr_ntrains} trains.')\n",
@@ -597,7 +601,7 @@
     "    step_timer.start()  # Write corrected data.\n",
     "\n",
     "    # Create CORR files and add corrected data sections.\n",
-    "    image_counts = seq_dc[instrument_src, \"data.image.pixels\"].data_counts(labelled=False)\n",
+    "    image_counts = seq_dc[input_src, \"data.image.pixels\"].data_counts(labelled=False)\n",
     "\n",
     "    # Write corrected data.\n",
     "    with DataFile(out_file, \"w\") as ofile:\n",
@@ -609,10 +613,10 @@
     "        ofile.create_metadata(\n",
     "            like=seq_dc,\n",
     "            sequence=seq_file.sequence,\n",
-    "            instrument_channels=(f'{instrument_src}/data',)\n",
+    "            instrument_channels=sorted({f'{output_src}/data',f'{input_src}/data'})\n",
     "        )\n",
     "        # Create Instrument section to later add corrected datasets.\n",
-    "        outp_source = ofile.create_instrument_source(instrument_src)\n",
+    "        outp_source = ofile.create_instrument_source(output_src)\n",
     "\n",
     "        # Create count/first datasets at INDEX source.\n",
     "        outp_source.create_index(data=image_counts)\n",
@@ -622,7 +626,7 @@
     "            \"encoding\", \"flipX\", \"flipY\", \"roiOffsets\", \"rotation\",\n",
     "        ]\n",
     "        for field in image_raw_fields:\n",
-    "            field_arr = seq_dc[instrument_src, f\"data.image.{field}\"].ndarray()\n",
+    "            field_arr = seq_dc[input_src, f\"data.image.{field}\"].ndarray()\n",
     "\n",
     "            outp_source.create_key(\n",
     "                f\"data.image.{field}\", data=field_arr,\n",
@@ -634,10 +638,10 @@
     "        outp_source.create_key(\n",
     "            \"data.trainId\", data=seq_dc.train_ids, chunks=min(50, len(seq_dc.train_ids)))\n",
     "        \n",
-    "        if np.isin('data.pulseId', list(seq_dc[instrument_src].keys())): # some runs are missing 'data.pulseId'\n",
+    "        if np.isin('data.pulseId', list(seq_dc[input_src].keys())): # some runs are missing 'data.pulseId'\n",
     "            outp_source.create_key(\n",
     "                \"data.pulseId\",\n",
-    "                data=list(seq_dc[instrument_src]['data.pulseId'].ndarray()[:, 0]),\n",
+    "                data=list(seq_dc[input_src]['data.pulseId'].ndarray()[:, 0]),\n",
     "                chunks=min(50, len(seq_dc.train_ids)),\n",
     "            )\n",
     "        \n",
@@ -648,6 +652,11 @@
     "            outp_source.create_key(\n",
     "                \"data.image.patterns\", data=data_patterns, chunks=dataset_chunk)\n",
     "\n",
+    "        if output_src != input_src:\n",
+    "            ofile[f'/INSTRUMENT/{input_src}'] = h5py.SoftLink(f'/INSTRUMENT/{output_src}')\n",
+    "            ofile[f'/INDEX/{input_src}'] = h5py.SoftLink(f'/INDEX/{output_src}')\n",
+    "\n",
+    "\n",
     "        step_timer.done_step('Storing data.')\n",
     "if empty_seq == len(seq_files):\n",
     "    warning(\"No valid trains for RAW data to correct.\")\n",