From a606b97bb3a5e7213685423158885dc42d587d14 Mon Sep 17 00:00:00 2001
From: ahmedk <karim.ahmed@xfel.eu>
Date: Fri, 23 Jun 2023 09:53:44 +0200
Subject: [PATCH] Reduce test duration

- Decrease the number of processed sequences
- Choose random sequence numbers to process
- Refactor test code for readability
- Use a thread pool for parallel I/O processing (see the sketch below)
- Replace prints with logging
- Only apply the find-differences check to a single non-valid file.
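
A minimal sketch of the new validation flow (illustrative names, not the
exact test code): the MD5 checks are I/O-bound, so files are fanned out
over a thread pool rather than worker processes.

    from functools import partial
    from multiprocessing.pool import ThreadPool
    from pathlib import Path
    import hashlib

    def file_md5(path: Path, blocksize: int = 2 ** 20) -> bytes:
        """MD5 digest of a file, read in blocks."""
        md5 = hashlib.md5()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(blocksize), b""):
                md5.update(chunk)
        return md5.digest()

    def validate(ref_folder: Path, test_file: Path):
        # A file is valid when it matches its reference byte for byte.
        valid = file_md5(test_file) == file_md5(ref_folder / test_file.name)
        return valid, test_file

    def non_valid_files(ref_folder: Path, h5files: list):
        # Threads suffice here: file reads release the GIL.
        with ThreadPool(processes=8) as pool:
            results = pool.map(partial(validate, ref_folder), h5files)
        return [f for valid, f in results if not valid]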
---
 tests/test_reference_runs/callab_tests.py     |  87 +++---
 .../test_pre_deployment.py                    | 268 ++++++++++++------
 2 files changed, 224 insertions(+), 131 deletions(-)

diff --git a/tests/test_reference_runs/callab_tests.py b/tests/test_reference_runs/callab_tests.py
index 9ab0d807c..e1303b160 100644
--- a/tests/test_reference_runs/callab_tests.py
+++ b/tests/test_reference_runs/callab_tests.py
@@ -10,7 +10,7 @@ automated_test_config = {
             "karabo-id-control": "SPB_IRU_AGIPD1M1",
             "karabo-id": "SPB_DET_AGIPD1M-1",
             "slurm-mem": "750",
-            "sequences": "0,1,2",
+            "sequences": "0",
             "rel-gain": True,
             "ctrl-source-template": "{}/MDL/FPGA_COMP",
         },
@@ -44,7 +44,7 @@ automated_test_config = {
             "karabo-id-control": "SPB_IRU_AGIPD1M1",
             "karabo-id": "SPB_DET_AGIPD1M-1",
             "slurm-mem": "750",
-            "sequences": "0,1,2",
+            "sequences": "0",
             "ctrl-source-template": "{}/MDL/FPGA_COMP",
         },
         "reference-folder": "{}/{}/{}",
@@ -77,90 +77,94 @@ automated_test_config = {
             "karabo-id-control": "MID_EXP_AGIPD1M1",
             "karabo-id": "MID_DET_AGIPD1M-1",
             "slurm-mem": "750",
+            "sequences": "0,1",
             "ctrl-source-template": "{}/MDL/FPGA_COMP",
         },
         "reference-folder": "{}/{}/{}",
     },
-    "MID_DET_AGIPD1M-1-DARK-ADAPTIVE": {
+    "MID_DET_AGIPD1M-1-CORRECT-SELECT_CELLS_CM": {
         "det_type": "AGIPD",
-        "cal_type": "DARK",
+        "cal_type": "CORRECT",
         "config": {
+            "blc-stripes": True,
+            "rel-gain": True,
             "out-folder": "{}/{}/{}",
             "in-folder": "/gpfs/exfel/exp/MID/202121/p002929/raw",
-            "run-high": "4",
-            "run-med": "5",
-            "run-low": "6",
+            "run": "21",
+            "max-pulses": [0, 160, 2],
+            "common-mode": True,
             "karabo-id-control": "MID_EXP_AGIPD1M1",
             "karabo-id": "MID_DET_AGIPD1M-1",
             "slurm-mem": "750",
+            "sequences": "0,1",
             "ctrl-source-template": "{}/MDL/FPGA_COMP",
+            "sequences-per-node": 1,
         },
         "reference-folder": "{}/{}/{}",
     },
-    "HED_DET_AGIPD500K2G-CORRECT-ADAPTIVE": {
+    "MID_DET_AGIPD1M-1-CORRECT-SELECT_CELLS_NOCM": {
         "det_type": "AGIPD",
         "cal_type": "CORRECT",
         "config": {
             "blc-stripes": True,
             "rel-gain": True,
             "out-folder": "{}/{}/{}",
-            "in-folder": "/gpfs/exfel/exp/HED/202131/p900228/raw",
-            "run": "29",
-            "karabo-id-control": "HED_EXP_AGIPD500K2G",
-            "karabo-id": "HED_DET_AGIPD500K2G",
+            "in-folder": "/gpfs/exfel/exp/MID/202121/p002929/raw",
+            "run": "21",
+            "max-pulses": [0, 160, 2],
+            "no-common-mode": True,
+            "karabo-id-control": "MID_EXP_AGIPD1M1",
+            "karabo-id": "MID_DET_AGIPD1M-1",
             "slurm-mem": "750",
+            "sequences": "0,1",
             "ctrl-source-template": "{}/MDL/FPGA_COMP",
         },
         "reference-folder": "{}/{}/{}",
     },
-    "HED_DET_AGIPD500K2G-DARK-ADAPTIVE": {
+    "MID_DET_AGIPD1M-1-DARK-ADAPTIVE": {
         "det_type": "AGIPD",
         "cal_type": "DARK",
         "config": {
             "out-folder": "{}/{}/{}",
-            "in-folder": "/gpfs/exfel/exp/HED/202131/p900228/raw",
-            "run-high": "25",
-            "run-med": "26",
-            "run-low": "27",
-            "karabo-id-control": "HED_EXP_AGIPD500K2G",
-            "karabo-id": "HED_DET_AGIPD500K2G",
+            "in-folder": "/gpfs/exfel/exp/MID/202121/p002929/raw",
+            "run-high": "4",
+            "run-med": "5",
+            "run-low": "6",
+            "karabo-id-control": "MID_EXP_AGIPD1M1",
+            "karabo-id": "MID_DET_AGIPD1M-1",
             "slurm-mem": "750",
             "ctrl-source-template": "{}/MDL/FPGA_COMP",
         },
         "reference-folder": "{}/{}/{}",
     },
-    "MID_DET_AGIPD1M-1-CORRECT-SELECT_CELLS_CM": {
+    "HED_DET_AGIPD500K2G-CORRECT-ADAPTIVE": {
         "det_type": "AGIPD",
         "cal_type": "CORRECT",
         "config": {
             "blc-stripes": True,
             "rel-gain": True,
             "out-folder": "{}/{}/{}",
-            "in-folder": "/gpfs/exfel/exp/MID/202121/p002929/raw",
-            "run": "21",
-            "max-pulses": [0, 160, 2],
-            "common-mode": True,
-            "karabo-id-control": "MID_EXP_AGIPD1M1",
-            "karabo-id": "MID_DET_AGIPD1M-1",
+            "in-folder": "/gpfs/exfel/exp/HED/202131/p900228/raw",
+            "run": "29",
+            "karabo-id-control": "HED_EXP_AGIPD500K2G",
+            "karabo-id": "HED_DET_AGIPD500K2G",
             "slurm-mem": "750",
+            "sequences": "0,1",
             "ctrl-source-template": "{}/MDL/FPGA_COMP",
-            "sequences-per-node": 1,
         },
         "reference-folder": "{}/{}/{}",
     },
-    "MID_DET_AGIPD1M-1-CORRECT-SELECT_CELLS_NOCM": {
+    "HED_DET_AGIPD500K2G-DARK-ADAPTIVE": {
         "det_type": "AGIPD",
-        "cal_type": "CORRECT",
+        "cal_type": "DARK",
         "config": {
-            "blc-stripes": True,
-            "rel-gain": True,
             "out-folder": "{}/{}/{}",
-            "in-folder": "/gpfs/exfel/exp/MID/202121/p002929/raw",
-            "run": "21",
-            "max-pulses": [0, 160, 2],
-            "no-common-mode": True,
-            "karabo-id-control": "MID_EXP_AGIPD1M1",
-            "karabo-id": "MID_DET_AGIPD1M-1",
+            "in-folder": "/gpfs/exfel/exp/HED/202131/p900228/raw",
+            "run-high": "25",
+            "run-med": "26",
+            "run-low": "27",
+            "karabo-id-control": "HED_EXP_AGIPD500K2G",
+            "karabo-id": "HED_DET_AGIPD500K2G",
             "slurm-mem": "750",
             "ctrl-source-template": "{}/MDL/FPGA_COMP",
         },
@@ -188,6 +192,7 @@ automated_test_config = {
             "out-folder": "{}/{}/{}",
             "in-folder": "/gpfs/exfel/exp/FXE/202101/p002478/raw/",
             "run": "52",
+            "sequences": "0,1,2,50",
             "karabo-id-control": "",
             "karabo-id": "FXE_XAD_JF500K",
             "karabo-da": "JNGFR03",
@@ -216,6 +221,7 @@ automated_test_config = {
             "out-folder": "{}/{}/{}",
             "in-folder": "/gpfs/exfel/exp/FXE/202131/p900226/raw",
             "run": "106",
+            "sequences": "0,1,7",
             "karabo-id-control": "",
             "karabo-id": "FXE_XAD_JF1M",
             "karabo-da": ["JNGFR01", "JNGFR02"],
@@ -259,6 +265,7 @@ automated_test_config = {
             "out-folder": "{}/{}/{}",
             "in-folder": "/gpfs/exfel/exp/FXE/202101/p002478/raw/",
             "run": "52",
+            "sequences": "1,30,49",
             "karabo-id-control": "",
             "karabo-id": "FXE_XAD_JF1M",
             "karabo-da": ["JNGFR01", "JNGFR02"],
@@ -338,6 +345,7 @@ automated_test_config = {
             "out-folder": "{}/{}/{}",
             "in-folder": "/gpfs/exfel/exp/HED/202102/p002656/raw",
             "run": "230",
+            "sequences": "0,2,4",
             "karabo-id": "HED_IA1_JF500K1",
             "karabo-da": "JNGFR01",
         },
@@ -350,6 +358,7 @@ automated_test_config = {
             "out-folder": "{}/{}/{}",
             "in-folder": "/gpfs/exfel/exp/HED/202102/p002656/raw",
             "run": "230",
+            "sequences": "0,1,4",
             "karabo-id": "HED_IA1_JF500K2",
             "karabo-da": "JNGFR02",
         },
@@ -362,6 +371,7 @@ automated_test_config = {
             "out-folder": "{}/{}/{}",
             "in-folder": "/gpfs/exfel/exp/HED/202102/p002656/raw",
             "run": "230",
+            "sequences": "1,2,3",
             "karabo-id": "HED_IA1_JF500K3",
             "karabo-da": "JNGFR03",
         },
@@ -434,6 +444,7 @@ automated_test_config = {
             "run": "230",
             "karabo-id": "HED_IA1_EPX100-1",
             "karabo-da": "EPIX01",
+            "sequences": "1,2,3",
             "fix-temperature": 290,
         },
         "reference-folder": "{}/{}/{}",
@@ -494,6 +505,7 @@ automated_test_config = {
             "out-folder": "{}/{}/{}",
             "in-folder": "/gpfs/exfel/exp/FXE/202131/p900226/raw",
             "run": "145",
+            "sequences": "0",
             "karabo-id": "FXE_DET_LPD1M-1",
             "slurm-mem": "750",
         },
@@ -563,6 +575,7 @@ automated_test_config = {
             "in-folder": "/gpfs/exfel/exp/SPB/202321/p004577/raw",
             "karabo-da": "GH200",
             "run": "98",
+            "sequences":"0,3,6",
             "karabo-id": "SPB_50UM_GH2",
         },
         "reference-folder": "{}/{}/{}",
diff --git a/tests/test_reference_runs/test_pre_deployment.py b/tests/test_reference_runs/test_pre_deployment.py
index 929ca5897..cbc9622f7 100644
--- a/tests/test_reference_runs/test_pre_deployment.py
+++ b/tests/test_reference_runs/test_pre_deployment.py
@@ -15,9 +15,12 @@ import pytest
 import numpy as np
 
 import xfel_calibrate.calibrate as calibrate
-from cal_tools import h5_copy_except
-
+import logging
+
 from .callab_tests import automated_test_config
+
+
+LOGGER = logging.getLogger(__name__)
 
 
 def file_md5(
@@ -42,22 +45,78 @@ def file_md5(
     return md5.digest()
 
 
-def find_differences(file1: pathlib.Path, file2: pathlib.Path, files_mapping):
-    """Find difference in groups, datasest, and objects
-    between two h5files.
-    Args:
-        file1: first h5 file.
-        file2: second h5 file.
-    """
-    def collect_attrs(groups, datasets, objects, name, node):
-        """Collect h5 attrs in groups, datasets, and objects lists."""
+def collect_attrs(groups, datasets, objects, exclude_attrs, name, node):
+    """Collect h5 attrs in groups, datasets, and objects lists."""
+    if name not in exclude_attrs:
         if isinstance(node, h5py.Group):
-            groups.append(node.name)
+            groups.append(name)
         elif isinstance(node, h5py.Dataset):
             if node.dtype == 'object':
-                objects.append(node.name)
+                objects.append(name)
             else:
-                datasets.append(node.name)
+                datasets.append(name)
+
+
+def compare_datasets(
+    file1,
+    file2,
+    datasets: list
+):
+    """Compare the values of datasets in two h5 files."""
+    h5_diff = []
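+    # NaNs compare as equal; shape mismatches raise ValueError, which is
+    # logged and the dataset counted as a difference below.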
+    for d in datasets:
+        try:
+            if not np.allclose(file1[d][()], file2[d][()], equal_nan=True):
+                h5_diff.append(d)
+        except ValueError as e:
+            LOGGER.error(f"ValueError: {e}, {d}")
+            h5_diff.append(d)
+        except AttributeError as e:
+            LOGGER.error(f"AttributeError: {e}, {d}")
+            h5_diff.append(d)
+    return h5_diff
+
+
+def compare_objects(
+    file1,
+    file2,
+    objects: list
+):
+    """Compare the objects in two h5 files."""
+    h5_diff = []
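+    # Object datasets hold variable-length strings or bytes: scalar bytes
+    # are compared after UTF-8 decoding, everything else by value.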
+    for d in objects:
+        try:
+            if isinstance(file1[d][()], bytes):
+                if (
+                    file1[d][()].decode('utf-8') != file2[d][()].decode('utf-8')
+                ):
+                    h5_diff.append(d)
+            elif (
+                file1[d][()].dtype != file2[d][()].dtype or
+                not np.array_equal(file1[d][()], file2[d][()])
+            ):  # pnccd files have only a list of bytes
+                h5_diff.append(d)
+        except ValueError as e:
+            LOGGER.error(f"ValueError: {e}, {d}")
+            h5_diff.append(d)
+        except AttributeError as e:
+            LOGGER.error(f"AttributeError: {e}, {d}, "
+                         f"{file1[d][()].decode('utf-8')}")
+            h5_diff.append(d)
+    return h5_diff
+
+
+def find_differences(
+    test_file,
+    reference_file,
+    exclude_attrs,
+):
+    """
+    Find difference in groups, datasets, and objects between two h5files.
+    Args:
+        file1: first h5 file.
+        file2: second h5 file.
+    """
 
     groups_f1 = []
     datasets_f1 = []
@@ -66,49 +125,42 @@ def find_differences(file1: pathlib.Path, file2: pathlib.Path, files_mapping):
     groups_f2 = []
     datasets_f2 = []
     objects_f2 = []
+
+    with h5py.File(test_file, 'r') as file1, \
+            h5py.File(reference_file, 'r') as file2:
+
+        # Fill the groups, datasets, and objects lists to compare the
+        # contents of both h5 files.
+        file1.visititems(partial(
+            collect_attrs, groups_f1, datasets_f1, objects_f1, exclude_attrs))
+        file2.visititems(partial(
+            collect_attrs, groups_f2, datasets_f2, objects_f2, exclude_attrs))
 
-    # Fill groups, datasets, and objects list to compare both h5files' attrs.
-    with h5py.File(file1, 'r') as f1, h5py.File(file2, 'r') as f2:
-        _collect_attrs = partial(collect_attrs, groups_f1, datasets_f1, objects_f1)
-        f1.visititems(_collect_attrs)
-        _collect_attrs = partial(collect_attrs, groups_f2, datasets_f2, objects_f2)
-        f2.visititems(_collect_attrs)
+        start_time = time.perf_counter()
         # Compare groups, datasets, and objects to have the same content.
-        assert set(groups_f1) == set(groups_f2), f"{files_mapping[file1]} and {files_mapping[file2]} consists of different groups."  # noqa
-        assert set(datasets_f1) == set(datasets_f2), f"{files_mapping[file1]} and {files_mapping[file2]} consists of different datasets."  # noqa
-        assert set(objects_f1) == set(objects_f2), f"{files_mapping[file1]} and {files_mapping[file2]} consists of different datasets."  # noqa
-
-        # Compare every dataset value.
-        h5_diff = []
-        for d in datasets_f1:
-            try:
-                if not np.allclose(f1[d][()], f2[d][()], equal_nan=True):
-                    h5_diff.append(d)
-            except ValueError as e:
-                print(f"ValueError: {e}, {d}")
-                h5_diff.append(d)
-            except AttributeError as e:
-                print(f"AttributeError: {e}, {d}")
-                h5_diff.append(d)
-        for d in objects_f1:
-            try:
-                if isinstance(f1[d][()], bytes):
-                    if f1[d][()].decode('utf-8') != f2[d][()].decode('utf-8'):
-                        h5_diff.append(d)
-                elif f1[d][()].dtype != f1[d][()].dtype and not f1[d][()] != f2[d][()]:  # pnccd files has only list of bytes
-                    h5_diff.append(d)
-            except ValueError as e:
-                print(f"ValueError: {e}, {d}")
-            except AttributeError as e:
-                print(f"AttributeError: {e}, {d}, {f1[d][()].decode('utf-8')}")
-                h5_diff.append(d)
-        assert not h5_diff, f"{[d for d in h5_diff]} datasets contain different values for {files_mapping[file1]} and {files_mapping[file2]}"  # noqa
+        assert set(groups_f1) == set(groups_f2), f"{test_file} and {reference_file} consist of different groups."  # noqa
+        assert set(datasets_f1) == set(datasets_f2), f"{test_file} and {reference_file} consist of different datasets."  # noqa
+        assert set(objects_f1) == set(objects_f2), f"{test_file} and {reference_file} consist of different objects."  # noqa
+        duration = time.perf_counter() - start_time
+        LOGGER.info("Elapsed time comparing groups, "
+                    f"datasets, and objects: {duration} seconds")
+        LOGGER.info("Groups, datasets, and objects have the same content.")
+
+        # Compare datasets and objects.
+        start_time = time.perf_counter()
+        h5_diff_datasets = compare_datasets(file1, file2, datasets_f1)
+        duration = time.perf_counter() - start_time
+        LOGGER.info(f"Elapsed time comparing datasets: {duration} seconds")
+        start_time = time.perf_counter()
+        h5_diff_objects = compare_objects(file1, file2, objects_f1)
+        LOGGER.info(f"Elapsed time comparing objects: {duration} seconds")
+
+        assert not h5_diff_datasets, f"{h5_diff_datasets} datasets contain different values for {test_file} and {reference_file}"  # noqa
+        LOGGER.info("Datasets are validated.")
+        assert not h5_diff_objects, f"{h5_diff_objects} objects contain different values for {test_file} and {reference_file}"  # noqa
+        LOGGER.info("Objects are validated.")
+
 
 def validate_files(
     ref_folder: pathlib.PosixPath,
     out_folder: pathlib.PosixPath,
     exclude_attrs: list,
-    find_difference: bool,
     test_file: pathlib.PosixPath,
 ) -> Tuple[bool, pathlib.PosixPath]:
     """Validate file similarities. Create temporary files to exclude
@@ -122,47 +174,64 @@ def validate_files(
         test_file: The output file to be validated.
         exclude_attrs: A list of datasets, groups to exclude
           from validated files.
-        find_difference: A bool to go on and find different attributes
-          if both files are not identical.
     Returns:
         result: validation result for metadata.
         test_file: The validated file.
     """
-    out_tf = tempfile.NamedTemporaryFile(
+    start_validating = time.perf_counter()
+
+    def exclude_sources(source_file, dest, excluded_sources):
+        # Open the source file in read-only mode
+        with h5py.File(source_file, 'r') as source:
+
+            # Recursively visit all objects in the source file
+            def visit_func(name, obj):
+                # Check if the object should be excluded
+                if name in excluded_sources:
+                    return
+
+                # Check if the object is a dataset
+                if isinstance(obj, h5py.Dataset):
+                    # Create a new dataset in the destination file and copy the data
+                    dest.create_dataset(name, data=obj[()])
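+                    # Only dataset values are copied: HDF5 attributes and
+                    # groups without datasets are dropped, which is fine
+                    # for a value-level MD5 comparison.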
+
+            # Visit all objects in the source file and copy them to the destination file
+            source.visititems(visit_func)
+
+    with tempfile.NamedTemporaryFile(
         dir=out_folder,
         suffix=".tmp",
         prefix="cal_",
         delete=True,
-    )
-    ref_tf = tempfile.NamedTemporaryFile(
-        dir=out_folder,
-        suffix=".tmp",
-        prefix="cal_",
-        delete=True,
-    )
-    hp1 = h5py.File(out_tf.name, 'w')
-    hp2 = h5py.File(ref_tf.name, 'w')
-    files_mapping = {
-        out_tf.name: test_file,
-        ref_tf.name: ref_folder / test_file.name,
-        }
-
-    # Copy h5 files for validation and exclude selected attrs.
-    for dest, f in zip([hp1, hp2], [test_file, ref_folder / test_file.name]):
-        with h5py.File(f, 'r') as sfile:
-            h5_copy_except.h5_copy_except_paths(
-                sfile, dest, exclude_attrs,
-            )
-    hp1.close()
-    hp2.close()
-    result = file_md5(out_tf.name) == file_md5(ref_tf.name)
-    if not result and find_difference:
-        find_differences(out_tf.name, ref_tf.name, files_mapping)
-        print("No differences found. Most probably there are NANs in the data.")
-        result = True
-    out_tf.close()
-    ref_tf.close()
-
+        ) as out_tf, tempfile.NamedTemporaryFile(
+            dir=out_folder,
+            suffix=".tmp",
+            prefix="cal_",
+            delete=True,
+            ) as ref_tf:
+
+        # Open temporary HDF5 files to hold the filtered copies.
+        with h5py.File(out_tf.name, 'a') as hp1, \
+                h5py.File(ref_tf.name, 'a') as hp2:
+
+            start_time = time.perf_counter()
+            # Copy h5 files for validation and exclude selected attrs.
+            exclude_sources(test_file, hp1, exclude_attrs)
+
+            duration = time.perf_counter() - start_time
+            LOGGER.info(f"Elapsed time copying {test_file}: {duration} seconds")
+
+            start_time = time.perf_counter()
+            exclude_sources(ref_folder / test_file.name, hp2, exclude_attrs)
+
+            duration = time.perf_counter() - start_time
+            LOGGER.info(f"Elapsed time copying {ref_folder / test_file.name}: "
+                  f"{duration} seconds")
+
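+            # Fast path: byte-level MD5 comparison of the filtered copies;
+            # non-valid files get a detailed diff later via find_differences.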
+            start_time = time.perf_counter()
+            result = file_md5(out_tf.name) == file_md5(ref_tf.name)
+            LOGGER.info(f"MD5 validation for {test_file}: {duration} seconds")
+    duration = time.perf_counter() - start_validating
     return result, test_file
 
 
@@ -259,12 +328,13 @@ def test_xfel_calibrate(
         h5files = list(out_folder.glob("*.h5"))
         expected_h5files = list(reference_folder.glob("*.h5"))
         assert len(h5files) == len(expected_h5files), f"{test_key} failure, number of files are not as expected."  # noqa
-        print(f"{test_key}'s calibration h5files numbers are as expected.")
+        LOGGER.info(f"{test_key}'s calibration h5files numbers are as expected.")
+
         non_valid_files = []
         # Hard coded datasets to exclude from numerical validation.
         # These datasets are known to be updated every time.
         if cal_type.lower() == "correct":
-            exclude_attrs = ["METADATA/creationDate", "METADATA/updateDate", "INDEX/origin", "METADATA/updateDate", "CONTROL/", "/RUN", "/INDEX/HED_IA1_EPX100-1/DET/RECEIVER", "/INDEX/HED_IA1_EPX100-1/DET/CONTROL"]
+            exclude_attrs = ["METADATA/creationDate", "METADATA/updateDate"]
         else:
             exclude_attrs = ["report"]
 
@@ -274,20 +344,29 @@ def test_xfel_calibrate(
             reference_folder,
             out_folder,
             exclude_attrs,
-            find_difference,
         )
-        processes = 16
-        if "LPD" in str(h5files[0]):
-            processes = 4
-        with multiprocessing.Pool(processes=processes) as pool:
-            result = pool.map(_validate_files, h5files)
+        with multiprocessing.pool.ThreadPool(processes=8) as pool:
+            result = pool.map(_validate_files, h5files)
 
         # Collect non-valid files, if any, to display them in the error message.
         for valid, file in result:
             if not valid:
                 non_valid_files.append(file)
-        assert len(non_valid_files) == 0, f"{test_key} failure, while validating metadata for {non_valid_files}"  # noqa
-        print(f"{test_key}'s calibration h5files are validated successfully.")
+
+        if len(non_valid_files) > 0:
+            if find_difference:
+                LOGGER.error(f"Found non-valid files: {non_valid_files}. "
+                             f"Checking differences for {non_valid_files[0]}")
+                # Detailed diffing is expensive, so only the first
+                # non-valid file is inspected.
+                find_differences(
+                    non_valid_files[0],
+                    reference_folder / non_valid_files[0].name,
+                    exclude_attrs,
+                )
+                LOGGER.info(f"No difference found for {non_valid_files[0]}")
+            else:
+                assert len(non_valid_files) == 0, f"{test_key} failure, while validating metadata for {non_valid_files}"  # noqa
+        LOGGER.info(f"{test_key}'s calibration h5files"
+                    " are validated successfully.")
 
     if only_validate:
         validate_files_now()
@@ -328,12 +407,13 @@ def test_xfel_calibrate(
     # 1st check that all jobs were COMPLETED without errors.
     states = res.stdout.decode().split("\n")[2:-1]
     assert all(s.strip() == "COMPLETED" for s in states), f"{test_key} failure, calibration jobs were not completed. {jobids}: {states}"  # noqa
-    print(f"{test_key}'s jobs were COMPLETED")
+    LOGGER.info(f"{test_key}'s jobs were COMPLETED")
     time.sleep(1.0)
 
     # 2nd check for report availability.
     report_file = out_folder / f"{report_name}.pdf"
     assert report_file.exists(), f"{test_key} failure, report doesn't exists."
+    LOGGER.info("Report found.")
 
     # Stop tests at this point, if desired.
     if not skip_numerical_validation:
-- 
GitLab