diff --git a/notebooks/AGIPD/Characterize_AGIPD_Gain_Darks_NBC.ipynb b/notebooks/AGIPD/Characterize_AGIPD_Gain_Darks_NBC.ipynb
index b711f75d6302ab066cf9f3264b0506a07c1c0216..b1fc068c4fa35a18897f405aa3bb97fc3324d8a1 100644
--- a/notebooks/AGIPD/Characterize_AGIPD_Gain_Darks_NBC.ipynb
+++ b/notebooks/AGIPD/Characterize_AGIPD_Gain_Darks_NBC.ipynb
@@ -91,6 +91,10 @@
 "import yaml\n",
 "\n",
 "matplotlib.use('agg')\n",
+ "import itertools\n",
+ "import multiprocessing\n",
+ "\n",
+ "import iCalibrationDB\n",
 "import matplotlib.pyplot as plt\n",
 "from IPython.display import Latex, Markdown, display\n",
 "\n",
@@ -124,8 +128,7 @@
 " run_prop_seq_from_path,\n",
 " save_const_to_h5,\n",
 " send_to_db,\n",
- ")\n",
- "import iCalibrationDB"
+ ")"
 ]
 },
 {
@@ -619,10 +622,7 @@
 " qm_dict[qm] = {\n",
 " \"karabo_da\": module_da,\n",
 " \"db_module\": module_pdu\n",
- " }\n",
- " # saving mapping information for summary notebook\n",
- " with open(f\"{out_folder}/module_mapping_{qm}.yml\", \"w\") as fd:\n",
- " yaml.safe_dump({\"module_mapping\": {qm: module_pdu}}, fd)"
+ " }"
 ]
 },
 {
@@ -681,19 +681,25 @@
 " condition=condition,\n",
 " empty_constant=None,\n",
 " cal_db_interface=cal_db_interface,\n",
- " #creation_time=creation_time,\n",
 " snapshot_at=timestamp_before_processing,\n",
- " strategy=\"pdu_prior_in_time\",\n",
 " verbosity=2,\n",
 " timeout=cal_db_timeout\n",
 " )\n",
 "\n",
 " if mdata is None or data is None:\n",
 " timestamp = \"Not found\"\n",
+ " filepath = None\n",
+ " h5path = None\n",
 " else:\n",
 " timestamp = mdata.calibration_constant_version.begin_at.isoformat()\n",
- " \n",
- " return data, timestamp\n",
+ " filepath = os.path.join(\n",
+ " mdata.calibration_constant_version.hdf5path,\n",
+ " mdata.calibration_constant_version.filename\n",
+ " )\n",
+ " h5path = mdata.calibration_constant_version.h5path\n",
+ "\n",
+ " return data, timestamp, filepath, h5path\n",
+ "\n",
 "\n",
 "old_retrieval_pool = multiprocessing.Pool()\n",
 "old_retrieval_res = old_retrieval_pool.starmap_async(\n",
@@ -898,9 +904,13 @@
 "old_mdata = {}\n",
 "old_retrieval_res.wait()\n",
 "\n",
- "for (qm, const), (data, timestamp) in zip(qm_x_const, old_retrieval_res.get()):\n",
+ "for (qm, const), (data, timestamp, filepath, h5path) in zip(qm_x_const, old_retrieval_res.get()):\n",
 " old_const.setdefault(qm, {})[const] = data\n",
- " old_mdata.setdefault(qm, {})[const] = timestamp"
+ " old_mdata.setdefault(qm, {})[const] = {\n",
+ " \"timestamp\": timestamp,\n",
+ " \"filepath\": filepath,\n",
+ " \"h5path\": h5path\n",
+ " }"
 ]
 },
 {
@@ -913,7 +923,17 @@
 "for qm, consts in old_mdata.items():\n",
 " display(Markdown(f\"- {qm}\"))\n",
 " for const in consts:\n",
- " display(Markdown(f\" - {const} at {consts[const]}\"))"
+ " display(Markdown(f\" - {const} at {consts[const]['timestamp']}\"))\n",
+ " # saving locations of old constants for summary notebook\n",
+ " with open(f\"{out_folder}/module_metadata_{qm}.yml\", \"w\") as fd:\n",
+ " yaml.safe_dump(\n",
+ " {\n",
+ " \"module\": qm,\n",
+ " \"pdu\": qm_dict[qm][\"db_module\"],\n",
+ " \"old-constants\": old_mdata[qm]\n",
+ " },\n",
+ " fd,\n",
+ " )"
 ]
 },
 {
@@ -1035,7 +1055,7 @@
 " else:\n",
 " line.append(\"-\")\n",
 " return line\n",
- " \n",
+ "\n",
 "\n",
 " with multiprocessing.pool.ThreadPool(processes=multiprocessing.cpu_count() // len(constants_x_qms)) as pool:\n",
 " rows = pool.map(compute_row, range(len(f_list)))\n",
@@ -1070,9 +1090,9 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
- "version": "3.6.7"
+ "version": "3.6.8"
 }
 },
 "nbformat": 4,
 "nbformat_minor": 4
-}
\ No newline at end of file
+}
diff --git a/notebooks/DSSC/Characterize_DSSC_Darks_NBC.ipynb b/notebooks/DSSC/Characterize_DSSC_Darks_NBC.ipynb
index 9492f7d9bf16e73fc0b4b9ebef560bca0ad962fa..e1ef8825a5f63ee1d6da4015ae6c9fb058351f34 100644
--- a/notebooks/DSSC/Characterize_DSSC_Darks_NBC.ipynb
+++ b/notebooks/DSSC/Characterize_DSSC_Darks_NBC.ipynb
@@ -85,6 +85,7 @@
 "import numpy as np\n",
 "import tabulate\n",
 "import yaml\n",
+ "from iCalibrationDB import Conditions, Constants, Detectors, Versions\n",
 "\n",
 "from cal_tools.dssclib import get_dssc_ctrl_data, get_pulseid_checksum\n",
 "from cal_tools.enums import BadPixels\n",
@@ -107,7 +108,6 @@
 " save_const_to_h5,\n",
 " send_to_db,\n",
 ")\n",
- "from iCalibrationDB import Conditions, Constants, Detectors, Versions\n",
 "\n",
 "view = Client(profile=cluster_profile)[:]\n",
 "view.use_dill()\n",
@@ -390,6 +390,8 @@
 "\n",
 "print('Retrieve pre-existing constants for comparison.')\n",
 "for qm in offset_g.keys():\n",
+ " old_const[qm] = {}\n",
+ " old_mdata[qm] = {}\n",
 " qm_db = qm_dict[qm]\n",
 " karabo_da = qm_db[\"karabo_da\"]\n",
 " for const in clist:\n",
@@ -417,21 +419,28 @@
 " cal_db_interface, creation_time=creation_time,\n",
 " verbosity=2, timeout=cal_db_timeout)\n",
 "\n",
- " old_const[const] = data\n",
- "\n",
- " if mdata is not None and data is not None:\n",
- " time = mdata.calibration_constant_version.begin_at\n",
- " old_mdata[const] = time.isoformat()\n",
- " os.makedirs(f'{out_folder}/old/', exist_ok=True)\n",
- " save_const_to_h5(qm_db[\"db_module\"], karabo_id,\n",
- " getattr(Constants.DSSC, const)(),\n",
- " condition, data, file_loc, report,\n",
- " creation_time,\n",
- " f'{out_folder}/old/')\n",
+ " old_const[qm][const] = data\n",
+ "\n",
+ " if mdata is None or data is None:\n",
+ " old_mdata[qm][const] = {\n",
+ " \"timestamp\": \"Not found\",\n",
+ " \"filepath\": None,\n",
+ " \"h5path\": None\n",
+ " }\n",
 " else:\n",
- " old_mdata[const] = \"Not found\"\n",
- " with open(f\"{out_folder}/module_mapping_{qm}.yml\",\"w\") as fd:\n",
- " yaml.safe_dump({\"module_mapping\": {qm: qm_db[\"db_module\"]}}, fd)"
+ " old_mdata[qm][const] = {\n",
+ " \"timestamp\": mdata.calibration_constant_version.begin_at.isoformat(),\n",
+ " \"filepath\": os.path.join(\n",
+ " mdata.calibration_constant_version.hdf5path,\n",
+ " mdata.calibration_constant_version.filename,\n",
+ " ),\n",
+ " \"h5path\": mdata.calibration_constant_version.h5path,\n",
+ " }\n",
+ " with open(f\"{out_folder}/module_metadata_{qm}.yml\", \"w\") as fd:\n",
+ " yaml.safe_dump(\n",
+ " {\"module\": qm, \"pdu\": qm_db[\"db_module\"], \"old-constants\": old_mdata[qm]},\n",
+ " fd,\n",
+ " )"
 ]
 },
 {
@@ -621,9 +630,12 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "display(Markdown('The following pre-existing constants are used for comparison: \\n'))\n",
- "for key in old_mdata:\n",
- " display(Markdown('**{}** at {}'.format(key, old_mdata[key])))"
+ "time_summary = []\n",
+ "for qm, qm_data in old_mdata.items():\n",
+ " time_summary.append(f\"The following pre-existing constants are used for comparison for module {qm}:\")\n",
+ " for const, const_data in qm_data.items():\n",
+ " time_summary.append(f\"- {const} created at {const_data['timestamp']}\")\n",
+ "display(Markdown(\"\\n\".join(time_summary)))"
 ]
 },
 {
@@ -643,8 +655,8 @@
 "\n",
 " data = np.copy(res[qm][const])\n",
 "\n",
- " if old_const[const] is not None:\n",
- " dataold = np.copy(old_const[const])\n",
+ " if old_const[qm][const] is not None:\n",
+ " dataold = np.copy(old_const[qm][const])\n",
 "\n",
 " f_list = [np.nanmedian, np.nanmean, np.nanstd, np.nanmin, np.nanmax]\n",
 " n_list = ['Median', 'Mean', 'Std', 'Min', 'Max']\n",
@@ -652,7 +664,7 @@
 " for i, f in enumerate(f_list):\n",
 " line = [n_list[i]]\n",
 " line.append('{:6.1f}'.format(f(data[...,gain])))\n",
- " if old_const[const] is not None:\n",
+ " if old_const[qm][const] is not None:\n",
 " line.append('{:6.1f}'.format(f(dataold[...,gain])))\n",
 " else:\n",
 " line.append('-')\n",
diff --git a/notebooks/LPD/LPDChar_Darks_NBC.ipynb b/notebooks/LPD/LPDChar_Darks_NBC.ipynb
index d6ff625f12155701a5c84355fdc2530560a828c3..d912c7c0a0c89040e29c0a235e74ab974c804c06 100644
--- a/notebooks/LPD/LPDChar_Darks_NBC.ipynb
+++ b/notebooks/LPD/LPDChar_Darks_NBC.ipynb
@@ -89,6 +89,9 @@
 "import numpy as np\n",
 "import tabulate\n",
 "import yaml\n",
+ "from iCalibrationDB import Conditions, Constants, Detectors, Versions\n",
+ "from XFELDetAna.plotting.heatmap import heatmapPlot\n",
+ "from XFELDetAna.plotting.simpleplot import simplePlot\n",
 "\n",
 "from cal_tools.enums import BadPixels\n",
 "from cal_tools.plotting import (\n",
@@ -105,14 +108,12 @@
 " get_random_db_interface,\n",
 " get_report,\n",
 " map_gain_stages,\n",
+ " module_index_to_qm,\n",
 " parse_runs,\n",
 " run_prop_seq_from_path,\n",
 " save_const_to_h5,\n",
 " send_to_db,\n",
- ")\n",
- "from iCalibrationDB import Conditions, Constants, Detectors, Versions\n",
- "from XFELDetAna.plotting.heatmap import heatmapPlot\n",
- "from XFELDetAna.plotting.simpleplot import simplePlot"
+ ")"
 ]
 },
 {
@@ -303,7 +304,7 @@
 " ntest_g[cap] = OrderedDict()\n",
 "\n",
 " for i in modules:\n",
- " qm = \"Q{}M{}\".format(i//4 + 1, i % 4 + 1)\n",
+ " qm = module_index_to_qm(i)\n",
 " if qm in mapped_files and not mapped_files[qm].empty():\n",
 " fname_in = mapped_files[qm].get()\n",
 " print(\"Process file: \", fname_in)\n",
@@ -323,7 +324,7 @@
 "\n",
 "for ir, r in enumerate(results):\n",
 " offset, noise, i, gg, cap, bp, data, normal = r\n",
- " qm = \"Q{}M{}\".format(i//4 + 1, i % 4 + 1)\n",
+ " qm = module_index_to_qm(i)\n",
 " if qm not in offset_g[cap]:\n",
 " offset_g[cap][qm] = np.zeros(\n",
 " (offset.shape[0], offset.shape[1], offset.shape[2], 3))\n",
@@ -366,7 +367,7 @@
 "# Create the modules dict of karabo_das and PDUs\n",
 "qm_dict = OrderedDict()\n",
 "for i, k_da in zip(modules, karabo_da):\n",
- " qm = f\"Q{i//4+1}M{i%4+1}\"\n",
+ " qm = module_index_to_qm(i)\n",
 " qm_dict[qm] = {\"karabo_da\": k_da,\n",
 " \"db_module\": \"\"}"
 ]
 },
 {
@@ -386,8 +387,11 @@
 "detinst = getattr(Detectors, dinstance)\n",
 "print('Retrieve pre-existing constants for comparison.')\n",
 "for cap in capacitor_settings:\n",
+ " old_const[cap] = {}\n",
+ " old_mdata[cap] = {}\n",
 " for qm in offset_g[cap].keys():\n",
- "\n",
+ " old_const[cap][qm] = {}\n",
+ " old_mdata[cap][qm] = {}\n",
 " qm_db = qm_dict[qm]\n",
 " karabo_da = qm_db[\"karabo_da\"]\n",
 "\n",
@@ -411,22 +415,34 @@
 " creation_time=creation_time,\n",
 " verbosity=2, timeout=cal_db_timeout)\n",
 "\n",
- " old_const[const] = data\n",
- "\n",
- " # TODO: Save old constant file paths (meta_only)\n",
- " # instead of retrieving the whole data\n",
- " if mdata is not None and data is not None:\n",
- " time = mdata.calibration_constant_version.begin_at\n",
- " old_mdata[const] = time.isoformat()\n",
- " os.makedirs('{}/old/'.format(out_folder), exist_ok=True)\n",
- " save_const_to_h5(qm_db[\"db_module\"], karabo_id,\n",
- " constant, condition, data,\n",
- " file_loc, report, creation_time,\n",
- " f'{out_folder}/old/')\n",
+ " old_const[cap][qm][const] = data\n",
+ "\n",
+ " if mdata is None or data is None:\n",
+ " old_mdata[cap][qm][const] = {\n",
+ " \"timestamp\": \"Not found\",\n",
+ " \"filepath\": None,\n",
+ " \"h5path\": None\n",
+ " }\n",
 " else:\n",
- " old_mdata[const] = \"Not found\"\n",
- " with open(f\"{out_folder}/module_mapping_{qm}.yml\",\"w\") as fd:\n",
- " yaml.safe_dump({\"module_mapping\": {qm: qm_db[\"db_module\"]}}, fd)"
+ " timestamp = mdata.calibration_constant_version.begin_at.isoformat()\n",
+ " filepath = os.path.join(\n",
+ " mdata.calibration_constant_version.hdf5path,\n",
+ " mdata.calibration_constant_version.filename\n",
+ " )\n",
+ " h5path = mdata.calibration_constant_version.h5path\n",
+ " old_mdata[cap][qm][const] = {\n",
+ " \"timestamp\": timestamp,\n",
+ " \"filepath\": filepath,\n",
+ " \"h5path\": h5path\n",
+ " }\n",
+ "\n",
+ " with open(f\"{out_folder}/module_metadata_{qm}.yml\",\"w\") as fd:\n",
+ " yaml.safe_dump(\n",
+ " {\n",
+ " \"module\": qm,\n",
+ " \"pdu\": qm_db[\"db_module\"],\n",
+ " \"old-constants\": old_mdata[cap][qm]\n",
+ " }, fd)"
 ]
 },
 {
@@ -439,7 +455,7 @@
 "for cap in capacitor_settings:\n",
 " res[cap] = OrderedDict()\n",
 " for i in modules:\n",
- " qm = \"Q{}M{}\".format(i//4+1, i % 4+1)\n",
+ " qm = module_index_to_qm(i)\n",
 "\n",
 " res[cap][qm] = {'Offset': offset_g[cap][qm],\n",
 " 'Noise': noise_g[cap][qm],\n",
@@ -499,11 +515,12 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "mnames = []\n",
- "for i in modules:\n",
- " qm = f\"Q{modules[0]//4+1}M{modules[0]%4+1}\"\n",
- " mnames.append(qm)\n",
- " show_processed_modules(dinstance=dinstance, constants=None, mnames=mnames, mode=\"position\")"
+ "show_processed_modules(\n",
+ " dinstance=dinstance,\n",
+ " constants=None,\n",
+ " mnames=[module_index_to_qm(i) for i in modules],\n",
+ " mode=\"position\"\n",
+ ")"
 ]
 },
 {
@@ -526,7 +543,7 @@
 "\n",
 "for cap in capacitor_settings:\n",
 " for i in modules:\n",
- " qm = \"Q{}M{}\".format(i//4+1, i % 4+1)\n",
+ " qm = module_index_to_qm(i)\n",
 " if np.count_nonzero(~np.isnan(data_g[cap][qm])) == 0:\n",
 " break\n",
 " for gain in range(3):\n",
@@ -616,7 +633,7 @@
 " print('Normality test was not requested. Flag `test_for_normality` False')\n",
 " break\n",
 " for i in modules:\n",
- " qm = \"Q{}M{}\".format(i//4+1, i%4+1)\n",
+ " qm = module_index_to_qm(i)\n",
 "\n",
 " data = np.copy(ntest_g[cap][qm][:,:,:,:])\n",
 " data[badpix_g[cap][qm][:,:,:,:]>0] = 1.01\n",
@@ -836,9 +853,14 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "display(Markdown('The following pre-existing constants are used for comparison: \\n'))\n",
- "for key in old_mdata:\n",
- " display(Markdown('**{}** at {}'.format(key, old_mdata[key])))"
+ "time_summary = []\n",
+ "for cap, cap_data in old_mdata.items():\n",
+ " time_summary.append(f\"The following pre-existing constants are used for comparison for capacitor setting **{cap}**:\")\n",
+ " for qm, qm_data in cap_data.items():\n",
+ " time_summary.append(f\"- Module {qm}\")\n",
+ " for const, const_data in qm_data.items():\n",
+ " time_summary.append(f\" - {const} created at {const_data['timestamp']}\")\n",
+ "display(Markdown(\"\\n\".join(time_summary)))"
 ]
 },
 {
@@ -884,16 +906,16 @@
 " x_ticklabels=np.arange(16)+1,\n",
 " x_ticks=np.arange(16)+0.5)\n",
 "\n",
- " if old_const[const] is not None:\n",
+ " if old_const[cap][qm][const] is not None:\n",
 " ax = fig.add_subplot(122)\n",
 "\n",
- " dataold = np.copy(old_const[const][:, :, :, gain])\n",
+ " dataold = np.copy(old_const[cap][qm][const][:, :, :, gain])\n",
 " \n",
 " label = '$\\Delta$ {}'.format(label)\n",
 "\n",
 " if const != 'BadPixelsDark':\n",
- " if old_const['BadPixelsDark'] is not None:\n",
- " dataold[old_const['BadPixelsDark'][:, :, :, gain] > 0] = np.nan\n",
+ " if old_const[cap][qm]['BadPixelsDark'] is not None:\n",
+ " dataold[old_const[cap][qm]['BadPixelsDark'][:, :, :, gain] > 0] = np.nan\n",
 " else:\n",
 " dataold[:] = np.nan\n",
 " else:\n",
@@ -1135,12 +1157,12 @@
 " for bit in bits:\n",
 " l_data.append(np.count_nonzero(badpix_g[cap][qm][:,:,:,gain].astype(np.uint32) & bit.value))\n",
 " \n",
- " if old_const['BadPixelsDark'] is not None:\n",
- " dataold = np.copy(old_const['BadPixelsDark'][:, :, :, gain])\n",
+ " if old_const[cap][qm]['BadPixelsDark'] is not None:\n",
+ " dataold = np.copy(old_const[cap][qm]['BadPixelsDark'][:, :, :, gain])\n",
 " datau32old = dataold.astype(np.uint32)\n",
 " l_data_old.append(len(datau32old[datau32old>0].flatten()))\n",
 " for bit in bits:\n",
- " l_data_old.append(np.count_nonzero(old_const['BadPixelsDark'][:, :, :, gain].astype(np.uint32) & bit.value))\n",
+ " l_data_old.append(np.count_nonzero(old_const[cap][qm]['BadPixelsDark'][:, :, :, gain].astype(np.uint32) & bit.value))\n",
 "\n",
 " l_data_name = ['All bad pixels', 'NOISE_OUT_OF_THRESHOLD', \n",
 " 'OFFSET_OUT_OF_THRESHOLD', 'OFFSET_NOISE_EVAL_ERROR']\n",
@@ -1151,7 +1173,7 @@
 " for i in range(len(l_data)):\n",
 " line = [f'{l_data_name[i]}, gain {gain_names[gain]}', l_threshold[i], l_data[i]]\n",
 " \n",
- " if old_const['BadPixelsDark'] is not None:\n",
+ " if old_const[cap][qm]['BadPixelsDark'] is not None:\n",
 " line += [l_data_old[i]]\n",
 " else:\n",
 " line += ['-']\n",
@@ -1191,9 +1213,9 @@
 " data = np.copy(res[cap][qm][const])\n",
 " data[res[cap][qm]['BadPixelsDark']>0] = np.nan\n",
 " \n",
- " if old_const[const] is not None and old_const['BadPixelsDark'] is not None :\n",
- " dataold = np.copy(old_const[const])\n",
- " dataold[old_const['BadPixelsDark']>0] = np.nan\n",
+ " if old_const[cap][qm][const] is not None and old_const[cap][qm]['BadPixelsDark'] is not None :\n",
+ " dataold = np.copy(old_const[cap][qm][const])\n",
+ " dataold[old_const[cap][qm]['BadPixelsDark']>0] = np.nan\n",
 "\n",
 " f_list = [np.nanmedian, np.nanmean, np.nanstd, np.nanmin, np.nanmax]\n",
 " n_list = ['Median', 'Mean', 'Std', 'Min', 'Max']\n",
@@ -1202,7 +1224,7 @@
 " line = [n_list[i]]\n",
 " for gain in range(3):\n",
 " line.append('{:6.1f}'.format(f(data[...,gain])))\n",
- " if old_const[const] is not None and old_const['BadPixelsDark'] is not None:\n",
+ " if old_const[cap][qm][const] is not None and old_const[cap][qm]['BadPixelsDark'] is not None:\n",
 " line.append('{:6.1f}'.format(f(dataold[...,gain])))\n",
 " else:\n",
 " line.append('-')\n",
diff --git a/notebooks/generic/overallmodules_Darks_Summary_NBC.ipynb b/notebooks/generic/overallmodules_Darks_Summary_NBC.ipynb
index 94dc039662f4685e87ebbcd34b78ead4b73ec5d3..9d53b5df751de53fae62183397c60424ef2309cf 100644
--- a/notebooks/generic/overallmodules_Darks_Summary_NBC.ipynb
+++ b/notebooks/generic/overallmodules_Darks_Summary_NBC.ipynb
@@ -31,6 +31,7 @@
 "warnings.filterwarnings('ignore')\n",
 "\n",
 "import glob\n",
+ "\n",
 "import h5py\n",
 "import matplotlib\n",
 "import numpy as np\n",
@@ -181,10 +182,13 @@
 "out_folder = Path(out_folder)\n",
 "metadata = CalibrationMetadata(out_folder)\n",
 "mod_mapping = metadata.setdefault(\"modules-mapping\", {})\n",
- "for fn in out_folder.glob(\"module_mapping_*.yml\"):\n",
+ "old_constant_metadata = {}\n",
+ "for fn in out_folder.glob(\"module_metadata_*.yml\"):\n",
 " with fn.open(\"r\") as fd:\n",
 " fdict = yaml.safe_load(fd)\n",
- " mod_mapping.update(fdict[\"module_mapping\"])\n",
+ " module = fdict[\"module\"]\n",
+ " mod_mapping[module] = fdict[\"pdu\"]\n",
+ " old_constant_metadata[module] = fdict[\"old-constants\"]\n",
 " fn.unlink()\n",
 "\n",
 "metadata.save()"
 ]
 },
 {
@@ -214,13 +218,16 @@
 " qm = module_index_to_qm(i)\n",
 " if not mod_mapping.get(qm):\n",
 " continue\n",
- " det_name = mod_mapping[qm]\n",
+ " mod_pdu = mod_mapping[qm]\n",
 " # loop over constants\n",
 " for const in ['Offset', 'Noise', 'ThresholdsDark', 'BadPixelsDark']:\n",
- " fpath = '{}/const_{}_{}.h5'.format(out_folder, const, det_name)\n",
- " oldfpath = '{}/old/const_{}_{}.h5'.format(out_folder, const, det_name)\n",
- " if not os.path.isfile(fpath):\n",
+ " # first load new constant\n",
+ " fpath = out_folder / f\"const_{const}_{mod_pdu}.h5\"\n",
+ " \n",
+ " if not fpath.exists():\n",
+ " print(f\"No local output file {fpath} found\")\n",
 " continue\n",
+ "\n",
 " with h5py.File(fpath, 'r') as f:\n",
 " if qm not in data:\n",
 " mod_names.append(qm)\n",
@@ -228,13 +235,21 @@
 "\n",
 " data[qm][const] = f[\"data\"][()]\n",
 "\n",
- " if not os.path.isfile(oldfpath):\n",
+ " # try to find old constant\n",
+ " # new version: uses paths from CalCat store\n",
+ " qm_mdata = old_constant_metadata[qm]\n",
+ "\n",
+ " if const not in qm_mdata:\n",
+ " continue\n",
+ "\n",
+ " filepath = qm_mdata[const][\"filepath\"]\n",
+ " h5path = qm_mdata[const][\"h5path\"]\n",
+ "\n",
+ " if not filepath or not h5path:\n",
 " continue\n",
 "\n",
- " with h5py.File(oldfpath, 'r') as oldf:\n",
- " if qm not in old_cons:\n",
- " old_cons[qm] = OrderedDict()\n",
- " old_cons[qm][const] = oldf[\"data\"][()]"
+ " with h5py.File(filepath, \"r\") as fd:\n",
+ " old_cons.setdefault(qm, OrderedDict())[const] = fd[f\"{h5path}/data\"][:]"
 ]
 },
 {
@@ -270,12 +285,10 @@
 {
 "cell_type": "code",
 "execution_count": null,
- "metadata": {
- "scrolled": false
- },
+ "metadata": {},
 "outputs": [],
 "source": [
- "display(Markdown('## Processed modules ##'))\n",
+ "display(Markdown('## Processed modules'))\n",
 "show_processed_modules(dinstance, constants, mod_names, mode=\"processed\")"
 ]
 },
 {
@@ -291,9 +304,7 @@
 {
 "cell_type": "code",
 "execution_count": null,
- "metadata": {
- "scrolled": false
- },
+ "metadata": {},
 "outputs": [],
 "source": [
 "if \"LPD\" in dinstance:\n",
@@ -420,9 +431,7 @@
 {
 "cell_type": "code",
 "execution_count": null,
- "metadata": {
- "scrolled": false
- },
+ "metadata": {},
 "outputs": [],
 "source": [
 "# Loop over modules and constants\n",
diff --git a/src/cal_tools/tools.py b/src/cal_tools/tools.py
index 551bb3e48cb8f2564dbd7403263b831bbd992269..9daa4fa8b81d5e0aa2b6fe2538468f2ab59f8d8a 100644
--- a/src/cal_tools/tools.py
+++ b/src/cal_tools/tools.py
@@ -398,7 +398,7 @@ def get_pdu_from_db(karabo_id: str, karabo_da: Union[str, list],
 
     :param karabo_id: Karabo identifier.
     :param karabo_da: Karabo data aggregator.
-    :param constant: Calibration constant object to 
+    :param constant: Calibration constant object to
     intialize CalibrationConstantMetadata class.
     :param condition: Detector condition object to
     intialize CalibrationConstantMetadata class.
@@ -548,8 +548,8 @@ def get_from_db(karabo_id: str, karabo_da: str,
             raise RuntimeError(f'{e}')
 
     if ntries > 0:
+        mdata_const = metadata.calibration_constant_version
         if load_data and meta_only:
-            mdata_const = metadata.calibration_constant_version
             fpath = Path(mdata_const.hdf5path, mdata_const.filename)
             with h5py.File(fpath, "r") as f:
                 arr = f[f"{mdata_const.h5path}/data"][()]