diff --git a/notebooks/DynamicFF/Characterize_DynamicFF_NBC.ipynb b/notebooks/DynamicFF/Characterize_DynamicFF_NBC.ipynb
index 1b848008d85f1f7f44263883a9833209e6f87e7e..40897d331b0b8f0389d0df0251711f607ea147a3 100644
--- a/notebooks/DynamicFF/Characterize_DynamicFF_NBC.ipynb
+++ b/notebooks/DynamicFF/Characterize_DynamicFF_NBC.ipynb
@@ -20,26 +20,25 @@
     "in_folder = \"/gpfs/exfel/exp/SPB/202430/p900425/raw\" # input folder, required\n",
     "out_folder = '/gpfs/exfel/data/scratch/esobolev/test/shimadzu' # output folder, required\n",
     "metadata_folder = \"\" # Directory containing calibration_metadata.yml when run by xfel-calibrate\n",
-    "dark_run = 1 # which run to read data from, required\n",
-    "flat_run = 2 # which run to read\n",
+    "runs = [1, 2] # list of two run numbers: dark field and flat field\n",
     "\n",
     "# Data files parameters.\n",
-    "karabo_da = ['HPVX01/1', 'HPVX01/2'] # data aggregators\n",
-    "karabo_id = \"SPB_EHD_MIC\" # karabo prefix of Shimadzu HPV-X2 devices\n",
+    "karabo_da = ['-1'] # data aggregators\n",
+    "karabo_id = \"SPB_MIC_HPVX2\" # karabo prefix of Shimadzu HPV-X2 devices\n",
     "\n",
     "#receiver_id = \"PNCCD_FMT-0\" # inset for receiver devices\n",
     "#path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # the template to use to access data\n",
-    "instrument_source_template = 'SPB_EHD_MIC/CAM/HPVX2_{module}:daqOutput' # data source path in h5file.\n",
+    "#instrument_source_template = 'SPB_EHD_MIC/CAM/HPVX2_{module}:daqOutput' # data source path in h5file.\n",
     "#instrument_source_template = 'SPB_EHD_HPVX2_{module}/CAM/CAMERA:daqOutput'\n",
-    "image_key = \"data.image.pixels\" # image data key in Karabo or exdf notation\n",
+    "#image_key = \"data.image.pixels\" # image data key in Karabo or exdf notation\n",
     "\n",
-    "db_module_template = \"Shimadzu_HPVX2_{}\"\n",
+    "#db_module_template = \"Shimadzu_HPVX2_{}\"\n",
     "\n",
     "# Database access parameters.\n",
     "use_dir_creation_date = True # use dir creation date as data production reference date\n",
     "cal_db_interface = \"tcp://max-exfl-cal001:8021\" # calibration DB interface to use\n",
     "cal_db_timeout = 300000 # timeout on caldb requests\n",
-    "db_output = False # if True, the notebook sends dark constants to the calibration database\n",
+    "db_output = True # if True, the notebook sends dark constants to the calibration database\n",
     "local_output = True # if True, the notebook saves dark constants locally\n",
     "creation_time = \"\" # To overwrite the measured creation_time. Required Format: YYYY-MM-DD HR:MN:SC.00 e.g. 2019-07-04 11:02:41.00\n",
     "\n",
@@ -69,14 +68,16 @@
     "from cal_tools.step_timing import StepTimer\n",
     "from cal_tools.tools import (\n",
     "    get_dir_creation_date,\n",
-    "    get_pdu_from_db,\n",
+    "#    get_pdu_from_db,\n",
     "    get_random_db_interface,\n",
     "    get_report,\n",
-    "    save_const_to_h5,\n",
+    "#    save_const_to_h5,\n",
     "    save_dict_to_hdf5,\n",
-    "    send_to_db,\n",
+    "#    send_to_db,\n",
     "    run_prop_seq_from_path,\n",
     ")\n",
+    "from cal_tools.restful_config import calibration_client\n",
+    "from cal_tools.shimadzu import ShimadzuHPVX2\n",
     "\n",
     "import dynflatfield as dffc\n",
     "from dynflatfield.draw import plot_images, plot_camera_image"
    ]
   },
   {
@@ -90,42 +91,54 @@
    "source": [
     "creation_time=None\n",
     "if use_dir_creation_date:\n",
-    "    creation_time = get_dir_creation_date(in_folder, max(dark_run, flat_run))\n",
+    "    creation_time = get_dir_creation_date(in_folder, max(runs))\n",
     "\n",
     "print(f\"Using {creation_time} as creation time of constant.\")\n",
     "\n",
-    "run, prop, seq = run_prop_seq_from_path(in_folder)\n",
-    "file_loc = f'proposal: {prop}, runs: {dark_run} {flat_run}'\n",
+    "run, proposal, seq = run_prop_seq_from_path(in_folder)\n",
+    "#file_loc = f'proposal: {prop}, runs: {dark_run} {flat_run}'\n",
    "\n",
     "# Read report path and create file location tuple to add with the injection\n",
-    "file_loc = f\"proposal:{prop} runs:{dark_run} {flat_run}\"\n",
+    "#file_loc = f\"proposal:{proposal} runs:\" + ' '.join(str(run) for run in runs)\n",
     "\n",
     "report = get_report(metadata_folder)\n",
     "cal_db_interface = get_random_db_interface(cal_db_interface)\n",
     "print(f'Calibration database interface: {cal_db_interface}')\n",
     "print()\n",
     "\n",
-    "instrument, part, component = karabo_id.split('_')\n",
+    "cc = calibration_client()\n",
+    "pdus = cc.get_all_phy_det_units_from_detector(\n",
+    "    {\"detector_identifier\": karabo_id})\n",
     "\n",
-    "sources = {}\n",
-    "source_to_db = {}\n",
-    "print(\"Sources:\")\n",
-    "for da in karabo_da:\n",
-    "    aggr, _, module = da.partition('/')\n",
-    "    source_name = instrument_source_template.format(\n",
-    "        instrument=instrument, part=part, component=component,\n",
-    "        module=module\n",
-    "    )\n",
-    "    sources[source_name] = aggr\n",
-    "    source_to_db[source_name] = db_module_template.format(module)\n",
-    "    print('-', source_name)\n",
-    "print()\n",
+    "if not pdus[\"success\"]:\n",
+    "    print(\"exception\")\n",
+    "\n",
+    "detector_info = pdus['data'][0]['detector']\n",
+    "detector = ShimadzuHPVX2(detector_info[\"source_name_pattern\"])\n",
     "\n",
+    "print(f\"Instrument {detector.instrument}\")\n",
     "print(f\"Detector in use is {karabo_id}\")\n",
-    "print(f\"Instrument {instrument}\")\n",
     "\n",
-    "step_timer = StepTimer()\n",
-    "constants = {}"
+    "modules = {}\n",
+    "for pdu_no, pdu in enumerate(pdus[\"data\"]):\n",
+    "    db_module = pdu[\"physical_name\"]\n",
+    "    module = pdu[\"module_number\"]\n",
+    "    da = pdu[\"karabo_da\"]\n",
+    "    if karabo_da[0] != \"-1\" and da not in karabo_da:\n",
+    "        continue\n",
+    "\n",
+    "    instrument_source_name = detector.instrument_source(module)\n",
+    "    print('-', da, db_module, module, instrument_source_name)\n",
+    "\n",
+    "    modules[da] = dict(\n",
+    "        db_module=db_module,\n",
+    "        module=module,\n",
+    "        raw_source_name=instrument_source_name,\n",
+    "        pdu_no=pdu_no,\n",
+    "    )\n",
+    "\n",
+    "constants = {}\n",
+    "step_timer = StepTimer()"
    ]
   },
   {
@@ -141,15 +154,20 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "for source, aggr in sources.items():\n",
-    "    display(Markdown(f\"## {source}\"))\n",
+    "dark_run = runs[0]\n",
+    "for da, meta in modules.items():\n",
+    "    source_name = detector.instrument_source(meta[\"module\"])\n",
+    "    image_key = detector.image_key\n",
+    "\n",
+    "    display(Markdown(f\"## {source_name}\"))\n",
     "\n",
     "    # read\n",
     "    step_timer.start()\n",
+    "    file_da, _, _ = da.partition('/')\n",
     "    dark_dc = RunDirectory(f\"{in_folder}/r{dark_run:04d}\",\n",
-    "                           include=f\"RAW-R{dark_run:04d}-{aggr}-S*.h5\")\n",
-    "    dark_dc = dark_dc.select([(source, image_key)])\n",
-    "    key_data = dark_dc[source][image_key]\n",
+    "                           include=f\"RAW-R{dark_run:04d}-{file_da}-S*.h5\")\n",
+    "    dark_dc = dark_dc.select([(source_name, image_key)])\n",
+    "    key_data = dark_dc[source_name, image_key]\n",
     "\n",
     "    images_dark = key_data.ndarray()\n",
     "    ntrain, npulse, ny, nx = images_dark.shape\n",
@@ -161,8 +179,12 @@
     "    # process\n",
     "    step_timer.start()\n",
     "    dark = dffc.process_dark(images_dark)\n",
-    "    module_constants = constants.setdefault(source, {})\n",
-    "    module_constants[\"Offset\"] = dark\n",
+    "\n",
+    "    # put results in the dict\n",
+    "    conditions = detector.conditions(dark_dc, meta[\"module\"])\n",
+    "    module_constants = constants.setdefault(meta[\"db_module\"], {})\n",
+    "    module_constants[\"Offset\"] = dict(\n",
+    "        conditions=conditions, data=dark, pdu_no=meta[\"pdu_no\"])\n",
     "    step_timer.done_step(\"Process dark images\")\n",
     "    display()\n",
     "\n",
@@ -188,15 +210,20 @@
    },
    "outputs": [],
    "source": [
-    "for source, aggr in sources.items():\n",
-    "    display(Markdown(f\"## {source}\"))\n",
+    "flat_run = runs[1]\n",
+    "for da, meta in modules.items():\n",
+    "    source_name = detector.instrument_source(meta[\"module\"])\n",
+    "    image_key = detector.image_key\n",
+    "\n",
+    "    display(Markdown(f\"## {source_name}\"))\n",
     "\n",
     "    # read\n",
     "    step_timer.start()\n",
+    "    file_da, _, _ = da.partition('/')\n",
     "    flat_dc = RunDirectory(f\"{in_folder}/r{flat_run:04d}\",\n",
-    "                           include=f\"RAW-R{flat_run:04d}-{aggr}-S*.h5\")\n",
-    "    flat_dc = flat_dc.select([(source, image_key)])\n",
-    "    key_data = flat_dc[source][image_key]\n",
+    "                           include=f\"RAW-R{flat_run:04d}-{file_da}-S*.h5\")\n",
+    "    flat_dc = flat_dc.select([(source_name, image_key)])\n",
+    "    key_data = flat_dc[source_name][image_key]\n",
     "\n",
     "    images_flat = key_data.ndarray()\n",
     "    ntrain, npulse, ny, nx = images_flat.shape\n",
@@ -209,9 +236,14 @@
     "    step_timer.start()\n",
     "    flat, components, explained_variance_ratio = dffc.process_flat(\n",
     "        images_flat, dark, n_components)\n",
+    "    flat_data = np.concatenate([flat[None, ...], components])\n",
     "\n",
-    "    module_constants = constants.setdefault(source, {})\n",
-    "    module_constants[\"DynamicFF\"] = np.concatenate([flat[None, ...], components])\n",
+    "    # put results in the dict\n",
+    "    conditions = detector.conditions(flat_dc, meta[\"module\"])\n",
+    "    module_constants = constants.setdefault(meta[\"db_module\"], {})\n",
+    "    module_constants[\"DynamicFF\"] = dict(\n",
+    "        conditions=conditions, data=flat_data, pdu_no=meta[\"pdu_no\"]\n",
+    "    )\n",
     "    step_timer.done_step(\"Process flat-field images\")\n",
     "\n",
     "    # draw plots\n",
@@ -253,31 +285,40 @@
     "# Output Folder Creation:\n",
     "os.makedirs(out_folder, exist_ok=True)\n",
     "\n",
-    "for source, module_constants in constants.items():\n",
-    "    for constant_name, data in module_constants.items():\n",
-    "        db_module = source_to_db[source]\n",
-    "\n",
-    "        conditions = {\n",
-    "            'Frame Size': {'value': 1.0},\n",
-    "        }\n",
-    "\n",
-    "        data_to_store = {\n",
-    "            'condition': conditions,\n",
-    "            'db_module': db_module,\n",
-    "            'karabo_id': karabo_id,\n",
-    "            'constant': constant_name,\n",
-    "            'data': data,\n",
-    "            'creation_time': creation_time.replace(microsecond=0),\n",
-    "            'file_loc': file_loc,\n",
-    "            'report': report,\n",
-    "        }\n",
+    "def inject_ccv(metadata_folder, calibration, cond, pdu, proposal, runs, const_file, begin_at):\n",
+    "    print(\"* Send to db:\", const_file)\n",
+    "    print(\" -\", metadata_folder)\n",
+    "    print(\" -\", calibration)\n",
+    "    print(\" -\", cond)\n",
+    "    print(\" - proposal\", proposal)\n",
+    "    print(\" - runs\", runs)\n",
+    "    print(\" -\", begin_at)\n",
+    "\n",
+    "for db_module, module_constants in constants.items():\n",
+    "    for constant_name, constant in module_constants.items():\n",
+    "        conditions = constant[\"conditions\"]\n",
+    "        conditions_dict = conditions.make_dict(\n",
+    "            conditions.calibration_types[constant_name])\n",
+    "        \n",
+    "        data_to_store = {db_module: {constant_name: {'0': {\n",
+    "            'conditions': conditions_dict,\n",
+    "            'data': constant[\"data\"],\n",
+    "        }}}}\n",
     "\n",
     "        ofile = f\"{out_folder}/const_{constant_name}_{db_module}.h5\"\n",
     "        if os.path.isfile(ofile):\n",
     "            print(f'File {ofile} already exists and will be overwritten')\n",
-    "        save_dict_to_hdf5(data_to_store, ofile)\n",
     "\n",
-    "step_timer.done_step(\"Storing calibration constants\")"
+    "        save_dict_to_hdf5(data_to_store, ofile)\n",
+    "        if db_output:\n",
+    "            inject_ccv(\n",
+    "                metadata_folder, constant_name, conditions,\n",
+    "                pdus[\"data\"][constant[\"pdu_no\"]],\n",
+    "                proposal, runs, ofile, creation_time\n",
+    "            )\n",
+    "\n",
+    "        if not local_output:\n",
+    "            os.unlink(ofile)"
    ]
   },
   {