diff --git a/notebooks/AGIPD/CS_Characterization_unequalClockStep_NBC.ipynb b/notebooks/AGIPD/CS_Characterization_unequalClockStep_NBC.ipynb index 260ee0552662ea68babb4147137fb22b03acbf50..a738ad33b5553c59d5aa4d640fdea1fb7e20359a 100644 --- a/notebooks/AGIPD/CS_Characterization_unequalClockStep_NBC.ipynb +++ b/notebooks/AGIPD/CS_Characterization_unequalClockStep_NBC.ipynb @@ -45,6 +45,7 @@ "in_folder = '/gpfs/exfel/exp/SPB/202330/p900340/scratch/CSmergedFiles/19012023/' # path to input data, required\n", "out_folder = \"/gpfs/exfel/data/user/klackova/AGIPD/CurrentSource/SPB\" # path to output to, required\n", "raw_folder = '/gpfs/exfel/exp/SPB/202330/p900340/raw/' # path to raw folder, required\n", + "metadata_folder = \"\" # Directory containing calibration_metadata.yml when run by xfel-calibrate\n", "dark_run = 5 # run containning CS specific darks, required\n", "\n", "modules = [2] # modules to work on, required, range allowed\n", @@ -110,6 +111,7 @@ " get_constant_from_db_and_time,\n", " get_dir_creation_date,\n", " get_pdu_from_db,\n", + " get_report,\n", " send_to_db,\n", ")\n", "from iCalibrationDB import Conditions, Constants\n", @@ -1548,34 +1550,36 @@ "outputs": [], "source": [ "# INJECTION OF CS CONSTS TO DB DOES NOT WORK YET!\n", + "report = get_report(metadata_folder)\n", "\n", "md = None\n", "\n", - "# set the operating condition\n", - "condition = Conditions.Dark.AGIPD(memory_cells=mem_cells, \n", - " bias_voltage=bias_voltage,\n", - " acquisition_rate=acq_rate, \n", - " gain_setting=gain_setting,\n", - " integration_time=integration_time)\n", - "\n", - "db_modules = get_pdu_from_db(karabo_id, karabo_da, Constants.AGIPD.SlopesPC(),\n", - " condition, cal_db_interface,\n", - " snapshot_at=creation_time)\n", - "\n", - "for pdu in db_modules:\n", - " for const in [\"SlopesCS\", \"BadPixelsCS\"]:\n", - " if const == \"SlopesCS\":\n", - " dbconst.data = slope_dict_to_arr(sanitised_const)\n", - " else:\n", - " dbconst.data = BPmap \n", + "if db_output:\n", + " # set the operating condition\n", + " condition = Conditions.Dark.AGIPD(memory_cells=mem_cells, \n", + " bias_voltage=bias_voltage,\n", + " acquisition_rate=acq_rate, \n", + " gain_setting=gain_setting,\n", + " integration_time=integration_time)\n", + "\n", + " db_modules = get_pdu_from_db(karabo_id, karabo_da, Constants.AGIPD.SlopesPC(),\n", + " condition, cal_db_interface,\n", + " snapshot_at=creation_time)\n", + "\n", + " for pdu in db_modules:\n", + " for const in [\"SlopesCS\", \"BadPixelsCS\"]:\n", + " if const == \"SlopesCS\":\n", + " dbconst.data = slope_dict_to_arr(sanitised_const)\n", + " else:\n", + " dbconst.data = BPmap \n", "\n", - " if db_output:\n", + "# if db_output:\n", " md = send_to_db(pdu, karabo_id, dbconst, condition,\n", " file_loc, report, cal_db_interface,\n", - " creation_time=creation_time,\n", + " creation_time=creation_time)\n", "\n", - "print(\"Constants parameter conditions are:\\n\")\n", - "print(f\"• memory_cells: {mem_cells}\\n• bias_voltage: {bias_voltage}\\n\"\n", + " print(\"Constants parameter conditions are:\\n\")\n", + " print(f\"• memory_cells: {mem_cells}\\n• bias_voltage: {bias_voltage}\\n\"\n", " f\"• acquisition_rate: {acq_rate}\\n• gain_setting: {gain_setting}\\n\"\n", " f\"• integration_time: {integration_time}\\n\"\n", " f\"• creation_time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\\n\")" diff --git a/notebooks/AGIPD/CS_Characterization_unequalClockStep_Summary.ipynb b/notebooks/AGIPD/CS_Characterization_unequalClockStep_Summary.ipynb 
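Note on the injection cell patched above in CS_Characterization_unequalClockStep_NBC.ipynb: the loop is now guarded by `if db_output:`, but it still assigns to a `dbconst` object that is never constructed anywhere in the cell, and the cell itself warns that CS injection does not work yet. Below is a minimal sketch of how the guarded loop could build each constant before sending it. The `getattr(Constants.AGIPD, name)()` construction and the existence of `SlopesCS`/`BadPixelsCS` classes in iCalibrationDB are assumptions, not confirmed by this diff; all other names (`condition`, `db_modules`, `sanitised_const`, `BPmap`, `file_loc`, `report`, `creation_time`) come from the notebook itself.

# Sketch only -- CS constant injection is flagged as not working yet in the cell above.
# Assumes iCalibrationDB exposes the CS constant classes under Constants.AGIPD by name
# (hypothetical); verify before enabling db_output.
if db_output:
    for pdu in db_modules:
        for name in ("SlopesCS", "BadPixelsCS"):
            dbconst = getattr(Constants.AGIPD, name)()  # assumed constructor
            if name == "SlopesCS":
                dbconst.data = slope_dict_to_arr(sanitised_const)
            else:
                dbconst.data = BPmap
            md = send_to_db(pdu, karabo_id, dbconst, condition,
                            file_loc, report, cal_db_interface,
                            creation_time=creation_time)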
index 41e98c180efc28a55ec8746a53ed9d17d948658f..0b0725122f9577ab9e0182d2838201603f301a36 100644 --- a/notebooks/AGIPD/CS_Characterization_unequalClockStep_Summary.ipynb +++ b/notebooks/AGIPD/CS_Characterization_unequalClockStep_Summary.ipynb @@ -20,18 +20,14 @@ "metadata_folder = \"\" # Directory containing calibration_metadata.yml when run by xfel-calibrate\n", "proc_folder = \"\" # Path to corrected image data used to create histograms and validation plots\n", "raw_folder = '/gpfs/exfel/exp/SPB/202330/p900340/raw/' # folder of raw data. This is used to save information of source data of generated constants, required\n", - "first_run = 6\n", "dark_run = 5\n", "\n", + "\n", "karabo_id_control = \"SPB_IRU_AGIPD1M1\" # karabo-id for the control device e.g. \"MID_EXP_AGIPD1M1\", or \"SPB_IRU_AGIPD1M1\"\n", "karabo_id = \"SPB_DET_AGIPD1M-1\"\n", "ctrl_source_template = '{}/MDL/FPGA_COMP' # path to control information\n", "\n", "use_dir_creation_date = True # use the creation data of the input dir for database queries\n", - "cal_db_interface = \"tcp://max-exfl016:8015#8045\" # the database interface to use\n", - "cal_db_timeout = 30000 # in milli seconds\n", - "local_output = False # output constants locally\n", - "db_output = False # output constants to database\n", "\n", "# Detector conditions\n", "bias_voltage = -1 # detector bias voltage\n", @@ -47,12 +43,8 @@ "metadata": {}, "outputs": [], "source": [ - "import glob\n", - "import os\n", - "import re\n", "import traceback\n", "import warnings\n", - "from multiprocessing import Pool\n", "\n", "import h5py\n", "import matplotlib.pyplot as plt\n", @@ -61,29 +53,19 @@ "import numpy as np\n", "import tabulate\n", "from cal_tools.agipdlib import AgipdCtrl\n", - "from cal_tools.agipdutils_ff import (\n", - " BadPixelsFF,\n", - " any_in,\n", - " fit_n_peaks,\n", - " gaussian_sum,\n", - " get_starting_parameters,\n", - ")\n", - "from cal_tools.ana_tools import get_range, save_dict_to_hdf5\n", + "\n", + "from cal_tools.ana_tools import get_range\n", "from cal_tools.enums import BadPixels\n", "from cal_tools.tools import (\n", - " get_dir_creation_date,\n", - " get_pdu_from_db,\n", - " get_report,\n", + " get_dir_creation_date, \n", " module_index_to_qm,\n", - " send_to_db\n", + " get_report\n", ")\n", - "from dateutil import parser\n", - "from extra_data import H5File, RunDirectory, stack_detector_data\n", + "\n", + "from extra_data import RunDirectory\n", "from extra_geom import AGIPD_1MGeometry, AGIPD_500K2GGeometry\n", - "from iCalibrationDB import Conditions, Constants, Detectors\n", - "from iminuit import Minuit\n", - "from IPython.display import HTML, Latex, Markdown, display\n", - "from XFELDetAna.plotting.heatmap import heatmapPlot\n", + "\n", + "from IPython.display import Latex, display\n", "from XFELDetAna.plotting.simpleplot import simplePlot\n", "\n", "%matplotlib inline\n", @@ -99,7 +81,7 @@ "# Get operation conditions\n", "ctrl_source = ctrl_source_template.format(karabo_id_control)\n", "\n", - "run_folder = f'{raw_folder}/r{first_run:04d}/'\n", + "run_folder = f'{raw_folder}/r{dark_run:04d}/'\n", "\n", "raw_dc = RunDirectory(run_folder)\n", "\n", @@ -111,7 +93,7 @@ "# Evaluate creation time\n", "creation_time = None\n", "if use_dir_creation_date:\n", - " creation_time = get_dir_creation_date(raw_folder, first_run)\n", + " creation_time = get_dir_creation_date(raw_folder, dark_run)\n", "\n", "agipd_cond = AgipdCtrl(\n", " run_dc=raw_dc,\n", @@ -127,7 +109,7 @@ " acq_rate = agipd_cond.get_acq_rate()\n", "if gain_setting < 0:\n", " 
gain_setting = agipd_cond.get_gain_setting(creation_time)\n", - "if bias_voltage == 0.:\n", + "if bias_voltage == -1:\n", " bias_voltage = agipd_cond.get_bias_voltage(karabo_id_control)\n", "if integration_time < 0:\n", " integration_time = agipd_cond.get_integration_time()\n", @@ -170,6 +152,7 @@ " ratios[mod] = {}\n", " fit_data[mod] = {}\n", " constants_file = f'{out_folder}/CSconst_{karabo_id}_M{mod}.h5'\n", + "# print(constants_file)\n", " try:\n", " with h5py.File(constants_file, 'r') as hf:\n", " BPmap[mod] = hf['/BadPixels/data'][()].swapaxes(1,2) \n", @@ -221,6 +204,7 @@ "if cell_range==[0,0]:\n", " cell_range[1] = shape[0]\n", "\n", + "# const_data contains ratios of slopes and BP\n", "const_data = {}\n", "for key in keys:\n", " const_data[key] = np.full((nmods, shape[0],512,128), np.nan)\n", @@ -230,7 +214,6 @@ " pixel_range[1]:pixel_range[3]] = ratios[i][key]\n", " \n", "const_data['mask'] = np.full((nmods, shape[0],512,128), np.nan)\n", - "# labels['mask'] = 'Bad pixel map'\n", "for i in range(nmods):\n", " if i in BPmap:\n", " const_data['mask'][i,:,pixel_range[0]:pixel_range[2],\n", @@ -242,10 +225,13 @@ " for key in keys_fit:\n", " fit_const_data[g][key] = np.full((nmods, shape[0],512,128), np.nan)\n", " for i in range(nmods):\n", - " if key in fit_data[i][g]:\n", - " fit_const_data[g][key][i,:,pixel_range[0]:pixel_range[2],\n", - " pixel_range[1]:pixel_range[3]] = fit_data[i][g][key]\n", - " " + " try:\n", + " if key in fit_data[i][g].keys():\n", + "# print(i,g,key)\n", + " fit_const_data[g][key][i,:,pixel_range[0]:pixel_range[2],\n", + " pixel_range[1]:pixel_range[3]] = fit_data[i][g][key]\n", + " except Exception as e:\n", + " continue\n" ] }, { @@ -380,12 +366,15 @@ "all_HM = []\n", "all_ML = []\n", "for nmod in ratios.keys():\n", - " all_HM.extend(ratios[nmod]['H-M'])\n", - " all_ML.extend(ratios[nmod]['M-L'])\n", + " #try except is used for the script to work with less than 16 modules\n", + " try:\n", + " all_HM.extend(ratios[nmod]['H-M'])\n", + " all_ML.extend(ratios[nmod]['M-L'])\n", + " except Exception as e:\n", + " continue\n", "all_HM = np.array(all_HM)\n", "all_ML = np.array(all_ML)\n", "\n", - "# all_HM = np.array([list(hm) for hm in ratios.values()])\n", "all_MSK = np.array([list(msk) for msk in BPmap.values()])\n", "\n", "table.append(('overall',\n", @@ -450,6 +439,15 @@ "\n", "plt.show()" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "report = get_report(metadata_folder)" + ] } ], "metadata": { diff --git a/notebooks/AGIPD/CS_parallelMerging_NBC.ipynb b/notebooks/AGIPD/CS_parallelMerging_NBC.ipynb index bb7eb315fd561b338a562ceb3be482abdea66f6c..19557b8a039bd96fa2778535aaa734cb9c6a38cd 100644 --- a/notebooks/AGIPD/CS_parallelMerging_NBC.ipynb +++ b/notebooks/AGIPD/CS_parallelMerging_NBC.ipynb @@ -17,8 +17,8 @@ "outputs": [], "source": [ "# cluster_profile = \"noDB\" # The ipcluster profile to use\n", - "in_folder ='/gpfs/exfel/exp/SPB/202330/p900340/raw/' # path to input data, required\n", - "out_folder = \"/gpfs/exfel/exp/SPB/202330/p900340/scratch/CSmergedFiles/19012023/\" # path to output to, required\n", + "in_folder = \"/gpfs/exfel/exp/SPB/202330/p900340/raw/\" # path to input data, required\n", + "out_folder = \"/gpfs/exfel/exp/SPB/202330/p900340/scratch/CSmergedFiles/test/\" # path to output to, required\n", "first_run = 6 # first taken run, required, it has to be run with the smallest ITESTC, otherwise define runs manually\n", "\n", "runs1 = [-1] # list of runs to use, range allowed 
ITESTC 65, use -1 for auto-completion\n", @@ -27,7 +27,6 @@ "modules = [3] # modules to work on, required, range allowed\n", "\n", "karabo_da = [\"all\"]\n", - "karabo_da_control = \"AGIPD1MCTRL00\" # karabo DA for control infromation\n", "karabo_id_control = \"SPB_IRU_AGIPD1M1\" # karabo-id for the control device e.g. \"MID_EXP_AGIPD1M1\", or \"SPB_IRU_AGIPD1M1\"\n", "karabo_id = \"SPB_DET_AGIPD1M-1\"\n", "ctrl_source_template = '{}/MDL/FPGA_COMP' # path to control information\n", @@ -69,47 +68,27 @@ "metadata": {}, "outputs": [], "source": [ - "import os, psutil\n", "import warnings\n", "from datetime import datetime, timedelta\n", "from functools import partial\n", "\n", "warnings.filterwarnings('ignore')\n", "\n", - "import dateutil.parser\n", "import h5py\n", "import matplotlib\n", "import numpy as np\n", "\n", - "from scipy.stats import median_abs_deviation as mad\n", "from collections import OrderedDict\n", "import xarray\n", "\n", "import matplotlib.pyplot as plt\n", - "from matplotlib import gridspec\n", "\n", - "import XFELDetAna.xfelpyanatools as xana\n", - "from extra_data import RunDirectory, components\n", + "from extra_data import RunDirectory\n", "import pasha as psh\n", - "import multiprocessing\n", - "import itertools\n", "from concurrent.futures import ProcessPoolExecutor\n", "\n", "from cal_tools.agipdlib import AgipdCtrl\n", - "from cal_tools.enums import BadPixels\n", - "from cal_tools.plotting import plot_badpix_3d, show_overview\n", - "from cal_tools.tools import (\n", - " gain_map_files,\n", - " get_constant_from_db_and_time,\n", - " get_dir_creation_date,\n", - " get_notebook_name,\n", - " get_pdu_from_db,\n", - " get_report,\n", - " module_index_to_qm,\n", - " parse_runs,\n", - " send_to_db,\n", - ")\n", - "from iCalibrationDB import Conditions, ConstantMetaData, Constants, Detectors, Versions\n", + "from cal_tools.tools import get_dir_creation_date\n", "\n", "%matplotlib inline" ] @@ -121,8 +100,7 @@ "outputs": [], "source": [ "cells = mem_cells\n", - "path_temp = in_folder+\"/r{:04d}/\"\n", - "image_name_temp = 'RAW-R{:04d}-AGIPD{:02d}-S{:05d}.h5'\n", + "\n", "print(\"Parameters are:\")\n", "if mem_cells < 0:\n", " print(\"Memory cells: auto-detection on\")\n", @@ -133,7 +111,6 @@ "\n", "instrument = karabo_id.split(\"_\")[0]\n", "\n", - "\n", "if instrument == \"HED\":\n", " nmods = 8\n", "else:\n", @@ -141,7 +118,6 @@ "\n", "print(f\"Detector in use is {karabo_id}\")\n", "\n", - "\n", "if karabo_da == [\"all\"]:\n", " if modules[0] == -1:\n", " modules = list(range(nmods))\n", @@ -195,6 +171,7 @@ "if integration_time < 0:\n", " integration_time = agipd_cond.get_integration_time()\n", "\n", + "print(f\"Bias voltage: {bias_voltage} V\")\n", "print(f\"Acquisition rate: {acq_rate} MHz\")\n", "print(f\"Memory cells: {mem_cells}\")\n", "print(f\"Gain setting: {gain_setting}\")\n", @@ -209,6 +186,7 @@ "outputs": [], "source": [ "def count_min_bursts(in_folder, channel, run):\n", + " \"\"\"Calculate maximum number of trains in the provided runs.\"\"\"\n", " \n", " bursts = []\n", " print(run)\n", @@ -226,6 +204,7 @@ " return bursts\n", "\n", "trains = []\n", + "\n", "for module in modules:\n", " partial_check = partial(count_min_bursts, in_folder+\"/r{:04d}/\", module)\n", "\n", @@ -246,8 +225,8 @@ "metadata": {}, "outputs": [], "source": [ - "# check_ASIC looks which set of runs to use for a given ASIC\n", "def check_ASIC(in_folder, runs, trains, channel, tresh):\n", + " \"\"\"Check which set of runs to use for a given ASIC\"\"\"\n", " \n", " ASIC_code = []\n", " 
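Aside on the merging notebook around this hunk: `count_min_bursts` and `check_ASIC` are both applied per run via `functools.partial` plus a process pool (the pool call itself sits in unchanged context lines not shown in the hunk). A self-contained toy of that fan-out pattern, assuming raw runs live under `raw_folder/r{run:04d}/` as in the notebook; `count_trains` is a stand-in with the same `(path_template, channel, run)` signature as `count_min_bursts`, not the notebook's actual helper.

from concurrent.futures import ProcessPoolExecutor
from functools import partial

from extra_data import RunDirectory


def count_trains(path_template, channel, run):
    """Stand-in for count_min_bursts: number of trains recorded in one raw run.

    `channel` is unused in this toy; the real helper presumably inspects the
    per-module data of that channel.
    """
    run_dc = RunDirectory(path_template.format(run))
    return len(run_dc.train_ids)


# Hypothetical run list; the notebook builds runs1/runs2/runs3 per ITESTC setting.
runs = [6, 7, 8]
count_for_module = partial(count_trains, "/gpfs/exfel/exp/SPB/202330/p900340/raw/r{:04d}/", 3)

with ProcessPoolExecutor(max_workers=len(runs)) as pool:
    bursts = list(pool.map(count_for_module, runs))

# The merged array is allocated with a single train dimension, so the common
# (smallest) count across the runs is the safe choice.
trains = min(bursts)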
\n", @@ -283,8 +262,9 @@ " return ASIC_code\n", "\n", "\n", - "# Specify the runs to be used and ASIC indexing within data array in OrderedDict\n", + "\n", "def defineInitParams(m_num):\n", + " \"\"\"Specify the ASIC indexing and runs to be used within data array in OrderedDict.\"\"\"\n", " \n", " channel = modules[m_num]\n", " runs_dict = OrderedDict()\n", @@ -323,13 +303,6 @@ "metadata": {}, "outputs": [], "source": [ - "# %%time\n", - "# partial_check = partial(check_ASIC, in_folder+\"/r{:04d}/\", runs2[0], trains)\n", - "\n", - "# from concurrent.futures import ProcessPoolExecutor\n", - "\n", - "# with ProcessPoolExecutor(max_workers=2) as pool:\n", - "# ASIC_code = list(pool.map(partial_check, modules))\n", "ASIC_code_list = []\n", "for module in modules:\n", " start = datetime.now()\n", @@ -344,7 +317,6 @@ " ASIC_code2 = np.array(ASIC_code2)\n", " ASIC_code2[np.where(ASIC_code == 1)[0]] = 1\n", " if 2 in ASIC_code2:\n", - "# ASIC_code2 = np.asarray(ASIC_code2)\n", " substitute = np.where(ASIC_code2 == 2)[0]\n", " ASIC_code[substitute] = 3\n", " if 3 in ASIC_code:\n", @@ -354,7 +326,6 @@ " ASIC_code3[np.where(ASIC_code == 1)[0]] = 1\n", " ASIC_code3[np.where(ASIC_code == 2)[0]] = 5\n", " if 2 in ASIC_code3:\n", - " # ASIC_code2 = np.asarray(ASIC_code2)\n", " substitute = np.where(ASIC_code3 == 2)[0]\n", " ASIC_code[substitute] = 4\n", " ASIC_code_list.append(ASIC_code)\n", @@ -377,8 +348,6 @@ "metadata": {}, "outputs": [], "source": [ - "# with multiprocessing.Pool(processes=len(modules)) as pool:\n", - "# runs_dict_list = pool.map(defineInitParams, range(len(modules)))\n", "runs_dict_list = []\n", "for module in range(len(modules)):\n", " runs_dict_list.append(defineInitParams(module))" @@ -391,16 +360,13 @@ "outputs": [], "source": [ "def process_module(channel, runs_dict, cell_id):\n", + " \"\"\"Merge runs to create complete image\"\"\"\n", "\n", " instrument_source = karabo_id+'/DET/{}CH0:xtdf'.format(runs_dict[channel][0]['module'])\n", " print(instrument_source)\n", "\n", " context= psh.context.ProcessContext(num_workers=4)\n", " cs_data = {'analog': context.alloc(shape=(trains, 352, 128, 512), dtype=np.float32),\n", - "# 'cellId': context.alloc(shape=(trains, 176), dtype=np.uint32)\n", - " # do we have to save also digital data? 
--> \n", - " # they are not used anywhere in context with CS, hence lets save space.\n", - "# 'digital': context.alloc(shape=(trains, 36, 128, 512), dtype=np.float32),\n", " }\n", " \n", " \n", @@ -418,7 +384,6 @@ " d = xarray.DataArray(d, coords={'cell': range(0, 352)}, dims=[\"train\", \"cell\", \"output\", \"x\", \"y\"])\n", " d = d.sel(cell=range(cell_id[0], cell_id[1])) # select chunk of mem cells\n", "\n", - "# cs_data['cellId'] = np.tile(d.cell.values, trains).reshape(trains, 44)\n", " if asic < 8:\n", " for i in range(3-counter, 64, 4):\n", " cs_data['analog'][..., index[1][0]:index[1][1], index[0][0]+i] = d[:, :, 0, :, i]\n", @@ -448,18 +413,16 @@ " apendix = []\n", " for val in values:\n", " apendix.append('r{}-{}'.format(run_lbl[val][0], run_lbl[val][-1]))\n", - "# print(apendix)\n", + "\n", " merged_file = \"{}/agipd_CH{:02d}_cs_\".format(out_folder, channel)+\"_\".join(apendix)+'.h5'\n", " print('Saving to:',merged_file)\n", " \n", " with h5py.File(merged_file, \"w\") as f:\n", " dset = f.require_dataset(\"{}/Analog/data\".format(channel), (trains, mem_cells, 128, 512), dtype='float32')\n", - "# cellset = f.require_dataset(\"{}/CellId/data\".format(channel), (trains, mem_cells), dtype='uint32')\n", " for cell in range(0,352,352):\n", " cell_id = [cell, cell+352]\n", " cs_data = process_module(module, runs_dict_list, cell_id) #cs_data[0] - channel, cs_data[1] - analog, cs_data[2] digital\n", " dset[:, cell_id[0]:cell_id[1], ...] = cs_data[1]\n", - "# cellset[:, cell_id[0]:cell_id[1]] = cs_data[2]\n", " f.flush()\n", " del cs_data" ] @@ -581,13 +544,6 @@ " ax.set_xticks([])\n", " ax.grid()" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": {
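A closing note on `process_module` in CS_parallelMerging_NBC.ipynb: its least obvious step is the stride-4 column interleave, where each pass writes every fourth column of a 64-pixel-wide ASIC at an offset given by `counter`. The toy numpy version below reproduces just that indexing, assuming `counter` takes the values 0-3 (the loop only tiles all 64 columns if it does); the detector-shaped arrays, pasha allocation, and the `asic >= 8` branch from the notebook are left out.

import numpy as np

# Toy version of the stride-4 fill in process_module: each pass writes every
# fourth column of a 64-column ASIC, offset by "counter" (assumed 0-3 here).
asic = np.full((64,), np.nan)                    # one pixel row of one ASIC
passes = {0: 10.0, 1: 20.0, 2: 30.0, 3: 40.0}    # dummy value per pass

for counter, value in passes.items():
    for col in range(3 - counter, 64, 4):        # same indexing as the notebook
        asic[col] = value

assert not np.isnan(asic).any()                  # all 64 columns covered once

Because the four offsets 3, 2, 1, 0 tile the stride-4 pattern exactly, four passes per ASIC are enough to produce a gap-free merged image, which is what the merged HDF5 file written above relies on.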