diff --git a/cal_tools/cal_tools/ana_tools.py b/cal_tools/cal_tools/ana_tools.py index 0b36016b8d18f7a069a9c3bd9995ce2e8fbfd256..a01475006b62d88d932cd12f5a81aebee10feb6c 100644 --- a/cal_tools/cal_tools/ana_tools.py +++ b/cal_tools/cal_tools/ana_tools.py @@ -359,16 +359,18 @@ class IMType(Enum): ALL_BAD = -4e+4 NO_BPMAP = -5e+4 -def get_range(data, scale): + +def get_range(data, scale, threshold = -1000): """ Return a range calculated by median absolute deviations :param data: numpy.array of data points :param scale: range in units of median absolute deviations - :return: + :param threshold: lower threshold for data to be considered + :return: Range [min, max] calculated by median absolute deviations """ - med = np.nanmedian(data) - mad = np.nanmedian(np.abs(data.flatten() - med)) + med = np.nanmedian(data[data>threshold]) + mad = np.nanmedian(np.abs(data[data>threshold].flatten() - med)) return med - scale * mad, med + scale * mad diff --git a/notebooks/generic/PlotFromCalDB_NBC.ipynb b/notebooks/generic/PlotFromCalDB_NBC.ipynb index 88472fbbedf8d9b4c8468f4662e578c18144ff17..3e3ba7791731c5ce5b08804aa6d6737192c9d123 100644 --- a/notebooks/generic/PlotFromCalDB_NBC.ipynb +++ b/notebooks/generic/PlotFromCalDB_NBC.ipynb @@ -6,9 +6,9 @@ "source": [ "# Statistical analysis of calibration factors#\n", "\n", - "Author: Mikhail Karnevskiy, Steffen Hauf, Version 0.1\n", + "Author: Mikhail Karnevskiy, Version 0.2\n", "\n", - "Calibration constants for pnCCDdetector from the data base with injection time between start_date and end_date are considered.\n", + "Plot calibration constants retrieved from the cal. DB.\n", "\n", "To be visualized, calibration constants are averaged per group of pixels. Plots shows calibration constant over time for each constant.\n", "\n", @@ -24,32 +24,36 @@ "cluster_profile = \"noDB\" # The ipcluster profile to use\n", "start_date = \"2019-06-30\" # date to start investigation interval from\n", "end_date = \"NOW\" # date to end investigation interval at, can be \"now\"\n", - "dclass=\"jungfrau\" # Detector class\n", - "modules = [\"Jungfrau_M039\"] # detector entry in the DB to investigate\n", - "constants = [\"Noise\", \"Offset\"] # constants to plot\n", - "nconstants = 20 # Number of time stamps to plot. If not 0, overcome start_date.\n", + "dclass=\"LPD\" # Detector class\n", + "modules = [\"LPD1M1\"] # detector entry in the DB to investigate\n", + "submodules = [2] # module index of a modular detector (1 for Q1M1 of AGIPD), range allowed \n", + "constants = ['Noise'] # constants to plot\n", + "nconstants = 7 # Number of time stamps to plot. 
If not 0, overrides start_date.\n", "max_time = 15 # max time margin in minutes to match bad pixels\n", + "nMemToShow = 32 # Number of memory cells to be shown in plots\n", "\n", "gain_setting = [0] # gain stages\n", - "bias_voltage = [90, 180] # Bias voltage\n", + "bias_voltage = [250, 500] # Bias voltage\n", "temperature = [291] # Operation temperature\n", "integration_time = [250, 50] # Integration time\n", "pixels_x=[1024] # number of pixels along X axis\n", "pixels_y=[512, 1024] # number of pixels along Y axis\n", "in_vacuum = [0] # 0 if detector is operated in room pressure\n", - "memory_cells = [1] # number of memory cells\n", - "parameter_names = ['bias_voltage', 'integration_time', 'temperature', \n", - " 'gain_setting', 'memory_cells', 'pixels_x', 'pixels_y'] # names of parameters\n", + "memory_cells = [1, 512] # number of memory cells\n", + "acquisition_rate = [1.1] # acquisition rate\n", + "parameter_names = ['bias_voltage', 'memory_cells'] # names of parameters\n", "\n", - "separate_plot = ['integration_time'] # Plot on separate plots\n", + "separate_plot = ['gain_setting'] # Plot on separate plots\n", "x_labels = ['Sensor Temperature', 'Integration Time'] # parameters to be shown on X axis: Acquisition rate, Memory cells, Sensor Temperature, Integration Time\n", "photon_energy = 9.2 # Photon energy of the beam\n", - "out_folder = \"/gpfs/exfel/data/scratch/karnem/test_bla2/\" # output folder\n", + "out_folder = \"/gpfs/exfel/data/scratch/karnem/test_bla3/\" # output folder\n", "use_existing = \"\" # If not empty, constants stored in given folder will be used\n", "cal_db_interface = \"tcp://max-exfl016:8016\" # the database interface to use\n", "cal_db_timeout = 180000 # timeout on caldb requests\",\n", "plot_range = 3 # range for plotting in units of median absolute deviations\n", - "spShape = [256, 256] # Shape of superpixel" + "spShape = [64, 64] # Shape of superpixel\n", + "sp_name = 'ASIC IDs' # name of superpixel\n", + "gain_titles = ['High gain', 'Medium gain', 'Low gain'] # Title inset related to gain" ] }, { @@ -74,7 +78,7 @@ "from iCalibrationDB import Constants, Conditions, Detectors, ConstantMetaData\n", "from cal_tools.tools import get_from_db, get_random_db_interface\n", "from cal_tools.ana_tools import (save_dict_to_hdf5, load_data_from_hdf5, \n", - " HMType, IMType, hm_combine,\n", + " HMType, IMType, hm_combine, combine_constants,\n", " combine_lists, get_range)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Prepare variables\n", + "submodules = [\"Q{}M{}\".format(x // 4 + 1, x % 4 + 1) for x in submodules]\n", + "\n", + "# No submodules for small detectors\n", + "if dclass not in ['AGIPD', 'LPD']:\n", + " submodules = ['']\n", + "\n", + "# 0 is considered as None.\n", + "acquisition_rate = [x if x>0 else None for x in acquisition_rate]\n", + "\n", + "nMem = max(memory_cells) # Number of mem cells to store\n", + "\n", "parameters = [globals()[x] for x in parameter_names]\n", "\n", - "constantsDark = {'Noise': 'BadPixelsDark',\n", + "# Empty list from the command line may not work\n", + "if separate_plot == ['']:\n", + " separate_plot = []\n", + "\n", + "# Mapping between constants and their bad pixel maps \n", + "constantsDark = {\"SlopesFF\": 'BadPixelsFF',\n", + " 'SlopesPC': 'BadPixelsPC',\n", + " 'SlopesCI': 'BadPixelsCI',\n", + " 'Noise': 'BadPixelsDark',\n", + " 'Offset': 'BadPixelsDark'}\n", "print('Bad pixels data: ', constantsDark)\n", "\n", @@ -134,6 +157,9 @@ " for db_module in modules:\n", " det = getattr(Detectors, db_module)\n", " \n", + " if 
dclass in ['AGIPD', 'LPD']:\n", + " det = getattr(det, submodules[0])\n", + " \n", " # Get getector conditions\n", " if dclass=='CCD':\n", " dconstants = getattr(Constants, dclass)(det.detector_type)\n", @@ -146,7 +172,7 @@ " # Loop over parameters\n", " for pars in parameter_list:\n", "\n", - " if (const in [\"Offset\", \"Noise\", \"SlopesPC\"] or \"DARK\" in const.upper()):\n", + " if (const in [\"Offset\", \"Noise\", \"SlopesPC\", \"SlopesCI\"] or \"DARK\" in const.upper()):\n", " dcond = Conditions.Dark\n", " mcond = getattr(dcond, dclass)(**pars)\n", " else:\n", @@ -286,8 +312,24 @@ }, "outputs": [], "source": [ + "def prepare_to_store(a, nMem):\n", + " \"\"\"\n", + " Different constants for AGIPD and LPD may have different array shapes.\n", + " This function unifies the array shape.\n", + " \"\"\"\n", + " if dclass in ['AGIPD', 'LPD']:\n", + " shape = list(a.shape[:2])+[nMem]\n", + " b = np.full(shape, np.nan)\n", + " b[:, :, :a.shape[2]] = a[:, :, :]\n", + " return b\n", + " else:\n", + " return a\n", + "\n", "def get_rebined(a, rebin):\n", - " if dclass==\"jungfrau\":\n", + " \"\"\"\n", + " Groups of pixels are formed here for better visualization\n", + " \"\"\"\n", + " if dclass in ['AGIPD', 'LPD', 'jungfrau']:\n", " return a.reshape(\n", " int(a.shape[0] / rebin[0]),\n", " rebin[0],\n", @@ -305,8 +347,59 @@ " \n", "\n", "def modify_const(const, data, isBP = False):\n", + " \"\"\"\n", + " The array shape of some constants has changed over time.\n", + " Modification is needed to unify the array shape and\n", + " make it possible to show constants on the same plot.\n", + " \"\"\"\n", " if dclass==\"jungfrau\" and data.shape[1] == 512:\n", " data = data.swapaxes(0, 1)\n", + " return data\n", + " \n", + " if dclass==\"AGIPD\":\n", + " const = const.split('_')[0]\n", + " if const in ['SlopesFF']:\n", + " if (len(data.shape) == 4):\n", + " data = data[:, :, :, 0][..., None]\n", + " else:\n", + " data = data[..., None]\n", + "\n", + " if data.shape[2]<3:\n", + " data = data[:,:,0,None]\n", + "\n", + " if not isBP:\n", + " if data.shape[0] != 128:\n", + " data = data.swapaxes(0, 2).swapaxes(1, 3).swapaxes(2, 3)\n", + "\n", + " # Copy slope medium to be saved later\n", + " if const in ['SlopesPC']:\n", + " data[:, :, :, 1] = data[:, :, :, 3]\n", + " else:\n", + " if const in ['SlopesPC']:\n", + " if len(data.shape) == 3:\n", + " data = data[:, :, :, None].repeat(10, axis=3)\n", + "\n", + " if data.shape[0] != 128:\n", + " data = data.swapaxes(0, 1).swapaxes(1, 2)\n", + "\n", + " if len(data.shape) < 4:\n", + " print(data.shape, \"Unexpected shape!\")\n", + " return data\n", + " \n", + " if dclass==\"LPD\":\n", + " const = const.split('_')[0]\n", + " if const in ['SlopesFF']:\n", + " data = data[..., None, None]\n", + "\n", + " if(len(data.shape)==5):\n", + " data = data[:,:,:,:,0]\n", + "\n", + " if len(data.shape) < 4:\n", + " print(data.shape, \"Unexpected shape!\")\n", + "\n", + " if data.shape[0] != 256:\n", + " data = data.swapaxes(0, 2).swapaxes(1,3).swapaxes(2,3) \n", + " \n", " return data\n", "\n", "ret_constants = {}\n", @@ -323,12 +416,18 @@ " # fix naming for Jungfrau039\n", " if qm == 'Jungfrau1':\n", " qm = 'JungfrauM039'\n", + " # use submodule name for big detectors\n", + " if dclass in ['AGIPD', 'LPD']:\n", + " qm = submodules[0]\n", " \n", + " # Add insets for parameters\n", " for key in separate_plot:\n", - " const = '{}_{}{}'.format(const, key[0], constant_parameters[i][key])\n", - " # Constant for jungfrau already contains gain stages\n", - " if dclass == \"jungfrau\":\n", - " 
const += '_g0'\n", + " # Several constants already contain gain stages\n", + " if key == 'gain_setting' and dclass in ['AGIPD', 'LPD', 'jungfrau']:\n", + " val = 0\n", + " else:\n", + " val = constant_parameters[i][key]\n", + " const = '{}_{}{}'.format(const, key[0], val)\n", " \n", " if not const in ret_constants:\n", " ret_constants[const] = {}\n", @@ -351,7 +450,7 @@ " \n", " if cdataBP.shape != cdata.shape:\n", " print('Wrong bad pixel shape! {}, expected {}'.format(cdataBP.shape, cdata.shape))\n", - " continue\n", + " cdataBP = np.full_like(cdata, IMType.NO_BPMAP.value)\n", " \n", " # Apply bad pixel mask\n", " cdataABP = np.copy(cdata)\n", @@ -379,10 +478,18 @@ " # Convert parameters to dict\n", " dpar = {p.name: p.value for p in constant_data.detector_condition.parameters}\n", " \n", + " # Several constants have dimensions running over gain.\n", + " # All gain stages are stored as separate arrays.\n", " if len(toStore.shape)==4:\n", " for i in range(3):\n", - " if i>0:\n", + " if i>0 and 'gain_setting' in separate_plot:\n", " const = const.replace('_g{}'.format(i-1), '_g{}'.format(i))\n", + " # FF has only high gain\n", + " if 'SlopesFF' in const and i>0:\n", + " continue\n", + " # PC has only high and medium gain.\n", + " if 'SlopesPC' in const and i>1:\n", + " continue\n", " \n", " if not const in ret_constants:\n", " ret_constants[const] = {}\n", @@ -390,11 +497,11 @@ " ret_constants[const][qm] = []\n", " print(\"Store values in dict\", const, qm, ctime)\n", " ret_constants[const][qm].append({'ctime': ctime,\n", - " 'nBP': cdataNBP[:,:,0,i],\n", - " 'dataBP': toStoreBP[:,:,0,i],\n", - " 'dataBPStd': toStoreBPStd[:,:,0,i],\n", - " 'data': toStore[:,:,0,i],\n", - " 'dataStd': toStoreStd[:,:,0,i],\n", + " 'nBP': prepare_to_store(cdataNBP[:,:,:,i], nMem),\n", + " 'dataBP': prepare_to_store(toStoreBP[:,:,:,i], nMem),\n", + " 'dataBPStd': prepare_to_store(toStoreBPStd[:,:,:,i], nMem),\n", + " 'data': prepare_to_store(toStore[:,:,:,i], nMem),\n", + " 'dataStd': prepare_to_store(toStoreStd[:,:,:,i], nMem),\n", " 'mdata': dpar}) \n", " \n", " \n", @@ -420,9 +527,9 @@ "outputs": [], "source": [ "if use_existing == \"\":\n", - " print('Save data to /CalDBAna_{}_{}.h5'.format(dclass, db_module))\n", + " print('Save data to {}/CalDBAna_{}_{}_{}.h5'.format(out_folder, dclass, db_module, submodules[0]))\n", " save_dict_to_hdf5(ret_constants,\n", - " '{}/CalDBAna_{}_{}.h5'.format(out_folder, dclass, db_module))" + " '{}/CalDBAna_{}_{}_{}.h5'.format(out_folder, dclass, db_module, submodules[0]))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "if use_existing == \"\":\n", - " fpath = '{}/CalDBAna_{}_*.h5'.format(out_folder, dclass)\n", + " fpath = '{}/CalDBAna_{}_{}_{}.h5'.format(out_folder, dclass, db_module, submodules[0])\n", "else:\n", - " fpath = '{}/CalDBAna_{}_*.h5'.format(use_existing, dclass)\n", + " fpath = '{}/CalDBAna_{}_{}_{}.h5'.format(use_existing, dclass, db_module, submodules[0])\n", "\n", "print('Load data from {}'.format(fpath))\n", "ret_constants = load_data_from_hdf5(fpath)" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# For AGIPD and LPD\n", + "# Combine FF and PC data to calculate Gain\n", + "# Estimate Noise in units of electrons\n", + "\n", + "ret_constants[\"Gain_g0\"] = {}\n", + "ret_constants[\"Noise-e_g0\"] = {}\n", + "\n", + "for mod in list(range(16)):\n", + " # The check is performed inside the for loop\n", + " # in order to use break\n", + " # This makes the code more readable\n", + " if (\"SlopesFF_g0\" 
not in ret_constants or\n", + " \"SlopesPC_g0\" not in ret_constants):\n", + " break\n", + "\n", + " qm = \"Q{}M{}\".format(mod // 4 + 1, mod % 4 + 1)\n", + " if (qm not in ret_constants[\"SlopesFF_g0\"] or\n", + " qm not in ret_constants[\"SlopesPC_g0\"]):\n", + " continue\n", + "\n", + " print(qm)\n", + " ret_constants[\"Gain_g0\"][qm] = {}\n", + "\n", + " dataFF = ret_constants[\"SlopesFF_g0\"][qm]\n", + " dataPC = ret_constants[\"SlopesPC_g0\"][qm]\n", + "\n", + " if (len(dataFF) == 0 or len(dataPC) == 0):\n", + " continue\n", + "\n", + " ctimesFF = np.array(dataFF[\"ctime\"])\n", + " ctimesPC = np.array(dataPC[\"ctime\"])\n", + "\n", + " ctime, icomb = combine_constants(ctimesFF, ctimesPC)\n", + "\n", + " cdataPC_vs_time = np.array(dataPC[\"data\"])[...]\n", + " cdataFF_vs_time = np.array(dataFF[\"data\"])[...]\n", + "\n", + " cdataFF_vs_time = np.nanmedian(cdataFF_vs_time, axis=3)[..., None]\n", + "\n", + " cdataFF_vs_time /= np.nanmedian(cdataFF_vs_time, axis=(1, 2, 3))[:, None,\n", + " None, None]\n", + " cdataPC_vs_time /= np.nanmedian(cdataPC_vs_time, axis=(1, 2, 3))[:, None,\n", + " None, None]\n", + "\n", + " gain_vs_time = []\n", + " for iFF, iPC in icomb:\n", + " gain_vs_time.append(cdataFF_vs_time[iFF] * cdataPC_vs_time[iPC])\n", + "\n", + " print('Shape of gain array: ', np.array(gain_vs_time).shape)\n", + " \n", + " ctime_ts = [t.timestamp() for t in ctime]\n", + " \n", + " ret_constants[\"Gain_g0\"][qm][\"ctime\"] = ctime\n", + " ret_constants[\"Gain_g0\"][qm][\"data\"] = np.array(gain_vs_time)\n", + "\n", + " if \"Noise_g0\" not in ret_constants:\n", + " continue\n", + "\n", + " if qm not in ret_constants[\"Noise_g0\"]:\n", + " continue\n", + "\n", + " dataN = ret_constants[\"Noise_g0\"][qm]\n", + " if len(dataN) == 0:\n", + " continue\n", + "\n", + " ret_constants[\"Noise-e_g0\"][qm] = {}\n", + " \n", + " ctimesG = np.array(ctime)\n", + " ctimesN = np.array(dataN[\"ctime\"])\n", + "\n", + " ctime, icomb = combine_constants(ctimesG, ctimesN)\n", + "\n", + " cdataG_vs_time = np.array(gain_vs_time)\n", + " cdataN_vs_time = np.array(dataN[\"data\"])[...]\n", + "\n", + " data_vs_time = []\n", + " for iG, iN in icomb:\n", + " data_vs_time.append(\n", + " cdataN_vs_time[iN] * adu_to_photon / cdataG_vs_time[iG])\n", + "\n", + " print('Shape of gain array: ',np.array(gain_vs_time).shape)\n", + " ctime_ts = [t.timestamp() for t in ctime]\n", + " ret_constants[\"Noise-e_g0\"][qm][\"ctime\"] = ctime\n", + " ret_constants[\"Noise-e_g0\"][qm][\"data\"] = np.array(data_vs_time)\n", + " \n", + "save_dict_to_hdf5({k:v for k,v in ret_constants.items() if k in ['Gain_g0', 'Noise-e_g0']},\n", + " '{}/CalDBAna_{}_Gain.h5'.format(out_folder, dclass))" + ] + }, { "cell_type": "code", "execution_count": null, @@ -452,7 +654,7 @@ " 'Mean': ['data', '', 'Mean over pixels'],\n", " 'std': ['dataStd', '', '$\\sigma$ over pixels'],\n", " 'MeanBP': ['dataBP', 'Good pixels only', 'Mean over pixels'],\n", - " 'NBP': ['nBP', 'Fraction of BP', 'Number of BP'],\n", + " 'NBP': ['nBP', '', 'Fraction of BP'],\n", " 'stdBP': ['dataBPStd', 'Good pixels only', '$\\sigma$ over pixels'],\n", "}\n" ] @@ -470,134 +672,171 @@ "# loop over constat type\n", "for const, modules in ret_constants.items():\n", " \n", + " # split key to constant name and list of insets\n", " const = const.split(\"_\")\n", - " if True:\n", - "\n", - " print('Const: {}'.format(const))\n", + " gain = [int(x[1]) for x in const if 'g' in x]\n", + " gain = gain[0] if len(gain)>0 else None\n", + " \n", + " print('Const: {}'.format(const))\n", 
"\n", - " # summary over modules\n", - " mod_data = {}\n", - " mod_names = []\n", - " mod_times = []\n", + " # summary over modules\n", + " mod_data = {}\n", + " mod_names = []\n", + " mod_times = []\n", " \n", - " # Loop over modules\n", - " for mod, data in modules.items():\n", - " print('Module: {}'.format(mod))\n", - "\n", - " ctimes = np.array(data[\"ctime\"])\n", - " ctimes_ticks = [x.strftime('%y-%m-%d') for x in ctimes]\n", - "\n", - " if (\"mdata\" in data):\n", - " cmdata = np.array(data[\"mdata\"])\n", - " for i, tick in enumerate(ctimes_ticks):\n", - " for entr in x_labels:\n", - " ctimes_ticks[i] += ', {}={}'.format(entr[0].upper(), \n", - " cmdata[i].get(entr, None))\n", - "\n", - " sort_ind = np.argsort(ctimes_ticks)\n", - " ctimes_ticks = list(np.array(ctimes_ticks)[sort_ind])\n", - "\n", - " # Create sorted by data dataset\n", - " rdata = {}\n", - " for key, item in keys.items():\n", - " if item[0] in data:\n", - " rdata[key] = np.array(data[item[0]])[sort_ind]\n", - "\n", - " nTimes = rdata['Mean'].shape[0]\n", - " nPixels = rdata['Mean'].shape[1] * rdata['Mean'].shape[2]\n", - " nBins = nPixels\n", - " \n", - " # Avoid to low values\n", - " if const[0] in [\"Noise10Hz\", \"Offset10Hz\"]:\n", - " rdata['Mean'][rdata['Mean'] < 0.1] = np.nan\n", - " if 'MeanBP' in rdata:\n", - " rdata['MeanBP'][rdata['MeanBP'] < 0.1] = np.nan\n", - " if 'NBP' in rdata:\n", - " rdata['NBP'] = rdata['NBP'].astype(float)\n", - " rdata['NBP'][rdata['NBP'] == spShape[0]*spShape[1]] = np.nan\n", - "\n", - " # Reshape: ASICs over cells for plotting\n", - " pdata = {}\n", - " for key in rdata:\n", - " if len(rdata[key].shape)<3:\n", - " continue\n", + " # Loop over modules\n", + " for mod, data in modules.items():\n", + " print('Module: {}'.format(mod))\n", + "\n", + " ctimes = np.array(data[\"ctime\"])\n", + " ctimes_ticks = [x.strftime('%y-%m-%d') for x in ctimes]\n", + "\n", + " if (\"mdata\" in data):\n", + " cmdata = np.array(data[\"mdata\"])\n", + " for i, tick in enumerate(ctimes_ticks):\n", + " for entr in x_labels:\n", + " key = entr[0].upper()\n", + " val = cmdata[i].get(entr, None)\n", + " if val is not None:\n", + " ctimes_ticks[i] += ', {}={:.1f}'.format(key, val)\n", + " \n", + " sort_ind = np.argsort(ctimes_ticks)\n", + " ctimes_ticks = list(np.array(ctimes_ticks)[sort_ind])\n", + "\n", + " # Create sorted by data dataset\n", + " rdata = {}\n", + " for key, item in keys.items():\n", + " if item[0] in data:\n", + " rdata[key] = np.array(data[item[0]])[sort_ind]\n", + "\n", + " nTimes = rdata['Mean'].shape[0]\n", + " nPixels = rdata['Mean'].shape[1] * rdata['Mean'].shape[2]\n", + " nBins = nMemToShow * nPixels\n", + "\n", + " # Avoid too low values\n", + " if const[0] in [\"Noise10Hz\", \"Offset10Hz\"]:\n", + " rdata['Mean'][rdata['Mean'] < 0.1] = np.nan\n", + " if 'MeanBP' in rdata:\n", + " rdata['MeanBP'][rdata['MeanBP'] < 0.1] = np.nan\n", + " if 'NBP' in rdata:\n", + " rdata['NBP'] = rdata['NBP'].astype(float)\n", + " rdata['NBP'][rdata['NBP'] == spShape[0]*spShape[1]] = np.nan\n", + " rdata[\"NBP\"] = rdata[\"NBP\"] / (spShape[0] * spShape[1]) * 100\n", + "\n", + " # Reshape: ASICs over cells for plotting\n", + " pdata = {}\n", + " for key in rdata:\n", + " if len(rdata[key].shape)<3:\n", + " continue\n", + " if dclass in ['AGIPD', 'LPD']:\n", + " pdata[key] = rdata[key][:, :, :, :nMemToShow].reshape(\n", + " nTimes, nBins).swapaxes(0, 1)\n", + " else:\n", " pdata[key] = rdata[key].reshape(nTimes, nBins).swapaxes(0, 1)\n", "\n", - " # Summary over ASICs\n", - " adata = {}\n", - " 
for key in rdata:\n", - " if len(rdata[key].shape)<3:\n", - " continue\n", - " adata[key] = np.nansum(rdata[key], axis=(1, 2))\n", - "\n", - " # Summary information over modules\n", - " for key in pdata:\n", - " if key not in mod_data:\n", - " mod_data[key] = []\n", - " if key == 'NBP':\n", - " mod_data[key].append(np.nansum(pdata[key], axis=0))\n", - " else:\n", - " mod_data[key].append(np.nanmean(pdata[key], axis=0))\n", - "\n", - " mod_names.append(mod)\n", - " mod_times.append(ctimes[sort_ind])\n", - " \n", - " # Plotting\n", - " for key in pdata:\n", - " \n", - " if len(pdata[key].shape)<2:\n", - " continue\n", - " \n", - " vmin,vmax = get_range(pdata[key][::-1].flatten(), plot_range)\n", - " if key == 'NBP':\n", - " unit = '[%]'\n", - " else:\n", - " unit = '[ADU]'\n", - "\n", - " title = '{}, module {}, {}'.format(\n", - " const[0], mod, keys[key][1])\n", - " cb_label = '{}, {} {}'.format(const[0], keys[key][2], unit)\n", - "\n", - " fname = '{}/{}_{}'.format(out_folder, const[0], mod.replace('_', ''))\n", - " for item in const[1:]:\n", - " fname = '{}_{}'.format(fname, item)\n", - " fname = '{}_ASIC_{}.png'.format(fname, key)\n", - " \n", - " hm_combine(pdata[key][::-1].astype(float), htype=HMType.mro,\n", - " x_label='Creation Time', y_label='ASIC ID',\n", - " x_ticklabels=ctimes_ticks,\n", - " x_ticks=np.arange(len(ctimes_ticks))+0.3,\n", - " title=title, cb_label=cb_label,\n", - " vmin=vmin, vmax=vmax,\n", - " fname=fname,\n", - " pad=[0.125, 0.125, 0.12, 0.185])\n", + " # Summary over ASICs\n", + " adata = {}\n", + " for key in rdata:\n", + " if len(rdata[key].shape)<3 or nMemToShow==1:\n", + " continue\n", + " adata[key] = np.nanmean(rdata[key], axis=(1, 2)).swapaxes(0, 1)\n", + "\n", + " # Summary information over modules\n", + " for key in pdata:\n", + " if key not in mod_data:\n", + " mod_data[key] = []\n", + " if key == 'NBP':\n", + " mod_data[key].append(np.nansum(pdata[key], axis=0))\n", + " else:\n", + " mod_data[key].append(np.nanmean(pdata[key], axis=0))\n", "\n", - " \n", - " # Summary over modules\n", - " for key in mod_data:\n", - " \n", + " mod_names.append(mod)\n", + " mod_times.append(ctimes[sort_ind])\n", + "\n", + " # Plotting\n", + " for key in pdata:\n", + "\n", + " if len(pdata[key].shape)<2:\n", + " continue\n", + "\n", + " vmin,vmax = get_range(pdata[key][::-1].flatten(), plot_range)\n", " if key == 'NBP':\n", - " unit = ''\n", + " unit = '[%]'\n", + " title = 'BadPixelsDark'\n", " else:\n", " unit = '[ADU]'\n", + " title = const[0]\n", "\n", - " title = '{}, All modules, {}'.format(\n", - " const[0], keys[key][1])\n", + " title += ', module {}'.format(mod)\n", + " if keys[key][1] != '':\n", + " title += ', {}'.format(keys[key][1])\n", + " if gain is not None:\n", + " title += ', {}'.format(gain_titles[gain])\n", " \n", - " fname = '{}/{}_{}'.format(out_folder, const[0], 'all')\n", + " cb_label = '{}, {} {}'.format(const[0], keys[key][2], unit)\n", + "\n", + " fname = '{}/{}_{}'.format(out_folder, const[0], mod.replace('_', ''))\n", " for item in const[1:]:\n", " fname = '{}_{}'.format(fname, item)\n", " fname = '{}_ASIC_{}.png'.format(fname, key)\n", - " \n", - " fig = plt.figure(figsize=(12,12) )\n", - " for i in range(len(mod_data[key])):\n", - " plt.scatter(mod_times[i], mod_data[key][i], label=mod_names[i])\n", - " plt.grid()\n", - " plt.xlabel('Creation Time')\n", - " plt.ylabel('{}, {} {}'.format(const[0], keys[key][2], unit)) \n", - " plt.legend(loc='best guess')\n", - " plt.title(title)\n", - " fig.savefig(fname)\n" + "\n", + " if 
nMemToShow>1:\n", + " htype=HMType.INSET_AXIS\n", + " else: \n", + " htype=HMType.mro\n", + "\n", + " hm_combine(pdata[key][::-1].astype(float), htype=htype,\n", + " x_label='Creation Time', y_label=sp_name,\n", + " x_ticklabels=ctimes_ticks,\n", + " x_ticks=np.arange(len(ctimes_ticks))+0.3,\n", + " title=title, cb_label=cb_label,\n", + " vmin=vmin, vmax=vmax,\n", + " fname=fname,\n", + " pad=[0.125, 0.125, 0.12, 0.185])\n", + "\n", + " if nMemToShow>1:\n", + " vmin,vmax = get_range(adata[key][::-1].flatten(), plot_range)\n", + " hm_combine(adata[key].astype(float), htype=HMType.mro,\n", + " x_label='Creation Time', y_label='Memory cell ID',\n", + " x_ticklabels=ctimes_ticks,\n", + " x_ticks=np.arange(len(ctimes_ticks))+0.3,\n", + " title=title, cb_label=cb_label,\n", + " fname=fname.replace('ASIC', 'MEM'),\n", + " vmin=vmin, vmax=vmax)\n", + "\n", + " plt.show()\n", + " # Summary over modules\n", + " for key in mod_data:\n", + " if dclass in ['AGIPD', 'LPD']:\n", + " continue\n", + "\n", + " if key == 'NBP':\n", + " unit = '[%]'\n", + " title = 'BadPixelsDark'\n", + " else:\n", + " unit = '[ADU]'\n", + " title = const[0]\n", + "\n", + " title += ', module {}'.format(mod)\n", + " if keys[key][1] != '':\n", + " title += ', {}'.format(keys[key][1])\n", + " if gain is not None:\n", + " title += ', {}'.format(gain_titles[gain])\n", + "\n", + " fname = '{}/{}_{}'.format(out_folder, const[0], 'all')\n", + " for item in const[1:]:\n", + " fname = '{}_{}'.format(fname, item)\n", + " fname = '{}_ASIC_{}.png'.format(fname, key)\n", + "\n", + " fig = plt.figure(figsize=(12,12) )\n", + " for i in range(len(mod_data[key])):\n", + " plt.scatter(mod_times[i], mod_data[key][i], label=mod_names[i])\n", + " plt.grid()\n", + " plt.xlabel('Creation Time')\n", + " plt.ylabel('{}, {} {}'.format(const[0], keys[key][2], unit)) \n", + " plt.legend(loc='best guess')\n", + " plt.title(title)\n", + " fig.savefig(fname)\n" ] } ], diff --git a/notebooks/generic/PlotFromCalDB_Summary_NBC.ipynb b/notebooks/generic/PlotFromCalDB_Summary_NBC.ipynb index a071734e74d3b3b25ec13daaf605b21889ce312d..8d6d6e884b1bfe342e963782e9a51f30feaabe7a 100644 --- a/notebooks/generic/PlotFromCalDB_Summary_NBC.ipynb +++ b/notebooks/generic/PlotFromCalDB_Summary_NBC.ipynb @@ -6,9 +6,13 @@ "source": [ "# Statistical analysis of calibration factors#\n", "\n", - "Author: Mikhail Karnevskiy, Steffen Hauf, Version 0.2\n", + "Author: Mikhail Karnevskiy, Version 0.2\n", "\n", - "Plot calibration constants for AGIPD1M1 detector aggregated within detector modules. Input information is taken from folder `use_existing`. Corresponding files are prepared by `PlotFromCalDB` notebook." + "Plot calibration constants retrieved from the cal. DB.\n", + "\n", + "To be visualized, calibration constants are averaged per group of pixels. Plots shows calibration constant over time for each constant.\n", + "\n", + "Values shown in plots are saved in h5 files." 
] }, { @@ -18,8 +22,8 @@ "outputs": [], "source": [ "cluster_profile = \"noDB\" # The ipcluster profile to use\n", - "out_folder = \"/gpfs/exfel/data/scratch/karnem/PlotCalDB/MID/AGIPD1M2/\" # Output folder, required\n", - "use_existing = \"/gpfs/exfel/data/scratch/karnem/PlotCalDB/MID/AGIPD1M2/\" # Input folder\n", + "out_folder = \"/gpfs/exfel/data/scratch/karnem/test_bla4/\" # Output folder, required\n", + "use_existing = \"/home/karnem/myscratch/PlotCalDB/SPB/AGIPD1M1/\" # Input folder\n", "dclass = \"AGIPD\" # Detector class\n", "nMemToShow = 32 # Number of memory cells to be shown in plots over ASICs\n", "range_offset = [4000., 5500, 6500, 8500] # plotting range for offset: high gain l, r, medium gain l, r \n", @@ -30,7 +34,9 @@ "range_slopesCI = [22.0, 27.0, -0.5, 1.5] # plotting range for slope CI: high gain l, r, medium gain l, r \n", "range_slopesFF = [0.8, 1.2, 0.6, 1.2] # plotting range for slope FF: high gain l, r, medium gain l, r \n", "plot_range = 3 # range for plotting in units of median absolute deviations\n", - "x_labels = ['Sensor Bias Voltage', 'Memory cells'] # parameters to be shown on X axis" + "x_labels = ['Sensor Bias Voltage', 'Memory cells'] # parameters to be shown on X axis\n", + "spShape = [64, 64] # Shape of superpixel\n", + "gain_titles = ['High gain', 'Medium gain', 'Low gain'] # Title inset related to gain" ] }, { @@ -68,7 +74,7 @@ }, "outputs": [], "source": [ - "print('Load data from {}/CalDBAna_{}_Q1M2.h5'.format(use_existing, dclass))\n", + "print('Load data from {}/CalDBAna_{}_*.h5'.format(use_existing, dclass))\n", "ret_constants = load_data_from_hdf5(\n", " '{}/CalDBAna_{}_*.h5'.format(use_existing, dclass))\n", "\n", @@ -100,20 +106,18 @@ " 'Mean': ['data', '', 'Mean over pixels'],\n", " 'std': ['dataStd', '', '$\\sigma$ over pixels'],\n", " 'MeanBP': ['dataBP', 'Good pixels only', 'Mean over pixels'],\n", - " 'NBP': ['nBP', 'Fraction of BP', 'Fraction of BP'],\n", + " 'NBP': ['nBP', '', 'Fraction of BP'],\n", " 'stdBP': ['dataBPStd', 'Good pixels only', '$\\sigma$ over pixels'],\n", " 'stdASIC': ['', '', '$\\sigma$ over ASICs'],\n", " 'stdCell': ['', '', '$\\sigma$ over Cells'],\n", - "}\n", - "\n", - "gain_name = ['High', 'Medium', 'Low']" + "}\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { - "scrolled": false + "scrolled": true }, "outputs": [], "source": [ @@ -122,134 +126,127 @@ "# loop over constat type\n", "for const, modules in ret_constants.items():\n", "\n", - " # Loop over gain\n", - " for gain in range(2):\n", - " print('Const: {}, gain : {}'.format(const, gain))\n", - "\n", - " if const in [\"Gain\", \"Noise-e\"] and gain == 1:\n", - " continue\n", - "\n", - " # loop over modules\n", - " mod_data = {}\n", - " mod_data['stdASIC'] = []\n", - " mod_data['stdCell'] = []\n", - " mod_names = []\n", - " mod_times = []\n", + " const = const.split(\"_\")\n", + " gain = [int(x[1]) for x in const if 'g' in x]\n", + " gain = gain[0] if len(gain)>0 else None\n", + " print('Const: {}, gain {}'.format(const, gain))\n", "\n", - " # Loop over modules\n", - " for mod, data in modules.items():\n", - " ctimes = np.array(data[\"ctime\"])\n", - " ctimes_ticks = [x.strftime('%y-%m-%d') for x in ctimes]\n", + " # loop over modules\n", + " mod_data = {}\n", + " mod_data['stdASIC'] = []\n", + " mod_data['stdCell'] = []\n", + " mod_names = []\n", + " mod_times = []\n", "\n", - " if (\"mdata\" in data):\n", - " cmdata = np.array(data[\"mdata\"])\n", - " for i, tick in enumerate(ctimes_ticks):\n", - " for entr in x_labels:\n", - " 
ctimes_ticks[i] += ', {}={}'.format(entr[0].upper(), \n", - " cmdata[i].get(entr, None))\n", + " # Loop over modules\n", + " for mod, data in modules.items():\n", + " ctimes = np.array(data[\"ctime\"])\n", + " ctimes_ticks = [x.strftime('%y-%m-%d') for x in ctimes]\n", "\n", - " sort_ind = np.argsort(ctimes_ticks)\n", - " ctimes_ticks = list(np.array(ctimes_ticks)[sort_ind])\n", + " if (\"mdata\" in data):\n", + " cmdata = np.array(data[\"mdata\"])\n", + " for i, tick in enumerate(ctimes_ticks):\n", + " for entr in x_labels:\n", + " key = entr[0].upper()\n", + " val = cmdata[i].get(entr, None)\n", + " if val is not None:\n", + " ctimes_ticks[i] += ', {}={:.1f}'.format(key, val)\n", "\n", - " # Create sorted by data dataset\n", - " rdata = {}\n", - " for key, item in keys.items():\n", - " if item[0] in data:\n", - " rdata[key] = np.array(data[item[0]])[sort_ind]\n", + " sort_ind = np.argsort(ctimes_ticks)\n", + " ctimes_ticks = list(np.array(ctimes_ticks)[sort_ind])\n", "\n", - " nTimes = rdata['Mean'].shape[0]\n", - " nPixels = rdata['Mean'].shape[1] * rdata['Mean'].shape[2]\n", - " nBins = nMemToShow * nPixels\n", + " # Create sorted by data dataset\n", + " rdata = {}\n", + " for key, item in keys.items():\n", + " if item[0] in data:\n", + " rdata[key] = np.array(data[item[0]])[sort_ind]\n", "\n", - " # Select gain\n", - " if const not in [\"Gain\", \"Noise-e\"]:\n", - " for key in rdata:\n", - " rdata[key] = rdata[key][..., gain]\n", + " nTimes = rdata['Mean'].shape[0]\n", + " nPixels = rdata['Mean'].shape[1] * rdata['Mean'].shape[2]\n", + " nBins = nMemToShow * nPixels\n", "\n", - " if 'NBP' in rdata:\n", - " rdata[\"NBP\"] = rdata[\"NBP\"] / (64 * 64) * 100\n", + " if 'NBP' in rdata:\n", + " rdata[\"NBP\"] = rdata[\"NBP\"] / (spShape[0] * spShape[1]) * 100\n", "\n", - " # Reshape: ASICs over cells for plotting\n", - " pdata = {}\n", - " for key in rdata:\n", - " pdata[key] = rdata[key][:, :, :, :nMemToShow].reshape(\n", - " nTimes, nBins).swapaxes(0, 1)\n", + " # Reshape: ASICs over cells for plotting\n", + " pdata = {}\n", + " for key in rdata:\n", + " pdata[key] = rdata[key][:, :, :, :nMemToShow].reshape(\n", + " nTimes, nBins).swapaxes(0, 1)\n", "\n", - " # Summary information over modules\n", - " for key in pdata:\n", - " if key not in mod_data:\n", - " mod_data[key] = []\n", - " \n", - " mod_data[key].append(np.nanmean(pdata[key], axis=0))\n", - " # Avoid too low values\n", - " if const in [\"Noise\", \"Offset\", \"Noise-e\"] and key in ['Mean', 'MeanBP']:\n", - " mod_data[key][-1][mod_data[key][-1] == 0.0] = IMType.STRANGE_VAL.value\n", - " if key=='NBP':\n", - " if 'Mean' in mod_data:\n", - " mod_data['Mean'][-1][mod_data[key][-1] == 100] = IMType.ALL_BAD.value\n", - " if 'MeanBP' in mod_data:\n", - " mod_data['MeanBP'][-1][mod_data[key][-1] == 100] = IMType.ALL_BAD.value\n", - " mod_data[key][-1][mod_data[key][-1] == 100] = IMType.ALL_BAD.value\n", - " \n", + " # Summary information over modules\n", + " for key in pdata:\n", + " if key not in mod_data:\n", + " mod_data[key] = []\n", "\n", - " mod_data['stdASIC'].append(np.nanstd(\n", - " np.nanmean(rdata['Mean'][:, :, :, :nMemToShow], axis=(1, 2)), axis=1))\n", - " mod_data['stdCell'].append(np.nanstd(\n", - " np.nanmean(rdata['Mean'][:, :, :, :nMemToShow], axis=3), axis=(1, 2)))\n", + " mod_data[key].append(np.nanmean(pdata[key], axis=0))\n", + " # Avoid too low values\n", + " if const[0] in [\"Noise\", \"Offset\", \"Noise-e\"] and key in ['Mean', 'MeanBP']:\n", + " mod_data[key][-1][mod_data[key][-1] == 0.0] = 
IMType.STRANGE_VAL.value\n", + " if key=='NBP':\n", + " if 'Mean' in mod_data:\n", + " mod_data['Mean'][-1][mod_data[key][-1] == 100] = IMType.ALL_BAD.value\n", + " if 'MeanBP' in mod_data:\n", + " mod_data['MeanBP'][-1][mod_data[key][-1] == 100] = IMType.ALL_BAD.value\n", + " mod_data[key][-1][mod_data[key][-1] == 100] = IMType.ALL_BAD.value\n", "\n", - " mod_names.append(mod)\n", - " mod_times.append(ctimes_ticks)\n", "\n", - " # Incert nans to get array-like list of data\n", - " uTime = mod_times[0]\n", - " for tlist in mod_times:\n", - " uTime = sorted(multi_union(uTime, tlist))\n", + " mod_data['stdASIC'].append(np.nanstd(\n", + " np.nanmean(rdata['Mean'][:, :, :, :nMemToShow], axis=(1, 2)), axis=1))\n", + " mod_data['stdCell'].append(np.nanstd(\n", + " np.nanmean(rdata['Mean'][:, :, :, :nMemToShow], axis=3), axis=(1, 2)))\n", "\n", - " for i, tlist in enumerate(mod_times):\n", - " for t, time in enumerate(uTime):\n", - " if t == len(tlist) or time != tlist[t]:\n", - " tlist.insert(t, time)\n", - " for key in mod_data:\n", - " mod_data[key][i] = np.insert(\n", - " mod_data[key][i], t, IMType.NO_CONST.value )\n", + " mod_names.append(mod)\n", + " mod_times.append(ctimes_ticks)\n", "\n", - " # Plotting\n", - " nModules = len(mod_names)\n", - " mod_idx = np.argsort(mod_names)\n", - " for key in mod_data:\n", - " vmin,vmax = get_range(np.array(mod_data[key])[mod_idx][::-1].flatten(), plot_range)\n", - " if const in rangevals and key in ['Mean', 'MeanBP']:\n", - " vmin = rangevals[const][gain][0]\n", - " vmax = rangevals[const][gain][1]\n", + " # Insert nans to get array-like list of data\n", + " uTime = mod_times[0]\n", + " for tlist in mod_times:\n", + " uTime = sorted(multi_union(uTime, tlist))\n", "\n", - " htype = None\n", - " if const in ['SlopesFF', 'SlopesPC', 'SlopesCI']:\n", - " htype = HMType.INSET_1D\n", + " for i, tlist in enumerate(mod_times):\n", + " for t, time in enumerate(uTime):\n", + " if t == len(tlist) or time != tlist[t]:\n", + " tlist.insert(t, time)\n", + " for key in mod_data:\n", + " mod_data[key][i] = np.insert(\n", + " mod_data[key][i], t, IMType.NO_CONST.value )\n", "\n", - " if key == 'NBP':\n", - " unit = '[%]'\n", - " else:\n", - " unit = '[ADU]'\n", - " if const == 'Noise-e':\n", - " unit = '[$e^-$]'\n", + " # Plotting\n", + " nModules = len(mod_names)\n", + " mod_idx = np.argsort(mod_names)\n", + " for key in mod_data:\n", + " vmin,vmax = get_range(np.array(mod_data[key])[mod_idx][::-1].flatten(), plot_range)\n", "\n", - " title = '{}, All modules, {} gain, {}'.format(\n", - " const, gain_name[gain], keys[key][1])\n", - " cb_label = '{}, {} {}'.format(const, keys[key][2], unit)\n", + " htype = None\n", + " if const[0] in ['SlopesFF', 'SlopesPC', 'SlopesCI']:\n", + " htype = HMType.INSET_1D\n", + " \n", + " if key == 'NBP':\n", + " unit = '[%]'\n", + " title = 'BadPixelsDark'\n", + " else:\n", + " unit = '[ADU]'\n", + " title = const[0]\n", "\n", - " hm_combine(np.array(mod_data[key])[mod_idx][::-1],\n", - " y_ticks=np.arange(nModules)[::-1]+0.8,\n", - " y_ticklabels=np.array(mod_names)[mod_idx],\n", - " x_label='Creation Time', y_label='Module ID',\n", - " x_ticklabels=ctimes_ticks, x_ticks=np.arange(\n", - " len(ctimes_ticks))+0.3,\n", - " title=title, cb_label=cb_label,\n", - " fname='{}/{}_all_g{}_{}.png'.format(\n", - " out_folder, const, gain, key),\n", - " vmin=vmin, vmax=vmax,\n", - " pad=[0.125, 0.151, 0.12, 0.17], htype=htype)\n", - " #break\n", - " #break" + " title += ', All modules'\n", + " if keys[key][1] != '':\n", + " title += ', 
{}'.format(keys[key][1])\n", + " if gain is not None:\n", + " title += ', {}'.format(gain_titles[gain])\n", + " \n", + " cb_label = '{}, {} {}'.format(const[0], keys[key][2], unit)\n", + " hm_combine(np.array(mod_data[key])[mod_idx][::-1],\n", + " y_ticks=np.arange(nModules)[::-1]+0.8,\n", + " y_ticklabels=np.array(mod_names)[mod_idx],\n", + " x_label='Creation Time', y_label='Module ID',\n", + " x_ticklabels=ctimes_ticks, x_ticks=np.arange(\n", + " len(ctimes_ticks))+0.3,\n", + " title=title, cb_label=cb_label,\n", + " fname='{}/{}_all_g{}_{}.png'.format(\n", + " out_folder, const[0], gain, key),\n", + " vmin=vmin, vmax=vmax,\n", + " pad=[0.125, 0.151, 0.12, 0.17], htype=htype)\n" ] } ], diff --git a/reportservice/report_conf.yaml b/reportservice/report_conf.yaml index 2fc336768a8131196c7da0fc17d2060314d05600..b8fbc0c3e49415474dcb3e441976f082b8516ae6 100644 --- a/reportservice/report_conf.yaml +++ b/reportservice/report_conf.yaml @@ -18,22 +18,110 @@ GLOBAL: SPB: AGIPD1M1: det-type: - - "AGIPD" - - "STATS_FROM_DB" + - "GENERIC" + - "STATS_FROM_DB2" + modules: + - "AGIPD1M1" start-date: "2019-01-01" - end-date: "2019-12-12" + end-date: "NOW" + nconstants: 20 constants: - "Noise" - "SlopesFF" - "SlopesPC" - "Offset" - modules: "0-16" - bias-voltages: + dclass: "AGIPD" + submodules: "0-16" + bias-voltage: - 300 - 500 - mem-cells: + memory-cells: - 128 - 176 + - 250 + acquisition-rate: + - 1.1 + - 2.2 + - 4.5 + photon-energy: 9.2 + separate-plot: + - "gain_setting" + parameter-names: + - "bias_voltage" + - "acquisition_rate" + - "memory_cells" + spShape: + - 64 + - 64 + gain-titles: + - "High gain" + - "Medium gain" + - "Low gain" + x-labels: + - "Acquisition rate" + - "Memory cells" + sp-name: "ASICs id" + nMemToShow: 32 + use-existing: "''" + out-folder: "/gpfs/exfel/data/scratch/xcal/report_service/tmp/{instrument}/{detector}/" + cal-db-timeout: 180000 + cal-db-interface: "tcp://max-exfl016:8015#8025" + + JUNGFRAU: + det-type: + - "GENERIC" + - "STATS_FROM_DB" + start-date: "2019-01-01" + end-date: "NOW" + nconstants: 20 + constants: + - "Noise" + - "Offset" + dclass: "jungfrau" + nMemToShow: 1 + modules: + - "Jungfrau_M035" + - "Jungfrau_M203" + - "Jungfrau_M221" + bias-voltage: + - 90 + - 180 + memory-cells: + - 1 + pixels-x: + - 1024 + pixels-y: + - 512 + - 1024 + temperature: + - 291 + integration-time: + - 50 + - 250 + gain-setting: + - 0 + separate-plot: + - "integration_time" + - "gain_setting" + parameter-names: + - "bias_voltage" + - "integration_time" + - "pixels_x" + - "pixels_y" + - "gain_setting" + - "temperature" + - "memory_cells" + spShape: + - 256 + - 64 + gain-titles: + - "High gain" + - "Medium gain" + - "Low gain" + x-labels: + - "Sensor Temperature" + - "Integration Time" + sp-name: "Supercolumn 256*64" photon-energy: 9.2 use-existing: "''" out-folder: "/gpfs/exfel/data/scratch/xcal/report_service/tmp/{instrument}/{detector}/" @@ -43,65 +131,67 @@ SPB: MID: AGIPD1M2: det-type: - - "AGIPD" - - "STATS_FROM_DB" - db-module: "AGIPD1M2" + - "GENERIC" + - "STATS_FROM_DB2" + modules: + - "AGIPD1M2" start-date: "2019-01-01" - end-date: "2019-12-12" + end-date: "NOW" + nconstants: 20 constants: - "Noise" - "SlopesFF" - "SlopesPC" - "Offset" - modules: "0-16" - bias-voltages: + dclass: "AGIPD" + submodules: "0-16" + bias-voltage: - 300 - 500 - mem-cells: + memory-cells: + - 128 - 176 + - 250 + acquisition-rate: + - 1.1 + - 2.2 + - 4.5 photon-energy: 9.2 + separate-plot: + - "gain_setting" + parameter-names: + - "bias_voltage" + - "acquisition_rate" + - "memory_cells" + spShape: 
+ - 64 + - 64 + gain-titles: + - "High gain" + - "Medium gain" + - "Low gain" + x-labels: + - "Acquisition rate" + - "Memory cells" + sp-name: "ASICs id" + nMemToShow: 32 use-existing: "''" out-folder: "/gpfs/exfel/data/scratch/xcal/report_service/tmp/{instrument}/{detector}/" cal-db-timeout: 180000 cal-db-interface: "tcp://max-exfl016:8015#8025" - range-offset: - - 800. - - 1500. - - 600 - - 900 - range-noise: - - 2.0 - - 16. - - 1.0 - - 7.0 - range-gain: - - 20 - - 30 - - 20 - - 30 - range-noise-e: - - 100. - - 600. - - 100. - - 600. - range-slopesPC: - - 0.95 - - 1.05 - - 0.0 - - 0.5 - range-slopesFF: - - 0.8 - - 1.2 - - 0.8 - - 1.2 - - EPIX01: + + EPIX: det-type: - - "EPIX" + - "GENERIC" - "STATS_FROM_DB" - db-module: "ePix100_M15" start-date: "2019-01-01" - end-date: "2019-05-12" + end-date: "NOW" + nconstants: 20 + dclass: "ePix100" + nMemToShow: 1 + modules: + - "ePix100_M15" + - "ePix100_M18" constants: - "Noise" - "Offset" @@ -113,18 +203,322 @@ MID: - 1 - 50 photon-energy: 9.2 + separate-plot: + - "integration_time" + parameter-names: + - "bias_voltage" + - "integration_time" + - "temperature" + - "in_vacuum" + spShape: + - 354 + - 96 + x-labels: + - "Sensor Temperature" + - "Integration Time" + sp-name: "ASICs id" + use-existing: "''" + out-folder: "/gpfs/exfel/data/scratch/xcal/report_service/tmp/{instrument}/{detector}/" + cal-db-timeout: 180000 + cal-db-interface: "tcp://max-exfl016:8015#8025" + + +FXE: + LPD1M1: + det-type: + - "GENERIC" + - "STATS_FROM_DB2" + modules: + - "LPD1M1" + start-date: "2019-01-01" + end-date: "NOW" + nconstants: 20 + constants: + - "Noise" + - "SlopesFF" + - "SlopesCI" + - "Offset" + dclass: "LPD" + submodules: "0-16" + bias-voltage: + - 250 + - 500 + memory-cells: + - 1 + - 128 + - 512 + photon-energy: 9.2 + separate-plot: + - "gain_setting" + parameter-names: + - "bias_voltage" + - "memory_cells" + spShape: + - 64 + - 64 + gain-titles: + - "High gain" + - "Medium gain" + - "Low gain" + x-labels: + - "Memory cells" + sp-name: "ASICs id" + nMemToShow: 32 + use-existing: "''" + out-folder: "/gpfs/exfel/data/scratch/xcal/report_service/tmp/{instrument}/{detector}/" + cal-db-timeout: 180000 + cal-db-interface: "tcp://max-exfl016:8015#8025" + + JUNGFRAU: + det-type: + - "GENERIC" + - "STATS_FROM_DB" + start-date: "2019-01-01" + end-date: "NOW" + nconstants: 20 + constants: + - "Noise" + - "Offset" + dclass: "jungfrau" + modules: + - "Jungfrau_M233" + - "Jungfrau_M125" + - "Jungfrau_M260" + bias-voltage: + - 90 + - 180 + memory-cells: + - 1 + pixels-x: + - 1024 + pixels-y: + - 512 + - 1024 + temperature: + - 291 + integration-time: + - 50 + - 250 + gain-setting: + - 0 + separate-plot: + - "integration_time" + - "gain_setting" + parameter-names: + - "bias_voltage" + - "integration_time" + - "pixels_x" + - "pixels_y" + - "gain_setting" + - "temperature" + - "memory_cells" + spShape: + - 256 + - 64 + gain-titles: + - "High gain" + - "Medium gain" + - "Low gain" + nMemToShow: 1 + x-labels: + - "Sensor Temperature" + - "Integration Time" + sp-name: "Supercolumn 256*64" + photon-energy: 9.2 + use-existing: "''" + out-folder: "/gpfs/exfel/data/scratch/xcal/report_service/tmp/{instrument}/{detector}/" + cal-db-timeout: 180000 + cal-db-interface: "tcp://max-exfl016:8015#8025" + +DETLAB: + FASTCCD: + det-type: + - "GENERIC" + - "STATS_FROM_DB" + start-date: "2019-01-01" + end-date: "NOW" + nconstants: 20 + constants: + - "Noise" + - "Offset" + dclass: "CCD" + nMemToShow: 1 + modules: + - "fastCCD1" + bias-voltage: + - 79 + temperature: + - 235 + - 216 + - 245 + 
integration-time: + - 1 + - 50 + gain-setting: + - 0 + - 1 + - 2 + - 8 + pixels-x: + - 1934 + pixels-y: + - 960 + separate-plot: + - "integration_time" + - "gain_setting" + - "temperature" + parameter-names: + - "bias_voltage" + - "integration_time" + - "pixels_x" + - "pixels_y" + - "gain_setting" + - "temperature" + spShape: + - 967 + - 10 + gain-titles: + - "gain 0x" + - "gain 1x" + - "gain 2x" + - "gain 8x" + x-labels: + - "Sensor Temperature" + - "Integration Time" + sp-name: "Supercolumn 967*10" + photon-energy: 9.2 + use-existing: "''" + out-folder: "/gpfs/exfel/data/scratch/xcal/report_service/tmp/{instrument}/{detector}/" + cal-db-timeout: 180000 + cal-db-interface: "tcp://max-exfl016:8015#8025" + + +SCS: + FASTCCD: + det-type: + - "GENERIC" + - "STATS_FROM_DB" + start-date: "2019-01-01" + end-date: "NOW" + nconstants: 20 + constants: + - "Noise" + - "Offset" + dclass: "CCD" + nMemToShow: 1 + modules: + - "fastCCD1" + bias-voltage: + - 79 + temperature: + - 235 + - 216 + - 245 + integration-time: + - 1 + - 50 + gain-setting: + - 0 + - 1 + - 2 + - 8 + pixels-x: + - 1934 + pixels-y: + - 960 + separate-plot: + - "integration_time" + - "gain_setting" + - "temperature" + parameter-names: + - "bias_voltage" + - "integration_time" + - "pixels_x" + - "pixels_y" + - "gain_setting" + - "temperature" + spShape: + - 967 + - 10 + gain-titles: + - "gain 0x" + - "gain 1x" + - "gain 2x" + - "gain 8x" + x-labels: + - "Sensor Temperature" + - "Integration Time" + sp-name: "Supercolumn 967*10" + photon-energy: 9.2 + use-existing: "''" + out-folder: "/gpfs/exfel/data/scratch/xcal/report_service/tmp/{instrument}/{detector}/" + cal-db-timeout: 180000 + cal-db-interface: "tcp://max-exfl016:8015#8025" + +SQS: + PNCCD: + det-type: + - "GENERIC" + - "STATS_FROM_DB" + start-date: "2019-01-01" + end-date: "NOW" + nconstants: 20 + constants: + - "Noise" + - "Offset" + dclass: "CCD" + nMemToShow: 1 + modules: + - "PnCCD1" + bias-voltage: + - 300 + temperature: + - 235 + integration-time: + - 1 + - 50 + gain-setting: + - 0 + pixels-x: + - 1024 + pixels-y: + - 1024 + separate-plot: + - "integration_time" + - "temperature" + parameter-names: + - "bias_voltage" + - "integration_time" + - "pixels_x" + - "pixels_y" + - "gain_setting" + - "temperature" + spShape: + - 256 + - 256 + x-labels: + - "Sensor Temperature" + - "Integration Time" + sp-name: "ASICs id" + photon-energy: 9.2 use-existing: "''" out-folder: "/gpfs/exfel/data/scratch/xcal/report_service/tmp/{instrument}/{detector}/" cal-db-timeout: 180000 cal-db-interface: "tcp://max-exfl016:8015#8025" - EPIX02: +HED: + EPIX: det-type: - - "EPIX" + - "GENERIC" - "STATS_FROM_DB" - db-module: "ePix100_M18" start-date: "2019-01-01" - end-date: "2019-05-12" + end-date: "NOW" + nconstants: 20 + dclass: "ePix100" + nMemToShow: 1 + modules: + - "ePix100_M16" + - "ePix100_M17" constants: - "Noise" - "Offset" @@ -136,35 +530,83 @@ MID: - 1 - 50 photon-energy: 9.2 + separate-plot: + - "integration_time" + parameter-names: + - "bias_voltage" + - "integration_time" + - "temperature" + - "in_vacuum" + spShape: + - 354 + - 96 + x-labels: + - "Sensor Temperature" + - "Integration Time" + sp-name: "superpixel id" use-existing: "''" out-folder: "/gpfs/exfel/data/scratch/xcal/report_service/tmp/{instrument}/{detector}/" cal-db-timeout: 180000 cal-db-interface: "tcp://max-exfl016:8015#8025" -FXE: - LPD1M1: + JUNGFRAU: det-type: - - "LPD" + - "GENERIC" - "STATS_FROM_DB" - db-module: "LPD1M1" - start-date: "2018-01-01" - end-date: "2018-12-12" + start-date: "2019-01-01" + end-date: 
"NOW" + nconstants: 20 constants: - "Noise" - - "SlopesFF" - - "SlopesCI" - "Offset" - modules: "0-16" - bias-voltages: - - 250 - - 500 - mem-cells: + dclass: "jungfrau" + nMemToShow: 1 + modules: + - "Jungfrau_M039" + - "Jungfrau_M266" + bias-voltage: + - 90 + - 180 + memory-cells: - 1 - - 128 - - 256 + pixels-x: + - 1024 + pixels-y: - 512 + - 1024 + temperature: + - 291 + integration-time: + - 50 + - 250 + gain-setting: + - 0 + separate-plot: + - "integration_time" + - "gain_setting" + parameter-names: + - "bias_voltage" + - "integration_time" + - "pixels_x" + - "pixels_y" + - "gain_setting" + - "temperature" + - "memory_cells" + spShape: + - 256 + - 64 + gain-titles: + - "High gain" + - "Medium gain" + - "Low gain" + x-labels: + - "Sensor Temperature" + - "Integration Time" + sp-name: "Supercolumn 256*64" photon-energy: 9.2 use-existing: "''" out-folder: "/gpfs/exfel/data/scratch/xcal/report_service/tmp/{instrument}/{detector}/" cal-db-timeout: 180000 cal-db-interface: "tcp://max-exfl016:8015#8025" + + diff --git a/webservice/serve_overview.py b/webservice/serve_overview.py index 333cb40dfb479610acce25f2e641461bb30fa30e..d06fcd0e88ae030727fef10a471f56a5108dc1f2 100644 --- a/webservice/serve_overview.py +++ b/webservice/serve_overview.py @@ -175,12 +175,12 @@ class RequestHandler(BaseHTTPRequestHandler): instrument, det_inset) files = glob.glob(tmpl + '/*pdf') files += glob.glob(tmpl + '/*/*pdf') - files.sort(key=os.path.getmtime) + files.sort(key=os.path.getmtime, reverse=True) file_info = [] for i, file in enumerate(files): if 'xfel.pdf' in file: continue - if (i % 2) == 0: + if (len(file_info) % 2) == 0: bgcolor = 'EEEEEE' else: bgcolor = 'FFFFFF' @@ -271,22 +271,23 @@ class RequestHandler(BaseHTTPRequestHandler): ls = l.split() if key not in ls: return None - detector = ls[ls.index(key) - 1] + dclass = ls[ls.index(key) - 1] + in_folder = ls[ls.index("--in-folder") + 1] + out_folder = ls[ls.index("--out-folder") + 1] + + if "--db-module" in ls: + detector = ls[ls.index("--db-module") + 1] + else: + detector = dclass if "--instrument" in ls: instrument = ls[ls.index("--instrument") + 1] else: - if "--db-module" in ls: - instrument = ls[ls.index("--db-module") + 1] - elif detector == "PNCCD": + if detector == "PNCCD": instrument = "SQS" - elif detector == "FASTCCD": - instrument = "SCS" else: - instrument = "" + instrument = in_folder.split('/')[4] - in_folder = ls[ls.index("--in-folder") + 1] - out_folder = ls[ls.index("--out-folder") + 1] runs = [] for rc in self.run_candidates: if rc in ls: @@ -294,8 +295,8 @@ class RequestHandler(BaseHTTPRequestHandler): requested = "{} {}".format(ls[0], ls[1]) - return [detector, instrument, in_folder, out_folder, runs, - requested] + return [dclass, detector, instrument, in_folder, out_folder, + runs, requested] last_chars = {} last_calib = {} @@ -305,7 +306,7 @@ class RequestHandler(BaseHTTPRequestHandler): info = get_run_info(l, 'DARK') if info is not None: - detector, instrument, in_folder, out_folder, runs, requested = info # noqa + dclass, detector, instrument, in_folder, out_folder, runs, requested = info # noqa if f"{instrument}-{detector}" in last_chars: continue @@ -327,9 +328,9 @@ class RequestHandler(BaseHTTPRequestHandler): tsize = 0 for run in runs: run = int(run) - if detector not in self.mappings: + if dclass not in self.mappings: continue - for mp in self.mappings[detector]: + for mp in self.mappings[dclass]: for f in glob.glob( f"{in_folder}/r{run:04d}/*{mp}*.h5"): tsize += os.stat(f).st_size @@ -347,7 +348,7 @@ class 
RequestHandler(BaseHTTPRequestHandler): info = get_run_info(l, 'CORRECT') if info is not None: - _, _, in_folder, out_folder, runs, requested = info + _, _, _, in_folder, out_folder, runs, requested = info instrument = in_folder.split('/')[4] if instrument not in last_calib: last_calib[instrument] = [] @@ -365,7 +366,9 @@ class RequestHandler(BaseHTTPRequestHandler): pdfs]) tmpl = self.templates["last-characterizations"] - last_characterizations_r = Template(tmpl).render(char_runs=last_chars) + last_characterizations_r = Template(tmpl).render(char_runs=last_chars, + host = host, + port = port) tmpl = self.templates["last-correction"] last_correction_r = Template(tmpl).render(info=last_calib, host=host, diff --git a/webservice/serve_overview.yaml b/webservice/serve_overview.yaml index 255bb62a5d4c179eff85ef2b857c2b275cc8af70..dea9536613ad3552f1fb75e50b6025e454f5da42 100644 --- a/webservice/serve_overview.yaml +++ b/webservice/serve_overview.yaml @@ -37,6 +37,7 @@ mappings: - EPIX10K FASTCCD: - DA05 + - DA01 run-candidates: - "--run-high" diff --git a/webservice/templates/last_characterizations.html b/webservice/templates/last_characterizations.html index a37ecc37b18a642100597043f8fad509694ff458..1eefe10fe21605426ccc890c57d04c6602caed2f 100644 --- a/webservice/templates/last_characterizations.html +++ b/webservice/templates/last_characterizations.html @@ -17,5 +17,6 @@ </dd> </dl> {% endfor %} - + <br> +<a href="http://{{ host}}:{{ port }}/dark?" target="_blank">Full list of dark characterizations</a> </div> diff --git a/webservice/templates/last_correction.html b/webservice/templates/last_correction.html index 3f92fb257939f986f2d469a0845b6c5a7a48d80b..893266cb2ec311c144f1a8f73f0744609ec842ff 100644 --- a/webservice/templates/last_correction.html +++ b/webservice/templates/last_correction.html @@ -3,7 +3,7 @@ {% for instrument, items in info.items() %} <h3>{{ instrument }}</h3> - <table cellspacing="10"> + <table cellspacing="5"> <tr> <th> Time of request </th> <th> Proposal </th> diff --git a/webservice/update_config.py b/webservice/update_config.py index d547e416d47e32424c772cbb60281b9ae38d1267..b732c06e4d6a40aa40b8fb6ddcfbb86028be3df3 100644 --- a/webservice/update_config.py +++ b/webservice/update_config.py @@ -26,7 +26,6 @@ required_args.add_argument('--instrument', type=str, "DETLAB"], help='The instrument') # noqa required_args.add_argument('--cycle', type=str, help='The facility cycle') parser.add_argument('--apply', action='store_true') - # remove help calls as they will cause the argument parser to exit add_help = False if "-h" in sys.argv: @@ -36,6 +35,17 @@ if "--help" in sys.argv: sys.argv.remove("--help") add_help = True +# Save available_options before adding bool keys with "no-". 
+prev_available_opt = available_options + +for det, val in available_options.items(): + bool_keys = [] + for k, v in val.items(): + if v == bool: + bool_keys.append(k) + for b in bool_keys: + available_options[det]['no-{}'.format(b)] = bool + known, remaining = parser.parse_known_args() args = vars(known) detector = args["detector"] @@ -67,9 +77,17 @@ new_conf = {task: {instrument: {detector: {}}}} for key, value in args.items(): key = key.replace("_", "-") if key in available_options[detector] and value is not None: + if isinstance(value, list): for v in value: value[value.index(v)] = ''.join(v) + + if 'no-' in key and isinstance(value, bool): + if key not in prev_available_opt[detector].keys(): + new_conf[task][instrument][detector][key.replace('no-','')] = False + # avoid saving the "no-"key into the updated config + continue + new_conf[task][instrument][detector][key] = value pyaml = yaml.dump(new_conf, default_flow_style=False) diff --git a/webservice/webservice.py b/webservice/webservice.py index 0f24fd865e0f6d46c57116fb6229ef86c3c6cde6..9c438a46b41ce9d011d448131557cfa1ef9f893d 100644 --- a/webservice/webservice.py +++ b/webservice/webservice.py @@ -245,9 +245,11 @@ async def parse_config(cmd, config): cmd += ["--{}".format(key)] cmd += [str(v) for v in value] elif isinstance(value, bool): - cmd += ["--{}".format(key)] + if value: + cmd += ["--{}".format(key)] else: cmd += ["--{}".format(key), str(value)] + return cmd diff --git a/xfel_calibrate/calibrate.py b/xfel_calibrate/calibrate.py index 8e73594a2bc088ae604d043c95bff1d3d15bfb09..ec2aa7835113a4451aef7f3245f75d9c862bc39d 100755 --- a/xfel_calibrate/calibrate.py +++ b/xfel_calibrate/calibrate.py @@ -23,6 +23,7 @@ import textwrap from cal_tools.tools import tex_escape + # Add a class combining raw description formatting with # Metavariable default outputs class RawTypeFormatter(argparse.RawDescriptionHelpFormatter, @@ -43,7 +44,7 @@ def make_initial_parser(): help='The detector to calibrate') parser.add_argument('type', metavar='TYPE', type=str, - help='Type of calibration: '+",".join(notebooks.keys())) + help='Type of calibration: ' + ",".join(notebooks.keys())) parser.add_argument('--no-cluster-job', action="store_true", @@ -68,6 +69,7 @@ def make_initial_parser(): parser = make_initial_parser() + # Helper functions for parser extensions def make_intelli_list(ltype): @@ -86,17 +88,20 @@ def make_intelli_list(ltype): parsed_values = [] values = ",".join(values) - if isinstance(values, str): - for rcomp in values.split(","): - if "-" in rcomp: - start, end = rcomp.split("-") - parsed_values += list(range(int(start), int(end))) - else: - parsed_values += [int(rcomp)] - elif isinstance(values, (list, tuple)): - parsed_values = values - else: - parsed_values = [values, ] + try: + if isinstance(values, str): + for rcomp in values.split(","): + if "-" in rcomp: + start, end = rcomp.split("-") + parsed_values += list(range(int(start), int(end))) + else: + parsed_values += [int(rcomp)] + elif isinstance(values, (list, tuple)): + parsed_values = values + else: + parsed_values = [values, ] + except Exception as e: + print('ERROR:', e) parsed_values = [self.ltype(p) for p in parsed_values] print("Parsed input {} to {}".format(values, parsed_values)) setattr(namespace, self.dest, parsed_values) @@ -193,7 +198,7 @@ def make_epilog(nb, caltype=None): plines = pp.pformat(lines[1:])[1:-1].split("\n") for line in plines: sline = line.replace("'", "", 1) - sline = sline.replace("', '", " "*(17 if caltype else 0), 1) + sline = sline.replace("', '", 
" " * (17 if caltype else 0), 1) sline = sline[::-1].replace("'", "", 1)[::-1] sline = sline.replace(" ,", " ") if len(sline) > 1 and sline[0] == ",": @@ -243,7 +248,7 @@ if len(sys.argv) == 3 and "-h" in sys.argv[2]: exit() msg = "Options for detector {}\n".format(detector) - msg += "*"*len(msg)+"\n\n" + msg += "*" * len(msg) + "\n\n" # basically, this creates help in the form of # @@ -285,6 +290,7 @@ elif len(sys.argv) >= 3: ext_func = notebooks[detector][caltype].get("extend parms", None) + def do_parse(nb, parser, overwrite_reqs=False): parser.description = make_epilog(nb) parms = extract_parameters(nb) @@ -326,12 +332,23 @@ elif len(sys.argv) >= 3: required=required, action=make_intelli_list(ltype) if range_allowed else None) elif p.type == bool: - pars_group.add_argument("--{}".format(consolize_name(p.name)), - action="store_true", - default=default, - help=helpstr, - required=required) - + # check if an input arg is given with an extra "-no" for + # forcing to convert a bool to False. + # Otherwise leave the default value from the notebook + # or convert to true if the bool arg is given. + if consolize_name("--no-{}".format(p.name)) in sys.argv: + pars_group.add_argument("--{}".format(consolize_name(p.name)), + action="store_false", + default=False, + help=helpstr, + required=required) + sys.argv.remove(consolize_name("--no-{}".format(p.name))) + else: + pars_group.add_argument("--{}".format(consolize_name(p.name)), + action="store_true", + default=default, + help=helpstr, + required=required) else: pars_group.add_argument("--{}".format(consolize_name(p.name)), type=p.type, @@ -376,7 +393,7 @@ elif len(sys.argv) >= 3: extention = f(*callargs) fcc = first_code_cell(nb) - fcc["source"] += "\n"+extention + fcc["source"] += "\n" + extention parser = make_initial_parser() do_parse(nb, parser, False) @@ -608,7 +625,6 @@ def run(): if func is None: warnings.warn("Didn't find concurrency function {} in notebook".format(ext_func), RuntimeWarning) - else: # remove help calls as they will cause the argument parser to exit known, remaining = parser.parse_known_args() @@ -624,7 +640,7 @@ def run(): extention = f(*callargs) fcc = first_code_cell(nb) - fcc["source"] += "\n"+extention + fcc["source"] += "\n" + extention parms = extract_parameters(nb) @@ -642,14 +658,14 @@ def run(): run_uuid = uuid4() # check that a modules field is present if we run concurrently - if not has_parm(parms, concurrency["parameter"]) and concurrency["parameter"] is not None: + if not has_parm(parms, concurrency["parameter"]) and concurrency["parameter"] is not None: msg = "Notebook cannot be run concurrently: no {} parameter".format( concurrency["parameter"]) warnings.warn(msg, RuntimeWarning) if not has_parm(parms, "cluster_profile"): warnings.warn("Notebook has no cluster_profile parameter, " + - "running on cluster will likeyl fail!", RuntimeWarning) + "running on cluster will likely fail!", RuntimeWarning) elif "cluster_profile" not in args or args["cluster_profile"] == parser.get_default('cluster_profile'): args["cluster_profile"] = "slurm_prof_{}".format(run_uuid) @@ -740,7 +756,6 @@ def run(): if func is None: warnings.warn("Didn't find concurrency function {} in notebook".format(con_func), RuntimeWarning) - else: df = {} exec(func, df) @@ -749,7 +764,6 @@ def run(): sig = inspect.signature(f) callargs = [] if cvals: - # in case default needs to be used for function call args[cvar] = cvals for arg in sig.parameters: @@ -769,7 +783,7 @@ def run(): # Job is not final if there are dependent notebooks jobid = 
concurrent_run(run_tmp_path, nb, notebook, args, cvar, [cval, ] if not isinstance( - cval, list) and cvtype is list else cval, + cval, list) and cvtype is list else cval, cnum == len(list(cvals)) - 1 and len(dep_notebooks) == 0, joblist, fmtcmd, @@ -777,7 +791,6 @@ def run(): show_title=show_title, ureservation=ureservation) joblist.append(jobid) - # Run dependent notebooks for i, notebook in enumerate(dep_notebooks): notebook_path = os.path.abspath( @@ -786,12 +799,12 @@ def run(): nb = nbformat.read(f, as_version=4) final_job = i == len(dep_notebooks) - 1 jobid = concurrent_run(run_tmp_path, nb, os.path.basename(notebook), - args, - final_job=final_job, - job_list=joblist, fmtcmd=fmtcmd, - cluster_cores=cluster_cores, - sequential=sequential, priority=priority, - dependent=True, ureservation=ureservation) + args, + final_job=final_job, + job_list=joblist, fmtcmd=fmtcmd, + cluster_cores=cluster_cores, + sequential=sequential, priority=priority, + dependent=True, ureservation=ureservation) joblist.append(jobid) if not all([j is None for j in joblist]): diff --git a/xfel_calibrate/notebooks.py b/xfel_calibrate/notebooks.py index 6baf79bd43d40cd5cafe3bb4763779f4a892b073..c5375bc0e052aa4d304dafc873ce2eb060b2bdda 100644 --- a/xfel_calibrate/notebooks.py +++ b/xfel_calibrate/notebooks.py @@ -113,6 +113,13 @@ notebooks = { "default concurrency": None, "cluster cores": 1}, }, + "STATS_FROM_DB2": { + "notebook": "notebooks/generic/PlotFromCalDB_NBC.ipynb", + "dep_notebooks": ["notebooks/generic/PlotFromCalDB_Summary_NBC.ipynb"], + "concurrency": {"parameter": "submodules", + "default concurrency": None, + "cluster cores": 1}, + }, }, "TUTORIAL": { "TEST": {