
[EPIX100] [DARK] Adding Bad Pixels Map

Merged Nuno Duarte requested to merge feat/ePixDarkBadPixels into master
1 file changed: +267 −154
@@ -2,7 +2,9 @@
 "cells": [
 {
 "cell_type": "markdown",
-"metadata": {},
+"metadata": {
+"tags": []
+},
 "source": [
 "# ePix100 Dark Characterization\n",
 "\n",
@@ -19,14 +21,14 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"in_folder = '/gpfs/exfel/exp/HED/202030/p900136/raw' # input folder, required\n",
+"in_folder = '/gpfs/exfel/exp/HED/202230/p900247/raw' # input folder, required\n",
 "out_folder = '' # output folder, required\n",
 "sequence = 0 # sequence file to use\n",
-"run = 182 # which run to read data from, required\n",
+"run = 45 # which run to read data from, required\n",
 "\n",
 "# Parameters for accessing the raw data.\n",
-"karabo_id = \"HED_IA1_EPX100-2\" # karabo karabo_id\n",
-"karabo_da = [\"EPIX02\"] # data aggregators\n",
+"karabo_id = \"HED_IA1_EPX100-1\" # karabo karabo_id\n",
+"karabo_da = [\"EPIX01\"] # data aggregators\n",
 "receiver_template = \"RECEIVER\" # detector receiver template for accessing raw data files\n",
 "path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # the template to use to access data\n",
 "instrument_source_template = '{}/DET/{}:daqOutput' # instrument detector data source in h5files\n",
@@ -36,13 +38,15 @@
 "cal_db_interface = \"tcp://max-exfl016:8020\" # calibration DB interface to use\n",
 "cal_db_timeout = 300000 # timeout on caldb requests\n",
 "db_output = False # Output constants to the calibration database\n",
-"local_output = True # output constants locally\n",
+"local_output = True # Output constants locally\n",
 "\n",
 "# Conditions used for injected calibration constants.\n",
-"bias_voltage = 200 # bias voltage\n",
-"in_vacuum = False # detector operated in vacuum\n",
-"fix_temperature = 290. # fix temperature to this value\n",
-"temp_limits = 5 # limit for parameter Operational temperature\n",
+"bias_voltage = 200 # Bias voltage\n",
+"in_vacuum = False # Detector operated in vacuum\n",
+"fix_integration_time = -1 # Integration time. Set to -1 to read from .h5 file\n",
+"fix_temperature = -1 # Fixed temperature in Kelvin. Set to -1 to read from .h5 file\n",
+"temp_limits = 5 # Limit for parameter Operational temperature\n",
+"badpixel_threshold_sigma = 5. # Bad pixels defined by values outside n times this std from median\n",
 "\n",
 "# Parameters used during selecting raw data trains.\n",
 "min_trains = 1 # Minimum number of trains that should be available to process dark constants. Default 1.\n",
@@ -71,6 +75,7 @@
 "import XFELDetAna.xfelprofiler as xprof\n",
 "from XFELDetAna import xfelpyanatools as xana\n",
 "from XFELDetAna.plotting.util import prettyPlotting\n",
+"from cal_tools.enums import BadPixels\n",
 "from cal_tools.tools import (\n",
 " get_dir_creation_date,\n",
 " get_pdu_from_db,\n",
@@ -109,8 +114,6 @@
 "# Read report path and create file location tuple to add with the injection\n",
 "proposal = list(filter(None, in_folder.strip('/').split('/')))[-2]\n",
 "file_loc = f'proposal:{proposal} runs:{run}'\n",
-"pixels_x = 708\n",
-"pixels_y = 768\n",
 "report = get_report(out_folder)\n",
 "\n",
 "ped_dir = os.path.join(in_folder, f\"r{run:04d}\")\n",
@@ -131,10 +134,16 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"pixel_data = (instrument_src, \"data.image.pixels\")\n",
+"# Read sensor size\n",
+"sensor_size = run_dir[instrument_src, 'data.image.dims'].as_single_value(reduce_by='first') # (x=768, y=708) expected\n",
+"sensor_size = sensor_size[sensor_size != 1] # data.image.dims for old data is [768, 708, 1]\n",
+"assert np.array_equal(sensor_size, [768, 708]), 'Unexpected sensor dimensions.'\n",
+"\n",
+"# Path to pixels ADC values\n",
+"pixels_src = (instrument_src, \"data.image.pixels\")\n",
 "\n",
 "# Specifies total number of images to proceed\n",
-"n_trains = run_dir.get_data_counts(*pixel_data).shape[0]\n",
+"n_trains = run_dir.get_data_counts(*pixels_src).shape[0]\n",
 "\n",
 "# Modify n_trains to process based on given maximum\n",
 "# and minimum number of trains.\n",
@@ -146,25 +155,31 @@
 " f\"Less than {min_trains} trains are available in RAW data.\"\n",
 " \" Not enough data to process darks.\")\n",
 "\n",
-"print(f\"Number of dark images to analyze: {n_trains}.\")\n",
+"print(f\"Number of dark images to analyze: {n_trains}\")\n",
 "\n",
-"integration_time = int(run_dir.get_array(\n",
-" f\"{karabo_id}/DET/CONTROL\",\n",
-" \"expTime.value\")[0])\n",
-"temperature = np.mean(run_dir.get_array(\n",
-" instrument_src,\n",
-" \"data.backTemp\").values) / 100.\n",
-"\n",
-"if fix_temperature != 0:\n",
-" temperature_k = fix_temperature\n",
-" print(\"Temperature is fixed!\")\n",
-"else:\n",
-" temperature_k = temperature + 273.15\n",
-"\n",
-"print(f\"Bias voltage is {bias_voltage} V\")\n",
-"print(f\"Detector integration time is set to {integration_time}\")\n",
-"print(f\"Mean temperature was {temperature:0.2f} °C / {temperature_k:0.2f} K\")\n",
-"print(f\"Operated in vacuum: {in_vacuum} \")"
+"# Read integration time\n",
+"if fix_integration_time == -1:\n",
+" integration_time = run_dir[f\"{karabo_id}/DET/CONTROL\", 'expTime.value'].as_single_value(reduce_by='first')\n",
+" integration_time_str_add = ''\n",
+"else:\n",
+" integration_time = fix_integration_time\n",
+" integration_time_str_add = '(manual input)'\n",
+" \n",
+"# Read temperature \n",
+"if fix_temperature == -1:\n",
+" temperature = run_dir[instrument_src, 'data.backTemp'].ndarray().mean()/100.\n",
+" temperature_k = temperature + 273.15\n",
+" temp_str_add = ''\n",
+"else:\n",
+" temperature_k = fix_temperature\n",
+" temperature = fix_temperature - 273.15\n",
+" temp_str_add = '(manual input)'\n",
+" \n",
+"# Print operating conditions\n",
+"print(f\"Bias voltage: {bias_voltage} V\")\n",
+"print(f\"Detector integration time: {integration_time} \\u03BCs {integration_time_str_add}\")\n",
+"print(f\"Mean temperature: {temperature:0.2f}°C / {temperature_k:0.2f} K {temp_str_add}\")\n",
+"print(f\"Operated in vacuum: {in_vacuum}\")"
 ]
 },
 {
@@ -175,14 +190,27 @@
 },
 "outputs": [],
 "source": [
+"# Calculate noise and offset per pixel and global average, std and median\n",
 "data_dc = run_dir.select(\n",
-" *pixel_data, require_all=True).select_trains(np.s_[:n_trains])\n",
+" *pixels_src, require_all=True).select_trains(np.s_[:n_trains])\n",
 "print(f\"Reading data from: {data_dc.files}\\n\")\n",
 "\n",
-"data = data_dc[pixel_data].ndarray()\n",
+"data = data_dc[pixels_src].ndarray()\n",
 "\n",
 "noise_data = np.std(data, axis=0)\n",
-"offset_data = np.mean(data, axis=0)"
+"offset_data = np.mean(data, axis=0)\n",
+"\n",
+"noise_mean = np.mean(noise_data)\n",
+"noise_sigma = np.std(noise_data)\n",
+"noise_median = np.median(noise_data)\n",
+"noise_min = np.min(noise_data)\n",
+"noise_max = np.max(noise_data)\n",
+"\n",
+"offset_mean = np.mean(offset_data)\n",
+"offset_sigma = np.std(offset_data)\n",
+"offset_median = np.median(offset_data)\n",
+"offset_min = np.min(offset_data)\n",
+"offset_max = np.max(offset_data)"
 ]
 },
 {
@@ -191,10 +219,17 @@
 "metadata": {},
 "outputs": [],
 "source": [
+"# Create and fill offset and noise maps\n",
 "constant_maps = {}\n",
 "constant_maps['Offset'] = offset_data[..., np.newaxis]\n",
-"constant_maps['Noise'] = noise_data[..., np.newaxis]\n",
-"print(\"Initial constant maps are created\")"
+"constant_maps['Noise'] = noise_data[..., np.newaxis]"
+]
+},
+{
+"cell_type": "markdown",
+"metadata": {},
+"source": [
+"## Offset ###"
 ]
 },
 {
@@ -203,48 +238,237 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"#**************OFFSET MAP HISTOGRAM***********#\n",
-"ho, co = np.histogram(constant_maps['Offset'].flatten(), bins=700)\n",
-"\n",
+"#************** OFFSET HEAT MAP **************#\n",
+"fig = xana.heatmapPlot(\n",
+" constant_maps['Offset'][:, :, 0],\n",
+" lut_label='[ADU]',\n",
+" x_label='Column',\n",
+" y_label='Row',\n",
+" x_range=(0, sensor_size[0]),\n",
+" y_range=(0, sensor_size[1]), \n",
+" vmin=max(0, offset_median - badpixel_threshold_sigma*offset_sigma), \n",
+" vmax=min(np.power(2,14)-1, offset_median + badpixel_threshold_sigma*offset_sigma)\n",
+")\n",
+"fig.suptitle('Offset Map', x=.48, y=.9, fontsize=16)\n",
+"fig.set_size_inches(h=15, w=15)\n",
+"\n",
+"#************** OFFSET HISTOGRAM **************#\n",
+"binwidth = offset_sigma/100\n",
+"ho, co = np.histogram(\n",
+" constant_maps['Offset'].flatten(),\n",
+" bins = np.arange(offset_min, offset_max, binwidth)\n",
+")\n",
 "do = {'x': co[:-1],\n",
 " 'y': ho,\n",
 " 'y_err': np.sqrt(ho[:]),\n",
 " 'drawstyle': 'bars',\n",
-" 'color': 'cornflowerblue',\n",
-" }\n",
-"\n",
-"fig = xana.simplePlot(do, figsize='1col', aspect=2,\n",
-" x_label='Offset (ADU)',\n",
-" y_label=\"Counts\", y_log=True,\n",
-" )\n",
-"\n",
-"#*****NOISE MAP HISTOGRAM FROM THE OFFSET CORRECTED DATA*******#\n",
-"hn, cn = np.histogram(constant_maps['Noise'].flatten(), bins=200)\n",
-"\n",
+" 'color': 'cornflowerblue'}\n",
+"\n",
+"fig = xana.simplePlot(\n",
+" do, \n",
+" aspect=1.5,\n",
+" x_label='Offset [ADU]',\n",
+" y_label='Counts',\n",
+" x_range=(0, np.power(2,14)-1),\n",
+" y_range=(0, max(ho)*1.1),\n",
+" y_log=True\n",
+")\n",
+"fig.suptitle('Offset Distribution', x=.5,y =.92, fontsize=16)\n",
+"\n",
+"stats_str = (\n",
+" f'mean: {np.round(offset_mean,2)}\\n'\n",
+" f'std : {np.round(offset_sigma,2)}\\n'\n",
+" f'median: {np.round(offset_median,2)}\\n'\n",
+" f'min: {np.round(offset_min,2)}\\n'\n",
+" f'max: {np.round(offset_max,2)}'\n",
+")\n",
+"fig.text(\n",
+" s=stats_str,\n",
+" x=.7,\n",
+" y=.7,\n",
+" fontsize=14,\n",
+" bbox=dict(facecolor='yellow', edgecolor='black', alpha=.1));"
+]
+},
+{
+"cell_type": "markdown",
+"metadata": {
+"tags": []
+},
+"source": [
+"## Noise ###"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"#************** NOISE HEAT MAP **************#\n",
+"fig = xana.heatmapPlot(\n",
+" constant_maps['Noise'][:, :, 0],\n",
+" lut_label='[ADU]',\n",
+" x_label='Column', \n",
+" y_label='Row',\n",
+" x_range=(0, sensor_size[0]),\n",
+" y_range=(0, sensor_size[1]),\n",
+" vmin=max(0, noise_median - badpixel_threshold_sigma*noise_sigma), \n",
+" vmax=noise_median + badpixel_threshold_sigma*noise_sigma\n",
+")\n",
+"fig.suptitle('Noise Map', x=.5, y=.9, fontsize=16)\n",
+"fig.set_size_inches(h=15, w=15)\n",
+"\n",
+"#************** NOISE HISTOGRAM **************#\n",
+"binwidth = noise_sigma/100\n",
+"hn, cn = np.histogram(constant_maps['Noise'].flatten(),\n",
+" bins=np.arange((min(constant_maps['Noise'].flatten())),\n",
+" max(constant_maps['Noise'].flatten()) + binwidth,\n",
+" binwidth))\n",
 "dn = {'x': cn[:-1],\n",
 " 'y': hn,\n",
 " 'y_err': np.sqrt(hn[:]),\n",
 " 'drawstyle': 'bars',\n",
-" 'color': 'cornflowerblue',\n",
-" }\n",
-"\n",
-"fig = xana.simplePlot(dn, figsize='1col', aspect=2,\n",
-" x_label='Noise (ADU)',\n",
-" y_label=\"Counts\",\n",
-" y_log=True)\n",
-"\n",
-"#**************HEAT MAPS*******************#\n",
-"fig = xana.heatmapPlot(constant_maps['Offset'][:, :, 0],\n",
-" x_label='Columns', y_label='Rows',\n",
-" lut_label='Offset (ADU)',\n",
-" x_range=(0, pixels_y),\n",
-" y_range=(0, pixels_x), vmin=1000, vmax=4000)\n",
-"\n",
-"fig = xana.heatmapPlot(constant_maps['Noise'][:, :, 0],\n",
-" x_label='Columns', y_label='Rows',\n",
-" lut_label='Noise (ADU)',\n",
-" x_range=(0, pixels_y),\n",
-" y_range=(0, pixels_x), vmax=2 * np.mean(constant_maps['Noise']))"
+" 'color': 'cornflowerblue'}\n",
+"\n",
+"fig = xana.simplePlot(\n",
+" dn,\n",
+" aspect=1.5,\n",
+" x_label='Noise [ADU]',\n",
+" y_label='Counts',\n",
+" x_range=(max(0, noise_median - badpixel_threshold_sigma*noise_sigma),\n",
+" noise_median + badpixel_threshold_sigma*noise_sigma),\n",
+" y_range=(0, max(hn)*1.1),\n",
+" y_log=True\n",
+")\n",
+"fig.suptitle('Noise Distribution',x=.5,y=.92,fontsize=16);\n",
+"\n",
+"stats_str = (\n",
+" f'mean: {np.round(noise_mean,2)}\\n'\n",
+" f'std: {np.round(noise_sigma,2)}\\n'\n",
+" f'median: {np.round(noise_median,2)}\\n'\n",
+" f'min: {np.round(noise_min,2)}\\n'\n",
+" f'max: {np.round(noise_max,2)}'\n",
+")\n",
+"fig.text(\n",
+" s=stats_str,\n",
+" x=.7,\n",
+" y=.7,\n",
+" fontsize=14,\n",
+" bbox=dict(facecolor='yellow', edgecolor='black', alpha=.1));"
+]
+},
+{
+"cell_type": "markdown",
+"metadata": {},
+"source": [
+"## Bad Pixels ###\n",
+"\n",
+"The bad pixel map is deduced by comparing offset and noise of each pixel ($v_i$) against the median value of the respective maps ($v_k$):\n",
+"\n",
+"$$ \n",
+"v_i > \\mathrm{median}(v_{k}) + n \\sigma_{v_{k}}\n",
+"$$\n",
+"or\n",
+"$$\n",
+"v_i < \\mathrm{median}(v_{k}) - n \\sigma_{v_{k}} \n",
+"$$\n",
+"\n",
+"Values are encoded in a 32 bit mask, where for the dark image deduced bad pixels the following non-zero entries are relevant:"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"def print_bp_entry(bp):\n",
+" '''\n",
+" Prints bad pixels bit encoding.\n",
+" \n",
+" Parameters\n",
+" ----------\n",
+" bp : enum 'BadPixels'\n",
+" '''\n",
+" print('{:<30s} {:032b}'.format(bp.name, bp.value))\n",
+"\n",
+"print_bp_entry(BadPixels.OFFSET_OUT_OF_THRESHOLD)\n",
+"print_bp_entry(BadPixels.NOISE_OUT_OF_THRESHOLD)\n",
+"print_bp_entry(BadPixels.OFFSET_NOISE_EVAL_ERROR)"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"def eval_bpidx(const_map, n_sigma):\n",
+" '''\n",
+" Evaluates bad pixels by comparing offset and noise of each\n",
+" pixel against the median value of the respective maps.\n",
+" \n",
+" Returns boolean array\n",
+" \n",
+" Parameters\n",
+" ----------\n",
+" const_map : ndarray\n",
+" Offset or noise constant map to input.\n",
+" n_sigma : float\n",
+" Standard deviation multiplicity interval outside\n",
+" which bad pixels are defined.\n",
+" '''\n",
+" \n",
+" mdn = np.nanmedian(const_map)\n",
+" std = np.nanstd(const_map) \n",
+" idx = ( (const_map > mdn + n_sigma*std)\n",
+" | (const_map < mdn - n_sigma*std) )\n",
+"\n",
+" return idx"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {
+"tags": []
+},
+"outputs": [],
+"source": [
+"# Add BadPixels to constant_maps\n",
+"constant_maps['BadPixelsDark'] = np.zeros(constant_maps['Offset'].shape, np.uint32)\n",
+"\n",
+"# Noise related bad pixels\n",
+"constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Noise'], badpixel_threshold_sigma)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value\n",
+"constant_maps['BadPixelsDark'][~np.isfinite(constant_maps['Noise'])] = BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
+"\n",
+"# Offset related bad pixels\n",
+"constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Offset'], badpixel_threshold_sigma)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value\n",
+"constant_maps['BadPixelsDark'][~np.isfinite(constant_maps['Offset'])] = BadPixels.OFFSET_NOISE_EVAL_ERROR.value\n",
+"\n",
+"num_bad_pixels = np.sum(constant_maps['BadPixelsDark']!=0)\n",
+"\n",
+"print('Number of Bad Pixels: ' + str(num_bad_pixels) + ' (' + str(np.round(100*num_bad_pixels/(sensor_size[0]*sensor_size[1]),2)) + '%)')"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"#************** BAD PIXELS HEAT MAP **************#\n",
+"fig = xana.heatmapPlot(\n",
+" np.log2(constant_maps['BadPixelsDark'][:, :, 0]),\n",
+" lut_label='Bad pixel bit assigned',\n",
+" x_label='Column',\n",
+" y_label='Row',\n",
+" x_range=(0, sensor_size[0]),\n",
+" y_range=(0, sensor_size[1])\n",
+")\n",
+"fig.suptitle('Bad Pixels Map', x=.5, y=.9, fontsize=16)\n",
+"fig.set_size_inches(h=15, w=15)"
 ]
 },
 {
@@ -254,52 +478,74 @@
 "outputs": [],
 "source": [
 "# Save constants to DB\n",
-"dclass=\"ePix100\"\n",
 "md = None\n",
 "\n",
 "for const_name in constant_maps.keys():\n",
-" det = getattr(Constants, dclass)\n",
-" const = getattr(det, const_name)()\n",
+" const = getattr(Constants.ePix100, const_name)()\n",
 " const.data = constant_maps[const_name].data\n",
 "\n",
-" # set the operating condition\n",
-" condition = getattr(Conditions.Dark, dclass)(bias_voltage=bias_voltage,\n",
-" integration_time=integration_time,\n",
-" temperature=temperature_k,\n",
-" in_vacuum=in_vacuum)\n",
+" # Set the operating condition\n",
+" condition = Conditions.Dark.ePix100(\n",
+" bias_voltage=bias_voltage,\n",
+" integration_time=integration_time,\n",
+" temperature=temperature_k,\n",
+" in_vacuum=in_vacuum)\n",
+" \n",
 " for parm in condition.parameters:\n",
 " if parm.name == \"Sensor Temperature\":\n",
 " parm.lower_deviation = temp_limits\n",
 " parm.upper_deviation = temp_limits\n",
 "\n",
-" db_module = get_pdu_from_db(karabo_id, karabo_da, const,\n",
-" condition, cal_db_interface,\n",
-" snapshot_at=creation_time)[0]\n",
+" # Get physical detector unit\n",
+" db_module = get_pdu_from_db(\n",
+" karabo_id=karabo_id,\n",
+" karabo_da=karabo_da,\n",
+" constant=const,\n",
+" condition=condition,\n",
+" cal_db_interface=cal_db_interface,\n",
+" snapshot_at=creation_time)[0]\n",
 "\n",
+" # Inject or save calibration constants\n",
 " if db_output:\n",
-" md = send_to_db(db_module, karabo_id, const, condition,\n",
-" file_loc=file_loc, report_path=report,\n",
-" cal_db_interface=cal_db_interface,\n",
-" creation_time=creation_time,\n",
-" timeout=cal_db_timeout)\n",
+" md = send_to_db(\n",
+" db_module=db_module,\n",
+" karabo_id=karabo_id,\n",
+" constant=const,\n",
+" condition=condition,\n",
+" file_loc=file_loc,\n",
+" report_path=report,\n",
+" cal_db_interface=cal_db_interface,\n",
+" creation_time=creation_time,\n",
+" timeout=cal_db_timeout\n",
+" )\n",
 " if local_output:\n",
-" md = save_const_to_h5(db_module, karabo_id, const, condition,\n",
-" const.data, file_loc, report,\n",
-" creation_time, out_folder)\n",
+" md = save_const_to_h5(\n",
+" db_module=db_module,\n",
+" karabo_id=karabo_id,\n",
+" constant=const,\n",
+" condition=condition,\n",
+" data=const.data,\n",
+" file_loc=file_loc,\n",
+" report=report,\n",
+" creation_time=creation_time,\n",
+" out_folder=out_folder\n",
+" )\n",
 " print(f\"Calibration constant {const_name} is stored locally at {out_folder} \\n\")\n",
 "\n",
-"print(\"Constants parameter conditions are:\\n\")\n",
-"print(f\"• Bias voltage: {bias_voltage}\\n• Integration time: {integration_time}\\n\"\n",
-" f\"• Temperature: {temperature_k}\\n• In Vacuum: {in_vacuum}\\n\"\n",
-" f\"• Creation time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\\n\")"
+"print(\"Constants parameter conditions are:\\n\"\n",
+" f\"• Bias voltage: {bias_voltage}\\n\"\n",
+" f\"• Integration time: {integration_time}\\n\"\n",
+" f\"• Temperature: {temperature_k}\\n\"\n",
+" f\"• In Vacuum: {in_vacuum}\\n\"\n",
+" f\"• Creation time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\\n\")"
 ]
 }
 ],
 "metadata": {
 "kernelspec": {
-"display_name": "Python 3",
+"display_name": "cal_venv",
 "language": "python",
-"name": "python3"
+"name": "cal_venv"
 },
 "language_info": {
 "codemirror_mode": {
@@ -329,8 +575,12 @@
 "latex_user_defs": false,
 "report_style_numbering": false,
 "user_envs_cfg": false
-}
+},
+"toc-autonumbering": false,
+"toc-showcode": false,
+"toc-showmarkdowntxt": false,
+"toc-showtags": false
 },
 "nbformat": 4,
-"nbformat_minor": 1
+"nbformat_minor": 4
 }
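
For reviewers who want to exercise the new thresholding outside the calibration pipeline, below is a minimal self-contained sketch of the bad-pixel logic this MR adds. eval_bpidx mirrors the notebook's function; the BadPixels flag class is a stand-in for cal_tools.enums.BadPixels with illustrative bit values (not the library's actual encoding), and the noise map with its planted outliers is synthetic.

# Standalone sketch of the MR's bad-pixel criterion (synthetic data).
import enum

import numpy as np


class BadPixels(enum.IntFlag):
    # Stand-in for cal_tools.enums.BadPixels; bit values are illustrative only.
    OFFSET_OUT_OF_THRESHOLD = 1 << 0
    NOISE_OUT_OF_THRESHOLD = 1 << 1
    OFFSET_NOISE_EVAL_ERROR = 1 << 2


def eval_bpidx(const_map, n_sigma):
    # Boolean mask of pixels further than n_sigma standard deviations
    # from the median of the whole map (same criterion as the notebook).
    mdn = np.nanmedian(const_map)
    std = np.nanstd(const_map)
    return (const_map > mdn + n_sigma * std) | (const_map < mdn - n_sigma * std)


rng = np.random.default_rng(0)
noise_map = rng.normal(3.0, 0.2, size=(708, 768))  # synthetic noise map [ADU]
noise_map[100, 200] = 10.0                         # plant one noisy pixel
noise_map[5, 5] = np.nan                           # and one unevaluable pixel

bad_pixels = np.zeros(noise_map.shape, np.uint32)
bad_pixels[eval_bpidx(noise_map, 5.0)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value
bad_pixels[~np.isfinite(noise_map)] = BadPixels.OFFSET_NOISE_EVAL_ERROR.value

n_bad = np.count_nonzero(bad_pixels)
print(f"Bad pixels: {n_bad} ({100 * n_bad / noise_map.size:.4f}%)")
# Expect the two planted pixels, plus at most a few chance 5-sigma outliers.

With a 5-sigma cut the chance of flagging a well-behaved Gaussian pixel is about 6e-7 per pixel, i.e. well under one pixel on average for a 708x768 sensor, so the percentage printed by the notebook is a quick sanity check that the threshold is not clipping the bulk of the distribution.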