Commit 5c9b396b authored by Nuno Duarte, committed by Karim Ahmed

badpixel_offset_sigma=2

parent 719d1785
1 merge request: !824 [EPIX][DARK] Mark dead pixels as Bad Pixels
%% Cell type:markdown id: tags:
# ePix100 Dark Characterization
Author: European XFEL Detector Group, Version: 2.0
The following notebook provides dark image analysis and calibration constants of the ePix100 detector.
Dark characterization evaluates offset and noise of the detector and gives information about bad pixels.
Noise and bad pixels maps are calculated independently for each of the 4 ASICs of ePix100, since their noise behaviour can be significantly different.
Common mode correction can be applied to increase sensitivity to noise-related bad pixels. It is achieved by subtracting from each pixel the median of all pixels that are read out at the same time along its row/column. This is done iteratively: in each iteration a new bad pixels map is calculated and used to mask the data while the common mode values across the rows/columns are updated.
The resulting maps are saved as .h5 files for later use and injected into the calibration database.
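A minimal sketch of the row/column median subtraction described above, using plain NumPy on a toy array (illustrative only; the actual correction later in this notebook uses `xcal.CommonModeCorrection` with the detector geometry):
%% Cell type:code id: tags:
``` python
# Minimal sketch (not part of the pipeline): remove common mode on a toy
# frame by subtracting the median along rows, then along columns.
import numpy as np

rng = np.random.default_rng(42)
frame = rng.normal(100, 5, size=(8, 8))   # toy dark frame
frame += rng.normal(0, 3, size=(8, 1))    # inject a row-wise common mode

# Median over pixels read out together; nanmedian so masked (NaN) bad
# pixels do not bias the estimate
frame -= np.nanmedian(frame, axis=1, keepdims=True)  # row common mode
frame -= np.nanmedian(frame, axis=0, keepdims=True)  # column common mode
```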
%% Cell type:code id: tags:
``` python
in_folder = '/gpfs/exfel/exp/HED/202330/p900338/raw' # input folder, required
out_folder = '' # output folder, required
metadata_folder = '' # Directory containing calibration_metadata.yml when run by xfel-calibrate
sequence = 0 # sequence file to use
run = 176 # which run to read data from, required
# Parameters for accessing the raw data.
karabo_id = "HED_IA1_EPX100-1" # karabo ID
karabo_da = ["EPIX01"] # data aggregators
receiver_template = "RECEIVER" # detector receiver template for accessing raw data files
path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # the template to use to access data
instrument_source_template = '{}/DET/{}:daqOutput' # instrument detector data source in h5files
# Parameters for the calibration database.
use_dir_creation_date = True # use the run directory creation date as the constants' creation time
cal_db_interface = "tcp://max-exfl016:8020" # calibration DB interface to use
cal_db_timeout = 300000 # timeout on caldb requests
db_output = False # Output constants to the calibration database
local_output = True # Output constants locally
# Conditions used for injected calibration constants.
bias_voltage = 200 # Bias voltage
in_vacuum = False # Detector operated in vacuum
fix_integration_time = -1 # Integration time. Set to -1 to read from .h5 file
fix_temperature = -1 # Fixed temperature in Kelvin. Set to -1 to read from .h5 file
temp_limits = 5 # Limit for parameter Operational temperature
badpixel_noise_sigma = 5 # Bad pixels defined by noise values outside n times this std from median. Default: 5
badpixel_offset_sigma = 2 # Bad pixels defined by offset values outside n times this std from median. Default: 2
CM_N_iterations = 2 # Number of iterations for common mode correction. Set to 0 to skip it
# Parameters used during selecting raw data trains.
min_trains = 1 # Minimum number of trains that should be available to process dark constants. Default 1.
max_trains = 1000 # Maximum number of trains to use for processing dark constants. Set to 0 to use all available trains.
# Don't delete! myMDC sends this by default.
operation_mode = '' # Detector operation mode, optional
# TODO: delete after removing from calibration_configurations
db_module = '' # ID of module in calibration database, this parameter is ignored in the notebook. TODO: remove from calibration_configurations.
```
%% Cell type:code id: tags:
``` python
import os
import warnings
import matplotlib.pyplot as plt
import numpy as np
from IPython.display import display, Markdown
from prettytable import PrettyTable
from extra_data import RunDirectory
from pathlib import Path
import XFELDetAna.xfelprofiler as xprof
from XFELDetAna import xfelpyanatools as xana
from XFELDetAna import xfelpycaltools as xcal
from XFELDetAna.plotting.util import prettyPlotting
from cal_tools.enums import BadPixels
from cal_tools.step_timing import StepTimer
from cal_tools.epix100 import epix100lib
from cal_tools.tools import (
get_dir_creation_date,
get_pdu_from_db,
get_report,
save_const_to_h5,
send_to_db,
)
from iCalibrationDB import Conditions, Constants
```
%% Cell type:code id: tags:
``` python
%matplotlib inline
warnings.filterwarnings('ignore')
prettyPlotting = True
profiler = xprof.Profiler()
profiler.disable()
instrument_src = instrument_source_template.format(karabo_id, receiver_template)
```
%% Cell type:code id: tags:
``` python
# Read report path and create file location tuple to add with the injection
proposal = list(filter(None, in_folder.strip('/').split('/')))[-2]
file_loc = f'proposal:{proposal} runs:{run}'
report = get_report(metadata_folder)
ped_dir = os.path.join(in_folder, f"r{run:04d}")
fp_name = path_template.format(run, karabo_da[0]).format(sequence)
run_dir = RunDirectory(ped_dir)
print(f"Reading from: {Path(ped_dir) / fp_name}")
print(f"Run number: {run}")
print(f"Instrument H5File source: {instrument_src}")
if use_dir_creation_date:
creation_time = get_dir_creation_date(in_folder, run)
print(f"Using {creation_time.isoformat()} as creation time")
os.makedirs(out_folder, exist_ok=True)
```
%% Cell type:code id: tags:
``` python
# Read sensor size
sensor_size = run_dir[instrument_src, 'data.image.dims'].as_single_value(reduce_by='first') # (x=768, y=708) expected
sensor_size = sensor_size[sensor_size != 1] # data.image.dims for old data is [768, 708, 1]
assert np.array_equal(sensor_size, [768, 708]), 'Unexpected sensor dimensions.'
# Path to pixels ADC values
pixels_src = (instrument_src, "data.image.pixels")
# Total number of dark images (one per train) available to process
n_trains = run_dir.get_data_counts(*pixels_src).shape[0]
# Modify n_trains to process based on given maximum
# and minimum number of trains.
if max_trains:
n_trains = min(max_trains, n_trains)
if n_trains < min_trains:
raise ValueError(
f"Less than {min_trains} trains are available in RAW data."
" Not enough data to process darks.")
print(f"Number of dark images to analyze: {n_trains}")
```
%% Cell type:code id: tags:
``` python
ctrl_data = epix100lib.epix100Ctrl(
run_dc=run_dir,
instrument_src=instrument_src,
ctrl_src=f"{karabo_id}/DET/CONTROL",
)
# Read integration time
if fix_integration_time == -1:
integration_time = ctrl_data.get_integration_time()
integration_time_str_add = ''
else:
integration_time = fix_integration_time
integration_time_str_add = '(manual input)'
# Read temperature
if fix_temperature == -1:
temperature = ctrl_data.get_temprature()
temperature_k = temperature + 273.15
temp_str_add = ''
else:
temperature_k = fix_temperature
temperature = fix_temperature - 273.15
temp_str_add = '(manual input)'
# Print operating conditions
print(f"Bias voltage: {bias_voltage} V")
print(f"Detector integration time: {integration_time} \u03BCs {integration_time_str_add}")
print(f"Mean temperature: {temperature:0.2f}°C / {temperature_k:0.2f} K {temp_str_add}")
print(f"Operated in vacuum: {in_vacuum}")
```
%% Cell type:code id: tags:
``` python
# Factor code used repeatedly throughout the notebook into a function
def stats_calc(data):
'''
Calculates basic statistical parameters of the input data:
mean, standard deviation, median, minimum and maximum.
Returns dictionary with keys: 'mean', 'std', 'median',
'min', 'max' and 'legend'.
Parameters
----------
data : ndarray
Data to analyse.
'''
stats = {}
stats['mean'] = np.nanmean(data)
stats['std'] = np.nanstd(data)
stats['median'] = np.nanmedian(data)
stats['min'] = np.nanmin(data)
stats['max'] = np.nanmax(data)
stats['legend'] = ('mean: ' + str(np.round(stats['mean'],2)) +
'\nstd: ' + str(np.round(stats['std'],2)) +
'\nmedian: ' + str(np.round(stats['median'],2)) +
'\nmin: ' + str(np.round(stats['min'],2)) +
'\nmax: ' + str(np.round(stats['max'],2)))
return stats
```
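As a quick usage example of `stats_calc` (illustrative only; `demo` is synthetic Gaussian data, not detector output):
%% Cell type:code id: tags:
``` python
# Illustrative only: exercise stats_calc on synthetic Gaussian data
demo = np.random.default_rng(0).normal(loc=100, scale=5, size=(708, 768))
print(stats_calc(demo)['legend'])
```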
%% Cell type:code id: tags:
``` python
step_timer = StepTimer()
step_timer.start()
# Read data
data_dc = run_dir.select(
*pixels_src, require_all=True).select_trains(np.s_[:n_trains])
data = data_dc[pixels_src].ndarray()
# Instantiate constant maps to be filled with offset, noise and bad pixels maps
constant_maps = {}
step_timer.done_step('Darks loaded. Elapsed Time')
```
%% Cell type:markdown id: tags:
## Offset Map
%% Cell type:code id: tags:
``` python
step_timer.start()
# Calculate offset per pixel and store it in constant_maps
constant_maps['Offset'] = np.nanmean(data, axis=0)[..., np.newaxis]
# Calculate basic statistical parameters
stats = stats_calc(constant_maps['Offset'].flatten())
# Offset map
fig = xana.heatmapPlot(
constant_maps['Offset'].squeeze(),
lut_label='[ADU]',
x_label='Column',
y_label='Row',
vmin=max(0, np.round((stats['median']-stats['std'])/250)*250), # Force cb label to be multiple of 250 for reproducibility
vmax=min(np.power(2,14)-1, np.round((stats['median']+stats['std'])/250)*250)
)
fig.suptitle('Offset Map', x=.48, y=.9, fontsize=16)
fig.set_size_inches(h=15, w=15)
# Offset Histogram
h, c = np.histogram(
constant_maps['Offset'].flatten(),
bins = np.arange(stats['min'], stats['max'], stats['std']/100)
)
d = {'x': c[:-1],
'y': h,
'color': 'blue'
}
fig = xana.simplePlot(
d,
aspect=1.5,
x_label='Offset [ADU]',
y_label='Counts',
x_range=(0, np.power(2,14)-1),
y_range=(0, max(h)*1.1),
y_log=True
)
fig.text(
s=stats['legend'],
x=.7,
y=.7,
fontsize=14,
bbox=dict(facecolor='yellow', edgecolor='black', alpha=.1)
)
fig.suptitle('Offset Map Histogram', x=.5, y=.92, fontsize=16)
step_timer.done_step('Offset Map created. Elapsed Time')
```
%% Cell type:markdown id: tags:
## Noise Map
%% Cell type:code id: tags:
``` python
step_timer.start()
# Calculate noise per pixel and store it in constant_maps
constant_maps['Noise'] = np.nanstd(data, axis=0)[..., np.newaxis]
# Calculate basic statistical parameters
stats = stats_calc(constant_maps['Noise'].flatten())
# Noise heat map
fig = xana.heatmapPlot(
constant_maps['Noise'].squeeze(),
lut_label='[ADU]',
x_label='Column',
y_label='Row',
vmin=max(0, np.round((stats['median'] - badpixel_noise_sigma*stats['std']))),
vmax=np.round(stats['median'] + badpixel_noise_sigma*stats['std'])
)
fig.suptitle('Noise Map', x=.5, y=.9, fontsize=16)
fig.set_size_inches(h=15, w=15)
# Calculate overall noise histogram
bins = np.arange(max(0, stats['mean'] - badpixel_noise_sigma*stats['std']),
stats['mean'] + badpixel_noise_sigma*stats['std'],
stats['std']/100)
h, c = np.histogram(
constant_maps['Noise'].flatten(),
bins = bins
)
d = [{'x': c[:-1],
'y': h,
'color': 'black',
'label': 'Total'
}]
# Calculate per ASIC histogram
asic = []
asic.append({'noise': constant_maps['Noise'][:int(sensor_size[1]//2), int(sensor_size[0]//2):],
'label': 'ASIC 0 (bottom right)',
'color': 'blue'})
asic.append({'noise': constant_maps['Noise'][int(sensor_size[1]//2):, int(sensor_size[0]//2):],
'label': 'ASIC 1 (top right)',
'color': 'red'})
asic.append({'noise': constant_maps['Noise'][int(sensor_size[1]//2):, :int(sensor_size[0]//2)],
'label': 'ASIC 2 (top left)',
'color': 'green'})
asic.append({'noise': constant_maps['Noise'][:int(sensor_size[1]//2), :int(sensor_size[0]//2)],
'label': 'ASIC 3 (bottom left)',
'color': 'magenta'})
min_std = np.inf
for a in asic:
h, c = np.histogram(a['noise'].flatten(), bins=bins)
d.append({'x': c[:-1], 'y': h, 'color': a['color'], 'label': a['label']})
min_std = np.nanmin((min_std, np.nanstd(a['noise'])))
print(a['label'][:6] +
': median = ' + "{:.2f}".format(np.round(np.nanmedian(a['noise']),2)) +
' | std = ' + "{:.2f}".format(np.round(np.nanstd(a['noise']),2)) +
a['label'][6:])
arg_max_median = 0
for i in range(0,np.size(d)):
arg_max_median = np.max((arg_max_median, np.argmax(d[i]['y'])))
# Plot noise histogram
fig = xana.simplePlot(
d,
aspect=1.5,
x_label='Noise [ADU]',
y_label='Counts',
x_range=(max(0, stats['median'] - badpixel_noise_sigma*stats['std']),
stats['median'] + badpixel_noise_sigma*stats['std']),
y_range=(0, max(d[0]['y'])*1.1),
)
plt.grid(linestyle = ':')
leg = fig.legend(bbox_to_anchor=(.42, .88),fontsize = 14)
fig.text(
s=stats['legend'],
x=.75,
y=.7,
fontsize=14,
bbox=dict(facecolor='yellow', edgecolor='black', alpha=.1)
)
fig.suptitle('Noise Map Histogram',x=.5,y=.92,fontsize=16)
step_timer.done_step('\nNoise Map created. Elapsed Time')
```
%% Cell type:markdown id: tags:
## Bad Pixels
The bad pixel map is deduced by comparing offset and noise of each pixel ($v_i$) against the median value of the respective maps for each ASIC ($v_k$):
$$
v_i > \mathrm{median}(v_{k}) + n \sigma_{v_{k}}
$$
or
$$
v_i < \mathrm{median}(v_{k}) - n \sigma_{v_{k}}
$$
Values are encoded in a 32-bit mask, where for bad pixels deduced from dark images the following non-zero entries are relevant:
%% Cell type:code id: tags:
``` python
def print_bp_entry(bp):
'''
Prints bad pixels bit encoding.
Parameters
----------
bp : enum 'BadPixels'
'''
print('{:<23s}: {:032b} ({})'.format(bp.name, bp.value, bp.real))
print_bp_entry(BadPixels.OFFSET_OUT_OF_THRESHOLD)
print_bp_entry(BadPixels.NOISE_OUT_OF_THRESHOLD)
print_bp_entry(BadPixels.OFFSET_NOISE_EVAL_ERROR)
```
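As a toy illustration of the threshold test and the bit encoding (illustrative only; the per-ASIC evaluation used for the real constants is done by `eval_bpidx` below), pixels outside median ± n·σ get the corresponding flag OR-ed into a `uint32` map:
%% Cell type:code id: tags:
``` python
# Toy illustration only: flag values outside median ± n*sigma and
# encode them in a 32-bit mask; OR-ing lets several flags coexist
toy = np.array([[10., 11., 50.],   # 50 is a clear outlier
                [ 9., 10., 10.]])
mdn, std, n = np.nanmedian(toy), np.nanstd(toy), 2
toy_mask = np.zeros(toy.shape, dtype=np.uint32)
outliers = (toy > mdn + n*std) | (toy < mdn - n*std)
toy_mask[outliers] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value
print(toy_mask)
```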
%% Cell type:code id: tags:
``` python
def eval_bpidx(const_map, n_sigma, block_size):
'''
Evaluates bad pixels by comparing the average offset and
noise of each pixel against the median value of the
respective maps of each ASIC.
Returns boolean array.
Parameters
----------
const_map : ndarray
Offset or noise constant map to input.
n_sigma : float
Standard deviation multiplicity interval outside
which bad pixels are defined.
block_size : ndarray
dimensions ([x,y]) of each ASIC.
'''
blocks = {} # Each block corresponds to 1 ASIC
blocks['0'] = const_map[:int(block_size[1]), int(block_size[0]):] # bottom right
blocks['1'] = const_map[int(block_size[1]):, int(block_size[0]):] # top right
blocks['2'] = const_map[int(block_size[1]):, :int(block_size[0])] # top left
blocks['3'] = const_map[:int(block_size[1]), :int(block_size[0])] # bottom left
idx = {}
for b in range(0, len(blocks)):
mdn = np.nanmedian(blocks[str(b)])
std = np.nanstd(blocks[str(b)])
idx[str(b)] = ( (blocks[str(b)] > mdn + n_sigma*std) | (blocks[str(b)] < mdn - n_sigma*std) )
idx_output = np.zeros(const_map.shape, dtype=bool)
idx_output[:int(block_size[1]), int(block_size[0]):] = idx['0']
idx_output[int(block_size[1]):, int(block_size[0]):] = idx['1']
idx_output[int(block_size[1]):, :int(block_size[0])] = idx['2']
idx_output[:int(block_size[1]), :int(block_size[0])] = idx['3']
return idx_output
```
%% Cell type:code id: tags:
``` python
def bp_analysis_table(bad_pixels_map, title = ''):
'''
Prints a table with short analysis of the number and
percentage of bad pixels on count of offset or noise
out of threshold, or evaluation error.
Returns PrettyTable.
Parameters
----------
bad_pixels_map : ndarray
Bad pixel map to analyse.
title : string, optional
Table title to be used.
'''
num_bp = np.sum(bad_pixels_map!=0)
offset_bp = np.sum(bad_pixels_map==1)
noise_bp = np.sum(bad_pixels_map==2)
eval_error_bp = np.sum(bad_pixels_map==4)
t = PrettyTable()
t.field_names = [title]
t.add_row(['Total number of Bad Pixels : {:<5} ({:<.2f}%)'.format(num_bp, 100*num_bp/np.prod(bad_pixels_map.shape))])
t.add_row([' Offset out of threshold : {:<5} ({:<.2f}%)'.format(offset_bp, 100*offset_bp/np.prod(bad_pixels_map.shape))])
t.add_row([' Noise out of threshold : {:<5} ({:<.2f}%)'.format(noise_bp, 100*noise_bp/np.prod(bad_pixels_map.shape))])
t.add_row([' Evaluation error : {:<5} ({:<.2f}%)'.format(eval_error_bp, 100*eval_error_bp/np.prod(bad_pixels_map.shape))])
return t
```
%% Cell type:code id: tags:
``` python
# Add bad pixels from darks to constant_maps
constant_maps['BadPixelsDark'] = np.zeros(constant_maps['Offset'].shape, np.uint32)
# Find noise related bad pixels
constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Noise'], badpixel_noise_sigma, sensor_size//2)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value
constant_maps['BadPixelsDark'][~np.isfinite(constant_maps['Noise'])] = BadPixels.OFFSET_NOISE_EVAL_ERROR.value
# Find offset related bad pixels
constant_maps['BadPixelsDark'][eval_bpidx(constant_maps['Offset'], badpixel_offset_sigma, sensor_size//2)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value
constant_maps['BadPixelsDark'][~np.isfinite(constant_maps['Offset'])] = BadPixels.OFFSET_NOISE_EVAL_ERROR.value
# Plot Bad Pixels Map
fig = xana.heatmapPlot(
np.exp(np.nan_to_num(np.log(constant_maps['BadPixelsDark']), neginf=np.nan)).squeeze(), # convert zeros to NaN
lut_label='Bad pixel value [ADU]', # for plotting purposes
x_label='Column',
y_label='Row',
x_range=(0, sensor_size[0]),
y_range=(0, sensor_size[1])
)
fig.suptitle('Initial Bad Pixels Map', x=.5, y=.9, fontsize=16)
fig.set_size_inches(h=15, w=15)
step_timer.done_step('Initial Bad Pixels Map created. Elapsed Time')
print(bp_analysis_table(constant_maps['BadPixelsDark'], title='Initial Bad Pixel Analysis'))
```
%% Cell type:markdown id: tags:
## Common Mode Correction
Common mode correction is applied here to increase sensitivity to bad pixels. This is done in an iterative process. Each iteration consists of the following steps:
1. Common mode noise is calculated and subtracted from the data.
2. Noise map is recalculated.
3. Bad pixels are recalculated based on the new noise map.
4. Data is masked based on the new bad pixels map.
%% Cell type:code id: tags:
``` python
if CM_N_iterations < 1:
print('Common mode correction not applied.')
else:
commonModeBlockSize = (sensor_size//[8,2]).astype(int) # bank size (x=96,y=354) pixels
# Instantiate common mode calculators for column and row CM correction
cmCorrection_col = xcal.CommonModeCorrection(
sensor_size.tolist(),
commonModeBlockSize.tolist(),
'col',
parallel=False)
cmCorrection_row = xcal.CommonModeCorrection(
sensor_size.tolist(),
commonModeBlockSize.tolist(),
'row',
parallel=False)
```
%% Cell type:code id: tags:
``` python
if CM_N_iterations > 0:
data = data.astype(float) # This conversion is needed for offset subtraction
# Subtract Offset
data -= constant_maps['Offset'].squeeze()
noise_map_corrected = np.copy(constant_maps['Noise'])
bp_offset = [np.sum(constant_maps['BadPixelsDark']==1)]
bp_noise = [np.sum(constant_maps['BadPixelsDark']==2)]
for it in range (0,CM_N_iterations):
step_timer.start()
# Mask bad pixels
BadPixels_mask = np.squeeze(constant_maps['BadPixelsDark'] != 0)
BadPixels_mask = np.repeat(BadPixels_mask[np.newaxis,...],data.shape[0],axis=0)
data[BadPixels_mask] = np.nan
# Common mode correction
data = np.swapaxes(data,0,-1)
data = cmCorrection_col.correct(data)
data = cmCorrection_row.correct(data)
data = np.swapaxes(data,0,-1)
# Update noise map
noise_map_corrected = np.nanstd(data, axis=0)[..., np.newaxis]
# Update bad pixels map
constant_maps['BadPixelsDark'][eval_bpidx(noise_map_corrected, badpixel_noise_sigma, sensor_size//2)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value
bp_offset.append(np.sum(constant_maps['BadPixelsDark']==1))
bp_noise.append(np.sum(constant_maps['BadPixelsDark']==2))
print(bp_analysis_table(constant_maps['BadPixelsDark'], title=f'{it+1} CM correction iterations'))
step_timer.done_step('Elapsed Time')
print('\n')
# Apply final bad pixels mask
BadPixels_mask = np.broadcast_to(np.squeeze(constant_maps['BadPixelsDark'] != 0), data.shape)
data[BadPixels_mask] = np.nan
it = np.arange(0, CM_N_iterations+1)
plt.figure()
plt.plot(it, np.sum((bp_offset,bp_noise),axis=0), c = 'black', ls = '--', marker = 'o', label = 'Total')
plt.plot(it, bp_noise, c = 'red', ls = '--', marker = 'v', label = 'Noise out of threshold')
plt.plot(it, bp_offset, c = 'blue', ls = '--', marker = 's',label = 'Offset out of threshold')
plt.xticks(it)
plt.xlabel('CM correction iteration')
plt.ylabel('# Bad Pixels')
plt.legend()
plt.grid(linestyle = ':')
```
%% Cell type:code id: tags:
``` python
if CM_N_iterations > 0:
display(Markdown(f'## Common-Mode Corrected Bad Pixels Map\n'))
```
%% Cell type:code id: tags:
``` python
if CM_N_iterations > 0:
# Plot final bad pixels map
fig = xana.heatmapPlot(
np.exp(np.nan_to_num(np.log(constant_maps['BadPixelsDark']),neginf=np.nan)).squeeze(), # convert zeros to NaN
lut_label='Bad pixel value [ADU]', # for plotting purposes
x_label='Column',
y_label='Row',
x_range=(0, sensor_size[0]),
y_range=(0, sensor_size[1])
)
fig.suptitle('Final Bad Pixels Map', x=.5, y=.9, fontsize=16)
fig.set_size_inches(h=15, w=15)
print(bp_analysis_table(constant_maps['BadPixelsDark'], title='Final Bad Pixels Analysis'))
```
%% Cell type:markdown id: tags:
## Calibration Constants DB
Send the dark constants to the database and/or save them locally.
%% Cell type:code id: tags:
``` python
# Save constants to DB
md = None
for const_name in constant_maps.keys():
const = getattr(Constants.ePix100, const_name)()
const.data = constant_maps[const_name].data
# Set the operating condition
condition = Conditions.Dark.ePix100(
bias_voltage=bias_voltage,
integration_time=integration_time,
temperature=temperature_k,
in_vacuum=in_vacuum)
for parm in condition.parameters:
if parm.name == "Sensor Temperature":
parm.lower_deviation = temp_limits
parm.upper_deviation = temp_limits
# Get physical detector unit
db_module = get_pdu_from_db(
karabo_id=karabo_id,
karabo_da=karabo_da,
constant=const,
condition=condition,
cal_db_interface=cal_db_interface,
snapshot_at=creation_time)[0]
# Inject or save calibration constants
if db_output:
md = send_to_db(
db_module=db_module,
karabo_id=karabo_id,
constant=const,
condition=condition,
file_loc=file_loc,
report_path=report,
cal_db_interface=cal_db_interface,
creation_time=creation_time,
timeout=cal_db_timeout
)
if local_output:
md = save_const_to_h5(
db_module=db_module,
karabo_id=karabo_id,
constant=const,
condition=condition,
data=const.data,
file_loc=file_loc,
report=report,
creation_time=creation_time,
out_folder=out_folder
)
print(f"Calibration constant {const_name} is stored locally at {out_folder} \n")
print("Constants parameter conditions are:\n"
f"• Bias voltage: {bias_voltage}\n"
f"• Integration time: {integration_time}\n"
f"• Temperature: {temperature_k}\n"
f"• In Vacuum: {in_vacuum}\n"
f"• Creation time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\n")
```