Skip to content
Snippets Groups Projects
Commit 83ee763e authored by Mikhail Karnevskiy's avatar Mikhail Karnevskiy
Browse files

Merge branch 'feat/plot_cal_FCCD' into 'master'

Feat: Add plotting constants of FCCD

See merge request detectors/pycalibration!132
parents b84511dc 48439e07
No related branches found
No related tags found
1 merge request!132Feat: Add plotting constants of FCCD
......@@ -352,6 +352,19 @@ class HMType(Enum):
INSET_AXIS = 2
def get_range(data, scale):
    """
    Return a robust plotting range centred on the median of the data.

    The half-width of the range is ``scale`` median absolute deviations
    (MAD), which makes the range insensitive to outliers and to NaNs.

    :param data: numpy.ndarray of data points (NaNs are ignored)
    :param scale: half-width of the range in units of the MAD
    :return: tuple ``(low, high)`` with
        ``low = median - scale * MAD`` and ``high = median + scale * MAD``
    """
    med = np.nanmedian(data)
    # nanmedian reduces over all elements regardless of shape,
    # so no explicit flatten() is needed.
    mad = np.nanmedian(np.abs(data - med))
    return med - scale * mad, med + scale * mad
def hm_combine(data, fname=None, htype=None, **kwargs):
"""
Plot heatmap for calibration report
......
This diff is collapsed.
This diff is collapsed.
%% Cell type:markdown id: tags:
# Statistical analysis of calibration factors
Author: Mikhail Karnevskiy, Steffen Hauf, Version 0.2
Calibration constants for LPD1M detector from the data base with injection time between start_date and end_date are considered.
To be visualized, calibration constants are averaged per ASICs. Plots shows calibration constant over time for each constant and for each module. Summary plots overall modules are created.
In additional gain-slopes flat-field and pulse-capacitor are combined to relative-gain constant and presented as well. Noise in electron units is derived using gain factors and presented.
Values shown in plots are saved in h5 files.
All presented values correspond to the high and medium gain stages.
%% Cell type:code id: tags:
``` python
cluster_profile = "noDB" # The ipcluster profile to use
start_date = "2018-01-30" # Date to start investigation interval from
end_date = "2018-12-12" # Date to end investigation interval at, can be "now"
nconstants = 10 # Number of time stamps to plot. If not 0, overcome start_date.
constants = ["Offset", "Noise", "SlopesFF", "SlopesCI"] # constants to plot
modules = [2] # Modules, set to -1 for all, range allowed
bias_voltages = [250, 500] # Bias voltage
mem_cells = [1, 128, 256, 512] # Number of used memory cells. Typically: 4,32,64,128,176.
mem_cells = [128, 256, 512] # Number of used memory cells.
photon_energy = 9.2 # Photon energy of the beam
out_folder = "/gpfs/exfel/data/scratch/karnem/testLPD_11/" # Output folder, required
out_folder = "/gpfs/exfel/data/scratch/karnem/test_LPD/" # Output folder, required
use_existing = "" # If not empty, constants stored in given folder will be used
cal_db_timeout = 180000 # timeout on caldb requests",
adu_to_photon = 33.17 # ADU to photon conversion factor (8000 / 3.6 / 67.)
nMemToShow = 32 # Number of memory cells to be shown in plots over ASICs
db_module = "LPD1M1" # detector entry in the DB to investigate
dclass = "LPD" # Detector class
cal_db_interface = "tcp://max-exfl016:8015#8025" # the database interface to use
max_time = 15 # the max margin in min. for the matching closest bad pixels
range_offset = [800., 1500, 600, 900] # plotting range for offset: high gain l, r, medium gain l, r
range_noise = [2.0, 16, 1.0, 7.0] # plotting range for noise: high gain l, r, medium gain l, r
range_gain = [20, 30, 20, 30] # plotting range for gain: high gain l, r, medium gain l, r
range_noise_e = [100., 600., 100., 600.] # plotting range for noise in [e-]: high gain l, r, medium gain l, r
range_slopesCI = [0.95, 1.05, 0.0, 0.5] # plotting range for slope CI: high gain l, r, medium gain l, r
range_slopesFF = [0.8, 1.2, 0.8, 1.2] # plotting range for slope FF: high gain l, r, medium gain l, r
plot_range = 3 # range for plotting in units of median absolute deviations
```
%% Cell type:code id: tags:
``` python
import copy
import datetime
import dateutil.parser
import numpy as np
import os
import sys
import warnings
warnings.filterwarnings('ignore')
import h5py
from iCalibrationDB import Constants, Conditions, Detectors, ConstantMetaData
from cal_tools.tools import get_from_db, get_random_db_interface
from cal_tools.ana_tools import (save_dict_to_hdf5, load_data_from_hdf5,
combine_constants, HMType,
hm_combine, combine_lists)
hm_combine, combine_lists, get_range)
```
%% Cell type:code id: tags:
``` python
# Prepare variables
nMem = max(mem_cells) # Number of mem Cells to store
spShape = (64,64) # Shape of superpixel
if modules[0] == -1:
modules = range(16)
modules = ["Q{}M{}".format(x // 4 + 1, x % 4 + 1) for x in modules]
constantsDark = {"SlopesFF": 'BadPixelsFF',
'SlopesPC': 'BadPixelsPC',
'Noise': 'BadPixelsDark',
'Offset': 'BadPixelsDark'}
print('Bad pixels data: ', constantsDark)
# Define parameters in order to perform loop over time stamps
start = datetime.datetime.now() if start_date.upper() == "NOW" else dateutil.parser.parse(
start_date)
end = datetime.datetime.now() if end_date.upper() == "NOW" else dateutil.parser.parse(
end_date)
# Create output folder
os.makedirs(out_folder, exist_ok=True)
# Get getector conditions
det = getattr(Detectors, db_module)
dconstants = getattr(Constants, dclass)
print('CalDB Interface: {}'.format(cal_db_interface))
print('Start time at: ', start)
print('End time at: ', end)
print('Modules: ', modules)
```
%% Cell type:code id: tags:
``` python
parameter_list = combine_lists(bias_voltages, modules, mem_cells, names = ['bias_voltage', 'module', 'mem_cells'])
print(parameter_list)
```
%% Cell type:code id: tags:
``` python
# Retrieve list of meta-data
constant_versions = []
constant_parameters = []
constantBP_versions = []
# Loop over constants
for c, const in enumerate(constants):
if use_existing != "":
break
# Loop over parameters
for pars in parameter_list:
if (const in ["Offset", "Noise", "SlopesPC"] or "DARK" in const.upper()):
dcond = Conditions.Dark
mcond = getattr(dcond, dclass)(
memory_cells=pars['mem_cells'],
bias_voltage=pars['bias_voltage'])
else:
dcond = Conditions.Illuminated
mcond = getattr(dcond, dclass)(
memory_cells=pars['mem_cells'],
bias_voltage=pars['bias_voltage'],
photon_energy=photon_energy)
print('Request: ', const, 'with paramters:', pars)
# Request Constant versions for given parameters and module
data = get_from_db(getattr(det, pars['module']),
getattr(dconstants,
const)(),
copy.deepcopy(mcond), None,
cal_db_interface,
creation_time=start,
verbosity=0,
timeout=cal_db_timeout,
meta_only=True,
version_info=True)
print(data)
# Request BP constant versions
print('constantDark:', constantsDark[const], )
dataBP = get_from_db(getattr(det, pars['module']),
getattr(dconstants,
constantsDark[const])(),
copy.deepcopy(mcond), None,
cal_db_interface,
creation_time=start,
verbosity=0,
timeout=cal_db_timeout,
meta_only=True,
version_info=True)
print('BP!!!!!', dataBP)
if not isinstance(data, list) or not isinstance(dataBP, list):
continue
found_BPmatch = False
for d in data:
# Match proper BP constant version
# and get constant version within
# requested time range
if d is None:
print('Time or data is not found!')
continue
dt = dateutil.parser.parse(d['begin_at'])
if dt.replace(tzinfo=None) > end or dt.replace(tzinfo=None) < start:
if (dt.replace(tzinfo=None) > end or
(nconstants==0 and dt.replace(tzinfo=None) < start)):
continue
closest_BP = None
closest_BPtime = None
for dBP in dataBP:
if dBP is None:
print("Bad pixels are not found!")
continue
dt = dateutil.parser.parse(d['begin_at'])
dBPt = dateutil.parser.parse(dBP['begin_at'])
if dt == dBPt:
found_BPmatch = True
else:
if np.abs(dBPt-dt).seconds < (max_time*60):
if closest_BP is None:
closest_BP = dBP
closest_BPtime = dBPt
else:
if np.abs(dBPt-dt) < np.abs(closest_BPtime-dt):
closest_BP = dBP
closest_BPtime = dBPt
if dataBP.index(dBP) == len(dataBP)-1:
if closest_BP:
dBP = closest_BP
dBPt = closest_BPtime
found_BPmatch = True
else:
print('Bad pixels are not found!')
if found_BPmatch:
print("Found constant {}: begin at {}".format(const, dt))
print("Found bad pixels at {}".format(dBPt))
constantBP_versions.append(dBP)
constant_versions.append(d)
constant_parameters.append(copy.deepcopy(pars))
found_BPmatch = False
break
print('Number of retrieved constants with a bad pixel match is {}'.format(len(constant_versions)))
```
%% Cell type:code id: tags:
``` python
def prepare_to_store(a, nMem):
    """Pad superpixel data to a fixed number of memory cells.

    Returns an array of shape ``a.shape[:2] + (nMem, 2)`` filled with
    NaN, where the first ``a.shape[2]`` memory cells and the first two
    gain stages are copied from ``a``.
    """
    out_shape = list(a.shape[:2]) + [nMem, 2]
    padded = np.full(out_shape, np.nan)
    padded[:, :, :a.shape[2]] = a[:, :, :, :2]
    return padded
def get_rebined(a, rebin):
    """Split the first two (pixel) axes of ``a`` into superpixel blocks.

    The result has shape ``(n0, rebin[0], n1, rebin[1], cells, gains)``
    so that averaging over axes 1 and 3 yields per-superpixel values.
    """
    n0 = int(a.shape[0] / rebin[0])
    n1 = int(a.shape[1] / rebin[1])
    return a.reshape(n0, rebin[0], n1, rebin[1], a.shape[2], a.shape[3])
def modify_const(const, data):
    """Bring a calibration-constant array into the common 4D layout
    with 256 pixels along the first axis.

    NOTE(review): control flow reconstructed from an indentation-less
    scrape — the 5D squeeze is assumed to apply at top level (to any
    constant), not only inside the SlopesFF branch; confirm against
    the repository version.
    """
    if const in ['SlopesFF']:
        # FF slopes lack memory-cell/gain axes; append singletons
        data = data[..., None, None]
    if data.ndim == 5:
        # drop the extra trailing dimension, keeping its first entry
        data = data[:, :, :, :, 0]
    if data.ndim < 4:
        print(data.shape, "Unexpected shape!")
    if data.shape[0] != 256:
        # rotate axes so the 256-pixel dimension comes first
        data = data.swapaxes(0, 2).swapaxes(1, 3).swapaxes(2, 3)
    return data
ret_constants = {}
constand_data = ConstantMetaData()
constant_BP = ConstantMetaData()
for i, constant_version in enumerate(constant_versions):
const = constant_version['data_set_name'].split('/')[-2]
# sort over begin_at
idxs, _ = zip(*sorted(enumerate(constant_versions),
key=lambda x: x[1]['begin_at'], reverse=True))
for i in idxs:
const = constant_versions[i]['data_set_name'].split('/')[-2]
qm = constant_parameters[i]['module']
constand_data.retrieve_from_version_info(constant_version)
if not const in ret_constants:
ret_constants[const] = {}
if not qm in ret_constants[const]:
ret_constants[const][qm] = []
if nconstants>0 and len(ret_constants[const][qm])>=nconstants:
continue
constand_data.retrieve_from_version_info(constant_versions[i])
constant_BP.retrieve_from_version_info(constantBP_versions[i])
cdata = constand_data.calibration_constant.data
cdataBP = constant_BP.calibration_constant.data
ctime = constand_data.calibration_constant_version.begin_at
print("constant: {}, module {}, begin_at {}".format(const, qm, ctime))
if not const in ret_constants:
ret_constants[const] = {}
if not qm in ret_constants[const]:
ret_constants[const][qm] = []
cdata = modify_const(const, cdata)
cdataBP = modify_const(const, cdataBP)
if cdataBP.shape != cdata.shape:
print('Wrong bad pixel shape! {}, expected {}'.format(cdataBP.shape, cdata.shape))
continue
# Apply bad pixel mask
cdataABP = np.copy(cdata)
cdataABP[cdataBP > 0] = np.nan
# Create superpixels for constants with BP applied
cdataABP = get_rebined(cdataABP, spShape)
toStoreBP = prepare_to_store(np.nanmean(cdataABP, axis=(1, 3)), nMem)
toStoreBPStd = prepare_to_store(np.nanstd(cdataABP, axis=(1, 3)), nMem)
# Prepare number of bad pixels per superpixels
cdataBP = get_rebined(cdataBP, spShape)
cdataNBP = prepare_to_store(np.nansum(cdataBP > 0, axis=(1, 3)), nMem)
# Create superpixels for constants without BP applied
cdata = get_rebined(cdata, spShape)
toStoreStd = prepare_to_store(np.nanstd(cdata, axis=(1, 3)), nMem)
toStore = prepare_to_store(np.nanmean(cdata, axis=(1, 3)), nMem)
dpar = {p.name: p.value for p in constand_data.detector_condition.parameters}
print("Store values in dict", const, qm, ctime)
ret_constants[const][qm].append({'ctime': ctime,
'nBP': cdataNBP,
'dataBP': toStoreBP,
'dataBPStd': toStoreBPStd,
'data': toStore,
'dataStd': toStoreStd,
'mdata': dpar})
```
%% Cell type:code id: tags:
``` python
if use_existing == "":
print('Save data to /CalDBAna_{}_{}.h5'.format(dclass, modules[0]))
save_dict_to_hdf5(ret_constants,
'{}/CalDBAna_{}_{}.h5'.format(out_folder, dclass, modules[0]))
```
%% Cell type:code id: tags:
``` python
if use_existing == "":
fpath = '{}/CalDBAna_{}_*.h5'.format(out_folder, dclass)
else:
fpath = '{}/CalDBAna_{}_*.h5'.format(use_existing, dclass)
print('Load data from {}'.format(fpath))
ret_constants = load_data_from_hdf5(fpath)
```
%% Cell type:code id: tags:
``` python
# Combine FF and PC data to calculate Gain
# Estimate Noise in units of electrons
print ('Calculate Gain and Noise in electron units')
ret_constants["Gain"] = {}
ret_constants["Noise-e"] = {}
for mod in list(range(16)):
if ("SlopesFF" not in ret_constants or
"SlopesCI" not in ret_constants):
break
qm = "Q{}M{}".format(mod // 4 + 1, mod % 4 + 1)
print(qm)
if (qm not in ret_constants["SlopesFF"] or
qm not in ret_constants["SlopesCI"]):
continue
ret_constants["Gain"][qm] = {}
dataFF = ret_constants["SlopesFF"][qm]
dataPC = ret_constants["SlopesCI"][qm]
if (len(dataFF) == 0 or len(dataPC) == 0):
continue
ctimesFF = np.array(dataFF["ctime"])
ctimesPC = np.array(dataPC["ctime"])
ctime, icomb = combine_constants(ctimesFF, ctimesPC)
cdataPC_vs_time = np.array(dataPC["data"])[..., 0]
cdataFF_vs_time = np.array(dataFF["data"])[..., 0]
cdataFF_vs_time = np.nanmedian(cdataFF_vs_time, axis=3)[..., None]
cdataFF_vs_time /= np.nanmedian(cdataFF_vs_time, axis=(1, 2, 3))[:, None,
None, None]
cdataPC_vs_time /= np.nanmedian(cdataPC_vs_time, axis=(1, 2, 3))[:, None,
None, None]
gain_vs_time = []
for iFF, iPC in icomb:
gain_vs_time.append(cdataFF_vs_time[iFF] * cdataPC_vs_time[iPC])
print(np.array(gain_vs_time).shape)
ctime_ts = [t.timestamp() for t in ctime]
ret_constants["Gain"][qm]["ctime"] = ctime
ret_constants["Gain"][qm]["data"] = np.array(gain_vs_time)
# Fill missing data for compatibility with plotting code
ret_constants["Gain"][qm]["dataBP"] = np.array(gain_vs_time)
ret_constants["Gain"][qm]["nBP"] = np.array(gain_vs_time)
if "Noise" not in ret_constants:
continue
if qm not in ret_constants["Noise"]:
continue
dataN = ret_constants["Noise"][qm]
if len(dataN) == 0:
continue
ret_constants["Noise-e"][qm] = {}
ctimesG = np.array(ctime)
ctimesN = np.array(dataN["ctime"])
ctime, icomb = combine_constants(ctimesG, ctimesN)
cdataG_vs_time = np.array(gain_vs_time)
cdataN_vs_time = np.array(dataN["data"])[..., 0]
data_vs_time = []
for iG, iN in icomb:
data_vs_time.append(
cdataN_vs_time[iN] * adu_to_photon / cdataG_vs_time[iG])
print(np.array(gain_vs_time).shape)
ctime_ts = [t.timestamp() for t in ctime]
ret_constants["Noise-e"][qm]["ctime"] = ctime
ret_constants["Noise-e"][qm]["data"] = np.array(data_vs_time)
# Fill missing data for compatibility with plotting code
ret_constants["Noise-e"][qm]["dataBP"] = np.array(data_vs_time)
ret_constants["Noise-e"][qm]["nBP"] = np.array(data_vs_time)
save_dict_to_hdf5({k:v for k,v in ret_constants.items() if k in ['Gain', 'Noise-e']},
'{}/CalDBAna_{}_Gain.h5'.format(out_folder, dclass))
```
%% Cell type:code id: tags:
``` python
# Parameters for plotting
# Define range for plotting
rangevals = {
"Offset": [range_offset[0:2], range_offset[2:4]],
"Noise": [range_noise[0:2], range_noise[2:4]],
"Gain": [range_gain[0:2], range_gain[2:4]],
"Noise-e": [range_noise_e[0:2], range_noise_e[2:4]],
"SlopesCI": [range_slopesCI[0:2], range_slopesCI[2:4]],
"SlopesFF": [range_slopesFF[0:2], range_slopesFF[2:4]]
}
keys = {
'Mean': ['data', '', 'Mean over pixels'],
'std': ['dataStd', '', '$\sigma$ over pixels'],
'MeanBP': ['dataBP', 'Good pixels only', 'Mean over pixels'],
'NBP': ['nBP', 'Fraction of BP', 'Fraction of BP'],
'stdBP': ['dataBPStd', 'Good pixels only', '$\sigma$ over pixels'],
'stdASIC': ['', '', '$\sigma$ over ASICs'],
'stdCell': ['', '', '$\sigma$ over Cells'],
}
gain_name = ['High', 'Medium', 'Low']
```
%% Cell type:code id: tags:
``` python
print('Plot calibration constants')
# loop over constat type
for const, mods in ret_constants.items():
# Loop over gain
for gain in range(2):
print('Const: {}, gain {}'.format(const, gain))
if const in ["Gain", "Noise-e"] and gain == 1:
continue
else:
pass
# loop over modules
mod_data = {}
mod_data['stdASIC'] = []
mod_data['stdCell'] = []
mod_names = []
mod_times = []
# Loop over modules
for mod, data in mods.items():
if mod not in modules:
continue
print(mod)
ctimes = np.array(data["ctime"])
ctimes_ticks = [x.strftime('%y-%m-%d') for x in ctimes]
if ("mdata" in data):
cmdata = np.array(data["mdata"])
for i, tick in enumerate(ctimes_ticks):
ctimes_ticks[i] = ctimes_ticks[i] + \
', V={:1.0f}'.format(cmdata[i]['Sensor Bias Voltage']) + \
', M={:1.0f}'.format(
cmdata[i]['Memory cells'])
sort_ind = np.argsort(ctimes_ticks)
ctimes_ticks = list(np.array(ctimes_ticks)[sort_ind])
# Create sorted by data dataset
rdata = {}
for key, item in keys.items():
if item[0] in data:
rdata[key] = np.array(data[item[0]])[sort_ind]
nTimes = rdata['Mean'].shape[0]
nPixels = rdata['Mean'].shape[1] * rdata['Mean'].shape[2]
nBins = nMemToShow * nPixels
# Select gain
if const not in ["Gain", "Noise-e"]:
for key in rdata:
rdata[key] = rdata[key][..., gain]
# Avoid to low values
if const in ["Noise", "Offset", "Noise-e"]:
rdata['Mean'][rdata['Mean'] < 0.1] = np.nan
if 'MeanBP' in rdata:
rdata['MeanBP'][rdata['MeanBP'] < 0.1] = np.nan
if 'NBP' in rdata:
rdata["NBP"][rdata["NBP"] == 4096] = np.nan
rdata["NBP"] = rdata["NBP"] / (64 * 64) * 100
rdata['NBP'] = rdata['NBP'].astype(float)
rdata["NBP"][rdata["NBP"] == (spShape[0] * spShape[1])] = np.nan
rdata["NBP"] = rdata["NBP"] / (spShape[0] * spShape[1]) * 100
# Reshape: ASICs over cells for plotting
pdata = {}
for key in rdata:
pdata[key] = rdata[key][:, :, :, :nMemToShow].reshape(
nTimes, nBins).swapaxes(0, 1)
# Summary over ASICs
adata = {}
for key in rdata:
adata[key] = np.nanmean(rdata[key], axis=(1, 2)).swapaxes(0, 1)
# Summary information over modules
for key in pdata:
if key not in mod_data:
mod_data[key] = []
mod_data[key].append(np.nanmean(pdata[key], axis=0))
mod_data['stdASIC'].append(np.nanstd(
np.nanmean(rdata['Mean'][:, :, :, :nMemToShow], axis=(1, 2)), axis=1))
mod_data['stdCell'].append(np.nanstd(
np.nanmean(rdata['Mean'][:, :, :, :nMemToShow], axis=3), axis=(1, 2)))
mod_names.append(mod)
mod_times.append(ctimes_ticks)
# Plotting
for key in pdata:
vmin = None
vmax = None
vmin,vmax = get_range(pdata[key][::-1].flatten(), plot_range)
if const in rangevals and key in ['Mean', 'MeanBP']:
vmin = rangevals[const][gain][0]
vmax = rangevals[const][gain][1]
if key == 'NBP':
unit = '[%]'
else:
unit = '[ADU]'
if const == 'Noise-e':
unit = '[$e^-$]'
title = '{}, module {}, {} gain, {}'.format(
const, mod, gain_name[gain], keys[key][1])
cb_label = '{}, {} {}'.format(const, keys[key][2], unit)
hm_combine(pdata[key][::-1], htype=HMType.INSET_AXIS,
x_label='Creation Time', y_label='ASIC ID',
x_ticklabels=ctimes_ticks,
x_ticks=np.arange(len(ctimes_ticks))+0.3,
title=title, cb_label=cb_label,
vmin=vmin, vmax=vmax,
fname='{}/{}_{}_g{}_ASIC_{}.png'.format(
out_folder, const, mod, gain, key),
y_ticks=np.arange(nBins, step=nMemToShow)+16,
y_ticklabels=np.arange(nPixels)[::-1]+1,
pad=[0.125, 0.125, 0.12, 0.185])
hm_combine(adata[key],
x_label='Creation Time', y_label='Memory cell ID',
x_ticklabels=ctimes_ticks,
x_ticks=np.arange(len(ctimes_ticks))+0.3,
title=title, cb_label=cb_label,
fname='{}/{}_{}_g{}_MEM_{}.png'.format(
out_folder, const, mod, gain, key),
vmin=vmin, vmax=vmax)
```
......
%% Cell type:markdown id: tags:
# Statistical analysis of calibration factors
Author: Mikhail Karnevskiy, Steffen Hauf, Version 0.1
Calibration constants for ePix100 detector from the data base with injection time between start_date and end_date are considered.
To be visualized, calibration constants are averaged per ASICs. Plots shows calibration constant over time for each constant.
Values shown in plots are saved in h5 files.
%% Cell type:code id: tags:
``` python
cluster_profile = "noDB" # The ipcluster profile to use
start_date = "2019-01-30" # date to start investigation interval from
end_date = "2019-05-01" # date to end investigation interval at, can be "now"
nconstants = 10 # Number of time stamps to plot. If not 0, overcome start_date.
dclass="ePix100" # Detector class
db_module = "ePix100_M15" # detector entry in the DB to investigate
constants = ["Noise", "Offset"] # constants to plot
bias_voltage = [200] # Bias voltage
temperature = [288] # Operation temperature
integration_time = [1, 50] # Integration time
in_vacuum = [0] # 0 if detector is operated in room pressure
parameter_names = ['bias_voltage', 'integration_time', 'temperature', 'in_vacuum'] # names of parameters
photon_energy = 9.2 # Photon energy of the beam
out_folder = "/gpfs/exfel/data/scratch/karnem/test_ePix/" # output folder
use_existing = "" # If not empty, constants stored in given folder will be used
cal_db_interface = "tcp://max-exfl016:8016" # the database interface to use
cal_db_interface = "tcp://max-exfl016:8015#8025" # the database interface to use
cal_db_timeout = 180000 # timeout on caldb requests",
range_offset = [1000., 2200] # plotting range for offset: high gain l, r, medium gain l, r
range_noise = [1.5, 3.3] # plotting range for noise: high gain l, r, medium gain l, r
range_offset = [1000., 2200] # plotting range for offset
range_noise = [1.5, 3.3] # plotting range for noise
plot_range = 3 # range for plotting in units of median absolute deviations
```
%% Cell type:code id: tags:
``` python
import copy
import datetime
import dateutil.parser
import numpy as np
import os
import sys
import warnings
warnings.filterwarnings('ignore')
import h5py
from iCalibrationDB import Constants, Conditions, Detectors, ConstantMetaData
from cal_tools.tools import get_from_db, get_random_db_interface
from cal_tools.ana_tools import (save_dict_to_hdf5, load_data_from_hdf5,
combine_constants, HMType,
hm_combine, combine_lists)
HMType, hm_combine,
combine_lists, get_range)
```
%% Cell type:code id: tags:
``` python
# Prepare variables
spShape = (354, 96) # Shape of superpixel
parameters = [globals()[x] for x in parameter_names]
constantsDark = {'Noise_': 'BadPixelsDark',
'Offset_': 'BadPixelsDark'}
print('Bad pixels data: ', constantsDark)
# Define parameters in order to perform loop over time stamps
start = datetime.datetime.now() if start_date.upper() == "NOW" else dateutil.parser.parse(
start_date)
end = datetime.datetime.now() if end_date.upper() == "NOW" else dateutil.parser.parse(
end_date)
# Create output folder
os.makedirs(out_folder, exist_ok=True)
# Get getector conditions
det = getattr(Detectors, db_module)
dconstants = getattr(Constants, dclass)
print('CalDB Interface: {}'.format(cal_db_interface))
print('Start time at: ', start)
print('End time at: ', end)
```
%% Cell type:code id: tags:
``` python
parameter_list = combine_lists(*parameters, names = parameter_names)
print(parameter_list)
```
%% Cell type:code id: tags:
``` python
# Retrieve list of meta-data
constant_versions = []
constant_parameters = []
constantBP_versions = []
# Loop over constants
for c, const in enumerate(constants):
if use_existing != "":
break
# Loop over parameters
for pars in parameter_list:
if (const in ["Offset", "Noise", "SlopesPC"] or "DARK" in const.upper()):
dcond = Conditions.Dark
mcond = getattr(dcond, dclass)(**pars)
else:
dcond = Conditions.Illuminated
mcond = getattr(dcond, dclass)(**pars,
photon_energy=photon_energy)
print('Request: ', const, 'with paramters:', pars)
# Request Constant versions for given parameters and module
data = get_from_db(det,
getattr(dconstants,
const)(),
copy.deepcopy(mcond), None,
cal_db_interface,
creation_time=start,
verbosity=0,
timeout=cal_db_timeout,
meta_only=True,
version_info=True)
if not isinstance(data, list):
continue
if const in constantsDark:
# Request BP constant versions
print('constantDark:', constantsDark[const], )
dataBP = get_from_db(det,
getattr(dconstants,
constantsDark[const])(),
copy.deepcopy(mcond), None,
cal_db_interface,
creation_time=start,
verbosity=0,
timeout=cal_db_timeout,
meta_only=True,
version_info=True)
print(dataBP)
if not isinstance(data, list) or not isinstance(dataBP, list):
continue
found_BPmatch = False
for d in data:
# Match proper BP constant version
# and get constant version within
# requested time range
if d is None:
print('Time or data is not found!')
continue
dt = dateutil.parser.parse(d['begin_at'])
if dt.replace(tzinfo=None) > end or dt.replace(tzinfo=None) < start:
if (dt.replace(tzinfo=None) > end or
(nconstants==0 and dt.replace(tzinfo=None) < start)):
continue
closest_BP = None
closest_BPtime = None
for dBP in dataBP:
if dBP is None:
print("Bad pixels are not found!")
continue
dt = dateutil.parser.parse(d['begin_at'])
dBPt = dateutil.parser.parse(dBP['begin_at'])
if dt == dBPt:
found_BPmatch = True
else:
if np.abs(dBPt-dt).seconds < (max_time*60):
if closest_BP is None:
closest_BP = dBP
closest_BPtime = dBPt
else:
if np.abs(dBPt-dt) < np.abs(closest_BPtime-dt):
closest_BP = dBP
closest_BPtime = dBPt
if dataBP.index(dBP) == len(dataBP)-1:
if closest_BP:
dBP = closest_BP
dBPt = closest_BPtime
found_BPmatch = True
else:
print('Bad pixels are not found!')
if found_BPmatch:
print("Found constant {}: begin at {}".format(const, dt))
print("Found bad pixels at {}".format(dBPt))
constantBP_versions.append(dBP)
constant_versions.append(d)
constant_parameters.append(copy.deepcopy(pars))
found_BPmatch = False
break
else:
constant_versions += data
constant_parameters += [copy.deepcopy(pars)]*len(data)
# Remove dublications
constant_versions_tmp = []
constant_parameters_tmp = []
constantBP_versions_tmp = []
for i, x in enumerate(constant_versions):
if x not in constant_versions_tmp:
constant_versions_tmp.append(x)
constant_parameters_tmp.append(constant_parameters[i])
if i<len(constantBP_versions)-1:
constantBP_versions_tmp.append(constantBP_versions[i])
constant_versions=constant_versions_tmp
constantBP_versions=constantBP_versions_tmp
constant_parameters=constant_parameters_tmp
print('Number of stored constant versions is {}'.format(len(constant_versions)))
```
%% Cell type:code id: tags:
``` python
def get_rebined(a, rebin):
    """Group the first gain slice of ``a`` into superpixel blocks.

    Only ``a[:, :, 0]`` is used; the result has shape
    ``(n0, rebin[0], n1, rebin[1])``.
    """
    plane = a[:, :, 0]
    n0 = int(plane.shape[0] / rebin[0])
    n1 = int(plane.shape[1] / rebin[1])
    return plane.reshape(n0, rebin[0], n1, rebin[1])
def modify_const(const, data, isBP = False):
    """Hook for per-constant data adjustments.

    The ePix100 constants already come in the expected layout, so the
    data is returned unchanged for every constant and for bad-pixel
    maps alike.
    """
    return data
ret_constants = {}
constand_data = ConstantMetaData()
constant_BP = ConstantMetaData()
for i, constant_version in enumerate(constant_versions):
const = constant_version['data_set_name'].split('/')[-2]
qm = db_module
print("constant: {}, module {}".format(const,qm))
# sort over begin_at
idxs, _ = zip(*sorted(enumerate(constant_versions),
key=lambda x: x[1]['begin_at'], reverse=True))
constand_data.retrieve_from_version_info(constant_version)
for i in idxs:
const = constant_versions[i]['data_set_name'].split('/')[-2]
qm = db_module
if not const in ret_constants:
ret_constants[const] = {}
if not qm in ret_constants[const]:
ret_constants[const][qm] = []
if nconstants>0 and len(ret_constants[const][qm])>=nconstants:
continue
print("constant: {}, module {}".format(const,qm))
constand_data.retrieve_from_version_info(constant_versions[i])
cdata = constand_data.calibration_constant.data
ctime = constand_data.calibration_constant_version.begin_at
cdata = modify_const(const, cdata)
# Create superpixels for constants without BP applied
cdata = get_rebined(cdata, spShape)
toStoreStd = np.nanstd(cdata, axis=(1, 3))
toStore = np.nanmean(cdata, axis=(1, 3))
# Convert parameters to dict
dpar = {p.name: p.value for p in constand_data.detector_condition.parameters}
print("Store values in dict", const, qm, ctime)
ret_constants[const][qm].append({'ctime': ctime,
'nBP': 0,
'dataBP': 0,
'dataBPStd': 0,
'data': toStore,
'dataStd': toStoreStd,
'mdata': dpar})
```
%% Cell type:code id: tags:
``` python
if use_existing == "":
print('Save data to /CalDBAna_{}_{}.h5'.format(dclass, db_module))
save_dict_to_hdf5(ret_constants,
'{}/CalDBAna_{}_{}.h5'.format(out_folder, dclass, db_module))
```
%% Cell type:code id: tags:
``` python
if use_existing == "":
fpath = '{}/CalDBAna_{}_*.h5'.format(out_folder, dclass)
else:
fpath = '{}/CalDBAna_{}_*.h5'.format(use_existing, dclass)
print('Load data from {}'.format(fpath))
ret_constants = load_data_from_hdf5(fpath)
```
%% Cell type:code id: tags:
``` python
# Parameters for plotting
# Define range for plotting
rangevals = {
"OffsetEPix100": [range_offset[0:2], range_offset[2:4]],
"NoiseEPix100": [range_noise[0:2], range_noise[2:4]],
}
keys = {
'Mean': ['data', '', 'Mean over pixels'],
'std': ['dataStd', '', '$\sigma$ over pixels'],
'MeanBP': ['dataBP', 'Good pixels only', 'Mean over pixels'],
'NBP': ['nBP', 'Fraction of BP', 'Fraction of BP'],
'stdBP': ['dataBPStd', 'Good pixels only', '$\sigma$ over pixels'],
'stdASIC': ['', '', '$\sigma$ over ASICs'],
'stdCell': ['', '', '$\sigma$ over Cells'],
}
gain_name = ['High', 'Medium', 'Low']
```
%% Cell type:code id: tags:
``` python
print('Plot calibration constants')
# loop over constat type
for const, modules in ret_constants.items():
print('Const: {}'.format(const))
# loop over modules
mod_data = {}
mod_data['stdASIC'] = []
mod_data['stdCell'] = []
mod_names = []
mod_times = []
# Loop over modules
for mod, data in modules.items():
print(mod)
ctimes = np.array(data["ctime"])
ctimes_ticks = [x.strftime('%y-%m-%d') for x in ctimes]
if ("mdata" in data):
cmdata = np.array(data["mdata"])
for i, tick in enumerate(ctimes_ticks):
ctimes_ticks[i] = ctimes_ticks[i] + \
', V={:1.0f}'.format(cmdata[i]['Sensor Temperature']) + \
', T={:1.0f}'.format(
cmdata[i]['Integration Time'])
sort_ind = np.argsort(ctimes_ticks)
ctimes_ticks = list(np.array(ctimes_ticks)[sort_ind])
# Create sorted by data dataset
rdata = {}
for key, item in keys.items():
if item[0] in data:
rdata[key] = np.array(data[item[0]])[sort_ind]
nTimes = rdata['Mean'].shape[0]
nPixels = rdata['Mean'].shape[1] * rdata['Mean'].shape[2]
nBins = nPixels
# Avoid to low values
if const in ["Noise", "Offset", "Noise-e"]:
rdata['Mean'][rdata['Mean'] < 0.1] = np.nan
if 'MeanBP' in rdata:
rdata['MeanBP'][rdata['MeanBP'] < 0.1] = np.nan
# Reshape: ASICs over cells for plotting
pdata = {}
for key in rdata:
if key not in ['Mean', 'std']:
continue
pdata[key] = rdata[key][:, :, :].reshape(nTimes, nBins).swapaxes(0, 1)
# Summary over ASICs
adata = {}
for key in rdata:
if key not in ['Mean', 'std']:
continue
adata[key] = np.nanmean(rdata[key], axis=(1, 2))
# Summary information over modules
for key in pdata:
if key not in mod_data:
mod_data[key] = []
mod_data[key].append(np.nanmean(pdata[key], axis=0))
mod_data['stdASIC'].append(np.nanstd(rdata['Mean'], axis=(1, 2)))
mod_names.append(mod)
mod_times.append(ctimes_ticks)
# Plotting
for key in pdata:
if key not in ['Mean', 'std']:
continue
vmin = None
vmax = None
vmin,vmax = get_range(pdata[key][::-1].flatten(), plot_range)
if const in rangevals and key in ['Mean', 'MeanBP']:
vmin = rangevals[const][0][0]
vmax = rangevals[const][0][1]
if key == 'NBP':
unit = '[%]'
else:
unit = '[ADU]'
if const == 'Noise-e':
unit = '[$e^-$]'
title = '{}, module {}, {}'.format(
const, mod, keys[key][1])
cb_label = '{}, {} {}'.format(const, keys[key][2], unit)
hm_combine(pdata[key][::-1], htype=HMType.mro,
x_label='Creation Time', y_label='ASIC ID',
x_ticklabels=ctimes_ticks,
x_ticks=np.arange(len(ctimes_ticks))+0.3,
title=title, cb_label=cb_label,
vmin=vmin, vmax=vmax,
fname='{}/{}_{}_g{}_ASIC_{}.png'.format(
out_folder, const, mod.replace('_', ''), 0, key),
pad=[0.125, 0.125, 0.12, 0.185])
```
......
......@@ -129,6 +129,12 @@ notebooks = {
"use function": "balance_sequences",
"cluster cores": 4},
},
"STATS_FROM_DB": {
"notebook": "notebooks/FastCCD/PlotFromCalDB_FastCCD_NBC.ipynb",
"concurrency": {"parameter": None,
"default concurrency": None,
"cluster cores": 1},
},
},
"JUNGFRAU": {
"DARK": {
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment