Commit bed8bf80 authored by Thomas Kluyver

Remove cluster_profile parameter from notebooks which don't use it

parent 119a1ff5
1 merge request: !570 Remove cluster_profile parameter from notebooks which don't use it
%% Cell type:markdown id: tags:
# Statistical analysis of calibration factors
Author: Mikhail Karnevskiy, Version 0.2
Plot calibration constants retrieved from the calibration DB.
To be visualized, calibration constants are averaged per group of pixels. The plots show each calibration constant over time.
Values shown in the plots are saved to HDF5 files.
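%% Cell type:markdown id: tags:
The per-pixel-group averaging mentioned above reduces each constant to one value per "superpixel" before plotting. The cell below is a minimal, hypothetical sketch of that reduction (array size and superpixel shape are illustrative only; the notebook itself does this via `get_rebined` and `np.nanmean` further down).
%% Cell type:code id: tags:
``` python
import numpy as np

# Sketch: average a 2D "constant" over non-overlapping superpixels.
constant = np.random.normal(loc=1000.0, scale=5.0, size=(512, 1024))
sp_shape = (64, 64)  # superpixel shape (rows, columns)

# Reshape to (n_sp_rows, sp_rows, n_sp_cols, sp_cols) and reduce over the
# intra-superpixel axes, ignoring NaNs (e.g. masked bad pixels).
rebinned = constant.reshape(constant.shape[0] // sp_shape[0], sp_shape[0],
                            constant.shape[1] // sp_shape[1], sp_shape[1])
sp_mean = np.nanmean(rebinned, axis=(1, 3))
sp_std = np.nanstd(rebinned, axis=(1, 3))
print(sp_mean.shape)  # (8, 16) superpixel grid
```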
%% Cell type:code id: tags:
``` python
cluster_profile = "noDB" # The ipcluster profile to use
start_date = "2019-01-01" # date to start investigation interval from
end_date = "NOW" # date to end investigation interval at, can be "now"
dclass="jungfrau" # Detector class
modules = ["Jungfrau_M035"] # detector entry in the DB to investigate
submodules = [2] # module index of a modular detector (1 for Q1M1 of AGIPD), range allowed
constants = ['RelativeGain'] # constants to plot
nconstants = 20 # Number of time stamps to plot. If not 0, overrides start_date.
max_time = 15 # max time margin in minutes to match bad pixels
nMemToShow = 16 # Number of memory cells to be shown in plots
gain_setting = [0,1,2] # gain stages
bias_voltage = [90, 180] # Bias voltage
temperature = [291] # Operation temperature
integration_time = [4.96, 10, 50, 250] # Integration time
pixels_x=[1024] # number of pixels along X axis
pixels_y=[512] # number of pixels along Y axis
in_vacuum = [0] # 0 if detector is operated in room pressure
memory_cells = [1, 16] # number of memory cells
acquisition_rate = [1.1] # acquisition rate
parameter_names = ['bias_voltage', 'integration_time', 'pixels_x', 'pixels_y', 'temperature', 'memory_cells'] # names of parameters
separate_plot = ['gain_setting', 'memory_cells', 'integration_time'] # Plot on separate plots
x_labels = ['Sensor Temperature', 'Integration Time'] # parameters to be shown on X axis: Acquisition rate, Memory cells, Sensor Temperature, Integration Time
photon_energy = 9.2 # Photon energy of the beam
out_folder = "/gpfs/exfel/data/scratch/karnem/test_bla4/" # output folder
use_existing = "" # If not empty, constants stored in given folder will be used
cal_db_interface = "tcp://max-exfl016:8016" # the database interface to use
cal_db_timeout = 180000 # timeout on CalDB requests
plot_range = 3 # range for plotting in units of median absolute deviations
spShape = [256, 64] # Shape of superpixel
sp_name = 'ASIC IDs' # name of superpixel
gain_titles = ['High gain', 'Medium gain', 'Low gain'] # Title inset related to gain
```
%% Cell type:code id: tags:
``` python
import copy
import datetime
import os
import sys
import warnings
import dateutil.parser
import numpy as np
warnings.filterwarnings('ignore')
import matplotlib.pyplot as plt
from cal_tools.ana_tools import (
HMType,
IMType,
combine_constants,
combine_lists,
get_range,
hm_combine,
load_data_from_hdf5,
save_dict_to_hdf5,
)
from cal_tools.tools import get_from_db, get_random_db_interface
from iCalibrationDB import Conditions, ConstantMetaData, Constants, Detectors
```
%% Cell type:code id: tags:
``` python
# Prepare variables
submodules = ["Q{}M{}".format(x // 4 + 1, x % 4 + 1) for x in submodules]
# No submodules for small detectors
if dclass not in ['AGIPD', 'LPD']:
submodules = ['']
# 0 is considered as None.
acquisition_rate = [x if x > 0 else None for x in acquisition_rate]
nMem = max(memory_cells) # Number of mem Cells to store
parameters = [globals()[x] for x in parameter_names]
# Empty list from the command line may not work
if separate_plot == ['']:
separate_plot = []
# Mapping between constants and their bad pixel maps
constantsDark = {"SlopesFF": 'BadPixelsFF',
'SlopesPC': 'BadPixelsPC',
'SlopesCI': 'BadPixelsCI',
'Noise': 'BadPixelsDark',
'Offset': 'BadPixelsDark'}
print('Bad pixels data: ', constantsDark)
# Define parameters in order to perform loop over time stamps
start = datetime.datetime.now() if start_date.upper() == "NOW" else dateutil.parser.parse(
start_date)
end = datetime.datetime.now() if end_date.upper() == "NOW" else dateutil.parser.parse(
end_date)
# Create output folder
os.makedirs(out_folder, exist_ok=True)
print('CalDB Interface: {}'.format(cal_db_interface))
print('Start time at: ', start)
print('End time at: ', end)
```
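%% Cell type:markdown id: tags:
As a quick, hypothetical illustration (not part of the notebook): the flat module index used for large detectors maps to a quadrant/module label via integer division and modulo by four, exactly as in the list comprehension above.
%% Cell type:code id: tags:
``` python
# Sketch: flat module index -> quadrant/module label, as in the cell above.
for x in [0, 1, 4, 15]:
    print(x, "->", "Q{}M{}".format(x // 4 + 1, x % 4 + 1))
# 0 -> Q1M1, 1 -> Q1M2, 4 -> Q2M1, 15 -> Q4M4
```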
%% Cell type:code id: tags:
``` python
parameter_list = combine_lists(*parameters, names=parameter_names)
print(parameter_list)
```
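%% Cell type:markdown id: tags:
`combine_lists` comes from `cal_tools.ana_tools`; conceptually it builds the Cartesian product of the parameter lists, returning one dictionary per combination. A rough, hypothetical stand-in using only the standard library:
%% Cell type:code id: tags:
``` python
import itertools

def combine_lists_sketch(*values, names):
    """Rough stand-in for cal_tools.ana_tools.combine_lists:
    one dict per combination of the given parameter lists."""
    return [dict(zip(names, combo)) for combo in itertools.product(*values)]

# e.g. bias_voltage x memory_cells -> 4 parameter sets
print(combine_lists_sketch([90, 180], [1, 16],
                           names=['bias_voltage', 'memory_cells']))
```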
%% Cell type:code id: tags:
``` python
# Retrieve list of meta-data
constant_versions = []
constant_parameters = []
constantBP_versions = []
# Loop over constants
for c, const in enumerate(constants):
for db_module in modules:
det = getattr(Detectors, db_module)
if dclass in ['AGIPD', 'LPD']:
det = getattr(det, submodules[0])
        # Get detector conditions
if dclass=='CCD':
dconstants = getattr(Constants, dclass)(det.detector_type)
else:
dconstants = getattr(Constants, dclass)
if use_existing != "":
break
# Loop over parameters
for pars in parameter_list:
if (const in ["Offset", "Noise", "SlopesPC", "SlopesCI", "RelativeGain"] or "DARK" in const.upper()):
dcond = Conditions.Dark
mcond = getattr(dcond, dclass)(**pars)
else:
dcond = Conditions.Illuminated
mcond = getattr(dcond, dclass)(**pars,
photon_energy=photon_energy)
            print('Request: ', const, 'with parameters:', pars)
# Request Constant versions for given parameters and module
data = get_from_db(det,
getattr(dconstants,
const)(),
copy.deepcopy(mcond), None,
cal_db_interface,
creation_time=start,
verbosity=0,
timeout=cal_db_timeout,
meta_only=True,
version_info=True)
if not isinstance(data, list):
continue
if const in constantsDark:
# Request BP constant versions
print('constantDark:', constantsDark[const], )
dataBP = get_from_db(det,
getattr(dconstants,
constantsDark[const])(),
copy.deepcopy(mcond), None,
cal_db_interface,
creation_time=start,
verbosity=0,
timeout=cal_db_timeout,
meta_only=True,
version_info=True)
if not isinstance(dataBP, list):
constant_versions += data
constant_parameters += [copy.deepcopy(pars)]*len(data)
constantBP_versions += [None]*len(data)
continue
for d in data:
# Match proper BP constant version
# and get constant version within
# requested time range
if d is None:
print('Time or data is not found!')
continue
dt = dateutil.parser.parse(d['begin_at'])
if (dt.replace(tzinfo=None) > end or
(nconstants==0 and dt.replace(tzinfo=None) < start)):
continue
closest_BP = None
closest_BPtime = None
found_BPmatch = False
for dBP in dataBP:
if dBP is None:
constantBP_versions.append(None)
constant_versions.append(d)
constant_parameters.append(copy.deepcopy(pars))
print("Bad pixels are not found!")
continue
dt = dateutil.parser.parse(d['begin_at'])
dBPt = dateutil.parser.parse(dBP['begin_at'])
if dt == dBPt:
found_BPmatch = True
else:
if np.abs(dBPt-dt).total_seconds() < (max_time*60):
if closest_BP is None:
closest_BP = dBP
closest_BPtime = dBPt
else:
if np.abs(dBPt-dt) < np.abs(closest_BPtime-dt):
closest_BP = dBP
closest_BPtime = dBPt
if dataBP.index(dBP) == len(dataBP)-1:
if closest_BP:
dBP = closest_BP
dBPt = closest_BPtime
found_BPmatch = True
else:
print('Bad pixels are not found!')
if found_BPmatch:
print("Found constant {}: begin at {}".format(const, dt))
print("Found bad pixels at {}".format(dBPt))
constantBP_versions.append(dBP)
constant_versions.append(d)
constant_parameters.append(copy.deepcopy(pars))
found_BPmatch = False
break
if not found_BPmatch:
print('Bad pixels are not matched')
constantBP_versions.append(None)
constant_versions.append(d)
constant_parameters.append(copy.deepcopy(pars))
else:
constant_versions += data
constant_parameters += [copy.deepcopy(pars)]*len(data)
constantBP_versions += [None]*len(data)
# Remove duplicates
constant_versions_tmp = []
constant_parameters_tmp = []
constantBP_versions_tmp = []
for i, x in enumerate(constant_versions):
if x not in constant_versions_tmp:
constant_versions_tmp.append(x)
constant_parameters_tmp.append(constant_parameters[i])
if i<len(constantBP_versions):
constantBP_versions_tmp.append(constantBP_versions[i])
constant_versions=constant_versions_tmp
constantBP_versions=constantBP_versions_tmp
constant_parameters=constant_parameters_tmp
print('Number of stored constant versions is {}'.format(len(constant_versions)))
```
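%% Cell type:markdown id: tags:
The bad-pixel matching above pairs each constant version with the bad-pixel map whose `begin_at` timestamp is either identical or the closest one within `max_time` minutes. A simplified, self-contained sketch of that selection logic (hypothetical helper, not part of `cal_tools`):
%% Cell type:code id: tags:
``` python
import datetime

def match_bad_pixels_sketch(const_time, bp_times, max_minutes=15):
    """Return the bad-pixel timestamp closest to const_time,
    or None if nothing lies within max_minutes."""
    candidates = [t for t in bp_times
                  if abs((t - const_time).total_seconds()) < max_minutes * 60]
    if not candidates:
        return None
    return min(candidates, key=lambda t: abs(t - const_time))

t0 = datetime.datetime(2019, 6, 1, 12, 0)
bp = [t0 + datetime.timedelta(minutes=m) for m in (-40, -5, 20)]
print(match_bad_pixels_sketch(t0, bp))  # 2019-06-01 11:55:00
```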
%% Cell type:code id: tags:
``` python
def prepare_to_store(a, nMem):
"""
    Different constants for AGIPD and LPD may have different array shapes.
    This function unifies the array shape.
"""
if dclass in ['AGIPD', 'LPD']:
shape = list(a.shape[:2])+[nMem]
b = np.full(shape, np.nan)
b[:, :, :a.shape[2]] = a[:, :, :]
return b
else:
return a
def get_rebined(a, rebin):
"""
    Groups of pixels are formed here for better visualization.
"""
if dclass in ['AGIPD', 'LPD', 'jungfrau']:
return a.reshape(
int(a.shape[0] / rebin[0]),
rebin[0],
int(a.shape[1] / rebin[1]),
rebin[1],
a.shape[2],
a.shape[3])
else:
return a[:,:,0].reshape(
int(a.shape[0] / rebin[0]),
rebin[0],
int(a.shape[1] / rebin[1]),
rebin[1])
def modify_const(const, data, isBP = False):
"""
    The shape of an array for some constants changes over time.
    A modification is needed to unify the array shape and
    make it possible to show constants on the same plot.
"""
if dclass=="jungfrau" and data.shape[1] == 512:
data = data.swapaxes(0, 1)
return data
if dclass=="AGIPD":
const = const.split('_')[0]
if const in ['SlopesFF']:
if (len(data.shape) == 4):
data = data[:, :, :, 0][..., None]
else:
data = data[..., None]
if data.shape[2]<3:
data = data[:,:,0,None]
if not isBP:
if data.shape[0] != 128:
data = data.swapaxes(0, 2).swapaxes(1, 3).swapaxes(2, 3)
# Copy slope medium to be saved later
if const in ['SlopesPC']:
data[:, :, :, 1] = data[:, :, :, 3]
else:
if const in ['SlopesPC']:
if len(data.shape) == 3:
data = data[:, :, :, None].repeat(10, axis=3)
if data.shape[0] != 128:
data = data.swapaxes(0, 1).swapaxes(1, 2)
if len(data.shape) < 4:
print(data.shape, "Unexpected shape!")
return data
if dclass=="LPD":
const = const.split('_')[0]
if const in ['SlopesFF']:
data = data[..., None, None]
if(len(data.shape)==5):
data = data[:,:,:,:,0]
if len(data.shape) < 4:
print(data.shape, "Unexpected shape!")
if data.shape[0] != 256:
data = data.swapaxes(0, 2).swapaxes(1,3).swapaxes(2,3)
return data
ret_constants = {}
constant_data = ConstantMetaData()
constant_BP = ConstantMetaData()
# sort over begin_at
idxs, _ = zip(*sorted(enumerate(constant_versions),
key=lambda x: x[1]['begin_at'], reverse=True))
for i in idxs:
const = constant_versions[i]['data_set_name'].split('/')[-2]
qm = constant_versions[i]['physical_device']['name']
# fix naming for Jungfrau039
if qm == 'Jungfrau1':
qm = 'JungfrauM039'
# use submodule name for big detectors
if dclass in ['AGIPD', 'LPD']:
qm = submodules[0]
# Add insets for parameters
for key in separate_plot:
        # Several constants already contain gain stages
if key == 'gain_setting' and dclass in ['AGIPD', 'LPD', 'jungfrau']:
val = 0
else:
val = constant_parameters[i][key]
const = '{}_{}{}'.format(const, key[0], val)
if not const in ret_constants:
ret_constants[const] = {}
if not qm in ret_constants[const]:
ret_constants[const][qm] = []
if nconstants>0 and len(ret_constants[const][qm])>=nconstants:
continue
constant_data.retrieve_from_version_info(constant_versions[i])
cdata = constant_data.calibration_constant.data
ctime = constant_data.calibration_constant_version.begin_at
cdata = modify_const(const, cdata)
print("constant: {}, module {}, begin_at {}".format(const, qm, ctime))
if constantBP_versions[i]:
constant_BP.retrieve_from_version_info(constantBP_versions[i])
cdataBP = constant_BP.calibration_constant.data
cdataBP = modify_const(const, cdataBP, True)
if cdataBP.shape != cdata.shape:
print('Wrong bad pixel shape! {}, expected {}'.format(cdataBP.shape, cdata.shape))
cdataBP = np.full_like(cdata, IMType.NO_BPMAP.value)
# Apply bad pixel mask
cdataABP = np.copy(cdata)
cdataABP[cdataBP > 0] = np.nan
# Create superpixels for constants with BP applied
cdataABP = get_rebined(cdataABP, spShape)
toStoreBP = np.nanmean(cdataABP, axis=(1, 3))
toStoreBPStd = np.nanstd(cdataABP, axis=(1, 3))
# Prepare number of bad pixels per superpixels
cdataBP = get_rebined(cdataBP, spShape)
cdataNBP = np.nansum(cdataBP > 0, axis=(1, 3))
# Create superpixels for constants without BP applied
cdata = get_rebined(cdata, spShape)
toStoreStd = np.nanstd(cdata, axis=(1, 3))
toStore = np.nanmean(cdata, axis=(1, 3))
if not constantBP_versions[i]:
toStoreBP = np.full_like(toStore, IMType.NO_BPMAP.value)
toStoreBPStd = np.full_like(toStore, IMType.NO_BPMAP.value)
cdataNBP = np.full_like(toStore, IMType.NO_BPMAP.value)
# Convert parameters to dict
dpar = {p.name: p.value for p in constant_data.detector_condition.parameters}
# Several constants have dimensions running over gain.
# All gain stages are stored as separate arrays.
if len(toStore.shape)==4:
for i in range(3):
if i>0 and 'gain_setting' in separate_plot:
const = const.replace('_g{}'.format(i-1), '_g{}'.format(i))
# FF has only high gain
if 'SlopesFF' in const and i>0:
continue
# PC only high and medium.
if 'SlopesPC' in const and i>1:
continue
if not const in ret_constants:
ret_constants[const] = {}
if not qm in ret_constants[const]:
ret_constants[const][qm] = []
print("Store values in dict", const, qm, ctime)
ret_constants[const][qm].append({'ctime': ctime,
'nBP': prepare_to_store(cdataNBP[:,:,:,i], nMem),
'dataBP': prepare_to_store(toStoreBP[:,:,:,i], nMem),
'dataBPStd': prepare_to_store(toStoreBPStd[:,:,:,i], nMem),
'data': prepare_to_store(toStore[:,:,:,i], nMem),
'dataStd': prepare_to_store(toStoreStd[:,:,:,i], nMem),
'mdata': dpar})
else:
print("Store values in dict", const, qm, ctime)
ret_constants[const][qm].append({'ctime': ctime,
'nBP': cdataNBP,
'dataBP': toStoreBP,
'dataBPStd': toStoreBPStd,
'data': toStore,
'dataStd': toStoreStd,
'mdata': dpar})
```
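%% Cell type:markdown id: tags:
`prepare_to_store` above pads the memory-cell axis with NaN so that constants recorded with different numbers of memory cells share a common shape before being collected. A minimal standalone illustration of that padding (shapes are hypothetical):
%% Cell type:code id: tags:
``` python
import numpy as np

def pad_memory_cells_sketch(a, n_mem):
    """Pad the last (memory-cell) axis of a 3D array with NaN up to n_mem."""
    out = np.full(a.shape[:2] + (n_mem,), np.nan)
    out[:, :, :a.shape[2]] = a
    return out

a = np.ones((8, 16, 4))                      # superpixel-averaged constant, 4 cells
print(pad_memory_cells_sketch(a, 16).shape)  # (8, 16, 16); cells 4..15 are NaN
```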
%% Cell type:code id: tags:
``` python
if use_existing == "":
print('Save data to {}/CalDBAna_{}_{}_{}.h5'.format(out_folder, dclass, db_module, submodules[0]))
save_dict_to_hdf5(ret_constants,
'{}/CalDBAna_{}_{}_{}.h5'.format(out_folder, dclass, db_module, submodules[0]))
```
%% Cell type:code id: tags:
``` python
if use_existing == "":
fpath = '{}/CalDBAna_{}_{}_{}.h5'.format(out_folder, dclass, db_module, submodules[0])
else:
fpath = '{}/CalDBAna_{}_{}_{}.h5'.format(use_existing, dclass, db_module, submodules[0])
print('Load data from {}'.format(fpath))
ret_constants = load_data_from_hdf5(fpath)
```
%% Cell type:code id: tags:
``` python
# For AGIPD and LPD
# Combine FF and PC data to calculate Gain
# Estimate Noise in units of electrons
ret_constants["Gain_g0"] = {}
ret_constants["Noise-e_g0"] = {}
for mod in list(range(16)):
    # The check is performed inside the for loop
    # in order to use break.
    # This makes the code more readable.
if ("SlopesFF_g0" not in ret_constants or
"SlopesPC_g0" not in ret_constants):
break
qm = "Q{}M{}".format(mod // 4 + 1, mod % 4 + 1)
if (qm not in ret_constants["SlopesFF_g0"] or
qm not in ret_constants["SlopesPC_g0"]):
continue
print(qm)
ret_constants["Gain_g0"][qm] = {}
dataFF = ret_constants["SlopesFF_g0"][qm]
dataPC = ret_constants["SlopesPC_g0"][qm]
if (len(dataFF) == 0 or len(dataPC) == 0):
continue
ctimesFF = np.array(dataFF["ctime"])
ctimesPC = np.array(dataPC["ctime"])
ctime, icomb = combine_constants(ctimesFF, ctimesPC)
cdataPC_vs_time = np.array(dataPC["data"])[...]
cdataFF_vs_time = np.array(dataFF["data"])[...]
cdataFF_vs_time = np.nanmedian(cdataFF_vs_time, axis=3)[..., None]
cdataFF_vs_time /= np.nanmedian(cdataFF_vs_time, axis=(1, 2, 3))[:, None,
None, None]
cdataPC_vs_time /= np.nanmedian(cdataPC_vs_time, axis=(1, 2, 3))[:, None,
None, None]
gain_vs_time = []
for iFF, iPC in icomb:
gain_vs_time.append(cdataFF_vs_time[iFF] * cdataPC_vs_time[iPC])
print('Shape of gain array: ', np.array(gain_vs_time).shape)
ctime_ts = [t.timestamp() for t in ctime]
ret_constants["Gain_g0"][qm]["ctime"] = ctime
ret_constants["Gain_g0"][qm]["data"] = np.array(gain_vs_time)
if "Noise_g0" not in ret_constants:
continue
if qm not in ret_constants["Noise_g0"]:
continue
dataN = ret_constants["Noise_g0"][qm]
if len(dataN) == 0:
continue
ret_constants["Noise-e_g0"][qm] = {}
ctimesG = np.array(ctime)
ctimesN = np.array(dataN["ctime"])
ctime, icomb = combine_constants(ctimesG, ctimesN)
cdataG_vs_time = np.array(gain_vs_time)
cdataN_vs_time = np.array(dataN["data"])[...]
data_vs_time = []
for iG, iN in icomb:
data_vs_time.append(
cdataN_vs_time[iN] * adu_to_photon / cdataG_vs_time[iG])
    print('Shape of noise array: ', np.array(data_vs_time).shape)
ctime_ts = [t.timestamp() for t in ctime]
ret_constants["Noise-e_g0"][qm]["ctime"] = ctime
ret_constants["Noise-e_g0"][qm]["data"] = np.array(data_vs_time)
save_dict_to_hdf5({k:v for k,v in ret_constants.items() if k in ['Gain_g0', 'Noise-e_g0']},
'{}/CalDBAna_{}_Gain.h5'.format(out_folder, dclass))
```
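%% Cell type:markdown id: tags:
The exact pairing of FF and PC time stamps is done by `combine_constants` from `cal_tools.ana_tools`. The arithmetic itself is straightforward: both slope sets are normalised by their median, multiplied to give a relative gain, and the noise is scaled by `adu_to_photon / gain` to express it in electrons (`adu_to_photon` is assumed to be a detector-specific conversion factor defined outside the cell shown here). A toy numerical sketch of that combination step, with made-up values:
%% Cell type:code id: tags:
``` python
import numpy as np

# Toy arrays standing in for median-normalised FF and PC slopes at matched time stamps.
slopes_ff = np.array([1.02, 0.98, 1.00])
slopes_pc = np.array([0.97, 1.01, 1.00])
gain = slopes_ff * slopes_pc              # relative gain per matched time stamp

noise_adu = np.array([6.1, 6.3, 6.0])     # noise in ADU
adu_to_photon = 8.0                       # assumed conversion factor, illustrative only
noise_e = noise_adu * adu_to_photon / gain
print(gain, noise_e)
```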
%% Cell type:code id: tags:
``` python
# Parameters for plotting
keys = {
    'Mean': ['data', '', 'Mean over pixels'],
    'std': ['dataStd', '', r'$\sigma$ over pixels'],
    'MeanBP': ['dataBP', 'Good pixels only', 'Mean over pixels'],
    'NBP': ['nBP', '', 'Fraction of BP'],
    'stdBP': ['dataBPStd', 'Good pixels only', r'$\sigma$ over pixels'],
}
```
%% Cell type:code id: tags:
``` python
print('Plot calibration constants')
# loop over constant type
for const, modules in ret_constants.items():
    # split the key into the constant name and the list of insets
const = const.split("_")
gain = [int(x[1]) for x in const if 'g' in x]
gain = gain[0] if len(gain)>0 else None
print('Const: {}'.format(const))
# summary over modules
mod_data = {}
mod_names = []
mod_times = []
# Loop over modules
for mod, data in modules.items():
print('Module: {}'.format(mod))
ctimes = np.array(data["ctime"])
ctimes_ticks = [x.strftime('%y-%m-%d') for x in ctimes]
if ("mdata" in data):
cmdata = np.array(data["mdata"])
for i, tick in enumerate(ctimes_ticks):
for entr in x_labels:
key = entr[0].upper()
val = cmdata[i].get(entr, None)
if val is not None:
ctimes_ticks[i] += ', {}={:.1f}'.format(key, val)
sort_ind = np.argsort(ctimes_ticks)
ctimes_ticks = list(np.array(ctimes_ticks)[sort_ind])
        # Create a dataset sorted by creation date
rdata = {}
for key, item in keys.items():
if item[0] in data:
rdata[key] = np.array(data[item[0]])[sort_ind]
# Limit number of memory cells to show
# with respect to one available in data
if len(rdata['Mean'].shape)==4:
nMem = min(nMemToShow, rdata['Mean'].shape[3])
if len(rdata['Mean'].shape)<4:
nMem = 1
nTimes = rdata['Mean'].shape[0]
nPixels = rdata['Mean'].shape[1] * rdata['Mean'].shape[2]
nBins = nMem * nPixels
# Avoid too low values
if const[0] in ["Noise10Hz", "Offset10Hz"]:
rdata['Mean'][rdata['Mean'] < 0.1] = np.nan
if 'MeanBP' in rdata:
rdata['MeanBP'][rdata['MeanBP'] < 0.1] = np.nan
if 'NBP' in rdata:
rdata['NBP'] = rdata['NBP'].astype(float)
rdata['NBP'][rdata['NBP'] == spShape[0]*spShape[1]] = np.nan
rdata["NBP"] = rdata["NBP"] / (spShape[0] * spShape[1]) * 100
# Reshape: ASICs over cells for plotting
pdata = {}
for key in rdata:
if len(rdata[key].shape)<3:
continue
if len(rdata[key].shape)==4:
# take first nMem memory cells
pdata[key] = rdata[key][:, :, :, :nMem].reshape(
nTimes, nBins).swapaxes(0, 1)
else:
# case of no memory cells
pdata[key] = rdata[key].reshape(nTimes, nBins).swapaxes(0, 1)
# Summary over ASICs
adata = {}
for key in rdata:
if len(rdata[key].shape)<3 or nMem==1:
continue
adata[key] = np.nanmean(rdata[key], axis=(1, 2)).swapaxes(0, 1)
# Summary information over modules
for key in pdata:
if key not in mod_data:
mod_data[key] = []
if key == 'NBP':
mod_data[key].append(np.nansum(pdata[key], axis=0))
else:
mod_data[key].append(np.nanmean(pdata[key], axis=0))
mod_names.append(mod)
mod_times.append(ctimes[sort_ind])
# Plotting
for key in pdata:
if len(pdata[key].shape)<2:
continue
vmin,vmax = get_range(pdata[key][::-1].flatten(), plot_range)
if key == 'NBP':
unit = '[%]'
title = 'BadPixelsDark'
else:
unit = '[ADU]'
title = const[0]
title += ', module {}'.format(mod)
if keys[key][1] != '':
title += ', {}'.format(keys[key][1])
if gain is not None:
title += ', {}'.format(gain_titles[gain])
cb_label = '{}, {} {}'.format(const[0], keys[key][2], unit)
fname = '{}/{}_{}'.format(out_folder, const[0], mod.replace('_', ''))
for item in const[1:]:
fname = '{}_{}'.format(fname, item)
fname = '{}_ASIC_{}.png'.format(fname, key)
if nMem>1:
htype=HMType.INSET_AXIS
else:
htype=HMType.mro
hm_combine(pdata[key][::-1].astype(float), htype=htype, mem_cells=nMem,
x_label='Creation Time', y_label=sp_name,
y_ticks=np.arange(0, nBins+1, nBins//16),
y_ticklabels=np.arange(0, nPixels+1, nPixels//16),
x_ticklabels=ctimes_ticks,
x_ticks=np.arange(len(ctimes_ticks))+0.3,
title=title, cb_label=cb_label,
vmin=vmin, vmax=vmax,
fname=fname,
pad=[0.125, 0.125, 0.12, 0.185])
if nMem>1:
vmin,vmax = get_range(adata[key][::-1].flatten(), plot_range)
hm_combine(adata[key].astype(float), htype=HMType.mro,
x_label='Creation Time', y_label='Memory cell ID',
x_ticklabels=ctimes_ticks,
x_ticks=np.arange(len(ctimes_ticks))+0.3,
title=title, cb_label=cb_label,
fname=fname.replace('ASIC', 'MEM'),
vmin=vmin, vmax=vmax)
plt.show()
# Summary over modules
for key in mod_data:
if dclass in ['AGIPD', 'LPD']:
continue
if key == 'NBP':
unit = '[%]'
title = 'BadPixelsDark'
else:
unit = '[ADU]'
title = const[0]
        title += ', All modules'
if keys[key][1] != '':
title += ', {}'.format(keys[key][1])
if gain is not None:
title += ', {}'.format(gain_titles[gain])
fname = '{}/{}_{}'.format(out_folder, const[0], 'all')
for item in const[1:]:
fname = '{}_{}'.format(fname, item)
fname = '{}_ASIC_{}.png'.format(fname, key)
fig = plt.figure(figsize=(12,12) )
for i in range(len(mod_data[key])):
plt.scatter(mod_times[i], mod_data[key][i], label=mod_names[i])
plt.grid()
plt.xlabel('Creation Time')
plt.ylabel('{}, {} {}'.format(const[0], keys[key][2], unit))
        plt.legend(loc='best')
plt.title(title)
fig.savefig(fname)
```
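%% Cell type:markdown id: tags:
`get_range` clips the colour scale to a band around the median, with `plot_range` given in units of the median absolute deviation (MAD), which keeps single outliers from washing out the plots. A hedged, standalone sketch of that idea (the real implementation lives in `cal_tools.ana_tools` and may differ in detail):
%% Cell type:code id: tags:
``` python
import numpy as np

def mad_range_sketch(values, n_mad=3):
    """Return (vmin, vmax) as median +/- n_mad * MAD, ignoring NaNs."""
    values = np.asarray(values, dtype=float)
    med = np.nanmedian(values)
    mad = np.nanmedian(np.abs(values - med))
    return med - n_mad * mad, med + n_mad * mad

data = np.concatenate([np.random.normal(5000, 10, 1000), [20000.0]])  # one outlier
print(mad_range_sketch(data, n_mad=3))  # stays close to the bulk of the data
```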
......
%% Cell type:markdown id: tags:
# Statistical analysis of calibration factors
Author: Mikhail Karnevskiy, Version 0.2
Plot calibration constants retrieved from the calibration DB.
To be visualized, calibration constants are averaged per group of pixels. The plots show each calibration constant over time.
Values shown in the plots are saved to HDF5 files.
%% Cell type:code id: tags:
``` python
cluster_profile = "noDB" # The ipcluster profile to use
out_folder = "/gpfs/exfel/data/scratch/karnem/test_bla4/" # Output folder, required
use_existing = "/home/karnem/myscratch/PlotCalDB/SPB/AGIPD1M1/" # Input folder
dclass = "AGIPD" # Detector class
nMemToShow = 32 # Number of memory cells to be shown in plots over ASICs
range_offset = [4000., 5500, 6500, 8500] # plotting range for offset: high gain l, r, medium gain l, r
range_noise = [2.5, 15, 7.5, 17.0] # plotting range for noise: high gain l, r, medium gain l, r
range_gain = [0.8, 1.2, 0.8, 1.2] # plotting range for gain: high gain l, r, medium gain l, r
range_noise_e = [85., 500., 85., 500.] # plotting range for noise in [e-]: high gain l, r, medium gain l, r
range_slopesPC = [22.0, 27.0, -0.5, 1.5] # plotting range for slope PC: high gain l, r, medium gain l, r
range_slopesCI = [22.0, 27.0, -0.5, 1.5] # plotting range for slope CI: high gain l, r, medium gain l, r
range_slopesFF = [0.8, 1.2, 0.6, 1.2] # plotting range for slope FF: high gain l, r, medium gain l, r
plot_range = 3 # range for plotting in units of median absolute deviations
x_labels = ['Sensor Bias Voltage', 'Memory cells'] # parameters to be shown on X axis
spShape = [64, 64] # Shape of superpixel
gain_titles = ['High gain', 'Medium gain', 'Low gain'] # Title inset related to gain
```
%% Cell type:code id: tags:
``` python
import warnings
warnings.filterwarnings('ignore')
import numpy as np
from cal_tools.ana_tools import (
HMType,
IMType,
get_range,
hm_combine,
load_data_from_hdf5,
multi_union,
)
```
%% Cell type:code id: tags:
``` python
if use_existing == '':
use_existing = out_folder
```
%% Cell type:code id: tags:
``` python
print('Load data from {}/CalDBAna_{}_*.h5'.format(use_existing, dclass))
ret_constants = load_data_from_hdf5(
'{}/CalDBAna_{}_*.h5'.format(use_existing, dclass))
print('Evaluated data from {}/CalDBAna_{}_Gain*.h5'.format(out_folder, dclass))
ret_constants.update(load_data_from_hdf5(
'{}/CalDBAna_{}_Gain*.h5'.format(out_folder, dclass)))
```
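%% Cell type:markdown id: tags:
`load_data_from_hdf5` is the `cal_tools.ana_tools` helper that reads the stored dictionaries back from all HDF5 files matching the pattern. A rough, hypothetical equivalent using only `h5py` and `glob`, which flattens every dataset into one dict keyed by its HDF5 path:
%% Cell type:code id: tags:
``` python
import glob

import h5py

def load_h5_dicts_sketch(pattern):
    """Rough stand-in for load_data_from_hdf5: merge datasets from all
    files matching the glob pattern into one dict keyed by HDF5 path."""
    merged = {}
    for fname in glob.glob(pattern):
        with h5py.File(fname, 'r') as f:
            def visit(name, obj):
                if isinstance(obj, h5py.Dataset):
                    merged[name] = obj[()]
            f.visititems(visit)
    return merged

# data = load_h5_dicts_sketch('/some/folder/CalDBAna_AGIPD_*.h5')  # hypothetical path
```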
%% Cell type:code id: tags:
``` python
# Parameters for plotting
# Define range for plotting
rangevals = {
"Offset": [range_offset[0:2], range_offset[2:4]],
"Noise": [range_noise[0:2], range_noise[2:4]],
"Gain": [range_gain[0:2], range_gain[2:4]],
"Noise-e": [range_noise_e[0:2], range_noise_e[2:4]],
"SlopesPC": [range_slopesPC[0:2], range_slopesPC[2:4]],
"SlopesCI": [range_slopesCI[0:2], range_slopesCI[2:4]],
"SlopesFF": [range_slopesFF[0:2], range_slopesFF[2:4]]
}
keys = {
    'Mean': ['data', '', 'Mean over pixels'],
    'std': ['dataStd', '', r'$\sigma$ over pixels'],
    'MeanBP': ['dataBP', 'Good pixels only', 'Mean over pixels'],
    'NBP': ['nBP', '', 'Fraction of BP'],
    'stdBP': ['dataBPStd', 'Good pixels only', r'$\sigma$ over pixels'],
    'stdASIC': ['', '', r'$\sigma$ over ASICs'],
    'stdCell': ['', '', r'$\sigma$ over Cells'],
}
```
%% Cell type:code id: tags:
``` python
print('Plot calibration constants')
# loop over constant type
for const, modules in ret_constants.items():
const = const.split("_")
gain = [int(x[1]) for x in const if 'g' in x]
gain = gain[0] if len(gain)>0 else None
print('Const: {}, gain {}'.format(const, gain))
# loop over modules
mod_data = {}
mod_data['stdASIC'] = []
mod_data['stdCell'] = []
mod_names = []
mod_times = []
# Loop over modules
for mod, data in modules.items():
ctimes = np.array(data["ctime"])
ctimes_ticks = [x.strftime('%y-%m-%d') for x in ctimes]
if ("mdata" in data):
cmdata = np.array(data["mdata"])
for i, tick in enumerate(ctimes_ticks):
for entr in x_labels:
key = entr[0].upper()
val = cmdata[i].get(entr, None)
if val is not None:
ctimes_ticks[i] += ', {}={:.1f}'.format(key, val)
sort_ind = np.argsort(ctimes_ticks)
ctimes_ticks = list(np.array(ctimes_ticks)[sort_ind])
        # Create a dataset sorted by creation date
rdata = {}
for key, item in keys.items():
if item[0] in data:
rdata[key] = np.array(data[item[0]])[sort_ind]
nTimes = rdata['Mean'].shape[0]
nPixels = rdata['Mean'].shape[1] * rdata['Mean'].shape[2]
nBins = nMemToShow * nPixels
if 'NBP' in rdata:
rdata["NBP"] = rdata["NBP"] / (spShape[0] * spShape[1]) * 100
# Reshape: ASICs over cells for plotting
pdata = {}
for key in rdata:
pdata[key] = rdata[key][:, :, :, :nMemToShow].reshape(
nTimes, nBins).swapaxes(0, 1)
# Summary information over modules
for key in pdata:
if key not in mod_data:
mod_data[key] = []
mod_data[key].append(np.nanmean(pdata[key], axis=0))
# Avoid too low values
if const[0] in ["Noise", "Offset", "Noise-e"] and key in ['Mean', 'MeanBP']:
mod_data[key][-1][mod_data[key][-1] == 0.0] = IMType.STRANGE_VAL.value
if key=='NBP':
if 'Mean' in mod_data:
mod_data['Mean'][-1][mod_data[key][-1] == 100] = IMType.ALL_BAD.value
if 'MeanBP' in mod_data:
mod_data['MeanBP'][-1][mod_data[key][-1] == 100] = IMType.ALL_BAD.value
mod_data[key][-1][mod_data[key][-1] == 100] = IMType.ALL_BAD.value
mod_data['stdASIC'].append(np.nanstd(
np.nanmean(rdata['Mean'][:, :, :, :nMemToShow], axis=(1, 2)), axis=1))
mod_data['stdCell'].append(np.nanstd(
np.nanmean(rdata['Mean'][:, :, :, :nMemToShow], axis=3), axis=(1, 2)))
mod_names.append(mod)
mod_times.append(ctimes_ticks)
    # Insert NaNs so that all modules have equally long time series
uTime = mod_times[0]
for tlist in mod_times:
uTime = sorted(multi_union(uTime, tlist))
for i, tlist in enumerate(mod_times):
for t, time in enumerate(uTime):
if t == len(tlist) or time != tlist[t]:
tlist.insert(t, time)
for key in mod_data:
mod_data[key][i] = np.insert(
mod_data[key][i], t, IMType.NO_CONST.value )
# Plotting
nModules = len(mod_names)
mod_idx = np.argsort(mod_names)
for key in mod_data:
vmin,vmax = get_range(np.array(mod_data[key])[mod_idx][::-1].flatten(), plot_range)
htype = None
if const[0] in ['SlopesFF', 'SlopesPC', 'SlopesCI']:
htype = HMType.INSET_1D
if key == 'NBP':
unit = '[%]'
title = 'BadPixelsDark'
else:
unit = '[ADU]'
title = const[0]
title += ', All modules'
if keys[key][1] != '':
title += ', {}'.format(keys[key][1])
if gain is not None:
title += ', {}'.format(gain_titles[gain])
cb_label = '{}, {} {}'.format(const[0], keys[key][2], unit)
hm_combine(np.array(mod_data[key])[mod_idx][::-1],
y_ticks=np.arange(nModules)[::-1]+0.8,
y_ticklabels=np.array(mod_names)[mod_idx],
x_label='Creation Time', y_label='Module ID',
x_ticklabels=ctimes_ticks, x_ticks=np.arange(
len(ctimes_ticks))+0.3,
title=title, cb_label=cb_label,
fname='{}/{}_all_g{}_{}.png'.format(
out_folder, const[0], gain, key),
vmin=vmin, vmax=vmax,
pad=[0.125, 0.151, 0.12, 0.17], htype=htype)
```
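%% Cell type:markdown id: tags:
The "Insert NaNs" step above aligns the per-module time series onto the union of all creation times, filling the gaps with a placeholder so that every module contributes an equally long row to the heat map. A minimal sketch of that alignment (the helper name and placeholder value are illustrative only):
%% Cell type:code id: tags:
``` python
import numpy as np

def align_on_union_sketch(times_per_module, values_per_module, missing=np.nan):
    """Align several (times, values) series on the sorted union of all times."""
    union = sorted(set().union(*times_per_module))
    aligned = []
    for times, values in zip(times_per_module, values_per_module):
        lookup = dict(zip(times, values))
        aligned.append([lookup.get(t, missing) for t in union])
    return union, np.array(aligned)

union, table = align_on_union_sketch(
    [['19-01-01', '19-02-01'], ['19-02-01', '19-03-01']],
    [[1.0, 2.0], [3.0, 4.0]])
print(union)   # ['19-01-01', '19-02-01', '19-03-01']
print(table)   # approximately [[ 1.  2. nan], [nan  3.  4.]]
```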
......