Commit b3dfdf22 authored by Ivana Klackova

Fixing FF summary performance plots when LitFrame is enabled in DAQ.

parent eb54e6ee
Merge request !890: [AGIPD] [FF] Fixing FF summary performance plots
%% Cell type:markdown id: tags:
# Gain Characterization Summary #
%% Cell type:code id: tags:
``` python
in_folder = "" # in this notebook, in_folder is not used as the data source is in the destination folder
out_folder = "" # the folder to output to, required
metadata_folder = "" # Directory containing calibration_metadata.yml when run by xfel-calibrate
hist_file_template = "hists_m{:02d}_sum.h5"
proc_folder = "" # Path to corrected image data used to create histograms and validation plots
raw_folder = "/gpfs/exfel/exp/MID/202030/p900137/raw" # folder of raw data. This is used to save information of source data of generated constants, required
run = 449 # runs of image data used to create histograms
raw_folder = "" # folder of raw data. This is used to save information of source data of generated constants, required
run = 38 # runs of image data used to create histograms
karabo_id = "MID_DET_AGIPD1M-1" # karabo karabo_id
karabo_id = "SPB_DET_AGIPD1M-1" # karabo karabo_id
ctrl_source_template = '{}/MDL/FPGA_COMP' # path to control information
karabo_id_control = "MID_EXP_AGIPD1M1" # karabo-id for control device
karabo_id_control = "SPB_IRU_AGIPD1M1" # karabo-id for control device
use_dir_creation_date = True # use the creation date of the input dir for database queries
cal_db_interface = "tcp://max-exfl-cal001:8015#8045" # the database interface to use
cal_db_timeout = 30000 # in milliseconds
local_output = False # output constants locally
db_output = False # output constants to database
# Fit parameters
peak_range = [-30,30,35,65,80,130,145,200] # where to look for the peaks, [a0, b0, a1, b1, ...] exactly 8 elements
# Bad-pixel thresholds
d0_lim = [10, 70] # hard limits for d0 value (distance between noise and first peak)
gain_lim = [0.80, 1.2] # threshold on the gain as a relative number; contributes to the BadPixel bit "Gain_deviation"
cell_range = [0,352] # range of cells to be considered, [0,0] for all
pixel_range = [0,0,512,128] # range of pixels x1,y1,x2,y2 to consider [0,0,512,128] for all
n_peaks_fit = 4 # Number of gaussian peaks to fit including noise peak
# Detector conditions
mem_cells = -1 # number of memory cells used, negative values for auto-detection.
bias_voltage = 0. # Bias voltage
acq_rate = 0. # the detector acquisition rate, use 0 to try to auto-determine
gain_setting = -1 # the gain setting, negative values for auto-detection.
photon_energy = 8.05 # photon energy in keV
integration_time = -1 # integration time, negative values for auto-detection.
```
%% Cell type:code id: tags:
``` python
import os
import re
import traceback
import warnings
from multiprocessing import Pool
import h5py
import matplotlib.pyplot as plt
import numpy as np
import tabulate
from cal_tools.agipdlib import AgipdCtrl
from cal_tools.agipdutils_ff import (
    BadPixelsFF,
    any_in,
    fit_n_peaks,
    gaussian_sum,
    get_starting_parameters,
)
from cal_tools.ana_tools import get_range, save_dict_to_hdf5
from cal_tools.enums import BadPixels
from cal_tools.tools import (
    get_dir_creation_date,
    get_pdu_from_db,
    get_report,
    module_index_to_qm,
    send_to_db,
)
from extra_data import RunDirectory, stack_detector_data
from extra_geom import AGIPD_1MGeometry, AGIPD_500K2GGeometry
from iCalibrationDB import Conditions, Constants
from iminuit import Minuit
from IPython.display import Latex, display
from XFELDetAna.plotting.simpleplot import simplePlot
%matplotlib inline
warnings.filterwarnings('ignore')
```
%% Cell type:code id: tags:
``` python
peak_range = np.reshape(peak_range,(4,2))
```
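%% Cell type:markdown id: tags:
After the reshape, each row of `peak_range` is one (low, high) ADU search window, noise peak first. A minimal sketch of the layout, using the notebook's default values:
%% Cell type:code id: tags:
``` python
# Minimal sketch: the flat 8-element list [a0, b0, a1, b1, ...] becomes four
# (low, high) search windows, one per fitted peak (noise peak first).
flat = [-30, 30, 35, 65, 80, 130, 145, 200]
for n, (lo, hi) in enumerate(np.reshape(flat, (4, 2))):
    print(f"peak {n}: search in [{lo}, {hi}] ADU")
# peak 0: search in [-30, 30] ADU  <- noise peak
# peak 1: search in [35, 65] ADU   <- one-photon peak, and so on
```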
%% Cell type:code id: tags:
``` python
# Get operation conditions
ctrl_source = ctrl_source_template.format(karabo_id_control)
run_folder = f'{raw_folder}/r{run:04d}/'
raw_dc = RunDirectory(run_folder)

# Read operating conditions from AGIPD00 files
instrument_src_mod = [
    s for s in list(raw_dc.all_sources) if "0CH" in s][0]
ctrl_src = [
    s for s in list(raw_dc.all_sources) if ctrl_source in s][0]

# Evaluate creation time
creation_time = None
if use_dir_creation_date:
    creation_time = get_dir_creation_date(raw_folder, run)

agipd_cond = AgipdCtrl(
    run_dc=raw_dc,
    image_src=instrument_src_mod,
    ctrl_src=ctrl_src,
    raise_error=False,  # to be able to process very old data without some settings values
)

if mem_cells < 0:
    mem_cells = agipd_cond.get_num_cells()
if mem_cells is None:
    raise ValueError(f"No raw images found in {run_folder}")
if acq_rate == 0.:
    acq_rate = agipd_cond.get_acq_rate()
if gain_setting < 0:
    gain_setting = agipd_cond.get_gain_setting(creation_time)
if bias_voltage == 0.:
    bias_voltage = agipd_cond.get_bias_voltage(karabo_id_control)
if integration_time < 0:
    integration_time = agipd_cond.get_integration_time()

# Evaluate detector instance for mapping
instrument = karabo_id.split("_")[0]
if instrument == "HED":
    nmods = 8
else:
    nmods = 16

print(f"Using {creation_time} as creation time")
print(f"Operating conditions are:\n• Bias voltage: {bias_voltage}\n• Memory cells: {mem_cells}\n"
      f"• Acquisition rate: {acq_rate}\n• Gain setting: {gain_setting}\n• Integration time: {integration_time}\n"
      f"• Photon Energy: {photon_energy}\n")
```
%% Cell type:code id: tags:
``` python
# Load constants for all modules
keys = ['g0mean', 'g1mean', 'gain', 'chi2_ndof', 'mask']
all_keys = set(keys)
for i in range(n_peaks_fit):
    all_keys.add(f'g{i}mean')
    all_keys.add(f'g{i}sigma')

fit_data = {}
labels = {'g0mean': 'Noise peak position [ADU]',
          'g1mean': 'First photon peak [ADU]',
          'gain': rf"Gain [ADU/keV], $\gamma$={photon_energy} [keV]",
          'chi2_ndof': r'$\chi^2$/nDOF',
          'mask': 'Fraction of bad pixels over cells'}

modules = []
karabo_da = []
for mod in range(nmods):
    qm = module_index_to_qm(mod)
    fit_data[mod] = {}
    try:
        hf = h5py.File(f'{out_folder}/fits_m{mod:02d}.h5', 'r')
        shape = hf['data/g0mean'].shape
        for key in keys:
            fit_data[mod][key] = hf[f'data/{key}'][()]
        print(f"{in_folder}/{hist_file_template.format(mod)}")
        modules.append(mod)
        karabo_da.append(f"AGIPD{mod:02d}")
    except Exception as e:
        err = f"Error: {e}\nError traceback: {traceback.format_exc()}"
        print(f"No fit data available for module {qm}, {err}")
```
%% Cell type:code id: tags:
``` python
# Calculate SlopesFF and BadPixels to be sent to the DB
bpmask = {}
slopesFF = {}

for mod in modules:
    bpmask[mod] = np.zeros(fit_data[mod]['mask'].shape).astype(np.int32)
    bpmask[mod][any_in(fit_data[mod]['mask'], BadPixelsFF.NO_ENTRY.value)] = BadPixels.FF_NO_ENTRIES.value
    bpmask[mod][any_in(fit_data[mod]['mask'],
                       BadPixelsFF.GAIN_DEVIATION.value)] |= BadPixels.FF_GAIN_DEVIATION.value
    bpmask[mod][any_in(fit_data[mod]['mask'],
                       BadPixelsFF.FIT_FAILED.value | BadPixelsFF.ACCURATE_COVAR.value |
                       BadPixelsFF.CHI2_THRESHOLD.value | BadPixelsFF.GAIN_THRESHOLD.value |
                       BadPixelsFF.NOISE_PEAK_THRESHOLD.value | BadPixelsFF.PEAK_WIDTH_THRESHOLD.value)] |= BadPixels.FF_GAIN_EVAL_ERROR.value

    # Set values for bad pixels to the average across good pixels for a given module and cell
    slopesFF[mod] = np.copy(fit_data[mod]['gain'])
    slopesFF[mod][fit_data[mod]['mask'] > 0] = np.nan
    gain_mean = np.nanmean(slopesFF[mod], axis=(1, 2))
    for i in range(slopesFF[mod].shape[0]):
        slopesFF[mod][i][fit_data[mod]['mask'][i] > 0] = gain_mean[i]
```
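%% Cell type:markdown id: tags:
The cell above folds several fine-grained FF fit flags into the coarser `BadPixels` bits stored in the database. Assuming `any_in(arr, bits)` flags entries where any of the given bits are set (consistent with its use above), the mapping reduces to a plain bitwise test; a self-contained sketch with hypothetical flag values:
%% Cell type:code id: tags:
``` python
# Sketch of the bit translation, using hypothetical flag values rather than
# the real BadPixelsFF/BadPixels enum members.
FIT_FAILED = 1 << 0       # fine-grained FF flags (hypothetical values)
CHI2_THRESHOLD = 1 << 1
GAIN_EVAL_ERROR = 1 << 5  # coarse DB-level flag (hypothetical value)

ff = np.array([0, FIT_FAILED, CHI2_THRESHOLD, FIT_FAILED | CHI2_THRESHOLD])
db = np.zeros_like(ff)
# equivalent of: db[any_in(ff, FIT_FAILED | CHI2_THRESHOLD)] |= GAIN_EVAL_ERROR
db[(ff & (FIT_FAILED | CHI2_THRESHOLD)) != 0] |= GAIN_EVAL_ERROR
print(db)  # [ 0 32 32 32]
```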
%% Cell type:code id: tags:
``` python
# Read report path and create file location tuple to add with the injection
proposal = list(filter(None, raw_folder.strip('/').split('/')))[-2]
file_loc = f'Proposal: {proposal}, Run: {run}'
report = get_report(metadata_folder)
```
%% Cell type:code id: tags:
``` python
# Set the operating condition
condition = Conditions.Illuminated.AGIPD(mem_cells, bias_voltage, 9.2,
                                         pixels_x=512, pixels_y=128, beam_energy=None,
                                         acquisition_rate=acq_rate, gain_setting=gain_setting,
                                         integration_time=integration_time)

# Modify acceptable deviations for the integration time condition if and only
# if the integration time is not using the standard value (12).
if integration_time != 12:
    for p in condition.parameters:
        if p.name == 'Integration Time':
            p.lower_deviation = 5
            p.upper_deviation = 5

# Add a deviation to enable constant retrieval for runs operating with at most
# as many memory cells as this injected SlopesFF constant.
for p in condition.parameters:
    if p.name == 'Memory cells':
        p.lower_deviation = mem_cells

# Retrieve a list of all modules corresponding to processed karabo_das
db_modules = get_pdu_from_db(karabo_id, karabo_da, Constants.AGIPD.SlopesFF(),
                             condition, cal_db_interface,
                             snapshot_at=creation_time)
```
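%% Cell type:markdown id: tags:
The deviations define an acceptance window around each condition parameter when constants are matched at retrieval time, so setting `lower_deviation = mem_cells` on "Memory cells" makes this constant retrievable by any run operating with up to `mem_cells` cells. A sketch of the window, assuming matching accepts values in `[value - lower_deviation, value + upper_deviation]`:
%% Cell type:code id: tags:
``` python
# Sketch of the acceptance window implied by the deviations above, assuming
# retrieval matches values in [value - lower_deviation, value + upper_deviation].
def acceptance_window(value, lower_deviation=0.0, upper_deviation=0.0):
    return value - lower_deviation, value + upper_deviation

lo, hi = acceptance_window(mem_cells, lower_deviation=mem_cells)
print(f"runs with {lo:.0f}..{hi:.0f} memory cells can retrieve this constant")
```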
%% Cell type:code id: tags:
``` python
# Send constants to the DB
def send_const(mod, pdu):
    try:
        # gain
        constant = Constants.AGIPD.SlopesFF()
        constant.data = np.moveaxis(np.moveaxis(slopesFF[mod], 0, 2), 0, 1)
        send_to_db(
            pdu, karabo_id, constant, condition, file_loc,
            report, cal_db_interface, creation_time,
            timeout=cal_db_timeout,
        )
        # bad pixels
        constant_bp = Constants.AGIPD.BadPixelsFF()
        constant_bp.data = np.moveaxis(np.moveaxis(bpmask[mod], 0, 2), 0, 1)
        send_to_db(
            pdu, karabo_id, constant_bp, condition, file_loc,
            report, cal_db_interface, creation_time,
            timeout=cal_db_timeout,
        )
    except Exception as e:
        err = f"Error: {e}\nError traceback: {traceback.format_exc()}"
        when = None

# Check that the shape is as expected
if db_output:
    if slopesFF[modules[0]].shape == (mem_cells, 512, 128):
        with Pool(processes=len(modules)) as pool:
            const_out = pool.starmap(send_const, zip(modules, db_modules))
    else:
        print("Constants are not sent to the DB because of a shape mismatch")
        print(f"Expected {(mem_cells, 512, 128)}, observed {slopesFF[modules[0]].shape}")

condition_dict = {}
for entry in condition.to_dict()['parameters']:
    key = entry.pop('parameter_name')
    del entry['description']
    del entry['flg_available']
    condition_dict[key] = entry

# Create the same file structure as database constant files, in which
# each constant type has its corresponding condition and data.
if local_output:
    for mod, pdu in zip(modules, db_modules):
        qm = module_index_to_qm(mod)
        file = f"{out_folder}/slopesff_bpmask_module_{qm}.h5"
        dic = {
            pdu: {
                'SlopesFF': {
                    0: {
                        'condition': condition_dict,
                        'data': np.moveaxis(np.moveaxis(slopesFF[mod], 0, 2), 0, 1)}
                },
                'BadPixelsFF': {
                    0: {
                        'condition': condition_dict,
                        'data': np.moveaxis(np.moveaxis(bpmask[mod], 0, 2), 0, 1)}
                },
            }
        }
        save_dict_to_hdf5(dic, file)
```
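%% Cell type:markdown id: tags:
For reference, the nested dictionary above should end up as nested HDF5 groups (assuming `save_dict_to_hdf5` maps dict keys to groups and arrays to datasets), so a locally saved constant can be read back with plain h5py. A minimal sketch, with a hypothetical PDU name standing in for whatever name was written:
%% Cell type:code id: tags:
``` python
# Sketch: reading a locally saved constant back with plain h5py.
# "AGIPD_SIV1_AGIPDV11_M001" is a hypothetical physical detector unit name.
with h5py.File(f"{out_folder}/slopesff_bpmask_module_Q1M1.h5", "r") as f:
    slopes = f["AGIPD_SIV1_AGIPDV11_M001/SlopesFF/0/data"][()]
    print(slopes.shape)  # (128, 512, mem_cells) after the moveaxis above
```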
%% Cell type:code id: tags:
``` python
# Define AGIPD geometry
# TODO: find a better way to do this?
if instrument == "HED":
    geom = AGIPD_500K2GGeometry.from_origin()
else:
    geom = AGIPD_1MGeometry.from_quad_positions(quad_pos=[
        (-525, 625),
        (-550, -10),
        (520, -160),
        (542.5, 475),
    ])
```
%% Cell type:code id: tags:
``` python
# Create the arrays that will be used for figures.
# A dictionary contains all the data for each of the processing stages (gains, mean, slopesFF...).
# Each array corresponds to the data for all processed modules.
# These are updated with their fit/slopes data in the following loops.
if cell_range == [0, 0]:
    cell_range[1] = shape[0]

const_data = {}
for key in keys:
    const_data[key] = np.full((nmods, shape[0], 512, 128), np.nan)
    for i in range(nmods):
        if key in fit_data[i]:
            const_data[key][i, :, pixel_range[0]:pixel_range[2],
                            pixel_range[1]:pixel_range[3]] = fit_data[i][key]

const_data['slopesFF'] = np.full((nmods, shape[0], 512, 128), np.nan)
labels['slopesFF'] = rf'slopesFF [ADU/keV], $\gamma$={photon_energy} [keV]'
for i in range(nmods):
    if i in slopesFF:
        const_data['slopesFF'][i, :, pixel_range[0]:pixel_range[2],
                               pixel_range[1]:pixel_range[3]] = slopesFF[i]
```
%% Cell type:markdown id: tags:
## Summary across pixels ##
%% Cell type:code id: tags:
``` python
for key in const_data.keys():
    fig = plt.figure(figsize=(20, 20))
    ax = fig.add_subplot(111)
    if key == 'mask':
        data = np.nanmean(const_data[key] > 0, axis=1)
        vmin, vmax = (0, 1)
    else:
        data = np.nanmean(const_data[key], axis=1)
        vmin, vmax = get_range(data, 5)
    ax = geom.plot_data_fast(data, ax=ax, cmap="jet", vmin=vmin, vmax=vmax, figsize=(20, 20))
    _ = ax.set_title(labels[key])
```
%% Cell type:markdown id: tags:
## Summary histograms ##
%% Cell type:code id: tags:
``` python
sel = (const_data['mask'] == 0)

module_mean = np.nanmean(const_data["gain"], axis=(1, 2, 3))
module_mean = module_mean[:, np.newaxis, np.newaxis, np.newaxis]

dsets = {'d01 [ADU]': const_data["g1mean"] - const_data["g0mean"],
         'gain [ADU/keV]': const_data["gain"],
         'gain relative to module mean': const_data["gain"] / module_mean,
         }

fig = plt.figure(figsize=(16, 5))
for i, (par, data) in enumerate(dsets.items()):
    ax = fig.add_subplot(1, 3, i + 1)
    plt_range = np.nanmin(data), np.nanmax(data)
    if 'd01' in par:
        ax.axvline(d0_lim[0])
        ax.axvline(d0_lim[1])
    elif 'rel' in par:
        ax.axvline(gain_lim[0])
        ax.axvline(gain_lim[1])
    num_bins = 100
    _ = ax.hist(data.flatten(),
                bins=num_bins, range=plt_range,
                log=True, color='red',
                label='all fits',)
    a = ax.hist(data[sel].flatten(),
                bins=num_bins, range=plt_range,
                log=True, color='g',
                label='good fits only',
                )
    ax.set_xlabel(f"{par}")
    ax.legend()
```
%% Cell type:markdown id: tags:
## Summary across cells ##
Good pixels only.
%% Cell type:code id: tags:
``` python
for key in const_data.keys():
    data = np.copy(const_data[key])
    if key == 'mask':
        data = data > 0
    else:
        data[const_data['mask'] > 0] = np.nan
    d = []
    for i in range(nmods):
        d.append({'x': np.arange(data[i].shape[0]),
                  'y': np.nanmean(data[i], axis=(1, 2)),
                  'drawstyle': 'steps-pre',
                  'label': f'{i}',
                  'linewidth': 2,
                  'linestyle': '--' if i > 7 else '-'
                  })
    fig = plt.figure(figsize=(15, 6))
    ax = fig.add_subplot(111)
    _ = simplePlot(d, xrange=(-12, 510),
                   x_label='Memory Cell ID',
                   y_label=labels[key],
                   use_axis=ax,
                   legend='top-left-frame-ncol8',)
    ylim = ax.get_ylim()
    ax.set_ylim(ylim[0], ylim[1] + np.abs(ylim[1] - ylim[0]) * 0.2)
    ax.grid()
```
%% Cell type:markdown id: tags:
## Summary table ##
%% Cell type:code id: tags:
``` python
table = []
for i in modules:
    table.append((i,
                  f"{np.nanmean(slopesFF[i]):0.1f} +- {np.nanstd(slopesFF[i]):0.2f}",
                  f"{np.nanmean(bpmask[i] > 0) * 100:0.1f} ({np.nansum(bpmask[i] > 0)})"
                  ))

all_SFF = np.array([list(sff) for sff in slopesFF.values()])
all_MSK = np.array([list(msk) for msk in bpmask.values()])

table.append(('overall',
              f"{np.nanmean(all_SFF):0.1f} +- {np.nanstd(all_SFF):0.2f}",
              f"{np.nanmean(all_MSK > 0) * 100:0.1f} ({np.nansum(all_MSK > 0)})"
              ))

md = display(Latex(tabulate.tabulate(table, tablefmt='latex',
                                     headers=["Module", "Gain [ADU/keV]", "Bad pixels [%(Count)]"])))
```
%% Cell type:markdown id: tags:
## Performance plots
%% Cell type:code id: tags:
``` python
def get_trains_data(run_folder, source, include, tid=None):
    """Load a single train for all modules.

    :param run_folder: Path to the folder with data
    :param source: Data source to be loaded
    :param include: Filename pattern used to select files
    :param tid: Train ID to be loaded. The first train is used if None is given
    """
    run_data = RunDirectory(run_folder, include)
    if tid:
        tid, data = run_data.select('*/DET/*', source).train_from_id(tid)
        return tid, stack_detector_data(data, source, modules=nmods)
    else:
        for tid, data in run_data.select('*/DET/*', source).trains(require_all=True):
            return tid, stack_detector_data(data, source, modules=nmods)
    return None, None


include = '*S00000*'
tid, orig = get_trains_data(f'{proc_folder}/r{run:04d}/', 'image.data', include)
orig = orig[cell_range[0]:cell_range[1], ...]
```
%% Cell type:code id: tags:
``` python
# Get cell IDs from the 1st train
run_data = RunDirectory(run_folder, include)
sel = run_data.select(f'{karabo_id}/DET/0CH0:xtdf', 'image.cellId')
tid0, cell = sel.train_from_index(0)
stacked_cells = stack_detector_data(cell, 'image.cellId')[0, :, 0]
```
%% Cell type:code id: tags:
``` python
# FIXME: mask bad pixels from median
# mask = const_data['BadPixelsFF']

corrections = const_data['slopesFF']  # (16, shape[0], 512, 128); shape[0] = cell_range[1] - cell_range[0]
corrections = np.moveaxis(corrections, 1, 0)  # (shape[0], 16, 512, 128)
rel_corr = corrections / np.nanmedian(corrections)

# If the LitFrame finder is enabled in the DAQ, only the lit cells are recorded.
# Slice the per-cell corrections to the recorded cell IDs to avoid a shape
# mismatch and to make sure the right cells are corrected.
if np.diff(cell_range)[0] == mem_cells:
    rel_corr = rel_corr[stacked_cells[0]:stacked_cells[-1] + 1]

corrected = orig / rel_corr
```
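%% Cell type:markdown id: tags:
The slicing above is the core of this fix: with the LitFrame finder enabled, the DAQ records only the lit cells, so `orig` covers a contiguous window of cell IDs while `rel_corr` still covers all memory cells. A toy sketch of the shape bookkeeping (all sizes illustrative):
%% Cell type:code id: tags:
``` python
# Toy sketch of the LitFrame slicing: corrections exist for every memory cell,
# but the DAQ recorded only cells 2..201, so the corrections must be cut to
# the same cell-ID window before dividing. All sizes here are illustrative.
toy_n_cells, toy_n_mods = 352, 16
toy_corr = np.random.rand(toy_n_cells, toy_n_mods, 8, 4)   # per-cell corrections
toy_cell_ids = np.arange(2, 202)                           # cell IDs actually recorded
toy_data = np.random.rand(len(toy_cell_ids), toy_n_mods, 8, 4)

toy_corr = toy_corr[toy_cell_ids[0]:toy_cell_ids[-1] + 1]
print(toy_corr.shape == toy_data.shape)  # True: division now lines up per cell
```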
%% Cell type:markdown id: tags:
### Mean value not corrected (train 0)
%% Cell type:code id: tags:
``` python
fig = plt.figure(figsize=(20,20))
ax = fig.add_subplot(111)
odata = np.nanmean(orig, axis=0)
vmin, vmax = get_range(odata, 5)
ax = geom.plot_data_fast(odata, ax=ax, cmap="jet", vmin=vmin, vmax=vmax, figsize=(20,20))
_ = ax.set_title("Original data, mean across one train")
```
%% Cell type:markdown id: tags:
### Mean value corrected (train 0)
%% Cell type:code id: tags:
``` python
fig = plt.figure(figsize=(20,20))
ax = fig.add_subplot(111)
cdata = np.nanmean(corrected, axis=0)
ax = geom.plot_data_fast(cdata, ax=ax, cmap="jet", vmin=vmin, vmax=vmax, figsize=(20,20))
_ = ax.set_title("Corrected data, mean across one train")
```
%% Cell type:markdown id: tags:
### Laplace transform of mean image
%% Cell type:code id: tags:
``` python
from scipy.ndimage import laplace

cmax = np.max(cdata)
omax = np.max(odata)
clap = np.zeros_like(cdata)
olap = np.zeros_like(odata)
for i in range(nmods):
    clap[i] = np.abs(laplace(cdata[i].astype(float) / cmax))
    olap[i] = np.abs(laplace(odata[i].astype(float) / omax))

fig = plt.figure(figsize=(20, 10))
vmin, vmax = get_range(olap, 2)
ax = fig.add_subplot(121)
ax = geom.plot_data_fast(olap, ax=ax, cmap="jet", vmin=vmin, vmax=vmax)
_ = ax.set_title("Laplace (original data)")
ax = fig.add_subplot(122)
ax = geom.plot_data_fast(clap, ax=ax, cmap="jet", vmin=vmin, vmax=vmax)
_ = ax.set_title("Laplace (gain corrected data)")
```
%% Cell type:markdown id: tags:
### Histogram of corrected and uncorrected spectrum (train 0)
%% Cell type:code id: tags:
``` python
######################################
# FIT PEAKS
######################################
x_range = [peak_range[0][0], peak_range[-1][-1]]
nb = x_range[1] - x_range[0] + 1
sel = ~np.isnan(corrected)

fig = plt.figure(figsize=(10, 5))
ax = fig.add_subplot(111)
y, xe, _ = ax.hist(corrected[sel].flatten(), bins=nb, range=x_range, label='corrected', alpha=0.5)
# get the bin centers from the bin edges
xc = xe[:-1] + (xe[1] - xe[0]) / 2

pars, _ = get_starting_parameters(xc, y, peak_range, 4)
minuit = fit_n_peaks(xc, y, pars, x_range, fix_d01=False, sigma_limit=1)
pc = minuit.args
resc = minuit.fitarg
yfc = gaussian_sum(xc, 4, *pc)
plt.plot(xc, yfc, label='corrected fit')

y, _, _ = ax.hist(orig[sel].flatten(), bins=nb, range=x_range, label='original', alpha=0.5)
pars, _ = get_starting_parameters(xc, y, peak_range, 4)
minuit = fit_n_peaks(xc, y, pars, x_range, fix_d01=False, sigma_limit=1)
po = minuit.args
reso = minuit.fitarg
yfo = gaussian_sum(xc, 4, *po)
plt.plot(xc, yfo, label='original fit')

plt.title("Signal spectrum, first train")
plt.xlabel('[ADU]')
plt.legend()
plt.show()
```
%% Cell type:markdown id: tags:
### Summary table
%% Cell type:code id: tags:
``` python
# median_absolute_deviation was removed in SciPy 1.9; median_abs_deviation with
# scale='normal' reproduces the old default 1.4826 scaling.
from scipy.stats import median_abs_deviation

def mad(x):
    return median_abs_deviation(x, scale='normal')

table = []
headers = ["Parameter",
           "Value (original data)",
           "Value (gain corrected data)",
           "Relative difference"]

for i in range(4):
    table.append((f"Sigma{i} (ADU)",
                  f"{reso[f'g{i}sigma']:0.2f} ",
                  f"{resc[f'g{i}sigma']:0.2f} ",
                  f"{(reso[f'g{i}sigma'] - resc[f'g{i}sigma']) / reso[f'g{i}sigma']:0.2f} ",
                  ))

ovar = np.std(odata)
cvar = np.std(cdata)
table.append(("RMS of mean image",
              f"{ovar:0.3f} ",
              f"{cvar:0.3f} ",
              f"{(ovar - cvar) / ovar:0.3f} ",
              ))

omin, omax = get_range(odata, 5)
cmin, cmax = get_range(cdata, 5)
ovar = np.std(odata[(odata > omin) & (odata < omax)])
cvar = np.std(cdata[(cdata > cmin) & (cdata < cmax)])
table.append(("RMS of mean image (mu+-5sigma)",
              f"{ovar:0.3f} ",
              f"{cvar:0.3f} ",
              f"{(ovar - cvar) / ovar:0.3f} ",
              ))

ovar = mad(odata.flatten())
cvar = mad(cdata.flatten())
table.append(("MAD of mean image",
              f"{ovar:0.3f} ",
              f"{cvar:0.3f} ",
              f"{(ovar - cvar) / ovar:0.3f} ",
              ))

ovar = np.median(olap)
cvar = np.median(clap)
table.append(("Median Laplace",
              f"{ovar:0.3f} ",
              f"{cvar:0.3f} ",
              f"{(ovar - cvar) / ovar:0.3f} ",
              ))

md = display(Latex(tabulate.tabulate(table,
                                     tablefmt='latex',
                                     headers=headers)))
```