Skip to content
Snippets Groups Projects
Commit e8ffcd11 authored by Steffen Hauf's avatar Steffen Hauf
Browse files

Update Jungfrau_dark_analysis_all_gains_NBC.ipynb

parent 4370a8ea
No related branches found
No related tags found
1 merge request!109Add a manual slow control data to Jungfrau notebooks
%% Cell type:markdown id: tags:
# Jungfrau Dark Image Characterization #
Version: 0.1, Author: M. Ramilli, S. Hauf
Analyzes Jungfrau dark image data to deduce offset, noise and resulting bad pixel maps
%% Cell type:code id: tags:
``` python
in_folder = '/gpfs/exfel/exp/SPB/201921/p002429/raw/' # folder under which runs are located, required
out_folder = '' # path to place reports at, required
sequences = 1 # number of sequence files in that run
path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # template to use for file name, double escape sequence number
path_inset = "DA06" # file inset for image data
path_inset_control = "DA06" # file inset for control data
cluster_profile = 'noDB' # the ipcluster profile name
cal_db_interface = 'tcp://max-exfl016:8016' # calibrate db interface to connect to
integration_time = 1000 # integration time in us, will be overwritten by value in file
bias_voltage = 90 # sensor bias voltage in V, will be overwritten by value in file
badpixel_threshold_sigma = 20. # bad pixels defined by values outside n times this std from median
offset_abs_threshold_low = [1000, 10000, 10000] # absolute bad pixel threshold in terms of offset, lower values
offset_abs_threshold_high = [8000, 15000, 15000] # absolute bad pixel threshold in terms of offset, upper values
chunkSize = 10 # iteration chunk size, needs to match or be less than number of images in a sequence file
imageRange = [0, 500] # image range in which to evaluate
memoryCells = 1 # number of memory cells
h5path = '/INSTRUMENT/{}/DET/{}:daqOutput/data' # path in H5 file under which images are located
run_high = 0 # run number for G0 dark run, required
run_med = 0 # run number for G1 dark run, required
run_low = 0 # run number for G2 dark run, required
karabo_id = "FXE_XAD_JF500K" # karabo prefix of Jungfrau devices
receiver_id = "RECEIVER" # inset for receiver devices
control_id = "CONTROL" # inset for control devices
db_module = "Jungfrau_M233" # ID of module in calibration database
use_dir_creation_date = True # use dir creation date
manual_slow_data = False # set this flag to not use slow control data from file, but manual entries
```
%% Cell type:code id: tags:
``` python
# Silence library warning spam so it does not clutter the generated report
import warnings
warnings.filterwarnings('ignore')
import matplotlib
matplotlib.use('agg')  # non-interactive backend; figures are rendered into the report
import numpy as np
import XFELDetAna.xfelpyanatools as xana
import XFELDetAna.xfelpycaltools as xcal
from cal_tools.enums import BadPixels
from cal_tools.tools import get_dir_creation_date
from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions
from XFELDetAna.util import env
# NOTE(review): the ipcluster profile is set before the jungfrau reader
# modules are imported — presumably they pick it up at import time; keep order
env.iprofile = cluster_profile
from XFELDetAna.detectors.jungfrau import readerPSI as jfreaderPSI
from XFELDetAna.detectors.jungfrau import reader as jfreader
from XFELDetAna.detectors.jungfrau.jf_chunk_reader import JFChunkReader
from XFELDetAna.detectors.jungfrau.util import non_empty_trains

# constants relevant for the analysis
run_nums = [run_high, run_med, run_low] # run number for G0/HG0, G1, G2
cpuCores = 1
is_parallel = False
sensorSize = [1024, 512]  # [x, y] pixels of the module
blockSize = [1024, 512]
xRange = [0, 0+sensorSize[0]]  # full sensor in x
yRange = [0, 0+sensorSize[1]]  # full sensor in y
gains = [0, 1, 2]
h5path = h5path.format(karabo_id, receiver_id)  # resolve instrument/receiver placeholders
creation_time = None
if use_dir_creation_date:
    creation_time = get_dir_creation_date(in_folder, run_high)
    print("Using {} as creation time".format(creation_time))
# [lower, upper] absolute offset thresholds, one value per gain stage each
offset_abs_threshold = [offset_abs_threshold_low, offset_abs_threshold_high]
```
%% Cell type:code id: tags:
``` python
# Accumulator for per-pixel, per-gain offset and noise statistics over all
# dark frames fed to it via fill(); maps are read back with getOffset()/get()
noiseCal = xcal.NoiseCalculator(sensorSize, nCells=memoryCells, cores=cpuCores, parallel=is_parallel, gains=gains,
                                blockSize=blockSize)
```
%% Cell type:code id: tags:
``` python
import h5py

# Loop over the three dark runs (G0/HG0, G1, G2) and accumulate statistics
for i_run, run in enumerate(run_nums):
    if run is not None:
        ped_dir = "{}/r{:04d}".format(in_folder, run)
        # read integration time and bias voltage from the control (slow) data,
        # unless manual values were requested via `manual_slow_data`
        fp_name = path_template.format(run, path_inset_control)
        fp_path = '{}/{}'.format(ped_dir, fp_name)
        if not manual_slow_data:
            with h5py.File(fp_path.format(0), 'r') as f:
                # scaled by 1e6 — file value presumably in s, converted to us
                integration_time = int(f['/RUN/{}/DET/{}/exposureTime/value'.format(karabo_id, control_id)][()]*1e6)
                bias_voltage = int(np.squeeze(f['/RUN/{}/DET/{}/vHighVoltage/value'.format(karabo_id, control_id)])[0])
        print("Integration time is {} us".format(integration_time))
        print("Bias voltage is {} V".format(bias_voltage))
        # switch to the image-data files of the same run
        fp_name = path_template.format(run, path_inset)
        fp_path = '{}/{}'.format(ped_dir, fp_name)
        filep_size = 500  # images per sequence file (matches imageRange upper bound)
        myRange_P = range(0, sequences)
        path = h5path
        print("Reading data from {}".format(fp_path))
        print("Run is: {}".format(run))
        print("HDF5 path: {}".format(h5path))
        reader = JFChunkReader(filename = fp_path, readFun = jfreader.readData, size = filep_size, chunkSize = chunkSize,
                               path = path, image_range=imageRange, pixels_x = sensorSize[0], pixels_y = sensorSize[1],
                               x_range = xRange, y_range = yRange, imagesPerChunk=chunkSize, filesRange = myRange_P,
                               memoryCells=memoryCells, blockSize=blockSize)
        # chunks yield (images, gain maps, train IDs); drop frames from empty trains
        for data in reader.readChunks():
            images = np.squeeze(data[0])
            gainmaps = np.squeeze(data[1])
            trainId = np.array(data[2])
            idxs = non_empty_trains(trainId)
            noiseCal.fill(data=images[..., idxs], gainMap=gainmaps[..., idxs])
    else:
        print("dark run for G{} is missing".format(i_run))

# per-pixel maps accumulated over all runs; indexed later as [x, y, cell, gain]
offset_map = noiseCal.getOffset()
noise_map = noiseCal.get()
```
%% Cell type:markdown id: tags:
## Offset and Noise Maps ##
Below offset and noise maps for the high ($g_0$) gain stage are shown, alongside the distribution of these values. One expects block-like structures mapping to the ASICs of the detector
%% Cell type:code id: tags:
``` python
import matplotlib.pyplot as plt
%matplotlib inline
from XFELDetAna.plotting.histogram import histPlot
from XFELDetAna.plotting.heatmap import heatmapPlot

g_name = ['G0', 'G1', 'G2']
# display range (vmin, vmax) of the pedestal plots, one tuple per gain stage
g_range = [(0, 8000), (8000, 16000), (8000, 16000)]
for g_idx in gains:
    # offset (pedestal) map and its distribution for this gain stage;
    # axes swapped so rows/columns plot as y/x
    off = offset_map[:, :, 0, g_idx]
    fo_0 = heatmapPlot(np.swapaxes(off, 0, 1),
                       y_label="Row",
                       x_label="Column",
                       lut_label="Pedestal {} [ADCu]".format(g_name[g_idx]),
                       vmin=g_range[g_idx][0],
                       vmax=g_range[g_idx][1])
    fo_01, axo_01 = plt.subplots()
    ho0, eo0 , xo0, stato0 = histPlot(axo_01, off,
                                      bins=800,
                                      range=g_range[g_idx],
                                      facecolor='b',
                                      histotype='stepfilled')
    axo_01.tick_params(axis='both',which='major',labelsize=15)
    axo_01.set_title('Module pedestal distribution', fontsize=15)
    axo_01.set_xlabel('Pedestal {} [ADCu]'.format(g_name[g_idx]),fontsize=15)
    axo_01.set_yscale('log')
    # noise (RMS) map and its distribution for the same gain stage
    noise = noise_map[:, :, 0, g_idx]
    fn_0 = heatmapPlot(np.swapaxes(noise, 0, 1),
                       y_label="Row",
                       x_label="Column",
                       lut_label="RMS noise {} [ADCu]".format(g_name[g_idx]),
                       vmin=0,
                       vmax=50)
    fn_01, axn_01 = plt.subplots()
    hn0, en0 , xn0, statn0 = histPlot(axn_01, noise,
                                      bins=100,
                                      range=(0, 50),
                                      facecolor='b',
                                      histotype='stepfilled')
    axn_01.tick_params(axis='both',which='major',labelsize=15)
    axn_01.set_title('Module noise distribution', fontsize=15)
    axn_01.set_xlabel('RMS noise {} [ADCu]'.format(g_name[g_idx]),fontsize=15)
    axn_01.set_yscale('log')
    plt.show()
```
%% Cell type:markdown id: tags:
## Bad Pixel Map ##
The bad pixel map is deduced by comparing offset and noise of each pixel ($v_i$) and each gain ($g$) against the median value for that gain stage:
$$
v_i > \mathrm{median}(v_{k,g}) + n \sigma_{v_{k,g}}
$$
or
$$
v_i < \mathrm{median}(v_{k,g}) - n \sigma_{v_{k,g}}
$$
Values are encoded in a 32 bit mask, where for the dark image deduced bad pixels the following non-zero entries are relevant:
%% Cell type:code id: tags:
``` python
def print_bp_entry(bp):
    """Print one bad-pixel flag: left-justified name and its 32-bit bit mask."""
    label, mask = bp.name, bp.value
    print("{:<30s} {:032b}".format(label, mask))
# list the dark-image-relevant bad pixel flags and their bit positions
for _flag in (BadPixels.OFFSET_OUT_OF_THRESHOLD,
              BadPixels.NOISE_OUT_OF_THRESHOLD,
              BadPixels.OFFSET_NOISE_EVAL_ERROR):
    print_bp_entry(_flag)
```
%% Cell type:code id: tags:
``` python
# 32-bit bad pixel mask, same shape as the noise map ([x, y, cell, gain])
bad_pixels_map = np.zeros(noise_map.shape, np.uint32)
def eval_bpidx(d):
    """Return a boolean mask flagging entries of ``d`` that lie more than
    ``badpixel_threshold_sigma`` standard deviations from the median.

    Median and sigma are computed over the first three axes (NaNs ignored),
    i.e. separately for each gain stage on the last axis.
    """
    med = np.nanmedian(d, axis=(0, 1, 2))[None, None, None, :]
    sig = np.nanstd(d, axis=(0, 1, 2))[None, None, None, :]
    lo = (-badpixel_threshold_sigma) * sig + med
    hi = badpixel_threshold_sigma * sig + med
    return (d < lo) | (d > hi)
# absolute thresholds: row 0 = lower, row 1 = upper, one column per gain
offset_abs_threshold = np.array(offset_abs_threshold)
# relative (median +/- n sigma) criteria on offset and noise
bad_pixels_map[eval_bpidx(offset_map)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value
bad_pixels_map[~np.isfinite(offset_map)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value
bad_pixels_map[eval_bpidx(noise_map)] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value
bad_pixels_map[~np.isfinite(noise_map)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value
# absolute offset criteria, evaluated per gain stage (last axis)
bad_pixels_map[(offset_map < offset_abs_threshold[0][None, None, None, :]) | (offset_map > offset_abs_threshold[1][None, None, None, :])] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value

# show the resulting bad pixel map for each gain stage
for g_idx in gains:
    bad_pixels = bad_pixels_map[:, :, 0, g_idx]
    fn_0 = heatmapPlot(np.swapaxes(bad_pixels, 0, 1),
                       y_label="Row",
                       x_label="Column",
                       lut_label="Badpixels {} [ADCu]".format(g_name[g_idx]),
                       vmin=0)
```
%% Cell type:code id: tags:
``` python
## offset
metadata = ConstantMetaData()
offset = Constants.jungfrau.Offset()
offset.data = np.moveaxis(offset_map, 0, 1)
metadata.calibration_constant = offset
# set the operating condition
condition = Conditions.Dark.jungfrau(memory_cells=1, bias_voltage=bias_voltage,
integration_time=integration_time)
device = getattr(Detectors, db_module)
metadata.detector_condition = condition
# specify the a version for this constant
if creation_time is None:
metadata.calibration_constant_version = Versions.Now(device=device)
metadata.retrieve(cal_db_interface)
else:
metadata.calibration_constant_version = Versions.Timespan(device=device,
start=creation_time)
metadata.send(cal_db_interface)
## noise
metadata = ConstantMetaData()
noise = Constants.jungfrau.Noise()
noise.data = np.moveaxis(noise_map, 0, 1)
metadata.calibration_constant = noise
# set the operating condition
condition = Conditions.Dark.jungfrau(memory_cells=1, bias_voltage=bias_voltage,
integration_time=integration_time)
metadata.detector_condition = condition
# specify the a version for this constant
if creation_time is None:
metadata.calibration_constant_version = Versions.Now(device=device)
metadata.retrieve(cal_db_interface)
else:
metadata.calibration_constant_version = Versions.Timespan(device=device,
start=creation_time)
metadata.send(cal_db_interface)
## bad pixels
metadata = ConstantMetaData()
bpix = Constants.jungfrau.BadPixelsDark()
bpix.data = np.moveaxis(bad_pixels_map, 0, 1)
metadata.calibration_constant = bpix
# set the operating condition
condition = Conditions.Dark.jungfrau(memory_cells=1, bias_voltage=bias_voltage,
integration_time=integration_time)
metadata.detector_condition = condition
# specify the a version for this constant
if creation_time is None:
metadata.calibration_constant_version = Versions.Now(device=device)
metadata.retrieve(cal_db_interface)
else:
metadata.calibration_constant_version = Versions.Timespan(device=device,
start=creation_time)
metadata.send(cal_db_interface)
```
%% Cell type:markdown id: tags:
## Verification ##
For verification we correct the input data with the offset deduced from it. We expect a distribution centered around zero.
%% Cell type:code id: tags:
``` python
# Verification: re-read the raw data and apply the just-derived offset map;
# the corrected distribution is expected to be centred around zero
fd_path = fp_path
d_path = path
filed_size = 500
myRange_D = range(0, 2)  # only the first two sequence files
imageRange = [0, 500]
chunkSize = 25
reader = JFChunkReader(filename = fd_path, readFun = jfreader.readData, size = filed_size, chunkSize = chunkSize,
                       path = d_path, image_range=imageRange, pixels_x = sensorSize[0], pixels_y = sensorSize[1],
                       x_range = xRange, y_range = yRange, imagesPerChunk=chunkSize, filesRange = myRange_D,
                       memoryCells=memoryCells, blockSize=blockSize)
offsetCorr = xcal.OffsetCorrection(shape=sensorSize, offsetMap=offset_map, nCells=memoryCells,
                                   cores=cpuCores, parallel=is_parallel, gains=gains, blockSize=blockSize)
pixel_data = []
for data_raw in reader.readChunks():
    images = np.array(data_raw[0]).astype(np.float32)
    gainmaps = np.array(data_raw[1])
    trainID = np.array(data_raw[2])
    # BUGFIX: select non-empty trains from THIS chunk's train IDs (`trainID`);
    # previously this used the stale `trainId` left over from the dark-run loop
    idxs = non_empty_trains(trainID)
    data_out = offsetCorr.correct(data=images[..., idxs], gainMap=gainmaps[..., idxs])
    pixel_data.append(data_out)
pixel_data = np.array(pixel_data)
```
%% Cell type:code id: tags:
``` python
# Histogram of all offset-corrected pixel values; a distribution centred
# around zero indicates the derived offsets are sane
is_log = True          # logarithmic y axis
h_range = (-500, 500)  # histogram range in ADCu
h_bins = 300
f_sp, ax_sp = plt.subplots()
h, e , x, stat = histPlot(ax_sp, pixel_data.flatten(),
                          bins=h_bins,
                          range=h_range,
                          facecolor='b',
                          log=is_log,
                          histotype='stepfilled')
ax_sp.tick_params(axis='both',which='major',labelsize=15)
ax_sp.set_xlabel('Energy [ADCu]', fontsize=15)
plt.show()
```
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment