Commit 13acf6b1 authored by Karim Ahmed's avatar Karim Ahmed

Draft: Gotthard2 dark notebook

parent 2890a107
This commit is part of merge request !684.
%% Cell type:markdown id:49b6577f-96a5-4dd2-bdd9-da661b2c4619 tags:
# Gotthard2 Dark Image Characterization
Author: European XFEL Detector Group, Version: 1.0

Dark images from three runs (one per gain: G0, G1, G2) are processed to derive per-pixel Offset and Noise maps for the Gotthard2 detector.
%% Cell type:code id:818e24e8 tags:
``` python
in_folder = "/gpfs/exfel/exp/DETLAB/202230/p900276/raw" # the folder to read data from, required
out_folder = "/gpfs/exfel/data/scratch/ahmedk/test/gotthard2/darks" # the folder to output to, required
run_high = 10 # run number for G0 dark run, required
run_med = 10 # run number for G1 dark run, required
run_low = 10 # run number for G2 dark run, required
sequences = [-1] # sequences to process, set to [-1] for all, range allowed
sequences_per_node = 1 # number of sequence files per node if notebook executed through xfel-calibrate, set to 0 to not run SLURM parallel
# Parameters used to access raw data.
karabo_id = "DET_LAB_G2" # karabo prefix of Jungfrau devices
karabo_da = ["DA01"] # data aggregators
receiver_template = "GOT{:02d}" # receiver template used to read INSTRUMENT keys.
control_template = "CTRL{:02d}" # control template used to read CONTROL keys.
instrument_source_template = '{}/DET/{}:daqOutput' # template for source name (filled with karabo_id & receiver_id). e.g. 'SPB_IRDA_JF4M/DET/JNGFR01:daqOutput'
ctrl_source_template = '{}/DET/{}' # template for control source name (filled with karabo_id_control)
karabo_id_control = "" # if control is on a different ID, set to empty string if it is the same a karabo-id
# Parameters for the calibration database.
use_dir_creation_date = True
cal_db_interface = "tcp://max-exfl016:8020" # calibration DB interface to use
cal_db_timeout = 300000 # timeout on caldb requests
db_output = False # Output constants to the calibration database
local_output = True # Output constants locally
# Conditions used for injected calibration constants.
# Parameters used during selecting raw data trains.
min_trains = 1 # Minimum number of trains that should be available to process dark constants. Default 1.
max_trains = 1000 # Maximum number of trains to use for processing dark constants. Set to 0 to use all available trains.
# Don't delete! myMDC sends this by default.
operation_mode = '' # Detector operation mode, optional
```
%% Cell type:code id:8085f9aa tags:
``` python
import h5py
import numpy as np
import matplotlib.pyplot as plt
import multiprocessing
import pasha as psh
from extra_data import RunDirectory
from pathlib import Path
from cal_tools.enums import BadPixels
from cal_tools.gotthard2algs import convert_to_10bit
from cal_tools.step_timing import StepTimer
from cal_tools.tools import (
    get_dir_creation_date,
    get_from_db,
    get_pdu_from_db,
    get_report,
    save_const_to_h5,
    send_to_db,
)
from iCalibrationDB import Conditions, Constants
```
%% Cell type:code id:18fe4379 tags:
``` python
run_nums = [run_high, run_med, run_low]
in_folder = Path(in_folder)
out_folder = Path(out_folder)
out_folder.mkdir(parents=True, exist_ok=True)

print(f"Process modules: {karabo_da}")

run_dc = RunDirectory(in_folder / f"r{run_high:04d}")
file_loc = f"proposal:{run_dc.run_metadata()['proposalNumber']} runs:{run_high} {run_med} {run_low}"  # noqa

# Fall back to karabo_id if no separate control ID was given,
# before it is used to build the control source name.
if not karabo_id_control:
    karabo_id_control = karabo_id

instrument_src = instrument_source_template.format(
    karabo_id, receiver_template)
ctrl_src = ctrl_source_template.format(
    karabo_id_control, control_template)

# Read report path to associate it later with injected constants.
report = get_report(out_folder)

if use_dir_creation_date:
    creation_time = get_dir_creation_date(in_folder, run_high)
    print(f"Using {creation_time.isoformat()} as creation time")
```
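%% Cell type:markdown tags:
The source-name templates are only partially filled at this point: `instrument_src` still carries the receiver placeholder, which is substituted per module further below via `instrument_src.format(int(mod[-2:]))`, and `ctrl_src` still carries the control placeholder. The cell below is a minimal sketch of that second substitution step for the first configured data aggregator; it only prints the resulting names.
%% Cell type:code tags:
``` python
# Illustration only: show how the partially filled templates expand for the
# first configured module (e.g. "DA01" with the defaults above).
example_mod = karabo_da[0]
print(f"Instrument source for {example_mod}: {instrument_src.format(int(example_mod[-2:]))}")
print(f"Control source template: {ctrl_src}")
```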
%% Cell type:code id:108be688 tags:
``` python
step_timer = StepTimer()
```
%% Cell type:code id:fb80b98e tags:
``` python
# Load the LUT constant temporarily from a local path until it is available from the calibration database.
constants_file = "/gpfs/exfel/data/user/mramilli/gotthard2/constants/GH2-0124/calibration_constants_GH2-0124.h5"

with h5py.File(constants_file, 'r') as cfile:
    lut = cfile["LUT"][()]
```
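%% Cell type:markdown tags:
The loaded LUT is consumed by `convert_to_10bit` from `cal_tools.gotthard2algs`, which defines its exact layout. As a purely illustrative sketch of a LUT-based conversion (not the cal_tools implementation), the cell below assumes a hypothetical per-pixel table of shape `(1280, 4096)` mapping every raw 12-bit ADC code to a 10-bit value.
%% Cell type:code tags:
``` python
# Illustration only: LUT-based 12-bit -> 10-bit conversion on synthetic data.
# The table shape (1280, 4096) is an assumption made for this sketch.
n_pixels = 1280
example_lut = np.random.randint(0, 1024, size=(n_pixels, 4096), dtype=np.uint16)
raw_frame = np.random.randint(0, 4096, size=n_pixels, dtype=np.uint16)

converted = example_lut[np.arange(n_pixels), raw_frame]  # one 10-bit value per pixel
print(converted.shape, converted.dtype)
```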
%% Cell type:code id:ff9149fc tags:
``` python
# Read parameter conditions
step_timer.start()
run_dcs_dict = dict()
ctrl_src = ctrl_source_template.format(karabo_id_control, control_template)
for gain, run in enumerate(run_nums):
    run_dc = RunDirectory(in_folder / f"r{run:04d}/")
    run_dcs_dict[run] = [gain, run_dc]
# TODO: Read every condition from slow data.
# TODO: Validate that the conditions are the same and as expected for all runs.
```
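%% Cell type:markdown tags:
The TODOs above cover reading the operating conditions from the CONTROL (slow) data. The cell below is a minimal sketch of how a single run value could be read per run with EXtra-data; the control index `0` and the property name `exposureTime` are assumptions made purely for illustration and have to be confirmed against the actual Gotthard2 control device.
%% Cell type:code tags:
``` python
# Sketch only: read one condition from slow data for each dark run.
# The control index (0) and the key name "exposureTime" are assumptions.
example_ctrl_src = ctrl_src.format(0)
for run, (gain, run_dc) in run_dcs_dict.items():
    if example_ctrl_src in run_dc.control_sources:
        exposure_time = run_dc.get_run_value(example_ctrl_src, "exposureTime")
        print(f"Run {run} (G{gain}): exposure time = {exposure_time}")
```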
%% Cell type:code id:ac9c5dc3-bc66-4e7e-b6a1-360259be535c tags:
``` python
def specify_trains_to_process(
    img_key_data: "extra_data.KeyData",  # noqa
    max_trains: int = 0,
    min_trains: int = 0,
):
    """Specify the total number of trains to process,
    based on the given min_trains and max_trains limits.

    Print the number of trains to process and the number of empty trains.
    Raise ValueError if fewer trains than min_trains are available.
    """
    # Total number of trains with image data.
    n_trains = img_key_data.shape[0]
    all_trains = len(img_key_data.train_ids)
    # `mod` is taken from the enclosing loop in the processing cell below.
    print(f"{mod} has {all_trains - n_trains} "
          f"trains with empty frames out of {all_trains} trains")

    if n_trains < min_trains:
        raise ValueError(
            f"Less than {min_trains} trains are available in RAW data."
            " Not enough data to process darks.")

    if max_trains > 0:
        n_trains = min(n_trains, max_trains)

    print(f"Processing {n_trains} trains.")

    return n_trains
```
%% Cell type:code id:e2eb2fc0-df9c-4887-9691-f81474f8c131 tags:
``` python
def convert_train(wid, index, tid, d):
    """Convert a Gotthard2 train from 12bit to 10bit."""
    d_10bit = np.zeros_like(d[instr_mod_src]["data.adc"], dtype=np.float32)
    convert_to_10bit(d[instr_mod_src]["data.adc"], lut, d_10bit)
    data_10bit[index, ...] = d_10bit
```
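%% Cell type:markdown tags:
`convert_train` follows pasha's kernel convention: when mapping over an EXtra-data selection, each call receives the worker id, the index of the train within the selection, the train id and the train data, and writes its result into the shared array allocated with `context.alloc`. The standalone sketch below shows the same map/alloc pattern on a plain NumPy array, independent of the detector data.
%% Cell type:code tags:
``` python
# Standalone sketch of the pasha pattern used in this notebook: a kernel
# writes per-item results into a shared output array allocated by the context.
inputs = np.arange(10, dtype=np.float64)
ctx = psh.context.ProcessContext(num_workers=4)
squares = ctx.alloc(shape=inputs.shape, dtype=np.float64)

def square_kernel(worker_id, index, value):
    # For array-like inputs, the kernel receives (worker_id, index, value).
    squares[index] = value ** 2

ctx.map(square_kernel, inputs)
print(squares)
```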
%% Cell type:code id:4e8ffeae tags:
``` python
# Calculate noise and offset per pixel and global average, std and median
noise_map = dict()
offset_map = dict()
bad_pixels_map = dict()
context = psh.context.ProcessContext(num_workers=multiprocessing.cpu_count())
for mod in karabo_da:
    # Path to pixel ADC values.
    instr_mod_src = instrument_src.format(int(mod[-2:]))
    pixels_src = (instr_mod_src, "data.adc")
    data_path = "INSTRUMENT/" + instr_mod_src + "/data"

    # TODO: Validate the final shape to store constants.
    cshape = (3, 2, 1280)

    offset_map[mod] = np.zeros(cshape, dtype=np.float32)
    noise_map[mod] = np.zeros_like(offset_map[mod])
    bad_pixels_map[mod] = np.zeros_like(offset_map[mod], dtype=np.uint32)

    for run_num, [gain, run_dc] in run_dcs_dict.items():
        step_timer.start()
        n_trains = specify_trains_to_process(
            run_dc[pixels_src], max_trains=max_trains, min_trains=min_trains)

        # Select requested number of trains to process.
        dc = run_dc.select(*pixels_src, require_all=True).select_trains(np.s_[:n_trains])  # noqa

        dshape = dc[instr_mod_src, "data.adc"].shape
        step_timer.done_step("preparing raw data")

        step_timer.start()
        # Convert 12bit data to 10bit.
        data_10bit = context.alloc(shape=dshape, dtype=np.float32)
        context.map(convert_train, dc)
        step_timer.done_step("convert to 10bit")

        step_timer.start()
        # Split even and odd frames; they are characterized separately
        # (second axis of the constant shape).
        even_data = data_10bit[:, ::2, :]
        odd_data = data_10bit[:, 1::2, :]

        def offset_noise_cell(wid, index, d):
            offset[index] = np.mean(d, axis=(0, 1))
            noise[index] = np.std(d, axis=(0, 1))

        offset = context.alloc(shape=cshape[-2:], dtype=np.float32)
        noise = context.alloc(like=offset)
        context.map(offset_noise_cell, (even_data, odd_data))

        offset_map[mod][gain, ...] = offset.copy()
        noise_map[mod][gain, ...] = noise.copy()
        step_timer.done_step("Processing darks")
```
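%% Cell type:markdown tags:
`bad_pixels_map` is allocated above but not yet evaluated, and `BadPixels` is imported but unused so far. The cell below sketches one possible way to fill it from the computed maps; the 5-sigma threshold and the median/std-based cut are assumptions for illustration, not the final algorithm.
%% Cell type:code tags:
``` python
# Sketch only: flag pixels whose offset or noise deviates strongly from the
# per-gain distribution. The threshold value is an assumption.
badpixel_threshold_sigma = 5.

for mod in karabo_da:
    for const_map, flag in [
        (offset_map[mod], BadPixels.OFFSET_OUT_OF_THRESHOLD.value),
        (noise_map[mod], BadPixels.NOISE_OUT_OF_THRESHOLD.value),
    ]:
        for gain in range(const_map.shape[0]):
            gain_map = const_map[gain]
            lower = np.median(gain_map) - badpixel_threshold_sigma * np.std(gain_map)
            upper = np.median(gain_map) + badpixel_threshold_sigma * np.std(gain_map)
            bad_pixels_map[mod][gain][(gain_map < lower) | (gain_map > upper)] |= flag
```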
%% Cell type:code id:6e10ed93-66de-4fb1-bf97-f8d25af22edb tags:
``` python
# # set the operating condition
# # TODO: add the final conditions for constants.
# condition = Conditions.Dark.Gotthard2(
# bias_voltage=bias_voltage,
# )
# db_modules = get_pdu_from_db(
# karabo_id=karabo_id,
# karabo_da=karabo_da,
# constant=Constants.Gotthard2.Offset(),
# condition=condition,
# cal_db_interface=cal_db_interface,
# snapshot_at=creation_time)
```
%% Cell type:code id:fde8e1cf-bc74-462f-b6e5-cfee8279090d tags:
``` python
from XFELDetAna.plotting.heatmap import heatmapPlot
unit = '[ADCu]'
for mod in karabo_da:
    for _, [gain, _] in run_dcs_dict.items():
        heatmapPlot(
            offset_map[mod][gain],
            y_label="Row",
            x_label="Column",
            lut_label=unit,
            title=f"Even / Odd Offset map G{gain} - Module {mod}",  # TODO: add PDU name
        )
        plt.show()

        heatmapPlot(
            noise_map[mod][gain],
            y_label="Row",
            x_label="Column",
            lut_label=unit,
            title=f"Even / Odd noise map G{gain} - Module {mod}",  # TODO: add PDU name
        )
        plt.show()
```
%% Cell type:code id:1c4eddf7-7d6e-49f4-8cbb-12d2bc496a8f tags:
``` python
step_timer.start()
for mod, db_mod in zip(karabo_da, db_modules):
    constants = {
        'Offset': offset_map[mod],
        'Noise': noise_map[mod],
    }

    md = None

    for key, const_data in constants.items():
        const = getattr(Constants.Gotthard2, key)()
        const.data = const_data

        if db_output:
            md = send_to_db(
                db_module=db_mod,
                karabo_id=karabo_id,
                constant=const,
                condition=condition,
                file_loc=file_loc,
                report_path=report,
                cal_db_interface=cal_db_interface,
                creation_time=creation_time,
                timeout=cal_db_timeout,
            )
        if local_output:
            md = save_const_to_h5(
                db_module=db_mod,
                karabo_id=karabo_id,
                constant=const,
                condition=condition,
                data=const.data,
                file_loc=file_loc,
                report=report,
                creation_time=creation_time,
                out_folder=out_folder,
            )
            print(f"Calibration constant {key} is stored locally at {out_folder}.\n")

    print("Constants parameter conditions are:\n")
    # TODO: add the final conditions for constants.
    print(
        f"• Bias voltage: {bias_voltage}\n"
        f"• Creation time: {md.calibration_constant_version.begin_at if md is not None else creation_time}\n")  # noqa

step_timer.done_step("Injecting constants.")
```