Commit c9935425 authored by Steffen Hauf

Add a per-memory-cell option to correction notebook

parent 87656ae5
Merge request !120: Add a per-memory-cell option to correction notebook
%% Cell type:markdown id: tags:
# Characterize Dark Images #
Author: S. Hauf, Version: 0.1
The following code analyzes a set of dark images taken with the DSSC detector to deduce detector offsets and noise. Data needs to have been recorded in a dedicated dark run.
The notebook explicitly does what pyDetLib provides in its offset calculation method for streaming data.
%% Cell type:code id: tags:
``` python
cluster_profile = "noDB" # The ipcluster profile to use
in_folder = "/gpfs/exfel/exp/SCS/201930/p900079/raw" # path to input data, required
out_folder = "/gpfs/exfel/data/scratch/haufs/test/" # path to output to, required
sequences = [0] # sequence files to evaluate
run = 20 # run number in which data was recorded, required
mem_cells = 0 # number of memory cells used, set to 0 to automatically infer
local_output = False # output constants locally
db_output = True # output constants to database
bias_voltage = 300 # detector bias voltage
cal_db_interface = "tcp://max-exfl016:8020" # the database interface to use
rawversion = 2 # RAW file format version
dont_use_dir_date = False # don't use the dir creation date for determining the creation time
thresholds_offset_sigma = 3. # thresholds in terms of n sigma noise for offset-deduced bad pixels
thresholds_offset_hard = [4000, 8500] # thresholds in absolute ADU terms for offset-deduced bad pixels
thresholds_noise_sigma = 5. # thresholds in terms of n sigma noise for noise-deduced bad pixels
thresholds_noise_hard = [4, 20] # thresholds in absolute ADU terms for noise-deduced bad pixels
instrument = "SCS" # the instrument
high_res_badpix_3d = False # set this to True if you need high-resolution 3d bad pixel plots. Runtime: ~ 1h
modules = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15] # modules to run for
```
%% Cell type:code id: tags:
``` python
# imports and things that do not usually need to be changed
from datetime import datetime
import warnings
warnings.filterwarnings('ignore')
from collections import OrderedDict
import os
import h5py
import numpy as np
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
%matplotlib inline
from cal_tools.tools import gain_map_files, parse_runs, run_prop_seq_from_path, get_notebook_name, get_dir_creation_date
from cal_tools.influx import InfluxLogger
from cal_tools.enums import BadPixels
from cal_tools.plotting import show_overview, plot_badpix_3d, create_constant_overview
# make sure a cluster is running with ipcluster start --n=32, give it a while to start
from ipyparallel import Client
view = Client(profile=cluster_profile)[:]
view.use_dill()
from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions
# no need to change this
QUADRANTS = 4
MODULES_PER_QUAD = 4
DET_FILE_INSET = "DSSC"
max_cells = mem_cells
offset_runs = OrderedDict()
offset_runs["high"] = parse_runs(run)[0]
creation_time = None
if not dont_use_dir_date:
    creation_time = get_dir_creation_date(in_folder, run)
run, prop, seq = run_prop_seq_from_path(in_folder)
logger = InfluxLogger(detector="DSSC", instrument=instrument, mem_cells=mem_cells,
                      notebook=get_notebook_name(), proposal=prop)
print("Using {} as creation time of constant.".format(creation_time))
loc = None
if instrument == "SCS":
    loc = "SCS_DET_DSSC1M-1"
    dinstance = "DSSC1M1"
print("Detector in use is {}".format(loc))
```
%% Cell type:code id: tags:
``` python
print("Parameters are:")
print("Proposal: {}".format(prop))
print("Memory cells: {}/{}".format(mem_cells, max_cells))
print("Runs: {}".format([v for v in offset_runs.values()]))
print("Sequences: {}".format(sequences))
print("Using DB: {}".format(db_output))
print("Input: {}".format(in_folder))
print("Output: {}".format(out_folder))
print("Bias voltage: {}V".format(bias_voltage))
```
%% Cell type:markdown id: tags:
The following lines create a queue of files which will then be processed module-parallel, distinguishing between the different gains.
%% Cell type:code id: tags:
``` python
# set everything up filewise
if not os.path.exists(out_folder):
    os.makedirs(out_folder)
gmf = gain_map_files(in_folder, offset_runs, sequences, DET_FILE_INSET, QUADRANTS, MODULES_PER_QUAD)
gain_mapped_files, total_sequences, total_file_size = gmf
print("Will process a total of {} sequences: {:0.2f} GB of data.".format(total_sequences, total_file_size))
```
%% Cell type:markdown id: tags:
## Calculate Offsets, Noise and Thresholds ##
The calculation is performed per-pixel and per-memory-cell. Offsets are simply the median value for a set of dark data taken at a given gain, noise the standard deviation, and gain-bit values the medians of the gain array.
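%% Cell type:markdown id: tags:
As a rough illustration (a minimal, self-contained sketch with made-up shapes, not part of the processing chain below), the per-cell constants reduce to a median and a standard deviation over all dark frames recorded for each memory cell:
%% Cell type:code id: tags:
``` python
import numpy as np
# sketch: a 128x512 pixel module, 3000 dark frames cycling through 100 memory cells
im = np.random.normal(5000., 50., (128, 512, 3000)).astype(np.float32)  # (y, x, frames)
cellIds = np.tile(np.arange(100), 30)  # memory cell id of each frame
mcells = 100
offset = np.zeros((im.shape[0], im.shape[1], mcells), np.float32)
noise = np.zeros_like(offset)
for cc in np.unique(cellIds[cellIds < mcells]):
    idx = cellIds == cc
    offset[..., cc] = np.median(im[..., idx], axis=2)  # offset: median of the darks
    noise[..., cc] = np.std(im[..., idx], axis=2)      # noise: their standard deviation
```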
%% Cell type:code id: tags:
``` python
import copy
from functools import partial
def characterize_module(cells, bp_thresh, rawversion, loc, inp):
    import numpy as np
    import copy
    import h5py
    from cal_tools.enums import BadPixels
    def get_num_cells(fname, loc, module):
        with h5py.File(fname, "r") as f:
            cells = f["INSTRUMENT/{}/DET/{}CH0:xtdf/image/cellId".format(loc, module)][()]
            maxcell = np.max(cells)
            options = [100, 200, 400, 500, 600, 700, 800]
            dists = np.array([(o-maxcell) for o in options])
            dists[dists < 0] = 10000  # ensure we always round up to the next option
            return options[np.argmin(dists)]
    filename, filename_out, channel = inp
    if cells == 0:
        cells = get_num_cells(filename, loc, channel)
    thresholds_offset_hard, thresholds_offset_sigma, thresholds_noise_hard, thresholds_noise_sigma = bp_thresh
    infile = h5py.File(filename, "r", driver="core")
    if rawversion == 2:
        count = np.squeeze(infile["/INDEX/{}/DET/{}CH0:xtdf/image/count".format(loc, channel)])
        first = np.squeeze(infile["/INDEX/{}/DET/{}CH0:xtdf/image/first".format(loc, channel)])
        last_index = int(first[count != 0][-1]+count[count != 0][-1])
        first_index = int(first[count != 0][0])
    else:
        status = np.squeeze(infile["/INDEX/{}/DET/{}CH0:xtdf/image/status".format(loc, channel)])
        if np.count_nonzero(status != 0) == 0:
            return
        last = np.squeeze(infile["/INDEX/{}/DET/{}CH0:xtdf/image/last".format(loc, channel)])
        first = np.squeeze(infile["/INDEX/{}/DET/{}CH0:xtdf/image/first".format(loc, channel)])
        last_index = int(last[status != 0][-1]) + 1
        first_index = int(first[status != 0][0])
    im = np.array(infile["/INSTRUMENT/{}/DET/{}CH0:xtdf/image/data".format(loc, channel)][first_index:last_index,...])
    cellIds = np.squeeze(infile["/INSTRUMENT/{}/DET/{}CH0:xtdf/image/cellId".format(loc, channel)][first_index:last_index,...])
    infile.close()
    im = im[:, 0, ...].astype(np.float32)
    im = np.rollaxis(im, 2)
    im = np.rollaxis(im, 2, 1)
    mcells = cells
    offset = np.zeros((im.shape[0], im.shape[1], mcells))
    noise = np.zeros((im.shape[0], im.shape[1], mcells))
    for cc in np.unique(cellIds[cellIds < mcells]):
        cellidx = cellIds == cc
        offset[...,cc] = np.median(im[..., cellidx], axis=2)
        noise[...,cc] = np.std(im[..., cellidx], axis=2)
    # bad pixels
    bp = np.zeros(offset.shape, np.uint32)
    # offset related bad pixels
    offset_mn = np.nanmedian(offset, axis=(0,1))
    offset_std = np.nanstd(offset, axis=(0,1))
    bp[(offset < offset_mn-thresholds_offset_sigma*offset_std) |
       (offset > offset_mn+thresholds_offset_sigma*offset_std)] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value
    bp[(offset < thresholds_offset_hard[0]) | (offset > thresholds_offset_hard[1])] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value
    bp[~np.isfinite(offset)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value
    # noise related bad pixels
    noise_mn = np.nanmedian(noise, axis=(0,1))
    noise_std = np.nanstd(noise, axis=(0,1))
    bp[(noise < noise_mn-thresholds_noise_sigma*noise_std) |
       (noise > noise_mn+thresholds_noise_sigma*noise_std)] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value
    bp[(noise < thresholds_noise_hard[0]) | (noise > thresholds_noise_hard[1])] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value
    bp[~np.isfinite(noise)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value
    return offset, noise, bp, cells
offset_g = OrderedDict()
noise_g = OrderedDict()
gain_g = OrderedDict()
badpix_g = OrderedDict()
gg = 0
start = datetime.now()
all_cells = []
for gain, mapped_files in gain_mapped_files.items():
    inp = []
    dones = []
    for i in modules:
        qm = "Q{}M{}".format(i//4 + 1, i % 4 + 1)
        if qm in mapped_files and not mapped_files[qm].empty():
            fname_in = mapped_files[qm].get()
            dones.append(mapped_files[qm].empty())
        else:
            continue
        fout = os.path.abspath("{}/{}".format(out_folder, (os.path.split(fname_in)[-1]).replace("RAW", "CORR")))
        inp.append((fname_in, fout, i))
    first = False
    p = partial(characterize_module, max_cells,
                (thresholds_offset_hard, thresholds_offset_sigma,
                 thresholds_noise_hard, thresholds_noise_sigma), rawversion, loc)
    results = list(map(p, inp))
    for ii, r in enumerate(results):
        i = modules[ii]
        offset, noise, bp, thiscell = r
        all_cells.append(thiscell)
        qm = "Q{}M{}".format(i//4 + 1, i % 4 + 1)
        if qm not in offset_g:
            offset_g[qm] = np.zeros((offset.shape[0], offset.shape[1], offset.shape[2]))
            noise_g[qm] = np.zeros_like(offset_g[qm])
            badpix_g[qm] = np.zeros_like(offset_g[qm], np.uint32)
        offset_g[qm][...] = offset
        noise_g[qm][...] = noise
        badpix_g[qm][...] = bp
    gg += 1
duration = (datetime.now()-start).total_seconds()
logger.runtime_summary_entry(success=True, runtime=duration,
                             total_sequences=total_sequences,
                             filesize=total_file_size)
logger.send()
max_cells = np.max(all_cells)
print("Using {} memory cells".format(max_cells))
```
%% Cell type:code id: tags:
``` python
res = OrderedDict()
for i in modules:
    qm = "Q{}M{}".format(i//4+1, i%4+1)
    res[qm] = {'Offset': offset_g[qm],
               'Noise': noise_g[qm],
               'BadPixels': badpix_g[qm]
               }
if local_output:
    for qm in offset_g.keys():
        ofile = "{}/dssc_offset_store_{}_{}.h5".format(out_folder, "_".join(offset_runs.values()), qm)
        store_file = h5py.File(ofile, "w")
        store_file["{}/Offset/0/data".format(qm)] = offset_g[qm]
        store_file["{}/Noise/0/data".format(qm)] = noise_g[qm]
        store_file["{}/BadPixels/0/data".format(qm)] = badpix_g[qm]
        store_file.close()
```
%% Cell type:code id: tags:
``` python
if db_output:
    for qm in offset_g.keys():
        metadata = ConstantMetaData()
        offset = Constants.DSSC.Offset()
        offset.data = offset_g[qm]
        metadata.calibration_constant = offset
        # set the operating condition
        condition = Conditions.Dark.DSSC(memory_cells=max_cells, bias_voltage=bias_voltage)
        detinst = getattr(Detectors, dinstance)
        device = getattr(detinst, qm)
        metadata.detector_condition = condition
        # specify a version for this constant
        if creation_time is None:
            metadata.calibration_constant_version = Versions.Now(device=device)
        else:
            metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)
        metadata.send(cal_db_interface, timeout=3000000)
        metadata = ConstantMetaData()
        noise = Constants.DSSC.Noise()
        noise.data = noise_g[qm]
        metadata.calibration_constant = noise
        # set the operating condition
        condition = Conditions.Dark.DSSC(memory_cells=max_cells, bias_voltage=bias_voltage)
        metadata.detector_condition = condition
        # specify a version for this constant
        if creation_time is None:
            metadata.calibration_constant_version = Versions.Now(device=device)
        else:
            metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)
        metadata.send(cal_db_interface, timeout=3000000)
        continue  # no bad pixels yet, so the injection below is skipped
        metadata = ConstantMetaData()
        badpixels = Constants.DSSC.BadPixelsDark()
        badpixels.data = badpix_g[qm]
        metadata.calibration_constant = badpixels
        # set the operating condition
        condition = Conditions.Dark.DSSC(memory_cells=max_cells, bias_voltage=bias_voltage)
        metadata.detector_condition = condition
        # specify a version for this constant
        if creation_time is None:
            metadata.calibration_constant_version = Versions.Now(device=device)
        else:
            metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)
        metadata.send(cal_db_interface, timeout=3000000)
```
%% Cell type:markdown id: tags:
## Single-Cell Overviews ##
Single-cell overviews allow identifying potential effects on all memory cells, e.g. at the sensor level. Additionally, they serve as a first sanity check of expected behaviour, e.g. that structuring at the ASIC level is visible in the offsets, but no other immediate artifacts appear.
%% Cell type:code id: tags:
``` python
cell = 3
gain = 0
out_folder = None  # show the overview plots inline only, do not write them to file
show_overview(res, cell, gain, out_folder=out_folder, infix="_".join(offset_runs.values()))
```
%% Cell type:markdown id: tags:
## Global Bad Pixel Behaviour ##
The following plots show the results of the bad pixel evaluation for all evaluated memory cells. Cells are stacked in the Z-dimension, while pixel values in x/y are rebinned (by a factor of 2 in the high-resolution setting, 8 otherwise). This excludes single, isolated bad pixels. Hence, any bad pixels spanning at least 4 pixels in the x/y-plane, or at least two memory cells, are indicated. Colors encode the bad pixel type, or mixed type.
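%% Cell type:markdown id: tags:
As an aside, the rebinning step can be pictured with a few lines of numpy (a minimal sketch on a random mask with hypothetical shapes; the actual reduction lives in cal_tools.plotting.plot_badpix_3d and may differ in detail):
%% Cell type:code id: tags:
``` python
import numpy as np
# sketch: rebin a boolean bad-pixel mask by a factor of 2 in x/y
bp = np.random.rand(128, 512, 100) > 0.99  # (y, x, cells), True marks a bad pixel
rebin = 2
rb = bp.reshape(bp.shape[0]//rebin, rebin,
                bp.shape[1]//rebin, rebin, -1).any(axis=(1, 3))
# one possible reduction: a rebinned voxel is flagged if any pixel
# in its 2x2 footprint is bad; rb now has shape (64, 256, 100)
```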
%% Cell type:code id: tags:
``` python
cols = {BadPixels.NOISE_OUT_OF_THRESHOLD.value: (BadPixels.NOISE_OUT_OF_THRESHOLD.name, '#FF000080'),
        BadPixels.OFFSET_NOISE_EVAL_ERROR.value: (BadPixels.OFFSET_NOISE_EVAL_ERROR.name, '#0000FF80'),
        BadPixels.OFFSET_OUT_OF_THRESHOLD.value: (BadPixels.OFFSET_OUT_OF_THRESHOLD.name, '#00FF0080'),
        BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value: ('MIXED', '#DD00DD80')}
rebin = 8 if not high_res_badpix_3d else 2
gain = 0
for mod, data in badpix_g.items():
    plot_badpix_3d(data[...,gain], cols, title=mod, rebin_fac=rebin)
```
%% Cell type:markdown id: tags:
## Aggregate Values and per-Cell Behaviour ##
The following tables and plots give an overview of statistical aggregates for each constant, as well as per-cell behaviour.
%% Cell type:code id: tags:
``` python
create_constant_overview(offset_g, "Offset (ADU)", max_cells, 4000, 8000,
                         out_folder=out_folder, infix="_".join(offset_runs.values()))
```
%% Cell type:code id: tags:
``` python
create_constant_overview(noise_g, "Noise (ADU)", max_cells, 0, 100,
                         out_folder=out_folder, infix="_".join(offset_runs.values()))
```
%% Cell type:code id: tags:
``` python
bad_pixel_aggregate_g = OrderedDict()
for m, d in badpix_g.items():
    bad_pixel_aggregate_g[m] = d.astype(np.bool).astype(np.float)
create_constant_overview(bad_pixel_aggregate_g, "Bad pixel fraction", max_cells, 0, 0.10, 3,
                         out_folder=out_folder, infix="_".join(offset_runs.values()))
```
%% Cell type:code id: tags:
``` python
```
%% Cell type:markdown id: tags:
# DSSC Offline Correction #
Author: European XFEL Detector Group, Version: 1.0
Offline Calibration for the DSSC Detector
%% Cell type:code id: tags:
``` python
in_folder = "/gpfs/exfel/exp/SCS/201901/p002212/raw/" # the folder to read data from, required
run = 29 # runs to process, required
out_folder = "/gpfs/exfel/data/scratch/xcal/test/" # the folder to output to, required
sequences = [-1] # sequences to correct, set to -1 for all, range allowed
mem_cells = 0 # number of memory cells used, set to 0 to automatically infer
overwrite = True # set to True if existing data should be overwritten
cluster_profile = "noDB" # cluster profile to use
max_pulses = 500 # maximum number of pulses per train
bias_voltage = 100 # detector bias voltage
cal_db_interface = "tcp://max-exfl016:8020#8025" # the database interface to use
use_dir_creation_date = True # use the creation date of the input dir for database queries
sequences_per_node = 2 # number of sequence files per cluster node if run as slurm job, set to 0 to not run SLURM parallel
cal_db_timeout = 300000 # in milliseconds
chunk_size_idim = 1 # chunking size of imaging dimension, adjust if user software is sensitive to this
instrument = "SCS" # the instrument the detector is installed at, required
mask_noisy_asic = 0.25 # set to a value other than 0 and below 1 to mask entire ADC if fraction of noisy pixels is above
offset_image = "PP" # image to use as offset: "PP" subtracts the per-memory-cell dark offset from the database, an integer index subtracts that image of each train
mask_cold_asic = 0.25 # mask cold ASICs if the fraction of pixels with negligible standard deviation is larger than this
noisy_pix_threshold = 1. # threshold above which a pixel is considered noisy
geo_file = "/gpfs/exfel/data/scratch/xcal/dssc_geo_june19.h5" # detector geometry file
def balance_sequences(in_folder, run, sequences, sequences_per_node):
    import glob
    import re
    import numpy as np
    if sequences[0] == -1:
        sequence_files = glob.glob("{}/r{:04d}/*-S*.h5".format(in_folder, run))
        seq_nums = set()
        for sf in sequence_files:
            seqnum = re.findall(r".*-S([0-9]*).h5", sf)[0]
            seq_nums.add(int(seqnum))
        seq_nums -= set(sequences)
    else:
        seq_nums = set(sequences)
    nsplits = len(seq_nums)//sequences_per_node+1
    while nsplits > 32:
        sequences_per_node += 1
        nsplits = len(seq_nums)//sequences_per_node+1
        print("Changed to {} sequences per node to have a maximum of 32 concurrent jobs".format(sequences_per_node))
    return [l.tolist() for l in np.array_split(list(seq_nums), nsplits) if l.size > 0]
```
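%% Cell type:markdown id: tags:
A minimal usage sketch of balance_sequences as defined above (for illustration only; in production the calibration pipeline calls it to distribute sequence files over SLURM nodes):
%% Cell type:code id: tags:
``` python
# split the sequence files of this run into groups of at most
# sequences_per_node each, capped at 32 concurrent jobs
node_seqs = balance_sequences(in_folder, run, sequences, sequences_per_node)
print("Sequence groups per node: {}".format(node_seqs))
```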
%% Cell type:code id: tags:
``` python
import sys
from collections import OrderedDict
# make sure a cluster is running with ipcluster start --n=32, give it a while to start
import os
import h5py
import numpy as np
import matplotlib
matplotlib.use("agg")
import matplotlib.pyplot as plt
from ipyparallel import Client
print("Connecting to profile {}".format(cluster_profile))
view = Client(profile=cluster_profile)[:]
view.use_dill()
from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions
from cal_tools.tools import (gain_map_files, parse_runs, run_prop_seq_from_path, get_notebook_name,
                             get_dir_creation_date, get_constant_from_db)
from dateutil import parser
from datetime import timedelta
creation_time = None
if use_dir_creation_date:
    creation_time = get_dir_creation_date(in_folder, run)
    print("Using {} as creation time".format(creation_time))
in_folder = "{}/r{:04d}".format(in_folder, run)
if sequences[0] == -1:
    sequences = None
QUADRANTS = 4
MODULES_PER_QUAD = 4
DET_FILE_INSET = "DSSC"
CHUNK_SIZE = 512
MAX_PAR = 32
if in_folder[-1] == "/":
    in_folder = in_folder[:-1]
out_folder = "{}/{}".format(out_folder, os.path.split(in_folder)[-1])
print("Outputting to {}".format(out_folder))
if not os.path.exists(out_folder):
    os.makedirs(out_folder)
elif not overwrite:
    raise AttributeError("Output path exists! Exiting")
import warnings
warnings.filterwarnings('ignore')
loc = None
if instrument == "SCS":
    loc = "SCS_DET_DSSC1M-1"
    dinstance = "DSSC1M1"
print("Detector in use is {}".format(loc))
if offset_image.upper() != "PP":
    offset_image = int(offset_image)
```
%% Cell type:code id: tags:
``` python
def combine_stack(d, sdim):
    combined = np.zeros((sdim, 2048, 2048))
    combined[...] = np.nan
    d = np.moveaxis(d, 2, 3)
    dy = 0
    quad_pos = [
        (0, 145),
        (-5, 25),
        (130, 15),
        (130, 140),
    ]
    px = 0.236
    py = 0.204
    with h5py.File(geo_file, "r") as gf:
        for i in range(16):
            t1 = gf["Q{}M{}"]  # FIXME: unformatted key, left as in the original
            try:
                if i < 8:
                    dx = -512
                    if i > 3:
                        dx -= 25
                    mx = 1
                    my = i % 8
                    combined[:, my*128+dy:(my+1)*128+dy,
                             mx*512-dx:(mx+1)*512-dx] = np.rollaxis(d[i], 2, 1)[:, :, ::-1]
                    dy += 30
                    if i == 3:
                        dy += 30
                elif i < 12:
                    dx = 80 - 50
                    if i == 8:
                        dy = 4*30 + 30 + 50 - 30
                    mx = 1
                    my = i % 8 + 4
                    combined[:, my*128+dy:(my+1)*128+dy,
                             mx*512-dx:(mx+1)*512-dx] = np.rollaxis(d[i], 2, 1)[:, ::-1, :]
                    dy += 30
                else:
                    dx = 100 - 50
                    if i == 11:
                        dy = 20
                    mx = 1
                    my = i - 14
                    combined[:, my*128+dy:(my+1)*128+dy,
                             mx*512-dx:(mx+1)*512-dx] = np.rollaxis(d[i], 2, 1)[:, ::-1, :]
                    dy += 30
            except:
                continue
    return combined
```
%% Cell type:code id: tags:
``` python
# set everything up filewise
from queue import Queue
from collections import OrderedDict
def map_modules_from_files(filelist):
    module_files = OrderedDict()
    mod_ids = OrderedDict()
    total_sequences = 0
    sequences_qm = {}
    one_module = None
    for quadrant in range(0, QUADRANTS):
        for module in range(0, MODULES_PER_QUAD):
            name = "Q{}M{}".format(quadrant + 1, module + 1)
            module_files[name] = Queue()
            num = quadrant * 4 + module
            mod_ids[name] = num
            file_infix = "{}{:02d}".format(DET_FILE_INSET, num)
            sequences_qm[name] = 0
            for file in filelist:
                if file_infix in file:
                    if not one_module:
                        one_module = file, num
                    module_files[name].put(file)
                    total_sequences += 1
                    sequences_qm[name] += 1
    return module_files, mod_ids, total_sequences, sequences_qm, one_module
dirlist = sorted(os.listdir(in_folder))
file_list = []
for entry in dirlist:
    # only h5 files
    abs_entry = "{}/{}".format(in_folder, entry)
    if os.path.isfile(abs_entry) and os.path.splitext(abs_entry)[1] == ".h5":
        if sequences is None:
            file_list.append(abs_entry)
        else:
            for seq in sequences:
                if "{:05d}.h5".format(seq) in abs_entry:
                    file_list.append(os.path.abspath(abs_entry))
mapped_files, mod_ids, total_sequences, sequences_qm, one_module = map_modules_from_files(file_list)
MAX_PAR = min(MAX_PAR, total_sequences)
```
%% Cell type:markdown id: tags:
## Processed Files ##
%% Cell type:code id: tags:
``` python
import copy
from IPython.display import HTML, display, Markdown, Latex
import tabulate
print("Processing a total of {} sequence files in chunks of {}".format(total_sequences, MAX_PAR))
table = []
mfc = copy.copy(mapped_files)
ti = 0
for k, files in mfc.items():
    i = 0
    while not files.empty():
        f = files.get()
        if i == 0:
            table.append((ti, k, i, f))
        else:
            table.append((ti, "", i, f))
        i += 1
        ti += 1
if len(table):
    md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=["#", "module", "# module", "file"])))
# restore the queue
mapped_files, mod_ids, total_sequences, sequences_qm, one_module = map_modules_from_files(file_list)
```
%% Cell type:code id: tags:
``` python
import copy
from functools import partial
def correct_module(total_sequences, sequences_qm, loc, dinstance, offset_image,
                   mask_noisy_asic, mask_cold_asic, noisy_pix_threshold, chunksize,
                   mem_cells, bias_voltage, cal_db_timeout, creation_time, cal_db_interface, inp):
    import numpy as np
    import copy
    import h5py
    from cal_tools.enums import BadPixels
    from cal_tools.tools import get_dir_creation_date, get_constant_from_db_and_time, get_random_db_interface
    from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions
    filename, filename_out, channel, qm = inp
    h5path = "INSTRUMENT/{}/DET/{}CH0:xtdf/".format(loc, channel)
    h5path_idx = "INDEX/{}/DET/{}CH0:xtdf/".format(loc, channel)
    low_edges = None
    hists_signal_low = None
    high_edges = None
    hists_signal_high = None
    pulse_edges = None
    def get_num_cells(fname, loc, module):
        with h5py.File(fname, "r") as f:
            cells = f["INSTRUMENT/{}/DET/{}CH0:xtdf/image/cellId".format(loc, module)][()]
            maxcell = np.max(cells)
            options = [100, 200, 400, 500, 600, 700, 800]
            dists = np.array([(o-maxcell) for o in options])
            dists[dists < 0] = 10000  # ensure we always round up to the next option
            return options[np.argmin(dists)]
    if mem_cells == 0:
        mem_cells = get_num_cells(filename, loc, channel)
    print("Memcells: {}".format(mem_cells))
    condition = Conditions.Dark.DSSC(bias_voltage=bias_voltage, memory_cells=mem_cells)
    detinst = getattr(Detectors, dinstance)
    device = getattr(detinst, qm)
    offset, when = get_constant_from_db_and_time(device,
                                                 Constants.DSSC.Offset(),
                                                 condition,
                                                 None,
                                                 get_random_db_interface(cal_db_interface),
                                                 creation_time=creation_time,
                                                 timeout=cal_db_timeout)
    if offset is not None:
        offset = np.moveaxis(np.moveaxis(offset[...], 2, 0), 2, 1)
        print(offset.shape)
    def copy_and_sanitize_non_cal_data(infile, outfile):
        # these are touched in the correct function, do not copy them here
        dont_copy = ["data"]
        dont_copy = [h5path + "image/{}".format(do)
                     for do in dont_copy]
        # a visitor to copy everything else
        def visitor(k, item):
            if k not in dont_copy:
                if isinstance(item, h5py.Group):
                    outfile.create_group(k)
                elif isinstance(item, h5py.Dataset):
                    group = str(k).split("/")
                    group = "/".join(group[:-1])
                    infile.copy(k, outfile[group])
        infile.visititems(visitor)
    try:
        with h5py.File(filename, "r", driver="core") as infile:
            with h5py.File(filename_out, "w") as outfile:
                copy_and_sanitize_non_cal_data(infile, outfile)
                # get indices of last images in each train
                first_arr = np.squeeze(infile[h5path_idx+"image/first"]).astype(np.int)
                last_arr = np.concatenate((first_arr[1:], np.array([-1,]))).astype(np.int)
                assert first_arr.size == last_arr.size
                oshape = list(infile[h5path+"image/data"].shape)
                if len(oshape) == 4:
                    oshape = [oshape[0],]+oshape[2:]
                chunks = (chunksize, oshape[1], oshape[2])
                ddset = outfile.create_dataset(h5path + "image/data",
                                               oshape, chunks=chunks,
                                               dtype=np.float32,
                                               fletcher32=True)
                mdset = outfile.create_dataset(h5path + "image/mask",
                                               oshape, chunks=chunks,
                                               dtype=np.uint32,
                                               compression="gzip",
                                               compression_opts=1,
                                               shuffle=True,
                                               fletcher32=True)
                for train in range(first_arr.size):
                    first = first_arr[train]
                    last = last_arr[train]
                    data = np.squeeze(infile[h5path+"image/data"][first:last, ...].astype(np.float32))
                    cellId = np.squeeze(infile[h5path+"image/cellId"][first:last, ...])
                    pulseId = np.squeeze(infile[h5path+"image/pulseId"][first:last, ...])
                    if offset_image != "PP" or offset is None:
                        data -= data[offset_image, ...]
                    else:
                        data -= offset[cellId, ...]
                    if train == 0:
                        pulseId = np.repeat(pulseId[:, None], data.shape[1], axis=1)
                        pulseId = np.repeat(pulseId[:,:,None], data.shape[2], axis=2)
                        bins = (55, pulseId.max())
                        rnge = [[-5, 50], [0, pulseId.max()]]
                        hists_signal_low, low_edges, pulse_edges = np.histogram2d(data.flatten(),
                                                                                  pulseId.flatten(),
                                                                                  bins=bins,
                                                                                  range=rnge)
                        rnge = [[-5, 300], [0, pulseId.max()]]
                        hists_signal_high, high_edges, _ = np.histogram2d(data.flatten(),
                                                                          pulseId.flatten(),
                                                                          bins=bins,
                                                                          range=rnge)
                    # data[data < 0] = 0
                    ddset[first:last, ...] = data
                # find static and noisy values in dark images
                data = infile[h5path+"image/data"][last, ...].astype(np.float32)
                bpix = np.zeros(oshape[1:], np.uint32)
                dark_std = np.std(data, axis=0)
                bpix[dark_std > noisy_pix_threshold] = BadPixels.NOISE_OUT_OF_THRESHOLD.value
                for i in range(8):
                    for j in range(2):
                        count_noise = np.count_nonzero(bpix[i*64:(i+1)*64, j*64:(j+1)*64])
                        asic_std = np.std(data[:, i*64:(i+1)*64, j*64:(j+1)*64])
                        if mask_noisy_asic:
                            if count_noise/(64*64) > mask_noisy_asic:
                                bpix[i*64:(i+1)*64, j*64:(j+1)*64] = BadPixels.NOISY_ADC.value
                        if mask_cold_asic:
                            count_cold = np.count_nonzero(asic_std < 0.5)
                            if count_cold/(64*64) > mask_cold_asic:
                                bpix[i*64:(i+1)*64, j*64:(j+1)*64] = BadPixels.ASIC_STD_BELOW_NOISE.value
                mdset[...] = np.repeat(bpix[None,...], infile[h5path+"image/data"].shape[0], axis=0)
    except Exception as e:
        print(e)
        success = False
        reason = "Error"
    return (hists_signal_low, hists_signal_high, low_edges, high_edges, pulse_edges, when, qm)
done = False
first_files = []
inp = []
left = total_sequences
hists_signal_low = 0
hists_signal_high = 0
low_edges, high_edges, pulse_edges = None, None, None
whens = []
qms = []
while not done:
    dones = []
    first = True
    for i in range(16):
        qm = "Q{}M{}".format(i//4 + 1, i % 4 + 1)
        if qm in mapped_files and not mapped_files[qm].empty():
            fname_in = str(mapped_files[qm].get())
            dones.append(mapped_files[qm].empty())
        else:
            print("Skipping {}".format(qm))
            first_files.append((None, None))
            continue
        fout = os.path.abspath("{}/{}".format(out_folder, (os.path.split(fname_in)[-1]).replace("RAW", "CORR")))
        if first:
            first_files.append((fname_in, fout))
        inp.append((fname_in, fout, i, qm))
    first = False
    if len(inp) >= min(MAX_PAR, left):
        print("Running {} tasks parallel".format(len(inp)))
        p = partial(correct_module, total_sequences, sequences_qm,
                    loc, dinstance, offset_image, mask_noisy_asic,
                    mask_cold_asic, noisy_pix_threshold, chunk_size_idim,
                    mem_cells, bias_voltage, cal_db_timeout, creation_time, cal_db_interface)
        r = view.map_sync(p, inp)
        #r = list(map(p, inp))
        inp = []
        left -= MAX_PAR
        for rr in r:
            if rr is not None:
                hl, hh, low_edges, high_edges, pulse_edges, when, qm = rr
                whens.append(when)
                qms.append(qm)
                if hl is not None:  # any one being None will also make the others None
                    hists_signal_low += hl.astype(np.float64)
                    hists_signal_high += hh.astype(np.float64)
    done = all(dones)
whens = [x for _, x in sorted(zip(qms, whens))]
qms = sorted(qms)
for i, qm in enumerate(qms):
    print("Offset for {} was injected on {}".format(qm, whens[i].isoformat()))
```
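%% Cell type:markdown id: tags:
The per-memory-cell correction above hinges on a single fancy-indexing step: with the offset constant reordered to (cells, y, x), indexing it with a train's cellId vector broadcasts the matching dark offset onto every frame. A minimal sketch with made-up shapes:
%% Cell type:code id: tags:
``` python
import numpy as np
# hypothetical shapes: 75 frames of a 128x512 module, 400 memory cells
offset = np.random.normal(40., 2., (400, 128, 512)).astype(np.float32)  # (cells, y, x)
data = np.random.normal(45., 3., (75, 128, 512)).astype(np.float32)     # (frames, y, x)
cellId = np.arange(75)  # memory cell id of each frame in the train
data -= offset[cellId, ...]  # selects offset[cellId[k]] for frame k and subtracts it
```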
%% Cell type:code id: tags:
``` python
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
from matplotlib import cm
from matplotlib.ticker import LinearLocator, FormatStrFormatter
import numpy as np
%matplotlib inline
def do_3d_plot(data, edges, x_axis, y_axis):
    fig = plt.figure(figsize=(10,10))
    ax = fig.gca(projection='3d')
    # Make data.
    X = edges[0][:-1]
    Y = edges[1][:-1]
    X, Y = np.meshgrid(X, Y)
    Z = data.T
    # Plot the surface.
    surf = ax.plot_surface(X, Y, Z, cmap=cm.coolwarm,
                           linewidth=0, antialiased=False)
    ax.set_xlabel(x_axis)
    ax.set_ylabel(y_axis)
    ax.set_zlabel("Counts")
```
%% Cell type:code id: tags:
``` python
def do_2d_plot(data, edges, y_axis, x_axis):
    from matplotlib.colors import LogNorm
    fig = plt.figure(figsize=(10,10))
    ax = fig.add_subplot(111)
    extent = [np.min(edges[1]), np.max(edges[1]), np.min(edges[0]), np.max(edges[0])]
    im = ax.imshow(data[::-1,:], extent=extent, aspect="auto", norm=LogNorm(vmin=1, vmax=np.max(data)))
    ax.set_xlabel(x_axis)
    ax.set_ylabel(y_axis)
    cb = fig.colorbar(im)
    cb.set_label("Counts")
```
%% Cell type:markdown id: tags:
## Mean Intensity per Pulse ##
The following plots show the mean signal for each pulse, both in a detailed (low-signal) and in an expanded intensity region.
%% Cell type:code id: tags:
``` python
do_3d_plot(hists_signal_low, [low_edges, pulse_edges], "Signal (ADU)", "Pulse id")
do_2d_plot(hists_signal_low, [low_edges, pulse_edges], "Signal (ADU)", "Pulse id")
do_3d_plot(hists_signal_high, [high_edges, pulse_edges], "Signal (ADU)", "Pulse id")
do_2d_plot(hists_signal_high, [high_edges, pulse_edges], "Signal (ADU)", "Pulse id")
```
%% Cell type:code id: tags:
``` python
corrected = []
raw = []
mask = []
cell_fac = 1
first_idx = 400*10+40
last_idx = 400*10+56
pulse_ids = []
train_ids = []
for i, ff in enumerate(first_files[:16]):
    try:
        rf, cf = ff
        if rf is None:
            raise Exception("File not present")
        # read the RAW data for this module
        infile = h5py.File(rf, "r")
        raw.append(np.array(infile["/INSTRUMENT/{}/DET/{}CH0:xtdf/image/data".format(loc, i)][first_idx:last_idx, 0, ...]))
        infile.close()
        # read the CORRECTED data, the mask and the pulse/train ids
        infile = h5py.File(cf, "r")
        corrected.append(np.array(infile["/INSTRUMENT/{}/DET/{}CH0:xtdf/image/data".format(loc, i)][first_idx:last_idx, ...]))
        mask.append(np.array(infile["/INSTRUMENT/{}/DET/{}CH0:xtdf/image/mask".format(loc, i)][first_idx:last_idx, ...]))
        pulse_ids.append(np.squeeze(infile["/INSTRUMENT/{}/DET/{}CH0:xtdf/image/pulseId".format(loc, i)][first_idx:last_idx, ...]))
        train_ids.append(np.squeeze(infile["/INSTRUMENT/{}/DET/{}CH0:xtdf/image/trainId".format(loc, i)][first_idx:last_idx, ...]))
        infile.close()
    except Exception as e:
        print(e)
        corrected.append(np.zeros((last_idx-first_idx, 512, 128)))
        mask.append(np.zeros((last_idx-first_idx, 512, 128)))
        raw.append(np.zeros((last_idx-first_idx, 512, 128)))
```
%% Cell type:code id: tags:
``` python
def combine_stack(d, sdim):
    combined = np.zeros((sdim, 1300, 1300), np.float32)
    quad_pos = [
        (0, 145),
        (130, 140),
        (125, 15),
        (0, 15),
    ]
    # scale factors converting positions to pixel indices
    px = 0.236
    py = 0.204
    with h5py.File(geo_file, "r") as gf:
        for i in range(16):
            mi = 3 - (i % 4)
            mp = gf["Q{}/M{}/Position".format(i//4+1, mi%4+1)][()]
            t1 = gf["Q{}/M{}/T01/Position".format(i//4+1, i%4+1)][()]
            t2 = gf["Q{}/M{}/T02/Position".format(i//4+1, i%4+1)][()]
            if i//4 < 2:
                t1, t2 = t2, t1
            # flip the module data depending on which half of the detector it sits in
            if i // 4 == 0 or i // 4 == 1:
                td = d[i][:, ::-1, :]
            else:
                td = d[i][:, :, ::-1]
            t1d = td[:, :, :256]
            t2d = td[:, :, 256:]
            # convert tile positions to pixel coordinates and add the quadrant offset
            x0t1 = int((t1[0]+mp[0])/px)
            y0t1 = int((t1[1]+mp[1])/py)
            x0t2 = int((t2[0]+mp[0])/px)
            y0t2 = int((t2[1]+mp[1])/py)
            x0t1 += int(quad_pos[i//4][1]/px)
            x0t2 += int(quad_pos[i//4][1]/px)
            y0t1 += int(quad_pos[i//4][0]/py) + combined.shape[1]//16
            y0t2 += int(quad_pos[i//4][0]/py) + combined.shape[1]//16
            combined[:, y0t1:y0t1+128, x0t1:x0t1+256] = t1d
            combined[:, y0t2:y0t2+128, x0t2:x0t2+256] = t2d
    return combined
```
%% Cell type:code id: tags:
``` python
combined = combine_stack(corrected, last_idx-first_idx)
combined_raw = combine_stack(raw, last_idx-first_idx)
combined_mask = combine_stack(mask, last_idx-first_idx)
```
%% Cell type:markdown id: tags:
### Mean RAW Preview ###
The per-pixel mean of the selected RAW images
%% Cell type:code id: tags:
``` python
%matplotlib inline
fig = plt.figure(figsize=(20, 10))
ax = fig.add_subplot(111)
im = ax.imshow(np.mean(combined_raw[:, ...], axis=0),
               vmin=min(0.75*np.median(combined_raw[combined_raw > 0]), -5),
               vmax=max(1.5*np.median(combined_raw[combined_raw > 0]), 50), cmap="jet")
cb = fig.colorbar(im, ax=ax)
```
%% Cell type:markdown id: tags:
### Single Shot Preview ###
A single-shot image (the third of the selected frames)
%% Cell type:code id: tags:
``` python
fig = plt.figure(figsize=(20, 10))
ax = fig.add_subplot(111)
dim = combined[2, ...]
im = ax.imshow(dim, vmin=0, vmax=max(1.5*np.median(dim[dim > 0]), 50), cmap="jet", interpolation="nearest")
cb = fig.colorbar(im, ax=ax)
```
%% Cell type:code id: tags:
``` python
fig = plt.figure(figsize=(20, 10))
ax = fig.add_subplot(111)
h = ax.hist(dim.flatten(), bins=100, range=(0, 100))
```
%% Cell type:markdown id: tags:
### Mean CORRECTED Preview ###
The per-pixel mean of the selected CORRECTED images
%% Cell type:code id: tags:
``` python
fig = plt.figure(figsize=(20, 10))
ax = fig.add_subplot(111)
im = ax.imshow(np.mean(combined[:, ...], axis=0), vmin=0,
               vmax=max(1.5*np.median(combined[combined > 0]), 10), cmap="jet", interpolation="nearest")
cb = fig.colorbar(im, ax=ax)
```
%% Cell type:markdown id: tags:
### Max CORRECTED Preview ###
The per-pixel maximum of the selected CORRECTED images
%% Cell type:code id: tags:
``` python
fig = plt.figure(figsize=(20, 10))
ax = fig.add_subplot(111)
im = ax.imshow(np.max(combined[:, ...], axis=0), vmin=0,
               vmax=max(100*np.median(combined[combined > 0]), 20), cmap="jet", interpolation="nearest")
cb = fig.colorbar(im, ax=ax)
```
%% Cell type:code id: tags:
``` python
fig = plt.figure(figsize=(20, 10))
ax = fig.add_subplot(111)
combined[combined <= 0] = 0  # clip non-positive values before histogramming
h = ax.hist(combined.flatten(), bins=100, range=(-5, 100), log=True)
```
%% Cell type:markdown id: tags:
## Bad Pixels ##
The mask contains dedicated entries for all pixels and memory cells as well as for all three gain stages. Each mask entry is encoded in 32 bits as:
%% Cell type:code id: tags:
``` python
from cal_tools.enums import BadPixels
from IPython.display import HTML, display, Markdown, Latex
import tabulate

table = []
for item in BadPixels:
    table.append((item.name, "{:032b}".format(item.value)))
md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=["Bad pixel type", "Bit mask"])))
```
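%% Cell type:markdown id: tags:
For illustration (a small helper added here, not part of the processing itself), a mask entry can be decoded back into the flag names it combines with bitwise ANDs:
%% Cell type:code id: tags:
``` python
def decode_mask(value):
    # names of all BadPixels flags set in a 32-bit mask entry
    return [item.name for item in BadPixels if int(value) & item.value]

# e.g. decode the most severe entry found in the combined mask
print(decode_mask(np.max(combined_mask)))
```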
%% Cell type:markdown id: tags:
### Full Train Bad Pixels ###
%% Cell type:code id: tags:
``` python
fig = plt.figure(figsize=(20, 10))
ax = fig.add_subplot(111)
im = ax.imshow(np.log2(np.max(combined_mask[:, ...], axis=0)), vmin=0,
               vmax=32, cmap="jet")
cb = fig.colorbar(im, ax=ax)
```
%% Cell type:markdown id: tags:
### Full Train Bad Pixels - Only Dark Char. Related ###
%% Cell type:code id: tags:
``` python
fig = plt.figure(figsize=(20, 10))
ax = fig.add_subplot(111)
im = ax.imshow(np.max((combined_mask.astype(np.uint32)[:, ...] & BadPixels.NOISY_ADC.value) != 0, axis=0), vmin=0,
               vmax=1, cmap="jet")
cb = fig.colorbar(im, ax=ax)
```
  • Contributor

    Not related to the MR, but a question about the magic numbers in the rows `first_idx = 400*10+40` and `last_idx = 400*10+56`:

    is the 400 the number of pulses per train? In the latest measurements the frame counts were much lower and varied.

  • Maintainer

    Yes, this might cause issues then. We should come up with a more stable way of determining preview images.
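
    A minimal sketch of what that could look like, assuming the corrected files carry the `image/trainId` dataset used above (function name and defaults here are illustrative, not a final implementation):

    ``` python
    import h5py
    import numpy as np

    def preview_range(fname, loc, module, n_frames=16):
        # pick preview frames from the first train that actually has data,
        # instead of relying on hard-coded indices
        path = "/INSTRUMENT/{}/DET/{}CH0:xtdf/image/trainId".format(loc, module)
        with h5py.File(fname, "r") as f:
            train_ids = np.squeeze(f[path][()])
        first_train = train_ids[train_ids > 0][0]
        idx = np.flatnonzero(train_ids == first_train)
        # never request more frames than the train actually contains
        return idx[0], idx[0] + min(n_frames, idx.size)
    ```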
