Commit f8a940d2 authored by Karim Ahmed

Merge branch 'back_propagate/DSSC_pulseid_checksum_condition' into 'master'

Back propagate/dssc pulseid checksum condition

See merge request detectors/pycalibration!152
parents cfd3c35d 107325a7
@@ -5,7 +5,7 @@ import numpy as np
def show_overview(d, cell_to_preview, gain_to_preview, out_folder=None, infix=None):
    for module, data in d.items():
        fig = plt.figure(figsize=(20,20))
        grid = AxesGrid(fig, 111,
@@ -54,9 +54,6 @@ def show_overview(d, cell_to_preview, gain_to_preview, out_folder=None, infix=No
            fig.savefig("{}/dark_analysis_{}_module_{}.png".format(out_folder,
                                                                   infix,
                                                                   module))
def rebin(a, *args):
@@ -107,22 +104,27 @@ def plot_badpix_3d(data, definitions, title=None, rebin_fac=2, azim=22.5):
    if title:
        t = ax.set_title(title)
from IPython.display import HTML, display, Markdown, Latex
import tabulate
def create_constant_overview(constant, name, cells, vmin=None, vmax=None, entries=3,
                             out_folder=None, infix=None):
    gmap = {0: 'High', 1: 'Medium', 2: 'Low'}
    for g in range(entries):
        fig = plt.figure(figsize=(10, 5))
        ax = fig.add_subplot(111)
        table = []
        for qm in constant.keys():
            if len(constant[qm].shape) == 4:
                d = constant[qm][..., g]
            else:
                # This case was introduced for DSSC darks, which use this
                # function but have constants of shape (x, y, z), unlike AGIPD.
                d = constant[qm]
            #print("{} {}, gain {:0.2f}: mean: {:0.2f}, median: {:0.2f}, std: {:0.2f}".format(name, qm, g,
            #      np.nanmean(d),
            #      np.nanmedian(d),
@@ -132,7 +134,8 @@ def create_constant_overview(constant, name, cells, vmin, vmax, entries=3,
        ax.set_xlabel("Memory cell")
        ax.set_ylabel(name)
        ax.set_title("{} Gain Median per Cell".format(gmap[g]))
        if vmin is not None and vmax is not None:
            ax.set_ylim(vmin, vmax)
        #if out_folder and infix:
        #    fig.savefig("{}/dark_analysis_{}_{}_per_cell_gain{}.png".format(out_folder,
        #                                                                    infix,
...
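For reference, a minimal usage sketch of the relaxed create_constant_overview signature (the toy constant below is hypothetical; only the keyword defaults come from the diff above):

``` python
import numpy as np

# A DSSC-style dark constant of shape (x, y, cells): no gain axis, unlike
# AGIPD, so the function falls through to the new else-branch above.
toy_constant = {"Q1M1": np.random.normal(5000, 10, size=(128, 512, 100))}

# vmin/vmax may now be omitted; the y-limits are then left to matplotlib.
create_constant_overview(toy_constant, "Offset (ADU)", cells=100, entries=1)
```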
%% Cell type:markdown id: tags:
# Characterize Dark Images #
Author: S. Hauf, Version: 0.1
The following code analyzes a set of dark images taken with the DSSC detector to deduce detector offsets and noise. Data for the detector is taken in a single run; the DSSC does not acquire multiple gain stages.
The notebook explicitly does what pyDetLib provides in its offset calculation method for streaming data.
%% Cell type:code id: tags:
``` python
cluster_profile = "noDB" # The ipcluster profile to use
in_folder = "/gpfs/exfel/exp/SCS/201931/p900095/raw" # path to input data, required
out_folder = "/gpfs/exfel/data/scratch/ahmedk/test/DSSC" # path to output to, required
sequences = [0] # sequence files to evaluate.
run = 1497 # run number in which data was recorded, required
mem_cells = 0 # number of memory cells used, set to 0 to automatically infer
local_output = False # output constants locally
db_output = True # output constants to database
bias_voltage = 300 # detector bias voltage
cal_db_interface = "tcp://max-exfl016:8020" # the database interface to use
rawversion = 2 # RAW file format version
dont_use_dir_date = True # don't use the dir creation date for determining the creation time
thresholds_offset_sigma = 3. # thresholds in terms of n sigma noise for offset deduced bad pixels
thresholds_offset_hard = [4000, 8500] # thresholds in absolute ADU terms for offset deduced bad pixels
thresholds_noise_sigma = 5. # thresholds in terms of n sigma noise for noise deduced bad pixels
thresholds_noise_hard = [4, 20] # thresholds in absolute ADU terms for noise deduced bad pixels
instrument = "SCS" # the instrument
high_res_badpix_3d = False # set this to True if you need high-resolution 3d bad pixel plots. Runtime: ~ 1h
modules = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15] # modules to run for
```
%% Cell type:code id: tags:
``` python
# imports and things that do not usually need to be changed
from datetime import datetime
import warnings
warnings.filterwarnings('ignore')
from collections import OrderedDict
import os
import h5py
import numpy as np
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
%matplotlib inline
from cal_tools.tools import gain_map_files, parse_runs, run_prop_seq_from_path, get_notebook_name, get_dir_creation_date
from cal_tools.influx import InfluxLogger
from cal_tools.enums import BadPixels
from cal_tools.plotting import show_overview, plot_badpix_3d, create_constant_overview
# make sure a cluster is running with ipcluster start --n=32, give it a while to start
from ipyparallel import Client
view = Client(profile=cluster_profile)[:]
view.use_dill()
from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions
# no need to change this
QUADRANTS = 4
MODULES_PER_QUAD = 4
DET_FILE_INSET = "DSSC"
max_cells = mem_cells
offset_runs = OrderedDict()
offset_runs["high"] = parse_runs(run)[0]
creation_time = None
if not dont_use_dir_date:
    creation_time = get_dir_creation_date(in_folder, run)
run, prop, seq = run_prop_seq_from_path(in_folder)
logger = InfluxLogger(detector="DSSC", instrument=instrument, mem_cells=mem_cells,
                      notebook=get_notebook_name(), proposal=prop)
print("Using {} as creation time of constant.".format(creation_time))
loc = None
if instrument == "SCS":
    loc = "SCS_DET_DSSC1M-1"
    dinstance = "DSSC1M1"
print("Detector in use is {}".format(loc))
```
%% Cell type:code id: tags:
``` python
print("Parameters are:")
print("Proposal: {}".format(prop))
print("Memory cells: {}/{}".format(mem_cells, max_cells))
print("Runs: {}".format([v for v in offset_runs.values()]))
print("Sequences: {}".format(sequences))
print("Using DB: {}".format(db_output))
print("Input: {}".format(in_folder))
print("Output: {}".format(out_folder))
print("Bias voltage: {}V".format(bias_voltage))
```
%% Cell type:markdown id: tags:
The following lines create a queue of files which will then be processed module-parallel, distinguishing between different gains.
%% Cell type:code id: tags:
``` python
# set everything up filewise
if not os.path.exists(out_folder):
    os.makedirs(out_folder)
gmf = gain_map_files(in_folder, offset_runs, sequences, DET_FILE_INSET, QUADRANTS, MODULES_PER_QUAD)
gain_mapped_files, total_sequences, total_file_size = gmf
print("Will process a total of {} sequences: {:0.2f} GB of data.".format(total_sequences, total_file_size))
```
%% Cell type:markdown id: tags:
## Calculate Offsets, Noise and Thresholds ##
The calculation is performed per-pixel and per-memory-cell. Offsets are simply the median value for a set of dark data taken at a given gain, noise the standard deviation, and gain-bit values the medians of the gain array.
%% Cell type:code id: tags:
``` python
import copy
from functools import partial
def characterize_module(cells, bp_thresh, rawversion, loc, inp):
    import numpy as np
    import copy
    import h5py
    from cal_tools.enums import BadPixels
    from hashlib import blake2b
    import struct
    import binascii
    def get_num_cells(fname, loc, module):
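        # Infer the memory cell count from the recorded cell IDs: pick the
        # smallest of the listed options that covers the largest cell ID seen.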
        with h5py.File(fname, "r") as f:
            cells = f["INSTRUMENT/{}/DET/{}CH0:xtdf/image/cellId".format(loc, module)][()]
            maxcell = np.max(cells)
            options = [100, 200, 400, 500, 600, 700, 800]
            dists = np.array([(o-maxcell) for o in options])
            dists[dists < 0] = 10000 # ensure we always go higher
            return options[np.argmin(dists)]
    filename, filename_out, channel = inp
    if cells == 0:
        cells = get_num_cells(filename, loc, channel)
    pulseid_checksum = None
    thresholds_offset_hard, thresholds_offset_sigma, thresholds_noise_hard, thresholds_noise_sigma = bp_thresh
    infile = h5py.File(filename, "r", driver="core")
    if rawversion == 2:
        count = np.squeeze(infile["/INDEX/{}/DET/{}CH0:xtdf/image/count".format(loc, channel)])
        first = np.squeeze(infile["/INDEX/{}/DET/{}CH0:xtdf/image/first".format(loc, channel)])
        last_index = int(first[count != 0][-1]+count[count != 0][-1])
        first_index = int(first[count != 0][0])
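        # Hash the pulse ID pattern of the first train with BLAKE2b (8-byte
        # digest) and reinterpret the digest as a float64, so the pattern can be
        # passed as a numeric operating-condition parameter further below.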
        pulseids = infile["INSTRUMENT/{}/DET/{}CH0:xtdf/image/pulseId".format(loc, channel)][first_index:int(first[count != 0][1])]
        bveto = blake2b(pulseids.data, digest_size=8)
        pulseid_checksum = struct.unpack('d', binascii.unhexlify(bveto.hexdigest()))[0]
    else:
        status = np.squeeze(infile["/INDEX/{}/DET/{}CH0:xtdf/image/status".format(loc, channel)])
        if np.count_nonzero(status != 0) == 0:
            return
        last = np.squeeze(infile["/INDEX/{}/DET/{}CH0:xtdf/image/last".format(loc, channel)])
        first = np.squeeze(infile["/INDEX/{}/DET/{}CH0:xtdf/image/first".format(loc, channel)])
        last_index = int(last[status != 0][-1]) + 1
        first_index = int(first[status != 0][0])
    im = np.array(infile["/INSTRUMENT/{}/DET/{}CH0:xtdf/image/data".format(loc, channel)][first_index:last_index,...])
    cellIds = np.squeeze(infile["/INSTRUMENT/{}/DET/{}CH0:xtdf/image/cellId".format(loc, channel)][first_index:last_index,...])
    infile.close()
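    # Select the image component and reorder so the frame axis is last;
    # per-cell statistics are then taken along that axis.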
    im = im[:, 0, ...].astype(np.float32)
    im = np.rollaxis(im, 2)
    im = np.rollaxis(im, 2, 1)
    mcells = cells
    offset = np.zeros((im.shape[0], im.shape[1], mcells))
    noise = np.zeros((im.shape[0], im.shape[1], mcells))
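    # Memory cells never seen in this sequence keep zero offset/noise and are
    # caught by the hard-threshold bad pixel checks below.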
    for cc in np.unique(cellIds[cellIds < mcells]):
        cellidx = cellIds == cc
        offset[...,cc] = np.median(im[..., cellidx], axis=2)
        noise[...,cc] = np.std(im[..., cellidx], axis=2)
    # bad pixels
    bp = np.zeros(offset.shape, np.uint32)
    # offset related bad pixels
    offset_mn = np.nanmedian(offset, axis=(0,1))
    offset_std = np.nanstd(offset, axis=(0,1))
    bp[(offset < offset_mn-thresholds_offset_sigma*offset_std) |
       (offset > offset_mn+thresholds_offset_sigma*offset_std)] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value
    bp[(offset < thresholds_offset_hard[0]) | (offset > thresholds_offset_hard[1])] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value
    bp[~np.isfinite(offset)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value
    # noise related bad pixels
    noise_mn = np.nanmedian(noise, axis=(0,1))
    noise_std = np.nanstd(noise, axis=(0,1))
    bp[(noise < noise_mn-thresholds_noise_sigma*noise_std) |
       (noise > noise_mn+thresholds_noise_sigma*noise_std)] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value
    bp[(noise < thresholds_noise_hard[0]) | (noise > thresholds_noise_hard[1])] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value
    bp[~np.isfinite(noise)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value
    return offset, noise, bp, cells, pulseid_checksum
offset_g = OrderedDict()
noise_g = OrderedDict()
gain_g = OrderedDict()
badpix_g = OrderedDict()
gg = 0
start = datetime.now()
all_cells = []
checksums = {}
for gain, mapped_files in gain_mapped_files.items():
    inp = []
    dones = []
    for i in modules:
        qm = "Q{}M{}".format(i//4 + 1, i % 4 + 1)
        if qm in mapped_files and not mapped_files[qm].empty():
            fname_in = mapped_files[qm].get()
            dones.append(mapped_files[qm].empty())
        else:
            continue
        fout = os.path.abspath("{}/{}".format(out_folder, (os.path.split(fname_in)[-1]).replace("RAW", "CORR")))
        inp.append((fname_in, fout, i))
    first = False
    p = partial(characterize_module, max_cells,
                (thresholds_offset_hard, thresholds_offset_sigma,
                 thresholds_noise_hard, thresholds_noise_sigma), rawversion, loc)
    results = list(map(p, inp))
    for ii, r in enumerate(results):
        i = modules[ii]
        offset, noise, bp, thiscell, pulseid_checksum = r
        all_cells.append(thiscell)
        qm = "Q{}M{}".format(i//4 + 1, i % 4 + 1)
        if qm not in offset_g:
            offset_g[qm] = np.zeros((offset.shape[0], offset.shape[1], offset.shape[2]))
            noise_g[qm] = np.zeros_like(offset_g[qm])
            badpix_g[qm] = np.zeros_like(offset_g[qm], np.uint32)
            checksums[qm] = pulseid_checksum
        offset_g[qm][...] = offset
        noise_g[qm][...] = noise
        badpix_g[qm][...] = bp
    gg += 1
duration = (datetime.now()-start).total_seconds()
logger.runtime_summary_entry(success=True, runtime=duration,
                             total_sequences=total_sequences,
                             filesize=total_file_size)
logger.send()
max_cells = np.max(all_cells)
print("Using {} memory cells".format(max_cells))
```
%% Cell type:code id: tags:
``` python
res = OrderedDict()
for i in modules:
    qm = "Q{}M{}".format(i//4+1, i%4+1)
    res[qm] = {'Offset': offset_g[qm],
               'Noise': noise_g[qm],
               'BadPixels': badpix_g[qm]
              }
if local_output:
    for qm in offset_g.keys():
        ofile = "{}/dssc_offset_store_{}_{}.h5".format(out_folder, "_".join(offset_runs.values()), qm)
        store_file = h5py.File(ofile, "w")
        store_file["{}/Offset/0/data".format(qm)] = offset_g[qm]
        store_file["{}/Noise/0/data".format(qm)] = noise_g[qm]
        store_file["{}/BadPixels/0/data".format(qm)] = badpix_g[qm]
        store_file.close()
```
%% Cell type:code id: tags:
``` python
if db_output:
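    # Send each constant twice: once without the pulse ID checksum in the
    # operating condition and once with it, so that constants stored under the
    # old condition remain retrievable (back-propagation of the new condition).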
    for dont_use_pulseIds in [True, False]:
        for qm in offset_g.keys():
            try:
                metadata = ConstantMetaData()
                offset = Constants.DSSC.Offset()
                offset.data = offset_g[qm]
                metadata.calibration_constant = offset
                pidsum = None if dont_use_pulseIds else checksums[qm]
                # set the operating condition
                condition = Conditions.Dark.DSSC(memory_cells=max_cells, bias_voltage=bias_voltage,
                                                 pulseid_checksum=pidsum)
                detinst = getattr(Detectors, dinstance)
                device = getattr(detinst, qm)
                metadata.detector_condition = condition
                # specify a version for this constant
                if creation_time is None:
                    metadata.calibration_constant_version = Versions.Now(device=device)
                else:
                    metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)
                metadata.send(cal_db_interface, timeout=3000000)

                metadata = ConstantMetaData()
                noise = Constants.DSSC.Noise()
                noise.data = noise_g[qm]
                metadata.calibration_constant = noise
                # set the operating condition
                condition = Conditions.Dark.DSSC(memory_cells=max_cells, bias_voltage=bias_voltage,
                                                 pulseid_checksum=pidsum)
                metadata.detector_condition = condition
                # specify a version for this constant
                if creation_time is None:
                    metadata.calibration_constant_version = Versions.Now(device=device)
                else:
                    metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)
                metadata.send(cal_db_interface, timeout=3000000)

                continue # no bad pixels yet
                metadata = ConstantMetaData()
                badpixels = Constants.DSSC.BadPixelsDark()
                badpixels.data = badpix_g[qm]
                metadata.calibration_constant = badpixels
                # set the operating condition
                condition = Conditions.Dark.DSSC(memory_cells=max_cells, bias_voltage=bias_voltage,
                                                 pulseid_checksum=pidsum)
                metadata.detector_condition = condition
                # specify a version for this constant
                if creation_time is None:
                    metadata.calibration_constant_version = Versions.Now(device=device)
                else:
                    metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)
                metadata.send(cal_db_interface, timeout=3000000)
            except Exception as e:
                print(e)
```
%% Cell type:markdown id: tags:
## Single-Cell Overviews ##
Single-cell overviews make it possible to identify potential effects on all memory cells, e.g. at the sensor level. Additionally, they serve as a first sanity check on expected behaviour, e.g. whether structuring at the ASIC level is visible in the offsets while no other immediate artifacts appear.
%% Cell type:code id: tags:
``` python
for r in res.values():
    del r["BadPixels"]
```
%% Cell type:code id: tags:
``` python
cell = 9
gain = 0
out_folder = None
show_overview(res, cell, gain, out_folder=out_folder, infix="_".join(offset_runs.values()))
```
%% Cell type:markdown id: tags:
## Global Bad Pixel Behaviour ##
The following plots show the results of the bad pixel evaluation for all evaluated memory cells. Cells are stacked in the Z-dimension, while pixel values in x/y are rebinned with a factor of 2. This excludes single bad pixels present only in disconnected pixels. Hence, any bad pixels spanning at least 4 pixels in the x/y-plane, or across at least two memory cells, are indicated. Colors encode the bad pixel type, or mixed type.
%% Cell type:code id: tags:
``` python
cols = {BadPixels.NOISE_OUT_OF_THRESHOLD.value: (BadPixels.NOISE_OUT_OF_THRESHOLD.name, '#FF000080'),
        BadPixels.OFFSET_NOISE_EVAL_ERROR.value: (BadPixels.OFFSET_NOISE_EVAL_ERROR.name, '#0000FF80'),
        BadPixels.OFFSET_OUT_OF_THRESHOLD.value: (BadPixels.OFFSET_OUT_OF_THRESHOLD.name, '#00FF0080'),
        BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value: ('MIXED', '#DD00DD80')}
rebin = 8 if not high_res_badpix_3d else 2
gain = 0
for mod, data in badpix_g.items():
    plot_badpix_3d(data, cols, title=mod, rebin_fac=rebin)
```
%% Cell type:markdown id: tags:
## Aggregate values, and per Cell behaviour ##
The following tables and plots give an overview of statistical aggregates for each constant, as well as per-cell behaviour.
%% Cell type:code id: tags:
``` python
create_constant_overview(offset_g, "Offset (ADU)", max_cells,
                         out_folder=out_folder, infix="_".join(offset_runs.values()), entries=1)
```
%% Cell type:code id: tags:
``` python
create_constant_overview(noise_g, "Noise (ADU)", max_cells, 0, 100,
                         out_folder=out_folder, infix="_".join(offset_runs.values()), entries=1)
```
%% Cell type:code id: tags:
``` python
bad_pixel_aggregate_g = OrderedDict()
for m, d in badpix_g.items():
    bad_pixel_aggregate_g[m] = d.astype(np.bool).astype(np.float)
create_constant_overview(bad_pixel_aggregate_g, "Bad pixel fraction", max_cells, entries=1,
                         out_folder=out_folder, infix="_".join(offset_runs.values()))
```
%% Cell type:code id: tags:
``` python
```
%% Cell type:code id: tags:
``` python
```
...