Skip to content
Snippets Groups Projects

Feat/dss cimprove master rebasing

Closed Andrey Samartsev requested to merge feat/DSSCimproveMasterRebasing into feat/DSSCdarksImprove
1 file
+ 1
1
Compare changes
  • Side-by-side
  • Inline
%% Cell type:markdown id: tags:
# Characterize Dark Images #
Author: S. Hauf, Version: 0.1
The following code analyzes a set of dark images taken with the DSSC detector to deduce detector offsets and noise. Data for the detector is presented in one run; the DSSC does not acquire multiple gain stages.
The notebook explicitly does what pyDetLib provides in its offset calculation method for streaming data.
%% Cell type:code id: tags:
``` python
# Merge conflict resolved: the HEAD parameter set is kept (karabo_id/karabo_da,
# h5path templates and mem_cells are required by the cells below); the
# DSSC-specific slow-data parameters from the feature branch are kept as well,
# since getDSSCctrlData() depends on them.
cluster_profile = "noDB" # The ipcluster profile to use
in_folder = "/gpfs/exfel/exp/SCS/202030/p900125/raw" # path to input data, required
out_folder = "/gpfs/exfel/data/scratch/ahmedk/test/DSSC" # path to output to, required
sequences = [0] # sequence files to evaluate.
modules = [-1] # modules to run for
run = 222 # run number in which data was recorded, required

karabo_id = "SCS_DET_DSSC1M-1" # karabo karabo_id
karabo_da = ['-1'] # a list of data aggregators names, Default [-1] for selecting all data aggregators
receiver_id = "{}CH0" # inset for receiver devices
path_template = 'RAW-R{:04d}-{}-S{:05d}.h5' # the template to use to access data
h5path = '/INSTRUMENT/{}/DET/{}:xtdf/image' # path in the HDF5 file to images
h5path_idx = '/INDEX/{}/DET/{}:xtdf/image' # path in the HDF5 file to images

use_dir_creation_date = True # use the dir creation date for determining the creation time
cal_db_interface = "tcp://max-exfl016:8020" # the database interface to use
cal_db_timeout = 3000000 # timeout on caldb requests
local_output = True # output constants locally
db_output = False # output constants to database
mem_cells = 0 # number of memory cells used, set to 0 to automatically infer
bias_voltage = 300 # detector bias voltage

rawversion = 2 # RAW file format version
thresholds_offset_sigma = 3. # thresholds in terms of n sigma noise for offset deduced bad pixels
thresholds_offset_hard = [4, 125] # thresholds in absolute ADU terms for offset deduced bad pixels
thresholds_noise_sigma = 5. # thresholds in terms of n sigma noise for noise deduced bad pixels
thresholds_noise_hard = [0.1, 10] # thresholds in absolute ADU terms for noise deduced bad pixels
offset_numpy_algorithm = "mean" # per-cell offset estimator: "mean" or "median"
instrument = "SCS" # the instrument
high_res_badpix_3d = False # set this to True if you need high-resolution 3d bad pixel plots. Runtime: ~ 1h

# DSSC slow (control) data: one file per quadrant, written by a data aggregator
slow_data_pattern = r'RAW-R{}-DA{}-S[0-9]+\.h5'
slow_data_aggregators = {1: 1, 2: 2, 3: 3, 4: 4} # quadrant -> data aggregator number
```
%% Cell type:code id: tags:
``` python
# imports and things that do not usually need to be changed
from datetime import datetime
import warnings
warnings.filterwarnings('ignore')
from collections import OrderedDict
import os
import h5py
import numpy as np
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
%matplotlib inline
import re
from os import listdir
<<<<<<< HEAD
from cal_tools.tools import (map_gain_stages, parse_runs, run_prop_seq_from_path,
get_notebook_name, get_dir_creation_date,
get_random_db_interface, get_from_db, save_const_to_h5)
=======
from cal_tools.tools import gain_map_files, parse_runs, run_prop_seq_from_path, get_notebook_name, get_dir_creation_date
>>>>>>> modified notebook for DSSC specifics
from cal_tools.influx import InfluxLogger
from cal_tools.enums import BadPixels
from cal_tools.plotting import (show_overview, plot_badpix_3d,
create_constant_overview,
show_processed_modules)
# make sure a cluster is running with ipcluster start --n=32, give it a while to start
from ipyparallel import Client
from IPython.display import display, Markdown, Latex
import tabulate
view = Client(profile=cluster_profile)[:]
view.use_dill()
from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions
h5path = h5path.format(karabo_id, receiver_id)
h5path_idx = h5path_idx.format(karabo_id, receiver_id)
gain_names = ['High', 'Medium', 'Low']
if karabo_da[0] == '-1':
if modules[0] == -1:
modules = list(range(16))
karabo_da = ["DSSC{:02d}".format(i) for i in modules]
else:
modules = [int(x[-2:]) for x in karabo_da]
max_cells = mem_cells
offset_runs = OrderedDict()
offset_runs["high"] = run
creation_time=None
if use_dir_creation_date:
creation_time = get_dir_creation_date(in_folder, run)
<<<<<<< HEAD
print(f"Using {creation_time} as creation time of constant.")
=======
run_number = run
>>>>>>> modified notebook for DSSC specifics
run, prop, seq = run_prop_seq_from_path(in_folder)
logger = InfluxLogger(detector="DSSC", instrument=instrument, mem_cells=mem_cells,
notebook=get_notebook_name(), proposal=prop)
dinstance = "DSSC1M1"
<<<<<<< HEAD
print(f"Detector in use is {karabo_id}")
cal_db_interface = get_random_db_interface(cal_db_interface)
=======
loc = None
ctrlloc = None
if instrument == "SCS":
loc = "SCS_DET_DSSC1M-1"
dinstance = "DSSC1M1"
print("Detector in use is {}".format(loc))
>>>>>>> modified notebook for DSSC specifics
```
%% Cell type:code id: tags:
``` python
# Echo the effective configuration of this notebook run.
print("Parameters are:")
print("Proposal: {}".format(prop))
print("Memory cells: {}/{}".format(mem_cells, max_cells))
print("Runs: {}".format(list(offset_runs.values())))
print("Sequences: {}".format(sequences))
print("Using DB: {}".format(db_output))
print("Input: {}".format(in_folder))
print("Output: {}".format(out_folder))
print("Bias voltage: {}V".format(bias_voltage))
```
%% Cell type:markdown id: tags:
The following lines will create a queue of files which will then be executed module-parallel. Distinguishing between different gains.
%% Cell type:code id: tags:
``` python
# Prepare the output directory and build the per-module file queues.
os.makedirs(out_folder, exist_ok=True)
gain_mapped_files, total_sequences, total_file_size = map_gain_stages(
    in_folder, offset_runs, path_template, karabo_da, sequences)
print(f"Will process a total of {total_sequences} file.")
```
%% Cell type:code id: tags:
``` python
def getDSSCctrlData(in_folder):
    """Collect DSSC slow-control data per module from the per-quadrant files.

    Reads, for each quadrant in ``slow_data_aggregators``, the full-config
    file name and the sequencer cycle length from the RUN section of the
    slow-data HDF5 file, and expands them to per-module dictionaries.

    Returns
    -------
    (resFullConfigs, resOperatingFreqs) : tuple of dict
        Both keyed by module name 'Q{q}M{m}'. The config file name is hashed
        (blake2b, 8 bytes) and unpacked as a float so it can be used as a
        numeric calibration-condition parameter. Returns None if no slow
        data is found.
    """
    from hashlib import blake2b
    import struct
    import binascii

    # returned dictionaries
    resFullConfigs = {}
    resOperatingFreqs = {}
    ctrlDataFiles = {}

    # offset_runs["high"] is the integer run number; build the run directory
    # path explicitly (the original concatenated str + int, a TypeError)
    run_dir = "{}/r{:04d}".format(in_folder, offset_runs["high"])

    for quadrant, aggregator in slow_data_aggregators.items():
        quad_sd_pattern = slow_data_pattern.format("{:04d}".format(run_number),
                                                   "{:02d}".format(aggregator))
        quad_files = [f for f in os.listdir(run_dir)
                      if re.match(quad_sd_pattern, f)]
        # only keep quadrants that actually have slow data, so that
        # files[0] below cannot raise IndexError
        if quad_files:
            ctrlDataFiles[quadrant] = quad_files
    if not ctrlDataFiles:
        print("No Control Slow Data found!")
        return

    # control device id is stored in the file metadata; keep only the
    # device part (everything before the first '/')
    first_file = next(iter(ctrlDataFiles.values()))[0]
    with h5py.File(os.path.join(run_dir, first_file), 'r') as f:
        ctrlloc = f['/METADATA/dataSources/deviceId'][0]
    ctrlloc = ctrlloc.decode("utf-8")
    ctrlloc = ctrlloc[:ctrlloc.find('/')]

    fullConfigs = {}
    operatingFreqs = {}
    for quadrant, files in ctrlDataFiles.items():
        # use a context manager so the files are closed (the original
        # leaked the h5py.File handles)
        with h5py.File(os.path.join(run_dir, files[0]), 'r') as f:
            fullConfig = f['/RUN/{}/FPGA/PPT_Q{}/fullConfigFileName/value'
                           .format(ctrlloc, quadrant)][0].decode("utf-8")
            fullConfigs[quadrant] = fullConfig[fullConfig.rfind('/')+1:]
            opFreq = f['/RUN/{}/FPGA/PPT_Q{}/sequencer/cycleLength/value'
                       .format(ctrlloc, quadrant)][0]
        # a cycle length of 22 corresponds to 4.5 MHz operation
        operatingFreqs[quadrant] = 4.5 * (22.0 / opFreq)

    for quadrant, value in fullConfigs.items():
        for module in range(1, 5):
            qm = 'Q{}M{}'.format(quadrant, module)
            # hash the config file name into an 8-byte digest and reinterpret
            # it as a double, matching the pulseid_checksum scheme
            encodedvalue = blake2b(value.encode("utf-8"), digest_size=8)
            resFullConfigs[qm] = struct.unpack(
                'd', binascii.unhexlify(encodedvalue.hexdigest()))[0]
    for quadrant, value in operatingFreqs.items():
        for module in range(1, 5):
            qm = 'Q{}M{}'.format(quadrant, module)
            resOperatingFreqs[qm] = value
    return resFullConfigs, resOperatingFreqs
```
%% Cell type:markdown id: tags:
## Calculate Offsets, Noise and Thresholds ##
The calculation is performed per-pixel and per-memory-cell. Offsets are simply the median value for a set of dark data taken at a given gain, noise the standard deviation, and gain-bit values the medians of the gain array.
%% Cell type:code id: tags:
``` python
import copy
from functools import partial


def characterize_module(cells, bp_thresh, rawversion, karabo_id, h5path, h5path_idx, inp):
    """Compute offset, noise and bad-pixel maps for one DSSC module.

    Parameters
    ----------
    cells : int
        Number of memory cells; 0 means infer from the file's cellId data.
    bp_thresh : tuple
        (offset_hard, offset_sigma, noise_hard, noise_sigma) thresholds.
    rawversion : int
        RAW file format version (2 uses count/first indices, else last/status).
    karabo_id, h5path, h5path_idx : str
        Detector id and HDF5 path templates (formatted with the channel).
    inp : tuple
        (filename, channel).

    Returns
    -------
    (offset, noise, bp, cells, pulseid_checksum)
    """
    import numpy as np
    import copy
    import h5py
    from cal_tools.enums import BadPixels
    from hashlib import blake2b
    import struct
    import binascii

    def get_num_cells(fname, h5path):
        # infer the memory-cell count from the maximum cellId, rounded up
        # to the next known DSSC operation mode
        with h5py.File(fname, "r") as f:
            cells = f[f"{h5path}/cellId"][()]
            maxcell = np.max(cells)
            options = [100, 200, 400, 500, 600, 700, 800]
            dists = np.array([(o-maxcell) for o in options])
            dists[dists < 0] = 10000 # assure to always go higher
            return options[np.argmin(dists)]

    filename, channel = inp
    h5path = h5path.format(channel)
    h5path_idx = h5path_idx.format(channel)

    if cells == 0:
        cells = get_num_cells(filename, h5path)
        print(f"Using {cells} memory cells")

    # merge conflict resolved: a single initialisation is enough
    pulseid_checksum = None

    thresholds_offset_hard, thresholds_offset_sigma, thresholds_noise_hard, thresholds_noise_sigma = bp_thresh

    infile = h5py.File(filename, "r", driver="core")
    if rawversion == 2:
        count = np.squeeze(infile[f"{h5path_idx}/count"])
        first = np.squeeze(infile[f"{h5path_idx}/first"])
        last_index = int(first[count != 0][-1]+count[count != 0][-1])
        first_index = int(first[count != 0][0])
        # checksum over the pulse-id pattern of the first train; used as a
        # calibration-database condition parameter
        pulseids = infile[f"{h5path}/pulseId"][first_index:int(first[count != 0][1])]
        bveto = blake2b(pulseids.data, digest_size=8)
        pulseid_checksum = struct.unpack('d', binascii.unhexlify(bveto.hexdigest()))[0]
    else:
        status = np.squeeze(infile[f"{h5path_idx}/status"])
        if np.count_nonzero(status != 0) == 0:
            return
        last = np.squeeze(infile[f"{h5path_idx}/last"])
        first = np.squeeze(infile[f"{h5path_idx}/first"])
        last_index = int(last[status != 0][-1]) + 1
        first_index = int(first[status != 0][0])
    im = np.array(infile[f"{h5path}/data"][first_index:last_index, ...])
    cellIds = np.squeeze(infile[f"{h5path}/cellId"][first_index:last_index, ...])
    infile.close()

    # drop the gain axis and reorder to (x, y, frame)
    im = im[:, 0, ...].astype(np.float32)
    im = np.rollaxis(im, 2)
    im = np.rollaxis(im, 2, 1)

    mcells = cells
    offset = np.zeros((im.shape[0], im.shape[1], mcells))
    noise = np.zeros((im.shape[0], im.shape[1], mcells))

    for cc in np.unique(cellIds[cellIds < mcells]):
        cellidx = cellIds == cc
        # NOTE(review): offset_numpy_algorithm is read from the notebook
        # globals; this only works while the function is executed locally
        # (plain map), not via the ipyparallel view
        if offset_numpy_algorithm == "mean":
            offset[..., cc] = np.mean(im[..., cellidx], axis=2)
        else:
            offset[..., cc] = np.median(im[..., cellidx], axis=2)
        noise[..., cc] = np.std(im[..., cellidx], axis=2)

    # bad pixels
    bp = np.zeros(offset.shape, np.uint32)
    # offset related bad pixels
    offset_mn = np.nanmedian(offset, axis=(0, 1))
    offset_std = np.nanstd(offset, axis=(0, 1))
    bp[(offset < offset_mn-thresholds_offset_sigma*offset_std) |
       (offset > offset_mn+thresholds_offset_sigma*offset_std)] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value
    bp[(offset < thresholds_offset_hard[0]) | (offset > thresholds_offset_hard[1])] |= BadPixels.OFFSET_OUT_OF_THRESHOLD.value
    bp[~np.isfinite(offset)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value
    # noise related bad pixels
    noise_mn = np.nanmedian(noise, axis=(0, 1))
    noise_std = np.nanstd(noise, axis=(0, 1))
    bp[(noise < noise_mn-thresholds_noise_sigma*noise_std) |
       (noise > noise_mn+thresholds_noise_sigma*noise_std)] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value
    bp[(noise < thresholds_noise_hard[0]) | (noise > thresholds_noise_hard[1])] |= BadPixels.NOISE_OUT_OF_THRESHOLD.value
    bp[~np.isfinite(noise)] |= BadPixels.OFFSET_NOISE_EVAL_ERROR.value

    return offset, noise, bp, cells, pulseid_checksum


offset_g = OrderedDict()
noise_g = OrderedDict()
gain_g = OrderedDict()
badpix_g = OrderedDict()
gg = 0

start = datetime.now()
all_cells = []
checksums = {}

# DSSC-specific slow data: per-module config checksum and operating frequency
fullConfigs, operatingFreqs = getDSSCctrlData(in_folder)

for gain, mapped_files in gain_mapped_files.items():
    inp = []
    dones = []
    for i in modules:
        qm = "Q{}M{}".format(i//4 + 1, i % 4 + 1)
        if qm in mapped_files and not mapped_files[qm].empty():
            fname_in = mapped_files[qm].get()
            print("Process file: ", fname_in)
            dones.append(mapped_files[qm].empty())
        else:
            continue
        inp.append((fname_in, i))
    p = partial(characterize_module, max_cells,
                (thresholds_offset_hard, thresholds_offset_sigma,
                 thresholds_noise_hard, thresholds_noise_sigma),
                rawversion, karabo_id, h5path, h5path_idx)
    results = list(map(p, inp))
    for ii, r in enumerate(results):
        i = modules[ii]
        offset, noise, bp, thiscell, pulseid_checksum = r
        all_cells.append(thiscell)
        qm = "Q{}M{}".format(i//4 + 1, i % 4 + 1)
        if qm not in offset_g:
            offset_g[qm] = np.zeros((offset.shape[0], offset.shape[1], offset.shape[2]))
            noise_g[qm] = np.zeros_like(offset_g[qm])
            badpix_g[qm] = np.zeros_like(offset_g[qm], np.uint32)
            checksums[qm] = pulseid_checksum
        offset_g[qm][...] = offset
        noise_g[qm][...] = noise
        badpix_g[qm][...] = bp
    gg += 1

duration = (datetime.now()-start).total_seconds()
logger.runtime_summary_entry(success=True, runtime=duration,
                             total_sequences=total_sequences,
                             filesize=total_file_size)
logger.send()

if len(all_cells) > 0:
    max_cells = np.max(all_cells)
    print(f"Using {max_cells} memory cells")
else:
    raise ValueError("0 processed memory cells. No raw data available.")
```
%% Cell type:code id: tags:
``` python
# Retrieve pre-existing constants from the calibration database so the
# summary tables below can compare new against old values.
clist = ["Offset", "Noise"]
old_const = {}
old_mdata = {}
print('Retrieve pre-existing constants for comparison.')
detinst = getattr(Detectors, dinstance)
for qm in offset_g.keys():
    device = getattr(detinst, qm)
    for const in clist:
        condition = Conditions.Dark.DSSC(memory_cells=max_cells,
                                         bias_voltage=bias_voltage,
                                         pulseid_checksum=checksums[qm])
        data, mdata = get_from_db(device,
                                  getattr(Constants.DSSC, const)(),
                                  condition,
                                  None,
                                  cal_db_interface, creation_time=creation_time,
                                  verbosity=2, timeout=cal_db_timeout)
        # NOTE(review): old_const/old_mdata are keyed by constant name only,
        # so with multiple modules the last module's data wins — confirm
        # this is intended before running with modules != [single module]
        old_const[const] = data
        if mdata is not None and data is not None:
            time = mdata.calibration_constant_version.begin_at
            old_mdata[const] = time.isoformat()
            os.makedirs(f'{out_folder}/old/', exist_ok=True)
            save_const_to_h5(mdata, f'{out_folder}/old/')
        else:
            old_mdata[const] = "Not found"
```
%% Cell type:code id: tags:
``` python
# Collect the per-module results into one mapping for plotting and injection.
res = OrderedDict()
for mod in modules:
    qm = f"Q{mod//4+1}M{mod%4+1}"
    try:
        res[qm] = {'Offset': offset_g[qm],
                   'Noise': noise_g[qm],
                   #TODO: No badpixelsdark, yet.
                   #'BadPixelsDark': badpix_g[qm]
                   }
    except Exception as e:
        print(f"Error: No constants for {qm}: {e}")
```
%% Cell type:code id: tags:
``` python
# Merge conflict resolved: the HEAD loop structure is kept (one generic loop
# over the constants in `res`, local saving via save_const_to_h5, configurable
# cal_db_timeout); the DSSC-specific condition parameters from the feature
# branch (acquisition_rate, configid_checksum from the slow data) are merged in.
#
# Push the same constant two different times.
# One with the generated pulseID check sum setting for the offline calibration.
# And another for the online calibration as it doesn't have this pulseID checksum, yet.
for dont_use_pulseIds in [True, False]:
    for qm in res.keys():
        detinst = getattr(Detectors, dinstance)
        device = getattr(detinst, qm)
        for const in res[qm].keys():
            metadata = ConstantMetaData()
            dconst = getattr(Constants.DSSC, const)()
            dconst.data = res[qm][const]
            metadata.calibration_constant = dconst
            pidsum = None if dont_use_pulseIds else checksums[qm]
            opfreq = None if dont_use_pulseIds else operatingFreqs[qm]
            configset = None if dont_use_pulseIds else fullConfigs[qm]
            # set the operating condition
            # NOTE(review): acquisition_rate/configid_checksum require an
            # iCalibrationDB version that supports them for DSSC — confirm
            condition = Conditions.Dark.DSSC(memory_cells=max_cells,
                                             bias_voltage=bias_voltage,
                                             pulseid_checksum=pidsum,
                                             acquisition_rate=opfreq,
                                             configid_checksum=configset)
            metadata.detector_condition = condition
            # specify a version for this constant
            if creation_time is None:
                metadata.calibration_constant_version = Versions.Now(device=device)
            else:
                metadata.calibration_constant_version = Versions.Timespan(device=device, start=creation_time)
            if db_output:
                try:
                    metadata.send(cal_db_interface, timeout=cal_db_timeout)
                except Exception as e:
                    print("Error", e)
            if local_output:
                # Don't save constant localy two times.
                if dont_use_pulseIds:
                    save_const_to_h5(metadata, out_folder)
                    print(f"Calibration constant {const} is stored locally.\n")
            if not dont_use_pulseIds:
                print("Generated constants with conditions:\n")
                print(f"• memory_cells: {max_cells}\n• bias_voltage: {bias_voltage}\n"
                      f"• pulseid_checksum: {pidsum}\n• creation_time: {creation_time}\n")
```
%% Cell type:code id: tags:
``` python
# Show the geometric position of the processed modules and their ASICs.
mnames = []
for i in modules:
    qm = f"Q{i//4+1}M{i % 4+1}"
    mnames.append(qm)
# Display the header once, after the list is complete — the original called
# display() inside the loop and printed `mnames` before appending the current
# module, so the first header showed an empty list.
display(Markdown(f'## Position of the module {mnames} and it\'s ASICs##'))
show_processed_modules(dinstance=dinstance, constants=None, mnames=mnames, mode="position")
```
%% Cell type:markdown id: tags:
## Single-Cell Overviews ##
Single cell overviews allow to identify potential effects on all memory cells, e.g. on sensor level. Additionally, they should serve as a first sanity check on expected behaviour, e.g. if structuring on the ASIC level is visible in the offsets, but otherwise no immediate artifacts are visible.
%% Cell type:code id: tags:
``` python
# Memory cell and gain stage to visualise in the per-module overview plots.
cell = 9
gain = 0
# NOTE(review): this rebinds the global `out_folder` to None so show_overview
# does not write plot files — confirm no later cell relies on the output path
out_folder = None
show_overview(res, cell, gain, out_folder=out_folder, infix="_{}".format(run))
```
%% Cell type:code id: tags:
``` python
# Colour legend for the 3D bad-pixel plots: flag value -> (label, RGBA colour)
cols = {BadPixels.NOISE_OUT_OF_THRESHOLD.value: (BadPixels.NOISE_OUT_OF_THRESHOLD.name, '#FF000080'),
        BadPixels.OFFSET_NOISE_EVAL_ERROR.value: (BadPixels.OFFSET_NOISE_EVAL_ERROR.name, '#0000FF80'),
        BadPixels.OFFSET_OUT_OF_THRESHOLD.value: (BadPixels.OFFSET_OUT_OF_THRESHOLD.name, '#00FF0080'),
        BadPixels.OFFSET_OUT_OF_THRESHOLD.value | BadPixels.NOISE_OUT_OF_THRESHOLD.value: ('MIXED', '#DD00DD80')}

if high_res_badpix_3d:
    display(Markdown("""
## Global Bad Pixel Behaviour ##
The following plots show the results of bad pixel evaluation for all evaluated memory cells.
Cells are stacked in the Z-dimension, while pixels values in x/y are rebinned with a factor of 2.
This excludes single bad pixels present only in disconnected pixels.
Hence, any bad pixels spanning at least 4 pixels in the x/y-plane, or across at least two memory cells are indicated.
Colors encode the bad pixel type, or mixed type.
"""))
    # set rebin_fac to 1 for avoiding rebining and
    # losing real values of badpixels(High resolution).
    gain = 0
    for mod, data in badpix_g.items():
        plot_badpix_3d(data, cols, title=mod, rebin_fac=2)
    plt.show()
```
%% Cell type:markdown id: tags:
## Aggregate values, and per Cell behaviour ##
The following tables and plots give an overview of statistical aggregates for each constant, as well as per cell behavior.
%% Cell type:code id: tags:
``` python
create_constant_overview(offset_g, "Offset (ADU)", max_cells, entries=1)
```
%% Cell type:code id: tags:
``` python
create_constant_overview(noise_g, "Noise (ADU)", max_cells, 0, 100, entries=1)
```
%% Cell type:code id: tags:
``` python
# Per-cell bad-pixel fraction: any non-zero bad-pixel flag counts as bad.
bad_pixel_aggregate_g = OrderedDict()
for m, d in badpix_g.items():
    # np.bool and np.float aliases were removed in NumPy 1.24; the builtin
    # types give identical results
    bad_pixel_aggregate_g[m] = d.astype(bool).astype(float)
create_constant_overview(bad_pixel_aggregate_g, "Bad pixel fraction", max_cells, entries=1)
```
%% Cell type:markdown id: tags:
## Summary tables ##
The following tables show summary information for the evaluated module. Values for currently evaluated constants are compared with values for pre-existing constants retrieved from the calibration database.
%% Cell type:code id: tags:
``` python
# Build, per constant, a summary table comparing statistical aggregates of the
# new values with any pre-existing constant retrieved from the database.
header = ['Parameter',
          "New constant", "Old constant ",
          "New constant", "Old constant ",
          "New constant", "Old constant "]

for const in ['Offset', 'Noise']:
    table = [['', 'High gain', 'High gain']]
    for qm in res.keys():
        data = np.copy(res[qm][const])
        if old_const[const] is not None:
            dataold = np.copy(old_const[const])
        f_list = [np.nanmedian, np.nanmean, np.nanstd, np.nanmin, np.nanmax]
        n_list = ['Median', 'Mean', 'Std', 'Min', 'Max']
        for i, f in enumerate(f_list):
            line = [n_list[i]]
            # NOTE(review): `gain` (0, set in an earlier cell) indexes the
            # last axis here, i.e. a single memory cell — confirm intended
            line.append('{:6.1f}'.format(f(data[..., gain])))
            if old_const[const] is not None:
                line.append('{:6.1f}'.format(f(dataold[..., gain])))
            else:
                line.append('-')
            table.append(line)
    display(Markdown('### {} [ADU], good and bad pixels ###'.format(const)))
    md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=header)))
```
%% Cell type:code id: tags:
``` python
```
%% Cell type:code id: tags:
``` python
```
Loading