Commit 561a4dce authored by Mikhail Karnevskiy

Use karabo_da for ePix

parent 15e5f84b
Merge request !264: Refactor: Use karabo_da for ePix
%% Cell type:markdown id: tags:
# ePix100 Dark Characterization
Author: M. Karnevskiy, Version 1.0
The following notebook provides dark-image analysis of the ePix100 detector.
Dark characterization evaluates the offset and noise of the detector and provides information about bad pixels. The resulting maps are saved as .h5 files for later use and injected into the calibration DB.
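%% Cell type:markdown id: tags:
Conceptually, the offset and noise maps are per-pixel statistics over the stack of dark frames. A minimal numpy sketch (illustrative only; the notebook uses the XFELDetAna calculators below, and the production bad-pixel criteria differ):
``` python
import numpy as np

# Hypothetical dark-frame stack, shape (rows, columns, frames),
# matching the (x, y, images) layout used by the calculators below.
darks = np.random.normal(loc=3000, scale=10, size=(708, 768, 100))

offset_map = np.mean(darks, axis=2)  # per-pixel pedestal (ADU)
noise_map = np.std(darks, axis=2)    # per-pixel noise (ADU)

# Illustrative bad-pixel flag: noise far from the map median.
bad_pixels = np.abs(noise_map - np.median(noise_map)) > 5 * noise_map.std()
```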
%% Cell type:code id: tags:
``` python
cluster_profile = "noDB" # ipcluster profile to use
in_folder = '/gpfs/exfel/exp/MID/201930/p900071/raw' # input folder, required
out_folder = '/gpfs/exfel/exp/MID/201930/p900070/usr/dark/runs_4' # output folder, required
sequence = 0 # sequence file to use
run = 466 # which run to read data from, required
karabo_id = "MID_EXP_EPIX-1" # karabo device ID
karabo_da = "DA01" # data aggregator
receiver_id = "RECEIVER" # inset for receiver devices
path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # the template to use to access data
h5path = '/INSTRUMENT/{}/DET/{}:daqOutput/data/image/pixels' # path in the HDF5 file to images
h5path_t = '/INSTRUMENT/{}/DET/{}:daqOutput/data/backTemp' # path to find temperature at
h5path_cntrl = '/CONTROL/{}/DET' # path to control data
use_dir_creation_date = True # use the run directory creation date as the constant creation time
cal_db_interface = "tcp://max-exfl016:8020" # calibration DB interface to use
cal_db_timeout = 300000 # timeout on calibration DB requests
db_output = False # output constants to the calibration database
local_output = True # output constants locally
number_dark_frames = 0 # number of images to be used; if set to 0, all available images are used
temp_limits = 5 # deviation limit for the Sensor Temperature operating condition
db_module = 'ePix100_M15' # detector module ID in the calibration database
bias_voltage = 200 # bias voltage
in_vacuum = False # detector operated in vacuum
fix_temperature = 290. # fix temperature to this value
```
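%% Cell type:markdown id: tags:
Note that `path_template` is formatted in two stages: the doubled braces `{{:05d}}` survive the first `.format` call, so the run number and data aggregator are filled in first and the sequence number afterwards. For example:
``` python
path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5'
fp_name = path_template.format(466, "DA01")  # 'RAW-R0466-DA01-S{:05d}.h5'
fp_name.format(0)                            # 'RAW-R0466-DA01-S00000.h5'
```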
%% Cell type:code id: tags:
``` python
import XFELDetAna.xfelprofiler as xprof
profiler = xprof.Profiler()
profiler.disable()
from XFELDetAna.util import env
env.iprofile = cluster_profile
import warnings
warnings.filterwarnings('ignore')
from XFELDetAna import xfelpycaltools as xcal
from XFELDetAna import xfelpyanatools as xana
from XFELDetAna.plotting.util import prettyPlotting
prettyPlotting = True
from XFELDetAna.xfelreaders import ChunkReader
from XFELDetAna.detectors.fastccd import readerh5 as fastccdreaderh5
from cal_tools.tools import get_dir_creation_date, save_const_to_h5, get_random_db_interface
from iCalibrationDB import (ConstantMetaData, Constants, Conditions, Detectors,
                            Versions)
from iCalibrationDB.detectors import DetectorTypes
import numpy as np
import h5py
import matplotlib.pyplot as plt
%matplotlib inline
h5path = h5path.format(karabo_id, receiver_id)
h5path_t = h5path_t.format(karabo_id, receiver_id)
h5path_cntrl = h5path_cntrl.format(karabo_id)
def nImagesOrLimit(nImages, limit):
    if limit == 0:
        return nImages
    else:
        return min(nImages, limit)
```
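%% Cell type:markdown id: tags:
With the defaults above, the formatted templates resolve to concrete device paths, e.g.:
``` python
karabo_id, receiver_id = "MID_EXP_EPIX-1", "RECEIVER"
'/INSTRUMENT/{}/DET/{}:daqOutput/data/image/pixels'.format(karabo_id, receiver_id)
# -> '/INSTRUMENT/MID_EXP_EPIX-1/DET/RECEIVER:daqOutput/data/image/pixels'
'/CONTROL/{}/DET'.format(karabo_id)
# -> '/CONTROL/MID_EXP_EPIX-1/DET'
```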
%% Cell type:code id: tags:
``` python
proposal = list(filter(None, in_folder.strip('/').split('/')))[-2]
file_loc = 'proposal:{} runs:{}'.format(proposal, run)
x = 708 # rows of the ePix100
y = 768 # columns of the ePix100
ped_dir = "{}/r{:04d}".format(in_folder, run)
fp_name = path_template.format(run, karabo_da).format(sequence)
filename = '{}/{}'.format(ped_dir, fp_name)
print("Reading data from: {}\n".format(filename))
print("Run number: {}".format(run))
print("HDF5 path: {}".format(h5path))
if use_dir_creation_date:
    creation_time = get_dir_creation_date(in_folder, run)
    print("Using {} as creation time".format(creation_time.isoformat()))
```
%% Cell type:code id: tags:
``` python
sensorSize = [x, y]
chunkSize = 100 # number of images to read per chunk
# Sensor area will be analysed according to blockSize
blockSize = [sensorSize[0] // 2, sensorSize[1] // 2]
xcal.defaultBlockSize = blockSize
cpuCores = 4 # specifies the number of running CPU cores
memoryCells = 1 # ePix100 has no memory cells
# Specifies the total number of images to process
nImages = fastccdreaderh5.getDataSize(filename, h5path)[0]
nImages = nImagesOrLimit(nImages, number_dark_frames)
print("\nNumber of dark images to analyze: ", nImages)
run_parallel = False
with h5py.File(filename, 'r') as f:
    integration_time = int(f['{}/CONTROL/expTime/value'.format(h5path_cntrl)][0])
    temperature = np.mean(f[h5path_t]) / 100.  # backTemp is stored in hundredths of a degree Celsius
    temperature_k = temperature + 273.15
    if fix_temperature != 0:
        temperature_k = fix_temperature
        print("Temperature is fixed!")
    print("Bias voltage is {} V".format(bias_voltage))
    print("Detector integration time is set to {}".format(integration_time))
    print("Mean temperature was {:0.2f} °C / {:0.2f} K".format(temperature,
                                                               temperature_k))
    print("Operated in vacuum: {} ".format(in_vacuum))
```
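%% Cell type:markdown id: tags:
As a worked example of the temperature handling (the raw `backTemp` values are divided by 100, i.e. read as hundredths of a degree Celsius; the numbers here are hypothetical):
``` python
back_temp_mean = 2330.                 # hypothetical mean of .../data/backTemp
temperature = back_temp_mean / 100.    # 23.30 °C
temperature_k = temperature + 273.15   # 296.45 K
fix_temperature = 290.
if fix_temperature != 0:
    temperature_k = fix_temperature    # 290.0 K overrides the measured value
```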
%% Cell type:code id: tags:
``` python
reader = ChunkReader(filename, fastccdreaderh5.readData,
                     nImages, chunkSize,
                     path=h5path,
                     pixels_x=sensorSize[0],
                     pixels_y=sensorSize[1], )
```
%% Cell type:code id: tags:
``` python
noiseCal = xcal.NoiseCalculator(sensorSize, memoryCells,
                                cores=cpuCores, blockSize=blockSize,
                                parallel=run_parallel)
histCalRaw = xcal.HistogramCalculator(sensorSize, bins=1000,
                                      range=[0, 10000], parallel=False,
                                      memoryCells=memoryCells,
                                      cores=cpuCores, blockSize=blockSize)
```
%% Cell type:code id: tags:
``` python
for data in reader.readChunks():
    dx = np.count_nonzero(data, axis=(0, 1))
    data = data[:, :, dx != 0]  # drop empty (all-zero) frames
    histCalRaw.fill(data)
    noiseCal.fill(data)  # fill calculators with data
constant_maps = {}
constant_maps['Offset'] = noiseCal.getOffset()  # produce offset map
constant_maps['Noise'] = noiseCal.get()  # produce noise map
noiseCal.reset()  # reset noise calculator
print("Initial maps were created")
```
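%% Cell type:markdown id: tags:
The filter at the top of the loop drops frames that contain no data at all (all-zero images). A small illustration of the same indexing trick:
``` python
import numpy as np

data = np.zeros((2, 2, 3))
data[..., 0] = 1.0                        # only the first frame carries signal
dx = np.count_nonzero(data, axis=(0, 1))  # non-zero pixels per frame: [4, 0, 0]
data = data[:, :, dx != 0]                # keeps frame 0 only -> shape (2, 2, 1)
```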
%% Cell type:code id: tags:
``` python
#**************OFFSET MAP HISTOGRAM***********#
ho, co = np.histogram(constant_maps['Offset'].flatten(), bins=700)
do = {'x': co[:-1],
      'y': ho,
      'y_err': np.sqrt(ho[:]),
      'drawstyle': 'bars',
      'color': 'cornflowerblue',
      }
fig = xana.simplePlot(do, figsize='1col', aspect=2,
                      x_label='Offset (ADU)',
                      y_label="Counts", y_log=True,
                      )
#*****NOISE MAP HISTOGRAM FROM THE OFFSET CORRECTED DATA*******#
hn, cn = np.histogram(constant_maps['Noise'].flatten(), bins=200)
dn = {'x': cn[:-1],
      'y': hn,
      'y_err': np.sqrt(hn[:]),
      'drawstyle': 'bars',
      'color': 'cornflowerblue',
      }
fig = xana.simplePlot(dn, figsize='1col', aspect=2,
                      x_label='Noise (ADU)',
                      y_label="Counts",
                      y_log=True)
#**************HEAT MAPS*******************#
fig = xana.heatmapPlot(constant_maps['Offset'][:, :, 0],
                       x_label='Columns', y_label='Rows',
                       lut_label='Offset (ADU)',
                       x_range=(0, y),
                       y_range=(0, x), vmin=1000, vmax=4000)
fig = xana.heatmapPlot(constant_maps['Noise'][:, :, 0],
                       x_label='Columns', y_label='Rows',
                       lut_label='Noise (ADU)',
                       x_range=(0, y),
                       y_range=(0, x), vmax=2 * np.mean(constant_maps['Noise']))
```
%% Cell type:code id: tags:
``` python
# Save constants to DB
dclass = "ePix100"
cal_db_interface = get_random_db_interface(cal_db_interface)
for const_name in constant_maps.keys():
    metadata = ConstantMetaData()
    det = getattr(Constants, dclass)
    const = getattr(det, const_name)()
    const.data = constant_maps[const_name].data
    metadata.calibration_constant = const
    # set the operating condition
    dcond = Conditions.Dark
    condition = getattr(dcond, dclass)(bias_voltage=bias_voltage,
                                       integration_time=integration_time,
                                       temperature=temperature_k,
                                       in_vacuum=in_vacuum)
    for parm in condition.parameters:
        if parm.name == "Sensor Temperature":
            parm.lower_deviation = temp_limits
            parm.upper_deviation = temp_limits
    device = getattr(Detectors, db_module)
    metadata.detector_condition = condition
    # specify a version for this constant
    if creation_time is None:
        metadata.calibration_constant_version = Versions.Now(device=device)
    else:
        metadata.calibration_constant_version = Versions.Timespan(device=device,
                                                                  start=creation_time)
    metadata.calibration_constant_version.raw_data_location = file_loc
    if db_output:
        try:
            metadata.send(cal_db_interface, timeout=cal_db_timeout)
            print("Inject {} constants from {}".format(const_name,
                                                       metadata.calibration_constant_version.begin_at))
        except Exception as e:
            print(e)
    if local_output:
        save_const_to_h5(metadata, out_folder)
        print("Calibration constant {} is stored locally.".format(const))
```
%% Cell type:markdown id: tags:
# ePix Data Correction
Authors: Q. Tian S. Hauf, Version 1.0
The following notebook provides offset correction of images acquired with the ePix100 detector.
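%% Cell type:markdown id: tags:
Per pixel, the correction chain applied below is: subtract the dark offset, optionally divide by the relative gain, and optionally convert to photon counts. A minimal numpy sketch of the same arithmetic (illustrative values; the notebook performs these steps with the xcal calculators):
``` python
import numpy as np

raw = np.random.normal(3100, 10, size=(708, 768)).astype(np.float32)
offset_map = np.full((708, 768), 3000., dtype=np.float32)  # from dark characterization
relative_gain = np.ones((708, 768), dtype=np.float32)      # from gain calibration
photon_energy = 8.0                                        # keV

corrected = raw - offset_map    # offset correction -> ADU above pedestal
corrected /= relative_gain      # relative-gain correction -> keV (with a calibrated gain map)
if photon_energy > 0:
    corrected /= photon_energy  # -> number of photons
```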
%% Cell type:code id: tags:
``` python
cluster_profile = "noDB" # ipcluster profile to use
in_folder = "/gpfs/exfel/exp/MID/201930/p900071/raw" # input folder, required
out_folder = '/gpfs/exfel/data/scratch/karnem/test/' # output folder, required
sequences = [-1] # sequences to correct, set to -1 for all, range allowed
run = 466 # which run to read data from, required
karabo_id = "MID_EXP_EPIX-1" # karabo device ID
karabo_da = "DA01" # data aggregator
receiver_id = "RECEIVER" # inset for receiver devices
path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # the template to use to access data
h5path = '/INSTRUMENT/{}/DET/{}:daqOutput/data/image' # path in the HDF5 file to images
h5path_t = '/INSTRUMENT/{}/DET/{}:daqOutput/data/backTemp' # path to find temperature at
h5path_cntrl = '/CONTROL/{}/DET' # path to control data
use_dir_creation_date = True # date constants injected before directory creation time
cal_db_interface = "tcp://max-exfl016:8015#8025" # calibration DB interface to use
cal_db_timeout = 300000 # timeout on calibration DB requests
cpuCores = 4 # specifies the number of running CPU cores
chunk_size_idim = 1 # H5 chunking size of output data
overwrite = True # overwrite output folder
limit_images = 0 # process only first N images, 0 - process all
db_module = "ePix100_M15" # module ID in the database
sequences_per_node = 1 # number of sequence files per cluster node if run as SLURM job, set to 0 to not run SLURM parallel
bias_voltage = 200 # bias voltage
in_vacuum = False # detector operated in vacuum
fix_temperature = 290. # fix temperature to this value
gain_photon_energy = 9.0 # photon energy used for gain calibration
photon_energy = 8.0 # photon energy to calibrate in number of photons, 0 for calibration in keV
no_relative_gain = False # do not do gain correction
split_evt_primary_threshold = 7. # primary threshold for split event correction
split_evt_secondary_threshold = 5. # secondary threshold for split event correction
split_evt_mip_threshold = 1000. # minimum ionizing particle threshold
def balance_sequences(in_folder, run, sequences, sequences_per_node, karabo_da):
    from xfel_calibrate.calibrate import balance_sequences as bs
    return bs(in_folder, run, sequences, sequences_per_node, karabo_da)
```
%% Cell type:code id: tags:
``` python
import XFELDetAna.xfelprofiler as xprof
profiler = xprof.Profiler()
profiler.disable()
from XFELDetAna.util import env
env.iprofile = cluster_profile
import warnings
warnings.filterwarnings('ignore')
from XFELDetAna import xfelpycaltools as xcal
from XFELDetAna import xfelpyanatools as xana
from XFELDetAna.plotting.util import prettyPlotting
prettyPlotting = True
from XFELDetAna.xfelreaders import ChunkReader
from XFELDetAna.detectors.fastccd import readerh5 as fastccdreaderh5
import numpy as np
import h5py
import time
import copy
import os
from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions
from iCalibrationDB.detectors import DetectorTypes
from cal_tools.tools import get_dir_creation_date, get_constant_from_db
%matplotlib inline
if sequences[0] == -1:
    sequences = None
h5path = h5path.format(karabo_id, receiver_id)
h5path_t = h5path_t.format(karabo_id, receiver_id)
h5path_cntrl = h5path_cntrl.format(karabo_id)
plot_unit = 'ADU'
if not no_relative_gain:
    plot_unit = 'keV'
    if photon_energy > 0:
        plot_unit = '$\gamma$'
```
%% Cell type:code id: tags:
``` python
x = 708 # rows of the ePix100
y = 768 # columns of the ePix100
ped_dir = "{}/r{:04d}".format(in_folder, run)
fp_name = path_template.format(run, karabo_da)
fp_path = '{}/{}'.format(ped_dir, fp_name)
print("Reading from: {}".format(fp_path))
print("Run is: {}".format(run))
print("HDF5 path: {}".format(h5path))
print("Data is output to: {}".format(out_folder))
import datetime
creation_time = None
if use_dir_creation_date:
    creation_time = get_dir_creation_date(in_folder, run)
if creation_time:
    print("Using {} as creation time".format(creation_time.isoformat()))
```
%% Cell type:code id: tags:
``` python
sensorSize = [x, y]
chunkSize = 100 # number of images to read per chunk
blockSize = [sensorSize[0]//2, sensorSize[1]//2] # sensor area will be analysed according to blockSize
xcal.defaultBlockSize = blockSize
memoryCells = 1 # ePix has no memory cells
run_parallel = True
filename = fp_path.format(sequences[0] if sequences else 0)
with h5py.File(filename, 'r') as f:
    integration_time = int(f['{}/CONTROL/expTime/value'.format(h5path_cntrl)][0])
    temperature = np.mean(f[h5path_t]) / 100.
    temperature_k = temperature + 273.15
    if fix_temperature != 0:
        temperature_k = fix_temperature
        print("Temperature is fixed!")
    print("Bias voltage is {} V".format(bias_voltage))
    print("Detector integration time is set to {}".format(integration_time))
    print("Mean temperature was {:0.2f} °C / {:0.2f} K at beginning of run".format(temperature, temperature_k))
    print("Operated in vacuum: {} ".format(in_vacuum))
if not os.path.exists(out_folder):
    os.makedirs(out_folder)
elif not overwrite:
    raise AttributeError("Output path exists! Exiting")
```
%% Cell type:code id: tags:
``` python
dirlist = sorted(os.listdir(ped_dir))
file_list = []
total_sequences = 0
fsequences = []
for entry in dirlist:
    # only h5 files
    abs_entry = "{}/{}".format(ped_dir, entry)
    if os.path.isfile(abs_entry) and os.path.splitext(abs_entry)[1] == ".h5":
        if sequences is None:
            for seq in range(len(dirlist)):
                if path_template.format(run, karabo_da).format(seq) in abs_entry:
                    file_list.append(abs_entry)
                    total_sequences += 1
                    fsequences.append(seq)
        else:
            for seq in sequences:
                if path_template.format(run, karabo_da).format(seq) in abs_entry:
                    file_list.append(os.path.abspath(abs_entry))
                    total_sequences += 1
                    fsequences.append(seq)
sequences = fsequences
```
%% Cell type:code id: tags:
``` python
from IPython.display import HTML, display, Markdown, Latex
import tabulate
print("Processing a total of {} sequence files".format(total_sequences))
table = []
for k, f in enumerate(file_list):
    table.append((k, f))
if len(table):
    md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=["#", "file"])))
```
%% Cell type:markdown id: tags:
As a first step, dark maps have to be loaded.
%% Cell type:code id: tags:
``` python
dclass = "ePix100"
const_name = "Offset"
offsetMap = None
temp_limits = 5.
# Offset
det = getattr(Detectors, db_module)
dconstants = getattr(Constants, dclass)
condition = Conditions.Dark.ePix100(bias_voltage=bias_voltage,
                                    integration_time=integration_time,
                                    temperature=temperature_k,
                                    in_vacuum=in_vacuum)
for parm in condition.parameters:
    if parm.name == "Sensor Temperature":
        parm.lower_deviation = temp_limits
        parm.upper_deviation = temp_limits
offsetMap = get_constant_from_db(det,
                                 getattr(dconstants, const_name)(),
                                 condition,
                                 None,
                                 cal_db_interface,
                                 creation_time=creation_time,
                                 print_once=2,
                                 timeout=cal_db_timeout)
# Noise
const_name = "Noise"
condition = Conditions.Dark.ePix100(bias_voltage=bias_voltage,
                                    integration_time=integration_time,
                                    temperature=temperature_k,
                                    in_vacuum=in_vacuum)
noiseMap = get_constant_from_db(det,
                                getattr(dconstants, const_name)(),
                                condition,
                                None,
                                cal_db_interface,
                                creation_time=creation_time,
                                print_once=2,
                                timeout=cal_db_timeout)
# Gain
if not no_relative_gain:
    const_name = "RelativeGain"
    condition = Conditions.Illuminated.ePix100(photon_energy=gain_photon_energy,
                                               bias_voltage=bias_voltage,
                                               integration_time=integration_time,
                                               temperature=temperature_k,
                                               in_vacuum=in_vacuum)
    gainMap = get_constant_from_db(det,
                                   getattr(dconstants, const_name)(),
                                   condition,
                                   None,
                                   cal_db_interface,
                                   creation_time=creation_time,
                                   print_once=2,
                                   timeout=cal_db_timeout)
    if gainMap is None:
        print("Gain map requested, but not found")
        print("No gain correction will be applied")
        no_relative_gain = True
        plot_unit = 'ADU'
        gainMap = np.ones(sensorSize, np.float32)
else:
    gainMap = np.ones(sensorSize, np.float32)
```
%% Cell type:code id: tags:
``` python
#************************Calculators************************#
offsetCorrection = xcal.OffsetCorrection(sensorSize,
                                         offsetMap,
                                         nCells=memoryCells,
                                         cores=cpuCores, gains=None,
                                         blockSize=blockSize,
                                         parallel=run_parallel)
gainCorrection = xcal.RelativeGainCorrection(
    sensorSize,
    1. / gainMap[..., None],
    nCells=memoryCells,
    parallel=run_parallel,
    cores=cpuCores,
    blockSize=blockSize,
    gains=None)
```
%% Cell type:code id: tags:
``` python
#*****************Histogram Calculators******************#
histCalOffsetCor = xcal.HistogramCalculator(sensorSize,
                                            bins=1050,
                                            range=[-50, 1000], parallel=run_parallel,
                                            nCells=memoryCells,
                                            cores=cpuCores,
                                            blockSize=blockSize)
```
%% Cell type:markdown id: tags:
Applying corrections
%% Cell type:code id: tags:
``` python
histCalOffsetCor.debug()
offsetCorrection.debug()
gainCorrection.debug()
```
%% Cell type:code id: tags:
``` python
#************************Calculators************************#
commonModeBlockSize = [x//2, y//2]
commonModeAxisR = 'row'
cmCorrection = xcal.CommonModeCorrection([x, y],
                                         commonModeBlockSize,
                                         commonModeAxisR,
                                         nCells=memoryCells,
                                         noiseMap=noiseMap,
                                         runParallel=True,
                                         stats=True)
patternClassifier = xcal.PatternClassifier([x, y],
                                           noiseMap,
                                           split_evt_primary_threshold,
                                           split_evt_secondary_threshold,
                                           split_evt_mip_threshold,
                                           tagFirstSingles=0,
                                           nCells=memoryCells,
                                           cores=cpuCores,
                                           allowElongated=False,
                                           blockSize=[x, y],
                                           runParallel=True)
histCalCMCor = xcal.HistogramCalculator(sensorSize,
                                        bins=1050,
                                        range=[-50, 1000], parallel=run_parallel,
                                        nCells=memoryCells,
                                        cores=cpuCores,
                                        blockSize=blockSize)
histCalSECor = xcal.HistogramCalculator(sensorSize,
                                        bins=1050,
                                        range=[-50, 1000], parallel=run_parallel,
                                        nCells=memoryCells,
                                        cores=cpuCores,
                                        blockSize=blockSize)
```
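%% Cell type:markdown id: tags:
The split-event thresholds are expressed relative to the per-pixel noise map: as the zero-suppression step later in the notebook shows, values below `split_evt_primary_threshold * noiseMap` are discarded. A minimal sketch of that noise-relative cut (arrays illustrative):
``` python
import numpy as np

noise_map = np.full((708, 768, 1), 10., dtype=np.float32)  # ADU, from dark characterization
signal = np.random.normal(0, 10, size=(708, 768, 1)).astype(np.float32)

split_evt_primary_threshold = 7.
# Zero out pixels below the primary threshold, in units of their own noise.
signal[signal < split_evt_primary_threshold * noise_map] = 0
```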
%% Cell type:code id: tags:
``` python
cmCorrection.debug()
patternClassifier.debug()
histCalCMCor.debug()
histCalSECor.debug()
```
%% Cell type:code id: tags:
``` python
def copy_and_sanitize_non_cal_data(infile, outfile, h5base):
    """ Copy and sanitize data in `infile` that is not touched by `correctEPIX` """
    if h5base.startswith("/"):
        h5base = h5base[1:]
    dont_copy = ['pixels']
    dont_copy = [h5base+"/{}".format(do)
                 for do in dont_copy]
    def visitor(k, item):
        if k not in dont_copy:
            if isinstance(item, h5py.Group):
                outfile.create_group(k)
            elif isinstance(item, h5py.Dataset):
                group = str(k).split("/")
                group = "/".join(group[:-1])
                infile.copy(k, outfile[group])
    infile.visititems(visitor)
```
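%% Cell type:markdown id: tags:
The function walks the input file with h5py's `visititems` and mirrors everything except the raw `pixels` dataset, which is rewritten with corrected values in the next cell. Standalone usage would look like this (file names illustrative):
``` python
import h5py

with h5py.File('RAW-R0466-DA01-S00000.h5', 'r') as infile, \
     h5py.File('CORR-R0466-DA01-S00000.h5', 'w') as outfile:
    # Copies all groups/datasets except <h5path>/pixels into the output file.
    copy_and_sanitize_non_cal_data(infile, outfile, h5path)
```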
%% Cell type:code id: tags:
``` python
for k, f in enumerate(file_list):
    with h5py.File(f, 'r') as infile:
        out_fileb = "{}/{}".format(out_folder, f.split("/")[-1])
        out_file = out_fileb.replace("RAW", "CORR")
        #out_filed = out_fileb.replace("RAW", "CORR-SC")
        data = None
        with h5py.File(out_file, "w") as ofile:
            try:
                copy_and_sanitize_non_cal_data(infile, ofile, h5path)
                data = infile[h5path+"/pixels"][()]
                data = np.compress(np.any(data > 0, axis=(1, 2)), data, axis=0)  # drop empty frames
                if limit_images > 0:
                    data = data[:limit_images, ...]
                oshape = data.shape
                data = np.moveaxis(data, 0, 2)  # to (rows, columns, images) for the calculators
                ddset = ofile.create_dataset(h5path+"/pixels",
                                             oshape,
                                             chunks=(chunk_size_idim, oshape[1], oshape[2]),
                                             dtype=np.float32)
                data = offsetCorrection.correct(data.astype(np.float32))  # correct for the offset
                if not no_relative_gain:
                    data = gainCorrection.correct(data.astype(np.float32))  # correct for the gain
                    if photon_energy > 0:
                        data /= photon_energy  # convert to number of photons
                histCalOffsetCor.fill(data)
                ddset[...] = np.moveaxis(data, 2, 0)  # back to (images, rows, columns)
            except Exception as e:
                print("Couldn't calibrate data in {}: {}".format(f, e))
        if False:  # common-mode and split-event branches are currently disabled
            with h5py.File(out_file, "a") as ofiled:
                try:
                    #copy_and_sanitize_non_cal_data(infile, ofiled, h5path)
                    ddsetcm = ofiled.create_dataset(h5path+"/pixels_cm",
                                                    oshape,
                                                    chunks=(chunk_size_idim, oshape[1], oshape[2]),
                                                    dtype=np.float32)
                    ddsetc = ofiled.create_dataset(h5path+"/pixels_classified",
                                                   oshape,
                                                   chunks=(chunk_size_idim, oshape[1], oshape[2]),
                                                   dtype=np.float32, compression="gzip")
                    ddsetp = ofiled.create_dataset(h5path+"/patterns",
                                                   oshape,
                                                   chunks=(chunk_size_idim, oshape[1], oshape[2]),
                                                   dtype=np.int32, compression="gzip")
                    data = cmCorrection.correct(data)  # correct for the row common mode
                    histCalCMCor.fill(data)
                    ddsetcm[...] = np.moveaxis(data, 2, 0)
                    data, patterns = patternClassifier.classify(data)
                    data[data < (split_evt_primary_threshold*noiseMap)] = 0
                    ddsetc[...] = np.moveaxis(data, 2, 0)
                    ddsetp[...] = np.moveaxis(patterns, 2, 0)
                    data[patterns != 100] = np.nan  # keep only single (isolated) events
                    histCalSECor.fill(data)
                except Exception as e:
                    print("Couldn't calibrate data in {}: {}".format(f, e))
```
%% Cell type:code id: tags:
``` python
ho, eo, co, so = histCalOffsetCor.get()
d = [{'x': co,
      'y': ho,
      'y_err': np.sqrt(ho[:]),
      'drawstyle': 'steps-mid',
      'errorstyle': 'bars',
      'errorcoarsing': 2,
      'label': 'Offset corr.'
      },
     ]
if False:  # only filled when the common-mode/split-event branch above is enabled
    ho, eo, co, so = histCalCMCor.get()
    d.append({'x': co,
              'y': ho,
              'y_err': np.sqrt(ho[:]),
              'drawstyle': 'steps-mid',
              'errorstyle': 'bars',
              'errorcoarsing': 2,
              'label': 'CM corr.'
              })
    ho, eo, co, so = histCalSECor.get()
    d.append({'x': co,
              'y': ho,
              'y_err': np.sqrt(ho[:]),
              'drawstyle': 'steps-mid',
              'errorstyle': 'bars',
              'errorcoarsing': 2,
              'label': 'Single split events'
              })
fig = xana.simplePlot(d, aspect=1, x_label='Energy ({})'.format(plot_unit),
                      y_label='Number of occurrences', figsize='2col',
                      y_log=True, x_range=(-50, 500),
                      legend='top-center-frame-2col')
```
%% Cell type:markdown id: tags:
## Mean Image of last Sequence ##
%% Cell type:code id: tags:
``` python
fig = xana.heatmapPlot(np.nanmedian(data, axis=2),
                       x_label='Columns', y_label='Rows',
                       lut_label='Signal ({})'.format(plot_unit),
                       x_range=(0, y),
                       y_range=(0, x), vmin=-50, vmax=50)
```
%% Cell type:markdown id: tags:
## Single Shot of last Sequence ##
%% Cell type:code id: tags:
``` python
fig = xana.heatmapPlot(data[..., 0],
                       x_label='Columns', y_label='Rows',
                       lut_label='Signal ({})'.format(plot_unit),
                       x_range=(0, y),
                       y_range=(0, x), vmin=-50, vmax=50)
```
%% Cell type:code id: tags:
``` python
```