[PNCCD] [DARK]+[CORRECT] Read voltage, gain and temperature

Merged Karim Ahmed requested to merge feat/extract_ctrl_data into master
2 files changed: +108 −80
%% Cell type:markdown id: tags:
# pnCCD Dark Characterization
Author: DET Group, modified by Kiana Setoodehnia, Version: 2.0
The following notebook provides dark image analysis of the pnCCD detector. Dark characterization evaluates offset and noise of the detector and gives information about bad pixels.
On the first iteration, the offset and noise maps are generated, and an initial bad pixel map is obtained from the offset and initial noise maps.
On the second iteration, the noise map is corrected for common mode. A second bad pixel map is generated from the offset map and the offset-and-common-mode-corrected noise map. Then, the hole in the center of the CCD is added to the second bad pixel map.
On the third and final iteration, the pixels flagged in the above-mentioned bad pixel map are masked, and possible events due to cosmic rays are found and masked as well. The data are then offset and common mode corrected once more, and the final noise and bad pixel maps are generated.
These resulting maps, together with the offset map, are saved as .h5 files to a local path for later use. These dark constants are not automatically sent to the database.
%% Cell type:code id: tags:
``` python
cluster_profile = "noDB" # ipcluster profile to use
in_folder = "/gpfs/exfel/exp/SQS/202031/p900166/raw" # input folder, required
out_folder = '/gpfs/exfel/data/scratch/ahmedk/test/pnccd' # output folder, required
sequence = 0 # sequence file to use
run = 34 # which run to read data from, required
db_module = "PnCCD1"
karabo_da = ['PNCCD01'] # data aggregators
karabo_da_control = "PNCCD02" # file inset for control data
karabo_id = "SQS_NQS_PNCCD1MP" # karabo prefix of PNCCD devices
receiver_id = "PNCCD_FMT-0" # inset for receiver devices
path_template = 'RAW-R{:04d}-{}-S{{:05d}}.h5' # the template to use to access data
h5path = '/INSTRUMENT/{}/CAL/{}:output/data/image/' # path in the HDF5 file the data is at
h5path_ctrl = '/CONTROL/{}/CTRL/TCTRL'
# for database time derivation:
use_dir_creation_date = True # use dir creation date as data production reference date
cal_db_interface = "tcp://max-exfl016:8021" # calibration DB interface to use
cal_db_timeout = 300000 # timeout on caldb requests
db_output = False # if True, the notebook sends dark constants to the calibration database
local_output = True # if True, the notebook saves dark constants locally
creation_time = "" # To overwrite the measured creation_time. Required Format: YYYY-MM-DD HR:MN:SC.00 e.g. 2019-07-04 11:02:41.00
number_dark_frames = 0 # number of images to be used, if set to 0 all available images are used
chunkSize = 100 # number of images to read per chunk
fix_temperature = 0. # fix temperature in K; set to 0. to use the value from slow data
gain = 1 # the detector's gain setting; it is overwritten by the value read from slow data
bias_voltage = 0. # the detector's bias voltage; set to 0. to use the value from slow data
integration_time = 70 # detector's integration time
commonModeAxis = 0 # axis along which common mode will be calculated (0: along rows, 1: along columns)
commonModeBlockSize = [512, 512] # size of the detector in pixels for common mode calculations
sigmaNoise = 10. # pixels whose signal value exceeds sigmaNoise*noise will be considered as cosmics and are masked
bad_pixel_offset_sigma = 5. # any pixel whose offset is beyond 5 standard deviations, is a bad pixel
bad_pixel_noise_sigma = 5. # any pixel whose noise is beyond 5 standard deviations, is a bad pixel
temp_limits = 5 # temperature limits in which calibration parameters are considered equal
run_parallel = True # for parallel computation
cpuCores = 40 # specifies the number of running cpu cores
```
%% Cell type:code id: tags:
``` python
import os
import copy
import datetime
import warnings
warnings.filterwarnings('ignore')
import h5py
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline
from prettytable import PrettyTable
from IPython.display import display, Markdown
from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions
from iCalibrationDB.detectors import DetectorTypes
import XFELDetAna.xfelprofiler as xprof
profiler = xprof.Profiler()
profiler.disable()
from XFELDetAna.util import env
env.iprofile = cluster_profile
from XFELDetAna import xfelpycaltools as xcal
from XFELDetAna import xfelpyanatools as xana
from XFELDetAna.plotting.util import prettyPlotting
prettyPlotting=True
from XFELDetAna.xfelreaders import ChunkReader
from XFELDetAna.detectors.fastccd import readerh5 as fastccdreaderh5
from cal_tools.tools import get_dir_creation_date, save_const_to_h5, get_random_db_interface
from cal_tools.enums import BadPixels
```
%% Cell type:code id: tags:
``` python
def nImagesOrLimit(nImages, limit):
    """Return nImages if limit is 0, otherwise cap the image count at limit."""
    if limit == 0:
        return nImages
    else:
        return min(nImages, limit)
```
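%% Cell type:markdown id: tags:
For example, with `number_dark_frames = 0` the helper keeps all available images; otherwise it caps the count at the requested limit. A quick illustrative check (hypothetical values):
%% Cell type:code id: tags:
``` python
assert nImagesOrLimit(2500, 0) == 2500 # 0 means: use all available images
assert nImagesOrLimit(2500, 1000) == 1000 # otherwise cap at number_dark_frames
```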
%% Cell type:code id: tags:
``` python
proposal = list(filter(None, in_folder.strip('/').split('/')))[-2]
file_loc = f'Proposal: {proposal}, Run: {run}'
print("File Location:", file_loc)
```
%% Cell type:code id: tags:
``` python
# Calibration Database Settings, and Some Initial Run Parameters & Paths:
display(Markdown('### Initial Settings'))
pixels_x = 1024 # rows of the pnCCD sensor to analyze
pixels_y = 1024 # columns of the pnCCD sensor to analyze
print(f"pnCCD size is: {pixels_x}x{pixels_y} pixels.")
ped_dir = "{}/r{:04d}".format(in_folder, run)
fp_name = path_template.format(run, karabo_da[0])
fp_path = '{}/{}'.format(ped_dir, fp_name)
filename = fp_path.format(sequence)
h5path = h5path.format(karabo_id, receiver_id)
# Output Folder Creation:
os.makedirs(out_folder, exist_ok=True)
# Run's creation time:
if creation_time:
    try:
        creation_time = datetime.datetime.strptime(creation_time, '%Y-%m-%d %H:%M:%S.%f')
    except Exception as e:
        print(f"creation_time value error: {e}. "
              "Use the format YYYY-MM-DD HR:MN:SC.ms, e.g. 2019-07-04 11:02:41.00\n")
        creation_time = None
        print("The given creation time won't be used.")
else:
    creation_time = None

if not creation_time and use_dir_creation_date:
    creation_time = get_dir_creation_date(in_folder, run)

print(f"Creation time: {creation_time}")
cal_db_interface = get_random_db_interface(cal_db_interface)
print('Calibration database interface: {}'.format(cal_db_interface))
print("Sending constants to the calibration database: {}".format(db_output))
print("HDF5 path to data: {}".format(h5path))
print("Reading data from: {}".format(filename))
print("Run number: {}".format(run))
```
%% Cell type:code id: tags:
``` python
# extract slow data
if karabo_da_control:
    ctrl_fname = os.path.join(ped_dir, path_template.format(run, karabo_da_control)).format(sequence)
    ctrl_path = h5path_ctrl.format(karabo_id)
    mdl_ctrl_path = f"/CONTROL/{karabo_id}/MDL/"

    with h5py.File(ctrl_fname, "r") as f:
        if bias_voltage == 0.:
            bias_voltage = abs(f[os.path.join(mdl_ctrl_path, "DAQ_MPOD/u0voltage/value")][0])
        gain = f[os.path.join(mdl_ctrl_path, "DAQ_GAIN/spNCCDGain/value")][0]
        if fix_temperature == 0.:
            fix_temperature = f[os.path.join(ctrl_path, "inputA/krdg/value")][0]
```
%% Cell type:code id: tags:
``` python
# Reading Parameters such as Detector Bias, Gain, etc. from the Data:
memoryCells = 1 # pnCCD has 1 memory cell
sensorSize = [pixels_x, pixels_y]
blockSize = [sensorSize[0]//2, sensorSize[1]//2] # the sensor area will be analysed according to blockSize
xcal.defaultBlockSize = blockSize
nImages = fastccdreaderh5.getDataSize(filename, h5path)[0] # total number of images to process
nImages = nImagesOrLimit(nImages, number_dark_frames)
profile = False
```
%% Cell type:code id: tags:
``` python
# Printing the Parameters Read from the Data File:
display(Markdown('### Detector Parameters'))
temperature_k = fix_temperature # used below for the calibration constant conditions
print(f"Bias voltage is {bias_voltage} V.")
print(f"Detector gain is set to {gain}.")
print(f"Detector integration time is set to {integration_time} ms.")
print(f"Using a fixed temperature of {temperature_k} K.")
print("Number of dark images to analyze:", nImages)
```
%% Cell type:code id: tags:
``` python
# Reading Files in Chunks:
# Chunk reader returns an iterator to access the data in the file within the ranges:
reader = ChunkReader(filename, fastccdreaderh5.readData, nImages, chunkSize, path=h5path,
                     pixels_x=pixels_x, pixels_y=pixels_y)
```
%% Cell type:code id: tags:
``` python
# Calculators:
# noiseCal is a noise map calculator, which internally also produces a per-pixel mean map, i.e. an offset map:
noiseCal = xcal.NoiseCalculator(sensorSize, memoryCells, cores=cpuCores, blockSize=blockSize,
                                runParallel=run_parallel)
```
%% Cell type:markdown id: tags:
### First Iteration
Characterizing dark images to create the dark maps (offset, noise, and bad pixel maps) is an iterative process. The initial offset and noise maps are produced from the raw dark data.
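%% Cell type:markdown id: tags:
Conceptually, the offset map is the per-pixel mean and the noise map the per-pixel standard deviation over the stack of dark frames. A minimal numpy sketch of this first iteration, using a synthetic `darks` stack as a stand-in for the real data (the notebook itself delegates this to `xcal.NoiseCalculator`):
%% Cell type:code id: tags:
``` python
# Illustrative sketch only; the actual maps are produced by noiseCal below.
rng = np.random.default_rng(0)
darks = rng.normal(10000, 50, size=(64, 64, 100)).astype(np.float32) # synthetic (rows, cols, frames)
offset_sketch = darks.mean(axis=2) # per-pixel mean over frames -> offset map
noise_sketch = darks.std(axis=2)   # per-pixel standard deviation -> noise map
```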
%% Cell type:code id: tags:
``` python
counter1 = 1 # To count how many "if data.shape[2] >= chunkSize" instances occur.
counter2 = 0 # To count how many "if data.shape[2] < chunkSize" instances occur.
chunkSize_new = 0 # See below
images = 0 # Total number of processed images

for data in reader.readChunks():
    data = data.astype(np.float32)
    dx = np.count_nonzero(data, axis=(0, 1))
    data = data[:, :, dx != 0]
    # Some sequences may have fewer than 500 frames in them. To find out how many images there are, we
    # temporarily change chunkSize to the number of frames in the last chunk of data:
    if data.shape[2] < chunkSize:
        chunkSize_new = data.shape[2]
        print("Number of images is less than chunkSize. chunkSize is temporarily changed to {}."
              .format(chunkSize_new))
        images = images + chunkSize_new
        counter2 += 1
    else:
        images = counter1*chunkSize + counter2*chunkSize_new
        counter1 += 1
    noiseCal.fill(data) # Filling the noise calculator with data

chunkSize = 100 # resetting chunkSize to its default value for the next sequence or data-chunk
print('A total number of {} images are processed.'.format(images))
```
%% Cell type:code id: tags:
``` python
offsetMap = noiseCal.getOffset() # Producing offset map
noiseMap = noiseCal.get() # Producing noise map
noiseCal.reset() # Resetting noise calculator
print("Initial maps are created.")
```
%% Cell type:markdown id: tags:
### Offset and Noise Maps prior to Common Mode Correction
In the following, histograms of the pnCCD offset and initial noise are plotted, together with heat maps of the offset and the initial (uncorrected) noise.
%% Cell type:code id: tags:
``` python
#************** OFFSET MAP HISTOGRAM ***********#
ho, co = np.histogram(offsetMap.flatten(), bins=2000) # ho = offset histogram; co = offset bin edges
do = {'x': co[:-1],
      'y': ho,
      #'y_err': np.sqrt(ho[:]), Markus thinks these error bars are not correctly calculated!
      'drawstyle': 'bars',
      'color': 'cornflowerblue',
      'label': 'Raw Pedestal (ADU)'
      }

fig = xana.simplePlot(do, figsize='1col', aspect=1, x_label='Raw Pedestal (ADU)', y_label="Counts",
                      title='Offset Histogram', x_range=(0, np.nanmax(offsetMap)), y_log=True)

# Calculating the mean and standard deviation for the above histogram:
mids = 0.5*(co[1:] + co[:-1])
mean = np.average(mids, weights=ho)
variance = np.average((mids - mean)**2, weights=ho)
std = np.sqrt(variance)

# Table of statistics on the raw signal:
t0 = PrettyTable()
t0.title = "Statistics on Raw Pedestal (Offset)"
t0.field_names = ["Mean", "Standard Deviation"]
t0.add_row(["{:0.3f} (ADU)".format(mean), "{:0.3f} (ADU)".format(std)])
print(t0, '\n')

#***** NOISE MAP HISTOGRAM FROM THE OFFSET CORRECTED DATA *******#
hn, cn = np.histogram(noiseMap.flatten(), bins=2000) # hn = noise histogram; cn = noise bin edges
dn = {'x': cn[:-1],
      'y': hn,
      #'y_err': np.sqrt(hn[:]),
      'drawstyle': 'bars',
      'color': 'cornflowerblue',
      'label': 'Noise (ADU)'
      }

fig = xana.simplePlot(dn, figsize='1col', aspect=1, x_label='Raw Noise (ADU)', y_label="Counts",
                      title='Noise Histogram', y_log=True, x_range=(0, 600))

#************** HEAT MAPS *******************#
fig = xana.heatmapPlot(offsetMap[:, :, 0], x_label='Column Number', y_label='Row Number', aspect=1,
                       x_range=(0, pixels_y), y_range=(0, pixels_x), vmin=6000, vmax=14500,
                       lut_label='Offset (ADU)', panel_x_label='Row Stat (ADU)',
                       panel_y_label='Column Stat (ADU)', title='Offset Map')

fig = xana.heatmapPlot(noiseMap[:, :, 0], x_label='Column Number', y_label='Row Number', aspect=1,
                       lut_label='Noise (ADU)', x_range=(0, pixels_y),
                       y_range=(0, pixels_x), vmax=2*np.mean(noiseMap), panel_x_label='Row Stat (ADU)',
                       panel_y_label='Column Stat (ADU)', title='Uncorrected Noise Map')
```
%% Cell type:markdown id: tags:
### Initial BadPixelMap
This is generated based on the offset and uncorrected noise maps.
%% Cell type:code id: tags:
``` python
bad_pixels = np.zeros(offsetMap.shape, np.uint32)
mnoffset = np.nanmedian(offsetMap)
stdoffset = np.nanstd(offsetMap)
bad_pixels[(offsetMap < mnoffset - bad_pixel_offset_sigma*stdoffset) |
           (offsetMap > mnoffset + bad_pixel_offset_sigma*stdoffset)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value

mnnoise = np.nanmedian(noiseMap)
stdnoise = np.nanstd(noiseMap)
bad_pixels[(noiseMap < mnnoise - bad_pixel_noise_sigma*stdnoise) |
           (noiseMap > mnnoise + bad_pixel_noise_sigma*stdnoise)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value

fig = xana.heatmapPlot(np.log2(bad_pixels[:, :, 0]),
                       x_label='Columns', y_label='Rows',
                       lut_label='Bad Pixel Value (ADU)',
                       x_range=(0, pixels_y),
                       y_range=(0, pixels_x), vmin=0, vmax=32,
                       panel_x_label='Row Stat (ADU)', panel_y_label='Column Stat (ADU)',
                       title='Initial Bad Pixels Map')
```
%% Cell type:code id: tags:
``` python
# Offset Correction:
offsetCorrection = xcal.OffsetCorrection(sensorSize, offsetMap, nCells=memoryCells, cores=cpuCores,
                                         gains=None, runParallel=run_parallel, blockSize=blockSize)

# Common Mode Correction:
# In this method, the median of all pixels that are read out at the same time along a row is subtracted
# from the signal in each pixel:
cmCorrection = xcal.CommonModeCorrection(sensorSize,
                                         commonModeBlockSize,
                                         commonModeAxis, parallel=run_parallel, dType=np.float32,
                                         stride=1, noiseMap=noiseMap.astype(np.float32), minFrac=0)
cmCorrection.debug()
```
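%% Cell type:markdown id: tags:
For reference, the row-wise common mode idea reduces to subtracting, per frame and per row, the median of the pixels that are read out together. A minimal sketch of that idea (illustrative only; it is not the `xcal.CommonModeCorrection` implementation, which additionally uses the noise map, stride, and block structure):
%% Cell type:code id: tags:
``` python
def common_mode_rows_sketch(frames):
    """Illustrative only. frames: (rows, columns, n_frames) array; returns a corrected copy."""
    row_median = np.nanmedian(frames, axis=1, keepdims=True) # median over columns: shape (rows, 1, n_frames)
    return frames - row_median
```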
%% Cell type:code id: tags:
``` python
# Histogram Calculators:
# The number of bins and bin ranges are based on the offset histogram. We set 1 ADU per bin and include some
# negative bins to ensure the offset and common mode corrections actually move the signal to zero:

# For offset corrected data:
histCalCorrected = xcal.HistogramCalculator(sensorSize, bins=1100, range=[-50, 1050], memoryCells=memoryCells,
                                            cores=cpuCores, gains=None, blockSize=blockSize)
# For common mode corrected data:
histCalCMCorrected = xcal.HistogramCalculator(sensorSize, bins=1100, range=[-50, 1050], memoryCells=memoryCells,
                                              cores=cpuCores, gains=None, blockSize=blockSize)
```
%% Cell type:markdown id: tags:
### Second Iteration
During the second iteration, the data are offset and common mode corrected to produce an offset-and-common-mode-corrected noise map. The common mode correction is calculated by subtracting out the median of all pixels that are read out at the same time along a row.
%% Cell type:code id: tags:
``` python
counter1 = 1 # To count how many "if data.shape[2] >= chunkSize" instances occur.
counter2 = 0 # To count how many "if data.shape[2] < chunkSize" instances occur.
chunkSize_new = 0 # See below
images = 0 # Total number of processed images

for data in reader.readChunks():
    data = data.astype(np.float32)
    dx = np.count_nonzero(data, axis=(0, 1))
    data = data[:, :, dx != 0]
    # Some sequences may have fewer than 500 frames in them. To find out how many images there are, we
    # temporarily change chunkSize to the number of frames in the last chunk of data:
    if data.shape[2] < chunkSize:
        chunkSize_new = data.shape[2]
        print("Number of images is less than chunkSize. chunkSize is temporarily changed to {}."
              .format(chunkSize_new))
        images = images + chunkSize_new
        counter2 += 1
    else:
        images = counter1*chunkSize + counter2*chunkSize_new
        counter1 += 1
    data -= offsetMap.data # Offset correction
    offset_corr_data = copy.copy(data) # Copied so it can be accessed for the table below
    histCalCorrected.fill(data)
    cellTable = np.zeros(data.shape[2], np.int32) # Common mode correction
    data = cmCorrection.correct(data.astype(np.float32), cellTable=cellTable)
    histCalCMCorrected.fill(data)
    noiseCal.fill(data) # Filling calculators with data

chunkSize = 100 # resetting chunkSize to its default value for the next sequence or data-chunk
print('A total number of {} images are processed.'.format(images))
print("Offset and common mode corrections are applied.")
```
%% Cell type:code id: tags:
``` python
noiseMapCM = noiseCal.get() # Producing common mode corrected noise map
ho, eo, co, so = histCalCorrected.get()
hCM, eCM, cCM, sCM = histCalCMCorrected.get()
```
%% Cell type:code id: tags:
``` python
# We are copying these so that we can replot them later after the calculators are reset:
ho_second_trial = copy.copy(ho)
co_second_trial = copy.copy(co)
hCM_second_trial = copy.copy(hCM)
cCM_second_trial = copy.copy(cCM)
```
%% Cell type:markdown id: tags:
### Signal after Offset and Common Mode Corrections
Here, the offset corrected signal is compared to the common-mode corrected signal (in the form of binned histograms).
%% Cell type:code id: tags:
``` python
do = [{'x': co,
       'y': ho,
       'drawstyle': 'steps-post',
       'color': 'cornflowerblue',
       'label': 'Offset Corrected Signal'
       },
      {'x': cCM,
       'y': hCM,
       'drawstyle': 'steps-post',
       'color': 'red',
       'label': 'Common Mode Corrected Signal'
       }]

fig = xana.simplePlot(do, figsize='2col', aspect=1, x_label='Corrected Pedestal (ADU)', y_label="Counts",
                      x_range=(-50, 1050), y_log=True, legend='top-right-frame-1col',
                      title='Corrected Pedestal - 2nd Iteration')

t0 = PrettyTable()
t0.title = "Comparison of the First Round of Corrections - Bad Pixels Not Excluded"
t0.field_names = ["After Offset Correction", "After Common Mode Correction"]
t0.add_row(["Mean: {:0.3f} (ADU)".format(np.mean(offset_corr_data)),
            "Mean: {:0.3f} (ADU)".format(np.mean(data))])
t0.add_row(["Median: {:0.3f} (ADU)".format(np.median(offset_corr_data)),
            "Median: {:0.3f} (ADU)".format(np.median(data))])
t0.add_row(["Standard Deviation: {:0.3f} (ADU)".format(np.std(offset_corr_data)),
            "Standard Deviation: {:0.3f} (ADU)".format(np.std(data))])
print(t0, '\n')
```
%% Cell type:markdown id: tags:
### Noise Map after Common Mode Correction
In the following, the effect of the common mode correction on the noise is shown. The common mode corrected noise map (noiseMapCM) is displayed and compared to the initial uncorrected noise map.
%% Cell type:code id: tags:
``` python
#***** NOISE MAP HISTOGRAM FROM THE COMMON MODE CORRECTED DATA *******#
hn, cn = np.histogram(noiseMap.flatten(), bins=2000, range=(0, 600))
hn_CM, cn_CM = np.histogram(noiseMapCM.flatten(), bins=2000, range=(0, 600))

dn = [{'x': cn[:-1],
       'y': hn,
       'drawstyle': 'steps-post',
       'color': 'blue',
       'label': 'Uncorrected'
       },
      {'x': cn_CM[:-1],
       'y': hn_CM,
       'drawstyle': 'steps-post',
       'color': 'crimson',
       'label': 'Common Mode Corrected'
       }]

fig = xana.simplePlot(dn, figsize='1col', aspect=1, x_label='Noise (ADU)', y_label="Counts",
                      x_range=(0, 600), y_log=True, legend='top-center-frame-1col',
                      title='Noise Comparison')

fig = xana.heatmapPlot(noiseMapCM[:, :, 0],
                       x_label='Columns', y_label='Rows',
                       lut_label='Common Mode Corrected Noise (ADU)',
                       x_range=(0, pixels_y),
                       y_range=(0, pixels_x), vmax=2*np.mean(noiseMapCM), panel_x_label='Row Stat (ADU)',
                       panel_y_label='Column Stat (ADU)', title='Common Mode Corrected Noise Map')
```
%% Cell type:code id: tags:
``` python
# Resetting the calculators:
noiseCal.reset() # resetting noise calculator
histCalCorrected.reset() # resetting histogram calculator
histCalCMCorrected.reset() # resetting histogram calculator
cmCorrection.reset()
```
%% Cell type:markdown id: tags:
### Second BadPixelMap
This is generated based on the offset map and offset-and-common-mode-corrected noise map.
%% Cell type:code id: tags:
``` python
mnoffset = np.nanmedian(offsetMap)
stdoffset = np.nanstd(offsetMap)
bad_pixels[(offsetMap < mnoffset - bad_pixel_offset_sigma*stdoffset) |
           (offsetMap > mnoffset + bad_pixel_offset_sigma*stdoffset)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value

mnnoise = np.nanmedian(noiseMapCM)
stdnoise = np.nanstd(noiseMapCM)
bad_pixels[(noiseMapCM < mnnoise - bad_pixel_noise_sigma*stdnoise) |
           (noiseMapCM > mnnoise + bad_pixel_noise_sigma*stdnoise)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value

fig = xana.heatmapPlot(np.log2(bad_pixels[:, :, 0]),
                       x_label='Columns', y_label='Rows',
                       lut_label='Bad Pixel Value (ADU)',
                       x_range=(0, pixels_y),
                       y_range=(0, pixels_x), vmin=0, vmax=32,
                       panel_x_label='Row Stat (ADU)', panel_y_label='Column Stat (ADU)',
                       title='Second Bad Pixels Map')
```
%% Cell type:markdown id: tags:
Here, we are masking the pixels in the hole (center of the pnCCD).
%% Cell type:code id: tags:
``` python
hole_mask = np.zeros(bad_pixels.shape, np.uint32)
hole_mask[483:539, 477:543, :] = BadPixels.NON_SENSITIVE.value

# Assigning this masked area as bad pixels:
bad_pixels = np.bitwise_or(bad_pixels, hole_mask)

fig = xana.heatmapPlot(np.log2(bad_pixels[:, :, 0]),
                       x_label='Columns', y_label='Rows',
                       lut_label='Bad Pixel Value (ADU)',
                       x_range=(0, pixels_y),
                       y_range=(0, pixels_x), vmin=0, vmax=32, panel_x_label='Row Stat (ADU)',
                       panel_y_label='Column Stat (ADU)', title='Second Bad Pixels Map with the Hole in Center')
```
%% Cell type:markdown id: tags:
### Third Iteration
During the third iteration, the last bad pixel map is applied to the data, masking the bad pixels. Possible cosmic ray events are also masked. Offset and common mode corrections are applied once again to the data, which now have bad pixels excluded, to produce the final corrected noise map.
%% Cell type:code id: tags:
``` python
event_threshold = sigmaNoise*np.median(noiseMapCM) # for exclusion of possible cosmic ray events
noiseCal.setBadPixelMask(bad_pixels != 0) # setting bad pixels map for the noise calculator
```
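%% Cell type:markdown id: tags:
The cosmic-ray veto applied inside the loop below amounts to a simple threshold cut: any corrected pixel value above `sigmaNoise` times the median noise is treated as a cosmic-ray hit rather than darkness and set to NaN, so it is ignored by the nan-aware statistics. A small illustrative sketch of that cut (hypothetical helper; the loop below applies the same logic in place):
%% Cell type:code id: tags:
``` python
def mask_cosmics_sketch(corrected, noise_map, sigma=sigmaNoise):
    """Illustrative only; returns a copy with suspected cosmic-ray events set to NaN."""
    threshold = sigma * np.median(noise_map)
    masked = corrected.copy()
    masked[masked > threshold] = np.nan # flag suspected cosmic-ray events
    return masked
```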
%% Cell type:code id: tags:
``` python
counter1 = 1 # To count how many "if data.shape[2] >= chunkSize" instances occur.
counter2 = 0 # To count how many "if data.shape[2] < chunkSize" instances occur.
chunkSize_new = 0 # See below
images = 0 # Total number of processed images

for data in reader.readChunks():
    data = data.astype(np.float32)
    dx = np.count_nonzero(data, axis=(0, 1))
    data = data[:, :, dx != 0]
    data_mask = np.repeat(bad_pixels, data.shape[2], axis=2) # Broadcasting bad_pixels to the shape of the data
    data[data_mask != 0] = np.nan # masking the data of bad pixels by setting their values to np.nan
    # Some sequences may have fewer than 500 frames in them. To find out how many images there are, we
    # temporarily change chunkSize to the number of frames in the last chunk of data:
    if data.shape[2] < chunkSize:
        chunkSize_new = data.shape[2]
        print("Number of images is less than chunkSize. chunkSize is temporarily changed to {}."
              .format(chunkSize_new))
        images = images + chunkSize_new
        counter2 += 1
    else:
        images = counter1*chunkSize + counter2*chunkSize_new
        counter1 += 1
    data_copy = offsetCorrection.correct(copy.copy(data))
    cellTable = np.zeros(data_copy.shape[2], np.int32)
    data_copy = cmCorrection.correct(data_copy.astype(np.float32), cellTable=cellTable)
    data[data_copy > event_threshold] = np.nan # discarding events caused by cosmic rays
    data = np.ma.MaskedArray(data, np.isnan(data), fill_value=0) # masking cosmics; default fill_value = 1e+20
    data -= offsetMap.data # Offset correction
    offset_corr_data2 = copy.copy(data) # Copied so it can be accessed for the table below
    histCalCorrected.fill(data)
    cellTable = np.zeros(data.shape[2], np.int32) # Common mode correction
    data = cmCorrection.correct(data.astype(np.float32), cellTable=cellTable)
    histCalCMCorrected.fill(data)
    noiseCal.fill(data) # Filling calculators with data

chunkSize = 100 # resetting chunkSize to its default value for the next sequence or data-chunk
print("Final iteration is performed.")
print('A total number of {} images are processed.'.format(images))
```
%% Cell type:code id: tags:
``` python
noiseMapCM_2nd = noiseCal.get().filled(0) # the masked pixels are filled with 0
ho2, eo2, co2, so2 = histCalCorrected.get()
hCM2, eCM2, cCM2, sCM2 = histCalCMCorrected.get()
```
%% Cell type:markdown id: tags:
### Plots of the Final Results
The following plot and table compare the offset and common mode corrected signal with and without the bad pixels.
%% Cell type:code id: tags:
``` python
do_Final = [{'x': co_second_trial,
             'y': ho_second_trial,
             'drawstyle': 'steps-post',
             'color': 'blue',
             'label': 'Offset Correction, Bad Pixels Included - 2nd Trial'
             },
            {'x': cCM_second_trial,
             'y': hCM_second_trial,
             'drawstyle': 'steps-post',
             'color': 'red',
             'ecolor': 'crimson',
             'label': 'Common Mode Correction, Bad Pixels Included - 2nd Trial'
             },
            {'x': co2,
             'y': ho2,
             'drawstyle': 'steps-post',
             'color': 'black',
             'label': 'Offset Correction, Bad Pixels Excluded - 3rd Trial'
             },
            {'x': cCM2,
             'y': hCM2,
             'drawstyle': 'steps-post',
             'color': 'orange',
             'label': 'Common Mode Correction, Bad Pixels Excluded - 3rd Trial'
             }]

fig = xana.simplePlot(do_Final, figsize='2col', aspect=1, x_label='Corrected Signal (ADU)',
                      y_label="Counts (Logarithmic Scale)", y_log=True, x_range=(0, 1100),
                      legend='top-right-frame-1col', title='Comparison of Corrections')

t0 = PrettyTable()
t0.title = "Comparison of the Second Round of Corrections - Bad Pixels Excluded"
t0.field_names = ["After Offset Correction", "After Common Mode Correction"]
t0.add_row(["Mean: {:0.3f} (ADU)".format(np.nanmean(offset_corr_data2)),
            "Mean: {:0.3f} (ADU)".format(np.nanmean(data))])
t0.add_row(["Median: {:0.3f} (ADU)".format(np.nanmedian(offset_corr_data2)),
            "Median: {:0.3f} (ADU)".format(np.nanmedian(data))])
t0.add_row(["Standard Deviation: {:0.3f} (ADU)".format(np.nanstd(offset_corr_data2)),
            "Standard Deviation: {:0.3f} (ADU)".format(np.nanstd(data))])
print(t0, '\n')
```
%% Cell type:markdown id: tags:
### Final NoiseMap
Finally, the offset and common mode corrected noise map, with bad pixels and possible cosmic ray events excluded (noiseMapCM_2nd), is displayed.
%% Cell type:code id: tags:
``` python
#***** NOISE MAP HISTOGRAM FROM THE COMMON MODE CORRECTED DATA *******#
hn_CM2, cn_CM2 = np.histogram(noiseMapCM_2nd.flatten(), bins=2000, range=(0, 600))

dn2 = [{'x': cn[:-1],
        'y': hn,
        'drawstyle': 'steps-post',
        'color': 'blue',
        'label': 'Uncorrected'
        },
       {'x': cn_CM[:-1],
        'y': hn_CM,
        'drawstyle': 'steps-post',
        'color': 'red',
        'label': 'Common Mode Corrected prior to Bad Pixels Exclusion'
        },
       {'x': cn_CM2[:-1],
        'y': hn_CM2,
        'drawstyle': 'steps-post',
        'color': 'black',
        'label': 'Common Mode Corrected after Bad Pixels Exclusion'
        }]

fig = xana.simplePlot(dn2, figsize='1col', aspect=1, x_label='Noise (ADU)', y_label="Counts", y_log=True,
                      legend='top-right-frame-1col',
                      title='Final Noise Comparison')

fig = xana.heatmapPlot(noiseMapCM_2nd[:, :, 0], aspect=1, x_label='Column Number', y_label='Row Number',
                       lut_label='Noise (ADU)', x_range=(0, pixels_y), y_range=(0, pixels_x),
                       title='Final Common Mode Corrected Noise\n(Bad Pixels Excluded)',
                       panel_x_label='Row Stat (ADU)', panel_y_label='Column Stat (ADU)')
```
%% Cell type:markdown id: tags:
### Final Bad Pixel Map
Lastly, the final bad pixel map is generated from the offset map and noiseMapCM_2nd (the offset and common mode corrected noise map after exclusion of the bad pixels).
%% Cell type:code id: tags:
``` python
mnoffset = np.nanmedian(offsetMap)
stdoffset = np.nanstd(offsetMap)
bad_pixels[(offsetMap < mnoffset - bad_pixel_offset_sigma*stdoffset) |
           (offsetMap > mnoffset + bad_pixel_offset_sigma*stdoffset)] = BadPixels.OFFSET_OUT_OF_THRESHOLD.value

mnnoise = np.nanmedian(noiseMapCM_2nd)
stdnoise = np.nanstd(noiseMapCM_2nd)
bad_pixels[(noiseMapCM_2nd < mnnoise - bad_pixel_noise_sigma*stdnoise) |
           (noiseMapCM_2nd > mnnoise + bad_pixel_noise_sigma*stdnoise)] = BadPixels.NOISE_OUT_OF_THRESHOLD.value

bad_pixels = np.bitwise_or(bad_pixels, hole_mask)

fig = xana.heatmapPlot(np.log2(bad_pixels[:, :, 0]),
                       x_label='Columns', y_label='Rows',
                       lut_label='Bad Pixel Value (ADU)',
                       x_range=(0, pixels_y),
                       y_range=(0, pixels_x), vmin=0, vmax=32, panel_x_label='Row Stat (ADU)',
                       panel_y_label='Column Stat (ADU)', title='Final Bad Pixels Map')
```
%% Cell type:code id: tags:
``` python
display(Markdown('### Statistics on the Bad Pixels'))
num_bad_pixels = np.count_nonzero(bad_pixels)
num_all_pixels = pixels_x*pixels_y
percentage_bad_pixels = num_bad_pixels*100/num_all_pixels
print("Number of bad pixels: {:0.0f}, i.e. {:0.2f}% of all pixels".format(num_bad_pixels, percentage_bad_pixels))
```
%% Cell type:markdown id: tags:
### Calibration Constants
Here, we send the dark constants to the database and/or save them locally.
%% Cell type:code id: tags:
``` python
constant_maps = {
    'Offset': offsetMap,
    'Noise': noiseMapCM_2nd,
    'BadPixelsDark': bad_pixels
}
for const_name in constant_maps.keys():
    metadata = ConstantMetaData()
    det = Constants.CCD(DetectorTypes.pnCCD)
    const = getattr(det, const_name)()
    const.data = constant_maps[const_name].data
    metadata.calibration_constant = const

    # Setting the operating condition:
    condition = Conditions.Dark.CCD(bias_voltage=bias_voltage,
                                    integration_time=integration_time,
                                    gain_setting=gain,
                                    temperature=temperature_k,
                                    pixels_x=pixels_x,
                                    pixels_y=pixels_y)

    for parm in condition.parameters:
        if parm.name == "Sensor Temperature":
            parm.lower_deviation = temp_limits
            parm.upper_deviation = temp_limits

    device = getattr(Detectors, db_module)
    metadata.detector_condition = condition

    # Specifying a version for this constant:
    if creation_time is None:
        metadata.calibration_constant_version = Versions.Now(device=device)
    else:
        metadata.calibration_constant_version = Versions.Timespan(device=device,
                                                                  start=creation_time)
    metadata.calibration_constant_version.raw_data_location = file_loc

    if db_output:
        try:
            metadata.send(cal_db_interface)
            print(f"Inject {const_name} constants from {metadata.calibration_constant_version.begin_at}")
        except Exception as e:
            print(e)

    if local_output:
        save_const_to_h5(metadata, out_folder)
        print(f"Calibration constant {const.name} is stored to {out_folder}.\n")

print("Generated constants with conditions:\n")
print(f"• bias_voltage: {bias_voltage}\n• integration_time: {integration_time}\n"
      f"• gain_setting: {gain}\n• temperature: {temperature_k}\n"
      f"• creation_time: {creation_time}\n")
```
%% Cell type:code id: tags:
``` python
```