Commit 72eb7a35 authored by Karim Ahmed

Merge branch 'fix/get_creation_time_from_oldest_file' into 'master'

get creation time for a file and remove recursive prints

See merge request detectors/pycalibration!386
parents 7af03110 a0677ca3
@@ -264,13 +264,15 @@ def get_dir_creation_date(directory: str, run: int,
     ntries = 100
     while ntries > 0:
         try:
-            dates = []
-            for f in directory.glob('*.h5'):
-                with h5py.File(f, 'r') as fin:
-                    cdate = fin['METADATA/creationDate'][0].decode()
-                    cdate = datetime.datetime.strptime(cdate, "%Y%m%dT%H%M%SZ")
-                    dates.append(cdate)
-            return min(dates)
+            rfiles = list(directory.glob('*.h5'))
+            rfiles.sort(key=path.getmtime)
+            # get creation time for oldest file,
+            # as creation time between run files
+            # should be different only within few seconds
+            with h5py.File(rfiles[0], 'r') as fin:
+                cdate = fin['METADATA/creationDate'][0].decode()
+                cdate = datetime.datetime.strptime(cdate, "%Y%m%dT%H%M%SZ")
+                return cdate
         except (IOError, ValueError):
             ntries -= 1
         except KeyError:  # The files are here, but it's an older dataset
...
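For reference, a minimal standalone sketch of the changed lookup (the function name is hypothetical; it assumes a `pathlib.Path` run directory and the `METADATA/creationDate` layout shown in the hunk above, and it omits the retry loop and error handling of the real `get_dir_creation_date`):

``` python
import datetime
from os import path
from pathlib import Path

import h5py


def creation_date_from_oldest_file(run_dir: Path) -> datetime.datetime:
    # Sort the raw files by modification time and read only the oldest one:
    # creation times of files within one run differ by at most a few seconds.
    rfiles = sorted(run_dir.glob('*.h5'), key=path.getmtime)
    with h5py.File(rfiles[0], 'r') as fin:
        cdate = fin['METADATA/creationDate'][0].decode()
    return datetime.datetime.strptime(cdate, "%Y%m%dT%H%M%SZ")
```

Reading a single file instead of looping over every sequence file is the point of the change: the per-file HDF5 opens and the recursive prints are avoided, while the result stays effectively the same.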
%% Cell type:markdown id: tags:

# AGIPD Retrieving Constants Pre-correction #

Author: European XFEL Detector Group, Version: 1.0

Retrieving Required Constants for Offline Calibration of the AGIPD Detector

%% Cell type:code id: tags:

``` python
cluster_profile = "noDB"
in_folder = "/gpfs/exfel/exp/SPB/202030/p900119/raw" # the folder to read data from, required
out_folder = "/gpfs/exfel/data/scratch/ahmedk/test/AGIPD_" # the folder to output to, required
sequences = [-1] # sequences to correct, set to -1 for all, range allowed
modules = [-1] # modules to correct, set to -1 for all, range allowed
run = 80 # runs to process, required
karabo_id = "SPB_DET_AGIPD1M-1" # karabo id
karabo_da = ['-1'] # a list of data aggregator names, default [-1] selects all data aggregators
path_template = 'RAW-R{:04d}-{}-S{:05d}.h5' # the template to use to access data
h5path_ctrl = '/CONTROL/{}/MDL/FPGA_COMP_TEST' # path to control information
karabo_id_control = "SPB_IRU_AGIPD1M1" # karabo-id for control device
karabo_da_control = 'AGIPD1MCTRL00' # karabo DA for control information
use_dir_creation_date = True # use the creation date of the input dir for database queries
cal_db_interface = "tcp://max-exfl016:8015#8045" # the database interface to use
creation_date_offset = "00:00:00" # add an offset to creation date, e.g. to get different constants
calfile = "" # path to calibration file. Leave empty if all data should come from DB
nodb = False # if set, only file-based constants will be used
mem_cells = 0 # number of memory cells used, set to 0 to automatically infer
bias_voltage = 300
acq_rate = 0. # the detector acquisition rate, use 0 to try to auto-determine
gain_setting = 0.1 # the gain setting, use 0.1 to try to auto-determine
photon_energy = 9.2 # photon energy in keV
max_cells_db_dark = 0 # set to a value different than 0 to use this value for dark data DB queries
max_cells_db = 0 # set to a value different than 0 to use this value for DB queries

# Correction Booleans
only_offset = False # apply only the Offset correction; if True, no other correction is applied
rel_gain = False # do relative gain correction based on PC data
xray_gain = True # do relative gain correction based on xray data
blc_noise = False # if set, baseline correction via noise peak location is attempted
blc_stripes = False # if set, baseline correction via stripes is attempted
blc_hmatch = False # if set, baseline correction via histogram matching is attempted
match_asics = False # if set, inner ASIC borders are matched to the same signal level
adjust_mg_baseline = False # adjust medium gain baseline to match highest high gain value
```
%% Cell type:code id: tags:

``` python
# Fill dictionaries comprising bools and arguments for correction and data analysis
# Here the hierarchy and dependencies of the correction booleans are defined
corr_bools = {}

# offset is at the bottom of the AGIPD correction pyramid.
corr_bools["only_offset"] = only_offset

# Don't apply any other corrections if only_offset is requested
if not only_offset:
    corr_bools["adjust_mg_baseline"] = adjust_mg_baseline
    corr_bools["rel_gain"] = rel_gain
    corr_bools["xray_corr"] = xray_gain
    corr_bools["blc_noise"] = blc_noise
    corr_bools["blc_hmatch"] = blc_hmatch
```
%% Cell type:code id: tags:

``` python
import sys
from collections import OrderedDict

# make sure a cluster is running with ipcluster start --n=32, give it a while to start
import os
import h5py
import numpy as np
import matplotlib
matplotlib.use("agg")
import matplotlib.pyplot as plt

from ipyparallel import Client

print(f"Connecting to profile {cluster_profile}")
view = Client(profile=cluster_profile)[:]
view.use_dill()

from iCalibrationDB import Constants, Conditions, Detectors
from cal_tools.tools import (map_modules_from_folder, get_dir_creation_date)
from cal_tools.agipdlib import get_gain_setting
from dateutil import parser
from datetime import timedelta
```
%% Cell type:code id: tags:

``` python
max_cells = mem_cells

creation_time = None
if use_dir_creation_date:
    creation_time = get_dir_creation_date(in_folder, run)
    offset = parser.parse(creation_date_offset)
    delta = timedelta(hours=offset.hour, minutes=offset.minute, seconds=offset.second)
    creation_time += delta
    print(f"Using {creation_time} as creation time")

if sequences[0] == -1:
    sequences = None

if in_folder[-1] == "/":
    in_folder = in_folder[:-1]
print(f"Outputting to {out_folder}")
os.makedirs(out_folder, exist_ok=True)

import warnings
warnings.filterwarnings('ignore')

from cal_tools.agipdlib import SnowResolution

melt_snow = False if corr_bools["only_offset"] else SnowResolution.NONE
```
%% Cell type:code id: tags:

``` python
control_fname = f'{in_folder}/r{run:04d}/RAW-R{run:04d}-{karabo_da_control}-S00000.h5'
h5path_ctrl = h5path_ctrl.format(karabo_id_control)

if gain_setting == 0.1:
    if creation_time.replace(tzinfo=None) < parser.parse('2020-01-31'):
        print("Set gain-setting to None for runs taken before 2020-01-31")
        gain_setting = None
    else:
        try:
            gain_setting = get_gain_setting(control_fname, h5path_ctrl)
        except Exception as e:
            print(f'ERROR: while reading gain setting from: \n{control_fname}')
            print(e)
            print("Set gain setting to 0")
            gain_setting = 0

print(f"Gain setting: {gain_setting}")
print(f"Detector in use is {karabo_id}")

# Extracting Instrument string
instrument = karabo_id.split("_")[0]
# Evaluate detector instance for mapping
if instrument == "SPB":
    dinstance = "AGIPD1M1"
    nmods = 16
elif instrument == "MID":
    dinstance = "AGIPD1M2"
    nmods = 16
# TODO: Remove DETLAB
elif instrument == "HED" or instrument == "DETLAB":
    dinstance = "AGIPD500K"
    nmods = 8

print(f"Instrument {instrument}")
print(f"Detector instance {dinstance}")

if karabo_da[0] == '-1':
    if modules[0] == -1:
        modules = list(range(nmods))
    karabo_da = ["AGIPD{:02d}".format(i) for i in modules]
else:
    modules = [int(x[-2:]) for x in karabo_da]
```
%% Cell type:code id: tags:

``` python
# set everything up filewise
print(f"Checking the files before retrieving constants")
mmf = map_modules_from_folder(in_folder, run, path_template, karabo_da, sequences)
mapped_files, mod_ids, total_sequences, sequences_qm, _ = mmf
```

%% Cell type:markdown id: tags:

## Retrieve Constants ##

%% Cell type:code id: tags:

``` python
from functools import partial
import yaml


def retrieve_constants(karabo_id, bias_voltage, max_cells, acq_rate,
                       gain_setting, photon_energy, only_dark, nodb_with_dark,
                       cal_db_interface, creation_time,
                       corr_bools, pc_bools, inp):
    """
    Retrieve constants for each module in parallel and produce a dictionary
    with the creation-time and constant file path.

    :param karabo_id: (STR) Karabo ID
    :param bias_voltage: (FLOAT) Bias Voltage
    :param max_cells: (INT) Memory cells
    :param acq_rate: (FLOAT) Acquisition Rate
    :param gain_setting: (FLOAT) Gain setting
    :param photon_energy: (FLOAT) Photon Energy
    :param only_dark: (BOOL) only retrieve dark constants
    :param nodb_with_dark: (BOOL) no constant retrieval even for dark
    :param cal_db_interface: (STR) the database interface port
    :param creation_time: (STR) raw data creation time
    :param corr_bools: (DICT) A dictionary with bools for applying requested corrections
    :param pc_bools: (LIST) list of bools to retrieve pulse capacitor constants
    :param inp: (LIST) input for the parallel cluster of the partial function
    :return:
        mdata_dict: (DICT) dictionary with the metadata for the retrieved constants
        dev.device_name: (STR) device name
    """
    import numpy as np
    import sys
    import traceback

    from cal_tools.agipdlib import get_num_cells, get_acq_rate
    from cal_tools.agipdutils import assemble_constant_dict
    from cal_tools.tools import get_from_db
    from iCalibrationDB import Constants, Conditions, Detectors

    err = None
    qm_files, qm, dev, idx = inp
    # get number of memory cells from a sequence file with image data
    for f in qm_files:
        if not max_cells:
            max_cells = get_num_cells(f, karabo_id, idx)
        if max_cells is None:
            if f != qm_files[-1]:
                continue
            else:
                raise ValueError(f"No raw images found for {qm} for all sequences")
        else:
            cells = np.arange(max_cells)
            # get out of the loop,
            # if max_cells is successfully calculated.
            break

    if acq_rate == 0.:
        acq_rate = get_acq_rate((f, karabo_id, idx))
-    print(f"Set memory cells to {max_cells}")
-    print(f"Set acquistion rate cells to {acq_rate} MHz")

    # avoid retrieving constants, if requested.
    if not nodb_with_dark:
        const_dict = assemble_constant_dict(corr_bools, pc_bools, max_cells, bias_voltage,
                                            gain_setting, acq_rate, photon_energy,
                                            beam_energy=None, only_dark=only_dark)

        # Retrieve multiple constants through an input dictionary
        # to return a dict of useful metadata.
        mdata_dict = dict()
        for cname, cval in const_dict.items():
            try:
                condition = getattr(Conditions, cval[2][0]).AGIPD(**cval[2][1])
                co, mdata = \
                    get_from_db(dev, getattr(Constants.AGIPD, cname)(),
                                condition, getattr(np, cval[0])(cval[1]),
-                               cal_db_interface, creation_time, meta_only=True)
+                               cal_db_interface, creation_time, meta_only=True, verbosity=0)
                mdata_const = mdata.calibration_constant_version
                # saving metadata in a dict
                mdata_dict[cname] = dict()
                # check if constant was successfully retrieved.
                if mdata.comm_db_success:
                    mdata_dict[cname]["file-path"] = f"{mdata_const.hdf5path}" \
                                                     f"{mdata_const.filename}"
                    mdata_dict[cname]["creation-time"] = f"{mdata_const.begin_at}"
                else:
                    mdata_dict[cname]["file-path"] = const_dict[cname][:2]
                    mdata_dict[cname]["creation-time"] = None
            except Exception as e:
                err = f"Error: {e}, Traceback: {traceback.format_exc()}"
                print(err)

    return qm, mdata_dict, dev.device_name, acq_rate, max_cells, err


pc_bools = [corr_bools.get("rel_gain"),
            corr_bools.get("adjust_mg_baseline"),
            corr_bools.get('blc_noise'),
            corr_bools.get('blc_hmatch'),
            corr_bools.get('blc_stripes'),
            melt_snow]

inp = []
only_dark = False
nodb_with_dark = False
if not nodb:
    only_dark = (calfile != "")
if calfile != "" and not corr_bools["only_offset"]:
    nodb_with_dark = nodb

# A dict to connect virtual device
# to actual device name.
for i in modules:
    qm = f"Q{i//4+1}M{i%4+1}"
    if qm in mapped_files and not mapped_files[qm].empty():
        device = getattr(getattr(Detectors, dinstance), qm)
        qm_files = [str(mapped_files[qm].get()) for _ in range(mapped_files[qm].qsize())]
    else:
        print(f"Skipping {qm}")
        continue
    inp.append((qm_files, qm, device, i))

p = partial(retrieve_constants, karabo_id, bias_voltage, max_cells,
            acq_rate, gain_setting, photon_energy, only_dark, nodb_with_dark,
            cal_db_interface, creation_time,
            corr_bools, pc_bools)

results = view.map_sync(p, inp)
#results = list(map(p, inp))

mod_dev = dict()
mdata_dict = dict()
for r in results:
    if r:
        qm, md_dict, dname, acq_rate, max_cells, err = r
        mod_dev[dname] = {"mod": qm, "err": err}
        if err:
            print(f"Error for module {qm}: {err}")
        mdata_dict[dname] = md_dict

# check if it is requested not to retrieve any constants from the database
if not nodb_with_dark:
    with open(f"{out_folder}/retrieved_constants.yml", "w") as outfile:
        yaml.safe_dump(mdata_dict, outfile)
    print("\nRetrieved constants for modules: ",
          f"{[', '.join([f'Q{x//4+1}M{x%4+1}' for x in modules])]}")
    print(f"Operating conditions are:\n• Bias voltage: {bias_voltage}\n• Memory cells: {max_cells}\n"
          f"• Acquisition rate: {acq_rate}\n• Gain setting: {gain_setting}\n• Photon Energy: {photon_energy}\n")
    print(f"Constant metadata is saved in retrieved_constants.yml\n")
else:
    print("No constants were retrieved as calibrated files will be used.")
```
%% Cell type:code id: tags:

``` python
print("Constants are retrieved with creation time: ")
i = 0
when = dict()
to_store = []

for dname, dinfo in mod_dev.items():
    print(dinfo["mod"], ":")
    line = [dinfo["mod"]]
    if dname in mdata_dict:
        for cname, mdata in mdata_dict[dname].items():
            if hasattr(mdata["creation-time"], 'strftime'):
                mdata["creation-time"] = mdata["creation-time"].strftime('%y-%m-%d %H:%M')
            print(f'{cname:.<12s}', mdata["creation-time"])
    # Store a few timestamps if they exist
    # Add NA to keep the array structure
    for cname in ['Offset', 'SlopesPC', 'SlopesFF']:
        if not dname in mdata_dict or dinfo["err"]:
            line.append('Err')
        else:
            if cname in mdata_dict[dname]:
                if mdata_dict[dname][cname]["creation-time"]:
                    line.append(mdata_dict[dname][cname]["creation-time"])
                else:
                    line.append('NA')
            else:
                line.append('NA')
    to_store.append(line)
    i += 1

if sequences:
    seq_num = sequences[0]
else:
    # if sequences[0] changed to None as it was -1
    seq_num = 0

with open(f"{out_folder}/retrieved_constants.yml", "r") as fyml:
    time_summary = yaml.safe_load(fyml)
time_summary.update({"time-summary": {
    "SAll": to_store
}})
with open(f"{out_folder}/retrieved_constants.yml", "w") as fyml:
    yaml.safe_dump(time_summary, fyml)
```
%% Cell type:code id: tags:

``` python
```

...