Commit 3dc50e85 authored by Loïc Le Guyader

adds basic documentation to the class functions

parent 649fa4e7
1 merge request: !45 DSSC multiprocessing analysis
import multiprocessing
from time import strftime
import tempfile
@@ -19,6 +18,14 @@ import h5py
class DSSC:
def __init__(self, semester, proposal, topic='SCS'):
""" Create a DSSC object to process DSSC data.
inputs:
semester: semester string
proposal: proposal number string
topic: topic name, by default 'SCS'
"""
self.semester = semester
self.proposal = proposal
self.topic = topic
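As a minimal usage sketch (the import path and the semester/proposal strings below are assumptions for illustration, not taken from this commit), the class might be instantiated like this:

    from DSSC import DSSC              # assumed import path within the ToolBox
    data = DSSC('201901', '2222')      # illustrative semester and proposal strings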
@@ -40,6 +47,14 @@ class DSSC:
shutil.rmtree(self.tempdir)
def open_run(self, run_nr, isDark=False):
""" Open a run with karabo-data and prepare the virtual dataset for multiprocessing
inputs:
run_nr: the run number
isDark: a boolean to specify if the run is a dark run or not
"""
print('Opening run data with karabo-data')
self.run_nr = run_nr
self.xgm = None
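Continuing the sketch above, opening runs might look like this (the run numbers are placeholders):

    data.open_run(89)                  # a regular, non-dark run
    dark = DSSC('201901', '2222')
    dark.open_run(70, isDark=True)     # a dark run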
@@ -80,9 +95,12 @@
def define_scan(self, vname, bins):
"""
vname: variable name for the scan, can be a mnemonic string from ToolBox
or a dictionnary with ['source', 'key'] fields
bins: step size or bins
Prepare the binning of the DSSC data.
inputs:
vname: variable name for the scan, can be a mnemonic string from ToolBox
or a dictionnary with ['source', 'key'] fields
bins: step size (or bins_edge but not yet implemented)
"""
if type(vname) is dict:
@@ -130,18 +148,27 @@
plt.tight_layout()
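For illustration only (the mnemonic and the source/key pair below are assumptions, not taken from this commit), a scan could be defined either way:

    data.define_scan('PP800_PhaseShifter', 0.05)                # mnemonic string, step size
    data.define_scan({'source': 'SCS_ILH_LAS/PHASESHIFTER/DOOCS',
                      'key': 'actualPosition.value'}, 0.05)     # explicit source/key dictionary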
def load_xgm(self):
""" Loads pulse resolved dedicated SAS3 data from the SCS XGM.
"""
if self.xgm is None:
self.xgm = self.run.get_array(tb.mnemonics['SCS_SA3']['source'],
tb.mnemonics['SCS_SA3']['key'], roi=kd.by_index[:self.nbunches])
- def plot_xgm_hist(self, nbins):
+ def plot_xgm_hist(self, nbins=100):
""" Plots an histogram of the SCS XGM dedicated SAS3 data.
inputs:
nbins: number of the bins for the histogram.
"""
if self.xgm is None:
self.load_xgm()
hist, bins_edges = np.histogram(self.xgm, nbins, density=True)
width = 1.0 * (bins_edges[1] - bins_edges[0])
bins_center = 0.5*(bins_edges[:-1] + bins_edges[1:])
- plt.figure()
+ plt.figure(figsize=(5,3))
plt.bar(bins_center, hist, align='center', width=width)
plt.xlabel(f"{tb.mnemonics['SCS_SA3']['source']}{tb.mnemonics['SCS_SA3']['key']}")
plt.ylabel('density')
@@ -149,6 +176,14 @@
plt.tight_layout()
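A short usage sketch of the XGM loading and histogram (the bin count is just an example):

    data.load_xgm()
    data.plot_xgm_hist(nbins=150)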
def xgm_filter(self, xgm_low=-np.inf, xgm_high=np.inf):
""" Filters the data by train. If one pulse within a train has an SASE3 SCS XGM value below
xgm_low or above xgm_high, that train will be dropped from the dataset.
inputs:
xgm_low: low threshold value
xgm_high: high threshold value
"""
if self.xgm is None:
self.load_xgm()
@@ -168,6 +203,8 @@
f'thresholds: [{self.xgm_low}, {self.xgm_high}]'))
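An illustrative filtering call (the threshold values are placeholders, in the XGM's units):

    data.xgm_filter(xgm_low=200, xgm_high=6000)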
def load_geom(self):
""" Loads and return the DSSC geometry.
"""
quad_pos = [
(-124.100, 3.112), # TR
(-133.068, -110.604), # BR
@@ -179,6 +216,13 @@
return geom
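The returned object can be used to assemble detector images; a rough sketch, assuming the karabo-data geometry object exposes plot_data_fast and using a dummy array in place of real module data:

    geom = data.load_geom()
    frame = np.zeros((16, 128, 512))   # placeholder for per-module DSSC data
    geom.plot_data_fast(frame)         # assumed karabo-data geometry API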
def create_virtual_dssc_datasets(self, run, path=''):
""" Create virtual datasets for each 16 DSSC modules used for the multiprocessing.
input:
run: karabo-data run
path: string where the virtual files are created
"""
vds_list = []
for m in tqdm(range(16)):
vds_filename = os.path.join(path, f'dssc{m}_vds.h5')
@@ -190,7 +234,9 @@
return vds_list
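The body that builds each virtual file is elided above; as a rough sketch of the underlying h5py virtual-dataset mechanism (the shapes, dtype and file/dataset names below are placeholders, not the ones used in this file):

    ntrains, npulses = 1000, 75                                # placeholder dimensions
    raw_file, dataset_path = 'module0_raw.h5', 'image/data'    # placeholder file and dataset names
    vds_filename = 'dssc0_vds.h5'                              # placeholder output name
    layout = h5py.VirtualLayout(shape=(ntrains, npulses, 128, 512), dtype='u2')
    source = h5py.VirtualSource(raw_file, dataset_path, shape=layout.shape)
    layout[...] = source
    with h5py.File(vds_filename, 'w', libver='latest') as f:
        f.create_virtual_dataset('data', layout, fillvalue=0)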
def crunch(self):
""" Crunch through the DSSC data using multiprocessing.
"""
if self.vds_scan is None:
# probably a dark run with a dummy scan variable
self.vds_scan = os.path.join(self.tempdir, 'scan_variable.h5')
@@ -235,6 +281,12 @@
self.module_data = self.module_data.squeeze()
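The per-module worker is outside the shown hunks; the general multiprocessing pattern it relies on looks roughly like this (the worker function, its arguments and its return value are hypothetical):

    def process_module(job):                          # hypothetical worker
        module, vds_file, scan_file = job
        binned = ...                                  # bin this module's frames against the scan variable
        return module, binned

    jobs = [(m, f'dssc{m}_vds.h5', 'scan_variable.h5') for m in range(16)]
    with multiprocessing.Pool(16) as pool:
        results = pool.map(process_module, jobs)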
def save(self, save_folder=None, overwrite=False):
""" Save the crunched data.
inputs:
save_folder: path of the folder where to save the data.
overwrite: whether to overwrite existing files.
"""
if save_folder is None:
save_folder = self.save_folder
......
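Finally, a usage sketch of the crunch-and-save workflow (the output path is a placeholder):

    data.crunch()
    data.save()                                      # save to the default folder
    data.save('/path/to/output', overwrite=True)     # or to an explicit folder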