diff --git a/Load.py b/Load.py
index 4b9cb1179110445b64aa1a4252ff98d3070e3025..fb0fb68618d2d4655b93cd9c03196a5bae8c0e8e 100644
--- a/Load.py
+++ b/Load.py
@@ -6,7 +6,7 @@
     Copyright (2019) SCS Team.
 """
 import numpy as np
-from karabo_data import RunDirectory
+from karabo_data import RunDirectory, by_index
 import xarray as xr
 
 mnemonics = {
@@ -119,10 +119,37 @@ mnemonics = {
                 'key':'actual_current.value',
                 'dim':None},
-    # FastCCD
+    # FastCCD: raw images if loaded from the raw folder,
+    # dark subtracted and relative gain corrected if loaded from the proc folder
     "fastccd": {'source':'SCS_CDIDET_FCCD2M/DAQ/FCCD:daqOutput',
                 'key':'data.image.pixels',
                 'dim':['x', 'y']},
+    # FastCCD with common mode correction
+    "fastccd_cm": {'source':'SCS_CDIDET_FCCD2M/DAQ/FCCD:daqOutput',
+                'key':'data.image.pixels_cm',
+                'dim':['x', 'y']},
+    # FastCCD charge split correction in the very low photon count regime
+    "fastccd_classified": {'source':'SCS_CDIDET_FCCD2M/DAQ/FCCD:daqOutput',
+                'key':'data.image.pixels_classified',
+                'dim':['x', 'y']},
+    # FastCCD event multiplicity from the charge split correction:
+    # 0: no event
+    # 100, 101: single events
+    # 200-203: charge split into two pixels in four different orientations
+    # 300-303: charge split into three pixels in four different orientations
+    # 400-403: charge split into four pixels in four different orientations
+    # 1000: charge in more than four neighboring pixels, which cannot be produced by a single photon alone
+    "fastccd_patterns": {'source':'SCS_CDIDET_FCCD2M/DAQ/FCCD:daqOutput',
+                'key':'data.image.patterns',
+                'dim':['x', 'y']},
+    # FastCCD gain map: 0 high gain, 1 medium gain, 2 low gain
+    "fastccd_gain": {'source':'SCS_CDIDET_FCCD2M/DAQ/FCCD:daqOutput',
+                'key':'data.image.gain',
+                'dim':['x', 'y']},
+    # FastCCD bad pixel map: pixels with a value > 0 should be ignored
+    "fastccd_mask": {'source':'SCS_CDIDET_FCCD2M/DAQ/FCCD:daqOutput',
+                'key':'data.image.mask',
+                'dim':['x', 'y']},
     # TIM
     "MCP1apd": {'source':'SCS_UTC1_ADQ/ADC/1:network',
                 'key':'digitizers.channel_1_A.apd.pulseIntegral',
                 'dim': ['apdId']},
@@ -149,14 +176,16 @@ mnemonics = {
     "MCP4raw": {'source':'SCS_UTC1_ADQ/ADC/1:network',
                 'key':'digitizers.channel_1_D.raw.samples',
                 'dim': ['samplesId']},
-
+    # KARABACON
     "KARABACON": {'source':'SCS_DAQ_SCAN/MDL/KARABACON',
                   'key': 'actualStep.value',
                   'dim': None}
     }
 
 
-def load(fields, runNB, proposalNB, semesterNB, topic='SCS', display=False, validate=False):
+def load(fields, runNB, proposalNB, semesterNB, topic='SCS', display=False,
+         validate=False, runpath='/gpfs/exfel/exp/{}/{}/{}/raw/r{:04d}/',
+         subset=by_index[:], rois={}):
     """ Load a run and extract the data. Output is an xarray with aligned trainIds
 
     Inputs:
@@ -169,13 +198,21 @@ def load(fields, runNB, proposalNB, semesterNB, topic='SCS', display=False, vali
         topic: string of the topic
         display: boolean, whether to show the run.info or not
         validate: boolean, whether to run karabo-data-validate or not
+        runpath: a string used to format the run folder path from topic,
+            semesterNB, proposalNB and runNB
+        subset: a subset of trains that can be loaded with, e.g., by_index[:5]
+            for the first 5 trains
+        rois: a dictionary of mnemonics with a list of ROI definitions and the
+            desired names, for example {'fastccd': {'ref': {'roi': by_index[730:890, 535:720],
+            'dim': ['ref_x', 'ref_y']}, 'sam': {'roi': by_index[1050:1210, 535:720],
+            'dim': ['sam_x', 'sam_y']}}}
 
     Outputs:
         res: an xarray DataSet with aligned trainIds
     """
 
-    runFolder = '/gpfs/exfel/exp/{}/{}/{}/raw/r{:04d}/'.format(topic, semesterNB, proposalNB, runNB)
-    run = RunDirectory(runFolder)
+    runFolder = runpath.format(topic, semesterNB, proposalNB, runNB)
+    run = RunDirectory(runFolder).select_trains(subset)
 
     if validate:
         get_ipython().system('karabo-data-validate ' + runFolder)
@@ -214,10 +251,16 @@ def load(fields, runNB, proposalNB, semesterNB, topic='SCS', display=False, vali
             print('Source {} not found in run. Skipping!'.format(v['source']))
             continue
 
-        vals.append(run.get_array(v['source'], v['key'], extra_dims=v['dim']))
-
-        keys.append(k)
-
+        if k not in rois:
+            # no ROI selection, read the full data
+            vals.append(run.get_array(v['source'], v['key'], extra_dims=v['dim']))
+            keys.append(k)
+        else:
+            # ROI selection: for each ROI, select a region of the data and save it under a new name and dimensions
+            for nk, nv in rois[k].items():
+                vals.append(run.get_array(v['source'], v['key'], extra_dims=nv['dim'], roi=nv['roi']))
+                keys.append(nk)
+
     aligned_vals = xr.align(*vals, join='inner')
     result = dict(zip(keys, aligned_vals))
     result = xr.Dataset(result)
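
For context, a minimal usage sketch of the extended load() signature introduced above. It is not part of the change itself: the run number and proposal/semester strings are placeholders, the ROI coordinates are copied from the docstring example, and 'fastccd', 'fastccd_mask' and 'MCP1apd' are mnemonics defined in this file.

    from karabo_data import by_index
    from Load import load

    # ROI definitions reusing the coordinates from the docstring example.
    rois = {'fastccd': {'ref': {'roi': by_index[730:890, 535:720],
                                'dim': ['ref_x', 'ref_y']},
                        'sam': {'roi': by_index[1050:1210, 535:720],
                                'dim': ['sam_x', 'sam_y']}}}

    # Load only the first 20 trains of a (placeholder) run and cut the two ROIs
    # out of the FastCCD images; other mnemonics are loaded in full.
    data = load(['fastccd', 'MCP1apd'], runNB=42, proposalNB='p002252',
                semesterNB='201901', subset=by_index[:20], rois=rois)

    # The FastCCD variable is replaced by the ROI names, all aligned on trainId.
    print(data['ref'].dims)      # ('trainId', 'ref_x', 'ref_y')
    print(data['MCP1apd'].dims)  # ('trainId', 'apdId')

    # Hypothetical use of the new bad pixel map (proc data): drop pixels with
    # mask > 0 before averaging over trains.
    full = load(['fastccd', 'fastccd_mask'], runNB=42, proposalNB='p002252',
                semesterNB='201901', subset=by_index[:20])
    avg = full['fastccd'].where(full['fastccd_mask'] == 0).mean(dim='trainId')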