diff --git a/Load.py b/Load.py
index 094c280b91a836c6c200e753ac57fb7b761fc624..a618b94d729626fc545af340f7000cc50181712d 100644
--- a/Load.py
+++ b/Load.py
@@ -10,6 +10,7 @@ from karabo_data import by_index, RunDirectory
 from karabo_data.read_machinery import find_proposal
 import xarray as xr
 import os
+import euxfel_bunch_pattern as bp
 
 mnemonics = {
     # Machine
@@ -28,6 +29,9 @@ mnemonics = {
     "bunchpattern": {'source':'SCS_RR_UTC/TSYS/TIMESERVER',
                      'key':'readBunchPatternTable.value',
                      'dim':None},
+    "bunchPatternTable": {'source':'SCS_RR_UTC/TSYS/TIMESERVER',
+                     'key':'bunchPatternTable.value',
+                     'dim':['pulse_slot']},
     "npulses_sase3": {'source':'SCS_RR_UTC/MDL/BUNCH_DECODER',
                       'key':'sase3.nPulses.value',
                       'dim':None},
@@ -209,6 +213,10 @@ mnemonics = {
     "PP800_TeleLens": {'source':'SCS_ILH_LAS/MOTOR/LT7',
                  'key':'actualPosition.value',
                  'dim':None},
+    "ILH_8CAM1": {'source':'SCS_ILH_LAS/CAM/8CAM1:daqOutput',
+                'key':'data.image.pixels',
+                'dim':['8cam1_y', '8cam1_x']},
+
     
     # FFT
     "scannerX": {'source':'SCS_CDIFFT_SAM/LMOTOR/SCANNERX',
@@ -229,6 +237,10 @@ mnemonics = {
     "magnet_old": {'source':'SCS_CDIFFT_MAG/SUPPLY/CURRENT',
                'key':'actualCurrent.value',
                'dim':None},
+    
+    "Vertical_FDM": {'source':'SCS_CDIFFT_LDM/CAM/CAMERA1A:daqOutput',
+                'key':'data.image.pixels',
+                'dim':['vfdm_y', 'vfdm_x']},
 
     # FastCCD, if in raw folder, raw images
     #          if in proc folder, dark substracted and relative gain corrected
@@ -364,8 +376,51 @@ mnemonics = {
                     'dim': ['gott_pId','pixelId']}
 }
 
+def extractSaseBunchPattern(runDir, sase=3):
+    ''' generate the "saseX" and "npulses_saseX" arrays directly from the bunch pattern table and not using the 
+        MDL device BUNCH_DECODER.
+        Inputs:
+            runDir: run directory obtained by karabo_data.RunDirectory()
+            sase: int, sase number between 1 and 3
+            
+        Outputs:
+            sase: DataArray containing indices of the sase pulses for each train
+            npulses_sase: DataArray containing the number of pulses for each train
+                  
+    '''
+    if not (1 <= sase <= 3):
+        raise ValueError("Invalid SASE value {!r}, expected 1-3".format(sase))
+    bp_mnemo = mnemonics['bunchPatternTable']
+    bp_table = runDir.get_array(bp_mnemo['source'],bp_mnemo['key'], 
+                                extra_dims=bp_mnemo['dim'])
+    destination = bp.DESTINATION_T5D if (sase == 2) else bp.DESTINATION_T4D
+    matched = (bp_table & bp.DESTINATION_MASK) == destination
+
+    if sase == 1:
+        # Pulses to SASE 1 when soft kick is off
+        matched &= (bp_table & bp.PHOTON_LINE_DEFLECTION) == 0
+    elif sase == 3:
+        # Pulses to SASE 3 when soft kick is on
+        matched &= (bp_table & bp.PHOTON_LINE_DEFLECTION) != 0
+
+    nz = np.nonzero(matched.values)
+    dim_pId = matched.shape[1]
+    sase_array = np.ones(matched.shape, dtype=np.uint64)*dim_pId
+    sase_array[nz] = nz[1]
+    sase_array = np.sort(sase_array)
+    sase_array[sase_array == dim_pId] = 0
+
+    sase_da = xr.DataArray(sase_array[:,:1000], dims=['trainId', 'bunchId'],
+                          coords={'trainId':matched.trainId}, 
+                          name=f'sase{sase}')
+    npulses_sase = xr.DataArray(np.count_nonzero(matched.values, axis=1), dims=['trainId'],
+                                coords={'trainId':matched.trainId}, 
+                                name=f'npulses_sase{sase}')
+    return sase_da, npulses_sase
+
+
 def load(fields, runNB, proposalNB, subFolder='raw', display=False, validate=False,
-         subset=by_index[:], rois={}):
+         subset=by_index[:], rois={}, useBPTable=True):
     """ Load a run and extract the data. Output is an xarray with aligned trainIds
 
         Inputs:
@@ -384,6 +439,9 @@ def load(fields, runNB, proposalNB, subFolder='raw', display=False, validate=Fal
                 names, for example {'fastccd':{'ref':{'roi':by_index[730:890, 535:720],
                 'dim': ['ref_x', 'ref_y']}, 'sam':{'roi':by_index[1050:1210, 535:720],
                 'dim': ['sam_x', 'sam_y']}}}
+            useBPTable: If True, uses the raw bunch pattern table to extract sase pulse
+                number and indices in the trains. If False, load the data from BUNCH_DECODER
+                middle layer device.
 
         Outputs:
             res: an xarray DataSet with aligned trainIds
@@ -405,8 +463,18 @@ def load(fields, runNB, proposalNB, subFolder='raw', display=False, validate=Fal
     keys = []
     vals = []
 
-    # always load pulse pattern infos
-    fields += ["sase1", "sase3", "npulses_sase3", "npulses_sase1"]
+    # load pulse pattern infos
+    if useBPTable:
+        if mnemonics['bunchPatternTable']['source'] not in run.all_sources:
+            print('Source {} not found in run. Skipping!'.format(
+                                mnemonics['bunchPatternTable']['source']))
+        else:
+            sase1, npulses_sase1 = extractSaseBunchPattern(run, 1)
+            sase3, npulses_sase3 = extractSaseBunchPattern(run, 3)
+            keys += ["sase1", "npulses_sase1", "sase3", "npulses_sase3"]
+            vals += [sase1, npulses_sase1, sase3, npulses_sase3]
+    else:
+        fields += ["sase1", "sase3", "npulses_sase3", "npulses_sase1"]
 
     for f in fields:
 
@@ -444,7 +512,7 @@ def load(fields, runNB, proposalNB, subFolder='raw', display=False, validate=Fal
             for nk,nv in rois[k].items():
                 vals.append(run.get_array(v['source'], v['key'], extra_dims=nv['dim'], roi=nv['roi']))
                 keys.append(nk)
- 
+    
     aligned_vals = xr.align(*vals, join='inner')
     result = dict(zip(keys, aligned_vals))
     result = xr.Dataset(result)
@@ -474,3 +542,4 @@ def concatenateRuns(runs):
     result.attrs['run'] = [run.attrs['run'] for run in orderedRuns]
     result.attrs['runFolder'] = [run.attrs['runFolder'] for run in orderedRuns]
     return result
+