diff --git a/src/toolbox_scs/detectors/xgm.py b/src/toolbox_scs/detectors/xgm.py
index 6882fe30f49819babc56adfdfd1f717af6a764fc..380acb5fd11ac1edca85e4fe54b99f99f6932284 100644
--- a/src/toolbox_scs/detectors/xgm.py
+++ b/src/toolbox_scs/detectors/xgm.py
@@ -15,6 +15,7 @@ import matplotlib.pyplot as plt
 from ..misc.bunch_pattern_external import is_sase_1, is_sase_3
 from ..mnemonics_machinery import (mnemonics_to_process,
                                    mnemonics_for_run)
+from ..load import get_array, load_bpt, get_sase_pId
 
 __all__ = [
     'calibrate_xgm',
@@ -24,8 +25,153 @@ __all__ = [
 
 log = logging.getLogger(__name__)
 
+
+def get_xgm(run, mnemonics=None, merge_with=None,
+            indices=slice(0, None)):
+    """
+    Loads and/or computes XGM data. Sources can be loaded on the
+    fly via the mnemonics argument, or processed from an existing dataset
+    (merge_with). The bunch pattern table is used to assign the pulse
+    id coordinates if the number of pulses has changed during the run.
+
+    Parameters
+    ----------
+    run: extra_data.DataCollection
+        DataCollection containing the xgm data.
+    mnemonics: str or list of str
+        mnemonics for XGM, e.g. "SCS_SA3" or ["XTD10_XGM", "SCS_XGM"].
+        If None, defaults to "SCS_SA3" in case no merge_with dataset
+        is provided.
+    merge_with: xarray Dataset
+        If provided, the resulting Dataset will be merged with this
+        one. The XGM variables of merge_with (if any) will also be
+        computed and merged.
+    indices: slice, list, 1D array
+        Pulse indices of the XGM array in case the bunch pattern is missing.
+
+    Returns
+    -------
+    xarray Dataset with pulse-resolved XGM variables aligned, merged with
+    the Dataset *merge_with* if provided.
+
+    Example
+    -------
+    >>> import toolbox_scs as tb
+    >>> run, ds = tb.load(2212, 213, 'SCS_SA3')
+    >>> ds_xgm = tb.get_xgm(run, mnemonics='SCS_XGM', merge_with=ds)
+
+    """
+    xgm_mnemos = ['XTD10_SA', 'XTD10_XGM', 'SCS_SA', 'SCS_XGM']
+    m2 = []
+    if mnemonics is None and merge_with is None:
+        # default documented above: load the SASE 3 intensity at SCS
+        mnemonics = ['SCS_SA3']
+    if mnemonics is not None:
+        mnemonics = [mnemonics] if isinstance(mnemonics, str) else mnemonics
+        for m in mnemonics:
+            if any([(k in m) for k in xgm_mnemos]):
+                m2.append(m)
+    if merge_with is not None:
+        in_mw = []
+        for m, da in merge_with.items():
+            if any([(k in m) for k in xgm_mnemos]) and 'XGMbunchId' in da.dims:
+                in_mw.append(m)
+        m2 += in_mw
+    if len(m2) == 0:
+        log.info('no XGM mnemonics to process. Skipping.')
+        return merge_with
+    mnemonics = list(set(m2))
+    # Prepare the dataset of non-XGM data to merge with
+    if bool(merge_with):
+        ds_mw = merge_with.drop(mnemonics, errors='ignore')
+    else:
+        ds_mw = xr.Dataset()
+
+    sase1 = sase3 = None
+    sase1_changed = sase3_changed = False
+    ds = xr.Dataset()
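+    # For each mnemonic, take the raw array from merge_with if present,
+    # otherwise load it from the run. The SASE 1 / SASE 3 pulse ids are
+    # retrieved once; if the pulse pattern was constant during the run the
+    # pulse id coordinates are assigned directly, otherwise the bunch
+    # pattern table is used to align the data pulse by pulse.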
+    for m in mnemonics:
+        if merge_with is not None and m in merge_with:
+            da_xgm = merge_with[m]
+        else:
+            da_xgm = get_array(run, m)
+        if sase1 is None and ('XGM' in m or 'SA1' in m):
+            sase1, sase1_changed = get_sase_pId(run, 'sase1')
+        if sase3 is None and ('XGM' in m or 'SA3' in m):
+            sase3, sase3_changed = get_sase_pId(run, 'sase3')
+        if not sase1_changed and not sase3_changed:
+            ds_xgm = load_xgm_array(run, da_xgm, m, sase1, sase3)
+        else:
+            bpt = load_bpt(run, merge_with)
+            if bpt is not None:
+                ds_xgm = align_xgm_array(da_xgm, bpt)
+                if bpt.name not in ds_mw:
+                    log.warning('Number of pulses changed during '
+                                'the run. Loading bunch pattern table.')
+                    ds_mw = ds_mw.merge(bpt, join='inner')
+            else:
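+                # Assumption: without a bunch pattern table, intensity values
+                # of exactly 1 or 0 are placeholders for unused pulse slots;
+                # mask them and fall back to selecting pulses by 'indices'.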
+                xgm_val = da_xgm.values
+                xgm_val[xgm_val == 1] = np.nan
+                xgm_val[xgm_val == 0] = np.nan
+                da_xgm.values = xgm_val
+                da_xgm = da_xgm.dropna(dim='XGMbunchId', how='all')
+                ds_xgm = da_xgm.fillna(0).sel(XGMbunchId=indices).to_dataset()
+        ds = ds.merge(ds_xgm, join='inner')
+    # merge with the non-XGM dataset
+    ds = ds_mw.merge(ds, join='inner')
+    return ds
+
+
+def load_xgm_array(run, xgm, mnemonic, sase1, sase3):
+    """
+    From a raw XGM array, extracts and assigns pulse id coordinates
+    when the number of pulses does not change during the run.
+    If 'XGM' is in the mnemonic, the data is split into two variables,
+    'SA1' and 'SA3'.
+
+    Parameters
+    ----------
+    run: extra_data.DataCollection
+        DataCollection containing the xgm data.
+    xgm: xarray.DataArray
+        the raw XGM array
+    mnemonic: str
+        the XGM mnemonic
+    sase1: list or 1D array
+        the sase1 pulse ids
+    sase3: list or 1D array
+        the sase3 pulse ids
+
+    Returns
+    -------
+    ds_xgm: xarray.Dataset
+        the dataset containing the aligned XGM variable(s).
+    """
+    xgm_val = xgm.values
+    xgm_val[xgm_val == 1] = np.nan
+    xgm_val[xgm_val == 0] = np.nan
+    xgm.values = xgm_val
+    xgm = xgm.dropna(dim='XGMbunchId', how='all')
+    xgm = xgm.fillna(0)
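+    # A combined 'XGM' mnemonic contains both SASE 1 and SASE 3 pulses: find
+    # the position of each pulse id in the merged, sorted pulse id list and
+    # split the array into separate 'SA1' and 'SA3' variables with their own
+    # pulse id coordinates.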
+    if 'XGM' in mnemonic:
+        sase1_3 = np.sort(np.concatenate([sase1, sase3]))
+        sase1_idx = [np.argwhere(sase1_3 == i)[0][0] for i in sase1]
+        sase3_idx = [np.argwhere(sase1_3 == i)[0][0] for i in sase3]
+        xgm_sa1 = xgm.isel(XGMbunchId=sase1_idx).rename(XGMbunchId='sa1_pId')
+        xgm_sa1 = xgm_sa1.assign_coords(sa1_pId=sase1)
+        xgm_sa1 = xgm_sa1.rename(mnemonic.replace('XGM', 'SA1'))
+        xgm_sa3 = xgm.isel(XGMbunchId=sase3_idx).rename(XGMbunchId='sa3_pId')
+        xgm_sa3 = xgm_sa3.assign_coords(sa3_pId=sase3)
+        xgm_sa3 = xgm_sa3.rename(mnemonic.replace('XGM', 'SA3'))
+        xgm = xr.merge([xgm_sa1, xgm_sa3])
+    elif 'SA1' in mnemonic:
+        xgm = xgm.rename(XGMbunchId='sa1_pId')
+        xgm = xgm.assign_coords(sa1_pId=sase1).rename(mnemonic)
+        xgm = xgm.to_dataset()
+    elif 'SA3' in mnemonic:
+        xgm = xgm.rename(XGMbunchId='sa3_pId')
+        xgm = xgm.assign_coords(sa3_pId=sase3).rename(mnemonic)
+        xgm = xgm.to_dataset()
+    return xgm
+
 
-def get_xgm(run, mnemonics=None, merge_with=None, keepAllSase=False,
+'''
+def get_xgm_old(run, mnemonics=None, merge_with=None, keepAllSase=False,
             indices=slice(0, None)):
     """
     Load and/or computes XGM data. Sources can be loaded on the
@@ -103,6 +249,7 @@ def get_xgm(run, mnemonics=None, merge_with=None, keepAllSase=False,
             arr = arr.where(arr != 1., drop=True).sel(XGMbunchId=indices)
         ds = ds.merge(arr, join='inner')
     return ds
+'''
 
 
 def align_xgm_array(xgm_arr, bpt):