diff --git a/pes_to_spec/model.py b/pes_to_spec/model.py
index a232761e807d5f0a6ab186695aef3c9a9ebde3f6..dcd5265d6c371f505023541ce86e77a44584db6b 100644
--- a/pes_to_spec/model.py
+++ b/pes_to_spec/model.py
@@ -585,7 +585,7 @@ class Model(TransformerMixin, BaseEstimator):
                  channels:List[str]=[f"channel_{j}_{k}"
                                      for j, k in product(range(1, 5), ["A", "B", "C", "D"])],
                  n_pca_lr: int=1000,
-                 n_pca_hr: int=20,
+                 n_pca_hr: int=40,
                  high_res_sigma: float=0,
                  tof_start: Optional[int]=None,
                  delta_tof: Optional[int]=300,
@@ -596,7 +596,7 @@ class Model(TransformerMixin, BaseEstimator):
                 ):
         self.high_res_sigma = high_res_sigma
         # models
-        self.x_select = SelectRelevantLowResolution(channels, tof_start, delta_tof, poly=(model_type not in ["bnn", "bnn_rvm"]))
+        self.x_select = SelectRelevantLowResolution(channels, tof_start, delta_tof, poly=False)  # previously: poly=(model_type not in ["bnn", "bnn_rvm"])
         x_model_steps = list()
         x_model_steps += [
                           ('pca', PCA(n_pca_lr, whiten=True)),
@@ -713,7 +713,7 @@ class Model(TransformerMixin, BaseEstimator):
         avg_energy = energy[:, energy.shape[1]//2, np.newaxis]
         gaussian_bkg = np.exp(-0.5*(energy - avg_energy)**2/bkg_sigma**2)
         gaussian_bkg /= np.sum(gaussian_bkg, axis=1, keepdims=True)
-        gaussian = np.exp(-0.5*(energy - avg_energy)**2/self.high_res_sigma**2)
+        gaussian = np.exp(-0.5*(energy - avg_energy)**2/0.2**2)  # hard-coded smoothing sigma of 0.2 instead of self.high_res_sigma
         gaussian /= np.sum(gaussian, axis=1, keepdims=True)
         smooth = fftconvolve(hr_data, gaussian, mode="same", axes=1)
         bkg = fftconvolve(hr_data, gaussian_bkg, mode="same", axes=1)
@@ -747,6 +747,7 @@ class Model(TransformerMixin, BaseEstimator):
         print("Checking data quality in high-resolution data.")
         peaks = self.count_peaks(high_res_data, high_res_photon_energy)
         filter_hr = (peaks >= self.n_peaks)
+        print(f"Selected {np.sum(filter_hr)} of {high_res_data.shape[0]} samples")
 
         print("Fitting PCA on low-resolution data.")
         self.x_select.fit(low_res_data)
@@ -756,6 +757,7 @@ class Model(TransformerMixin, BaseEstimator):
         low_res_select = low_res_select.reshape((B*P, -1))
 
         n_components = min(self.x_model["pca"].n_components, low_res_select.shape[0])
+        print(f"Using {n_components} comp. for PES PCA (asked for {self.x_model['pca'].n_components}, out of {low_res_select.shape[1]}, in {low_res_select.shape[0]} samples).")
         self.x_model.set_params(pca__n_components=n_components)
         x_t = self.x_model.fit_transform(low_res_select)
         #print("PCA fraction of variance (LR): ", np.cumsum(self.x_model["pca"].explained_variance_ratio_))
diff --git a/pes_to_spec/test/offline_analysis.py b/pes_to_spec/test/offline_analysis.py
index 3911d0f630ed8f4239d528be1ff1bc288ea7ceae..0f772e196825f35fbd2ba2b99d4afae3b3a5be9b 100755
--- a/pes_to_spec/test/offline_analysis.py
+++ b/pes_to_spec/test/offline_analysis.py
@@ -46,12 +46,11 @@ def get_gas(run, tids):
 
 def save_result(filename: str,
                 spec_pred: Dict[str, np.ndarray],
+                spec: np.ndarray,
                 spec_smooth: np.ndarray,
                 spec_raw_pe: np.ndarray,
                 intensity: float,
-                #spec_raw_int: Optional[np.ndarray]=None,
                 pes: Optional[np.ndarray]=None,
-                pes_to_show: Optional[str]="",
                 first: Optional[int]=None,
                 last: Optional[int]=None,
                 ):
@@ -61,11 +60,10 @@ def save_result(filename: str,
     Args:
       filename: Output file name.
       spec_pred: Predicted result with uncertainty bands in a dictionary.
+      spec: The unsmoothed spectrum with shape (features,).
       spec_smooth: Smoothened expected result with shape (features,).
       spec_raw_pe: x axis with the photon energy in eV.
-      spec_raw_int: Original true expected result with shape (features,).
       pes: PES spectrum for the inset.
-      pes_to_show: Name of the channel shown.
       intensity: The XGM intensity in uJ.
 
     """
@@ -73,7 +71,8 @@ def save_result(filename: str,
     unc_pca = spec_pred["pca"]
     unc = np.sqrt(unc_stat**2 + unc_pca**2)
     df = pd.DataFrame(dict(energy=spec_raw_pe,
-                           spec=spec_smooth,
+                           spec=spec,
+                           spec_smooth=spec_smooth,
                            prediction=spec_pred["expected"],
                            unc=unc,
                            unc_pca=unc_pca,
@@ -100,13 +99,9 @@ def save_pes_result(filename: str,
 
     Args:
       filename: Output file name.
-      spec_pred: Predicted result with uncertainty bands in a dictionary.
-      spec_smooth: Smoothened expected result with shape (features,).
-      spec_raw_pe: x axis with the photon energy in eV.
-      spec_raw_int: Original true expected result with shape (features,).
       pes: PES spectrum for the inset.
-      pes_to_show: Name of the channel shown.
-      intensity: The XGM intensity in uJ.
+      first: Start of the region of interest (ROI).
+      last: End of the region of interest (ROI).
 
     """
     pes_data = deepcopy(pes)
@@ -293,14 +288,14 @@ def main():
     pca = PCA(None, whiten=True)
     pca.fit(pes_raw_select)
     df = pd.DataFrame(dict(variance_ratio=pca.explained_variance_ratio_,
-                           n_comp=600*np.ones_like(pca.explained_variance_ratio_),
+                           n_comp=1000*np.ones_like(pca.explained_variance_ratio_),
                            ))
     df.to_csv(os.path.join(args.directory, "pca_pes.csv"))
 
     pca_spec = PCA(None, whiten=True)
     pca_spec.fit(spec_raw_int[train_idx])
     df = pd.DataFrame(dict(variance_ratio=pca_spec.explained_variance_ratio_,
-                           n_comp=20*np.ones_like(pca_spec.explained_variance_ratio_),
+                           n_comp=40*np.ones_like(pca_spec.explained_variance_ratio_),
                            ))
     df.to_csv(os.path.join(args.directory, "pca_spec.csv"))
 
@@ -399,9 +394,9 @@ def main():
                        {k: item[idx, 0, ...] if k != "pca"
                            else item[0, ...]
                            for k, item in spec_pred.items()},
-                        spec_smooth[idx, :] if showSpec else None,
-                        spec_raw_pe_t[idx, :] if showSpec else None,
-                        #spec_raw_int_t[idx, :] if showSpec else None,
+                        spec_raw_int_t[idx, :],
+                        spec_smooth[idx, :],
+                        spec_raw_pe_t[idx, :],
                         intensity=xgm_flux_t[idx,0],
                         )
             save_pes_result(os.path.join(args.directory, f"test_q{q}_{tid}_pes.csv"),
diff --git a/pes_to_spec/test/prepare_plots.py b/pes_to_spec/test/prepare_plots.py
index 14c079418a95498295863d3061a89188754876c8..1298950456c86f61025bf6829778eb00d6bf618a 100755
--- a/pes_to_spec/test/prepare_plots.py
+++ b/pes_to_spec/test/prepare_plots.py
@@ -31,8 +31,8 @@ def plot_final(df: pd.DataFrame, filename: str):
     ax = fig.add_subplot(gs[0, 0])
     ax.plot(df.energy, df.spec, c='b', lw=3, label="Grating spectrometer")
     ax.plot(df.energy, df.prediction, c='r', ls='--', lw=3, label="Prediction")
-    ax.fill_between(df.energy, df.prediction - 2*df.unc, df.prediction + 2*df.unc, facecolor='gold', alpha=0.5, label="95% unc. (total)")
-    ax.fill_between(df.energy, df.prediction - 2*df.unc_pca, df.prediction + 2*df.unc_pca, facecolor='magenta', alpha=0.5, label="95% unc. (PCA only)")
+    ax.fill_between(df.energy, df.prediction - 1*df.unc, df.prediction + 1*df.unc, facecolor='gold', alpha=0.5, label="68% unc. (total)")
+    ax.fill_between(df.energy, df.prediction - 1*df.unc_pca, df.prediction + 1*df.unc_pca, facecolor='magenta', alpha=0.5, label="68% unc. (PCA only)")
     Y = np.amax(df.spec)
     ax.legend(frameon=False, borderaxespad=0, loc='upper left')
     ax.set_title(f"Beam intensity: {df.beam_intensity.iloc[0]:.1f} mJ", loc="left")
@@ -86,7 +86,7 @@ def plot_residue(df: pd.DataFrame, filename: str):
            ylabel="Counts [a.u.]",
            xlim=(-3, 3),
            )
-        ax.legend(frameon=False)
+        #ax.legend(frameon=False)
     fig.savefig(filename)
     plt.close(fig)
 
@@ -432,8 +432,8 @@ if __name__ == '__main__':
     plot_residue_corr(pd.read_csv(f'{indir}/quality.csv'), f'residue_corr.pdf')
 
     df_model = pd.read_csv(f'{indir}/model.csv')
-    df_model.impulse = df_model.impulse.str.replace('i','j').apply(lambda x: np.complex(x))
-    df_model.wiener_filter = df_model.wiener_filter.str.replace('i','j').apply(lambda x: np.complex(x))
+    df_model.impulse = df_model.impulse.str.replace('i','j').apply(lambda x: complex(x))
+    df_model.wiener_filter = df_model.wiener_filter.str.replace('i','j').apply(lambda x: complex(x))
     plot_impulse(df_model, f'impulse.pdf')
     plot_wiener(df_model, f'wiener.pdf')