From 2f2af63ed29a4efb601fbac3e83e56172f0a5bb4 Mon Sep 17 00:00:00 2001
From: Karim Ahmed <karim.ahmed@xfel.eu>
Date: Mon, 27 Apr 2020 10:51:16 +0200
Subject: [PATCH] Avoid notebook crash when an error happens

Wrap the per-file correction in a try/except block so that a failure in
one sequence file is reported instead of aborting the whole notebook,
and raise a ValueError early when a file contains no image data.

---
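
Note (below the three-dash line, so git am keeps it out of the commit
message): the heart of this change is a per-file try/except, so one bad
sequence file no longer kills the whole run. A minimal runnable sketch of
the pattern; process() and the paths are hypothetical stand-ins, not the
notebook's real names:

    def process(path):
        """Stand-in for the per-file correction; raises on bad input."""
        if not path:
            raise ValueError(f"No image data: {path!r}")
        print(f"Processed {path}")

    for p in ["seq00.h5", "", "seq01.h5"]:
        try:
            process(p)
        except Exception as e:  # report and move on, as the patch does
            print(f"Error: {e}")
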
 ...Jungfrau_Gain_Correct_and_Verify_NBC.ipynb | 126 +++++++++---------
 1 file changed, 65 insertions(+), 61 deletions(-)
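
The "12+1 chunks" comment in the hunk comes from chunk_size = max_ind//12:
twelve full chunks plus a possible remainder. A toy sketch with a
hypothetical split_chunks() helper (the max(1, ...) guard is added here to
keep the step non-zero for very short runs; it is not in the notebook):

    def split_chunks(n_images, chunk_size=0):
        if chunk_size == 0:
            chunk_size = max(1, n_images // 12)
        return [(i, min(i + chunk_size, n_images))
                for i in range(0, n_images, chunk_size)]

    print(len(split_chunks(100)))  # 13 slices: 12 full chunks + remainder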

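The output datasets are created chunked along the image axis, with the
mask gzip-compressed and shuffled; a simplified 3D sketch (file name and
shape are made up, the notebook's real shape and chunking differ):

    import h5py
    import numpy as np

    oshape = (16, 512, 1024)  # hypothetical (frames, y, x)
    with h5py.File("corr_sketch.h5", "w") as ofile:
        ofile.create_dataset("adc", oshape, dtype=np.float32,
                             chunks=(1, oshape[1], oshape[2]))
        ofile.create_dataset("mask", oshape, dtype=np.uint32,
                             chunks=(1, oshape[1], oshape[2]),
                             compression="gzip", compression_opts=1,
                             shuffle=True)
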
diff --git a/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb b/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb
index 1a1f94545..d88429fc7 100644
--- a/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb
+++ b/notebooks/Jungfrau/Jungfrau_Gain_Correct_and_Verify_NBC.ipynb
@@ -415,67 +415,71 @@
     "            out_file = \"{}/{}\".format(out_folder, f.split(\"/\")[-1])\n",
     "            out_file = out_file.replace(\"RAW\", \"CORR\")\n",
     "            print(f'Process file: {f}, with path {h5path_f}')\n",
-    "            \n",
-    "            with h5py.File(out_file, \"w\") as ofile:\n",
-    "                copy_and_sanitize_non_cal_data(infile, ofile, h5path_f)\n",
-    "\n",
-    "                oshape = infile[h5path_f+\"/adc\"].shape\n",
-    "                print(f'Data shape: {oshape}')\n",
-    "                ddset = ofile.create_dataset(h5path_f+\"/adc\",\n",
-    "                                             oshape,\n",
-    "                                             chunks=(chunk_size_idim, 1, oshape[1], oshape[2]),\n",
-    "                                             dtype=np.float32)\n",
-    "\n",
-    "                mskset = ofile.create_dataset(h5path_f+\"/mask\",\n",
-    "                                              oshape,\n",
-    "                                              chunks=(chunk_size_idim, 1, oshape[1], oshape[2]),\n",
-    "                                              dtype=np.uint32,\n",
-    "                                              compression=\"gzip\", compression_opts=1, shuffle=True)\n",
-    "\n",
-    "                # Run ip Cluster parallelization over chunks of images\n",
-    "                inp = []\n",
-    "                max_ind = oshape[0]\n",
-    "                ind = 0\n",
-    "\n",
-    "                # If chunk size is not given maximum 12+1 chunks is expected\n",
-    "                if chunk_size == 0:\n",
-    "                    chunk_size = max_ind//12\n",
-    "                    print(f'Chunk size: {chunk_size}')\n",
-    "\n",
-    "                ts = time.time()\n",
-    "                while ind<max_ind:\n",
-    "                    d = infile[h5path_f+\"/adc\"][ind:ind+chunk_size,...].astype(np.float32)\n",
-    "                    g = infile[h5path_f+\"/gain\"][ind:ind+chunk_size,...]\n",
-    "                    m = infile[h5path_f+\"/memoryCell\"][ind:ind+chunk_size,...]\n",
-    "                    print(f'To process: {d.shape}')\n",
-    "                    inp.append((d,g,m, ind, k==0))\n",
-    "                    ind += chunk_size\n",
-    "\n",
-    "                print('Preparation time: ', time.time() - ts)\n",
-    "                ts = time.time()\n",
-    "\n",
-    "                print(f'Run {len(inp)} processes')\n",
-    "                p = partial(correct_chunk, offset_map, mask, gain_map, memoryCells, no_relative_gain)\n",
-    "\n",
-    "                r = view.map_sync(p, inp)\n",
-    "                #r = list(map(p, inp))\n",
-    "                if k==0:\n",
-    "                    (_,_,_,\n",
-    "                     rim_data[db_module[i]], fim_data[db_module[i]],\n",
-    "                     gim_data[db_module[i]], msk_data[db_module[i]], _) = r[0]\n",
-    "\n",
-    "                print('Correction time: ', time.time() - ts)\n",
-    "                ts = time.time()\n",
-    "\n",
-    "                for rr in r:\n",
-    "                    ind, cdata, cmask, _,_,_,_, err = rr\n",
-    "                    data_size = cdata.shape[0]\n",
-    "                    ddset[ind:ind+data_size,...] = cdata\n",
-    "                    mskset[ind:ind+data_size,...] = cmask\n",
-    "                    if err != '':\n",
-    "                        print(f'Error: {err}')\n",
-    "\n",
-    "                print('Saving time: ', time.time() - ts)"
+    "            try:\n",
+    "                with h5py.File(out_file, \"w\") as ofile:\n",
+    "                    copy_and_sanitize_non_cal_data(infile, ofile, h5path_f)\n",
+    "\n",
+    "                    oshape = infile[h5path_f+\"/adc\"].shape\n",
+    "                    print(f'Data shape: {oshape}')\n",
+    "                    if not oshape[0]:\n",
+    "                        raise ValueError(f\"No image data: shape {oshape}\")\n",
+    "                    ddset = ofile.create_dataset(h5path_f+\"/adc\",\n",
+    "                                                 oshape,\n",
+    "                                                 chunks=(chunk_size_idim, 1, oshape[1], oshape[2]),\n",
+    "                                                 dtype=np.float32)\n",
+    "\n",
+    "                    mskset = ofile.create_dataset(h5path_f+\"/mask\",\n",
+    "                                                  oshape,\n",
+    "                                                  chunks=(chunk_size_idim, 1, oshape[1], oshape[2]),\n",
+    "                                                  dtype=np.uint32,\n",
+    "                                                  compression=\"gzip\", compression_opts=1, shuffle=True)\n",
+    "\n",
+    "                    # Run ip Cluster parallelization over chunks of images\n",
+    "                    inp = []\n",
+    "                    max_ind = oshape[0]\n",
+    "                    ind = 0\n",
+    "\n",
+    "                    # If chunk size is not given maximum 12+1 chunks is expected\n",
+    "                    if chunk_size == 0:\n",
+    "                        chunk_size = max_ind//12\n",
+    "                        print(f'Chunk size: {chunk_size}')\n",
+    "\n",
+    "                    ts = time.time()\n",
+    "                    while ind<max_ind:\n",
+    "                        d = infile[h5path_f+\"/adc\"][ind:ind+chunk_size,...].astype(np.float32)\n",
+    "                        g = infile[h5path_f+\"/gain\"][ind:ind+chunk_size,...]\n",
+    "                        m = infile[h5path_f+\"/memoryCell\"][ind:ind+chunk_size,...]\n",
+    "                        print(f'To process: {d.shape}')\n",
+    "                        inp.append((d,g,m, ind, k==0))\n",
+    "                        ind += chunk_size\n",
+    "\n",
+    "                    print('Preparation time: ', time.time() - ts)\n",
+    "                    ts = time.time()\n",
+    "\n",
+    "                    print(f'Run {len(inp)} processes')\n",
+    "                    p = partial(correct_chunk, offset_map, mask, gain_map, memoryCells, no_relative_gain)\n",
+    "\n",
+    "                    r = view.map_sync(p, inp)\n",
+    "                    #r = list(map(p, inp))\n",
+    "                    if k==0:\n",
+    "                        (_,_,_,\n",
+    "                         rim_data[db_module[i]], fim_data[db_module[i]],\n",
+    "                         gim_data[db_module[i]], msk_data[db_module[i]], _) = r[0]\n",
+    "\n",
+    "                    print('Correction time: ', time.time() - ts)\n",
+    "                    ts = time.time()\n",
+    "\n",
+    "                    for rr in r:\n",
+    "                        ind, cdata, cmask, _,_,_,_, err = rr\n",
+    "                        data_size = cdata.shape[0]\n",
+    "                        ddset[ind:ind+data_size,...] = cdata\n",
+    "                        mskset[ind:ind+data_size,...] = cmask\n",
+    "                        if err != '':\n",
+    "                            print(f'Error: {err}')\n",
+    "\n",
+    "                    print('Saving time: ', time.time() - ts)\n",
+    "            except Exception as e:\n",
+    "                print(f\"Error: {e}\")"
    ]
   },
   {
-- 
GitLab