diff --git a/cal_tools/cal_tools/lpdlib.py b/cal_tools/cal_tools/lpdlib.py
index 7b9b8986525d6bc1f976834008a7373540300478..47392acdcaf67b36137179bbf12c1dc511a80216 100644
--- a/cal_tools/cal_tools/lpdlib.py
+++ b/cal_tools/cal_tools/lpdlib.py
@@ -170,6 +170,8 @@ class LpdCorrections:
         self.max_cells = np.min(allcells)
         if not self.karabo_data_mode:
             self.gen_valid_range()
+            if self.firange.size < self.chunksize:
+                self.chunksize = self.firange.size
             self.copy_and_sanitize_non_cal_data()
             self.create_output_datasets()
             self.initialized = True
diff --git a/notebooks/LPD/LPD_Correct_and_Verify.ipynb b/notebooks/LPD/LPD_Correct_and_Verify.ipynb
index d09bc106d3d1306c11328e3e507940429a38cbbc..9a1351fbdd6861170d4edde71ce2be3bf01ae9a1 100644
--- a/notebooks/LPD/LPD_Correct_and_Verify.ipynb
+++ b/notebooks/LPD/LPD_Correct_and_Verify.ipynb
@@ -20,16 +20,16 @@
    },
    "outputs": [],
    "source": [
-    "in_folder = \"/gpfs/exfel/exp/FXE/201802/p002218/raw/\" # the folder to read data from, required\n",
-    "run = 115 # runs to process, required\n",
-    "out_folder = \"/gpfs/exfel/data/scratch/xcal/lpd_test/exclude\" # the folder to output to, required\n",
+    "in_folder = \"/gpfs/exfel/exp/FXE/201901/p002210/raw/\" # the folder to read data from, required\n",
+    "run = 133 # runs to process, required\n",
+    "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/lpd_test/exclude3\" # the folder to output to, required\n",
     "calfile = \"/gpfs/exfel/data/scratch/xcal/lpd_store_0519.h5\" # path to constants extracted from the db into a file\n",
-    "sequences = [-1] # sequences to correct, set to -1 for all, range allowed\n",
+    "sequences = [0] # sequences to correct, set to -1 for all, range allowed\n",
     "mem_cells = 512 # memory cells in data\n",
     "overwrite = True # set to True if existing data should be overwritten\n",
     "no_relative_gain = False # do not do relative gain correction\n",
     "no_flat_fields = False # do not do flat field correction\n",
-    "cluster_profile = \"noDB33\" # cluster profile to use\n",
+    "cluster_profile = \"noDB\" # cluster profile to use\n",
     "max_pulses = 512 # maximum number of pulses per train\n",
     "use_now_as_creation_date = False # do not use dir creation data, but now\n",
     "no_non_linear_corrections = False # do not apply non-linear corrections\n",
@@ -135,7 +135,7 @@
     "QUADRANTS = 4\n",
     "MODULES_PER_QUAD = 4\n",
     "DET_FILE_INSET = \"LPD\"\n",
-    "CHUNK_SIZE = 512\n",
+    "CHUNK_SIZE = 10\n",
     "MAX_PAR = 32\n",
     " \n",
     "out_folder = \"{}/r{:04d}\".format(out_folder, run)\n",
@@ -329,49 +329,55 @@
     "    signal_edges = None\n",
     "    when = None\n",
     "    qm = None\n",
+    "    err = None\n",
     "    try:\n",
     "        start = datetime.now()\n",
     "        success = True\n",
     "        reason = \"\"\n",
     "        filename, filename_out, channel, qm = inp\n",
-    "        \n",
     "\n",
     "        infile = h5py.File(filename, \"r\", driver=\"core\")\n",
     "        outfile = h5py.File(filename_out, \"w\")\n",
-    "        \n",
-    "        lpd_corr = LpdCorrections(infile, outfile, max_cells, channel, max_pulses,\n",
-    "                                  bins_gain_vs_signal, bins_signal_low_range,\n",
-    "                                  bins_signal_high_range, do_ff=do_ff, raw_fmt_version=index_v,\n",
-    "                                  correct_non_linear=(not no_non_linear_corrections),\n",
-    "                                  mark_non_lin_region=mark_non_lin_region, linear_between=linear_between,\n",
-    "                                  nlc_version=nlc_version)\n",
-    "        \n",
     "        try:\n",
-    "            lpd_corr.get_valid_image_idx() \n",
-    "        except IOError:\n",
-    "            return\n",
-    "        if not nodb:\n",
-    "            when = lpd_corr.initialize_from_db(dbparms, qm, only_dark=(fileparms != \"\"))\n",
-    "        if fileparms != \"\":\n",
-    "            lpd_corr.initialize_from_file(fileparms, qm, with_dark=nodb)\n",
-    "        print(\"Initialized constants\")\n",
+    "            lpd_corr = LpdCorrections(infile, outfile, max_cells, channel, max_pulses,\n",
+    "                                      bins_gain_vs_signal, bins_signal_low_range,\n",
+    "                                      bins_signal_high_range, do_ff=do_ff, raw_fmt_version=index_v,\n",
+    "                                      correct_non_linear=(not no_non_linear_corrections),\n",
+    "                                      mark_non_lin_region=mark_non_lin_region, linear_between=linear_between,\n",
+    "                                      nlc_version=nlc_version)\n",
+    "\n",
     "        \n",
-    "        for irange in lpd_corr.get_iteration_range():\n",
-    "            lpd_corr.correct_lpd(irange)\n",
-    "        \n",
-    "        print(\"All interations finished\")\n",
-    "        hists, edges = lpd_corr.get_histograms()\n",
-    "        hists_signal_low, hists_signal_high, hists_gain_vs_signal = hists\n",
-    "        low_edges, high_edges, signal_edges = edges\n",
-    "        outfile.close()\n",
-    "        infile.close()\n",
-    "        print(\"Closed files\")\n",
+    "            try:\n",
+    "                lpd_corr.get_valid_image_idx()\n",
+    "            except IOError:\n",
+    "                return\n",
+    "            if not nodb:\n",
+    "                when = lpd_corr.initialize_from_db(dbparms, qm, only_dark=(fileparms != \"\"))\n",
+    "            if fileparms != \"\":\n",
+    "                lpd_corr.initialize_from_file(fileparms, qm, with_dark=nodb)\n",
+    "            print(\"Initialized constants\")\n",
+    "\n",
+    "            for irange in lpd_corr.get_iteration_range():\n",
+    "                lpd_corr.correct_lpd(irange)\n",
+    "\n",
+    "            print(\"All iterations finished\")\n",
+    "            hists, edges = lpd_corr.get_histograms()\n",
+    "            hists_signal_low, hists_signal_high, hists_gain_vs_signal = hists\n",
+    "            low_edges, high_edges, signal_edges = edges\n",
+    "            outfile.close()\n",
+    "            infile.close()\n",
+    "            print(\"Closed files\")\n",
+    "        except Exception as e1:\n",
+    "            err = e1\n",
+    "            outfile.close()\n",
+    "            infile.close()\n",
     "        \n",
     "    except Exception as e:\n",
     "        print(e)\n",
     "        success = False\n",
     "        reason = \"Error\"\n",
-    "        \n",
+    "        err = e\n",
+    "\n",
     "    finally:\n",
     "        run = re.findall(r'.*r([0-9]{4}).*', filename)[0]\n",
     "        proposal = re.findall(r'.*p([0-9]{6}).*', filename)[0]\n",
@@ -380,7 +386,7 @@
     "        duration = (datetime.now()-start).total_seconds()\n",
     "        influx = create_influx_entry(run, proposal, qm, sequence, filesize, CHUNK_SIZE, total_sequences, success, duration, reason)\n",
     "        #client.write_points([influx])\n",
-    "    return hists_signal_low, hists_signal_high, hists_gain_vs_signal, low_edges, high_edges, signal_edges, when, qm\n",
+    "    return hists_signal_low, hists_signal_high, hists_gain_vs_signal, low_edges, high_edges, signal_edges, when, qm, err\n",
     "    \n",
     "done = False\n",
     "first_files = []\n",
@@ -430,8 +436,10 @@
     "    \n",
     "    for rr in r:\n",
     "        if rr is not None:\n",
-    "            hl, hh, hg, low_edges, high_edges, signal_edges, when, qm = rr\n",
-    "            whens[qm] = when\n",
+    "            hl, hh, hg, low_edges, high_edges, signal_edges, when, qm, err = rr\n",
+    "            whens[qm] = {}\n",
+    "            whens[qm]['when'] = when\n",
+    "            whens[qm]['err'] = err\n",
     "            if hl is not None: # any one being None will also make the others None\n",
     "                hists_signal_low += hl.astype(np.float64)\n",
    "                hists_signal_high += hh.astype(np.float64)\n",
@@ -448,10 +456,12 @@
    "outputs": [],
    "source": [
     "\n",
-    "print(\"Offset where injected on: \")\n",
-    "for qm, when in whens.items():\n",
-    "    \n",
-    "    print(\"{}: {}\".format(qm, when))"
+    "print(\"Offset was injected on: \")\n",
+    "for k, v in whens.items():\n",
+    "    if v['err'] is None:\n",
+    "        print(\"{}: {}\".format(k, v['when']))\n",
+    "    else:\n",
+    "        print(\"{}: {}: {}\".format(k, v['when'], v['err']))"
    ]
   },
  {