{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# AGIPD Offline Correction #\n", "\n", "Author: European XFEL Detector Group, Version: 1.0\n", "\n", "Offline Calibration for the AGIPD Detector" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-21T11:30:06.730220Z", "start_time": "2019-02-21T11:30:06.658286Z" } }, "outputs": [], "source": [ "in_folder = \"/gpfs/exfel/exp/MID/201931/p900107/raw\" # the folder to read data from, required\n", "run = 11 # runs to process, required\n", "out_folder = \"/gpfs/exfel/data/scratch/ahmedk/test/AGIPD_Corr\" # the folder to output to, required\n", "calfile = \"/gpfs/exfel/data/scratch/haufs/agipd_on_demand/agipd_store_mid.h5\" # path to calibration file. Leave empty if all data should come from DB\n", "sequences = [-1] # sequences to correct, set to -1 for all, range allowed\n", "mem_cells = 0 # number of memory cells used, set to 0 to automatically infer\n", "interlaced = False # whether data is in interlaced layout\n", "overwrite = True # set to True if existing data should be overwritten\n", "relative_gain = False # do relative gain correction\n", "cluster_profile = \"noDB\"\n", "max_pulses = [0, 500, 1] # range list [st, end, step] of maximum pulse indices. 3 allowed maximum list input elements. \n", "local_input = False\n", "bias_voltage = 300\n", "cal_db_interface = \"tcp://max-exfl016:8015#8045\" # the database interface to use\n", "use_dir_creation_date = True # use the creation data of the input dir for database queries\n", "sequences_per_node = 2 # number of sequence files per cluster node if run as slurm job, set to 0 to not run SLURM parallel\n", "photon_energy = 9.2 # photon energy in keV\n", "index_v = 2 # version of RAW index type\n", "nodb = False # if set only file-based constants will be used\n", "blc_noise_threshold = 5000 # above this mean signal intensity now baseline correction via noise is attempted\n", "blc_hist = False # if set, base line correction via histogram matching is attempted \n", "corr_asic_diag = False # if set, diagonal drop offs on ASICs are correted \n", "melt_snow = \"\" # if set to \"none\" snowy pixels are identified and resolved to NaN, if set to \"interpolate\", the value is interpolated from neighbouring pixels\n", "cal_db_timeout = 30000 # in milli seconds\n", "max_cells_db_dark = 0 # set to a value different than 0 to use this value for dark data DB queries\n", "max_cells_db = 0 # set to a value different than 0 to use this value for DB queries\n", "chunk_size_idim = 1 # chunking size of imaging dimension, adjust if user software is sensitive to this.\n", "creation_date_offset = \"00:00:00\" # add an offset to creation date, e.g. to get different constants\n", "instrument = \"MID\" # the instrument the detector is installed at, required\n", "force_hg_if_below = 1000 # set to a value other than 0 to force a pixel into high gain if it's high gain offset subtracted value is below this threshold\n", "force_mg_if_below = 1000 # set to a value other than 0 to force a pixel into medium gain if it's medium gain offset subtracted value is below this threshold\n", "mask_noisy_adc = 0.25 # set to a value other than 0 and below 1 to mask entire ADC if fraction of noisy pixels is above\n", "acq_rate = 0. # the detector acquisition rate, use 0 to try to auto-determine\n", "\n", "# Correction Booleans\n", "only_offset = False # Apply only Offset correction.\n", "pc_corr = False # Apply only Pulse Capictor correction.\n", "ff_corr = False # Apply only Flat Field correction.\n", "blc_noise = False # if set, baseline correction via noise peak location is attempted\n", "match_asics = False # if set, inner ASIC borders are matched to the same signal level\n", "adjust_mg_baseline = False # adjust medium gain baseline to match highest high gain value\n", "dont_zero_nans = False # do not zero NaN values in corrected data\n", "dont_zero_orange = False # do not zero very negative and very large values\n", "\n", "def balance_sequences(in_folder, run, sequences, sequences_per_node):\n", " import glob\n", " import re\n", " import numpy as np\n", " if sequences[0] == -1:\n", " sequence_files = glob.glob(\"{}/r{:04d}/*-S*.h5\".format(in_folder, run))\n", " seq_nums = set()\n", " for sf in sequence_files:\n", " seqnum = re.findall(r\".*-S([0-9]*).h5\", sf)[0]\n", " seq_nums.add(int(seqnum))\n", " seq_nums -= set(sequences)\n", " else:\n", " seq_nums = set(sequences)\n", " nsplits = len(seq_nums)//sequences_per_node+1\n", " while nsplits > 32:\n", " sequences_per_node += 1\n", " nsplits = len(seq_nums)//sequences_per_node+1\n", " print(\"Changed to {} sequences per node to have a maximum of 8 concurrent jobs\".format(sequences_per_node))\n", " return [l.tolist() for l in np.array_split(list(seq_nums), nsplits) if l.size > 0]\n", " " ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Fill dictionaries comprising bools and arguments for correction and data analysis\n", "\n", "# Here the herarichy and dependability for correction booleans are defined \n", "corr_bools = {}\n", "\n", "# offset is at the bottom of AGIPD correction pyramid.\n", "corr_bools[\"only_offset\"] = only_offset\n", "\n", "# Dont apply any corrections if only_offset is requested \n", "if not only_offset:\n", " \n", " # Apply PC correction only if requested\n", " if pc_corr:\n", " corr_bools[\"pc_corr\"] = pc_corr\n", " \n", " # Apply FF correction only if requested\n", " if ff_corr:\n", " corr_bools[\"ff_corr\"] = ff_corr\n", " \n", " corr_bools[\"adjust_mg_baseline\"] = adjust_mg_baseline\n", " corr_bools[\"do_rel_gain\"] = relative_gain\n", " corr_bools[\"blc_noise\"] = blc_noise\n", " corr_bools[\"match_asics\"] = match_asics\n", " corr_bools[\"corr_asic_diag\"] = corr_asic_diag\n", " corr_bools[\"dont_zero_nans\"] = dont_zero_nans\n", " corr_bools[\"dont_zero_orange\"] = dont_zero_orange\n", "\n", "# Here the herarichy and dependability for data analysis booleans and arguments are defined \n", "data_analysis_parms = {}" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-21T11:30:07.086286Z", "start_time": "2019-02-21T11:30:06.929722Z" } }, "outputs": [], "source": [ "import sys\n", "from collections import OrderedDict\n", "\n", "# make sure a cluster is running with ipcluster start --n=32, give it a while to start\n", "import os\n", "import h5py\n", "import numpy as np\n", "import matplotlib\n", "matplotlib.use(\"agg\")\n", "import matplotlib.pyplot as plt\n", "from ipyparallel import Client\n", "print(\"Connecting to profile {}\".format(cluster_profile))\n", "view = Client(profile=cluster_profile)[:]\n", "view.use_dill()\n", "\n", "from iCalibrationDB import ConstantMetaData, Constants, Conditions, Detectors, Versions\n", "from cal_tools.tools import (gain_map_files, parse_runs, run_prop_seq_from_path, get_notebook_name,\n", " get_dir_creation_date, get_constant_from_db)\n", "\n", "from dateutil import parser\n", "from datetime import timedelta\n", "\n", "il_mode = interlaced\n", "max_cells = mem_cells\n", "gains = np.arange(3)\n", "cells = np.arange(max_cells)\n", "\n", "creation_time = None\n", "if use_dir_creation_date:\n", " creation_time = get_dir_creation_date(in_folder, run)\n", " offset = parser.parse(creation_date_offset)\n", " delta = timedelta(hours=offset.hour, minutes=offset.minute, seconds=offset.second)\n", " creation_time += delta\n", " print(\"Using {} as creation time\".format(creation_time))\n", "\n", "in_folder = \"{}/r{:04d}\".format(in_folder, run)\n", "\n", "print(\"Working in IL Mode: {}. Actual cells in use are: {}\".format(il_mode, max_cells))\n", "\n", "if sequences[0] == -1:\n", " sequences = None\n", "\n", "QUADRANTS = 4\n", "MODULES_PER_QUAD = 4\n", "DET_FILE_INSET = \"AGIPD\"\n", "CHUNK_SIZE = 250\n", "MAX_PAR = 32\n", "\n", "if in_folder[-1] == \"/\":\n", " in_folder = in_folder[:-1]\n", "print(\"Outputting to {}\".format(out_folder))\n", "\n", "if not os.path.exists(out_folder):\n", " os.makedirs(out_folder)\n", "elif not overwrite:\n", " raise AttributeError(\"Output path exists! Exiting\")\n", "\n", "import warnings\n", "warnings.filterwarnings('ignore')\n", "\n", "from cal_tools.agipdlib import SnowResolution\n", "melt_snow = False if melt_snow == \"\" else SnowResolution(melt_snow)\n", "\n", "special_opts = blc_noise_threshold, blc_hist, melt_snow\n", "\n", "loc = None\n", "if instrument == \"SPB\":\n", " loc = \"SPB_DET_AGIPD1M-1\"\n", " dinstance = \"AGIPD1M1\"\n", "else:\n", " loc = \"MID_DET_AGIPD1M-1\"\n", " dinstance = \"AGIPD1M2\"\n", "print(\"Detector in use is {}\".format(loc)) " ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-21T11:30:07.263445Z", "start_time": "2019-02-21T11:30:07.217070Z" } }, "outputs": [], "source": [ "def combine_stack(d, sdim):\n", " combined = np.zeros((sdim, 2048, 2048))\n", " combined[...] = np.nan\n", " \n", " dy = 0\n", " for i in range(16):\n", " \n", " try:\n", " if i < 8:\n", " dx = -512\n", " if i > 3:\n", " dx -= 25\n", " mx = 1\n", " my = i % 8\n", " combined[:, my*128+dy:(my+1)*128+dy,\n", " mx*512-dx:(mx+1)*512-dx] = np.rollaxis(d[i],2,1)[:,:,::-1]\n", " dy += 30\n", " if i == 3:\n", " dy += 30\n", "\n", " elif i < 12:\n", " dx = 80 - 50\n", " if i == 8:\n", " dy = 4*30 + 30 +50 -30\n", "\n", " mx = 1\n", " my = i % 8 +4\n", " combined[:, my*128+dy:(my+1)*128+dy,\n", " mx*512-dx:(mx+1)*512-dx] = np.rollaxis(d[i],2,1)[:,::-1,:]\n", " dy += 30\n", " else:\n", " dx = 100 - 50\n", " if i == 11:\n", " dy = 20\n", "\n", " mx = 1\n", " my = i - 14\n", "\n", " combined[:, my*128+dy:(my+1)*128+dy,\n", " mx*512-dx:(mx+1)*512-dx] = np.rollaxis(d[i],2,1)[:,::-1,:]\n", " dy += 30\n", " except:\n", " continue\n", " \n", " return combined" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-21T11:30:07.974174Z", "start_time": "2019-02-21T11:30:07.914832Z" } }, "outputs": [], "source": [ "# set everything up filewise\n", "from queue import Queue\n", "from collections import OrderedDict\n", "\n", "def map_modules_from_files(filelist):\n", " module_files = OrderedDict()\n", " mod_ids = OrderedDict()\n", " total_sequences = 0\n", " sequences_qm = {}\n", " one_module = None\n", " for quadrant in range(0, QUADRANTS):\n", " for module in range(0, MODULES_PER_QUAD):\n", " name = \"Q{}M{}\".format(quadrant + 1, module + 1)\n", " module_files[name] = Queue()\n", " num = quadrant * 4 + module\n", " mod_ids[name] = num\n", " file_infix = \"{}{:02d}\".format(DET_FILE_INSET, num)\n", " sequences_qm[name] = 0\n", " for file in filelist:\n", " if file_infix in file:\n", " if not one_module:\n", " one_module = file, num\n", " module_files[name].put(file)\n", " total_sequences += 1\n", " sequences_qm[name] += 1\n", " \n", " return module_files, mod_ids, total_sequences, sequences_qm, one_module\n", "\n", "dirlist = sorted(os.listdir(in_folder))\n", "file_list = []\n", "\n", "\n", "for entry in dirlist:\n", " #only h5 file\n", " abs_entry = \"{}/{}\".format(in_folder, entry)\n", " if os.path.isfile(abs_entry) and os.path.splitext(abs_entry)[1] == \".h5\":\n", " \n", " if sequences is None:\n", " file_list.append(abs_entry)\n", " else:\n", " for seq in sequences:\n", " if \"{:05d}.h5\".format(seq) in abs_entry:\n", " file_list.append(os.path.abspath(abs_entry))\n", " \n", "mapped_files, mod_ids, total_sequences, sequences_qm, one_module = map_modules_from_files(file_list)\n", "MAX_PAR = min(MAX_PAR, total_sequences)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Processed Files ##" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-21T11:30:08.870802Z", "start_time": "2019-02-21T11:30:08.826285Z" } }, "outputs": [], "source": [ "import copy\n", "from IPython.display import HTML, display, Markdown, Latex\n", "import tabulate\n", "print(\"Processing a total of {} sequence files in chunks of {}\".format(total_sequences, MAX_PAR))\n", "table = []\n", "mfc = copy.copy(mapped_files)\n", "ti = 0\n", "for k, files in mfc.items():\n", " i = 0\n", " while not files.empty():\n", " f = files.get()\n", " if i == 0:\n", " table.append((ti, k, i, f))\n", " else:\n", " table.append((ti, \"\", i, f))\n", " i += 1\n", " ti += 1\n", "md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=[\"#\", \"module\", \"# module\", \"file\"]))) \n", "# restore the queue\n", "mapped_files, mod_ids, total_sequences, sequences_qm, one_module = map_modules_from_files(file_list)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-21T11:30:16.057429Z", "start_time": "2019-02-21T11:30:10.082114Z" }, "scrolled": false }, "outputs": [], "source": [ "import copy\n", "from functools import partial\n", "def correct_module(max_cells, index_v, CHUNK_SIZE, total_sequences, sequences_qm, \n", " bins_gain_vs_signal, bins_signal_low_range, bins_signal_high_range,\n", " bins_dig_gain_vs_signal, max_pulses, dbparms, fileparms, nodb, chunk_size_idim,\n", " special_opts, il_mode, loc, dinstance, force_hg_if_below, force_mg_if_below,\n", " mask_noisy_adc, acq_rate, corr_bools, inp):\n", " print(\"foo\")\n", " import numpy as np\n", " import copy\n", " import h5py\n", " import socket\n", " from datetime import datetime\n", " import re\n", " import os\n", " from influxdb import InfluxDBClient\n", " from cal_tools.enums import BadPixels\n", " from cal_tools.agipdlib import AgipdCorrections, SnowResolution\n", " from cal_tools.agipdlib import get_num_cells, get_acq_rate\n", " \n", " \n", " #client = InfluxDBClient('exflqr18318', 8086, 'root', 'root', 'calstats')\n", "\n", " def create_influx_entry(run, proposal, qm, sequence, filesize, chunksize,\n", " total_sequences, success, runtime, reason=\"\"):\n", " return {\n", " \"measurement\": \"run_correction\",\n", " \"tags\": {\n", " \"host\": socket.gethostname(),\n", " \"run\": run,\n", " \"proposal\": proposal,\n", " \"mem_cells\": max_cells,\n", " \"sequence\": sequence,\n", " \"module\": qm,\n", " \"filesize\": filesize,\n", " \"chunksize\": chunksize,\n", " \"total_sequences\": total_sequences,\n", " \"sequences_module\": sequences_qm[qm],\n", " \"detector\": \"AGIPD\",\n", " \"instrument\": \"SPB\",\n", " \n", " },\n", " \"time\": datetime.utcnow().isoformat(),\n", " \"fields\": {\n", " \"success\": success,\n", " \"reason\": reason,\n", " \"runtime\": runtime, \n", " }\n", " }\n", " \n", " hists_signal_low = None\n", " hists_signal_high = None\n", " hists_gain_vs_signal = None\n", " hists_dig_gain_vs_signal = None\n", " hist_pulses = None\n", " low_edges = None\n", " high_edges = None\n", " signal_edges = None\n", " dig_signal_edges = None\n", " gain_stats = 0\n", " when = None\n", " err = None\n", " \n", " try:\n", " start = datetime.now()\n", " success = True\n", " reason = \"\"\n", " filename, filename_out, channel, qm = inp\n", " print(\"Have input\")\n", " \n", " if max_cells == 0:\n", " max_cells = get_num_cells(filename, loc, channel)\n", " if max_cells is None:\n", " raise ValueError(\"No raw images found for {}\".format(qm))\n", " else:\n", " cells = np.arange(max_cells)\n", " \n", " if acq_rate == 0.:\n", " acq_rate = get_acq_rate(filename, loc, channel)\n", " else:\n", " acq_rate = None\n", "\n", " if dbparms[2] == 0:\n", " dbparms[2] = max_cells\n", " if dbparms[5] == 0:\n", " dbparms[5] = dbparms[2]\n", "\n", " print(\"Set memory cells to {}\".format(max_cells))\n", " print(\"Set acquistion rate cells to {} MHz\".format(acq_rate))\n", "\n", "\n", " infile = h5py.File(filename, \"r\", driver=\"core\")\n", " outfile = h5py.File(filename_out, \"w\")\n", " try:\n", " agipd_corr = AgipdCorrections(infile, outfile, max_cells, channel, max_pulses,\n", " bins_gain_vs_signal, bins_signal_low_range,\n", " bins_signal_high_range, bins_dig_gain_vs_signal,\n", " chunk_size_idim=chunk_size_idim,\n", " il_mode=il_mode, raw_fmt_version=index_v, \n", " h5_data_path=\"INSTRUMENT/{}/DET/{{}}CH0:xtdf/\".format(loc),\n", " h5_index_path=\"INDEX/{}/DET/{{}}CH0:xtdf/\".format(loc),\n", " cal_det_instance=dinstance, force_hg_if_below=force_hg_if_below,\n", " force_mg_if_below=force_mg_if_below, mask_noisy_adc=mask_noisy_adc,\n", " acquisition_rate=acq_rate, corr_bools=corr_bools)\n", "\n", " blc_noise_threshold, blc_hist, melt_snow = special_opts\n", " if not corr_bools[\"only_offset\"]:\n", " blc_hist = False\n", " melt_snow = False\n", " agipd_corr.baseline_corr_noise_threshold = blc_noise_threshold\n", " agipd_corr.baseline_corr_using_hmatch = blc_hist\n", " agipd_corr.melt_snow = melt_snow\n", " try:\n", " agipd_corr.get_valid_image_idx()\n", " except IOError:\n", " return\n", " if not nodb:\n", " when = agipd_corr.initialize_from_db(dbparms, qm, only_dark=(fileparms != \"\"))\n", " if fileparms != \"\" and not corr_bools[\"only_offset\"]:\n", " agipd_corr.initialize_from_file(fileparms, qm, with_dark=nodb)\n", " print(\"Initialized constants\")\n", "\n", " for irange in agipd_corr.get_iteration_range():\n", " agipd_corr.correct_agipd(irange)\n", " print(\"Iterated\")\n", "\n", " print(\"All iterations are finished\")\n", " hists, edges = agipd_corr.get_histograms()\n", " hists_signal_low, hists_signal_high, hists_gain_vs_signal, hists_dig_gain_vs_signal, hist_pulses = hists\n", " low_edges, high_edges, signal_edges, dig_signal_edges = edges\n", " gain_stats = np.array(agipd_corr.gain_stats)\n", " finally:\n", " outfile.close()\n", " infile.close()\n", " print(\"Closed files\")\n", " \n", " except Exception as e:\n", " print(e)\n", " err = e\n", " success = False\n", " reason = \"Error\"\n", " \n", " finally:\n", " run = re.findall(r'.*r([0-9]{4}).*', filename)[0]\n", " proposal = re.findall(r'.*p([0-9]{6}).*', filename)[0]\n", " sequence = re.findall(r'.*S([0-9]{5}).*', filename)[0]\n", " filesize = os.path.getsize(filename)\n", " duration = (datetime.now()-start).total_seconds()\n", " #influx = create_influx_entry(run, proposal, qm, sequence, filesize, CHUNK_SIZE, total_sequences, success, duration, reason)\n", " #client.write_points([influx])\n", " return (hists_signal_low, hists_signal_high, hists_gain_vs_signal, hists_dig_gain_vs_signal, hist_pulses,\n", " low_edges, high_edges, signal_edges, dig_signal_edges, gain_stats, max_cells, when, qm, err)\n", " \n", "done = False\n", "first_files = []\n", "inp = []\n", "left = total_sequences\n", "\n", "# Display Information about the selected pulses indices for correction.\n", "pulses_lst = list(range(*max_pulses)) if not (len(max_pulses)==1 and max_pulses[0]==0) else max_pulses \n", "\n", "try:\n", " if len(pulses_lst) > 1: \n", " print(\"A range of {} pulse indices is selected: from {} to {} with a step of {}\"\n", " .format(len(pulses_lst), pulses_lst[0] , pulses_lst[-1] + (pulses_lst[1] - pulses_lst[0]),\n", " pulses_lst[1] - pulses_lst[0]))\n", " else:\n", " print(\"one pulse is selected: a pulse of idx {}\".format(pulses_lst[0]))\n", "except Exception as e:\n", " raise ValueError('max_pulses input Error: {}'.format(e))\n", " \n", "bins_gain_vs_signal = (100, 100)\n", "bins_signal_low_range = 100\n", "bins_signal_high_range = 100\n", "bins_dig_gain_vs_signal = (100, 4)\n", "\n", "hists_gain_vs_signal = np.zeros((bins_gain_vs_signal), np.float64)\n", "hists_dig_gain_vs_signal = np.zeros((bins_dig_gain_vs_signal), np.float64)\n", "gain_stats = 0\n", "\n", "low_edges, high_edges, signal_edges, dig_signal_edges = None, None, None, None\n", "dbparms = [cal_db_interface, creation_time, max_cells_db, bias_voltage,\n", " photon_energy, max_cells_db_dark, cal_db_timeout]\n", "\n", "fileparms = calfile\n", "\n", "all_cells = []\n", "whens = []\n", "errors = []\n", "while not done:\n", " \n", " dones = []\n", " first = True\n", " for i in range(16):\n", " qm = \"Q{}M{}\".format(i//4 +1, i % 4 + 1)\n", " if qm in mapped_files and not mapped_files[qm].empty():\n", " fname_in = str(mapped_files[qm].get())\n", " dones.append(mapped_files[qm].empty())\n", " else:\n", " print(\"Skipping {}\".format(qm))\n", " first_files.append((None, None))\n", " continue\n", " fout = os.path.abspath(\"{}/{}\".format(out_folder, (os.path.split(fname_in)[-1]).replace(\"RAW\", \"CORR\")))\n", " if first:\n", " first_files.append((fname_in, fout))\n", " inp.append((fname_in, fout, i, qm))\n", " first = False\n", " if len(inp) >= min(MAX_PAR, left):\n", " print(\"Running {} tasks parallel\".format(len(inp)))\n", " p = partial(correct_module, max_cells, index_v, CHUNK_SIZE, total_sequences,\n", " sequences_qm, bins_gain_vs_signal, bins_signal_low_range, bins_signal_high_range,\n", " bins_dig_gain_vs_signal, max_pulses, dbparms, fileparms, nodb, chunk_size_idim,\n", " special_opts, il_mode, loc, dinstance, force_hg_if_below, force_mg_if_below,\n", " mask_noisy_adc, acq_rate, corr_bools)\n", "\n", " r = view.map_sync(p, inp)\n", "\n", " #r = list(map(p, inp))\n", "\n", " inp = []\n", " left -= MAX_PAR\n", "\n", " init_hist = False\n", " for rr in r:\n", " if rr is not None:\n", " hl, hh, hg, hdg, hp, low_edges, high_edges, signal_edges, dig_signal_edges, gs, cells, when, qm, err = rr\n", " all_cells.append(cells)\n", " whens.append((qm, when))\n", " errors.append(err)\n", " if not init_hist:\n", " hists_signal_low = np.zeros((bins_signal_low_range, hp), np.float64)\n", " hists_signal_high = np.zeros((bins_signal_low_range, hp), np.float64)\n", " init_hist = True\n", " if hl is not None: # any one being None will also make the others None\n", " hists_signal_low += hl.astype(np.float64)\n", " hists_signal_high += hh.astype(np.float64)\n", " hists_gain_vs_signal += hg.astype(np.float64)\n", " hists_dig_gain_vs_signal += hdg.astype(np.float64)\n", " gain_stats += gs\n", " \n", " done = all(dones)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "print(\"Constants were injected on: \")\n", "to_store = []\n", "for i, (qm, when) in enumerate(whens):\n", " print(qm)\n", " line = [qm]\n", " # If correction is crashed\n", " if errors[i] is not None:\n", " print(\"Error: {}\".format(errors[i]) )\n", " else:\n", " for key, item in when.items():\n", " if hasattr(item, 'strftime'):\n", " item = item.strftime('%y-%m-%d %H:%M')\n", " # If constant retrieval is crashed\n", " else:\n", " item = 'None'\n", " when[key] = item\n", " print('{:.<12s}'.format(key), item)\n", " \n", " # Store few time stamps if exists\n", " # Add NA to keep array structure\n", " for key in ['offset', 'slopesPC', 'slopesFF']:\n", " if when and key in when:\n", " line.append(when[key])\n", " else:\n", " if errors[i] is not None:\n", " line.append('Err')\n", " else:\n", " line.append('NA')\n", " to_store.append(line)\n", "\n", "np.savetxt(\"{}/tmp_const_beginat_S{:05d}.csv\".format(out_folder, sequences[0]), \n", " np.array(to_store).astype(str), fmt='%s', delimiter = ',')" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:28:51.765030Z", "start_time": "2019-02-18T17:28:51.714783Z" } }, "outputs": [], "source": [ "from mpl_toolkits.mplot3d import Axes3D\n", "import matplotlib.pyplot as plt\n", "from matplotlib import cm\n", "from matplotlib.ticker import LinearLocator, FormatStrFormatter\n", "import numpy as np\n", "%matplotlib inline\n", "def do_3d_plot(data, edges, x_axis, y_axis):\n", " fig = plt.figure(figsize=(10,10))\n", " ax = fig.gca(projection='3d')\n", "\n", " # Make data.\n", " X = edges[0][:-1]\n", " Y = edges[1][:-1]\n", " X, Y = np.meshgrid(X, Y)\n", " \n", " Z = data.T\n", "\n", " # Plot the surface.\n", " surf = ax.plot_surface(X, Y, Z, cmap=cm.coolwarm,\n", " linewidth=0, antialiased=False)\n", " ax.set_xlabel(x_axis)\n", " ax.set_ylabel(y_axis)\n", " ax.set_zlabel(\"Counts\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Signal vs. Analogue Gain ##\n", "\n", "The following plot shows plots signal vs. gain for the first 128 images." ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:28:52.857960Z", "start_time": "2019-02-18T17:28:51.767217Z" } }, "outputs": [], "source": [ "do_3d_plot(hists_gain_vs_signal, signal_edges, \"Signal (ADU)\", \"Analogue gain (ADU)\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:28:53.690522Z", "start_time": "2019-02-18T17:28:52.860143Z" } }, "outputs": [], "source": [ "def do_2d_plot(data, edges, y_axis, x_axis):\n", " from matplotlib.colors import LogNorm\n", " fig = plt.figure(figsize=(10,10))\n", " ax = fig.add_subplot(111)\n", " extent = [np.min(edges[1]), np.max(edges[1]),np.min(edges[0]), np.max(edges[0])]\n", " im = ax.imshow(data[::-1,:], extent=extent, aspect=\"auto\", norm=LogNorm(vmin=1, vmax=np.max(data)))\n", " ax.set_xlabel(x_axis)\n", " ax.set_ylabel(y_axis)\n", " cb = fig.colorbar(im)\n", " cb.set_label(\"Counts\")\n", " \n", " \n", "do_2d_plot(hists_gain_vs_signal, signal_edges, \"Signal (ADU)\", \"Gain Value (ADU)\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Signal vs. Digitized Gain ##\n", "\n", "The following plot shows plots signal vs. digitized gain for the first 128 images." ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:28:54.370559Z", "start_time": "2019-02-18T17:28:53.691959Z" } }, "outputs": [], "source": [ "do_2d_plot(hists_dig_gain_vs_signal, dig_signal_edges, \"Signal (ADU)\", \"Gain Bit Value\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:31:51.668096Z", "start_time": "2019-02-18T17:31:51.529158Z" } }, "outputs": [], "source": [ "fig, ax = plt.subplots()\n", "ax.pie(gain_stats, labels=[\"high\", \"medium\", \"low\"], autopct='%1.2f%%',\n", " shadow=True, startangle=27)\n", "a = ax.axis('equal') # Equal aspect ratio ensures that pie is drawn as a circle." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Mean Intensity per Pulse ##\n", "\n", "The following plots show the mean signal for each pulse in a detailed and expanded intensity region." ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:28:57.327702Z", "start_time": "2019-02-18T17:28:54.377061Z" }, "scrolled": false }, "outputs": [], "source": [ "do_3d_plot(hists_signal_low, low_edges, \"Signal (ADU)\", \"Pulse id\")\n", "do_2d_plot(hists_signal_low, low_edges, \"Signal (ADU)\", \"Pulse id\")\n", "do_3d_plot(hists_signal_high, high_edges, \"Signal (ADU)\", \"Pulse id\")\n", "do_2d_plot(hists_signal_high, high_edges, \"Signal (ADU)\", \"Pulse id\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:29:20.634480Z", "start_time": "2019-02-18T17:28:57.329231Z" } }, "outputs": [], "source": [ "\n", "corrected = []\n", "raw = []\n", "gains = []\n", "mask = []\n", "cell_fac = 1\n", "first_idx = 0\n", "last_idx = cell_fac*176+first_idx\n", "pulse_ids = []\n", "train_ids = []\n", "for i, ff in enumerate(first_files[:16]):\n", " try:\n", "\n", " rf, cf = ff\n", " #print(cf, i)\n", " if rf is None:\n", " \n", " raise Exception(\"File not present\")\n", " #print(rf)\n", " infile = h5py.File(rf, \"r\")\n", " #print(\"/INSTRUMENT/{}_DET_AGIPD1M-1/DET/{}CH0:xtdf/image/data\".format(instrument, i))\n", " raw.append(np.array(infile[\"/INSTRUMENT/{}_DET_AGIPD1M-1/DET/{}CH0:xtdf/image/data\".format(instrument, i)][first_idx:last_idx,0,...]))\n", " infile.close()\n", " \n", " infile = h5py.File(cf, \"r\")\n", " #print(\"/INSTRUMENT/SPB_DET_AGIPD1M-1/DET/{}CH0:xtdf/image/data\".format(i))\n", " corrected.append(np.array(infile[\"/INSTRUMENT/{}_DET_AGIPD1M-1/DET/{}CH0:xtdf/image/data\".format(instrument, i)][first_idx:last_idx,...]))\n", " gains.append(np.array(infile[\"/INSTRUMENT/{}_DET_AGIPD1M-1/DET/{}CH0:xtdf/image/gain\".format(instrument, i)][first_idx:last_idx,...]))\n", " mask.append(np.array(infile[\"/INSTRUMENT/{}_DET_AGIPD1M-1/DET/{}CH0:xtdf/image/mask\".format(instrument, i)][first_idx:last_idx,...]))\n", " pulse_ids.append(np.squeeze(infile[\"/INSTRUMENT/{}_DET_AGIPD1M-1/DET/{}CH0:xtdf/image/pulseId\".format(instrument, i)][first_idx:last_idx,...]))\n", " train_ids.append(np.squeeze(infile[\"/INSTRUMENT/{}_DET_AGIPD1M-1/DET/{}CH0:xtdf/image/trainId\".format(instrument, i)][first_idx:last_idx,...]))\n", " infile.close()\n", " \n", " except Exception as e:\n", " print(e)\n", " corrected.append(np.zeros((last_idx-first_idx, 512, 128)))\n", " gains.append(np.zeros((last_idx-first_idx, 512, 128)))\n", " mask.append(np.zeros((last_idx-first_idx, 512, 128)))\n", " raw.append(np.zeros((last_idx-first_idx, 512, 128)))\n", " " ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:29:27.025667Z", "start_time": "2019-02-18T17:29:20.642029Z" } }, "outputs": [], "source": [ "domask = False\n", "if domask:\n", " for i, c in enumerate(corrected):\n", " c[mask[i] != 0] = 0\n", " #c[pats[i] < 200] = 0\n", " #c[np.abs(pats[i]) == 1000] = np.abs(c[np.abs(pats[i]) == 1000])\n", "combined = combine_stack(corrected, last_idx-first_idx)\n", "combined_raw = combine_stack(raw, last_idx-first_idx)\n", "combined_g = combine_stack(gains, last_idx-first_idx)\n", "combined_mask = combine_stack(mask, last_idx-first_idx)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Mean RAW Preview ###\n", "\n", "The per pixel mean of the first 128 images of the RAW data" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:29:33.226396Z", "start_time": "2019-02-18T17:29:27.027758Z" } }, "outputs": [], "source": [ "%matplotlib inline\n", "fig = plt.figure(figsize=(20,10))\n", "ax = fig.add_subplot(111)\n", "im = ax.imshow(np.mean(combined_raw[:,:1300,400:1600],axis=0),\n", " vmin=min(0.75*np.median(combined_raw[combined_raw > 0]), 4000),\n", " vmax=max(1.5*np.median(combined_raw[combined_raw > 0]), 7000), cmap=\"jet\")\n", "cb = fig.colorbar(im, ax=ax)\n", "\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Single Shot Preview ###\n", "\n", "A single shot image from cell 12 of the first train" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:29:33.761015Z", "start_time": "2019-02-18T17:29:33.227922Z" } }, "outputs": [], "source": [ "\n", "fig = plt.figure(figsize=(20,10))\n", "ax = fig.add_subplot(111)\n", "dim = combined[1,:1300,400:1600]\n", "\n", "im = ax.imshow(dim, vmin=-0, vmax=max(20*np.median(dim[dim > 0]), 100), cmap=\"jet\", interpolation=\"nearest\")\n", "cb = fig.colorbar(im, ax=ax)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:29:35.903487Z", "start_time": "2019-02-18T17:29:33.762568Z" } }, "outputs": [], "source": [ "fig = plt.figure(figsize=(20,10))\n", "ax = fig.add_subplot(111)\n", "h = ax.hist(dim.flatten(), bins=1000, range=(0, 2000))\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Mean CORRECTED Preview ###\n", "\n", "The per pixel mean of the first 128 images of the CORRECTED data" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:29:39.369686Z", "start_time": "2019-02-18T17:29:35.905152Z" } }, "outputs": [], "source": [ "\n", "fig = plt.figure(figsize=(20,10))\n", "ax = fig.add_subplot(111)\n", "im = ax.imshow(np.mean(combined[:,:1300,400:1600], axis=0), vmin=-50,\n", " vmax=max(100*np.median(combined[combined > 0]), 100), cmap=\"jet\", interpolation=\"nearest\")\n", "cb = fig.colorbar(im, ax=ax)\n", "\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:29:49.217848Z", "start_time": "2019-02-18T17:29:39.371232Z" } }, "outputs": [], "source": [ "fig = plt.figure(figsize=(20,10))\n", "ax = fig.add_subplot(111)\n", "combined[combined <= 0] = 0\n", "h = ax.hist(combined.flatten(), bins=1000, range=(-50, 1000), log=True)\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:29:49.222484Z", "start_time": "2019-02-18T17:29:49.219933Z" } }, "outputs": [], "source": [ "#np.save('/gpfs/exfel/data/scratch/haufs/agipd_hist/prop_off_pcor_splits.npy', h)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Maximum GAIN Preview ###\n", "\n", "The per pixel maximum of the first 128 images of the digitized GAIN data" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:29:49.641675Z", "start_time": "2019-02-18T17:29:49.224167Z" } }, "outputs": [], "source": [ "\n", "fig = plt.figure(figsize=(20,10))\n", "ax = fig.add_subplot(111)\n", "im = ax.imshow(np.max(combined_g[1,:1300,400:1600][None,...], axis=0), vmin=0,\n", " vmax=3, cmap=\"jet\")\n", "cb = fig.colorbar(im, ax=ax)\n" ] }, { "cell_type": "markdown", "metadata": { "collapsed": true }, "source": [ "## Bad Pixels ##\n", "The mask contains dedicated entries for all pixels and memory cells as well as all three gains stages. Each mask entry is encoded in 32 bits as:" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:29:49.651913Z", "start_time": "2019-02-18T17:29:49.643556Z" } }, "outputs": [], "source": [ "from cal_tools.enums import BadPixels\n", "from IPython.display import HTML, display, Markdown, Latex\n", "import tabulate\n", "table = []\n", "for item in BadPixels:\n", " table.append((item.name, \"{:016b}\".format(item.value)))\n", "md = display(Latex(tabulate.tabulate(table, tablefmt='latex', headers=[\"Bad pixel type\", \"Bit mask\"])))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Single Shot Bad Pixels ###\n", "\n", "A single shot bad pixel map from cell 4 of the first train" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:29:50.086169Z", "start_time": "2019-02-18T17:29:49.653391Z" } }, "outputs": [], "source": [ "fig = plt.figure(figsize=(20,10))\n", "ax = fig.add_subplot(111)\n", "im = ax.imshow(np.log2(combined_mask[10,:1300,400:1600]), vmin=0,\n", " vmax=32, cmap=\"jet\")\n", "cb = fig.colorbar(im, ax=ax)" ] }, { "cell_type": "markdown", "metadata": { "collapsed": true }, "source": [ "### Full Train Bad Pixels ###" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:29:51.686562Z", "start_time": "2019-02-18T17:29:50.088883Z" } }, "outputs": [], "source": [ "fig = plt.figure(figsize=(20,10))\n", "ax = fig.add_subplot(111)\n", "im = ax.imshow(np.log2(np.max(combined_mask[:,:1300,400:1600], axis=0)), vmin=0,\n", " vmax=32, cmap=\"jet\")\n", "cb = fig.colorbar(im, ax=ax)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Full Train Bad Pixels - Only Dark Char. Related ###" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:29:53.662423Z", "start_time": "2019-02-18T17:29:51.688376Z" } }, "outputs": [], "source": [ "fig = plt.figure(figsize=(20,10))\n", "ax = fig.add_subplot(111)\n", "im = ax.imshow(np.max((combined_mask.astype(np.uint32)[:,:1300,400:1600] & BadPixels.NOISY_ADC.value) != 0, axis=0), vmin=0,\n", " vmax=1, cmap=\"jet\")\n", "cb = fig.colorbar(im, ax=ax)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2019-02-18T17:29:55.483270Z", "start_time": "2019-02-18T17:29:53.664226Z" } }, "outputs": [], "source": [ "from cal_tools.enums import BadPixels\n", "fig = plt.figure(figsize=(20,10))\n", "ax = fig.add_subplot(111)\n", "cm = combined_mask[:,:1300,400:1600]\n", "cm[cm > BadPixels.NO_DARK_DATA.value] = 0\n", "im = ax.imshow(np.log2(np.max(cm, axis=0)), vmin=0,\n", " vmax=32, cmap=\"jet\")\n", "cb = fig.colorbar(im, ax=ax)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.7" } }, "nbformat": 4, "nbformat_minor": 2 }